blob: aee3f8b39c05462f03ba70d23bf6f986084329f5 [file] [log] [blame]
James Kuszmaul38735e82019-12-07 16:42:06 -08001#include "aos/events/logging/logger.h"
Austin Schuhe309d2a2019-11-29 13:25:21 -08002
3#include <fcntl.h>
Austin Schuh4c4e0092019-12-22 16:18:03 -08004#include <limits.h>
Austin Schuhe309d2a2019-11-29 13:25:21 -08005#include <sys/stat.h>
6#include <sys/types.h>
7#include <sys/uio.h>
8#include <vector>
9
Austin Schuh8bd96322020-02-13 21:18:22 -080010#include "Eigen/Dense"
Austin Schuh2f8fd752020-09-01 22:38:28 -070011#include "absl/strings/escaping.h"
Austin Schuhe309d2a2019-11-29 13:25:21 -080012#include "absl/types/span.h"
13#include "aos/events/event_loop.h"
James Kuszmaul38735e82019-12-07 16:42:06 -080014#include "aos/events/logging/logger_generated.h"
Austin Schuh64fab802020-09-09 22:47:47 -070015#include "aos/events/logging/uuid.h"
Austin Schuhe309d2a2019-11-29 13:25:21 -080016#include "aos/flatbuffer_merge.h"
Austin Schuh288479d2019-12-18 19:47:52 -080017#include "aos/network/team_number.h"
Austin Schuhe309d2a2019-11-29 13:25:21 -080018#include "aos/time/time.h"
19#include "flatbuffers/flatbuffers.h"
Austin Schuh2f8fd752020-09-01 22:38:28 -070020#include "third_party/gmp/gmpxx.h"
Austin Schuhe309d2a2019-11-29 13:25:21 -080021
// Command-line flags controlling logging/replay behavior.  These are read at
// runtime by the logger and log reader code in this file.
DEFINE_bool(skip_missing_forwarding_entries, false,
            "If true, drop any forwarding entries with missing data. If "
            "false, CHECK.");

// Debug aid: dumps the clock-offset solver inputs/outputs so time
// synchronization problems can be inspected offline.
DEFINE_bool(timestamps_to_csv, false,
            "If true, write all the time synchronization information to a set "
            "of CSV files in /tmp/. This should only be needed when debugging "
            "time synchronization.");

// Escape hatch for replaying logs whose message ordering is known to be bad.
DEFINE_bool(skip_order_validation, false,
            "If true, ignore any out of orderness in replay");
33
Austin Schuhe309d2a2019-11-29 13:25:21 -080034namespace aos {
35namespace logger {
Austin Schuhe309d2a2019-11-29 13:25:21 -080036namespace chrono = std::chrono;
37
Austin Schuh2f8fd752020-09-01 22:38:28 -070038
// Convenience overload: log locally under base_name using the event loop's
// own configuration.
Logger::Logger(std::string_view base_name, EventLoop *event_loop,
               std::chrono::milliseconds polling_period)
    : Logger(base_name, event_loop, event_loop->configuration(),
             polling_period) {}

// Convenience overload: log locally under base_name, but write headers for the
// provided (possibly different) configuration.
Logger::Logger(std::string_view base_name, EventLoop *event_loop,
               const Configuration *configuration,
               std::chrono::milliseconds polling_period)
    : Logger(std::make_unique<LocalLogNamer>(base_name, event_loop->node()),
             event_loop, configuration, polling_period) {}

// Convenience overload: custom namer, event loop's own configuration.
Logger::Logger(std::unique_ptr<LogNamer> log_namer, EventLoop *event_loop,
               std::chrono::milliseconds polling_period)
    : Logger(std::move(log_namer), event_loop, event_loop->configuration(),
             polling_period) {}
Austin Schuh6f3babe2020-01-26 20:34:50 -080052
// Main constructor.  Walks the configuration to decide, per channel, what to
// log (data, delivery timestamps, and/or forwarded remote-timestamp
// contents), creates the matching fetchers/writers, pre-builds a log file
// header per node, and schedules logging to start once the event loop runs.
Logger::Logger(std::unique_ptr<LogNamer> log_namer, EventLoop *event_loop,
               const Configuration *configuration,
               std::chrono::milliseconds polling_period)
    : event_loop_(event_loop),
      uuid_(UUID::Random()),
      log_namer_(std::move(log_namer)),
      configuration_(configuration),
      name_(network::GetHostname()),
      timer_handler_(event_loop_->AddTimer([this]() { DoLogData(); })),
      polling_period_(polling_period),
      // Only multi-node configurations have ServerStatistics to fetch; on a
      // single node leave the fetcher default-constructed (unattached).
      server_statistics_fetcher_(
          configuration::MultiNode(event_loop_->configuration())
              ? event_loop_->MakeFetcher<message_bridge::ServerStatistics>(
                    "/aos")
              : aos::Fetcher<message_bridge::ServerStatistics>()) {
  VLOG(1) << "Starting logger for " << FlatbufferToJson(event_loop_->node());
  int channel_index = 0;

  // Find all the nodes which are logging timestamps on our node.
  std::set<const Node *> timestamp_logger_nodes;
  for (const Channel *channel : *configuration_->channels()) {
    if (!configuration::ChannelIsSendableOnNode(channel, event_loop_->node()) ||
        !channel->has_destination_nodes()) {
      continue;
    }
    for (const Connection *connection : *channel->destination_nodes()) {
      const Node *other_node = configuration::GetNode(
          configuration_, connection->name()->string_view());

      if (configuration::ConnectionDeliveryTimeIsLoggedOnNode(
              connection, event_loop_->node())) {
        VLOG(1) << "Timestamps are logged from "
                << FlatbufferToJson(other_node);
        timestamp_logger_nodes.insert(other_node);
      }
    }
  }

  // Map from the remote-timestamp channel to the node whose timestamps it
  // carries, used below to route those channels to per-node writers.
  std::map<const Channel *, const Node *> timestamp_logger_channels;

  // Now that we have all the nodes accumulated, make remote timestamp loggers
  // for them.
  for (const Node *node : timestamp_logger_nodes) {
    const Channel *channel = configuration::GetChannel(
        configuration_,
        absl::StrCat("/aos/remote_timestamps/", node->name()->string_view()),
        logger::MessageHeader::GetFullyQualifiedName(), event_loop_->name(),
        event_loop_->node());

    CHECK(channel != nullptr)
        << ": Remote timestamps are logged on "
        << event_loop_->node()->name()->string_view()
        << " but can't find channel /aos/remote_timestamps/"
        << node->name()->string_view();
    timestamp_logger_channels.insert(std::make_pair(channel, node));
  }

  const size_t our_node_index = configuration::GetNodeIndex(
      configuration_, event_loop_->node());

  for (const Channel *config_channel : *configuration_->channels()) {
    // The MakeRawFetcher method needs a channel which is in the event loop
    // configuration() object, not the configuration_ object. Go look that up
    // from the config.
    const Channel *channel = aos::configuration::GetChannel(
        event_loop_->configuration(), config_channel->name()->string_view(),
        config_channel->type()->string_view(), "", event_loop_->node());

    FetcherStruct fs;
    fs.node_index = our_node_index;
    const bool is_local =
        configuration::ChannelIsSendableOnNode(channel, event_loop_->node());

    const bool is_readable =
        configuration::ChannelIsReadableOnNode(channel, event_loop_->node());
    const bool log_message = configuration::ChannelMessageIsLoggedOnNode(
                                 channel, event_loop_->node()) &&
                             is_readable;

    // Delivery times only make sense on a multi-node config (node != null).
    const bool log_delivery_times =
        (event_loop_->node() == nullptr)
            ? false
            : configuration::ConnectionDeliveryTimeIsLoggedOnNode(
                  channel, event_loop_->node(), event_loop_->node());

    // Now, detect a MessageHeader timestamp logger where we should just log the
    // contents to a file directly.
    const bool log_contents = timestamp_logger_channels.find(channel) !=
                              timestamp_logger_channels.end();
    const Node *timestamp_node =
        log_contents ? timestamp_logger_channels.find(channel)->second
                     : nullptr;

    if (log_message || log_delivery_times || log_contents) {
      fs.fetcher = event_loop->MakeRawFetcher(channel);
      VLOG(1) << "Logging channel "
              << configuration::CleanedChannelToString(channel);

      if (log_delivery_times) {
        VLOG(1) << " Delivery times";
        fs.timestamp_writer = log_namer_->MakeTimestampWriter(channel);
      }
      if (log_message) {
        VLOG(1) << " Data";
        fs.writer = log_namer_->MakeWriter(channel);
        if (!is_local) {
          // Data fetched for a channel we can't send on is a remote message.
          fs.log_type = LogType::kLogRemoteMessage;
        }
      }
      if (log_contents) {
        VLOG(1) << "Timestamp logger channel "
                << configuration::CleanedChannelToString(channel);
        fs.contents_writer =
            log_namer_->MakeForwardedTimestampWriter(channel, timestamp_node);
        // Forwarded timestamp contents are attributed to the remote node, not
        // to us.
        fs.node_index =
            configuration::GetNodeIndex(configuration_, timestamp_node);
      }
      fs.channel_index = channel_index;
      fs.written = false;
      fetchers_.emplace_back(std::move(fs));
    }
    ++channel_index;
  }

  // One NodeState per node in a multi-node config; exactly one otherwise.
  node_state_.resize(configuration::MultiNode(configuration_)
                         ? configuration_->nodes()->size()
                         : 1u);

  // Pre-build a log file header for every node the namer will write for.
  for (const Node *node : log_namer_->nodes()) {
    const int node_index =
        configuration::GetNodeIndex(configuration_, node);

    node_state_[node_index].log_file_header = MakeHeader(node);
  }

  // When things start, we want to log the header, then the most recent
  // messages available on each fetcher to capture the previous state, then
  // start polling.
  event_loop_->OnRun([this]() { StartLogging(); });
}
193
Austin Schuh0c297012020-09-16 18:41:59 -0700194Logger::~Logger() {
195 // If we are replaying a log file, or in simulation, we want to force the last
196 // bit of data to be logged. The easiest way to deal with this is to poll
197 // everything as we go to destroy the class, ie, shut down the logger, and
198 // write it to disk.
199 DoLogData();
200}
201
// Begins (or re-begins) logging: snapshots the latest message on every
// channel, resets per-node start times, writes the log headers, and arms the
// periodic polling timer.
void Logger::StartLogging() {
  // Grab data from each channel right before we declare the log file started
  // so we can capture the latest message on each channel. This lets us have
  // non periodic messages with configuration that now get logged.
  for (FetcherStruct &f : fetchers_) {
    // Fetch() returning true means there is a message pending to be written.
    f.written = !f.fetcher->Fetch();
  }

  // Clear out any old timestamps in case we are re-starting logging.
  for (size_t i = 0; i < node_state_.size(); ++i) {
    SetStartTime(i, monotonic_clock::min_time, realtime_clock::min_time);
  }

  // WriteHeader() also establishes last_synchronized_time_.
  WriteHeader();

  LOG(INFO) << "Logging node as " << FlatbufferToJson(event_loop_->node())
            << " start_time " << last_synchronized_time_;

  timer_handler_->Setup(event_loop_->monotonic_now() + polling_period_,
                        polling_period_);
}
223
// Declares the log "started" at the current time and writes a header for each
// node the namer covers, updating per-node start times where possible.
void Logger::WriteHeader() {
  // Refresh server statistics first so remote-node clock offsets (used by
  // MaybeUpdateTimestamp below) are as current as possible.
  if (configuration::MultiNode(configuration_)) {
    server_statistics_fetcher_.Fetch();
  }

  aos::monotonic_clock::time_point monotonic_start_time =
      event_loop_->monotonic_now();
  aos::realtime_clock::time_point realtime_start_time =
      event_loop_->realtime_now();

  // We need to pick a point in time to declare the log file "started". This
  // starts here. It needs to be after everything is fetched so that the
  // fetchers are all pointed at the most recent message before the start
  // time.
  last_synchronized_time_ = monotonic_start_time;

  for (const Node *node : log_namer_->nodes()) {
    const int node_index =
        configuration::GetNodeIndex(configuration_, node);
    MaybeUpdateTimestamp(node, node_index, monotonic_start_time,
                         realtime_start_time);
    log_namer_->WriteHeader(&node_state_[node_index].log_file_header, node);
  }
}
Austin Schuh8bd96322020-02-13 21:18:22 -0800248
// For nodes whose start time was unknown when the header was written, tries
// again now that newer server statistics may be available; rotates the log
// for any node whose start time was just filled in so a corrected header gets
// written.  No-op on single-node configs.
void Logger::WriteMissingTimestamps() {
  if (configuration::MultiNode(configuration_)) {
    server_statistics_fetcher_.Fetch();
  } else {
    // Single node: start times were set unconditionally; nothing to backfill.
    return;
  }

  // No statistics message yet, so no offsets to work with.
  if (server_statistics_fetcher_.get() == nullptr) {
    return;
  }

  for (const Node *node : log_namer_->nodes()) {
    const int node_index =
        configuration::GetNodeIndex(configuration_, node);
    if (MaybeUpdateTimestamp(
            node, node_index,
            server_statistics_fetcher_.context().monotonic_event_time,
            server_statistics_fetcher_.context().realtime_event_time)) {
      // Start time was just learned; rotate so a header with it is written.
      log_namer_->Rotate(node, &node_state_[node_index].log_file_header);
    }
  }
}
271
272void Logger::SetStartTime(size_t node_index,
273 aos::monotonic_clock::time_point monotonic_start_time,
274 aos::realtime_clock::time_point realtime_start_time) {
275 node_state_[node_index].monotonic_start_time = monotonic_start_time;
276 node_state_[node_index].realtime_start_time = realtime_start_time;
277 node_state_[node_index]
278 .log_file_header.mutable_message()
279 ->mutate_monotonic_start_time(
280 std::chrono::duration_cast<std::chrono::nanoseconds>(
281 monotonic_start_time.time_since_epoch())
282 .count());
283 if (node_state_[node_index]
284 .log_file_header.mutable_message()
285 ->has_realtime_start_time()) {
286 node_state_[node_index]
287 .log_file_header.mutable_message()
288 ->mutate_realtime_start_time(
289 std::chrono::duration_cast<std::chrono::nanoseconds>(
290 realtime_start_time.time_since_epoch())
291 .count());
292 }
293}
294
// Attempts to set the start time for node_index.  Returns true when the start
// time was set by this call, false if it was already set or cannot be
// determined yet (remote node disconnected or no clock offset available).
bool Logger::MaybeUpdateTimestamp(
    const Node *node, int node_index,
    aos::monotonic_clock::time_point monotonic_start_time,
    aos::realtime_clock::time_point realtime_start_time) {
  // Bail early if the start times are already set.
  if (node_state_[node_index].monotonic_start_time !=
      monotonic_clock::min_time) {
    return false;
  }
  if (configuration::MultiNode(configuration_)) {
    if (event_loop_->node() == node) {
      // There are no offsets to compute for ourself, so always succeed.
      SetStartTime(node_index, monotonic_start_time, realtime_start_time);
      return true;
    } else if (server_statistics_fetcher_.get() != nullptr) {
      // We must be a remote node now. Look for the connection and see if it is
      // connected.

      for (const message_bridge::ServerConnection *connection :
           *server_statistics_fetcher_->connections()) {
        // Skip connections for other nodes.
        if (connection->node()->name()->string_view() !=
            node->name()->string_view()) {
          continue;
        }

        if (connection->state() != message_bridge::State::CONNECTED) {
          VLOG(1) << node->name()->string_view()
                  << " is not connected, can't start it yet.";
          break;
        }

        if (!connection->has_monotonic_offset()) {
          VLOG(1) << "Missing monotonic offset for setting start time for node "
                  << aos::FlatbufferToJson(node);
          break;
        }

        VLOG(1) << "Updating start time for " << aos::FlatbufferToJson(node);

        // Found it and it is connected. Compensate and go.
        monotonic_start_time +=
            std::chrono::nanoseconds(connection->monotonic_offset());

        SetStartTime(node_index, monotonic_start_time, realtime_start_time);
        return true;
      }
    }
  } else {
    // Single node: no clock offsets exist, so just take the times as given.
    SetStartTime(node_index, monotonic_start_time, realtime_start_time);
    return true;
  }
  return false;
}
348
// Builds the LogFileHeader flatbuffer for one node.  Start times are filled
// with min_time sentinels here; SetStartTime() patches them in place later
// once real start times are known.
aos::SizePrefixedFlatbufferDetachedBuffer<LogFileHeader> Logger::MakeHeader(
    const Node *node) {
  // Now write the header with this timestamp in it.
  flatbuffers::FlatBufferBuilder fbb;
  // Force default-valued fields to be serialized so they can be mutated in
  // place later (flatbuffers omits defaults otherwise).
  fbb.ForceDefaults(true);

  // TODO(austin): Compress this much more efficiently. There are a bunch of
  // duplicated schemas.
  flatbuffers::Offset<aos::Configuration> configuration_offset =
      CopyFlatBuffer(configuration_, &fbb);

  flatbuffers::Offset<flatbuffers::String> name_offset =
      fbb.CreateString(name_);

  flatbuffers::Offset<flatbuffers::String> logger_uuid_offset =
      fbb.CreateString(uuid_.string_view());

  // Placeholder parts UUID; NOTE(review): presumably replaced by the namer
  // when parts are actually written — confirm against LogNamer.
  flatbuffers::Offset<flatbuffers::String> parts_uuid_offset =
      fbb.CreateString("00000000-0000-4000-8000-000000000000");

  flatbuffers::Offset<Node> node_offset;

  if (configuration::MultiNode(configuration_)) {
    node_offset = CopyFlatBuffer(node, &fbb);
  }

  aos::logger::LogFileHeader::Builder log_file_header_builder(fbb);

  log_file_header_builder.add_name(name_offset);

  // Only add the node if we are running in a multinode configuration.
  if (node != nullptr) {
    log_file_header_builder.add_node(node_offset);
  }

  log_file_header_builder.add_configuration(configuration_offset);
  // The worst case theoretical out of order is the polling period times 2.
  // One message could get logged right after the boundary, but be for right
  // before the next boundary. And the reverse could happen for another
  // message. Report back 3x to be extra safe, and because the cost isn't
  // huge on the read side.
  log_file_header_builder.add_max_out_of_order_duration(
      std::chrono::duration_cast<std::chrono::nanoseconds>(3 * polling_period_)
          .count());

  // min_time sentinel; patched by SetStartTime().
  log_file_header_builder.add_monotonic_start_time(
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          monotonic_clock::min_time.time_since_epoch())
          .count());
  // Realtime start only exists for our own node's header, so SetStartTime's
  // has_realtime_start_time() check gates mutation for remote headers.
  if (node == event_loop_->node()) {
    log_file_header_builder.add_realtime_start_time(
        std::chrono::duration_cast<std::chrono::nanoseconds>(
            realtime_clock::min_time.time_since_epoch())
            .count());
  }

  log_file_header_builder.add_logger_uuid(logger_uuid_offset);

  log_file_header_builder.add_parts_uuid(parts_uuid_offset);
  log_file_header_builder.add_parts_index(0);

  fbb.FinishSizePrefixed(log_file_header_builder.Finish());
  return fbb.Release();
}
413
414void Logger::Rotate() {
415 for (const Node *node : log_namer_->nodes()) {
416 const int node_index =
Austin Schuh0c297012020-09-16 18:41:59 -0700417 configuration::GetNodeIndex(configuration_, node);
Austin Schuh64fab802020-09-09 22:47:47 -0700418 log_namer_->Rotate(node, &node_state_[node_index].log_file_header);
Austin Schuh2f8fd752020-09-01 22:38:28 -0700419 }
420}
421
// Drains every fetcher up to (but not including) time t, writing data,
// delivery timestamps, and forwarded timestamp contents to their respective
// writers, then advances last_synchronized_time_ to t.
void Logger::LogUntil(monotonic_clock::time_point t) {
  WriteMissingTimestamps();

  // Write each channel to disk, one at a time.
  for (FetcherStruct &f : fetchers_) {
    while (true) {
      // If the current message was already written, advance to the next one;
      // stop when the channel is drained.
      if (f.written) {
        if (!f.fetcher->FetchNext()) {
          VLOG(2) << "No new data on "
                  << configuration::CleanedChannelToString(
                         f.fetcher->channel());
          break;
        } else {
          f.written = false;
        }
      }

      CHECK(!f.written);

      // TODO(james): Write tests to exercise this logic.
      // Only log messages strictly before the sync point t; later messages
      // wait for the next cycle.
      if (f.fetcher->context().monotonic_event_time < t) {
        if (f.writer != nullptr) {
          // Write!
          // Pre-size the builder for payload + the largest header seen so far
          // to avoid reallocation while packing.
          flatbuffers::FlatBufferBuilder fbb(f.fetcher->context().size +
                                             max_header_size_);
          fbb.ForceDefaults(true);

          fbb.FinishSizePrefixed(PackMessage(&fbb, f.fetcher->context(),
                                             f.channel_index, f.log_type));

          VLOG(2) << "Writing data as node "
                  << FlatbufferToJson(event_loop_->node()) << " for channel "
                  << configuration::CleanedChannelToString(f.fetcher->channel())
                  << " to " << f.writer->filename() << " data "
                  << FlatbufferToJson(
                         flatbuffers::GetSizePrefixedRoot<MessageHeader>(
                             fbb.GetBufferPointer()));

          // Track the largest header overhead observed for future pre-sizing.
          max_header_size_ = std::max(
              max_header_size_, fbb.GetSize() - f.fetcher->context().size);
          f.writer->QueueSizedFlatbuffer(&fbb);
        }

        if (f.timestamp_writer != nullptr) {
          // And now handle timestamps.
          flatbuffers::FlatBufferBuilder fbb;
          fbb.ForceDefaults(true);

          fbb.FinishSizePrefixed(PackMessage(&fbb, f.fetcher->context(),
                                             f.channel_index,
                                             LogType::kLogDeliveryTimeOnly));

          VLOG(2) << "Writing timestamps as node "
                  << FlatbufferToJson(event_loop_->node()) << " for channel "
                  << configuration::CleanedChannelToString(f.fetcher->channel())
                  << " to " << f.timestamp_writer->filename() << " timestamp "
                  << FlatbufferToJson(
                         flatbuffers::GetSizePrefixedRoot<MessageHeader>(
                             fbb.GetBufferPointer()));

          f.timestamp_writer->QueueSizedFlatbuffer(&fbb);
        }

        if (f.contents_writer != nullptr) {
          // And now handle the special message contents channel. Copy the
          // message into a FlatBufferBuilder and save it to disk.
          // TODO(austin): We can be more efficient here when we start to
          // care...
          flatbuffers::FlatBufferBuilder fbb;
          fbb.ForceDefaults(true);

          // The payload of this channel is itself a MessageHeader describing
          // the remote delivery; re-pack it field by field.
          const MessageHeader *msg =
              flatbuffers::GetRoot<MessageHeader>(f.fetcher->context().data);

          logger::MessageHeader::Builder message_header_builder(fbb);

          // Note: this must match the same order as MessageBridgeServer and
          // PackMessage. We want identical headers to have identical
          // on-the-wire formats to make comparing them easier.
          message_header_builder.add_channel_index(msg->channel_index());

          message_header_builder.add_queue_index(msg->queue_index());
          message_header_builder.add_monotonic_sent_time(
              msg->monotonic_sent_time());
          message_header_builder.add_realtime_sent_time(
              msg->realtime_sent_time());

          message_header_builder.add_monotonic_remote_time(
              msg->monotonic_remote_time());
          message_header_builder.add_realtime_remote_time(
              msg->realtime_remote_time());
          message_header_builder.add_remote_queue_index(
              msg->remote_queue_index());

          fbb.FinishSizePrefixed(message_header_builder.Finish());

          f.contents_writer->QueueSizedFlatbuffer(&fbb);
        }

        f.written = true;
      } else {
        break;
      }
    }
  }
  last_synchronized_time_ = t;
}
529
Austin Schuhe309d2a2019-11-29 13:25:21 -0800530void Logger::DoLogData() {
531 // We want to guarentee that messages aren't out of order by more than
532 // max_out_of_order_duration. To do this, we need sync points. Every write
533 // cycle should be a sync point.
Austin Schuhfa895892020-01-07 20:07:41 -0800534 const monotonic_clock::time_point monotonic_now =
535 event_loop_->monotonic_now();
Austin Schuhe309d2a2019-11-29 13:25:21 -0800536
537 do {
538 // Move the sync point up by at most polling_period. This forces one sync
539 // per iteration, even if it is small.
Austin Schuh2f8fd752020-09-01 22:38:28 -0700540 LogUntil(
541 std::min(last_synchronized_time_ + polling_period_, monotonic_now));
Austin Schuhe309d2a2019-11-29 13:25:21 -0800542
Austin Schuhe309d2a2019-11-29 13:25:21 -0800543 // If we missed cycles, we could be pretty far behind. Spin until we are
544 // caught up.
545 } while (last_synchronized_time_ + polling_period_ < monotonic_now);
Austin Schuhe309d2a2019-11-29 13:25:21 -0800546}
547
Austin Schuh11d43732020-09-21 17:28:30 -0700548std::vector<LogFile> SortParts(const std::vector<std::string> &parts) {
Austin Schuh5212cad2020-09-09 23:12:09 -0700549 // Start by grouping all parts by UUID, and extracting the part index.
Austin Schuh11d43732020-09-21 17:28:30 -0700550 // Datastructure to hold all the info extracted from a set of parts which go
551 // together so we can sort them afterwords.
552 struct UnsortedLogParts {
553 // Start times.
554 aos::monotonic_clock::time_point monotonic_start_time;
555 aos::realtime_clock::time_point realtime_start_time;
556
557 // Node to save.
558 std::string node;
559
560 // Pairs of the filename and the part index for sorting.
561 std::vector<std::pair<std::string, int>> parts;
562 };
563
564 // Map holding the logger_uuid -> second map. The second map holds the
565 // parts_uuid -> list of parts for sorting.
566 std::map<std::string, std::map<std::string, UnsortedLogParts>> parts_list;
Austin Schuh5212cad2020-09-09 23:12:09 -0700567
568 // Sort part files without UUIDs and part indexes as well. Extract everything
569 // useful from the log in the first pass, then sort later.
Austin Schuh11d43732020-09-21 17:28:30 -0700570 struct UnsortedOldParts {
571 // Part information with everything but the list of parts.
572 LogParts parts;
573
574 // Tuple of time for the data and filename needed for sorting after
575 // extracting.
576 std::vector<std::pair<monotonic_clock::time_point, std::string>> unsorted_parts;
Austin Schuh5212cad2020-09-09 23:12:09 -0700577 };
578
Austin Schuh11d43732020-09-21 17:28:30 -0700579 // A list of all the old parts which we don't know how to sort using uuids.
580 // There are enough of these in the wild that this is worth supporting.
581 std::vector<UnsortedOldParts> old_parts;
Austin Schuh5212cad2020-09-09 23:12:09 -0700582
Austin Schuh11d43732020-09-21 17:28:30 -0700583 // Now extract everything into our datastructures above for sorting.
Austin Schuh5212cad2020-09-09 23:12:09 -0700584 for (const std::string &part : parts) {
585 FlatbufferVector<LogFileHeader> log_header = ReadHeader(part);
586
Austin Schuh11d43732020-09-21 17:28:30 -0700587 const monotonic_clock::time_point monotonic_start_time(
588 chrono::nanoseconds(log_header.message().monotonic_start_time()));
589 const realtime_clock::time_point realtime_start_time(
590 chrono::nanoseconds(log_header.message().realtime_start_time()));
591
592 const std::string_view node =
593 log_header.message().has_node()
594 ? log_header.message().node()->name()->string_view()
595 : "";
596
Austin Schuh5212cad2020-09-09 23:12:09 -0700597 // Looks like an old log. No UUID, index, and also single node. We have
598 // little to no multi-node log files in the wild without part UUIDs and
599 // indexes which we care much about.
600 if (!log_header.message().has_parts_uuid() &&
601 !log_header.message().has_parts_index() &&
602 !log_header.message().has_node()) {
Austin Schuh5212cad2020-09-09 23:12:09 -0700603 FlatbufferVector<MessageHeader> first_message = ReadNthMessage(part, 0);
Austin Schuh11d43732020-09-21 17:28:30 -0700604 const monotonic_clock::time_point first_message_time(
Austin Schuh5212cad2020-09-09 23:12:09 -0700605 chrono::nanoseconds(first_message.message().monotonic_sent_time()));
Austin Schuh11d43732020-09-21 17:28:30 -0700606
607 // Find anything with a matching start time. They all go together.
608 auto result = std::find_if(
609 old_parts.begin(), old_parts.end(),
610 [&](const UnsortedOldParts &parts) {
611 return parts.parts.monotonic_start_time == monotonic_start_time &&
612 parts.parts.realtime_start_time == realtime_start_time;
613 });
614
615 if (result == old_parts.end()) {
616 old_parts.emplace_back();
617 old_parts.back().parts.monotonic_start_time = monotonic_start_time;
618 old_parts.back().parts.realtime_start_time = realtime_start_time;
619 old_parts.back().unsorted_parts.emplace_back(
620 std::make_pair(first_message_time, part));
621 } else {
622 result->unsorted_parts.emplace_back(
623 std::make_pair(first_message_time, part));
624 }
Austin Schuh5212cad2020-09-09 23:12:09 -0700625 continue;
626 }
627
Austin Schuh11d43732020-09-21 17:28:30 -0700628 CHECK(log_header.message().has_logger_uuid());
Austin Schuh5212cad2020-09-09 23:12:09 -0700629 CHECK(log_header.message().has_parts_uuid());
630 CHECK(log_header.message().has_parts_index());
631
Austin Schuh11d43732020-09-21 17:28:30 -0700632 const std::string logger_uuid = log_header.message().logger_uuid()->str();
Austin Schuh5212cad2020-09-09 23:12:09 -0700633 const std::string parts_uuid = log_header.message().parts_uuid()->str();
Austin Schuh11d43732020-09-21 17:28:30 -0700634 int32_t parts_index = log_header.message().parts_index();
635
636 auto log_it = parts_list.find(logger_uuid);
637 if (log_it == parts_list.end()) {
638 log_it = parts_list
Austin Schuh5212cad2020-09-09 23:12:09 -0700639 .insert(std::make_pair(
Austin Schuh11d43732020-09-21 17:28:30 -0700640 logger_uuid, std::map<std::string, UnsortedLogParts>()))
Austin Schuh5212cad2020-09-09 23:12:09 -0700641 .first;
642 }
Austin Schuh11d43732020-09-21 17:28:30 -0700643
644 auto it = log_it->second.find(parts_uuid);
645 if (it == log_it->second.end()) {
646 it = log_it->second.insert(std::make_pair(parts_uuid, UnsortedLogParts()))
647 .first;
648 it->second.monotonic_start_time = monotonic_start_time;
649 it->second.realtime_start_time = realtime_start_time;
650 it->second.node = std::string(node);
651 }
652
653 // First part might be min_time. If it is, try to put a better time on it.
654 if (it->second.monotonic_start_time == monotonic_clock::min_time) {
655 it->second.monotonic_start_time = monotonic_start_time;
656 } else if (monotonic_start_time != monotonic_clock::min_time) {
657 CHECK_EQ(it->second.monotonic_start_time, monotonic_start_time);
658 }
659 if (it->second.realtime_start_time == realtime_clock::min_time) {
660 it->second.realtime_start_time = realtime_start_time;
661 } else if (realtime_start_time != realtime_clock::min_time) {
662 CHECK_EQ(it->second.realtime_start_time, realtime_start_time);
663 }
664
665 it->second.parts.emplace_back(std::make_pair(part, parts_index));
Austin Schuh5212cad2020-09-09 23:12:09 -0700666 }
667
668 CHECK_NE(old_parts.empty(), parts_list.empty())
669 << ": Can't have a mix of old and new parts.";
670
Austin Schuh11d43732020-09-21 17:28:30 -0700671 // Now reformat old_parts to be in the right datastructure to report.
Austin Schuh5212cad2020-09-09 23:12:09 -0700672 if (!old_parts.empty()) {
Austin Schuh11d43732020-09-21 17:28:30 -0700673 std::vector<LogFile> result;
674 for (UnsortedOldParts &p : old_parts) {
675 // Sort by the oldest message in each file.
676 std::sort(
677 p.unsorted_parts.begin(), p.unsorted_parts.end(),
678 [](const std::pair<monotonic_clock::time_point, std::string> &a,
679 const std::pair<monotonic_clock::time_point, std::string> &b) {
680 return a.first < b.first;
681 });
682 LogFile log_file;
683 for (std::pair<monotonic_clock::time_point, std::string> &f :
684 p.unsorted_parts) {
685 p.parts.parts.emplace_back(std::move(f.second));
686 }
687 log_file.parts.emplace_back(std::move(p.parts));
688 result.emplace_back(std::move(log_file));
Austin Schuh5212cad2020-09-09 23:12:09 -0700689 }
Austin Schuh5212cad2020-09-09 23:12:09 -0700690
Austin Schuh11d43732020-09-21 17:28:30 -0700691 return result;
Austin Schuh5212cad2020-09-09 23:12:09 -0700692 }
693
694 // Now, sort them and produce the final vector form.
Austin Schuh11d43732020-09-21 17:28:30 -0700695 std::vector<LogFile> result;
Austin Schuh5212cad2020-09-09 23:12:09 -0700696 result.reserve(parts_list.size());
Austin Schuh11d43732020-09-21 17:28:30 -0700697 for (std::pair<const std::string, std::map<std::string, UnsortedLogParts>> &logs : parts_list) {
698 LogFile new_file;
699 new_file.logger_uuid = logs.first;
700 for (std::pair<const std::string, UnsortedLogParts> &parts : logs.second) {
701 LogParts new_parts;
702 new_parts.monotonic_start_time = parts.second.monotonic_start_time;
703 new_parts.realtime_start_time = parts.second.realtime_start_time;
704 new_parts.logger_uuid = logs.first;
705 new_parts.parts_uuid = parts.first;
706 new_parts.node = std::move(parts.second.node);
707
708 std::sort(parts.second.parts.begin(), parts.second.parts.end(),
709 [](const std::pair<std::string, int> &a,
710 const std::pair<std::string, int> &b) {
711 return a.second < b.second;
712 });
713 new_parts.parts.reserve(parts.second.parts.size());
714 for (std::pair<std::string, int> &p : parts.second.parts) {
715 new_parts.parts.emplace_back(std::move(p.first));
716 }
717 new_file.parts.emplace_back(std::move(new_parts));
Austin Schuh5212cad2020-09-09 23:12:09 -0700718 }
Austin Schuh11d43732020-09-21 17:28:30 -0700719 result.emplace_back(std::move(new_file));
720 }
721 return result;
722}
723
724std::ostream &operator<<(std::ostream &stream, const LogFile &file) {
725 stream << "{";
726 if (!file.logger_uuid.empty()) {
727 stream << "\"logger_uuid\": \"" << file.logger_uuid << "\", ";
728 }
729 stream << "\"parts\": [";
730 for (size_t i = 0; i < file.parts.size(); ++i) {
731 if (i != 0u) {
732 stream << ", ";
733 }
734 stream << file.parts[i];
735 }
736 stream << "]}";
737 return stream;
738}
739std::ostream &operator<<(std::ostream &stream, const LogParts &parts) {
740 stream << "{";
741 if (!parts.logger_uuid.empty()) {
742 stream << "\"logger_uuid\": \"" << parts.logger_uuid << "\", ";
743 }
744 if (!parts.parts_uuid.empty()) {
745 stream << "\"parts_uuid\": \"" << parts.parts_uuid << "\", ";
746 }
747 if (!parts.node.empty()) {
748 stream << "\"node\": \"" << parts.node << "\", ";
749 }
750 stream << "\"monotonic_start_time\": " << parts.monotonic_start_time
751 << ", \"realtime_start_time\": " << parts.realtime_start_time << ", [";
752
753 for (size_t i = 0; i < parts.parts.size(); ++i) {
754 if (i != 0u) {
755 stream << ", ";
756 }
757 stream << parts.parts[i];
758 }
759
760 stream << "]}";
761 return stream;
762}
763
764std::vector<std::vector<std::string>> ToLogReaderVector(
765 const std::vector<LogFile> &log_files) {
766 std::vector<std::vector<std::string>> result;
767 for (const LogFile &log_file : log_files) {
768 for (const LogParts &log_parts : log_file.parts) {
769 std::vector<std::string> parts;
770 for (const std::string &part : log_parts.parts) {
771 parts.emplace_back(part);
772 }
773 result.emplace_back(std::move(parts));
774 }
Austin Schuh5212cad2020-09-09 23:12:09 -0700775 }
776 return result;
777}
778
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -0800779LogReader::LogReader(std::string_view filename,
780 const Configuration *replay_configuration)
Austin Schuhfa895892020-01-07 20:07:41 -0800781 : LogReader(std::vector<std::string>{std::string(filename)},
782 replay_configuration) {}
783
// Constructs a LogReader for a single list of log part files; delegates to
// the vector-of-lists constructor.
LogReader::LogReader(const std::vector<std::string> &filenames,
                     const Configuration *replay_configuration)
    : LogReader(std::vector<std::vector<std::string>>{filenames},
                replay_configuration) {}
788
Austin Schuh11d43732020-09-21 17:28:30 -0700789// TODO(austin): Make this the base and kill the others. This has much better
790// context for sorting.
791LogReader::LogReader(const std::vector<LogFile> &log_files,
792 const Configuration *replay_configuration)
793 : LogReader(ToLogReaderVector(log_files), replay_configuration) {}
794
Austin Schuh6f3babe2020-01-26 20:34:50 -0800795LogReader::LogReader(const std::vector<std::vector<std::string>> &filenames,
796 const Configuration *replay_configuration)
797 : filenames_(filenames),
798 log_file_header_(ReadHeader(filenames[0][0])),
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -0800799 replay_configuration_(replay_configuration) {
Austin Schuh6331ef92020-01-07 18:28:09 -0800800 MakeRemappedConfig();
Austin Schuh6f3babe2020-01-26 20:34:50 -0800801
Austin Schuh6aa77be2020-02-22 21:06:40 -0800802 if (replay_configuration) {
803 CHECK_EQ(configuration::MultiNode(configuration()),
804 configuration::MultiNode(replay_configuration))
Austin Schuh2f8fd752020-09-01 22:38:28 -0700805 << ": Log file and replay config need to both be multi or single "
806 "node.";
Austin Schuh6aa77be2020-02-22 21:06:40 -0800807 }
808
Austin Schuh6f3babe2020-01-26 20:34:50 -0800809 if (!configuration::MultiNode(configuration())) {
Austin Schuh858c9f32020-08-31 16:56:12 -0700810 states_.emplace_back(
811 std::make_unique<State>(std::make_unique<ChannelMerger>(filenames)));
Austin Schuh8bd96322020-02-13 21:18:22 -0800812 } else {
Austin Schuh6aa77be2020-02-22 21:06:40 -0800813 if (replay_configuration) {
James Kuszmaul46d82582020-05-09 19:50:09 -0700814 CHECK_EQ(logged_configuration()->nodes()->size(),
Austin Schuh6aa77be2020-02-22 21:06:40 -0800815 replay_configuration->nodes()->size())
Austin Schuh2f8fd752020-09-01 22:38:28 -0700816 << ": Log file and replay config need to have matching nodes "
817 "lists.";
James Kuszmaul46d82582020-05-09 19:50:09 -0700818 for (const Node *node : *logged_configuration()->nodes()) {
819 if (configuration::GetNode(replay_configuration, node) == nullptr) {
Austin Schuh2f8fd752020-09-01 22:38:28 -0700820 LOG(FATAL) << "Found node " << FlatbufferToJson(node)
821 << " in logged config that is not present in the replay "
822 "config.";
James Kuszmaul46d82582020-05-09 19:50:09 -0700823 }
824 }
Austin Schuh6aa77be2020-02-22 21:06:40 -0800825 }
Austin Schuh8bd96322020-02-13 21:18:22 -0800826 states_.resize(configuration()->nodes()->size());
Austin Schuh6f3babe2020-01-26 20:34:50 -0800827 }
Austin Schuhe309d2a2019-11-29 13:25:21 -0800828}
829
Austin Schuh6aa77be2020-02-22 21:06:40 -0800830LogReader::~LogReader() {
Austin Schuh39580f12020-08-01 14:44:08 -0700831 if (event_loop_factory_unique_ptr_) {
832 Deregister();
833 } else if (event_loop_factory_ != nullptr) {
834 LOG(FATAL) << "Must call Deregister before the SimulatedEventLoopFactory "
835 "is destroyed";
836 }
Austin Schuh8bd96322020-02-13 21:18:22 -0800837 if (offset_fp_ != nullptr) {
838 fclose(offset_fp_);
839 }
Austin Schuh2f8fd752020-09-01 22:38:28 -0700840 // Zero out some buffers. It's easy to do use-after-frees on these, so make
841 // it more obvious.
Austin Schuh39580f12020-08-01 14:44:08 -0700842 if (remapped_configuration_buffer_) {
843 remapped_configuration_buffer_->Wipe();
844 }
845 log_file_header_.Wipe();
Austin Schuh8bd96322020-02-13 21:18:22 -0800846}
Austin Schuhe309d2a2019-11-29 13:25:21 -0800847
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -0800848const Configuration *LogReader::logged_configuration() const {
Austin Schuh6f3babe2020-01-26 20:34:50 -0800849 return log_file_header_.message().configuration();
Austin Schuhe309d2a2019-11-29 13:25:21 -0800850}
851
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -0800852const Configuration *LogReader::configuration() const {
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -0800853 return remapped_configuration_;
854}
855
Austin Schuh6f3babe2020-01-26 20:34:50 -0800856std::vector<const Node *> LogReader::Nodes() const {
Austin Schuh2f8fd752020-09-01 22:38:28 -0700857 // Because the Node pointer will only be valid if it actually points to
858 // memory owned by remapped_configuration_, we need to wait for the
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -0800859 // remapped_configuration_ to be populated before accessing it.
Austin Schuh6f3babe2020-01-26 20:34:50 -0800860 //
861 // Also, note, that when ever a map is changed, the nodes in here are
862 // invalidated.
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -0800863 CHECK(remapped_configuration_ != nullptr)
864 << ": Need to call Register before the node() pointer will be valid.";
Austin Schuh6f3babe2020-01-26 20:34:50 -0800865 return configuration::GetNodes(remapped_configuration_);
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -0800866}
Austin Schuh15649d62019-12-28 16:36:38 -0800867
Austin Schuh11d43732020-09-21 17:28:30 -0700868monotonic_clock::time_point LogReader::monotonic_start_time(
869 const Node *node) const {
Austin Schuh8bd96322020-02-13 21:18:22 -0800870 State *state =
871 states_[configuration::GetNodeIndex(configuration(), node)].get();
872 CHECK(state != nullptr) << ": Unknown node " << FlatbufferToJson(node);
873
Austin Schuh858c9f32020-08-31 16:56:12 -0700874 return state->monotonic_start_time();
Austin Schuhe309d2a2019-11-29 13:25:21 -0800875}
876
Austin Schuh11d43732020-09-21 17:28:30 -0700877realtime_clock::time_point LogReader::realtime_start_time(
878 const Node *node) const {
Austin Schuh8bd96322020-02-13 21:18:22 -0800879 State *state =
880 states_[configuration::GetNodeIndex(configuration(), node)].get();
881 CHECK(state != nullptr) << ": Unknown node " << FlatbufferToJson(node);
882
Austin Schuh858c9f32020-08-31 16:56:12 -0700883 return state->realtime_start_time();
Austin Schuhe309d2a2019-11-29 13:25:21 -0800884}
885
James Kuszmaul84ff3e52020-01-03 19:48:53 -0800886void LogReader::Register() {
887 event_loop_factory_unique_ptr_ =
Austin Schuhac0771c2020-01-07 18:36:30 -0800888 std::make_unique<SimulatedEventLoopFactory>(configuration());
James Kuszmaul84ff3e52020-01-03 19:48:53 -0800889 Register(event_loop_factory_unique_ptr_.get());
890}
891
Austin Schuh92547522019-12-28 14:33:43 -0800892void LogReader::Register(SimulatedEventLoopFactory *event_loop_factory) {
Austin Schuh92547522019-12-28 14:33:43 -0800893 event_loop_factory_ = event_loop_factory;
Austin Schuhe5bbd9e2020-09-21 17:29:20 -0700894 remapped_configuration_ = event_loop_factory_->configuration();
Austin Schuh92547522019-12-28 14:33:43 -0800895
Austin Schuhe5bbd9e2020-09-21 17:29:20 -0700896 for (const Node *node :
897 configuration::GetNodes(configuration())) {
Austin Schuh8bd96322020-02-13 21:18:22 -0800898 const size_t node_index =
899 configuration::GetNodeIndex(configuration(), node);
Austin Schuh858c9f32020-08-31 16:56:12 -0700900 states_[node_index] =
901 std::make_unique<State>(std::make_unique<ChannelMerger>(filenames_));
Austin Schuh8bd96322020-02-13 21:18:22 -0800902 State *state = states_[node_index].get();
Austin Schuh6f3babe2020-01-26 20:34:50 -0800903
Austin Schuh858c9f32020-08-31 16:56:12 -0700904 Register(state->SetNodeEventLoopFactory(
905 event_loop_factory_->GetNodeEventLoopFactory(node)));
Austin Schuhcde938c2020-02-02 17:30:07 -0800906 }
James Kuszmaul46d82582020-05-09 19:50:09 -0700907 if (live_nodes_ == 0) {
908 LOG(FATAL)
909 << "Don't have logs from any of the nodes in the replay config--are "
910 "you sure that the replay config matches the original config?";
911 }
Austin Schuh6f3babe2020-01-26 20:34:50 -0800912
Austin Schuh2f8fd752020-09-01 22:38:28 -0700913 // We need to now seed our per-node time offsets and get everything set up
914 // to run.
915 const size_t num_nodes = nodes_count();
Austin Schuhcde938c2020-02-02 17:30:07 -0800916
Austin Schuh8bd96322020-02-13 21:18:22 -0800917 // It is easiest to solve for per node offsets with a matrix rather than
918 // trying to solve the equations by hand. So let's get after it.
919 //
920 // Now, build up the map matrix.
921 //
Austin Schuh2f8fd752020-09-01 22:38:28 -0700922 // offset_matrix_ = (map_matrix_ + slope_matrix_) * [ta; tb; tc]
923 map_matrix_ = Eigen::Matrix<mpq_class, Eigen::Dynamic, Eigen::Dynamic>::Zero(
924 filters_.size() + 1, num_nodes);
925 slope_matrix_ =
926 Eigen::Matrix<mpq_class, Eigen::Dynamic, Eigen::Dynamic>::Zero(
927 filters_.size() + 1, num_nodes);
Austin Schuhcde938c2020-02-02 17:30:07 -0800928
Austin Schuh2f8fd752020-09-01 22:38:28 -0700929 offset_matrix_ =
930 Eigen::Matrix<mpq_class, Eigen::Dynamic, 1>::Zero(filters_.size() + 1);
931 valid_matrix_ =
932 Eigen::Matrix<bool, Eigen::Dynamic, 1>::Zero(filters_.size() + 1);
933 last_valid_matrix_ =
934 Eigen::Matrix<bool, Eigen::Dynamic, 1>::Zero(filters_.size() + 1);
Austin Schuhcde938c2020-02-02 17:30:07 -0800935
Austin Schuh2f8fd752020-09-01 22:38:28 -0700936 time_offset_matrix_ = Eigen::VectorXd::Zero(num_nodes);
937 time_slope_matrix_ = Eigen::VectorXd::Zero(num_nodes);
Austin Schuh8bd96322020-02-13 21:18:22 -0800938
Austin Schuh2f8fd752020-09-01 22:38:28 -0700939 // All times should average out to the distributed clock.
940 for (int i = 0; i < map_matrix_.cols(); ++i) {
941 // 1/num_nodes.
942 map_matrix_(0, i) = mpq_class(1, num_nodes);
943 }
944 valid_matrix_(0) = true;
Austin Schuh8bd96322020-02-13 21:18:22 -0800945
946 {
947 // Now, add the a - b -> sample elements.
948 size_t i = 1;
949 for (std::pair<const std::tuple<const Node *, const Node *>,
Austin Schuh2f8fd752020-09-01 22:38:28 -0700950 std::tuple<message_bridge::NoncausalOffsetEstimator>>
951 &filter : filters_) {
Austin Schuh8bd96322020-02-13 21:18:22 -0800952 const Node *const node_a = std::get<0>(filter.first);
953 const Node *const node_b = std::get<1>(filter.first);
954
955 const size_t node_a_index =
956 configuration::GetNodeIndex(configuration(), node_a);
957 const size_t node_b_index =
958 configuration::GetNodeIndex(configuration(), node_b);
959
Austin Schuh2f8fd752020-09-01 22:38:28 -0700960 // -a
961 map_matrix_(i, node_a_index) = mpq_class(-1);
962 // +b
963 map_matrix_(i, node_b_index) = mpq_class(1);
Austin Schuh8bd96322020-02-13 21:18:22 -0800964
965 // -> sample
Austin Schuh2f8fd752020-09-01 22:38:28 -0700966 std::get<0>(filter.second)
967 .set_slope_pointer(&slope_matrix_(i, node_a_index));
968 std::get<0>(filter.second).set_offset_pointer(&offset_matrix_(i, 0));
969
970 valid_matrix_(i) = false;
971 std::get<0>(filter.second).set_valid_pointer(&valid_matrix_(i));
Austin Schuh8bd96322020-02-13 21:18:22 -0800972
973 ++i;
Austin Schuh6f3babe2020-01-26 20:34:50 -0800974 }
975 }
976
Austin Schuh858c9f32020-08-31 16:56:12 -0700977 for (std::unique_ptr<State> &state : states_) {
978 state->SeedSortedMessages();
979 }
980
Austin Schuh2f8fd752020-09-01 22:38:28 -0700981 // Rank of the map matrix tells you if all the nodes are in communication
982 // with each other, which tells you if the offsets are observable.
983 const size_t connected_nodes =
984 Eigen::FullPivLU<
985 Eigen::Matrix<mpq_class, Eigen::Dynamic, Eigen::Dynamic>>(map_matrix_)
986 .rank();
987
988 // We don't need to support isolated nodes until someone has a real use
989 // case.
990 CHECK_EQ(connected_nodes, num_nodes)
991 << ": There is a node which isn't communicating with the rest.";
992
993 // And solve.
Austin Schuh8bd96322020-02-13 21:18:22 -0800994 UpdateOffsets();
995
Austin Schuh2f8fd752020-09-01 22:38:28 -0700996 // We want to start the log file at the last start time of the log files
997 // from all the nodes. Compute how long each node's simulation needs to run
998 // to move time to this point.
Austin Schuh8bd96322020-02-13 21:18:22 -0800999 distributed_clock::time_point start_time = distributed_clock::min_time;
Austin Schuhcde938c2020-02-02 17:30:07 -08001000
Austin Schuh2f8fd752020-09-01 22:38:28 -07001001 // TODO(austin): We want an "OnStart" callback for each node rather than
1002 // running until the last node.
1003
Austin Schuh8bd96322020-02-13 21:18:22 -08001004 for (std::unique_ptr<State> &state : states_) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001005 VLOG(1) << "Start time is " << state->monotonic_start_time() << " for node "
1006 << MaybeNodeName(state->event_loop()->node()) << "now "
1007 << state->monotonic_now();
1008 // And start computing the start time on the distributed clock now that
1009 // that works.
Austin Schuh858c9f32020-08-31 16:56:12 -07001010 start_time = std::max(
1011 start_time, state->ToDistributedClock(state->monotonic_start_time()));
Austin Schuhcde938c2020-02-02 17:30:07 -08001012 }
Austin Schuh2f8fd752020-09-01 22:38:28 -07001013
1014 CHECK_GE(start_time, distributed_clock::epoch())
1015 << ": Hmm, we have a node starting before the start of time. Offset "
1016 "everything.";
Austin Schuhcde938c2020-02-02 17:30:07 -08001017
Austin Schuh6f3babe2020-01-26 20:34:50 -08001018 // Forwarding is tracked per channel. If it is enabled, we want to turn it
1019 // off. Otherwise messages replayed will get forwarded across to the other
Austin Schuh2f8fd752020-09-01 22:38:28 -07001020 // nodes, and also replayed on the other nodes. This may not satisfy all
1021 // our users, but it'll start the discussion.
Austin Schuh6f3babe2020-01-26 20:34:50 -08001022 if (configuration::MultiNode(event_loop_factory_->configuration())) {
1023 for (size_t i = 0; i < logged_configuration()->channels()->size(); ++i) {
1024 const Channel *channel = logged_configuration()->channels()->Get(i);
1025 const Node *node = configuration::GetNode(
1026 configuration(), channel->source_node()->string_view());
1027
Austin Schuh8bd96322020-02-13 21:18:22 -08001028 State *state =
1029 states_[configuration::GetNodeIndex(configuration(), node)].get();
Austin Schuh6f3babe2020-01-26 20:34:50 -08001030
1031 const Channel *remapped_channel =
Austin Schuh858c9f32020-08-31 16:56:12 -07001032 RemapChannel(state->event_loop(), channel);
Austin Schuh6f3babe2020-01-26 20:34:50 -08001033
1034 event_loop_factory_->DisableForwarding(remapped_channel);
1035 }
Austin Schuh4c3b9702020-08-30 11:34:55 -07001036
1037 // If we are replaying a log, we don't want a bunch of redundant messages
1038 // from both the real message bridge and simulated message bridge.
1039 event_loop_factory_->DisableStatistics();
Austin Schuh6f3babe2020-01-26 20:34:50 -08001040 }
1041
Austin Schuhcde938c2020-02-02 17:30:07 -08001042 // While we are starting the system up, we might be relying on matching data
1043 // to timestamps on log files where the timestamp log file starts before the
1044 // data. In this case, it is reasonable to expect missing data.
1045 ignore_missing_data_ = true;
Austin Schuh2f8fd752020-09-01 22:38:28 -07001046 VLOG(1) << "Running until " << start_time << " in Register";
Austin Schuh8bd96322020-02-13 21:18:22 -08001047 event_loop_factory_->RunFor(start_time.time_since_epoch());
Brian Silverman8a32ce62020-08-12 12:02:38 -07001048 VLOG(1) << "At start time";
Austin Schuhcde938c2020-02-02 17:30:07 -08001049 // Now that we are running for real, missing data means that the log file is
1050 // corrupted or went wrong.
1051 ignore_missing_data_ = false;
Austin Schuh92547522019-12-28 14:33:43 -08001052
Austin Schuh8bd96322020-02-13 21:18:22 -08001053 for (std::unique_ptr<State> &state : states_) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001054 // Make the RT clock be correct before handing it to the user.
1055 if (state->realtime_start_time() != realtime_clock::min_time) {
1056 state->SetRealtimeOffset(state->monotonic_start_time(),
1057 state->realtime_start_time());
1058 }
1059 VLOG(1) << "Start time is " << state->monotonic_start_time() << " for node "
1060 << MaybeNodeName(state->event_loop()->node()) << "now "
1061 << state->monotonic_now();
1062 }
1063
1064 if (FLAGS_timestamps_to_csv) {
1065 for (std::pair<const std::tuple<const Node *, const Node *>,
1066 std::tuple<message_bridge::NoncausalOffsetEstimator>>
1067 &filter : filters_) {
1068 const Node *const node_a = std::get<0>(filter.first);
1069 const Node *const node_b = std::get<1>(filter.first);
1070
1071 std::get<0>(filter.second)
1072 .SetFirstFwdTime(event_loop_factory_->GetNodeEventLoopFactory(node_a)
1073 ->monotonic_now());
1074 std::get<0>(filter.second)
1075 .SetFirstRevTime(event_loop_factory_->GetNodeEventLoopFactory(node_b)
1076 ->monotonic_now());
1077 }
Austin Schuh8bd96322020-02-13 21:18:22 -08001078 }
1079}
1080
Austin Schuh2f8fd752020-09-01 22:38:28 -07001081void LogReader::UpdateOffsets() {
1082 VLOG(2) << "Samples are " << offset_matrix_;
1083 VLOG(2) << "Map is " << (map_matrix_ + slope_matrix_);
1084 std::tie(time_slope_matrix_, time_offset_matrix_) = SolveOffsets();
1085 Eigen::IOFormat HeavyFmt(Eigen::FullPrecision, 0, ", ", ";\n", "[", "]", "[",
1086 "]");
1087 VLOG(1) << "First slope " << time_slope_matrix_.transpose().format(HeavyFmt)
1088 << " offset " << time_offset_matrix_.transpose().format(HeavyFmt);
1089
1090 size_t node_index = 0;
1091 for (std::unique_ptr<State> &state : states_) {
1092 state->SetDistributedOffset(offset(node_index), slope(node_index));
1093 VLOG(1) << "Offset for node " << node_index << " "
1094 << MaybeNodeName(state->event_loop()->node()) << "is "
1095 << aos::distributed_clock::time_point(offset(node_index))
1096 << " slope " << std::setprecision(9) << std::fixed
1097 << slope(node_index);
1098 ++node_index;
1099 }
1100
1101 if (VLOG_IS_ON(1)) {
1102 LogFit("Offset is");
1103 }
1104}
1105
// Debug helper: for every node pair with timestamp samples, logs the fit the
// estimator computed, re-derives the pairwise slope/offset from the solved
// per-node values to show the residual error, and round-trips the first and
// last sample timestamps through the distributed clock to sanity check the
// conversions.  Output only appears at VLOG level 1.
void LogReader::LogFit(std::string_view prefix) {
  for (std::unique_ptr<State> &state : states_) {
    VLOG(1) << MaybeNodeName(state->event_loop()->node()) << " now "
            << state->monotonic_now() << " distributed "
            << event_loop_factory_->distributed_now();
  }

  for (std::pair<const std::tuple<const Node *, const Node *>,
                 std::tuple<message_bridge::NoncausalOffsetEstimator>> &filter :
       filters_) {
    message_bridge::NoncausalOffsetEstimator *estimator =
        &std::get<0>(filter.second);

    // Skip pairs which have no samples yet; there is nothing to report.
    if (estimator->a_timestamps().size() == 0 &&
        estimator->b_timestamps().size() == 0) {
      continue;
    }

    if (VLOG_IS_ON(1)) {
      estimator->LogFit(prefix);
    }

    const Node *const node_a = std::get<0>(filter.first);
    const Node *const node_b = std::get<1>(filter.first);

    const size_t node_a_index =
        configuration::GetNodeIndex(configuration(), node_a);
    const size_t node_b_index =
        configuration::GetNodeIndex(configuration(), node_b);

    // Reconstruct the pairwise fit implied by the solved per-node slopes and
    // offsets so it can be compared against the estimator's own fit.
    const double recovered_slope =
        slope(node_b_index) / slope(node_a_index) - 1.0;
    const int64_t recovered_offset =
        offset(node_b_index).count() - offset(node_a_index).count() *
                                           slope(node_b_index) /
                                           slope(node_a_index);

    VLOG(1) << "Recovered slope " << std::setprecision(20) << recovered_slope
            << " (error " << recovered_slope - estimator->fit().slope() << ") "
            << " offset " << std::setprecision(20) << recovered_offset
            << " (error "
            << recovered_offset - estimator->fit().offset().count() << ")";

    // Convert node A's first and last sample times onto the distributed
    // clock, then check they map sensibly onto node B's clock.
    const aos::distributed_clock::time_point a0 =
        states_[node_a_index]->ToDistributedClock(
            std::get<0>(estimator->a_timestamps()[0]));
    const aos::distributed_clock::time_point a1 =
        states_[node_a_index]->ToDistributedClock(
            std::get<0>(estimator->a_timestamps()[1]));

    VLOG(1) << node_a->name()->string_view() << " timestamps()[0] = "
            << std::get<0>(estimator->a_timestamps()[0]) << " -> " << a0
            << " distributed -> " << node_b->name()->string_view() << " "
            << states_[node_b_index]->FromDistributedClock(a0) << " should be "
            << aos::monotonic_clock::time_point(
                   std::chrono::nanoseconds(static_cast<int64_t>(
                       std::get<0>(estimator->a_timestamps()[0])
                           .time_since_epoch()
                           .count() *
                       (1.0 + estimator->fit().slope()))) +
                   estimator->fit().offset())
            << ((a0 <= event_loop_factory_->distributed_now())
                    ? ""
                    : " After now, investigate");
    VLOG(1) << node_a->name()->string_view() << " timestamps()[1] = "
            << std::get<0>(estimator->a_timestamps()[1]) << " -> " << a1
            << " distributed -> " << node_b->name()->string_view() << " "
            << states_[node_b_index]->FromDistributedClock(a1) << " should be "
            << aos::monotonic_clock::time_point(
                   std::chrono::nanoseconds(static_cast<int64_t>(
                       std::get<0>(estimator->a_timestamps()[1])
                           .time_since_epoch()
                           .count() *
                       (1.0 + estimator->fit().slope()))) +
                   estimator->fit().offset())
            << ((event_loop_factory_->distributed_now() <= a1)
                    ? ""
                    : " Before now, investigate");

    // Same round-trip check in the other direction, from node B's samples.
    const aos::distributed_clock::time_point b0 =
        states_[node_b_index]->ToDistributedClock(
            std::get<0>(estimator->b_timestamps()[0]));
    const aos::distributed_clock::time_point b1 =
        states_[node_b_index]->ToDistributedClock(
            std::get<0>(estimator->b_timestamps()[1]));

    VLOG(1) << node_b->name()->string_view() << " timestamps()[0] = "
            << std::get<0>(estimator->b_timestamps()[0]) << " -> " << b0
            << " distributed -> " << node_a->name()->string_view() << " "
            << states_[node_a_index]->FromDistributedClock(b0)
            << ((b0 <= event_loop_factory_->distributed_now())
                    ? ""
                    : " After now, investigate");
    VLOG(1) << node_b->name()->string_view() << " timestamps()[1] = "
            << std::get<0>(estimator->b_timestamps()[1]) << " -> " << b1
            << " distributed -> " << node_a->name()->string_view() << " "
            << states_[node_a_index]->FromDistributedClock(b1)
            << ((event_loop_factory_->distributed_now() <= b1)
                    ? ""
                    : " Before now, investigate");
  }
}
1208
1209message_bridge::NoncausalOffsetEstimator *LogReader::GetFilter(
Austin Schuh8bd96322020-02-13 21:18:22 -08001210 const Node *node_a, const Node *node_b) {
1211 CHECK_NE(node_a, node_b);
1212 CHECK_EQ(configuration::GetNode(configuration(), node_a), node_a);
1213 CHECK_EQ(configuration::GetNode(configuration(), node_b), node_b);
1214
1215 if (node_a > node_b) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001216 return GetFilter(node_b, node_a);
Austin Schuh8bd96322020-02-13 21:18:22 -08001217 }
1218
1219 auto tuple = std::make_tuple(node_a, node_b);
1220
1221 auto it = filters_.find(tuple);
1222
1223 if (it == filters_.end()) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001224 auto &x =
1225 filters_
1226 .insert(std::make_pair(
1227 tuple, std::make_tuple(message_bridge::NoncausalOffsetEstimator(
1228 node_a, node_b))))
1229 .first->second;
Austin Schuh8bd96322020-02-13 21:18:22 -08001230 if (FLAGS_timestamps_to_csv) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001231 std::get<0>(x).SetFwdCsvFileName(absl::StrCat(
1232 "/tmp/timestamp_noncausal_", node_a->name()->string_view(), "_",
1233 node_b->name()->string_view()));
1234 std::get<0>(x).SetRevCsvFileName(absl::StrCat(
1235 "/tmp/timestamp_noncausal_", node_b->name()->string_view(), "_",
1236 node_a->name()->string_view()));
Austin Schuh8bd96322020-02-13 21:18:22 -08001237 }
1238
Austin Schuh2f8fd752020-09-01 22:38:28 -07001239 return &std::get<0>(x);
Austin Schuh8bd96322020-02-13 21:18:22 -08001240 } else {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001241 return &std::get<0>(it->second);
Austin Schuh8bd96322020-02-13 21:18:22 -08001242 }
1243}
1244
Austin Schuh8bd96322020-02-13 21:18:22 -08001245
Austin Schuhe309d2a2019-11-29 13:25:21 -08001246void LogReader::Register(EventLoop *event_loop) {
Austin Schuh8bd96322020-02-13 21:18:22 -08001247 State *state =
1248 states_[configuration::GetNodeIndex(configuration(), event_loop->node())]
1249 .get();
Austin Schuh6f3babe2020-01-26 20:34:50 -08001250
Austin Schuh858c9f32020-08-31 16:56:12 -07001251 state->set_event_loop(event_loop);
Austin Schuhe309d2a2019-11-29 13:25:21 -08001252
Tyler Chatow67ddb032020-01-12 14:30:04 -08001253 // We don't run timing reports when trying to print out logged data, because
1254 // otherwise we would end up printing out the timing reports themselves...
1255 // This is only really relevant when we are replaying into a simulation.
Austin Schuh6f3babe2020-01-26 20:34:50 -08001256 event_loop->SkipTimingReport();
1257 event_loop->SkipAosLog();
Austin Schuh39788ff2019-12-01 18:22:57 -08001258
Austin Schuh858c9f32020-08-31 16:56:12 -07001259 const bool has_data = state->SetNode();
Austin Schuhe309d2a2019-11-29 13:25:21 -08001260
Austin Schuh858c9f32020-08-31 16:56:12 -07001261 state->SetChannelCount(logged_configuration()->channels()->size());
Austin Schuh8bd96322020-02-13 21:18:22 -08001262
Austin Schuh858c9f32020-08-31 16:56:12 -07001263 for (size_t i = 0; i < logged_configuration()->channels()->size(); ++i) {
Austin Schuh6f3babe2020-01-26 20:34:50 -08001264 const Channel *channel =
1265 RemapChannel(event_loop, logged_configuration()->channels()->Get(i));
Austin Schuh6331ef92020-01-07 18:28:09 -08001266
Austin Schuh858c9f32020-08-31 16:56:12 -07001267 NodeEventLoopFactory *channel_target_event_loop_factory = nullptr;
Austin Schuh2f8fd752020-09-01 22:38:28 -07001268 message_bridge::NoncausalOffsetEstimator *filter = nullptr;
Austin Schuh8bd96322020-02-13 21:18:22 -08001269
1270 if (!configuration::ChannelIsSendableOnNode(channel, event_loop->node()) &&
1271 configuration::ChannelIsReadableOnNode(channel, event_loop->node())) {
1272 const Node *target_node = configuration::GetNode(
1273 event_loop->configuration(), channel->source_node()->string_view());
Austin Schuh858c9f32020-08-31 16:56:12 -07001274 filter = GetFilter(event_loop->node(), target_node);
Austin Schuh8bd96322020-02-13 21:18:22 -08001275
1276 if (event_loop_factory_ != nullptr) {
Austin Schuh858c9f32020-08-31 16:56:12 -07001277 channel_target_event_loop_factory =
Austin Schuh8bd96322020-02-13 21:18:22 -08001278 event_loop_factory_->GetNodeEventLoopFactory(target_node);
1279 }
1280 }
Austin Schuh858c9f32020-08-31 16:56:12 -07001281
1282 state->SetChannel(i, event_loop->MakeRawSender(channel), filter,
1283 channel_target_event_loop_factory);
Austin Schuhe309d2a2019-11-29 13:25:21 -08001284 }
1285
Austin Schuh6aa77be2020-02-22 21:06:40 -08001286 // If we didn't find any log files with data in them, we won't ever get a
1287 // callback or be live. So skip the rest of the setup.
1288 if (!has_data) {
1289 return;
1290 }
1291
Austin Schuh858c9f32020-08-31 16:56:12 -07001292 state->set_timer_handler(event_loop->AddTimer([this, state]() {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001293 VLOG(1) << "Starting sending " << MaybeNodeName(state->event_loop()->node())
1294 << "at " << state->event_loop()->context().monotonic_event_time
1295 << " now " << state->monotonic_now();
Austin Schuh858c9f32020-08-31 16:56:12 -07001296 if (state->OldestMessageTime() == monotonic_clock::max_time) {
Austin Schuh6f3babe2020-01-26 20:34:50 -08001297 --live_nodes_;
Austin Schuh2f8fd752020-09-01 22:38:28 -07001298 VLOG(1) << MaybeNodeName(state->event_loop()->node()) << "Node down!";
Austin Schuh6f3babe2020-01-26 20:34:50 -08001299 if (live_nodes_ == 0) {
1300 event_loop_factory_->Exit();
1301 }
James Kuszmaul314f1672020-01-03 20:02:08 -08001302 return;
1303 }
Austin Schuh6f3babe2020-01-26 20:34:50 -08001304 TimestampMerger::DeliveryTimestamp channel_timestamp;
Austin Schuh05b70472020-01-01 17:11:17 -08001305 int channel_index;
1306 FlatbufferVector<MessageHeader> channel_data =
1307 FlatbufferVector<MessageHeader>::Empty();
1308
Austin Schuh2f8fd752020-09-01 22:38:28 -07001309 if (VLOG_IS_ON(1)) {
1310 LogFit("Offset was");
1311 }
1312
1313 bool update_time;
Austin Schuh05b70472020-01-01 17:11:17 -08001314 std::tie(channel_timestamp, channel_index, channel_data) =
Austin Schuh2f8fd752020-09-01 22:38:28 -07001315 state->PopOldest(&update_time);
Austin Schuh05b70472020-01-01 17:11:17 -08001316
Austin Schuhe309d2a2019-11-29 13:25:21 -08001317 const monotonic_clock::time_point monotonic_now =
Austin Schuh858c9f32020-08-31 16:56:12 -07001318 state->event_loop()->context().monotonic_event_time;
Austin Schuh2f8fd752020-09-01 22:38:28 -07001319 if (!FLAGS_skip_order_validation) {
1320 CHECK(monotonic_now == channel_timestamp.monotonic_event_time)
1321 << ": " << FlatbufferToJson(state->event_loop()->node()) << " Now "
1322 << monotonic_now << " trying to send "
1323 << channel_timestamp.monotonic_event_time << " failure "
1324 << state->DebugString();
1325 } else if (monotonic_now != channel_timestamp.monotonic_event_time) {
1326 LOG(WARNING) << "Check failed: monotonic_now == "
1327 "channel_timestamp.monotonic_event_time) ("
1328 << monotonic_now << " vs. "
1329 << channel_timestamp.monotonic_event_time
1330 << "): " << FlatbufferToJson(state->event_loop()->node())
1331 << " Now " << monotonic_now << " trying to send "
1332 << channel_timestamp.monotonic_event_time << " failure "
1333 << state->DebugString();
1334 }
Austin Schuhe309d2a2019-11-29 13:25:21 -08001335
Austin Schuh6f3babe2020-01-26 20:34:50 -08001336 if (channel_timestamp.monotonic_event_time >
Austin Schuh858c9f32020-08-31 16:56:12 -07001337 state->monotonic_start_time() ||
Austin Schuh15649d62019-12-28 16:36:38 -08001338 event_loop_factory_ != nullptr) {
Austin Schuh8bd96322020-02-13 21:18:22 -08001339 if ((!ignore_missing_data_ && !FLAGS_skip_missing_forwarding_entries &&
Austin Schuh858c9f32020-08-31 16:56:12 -07001340 !state->at_end()) ||
Austin Schuh05b70472020-01-01 17:11:17 -08001341 channel_data.message().data() != nullptr) {
1342 CHECK(channel_data.message().data() != nullptr)
1343 << ": Got a message without data. Forwarding entry which was "
Austin Schuh2f8fd752020-09-01 22:38:28 -07001344 "not matched? Use --skip_missing_forwarding_entries to "
Brian Silverman87ac0402020-09-17 14:47:01 -07001345 "ignore this.";
Austin Schuh92547522019-12-28 14:33:43 -08001346
Austin Schuh2f8fd752020-09-01 22:38:28 -07001347 if (update_time) {
Austin Schuh8bd96322020-02-13 21:18:22 -08001348 // Confirm that the message was sent on the sending node before the
1349 // destination node (this node). As a proxy, do this by making sure
1350 // that time on the source node is past when the message was sent.
Austin Schuh2f8fd752020-09-01 22:38:28 -07001351 if (!FLAGS_skip_order_validation) {
1352 CHECK_LT(channel_timestamp.monotonic_remote_time,
1353 state->monotonic_remote_now(channel_index))
1354 << state->event_loop()->node()->name()->string_view() << " to "
1355 << state->remote_node(channel_index)->name()->string_view()
1356 << " " << state->DebugString();
1357 } else if (channel_timestamp.monotonic_remote_time >=
1358 state->monotonic_remote_now(channel_index)) {
1359 LOG(WARNING)
1360 << "Check failed: channel_timestamp.monotonic_remote_time < "
1361 "state->monotonic_remote_now(channel_index) ("
1362 << channel_timestamp.monotonic_remote_time << " vs. "
1363 << state->monotonic_remote_now(channel_index) << ") "
1364 << state->event_loop()->node()->name()->string_view() << " to "
1365 << state->remote_node(channel_index)->name()->string_view()
1366 << " currently " << channel_timestamp.monotonic_event_time
1367 << " ("
1368 << state->ToDistributedClock(
1369 channel_timestamp.monotonic_event_time)
1370 << ") remote event time "
1371 << channel_timestamp.monotonic_remote_time << " ("
1372 << state->RemoteToDistributedClock(
1373 channel_index, channel_timestamp.monotonic_remote_time)
1374 << ") " << state->DebugString();
1375 }
Austin Schuh8bd96322020-02-13 21:18:22 -08001376
1377 if (FLAGS_timestamps_to_csv) {
1378 if (offset_fp_ == nullptr) {
1379 offset_fp_ = fopen("/tmp/offsets.csv", "w");
1380 fprintf(
1381 offset_fp_,
1382 "# time_since_start, offset node 0, offset node 1, ...\n");
1383 first_time_ = channel_timestamp.realtime_event_time;
1384 }
1385
1386 fprintf(offset_fp_, "%.9f",
1387 std::chrono::duration_cast<std::chrono::duration<double>>(
1388 channel_timestamp.realtime_event_time - first_time_)
1389 .count());
Austin Schuh2f8fd752020-09-01 22:38:28 -07001390 for (int i = 1; i < time_offset_matrix_.rows(); ++i) {
1391 fprintf(offset_fp_, ", %.9f",
1392 time_offset_matrix_(i, 0) +
1393 time_slope_matrix_(i, 0) *
1394 chrono::duration<double>(
1395 event_loop_factory_->distributed_now()
1396 .time_since_epoch())
1397 .count());
Austin Schuh8bd96322020-02-13 21:18:22 -08001398 }
1399 fprintf(offset_fp_, "\n");
1400 }
Austin Schuh8bd96322020-02-13 21:18:22 -08001401 }
1402
Austin Schuh15649d62019-12-28 16:36:38 -08001403 // If we have access to the factory, use it to fix the realtime time.
Austin Schuh858c9f32020-08-31 16:56:12 -07001404 state->SetRealtimeOffset(channel_timestamp.monotonic_event_time,
1405 channel_timestamp.realtime_event_time);
Austin Schuh15649d62019-12-28 16:36:38 -08001406
Austin Schuh2f8fd752020-09-01 22:38:28 -07001407 VLOG(1) << MaybeNodeName(state->event_loop()->node()) << "Sending "
1408 << channel_timestamp.monotonic_event_time;
1409 // TODO(austin): std::move channel_data in and make that efficient in
1410 // simulation.
Austin Schuh858c9f32020-08-31 16:56:12 -07001411 state->Send(channel_index, channel_data.message().data()->Data(),
1412 channel_data.message().data()->size(),
1413 channel_timestamp.monotonic_remote_time,
1414 channel_timestamp.realtime_remote_time,
1415 channel_timestamp.remote_queue_index);
Austin Schuh2f8fd752020-09-01 22:38:28 -07001416 } else if (state->at_end() && !ignore_missing_data_) {
Austin Schuh8bd96322020-02-13 21:18:22 -08001417 // We are at the end of the log file and found missing data. Finish
Austin Schuh2f8fd752020-09-01 22:38:28 -07001418 // reading the rest of the log file and call it quits. We don't want
1419 // to replay partial data.
Austin Schuh858c9f32020-08-31 16:56:12 -07001420 while (state->OldestMessageTime() != monotonic_clock::max_time) {
1421 bool update_time_dummy;
1422 state->PopOldest(&update_time_dummy);
Austin Schuh8bd96322020-02-13 21:18:22 -08001423 }
Austin Schuh2f8fd752020-09-01 22:38:28 -07001424 } else {
1425 CHECK(channel_data.message().data() == nullptr) << ": Nullptr";
Austin Schuh92547522019-12-28 14:33:43 -08001426 }
Austin Schuhe309d2a2019-11-29 13:25:21 -08001427 } else {
Austin Schuh6f3babe2020-01-26 20:34:50 -08001428 LOG(WARNING)
1429 << "Not sending data from before the start of the log file. "
1430 << channel_timestamp.monotonic_event_time.time_since_epoch().count()
1431 << " start " << monotonic_start_time().time_since_epoch().count()
1432 << " " << FlatbufferToJson(channel_data);
Austin Schuhe309d2a2019-11-29 13:25:21 -08001433 }
1434
Austin Schuh858c9f32020-08-31 16:56:12 -07001435 const monotonic_clock::time_point next_time = state->OldestMessageTime();
Austin Schuh6f3babe2020-01-26 20:34:50 -08001436 if (next_time != monotonic_clock::max_time) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001437 VLOG(1) << "Scheduling " << MaybeNodeName(state->event_loop()->node())
1438 << "wakeup for " << next_time << "("
1439 << state->ToDistributedClock(next_time)
1440 << " distributed), now is " << state->monotonic_now();
Austin Schuh858c9f32020-08-31 16:56:12 -07001441 state->Setup(next_time);
James Kuszmaul314f1672020-01-03 20:02:08 -08001442 } else {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001443 VLOG(1) << MaybeNodeName(state->event_loop()->node())
1444 << "No next message, scheduling shutdown";
1445 // Set a timer up immediately after now to die. If we don't do this,
1446 // then the senders waiting on the message we just read will never get
1447 // called.
Austin Schuheecb9282020-01-08 17:43:30 -08001448 if (event_loop_factory_ != nullptr) {
Austin Schuh858c9f32020-08-31 16:56:12 -07001449 state->Setup(monotonic_now + event_loop_factory_->send_delay() +
1450 std::chrono::nanoseconds(1));
Austin Schuheecb9282020-01-08 17:43:30 -08001451 }
Austin Schuhe309d2a2019-11-29 13:25:21 -08001452 }
Austin Schuh8bd96322020-02-13 21:18:22 -08001453
Austin Schuh2f8fd752020-09-01 22:38:28 -07001454 // Once we make this call, the current time changes. So do everything
1455 // which involves time before changing it. That especially includes
1456 // sending the message.
1457 if (update_time) {
1458 VLOG(1) << MaybeNodeName(state->event_loop()->node())
1459 << "updating offsets";
1460
1461 std::vector<aos::monotonic_clock::time_point> before_times;
1462 before_times.resize(states_.size());
1463 std::transform(states_.begin(), states_.end(), before_times.begin(),
1464 [](const std::unique_ptr<State> &state) {
1465 return state->monotonic_now();
1466 });
1467
1468 for (size_t i = 0; i < states_.size(); ++i) {
1469 VLOG(1) << MaybeNodeName(
1470 states_[i]->event_loop()->node())
1471 << "before " << states_[i]->monotonic_now();
1472 }
1473
Austin Schuh8bd96322020-02-13 21:18:22 -08001474 UpdateOffsets();
Austin Schuh2f8fd752020-09-01 22:38:28 -07001475 VLOG(1) << MaybeNodeName(state->event_loop()->node()) << "Now is now "
1476 << state->monotonic_now();
1477
1478 for (size_t i = 0; i < states_.size(); ++i) {
1479 VLOG(1) << MaybeNodeName(
1480 states_[i]->event_loop()->node())
1481 << "after " << states_[i]->monotonic_now();
1482 }
1483
1484 // TODO(austin): We should be perfect.
1485 const std::chrono::nanoseconds kTolerance{3};
1486 if (!FLAGS_skip_order_validation) {
1487 CHECK_GE(next_time, state->monotonic_now())
1488 << ": Time skipped the next event.";
1489
1490 for (size_t i = 0; i < states_.size(); ++i) {
1491 CHECK_GE(states_[i]->monotonic_now(), before_times[i] - kTolerance)
1492 << ": Time changed too much on node "
1493 << MaybeNodeName(states_[i]->event_loop()->node());
1494 CHECK_LE(states_[i]->monotonic_now(), before_times[i] + kTolerance)
1495 << ": Time changed too much on node "
1496 << states_[i]->event_loop()->node()->name()->string_view();
1497 }
1498 } else {
1499 if (next_time < state->monotonic_now()) {
1500 LOG(WARNING) << "Check failed: next_time >= "
1501 "state->monotonic_now() ("
1502 << next_time << " vs. " << state->monotonic_now()
1503 << "): Time skipped the next event.";
1504 }
1505 for (size_t i = 0; i < states_.size(); ++i) {
1506 if (states_[i]->monotonic_now() >= before_times[i] - kTolerance) {
1507 LOG(WARNING) << "Check failed: "
1508 "states_[i]->monotonic_now() "
1509 ">= before_times[i] - kTolerance ("
1510 << states_[i]->monotonic_now() << " vs. "
1511 << before_times[i] - kTolerance
1512 << ") : Time changed too much on node "
1513 << MaybeNodeName(states_[i]->event_loop()->node());
1514 }
1515 if (states_[i]->monotonic_now() <= before_times[i] + kTolerance) {
1516 LOG(WARNING) << "Check failed: "
1517 "states_[i]->monotonic_now() "
1518 "<= before_times[i] + kTolerance ("
1519 << states_[i]->monotonic_now() << " vs. "
1520 << before_times[i] - kTolerance
1521 << ") : Time changed too much on node "
1522 << MaybeNodeName(states_[i]->event_loop()->node());
1523 }
1524 }
1525 }
Austin Schuh8bd96322020-02-13 21:18:22 -08001526 }
Austin Schuh2f8fd752020-09-01 22:38:28 -07001527
1528 VLOG(1) << MaybeNodeName(state->event_loop()->node()) << "Done sending at "
1529 << state->event_loop()->context().monotonic_event_time << " now "
1530 << state->monotonic_now();
Austin Schuh858c9f32020-08-31 16:56:12 -07001531 }));
Austin Schuhe309d2a2019-11-29 13:25:21 -08001532
Austin Schuh6f3babe2020-01-26 20:34:50 -08001533 ++live_nodes_;
1534
Austin Schuh858c9f32020-08-31 16:56:12 -07001535 if (state->OldestMessageTime() != monotonic_clock::max_time) {
1536 event_loop->OnRun([state]() { state->Setup(state->OldestMessageTime()); });
Austin Schuhe309d2a2019-11-29 13:25:21 -08001537 }
1538}
1539
1540void LogReader::Deregister() {
James Kuszmaul84ff3e52020-01-03 19:48:53 -08001541 // Make sure that things get destroyed in the correct order, rather than
1542 // relying on getting the order correct in the class definition.
Austin Schuh8bd96322020-02-13 21:18:22 -08001543 for (std::unique_ptr<State> &state : states_) {
Austin Schuh858c9f32020-08-31 16:56:12 -07001544 state->Deregister();
Austin Schuhe309d2a2019-11-29 13:25:21 -08001545 }
Austin Schuh92547522019-12-28 14:33:43 -08001546
James Kuszmaul84ff3e52020-01-03 19:48:53 -08001547 event_loop_factory_unique_ptr_.reset();
1548 event_loop_factory_ = nullptr;
Austin Schuhe309d2a2019-11-29 13:25:21 -08001549}
1550
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001551void LogReader::RemapLoggedChannel(std::string_view name, std::string_view type,
1552 std::string_view add_prefix) {
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001553 for (size_t ii = 0; ii < logged_configuration()->channels()->size(); ++ii) {
1554 const Channel *const channel = logged_configuration()->channels()->Get(ii);
1555 if (channel->name()->str() == name &&
1556 channel->type()->string_view() == type) {
1557 CHECK_EQ(0u, remapped_channels_.count(ii))
1558 << "Already remapped channel "
1559 << configuration::CleanedChannelToString(channel);
1560 remapped_channels_[ii] = std::string(add_prefix) + std::string(name);
1561 VLOG(1) << "Remapping channel "
1562 << configuration::CleanedChannelToString(channel)
1563 << " to have name " << remapped_channels_[ii];
Austin Schuh6331ef92020-01-07 18:28:09 -08001564 MakeRemappedConfig();
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001565 return;
1566 }
1567 }
1568 LOG(FATAL) << "Unabled to locate channel with name " << name << " and type "
1569 << type;
1570}
1571
Austin Schuh01b4c352020-09-21 23:09:39 -07001572void LogReader::RemapLoggedChannel(std::string_view name, std::string_view type,
1573 const Node *node,
1574 std::string_view add_prefix) {
1575 VLOG(1) << "Node is " << aos::FlatbufferToJson(node);
1576 const Channel *remapped_channel =
1577 configuration::GetChannel(logged_configuration(), name, type, "", node);
1578 CHECK(remapped_channel != nullptr) << ": Failed to find {\"name\": \"" << name
1579 << "\", \"type\": \"" << type << "\"}";
1580 VLOG(1) << "Original {\"name\": \"" << name << "\", \"type\": \"" << type
1581 << "\"}";
1582 VLOG(1) << "Remapped "
1583 << aos::configuration::StrippedChannelToString(remapped_channel);
1584
1585 // We want to make /spray on node 0 go to /0/spray by snooping the maps. And
1586 // we want it to degrade if the heuristics fail to just work.
1587 //
1588 // The easiest way to do this is going to be incredibly specific and verbose.
1589 // Look up /spray, to /0/spray. Then, prefix the result with /original to get
1590 // /original/0/spray. Then, create a map from /original/spray to
1591 // /original/0/spray for just the type we were asked for.
1592 if (name != remapped_channel->name()->string_view()) {
1593 MapT new_map;
1594 new_map.match = std::make_unique<ChannelT>();
1595 new_map.match->name = absl::StrCat(add_prefix, name);
1596 new_map.match->type = type;
1597 if (node != nullptr) {
1598 new_map.match->source_node = node->name()->str();
1599 }
1600 new_map.rename = std::make_unique<ChannelT>();
1601 new_map.rename->name =
1602 absl::StrCat(add_prefix, remapped_channel->name()->string_view());
1603 maps_.emplace_back(std::move(new_map));
1604 }
1605
1606 const size_t channel_index =
1607 configuration::ChannelIndex(logged_configuration(), remapped_channel);
1608 CHECK_EQ(0u, remapped_channels_.count(channel_index))
1609 << "Already remapped channel "
1610 << configuration::CleanedChannelToString(remapped_channel);
1611 remapped_channels_[channel_index] =
1612 absl::StrCat(add_prefix, remapped_channel->name()->string_view());
1613 MakeRemappedConfig();
1614}
1615
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001616void LogReader::MakeRemappedConfig() {
Austin Schuh8bd96322020-02-13 21:18:22 -08001617 for (std::unique_ptr<State> &state : states_) {
Austin Schuh6aa77be2020-02-22 21:06:40 -08001618 if (state) {
Austin Schuh858c9f32020-08-31 16:56:12 -07001619 CHECK(!state->event_loop())
Austin Schuh6aa77be2020-02-22 21:06:40 -08001620 << ": Can't change the mapping after the events are scheduled.";
1621 }
Austin Schuh6f3babe2020-01-26 20:34:50 -08001622 }
Austin Schuhac0771c2020-01-07 18:36:30 -08001623
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001624 // If no remapping occurred and we are using the original config, then there
1625 // is nothing interesting to do here.
1626 if (remapped_channels_.empty() && replay_configuration_ == nullptr) {
Austin Schuh6f3babe2020-01-26 20:34:50 -08001627 remapped_configuration_ = logged_configuration();
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001628 return;
1629 }
1630 // Config to copy Channel definitions from. Use the specified
1631 // replay_configuration_ if it has been provided.
1632 const Configuration *const base_config = replay_configuration_ == nullptr
1633 ? logged_configuration()
1634 : replay_configuration_;
1635 // The remapped config will be identical to the base_config, except that it
1636 // will have a bunch of extra channels in the channel list, which are exact
1637 // copies of the remapped channels, but with different names.
1638 // Because the flatbuffers API is a pain to work with, this requires a bit of
1639 // a song-and-dance to get copied over.
1640 // The order of operations is to:
1641 // 1) Make a flatbuffer builder for a config that will just contain a list of
1642 // the new channels that we want to add.
1643 // 2) For each channel that we are remapping:
1644 // a) Make a buffer/builder and construct into it a Channel table that only
1645 // contains the new name for the channel.
1646 // b) Merge the new channel with just the name into the channel that we are
1647 // trying to copy, built in the flatbuffer builder made in 1. This gives
1648 // us the new channel definition that we need.
1649 // 3) Using this list of offsets, build the Configuration of just new
1650 // Channels.
1651 // 4) Merge the Configuration with the new Channels into the base_config.
1652 // 5) Call MergeConfiguration() on that result to give MergeConfiguration a
1653 // chance to sanitize the config.
1654
1655 // This is the builder that we use for the config containing all the new
1656 // channels.
1657 flatbuffers::FlatBufferBuilder new_config_fbb;
Austin Schuhd7b15da2020-02-17 15:06:11 -08001658 new_config_fbb.ForceDefaults(true);
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001659 std::vector<flatbuffers::Offset<Channel>> channel_offsets;
1660 for (auto &pair : remapped_channels_) {
1661 // This is the builder that we use for creating the Channel with just the
1662 // new name.
1663 flatbuffers::FlatBufferBuilder new_name_fbb;
Austin Schuhd7b15da2020-02-17 15:06:11 -08001664 new_name_fbb.ForceDefaults(true);
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001665 const flatbuffers::Offset<flatbuffers::String> name_offset =
1666 new_name_fbb.CreateString(pair.second);
1667 ChannelBuilder new_name_builder(new_name_fbb);
1668 new_name_builder.add_name(name_offset);
1669 new_name_fbb.Finish(new_name_builder.Finish());
1670 const FlatbufferDetachedBuffer<Channel> new_name = new_name_fbb.Release();
Austin Schuh2f8fd752020-09-01 22:38:28 -07001671 // Retrieve the channel that we want to copy, confirming that it is
1672 // actually present in base_config.
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001673 const Channel *const base_channel = CHECK_NOTNULL(configuration::GetChannel(
1674 base_config, logged_configuration()->channels()->Get(pair.first), "",
1675 nullptr));
1676 // Actually create the new channel and put it into the vector of Offsets
1677 // that we will use to create the new Configuration.
1678 channel_offsets.emplace_back(MergeFlatBuffers<Channel>(
1679 reinterpret_cast<const flatbuffers::Table *>(base_channel),
1680 reinterpret_cast<const flatbuffers::Table *>(&new_name.message()),
1681 &new_config_fbb));
1682 }
1683 // Create the Configuration containing the new channels that we want to add.
Austin Schuh01b4c352020-09-21 23:09:39 -07001684 const auto new_channel_vector_offsets =
Austin Schuhfa895892020-01-07 20:07:41 -08001685 new_config_fbb.CreateVector(channel_offsets);
Austin Schuh01b4c352020-09-21 23:09:39 -07001686
1687 // Now create the new maps.
1688 std::vector<flatbuffers::Offset<Map>> map_offsets;
1689 for (const MapT &map : maps_) {
1690 const flatbuffers::Offset<flatbuffers::String> match_name_offset =
1691 new_config_fbb.CreateString(map.match->name);
1692 const flatbuffers::Offset<flatbuffers::String> match_type_offset =
1693 new_config_fbb.CreateString(map.match->type);
1694 const flatbuffers::Offset<flatbuffers::String> rename_name_offset =
1695 new_config_fbb.CreateString(map.rename->name);
1696 flatbuffers::Offset<flatbuffers::String> match_source_node_offset;
1697 if (!map.match->source_node.empty()) {
1698 match_source_node_offset =
1699 new_config_fbb.CreateString(map.match->source_node);
1700 }
1701 Channel::Builder match_builder(new_config_fbb);
1702 match_builder.add_name(match_name_offset);
1703 match_builder.add_type(match_type_offset);
1704 if (!map.match->source_node.empty()) {
1705 match_builder.add_source_node(match_source_node_offset);
1706 }
1707 const flatbuffers::Offset<Channel> match_offset = match_builder.Finish();
1708
1709 Channel::Builder rename_builder(new_config_fbb);
1710 rename_builder.add_name(rename_name_offset);
1711 const flatbuffers::Offset<Channel> rename_offset = rename_builder.Finish();
1712
1713 Map::Builder map_builder(new_config_fbb);
1714 map_builder.add_match(match_offset);
1715 map_builder.add_rename(rename_offset);
1716 map_offsets.emplace_back(map_builder.Finish());
1717 }
1718
1719 const auto new_maps_offsets = new_config_fbb.CreateVector(map_offsets);
1720
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001721 ConfigurationBuilder new_config_builder(new_config_fbb);
Austin Schuh01b4c352020-09-21 23:09:39 -07001722 new_config_builder.add_channels(new_channel_vector_offsets);
1723 new_config_builder.add_maps(new_maps_offsets);
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001724 new_config_fbb.Finish(new_config_builder.Finish());
1725 const FlatbufferDetachedBuffer<Configuration> new_name_config =
1726 new_config_fbb.Release();
1727 // Merge the new channels configuration into the base_config, giving us the
1728 // remapped configuration.
1729 remapped_configuration_buffer_ =
1730 std::make_unique<FlatbufferDetachedBuffer<Configuration>>(
1731 MergeFlatBuffers<Configuration>(base_config,
1732 &new_name_config.message()));
1733 // Call MergeConfiguration to deal with sanitizing the config.
1734 remapped_configuration_buffer_ =
1735 std::make_unique<FlatbufferDetachedBuffer<Configuration>>(
1736 configuration::MergeConfiguration(*remapped_configuration_buffer_));
1737
1738 remapped_configuration_ = &remapped_configuration_buffer_->message();
1739}
1740
Austin Schuh6f3babe2020-01-26 20:34:50 -08001741const Channel *LogReader::RemapChannel(const EventLoop *event_loop,
1742 const Channel *channel) {
1743 std::string_view channel_name = channel->name()->string_view();
1744 std::string_view channel_type = channel->type()->string_view();
1745 const int channel_index =
1746 configuration::ChannelIndex(logged_configuration(), channel);
1747 // If the channel is remapped, find the correct channel name to use.
1748 if (remapped_channels_.count(channel_index) > 0) {
Austin Schuhee711052020-08-24 16:06:09 -07001749 VLOG(3) << "Got remapped channel on "
Austin Schuh6f3babe2020-01-26 20:34:50 -08001750 << configuration::CleanedChannelToString(channel);
1751 channel_name = remapped_channels_[channel_index];
1752 }
1753
Austin Schuhee711052020-08-24 16:06:09 -07001754 VLOG(2) << "Going to remap channel " << channel_name << " " << channel_type;
Austin Schuh6f3babe2020-01-26 20:34:50 -08001755 const Channel *remapped_channel = configuration::GetChannel(
1756 event_loop->configuration(), channel_name, channel_type,
1757 event_loop->name(), event_loop->node());
1758
1759 CHECK(remapped_channel != nullptr)
1760 << ": Unable to send {\"name\": \"" << channel_name << "\", \"type\": \""
1761 << channel_type << "\"} because it is not in the provided configuration.";
1762
1763 return remapped_channel;
1764}
1765
Austin Schuh858c9f32020-08-31 16:56:12 -07001766LogReader::State::State(std::unique_ptr<ChannelMerger> channel_merger)
1767 : channel_merger_(std::move(channel_merger)) {}
1768
1769EventLoop *LogReader::State::SetNodeEventLoopFactory(
1770 NodeEventLoopFactory *node_event_loop_factory) {
1771 node_event_loop_factory_ = node_event_loop_factory;
1772 event_loop_unique_ptr_ =
1773 node_event_loop_factory_->MakeEventLoop("log_reader");
1774 return event_loop_unique_ptr_.get();
1775}
1776
1777void LogReader::State::SetChannelCount(size_t count) {
1778 channels_.resize(count);
1779 filters_.resize(count);
1780 channel_target_event_loop_factory_.resize(count);
1781}
1782
1783void LogReader::State::SetChannel(
1784 size_t channel, std::unique_ptr<RawSender> sender,
Austin Schuh2f8fd752020-09-01 22:38:28 -07001785 message_bridge::NoncausalOffsetEstimator *filter,
Austin Schuh858c9f32020-08-31 16:56:12 -07001786 NodeEventLoopFactory *channel_target_event_loop_factory) {
1787 channels_[channel] = std::move(sender);
1788 filters_[channel] = filter;
1789 channel_target_event_loop_factory_[channel] =
1790 channel_target_event_loop_factory;
1791}
1792
1793std::tuple<TimestampMerger::DeliveryTimestamp, int,
1794 FlatbufferVector<MessageHeader>>
1795LogReader::State::PopOldest(bool *update_time) {
1796 CHECK_GT(sorted_messages_.size(), 0u);
1797
1798 std::tuple<TimestampMerger::DeliveryTimestamp, int,
Austin Schuh2f8fd752020-09-01 22:38:28 -07001799 FlatbufferVector<MessageHeader>,
1800 message_bridge::NoncausalOffsetEstimator *>
Austin Schuh858c9f32020-08-31 16:56:12 -07001801 result = std::move(sorted_messages_.front());
Austin Schuh2f8fd752020-09-01 22:38:28 -07001802 VLOG(2) << MaybeNodeName(event_loop_->node()) << "PopOldest Popping "
Austin Schuh858c9f32020-08-31 16:56:12 -07001803 << std::get<0>(result).monotonic_event_time;
1804 sorted_messages_.pop_front();
1805 SeedSortedMessages();
1806
Austin Schuh2f8fd752020-09-01 22:38:28 -07001807 if (std::get<3>(result) != nullptr) {
1808 *update_time = std::get<3>(result)->Pop(
1809 event_loop_->node(), std::get<0>(result).monotonic_event_time);
1810 } else {
1811 *update_time = false;
1812 }
Austin Schuh858c9f32020-08-31 16:56:12 -07001813 return std::make_tuple(std::get<0>(result), std::get<1>(result),
1814 std::move(std::get<2>(result)));
1815}
1816
1817monotonic_clock::time_point LogReader::State::OldestMessageTime() const {
1818 if (sorted_messages_.size() > 0) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001819 VLOG(2) << MaybeNodeName(event_loop_->node()) << "oldest message at "
Austin Schuh858c9f32020-08-31 16:56:12 -07001820 << std::get<0>(sorted_messages_.front()).monotonic_event_time;
1821 return std::get<0>(sorted_messages_.front()).monotonic_event_time;
1822 }
1823
1824 return channel_merger_->OldestMessageTime();
1825}
1826
1827void LogReader::State::SeedSortedMessages() {
1828 const aos::monotonic_clock::time_point end_queue_time =
1829 (sorted_messages_.size() > 0
1830 ? std::get<0>(sorted_messages_.front()).monotonic_event_time
1831 : channel_merger_->monotonic_start_time()) +
1832 std::chrono::seconds(2);
1833
1834 while (true) {
1835 if (channel_merger_->OldestMessageTime() == monotonic_clock::max_time) {
1836 return;
1837 }
1838 if (sorted_messages_.size() > 0) {
1839 // Stop placing sorted messages on the list once we have 2 seconds
1840 // queued up (but queue at least until the log starts.
1841 if (end_queue_time <
1842 std::get<0>(sorted_messages_.back()).monotonic_event_time) {
1843 return;
1844 }
1845 }
1846
1847 TimestampMerger::DeliveryTimestamp channel_timestamp;
1848 int channel_index;
1849 FlatbufferVector<MessageHeader> channel_data =
1850 FlatbufferVector<MessageHeader>::Empty();
1851
Austin Schuh2f8fd752020-09-01 22:38:28 -07001852 message_bridge::NoncausalOffsetEstimator *filter = nullptr;
1853
Austin Schuh858c9f32020-08-31 16:56:12 -07001854 std::tie(channel_timestamp, channel_index, channel_data) =
1855 channel_merger_->PopOldest();
1856
Austin Schuh2f8fd752020-09-01 22:38:28 -07001857 // Skip any messages without forwarding information.
1858 if (channel_timestamp.monotonic_remote_time != monotonic_clock::min_time) {
1859 // Got a forwarding timestamp!
1860 filter = filters_[channel_index];
1861
1862 CHECK(filter != nullptr);
1863
1864 // Call the correct method depending on if we are the forward or
1865 // reverse direction here.
1866 filter->Sample(event_loop_->node(),
1867 channel_timestamp.monotonic_event_time,
1868 channel_timestamp.monotonic_remote_time);
1869 }
Austin Schuh858c9f32020-08-31 16:56:12 -07001870 sorted_messages_.emplace_back(channel_timestamp, channel_index,
Austin Schuh2f8fd752020-09-01 22:38:28 -07001871 std::move(channel_data), filter);
Austin Schuh858c9f32020-08-31 16:56:12 -07001872 }
1873}
1874
1875void LogReader::State::Deregister() {
1876 for (size_t i = 0; i < channels_.size(); ++i) {
1877 channels_[i].reset();
1878 }
1879 event_loop_unique_ptr_.reset();
1880 event_loop_ = nullptr;
1881 timer_handler_ = nullptr;
1882 node_event_loop_factory_ = nullptr;
1883}
1884
Austin Schuhe309d2a2019-11-29 13:25:21 -08001885} // namespace logger
1886} // namespace aos