blob: 3032a6f106a619ddb113bdec523670eb84125add [file] [log] [blame]
James Kuszmaul38735e82019-12-07 16:42:06 -08001#include "aos/events/logging/logger.h"
Austin Schuhe309d2a2019-11-29 13:25:21 -08002
3#include <fcntl.h>
Austin Schuh4c4e0092019-12-22 16:18:03 -08004#include <limits.h>
Austin Schuhe309d2a2019-11-29 13:25:21 -08005#include <sys/stat.h>
6#include <sys/types.h>
7#include <sys/uio.h>
8#include <vector>
9
Austin Schuh8bd96322020-02-13 21:18:22 -080010#include "Eigen/Dense"
Austin Schuh2f8fd752020-09-01 22:38:28 -070011#include "absl/strings/escaping.h"
Austin Schuhe309d2a2019-11-29 13:25:21 -080012#include "absl/types/span.h"
13#include "aos/events/event_loop.h"
James Kuszmaul38735e82019-12-07 16:42:06 -080014#include "aos/events/logging/logger_generated.h"
Austin Schuh64fab802020-09-09 22:47:47 -070015#include "aos/events/logging/uuid.h"
Austin Schuhe309d2a2019-11-29 13:25:21 -080016#include "aos/flatbuffer_merge.h"
Austin Schuh288479d2019-12-18 19:47:52 -080017#include "aos/network/team_number.h"
Austin Schuhe309d2a2019-11-29 13:25:21 -080018#include "aos/time/time.h"
19#include "flatbuffers/flatbuffers.h"
Austin Schuh2f8fd752020-09-01 22:38:28 -070020#include "third_party/gmp/gmpxx.h"
Austin Schuhe309d2a2019-11-29 13:25:21 -080021
Austin Schuh15649d62019-12-28 16:36:38 -080022DEFINE_bool(skip_missing_forwarding_entries, false,
23 "If true, drop any forwarding entries with missing data. If "
24 "false, CHECK.");
Austin Schuhe309d2a2019-11-29 13:25:21 -080025
Austin Schuh8bd96322020-02-13 21:18:22 -080026DEFINE_bool(timestamps_to_csv, false,
27 "If true, write all the time synchronization information to a set "
28 "of CSV files in /tmp/. This should only be needed when debugging "
29 "time synchronization.");
30
Austin Schuh2f8fd752020-09-01 22:38:28 -070031DEFINE_bool(skip_order_validation, false,
32 "If true, ignore any out of orderness in replay");
33
Austin Schuhe309d2a2019-11-29 13:25:21 -080034namespace aos {
35namespace logger {
Austin Schuhe309d2a2019-11-29 13:25:21 -080036namespace chrono = std::chrono;
37
Austin Schuh2f8fd752020-09-01 22:38:28 -070038Logger::Logger(std::string_view base_name, EventLoop *event_loop,
Austin Schuhe309d2a2019-11-29 13:25:21 -080039 std::chrono::milliseconds polling_period)
Austin Schuh0c297012020-09-16 18:41:59 -070040 : Logger(base_name, event_loop, event_loop->configuration(),
41 polling_period) {}
42Logger::Logger(std::string_view base_name, EventLoop *event_loop,
43 const Configuration *configuration,
44 std::chrono::milliseconds polling_period)
Austin Schuh2f8fd752020-09-01 22:38:28 -070045 : Logger(std::make_unique<LocalLogNamer>(base_name, event_loop->node()),
Austin Schuh0c297012020-09-16 18:41:59 -070046 event_loop, configuration, polling_period) {}
47Logger::Logger(std::unique_ptr<LogNamer> log_namer, EventLoop *event_loop,
48 std::chrono::milliseconds polling_period)
49 : Logger(std::move(log_namer), event_loop, event_loop->configuration(),
50 polling_period) {}
Austin Schuh6f3babe2020-01-26 20:34:50 -080051
52Logger::Logger(std::unique_ptr<LogNamer> log_namer, EventLoop *event_loop,
Austin Schuh0c297012020-09-16 18:41:59 -070053 const Configuration *configuration,
Austin Schuh6f3babe2020-01-26 20:34:50 -080054 std::chrono::milliseconds polling_period)
Austin Schuhe309d2a2019-11-29 13:25:21 -080055 : event_loop_(event_loop),
Austin Schuh64fab802020-09-09 22:47:47 -070056 uuid_(UUID::Random()),
Austin Schuh6f3babe2020-01-26 20:34:50 -080057 log_namer_(std::move(log_namer)),
Austin Schuh0c297012020-09-16 18:41:59 -070058 configuration_(configuration),
59 name_(network::GetHostname()),
Austin Schuhe309d2a2019-11-29 13:25:21 -080060 timer_handler_(event_loop_->AddTimer([this]() { DoLogData(); })),
Austin Schuh2f8fd752020-09-01 22:38:28 -070061 polling_period_(polling_period),
62 server_statistics_fetcher_(
63 configuration::MultiNode(event_loop_->configuration())
64 ? event_loop_->MakeFetcher<message_bridge::ServerStatistics>(
65 "/aos")
66 : aos::Fetcher<message_bridge::ServerStatistics>()) {
Austin Schuh6f3babe2020-01-26 20:34:50 -080067 VLOG(1) << "Starting logger for " << FlatbufferToJson(event_loop_->node());
68 int channel_index = 0;
Austin Schuh2f8fd752020-09-01 22:38:28 -070069
70 // Find all the nodes which are logging timestamps on our node.
71 std::set<const Node *> timestamp_logger_nodes;
Austin Schuh0c297012020-09-16 18:41:59 -070072 for (const Channel *channel : *configuration_->channels()) {
Austin Schuh2f8fd752020-09-01 22:38:28 -070073 if (!configuration::ChannelIsSendableOnNode(channel, event_loop_->node()) ||
74 !channel->has_destination_nodes()) {
75 continue;
76 }
77 for (const Connection *connection : *channel->destination_nodes()) {
78 const Node *other_node = configuration::GetNode(
Austin Schuh0c297012020-09-16 18:41:59 -070079 configuration_, connection->name()->string_view());
Austin Schuh2f8fd752020-09-01 22:38:28 -070080
81 if (configuration::ConnectionDeliveryTimeIsLoggedOnNode(
82 connection, event_loop_->node())) {
83 VLOG(1) << "Timestamps are logged from "
84 << FlatbufferToJson(other_node);
85 timestamp_logger_nodes.insert(other_node);
86 }
87 }
88 }
89
90 std::map<const Channel *, const Node *> timestamp_logger_channels;
91
92 // Now that we have all the nodes accumulated, make remote timestamp loggers
93 // for them.
94 for (const Node *node : timestamp_logger_nodes) {
95 const Channel *channel = configuration::GetChannel(
Austin Schuh0c297012020-09-16 18:41:59 -070096 configuration_,
Austin Schuh2f8fd752020-09-01 22:38:28 -070097 absl::StrCat("/aos/remote_timestamps/", node->name()->string_view()),
98 logger::MessageHeader::GetFullyQualifiedName(), event_loop_->name(),
99 event_loop_->node());
100
101 CHECK(channel != nullptr)
102 << ": Remote timestamps are logged on "
103 << event_loop_->node()->name()->string_view()
104 << " but can't find channel /aos/remote_timestamps/"
105 << node->name()->string_view();
106 timestamp_logger_channels.insert(std::make_pair(channel, node));
107 }
108
Brian Silvermand90905f2020-09-23 14:42:56 -0700109 const size_t our_node_index =
110 configuration::GetNodeIndex(configuration_, event_loop_->node());
Austin Schuh2f8fd752020-09-01 22:38:28 -0700111
Austin Schuh0c297012020-09-16 18:41:59 -0700112 for (const Channel *config_channel : *configuration_->channels()) {
113 // The MakeRawFetcher method needs a channel which is in the event loop
114 // configuration() object, not the configuration_ object. Go look that up
115 // from the config.
116 const Channel *channel = aos::configuration::GetChannel(
117 event_loop_->configuration(), config_channel->name()->string_view(),
118 config_channel->type()->string_view(), "", event_loop_->node());
119
Austin Schuhe309d2a2019-11-29 13:25:21 -0800120 FetcherStruct fs;
Austin Schuh2f8fd752020-09-01 22:38:28 -0700121 fs.node_index = our_node_index;
Austin Schuh6f3babe2020-01-26 20:34:50 -0800122 const bool is_local =
123 configuration::ChannelIsSendableOnNode(channel, event_loop_->node());
124
Austin Schuh15649d62019-12-28 16:36:38 -0800125 const bool is_readable =
126 configuration::ChannelIsReadableOnNode(channel, event_loop_->node());
127 const bool log_message = configuration::ChannelMessageIsLoggedOnNode(
128 channel, event_loop_->node()) &&
129 is_readable;
130
131 const bool log_delivery_times =
132 (event_loop_->node() == nullptr)
133 ? false
134 : configuration::ConnectionDeliveryTimeIsLoggedOnNode(
135 channel, event_loop_->node(), event_loop_->node());
136
Austin Schuh2f8fd752020-09-01 22:38:28 -0700137 // Now, detect a MessageHeader timestamp logger where we should just log the
138 // contents to a file directly.
139 const bool log_contents = timestamp_logger_channels.find(channel) !=
140 timestamp_logger_channels.end();
141 const Node *timestamp_node =
142 log_contents ? timestamp_logger_channels.find(channel)->second
143 : nullptr;
144
145 if (log_message || log_delivery_times || log_contents) {
Austin Schuh15649d62019-12-28 16:36:38 -0800146 fs.fetcher = event_loop->MakeRawFetcher(channel);
147 VLOG(1) << "Logging channel "
148 << configuration::CleanedChannelToString(channel);
149
150 if (log_delivery_times) {
Austin Schuh6f3babe2020-01-26 20:34:50 -0800151 VLOG(1) << " Delivery times";
152 fs.timestamp_writer = log_namer_->MakeTimestampWriter(channel);
Austin Schuh15649d62019-12-28 16:36:38 -0800153 }
Austin Schuh6f3babe2020-01-26 20:34:50 -0800154 if (log_message) {
155 VLOG(1) << " Data";
156 fs.writer = log_namer_->MakeWriter(channel);
157 if (!is_local) {
158 fs.log_type = LogType::kLogRemoteMessage;
159 }
160 }
Austin Schuh2f8fd752020-09-01 22:38:28 -0700161 if (log_contents) {
162 VLOG(1) << "Timestamp logger channel "
163 << configuration::CleanedChannelToString(channel);
164 fs.contents_writer =
165 log_namer_->MakeForwardedTimestampWriter(channel, timestamp_node);
Austin Schuh0c297012020-09-16 18:41:59 -0700166 fs.node_index =
167 configuration::GetNodeIndex(configuration_, timestamp_node);
Austin Schuh2f8fd752020-09-01 22:38:28 -0700168 }
Austin Schuh6f3babe2020-01-26 20:34:50 -0800169 fs.channel_index = channel_index;
170 fs.written = false;
171 fetchers_.emplace_back(std::move(fs));
Austin Schuh15649d62019-12-28 16:36:38 -0800172 }
Austin Schuh6f3babe2020-01-26 20:34:50 -0800173 ++channel_index;
Austin Schuhe309d2a2019-11-29 13:25:21 -0800174 }
175
Austin Schuh0c297012020-09-16 18:41:59 -0700176 node_state_.resize(configuration::MultiNode(configuration_)
177 ? configuration_->nodes()->size()
Austin Schuh2f8fd752020-09-01 22:38:28 -0700178 : 1u);
Austin Schuhe309d2a2019-11-29 13:25:21 -0800179
Austin Schuh2f8fd752020-09-01 22:38:28 -0700180 for (const Node *node : log_namer_->nodes()) {
Brian Silvermand90905f2020-09-23 14:42:56 -0700181 const int node_index = configuration::GetNodeIndex(configuration_, node);
Austin Schuhe309d2a2019-11-29 13:25:21 -0800182
Austin Schuh2f8fd752020-09-01 22:38:28 -0700183 node_state_[node_index].log_file_header = MakeHeader(node);
184 }
Austin Schuhe309d2a2019-11-29 13:25:21 -0800185
Austin Schuh2f8fd752020-09-01 22:38:28 -0700186 // When things start, we want to log the header, then the most recent
187 // messages available on each fetcher to capture the previous state, then
188 // start polling.
189 event_loop_->OnRun([this]() { StartLogging(); });
Austin Schuhe309d2a2019-11-29 13:25:21 -0800190}
191
Austin Schuh0c297012020-09-16 18:41:59 -0700192Logger::~Logger() {
193 // If we are replaying a log file, or in simulation, we want to force the last
194 // bit of data to be logged. The easiest way to deal with this is to poll
195 // everything as we go to destroy the class, ie, shut down the logger, and
196 // write it to disk.
197 DoLogData();
198}
199
Austin Schuh2f8fd752020-09-01 22:38:28 -0700200void Logger::StartLogging() {
201 // Grab data from each channel right before we declare the log file started
202 // so we can capture the latest message on each channel. This lets us have
203 // non periodic messages with configuration that now get logged.
204 for (FetcherStruct &f : fetchers_) {
205 f.written = !f.fetcher->Fetch();
206 }
207
208 // Clear out any old timestamps in case we are re-starting logging.
209 for (size_t i = 0; i < node_state_.size(); ++i) {
210 SetStartTime(i, monotonic_clock::min_time, realtime_clock::min_time);
211 }
212
213 WriteHeader();
214
215 LOG(INFO) << "Logging node as " << FlatbufferToJson(event_loop_->node())
216 << " start_time " << last_synchronized_time_;
217
218 timer_handler_->Setup(event_loop_->monotonic_now() + polling_period_,
219 polling_period_);
220}
221
Austin Schuhfa895892020-01-07 20:07:41 -0800222void Logger::WriteHeader() {
Austin Schuh0c297012020-09-16 18:41:59 -0700223 if (configuration::MultiNode(configuration_)) {
Austin Schuh2f8fd752020-09-01 22:38:28 -0700224 server_statistics_fetcher_.Fetch();
225 }
226
227 aos::monotonic_clock::time_point monotonic_start_time =
228 event_loop_->monotonic_now();
229 aos::realtime_clock::time_point realtime_start_time =
230 event_loop_->realtime_now();
231
232 // We need to pick a point in time to declare the log file "started". This
233 // starts here. It needs to be after everything is fetched so that the
234 // fetchers are all pointed at the most recent message before the start
235 // time.
236 last_synchronized_time_ = monotonic_start_time;
237
Austin Schuh6f3babe2020-01-26 20:34:50 -0800238 for (const Node *node : log_namer_->nodes()) {
Brian Silvermand90905f2020-09-23 14:42:56 -0700239 const int node_index = configuration::GetNodeIndex(configuration_, node);
Austin Schuh2f8fd752020-09-01 22:38:28 -0700240 MaybeUpdateTimestamp(node, node_index, monotonic_start_time,
241 realtime_start_time);
Austin Schuh64fab802020-09-09 22:47:47 -0700242 log_namer_->WriteHeader(&node_state_[node_index].log_file_header, node);
Austin Schuh6f3babe2020-01-26 20:34:50 -0800243 }
244}
Austin Schuh8bd96322020-02-13 21:18:22 -0800245
Austin Schuh2f8fd752020-09-01 22:38:28 -0700246void Logger::WriteMissingTimestamps() {
Austin Schuh0c297012020-09-16 18:41:59 -0700247 if (configuration::MultiNode(configuration_)) {
Austin Schuh2f8fd752020-09-01 22:38:28 -0700248 server_statistics_fetcher_.Fetch();
249 } else {
250 return;
251 }
252
253 if (server_statistics_fetcher_.get() == nullptr) {
254 return;
255 }
256
257 for (const Node *node : log_namer_->nodes()) {
Brian Silvermand90905f2020-09-23 14:42:56 -0700258 const int node_index = configuration::GetNodeIndex(configuration_, node);
Austin Schuh2f8fd752020-09-01 22:38:28 -0700259 if (MaybeUpdateTimestamp(
260 node, node_index,
261 server_statistics_fetcher_.context().monotonic_event_time,
262 server_statistics_fetcher_.context().realtime_event_time)) {
Austin Schuh64fab802020-09-09 22:47:47 -0700263 log_namer_->Rotate(node, &node_state_[node_index].log_file_header);
Austin Schuh2f8fd752020-09-01 22:38:28 -0700264 }
265 }
266}
267
268void Logger::SetStartTime(size_t node_index,
269 aos::monotonic_clock::time_point monotonic_start_time,
270 aos::realtime_clock::time_point realtime_start_time) {
271 node_state_[node_index].monotonic_start_time = monotonic_start_time;
272 node_state_[node_index].realtime_start_time = realtime_start_time;
273 node_state_[node_index]
274 .log_file_header.mutable_message()
275 ->mutate_monotonic_start_time(
276 std::chrono::duration_cast<std::chrono::nanoseconds>(
277 monotonic_start_time.time_since_epoch())
278 .count());
279 if (node_state_[node_index]
280 .log_file_header.mutable_message()
281 ->has_realtime_start_time()) {
282 node_state_[node_index]
283 .log_file_header.mutable_message()
284 ->mutate_realtime_start_time(
285 std::chrono::duration_cast<std::chrono::nanoseconds>(
286 realtime_start_time.time_since_epoch())
287 .count());
288 }
289}
290
291bool Logger::MaybeUpdateTimestamp(
292 const Node *node, int node_index,
293 aos::monotonic_clock::time_point monotonic_start_time,
294 aos::realtime_clock::time_point realtime_start_time) {
Brian Silverman87ac0402020-09-17 14:47:01 -0700295 // Bail early if the start times are already set.
Austin Schuh2f8fd752020-09-01 22:38:28 -0700296 if (node_state_[node_index].monotonic_start_time !=
297 monotonic_clock::min_time) {
298 return false;
299 }
Austin Schuh0c297012020-09-16 18:41:59 -0700300 if (configuration::MultiNode(configuration_)) {
Austin Schuh2f8fd752020-09-01 22:38:28 -0700301 if (event_loop_->node() == node) {
302 // There are no offsets to compute for ourself, so always succeed.
303 SetStartTime(node_index, monotonic_start_time, realtime_start_time);
304 return true;
305 } else if (server_statistics_fetcher_.get() != nullptr) {
306 // We must be a remote node now. Look for the connection and see if it is
307 // connected.
308
309 for (const message_bridge::ServerConnection *connection :
310 *server_statistics_fetcher_->connections()) {
311 if (connection->node()->name()->string_view() !=
312 node->name()->string_view()) {
313 continue;
314 }
315
316 if (connection->state() != message_bridge::State::CONNECTED) {
317 VLOG(1) << node->name()->string_view()
318 << " is not connected, can't start it yet.";
319 break;
320 }
321
322 if (!connection->has_monotonic_offset()) {
323 VLOG(1) << "Missing monotonic offset for setting start time for node "
324 << aos::FlatbufferToJson(node);
325 break;
326 }
327
328 VLOG(1) << "Updating start time for " << aos::FlatbufferToJson(node);
329
330 // Found it and it is connected. Compensate and go.
331 monotonic_start_time +=
332 std::chrono::nanoseconds(connection->monotonic_offset());
333
334 SetStartTime(node_index, monotonic_start_time, realtime_start_time);
335 return true;
336 }
337 }
338 } else {
339 SetStartTime(node_index, monotonic_start_time, realtime_start_time);
340 return true;
341 }
342 return false;
343}
344
345aos::SizePrefixedFlatbufferDetachedBuffer<LogFileHeader> Logger::MakeHeader(
346 const Node *node) {
Austin Schuhfa895892020-01-07 20:07:41 -0800347 // Now write the header with this timestamp in it.
348 flatbuffers::FlatBufferBuilder fbb;
Austin Schuhd7b15da2020-02-17 15:06:11 -0800349 fbb.ForceDefaults(true);
Austin Schuhfa895892020-01-07 20:07:41 -0800350
Austin Schuh2f8fd752020-09-01 22:38:28 -0700351 // TODO(austin): Compress this much more efficiently. There are a bunch of
352 // duplicated schemas.
Austin Schuhfa895892020-01-07 20:07:41 -0800353 flatbuffers::Offset<aos::Configuration> configuration_offset =
Austin Schuh0c297012020-09-16 18:41:59 -0700354 CopyFlatBuffer(configuration_, &fbb);
Austin Schuhfa895892020-01-07 20:07:41 -0800355
Austin Schuh64fab802020-09-09 22:47:47 -0700356 flatbuffers::Offset<flatbuffers::String> name_offset =
Austin Schuh0c297012020-09-16 18:41:59 -0700357 fbb.CreateString(name_);
Austin Schuhfa895892020-01-07 20:07:41 -0800358
Austin Schuh64fab802020-09-09 22:47:47 -0700359 flatbuffers::Offset<flatbuffers::String> logger_uuid_offset =
360 fbb.CreateString(uuid_.string_view());
361
362 flatbuffers::Offset<flatbuffers::String> parts_uuid_offset =
363 fbb.CreateString("00000000-0000-4000-8000-000000000000");
364
Austin Schuhfa895892020-01-07 20:07:41 -0800365 flatbuffers::Offset<Node> node_offset;
Austin Schuh2f8fd752020-09-01 22:38:28 -0700366
Austin Schuh0c297012020-09-16 18:41:59 -0700367 if (configuration::MultiNode(configuration_)) {
Austin Schuh6f3babe2020-01-26 20:34:50 -0800368 node_offset = CopyFlatBuffer(node, &fbb);
Austin Schuhfa895892020-01-07 20:07:41 -0800369 }
370
371 aos::logger::LogFileHeader::Builder log_file_header_builder(fbb);
372
Austin Schuh64fab802020-09-09 22:47:47 -0700373 log_file_header_builder.add_name(name_offset);
Austin Schuhfa895892020-01-07 20:07:41 -0800374
375 // Only add the node if we are running in a multinode configuration.
Austin Schuh6f3babe2020-01-26 20:34:50 -0800376 if (node != nullptr) {
Austin Schuhfa895892020-01-07 20:07:41 -0800377 log_file_header_builder.add_node(node_offset);
378 }
379
380 log_file_header_builder.add_configuration(configuration_offset);
381 // The worst case theoretical out of order is the polling period times 2.
382 // One message could get logged right after the boundary, but be for right
383 // before the next boundary. And the reverse could happen for another
384 // message. Report back 3x to be extra safe, and because the cost isn't
385 // huge on the read side.
386 log_file_header_builder.add_max_out_of_order_duration(
387 std::chrono::duration_cast<std::chrono::nanoseconds>(3 * polling_period_)
388 .count());
389
390 log_file_header_builder.add_monotonic_start_time(
391 std::chrono::duration_cast<std::chrono::nanoseconds>(
Austin Schuh2f8fd752020-09-01 22:38:28 -0700392 monotonic_clock::min_time.time_since_epoch())
Austin Schuhfa895892020-01-07 20:07:41 -0800393 .count());
Austin Schuh2f8fd752020-09-01 22:38:28 -0700394 if (node == event_loop_->node()) {
395 log_file_header_builder.add_realtime_start_time(
396 std::chrono::duration_cast<std::chrono::nanoseconds>(
397 realtime_clock::min_time.time_since_epoch())
398 .count());
Austin Schuh6f3babe2020-01-26 20:34:50 -0800399 }
400
Austin Schuh64fab802020-09-09 22:47:47 -0700401 log_file_header_builder.add_logger_uuid(logger_uuid_offset);
402
403 log_file_header_builder.add_parts_uuid(parts_uuid_offset);
404 log_file_header_builder.add_parts_index(0);
405
Austin Schuh2f8fd752020-09-01 22:38:28 -0700406 fbb.FinishSizePrefixed(log_file_header_builder.Finish());
407 return fbb.Release();
408}
409
410void Logger::Rotate() {
411 for (const Node *node : log_namer_->nodes()) {
Brian Silvermand90905f2020-09-23 14:42:56 -0700412 const int node_index = configuration::GetNodeIndex(configuration_, node);
Austin Schuh64fab802020-09-09 22:47:47 -0700413 log_namer_->Rotate(node, &node_state_[node_index].log_file_header);
Austin Schuh2f8fd752020-09-01 22:38:28 -0700414 }
415}
416
417void Logger::LogUntil(monotonic_clock::time_point t) {
418 WriteMissingTimestamps();
419
420 // Write each channel to disk, one at a time.
421 for (FetcherStruct &f : fetchers_) {
422 while (true) {
423 if (f.written) {
424 if (!f.fetcher->FetchNext()) {
425 VLOG(2) << "No new data on "
426 << configuration::CleanedChannelToString(
427 f.fetcher->channel());
428 break;
429 } else {
430 f.written = false;
431 }
432 }
433
434 CHECK(!f.written);
435
436 // TODO(james): Write tests to exercise this logic.
437 if (f.fetcher->context().monotonic_event_time < t) {
438 if (f.writer != nullptr) {
439 // Write!
440 flatbuffers::FlatBufferBuilder fbb(f.fetcher->context().size +
441 max_header_size_);
442 fbb.ForceDefaults(true);
443
444 fbb.FinishSizePrefixed(PackMessage(&fbb, f.fetcher->context(),
445 f.channel_index, f.log_type));
446
447 VLOG(2) << "Writing data as node "
448 << FlatbufferToJson(event_loop_->node()) << " for channel "
449 << configuration::CleanedChannelToString(f.fetcher->channel())
450 << " to " << f.writer->filename() << " data "
451 << FlatbufferToJson(
452 flatbuffers::GetSizePrefixedRoot<MessageHeader>(
453 fbb.GetBufferPointer()));
454
455 max_header_size_ = std::max(
456 max_header_size_, fbb.GetSize() - f.fetcher->context().size);
457 f.writer->QueueSizedFlatbuffer(&fbb);
458 }
459
460 if (f.timestamp_writer != nullptr) {
461 // And now handle timestamps.
462 flatbuffers::FlatBufferBuilder fbb;
463 fbb.ForceDefaults(true);
464
465 fbb.FinishSizePrefixed(PackMessage(&fbb, f.fetcher->context(),
466 f.channel_index,
467 LogType::kLogDeliveryTimeOnly));
468
469 VLOG(2) << "Writing timestamps as node "
470 << FlatbufferToJson(event_loop_->node()) << " for channel "
471 << configuration::CleanedChannelToString(f.fetcher->channel())
472 << " to " << f.timestamp_writer->filename() << " timestamp "
473 << FlatbufferToJson(
474 flatbuffers::GetSizePrefixedRoot<MessageHeader>(
475 fbb.GetBufferPointer()));
476
477 f.timestamp_writer->QueueSizedFlatbuffer(&fbb);
478 }
479
480 if (f.contents_writer != nullptr) {
481 // And now handle the special message contents channel. Copy the
482 // message into a FlatBufferBuilder and save it to disk.
483 // TODO(austin): We can be more efficient here when we start to
484 // care...
485 flatbuffers::FlatBufferBuilder fbb;
486 fbb.ForceDefaults(true);
487
488 const MessageHeader *msg =
489 flatbuffers::GetRoot<MessageHeader>(f.fetcher->context().data);
490
491 logger::MessageHeader::Builder message_header_builder(fbb);
492
493 // Note: this must match the same order as MessageBridgeServer and
494 // PackMessage. We want identical headers to have identical
495 // on-the-wire formats to make comparing them easier.
496 message_header_builder.add_channel_index(msg->channel_index());
497
498 message_header_builder.add_queue_index(msg->queue_index());
499 message_header_builder.add_monotonic_sent_time(
500 msg->monotonic_sent_time());
501 message_header_builder.add_realtime_sent_time(
502 msg->realtime_sent_time());
503
504 message_header_builder.add_monotonic_remote_time(
505 msg->monotonic_remote_time());
506 message_header_builder.add_realtime_remote_time(
507 msg->realtime_remote_time());
508 message_header_builder.add_remote_queue_index(
509 msg->remote_queue_index());
510
511 fbb.FinishSizePrefixed(message_header_builder.Finish());
512
513 f.contents_writer->QueueSizedFlatbuffer(&fbb);
514 }
515
516 f.written = true;
517 } else {
518 break;
519 }
520 }
521 }
522 last_synchronized_time_ = t;
Austin Schuhfa895892020-01-07 20:07:41 -0800523}
524
Austin Schuhe309d2a2019-11-29 13:25:21 -0800525void Logger::DoLogData() {
526 // We want to guarentee that messages aren't out of order by more than
527 // max_out_of_order_duration. To do this, we need sync points. Every write
528 // cycle should be a sync point.
Austin Schuhfa895892020-01-07 20:07:41 -0800529 const monotonic_clock::time_point monotonic_now =
530 event_loop_->monotonic_now();
Austin Schuhe309d2a2019-11-29 13:25:21 -0800531
532 do {
533 // Move the sync point up by at most polling_period. This forces one sync
534 // per iteration, even if it is small.
Austin Schuh2f8fd752020-09-01 22:38:28 -0700535 LogUntil(
536 std::min(last_synchronized_time_ + polling_period_, monotonic_now));
Austin Schuhe309d2a2019-11-29 13:25:21 -0800537
Austin Schuhe309d2a2019-11-29 13:25:21 -0800538 // If we missed cycles, we could be pretty far behind. Spin until we are
539 // caught up.
540 } while (last_synchronized_time_ + polling_period_ < monotonic_now);
Austin Schuhe309d2a2019-11-29 13:25:21 -0800541}
542
Austin Schuh11d43732020-09-21 17:28:30 -0700543std::vector<LogFile> SortParts(const std::vector<std::string> &parts) {
Austin Schuh5212cad2020-09-09 23:12:09 -0700544 // Start by grouping all parts by UUID, and extracting the part index.
Austin Schuh11d43732020-09-21 17:28:30 -0700545 // Datastructure to hold all the info extracted from a set of parts which go
546 // together so we can sort them afterwords.
547 struct UnsortedLogParts {
548 // Start times.
549 aos::monotonic_clock::time_point monotonic_start_time;
550 aos::realtime_clock::time_point realtime_start_time;
551
552 // Node to save.
553 std::string node;
554
555 // Pairs of the filename and the part index for sorting.
556 std::vector<std::pair<std::string, int>> parts;
557 };
558
559 // Map holding the logger_uuid -> second map. The second map holds the
560 // parts_uuid -> list of parts for sorting.
561 std::map<std::string, std::map<std::string, UnsortedLogParts>> parts_list;
Austin Schuh5212cad2020-09-09 23:12:09 -0700562
563 // Sort part files without UUIDs and part indexes as well. Extract everything
564 // useful from the log in the first pass, then sort later.
Austin Schuh11d43732020-09-21 17:28:30 -0700565 struct UnsortedOldParts {
566 // Part information with everything but the list of parts.
567 LogParts parts;
568
569 // Tuple of time for the data and filename needed for sorting after
570 // extracting.
Brian Silvermand90905f2020-09-23 14:42:56 -0700571 std::vector<std::pair<monotonic_clock::time_point, std::string>>
572 unsorted_parts;
Austin Schuh5212cad2020-09-09 23:12:09 -0700573 };
574
Austin Schuh11d43732020-09-21 17:28:30 -0700575 // A list of all the old parts which we don't know how to sort using uuids.
576 // There are enough of these in the wild that this is worth supporting.
577 std::vector<UnsortedOldParts> old_parts;
Austin Schuh5212cad2020-09-09 23:12:09 -0700578
Austin Schuh11d43732020-09-21 17:28:30 -0700579 // Now extract everything into our datastructures above for sorting.
Austin Schuh5212cad2020-09-09 23:12:09 -0700580 for (const std::string &part : parts) {
581 FlatbufferVector<LogFileHeader> log_header = ReadHeader(part);
582
Austin Schuh11d43732020-09-21 17:28:30 -0700583 const monotonic_clock::time_point monotonic_start_time(
584 chrono::nanoseconds(log_header.message().monotonic_start_time()));
585 const realtime_clock::time_point realtime_start_time(
586 chrono::nanoseconds(log_header.message().realtime_start_time()));
587
588 const std::string_view node =
589 log_header.message().has_node()
590 ? log_header.message().node()->name()->string_view()
591 : "";
592
Austin Schuh5212cad2020-09-09 23:12:09 -0700593 // Looks like an old log. No UUID, index, and also single node. We have
594 // little to no multi-node log files in the wild without part UUIDs and
595 // indexes which we care much about.
596 if (!log_header.message().has_parts_uuid() &&
597 !log_header.message().has_parts_index() &&
598 !log_header.message().has_node()) {
Austin Schuh5212cad2020-09-09 23:12:09 -0700599 FlatbufferVector<MessageHeader> first_message = ReadNthMessage(part, 0);
Austin Schuh11d43732020-09-21 17:28:30 -0700600 const monotonic_clock::time_point first_message_time(
Austin Schuh5212cad2020-09-09 23:12:09 -0700601 chrono::nanoseconds(first_message.message().monotonic_sent_time()));
Austin Schuh11d43732020-09-21 17:28:30 -0700602
603 // Find anything with a matching start time. They all go together.
604 auto result = std::find_if(
605 old_parts.begin(), old_parts.end(),
606 [&](const UnsortedOldParts &parts) {
607 return parts.parts.monotonic_start_time == monotonic_start_time &&
608 parts.parts.realtime_start_time == realtime_start_time;
609 });
610
611 if (result == old_parts.end()) {
612 old_parts.emplace_back();
613 old_parts.back().parts.monotonic_start_time = monotonic_start_time;
614 old_parts.back().parts.realtime_start_time = realtime_start_time;
615 old_parts.back().unsorted_parts.emplace_back(
616 std::make_pair(first_message_time, part));
617 } else {
618 result->unsorted_parts.emplace_back(
619 std::make_pair(first_message_time, part));
620 }
Austin Schuh5212cad2020-09-09 23:12:09 -0700621 continue;
622 }
623
Austin Schuh11d43732020-09-21 17:28:30 -0700624 CHECK(log_header.message().has_logger_uuid());
Austin Schuh5212cad2020-09-09 23:12:09 -0700625 CHECK(log_header.message().has_parts_uuid());
626 CHECK(log_header.message().has_parts_index());
627
Austin Schuh11d43732020-09-21 17:28:30 -0700628 const std::string logger_uuid = log_header.message().logger_uuid()->str();
Austin Schuh5212cad2020-09-09 23:12:09 -0700629 const std::string parts_uuid = log_header.message().parts_uuid()->str();
Austin Schuh11d43732020-09-21 17:28:30 -0700630 int32_t parts_index = log_header.message().parts_index();
631
632 auto log_it = parts_list.find(logger_uuid);
633 if (log_it == parts_list.end()) {
634 log_it = parts_list
Brian Silvermand90905f2020-09-23 14:42:56 -0700635 .insert(std::make_pair(
636 logger_uuid, std::map<std::string, UnsortedLogParts>()))
637 .first;
Austin Schuh5212cad2020-09-09 23:12:09 -0700638 }
Austin Schuh11d43732020-09-21 17:28:30 -0700639
640 auto it = log_it->second.find(parts_uuid);
641 if (it == log_it->second.end()) {
642 it = log_it->second.insert(std::make_pair(parts_uuid, UnsortedLogParts()))
643 .first;
644 it->second.monotonic_start_time = monotonic_start_time;
645 it->second.realtime_start_time = realtime_start_time;
646 it->second.node = std::string(node);
647 }
648
649 // First part might be min_time. If it is, try to put a better time on it.
650 if (it->second.monotonic_start_time == monotonic_clock::min_time) {
651 it->second.monotonic_start_time = monotonic_start_time;
652 } else if (monotonic_start_time != monotonic_clock::min_time) {
653 CHECK_EQ(it->second.monotonic_start_time, monotonic_start_time);
654 }
655 if (it->second.realtime_start_time == realtime_clock::min_time) {
656 it->second.realtime_start_time = realtime_start_time;
657 } else if (realtime_start_time != realtime_clock::min_time) {
658 CHECK_EQ(it->second.realtime_start_time, realtime_start_time);
659 }
660
661 it->second.parts.emplace_back(std::make_pair(part, parts_index));
Austin Schuh5212cad2020-09-09 23:12:09 -0700662 }
663
664 CHECK_NE(old_parts.empty(), parts_list.empty())
665 << ": Can't have a mix of old and new parts.";
666
Austin Schuh11d43732020-09-21 17:28:30 -0700667 // Now reformat old_parts to be in the right datastructure to report.
Austin Schuh5212cad2020-09-09 23:12:09 -0700668 if (!old_parts.empty()) {
Austin Schuh11d43732020-09-21 17:28:30 -0700669 std::vector<LogFile> result;
670 for (UnsortedOldParts &p : old_parts) {
671 // Sort by the oldest message in each file.
672 std::sort(
673 p.unsorted_parts.begin(), p.unsorted_parts.end(),
674 [](const std::pair<monotonic_clock::time_point, std::string> &a,
675 const std::pair<monotonic_clock::time_point, std::string> &b) {
676 return a.first < b.first;
677 });
678 LogFile log_file;
679 for (std::pair<monotonic_clock::time_point, std::string> &f :
680 p.unsorted_parts) {
681 p.parts.parts.emplace_back(std::move(f.second));
682 }
683 log_file.parts.emplace_back(std::move(p.parts));
684 result.emplace_back(std::move(log_file));
Austin Schuh5212cad2020-09-09 23:12:09 -0700685 }
Austin Schuh5212cad2020-09-09 23:12:09 -0700686
Austin Schuh11d43732020-09-21 17:28:30 -0700687 return result;
Austin Schuh5212cad2020-09-09 23:12:09 -0700688 }
689
690 // Now, sort them and produce the final vector form.
Austin Schuh11d43732020-09-21 17:28:30 -0700691 std::vector<LogFile> result;
Austin Schuh5212cad2020-09-09 23:12:09 -0700692 result.reserve(parts_list.size());
Brian Silvermand90905f2020-09-23 14:42:56 -0700693 for (std::pair<const std::string, std::map<std::string, UnsortedLogParts>>
694 &logs : parts_list) {
Austin Schuh11d43732020-09-21 17:28:30 -0700695 LogFile new_file;
696 new_file.logger_uuid = logs.first;
697 for (std::pair<const std::string, UnsortedLogParts> &parts : logs.second) {
698 LogParts new_parts;
699 new_parts.monotonic_start_time = parts.second.monotonic_start_time;
700 new_parts.realtime_start_time = parts.second.realtime_start_time;
701 new_parts.logger_uuid = logs.first;
702 new_parts.parts_uuid = parts.first;
703 new_parts.node = std::move(parts.second.node);
704
705 std::sort(parts.second.parts.begin(), parts.second.parts.end(),
706 [](const std::pair<std::string, int> &a,
707 const std::pair<std::string, int> &b) {
708 return a.second < b.second;
709 });
710 new_parts.parts.reserve(parts.second.parts.size());
711 for (std::pair<std::string, int> &p : parts.second.parts) {
712 new_parts.parts.emplace_back(std::move(p.first));
713 }
714 new_file.parts.emplace_back(std::move(new_parts));
Austin Schuh5212cad2020-09-09 23:12:09 -0700715 }
Austin Schuh11d43732020-09-21 17:28:30 -0700716 result.emplace_back(std::move(new_file));
717 }
718 return result;
719}
720
721std::ostream &operator<<(std::ostream &stream, const LogFile &file) {
722 stream << "{";
723 if (!file.logger_uuid.empty()) {
724 stream << "\"logger_uuid\": \"" << file.logger_uuid << "\", ";
725 }
726 stream << "\"parts\": [";
727 for (size_t i = 0; i < file.parts.size(); ++i) {
728 if (i != 0u) {
729 stream << ", ";
730 }
731 stream << file.parts[i];
732 }
733 stream << "]}";
734 return stream;
735}
736std::ostream &operator<<(std::ostream &stream, const LogParts &parts) {
737 stream << "{";
738 if (!parts.logger_uuid.empty()) {
739 stream << "\"logger_uuid\": \"" << parts.logger_uuid << "\", ";
740 }
741 if (!parts.parts_uuid.empty()) {
742 stream << "\"parts_uuid\": \"" << parts.parts_uuid << "\", ";
743 }
744 if (!parts.node.empty()) {
745 stream << "\"node\": \"" << parts.node << "\", ";
746 }
747 stream << "\"monotonic_start_time\": " << parts.monotonic_start_time
748 << ", \"realtime_start_time\": " << parts.realtime_start_time << ", [";
749
750 for (size_t i = 0; i < parts.parts.size(); ++i) {
751 if (i != 0u) {
752 stream << ", ";
753 }
754 stream << parts.parts[i];
755 }
756
757 stream << "]}";
758 return stream;
759}
760
761std::vector<std::vector<std::string>> ToLogReaderVector(
762 const std::vector<LogFile> &log_files) {
763 std::vector<std::vector<std::string>> result;
764 for (const LogFile &log_file : log_files) {
765 for (const LogParts &log_parts : log_file.parts) {
766 std::vector<std::string> parts;
767 for (const std::string &part : log_parts.parts) {
768 parts.emplace_back(part);
769 }
770 result.emplace_back(std::move(parts));
771 }
Austin Schuh5212cad2020-09-09 23:12:09 -0700772 }
773 return result;
774}
775
// Convenience constructor for replaying a single log file: wraps the filename
// in a one-element vector and delegates to the vector<string> constructor.
LogReader::LogReader(std::string_view filename,
                     const Configuration *replay_configuration)
    : LogReader(std::vector<std::string>{std::string(filename)},
                replay_configuration) {}
780
// Convenience constructor for one set of log parts: wraps the filenames in a
// one-element outer vector and delegates to the vector<vector<string>>
// constructor below.
LogReader::LogReader(const std::vector<std::string> &filenames,
                     const Configuration *replay_configuration)
    : LogReader(std::vector<std::vector<std::string>>{filenames},
                replay_configuration) {}
785
// TODO(austin): Make this the base and kill the others.  This has much better
// context for sorting.
//
// Constructor taking pre-sorted LogFile structures; flattens them back to the
// filename-list form and delegates to the vector<vector<string>> constructor.
LogReader::LogReader(const std::vector<LogFile> &log_files,
                     const Configuration *replay_configuration)
    : LogReader(ToLogReaderVector(log_files), replay_configuration) {}
791
// Primary constructor.  Reads the header out of the first log file to recover
// the logged configuration, builds the remapped replay configuration, and
// allocates per-node State slots.
//
// filenames: one inner vector of part files per logged node/log.
// replay_configuration: optional config to replay into; must agree with the
//   logged config on single- vs multi-node and on the node list.
LogReader::LogReader(const std::vector<std::vector<std::string>> &filenames,
                     const Configuration *replay_configuration)
    : filenames_(filenames),
      // The header of the very first part file supplies the logged
      // configuration and start times.
      log_file_header_(ReadHeader(filenames[0][0])),
      replay_configuration_(replay_configuration) {
  MakeRemappedConfig();

  if (replay_configuration) {
    CHECK_EQ(configuration::MultiNode(configuration()),
             configuration::MultiNode(replay_configuration))
        << ": Log file and replay config need to both be multi or single "
           "node.";
  }

  if (!configuration::MultiNode(configuration())) {
    // Single-node: exactly one State, fed by one ChannelMerger over all files.
    states_.emplace_back(
        std::make_unique<State>(std::make_unique<ChannelMerger>(filenames)));
  } else {
    if (replay_configuration) {
      // Every logged node must exist in the replay config, and the counts
      // must match exactly.
      CHECK_EQ(logged_configuration()->nodes()->size(),
               replay_configuration->nodes()->size())
          << ": Log file and replay config need to have matching nodes "
             "lists.";
      for (const Node *node : *logged_configuration()->nodes()) {
        if (configuration::GetNode(replay_configuration, node) == nullptr) {
          LOG(FATAL) << "Found node " << FlatbufferToJson(node)
                     << " in logged config that is not present in the replay "
                        "config.";
        }
      }
    }
    // Multi-node: one (initially empty) State slot per node; they are filled
    // in during Register().
    states_.resize(configuration()->nodes()->size());
  }
}
826
// Tears down the reader.  If we own the event loop factory we deregister
// ourselves; if the caller owns it, they must have called Deregister()
// already, since our senders/timers point into it.
LogReader::~LogReader() {
  if (event_loop_factory_unique_ptr_) {
    Deregister();
  } else if (event_loop_factory_ != nullptr) {
    LOG(FATAL) << "Must call Deregister before the SimulatedEventLoopFactory "
                  "is destroyed";
  }
  // Close the timestamp-offset CSV debug file if it was opened.
  if (offset_fp_ != nullptr) {
    fclose(offset_fp_);
  }
  // Zero out some buffers.  It's easy to do use-after-frees on these, so make
  // it more obvious.
  if (remapped_configuration_buffer_) {
    remapped_configuration_buffer_->Wipe();
  }
  log_file_header_.Wipe();
}
Austin Schuhe309d2a2019-11-29 13:25:21 -0800844
// Returns the configuration that was recorded in the log file header.
const Configuration *LogReader::logged_configuration() const {
  return log_file_header_.message().configuration();
}
848
// Returns the configuration replay actually runs against: the logged (or
// replay) config with any channel remappings applied.
const Configuration *LogReader::configuration() const {
  return remapped_configuration_;
}
852
// Returns the nodes of the remapped configuration.  Pointers are owned by
// remapped_configuration_, so this is only valid after Register() and until
// the next remap.
std::vector<const Node *> LogReader::Nodes() const {
  // Because the Node pointer will only be valid if it actually points to
  // memory owned by remapped_configuration_, we need to wait for the
  // remapped_configuration_ to be populated before accessing it.
  //
  // Also, note, that when ever a map is changed, the nodes in here are
  // invalidated.
  CHECK(remapped_configuration_ != nullptr)
      << ": Need to call Register before the node() pointer will be valid.";
  return configuration::GetNodes(remapped_configuration_);
}
Austin Schuh15649d62019-12-28 16:36:38 -0800864
Austin Schuh11d43732020-09-21 17:28:30 -0700865monotonic_clock::time_point LogReader::monotonic_start_time(
866 const Node *node) const {
Austin Schuh8bd96322020-02-13 21:18:22 -0800867 State *state =
868 states_[configuration::GetNodeIndex(configuration(), node)].get();
869 CHECK(state != nullptr) << ": Unknown node " << FlatbufferToJson(node);
870
Austin Schuh858c9f32020-08-31 16:56:12 -0700871 return state->monotonic_start_time();
Austin Schuhe309d2a2019-11-29 13:25:21 -0800872}
873
Austin Schuh11d43732020-09-21 17:28:30 -0700874realtime_clock::time_point LogReader::realtime_start_time(
875 const Node *node) const {
Austin Schuh8bd96322020-02-13 21:18:22 -0800876 State *state =
877 states_[configuration::GetNodeIndex(configuration(), node)].get();
878 CHECK(state != nullptr) << ": Unknown node " << FlatbufferToJson(node);
879
Austin Schuh858c9f32020-08-31 16:56:12 -0700880 return state->realtime_start_time();
Austin Schuhe309d2a2019-11-29 13:25:21 -0800881}
882
// Convenience overload: creates and owns a SimulatedEventLoopFactory built
// from the (remapped) configuration and registers against it.  The factory is
// destroyed (via Deregister) in our destructor.
void LogReader::Register() {
  event_loop_factory_unique_ptr_ =
      std::make_unique<SimulatedEventLoopFactory>(configuration());
  Register(event_loop_factory_unique_ptr_.get());
}
888
// Registers replay against a caller-provided simulated event loop factory.
// This wires up one State (with its own ChannelMerger and event loop) per
// node, builds the linear system used to solve per-node clock offsets,
// disables message forwarding/statistics that would duplicate logged traffic,
// and advances simulated time to the latest per-node log start time so all
// nodes begin replay together.
void LogReader::Register(SimulatedEventLoopFactory *event_loop_factory) {
  event_loop_factory_ = event_loop_factory;
  remapped_configuration_ = event_loop_factory_->configuration();

  // Create a State per node and register its event loop.  Register(EventLoop*)
  // increments live_nodes_ for nodes that actually have data.
  for (const Node *node : configuration::GetNodes(configuration())) {
    const size_t node_index =
        configuration::GetNodeIndex(configuration(), node);
    states_[node_index] =
        std::make_unique<State>(std::make_unique<ChannelMerger>(filenames_));
    State *state = states_[node_index].get();

    Register(state->SetNodeEventLoopFactory(
        event_loop_factory_->GetNodeEventLoopFactory(node)));
  }
  if (live_nodes_ == 0) {
    LOG(FATAL)
        << "Don't have logs from any of the nodes in the replay config--are "
           "you sure that the replay config matches the original config?";
  }

  // We need to now seed our per-node time offsets and get everything set up
  // to run.
  const size_t num_nodes = nodes_count();

  // It is easiest to solve for per node offsets with a matrix rather than
  // trying to solve the equations by hand.  So let's get after it.
  //
  // Now, build up the map matrix.
  //
  // offset_matrix_ = (map_matrix_ + slope_matrix_) * [ta; tb; tc]
  //
  // Row 0 ties the average of all node clocks to the distributed clock; each
  // subsequent row encodes one pairwise filter sample (tb - ta = offset).
  // Exact rationals (mpq_class) are used to avoid floating point error in the
  // rank computation below.
  map_matrix_ = Eigen::Matrix<mpq_class, Eigen::Dynamic, Eigen::Dynamic>::Zero(
      filters_.size() + 1, num_nodes);
  slope_matrix_ =
      Eigen::Matrix<mpq_class, Eigen::Dynamic, Eigen::Dynamic>::Zero(
          filters_.size() + 1, num_nodes);

  offset_matrix_ =
      Eigen::Matrix<mpq_class, Eigen::Dynamic, 1>::Zero(filters_.size() + 1);
  valid_matrix_ =
      Eigen::Matrix<bool, Eigen::Dynamic, 1>::Zero(filters_.size() + 1);
  last_valid_matrix_ =
      Eigen::Matrix<bool, Eigen::Dynamic, 1>::Zero(filters_.size() + 1);

  time_offset_matrix_ = Eigen::VectorXd::Zero(num_nodes);
  time_slope_matrix_ = Eigen::VectorXd::Zero(num_nodes);

  // All times should average out to the distributed clock.
  for (int i = 0; i < map_matrix_.cols(); ++i) {
    // 1/num_nodes.
    map_matrix_(0, i) = mpq_class(1, num_nodes);
  }
  valid_matrix_(0) = true;

  {
    // Now, add the a - b -> sample elements.
    size_t i = 1;
    for (std::pair<const std::tuple<const Node *, const Node *>,
                   std::tuple<message_bridge::NoncausalOffsetEstimator>>
             &filter : filters_) {
      const Node *const node_a = std::get<0>(filter.first);
      const Node *const node_b = std::get<1>(filter.first);

      const size_t node_a_index =
          configuration::GetNodeIndex(configuration(), node_a);
      const size_t node_b_index =
          configuration::GetNodeIndex(configuration(), node_b);

      // -a
      map_matrix_(i, node_a_index) = mpq_class(-1);
      // +b
      map_matrix_(i, node_b_index) = mpq_class(1);

      // -> sample
      // The estimator writes its slope/offset/validity directly into our
      // matrices through these pointers as new samples arrive.
      std::get<0>(filter.second)
          .set_slope_pointer(&slope_matrix_(i, node_a_index));
      std::get<0>(filter.second).set_offset_pointer(&offset_matrix_(i, 0));

      valid_matrix_(i) = false;
      std::get<0>(filter.second).set_valid_pointer(&valid_matrix_(i));

      ++i;
    }
  }

  // Prime each node's sorted message queue before we start solving.
  for (std::unique_ptr<State> &state : states_) {
    state->SeedSortedMessages();
  }

  // Rank of the map matrix tells you if all the nodes are in communication
  // with each other, which tells you if the offsets are observable.
  const size_t connected_nodes =
      Eigen::FullPivLU<
          Eigen::Matrix<mpq_class, Eigen::Dynamic, Eigen::Dynamic>>(map_matrix_)
          .rank();

  // We don't need to support isolated nodes until someone has a real use
  // case.
  CHECK_EQ(connected_nodes, num_nodes)
      << ": There is a node which isn't communicating with the rest.";

  // And solve.
  UpdateOffsets();

  // We want to start the log file at the last start time of the log files
  // from all the nodes.  Compute how long each node's simulation needs to run
  // to move time to this point.
  distributed_clock::time_point start_time = distributed_clock::min_time;

  // TODO(austin): We want an "OnStart" callback for each node rather than
  // running until the last node.

  for (std::unique_ptr<State> &state : states_) {
    VLOG(1) << "Start time is " << state->monotonic_start_time() << " for node "
            << MaybeNodeName(state->event_loop()->node()) << "now "
            << state->monotonic_now();
    // And start computing the start time on the distributed clock now that
    // that works.
    start_time = std::max(
        start_time, state->ToDistributedClock(state->monotonic_start_time()));
  }

  CHECK_GE(start_time, distributed_clock::epoch())
      << ": Hmm, we have a node starting before the start of time.  Offset "
         "everything.";

  // Forwarding is tracked per channel.  If it is enabled, we want to turn it
  // off.  Otherwise messages replayed will get forwarded across to the other
  // nodes, and also replayed on the other nodes.  This may not satisfy all
  // our users, but it'll start the discussion.
  if (configuration::MultiNode(event_loop_factory_->configuration())) {
    for (size_t i = 0; i < logged_configuration()->channels()->size(); ++i) {
      const Channel *channel = logged_configuration()->channels()->Get(i);
      const Node *node = configuration::GetNode(
          configuration(), channel->source_node()->string_view());

      State *state =
          states_[configuration::GetNodeIndex(configuration(), node)].get();

      const Channel *remapped_channel =
          RemapChannel(state->event_loop(), channel);

      event_loop_factory_->DisableForwarding(remapped_channel);
    }

    // If we are replaying a log, we don't want a bunch of redundant messages
    // from both the real message bridge and simulated message bridge.
    event_loop_factory_->DisableStatistics();
  }

  // While we are starting the system up, we might be relying on matching data
  // to timestamps on log files where the timestamp log file starts before the
  // data.  In this case, it is reasonable to expect missing data.
  ignore_missing_data_ = true;
  VLOG(1) << "Running until " << start_time << " in Register";
  event_loop_factory_->RunFor(start_time.time_since_epoch());
  VLOG(1) << "At start time";
  // Now that we are running for real, missing data means that the log file is
  // corrupted or went wrong.
  ignore_missing_data_ = false;

  for (std::unique_ptr<State> &state : states_) {
    // Make the RT clock be correct before handing it to the user.
    if (state->realtime_start_time() != realtime_clock::min_time) {
      state->SetRealtimeOffset(state->monotonic_start_time(),
                               state->realtime_start_time());
    }
    VLOG(1) << "Start time is " << state->monotonic_start_time() << " for node "
            << MaybeNodeName(state->event_loop()->node()) << "now "
            << state->monotonic_now();
  }

  if (FLAGS_timestamps_to_csv) {
    // Seed the CSV debug output with each node's current time so the dumps
    // line up with the start of replay.
    for (std::pair<const std::tuple<const Node *, const Node *>,
                   std::tuple<message_bridge::NoncausalOffsetEstimator>>
             &filter : filters_) {
      const Node *const node_a = std::get<0>(filter.first);
      const Node *const node_b = std::get<1>(filter.first);

      std::get<0>(filter.second)
          .SetFirstFwdTime(event_loop_factory_->GetNodeEventLoopFactory(node_a)
                               ->monotonic_now());
      std::get<0>(filter.second)
          .SetFirstRevTime(event_loop_factory_->GetNodeEventLoopFactory(node_b)
                               ->monotonic_now());
    }
  }
}
1076
Austin Schuh2f8fd752020-09-01 22:38:28 -07001077void LogReader::UpdateOffsets() {
1078 VLOG(2) << "Samples are " << offset_matrix_;
1079 VLOG(2) << "Map is " << (map_matrix_ + slope_matrix_);
1080 std::tie(time_slope_matrix_, time_offset_matrix_) = SolveOffsets();
1081 Eigen::IOFormat HeavyFmt(Eigen::FullPrecision, 0, ", ", ";\n", "[", "]", "[",
1082 "]");
1083 VLOG(1) << "First slope " << time_slope_matrix_.transpose().format(HeavyFmt)
1084 << " offset " << time_offset_matrix_.transpose().format(HeavyFmt);
1085
1086 size_t node_index = 0;
1087 for (std::unique_ptr<State> &state : states_) {
1088 state->SetDistributedOffset(offset(node_index), slope(node_index));
1089 VLOG(1) << "Offset for node " << node_index << " "
1090 << MaybeNodeName(state->event_loop()->node()) << "is "
1091 << aos::distributed_clock::time_point(offset(node_index))
1092 << " slope " << std::setprecision(9) << std::fixed
1093 << slope(node_index);
1094 ++node_index;
1095 }
1096
1097 if (VLOG_IS_ON(1)) {
1098 LogFit("Offset is");
1099 }
1100}
1101
// Debug-logs the quality of the current time fit: for every pairwise filter,
// compares the slope/offset recovered from the solved matrices against the
// estimator's own fit, and round-trips the first/last sample timestamps
// through the distributed clock to flag samples that land on the wrong side
// of "now".  Output only appears at VLOG(1); prefix tags the call site.
void LogReader::LogFit(std::string_view prefix) {
  for (std::unique_ptr<State> &state : states_) {
    VLOG(1) << MaybeNodeName(state->event_loop()->node()) << " now "
            << state->monotonic_now() << " distributed "
            << event_loop_factory_->distributed_now();
  }

  for (std::pair<const std::tuple<const Node *, const Node *>,
                 std::tuple<message_bridge::NoncausalOffsetEstimator>> &filter :
       filters_) {
    message_bridge::NoncausalOffsetEstimator *estimator =
        &std::get<0>(filter.second);

    // Nothing to report for a filter with no samples yet.
    if (estimator->a_timestamps().size() == 0 &&
        estimator->b_timestamps().size() == 0) {
      continue;
    }

    if (VLOG_IS_ON(1)) {
      estimator->LogFit(prefix);
    }

    const Node *const node_a = std::get<0>(filter.first);
    const Node *const node_b = std::get<1>(filter.first);

    const size_t node_a_index =
        configuration::GetNodeIndex(configuration(), node_a);
    const size_t node_b_index =
        configuration::GetNodeIndex(configuration(), node_b);

    // Reconstruct the pairwise slope/offset implied by the solved per-node
    // values so they can be compared with the estimator's direct fit.
    const double recovered_slope =
        slope(node_b_index) / slope(node_a_index) - 1.0;
    const int64_t recovered_offset =
        offset(node_b_index).count() - offset(node_a_index).count() *
                                           slope(node_b_index) /
                                           slope(node_a_index);

    VLOG(1) << "Recovered slope " << std::setprecision(20) << recovered_slope
            << " (error " << recovered_slope - estimator->fit().slope() << ") "
            << " offset " << std::setprecision(20) << recovered_offset
            << " (error "
            << recovered_offset - estimator->fit().offset().count() << ")";

    // Map node A's first two sample times onto the distributed clock.
    const aos::distributed_clock::time_point a0 =
        states_[node_a_index]->ToDistributedClock(
            std::get<0>(estimator->a_timestamps()[0]));
    const aos::distributed_clock::time_point a1 =
        states_[node_a_index]->ToDistributedClock(
            std::get<0>(estimator->a_timestamps()[1]));

    VLOG(1) << node_a->name()->string_view() << " timestamps()[0] = "
            << std::get<0>(estimator->a_timestamps()[0]) << " -> " << a0
            << " distributed -> " << node_b->name()->string_view() << " "
            << states_[node_b_index]->FromDistributedClock(a0) << " should be "
            << aos::monotonic_clock::time_point(
                   std::chrono::nanoseconds(static_cast<int64_t>(
                       std::get<0>(estimator->a_timestamps()[0])
                           .time_since_epoch()
                           .count() *
                       (1.0 + estimator->fit().slope()))) +
                   estimator->fit().offset())
            // The first sample should be at or before "now".
            << ((a0 <= event_loop_factory_->distributed_now())
                    ? ""
                    : " After now, investigate");
    VLOG(1) << node_a->name()->string_view() << " timestamps()[1] = "
            << std::get<0>(estimator->a_timestamps()[1]) << " -> " << a1
            << " distributed -> " << node_b->name()->string_view() << " "
            << states_[node_b_index]->FromDistributedClock(a1) << " should be "
            << aos::monotonic_clock::time_point(
                   std::chrono::nanoseconds(static_cast<int64_t>(
                       std::get<0>(estimator->a_timestamps()[1])
                           .time_since_epoch()
                           .count() *
                       (1.0 + estimator->fit().slope()))) +
                   estimator->fit().offset())
            // The second sample should be at or after "now".
            << ((event_loop_factory_->distributed_now() <= a1)
                    ? ""
                    : " Before now, investigate");

    // Same check from node B's perspective.
    const aos::distributed_clock::time_point b0 =
        states_[node_b_index]->ToDistributedClock(
            std::get<0>(estimator->b_timestamps()[0]));
    const aos::distributed_clock::time_point b1 =
        states_[node_b_index]->ToDistributedClock(
            std::get<0>(estimator->b_timestamps()[1]));

    VLOG(1) << node_b->name()->string_view() << " timestamps()[0] = "
            << std::get<0>(estimator->b_timestamps()[0]) << " -> " << b0
            << " distributed -> " << node_a->name()->string_view() << " "
            << states_[node_a_index]->FromDistributedClock(b0)
            << ((b0 <= event_loop_factory_->distributed_now())
                    ? ""
                    : " After now, investigate");
    VLOG(1) << node_b->name()->string_view() << " timestamps()[1] = "
            << std::get<0>(estimator->b_timestamps()[1]) << " -> " << b1
            << " distributed -> " << node_a->name()->string_view() << " "
            << states_[node_a_index]->FromDistributedClock(b1)
            << ((event_loop_factory_->distributed_now() <= b1)
                    ? ""
                    : " Before now, investigate");
  }
}
1204
1205message_bridge::NoncausalOffsetEstimator *LogReader::GetFilter(
Austin Schuh8bd96322020-02-13 21:18:22 -08001206 const Node *node_a, const Node *node_b) {
1207 CHECK_NE(node_a, node_b);
1208 CHECK_EQ(configuration::GetNode(configuration(), node_a), node_a);
1209 CHECK_EQ(configuration::GetNode(configuration(), node_b), node_b);
1210
1211 if (node_a > node_b) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001212 return GetFilter(node_b, node_a);
Austin Schuh8bd96322020-02-13 21:18:22 -08001213 }
1214
1215 auto tuple = std::make_tuple(node_a, node_b);
1216
1217 auto it = filters_.find(tuple);
1218
1219 if (it == filters_.end()) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001220 auto &x =
1221 filters_
1222 .insert(std::make_pair(
1223 tuple, std::make_tuple(message_bridge::NoncausalOffsetEstimator(
1224 node_a, node_b))))
1225 .first->second;
Austin Schuh8bd96322020-02-13 21:18:22 -08001226 if (FLAGS_timestamps_to_csv) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001227 std::get<0>(x).SetFwdCsvFileName(absl::StrCat(
1228 "/tmp/timestamp_noncausal_", node_a->name()->string_view(), "_",
1229 node_b->name()->string_view()));
1230 std::get<0>(x).SetRevCsvFileName(absl::StrCat(
1231 "/tmp/timestamp_noncausal_", node_b->name()->string_view(), "_",
1232 node_a->name()->string_view()));
Austin Schuh8bd96322020-02-13 21:18:22 -08001233 }
1234
Austin Schuh2f8fd752020-09-01 22:38:28 -07001235 return &std::get<0>(x);
Austin Schuh8bd96322020-02-13 21:18:22 -08001236 } else {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001237 return &std::get<0>(it->second);
Austin Schuh8bd96322020-02-13 21:18:22 -08001238 }
1239}
1240
Austin Schuhe309d2a2019-11-29 13:25:21 -08001241void LogReader::Register(EventLoop *event_loop) {
Austin Schuh8bd96322020-02-13 21:18:22 -08001242 State *state =
1243 states_[configuration::GetNodeIndex(configuration(), event_loop->node())]
1244 .get();
Austin Schuh6f3babe2020-01-26 20:34:50 -08001245
Austin Schuh858c9f32020-08-31 16:56:12 -07001246 state->set_event_loop(event_loop);
Austin Schuhe309d2a2019-11-29 13:25:21 -08001247
Tyler Chatow67ddb032020-01-12 14:30:04 -08001248 // We don't run timing reports when trying to print out logged data, because
1249 // otherwise we would end up printing out the timing reports themselves...
1250 // This is only really relevant when we are replaying into a simulation.
Austin Schuh6f3babe2020-01-26 20:34:50 -08001251 event_loop->SkipTimingReport();
1252 event_loop->SkipAosLog();
Austin Schuh39788ff2019-12-01 18:22:57 -08001253
Austin Schuh858c9f32020-08-31 16:56:12 -07001254 const bool has_data = state->SetNode();
Austin Schuhe309d2a2019-11-29 13:25:21 -08001255
Austin Schuh858c9f32020-08-31 16:56:12 -07001256 state->SetChannelCount(logged_configuration()->channels()->size());
Austin Schuh8bd96322020-02-13 21:18:22 -08001257
Austin Schuh858c9f32020-08-31 16:56:12 -07001258 for (size_t i = 0; i < logged_configuration()->channels()->size(); ++i) {
Austin Schuh6f3babe2020-01-26 20:34:50 -08001259 const Channel *channel =
1260 RemapChannel(event_loop, logged_configuration()->channels()->Get(i));
Austin Schuh6331ef92020-01-07 18:28:09 -08001261
Austin Schuh858c9f32020-08-31 16:56:12 -07001262 NodeEventLoopFactory *channel_target_event_loop_factory = nullptr;
Austin Schuh2f8fd752020-09-01 22:38:28 -07001263 message_bridge::NoncausalOffsetEstimator *filter = nullptr;
Austin Schuh8bd96322020-02-13 21:18:22 -08001264
1265 if (!configuration::ChannelIsSendableOnNode(channel, event_loop->node()) &&
1266 configuration::ChannelIsReadableOnNode(channel, event_loop->node())) {
1267 const Node *target_node = configuration::GetNode(
1268 event_loop->configuration(), channel->source_node()->string_view());
Austin Schuh858c9f32020-08-31 16:56:12 -07001269 filter = GetFilter(event_loop->node(), target_node);
Austin Schuh8bd96322020-02-13 21:18:22 -08001270
1271 if (event_loop_factory_ != nullptr) {
Austin Schuh858c9f32020-08-31 16:56:12 -07001272 channel_target_event_loop_factory =
Austin Schuh8bd96322020-02-13 21:18:22 -08001273 event_loop_factory_->GetNodeEventLoopFactory(target_node);
1274 }
1275 }
Austin Schuh858c9f32020-08-31 16:56:12 -07001276
1277 state->SetChannel(i, event_loop->MakeRawSender(channel), filter,
1278 channel_target_event_loop_factory);
Austin Schuhe309d2a2019-11-29 13:25:21 -08001279 }
1280
Austin Schuh6aa77be2020-02-22 21:06:40 -08001281 // If we didn't find any log files with data in them, we won't ever get a
1282 // callback or be live. So skip the rest of the setup.
1283 if (!has_data) {
1284 return;
1285 }
1286
Austin Schuh858c9f32020-08-31 16:56:12 -07001287 state->set_timer_handler(event_loop->AddTimer([this, state]() {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001288 VLOG(1) << "Starting sending " << MaybeNodeName(state->event_loop()->node())
1289 << "at " << state->event_loop()->context().monotonic_event_time
1290 << " now " << state->monotonic_now();
Austin Schuh858c9f32020-08-31 16:56:12 -07001291 if (state->OldestMessageTime() == monotonic_clock::max_time) {
Austin Schuh6f3babe2020-01-26 20:34:50 -08001292 --live_nodes_;
Austin Schuh2f8fd752020-09-01 22:38:28 -07001293 VLOG(1) << MaybeNodeName(state->event_loop()->node()) << "Node down!";
Austin Schuh6f3babe2020-01-26 20:34:50 -08001294 if (live_nodes_ == 0) {
1295 event_loop_factory_->Exit();
1296 }
James Kuszmaul314f1672020-01-03 20:02:08 -08001297 return;
1298 }
Austin Schuh6f3babe2020-01-26 20:34:50 -08001299 TimestampMerger::DeliveryTimestamp channel_timestamp;
Austin Schuh05b70472020-01-01 17:11:17 -08001300 int channel_index;
1301 FlatbufferVector<MessageHeader> channel_data =
1302 FlatbufferVector<MessageHeader>::Empty();
1303
Austin Schuh2f8fd752020-09-01 22:38:28 -07001304 if (VLOG_IS_ON(1)) {
1305 LogFit("Offset was");
1306 }
1307
1308 bool update_time;
Austin Schuh05b70472020-01-01 17:11:17 -08001309 std::tie(channel_timestamp, channel_index, channel_data) =
Austin Schuh2f8fd752020-09-01 22:38:28 -07001310 state->PopOldest(&update_time);
Austin Schuh05b70472020-01-01 17:11:17 -08001311
Austin Schuhe309d2a2019-11-29 13:25:21 -08001312 const monotonic_clock::time_point monotonic_now =
Austin Schuh858c9f32020-08-31 16:56:12 -07001313 state->event_loop()->context().monotonic_event_time;
Austin Schuh2f8fd752020-09-01 22:38:28 -07001314 if (!FLAGS_skip_order_validation) {
1315 CHECK(monotonic_now == channel_timestamp.monotonic_event_time)
1316 << ": " << FlatbufferToJson(state->event_loop()->node()) << " Now "
1317 << monotonic_now << " trying to send "
1318 << channel_timestamp.monotonic_event_time << " failure "
1319 << state->DebugString();
1320 } else if (monotonic_now != channel_timestamp.monotonic_event_time) {
1321 LOG(WARNING) << "Check failed: monotonic_now == "
1322 "channel_timestamp.monotonic_event_time) ("
1323 << monotonic_now << " vs. "
1324 << channel_timestamp.monotonic_event_time
1325 << "): " << FlatbufferToJson(state->event_loop()->node())
1326 << " Now " << monotonic_now << " trying to send "
1327 << channel_timestamp.monotonic_event_time << " failure "
1328 << state->DebugString();
1329 }
Austin Schuhe309d2a2019-11-29 13:25:21 -08001330
Austin Schuh6f3babe2020-01-26 20:34:50 -08001331 if (channel_timestamp.monotonic_event_time >
Austin Schuh858c9f32020-08-31 16:56:12 -07001332 state->monotonic_start_time() ||
Austin Schuh15649d62019-12-28 16:36:38 -08001333 event_loop_factory_ != nullptr) {
Austin Schuh8bd96322020-02-13 21:18:22 -08001334 if ((!ignore_missing_data_ && !FLAGS_skip_missing_forwarding_entries &&
Austin Schuh858c9f32020-08-31 16:56:12 -07001335 !state->at_end()) ||
Austin Schuh05b70472020-01-01 17:11:17 -08001336 channel_data.message().data() != nullptr) {
1337 CHECK(channel_data.message().data() != nullptr)
1338 << ": Got a message without data. Forwarding entry which was "
Austin Schuh2f8fd752020-09-01 22:38:28 -07001339 "not matched? Use --skip_missing_forwarding_entries to "
Brian Silverman87ac0402020-09-17 14:47:01 -07001340 "ignore this.";
Austin Schuh92547522019-12-28 14:33:43 -08001341
Austin Schuh2f8fd752020-09-01 22:38:28 -07001342 if (update_time) {
Austin Schuh8bd96322020-02-13 21:18:22 -08001343 // Confirm that the message was sent on the sending node before the
1344 // destination node (this node). As a proxy, do this by making sure
1345 // that time on the source node is past when the message was sent.
Austin Schuh2f8fd752020-09-01 22:38:28 -07001346 if (!FLAGS_skip_order_validation) {
1347 CHECK_LT(channel_timestamp.monotonic_remote_time,
1348 state->monotonic_remote_now(channel_index))
1349 << state->event_loop()->node()->name()->string_view() << " to "
1350 << state->remote_node(channel_index)->name()->string_view()
1351 << " " << state->DebugString();
1352 } else if (channel_timestamp.monotonic_remote_time >=
1353 state->monotonic_remote_now(channel_index)) {
1354 LOG(WARNING)
1355 << "Check failed: channel_timestamp.monotonic_remote_time < "
1356 "state->monotonic_remote_now(channel_index) ("
1357 << channel_timestamp.monotonic_remote_time << " vs. "
1358 << state->monotonic_remote_now(channel_index) << ") "
1359 << state->event_loop()->node()->name()->string_view() << " to "
1360 << state->remote_node(channel_index)->name()->string_view()
1361 << " currently " << channel_timestamp.monotonic_event_time
1362 << " ("
1363 << state->ToDistributedClock(
1364 channel_timestamp.monotonic_event_time)
1365 << ") remote event time "
1366 << channel_timestamp.monotonic_remote_time << " ("
1367 << state->RemoteToDistributedClock(
1368 channel_index, channel_timestamp.monotonic_remote_time)
1369 << ") " << state->DebugString();
1370 }
Austin Schuh8bd96322020-02-13 21:18:22 -08001371
1372 if (FLAGS_timestamps_to_csv) {
1373 if (offset_fp_ == nullptr) {
1374 offset_fp_ = fopen("/tmp/offsets.csv", "w");
1375 fprintf(
1376 offset_fp_,
1377 "# time_since_start, offset node 0, offset node 1, ...\n");
1378 first_time_ = channel_timestamp.realtime_event_time;
1379 }
1380
1381 fprintf(offset_fp_, "%.9f",
1382 std::chrono::duration_cast<std::chrono::duration<double>>(
1383 channel_timestamp.realtime_event_time - first_time_)
1384 .count());
Austin Schuh2f8fd752020-09-01 22:38:28 -07001385 for (int i = 1; i < time_offset_matrix_.rows(); ++i) {
1386 fprintf(offset_fp_, ", %.9f",
1387 time_offset_matrix_(i, 0) +
1388 time_slope_matrix_(i, 0) *
1389 chrono::duration<double>(
1390 event_loop_factory_->distributed_now()
1391 .time_since_epoch())
1392 .count());
Austin Schuh8bd96322020-02-13 21:18:22 -08001393 }
1394 fprintf(offset_fp_, "\n");
1395 }
Austin Schuh8bd96322020-02-13 21:18:22 -08001396 }
1397
Austin Schuh15649d62019-12-28 16:36:38 -08001398 // If we have access to the factory, use it to fix the realtime time.
Austin Schuh858c9f32020-08-31 16:56:12 -07001399 state->SetRealtimeOffset(channel_timestamp.monotonic_event_time,
1400 channel_timestamp.realtime_event_time);
Austin Schuh15649d62019-12-28 16:36:38 -08001401
Austin Schuh2f8fd752020-09-01 22:38:28 -07001402 VLOG(1) << MaybeNodeName(state->event_loop()->node()) << "Sending "
1403 << channel_timestamp.monotonic_event_time;
1404 // TODO(austin): std::move channel_data in and make that efficient in
1405 // simulation.
Austin Schuh858c9f32020-08-31 16:56:12 -07001406 state->Send(channel_index, channel_data.message().data()->Data(),
1407 channel_data.message().data()->size(),
1408 channel_timestamp.monotonic_remote_time,
1409 channel_timestamp.realtime_remote_time,
1410 channel_timestamp.remote_queue_index);
Austin Schuh2f8fd752020-09-01 22:38:28 -07001411 } else if (state->at_end() && !ignore_missing_data_) {
Austin Schuh8bd96322020-02-13 21:18:22 -08001412 // We are at the end of the log file and found missing data. Finish
Austin Schuh2f8fd752020-09-01 22:38:28 -07001413 // reading the rest of the log file and call it quits. We don't want
1414 // to replay partial data.
Austin Schuh858c9f32020-08-31 16:56:12 -07001415 while (state->OldestMessageTime() != monotonic_clock::max_time) {
1416 bool update_time_dummy;
1417 state->PopOldest(&update_time_dummy);
Austin Schuh8bd96322020-02-13 21:18:22 -08001418 }
Austin Schuh2f8fd752020-09-01 22:38:28 -07001419 } else {
1420 CHECK(channel_data.message().data() == nullptr) << ": Nullptr";
Austin Schuh92547522019-12-28 14:33:43 -08001421 }
Austin Schuhe309d2a2019-11-29 13:25:21 -08001422 } else {
Austin Schuh6f3babe2020-01-26 20:34:50 -08001423 LOG(WARNING)
1424 << "Not sending data from before the start of the log file. "
1425 << channel_timestamp.monotonic_event_time.time_since_epoch().count()
1426 << " start " << monotonic_start_time().time_since_epoch().count()
1427 << " " << FlatbufferToJson(channel_data);
Austin Schuhe309d2a2019-11-29 13:25:21 -08001428 }
1429
Austin Schuh858c9f32020-08-31 16:56:12 -07001430 const monotonic_clock::time_point next_time = state->OldestMessageTime();
Austin Schuh6f3babe2020-01-26 20:34:50 -08001431 if (next_time != monotonic_clock::max_time) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001432 VLOG(1) << "Scheduling " << MaybeNodeName(state->event_loop()->node())
1433 << "wakeup for " << next_time << "("
1434 << state->ToDistributedClock(next_time)
1435 << " distributed), now is " << state->monotonic_now();
Austin Schuh858c9f32020-08-31 16:56:12 -07001436 state->Setup(next_time);
James Kuszmaul314f1672020-01-03 20:02:08 -08001437 } else {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001438 VLOG(1) << MaybeNodeName(state->event_loop()->node())
1439 << "No next message, scheduling shutdown";
1440 // Set a timer up immediately after now to die. If we don't do this,
1441 // then the senders waiting on the message we just read will never get
1442 // called.
Austin Schuheecb9282020-01-08 17:43:30 -08001443 if (event_loop_factory_ != nullptr) {
Austin Schuh858c9f32020-08-31 16:56:12 -07001444 state->Setup(monotonic_now + event_loop_factory_->send_delay() +
1445 std::chrono::nanoseconds(1));
Austin Schuheecb9282020-01-08 17:43:30 -08001446 }
Austin Schuhe309d2a2019-11-29 13:25:21 -08001447 }
Austin Schuh8bd96322020-02-13 21:18:22 -08001448
Austin Schuh2f8fd752020-09-01 22:38:28 -07001449 // Once we make this call, the current time changes. So do everything
1450 // which involves time before changing it. That especially includes
1451 // sending the message.
1452 if (update_time) {
1453 VLOG(1) << MaybeNodeName(state->event_loop()->node())
1454 << "updating offsets";
1455
1456 std::vector<aos::monotonic_clock::time_point> before_times;
1457 before_times.resize(states_.size());
1458 std::transform(states_.begin(), states_.end(), before_times.begin(),
1459 [](const std::unique_ptr<State> &state) {
1460 return state->monotonic_now();
1461 });
1462
1463 for (size_t i = 0; i < states_.size(); ++i) {
Brian Silvermand90905f2020-09-23 14:42:56 -07001464 VLOG(1) << MaybeNodeName(states_[i]->event_loop()->node()) << "before "
1465 << states_[i]->monotonic_now();
Austin Schuh2f8fd752020-09-01 22:38:28 -07001466 }
1467
Austin Schuh8bd96322020-02-13 21:18:22 -08001468 UpdateOffsets();
Austin Schuh2f8fd752020-09-01 22:38:28 -07001469 VLOG(1) << MaybeNodeName(state->event_loop()->node()) << "Now is now "
1470 << state->monotonic_now();
1471
1472 for (size_t i = 0; i < states_.size(); ++i) {
Brian Silvermand90905f2020-09-23 14:42:56 -07001473 VLOG(1) << MaybeNodeName(states_[i]->event_loop()->node()) << "after "
1474 << states_[i]->monotonic_now();
Austin Schuh2f8fd752020-09-01 22:38:28 -07001475 }
1476
1477 // TODO(austin): We should be perfect.
1478 const std::chrono::nanoseconds kTolerance{3};
1479 if (!FLAGS_skip_order_validation) {
1480 CHECK_GE(next_time, state->monotonic_now())
1481 << ": Time skipped the next event.";
1482
1483 for (size_t i = 0; i < states_.size(); ++i) {
1484 CHECK_GE(states_[i]->monotonic_now(), before_times[i] - kTolerance)
1485 << ": Time changed too much on node "
1486 << MaybeNodeName(states_[i]->event_loop()->node());
1487 CHECK_LE(states_[i]->monotonic_now(), before_times[i] + kTolerance)
1488 << ": Time changed too much on node "
1489 << states_[i]->event_loop()->node()->name()->string_view();
1490 }
1491 } else {
1492 if (next_time < state->monotonic_now()) {
1493 LOG(WARNING) << "Check failed: next_time >= "
1494 "state->monotonic_now() ("
1495 << next_time << " vs. " << state->monotonic_now()
1496 << "): Time skipped the next event.";
1497 }
1498 for (size_t i = 0; i < states_.size(); ++i) {
1499 if (states_[i]->monotonic_now() >= before_times[i] - kTolerance) {
1500 LOG(WARNING) << "Check failed: "
1501 "states_[i]->monotonic_now() "
1502 ">= before_times[i] - kTolerance ("
1503 << states_[i]->monotonic_now() << " vs. "
1504 << before_times[i] - kTolerance
1505 << ") : Time changed too much on node "
1506 << MaybeNodeName(states_[i]->event_loop()->node());
1507 }
1508 if (states_[i]->monotonic_now() <= before_times[i] + kTolerance) {
1509 LOG(WARNING) << "Check failed: "
1510 "states_[i]->monotonic_now() "
1511 "<= before_times[i] + kTolerance ("
1512 << states_[i]->monotonic_now() << " vs. "
1513 << before_times[i] - kTolerance
1514 << ") : Time changed too much on node "
1515 << MaybeNodeName(states_[i]->event_loop()->node());
1516 }
1517 }
1518 }
Austin Schuh8bd96322020-02-13 21:18:22 -08001519 }
Austin Schuh2f8fd752020-09-01 22:38:28 -07001520
1521 VLOG(1) << MaybeNodeName(state->event_loop()->node()) << "Done sending at "
1522 << state->event_loop()->context().monotonic_event_time << " now "
1523 << state->monotonic_now();
Austin Schuh858c9f32020-08-31 16:56:12 -07001524 }));
Austin Schuhe309d2a2019-11-29 13:25:21 -08001525
Austin Schuh6f3babe2020-01-26 20:34:50 -08001526 ++live_nodes_;
1527
Austin Schuh858c9f32020-08-31 16:56:12 -07001528 if (state->OldestMessageTime() != monotonic_clock::max_time) {
1529 event_loop->OnRun([state]() { state->Setup(state->OldestMessageTime()); });
Austin Schuhe309d2a2019-11-29 13:25:21 -08001530 }
1531}
1532
1533void LogReader::Deregister() {
James Kuszmaul84ff3e52020-01-03 19:48:53 -08001534 // Make sure that things get destroyed in the correct order, rather than
1535 // relying on getting the order correct in the class definition.
Austin Schuh8bd96322020-02-13 21:18:22 -08001536 for (std::unique_ptr<State> &state : states_) {
Austin Schuh858c9f32020-08-31 16:56:12 -07001537 state->Deregister();
Austin Schuhe309d2a2019-11-29 13:25:21 -08001538 }
Austin Schuh92547522019-12-28 14:33:43 -08001539
James Kuszmaul84ff3e52020-01-03 19:48:53 -08001540 event_loop_factory_unique_ptr_.reset();
1541 event_loop_factory_ = nullptr;
Austin Schuhe309d2a2019-11-29 13:25:21 -08001542}
1543
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001544void LogReader::RemapLoggedChannel(std::string_view name, std::string_view type,
1545 std::string_view add_prefix) {
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001546 for (size_t ii = 0; ii < logged_configuration()->channels()->size(); ++ii) {
1547 const Channel *const channel = logged_configuration()->channels()->Get(ii);
1548 if (channel->name()->str() == name &&
1549 channel->type()->string_view() == type) {
1550 CHECK_EQ(0u, remapped_channels_.count(ii))
1551 << "Already remapped channel "
1552 << configuration::CleanedChannelToString(channel);
1553 remapped_channels_[ii] = std::string(add_prefix) + std::string(name);
1554 VLOG(1) << "Remapping channel "
1555 << configuration::CleanedChannelToString(channel)
1556 << " to have name " << remapped_channels_[ii];
Austin Schuh6331ef92020-01-07 18:28:09 -08001557 MakeRemappedConfig();
James Kuszmaulc7bbb3e2020-01-03 20:01:00 -08001558 return;
1559 }
1560 }
1561 LOG(FATAL) << "Unabled to locate channel with name " << name << " and type "
1562 << type;
1563}
1564
// Remaps a logged channel for a specific node.  Resolves (name, type) through
// the logged configuration's maps for |node| to find the channel actually
// used, registers a rename of that channel under |add_prefix|, and — when the
// maps redirected the name — adds a Map entry so the prefixed original name
// (e.g. /original/spray) still resolves to the prefixed node-specific name
// (e.g. /original/0/spray).
void LogReader::RemapLoggedChannel(std::string_view name, std::string_view type,
                                   const Node *node,
                                   std::string_view add_prefix) {
  VLOG(1) << "Node is " << aos::FlatbufferToJson(node);
  // Resolve through the logged config's maps to the channel this node really
  // used for (name, type).
  const Channel *remapped_channel =
      configuration::GetChannel(logged_configuration(), name, type, "", node);
  CHECK(remapped_channel != nullptr) << ": Failed to find {\"name\": \"" << name
                                     << "\", \"type\": \"" << type << "\"}";
  VLOG(1) << "Original {\"name\": \"" << name << "\", \"type\": \"" << type
          << "\"}";
  VLOG(1) << "Remapped "
          << aos::configuration::StrippedChannelToString(remapped_channel);

  // We want to make /spray on node 0 go to /0/spray by snooping the maps. And
  // we want it to degrade if the heuristics fail to just work.
  //
  // The easiest way to do this is going to be incredibly specific and verbose.
  // Look up /spray, to /0/spray. Then, prefix the result with /original to get
  // /original/0/spray. Then, create a map from /original/spray to
  // /original/0/spray for just the type we were asked for.
  if (name != remapped_channel->name()->string_view()) {
    MapT new_map;
    new_map.match = std::make_unique<ChannelT>();
    new_map.match->name = absl::StrCat(add_prefix, name);
    new_map.match->type = type;
    if (node != nullptr) {
      // Restrict the map to this node so other nodes' lookups are unaffected.
      new_map.match->source_node = node->name()->str();
    }
    new_map.rename = std::make_unique<ChannelT>();
    new_map.rename->name =
        absl::StrCat(add_prefix, remapped_channel->name()->string_view());
    maps_.emplace_back(std::move(new_map));
  }

  // Register the rename of the resolved channel and rebuild the replay config.
  const size_t channel_index =
      configuration::ChannelIndex(logged_configuration(), remapped_channel);
  CHECK_EQ(0u, remapped_channels_.count(channel_index))
      << "Already remapped channel "
      << configuration::CleanedChannelToString(remapped_channel);
  remapped_channels_[channel_index] =
      absl::StrCat(add_prefix, remapped_channel->name()->string_view());
  MakeRemappedConfig();
}
1608
// Rebuilds remapped_configuration_: the configuration used for replay.  It is
// base_config (replay_configuration_ if supplied, else the logged config)
// plus a copy of every remapped channel under its new name, plus any Maps
// registered by RemapLoggedChannel.
void LogReader::MakeRemappedConfig() {
  // Changing the configuration out from under live event loops would leave
  // them pointing at stale channels, so forbid it.
  for (std::unique_ptr<State> &state : states_) {
    if (state) {
      CHECK(!state->event_loop())
          << ": Can't change the mapping after the events are scheduled.";
    }
  }

  // If no remapping occurred and we are using the original config, then there
  // is nothing interesting to do here.
  if (remapped_channels_.empty() && replay_configuration_ == nullptr) {
    remapped_configuration_ = logged_configuration();
    return;
  }
  // Config to copy Channel definitions from. Use the specified
  // replay_configuration_ if it has been provided.
  const Configuration *const base_config = replay_configuration_ == nullptr
                                               ? logged_configuration()
                                               : replay_configuration_;
  // The remapped config will be identical to the base_config, except that it
  // will have a bunch of extra channels in the channel list, which are exact
  // copies of the remapped channels, but with different names.
  // Because the flatbuffers API is a pain to work with, this requires a bit of
  // a song-and-dance to get copied over.
  // The order of operations is to:
  // 1) Make a flatbuffer builder for a config that will just contain a list of
  //    the new channels that we want to add.
  // 2) For each channel that we are remapping:
  //    a) Make a buffer/builder and construct into it a Channel table that only
  //       contains the new name for the channel.
  //    b) Merge the new channel with just the name into the channel that we are
  //       trying to copy, built in the flatbuffer builder made in 1. This gives
  //       us the new channel definition that we need.
  // 3) Using this list of offsets, build the Configuration of just new
  //    Channels.
  // 4) Merge the Configuration with the new Channels into the base_config.
  // 5) Call MergeConfiguration() on that result to give MergeConfiguration a
  //    chance to sanitize the config.

  // This is the builder that we use for the config containing all the new
  // channels.
  flatbuffers::FlatBufferBuilder new_config_fbb;
  new_config_fbb.ForceDefaults(true);
  std::vector<flatbuffers::Offset<Channel>> channel_offsets;
  for (auto &pair : remapped_channels_) {
    // This is the builder that we use for creating the Channel with just the
    // new name.
    flatbuffers::FlatBufferBuilder new_name_fbb;
    new_name_fbb.ForceDefaults(true);
    const flatbuffers::Offset<flatbuffers::String> name_offset =
        new_name_fbb.CreateString(pair.second);
    ChannelBuilder new_name_builder(new_name_fbb);
    new_name_builder.add_name(name_offset);
    new_name_fbb.Finish(new_name_builder.Finish());
    const FlatbufferDetachedBuffer<Channel> new_name = new_name_fbb.Release();
    // Retrieve the channel that we want to copy, confirming that it is
    // actually present in base_config.
    const Channel *const base_channel = CHECK_NOTNULL(configuration::GetChannel(
        base_config, logged_configuration()->channels()->Get(pair.first), "",
        nullptr));
    // Actually create the new channel and put it into the vector of Offsets
    // that we will use to create the new Configuration.
    channel_offsets.emplace_back(MergeFlatBuffers<Channel>(
        reinterpret_cast<const flatbuffers::Table *>(base_channel),
        reinterpret_cast<const flatbuffers::Table *>(&new_name.message()),
        &new_config_fbb));
  }
  // Create the Configuration containing the new channels that we want to add.
  const auto new_channel_vector_offsets =
      new_config_fbb.CreateVector(channel_offsets);

  // Now create the new maps.  Each MapT (built by RemapLoggedChannel) is
  // serialized as a Map table: match {name, type[, source_node]} -> rename
  // {name}.
  std::vector<flatbuffers::Offset<Map>> map_offsets;
  for (const MapT &map : maps_) {
    const flatbuffers::Offset<flatbuffers::String> match_name_offset =
        new_config_fbb.CreateString(map.match->name);
    const flatbuffers::Offset<flatbuffers::String> match_type_offset =
        new_config_fbb.CreateString(map.match->type);
    const flatbuffers::Offset<flatbuffers::String> rename_name_offset =
        new_config_fbb.CreateString(map.rename->name);
    flatbuffers::Offset<flatbuffers::String> match_source_node_offset;
    if (!map.match->source_node.empty()) {
      match_source_node_offset =
          new_config_fbb.CreateString(map.match->source_node);
    }
    // Note: all strings must be created before the table builders start;
    // flatbuffers forbids nested construction on one builder.
    Channel::Builder match_builder(new_config_fbb);
    match_builder.add_name(match_name_offset);
    match_builder.add_type(match_type_offset);
    if (!map.match->source_node.empty()) {
      match_builder.add_source_node(match_source_node_offset);
    }
    const flatbuffers::Offset<Channel> match_offset = match_builder.Finish();

    Channel::Builder rename_builder(new_config_fbb);
    rename_builder.add_name(rename_name_offset);
    const flatbuffers::Offset<Channel> rename_offset = rename_builder.Finish();

    Map::Builder map_builder(new_config_fbb);
    map_builder.add_match(match_offset);
    map_builder.add_rename(rename_offset);
    map_offsets.emplace_back(map_builder.Finish());
  }

  const auto new_maps_offsets = new_config_fbb.CreateVector(map_offsets);

  ConfigurationBuilder new_config_builder(new_config_fbb);
  new_config_builder.add_channels(new_channel_vector_offsets);
  new_config_builder.add_maps(new_maps_offsets);
  new_config_fbb.Finish(new_config_builder.Finish());
  const FlatbufferDetachedBuffer<Configuration> new_name_config =
      new_config_fbb.Release();
  // Merge the new channels configuration into the base_config, giving us the
  // remapped configuration.
  remapped_configuration_buffer_ =
      std::make_unique<FlatbufferDetachedBuffer<Configuration>>(
          MergeFlatBuffers<Configuration>(base_config,
                                          &new_name_config.message()));
  // Call MergeConfiguration to deal with sanitizing the config.
  remapped_configuration_buffer_ =
      std::make_unique<FlatbufferDetachedBuffer<Configuration>>(
          configuration::MergeConfiguration(*remapped_configuration_buffer_));

  remapped_configuration_ = &remapped_configuration_buffer_->message();
}
1733
Austin Schuh6f3babe2020-01-26 20:34:50 -08001734const Channel *LogReader::RemapChannel(const EventLoop *event_loop,
1735 const Channel *channel) {
1736 std::string_view channel_name = channel->name()->string_view();
1737 std::string_view channel_type = channel->type()->string_view();
1738 const int channel_index =
1739 configuration::ChannelIndex(logged_configuration(), channel);
1740 // If the channel is remapped, find the correct channel name to use.
1741 if (remapped_channels_.count(channel_index) > 0) {
Austin Schuhee711052020-08-24 16:06:09 -07001742 VLOG(3) << "Got remapped channel on "
Austin Schuh6f3babe2020-01-26 20:34:50 -08001743 << configuration::CleanedChannelToString(channel);
1744 channel_name = remapped_channels_[channel_index];
1745 }
1746
Austin Schuhee711052020-08-24 16:06:09 -07001747 VLOG(2) << "Going to remap channel " << channel_name << " " << channel_type;
Austin Schuh6f3babe2020-01-26 20:34:50 -08001748 const Channel *remapped_channel = configuration::GetChannel(
1749 event_loop->configuration(), channel_name, channel_type,
1750 event_loop->name(), event_loop->node());
1751
1752 CHECK(remapped_channel != nullptr)
1753 << ": Unable to send {\"name\": \"" << channel_name << "\", \"type\": \""
1754 << channel_type << "\"} because it is not in the provided configuration.";
1755
1756 return remapped_channel;
1757}
1758
// Takes ownership of the ChannelMerger that supplies this node's sorted
// message stream.
LogReader::State::State(std::unique_ptr<ChannelMerger> channel_merger)
    : channel_merger_(std::move(channel_merger)) {}
1761
1762EventLoop *LogReader::State::SetNodeEventLoopFactory(
1763 NodeEventLoopFactory *node_event_loop_factory) {
1764 node_event_loop_factory_ = node_event_loop_factory;
1765 event_loop_unique_ptr_ =
1766 node_event_loop_factory_->MakeEventLoop("log_reader");
1767 return event_loop_unique_ptr_.get();
1768}
1769
1770void LogReader::State::SetChannelCount(size_t count) {
1771 channels_.resize(count);
1772 filters_.resize(count);
1773 channel_target_event_loop_factory_.resize(count);
1774}
1775
1776void LogReader::State::SetChannel(
1777 size_t channel, std::unique_ptr<RawSender> sender,
Austin Schuh2f8fd752020-09-01 22:38:28 -07001778 message_bridge::NoncausalOffsetEstimator *filter,
Austin Schuh858c9f32020-08-31 16:56:12 -07001779 NodeEventLoopFactory *channel_target_event_loop_factory) {
1780 channels_[channel] = std::move(sender);
1781 filters_[channel] = filter;
1782 channel_target_event_loop_factory_[channel] =
1783 channel_target_event_loop_factory;
1784}
1785
1786std::tuple<TimestampMerger::DeliveryTimestamp, int,
1787 FlatbufferVector<MessageHeader>>
1788LogReader::State::PopOldest(bool *update_time) {
1789 CHECK_GT(sorted_messages_.size(), 0u);
1790
1791 std::tuple<TimestampMerger::DeliveryTimestamp, int,
Austin Schuh2f8fd752020-09-01 22:38:28 -07001792 FlatbufferVector<MessageHeader>,
1793 message_bridge::NoncausalOffsetEstimator *>
Austin Schuh858c9f32020-08-31 16:56:12 -07001794 result = std::move(sorted_messages_.front());
Austin Schuh2f8fd752020-09-01 22:38:28 -07001795 VLOG(2) << MaybeNodeName(event_loop_->node()) << "PopOldest Popping "
Austin Schuh858c9f32020-08-31 16:56:12 -07001796 << std::get<0>(result).monotonic_event_time;
1797 sorted_messages_.pop_front();
1798 SeedSortedMessages();
1799
Austin Schuh2f8fd752020-09-01 22:38:28 -07001800 if (std::get<3>(result) != nullptr) {
1801 *update_time = std::get<3>(result)->Pop(
1802 event_loop_->node(), std::get<0>(result).monotonic_event_time);
1803 } else {
1804 *update_time = false;
1805 }
Austin Schuh858c9f32020-08-31 16:56:12 -07001806 return std::make_tuple(std::get<0>(result), std::get<1>(result),
1807 std::move(std::get<2>(result)));
1808}
1809
1810monotonic_clock::time_point LogReader::State::OldestMessageTime() const {
1811 if (sorted_messages_.size() > 0) {
Austin Schuh2f8fd752020-09-01 22:38:28 -07001812 VLOG(2) << MaybeNodeName(event_loop_->node()) << "oldest message at "
Austin Schuh858c9f32020-08-31 16:56:12 -07001813 << std::get<0>(sorted_messages_.front()).monotonic_event_time;
1814 return std::get<0>(sorted_messages_.front()).monotonic_event_time;
1815 }
1816
1817 return channel_merger_->OldestMessageTime();
1818}
1819
// Pre-sorts messages from the channel merger into sorted_messages_ until
// roughly 2 seconds of data is buffered past the front message (and at least
// past the log start time).  Forwarding timestamps encountered along the way
// are sampled into the matching clock-offset filter.
void LogReader::State::SeedSortedMessages() {
  // Queue until 2 seconds past the front message's time, or 2 seconds past
  // the log start when the queue is empty.
  const aos::monotonic_clock::time_point end_queue_time =
      (sorted_messages_.size() > 0
           ? std::get<0>(sorted_messages_.front()).monotonic_event_time
           : channel_merger_->monotonic_start_time()) +
      std::chrono::seconds(2);

  while (true) {
    // The merger is out of messages entirely; nothing more to seed.
    if (channel_merger_->OldestMessageTime() == monotonic_clock::max_time) {
      return;
    }
    if (sorted_messages_.size() > 0) {
      // Stop placing sorted messages on the list once we have 2 seconds
      // queued up (but queue at least until the log starts).
      if (end_queue_time <
          std::get<0>(sorted_messages_.back()).monotonic_event_time) {
        return;
      }
    }

    TimestampMerger::DeliveryTimestamp channel_timestamp;
    int channel_index;
    FlatbufferVector<MessageHeader> channel_data =
        FlatbufferVector<MessageHeader>::Empty();

    message_bridge::NoncausalOffsetEstimator *filter = nullptr;

    std::tie(channel_timestamp, channel_index, channel_data) =
        channel_merger_->PopOldest();

    // Skip any messages without forwarding information.
    if (channel_timestamp.monotonic_remote_time != monotonic_clock::min_time) {
      // Got a forwarding timestamp!
      filter = filters_[channel_index];

      CHECK(filter != nullptr);

      // Call the correct method depending on if we are the forward or
      // reverse direction here.
      filter->Sample(event_loop_->node(),
                     channel_timestamp.monotonic_event_time,
                     channel_timestamp.monotonic_remote_time);
    }
    // Keep the filter alongside the message so PopOldest() can report the
    // sample's consumption later.
    sorted_messages_.emplace_back(channel_timestamp, channel_index,
                                  std::move(channel_data), filter);
  }
}
1867
1868void LogReader::State::Deregister() {
1869 for (size_t i = 0; i < channels_.size(); ++i) {
1870 channels_[i].reset();
1871 }
1872 event_loop_unique_ptr_.reset();
1873 event_loop_ = nullptr;
1874 timer_handler_ = nullptr;
1875 node_event_loop_factory_ = nullptr;
1876}
1877
Austin Schuhe309d2a2019-11-29 13:25:21 -08001878} // namespace logger
1879} // namespace aos