Merge "Add support for multithreaded lzma encoder"
diff --git a/aos/configuration.cc b/aos/configuration.cc
index ed99e2a..d05cba8 100644
--- a/aos/configuration.cc
+++ b/aos/configuration.cc
@@ -458,6 +458,58 @@
}
}
+void HandleReverseMaps(
+ const flatbuffers::Vector<flatbuffers::Offset<aos::Map>> *maps,
+ std::string_view type, const Node *node, std::set<std::string> *names) {
+ for (const Map *map : *maps) {
+ CHECK_NOTNULL(map);
+ const Channel *const match = CHECK_NOTNULL(map->match());
+ const Channel *const rename = CHECK_NOTNULL(map->rename());
+
+ // Handle type specific maps.
+ const flatbuffers::String *const match_type_string = match->type();
+ if (match_type_string != nullptr &&
+ match_type_string->string_view() != type) {
+ continue;
+ }
+
+ // Now handle node specific maps.
+ const flatbuffers::String *const match_source_node_string =
+ match->source_node();
+ if (node != nullptr && match_source_node_string != nullptr &&
+ match_source_node_string->string_view() !=
+ node->name()->string_view()) {
+ continue;
+ }
+
+ const flatbuffers::String *const match_name_string = match->name();
+ const flatbuffers::String *const rename_name_string = rename->name();
+ if (match_name_string == nullptr || rename_name_string == nullptr) {
+ continue;
+ }
+
+ const std::string rename_name = rename_name_string->str();
+ const std::string_view match_name = match_name_string->string_view();
+
+ std::set<std::string> possible_renames;
+
+ // Check if the current name(s) could have been reached using the provided
+ // rename.
+ if (match_name.back() == '*') {
+ for (const std::string &option : *names) {
+ if (option.substr(0, rename_name.size()) == rename_name) {
+ possible_renames.insert(
+ absl::StrCat(match_name.substr(0, match_name.size() - 1),
+ option.substr(rename_name.size())));
+ }
+ }
+ names->insert(possible_renames.begin(), possible_renames.end());
+ } else if (names->count(rename_name) != 0) {
+ names->insert(std::string(match_name));
+ }
+ }
+}
+
} // namespace
// Maps name for the provided maps. Modifies name.
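For illustration only (not part of the diff): a standalone sketch of the wildcard branch in HandleReverseMaps above, using names from the multi-node test later in this change. Assuming a map whose match name is "/magic/string*" and whose rename name is "/foo", the already-known name "/foo/testing" gains the alias "/magic/string/testing".

#include <iostream>
#include <set>
#include <string>

int main() {
  // Hypothetical map contents, mirroring what the test below implies.
  const std::string match_name = "/magic/string*";  // match->name()
  const std::string rename_name = "/foo";           // rename->name()
  std::set<std::string> names{"/foo/testing"};      // names known so far

  std::set<std::string> possible_renames;
  for (const std::string &option : names) {
    // Same prefix test and splice as the wildcard branch above.
    if (option.substr(0, rename_name.size()) == rename_name) {
      possible_renames.insert(match_name.substr(0, match_name.size() - 1) +
                              option.substr(rename_name.size()));
    }
  }
  names.insert(possible_renames.begin(), possible_renames.end());
  for (const std::string &n : names) std::cout << n << "\n";
  // Prints "/foo/testing" and "/magic/string/testing".
}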
@@ -466,6 +518,9 @@
// pointers. These combine to make it a performance hotspot during many tests
// under msan, so there is some optimizing around caching intermediates instead
// of dereferencing the pointer multiple times.
+//
+// Deliberately not in an anonymous namespace so that the log-reading code can
+// reference it.
void HandleMaps(const flatbuffers::Vector<flatbuffers::Offset<aos::Map>> *maps,
std::string *name, std::string_view type, const Node *node) {
// For the same reason we merge configs in reverse order, we want to process
@@ -527,6 +582,25 @@
}
}
+std::set<std::string> GetChannelAliases(const Configuration *config,
+ std::string_view name,
+ std::string_view type,
+ const std::string_view application_name,
+ const Node *node) {
+ std::set<std::string> names{std::string(name)};
+ if (config->has_maps()) {
+ HandleReverseMaps(config->maps(), type, node, &names);
+ }
+ {
+ const Application *application =
+ GetApplication(config, node, application_name);
+ if (application != nullptr && application->has_maps()) {
+ HandleReverseMaps(application->maps(), type, node, &names);
+ }
+ }
+ return names;
+}
+
FlatbufferDetachedBuffer<Configuration> MergeConfiguration(
const Flatbuffer<Configuration> &config) {
// auto_merge_config will contain all the fields of the Configuration that are
diff --git a/aos/configuration.h b/aos/configuration.h
index cb52d09..31603fe 100644
--- a/aos/configuration.h
+++ b/aos/configuration.h
@@ -91,6 +91,23 @@
channel->type()->string_view(), application_name, node);
}
+// Returns a list of all the channel names that can be used to refer to the
+// specified channel on the given node/application. This allows a reverse-lookup
+// of any renames that happen.
+// Does not perform a forwards-lookup on the provided name first.
+std::set<std::string> GetChannelAliases(const Configuration *config,
+ std::string_view name,
+ std::string_view type,
+ const std::string_view application_name,
+ const Node *node);
+inline std::set<std::string> GetChannelAliases(
+ const Configuration *config, const Channel *channel,
+ const std::string_view application_name, const Node *node) {
+ return GetChannelAliases(config, channel->name()->string_view(),
+ channel->type()->string_view(), application_name,
+ node);
+}
+
// Returns the channel index (or dies) of channel in the provided config.
size_t ChannelIndex(const Configuration *config, const Channel *channel);
diff --git a/aos/configuration_test.cc b/aos/configuration_test.cc
index b493058..242ee17 100644
--- a/aos/configuration_test.cc
+++ b/aos/configuration_test.cc
@@ -190,6 +190,31 @@
aos::testing::FlatbufferEq(ExpectedLocation()));
}
+// Tests that we can do reverse-lookups of channel names.
+TEST_F(ConfigurationTest, GetChannelAliases) {
+ FlatbufferDetachedBuffer<Configuration> config =
+ ReadConfig(ArtifactPath("aos/testdata/config1.json"));
+
+ // Test a basic lookup first.
+ EXPECT_THAT(
+ GetChannelAliases(&config.message(), "/foo", ".aos.bar", "app1", nullptr),
+ ::testing::UnorderedElementsAre("/foo", "/batman", "/bar"));
+ EXPECT_THAT(
+ GetChannelAliases(&config.message(), "/bar", ".aos.bar", "app1", nullptr),
+ ::testing::UnorderedElementsAre("/batman", "/bar"));
+ EXPECT_THAT(GetChannelAliases(&config.message(), "/batman", ".aos.bar",
+ "app1", nullptr),
+ ::testing::UnorderedElementsAre("/batman"));
+ // /bar (deliberately) does not get included because of the ordering in the
+ // map.
+ EXPECT_THAT(
+ GetChannelAliases(&config.message(), "/foo", ".aos.bar", "", nullptr),
+ ::testing::UnorderedElementsAre("/foo", "/batman"));
+ EXPECT_THAT(
+ GetChannelAliases(&config.message(), "/foo", ".aos.bar", "app2", nullptr),
+ ::testing::UnorderedElementsAre("/foo", "/batman", "/baz"));
+}
+
// Tests that we can lookup a location with node specific maps.
TEST_F(ConfigurationTest, GetChannelMultinode) {
FlatbufferDetachedBuffer<Configuration> config =
@@ -221,6 +246,34 @@
EXPECT_EQ(GetChannel(config, "/batman3", ".aos.bar", "app1", pi2), nullptr);
}
+// Tests that reverse channel lookup on a multi-node config (including with
+// wildcards) works.
+TEST_F(ConfigurationTest, GetChannelAliasesMultinode) {
+ FlatbufferDetachedBuffer<Configuration> config =
+ ReadConfig(ArtifactPath("aos/testdata/good_multinode.json"));
+
+ const Node *pi1 = GetNode(&config.message(), "pi1");
+ const Node *pi2 = GetNode(&config.message(), "pi2");
+
+ EXPECT_THAT(
+ GetChannelAliases(&config.message(), "/foo", ".aos.bar", "app1", pi1),
+ ::testing::UnorderedElementsAre("/foo", "/batman", "/batman2", "/batman3",
+ "/magic/string"));
+
+ EXPECT_THAT(
+ GetChannelAliases(&config.message(), "/foo", ".aos.baz", "app1", pi1),
+ ::testing::UnorderedElementsAre("/foo", "/batman3", "/magic/string"));
+
+ EXPECT_THAT(
+ GetChannelAliases(&config.message(), "/foo/testing", ".aos.bar", "", pi1),
+ ::testing::UnorderedElementsAre("/foo/testing", "/magic/string/testing"));
+
+ EXPECT_THAT(
+ GetChannelAliases(&config.message(), "/foo/testing", ".aos.bar", "app1",
+ pi2),
+ ::testing::UnorderedElementsAre("/foo/testing", "/magic/string/testing"));
+}
+
// Tests that we can lookup a location with type specific maps.
TEST_F(ConfigurationTest, GetChannelTypedMultinode) {
FlatbufferDetachedBuffer<Configuration> config =
diff --git a/aos/network/log_web_proxy_main.cc b/aos/network/log_web_proxy_main.cc
index 782c2be..1cc8e17 100644
--- a/aos/network/log_web_proxy_main.cc
+++ b/aos/network/log_web_proxy_main.cc
@@ -15,6 +15,8 @@
DEFINE_string(data_dir, "www", "Directory to serve data files from");
DEFINE_string(node, "", "Directory to serve data files from");
DEFINE_int32(buffer_size, -1, "-1 if infinite, in # of messages / channel.");
+DEFINE_double(monotonic_start_time, -1.0, "Start time (sec)");
+DEFINE_double(monotonic_end_time, -1.0, "End time (sec)");
int main(int argc, char **argv) {
aos::InitGoogle(&argc, &argv);
@@ -43,6 +45,15 @@
event_loop->SkipTimingReport();
+ if (FLAGS_monotonic_start_time > 0) {
+ event_loop->AddTimer([&reader]() { reader.event_loop_factory()->Exit(); })
+ ->Setup(aos::monotonic_clock::time_point(
+ std::chrono::duration_cast<std::chrono::nanoseconds>(
+ std::chrono::duration<double>(FLAGS_monotonic_start_time))));
+
+ reader.event_loop_factory()->Run();
+ }
+
aos::web_proxy::WebProxy web_proxy(
event_loop.get(), aos::web_proxy::StoreHistory::kYes, FLAGS_buffer_size);
@@ -51,5 +62,12 @@
// Keep the web proxy alive past when we finish reading the logfile.
reader.set_exit_on_finish(false);
+ if (FLAGS_monotonic_end_time > 0) {
+ event_loop->AddTimer([&web_proxy]() { web_proxy.StopRecording(); })
+ ->Setup(aos::monotonic_clock::time_point(
+ std::chrono::duration_cast<std::chrono::nanoseconds>(
+ std::chrono::duration<double>(FLAGS_monotonic_end_time))));
+ }
+
reader.event_loop_factory()->Run();
}
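For reference: the two new flags are plain seconds expressed as doubles, converted to a monotonic offset exactly as in the timer setup above. A minimal standalone sketch of that conversion (plain std::chrono only, so it compiles outside of aos; the real code wraps the result in aos::monotonic_clock::time_point and passes it to Setup()):

#include <chrono>
#include <iostream>

int main() {
  // Stand-in for --monotonic_start_time / --monotonic_end_time.
  const double start_seconds = 100.5;
  const std::chrono::nanoseconds offset =
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          std::chrono::duration<double>(start_seconds));
  std::cout << offset.count() << " ns\n";  // 100500000000 ns
}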
diff --git a/aos/network/timestamp_filter.cc b/aos/network/timestamp_filter.cc
index de150c5..c1cf612 100644
--- a/aos/network/timestamp_filter.cc
+++ b/aos/network/timestamp_filter.cc
@@ -673,39 +673,42 @@
monotonic_clock::time_point ta) {
return ta > std::get<0>(t) + std::get<1>(t);
});
- auto other_t1_it = std::upper_bound(
- other_t0_it, other->timestamps_.end(), std::get<0>(pointer.t1_),
- [](monotonic_clock::time_point ta,
- std::tuple<aos::monotonic_clock::time_point,
- std::chrono::nanoseconds>
- t) { return ta < std::get<0>(t) + std::get<1>(t); });
+ if (other_t0_it != other->timestamps_.end()) {
+ auto other_t1_it = std::upper_bound(
+ other_t0_it, other->timestamps_.end(), std::get<0>(pointer.t1_),
+ [](monotonic_clock::time_point ta,
+ std::tuple<aos::monotonic_clock::time_point,
+ std::chrono::nanoseconds>
+ t) { return ta < std::get<0>(t) + std::get<1>(t); });
- if (std::get<0>(*other_t0_it) + std::get<1>(*other_t0_it) <
- std::get<0>(pointer.t1_)) {
- pointer.other_points_.clear();
+ if (std::get<0>(*other_t0_it) + std::get<1>(*other_t0_it) <
+ std::get<0>(pointer.t1_)) {
+ pointer.other_points_.clear();
- // Now, we've got a range. [other_t0_it, other_t1_it).
- for (auto other_it = other_t0_it; other_it != other_t1_it; ++other_it) {
- const std::tuple<monotonic_clock::time_point, std::chrono::nanoseconds>
- flipped_point =
- std::make_tuple(std::get<0>(*other_it) + std::get<1>(*other_it),
- -std::get<1>(*other_it) - kMinNetworkDelay());
+ // Now, we've got a range. [other_t0_it, other_t1_it).
+ for (auto other_it = other_t0_it; other_it != other_t1_it; ++other_it) {
+ const std::tuple<monotonic_clock::time_point,
+ std::chrono::nanoseconds>
+ flipped_point = std::make_tuple(
+ std::get<0>(*other_it) + std::get<1>(*other_it),
+ -std::get<1>(*other_it) - kMinNetworkDelay());
- // If the new point from the opposite direction filter is below the
- // interpolated value at that point, then the opposite direction point
- // defines a new min and we should take it.
- if (NoncausalTimestampFilter::InterpolateOffset(
- pointer.t0_, pointer.t1_, std::get<0>(flipped_point)) >
- std::get<1>(flipped_point)) {
- // Add it to the list of points to consider.
- pointer.other_points_.emplace_back(std::make_pair(
- std::distance(other->timestamps_.begin(), other_it),
- flipped_point));
+ // If the new point from the opposite direction filter is below the
+ // interpolated value at that point, then the opposite direction point
+ // defines a new min and we should take it.
+ if (NoncausalTimestampFilter::InterpolateOffset(
+ pointer.t0_, pointer.t1_, std::get<0>(flipped_point)) >
+ std::get<1>(flipped_point)) {
+ // Add it to the list of points to consider.
+ pointer.other_points_.emplace_back(std::make_pair(
+ std::distance(other->timestamps_.begin(), other_it),
+ flipped_point));
+ }
}
- }
- if (pointer.other_points_.size() > 0) {
- return InterpolateWithOtherFilter(pointer, ta, t0, t1);
+ if (pointer.other_points_.size() > 0) {
+ return InterpolateWithOtherFilter(pointer, ta, t0, t1);
+ }
}
}
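The timestamp_filter.cc change above is structural: the existing interpolation logic is wrapped in a check that other_t0_it is not the end iterator before it is dereferenced. A standalone sketch of that general guard pattern, with hypothetical data:

#include <algorithm>
#include <iostream>
#include <vector>

int main() {
  const std::vector<int> times{10, 20, 30};
  const int query = 35;
  // upper_bound can return end() when nothing is greater than the query...
  const auto it = std::upper_bound(times.begin(), times.end(), query);
  // ...so only dereference the iterator after checking for that case.
  if (it != times.end()) {
    std::cout << "next sample: " << *it << "\n";
  } else {
    std::cout << "no later sample\n";
  }
}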
diff --git a/aos/network/web_proxy.cc b/aos/network/web_proxy.cc
index 7161ee3..d384f72 100644
--- a/aos/network/web_proxy.cc
+++ b/aos/network/web_proxy.cc
@@ -59,7 +59,7 @@
}
TimerHandler *const timer = event_loop_->AddTimer([this]() {
for (auto &subscriber : subscribers_) {
- if (subscriber) subscriber->RunIteration();
+ if (subscriber) subscriber->RunIteration(recording_);
}
});
@@ -201,54 +201,58 @@
global_epoll = nullptr;
}
-void Subscriber::RunIteration() {
- if (channels_.empty() && (buffer_size_ == 0 || !store_history_)) {
- fetcher_->Fetch();
- message_buffer_.clear();
- return;
- }
+void WebProxy::StopRecording() { websocket_handler_->StopRecording(); }
- while (fetcher_->FetchNext()) {
- // If we aren't building up a buffer, short-circuit the FetchNext().
- if (buffer_size_ == 0) {
+void Subscriber::RunIteration(bool fetch_new) {
+ if (fetch_new) {
+ if (channels_.empty() && (buffer_size_ == 0 || !store_history_)) {
fetcher_->Fetch();
+ message_buffer_.clear();
+ return;
}
- Message message;
- message.index = fetcher_->context().queue_index;
- VLOG(2) << "Packing a message with " << GetPacketCount(fetcher_->context())
- << "packets";
- for (int packet_index = 0;
- packet_index < GetPacketCount(fetcher_->context()); ++packet_index) {
- // Pack directly into the mbuffer. This is admittedly a bit painful.
- const size_t packet_size =
- PackedMessageSize(fetcher_->context(), packet_index);
- struct mbuf *mbuffer = mbuf_alloc(packet_size);
- {
- // Wrap a pre-allocated builder around the mbuffer.
- PreallocatedAllocator allocator(mbuf_buf(mbuffer), packet_size);
- flatbuffers::FlatBufferBuilder fbb(packet_size, &allocator);
- flatbuffers::Offset<MessageHeader> message_offset = PackMessage(
- &fbb, fetcher_->context(), channel_index_, packet_index);
- fbb.Finish(message_offset);
-
- // Now, the flatbuffer is built from the back to the front. So any
- // extra memory will be at the front. Setup the end and start pointers
- // on the mbuf.
- mbuf_set_end(mbuffer, packet_size);
- mbuf_set_pos(mbuffer, packet_size - fbb.GetSize());
+ while (fetcher_->FetchNext()) {
+ // If we aren't building up a buffer, short-circuit the FetchNext().
+ if (buffer_size_ == 0) {
+ fetcher_->Fetch();
}
+ Message message;
+ message.index = fetcher_->context().queue_index;
+ VLOG(2) << "Packing a message with "
+ << GetPacketCount(fetcher_->context()) << " packets";
+ for (int packet_index = 0;
+ packet_index < GetPacketCount(fetcher_->context()); ++packet_index) {
+ // Pack directly into the mbuffer. This is admittedly a bit painful.
+ const size_t packet_size =
+ PackedMessageSize(fetcher_->context(), packet_index);
+ struct mbuf *mbuffer = mbuf_alloc(packet_size);
- message.data.emplace_back(
- std::shared_ptr<struct mbuf>(mbuffer, mem_deref));
- }
- message_buffer_.push_back(std::move(message));
- // If we aren't keeping a buffer, then we should only do one iteration of
- // the while loop--otherwise, if additional messages arrive between the
- // first FetchNext() and the second iteration then we can end up behaving
- // poorly (since we do a Fetch() when buffer_size_ == 0).
- if (buffer_size_ == 0) {
- break;
+ {
+ // Wrap a pre-allocated builder around the mbuffer.
+ PreallocatedAllocator allocator(mbuf_buf(mbuffer), packet_size);
+ flatbuffers::FlatBufferBuilder fbb(packet_size, &allocator);
+ flatbuffers::Offset<MessageHeader> message_offset = PackMessage(
+ &fbb, fetcher_->context(), channel_index_, packet_index);
+ fbb.Finish(message_offset);
+
+ // Now, the flatbuffer is built from the back to the front. So any
+ // extra memory will be at the front. Setup the end and start
+ // pointers on the mbuf.
+ mbuf_set_end(mbuffer, packet_size);
+ mbuf_set_pos(mbuffer, packet_size - fbb.GetSize());
+ }
+
+ message.data.emplace_back(
+ std::shared_ptr<struct mbuf>(mbuffer, mem_deref));
+ }
+ message_buffer_.push_back(std::move(message));
+ // If we aren't keeping a buffer, then we should only do one iteration of
+ // the while loop--otherwise, if additional messages arrive between the
+ // first FetchNext() and the second iteration then we can end up behaving
+ // poorly (since we do a Fetch() when buffer_size_ == 0).
+ if (buffer_size_ == 0) {
+ break;
+ }
}
}
for (auto &conn : channels_) {
diff --git a/aos/network/web_proxy.h b/aos/network/web_proxy.h
index baca26e..0c1d1dc 100644
--- a/aos/network/web_proxy.h
+++ b/aos/network/web_proxy.h
@@ -41,6 +41,10 @@
void onData(::seasocks::WebSocket *sock, const uint8_t *data,
size_t size) override;
void onDisconnect(::seasocks::WebSocket *sock) override;
+ // Stops recording data, even if the event loop continues running. This allows
+ // us to continue serving the webserver + websocket server, without having to
+ // load more actual data.
+ void StopRecording() { recording_ = false; }
private:
::seasocks::Server *server_;
@@ -51,6 +55,8 @@
connections_;
EventLoop *const event_loop_;
+ // Whether to pay attention to new messages.
+ bool recording_ = true;
};
// Wrapper class that manages the seasocks server and WebsocketHandler.
@@ -75,6 +81,9 @@
void SetDataPath(const char *path) { server_.setStaticPath(path); }
+ // Stops recording data. Useful for setting end times in log replay.
+ void StopRecording();
+
private:
WebProxy(aos::EventLoop *event_loop, aos::internal::EPoll *epoll,
StoreHistory store_history, int per_channel_buffer_size_bytes);
@@ -121,7 +130,12 @@
store_history_(store_history == StoreHistory::kYes),
buffer_size_(buffer_size) {}
- void RunIteration();
+ // Runs a single iteration of going through and fetching new data as needed
+ // and servicing any WebRTC channels that are requesting messages.
+ // fetch_new specifies whether we should actually attempt to retrieve new data
+ // on the channel--if false, we will only worry about sending existing data to
+ // any clients.
+ void RunIteration(bool fetch_new);
void AddListener(std::shared_ptr<ScopedDataChannel> data_channel,
TransferMethod transfer_method);
diff --git a/aos/util/log_to_mcap.cc b/aos/util/log_to_mcap.cc
index 49aca69..5330c60 100644
--- a/aos/util/log_to_mcap.cc
+++ b/aos/util/log_to_mcap.cc
@@ -5,6 +5,7 @@
DEFINE_string(node, "", "Node to replay from the perspective of.");
DEFINE_string(output_path, "/tmp/log.mcap", "Log to output.");
+DEFINE_string(mode, "json", "json or flatbuffer serialization.");
// Converts an AOS log to an MCAP log that can be fed into Foxglove. To try this
// out, run:
@@ -30,6 +31,9 @@
std::unique_ptr<aos::EventLoop> mcap_event_loop =
reader.event_loop_factory()->MakeEventLoop("mcap", node);
CHECK(!FLAGS_output_path.empty());
- aos::McapLogger relogger(mcap_event_loop.get(), FLAGS_output_path);
+ aos::McapLogger relogger(mcap_event_loop.get(), FLAGS_output_path,
+ FLAGS_mode == "flatbuffer"
+ ? aos::McapLogger::Serialization::kFlatbuffer
+ : aos::McapLogger::Serialization::kJson);
reader.event_loop_factory()->Run();
}
diff --git a/aos/util/log_to_mcap_test.py b/aos/util/log_to_mcap_test.py
index 86c3d1d..72c9078 100644
--- a/aos/util/log_to_mcap_test.py
+++ b/aos/util/log_to_mcap_test.py
@@ -22,7 +22,7 @@
subprocess.run([args.generate_log, "--output_folder", log_name]).check_returncode()
# Run with a really small chunk size, to force a multi-chunk file.
subprocess.run(
- [args.log_to_mcap, "--output_path", mcap_name, "--mcap_chunk_size", "1000",
+ [args.log_to_mcap, "--output_path", mcap_name, "--mcap_chunk_size", "1000", "--mode", "json",
log_name]).check_returncode()
# MCAP attempts to find $HOME/.mcap.yaml, and dies on $HOME not existing. So
# give it an arbitrary config location (it seems to be fine with a non-existent config).
diff --git a/aos/util/mcap_logger.cc b/aos/util/mcap_logger.cc
index f4056ea..561a02b 100644
--- a/aos/util/mcap_logger.cc
+++ b/aos/util/mcap_logger.cc
@@ -1,10 +1,17 @@
#include "aos/util/mcap_logger.h"
#include "absl/strings/str_replace.h"
+#include "aos/flatbuffer_merge.h"
#include "single_include/nlohmann/json.hpp"
-DEFINE_uint64(mcap_chunk_size, 10000000,
+DEFINE_uint64(mcap_chunk_size, 10'000'000,
"Size, in bytes, of individual MCAP chunks");
+DEFINE_bool(fetch, false,
+ "Whether to fetch most recent messages at start of logfile. Turn "
+ "this on if there are, e.g., one-time messages sent before the "
+ "start of the logfile that you need access to. Turn it off if you "
+ "don't want to deal with having messages that have timestamps that "
+ "may be arbitrarily far before any other interesting messages.");
namespace aos {
@@ -74,8 +81,11 @@
return schema;
}
-McapLogger::McapLogger(EventLoop *event_loop, const std::string &output_path)
- : event_loop_(event_loop), output_(output_path) {
+McapLogger::McapLogger(EventLoop *event_loop, const std::string &output_path,
+ Serialization serialization)
+ : event_loop_(event_loop),
+ output_(output_path),
+ serialization_(serialization) {
event_loop->SkipTimingReport();
event_loop->SkipAosLog();
CHECK(output_);
@@ -119,13 +129,34 @@
// summary and summary offset sections.
WriteFooter(summary_offset, summary_offset_offset);
WriteMagic();
+
+ // TODO(james): Add compression. With flatbuffers messages that contain large
+ // numbers of zeros (e.g., large grids or thresholded images) this can result
+ // in massive savings.
+ if (VLOG_IS_ON(2)) {
+ // For debugging, print out how much space each channel is taking in the
+ // overall log.
+ LOG(INFO) << total_message_bytes_;
+ std::vector<std::pair<size_t, const Channel *>> channel_bytes;
+ for (const auto &pair : total_channel_bytes_) {
+ channel_bytes.push_back(std::make_pair(pair.second, pair.first));
+ }
+ std::sort(channel_bytes.begin(), channel_bytes.end());
+ for (const auto &pair : channel_bytes) {
+ LOG(INFO) << configuration::StrippedChannelToString(pair.second) << ": "
+ << static_cast<float>(pair.first) * 1e-6 << "MB "
+ << static_cast<float>(pair.first) / total_message_bytes_
+ << "\n";
+ }
+ }
}
std::vector<McapLogger::SummaryOffset> McapLogger::WriteSchemasAndChannels(
RegisterHandlers register_handlers) {
- uint16_t id = 1;
+ uint16_t id = 0;
std::map<uint16_t, const Channel *> channels;
for (const Channel *channel : *event_loop_->configuration()->channels()) {
+ ++id;
if (!configuration::ChannelIsReadableOnNode(channel, event_loop_->node())) {
continue;
}
@@ -141,8 +172,13 @@
WriteChunk();
}
});
+ fetchers_[id] = event_loop_->MakeRawFetcher(channel);
+ event_loop_->OnRun([this, id, channel]() {
+ if (FLAGS_fetch && fetchers_[id]->Fetch()) {
+ WriteMessage(id, channel, fetchers_[id]->context(), &current_chunk_);
+ }
+ });
}
- ++id;
}
std::vector<SummaryOffset> offsets;
@@ -200,7 +236,9 @@
void McapLogger::WriteSchema(const uint16_t id, const aos::Channel *channel) {
CHECK(channel->has_schema());
- std::string schema = JsonSchemaForFlatbuffer({channel->schema()}).dump();
+
+ const FlatbufferDetachedBuffer<reflection::Schema> schema =
+ CopyFlatBuffer(channel->schema());
// Write out the schema (we don't bother deduplicating schema types):
string_builder_.Reset();
@@ -208,10 +246,23 @@
AppendInt16(&string_builder_, id);
// Type name
AppendString(&string_builder_, channel->type()->string_view());
- // Encoding
- AppendString(&string_builder_, "jsonschema");
- // Actual schema itself
- AppendString(&string_builder_, schema);
+ switch (serialization_) {
+ case Serialization::kJson:
+ // Encoding
+ AppendString(&string_builder_, "jsonschema");
+ // Actual schema itself
+ AppendString(&string_builder_,
+ JsonSchemaForFlatbuffer({channel->schema()}).dump());
+ break;
+ case Serialization::kFlatbuffer:
+ // Encoding
+ AppendString(&string_builder_, "flatbuffer");
+ // Actual schema itself
+ AppendString(&string_builder_,
+ {reinterpret_cast<const char *>(schema.span().data()),
+ schema.span().size()});
+ break;
+ }
WriteRecord(OpCode::kSchema, string_builder_.Result());
}
@@ -227,7 +278,15 @@
absl::StrCat(channel->name()->string_view(), " ",
channel->type()->string_view()));
// Encoding
- AppendString(&string_builder_, "json");
+ switch (serialization_) {
+ case Serialization::kJson:
+ AppendString(&string_builder_, "json");
+ break;
+ case Serialization::kFlatbuffer:
+ AppendString(&string_builder_, "flatbuffer");
+ break;
+ }
+
// Metadata (technically supposed to be a Map<string, string>)
AppendString(&string_builder_, "");
WriteRecord(OpCode::kChannel, string_builder_.Result());
@@ -241,9 +300,15 @@
if (!earliest_message_.has_value()) {
earliest_message_ = context.monotonic_event_time;
+ } else {
+ earliest_message_ =
+ std::min(context.monotonic_event_time, earliest_message_.value());
}
if (!earliest_chunk_message_.has_value()) {
earliest_chunk_message_ = context.monotonic_event_time;
+ } else {
+ earliest_chunk_message_ =
+ std::min(context.monotonic_event_time, earliest_chunk_message_.value());
}
latest_message_ = context.monotonic_event_time;
@@ -257,6 +322,8 @@
// TODO(james): If we use this for multi-node logfiles, use distributed clock.
AppendInt64(&string_builder_,
context.monotonic_event_time.time_since_epoch().count());
+ // Note: Foxglove Studio doesn't appear to actually support using publish time
+ // right now.
AppendInt64(&string_builder_,
context.monotonic_event_time.time_since_epoch().count());
@@ -267,8 +334,18 @@
<< ": Corrupted flatbuffer on " << channel->name()->c_str() << " "
<< channel->type()->c_str();
- aos::FlatbufferToJson(&string_builder_, channel->schema(),
- static_cast<const uint8_t *>(context.data));
+ switch (serialization_) {
+ case Serialization::kJson:
+ aos::FlatbufferToJson(&string_builder_, channel->schema(),
+ static_cast<const uint8_t *>(context.data));
+ break;
+ case Serialization::kFlatbuffer:
+ string_builder_.Append(
+ {static_cast<const char *>(context.data), context.size});
+ break;
+ }
+ total_message_bytes_ += context.size;
+ total_channel_bytes_[channel] += context.size;
message_indices_[channel_id].push_back(std::make_pair<uint64_t, uint64_t>(
context.monotonic_event_time.time_since_epoch().count(),
diff --git a/aos/util/mcap_logger.h b/aos/util/mcap_logger.h
index dcacb68..5ae6413 100644
--- a/aos/util/mcap_logger.h
+++ b/aos/util/mcap_logger.h
@@ -30,7 +30,14 @@
// available, to be able to support Foxglove fully.
class McapLogger {
public:
- McapLogger(EventLoop *event_loop, const std::string &output_path);
+ // Whether to serialize the messages into the MCAP file as JSON or
+ // flatbuffers.
+ enum class Serialization {
+ kJson,
+ kFlatbuffer,
+ };
+ McapLogger(EventLoop *event_loop, const std::string &output_path,
+ Serialization serialization);
~McapLogger();
private:
@@ -122,6 +129,9 @@
aos::EventLoop *event_loop_;
std::ofstream output_;
+ const Serialization serialization_;
+ size_t total_message_bytes_ = 0;
+ std::map<const Channel *, size_t> total_channel_bytes_;
// Buffer containing serialized message data for the currently-being-built
// chunk.
std::stringstream current_chunk_;
@@ -136,6 +146,7 @@
aos::monotonic_clock::min_time;
// Count of all messages on each channel, indexed by channel ID.
std::map<uint16_t, uint64_t> message_counts_;
+ std::map<uint16_t, std::unique_ptr<RawFetcher>> fetchers_;
// MessageIndex's for each message. The std::map is indexed by channel ID. The
// vector is then a series of pairs of (timestamp, offset from start of
// current_chunk_).
diff --git a/frc971/codelab/README.md b/frc971/codelab/README.md
index 962af02..e4cfb4b 100644
--- a/frc971/codelab/README.md
+++ b/frc971/codelab/README.md
@@ -1,7 +1,7 @@
# FRC971 "Codelab"
Welcome! This folder contains a "codelab" where you can go through the process
-of fleshing out a basic control-loop using the same infrastructure as we do for
+of fleshing out a basic control loop using the same infrastructure as we do for
the control loops that normally run on our robots. Currently, this just consists
of a single codelab; the instructions can be found below.
@@ -11,12 +11,12 @@
## Flatbuffers tutorial
-Our code uses flatbuffers extensively. If you're unfamiliar with them, you can take a look at these [tutorials](https://google.github.io/flatbuffers/flatbuffers_guide_tutorial.html) for how to use them. This is optional but reommended if you are looking for more background on flatbuffers, and can be done before or after the codelab.
+Our code uses flatbuffers extensively. If you're unfamiliar with them, you can take a look at these [tutorials](https://google.github.io/flatbuffers/flatbuffers_guide_tutorial.html) for how to use them. This is optional but recommended if you are looking for more background on flatbuffers, and can be done before or after the codelab.
## Instructions
This codelab helps build basic knowledge of how to use 971 control loop
-primatives.
+primitives.
When this codelab is run, it performs a series of tests to check whether the code is working properly. Your job is to add or make changes to the code to get the tests to pass.
@@ -33,7 +33,7 @@
### Control loops
-A control loop is a piece of code that is repeatedly executed while the robot is running, recieiving input from the robot controllers and sensors and sending intructions to the motors that control the robot.
+A control loop is a piece of code that is repeatedly executed while the robot is running, receiving input from the robot controllers and sensors and sending instructions to the motors that control the robot.
Control loops all follow the same structure:
There are 4 channels that send and recieve instructions. These channels are goal, position, output, and status. Goal and position are input channels, which recieve messages from the robot's sensors and input from the controller. Output and status are output channels, which send messages to the motors.
@@ -52,12 +52,12 @@
implementation in basic.cc so that it uses the input goal/position to
meaningfully populate the output/status messages. You can find descriptions
of exactly what the fields of the messages mean by reading all the *.fbs
-files, and the tests below can be reviewed to help understand exactly what
-behavior is expected.
+files. The tests in basic_test.cc can be reviewed to help understand exactly
+what behavior is expected.
### Submitting a code review
Once you can get the tests to pass, follow the directions in [this file](https://software.frc971.org/gerrit/plugins/gitiles/971-Robot-Code/+/refs/heads/master/documentation/tutorials/submitting-code-for-a-review.md) for creating a
code review of the change. We will not actually *submit* the change (since
-that would remove the challenge for future students), but we will go through
-the code review process.
\ No newline at end of file
+that would remove the challenge for future students), but we will go through
+the code review process.
diff --git a/frc971/codelab/basic.cc b/frc971/codelab/basic.cc
index 325db73..66f08f9 100644
--- a/frc971/codelab/basic.cc
+++ b/frc971/codelab/basic.cc
@@ -10,17 +10,16 @@
void Basic::RunIteration(const Goal *goal, const Position *position,
aos::Sender<Output>::Builder *output,
aos::Sender<Status>::Builder *status) {
-
// FIX HERE: Set the intake_voltage to 12 Volts when
// intake is requested (via intake in goal). Make sure not to set
// the motor to anything but 0 V when the limit_sensor is pressed.
- // This line tells the compiler to to ignore the fact that goal and
+ // This line tells the compiler to ignore the fact that goal and
// position are not used in the code. You will need to read these messages
// and use their values to determine the necessary output and status.
(void)goal, (void)position;
- if (output) {
+ if (output != nullptr) {
Output::Builder builder = output->MakeBuilder<Output>();
// FIX HERE: As of now, this sets the intake voltage to 0 in
@@ -33,7 +32,7 @@
(void)output->Send(builder.Finish());
}
- if (status) {
+ if (status != nullptr) {
Status::Builder builder = status->MakeBuilder<Status>();
// FIX HERE: Fill out the Status message! In order to fill the
// information in the message, use the add_<name of the field>() method
diff --git a/frc971/codelab/basic_status.fbs b/frc971/codelab/basic_status.fbs
index 58a29db..d03c785 100644
--- a/frc971/codelab/basic_status.fbs
+++ b/frc971/codelab/basic_status.fbs
@@ -5,8 +5,6 @@
// finished. There is one field, intake_complete, which should be set to
// true by the intake subsystem when the Goal message is requesting the intake
// to be on and the limit sensor from the position message has been enabled.
-
-
intake_complete:bool (id: 0);
}
diff --git a/frc971/control_loops/python/graph.py b/frc971/control_loops/python/graph.py
index c974d68..2801f88 100644
--- a/frc971/control_loops/python/graph.py
+++ b/frc971/control_loops/python/graph.py
@@ -17,10 +17,10 @@
super(Graph, self).__init__()
fig = Figure(figsize=(5, 4), dpi=100)
self.axis = fig.add_subplot(111)
- canvas = FigureCanvas(fig) # a Gtk.DrawingArea
- canvas.set_vexpand(True)
- canvas.set_size_request(800, 250)
- self.add(canvas)
+ self.canvas = FigureCanvas(fig) # a Gtk.DrawingArea
+ self.canvas.set_vexpand(True)
+ self.canvas.set_size_request(800, 250)
+ self.add(self.canvas)
self.queue = queue.Queue(maxsize=1)
thread = threading.Thread(target=self.worker)
@@ -78,5 +78,5 @@
# the total time to drive the spline
self.axis.xaxis.set_ticks(np.linspace(0, total_time, num=8))
- # ask to be redrawn
- self.queue_draw()
+ # redraw
+ self.canvas.draw()
diff --git a/third_party/akaze/AKAZEConfig.h b/third_party/akaze/AKAZEConfig.h
new file mode 100644
index 0000000..5e754ed
--- /dev/null
+++ b/third_party/akaze/AKAZEConfig.h
@@ -0,0 +1,66 @@
+/**
+ * @file AKAZEConfig.h
+ * @brief AKAZE configuration file
+ * @date Feb 23, 2014
+ * @author Pablo F. Alcantarilla, Jesus Nuevo
+ */
+
+#ifndef __OPENCV_FEATURES_2D_AKAZE_CONFIG_H__
+#define __OPENCV_FEATURES_2D_AKAZE_CONFIG_H__
+
+#include <opencv2/features2d.hpp>
+
+namespace cv {
+/* ************************************************************************* */
+/// AKAZE configuration options structure
+struct AKAZEOptionsV2 {
+ AKAZEOptionsV2()
+ : omax(4),
+ nsublevels(4),
+ img_width(0),
+ img_height(0),
+ soffset(1.6f),
+ derivative_factor(1.5f),
+ sderivatives(1.0),
+ diffusivity(KAZE::DIFF_PM_G2)
+
+ ,
+ dthreshold(0.001f),
+ min_dthreshold(0.00001f)
+
+ ,
+ descriptor(AKAZE::DESCRIPTOR_MLDB),
+ descriptor_size(0),
+ descriptor_channels(3),
+ descriptor_pattern_size(10)
+
+ ,
+ kcontrast_percentile(0.7f),
+ kcontrast_nbins(300) {}
+
+ int omax; ///< Maximum octave evolution of the image 2^sigma (coarsest scale
+ ///< sigma units)
+ int nsublevels; ///< Default number of sublevels per scale level
+ int img_width; ///< Width of the input image
+ int img_height; ///< Height of the input image
+ float soffset; ///< Base scale offset (sigma units)
+ float derivative_factor; ///< Factor for the multiscale derivatives
+ float sderivatives; ///< Smoothing factor for the derivatives
+ int diffusivity; ///< Diffusivity type
+
+ float dthreshold; ///< Detector response threshold to accept point
+ float min_dthreshold; ///< Minimum detector threshold to accept a point
+
+ int descriptor; ///< Type of descriptor
+ int descriptor_size; ///< Size of the descriptor in bits. 0->Full size
+ int descriptor_channels; ///< Number of channels in the descriptor (1, 2, 3)
+ int descriptor_pattern_size; ///< Actual patch size is
+ ///< 2*pattern_size*point.scale
+
+ float kcontrast_percentile; ///< Percentile level for the contrast factor
+ int kcontrast_nbins; ///< Number of bins for the contrast factor histogram
+};
+
+} // namespace cv
+
+#endif
\ No newline at end of file
diff --git a/third_party/akaze/AKAZEFeatures.cpp b/third_party/akaze/AKAZEFeatures.cpp
new file mode 100644
index 0000000..2827dd5
--- /dev/null
+++ b/third_party/akaze/AKAZEFeatures.cpp
@@ -0,0 +1,2186 @@
+/**
+ * @file AKAZEFeatures.cpp
+ * @brief Main class for detecting and describing binary features in an
+ * accelerated nonlinear scale space
+ * @date Sep 15, 2013
+ * @author Pablo F. Alcantarilla, Jesus Nuevo
+ */
+
+#include "AKAZEFeatures.h"
+
+#include <cstdint>
+#include <cstring>
+#include <iostream>
+#include <opencv2/core.hpp>
+#include <opencv2/core/hal/hal.hpp>
+#include <opencv2/imgproc.hpp>
+
+#include "fed.h"
+#include "nldiffusion_functions.h"
+#include "utils.h"
+
+#ifdef AKAZE_USE_CPP11_THREADING
+#include <atomic>
+#include <functional> // std::ref
+#include <future>
+#include <thread>
+#endif
+
+// Taken from opencv2/internal.hpp: IEEE754 constants and macros
+#define CV_TOGGLE_FLT(x) ((x) ^ ((int)(x) < 0 ? 0x7fffffff : 0))
+
+// Namespaces
+namespace cv {
+using namespace std;
+
+/// Internal Functions
+inline void Compute_Main_Orientation(cv::KeyPoint& kpt,
+ const TEvolutionV2& evolution_);
+static void generateDescriptorSubsampleV2(cv::Mat& sampleList,
+ cv::Mat& comparisons, int nbits,
+ int pattern_size, int nchannels);
+
+/* ************************************************************************* */
+/**
+ * @brief AKAZEFeatures constructor with input options
+ * @param options AKAZEFeatures configuration options
+ * @note This constructor allocates memory for the nonlinear scale space
+ */
+AKAZEFeaturesV2::AKAZEFeaturesV2(const AKAZEOptionsV2& options)
+ : options_(options) {
+ cout << "AKAZEFeaturesV2 constructor called" << endl;
+
+#ifdef AKAZE_USE_CPP11_THREADING
+ cout << "hardware_concurrency: " << thread::hardware_concurrency() << endl;
+#endif
+
+ reordering_ = true;
+
+ if (options_.descriptor_size > 0 &&
+ options_.descriptor >= AKAZE::DESCRIPTOR_MLDB_UPRIGHT) {
+ generateDescriptorSubsampleV2(
+ descriptorSamples_, descriptorBits_, options_.descriptor_size,
+ options_.descriptor_pattern_size, options_.descriptor_channels);
+ }
+
+ Allocate_Memory_Evolution();
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method allocates the memory for the nonlinear diffusion evolution
+ */
+void AKAZEFeaturesV2::Allocate_Memory_Evolution(void) {
+ CV_Assert(options_.img_height > 2 &&
+ options_.img_width > 2); // The size of modgs_ must be positive
+
+ // Set maximum size of the area for the descriptor computation
+ float smax = 0.0;
+ if (options_.descriptor == AKAZE::DESCRIPTOR_MLDB_UPRIGHT ||
+ options_.descriptor == AKAZE::DESCRIPTOR_MLDB) {
+ smax = 10.0f * sqrtf(2.0f);
+ } else if (options_.descriptor == AKAZE::DESCRIPTOR_KAZE_UPRIGHT ||
+ options_.descriptor == AKAZE::DESCRIPTOR_KAZE) {
+ smax = 12.0f * sqrtf(2.0f);
+ }
+
+ // Allocate the dimension of the matrices for the evolution
+ int level_height = options_.img_height;
+ int level_width = options_.img_width;
+ int power = 1;
+
+ for (int i = 0; i < options_.omax; i++) {
+ for (int j = 0; j < options_.nsublevels; j++) {
+ TEvolutionV2 step;
+ step.Lt.create(level_height, level_width, CV_32FC1);
+ step.Ldet.create(level_height, level_width, CV_32FC1);
+ step.Lsmooth.create(level_height, level_width, CV_32FC1);
+ step.Lx.create(level_height, level_width, CV_32FC1);
+ step.Ly.create(level_height, level_width, CV_32FC1);
+ step.Lxx.create(level_height, level_width, CV_32FC1);
+ step.Lxy.create(level_height, level_width, CV_32FC1);
+ step.Lyy.create(level_height, level_width, CV_32FC1);
+ step.esigma =
+ options_.soffset * pow(2.f, (float)j / options_.nsublevels + i);
+ step.sigma_size =
+ fRoundV2(step.esigma * options_.derivative_factor /
+ power); // In fact sigma_size only depends on j
+ step.border = fRoundV2(smax * step.sigma_size) + 1;
+ step.etime = 0.5f * (step.esigma * step.esigma);
+ step.octave = i;
+ step.sublevel = j;
+ step.octave_ratio = (float)power;
+
+ // Descriptors cannot be computed for the points on the border
+ if (step.border * 2 + 1 >= level_width ||
+ step.border * 2 + 1 >= level_height)
+ goto out; // The image becomes too small
+
+ // Pre-calculate the derivative kernels
+ compute_scharr_derivative_kernelsV2(step.DxKx, step.DxKy, 1, 0,
+ step.sigma_size);
+ compute_scharr_derivative_kernelsV2(step.DyKx, step.DyKy, 0, 1,
+ step.sigma_size);
+
+ evolution_.push_back(step);
+ }
+
+ power <<= 1;
+ level_height >>= 1;
+ level_width >>= 1;
+
+ // The next octave becomes too small
+ if (level_width < 80 || level_height < 40) {
+ options_.omax = i + 1;
+ break;
+ }
+ }
+out:
+
+ // Allocate memory for workspaces
+ lx_.create(options_.img_height, options_.img_width, CV_32FC1);
+ ly_.create(options_.img_height, options_.img_width, CV_32FC1);
+ lflow_.create(options_.img_height, options_.img_width, CV_32FC1);
+ lstep_.create(options_.img_height, options_.img_width, CV_32FC1);
+ histgram_.create(1, options_.kcontrast_nbins, CV_32SC1);
+ modgs_.create(1, (options_.img_height - 2) * (options_.img_width - 2),
+ CV_32FC1); // excluding the border
+
+ kpts_aux_.resize(evolution_.size());
+ for (size_t i = 0; i < evolution_.size(); i++)
+ kpts_aux_[i].reserve(
+ 1024); // reserve 1K points' space for each evolution step
+
+ // Allocate memory for the number of cycles and time steps
+ tsteps_.resize(evolution_.size() - 1);
+ for (size_t i = 1; i < evolution_.size(); i++) {
+ fed_tau_by_process_timeV2(evolution_[i].etime - evolution_[i - 1].etime, 1,
+ 0.25f, reordering_, tsteps_[i - 1]);
+ }
+
+#ifdef AKAZE_USE_CPP11_THREADING
+ tasklist_.resize(2);
+ for (auto& list : tasklist_) list.resize(evolution_.size());
+
+ vector<atomic_int> atomic_vec(evolution_.size());
+ taskdeps_.swap(atomic_vec);
+#endif
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function wraps the parallel computation of Scharr derivatives.
+ * @param Lsmooth Input image to compute Scharr derivatives.
+ * @param Lx Output derivative image (horizontal)
+ * @param Ly Output derivative image (vertical)
+ */
+static inline void image_derivatives(const cv::Mat& Lsmooth, cv::Mat& Lx,
+ cv::Mat& Ly) {
+#ifdef AKAZE_USE_CPP11_THREADING
+
+ if (getNumThreads() > 1 && (Lsmooth.rows * Lsmooth.cols) > (1 << 15)) {
+ auto task = async(launch::async, image_derivatives_scharrV2, ref(Lsmooth),
+ ref(Lx), 1, 0);
+
+ image_derivatives_scharrV2(Lsmooth, Ly, 0, 1);
+ task.get();
+ return;
+ }
+
+ // Fall back to the serial path if Lsmooth is small or OpenCV parallelization
+ // is disabled
+#endif
+
+ image_derivatives_scharrV2(Lsmooth, Lx, 1, 0);
+ image_derivatives_scharrV2(Lsmooth, Ly, 0, 1);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method computes the first evolution step of the nonlinear scale
+ * space
+ * @param img Input image for which the nonlinear scale space needs to be
+ * created
+ * @return kcontrast factor
+ */
+float AKAZEFeaturesV2::Compute_Base_Evolution_Level(const cv::Mat& img) {
+ Mat Lsmooth(evolution_[0].Lt.rows, evolution_[0].Lt.cols, CV_32FC1,
+ lflow_.data /* like-a-union */);
+ Mat Lx(evolution_[0].Lt.rows, evolution_[0].Lt.cols, CV_32FC1, lx_.data);
+ Mat Ly(evolution_[0].Lt.rows, evolution_[0].Lt.cols, CV_32FC1, ly_.data);
+
+#ifdef AKAZE_USE_CPP11_THREADING
+
+ if (getNumThreads() > 2 && (img.rows * img.cols) > (1 << 16)) {
+ auto e0_Lsmooth = async(launch::async, gaussian_2D_convolutionV2, ref(img),
+ ref(evolution_[0].Lsmooth), 0, 0, options_.soffset);
+
+ gaussian_2D_convolutionV2(img, Lsmooth, 0, 0, 1.0f);
+ image_derivatives(Lsmooth, Lx, Ly);
+ kcontrast_ =
+ async(launch::async, compute_k_percentileV2, Lx, Ly,
+ options_.kcontrast_percentile, ref(modgs_), ref(histgram_));
+
+ e0_Lsmooth.get();
+ Compute_Determinant_Hessian_Response(0);
+
+ evolution_[0].Lsmooth.copyTo(evolution_[0].Lt);
+ return 1.0f;
+ }
+
+#endif
+
+ // Compute the determinant Hessian
+ gaussian_2D_convolutionV2(img, evolution_[0].Lsmooth, 0, 0, options_.soffset);
+ Compute_Determinant_Hessian_Response(0);
+
+ // Compute the kcontrast factor using local variables
+ gaussian_2D_convolutionV2(img, Lsmooth, 0, 0, 1.0f);
+ image_derivatives(Lsmooth, Lx, Ly);
+ float kcontrast = compute_k_percentileV2(
+ Lx, Ly, options_.kcontrast_percentile, modgs_, histgram_);
+
+ // Copy the smoothed original image to the first level of the evolution Lt
+ evolution_[0].Lsmooth.copyTo(evolution_[0].Lt);
+
+ return kcontrast;
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method creates the nonlinear scale space for a given image
+ * @param img Input image for which the nonlinear scale space needs to be
+ * created
+ * @return 0 if the nonlinear scale space was created successfully, -1 otherwise
+ */
+int AKAZEFeaturesV2::Create_Nonlinear_Scale_Space(const Mat& img) {
+ CV_Assert(evolution_.size() > 0);
+
+ // Setup the gray-scale image
+ const Mat* gray = &img;
+ if (img.channels() != 1) {
+ cvtColor(img, gray_, COLOR_BGR2GRAY);
+ gray = &gray_;
+ }
+
+ if (gray->type() == CV_8UC1) {
+ gray->convertTo(evolution_[0].Lt, CV_32F, 1 / 255.0);
+ gray = &evolution_[0].Lt;
+ } else if (gray->type() == CV_16UC1) {
+ gray->convertTo(evolution_[0].Lt, CV_32F, 1 / 65535.0);
+ gray = &evolution_[0].Lt;
+ }
+ CV_Assert(gray->type() == CV_32FC1);
+
+ // Handle the trivial case
+ if (evolution_.size() == 1) {
+ gaussian_2D_convolutionV2(*gray, evolution_[0].Lsmooth, 0, 0,
+ options_.soffset);
+ evolution_[0].Lsmooth.copyTo(evolution_[0].Lt);
+ Compute_Determinant_Hessian_Response_Single(0);
+ return 0;
+ }
+
+ // First compute Lsmooth, Hessian, and the kcontrast factor for the base
+ // evolution level
+ float kcontrast = Compute_Base_Evolution_Level(*gray);
+
+ // Prepare Mats to be used as local workspace
+ Mat Lx(evolution_[0].Lt.rows, evolution_[0].Lt.cols, CV_32FC1, lx_.data);
+ Mat Ly(evolution_[0].Lt.rows, evolution_[0].Lt.cols, CV_32FC1, ly_.data);
+ Mat Lflow(evolution_[0].Lt.rows, evolution_[0].Lt.cols, CV_32FC1,
+ lflow_.data);
+ Mat Lstep(evolution_[0].Lt.rows, evolution_[0].Lt.cols, CV_32FC1,
+ lstep_.data);
+
+ // Now generate the rest of evolution levels
+ for (size_t i = 1; i < evolution_.size(); i++) {
+ if (evolution_[i].octave > evolution_[i - 1].octave) {
+ halfsample_imageV2(evolution_[i - 1].Lt, evolution_[i].Lt);
+ kcontrast = kcontrast * 0.75f;
+
+ // Resize the workspace images to fit Lt
+ Lx = cv::Mat(evolution_[i].Lt.rows, evolution_[i].Lt.cols, CV_32FC1,
+ lx_.data);
+ Ly = cv::Mat(evolution_[i].Lt.rows, evolution_[i].Lt.cols, CV_32FC1,
+ ly_.data);
+ Lflow = cv::Mat(evolution_[i].Lt.rows, evolution_[i].Lt.cols, CV_32FC1,
+ lflow_.data);
+ Lstep = cv::Mat(evolution_[i].Lt.rows, evolution_[i].Lt.cols, CV_32FC1,
+ lstep_.data);
+ } else {
+ evolution_[i - 1].Lt.copyTo(evolution_[i].Lt);
+ }
+
+ gaussian_2D_convolutionV2(evolution_[i].Lt, evolution_[i].Lsmooth, 0, 0,
+ 1.0f);
+
+#ifdef AKAZE_USE_CPP11_THREADING
+ if (kcontrast_.valid())
+ kcontrast *=
+ kcontrast_
+ .get(); /* Join the kcontrast task so Lx and Ly can be reused */
+#endif
+
+ // Compute the Gaussian derivatives Lx and Ly
+ image_derivatives(evolution_[i].Lsmooth, Lx, Ly);
+
+ // Compute the Hessian for feature detection
+ Compute_Determinant_Hessian_Response((int)i);
+
+ // Compute the conductivity equation Lflow
+ switch (options_.diffusivity) {
+ case KAZE::DIFF_PM_G1:
+ pm_g1V2(Lx, Ly, Lflow, kcontrast);
+ break;
+ case KAZE::DIFF_PM_G2:
+ pm_g2V2(Lx, Ly, Lflow, kcontrast);
+ break;
+ case KAZE::DIFF_WEICKERT:
+ weickert_diffusivityV2(Lx, Ly, Lflow, kcontrast);
+ break;
+ case KAZE::DIFF_CHARBONNIER:
+ charbonnier_diffusivityV2(Lx, Ly, Lflow, kcontrast);
+ break;
+ default:
+ CV_Error(options_.diffusivity, "Diffusivity is not supported");
+ break;
+ }
+
+ // Perform Fast Explicit Diffusion on Lt
+ const int total = Lstep.rows * Lstep.cols;
+ float* lt = evolution_[i].Lt.ptr<float>(0);
+ float* lstep = Lstep.ptr<float>(0);
+ std::vector<float>& tsteps = tsteps_[i - 1];
+
+ for (size_t j = 0; j < tsteps.size(); j++) {
+ nld_step_scalarV2(evolution_[i].Lt, Lflow, Lstep);
+
+ const float step_size = tsteps[j];
+ for (int k = 0; k < total; k++) lt[k] += lstep[k] * 0.5f * step_size;
+ }
+ }
+
+#ifdef AKAZE_USE_CPP11_THREADING
+
+ if (getNumThreads() > 1) {
+ // Wait all background tasks to finish
+ for (size_t i = 0; i < evolution_.size(); i++) {
+ tasklist_[0][i].get();
+ tasklist_[1][i].get();
+ }
+ }
+
+#endif
+
+ return 0;
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method selects interesting keypoints through the nonlinear scale
+ * space
+ * @param kpts Vector of detected keypoints
+ */
+void AKAZEFeaturesV2::Feature_Detection(std::vector<KeyPoint>& kpts) {
+ Find_Scale_Space_Extrema(kpts_aux_);
+ Do_Subpixel_Refinement(kpts_aux_, kpts);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method computes the feature detector response for the nonlinear
+ * scale space
+ * @param level The evolution level to compute Hessian determinant
+ * @note We use the Hessian determinant as the feature detector response
+ */
+inline void AKAZEFeaturesV2::Compute_Determinant_Hessian_Response_Single(
+ const int level) {
+ TEvolutionV2& e = evolution_[level];
+
+ const int total = e.Lsmooth.cols * e.Lsmooth.rows;
+ float* lxx = e.Lxx.ptr<float>(0);
+ float* lxy = e.Lxy.ptr<float>(0);
+ float* lyy = e.Lyy.ptr<float>(0);
+ float* ldet = e.Ldet.ptr<float>(0);
+
+ // Firstly compute the multiscale derivatives
+ sepFilter2D(e.Lsmooth, e.Lx, CV_32F, e.DxKx, e.DxKy);
+ sepFilter2D(e.Lx, e.Lxx, CV_32F, e.DxKx, e.DxKy);
+ sepFilter2D(e.Lx, e.Lxy, CV_32F, e.DyKx, e.DyKy);
+ sepFilter2D(e.Lsmooth, e.Ly, CV_32F, e.DyKx, e.DyKy);
+ sepFilter2D(e.Ly, e.Lyy, CV_32F, e.DyKx, e.DyKy);
+
+ // Compute Ldet by Lxx.mul(Lyy) - Lxy.mul(Lxy)
+ for (int j = 0; j < total; j++) ldet[j] = lxx[j] * lyy[j] - lxy[j] * lxy[j];
+}
+
+#ifdef AKAZE_USE_CPP11_THREADING
+
+/* ************************************************************************* */
+/**
+ * @brief This method computes the feature detector response for the nonlinear
+ * scale space
+ * @param level The evolution level to compute Hessian determinant
+ * @note This is the parallelized version of
+ * Compute_Determinant_Hessian_Response_Single()
+ */
+void AKAZEFeaturesV2::Compute_Determinant_Hessian_Response(const int level) {
+ if (getNumThreads() == 1) {
+ Compute_Determinant_Hessian_Response_Single(level);
+ return;
+ }
+
+ TEvolutionV2& e = evolution_[level];
+ atomic_int& dep = taskdeps_[level];
+
+ const int total = e.Lsmooth.cols * e.Lsmooth.rows;
+ float* lxx = e.Lxx.ptr<float>(0);
+ float* lxy = e.Lxy.ptr<float>(0);
+ float* lyy = e.Lyy.ptr<float>(0);
+ float* ldet = e.Ldet.ptr<float>(0);
+
+ dep = 0;
+
+ tasklist_[0][level] = async(launch::async, [=, &e, &dep] {
+ sepFilter2D(e.Lsmooth, e.Ly, CV_32F, e.DyKx, e.DyKy);
+ sepFilter2D(e.Ly, e.Lyy, CV_32F, e.DyKx, e.DyKy);
+
+ if (dep.fetch_add(1, memory_order_relaxed) != 1)
+ return; // The other dependency is not ready
+
+ sepFilter2D(e.Lx, e.Lxy, CV_32F, e.DyKx, e.DyKy);
+ for (int j = 0; j < total; j++) ldet[j] = lxx[j] * lyy[j] - lxy[j] * lxy[j];
+ });
+
+ tasklist_[1][level] = async(launch::async, [=, &e, &dep] {
+ sepFilter2D(e.Lsmooth, e.Lx, CV_32F, e.DxKx, e.DxKy);
+ sepFilter2D(e.Lx, e.Lxx, CV_32F, e.DxKx, e.DxKy);
+
+ if (dep.fetch_add(1, memory_order_relaxed) != 1)
+ return; // The other dependency is not ready
+
+ sepFilter2D(e.Lx, e.Lxy, CV_32F, e.DyKx, e.DyKy);
+ for (int j = 0; j < total; j++) ldet[j] = lxx[j] * lyy[j] - lxy[j] * lxy[j];
+ });
+
+ // tasklist_[0][level] and tasklist_[1][level] have to be waited on later
+}
+
+#else
+
+void AKAZEFeaturesV2::Compute_Determinant_Hessian_Response(const int level) {
+ Compute_Determinant_Hessian_Response_Single(level);
+}
+
+#endif
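Editorial note on the threaded path above: the two async tasks coordinate through an atomic counter; each computes one set of derivatives, and whichever task increments the counter second also performs the shared Lxy and Ldet computation. A simplified standalone sketch of that pattern (hypothetical workload; the sketch uses the default sequentially consistent ordering rather than the relaxed ordering above):

#include <atomic>
#include <future>
#include <iostream>

int main() {
  std::atomic_int dep{0};
  int a = 0, b = 0, combined = 0;

  auto task_a = std::async(std::launch::async, [&] {
    a = 2;  // First half of the work.
    if (dep.fetch_add(1) != 1) return;  // The other half is not ready yet.
    combined = a * b;                   // Runs here only if task_b finished first.
  });
  auto task_b = std::async(std::launch::async, [&] {
    b = 21;  // Second half of the work.
    if (dep.fetch_add(1) != 1) return;  // The other half is not ready yet.
    combined = a * b;                   // Runs here only if task_a finished first.
  });

  task_a.get();
  task_b.get();
  std::cout << combined << "\n";  // 42
}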
+
+/* ************************************************************************* */
+/**
+ * @brief This method searches v for a neighbor point of the point candidate p
+ * @param p The keypoint candidate to search a neighbor
+ * @param v The vector to store the points to be searched
+ * @param offset The starting location in the vector v to be searched at
+ * @param idx The index of the vector v if a neighbor is found
+ * @return true if a neighbor point is found; false otherwise
+ */
+inline bool find_neighbor_point(const KeyPoint& p, const vector<KeyPoint>& v,
+ const int offset, int& idx) {
+ const int sz = (int)v.size();
+
+ for (int i = offset; i < sz; i++) {
+ if (v[i].class_id == -1) // Skip a deleted point
+ continue;
+
+ float dx = p.pt.x - v[i].pt.x;
+ float dy = p.pt.y - v[i].pt.y;
+ if (dx * dx + dy * dy <= p.size * p.size) {
+ idx = i;
+ return true;
+ }
+ }
+
+ return false;
+}
+
+inline bool find_neighbor_point_inv(const KeyPoint& p,
+ const vector<KeyPoint>& v, const int offset,
+ int& idx) {
+ const int sz = (int)v.size();
+
+ for (int i = offset; i < sz; i++) {
+ if (v[i].class_id == -1) // Skip a deleted point
+ continue;
+
+ float dx = p.pt.x - v[i].pt.x;
+ float dy = p.pt.y - v[i].pt.y;
+ if (dx * dx + dy * dy <= v[i].size * v[i].size) {
+ idx = i;
+ return true;
+ }
+ }
+
+ return false;
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method finds extrema in the nonlinear scale space
+ * @param kpts_aux Output vectors of detected keypoints; one vector for each
+ * evolution level
+ */
+inline void AKAZEFeaturesV2::Find_Scale_Space_Extrema_Single(
+ std::vector<vector<KeyPoint>>& kpts_aux) {
+ // Clear the workspace to hold the keypoint candidates
+ for (size_t i = 0; i < kpts_aux_.size(); i++) kpts_aux_[i].clear();
+
+ for (int i = 0; i < (int)evolution_.size(); i++) {
+ const TEvolutionV2& step = evolution_[i];
+
+ const float* prev = step.Ldet.ptr<float>(step.border - 1);
+ const float* curr = step.Ldet.ptr<float>(step.border);
+ const float* next = step.Ldet.ptr<float>(step.border + 1);
+
+ for (int y = step.border; y < step.Ldet.rows - step.border; y++) {
+ for (int x = step.border; x < step.Ldet.cols - step.border; x++) {
+ const float value = curr[x];
+
+ // Filter the points with the detector threshold
+ if (value <= options_.dthreshold) continue;
+ if (value <= curr[x - 1] || value <= curr[x + 1]) continue;
+ if (value <= prev[x - 1] || value <= prev[x] || value <= prev[x + 1])
+ continue;
+ if (value <= next[x - 1] || value <= next[x] || value <= next[x + 1])
+ continue;
+
+ KeyPoint point(/* x */ static_cast<float>(x * step.octave_ratio),
+ /* y */ static_cast<float>(y * step.octave_ratio),
+ /* size */ step.esigma * options_.derivative_factor,
+ /* angle */ -1,
+ /* response */ value,
+ /* octave */ step.octave,
+ /* class_id */ i);
+
+ int idx = 0;
+
+ // Compare response with the same scale
+ if (find_neighbor_point(point, kpts_aux[i], 0, idx)) {
+ if (point.response > kpts_aux[i][idx].response)
+ kpts_aux[i][idx] = point; // Replace the old point
+ continue;
+ }
+
+ // Compare response with the lower scale
+ if (i > 0 && find_neighbor_point(point, kpts_aux[i - 1], 0, idx)) {
+ if (point.response > kpts_aux[i - 1][idx].response) {
+ kpts_aux[i - 1][idx].class_id = -1; // Mark it as deleted
+ kpts_aux[i].push_back(
+ point); // Insert the new point to the right layer
+ }
+ continue;
+ }
+
+ kpts_aux[i].push_back(point); // A good keypoint candidate is found
+ }
+ prev = curr;
+ curr = next;
+ next += step.Ldet.cols;
+ }
+ }
+
+ // Now filter points with the upper scale level
+ for (int i = 0; i < (int)kpts_aux.size() - 1; i++) {
+ for (int j = 0; j < (int)kpts_aux[i].size(); j++) {
+ KeyPoint& pt = kpts_aux[i][j];
+
+ if (pt.class_id == -1) // Skip a deleted point
+ continue;
+
+ int idx = 0;
+ while (find_neighbor_point_inv(pt, kpts_aux[i + 1], idx, idx)) {
+ if (pt.response > kpts_aux[i + 1][idx].response)
+ kpts_aux[i + 1][idx].class_id = -1;
+ ++idx;
+ }
+ }
+ }
+}
+
+#ifdef AKAZE_USE_CPP11_THREADING
+
+/* ************************************************************************* */
+/**
+ * @brief This method finds extrema in the nonlinear scale space
+ * @param kpts_aux Output vectors of detected keypoints; one vector for each
+ * evolution level
+ * @note This is the parallelized version of Find_Scale_Space_Extrema_Single()
+ */
+void AKAZEFeaturesV2::Find_Scale_Space_Extrema(
+ std::vector<vector<KeyPoint>>& kpts_aux) {
+ if (getNumThreads() == 1) {
+ Find_Scale_Space_Extrema_Single(kpts_aux);
+ return;
+ }
+
+ for (int i = 0; i < (int)evolution_.size(); i++) {
+ const TEvolutionV2& step = evolution_[i];
+ vector<cv::KeyPoint>& kpts = kpts_aux[i];
+
+ // Clear the workspace to hold the keypoint candidates
+ kpts_aux_[i].clear();
+
+ auto mode = (i > 0 ? launch::async : launch::deferred);
+ tasklist_[0][i] = async(
+ mode,
+ [&step, &kpts, i](const AKAZEOptionsV2& opt) {
+ const float* prev = step.Ldet.ptr<float>(step.border - 1);
+ const float* curr = step.Ldet.ptr<float>(step.border);
+ const float* next = step.Ldet.ptr<float>(step.border + 1);
+
+ for (int y = step.border; y < step.Ldet.rows - step.border; y++) {
+ for (int x = step.border; x < step.Ldet.cols - step.border; x++) {
+ const float value = curr[x];
+
+ // Filter the points with the detector threshold
+ if (value <= opt.dthreshold) continue;
+ if (value <= curr[x - 1] || value <= curr[x + 1]) continue;
+ if (value <= prev[x - 1] || value <= prev[x] ||
+ value <= prev[x + 1])
+ continue;
+ if (value <= next[x - 1] || value <= next[x] ||
+ value <= next[x + 1])
+ continue;
+
+ KeyPoint point(/* x */ static_cast<float>(x * step.octave_ratio),
+ /* y */ static_cast<float>(y * step.octave_ratio),
+ /* size */ step.esigma * opt.derivative_factor,
+ /* angle */ -1,
+ /* response */ value,
+ /* octave */ step.octave,
+ /* class_id */ i);
+
+ int idx = 0;
+
+ // Compare response with the same scale
+ if (find_neighbor_point(point, kpts, 0, idx)) {
+ if (point.response > kpts[idx].response)
+ kpts[idx] = point; // Replace the old point
+ continue;
+ }
+
+ kpts.push_back(point);
+ }
+
+ prev = curr;
+ curr = next;
+ next += step.Ldet.cols;
+ }
+ },
+ options_);
+ }
+
+ tasklist_[0][0].get();
+
+ // Filter points with the lower scale level
+ for (int i = 1; i < (int)kpts_aux.size(); i++) {
+ tasklist_[0][i].get();
+
+ for (int j = 0; j < (int)kpts_aux[i].size(); j++) {
+ KeyPoint& pt = kpts_aux[i][j];
+
+ int idx = 0;
+ while (find_neighbor_point(pt, kpts_aux[i - 1], idx, idx)) {
+ if (pt.response > kpts_aux[i - 1][idx].response)
+ kpts_aux[i - 1][idx].class_id = -1;
+ // else this pt may be pruned by the upper scale
+ ++idx;
+ }
+ }
+ }
+
+ // Now filter points with the upper scale level (the other direction)
+ for (int i = (int)kpts_aux.size() - 2; i >= 0; i--) {
+ for (int j = 0; j < (int)kpts_aux[i].size(); j++) {
+ KeyPoint& pt = kpts_aux[i][j];
+
+ if (pt.class_id == -1) // Skip a deleted point
+ continue;
+
+ int idx = 0;
+ while (find_neighbor_point_inv(pt, kpts_aux[i + 1], idx, idx)) {
+ if (pt.response > kpts_aux[i + 1][idx].response)
+ kpts_aux[i + 1][idx].class_id = -1;
+ ++idx;
+ }
+ }
+ }
+}
+
+#else
+
+void AKAZEFeaturesV2::Find_Scale_Space_Extrema(
+ std::vector<vector<KeyPoint>>& kpts_aux) {
+ Find_Scale_Space_Extrema_Single(kpts_aux);
+}
+
+#endif
+
+/* ************************************************************************* */
+/**
+ * @brief This method performs subpixel refinement of the detected keypoints
+ * @param kpts_aux Input vectors of detected keypoints, sorted by evolution
+ * levels
+ * @param kpts Output vector of the final refined keypoints
+ */
+void AKAZEFeaturesV2::Do_Subpixel_Refinement(
+ std::vector<std::vector<KeyPoint>>& kpts_aux, std::vector<KeyPoint>& kpts) {
+ // Clear the keypoint vector
+ kpts.clear();
+
+ for (int i = 0; i < (int)kpts_aux.size(); i++) {
+ const float* const ldet = evolution_[i].Ldet.ptr<float>(0);
+ const float ratio = evolution_[i].octave_ratio;
+ const int cols = evolution_[i].Ldet.cols;
+
+ for (int j = 0; j < (int)kpts_aux[i].size(); j++) {
+ KeyPoint& kp = kpts_aux[i][j];
+
+ if (kp.class_id == -1) continue; // Skip a deleted keypoint
+
+ int x = (int)(kp.pt.x / ratio);
+ int y = (int)(kp.pt.y / ratio);
+
+ // Compute the gradient
+ float Dx = 0.5f * (ldet[y * cols + x + 1] - ldet[y * cols + x - 1]);
+ float Dy = 0.5f * (ldet[(y + 1) * cols + x] - ldet[(y - 1) * cols + x]);
+
+ // Compute the Hessian
+ float Dxx = ldet[y * cols + x + 1] + ldet[y * cols + x - 1] -
+ 2.0f * ldet[y * cols + x];
+ float Dyy = ldet[(y + 1) * cols + x] + ldet[(y - 1) * cols + x] -
+ 2.0f * ldet[y * cols + x];
+ float Dxy =
+ 0.25f * (ldet[(y + 1) * cols + x + 1] + ldet[(y - 1) * cols + x - 1] -
+ ldet[(y - 1) * cols + x + 1] - ldet[(y + 1) * cols + x - 1]);
+
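+ // The subpixel offset is one Newton step on the detector response: solve
+ // H * delta = -grad, i.e. [Dxx Dxy; Dxy Dyy] * (dx, dy)^T = (-Dx, -Dy)^T.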
+ // Solve the linear system
+ Matx22f A{Dxx, Dxy, Dxy, Dyy};
+ Vec2f b{-Dx, -Dy};
+ Vec2f dst{0.0f, 0.0f};
+ solve(A, b, dst, DECOMP_LU);
+
+ float dx = dst(0);
+ float dy = dst(1);
+
+ if (fabs(dx) > 1.0f || fabs(dy) > 1.0f)
+ continue; // Ignore the point that is not stable
+
+ // Refine the coordinates
+ kp.pt.x += dx * ratio;
+ kp.pt.y += dy * ratio;
+
+ kp.angle = 0.0;
+ kp.size *= 2.0f; // In OpenCV the size of a keypoint is the diameter
+
+ // Push the refined keypoint to the final storage
+ kpts.push_back(kp);
+ }
+ }
+}
+
+/* ************************************************************************* */
+
+class SURF_Descriptor_Upright_64_InvokerV2 : public ParallelLoopBody {
+ public:
+ SURF_Descriptor_Upright_64_InvokerV2(
+ std::vector<KeyPoint>& kpts, Mat& desc,
+ const std::vector<TEvolutionV2>& evolution)
+ : keypoints_(kpts), descriptors_(desc), evolution_(evolution) {}
+
+ void operator()(const Range& range) const {
+ for (int i = range.start; i < range.end; i++) {
+ Get_SURF_Descriptor_Upright_64(keypoints_[i], descriptors_.ptr<float>(i));
+ }
+ }
+
+ void Get_SURF_Descriptor_Upright_64(const KeyPoint& kpt, float* desc) const;
+
+ private:
+ std::vector<KeyPoint>& keypoints_;
+ Mat& descriptors_;
+ const std::vector<TEvolutionV2>& evolution_;
+};
+
+class SURF_Descriptor_64_InvokerV2 : public ParallelLoopBody {
+ public:
+ SURF_Descriptor_64_InvokerV2(std::vector<KeyPoint>& kpts, Mat& desc,
+ const std::vector<TEvolutionV2>& evolution)
+ : keypoints_(kpts), descriptors_(desc), evolution_(evolution) {}
+
+ void operator()(const Range& range) const {
+ for (int i = range.start; i < range.end; i++) {
+ KeyPoint& kp = keypoints_[i];
+ Compute_Main_Orientation(kp, evolution_[kp.class_id]);
+ Get_SURF_Descriptor_64(kp, descriptors_.ptr<float>(i));
+ }
+ }
+
+ void Get_SURF_Descriptor_64(const KeyPoint& kpt, float* desc) const;
+
+ private:
+ std::vector<KeyPoint>& keypoints_;
+ Mat& descriptors_;
+ const std::vector<TEvolutionV2>& evolution_;
+};
+
+class MSURF_Upright_Descriptor_64_InvokerV2 : public ParallelLoopBody {
+ public:
+ MSURF_Upright_Descriptor_64_InvokerV2(
+ std::vector<KeyPoint>& kpts, Mat& desc,
+ const std::vector<TEvolutionV2>& evolution)
+ : keypoints_(kpts), descriptors_(desc), evolution_(evolution) {}
+
+ void operator()(const Range& range) const {
+ for (int i = range.start; i < range.end; i++) {
+ Get_MSURF_Upright_Descriptor_64(keypoints_[i],
+ descriptors_.ptr<float>(i));
+ }
+ }
+
+ void Get_MSURF_Upright_Descriptor_64(const KeyPoint& kpt, float* desc) const;
+
+ private:
+ std::vector<KeyPoint>& keypoints_;
+ Mat& descriptors_;
+ const std::vector<TEvolutionV2>& evolution_;
+};
+
+class MSURF_Descriptor_64_InvokerV2 : public ParallelLoopBody {
+ public:
+ MSURF_Descriptor_64_InvokerV2(std::vector<KeyPoint>& kpts, Mat& desc,
+ const std::vector<TEvolutionV2>& evolution)
+ : keypoints_(kpts), descriptors_(desc), evolution_(evolution) {}
+
+ void operator()(const Range& range) const {
+ for (int i = range.start; i < range.end; i++) {
+ Compute_Main_Orientation(keypoints_[i],
+ evolution_[keypoints_[i].class_id]);
+ Get_MSURF_Descriptor_64(keypoints_[i], descriptors_.ptr<float>(i));
+ }
+ }
+
+ void Get_MSURF_Descriptor_64(const KeyPoint& kpt, float* desc) const;
+
+ private:
+ std::vector<KeyPoint>& keypoints_;
+ Mat& descriptors_;
+ const std::vector<TEvolutionV2>& evolution_;
+};
+
+class Upright_MLDB_Full_Descriptor_InvokerV2 : public ParallelLoopBody {
+ public:
+ Upright_MLDB_Full_Descriptor_InvokerV2(
+ std::vector<KeyPoint>& kpts, Mat& desc,
+ const std::vector<TEvolutionV2>& evolution, const AKAZEOptionsV2& options)
+ : keypoints_(kpts),
+ descriptors_(desc),
+ evolution_(evolution),
+ options_(options) {}
+
+ void operator()(const Range& range) const {
+ for (int i = range.start; i < range.end; i++) {
+ Get_Upright_MLDB_Full_Descriptor(keypoints_[i],
+ descriptors_.ptr<unsigned char>(i));
+ }
+ }
+
+ void Get_Upright_MLDB_Full_Descriptor(const KeyPoint& kpt,
+ unsigned char* desc) const;
+
+ private:
+ std::vector<KeyPoint>& keypoints_;
+ Mat& descriptors_;
+ const std::vector<TEvolutionV2>& evolution_;
+ const AKAZEOptionsV2& options_;
+};
+
+class Upright_MLDB_Descriptor_Subset_InvokerV2 : public ParallelLoopBody {
+ public:
+ Upright_MLDB_Descriptor_Subset_InvokerV2(
+ std::vector<KeyPoint>& kpts, Mat& desc,
+ const std::vector<TEvolutionV2>& evolution, const AKAZEOptionsV2& options,
+ const Mat& descriptorSamples, const Mat& descriptorBits)
+ : keypoints_(kpts),
+ descriptors_(desc),
+ evolution_(evolution),
+ options_(options),
+ descriptorSamples_(descriptorSamples),
+ descriptorBits_(descriptorBits) {}
+
+ void operator()(const Range& range) const {
+ for (int i = range.start; i < range.end; i++) {
+ Get_Upright_MLDB_Descriptor_Subset(keypoints_[i],
+ descriptors_.ptr<unsigned char>(i));
+ }
+ }
+
+ void Get_Upright_MLDB_Descriptor_Subset(const KeyPoint& kpt,
+ unsigned char* desc) const;
+
+ private:
+ std::vector<KeyPoint>& keypoints_;
+ Mat& descriptors_;
+ const std::vector<TEvolutionV2>& evolution_;
+ const AKAZEOptionsV2& options_;
+
+ const Mat& descriptorSamples_; // List of positions in the grids to sample
+ // LDB bits from.
+ const Mat& descriptorBits_;
+};
+
+class MLDB_Full_Descriptor_InvokerV2 : public ParallelLoopBody {
+ public:
+ MLDB_Full_Descriptor_InvokerV2(std::vector<KeyPoint>& kpts, Mat& desc,
+ const std::vector<TEvolutionV2>& evolution,
+ const AKAZEOptionsV2& options)
+ : keypoints_(kpts),
+ descriptors_(desc),
+ evolution_(evolution),
+ options_(options) {}
+
+ void operator()(const Range& range) const {
+ for (int i = range.start; i < range.end; i++) {
+ Compute_Main_Orientation(keypoints_[i],
+ evolution_[keypoints_[i].class_id]);
+ Get_MLDB_Full_Descriptor(keypoints_[i],
+ descriptors_.ptr<unsigned char>(i));
+ keypoints_[i].angle *= (float)(180.0 / CV_PI);
+ }
+ }
+
+ void Get_MLDB_Full_Descriptor(const KeyPoint& kpt, unsigned char* desc) const;
+ void MLDB_Fill_Values(float* values, int sample_step, int level, float xf,
+ float yf, float co, float si, float scale) const;
+ void MLDB_Binary_Comparisons(float* values, unsigned char* desc, int count,
+ int& dpos) const;
+
+ private:
+ std::vector<KeyPoint>& keypoints_;
+ Mat& descriptors_;
+ const std::vector<TEvolutionV2>& evolution_;
+ const AKAZEOptionsV2& options_;
+};
+
+class MLDB_Descriptor_Subset_InvokerV2 : public ParallelLoopBody {
+ public:
+ MLDB_Descriptor_Subset_InvokerV2(std::vector<KeyPoint>& kpts, Mat& desc,
+ const std::vector<TEvolutionV2>& evolution,
+ const AKAZEOptionsV2& options,
+ const Mat& descriptorSamples,
+ const Mat& descriptorBits)
+ : keypoints_(kpts),
+ descriptors_(desc),
+ evolution_(evolution),
+ options_(options),
+ descriptorSamples_(descriptorSamples),
+ descriptorBits_(descriptorBits) {}
+
+ void operator()(const Range& range) const {
+ for (int i = range.start; i < range.end; i++) {
+ Compute_Main_Orientation(keypoints_[i],
+ evolution_[keypoints_[i].class_id]);
+ Get_MLDB_Descriptor_Subset(keypoints_[i],
+ descriptors_.ptr<unsigned char>(i));
+ keypoints_[i].angle *= (float)(180.0 / CV_PI);
+ }
+ }
+
+ void Get_MLDB_Descriptor_Subset(const KeyPoint& kpt,
+ unsigned char* desc) const;
+
+ private:
+ std::vector<KeyPoint>& keypoints_;
+ Mat& descriptors_;
+ const std::vector<TEvolutionV2>& evolution_;
+ const AKAZEOptionsV2& options_;
+
+ const Mat& descriptorSamples_; // List of positions in the grids to sample
+ // LDB bits from.
+ const Mat& descriptorBits_;
+};
+
+/**
+ * @brief This method computes the set of descriptors through the nonlinear
+ * scale space
+ * @param kpts Vector of detected keypoints
+ * @param desc Matrix to store the descriptors
+ */
+void AKAZEFeaturesV2::Compute_Descriptors(std::vector<KeyPoint>& kpts,
+ Mat& desc) {
+ for (size_t i = 0; i < kpts.size(); i++) {
+ CV_Assert(0 <= kpts[i].class_id &&
+ kpts[i].class_id < static_cast<int>(evolution_.size()));
+ }
+
+ // Allocate memory for the descriptor matrix
+ if (options_.descriptor < AKAZE::DESCRIPTOR_MLDB_UPRIGHT) {
+ desc.create((int)kpts.size(), 64, CV_32FC1);
+ } else {
+ // We use the full length binary descriptor -> 486 bits
+ if (options_.descriptor_size == 0) {
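+ // 6 + 36 + 120 = C(4,2) + C(9,2) + C(16,2): the pairwise comparisons made
+ // over the 2x2, 3x3 and 4x4 grids, i.e. 486 bits for 3 channels.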
+ int t = (6 + 36 + 120) * options_.descriptor_channels;
+ desc.create((int)kpts.size(), (int)ceil(t / 8.), CV_8UC1);
+ } else {
+ // We use the random bit selection length binary descriptor
+ desc.create((int)kpts.size(), (int)ceil(options_.descriptor_size / 8.),
+ CV_8UC1);
+ }
+ }
+
+ // Compute descriptors by blocks of 16 keypoints
+ const double stride = kpts.size() / (double)(1 << 4);
+
+ switch (options_.descriptor) {
+ case AKAZE::DESCRIPTOR_KAZE_UPRIGHT: // Upright descriptors, not invariant
+ // to rotation
+ {
+ parallel_for_(
+ Range(0, (int)kpts.size()),
+ MSURF_Upright_Descriptor_64_InvokerV2(kpts, desc, evolution_),
+ stride);
+ } break;
+ case AKAZE::DESCRIPTOR_KAZE: {
+ parallel_for_(Range(0, (int)kpts.size()),
+ MSURF_Descriptor_64_InvokerV2(kpts, desc, evolution_),
+ stride);
+ } break;
+ case AKAZE::DESCRIPTOR_MLDB_UPRIGHT: // Upright descriptors, not invariant
+ // to rotation
+ {
+ if (options_.descriptor_size == 0)
+ parallel_for_(Range(0, (int)kpts.size()),
+ Upright_MLDB_Full_Descriptor_InvokerV2(
+ kpts, desc, evolution_, options_),
+ stride);
+ else
+ parallel_for_(Range(0, (int)kpts.size()),
+ Upright_MLDB_Descriptor_Subset_InvokerV2(
+ kpts, desc, evolution_, options_, descriptorSamples_,
+ descriptorBits_),
+ stride);
+ } break;
+ case AKAZE::DESCRIPTOR_MLDB: {
+ if (options_.descriptor_size == 0)
+ parallel_for_(
+ Range(0, (int)kpts.size()),
+ MLDB_Full_Descriptor_InvokerV2(kpts, desc, evolution_, options_),
+ stride);
+ else
+ parallel_for_(Range(0, (int)kpts.size()),
+ MLDB_Descriptor_Subset_InvokerV2(
+ kpts, desc, evolution_, options_, descriptorSamples_,
+ descriptorBits_),
+ stride);
+ } break;
+ }
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function samples the derivative responses Lx and Ly for the
+ * points within a radius of 6*scale from (x0, y0), then multiplies them by a
+ * 2D Gaussian weight
+ * @param Lx Horizontal derivative
+ * @param Ly Vertical derivative
+ * @param x0 X-coordinate of the center point
+ * @param y0 Y-coordinate of the center point
+ * @param scale The sampling step
+ * @param resX Output array of the weighted horizontal derivative responses
+ * @param resY Output array of the weighted vertical derivative responses
+ */
+static inline void Sample_Derivative_Response_Radius6(
+ const Mat& Lx, const Mat& Ly, const int x0, const int y0, const int scale,
+ float* resX, float* resY) {
+ /* ************************************************************************* */
+ /// Lookup table for 2d gaussian (sigma = 2.5) where (0,0) is top left and
+ /// (6,6) is bottom right
+ static const float gauss25[7][7] = {
+ {0.02546481f, 0.02350698f, 0.01849125f, 0.01239505f, 0.00708017f,
+ 0.00344629f, 0.00142946f},
+ {0.02350698f, 0.02169968f, 0.01706957f, 0.01144208f, 0.00653582f,
+ 0.00318132f, 0.00131956f},
+ {0.01849125f, 0.01706957f, 0.01342740f, 0.00900066f, 0.00514126f,
+ 0.00250252f, 0.00103800f},
+ {0.01239505f, 0.01144208f, 0.00900066f, 0.00603332f, 0.00344629f,
+ 0.00167749f, 0.00069579f},
+ {0.00708017f, 0.00653582f, 0.00514126f, 0.00344629f, 0.00196855f,
+ 0.00095820f, 0.00039744f},
+ {0.00344629f, 0.00318132f, 0.00250252f, 0.00167749f, 0.00095820f,
+ 0.00046640f, 0.00019346f},
+ {0.00142946f, 0.00131956f, 0.00103800f, 0.00069579f, 0.00039744f,
+ 0.00019346f, 0.00008024f}};
+ static const int id[] = {6, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 6};
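+ // There are exactly 109 integer lattice points (i, j) with i*i + j*j < 36,
+ // which is why the weight/index tables below hold 109 entries.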
+ static const struct gtable {
+ float weight[109];
+ int8_t xidx[109];
+ int8_t yidx[109];
+
+ explicit gtable(void) {
+ // Generate the weight and indices by one-time initialization
+ int k = 0;
+ for (int i = -6; i <= 6; ++i) {
+ for (int j = -6; j <= 6; ++j) {
+ if (i * i + j * j < 36) {
+ weight[k] = gauss25[id[i + 6]][id[j + 6]];
+ yidx[k] = i;
+ xidx[k] = j;
+ ++k;
+ }
+ }
+ }
+ CV_DbgAssert(k == 109);
+ }
+ } g;
+
+ const float* lx = Lx.ptr<float>(0);
+ const float* ly = Ly.ptr<float>(0);
+ int cols = Lx.cols;
+
+ for (int i = 0; i < 109; i++) {
+ int j = (y0 + g.yidx[i] * scale) * cols + (x0 + g.xidx[i] * scale);
+
+ resX[i] = g.weight[i] * lx[j];
+ resY[i] = g.weight[i] * ly[j];
+ }
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function sorts a[] by quantized float values
+ * @param a[] Input floating point array to sort
+ * @param n The length of a[]
+ * @param quantum The interval to convert a[i]'s float values to integers
+ * @param max The upper bound of a[], meaning a[i] must be in [0, max]
+ * @param idx[] Output array of the indices: a[idx[i]] forms a sorted array
+ * @param cum[] Output array of the starting indices of quantized floats
+ * @note The values of a[] in [k*quantum, (k + 1)*quantum) are labeled by
+ * the integer k, which is calculated as floor(a[i]/quantum). After sorting,
+ * the values from a[idx[cum[k]]] to a[idx[cum[k+1]-1]] are all labeled by k.
+ * This sort is unstable in order to reduce memory accesses.
+ */
+static inline void quantized_counting_sort(const float a[], const int n,
+ const float quantum, const float max,
+ uint8_t idx[], uint8_t cum[]) {
+ const int nkeys = (int)(max / quantum);
+
+ // The size of cum[] must be nkeys + 1
+ memset(cum, 0, nkeys + 1);
+
+ // Count up the quantized values
+ for (int i = 0; i < n; i++) cum[(int)(a[i] / quantum)]++;
+
+ // Compute the inclusive prefix sum i.e. the end indices; cum[nkeys] is the
+ // total
+ for (int i = 1; i <= nkeys; i++) cum[i] += cum[i - 1];
+
+ // Generate the sorted indices; cum[] becomes the exclusive prefix sum i.e.
+ // the start indices of keys
+ for (int i = 0; i < n; i++) idx[--cum[(int)(a[i] / quantum)]] = i;
+}
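+
+// A small worked example (an editorial illustration, not part of the upstream
+// source): with quantum = 1, max = 3 and a[] = {2.1f, 0.4f, 2.7f, 1.5f}, the
+// keys are {2, 0, 2, 1}. After the counting pass and the prefix sum,
+// cum[] = {1, 2, 4, 4}; the placement loop then yields idx[] = {1, 3, 2, 0}
+// and cum[] = {0, 1, 2, 4}, so the values labeled k are
+// a[idx[cum[k]]] .. a[idx[cum[k+1]-1]]; e.g. key 2 covers a[2] = 2.7 and
+// a[0] = 2.1 (the order within a key is not preserved).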
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes the main orientation for a given keypoint
+ * @param kpt Input keypoint
+ * @note The orientation is computed using a similar approach as described in
+ * the original SURF method. See Bay et al., Speeded Up Robust Features, ECCV
+ * 2006
+ */
+inline void Compute_Main_Orientation(KeyPoint& kpt, const TEvolutionV2& e) {
+ // Get the information from the keypoint
+ int scale = fRoundV2(0.5f * kpt.size / e.octave_ratio);
+ int x0 = fRoundV2(kpt.pt.x / e.octave_ratio);
+ int y0 = fRoundV2(kpt.pt.y / e.octave_ratio);
+
+ // Sample derivatives responses for the points within radius of 6*scale
+ const int ang_size = 109;
+ float resX[ang_size], resY[ang_size];
+ Sample_Derivative_Response_Radius6(e.Lx, e.Ly, x0, y0, scale, resX, resY);
+
+ // Compute the angle of each gradient vector
+ float Ang[ang_size];
+ hal::fastAtan2(resY, resX, Ang, ang_size, false);
+
+ // Sort by the angles; angles are labeled by slices of 0.15 radian
+ const int slices = 42;
+ const float ang_step = (float)(2.0 * CV_PI / slices);
+ uint8_t slice[slices + 1];
+ uint8_t sorted_idx[ang_size];
+ quantized_counting_sort(Ang, ang_size, ang_step, (float)(2.0 * CV_PI),
+ sorted_idx, slice);
+
+ // Find the main angle by sliding a window of 7 slices (= PI/3) around the
+ // keypoint
+ const int win = 7;
+
+ float maxX = 0.0f, maxY = 0.0f;
+ for (int i = slice[0]; i < slice[win]; i++) {
+ maxX += resX[sorted_idx[i]];
+ maxY += resY[sorted_idx[i]];
+ }
+ float maxNorm = maxX * maxX + maxY * maxY;
+
+ for (int sn = 1; sn <= slices - win; sn++) {
+ if (slice[sn] == slice[sn - 1] && slice[sn + win] == slice[sn + win - 1])
+ continue; // The contents of the window didn't change; don't repeat the
+ // computation
+
+ float sumX = 0.0f, sumY = 0.0f;
+ for (int i = slice[sn]; i < slice[sn + win]; i++) {
+ sumX += resX[sorted_idx[i]];
+ sumY += resY[sorted_idx[i]];
+ }
+
+ float norm = sumX * sumX + sumY * sumY;
+ if (norm > maxNorm)
+ maxNorm = norm, maxX = sumX, maxY = sumY; // Found bigger one; update
+ }
+
+ for (int sn = slices - win + 1; sn < slices; sn++) {
+ int remain = sn + win - slices;
+
+ if (slice[sn] == slice[sn - 1] && slice[remain] == slice[remain - 1])
+ continue;
+
+ float sumX = 0.0f, sumY = 0.0f;
+ for (int i = slice[sn]; i < slice[slices]; i++) {
+ sumX += resX[sorted_idx[i]];
+ sumY += resY[sorted_idx[i]];
+ }
+ for (int i = slice[0]; i < slice[remain]; i++) {
+ sumX += resX[sorted_idx[i]];
+ sumY += resY[sorted_idx[i]];
+ }
+
+ float norm = sumX * sumX + sumY * sumY;
+ if (norm > maxNorm) maxNorm = norm, maxX = sumX, maxY = sumY;
+ }
+
+ // Store the final result
+ kpt.angle = getAngleV2(maxX, maxY);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method computes the upright descriptor (not rotation invariant)
+ * of the provided keypoint
+ * @param kpt Input keypoint
+ * @param desc Descriptor vector
+ * @note Rectangular grid of 24 s x 24 s. Descriptor Length 64. The descriptor
+ * is inspired by Agrawal et al., CenSurE: Center Surround Extremas for
+ * Realtime Feature Detection and Matching, ECCV 2008
+ */
+void MSURF_Upright_Descriptor_64_InvokerV2::Get_MSURF_Upright_Descriptor_64(
+ const KeyPoint& kpt, float* desc) const {
+ float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0, gauss_s1 = 0.0,
+ gauss_s2 = 0.0;
+ float rx = 0.0, ry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, ys = 0.0, xs = 0.0;
+ float sample_x = 0.0, sample_y = 0.0;
+ int x1 = 0, y1 = 0, sample_step = 0, pattern_size = 0;
+ int x2 = 0, y2 = 0, kx = 0, ky = 0, i = 0, j = 0, dcount = 0;
+ float fx = 0.0, fy = 0.0, ratio = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0,
+ res4 = 0.0;
+ int scale = 0, dsize = 0, level = 0;
+
+ // Subregion centers for the 4x4 gaussian weighting
+ float cx = -0.5f, cy = 0.5f;
+
+ // Set the descriptor size and the sample and pattern sizes
+ dsize = 64;
+ sample_step = 5;
+ pattern_size = 12;
+
+ // Get the information from the keypoint
+ level = kpt.class_id;
+ ratio = evolution_[level].octave_ratio;
+ scale = fRoundV2(0.5f * kpt.size / ratio);
+ yf = kpt.pt.y / ratio;
+ xf = kpt.pt.x / ratio;
+
+ i = -8;
+
+ // Calculate descriptor for this interest point
+ // Area of size 24 s x 24 s
+ while (i < pattern_size) {
+ j = -8;
+ i = i - 4;
+
+ cx += 1.0f;
+ cy = -0.5f;
+
+ while (j < pattern_size) {
+ dx = dy = mdx = mdy = 0.0;
+ cy += 1.0f;
+ j = j - 4;
+
+ ky = i + sample_step;
+ kx = j + sample_step;
+
+ ys = yf + (ky * scale);
+ xs = xf + (kx * scale);
+
+ for (int k = i; k < i + 9; k++) {
+ for (int l = j; l < j + 9; l++) {
+ sample_y = k * scale + yf;
+ sample_x = l * scale + xf;
+
+ // Get the gaussian weighted x and y responses
+ gauss_s1 = gaussianV2(xs - sample_x, ys - sample_y, 2.50f * scale);
+
+ y1 = (int)(sample_y - .5);
+ x1 = (int)(sample_x - .5);
+
+ y2 = (int)(sample_y + .5);
+ x2 = (int)(sample_x + .5);
+
+ fx = sample_x - x1;
+ fy = sample_y - y1;
+
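+ // Bilinearly interpolate Lx (and below Ly) at the sub-pixel sample point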
+ res1 = *(evolution_[level].Lx.ptr<float>(y1) + x1);
+ res2 = *(evolution_[level].Lx.ptr<float>(y1) + x2);
+ res3 = *(evolution_[level].Lx.ptr<float>(y2) + x1);
+ res4 = *(evolution_[level].Lx.ptr<float>(y2) + x2);
+ rx = (1.0f - fx) * (1.0f - fy) * res1 + fx * (1.0f - fy) * res2 +
+ (1.0f - fx) * fy * res3 + fx * fy * res4;
+
+ res1 = *(evolution_[level].Ly.ptr<float>(y1) + x1);
+ res2 = *(evolution_[level].Ly.ptr<float>(y1) + x2);
+ res3 = *(evolution_[level].Ly.ptr<float>(y2) + x1);
+ res4 = *(evolution_[level].Ly.ptr<float>(y2) + x2);
+ ry = (1.0f - fx) * (1.0f - fy) * res1 + fx * (1.0f - fy) * res2 +
+ (1.0f - fx) * fy * res3 + fx * fy * res4;
+
+ rx = gauss_s1 * rx;
+ ry = gauss_s1 * ry;
+
+ // Sum the derivatives to the cumulative descriptor
+ dx += rx;
+ dy += ry;
+ mdx += fabs(rx);
+ mdy += fabs(ry);
+ }
+ }
+
+ // Add the values to the descriptor vector
+ gauss_s2 = gaussianV2(cx - 2.0f, cy - 2.0f, 1.5f);
+
+ desc[dcount++] = dx * gauss_s2;
+ desc[dcount++] = dy * gauss_s2;
+ desc[dcount++] = mdx * gauss_s2;
+ desc[dcount++] = mdy * gauss_s2;
+
+ len += (dx * dx + dy * dy + mdx * mdx + mdy * mdy) * gauss_s2 * gauss_s2;
+
+ j += 9;
+ }
+
+ i += 9;
+ }
+
+ // convert to unit vector
+ len = sqrt(len);
+
+ for (i = 0; i < dsize; i++) {
+ desc[i] /= len;
+ }
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method computes the descriptor of the provided keypoint given the
+ * main orientation of the keypoint
+ * @param kpt Input keypoint
+ * @param desc Descriptor vector
+ * @note Rectangular grid of 24 s x 24 s. Descriptor Length 64. The descriptor
+ * is inspired by Agrawal et al., CenSurE: Center Surround Extremas for
+ * Realtime Feature Detection and Matching, ECCV 2008
+ */
+void MSURF_Descriptor_64_InvokerV2::Get_MSURF_Descriptor_64(const KeyPoint& kpt,
+ float* desc) const {
+ float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0, gauss_s1 = 0.0,
+ gauss_s2 = 0.0;
+ float rx = 0.0, ry = 0.0, rrx = 0.0, rry = 0.0, len = 0.0, xf = 0.0, yf = 0.0,
+ ys = 0.0, xs = 0.0;
+ float sample_x = 0.0, sample_y = 0.0, co = 0.0, si = 0.0, angle = 0.0;
+ float fx = 0.0, fy = 0.0, ratio = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0,
+ res4 = 0.0;
+ int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0;
+ int kx = 0, ky = 0, i = 0, j = 0, dcount = 0;
+ int scale = 0, dsize = 0, level = 0;
+
+ // Subregion centers for the 4x4 gaussian weighting
+ float cx = -0.5f, cy = 0.5f;
+
+ // Set the descriptor size and the sample and pattern sizes
+ dsize = 64;
+ sample_step = 5;
+ pattern_size = 12;
+
+ // Get the information from the keypoint
+ level = kpt.class_id;
+ ratio = evolution_[level].octave_ratio;
+ scale = fRoundV2(0.5f * kpt.size / ratio);
+ angle = kpt.angle;
+ yf = kpt.pt.y / ratio;
+ xf = kpt.pt.x / ratio;
+ co = cos(angle);
+ si = sin(angle);
+
+ i = -8;
+
+ // Calculate descriptor for this interest point
+ // Area of size 24 s x 24 s
+ while (i < pattern_size) {
+ j = -8;
+ i = i - 4;
+
+ cx += 1.0f;
+ cy = -0.5f;
+
+ while (j < pattern_size) {
+ dx = dy = mdx = mdy = 0.0;
+ cy += 1.0f;
+ j = j - 4;
+
+ ky = i + sample_step;
+ kx = j + sample_step;
+
+ xs = xf + (-kx * scale * si + ky * scale * co);
+ ys = yf + (kx * scale * co + ky * scale * si);
+
+ for (int k = i; k < i + 9; ++k) {
+ for (int l = j; l < j + 9; ++l) {
+ // Get coords of sample point on the rotated axis
+ sample_y = yf + (l * scale * co + k * scale * si);
+ sample_x = xf + (-l * scale * si + k * scale * co);
+
+ // Get the gaussian weighted x and y responses
+ gauss_s1 = gaussianV2(xs - sample_x, ys - sample_y, 2.5f * scale);
+
+ y1 = fRoundV2(sample_y - 0.5f);
+ x1 = fRoundV2(sample_x - 0.5f);
+
+ y2 = fRoundV2(sample_y + 0.5f);
+ x2 = fRoundV2(sample_x + 0.5f);
+
+ fx = sample_x - x1;
+ fy = sample_y - y1;
+
+ res1 = *(evolution_[level].Lx.ptr<float>(y1) + x1);
+ res2 = *(evolution_[level].Lx.ptr<float>(y1) + x2);
+ res3 = *(evolution_[level].Lx.ptr<float>(y2) + x1);
+ res4 = *(evolution_[level].Lx.ptr<float>(y2) + x2);
+ rx = (1.0f - fx) * (1.0f - fy) * res1 + fx * (1.0f - fy) * res2 +
+ (1.0f - fx) * fy * res3 + fx * fy * res4;
+
+ res1 = *(evolution_[level].Ly.ptr<float>(y1) + x1);
+ res2 = *(evolution_[level].Ly.ptr<float>(y1) + x2);
+ res3 = *(evolution_[level].Ly.ptr<float>(y2) + x1);
+ res4 = *(evolution_[level].Ly.ptr<float>(y2) + x2);
+ ry = (1.0f - fx) * (1.0f - fy) * res1 + fx * (1.0f - fy) * res2 +
+ (1.0f - fx) * fy * res3 + fx * fy * res4;
+
+ // Get the x and y derivatives on the rotated axis
+ rry = gauss_s1 * (rx * co + ry * si);
+ rrx = gauss_s1 * (-rx * si + ry * co);
+
+ // Sum the derivatives to the cumulative descriptor
+ dx += rrx;
+ dy += rry;
+ mdx += fabs(rrx);
+ mdy += fabs(rry);
+ }
+ }
+
+ // Add the values to the descriptor vector
+ gauss_s2 = gaussianV2(cx - 2.0f, cy - 2.0f, 1.5f);
+ desc[dcount++] = dx * gauss_s2;
+ desc[dcount++] = dy * gauss_s2;
+ desc[dcount++] = mdx * gauss_s2;
+ desc[dcount++] = mdy * gauss_s2;
+
+ len += (dx * dx + dy * dy + mdx * mdx + mdy * mdy) * gauss_s2 * gauss_s2;
+
+ j += 9;
+ }
+
+ i += 9;
+ }
+
+ // convert to unit vector
+ len = sqrt(len);
+
+ for (i = 0; i < dsize; i++) {
+ desc[i] /= len;
+ }
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method computes the upright descriptor (not rotation invariant)
+ * of the provided keypoint
+ * @param kpt Input keypoint
+ * @param desc Descriptor vector
+ */
+void Upright_MLDB_Full_Descriptor_InvokerV2::Get_Upright_MLDB_Full_Descriptor(
+ const KeyPoint& kpt, unsigned char* desc) const {
+ float di = 0.0, dx = 0.0, dy = 0.0;
+ float ri = 0.0, rx = 0.0, ry = 0.0, xf = 0.0, yf = 0.0;
+ float sample_x = 0.0, sample_y = 0.0, ratio = 0.0;
+ int x1 = 0, y1 = 0, sample_step = 0, pattern_size = 0;
+ int level = 0, nsamples = 0, scale = 0;
+ int dcount1 = 0, dcount2 = 0;
+
+ CV_DbgAssert(options_.descriptor_channels <= 3);
+
+ // Matrices for the M-LDB descriptor: the dimensions are [grid size] by
+ // [channel size]
+ float values_1[4][3];
+ float values_2[9][3];
+ float values_3[16][3];
+
+ // Get the information from the keypoint
+ level = kpt.class_id;
+ ratio = evolution_[level].octave_ratio;
+ scale = evolution_[level].sigma_size;
+ yf = kpt.pt.y / ratio;
+ xf = kpt.pt.x / ratio;
+
+ // First 2x2 grid
+ pattern_size = options_.descriptor_pattern_size;
+ sample_step = pattern_size;
+
+ for (int i = -pattern_size; i < pattern_size; i += sample_step) {
+ for (int j = -pattern_size; j < pattern_size; j += sample_step) {
+ di = dx = dy = 0.0;
+ nsamples = 0;
+
+ for (int k = i; k < i + sample_step; k++) {
+ for (int l = j; l < j + sample_step; l++) {
+ // Get the coordinates of the sample point
+ sample_y = yf + l * scale;
+ sample_x = xf + k * scale;
+
+ y1 = fRoundV2(sample_y);
+ x1 = fRoundV2(sample_x);
+
+ ri = *(evolution_[level].Lt.ptr<float>(y1) + x1);
+ rx = *(evolution_[level].Lx.ptr<float>(y1) + x1);
+ ry = *(evolution_[level].Ly.ptr<float>(y1) + x1);
+
+ di += ri;
+ dx += rx;
+ dy += ry;
+ nsamples++;
+ }
+ }
+
+ di /= nsamples;
+ dx /= nsamples;
+ dy /= nsamples;
+
+ values_1[dcount2][0] = di;
+ values_1[dcount2][1] = dx;
+ values_1[dcount2][2] = dy;
+ dcount2++;
+ }
+ }
+
+ // Do binary comparison first level
+ for (int i = 0; i < 4; i++) {
+ for (int j = i + 1; j < 4; j++) {
+ if (values_1[i][0] > values_1[j][0]) {
+ desc[dcount1 / 8] |= (1 << (dcount1 % 8));
+ } else {
+ desc[dcount1 / 8] &= ~(1 << (dcount1 % 8));
+ }
+ dcount1++;
+
+ if (values_1[i][1] > values_1[j][1]) {
+ desc[dcount1 / 8] |= (1 << (dcount1 % 8));
+ } else {
+ desc[dcount1 / 8] &= ~(1 << (dcount1 % 8));
+ }
+ dcount1++;
+
+ if (values_1[i][2] > values_1[j][2]) {
+ desc[dcount1 / 8] |= (1 << (dcount1 % 8));
+ } else {
+ desc[dcount1 / 8] &= ~(1 << (dcount1 % 8));
+ }
+ dcount1++;
+ }
+ }
+
+ // Second 3x3 grid
+ sample_step = static_cast<int>(ceil(pattern_size * 2. / 3.));
+ dcount2 = 0;
+
+ for (int i = -pattern_size; i < pattern_size; i += sample_step) {
+ for (int j = -pattern_size; j < pattern_size; j += sample_step) {
+ di = dx = dy = 0.0;
+ nsamples = 0;
+
+ for (int k = i; k < i + sample_step; k++) {
+ for (int l = j; l < j + sample_step; l++) {
+ // Get the coordinates of the sample point
+ sample_y = yf + l * scale;
+ sample_x = xf + k * scale;
+
+ y1 = fRoundV2(sample_y);
+ x1 = fRoundV2(sample_x);
+
+ ri = *(evolution_[level].Lt.ptr<float>(y1) + x1);
+ rx = *(evolution_[level].Lx.ptr<float>(y1) + x1);
+ ry = *(evolution_[level].Ly.ptr<float>(y1) + x1);
+
+ di += ri;
+ dx += rx;
+ dy += ry;
+ nsamples++;
+ }
+ }
+
+ di /= nsamples;
+ dx /= nsamples;
+ dy /= nsamples;
+
+ values_2[dcount2][0] = di;
+ values_2[dcount2][1] = dx;
+ values_2[dcount2][2] = dy;
+ dcount2++;
+ }
+ }
+
+ // Do binary comparison second level
+ dcount2 = 0;
+ for (int i = 0; i < 9; i++) {
+ for (int j = i + 1; j < 9; j++) {
+ if (values_2[i][0] > values_2[j][0]) {
+ desc[dcount1 / 8] |= (1 << (dcount1 % 8));
+ } else {
+ desc[dcount1 / 8] &= ~(1 << (dcount1 % 8));
+ }
+ dcount1++;
+
+ if (values_2[i][1] > values_2[j][1]) {
+ desc[dcount1 / 8] |= (1 << (dcount1 % 8));
+ } else {
+ desc[dcount1 / 8] &= ~(1 << (dcount1 % 8));
+ }
+ dcount1++;
+
+ if (values_2[i][2] > values_2[j][2]) {
+ desc[dcount1 / 8] |= (1 << (dcount1 % 8));
+ } else {
+ desc[dcount1 / 8] &= ~(1 << (dcount1 % 8));
+ }
+ dcount1++;
+ }
+ }
+
+ // Third 4x4 grid
+ sample_step = pattern_size / 2;
+ dcount2 = 0;
+
+ for (int i = -pattern_size; i < pattern_size; i += sample_step) {
+ for (int j = -pattern_size; j < pattern_size; j += sample_step) {
+ di = dx = dy = 0.0;
+ nsamples = 0;
+
+ for (int k = i; k < i + sample_step; k++) {
+ for (int l = j; l < j + sample_step; l++) {
+ // Get the coordinates of the sample point
+ sample_y = yf + l * scale;
+ sample_x = xf + k * scale;
+
+ y1 = fRoundV2(sample_y);
+ x1 = fRoundV2(sample_x);
+
+ ri = *(evolution_[level].Lt.ptr<float>(y1) + x1);
+ rx = *(evolution_[level].Lx.ptr<float>(y1) + x1);
+ ry = *(evolution_[level].Ly.ptr<float>(y1) + x1);
+
+ di += ri;
+ dx += rx;
+ dy += ry;
+ nsamples++;
+ }
+ }
+
+ di /= nsamples;
+ dx /= nsamples;
+ dy /= nsamples;
+
+ values_3[dcount2][0] = di;
+ values_3[dcount2][1] = dx;
+ values_3[dcount2][2] = dy;
+ dcount2++;
+ }
+ }
+
+ // Do binary comparison third level
+ dcount2 = 0;
+ for (int i = 0; i < 16; i++) {
+ for (int j = i + 1; j < 16; j++) {
+ if (values_3[i][0] > values_3[j][0]) {
+ desc[dcount1 / 8] |= (1 << (dcount1 % 8));
+ } else {
+ desc[dcount1 / 8] &= ~(1 << (dcount1 % 8));
+ }
+ dcount1++;
+
+ if (values_3[i][1] > values_3[j][1]) {
+ desc[dcount1 / 8] |= (1 << (dcount1 % 8));
+ } else {
+ desc[dcount1 / 8] &= ~(1 << (dcount1 % 8));
+ }
+ dcount1++;
+
+ if (values_3[i][2] > values_3[j][2]) {
+ desc[dcount1 / 8] |= (1 << (dcount1 % 8));
+ } else {
+ desc[dcount1 / 8] &= ~(1 << (dcount1 % 8));
+ }
+ dcount1++;
+ }
+ }
+}
+
+inline void MLDB_Full_Descriptor_InvokerV2::MLDB_Fill_Values(
+ float* values, int sample_step, int level, float xf, float yf, float co,
+ float si, float scale) const {
+ int pattern_size = options_.descriptor_pattern_size;
+ int chan = options_.descriptor_channels;
+ int valpos = 0;
+
+ for (int i = -pattern_size; i < pattern_size; i += sample_step) {
+ for (int j = -pattern_size; j < pattern_size; j += sample_step) {
+ float di, dx, dy;
+ di = dx = dy = 0.0;
+ int nsamples = 0;
+
+ for (int k = i; k < i + sample_step; k++) {
+ for (int l = j; l < j + sample_step; l++) {
+ float sample_y = yf + (l * co * scale + k * si * scale);
+ float sample_x = xf + (-l * si * scale + k * co * scale);
+
+ int y1 = fRoundV2(sample_y);
+ int x1 = fRoundV2(sample_x);
+
+ float ri = *(evolution_[level].Lt.ptr<float>(y1) + x1);
+ di += ri;
+
+ if (chan > 1) {
+ float rx = *(evolution_[level].Lx.ptr<float>(y1) + x1);
+ float ry = *(evolution_[level].Ly.ptr<float>(y1) + x1);
+ if (chan == 2) {
+ dx += sqrtf(rx * rx + ry * ry);
+ } else {
+ float rry = rx * co + ry * si;
+ float rrx = -rx * si + ry * co;
+ dx += rrx;
+ dy += rry;
+ }
+ }
+ nsamples++;
+ }
+ }
+ di /= nsamples;
+ dx /= nsamples;
+ dy /= nsamples;
+
+ values[valpos] = di;
+ if (chan > 1) {
+ values[valpos + 1] = dx;
+ }
+ if (chan > 2) {
+ values[valpos + 2] = dy;
+ }
+ valpos += chan;
+ }
+ }
+}
+
+void MLDB_Full_Descriptor_InvokerV2::MLDB_Binary_Comparisons(
+ float* values, unsigned char* desc, int count, int& dpos) const {
+ int chan = options_.descriptor_channels;
+ int32_t* ivalues = (int32_t*)values;
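+ // CV_TOGGLE_FLT remaps the IEEE-754 bit patterns so that signed 32-bit
+ // integer comparison yields the same ordering as comparing the original
+ // floats, letting the pairwise comparisons below run on integers.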
+ for (int i = 0; i < count * chan; i++) {
+ ivalues[i] = CV_TOGGLE_FLT(ivalues[i]);
+ }
+
+ for (int pos = 0; pos < chan; pos++) {
+ for (int i = 0; i < count; i++) {
+ int32_t ival = ivalues[chan * i + pos];
+ for (int j = i + 1; j < count; j++) {
+ if (ival > ivalues[chan * j + pos]) {
+ desc[dpos >> 3] |= (1 << (dpos & 7));
+ } else {
+ desc[dpos >> 3] &= ~(1 << (dpos & 7));
+ }
+ dpos++;
+ }
+ }
+ }
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method computes the descriptor of the provided keypoint given the
+ * main orientation of the keypoint
+ * @param kpt Input keypoint
+ * @param desc Descriptor vector
+ */
+void MLDB_Full_Descriptor_InvokerV2::Get_MLDB_Full_Descriptor(
+ const KeyPoint& kpt, unsigned char* desc) const {
+ const int max_channels = 3;
+ CV_Assert(options_.descriptor_channels <= max_channels);
+ float values[16 * max_channels];
+ const double size_mult[3] = {1, 2.0 / 3.0, 1.0 / 2.0};
+
+ float ratio = evolution_[kpt.class_id].octave_ratio;
+ float scale = (float)(evolution_[kpt.class_id].sigma_size);
+ float xf = kpt.pt.x / ratio;
+ float yf = kpt.pt.y / ratio;
+ float co = cos(kpt.angle);
+ float si = sin(kpt.angle);
+ int pattern_size = options_.descriptor_pattern_size;
+
+ int dpos = 0;
+ for (int lvl = 0; lvl < 3; lvl++) {
+ int val_count = (lvl + 2) * (lvl + 2);
+ int sample_step = static_cast<int>(ceil(pattern_size * size_mult[lvl]));
+ MLDB_Fill_Values(values, sample_step, kpt.class_id, xf, yf, co, si, scale);
+ MLDB_Binary_Comparisons(values, desc, val_count, dpos);
+ }
+
+ // Clear the uninitialized bits of the last byte
+ int remain = dpos % 8;
+ if (remain > 0) desc[dpos >> 3] &= (0xff >> (8 - remain));
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function compares two values specified by comps[] and sets the
+ * i-th bit of desc if the comparison is true.
+ * @param values Input array of values to compare
+ * @param comps Input array of indices at which two values are compared
+ * @param nbits The length of values[] as well as the number of bits to write in
+ * desc
+ * @param desc Descriptor vector
+ */
+template <typename Typ_ = uint64_t>
+inline void compare_and_pack_descriptor(const float values[], const int* comps,
+ const int nbits, unsigned char* desc) {
+ const int nbits_in_bucket = sizeof(Typ_) << 3;
+ const int(*idx)[2] = (const int(*)[2])comps;
+ int written = 0;
+
+ Typ_ bucket = 0;
+ for (int i = 0; i < nbits; i++) {
+ bucket <<= 1;
+ if (values[idx[i][0]] > values[idx[i][1]]) bucket |= 1;
+
+ if ((i & (nbits_in_bucket - 1)) == (nbits_in_bucket - 1))
+ (reinterpret_cast<Typ_*>(desc))[written++] = bucket, bucket = 0;
+ }
+
+ // Flush the remaining bits in bucket
+ if (written * nbits_in_bucket < nbits) {
+ written *= sizeof(Typ_); /* Convert the unit from bucket to byte */
+
+ int remain = (nbits + 7) / 8 - written;
+ for (int i = 0; i < remain; i++)
+ desc[written++] = (uint8_t)(bucket & 0xFF), bucket >>= 8;
+ }
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method computes the M-LDB descriptor of the provided keypoint
+ * given the main orientation of the keypoint. The descriptor is computed based
+ * on a subset of the bits of the whole descriptor
+ * @param kpt Input keypoint
+ * @param desc Descriptor vector
+ */
+void MLDB_Descriptor_Subset_InvokerV2::Get_MLDB_Descriptor_Subset(
+ const KeyPoint& kpt, unsigned char* desc) const {
+ const TEvolutionV2& e = evolution_[kpt.class_id];
+
+ // Get the information from the keypoint
+ const int scale = e.sigma_size;
+ const float yf = kpt.pt.y / e.octave_ratio;
+ const float xf = kpt.pt.x / e.octave_ratio;
+ const float co = cos(kpt.angle);
+ const float si = sin(kpt.angle);
+
+ // Matrices for the M-LDB descriptor: the size is [grid size] * [channel size]
+ CV_DbgAssert(descriptorSamples_.rows <= (4 + 9 + 16));
+ CV_DbgAssert(options_.descriptor_channels <= 3);
+ float values[(4 + 9 + 16) * 3];
+
+ // coords[3] is { grid_width, x, y }
+ const int* coords = descriptorSamples_.ptr<int>(0);
+
+ // Sample everything, but only do the comparisons
+ for (int i = 0; i < descriptorSamples_.rows; i++, coords += 3) {
+ float di = 0.0f;
+ float dx = 0.0f;
+ float dy = 0.0f;
+
+ for (int x = coords[1]; x < coords[1] + coords[0]; x++) {
+ for (int y = coords[2]; y < coords[2] + coords[0]; y++) {
+ // Get the coordinates of the sample point
+ int x1 = fRoundV2(xf + (x * scale * co - y * scale * si));
+ int y1 = fRoundV2(yf + (x * scale * si + y * scale * co));
+
+ di += *(e.Lt.ptr<float>(y1) + x1);
+
+ if (options_.descriptor_channels > 1) {
+ float rx = *(e.Lx.ptr<float>(y1) + x1);
+ float ry = *(e.Ly.ptr<float>(y1) + x1);
+
+ if (options_.descriptor_channels == 2) {
+ dx += sqrtf(rx * rx + ry * ry);
+ } else if (options_.descriptor_channels == 3) {
+ // Get the x and y derivatives on the rotated axis
+ dx += rx * co + ry * si;
+ dy += -rx * si + ry * co;
+ }
+ }
+ }
+ }
+
+ values[i * options_.descriptor_channels] = di;
+
+ if (options_.descriptor_channels == 2) {
+ values[i * options_.descriptor_channels + 1] = dx;
+ } else if (options_.descriptor_channels == 3) {
+ values[i * options_.descriptor_channels + 1] = dx;
+ values[i * options_.descriptor_channels + 2] = dy;
+ }
+ }
+
+ // Do the comparisons
+ compare_and_pack_descriptor<uint64_t>(values, descriptorBits_.ptr<int>(0),
+ descriptorBits_.rows, desc);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This method computes the upright (not rotation invariant) M-LDB
+ * descriptor of the provided keypoint given the main orientation of the
+ * keypoint. The descriptor is computed based on a subset of the bits of the
+ * whole descriptor
+ * @param kpt Input keypoint
+ * @param desc Descriptor vector
+ */
+void Upright_MLDB_Descriptor_Subset_InvokerV2::
+ Get_Upright_MLDB_Descriptor_Subset(const KeyPoint& kpt,
+ unsigned char* desc) const {
+ const TEvolutionV2& e = evolution_[kpt.class_id];
+
+ // Get the information from the keypoint
+ const int scale = e.sigma_size;
+ const float yf = kpt.pt.y / e.octave_ratio;
+ const float xf = kpt.pt.x / e.octave_ratio;
+
+ // Matrices for the M-LDB descriptor: the size is [grid size] * [channel size]
+ CV_DbgAssert(descriptorSamples_.rows <= (4 + 9 + 16));
+ CV_DbgAssert(options_.descriptor_channels <= 3);
+ float values[(4 + 9 + 16) * 3];
+
+ // coords[3] is { grid_width, x, y }
+ const int* coords = descriptorSamples_.ptr<int>(0);
+
+ for (int i = 0; i < descriptorSamples_.rows; i++, coords += 3) {
+ float di = 0.0f;
+ float dx = 0.0f;
+ float dy = 0.0f;
+
+ for (int x = coords[1]; x < coords[1] + coords[0]; x++) {
+ for (int y = coords[2]; y < coords[2] + coords[0]; y++) {
+ // Get the coordinates of the sample point
+ int x1 = fRoundV2(xf + x * scale);
+ int y1 = fRoundV2(yf + y * scale);
+
+ di += *(e.Lt.ptr<float>(y1) + x1);
+
+ if (options_.descriptor_channels > 1) {
+ float rx = *(e.Lx.ptr<float>(y1) + x1);
+ float ry = *(e.Ly.ptr<float>(y1) + x1);
+
+ if (options_.descriptor_channels == 2) {
+ dx += sqrtf(rx * rx + ry * ry);
+ } else if (options_.descriptor_channels == 3) {
+ dx += rx;
+ dy += ry;
+ }
+ }
+ }
+ }
+
+ values[i * options_.descriptor_channels] = di;
+
+ if (options_.descriptor_channels == 2) {
+ values[i * options_.descriptor_channels + 1] = dx;
+ } else if (options_.descriptor_channels == 3) {
+ values[i * options_.descriptor_channels + 1] = dx;
+ values[i * options_.descriptor_channels + 2] = dy;
+ }
+ }
+
+ // Do the comparisons
+ compare_and_pack_descriptor<uint64_t>(values, descriptorBits_.ptr<int>(0),
+ descriptorBits_.rows, desc);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes a (quasi-random) list of bits to be taken
+ * from the full descriptor. To speed the extraction, the function creates
+ * a list of the samples that are involved in generating at least one bit
+ * (sampleList) and a list of the comparisons between those samples
+ * (comparisons)
+ * @param sampleList
+ * @param comparisons The matrix with the binary comparisons
+ * @param nbits The number of bits of the descriptor
+ * @param pattern_size The pattern size for the binary descriptor
+ * @param nchannels Number of channels to consider in the descriptor (1-3)
+ * @note The function keeps the 18 bits (3 channels by 6 comparisons) of the
+ * coarsest grid, since it provides the most robust estimations
+ */
+static void generateDescriptorSubsampleV2(Mat& sampleList, Mat& comparisons,
+ int nbits, int pattern_size,
+ int nchannels) {
+#if 0
+ // Replaced by an immediate to use stack; need C++11 constexpr to use the logic
+ int fullM_rows = 0;
+ for (int i = 0; i < 3; i++) {
+ int gz = (i + 2)*(i + 2);
+ fullM_rows += gz*(gz - 1) / 2;
+ }
+#else
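+ // 162 = 6 + 36 + 120 pairwise comparisons over the 2x2, 3x3 and 4x4 grids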
+ const int fullM_rows = 162;
+#endif
+
+ int ssz = fullM_rows * nchannels; // ssz is 486 when nchannels is 3
+
+ CV_Assert(nbits <=
+ ssz); // Descriptor size can't be bigger than full descriptor
+
+ const int steps[3] = {pattern_size, (int)ceil(2.f * pattern_size / 3.f),
+ pattern_size / 2};
+
+ // Since the full descriptor is usually under 10k elements, we pick
+ // the selection from the full matrix. We take as many samples per
+ // pick as the number of channels. For every pick, we
+ // take the two samples involved and put them in the sampling list
+
+ int fullM_stack[fullM_rows *
+ 5]; // About 6.3KB workspace with 64-bit int on stack
+ Mat_<int> fullM(fullM_rows, 5, fullM_stack);
+
+ for (int i = 0, c = 0; i < 3; i++) {
+ int gdiv = i + 2; // grid divisions, per row
+ int gsz = gdiv * gdiv;
+ int psz = (int)ceil(2.f * pattern_size / (float)gdiv);
+
+ for (int j = 0; j < gsz; j++) {
+ for (int k = j + 1; k < gsz; k++, c++) {
+ fullM(c, 0) = steps[i];
+ fullM(c, 1) = psz * (j % gdiv) - pattern_size;
+ fullM(c, 2) = psz * (j / gdiv) - pattern_size;
+ fullM(c, 3) = psz * (k % gdiv) - pattern_size;
+ fullM(c, 4) = psz * (k / gdiv) - pattern_size;
+ }
+ }
+ }
+
+ int comps_stack[486 * 2]; // About 7.6KB workspace with 64-bit int on stack
+ Mat_<int> comps(486, 2, comps_stack);
+ comps = 1000;
+
+ int samples_stack[(4 + 9 + 16) *
+ 3]; // 696 bytes workspace with 64-bit int on stack
+ Mat_<int> samples((4 + 9 + 16), 3, samples_stack);
+
+ // Select some samples. A sample includes all channels
+ int count = 0;
+ int npicks = (int)ceil(nbits / (float)nchannels);
+ samples = -1;
+
+ srand(1024);
+ for (int i = 0; i < npicks; i++) {
+ int k = rand() % (fullM_rows - i);
+ if (i < 6) {
+ // Force use of the coarser grid values and comparisons
+ k = i;
+ }
+
+ bool n = true;
+
+ for (int j = 0; j < count; j++) {
+ if (samples(j, 0) == fullM(k, 0) && samples(j, 1) == fullM(k, 1) &&
+ samples(j, 2) == fullM(k, 2)) {
+ n = false;
+ comps(i * nchannels, 0) = nchannels * j;
+ comps(i * nchannels + 1, 0) = nchannels * j + 1;
+ comps(i * nchannels + 2, 0) = nchannels * j + 2;
+ break;
+ }
+ }
+
+ if (n) {
+ samples(count, 0) = fullM(k, 0);
+ samples(count, 1) = fullM(k, 1);
+ samples(count, 2) = fullM(k, 2);
+ comps(i * nchannels, 0) = nchannels * count;
+ comps(i * nchannels + 1, 0) = nchannels * count + 1;
+ comps(i * nchannels + 2, 0) = nchannels * count + 2;
+ count++;
+ }
+
+ n = true;
+ for (int j = 0; j < count; j++) {
+ if (samples(j, 0) == fullM(k, 0) && samples(j, 1) == fullM(k, 3) &&
+ samples(j, 2) == fullM(k, 4)) {
+ n = false;
+ comps(i * nchannels, 1) = nchannels * j;
+ comps(i * nchannels + 1, 1) = nchannels * j + 1;
+ comps(i * nchannels + 2, 1) = nchannels * j + 2;
+ break;
+ }
+ }
+
+ if (n) {
+ samples(count, 0) = fullM(k, 0);
+ samples(count, 1) = fullM(k, 3);
+ samples(count, 2) = fullM(k, 4);
+ comps(i * nchannels, 1) = nchannels * count;
+ comps(i * nchannels + 1, 1) = nchannels * count + 1;
+ comps(i * nchannels + 2, 1) = nchannels * count + 2;
+ count++;
+ }
+
+ fullM.row(fullM.rows - i - 1).copyTo(fullM.row(k));
+ }
+
+ sampleList = samples.rowRange(0, count).clone();
+ comparisons = comps.rowRange(0, nbits).clone();
+}
+
+} // namespace cv
\ No newline at end of file
diff --git a/third_party/akaze/AKAZEFeatures.h b/third_party/akaze/AKAZEFeatures.h
new file mode 100644
index 0000000..77740b2
--- /dev/null
+++ b/third_party/akaze/AKAZEFeatures.h
@@ -0,0 +1,94 @@
+/**
+ * @file AKAZEFeatures.h
+ * @brief Main class for detecting and computing binary descriptors in an
+ * accelerated nonlinear scale space
+ * @date Mar 27, 2013
+ * @author Pablo F. Alcantarilla, Jesus Nuevo
+ */
+
+#ifndef __OPENCV_FEATURES_2D_AKAZE_FEATURES_H__
+#define __OPENCV_FEATURES_2D_AKAZE_FEATURES_H__
+
+/* ************************************************************************* */
+// Includes
+#include <vector>
+
+#define AKAZE_USE_CPP11_THREADING
+
+#ifdef AKAZE_USE_CPP11_THREADING
+#include <atomic>
+#include <future>
+#endif
+
+#include <opencv2/core.hpp>
+
+#include "AKAZEConfig.h"
+#include "TEvolution.h"
+
+namespace cv {
+
+/* ************************************************************************* */
+// AKAZE Class Declaration
+class AKAZEFeaturesV2 {
+ private:
+ AKAZEOptionsV2 options_; ///< Configuration options for AKAZE
+ std::vector<TEvolutionV2>
+ evolution_; ///< Vector of nonlinear diffusion evolution
+
+ /// FED parameters
+ bool reordering_; ///< Flag for reordering time steps
+ std::vector<std::vector<float>>
+ tsteps_; ///< Vector of FED dynamic time steps
+
+ /// Matrices for the M-LDB descriptor computation
+ cv::Mat descriptorSamples_; // List of positions in the grids to sample LDB
+ // bits from.
+ cv::Mat descriptorBits_;
+ cv::Mat bitMask_;
+
+ /// Preallocated temporary variables
+ cv::Mat gray_, lx_, ly_;
+ cv::Mat lflow_, lstep_;
+ cv::Mat histgram_, modgs_;
+ std::vector<std::vector<cv::KeyPoint>> kpts_aux_;
+
+#ifdef AKAZE_USE_CPP11_THREADING
+ using task = std::future<void>;
+ std::vector<std::vector<task>> tasklist_;
+ std::vector<std::atomic_int> taskdeps_;
+ std::future<float> kcontrast_;
+#endif
+
+ public:
+ /// Constructor with input arguments
+ AKAZEFeaturesV2(const AKAZEOptionsV2& options);
+
+ /// Getters and Setters
+ void setThreshold(double threshold_) {
+ options_.dthreshold = std::max((float)threshold_, options_.min_dthreshold);
+ };
+ double getThreshold() const { return options_.dthreshold; }
+ void setDiffusivity(int diff_) { options_.diffusivity = diff_; }
+ int getDiffusivity() const { return options_.diffusivity; }
+
+ /// Scale Space methods
+ void Allocate_Memory_Evolution();
+ int Create_Nonlinear_Scale_Space(const cv::Mat& img);
+ float Compute_Base_Evolution_Level(const cv::Mat& img);
+ void Feature_Detection(std::vector<cv::KeyPoint>& kpts);
+ void Compute_Determinant_Hessian_Response(const int level);
+ void Compute_Determinant_Hessian_Response_Single(const int level);
+ void Find_Scale_Space_Extrema(
+ std::vector<std::vector<cv::KeyPoint>>& kpts_aux);
+ void Find_Scale_Space_Extrema_Single(
+ std::vector<std::vector<cv::KeyPoint>>& kpts_aux);
+ void Do_Subpixel_Refinement(std::vector<std::vector<cv::KeyPoint>>& kpts_aux,
+ std::vector<cv::KeyPoint>& kpts);
+
+ /// Feature description methods
+ void Compute_Descriptors(std::vector<cv::KeyPoint>& kpts, cv::Mat& desc);
+};
+
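+// A rough usage sketch (editorial illustration; it mirrors what the wrapper in
+// akaze.cpp does inside detectAndCompute, and assumes `img` is a grayscale
+// cv::Mat):
+//
+//   AKAZEOptionsV2 options;
+//   options.img_width = img.cols;
+//   options.img_height = img.rows;
+//   AKAZEFeaturesV2 akaze(options);
+//   akaze.Create_Nonlinear_Scale_Space(img);
+//   std::vector<cv::KeyPoint> kpts;
+//   akaze.Feature_Detection(kpts);
+//   cv::Mat desc;
+//   akaze.Compute_Descriptors(kpts, desc);
+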
+} // namespace cv
+
+#endif
\ No newline at end of file
diff --git a/third_party/akaze/BUILD b/third_party/akaze/BUILD
new file mode 100644
index 0000000..fe74381
--- /dev/null
+++ b/third_party/akaze/BUILD
@@ -0,0 +1,101 @@
+cc_library(
+ name = "akaze",
+ srcs = [
+ "akaze.cpp",
+ ],
+ hdrs = [
+ "akaze.h",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//visibility:public"],
+ deps = [
+ ":akaze_features",
+ "//third_party:opencv",
+ ],
+)
+
+cc_library(
+ name = "akaze_config",
+ hdrs = [
+ "AKAZEConfig.h",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//visibility:public"],
+ deps = [
+ "//third_party:opencv",
+ ],
+)
+
+cc_library(
+ name = "akaze_features",
+ srcs = [
+ "AKAZEFeatures.cpp",
+ ],
+ hdrs = [
+ "AKAZEFeatures.h",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//visibility:public"],
+ deps = [
+ ":akaze_config",
+ ":fed",
+ ":nldiffusion_functions",
+ ":t_evolution",
+ ":utils",
+ "//third_party:opencv",
+ ],
+)
+
+cc_library(
+ name = "fed",
+ srcs = [
+ "fed.cpp",
+ ],
+ hdrs = [
+ "fed.h",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//visibility:public"],
+ deps = [
+ "//third_party:opencv",
+ ],
+)
+
+cc_library(
+ name = "nldiffusion_functions",
+ srcs = [
+ "nldiffusion_functions.cpp",
+ ],
+ hdrs = [
+ "nldiffusion_functions.h",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//visibility:public"],
+ deps = [
+ "//third_party:opencv",
+ ],
+)
+
+cc_library(
+ name = "t_evolution",
+ hdrs = [
+ "TEvolution.h",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//visibility:public"],
+ deps = [
+ "//third_party:opencv",
+ ],
+)
+
+cc_library(
+ name = "utils",
+ hdrs = [
+ "utils.h",
+ ],
+ target_compatible_with = ["@platforms//os:linux"],
+ visibility = ["//visibility:public"],
+ deps = [
+ "//third_party:opencv",
+ ],
+)
diff --git a/third_party/akaze/README.md b/third_party/akaze/README.md
new file mode 100644
index 0000000..8cfb1c6
--- /dev/null
+++ b/third_party/akaze/README.md
@@ -0,0 +1,4 @@
+This is a copy of the following repo:
+https://github.com/h2suzuki/fast_akaze
+
+The stock OpenCV AKAZE ran at 30 fps on a laptop, which is too slow, so this copy is an attempt to speed it up.
\ No newline at end of file
diff --git a/third_party/akaze/TEvolution.h b/third_party/akaze/TEvolution.h
new file mode 100644
index 0000000..f2bc0ee
--- /dev/null
+++ b/third_party/akaze/TEvolution.h
@@ -0,0 +1,48 @@
+/**
+ * @file TEvolution.h
+ * @brief Header file with the declaration of the TEvolution struct
+ * @date Jun 02, 2014
+ * @author Pablo F. Alcantarilla
+ */
+
+#ifndef __OPENCV_FEATURES_2D_TEVOLUTION_H__
+#define __OPENCV_FEATURES_2D_TEVOLUTION_H__
+
+#include <opencv2/core.hpp>
+
+namespace cv {
+
+/* ************************************************************************* */
+/// KAZE/A-KAZE nonlinear diffusion filtering evolution
+struct TEvolutionV2 {
+ TEvolutionV2() {
+ etime = 0.0f;
+ esigma = 0.0f;
+ octave = 0;
+ sublevel = 0;
+ sigma_size = 0;
+ octave_ratio = 1.0f;
+ }
+
+ Mat Lx, Ly; ///< First order spatial derivatives
+ Mat Lxx, Lxy, Lyy; ///< Second order spatial derivatives
+ Mat Lt; ///< Evolution image
+ Mat Lsmooth; ///< Smoothed image
+ Mat Ldet; ///< Detector response
+
+ Mat DxKx, DxKy; ///< Derivative kernels (kx and ky) of xorder = 1
+ Mat DyKx, DyKy; ///< Derivative kernels (kx and ky) of yorder = 1
+
+ float etime; ///< Evolution time
+ float esigma; ///< Evolution sigma. For linear diffusion t = sigma^2 / 2
+ int octave; ///< Image octave
+ int sublevel; ///< Image sublevel in each octave
+ int sigma_size; ///< Scaling factor of esigma that is round(esigma *
+ ///< derivative_factor / power)
+ int border; ///< Width of border where descriptors cannot be computed
+ float octave_ratio; ///< Scaling ratio of this octave. ratio = 2^octave
+};
+
+} // namespace cv
+
+#endif
\ No newline at end of file
diff --git a/third_party/akaze/akaze.cpp b/third_party/akaze/akaze.cpp
new file mode 100644
index 0000000..2ad044c
--- /dev/null
+++ b/third_party/akaze/akaze.cpp
@@ -0,0 +1,273 @@
+/*M///////////////////////////////////////////////////////////////////////////////////////
+//
+// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+//
+// By downloading, copying, installing or using the software you agree to this license.
+// If you do not agree to this license, do not download, install,
+// copy or use the software.
+//
+//
+// License Agreement
+// For Open Source Computer Vision Library
+//
+// Copyright (C) 2008, Willow Garage Inc., all rights reserved.
+// Third party copyrights are property of their respective owners.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+// * Redistribution's of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+//
+// * Redistribution's in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// * The name of Intel Corporation may not be used to endorse or promote products
+// derived from this software without specific prior written permission.
+//
+// This software is provided by the copyright holders and contributors "as is" and
+// any express or implied warranties, including, but not limited to, the implied
+// warranties of merchantability and fitness for a particular purpose are disclaimed.
+// In no event shall the Intel Corporation or contributors be liable for any direct,
+// indirect, incidental, special, exemplary, or consequential damages
+// (including, but not limited to, procurement of substitute goods or services;
+// loss of use, data, or profits; or business interruption) however caused
+// and on any theory of liability, whether in contract, strict liability,
+// or tort (including negligence or otherwise) arising in any way out of
+// the use of this software, even if advised of the possibility of such damage.
+//
+//M*/
+
+/*
+OpenCV wrapper of reference implementation of
+[1] Fast Explicit Diffusion for Accelerated Features in Nonlinear Scale Spaces.
+Pablo F. Alcantarilla, J. Nuevo and Adrien Bartoli.
+In British Machine Vision Conference (BMVC), Bristol, UK, September 2013
+http://www.robesafe.com/personal/pablo.alcantarilla/papers/Alcantarilla13bmvc.pdf
+@author Eugene Khvedchenya <ekhvedchenya@gmail.com>
+*/
+
+#include "akaze.h" // Define AKAZE2; included in place of <opencv2/features2d.hpp>
+
+#include <iostream>
+#include <opencv2/core.hpp>
+#include <opencv2/imgproc.hpp>
+
+#include "AKAZEFeatures.h"
+
+namespace cv {
+using namespace std;
+
+class AKAZE_Impl2 : public AKAZE2 {
+ public:
+ AKAZE_Impl2(int _descriptor_type, int _descriptor_size,
+ int _descriptor_channels, float _threshold, int _octaves,
+ int _sublevels, int _diffusivity)
+ : descriptor(_descriptor_type),
+ descriptor_channels(_descriptor_channels),
+ descriptor_size(_descriptor_size),
+ threshold(_threshold),
+ octaves(_octaves),
+ sublevels(_sublevels),
+ diffusivity(_diffusivity),
+ img_width(-1),
+ img_height(-1) {
+ cout << "AKAZE_Impl2 constructor called" << endl;
+ }
+
+ virtual ~AKAZE_Impl2() {}
+
+ void setDescriptorType(int dtype_) {
+ descriptor = dtype_;
+ impl.release();
+ }
+ int getDescriptorType() const { return descriptor; }
+
+ void setDescriptorSize(int dsize_) {
+ descriptor_size = dsize_;
+ impl.release();
+ }
+ int getDescriptorSize() const { return descriptor_size; }
+
+ void setDescriptorChannels(int dch_) {
+ descriptor_channels = dch_;
+ impl.release();
+ }
+ int getDescriptorChannels() const { return descriptor_channels; }
+
+ void setThreshold(double th_) {
+ threshold = (float)th_;
+ if (!impl.empty()) impl->setThreshold(th_);
+ }
+ double getThreshold() const { return threshold; }
+
+ void setNOctaves(int octaves_) {
+ octaves = octaves_;
+ impl.release();
+ }
+ int getNOctaves() const { return octaves; }
+
+ void setNOctaveLayers(int octaveLayers_) {
+ sublevels = octaveLayers_;
+ impl.release();
+ }
+ int getNOctaveLayers() const { return sublevels; }
+
+ void setDiffusivity(int diff_) {
+ diffusivity = diff_;
+ if (!impl.empty()) impl->setDiffusivity(diff_);
+ }
+ int getDiffusivity() const { return diffusivity; }
+
+ // returns the descriptor size in bytes
+ int descriptorSize() const {
+ switch (descriptor) {
+ case DESCRIPTOR_KAZE:
+ case DESCRIPTOR_KAZE_UPRIGHT:
+ return 64;
+
+ case DESCRIPTOR_MLDB:
+ case DESCRIPTOR_MLDB_UPRIGHT:
+ // We use the full length binary descriptor -> 486 bits
+ if (descriptor_size == 0) {
+ int t = (6 + 36 + 120) * descriptor_channels;
+ return (int)ceil(t / 8.);
+ } else {
+ // We use the random bit selection length binary descriptor
+ return (int)ceil(descriptor_size / 8.);
+ }
+
+ default:
+ return -1;
+ }
+ }
+
+ // returns the descriptor type
+ int descriptorType() const {
+ switch (descriptor) {
+ case DESCRIPTOR_KAZE:
+ case DESCRIPTOR_KAZE_UPRIGHT:
+ return CV_32F;
+
+ case DESCRIPTOR_MLDB:
+ case DESCRIPTOR_MLDB_UPRIGHT:
+ return CV_8U;
+
+ default:
+ return -1;
+ }
+ }
+
+ // returns the default norm type
+ int defaultNorm() const {
+ switch (descriptor) {
+ case DESCRIPTOR_KAZE:
+ case DESCRIPTOR_KAZE_UPRIGHT:
+ return NORM_L2;
+
+ case DESCRIPTOR_MLDB:
+ case DESCRIPTOR_MLDB_UPRIGHT:
+ return NORM_HAMMING;
+
+ default:
+ return -1;
+ }
+ }
+
+ void detectAndCompute(InputArray image, InputArray mask,
+ std::vector<KeyPoint>& keypoints,
+ OutputArray descriptors, bool useProvidedKeypoints) {
+ Mat img = image.getMat();
+
+ if (img_width != img.cols) {
+ img_width = img.cols;
+ impl.release();
+ }
+
+ if (img_height != img.rows) {
+ img_height = img.rows;
+ impl.release();
+ }
+
+ if (impl.empty()) {
+ AKAZEOptionsV2 options;
+ options.descriptor = descriptor;
+ options.descriptor_channels = descriptor_channels;
+ options.descriptor_size = descriptor_size;
+ options.img_width = img_width;
+ options.img_height = img_height;
+ options.dthreshold = threshold;
+ options.omax = octaves;
+ options.nsublevels = sublevels;
+ options.diffusivity = diffusivity;
+
+ impl = makePtr<AKAZEFeaturesV2>(options);
+ }
+
+ impl->Create_Nonlinear_Scale_Space(img);
+
+ if (!useProvidedKeypoints) {
+ impl->Feature_Detection(keypoints);
+ }
+
+ if (!mask.empty()) {
+ KeyPointsFilter::runByPixelsMask(keypoints, mask.getMat());
+ }
+
+ if (descriptors.needed()) {
+ Mat& desc = descriptors.getMatRef();
+ impl->Compute_Descriptors(keypoints, desc);
+
+ CV_Assert((!desc.rows || desc.cols == descriptorSize()));
+ CV_Assert((!desc.rows || (desc.type() == descriptorType())));
+ }
+ }
+
+ void write(FileStorage& fs) const {
+ fs << "descriptor" << descriptor;
+ fs << "descriptor_channels" << descriptor_channels;
+ fs << "descriptor_size" << descriptor_size;
+ fs << "threshold" << threshold;
+ fs << "octaves" << octaves;
+ fs << "sublevels" << sublevels;
+ fs << "diffusivity" << diffusivity;
+ }
+
+ void read(const FileNode& fn) {
+ descriptor = (int)fn["descriptor"];
+ descriptor_channels = (int)fn["descriptor_channels"];
+ descriptor_size = (int)fn["descriptor_size"];
+ threshold = (float)fn["threshold"];
+ octaves = (int)fn["octaves"];
+ sublevels = (int)fn["sublevels"];
+ diffusivity = (int)fn["diffusivity"];
+ }
+
+ Ptr<AKAZEFeaturesV2> impl;
+ int descriptor;
+ int descriptor_channels;
+ int descriptor_size;
+ float threshold;
+ int octaves;
+ int sublevels;
+ int diffusivity;
+ int img_width;
+ int img_height;
+};
+
+Ptr<AKAZE2> AKAZE2::create(int descriptor_type, int descriptor_size,
+ int descriptor_channels, float threshold,
+ int octaves, int sublevels, int diffusivity) {
+ return makePtr<AKAZE_Impl2>(descriptor_type, descriptor_size,
+ descriptor_channels, threshold, octaves,
+ sublevels, diffusivity);
+}
+} // namespace cv
\ No newline at end of file
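
For reviewers unfamiliar with the cv::Feature2D flow, here is a minimal usage sketch of the wrapper added above; the image path, include path, and parameter choices are illustrative and not part of this patch.

```cpp
// A minimal sketch, assuming a grayscale image on disk; the file name and the
// include path are illustrative and not part of this patch.
#include <vector>

#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>

#include "third_party/akaze/akaze.h"

int main() {
  cv::Mat gray = cv::imread("frame.png", cv::IMREAD_GRAYSCALE);
  if (gray.empty()) return 1;

  // Defaults mirror the create() declaration: full-size MLDB descriptor,
  // 3 channels, threshold 0.001, 4 octaves, 4 sublevels, PM_G2 diffusivity.
  cv::Ptr<cv::AKAZE2> akaze = cv::AKAZE2::create();

  std::vector<cv::KeyPoint> keypoints;
  cv::Mat descriptors;
  akaze->detectAndCompute(gray, cv::noArray(), keypoints, descriptors,
                          /*useProvidedKeypoints=*/false);

  // For MLDB the descriptor matrix is CV_8U and is matched with NORM_HAMMING.
  return descriptors.rows == static_cast<int>(keypoints.size()) ? 0 : 1;
}
```

Because the internal AKAZEFeaturesV2 object is only rebuilt when the image size or a descriptor parameter changes, reusing a single AKAZE2 instance across same-sized frames avoids re-allocating the scale-space buffers.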
diff --git a/third_party/akaze/akaze.h b/third_party/akaze/akaze.h
new file mode 100644
index 0000000..1930eae
--- /dev/null
+++ b/third_party/akaze/akaze.h
@@ -0,0 +1,86 @@
+#ifndef __OPENCV_FEATURES_2D_AKAZE2_HPP__
+#define __OPENCV_FEATURES_2D_AKAZE2_HPP__
+
+#include <opencv2/core.hpp>
+#include <opencv2/features2d.hpp>
+
+/*
+ This file is an excerpt from opencv2/features2d.hpp, providing the local AKAZE
+ class definition. In addition, the class name is changed from AKAZE to AKAZE2
+ to avoid possible confusion between this local variant and OpenCV's original
+ AKAZE class.
+*/
+
+namespace cv {
+
+//! @addtogroup features2d
+//! @{
+
+//! @addtogroup features2d_main
+//! @{
+
+/** @brief Class implementing the AKAZE keypoint detector and descriptor
+extractor, described in @cite ANB13.
+@note AKAZE descriptors can only be used with KAZE or AKAZE keypoints. Prefer
+*operator()* over separate *extract* and *detect* calls for performance
+reasons.
+[ANB13] Fast Explicit Diffusion for Accelerated Features in Nonlinear Scale
+Spaces. Pablo F. Alcantarilla, Jesús Nuevo and Adrien Bartoli.
+In British Machine Vision Conference (BMVC), Bristol, UK, September 2013.
+ */
+class CV_EXPORTS_W AKAZE2 : public Feature2D {
+ public:
+ // AKAZE descriptor type
+ enum {
+ DESCRIPTOR_KAZE_UPRIGHT =
+ 2, ///< Upright descriptors, not invariant to rotation
+ DESCRIPTOR_KAZE = 3,
+ DESCRIPTOR_MLDB_UPRIGHT =
+ 4, ///< Upright descriptors, not invariant to rotation
+ DESCRIPTOR_MLDB = 5
+ };
+
+ /** @brief The AKAZE constructor
+ @param descriptor_type Type of the extracted descriptor: DESCRIPTOR_KAZE,
+ DESCRIPTOR_KAZE_UPRIGHT, DESCRIPTOR_MLDB or DESCRIPTOR_MLDB_UPRIGHT.
+ @param descriptor_size Size of the descriptor in bits. 0 -\> Full size
+ @param descriptor_channels Number of channels in the descriptor (1, 2, 3)
+ @param threshold Detector response threshold to accept point
+ @param nOctaves Maximum octave evolution of the image
+ @param nOctaveLayers Default number of sublevels per scale level
+ @param diffusivity Diffusivity type. DIFF_PM_G1, DIFF_PM_G2, DIFF_WEICKERT or
+ DIFF_CHARBONNIER
+ */
+ CV_WRAP static Ptr<AKAZE2> create(
+ int descriptor_type = AKAZE::DESCRIPTOR_MLDB, int descriptor_size = 0,
+ int descriptor_channels = 3, float threshold = 0.001f, int nOctaves = 4,
+ int nOctaveLayers = 4, int diffusivity = KAZE::DIFF_PM_G2);
+
+ CV_WRAP virtual void setDescriptorType(int dtype) = 0;
+ CV_WRAP virtual int getDescriptorType() const = 0;
+
+ CV_WRAP virtual void setDescriptorSize(int dsize) = 0;
+ CV_WRAP virtual int getDescriptorSize() const = 0;
+
+ CV_WRAP virtual void setDescriptorChannels(int dch) = 0;
+ CV_WRAP virtual int getDescriptorChannels() const = 0;
+
+ CV_WRAP virtual void setThreshold(double threshold) = 0;
+ CV_WRAP virtual double getThreshold() const = 0;
+
+ CV_WRAP virtual void setNOctaves(int octaves) = 0;
+ CV_WRAP virtual int getNOctaves() const = 0;
+
+ CV_WRAP virtual void setNOctaveLayers(int octaveLayers) = 0;
+ CV_WRAP virtual int getNOctaveLayers() const = 0;
+
+ CV_WRAP virtual void setDiffusivity(int diff) = 0;
+ CV_WRAP virtual int getDiffusivity() const = 0;
+};
+
+//! @} features2d_main
+
+//! @} features2d
+
+} /* namespace cv */
+
+#endif
\ No newline at end of file
diff --git a/third_party/akaze/fed.cpp b/third_party/akaze/fed.cpp
new file mode 100644
index 0000000..835441c
--- /dev/null
+++ b/third_party/akaze/fed.cpp
@@ -0,0 +1,181 @@
+//=============================================================================
+//
+// fed.cpp
+// Authors: Pablo F. Alcantarilla (1), Jesus Nuevo (2)
+// Institutions: Georgia Institute of Technology (1)
+// TrueVision Solutions (2)
+// Date: 15/09/2013
+// Email: pablofdezalc@gmail.com
+//
+// AKAZE Features Copyright 2013, Pablo F. Alcantarilla, Jesus Nuevo
+// All Rights Reserved
+// See LICENSE for the license information
+//=============================================================================
+
+/**
+ * @file fed.cpp
+ * @brief Functions for performing Fast Explicit Diffusion and building the
+ * nonlinear scale space
+ * @date Sep 15, 2013
+ * @author Pablo F. Alcantarilla, Jesus Nuevo
+ * @note This code is derived from the FED/FJ library from Grewenig et al.
+ * The FED/FJ library allows solving more advanced problems.
+ * Please look at the following papers for more information about FED:
+ * [1] S. Grewenig, J. Weickert, C. Schroers, A. Bruhn. Cyclic Schemes for
+ * PDE-Based Image Analysis. Technical Report No. 327, Department of
+ * Mathematics, Saarland University, Saarbrücken, Germany, March 2013
+ * [2] S. Grewenig, J. Weickert, A. Bruhn. From box filtering to fast explicit
+ * diffusion. DAGM, 2010
+ *
+ */
+#include "fed.h"
+
+#include <opencv2/core.hpp>
+
+using namespace std;
+
+//*************************************************************************************
+//*************************************************************************************
+
+/**
+ * @brief This function allocates an array of the least number of time steps
+ * such that a certain stopping time for the whole process can be obtained and
+ * fills it with the respective FED time step sizes for one cycle. The
+ * function returns the number of time steps per cycle, or 0 on failure.
+ * @param T Desired process stopping time
+ * @param M Desired number of cycles
+ * @param tau_max Stability limit for the explicit scheme
+ * @param reordering Reordering flag
+ * @param tau The vector with the dynamic step sizes
+ */
+int fed_tau_by_process_timeV2(const float& T, const int& M,
+ const float& tau_max, const bool& reordering,
+ std::vector<float>& tau) {
+ // All cycles have the same fraction of the stopping time
+ return fed_tau_by_cycle_timeV2(T / (float)M, tau_max, reordering, tau);
+}
+
+//*************************************************************************************
+//*************************************************************************************
+
+/**
+ * @brief This function allocates an array of the least number of time steps
+ * such that a certain stopping time for the whole process can be obtained and
+ * fills it with the respective FED time step sizes for one cycle. The
+ * function returns the number of time steps per cycle, or 0 on failure.
+ * @param t Desired cycle stopping time
+ * @param tau_max Stability limit for the explicit scheme
+ * @param reordering Reordering flag
+ * @param tau The vector with the dynamic step sizes
+ */
+inline int fed_tau_by_cycle_timeV2(const float& t, const float& tau_max,
+ const bool& reordering,
+ std::vector<float>& tau) {
+ int n = 0; // Number of time steps
+ float scale = 0.0; // Ratio of t we search to maximal t
+
+ // Compute necessary number of time steps
+ n = (int)(ceilf(sqrtf(3.0f * t / tau_max + 0.25f) - 0.5f - 1.0e-8f) + 0.5f);
+ scale = 3.0f * t / (tau_max * (float)(n * (n + 1)));
+
+ // Call internal FED time step creation routine
+ return fed_tau_internalV2(n, scale, tau_max, reordering, tau);
+}
+
+//*************************************************************************************
+//*************************************************************************************
+
+/**
+ * @brief This function allocates an array of time steps and fills it with FED
+ * time step sizes
+ * The function returns the number of time steps per cycle or 0 on failure
+ * @param n Number of internal steps
+ * @param scale Ratio of t we search to maximal t
+ * @param tau_max Stability limit for the explicit scheme
+ * @param reordering Reordering flag
+ * @param tau The vector with the dynamic step sizes
+ */
+inline int fed_tau_internalV2(const int& n, const float& scale,
+ const float& tau_max, const bool& reordering,
+ std::vector<float>& tau) {
+ if (n <= 0) {
+ return 0;
+ }
+
+ // Allocate memory for the time step size (Helper vector for unsorted taus)
+ vector<float> tauh(n);
+
+ // Compute time saver
+ float c = 1.0f / (4.0f * n + 2.0f);
+ float d = scale * tau_max / 2.0f;
+
+ // Set up originally ordered tau vector
+ for (int k = 0; k < n; ++k) {
+ float h = cos((float)CV_PI * (2.0f * k + 1.0f) * c);
+ tauh[k] = d / (h * h);
+ }
+
+ if (!reordering || n == 1) {
+ std::swap(tau, tauh);
+ } else {
+ // Permute list of time steps according to chosen reordering function
+
+ // Choose kappa cycle with k = n/2
+ // This is a heuristic. We can use Leja ordering instead!!
+ int kappa = n / 2;
+
+ // Get modulus for permutation
+ int prime = n + 1;
+
+ while (!fed_is_prime_internalV2(prime)) prime++;
+
+ // Perform permutation
+ tau.resize(n);
+ for (int k = 0, l = 0; l < n; ++k, ++l) {
+ int index = 0;
+ while ((index = ((k + 1) * kappa) % prime - 1) >= n) {
+ k++;
+ }
+
+ tau[l] = tauh[index];
+ }
+ }
+
+ return n;
+}
+
+//*************************************************************************************
+//*************************************************************************************
+
+/**
+ * @brief This function checks if a number is prime or not
+ * @param number Number to check if it is prime or not
+ * @return true if the number is prime
+ */
+inline bool fed_is_prime_internalV2(const int& number) {
+ bool is_prime = false;
+
+ if (number <= 1) {
+ return false;
+ } else if (number == 1 || number == 2 || number == 3 || number == 5 ||
+ number == 7) {
+ return true;
+ } else if ((number % 2) == 0 || (number % 3) == 0 || (number % 5) == 0 ||
+ (number % 7) == 0) {
+ return false;
+ } else {
+ is_prime = true;
+ int upperLimit = (int)sqrt(1.0f + number);
+ int divisor = 11;
+
+ while (divisor <= upperLimit) {
+ if (number % divisor == 0) {
+ is_prime = false;
+ }
+
+ divisor += 2;
+ }
+
+ return is_prime;
+ }
+}
\ No newline at end of file
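
A minimal sketch of how these V2 entry points are meant to be called; the concrete T, M, and tau_max values below are hypothetical. By construction, the variable step sizes returned for one cycle sum to roughly the per-cycle stopping time T/M, which is the property the nonlinear scale space relies on.

```cpp
// A minimal sketch of generating one FED cycle; T, M, and tau_max below are
// hypothetical values, not taken from this patch.
#include <cstdio>
#include <numeric>
#include <vector>

#include "fed.h"

int main() {
  std::vector<float> tau;
  const float T = 5.12f;        // desired total stopping time
  const int M = 4;              // number of cycles
  const float tau_max = 0.25f;  // classic explicit-scheme stability limit in 2D

  const int steps =
      fed_tau_by_process_timeV2(T, M, tau_max, /*reordering=*/true, tau);

  // By construction the step sizes of one cycle sum to (approximately) T / M.
  const float per_cycle = std::accumulate(tau.begin(), tau.end(), 0.0f);
  std::printf("%d steps per cycle, sum %.3f (target %.3f)\n", steps, per_cycle,
              T / static_cast<float>(M));
  return 0;
}
```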
diff --git a/third_party/akaze/fed.h b/third_party/akaze/fed.h
new file mode 100644
index 0000000..f2a78fc
--- /dev/null
+++ b/third_party/akaze/fed.h
@@ -0,0 +1,26 @@
+#ifndef __OPENCV_FEATURES_2D_FED_H__
+#define __OPENCV_FEATURES_2D_FED_H__
+
+//******************************************************************************
+//******************************************************************************
+
+// Includes
+#include <vector>
+
+//*************************************************************************************
+//*************************************************************************************
+
+// Declaration of functions
+int fed_tau_by_process_timeV2(const float& T, const int& M,
+ const float& tau_max, const bool& reordering,
+ std::vector<float>& tau);
+int fed_tau_by_cycle_timeV2(const float& t, const float& tau_max,
+ const bool& reordering, std::vector<float>& tau);
+int fed_tau_internalV2(const int& n, const float& scale, const float& tau_max,
+ const bool& reordering, std::vector<float>& tau);
+bool fed_is_prime_internalV2(const int& number);
+
+//*************************************************************************************
+//*************************************************************************************
+
+#endif // __OPENCV_FEATURES_2D_FED_H__
\ No newline at end of file
diff --git a/third_party/akaze/nldiffusion_functions.cpp b/third_party/akaze/nldiffusion_functions.cpp
new file mode 100644
index 0000000..39ec70e
--- /dev/null
+++ b/third_party/akaze/nldiffusion_functions.cpp
@@ -0,0 +1,457 @@
+//=============================================================================
+//
+// nldiffusion_functions.cpp
+// Author: Pablo F. Alcantarilla
+// Institution: University d'Auvergne
+// Address: Clermont Ferrand, France
+// Date: 27/12/2011
+// Email: pablofdezalc@gmail.com
+//
+// KAZE Features Copyright 2012, Pablo F. Alcantarilla
+// All Rights Reserved
+// See LICENSE for the license information
+//=============================================================================
+
+/**
+ * @file nldiffusion_functions.cpp
+ * @brief Functions for non-linear diffusion applications:
+ * 2D Gaussian Derivatives
+ * Perona and Malik conductivity equations
+ * Perona and Malik evolution
+ * @date Dec 27, 2011
+ * @author Pablo F. Alcantarilla
+ */
+
+#include "nldiffusion_functions.h"
+
+#include <cstdint>
+#include <cstring>
+#include <iostream>
+#include <opencv2/core.hpp>
+#include <opencv2/imgproc.hpp>
+
+// Namespaces
+
+/* ************************************************************************* */
+
+namespace cv {
+using namespace std;
+
+/* ************************************************************************* */
+/**
+ * @brief This function smoothes an image with a Gaussian kernel
+ * @param src Input image
+ * @param dst Output image
+ * @param ksize_x Kernel size in X-direction (horizontal)
+ * @param ksize_y Kernel size in Y-direction (vertical)
+ * @param sigma Kernel standard deviation
+ */
+void gaussian_2D_convolutionV2(const cv::Mat& src, cv::Mat& dst, int ksize_x,
+ int ksize_y, float sigma) {
+ int ksize_x_ = 0, ksize_y_ = 0;
+
+ // Compute an appropriate kernel size according to the specified sigma
+ if (sigma > ksize_x || sigma > ksize_y || ksize_x == 0 || ksize_y == 0) {
+ ksize_x_ = (int)ceil(2.0f * (1.0f + (sigma - 0.8f) / (0.3f)));
+ ksize_y_ = ksize_x_;
+ }
+
+ // The kernel size must be an odd number
+ if ((ksize_x_ % 2) == 0) {
+ ksize_x_ += 1;
+ }
+
+ if ((ksize_y_ % 2) == 0) {
+ ksize_y_ += 1;
+ }
+
+ // Perform the Gaussian Smoothing with border replication
+ GaussianBlur(src, dst, Size(ksize_x_, ksize_y_), sigma, sigma,
+ BORDER_REPLICATE);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes image derivatives with Scharr kernel
+ * @param src Input image
+ * @param dst Output image
+ * @param xorder Derivative order in X-direction (horizontal)
+ * @param yorder Derivative order in Y-direction (vertical)
+ * @note The Scharr operator approximates rotation invariance better than
+ * other stencils such as Sobel. See Weickert and Scharr,
+ * A Scheme for Coherence-Enhancing Diffusion Filtering with Optimized Rotation
+ * Invariance, Journal of Visual Communication and Image Representation 2002
+ */
+void image_derivatives_scharrV2(const cv::Mat& src, cv::Mat& dst, int xorder,
+ int yorder) {
+ Scharr(src, dst, CV_32F, xorder, yorder, 1.0, 0, BORDER_DEFAULT);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes the Perona and Malik conductivity coefficient
+ * g1: g1 = exp(-|dL|^2/k^2)
+ * @param Lx First order image derivative in X-direction (horizontal)
+ * @param Ly First order image derivative in Y-direction (vertical)
+ * @param dst Output image
+ * @param k Contrast factor parameter
+ */
+void pm_g1V2(const cv::Mat& Lx, const cv::Mat& Ly, cv::Mat& dst, float k) {
+ // Compute: dst = exp((Lx.mul(Lx) + Ly.mul(Ly)) / (-k * k))
+
+ const float neg_inv_k2 = -1.0f / (k * k);
+
+ const int total = Lx.rows * Lx.cols;
+ const float* lx = Lx.ptr<float>(0);
+ const float* ly = Ly.ptr<float>(0);
+ float* d = dst.ptr<float>(0);
+
+ for (int i = 0; i < total; i++)
+ d[i] = neg_inv_k2 * (lx[i] * lx[i] + ly[i] * ly[i]);
+
+ exp(dst, dst);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes the Perona and Malik conductivity coefficient
+ * g2: g2 = 1 / (1 + dL^2 / k^2)
+ * @param Lx First order image derivative in X-direction (horizontal)
+ * @param Ly First order image derivative in Y-direction (vertical)
+ * @param dst Output image
+ * @param k Contrast factor parameter
+ */
+void pm_g2V2(const cv::Mat& Lx, const cv::Mat& Ly, cv::Mat& dst, float k) {
+ // Compute: dst = 1.0f / (1.0f + ((Lx.mul(Lx) + Ly.mul(Ly)) / (k * k)) );
+
+ const float inv_k2 = 1.0f / (k * k);
+
+ const int total = Lx.rows * Lx.cols;
+ const float* lx = Lx.ptr<float>(0);
+ const float* ly = Ly.ptr<float>(0);
+ float* d = dst.ptr<float>(0);
+
+ for (int i = 0; i < total; i++)
+ d[i] = 1.0f / (1.0f + ((lx[i] * lx[i] + ly[i] * ly[i]) * inv_k2));
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes Weickert conductivity coefficient gw
+ * @param Lx First order image derivative in X-direction (horizontal)
+ * @param Ly First order image derivative in Y-direction (vertical)
+ * @param dst Output image
+ * @param k Contrast factor parameter
+ * @note For more information check the following paper: J. Weickert
+ * Applications of nonlinear diffusion in image processing and computer vision,
+ * Proceedings of Algorithmy 2000
+ */
+void weickert_diffusivityV2(const cv::Mat& Lx, const cv::Mat& Ly, cv::Mat& dst,
+ float k) {
+ // Compute: dst = 1.0f - exp(-3.315f / ((Lx.mul(Lx) + Ly.mul(Ly)) / (k *
+ // k))^4)
+
+ const float inv_k2 = 1.0f / (k * k);
+
+ const int total = Lx.rows * Lx.cols;
+ const float* lx = Lx.ptr<float>(0);
+ const float* ly = Ly.ptr<float>(0);
+ float* d = dst.ptr<float>(0);
+
+ for (int i = 0; i < total; i++) {
+ float dL = inv_k2 * (lx[i] * lx[i] + ly[i] * ly[i]);
+ d[i] = -3.315f / (dL * dL * dL * dL);
+ }
+
+ exp(dst, dst);
+
+ for (int i = 0; i < total; i++) d[i] = 1.0f - d[i];
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes Charbonnier conductivity coefficient gc
+ * gc = 1 / sqrt(1 + dL^2 / k^2)
+ * @param Lx First order image derivative in X-direction (horizontal)
+ * @param Ly First order image derivative in Y-direction (vertical)
+ * @param dst Output image
+ * @param k Contrast factor parameter
+ * @note For more information check the following paper: J. Weickert
+ * Applications of nonlinear diffusion in image processing and computer vision,
+ * Proceedings of Algorithmy 2000
+ */
+void charbonnier_diffusivityV2(const cv::Mat& Lx, const cv::Mat& Ly,
+ cv::Mat& dst, float k) {
+ // Compute: dst = 1.0f / sqrt(1.0f + (Lx.mul(Lx) + Ly.mul(Ly)) / (k * k))
+
+ const float inv_k2 = 1.0f / (k * k);
+
+ const int total = Lx.rows * Lx.cols;
+ const float* lx = Lx.ptr<float>(0);
+ const float* ly = Ly.ptr<float>(0);
+ float* d = dst.ptr<float>(0);
+
+ for (int i = 0; i < total; i++)
+ d[i] = 1.0f / sqrtf(1.0f + inv_k2 * (lx[i] * lx[i] + ly[i] * ly[i]));
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes a good empirical value for the k contrast
+ * factor given two gradient images, the percentile (0-1), the temporal storage
+ * to hold gradient norms and the histogram bins
+ * @param Lx Horizontal gradient of the input image
+ * @param Ly Vertical gradient of the input image
+ * @param perc Percentile of the image gradient histogram (0-1)
+ * @param modgs Temporal vector to hold the gradient norms
+ * @param histogram Temporal vector to hold the gradient histogram
+ * @return k contrast factor
+ */
+float compute_k_percentileV2(const cv::Mat& Lx, const cv::Mat& Ly, float perc,
+ cv::Mat& modgs, cv::Mat& histogram) {
+ const int total = modgs.cols;
+ const int nbins = histogram.cols;
+
+ CV_DbgAssert(total == (Lx.rows - 2) * (Lx.cols - 2));
+ CV_DbgAssert(nbins > 2);
+
+ float* modg = modgs.ptr<float>(0);
+ int32_t* hist = histogram.ptr<int32_t>(0);
+
+ for (int i = 1; i < Lx.rows - 1; i++) {
+ const float* lx = Lx.ptr<float>(i) + 1;
+ const float* ly = Ly.ptr<float>(i) + 1;
+ const int cols = Lx.cols - 2;
+
+ for (int j = 0; j < cols; j++)
+ *modg++ = sqrtf(lx[j] * lx[j] + ly[j] * ly[j]);
+ }
+ modg = modgs.ptr<float>(0);
+
+ // Get the maximum
+ float hmax = 0.0f;
+ for (int i = 0; i < total; i++)
+ if (hmax < modg[i]) hmax = modg[i];
+
+ if (hmax == 0.0f) return 0.03f; // e.g. a blank image
+
+ // Compute the bin numbers: the value range [0, hmax] -> [0, nbins-1]
+ for (int i = 0; i < total; i++) modg[i] *= (nbins - 1) / hmax;
+
+ // Count up
+ std::memset(hist, 0, sizeof(int32_t) * nbins);
+ for (int i = 0; i < total; i++) hist[(int)modg[i]]++;
+
+ // Now find the bin corresponding to the perc percentile of the histogram
+ const int nthreshold =
+ (int)((total - hist[0]) * perc); // Exclude hist[0] as background
+ int nelements = 0;
+ for (int k = 1; k < nbins; k++) {
+ if (nelements >= nthreshold) return (float)hmax * k / nbins;
+
+ nelements = nelements + hist[k];
+ }
+
+ return 0.03f;
+}
+
+/* ************************************************************************* */
+/**
+ * @brief Compute Scharr derivative kernels for sizes different than 3
+ * @param _kx Horizontal kernel values
+ * @param _ky Vertical kernel values
+ * @param dx Derivative order in X-direction (horizontal)
+ * @param dy Derivative order in Y-direction (vertical)
+ * @param scale Scale factor or derivative size
+ */
+void compute_scharr_derivative_kernelsV2(cv::OutputArray _kx,
+ cv::OutputArray _ky, int dx, int dy,
+ int scale) {
+ int ksize = 3 + 2 * (scale - 1);
+
+ // The standard Scharr kernel
+ if (scale == 1) {
+ getDerivKernels(_kx, _ky, dx, dy, FILTER_SCHARR, true, CV_32F);
+ return;
+ }
+
+ _kx.create(ksize, 1, CV_32F, -1, true);
+ _ky.create(ksize, 1, CV_32F, -1, true);
+ Mat kx = _kx.getMat();
+ Mat ky = _ky.getMat();
+
+ float w = 10.0f / 3.0f;
+ float norm = 1.0f / (2.0f * (w + 2.0f));
+
+ std::vector<float> kerI(ksize, 0.0f);
+
+ if (dx == 0) {
+ kerI[0] = norm, kerI[ksize / 2] = w * norm, kerI[ksize - 1] = norm;
+ } else if (dx == 1) {
+ kerI[0] = -1, kerI[ksize / 2] = 0, kerI[ksize - 1] = 1;
+ }
+ Mat(kx.rows, kx.cols, CV_32F, &kerI[0]).copyTo(kx);
+
+ kerI.assign(ksize, 0.0f);
+
+ if (dy == 0) {
+ kerI[0] = norm, kerI[ksize / 2] = w * norm, kerI[ksize - 1] = norm;
+ } else if (dy == 1) {
+ kerI[0] = -1, kerI[ksize / 2] = 0, kerI[ksize - 1] = 1;
+ }
+ Mat(ky.rows, ky.cols, CV_32F, &kerI[0]).copyTo(ky);
+}
+
+inline void nld_step_scalar_one_lane(const cv::Mat& Lt, const cv::Mat& Lf,
+ cv::Mat& Lstep, int idx, int skip) {
+ /* The labeling scheme for this five-point star stencil:
+ [ a ]
+ [ -1 c +1 ]
+ [ b ]
+ */
+
+ const int cols = Lt.cols - 2;
+ int row = idx;
+
+ const float *lt_a, *lt_c, *lt_b;
+ const float *lf_a, *lf_c, *lf_b;
+ float* dst;
+
+ // Process the top row
+ if (row == 0) {
+ lt_c = Lt.ptr<float>(0) + 1; /* Skip the left-most column by +1 */
+ lf_c = Lf.ptr<float>(0) + 1;
+ lt_b = Lt.ptr<float>(1) + 1;
+ lf_b = Lf.ptr<float>(1) + 1;
+ dst = Lstep.ptr<float>(0) + 1;
+
+ for (int j = 0; j < cols; j++) {
+ dst[j] = (lf_c[j] + lf_c[j + 1]) * (lt_c[j + 1] - lt_c[j]) +
+ (lf_c[j] + lf_c[j - 1]) * (lt_c[j - 1] - lt_c[j]) +
+ (lf_c[j] + lf_b[j]) * (lt_b[j] - lt_c[j]);
+ }
+ row += skip;
+ }
+
+ // Process the middle rows
+ for (; row < Lt.rows - 1; row += skip) {
+ lt_a = Lt.ptr<float>(row - 1);
+ lf_a = Lf.ptr<float>(row - 1);
+ lt_c = Lt.ptr<float>(row);
+ lf_c = Lf.ptr<float>(row);
+ lt_b = Lt.ptr<float>(row + 1);
+ lf_b = Lf.ptr<float>(row + 1);
+ dst = Lstep.ptr<float>(row);
+
+ // The left-most column
+ dst[0] = (lf_c[0] + lf_c[1]) * (lt_c[1] - lt_c[0]) +
+ (lf_c[0] + lf_b[0]) * (lt_b[0] - lt_c[0]) +
+ (lf_c[0] + lf_a[0]) * (lt_a[0] - lt_c[0]);
+
+ lt_a++;
+ lt_c++;
+ lt_b++;
+ lf_a++;
+ lf_c++;
+ lf_b++;
+ dst++;
+
+ // The middle columns
+ for (int j = 0; j < cols; j++) {
+ dst[j] = (lf_c[j] + lf_c[j + 1]) * (lt_c[j + 1] - lt_c[j]) +
+ (lf_c[j] + lf_c[j - 1]) * (lt_c[j - 1] - lt_c[j]) +
+ (lf_c[j] + lf_b[j]) * (lt_b[j] - lt_c[j]) +
+ (lf_c[j] + lf_a[j]) * (lt_a[j] - lt_c[j]);
+ }
+
+ // The right-most column
+ dst[cols] = (lf_c[cols] + lf_c[cols - 1]) * (lt_c[cols - 1] - lt_c[cols]) +
+ (lf_c[cols] + lf_b[cols]) * (lt_b[cols] - lt_c[cols]) +
+ (lf_c[cols] + lf_a[cols]) * (lt_a[cols] - lt_c[cols]);
+ }
+
+ // Process the bottom row
+ if (row == Lt.rows - 1) {
+ lt_a = Lt.ptr<float>(row - 1) + 1; /* Skip the left-most column by +1 */
+ lf_a = Lf.ptr<float>(row - 1) + 1;
+ lt_c = Lt.ptr<float>(row) + 1;
+ lf_c = Lf.ptr<float>(row) + 1;
+ dst = Lstep.ptr<float>(row) + 1;
+
+ for (int j = 0; j < cols; j++) {
+ dst[j] = (lf_c[j] + lf_c[j + 1]) * (lt_c[j + 1] - lt_c[j]) +
+ (lf_c[j] + lf_c[j - 1]) * (lt_c[j - 1] - lt_c[j]) +
+ (lf_c[j] + lf_a[j]) * (lt_a[j] - lt_c[j]);
+ }
+ }
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes a scalar non-linear diffusion step
+ * @param Ld Base image in the evolution
+ * @param c Conductivity image
+ * @param Lstep Output image that gives the difference between the current
+ * Ld and the next Ld being evolved
+ * @note Forward Euler Scheme 3x3 stencil
+ * The function c is a scalar value that depends on the gradient norm
+ * dL_by_ds = d(c dL_by_dx)_by_dx + d(c dL_by_dy)_by_dy
+ */
+void nld_step_scalarV2(const cv::Mat& Ld, const cv::Mat& c, cv::Mat& Lstep) {
+ nld_step_scalar_one_lane(Ld, c, Lstep, 0, 1);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function downsamples the input image using OpenCV resize
+ * @param src Input image to be downsampled
+ * @param dst Output image with half of the resolution of the input image
+ */
+void halfsample_imageV2(const cv::Mat& src, cv::Mat& dst) {
+ // Make sure the destination image is of the right size
+ CV_Assert(src.cols / 2 == dst.cols);
+ CV_Assert(src.rows / 2 == dst.rows);
+ resize(src, dst, dst.size(), 0, 0, cv::INTER_AREA);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function checks if a given pixel is a maximum in a local
+ * neighbourhood
+ * @param img Input image where we will perform the maximum search
+ * @param dsize Half size of the neighbourhood
+ * @param value Response value at (x,y) position
+ * @param row Image row coordinate
+ * @param col Image column coordinate
+ * @param same_img Flag to indicate if the image value at (x,y) is in the input
+ * image
+ * @return 1->is maximum, 0->otherwise
+ */
+bool check_maximum_neighbourhoodV2(const cv::Mat& img, int dsize, float value,
+ int row, int col, bool same_img) {
+ bool response = true;
+
+ for (int i = row - dsize; i <= row + dsize; i++) {
+ for (int j = col - dsize; j <= col + dsize; j++) {
+ if (i >= 0 && i < img.rows && j >= 0 && j < img.cols) {
+ if (same_img == true) {
+ if (i != row || j != col) {
+ if ((*(img.ptr<float>(i) + j)) > value) {
+ response = false;
+ return response;
+ }
+ }
+ } else {
+ if ((*(img.ptr<float>(i) + j)) > value) {
+ response = false;
+ return response;
+ }
+ }
+ }
+ }
+ }
+
+ return response;
+}
+
+} // namespace cv
\ No newline at end of file
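
To make the division of labor around nld_step_scalarV2() explicit: Lstep receives only the unscaled divergence sum, so the caller applies the step size. Below is a hedged sketch of one explicit diffusion iteration under that assumption; the helper name, the g2 conductivity choice, and the 0.5f * tau caller-side scaling follow the reference AKAZE flow but are illustrative here.

```cpp
// A hedged sketch of one explicit diffusion iteration; the helper name, the
// g2 conductivity choice, and the 0.5f * tau caller-side scaling follow the
// reference AKAZE flow but are assumptions here, not code from this patch.
#include <opencv2/core.hpp>

#include "nldiffusion_functions.h"

// Lt is the CV_32F evolution image; tau would come from the fed_tau_*V2()
// helpers; contrast_k from compute_k_percentileV2().
static void diffusion_step(cv::Mat& Lt, float contrast_k, float tau) {
  cv::Mat Lsmooth, Lx, Ly;
  cv::Mat Lflow(Lt.size(), CV_32F);
  cv::Mat Lstep(Lt.size(), CV_32F);

  // Derivatives are taken on a lightly smoothed copy, as in KAZE/AKAZE.
  cv::gaussian_2D_convolutionV2(Lt, Lsmooth, 0, 0, 1.0f);
  cv::image_derivatives_scharrV2(Lsmooth, Lx, 1, 0);
  cv::image_derivatives_scharrV2(Lsmooth, Ly, 0, 1);

  // Conductivity image, then the unscaled divergence written into Lstep.
  cv::pm_g2V2(Lx, Ly, Lflow, contrast_k);
  cv::nld_step_scalarV2(Lt, Lflow, Lstep);

  // The caller applies the step size; the 0.5 factor accounts for the stencil
  // summing (rather than averaging) neighbouring conductivities.
  Lt += 0.5f * tau * Lstep;
}
```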
diff --git a/third_party/akaze/nldiffusion_functions.h b/third_party/akaze/nldiffusion_functions.h
new file mode 100644
index 0000000..67d5640
--- /dev/null
+++ b/third_party/akaze/nldiffusion_functions.h
@@ -0,0 +1,55 @@
+/**
+ * @file nldiffusion_functions.h
+ * @brief Functions for non-linear diffusion applications:
+ * 2D Gaussian Derivatives
+ * Perona and Malik conductivity equations
+ * Perona and Malik evolution
+ * @date Dec 27, 2011
+ * @author Pablo F. Alcantarilla
+ */
+
+#ifndef __OPENCV_FEATURES_2D_NLDIFFUSION_FUNCTIONS_H__
+#define __OPENCV_FEATURES_2D_NLDIFFUSION_FUNCTIONS_H__
+
+/* ************************************************************************* */
+// Declaration of functions
+
+#include <opencv2/core.hpp>
+
+namespace cv {
+
+// Gaussian 2D convolution
+void gaussian_2D_convolutionV2(const cv::Mat& src, cv::Mat& dst, int ksize_x,
+ int ksize_y, float sigma);
+
+// Diffusivity functions
+void pm_g1V2(const cv::Mat& Lx, const cv::Mat& Ly, cv::Mat& dst, float k);
+void pm_g2V2(const cv::Mat& Lx, const cv::Mat& Ly, cv::Mat& dst, float k);
+void weickert_diffusivityV2(const cv::Mat& Lx, const cv::Mat& Ly, cv::Mat& dst,
+ float k);
+void charbonnier_diffusivityV2(const cv::Mat& Lx, const cv::Mat& Ly,
+ cv::Mat& dst, float k);
+
+float compute_k_percentileV2(const cv::Mat& Lx, const cv::Mat& Ly, float perc,
+ cv::Mat& modgs, cv::Mat& hist);
+
+// Image derivatives
+void compute_scharr_derivative_kernelsV2(cv::OutputArray _kx,
+ cv::OutputArray _ky, int dx, int dy,
+ int scale);
+void image_derivatives_scharrV2(const cv::Mat& src, cv::Mat& dst, int xorder,
+ int yorder);
+
+// Nonlinear diffusion filtering scalar step
+void nld_step_scalarV2(const cv::Mat& Ld, const cv::Mat& c, cv::Mat& Lstep);
+
+// For non-maxima suppression
+bool check_maximum_neighbourhoodV2(const cv::Mat& img, int dsize, float value,
+ int row, int col, bool same_img);
+
+// Image downsampling
+void halfsample_imageV2(const cv::Mat& src, cv::Mat& dst);
+
+} // namespace cv
+
+#endif
\ No newline at end of file
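
One easy-to-miss contract in this header is that compute_k_percentileV2() does not allocate its scratch space: modgs must hold one float per interior pixel and the histogram row fixes the bin count. A hedged sizing sketch follows; the 300-bin histogram and 0.7 percentile mirror the reference KAZE/AKAZE settings and are assumptions here.

```cpp
// A hedged sizing sketch; the 300-bin histogram and the 0.7 percentile follow
// the reference KAZE/AKAZE settings and are assumptions here.
#include <opencv2/core.hpp>

#include "nldiffusion_functions.h"

// Estimate the contrast factor k from the gradients of a smoothed image.
static float estimate_k(const cv::Mat& Lsmooth) {
  cv::Mat Lx, Ly;
  cv::image_derivatives_scharrV2(Lsmooth, Lx, 1, 0);
  cv::image_derivatives_scharrV2(Lsmooth, Ly, 0, 1);

  // The function asserts modgs.cols == (rows - 2) * (cols - 2): one gradient
  // norm per interior pixel, and a single-row int32 histogram sets nbins.
  const int interior = (Lsmooth.rows - 2) * (Lsmooth.cols - 2);
  cv::Mat modgs(1, interior, CV_32F);
  cv::Mat histogram(1, 300, CV_32S);

  return cv::compute_k_percentileV2(Lx, Ly, 0.7f, modgs, histogram);
}
```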
diff --git a/third_party/akaze/utils.h b/third_party/akaze/utils.h
new file mode 100644
index 0000000..fe17305
--- /dev/null
+++ b/third_party/akaze/utils.h
@@ -0,0 +1,67 @@
+#ifndef __OPENCV_FEATURES_2D_KAZE_UTILS_H__
+#define __OPENCV_FEATURES_2D_KAZE_UTILS_H__
+
+#include <opencv2/core/cvdef.h>
+
+#include <cmath>
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes the angle of the vector given by (x, y),
+ * mapped to the range [0, 2*Pi)
+ */
+inline float getAngleV2(float x, float y) {
+ float theta = atan2f(y, x);
+
+ if (theta >= 0)
+ return theta;
+ else
+ return theta + static_cast<float>(2.0f * CV_PI);
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function computes the value of a 2D Gaussian function
+ * @param x X Position
+ * @param y Y Position
+ * @param sigma Standard deviation
+ */
+inline float gaussianV2(float x, float y, float sigma) {
+ return expf(-(x * x + y * y) / (2.0f * sigma * sigma));
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function checks descriptor limits
+ * @param x X Position
+ * @param y Y Position
+ * @param width Image width
+ * @param height Image height
+ */
+inline void checkDescriptorLimitsV2(int &x, int &y, int width, int height) {
+ if (x < 0) {
+ x = 0;
+ }
+
+ if (y < 0) {
+ y = 0;
+ }
+
+ if (x > width - 1) {
+ x = width - 1;
+ }
+
+ if (y > height - 1) {
+ y = height - 1;
+ }
+}
+
+/* ************************************************************************* */
+/**
+ * @brief This function rounds float to nearest integer
+ * @param flt Input float
+ * @return dst Nearest integer
+ */
+inline int fRoundV2(float flt) { return (int)(flt + 0.5f); }
+
+#endif
\ No newline at end of file
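
A few asserts summarize the contracts of these helpers; the concrete values below are purely illustrative.

```cpp
// Purely illustrative values; asserts just restate the helpers' contracts.
#include <cassert>
#include <cmath>

#include "utils.h"

int main() {
  // getAngleV2 maps atan2's (-pi, pi] output onto [0, 2*pi).
  assert(std::fabs(getAngleV2(0.0f, -1.0f) -
                   3.0f * static_cast<float>(CV_PI) / 2.0f) < 1e-4f);

  // fRoundV2 is round-half-up for the non-negative values it is applied to.
  assert(fRoundV2(2.5f) == 3);

  // checkDescriptorLimitsV2 clamps a sample position into the image bounds.
  int x = -3, y = 640;
  checkDescriptorLimitsV2(x, y, /*width=*/480, /*height=*/360);
  assert(x == 0 && y == 359);
  return 0;
}
```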
diff --git a/third_party/autocxx/.github/workflows/ci.yml b/third_party/autocxx/.github/workflows/ci.yml
index 3eb54d1..367b729 100644
--- a/third_party/autocxx/.github/workflows/ci.yml
+++ b/third_party/autocxx/.github/workflows/ci.yml
@@ -139,6 +139,9 @@
- name: Build non-trivial-type-on-stack example
working-directory: ./examples/non-trivial-type-on-stack
run: cargo build
+ - name: Build reference-wrappers example
+ working-directory: ./examples/reference-wrappers
+ run: cargo build
# We do not build the LLVM example because even 'apt-get install llvm-13-dev'
# does not work to install the LLVM 13 headers.
diff --git a/third_party/autocxx/.github/workflows/site.yml b/third_party/autocxx/.github/workflows/site.yml
index 0dd4237..e1951c8 100644
--- a/third_party/autocxx/.github/workflows/site.yml
+++ b/third_party/autocxx/.github/workflows/site.yml
@@ -27,9 +27,7 @@
run: cargo install mdbook-linkcheck
- name: Install mdbook-mermaid
- run: |
- curl -LSfs https://japaric.github.io/trust/install.sh | \
- sh -s -- --git badboy/mdbook-mermaid
+ run: cargo install mdbook-mermaid
- run: mdbook build
working-directory: book
diff --git a/third_party/autocxx/Cargo.lock b/third_party/autocxx/Cargo.lock
index a5bfc0a..4f58182 100644
--- a/third_party/autocxx/Cargo.lock
+++ b/third_party/autocxx/Cargo.lock
@@ -87,7 +87,7 @@
[[package]]
name = "autocxx"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"aquamarine",
"autocxx-macro",
@@ -97,9 +97,9 @@
[[package]]
name = "autocxx-bindgen"
-version = "0.59.16"
+version = "0.59.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "435723e14bf88f198322f8555a4fdb108363021d97a47bb6492891ca86055e79"
+checksum = "f9a9a26dd38d385d23b1bf61bd231b77f690c4368aef4c77cee1b7a6da2e2042"
dependencies = [
"bitflags",
"cexpr",
@@ -121,7 +121,7 @@
[[package]]
name = "autocxx-build"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"autocxx-engine",
"env_logger 0.9.0",
@@ -131,7 +131,7 @@
[[package]]
name = "autocxx-demo"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"autocxx",
"autocxx-build",
@@ -141,7 +141,7 @@
[[package]]
name = "autocxx-engine"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"aquamarine",
"autocxx-bindgen",
@@ -167,7 +167,7 @@
[[package]]
name = "autocxx-gen"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"assert_cmd",
"autocxx",
@@ -181,12 +181,12 @@
"miette",
"pathdiff",
"proc-macro2",
- "tempdir",
+ "tempfile",
]
[[package]]
name = "autocxx-integration-tests"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"autocxx",
"autocxx-engine",
@@ -209,7 +209,7 @@
[[package]]
name = "autocxx-macro"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"autocxx-parser",
"proc-macro-error",
@@ -220,7 +220,7 @@
[[package]]
name = "autocxx-mdbook-preprocessor"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"anyhow",
"autocxx-integration-tests",
@@ -238,7 +238,7 @@
[[package]]
name = "autocxx-parser"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"indexmap",
"itertools 0.10.3",
@@ -254,7 +254,7 @@
[[package]]
name = "autocxx-reduce"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"assert_cmd",
"autocxx-engine",
@@ -271,7 +271,6 @@
"serde_derive",
"serde_json",
"syn",
- "tempdir",
"tempfile",
]
@@ -491,9 +490,9 @@
[[package]]
name = "cxx"
-version = "1.0.66"
+version = "1.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ce2295fe8865279f404147e9b2328e5af0ad11a2c016e58c13acfd48a07d8a55"
+checksum = "7e599641dff337570f6aa9c304ecca92341d30bf72e1c50287869ed6a36615a6"
dependencies = [
"cc",
"cxxbridge-flags",
@@ -503,9 +502,9 @@
[[package]]
name = "cxx-gen"
-version = "0.7.66"
+version = "0.7.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bba249c8ea90cff9c647cd76928efc82f1e8684da0a6cedb4416cc79478050d"
+checksum = "1e2c726d93799c3129c65224ab09eae1a31276bc593d4f7344be1c592c16a1ec"
dependencies = [
"codespan-reporting",
"proc-macro2",
@@ -515,15 +514,15 @@
[[package]]
name = "cxxbridge-flags"
-version = "1.0.66"
+version = "1.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0a670224c6686471df12560a0b97a08145082e70bd38e2b0b5383b79e46c3da7"
+checksum = "3894ad0c6d517cb5a4ce8ec20b37cd0ea31b480fe582a104c5db67ae21270853"
[[package]]
name = "cxxbridge-macro"
-version = "1.0.66"
+version = "1.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b700096ca0dece28d9535fdb17ab784a8ae155d7f29d39c273643e6292c9620"
+checksum = "34fa7e395dc1c001083c7eed28c8f0f0b5a225610f3b6284675f444af6fab86b"
dependencies = [
"proc-macro2",
"quote",
@@ -610,12 +609,6 @@
]
[[package]]
-name = "fuchsia-cprng"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
-
-[[package]]
name = "gag"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1135,34 +1128,6 @@
]
[[package]]
-name = "rand"
-version = "0.4.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293"
-dependencies = [
- "fuchsia-cprng",
- "libc",
- "rand_core 0.3.1",
- "rdrand",
- "winapi",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
-dependencies = [
- "rand_core 0.4.2",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
-
-[[package]]
name = "rayon"
version = "1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1187,15 +1152,6 @@
]
[[package]]
-name = "rdrand"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
-dependencies = [
- "rand_core 0.3.1",
-]
-
-[[package]]
name = "redox_syscall"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1392,16 +1348,6 @@
]
[[package]]
-name = "tempdir"
-version = "0.3.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8"
-dependencies = [
- "rand",
- "remove_dir_all",
-]
-
-[[package]]
name = "tempfile"
version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/third_party/autocxx/Cargo.toml b/third_party/autocxx/Cargo.toml
index 7d34f78..a741ece 100644
--- a/third_party/autocxx/Cargo.toml
+++ b/third_party/autocxx/Cargo.toml
@@ -8,7 +8,7 @@
[package]
name = "autocxx"
-version = "0.22.0"
+version = "0.22.3"
authors = ["Adrian Taylor <adetaylor@chromium.org>"]
license = "MIT OR Apache-2.0"
description = "Safe autogenerated interop between Rust and C++"
@@ -25,8 +25,8 @@
resolver = "2"
[dependencies]
-autocxx-macro = { path="macro", version="0.22.0" }
-cxx = "1.0.54" # ... also needed because expansion of type_id refers to ::cxx
+autocxx-macro = { path="macro", version="0.22.3" }
+cxx = "1.0.68" # ... also needed because expansion of type_id refers to ::cxx
aquamarine = "0.1" # docs
moveit = { version = "0.5", features = [ "cxx" ] }
diff --git a/third_party/autocxx/book/src/contributing.md b/third_party/autocxx/book/src/contributing.md
index 1f00bc9..e95ca5a 100644
--- a/third_party/autocxx/book/src/contributing.md
+++ b/third_party/autocxx/book/src/contributing.md
@@ -80,7 +80,7 @@
order of preference here's how we would like to hear about your problem:
* Raise a pull request adding a new failing integration test to
- `engine/src/integration_tests.rs`.
+ [`integration_test.rs`](https://github.com/google/autocxx/blob/main/integration-tests/tests/integration_test.rs)
* Minimize the test using `tools/reduce`, something like this:
`target/debug/autocxx-reduce file -d "safety!(unsafe_ffi)" -d
'generate_pod!("A")' -I ~/my-include-dir -h my-header.h -p
diff --git a/third_party/autocxx/book/src/references_etc.md b/third_party/autocxx/book/src/references_etc.md
index a4b6d09..95456dc 100644
--- a/third_party/autocxx/book/src/references_etc.md
+++ b/third_party/autocxx/book/src/references_etc.md
@@ -39,12 +39,13 @@
Exactly the same issues apply to C++ references _in theory_, but in practice,
they usually don't. Therefore [`cxx`](https://cxx.rs) has taken the view that we can "trust"
a C++ reference to a higher degree than a pointer, and autocxx follows that
-lead. In practice, of course, references are rarely return values from C++
+lead (in fact we 'trust' references even slightly more than cxx).
+In practice, of course, references are rarely return values from C++
APIs so we rarely have to navel-gaze about the trustworthiness of a
reference.
(See also the discussion of [`safety`](safety.md) - if you haven't specified
-an unsafety policy, _all_ C++ APIs require `unsafe` so the discussion is moot.)
+an unsafety policy, _all_ C++ APIs require `unsafe` so the discussion is moot.
If you're given a C++ object by pointer, and you want to interact with it,
you'll need to figure out the guarantees attached to the C++ object - most
diff --git a/third_party/autocxx/book/src/safety.md b/third_party/autocxx/book/src/safety.md
index 34724d2..e4c858a 100644
--- a/third_party/autocxx/book/src/safety.md
+++ b/third_party/autocxx/book/src/safety.md
@@ -59,7 +59,6 @@
)
```
-
## Pragmatism in a complex C++ codebase
This crate mostly intends to follow the lead of the `cxx` crate in where and when `unsafe` is required. But, this crate is opinionated. It believes some unsafety requires more careful review than other bits, along the following spectrum:
@@ -89,3 +88,12 @@
* It doesn't delete it.
* or any of the other things that you're not permitted to do in unsafe Rust.
+## Soundness
+
+This crate shares the general approach to safety and soundness pioneered by cxx, but has two important differences:
+
+* cxx requires you to specify your interface in detail, and thus think through all aspects of the language boundary. autocxx doesn't, and may autogenerate footguns.
+* cxx may allow multiple conflicting Rust references to exist to 'trivial' data types ("plain old data" or POD in autocxx parlance), but they're rare. autocxx may allow conflicting Rust references to exist even to 'opaque' (non-POD) data, and they're more common. This difference exists because opaque data is zero-sized in cxx, and zero-sized references cannot conflict. (In autocxx, we tell Rust about the size in order that we can allocate such types on the stack.)
+
+There are preliminary explorations to avoid this problem by using a C++ reference wrapper type. See `examples/reference-wrappers`.
+
diff --git a/third_party/autocxx/book/src/tutorial.md b/third_party/autocxx/book/src/tutorial.md
index 3bedb26..3e8e5b2 100644
--- a/third_party/autocxx/book/src/tutorial.md
+++ b/third_party/autocxx/book/src/tutorial.md
@@ -15,15 +15,17 @@
The rest of this 'getting started' section assumes Cargo - if you're using something else, see the [building](building.md) section.
-First, add `autocxx` *and `cxx`* to your `dependencies` and `autocxx-build` to your `build-dependencies` in your `Cargo.toml`.
+First, add `autocxx` *and `cxx`* to your `dependencies` and `autocxx-build` to your `build-dependencies` in your `Cargo.toml`. **You must specify both.**
+
+
```toml
[dependencies]
-autocxx = "0.22.0"
+autocxx = "0.22.3"
cxx = "1.0"
[build-dependencies]
-autocxx-build = "0.22.0"
+autocxx-build = "0.22.3"
miette = { version="4.3", features=["fancy"] } # optional but gives nicer error messages!
```
diff --git a/third_party/autocxx/demo/Cargo.toml b/third_party/autocxx/demo/Cargo.toml
index c6a7227..e81cb80 100644
--- a/third_party/autocxx/demo/Cargo.toml
+++ b/third_party/autocxx/demo/Cargo.toml
@@ -8,14 +8,14 @@
[package]
name = "autocxx-demo"
-version = "0.22.0"
+version = "0.22.3"
authors = ["Adrian Taylor <adetaylor@chromium.org>"]
edition = "2021"
[dependencies]
-cxx = "1.0.54"
-autocxx = { path = "..", version="0.22.0" }
+cxx = "1.0.68"
+autocxx = { path = "..", version="0.22.3" }
[build-dependencies]
-autocxx-build = { path = "../gen/build", version="0.22.0" }
+autocxx-build = { path = "../gen/build", version="0.22.3" }
miette = { version="4.3", features=["fancy"]}
diff --git a/third_party/autocxx/engine/Cargo.toml b/third_party/autocxx/engine/Cargo.toml
index 964108e..1400847 100644
--- a/third_party/autocxx/engine/Cargo.toml
+++ b/third_party/autocxx/engine/Cargo.toml
@@ -8,7 +8,7 @@
[package]
name = "autocxx-engine"
-version = "0.22.0"
+version = "0.22.3"
authors = ["Adrian Taylor <adetaylor@chromium.org>"]
license = "MIT OR Apache-2.0"
description = "Safe autogenerated interop between Rust and C++"
@@ -30,15 +30,15 @@
proc-macro2 = "1.0.11"
quote = "1.0"
indoc = "1.0"
-autocxx-bindgen = "=0.59.16"
+autocxx-bindgen = "=0.59.17"
#autocxx-bindgen = { git = "https://github.com/adetaylor/rust-bindgen", branch = "pollute-fewer-typedefs" }
itertools = "0.10.3"
cc = { version = "1.0", optional = true }
# Note: Keep the patch-level version of cxx-gen and cxx in sync.
# There can be interdependencies between the code generated by cxx-gen and
# what cxx expects to be there.
-cxx-gen = "0.7.54"
-autocxx-parser = { version = "=0.22.0", path="../parser" }
+cxx-gen = "0.7.68"
+autocxx-parser = { version = "=0.22.3", path="../parser" }
version_check = "0.9"
aquamarine = "0.1" # docs
tempfile = "3.1"
diff --git a/third_party/autocxx/engine/src/conversion/analysis/fun/function_wrapper.rs b/third_party/autocxx/engine/src/conversion/analysis/fun/function_wrapper.rs
index f2f6aaa..ab3b7d9 100644
--- a/third_party/autocxx/engine/src/conversion/analysis/fun/function_wrapper.rs
+++ b/third_party/autocxx/engine/src/conversion/analysis/fun/function_wrapper.rs
@@ -10,7 +10,8 @@
conversion::api::SubclassName,
types::{Namespace, QualifiedName},
};
-use syn::{parse_quote, Ident, Type};
+use quote::ToTokens;
+use syn::{parse_quote, Ident, Type, TypeReference};
#[derive(Clone, Debug)]
pub(crate) enum CppConversionType {
@@ -23,6 +24,8 @@
/// Ignored in the sense that it isn't passed into the C++ function.
IgnoredPlacementPtrParameter,
FromReturnValueToPlacementPtr,
+ FromPointerToReference, // unwrapped_type is always Type::Ptr
+ FromReferenceToPointer, // unwrapped_type is always Type::Ptr
}
impl CppConversionType {
@@ -36,6 +39,8 @@
CppConversionType::FromValueToUniquePtr
}
CppConversionType::FromValueToUniquePtr => CppConversionType::FromUniquePtrToValue,
+ CppConversionType::FromPointerToReference => CppConversionType::FromReferenceToPointer,
+ CppConversionType::FromReferenceToPointer => CppConversionType::FromPointerToReference,
_ => panic!("Did not expect to have to invert this conversion"),
}
}
@@ -52,6 +57,8 @@
FromValueParamToPtr,
FromPlacementParamToNewReturn,
FromRValueParamToPtr,
+ FromReferenceWrapperToPointer, // unwrapped_type is always Type::Ptr
+ FromPointerToReferenceWrapper, // unwrapped_type is always Type::Ptr
}
impl RustConversionType {
@@ -74,19 +81,53 @@
/// * Finally, the actual C++ API receives a `std::string` by value.
/// The implementation here is distributed across this file, and
/// `function_wrapper_rs` and `function_wrapper_cpp`.
+/// TODO: we should make this into a single enum, with the Type as enum
+/// variant params. That would remove the possibility of various runtime
+/// panics by enforcing (for example) that conversion from a pointer always
+/// has a Type::Ptr.
#[derive(Clone)]
pub(crate) struct TypeConversionPolicy {
- pub(crate) unwrapped_type: Type,
+ unwrapped_type: Type,
pub(crate) cpp_conversion: CppConversionType,
pub(crate) rust_conversion: RustConversionType,
}
impl TypeConversionPolicy {
pub(crate) fn new_unconverted(ty: Type) -> Self {
- TypeConversionPolicy {
+ Self::new(ty, CppConversionType::None, RustConversionType::None)
+ }
+
+ pub(crate) fn new(
+ ty: Type,
+ cpp_conversion: CppConversionType,
+ rust_conversion: RustConversionType,
+ ) -> Self {
+ Self {
unwrapped_type: ty,
- cpp_conversion: CppConversionType::None,
- rust_conversion: RustConversionType::None,
+ cpp_conversion,
+ rust_conversion,
+ }
+ }
+
+ pub(crate) fn cxxbridge_type(&self) -> &Type {
+ &self.unwrapped_type
+ }
+
+ pub(crate) fn return_reference_into_wrapper(ty: Type) -> Self {
+ let (unwrapped_type, is_mut) = match ty {
+ Type::Reference(TypeReference {
+ elem, mutability, ..
+ }) => (*elem, mutability.is_some()),
+ _ => panic!("Not a ptr: {}", ty.to_token_stream()),
+ };
+ TypeConversionPolicy {
+ unwrapped_type: if is_mut {
+ parse_quote! { *mut #unwrapped_type }
+ } else {
+ parse_quote! { *const #unwrapped_type }
+ },
+ cpp_conversion: CppConversionType::FromReferenceToPointer,
+ rust_conversion: RustConversionType::FromPointerToReferenceWrapper,
}
}
@@ -161,6 +202,8 @@
RustConversionType::FromValueParamToPtr
| RustConversionType::FromRValueParamToPtr
| RustConversionType::FromPlacementParamToNewReturn
+ | RustConversionType::FromPointerToReferenceWrapper { .. }
+ | RustConversionType::FromReferenceWrapperToPointer { .. }
)
}
diff --git a/third_party/autocxx/engine/src/conversion/analysis/fun/mod.rs b/third_party/autocxx/engine/src/conversion/analysis/fun/mod.rs
index 19340f9..7194746 100644
--- a/third_party/autocxx/engine/src/conversion/analysis/fun/mod.rs
+++ b/third_party/autocxx/engine/src/conversion/analysis/fun/mod.rs
@@ -41,7 +41,7 @@
use quote::quote;
use syn::{
parse_quote, punctuated::Punctuated, token::Comma, FnArg, Ident, Pat, ReturnType, Type,
- TypePtr, Visibility,
+ TypePath, TypePtr, TypeReference, Visibility,
};
use crate::{
@@ -183,7 +183,7 @@
pub(crate) is_placement_return_destination: bool,
}
-struct ReturnTypeAnalysis {
+pub(crate) struct ReturnTypeAnalysis {
rt: ReturnType,
conversion: Option<TypeConversionPolicy>,
was_reference: bool,
@@ -270,7 +270,7 @@
}
pub(crate) struct FnAnalyzer<'a> {
- unsafe_policy: UnsafePolicy,
+ unsafe_policy: &'a UnsafePolicy,
extra_apis: ApiVec<NullPhase>,
type_converter: TypeConverter<'a>,
bridge_name_tracker: BridgeNameTracker,
@@ -288,7 +288,7 @@
impl<'a> FnAnalyzer<'a> {
pub(crate) fn analyze_functions(
apis: ApiVec<PodPhase>,
- unsafe_policy: UnsafePolicy,
+ unsafe_policy: &'a UnsafePolicy,
config: &'a IncludeCppConfig,
) -> ApiVec<FnPrePhase2> {
let mut me = Self {
@@ -476,7 +476,9 @@
UnsafetyNeeded::Always => UnsafetyNeeded::JustBridge,
_ => unsafest_param,
},
- _ if self.unsafe_policy == UnsafePolicy::AllFunctionsUnsafe => UnsafetyNeeded::Always,
+ _ if matches!(self.unsafe_policy, UnsafePolicy::AllFunctionsUnsafe) => {
+ UnsafetyNeeded::Always
+ }
_ => match unsafest_non_placement_param {
UnsafetyNeeded::Always => UnsafetyNeeded::Always,
UnsafetyNeeded::JustBridge => match unsafest_param {
@@ -638,6 +640,7 @@
receiver_mutability,
sup,
subclass_fn_deps,
+ self.unsafe_policy,
));
// Create the trait item for the <superclass>_methods and <superclass>_supers
@@ -655,6 +658,7 @@
receiver_mutability,
sup.clone(),
is_pure_virtual,
+ self.unsafe_policy,
));
}
}
@@ -836,7 +840,7 @@
let arg_is_reference = matches!(
param_details
.get(1)
- .map(|param| ¶m.conversion.unwrapped_type),
+ .map(|param| param.conversion.cxxbridge_type()),
Some(Type::Reference(_))
);
// Some exotic forms of copy constructor have const and/or volatile qualifiers.
@@ -1209,6 +1213,11 @@
let ret_type_conversion_needed = ret_type_conversion
.as_ref()
.map_or(false, |x| x.cpp_work_needed());
+ let return_needs_rust_conversion = ret_type_conversion
+ .as_ref()
+ .map(|ra| ra.rust_work_needed())
+ .unwrap_or_default();
+
// See https://github.com/dtolnay/cxx/issues/878 for the reason for this next line.
let effective_cpp_name = cpp_name.as_ref().unwrap_or(&rust_name);
let cpp_name_incompatible_with_cxx =
@@ -1346,9 +1355,10 @@
.any(|pd| pd.conversion.rust_work_needed());
let rust_wrapper_needed = match kind {
+ _ if any_param_needs_rust_conversion || return_needs_rust_conversion => true,
FnKind::TraitMethod { .. } => true,
- FnKind::Method { .. } => any_param_needs_rust_conversion || cxxbridge_name != rust_name,
- _ => any_param_needs_rust_conversion,
+ FnKind::Method { .. } => cxxbridge_name != rust_name,
+ _ => false,
};
// Naming, part two.
@@ -1642,6 +1652,7 @@
}
_ => old_pat,
};
+
let is_placement_return_destination = is_placement_return_destination
|| matches!(
force_rust_conversion,
@@ -1653,6 +1664,8 @@
is_move_constructor,
force_rust_conversion,
sophistication,
+ self_type.is_some(),
+ is_placement_return_destination,
);
let new_ty = annotated_type.ty;
pt.pat = Box::new(new_pat.clone());
@@ -1694,6 +1707,8 @@
is_move_constructor: bool,
force_rust_conversion: Option<RustConversionType>,
sophistication: TypeConversionSophistication,
+ is_self: bool,
+ is_placement_return_destination: bool,
) -> TypeConversionPolicy {
let is_subclass_holder = match &annotated_type.kind {
type_converter::TypeKind::SubclassHolder(holder) => Some(holder),
@@ -1703,6 +1718,9 @@
annotated_type.kind,
type_converter::TypeKind::RValueReference
);
+ let is_reference =
+ matches!(annotated_type.kind, type_converter::TypeKind::Reference) || is_self;
+ let rust_conversion_forced = force_rust_conversion.is_some();
let ty = &*annotated_type.ty;
if let Some(holder_id) = is_subclass_holder {
let subclass = SubclassName::from_holder_name(holder_id);
@@ -1710,91 +1728,127 @@
let ty = parse_quote! {
rust::Box<#holder_id>
};
- TypeConversionPolicy {
- unwrapped_type: ty,
- cpp_conversion: CppConversionType::Move,
- rust_conversion: RustConversionType::ToBoxedUpHolder(subclass),
- }
+ TypeConversionPolicy::new(
+ ty,
+ CppConversionType::Move,
+ RustConversionType::ToBoxedUpHolder(subclass),
+ )
};
} else if matches!(
force_rust_conversion,
Some(RustConversionType::FromPlacementParamToNewReturn)
) && matches!(sophistication, TypeConversionSophistication::Regular)
{
- return TypeConversionPolicy {
- unwrapped_type: ty.clone(),
- cpp_conversion: CppConversionType::IgnoredPlacementPtrParameter,
- rust_conversion: RustConversionType::FromPlacementParamToNewReturn,
- };
+ return TypeConversionPolicy::new(
+ ty.clone(),
+ CppConversionType::IgnoredPlacementPtrParameter,
+ RustConversionType::FromPlacementParamToNewReturn,
+ );
}
match ty {
Type::Path(p) => {
let ty = ty.clone();
let tn = QualifiedName::from_type_path(p);
- if self.pod_safe_types.contains(&tn) {
+ if matches!(
+ self.config.unsafe_policy,
+ UnsafePolicy::ReferencesWrappedAllFunctionsSafe
+ ) && is_reference
+ && !rust_conversion_forced
+ // must be std::pin::Pin<&mut T>
+ {
+ let unwrapped_type = extract_type_from_pinned_mut_ref(p);
+ TypeConversionPolicy::new(
+ parse_quote! { *mut #unwrapped_type },
+ CppConversionType::FromPointerToReference,
+ RustConversionType::FromReferenceWrapperToPointer,
+ )
+ } else if self.pod_safe_types.contains(&tn) {
if known_types().lacks_copy_constructor(&tn) {
- TypeConversionPolicy {
- unwrapped_type: ty,
- cpp_conversion: CppConversionType::Move,
- rust_conversion: RustConversionType::None,
- }
+ TypeConversionPolicy::new(
+ ty,
+ CppConversionType::Move,
+ RustConversionType::None,
+ )
} else {
TypeConversionPolicy::new_unconverted(ty)
}
} else if known_types().convertible_from_strs(&tn)
&& !self.config.exclude_utilities()
{
- TypeConversionPolicy {
- unwrapped_type: ty,
- cpp_conversion: CppConversionType::FromUniquePtrToValue,
- rust_conversion: RustConversionType::FromStr,
- }
+ TypeConversionPolicy::new(
+ ty,
+ CppConversionType::FromUniquePtrToValue,
+ RustConversionType::FromStr,
+ )
} else if matches!(
sophistication,
TypeConversionSophistication::SimpleForSubclasses
) {
- TypeConversionPolicy {
- unwrapped_type: ty,
- cpp_conversion: CppConversionType::FromUniquePtrToValue,
- rust_conversion: RustConversionType::None,
- }
+ TypeConversionPolicy::new(
+ ty,
+ CppConversionType::FromUniquePtrToValue,
+ RustConversionType::None,
+ )
} else {
- TypeConversionPolicy {
- unwrapped_type: ty,
- cpp_conversion: CppConversionType::FromPtrToValue,
- rust_conversion: RustConversionType::FromValueParamToPtr,
- }
+ TypeConversionPolicy::new(
+ ty,
+ CppConversionType::FromPtrToValue,
+ RustConversionType::FromValueParamToPtr,
+ )
}
}
Type::Ptr(tp) => {
let rust_conversion = force_rust_conversion.unwrap_or(RustConversionType::None);
if is_move_constructor {
- TypeConversionPolicy {
- unwrapped_type: ty.clone(),
- cpp_conversion: CppConversionType::FromPtrToMove,
+ TypeConversionPolicy::new(
+ ty.clone(),
+ CppConversionType::FromPtrToMove,
rust_conversion,
- }
+ )
} else if is_rvalue_ref {
- TypeConversionPolicy {
- unwrapped_type: *tp.elem.clone(),
- cpp_conversion: CppConversionType::FromPtrToValue,
- rust_conversion: RustConversionType::FromRValueParamToPtr,
- }
+ TypeConversionPolicy::new(
+ *tp.elem.clone(),
+ CppConversionType::FromPtrToValue,
+ RustConversionType::FromRValueParamToPtr,
+ )
+ } else if matches!(
+ self.config.unsafe_policy,
+ UnsafePolicy::ReferencesWrappedAllFunctionsSafe
+ ) && is_reference
+ && !rust_conversion_forced
+ && !is_placement_return_destination
+ {
+ TypeConversionPolicy::new(
+ ty.clone(),
+ CppConversionType::FromPointerToReference,
+ RustConversionType::FromReferenceWrapperToPointer,
+ )
} else {
- TypeConversionPolicy {
- unwrapped_type: ty.clone(),
- cpp_conversion: CppConversionType::None,
- rust_conversion,
- }
+ TypeConversionPolicy::new(ty.clone(), CppConversionType::None, rust_conversion)
}
}
+ Type::Reference(TypeReference {
+ elem, mutability, ..
+ }) if matches!(
+ self.config.unsafe_policy,
+ UnsafePolicy::ReferencesWrappedAllFunctionsSafe
+ ) && !rust_conversion_forced
+ && !is_placement_return_destination =>
+ {
+ let is_mut = mutability.is_some();
+ TypeConversionPolicy::new(
+ if is_mut {
+ panic!("Never expected to find &mut T at this point, we should be Pin<&mut T> by now")
+ } else {
+ parse_quote! { *const #elem }
+ },
+ CppConversionType::FromPointerToReference,
+ RustConversionType::FromReferenceWrapperToPointer,
+ )
+ }
_ => {
let rust_conversion = force_rust_conversion.unwrap_or(RustConversionType::None);
- TypeConversionPolicy {
- unwrapped_type: ty.clone(),
- cpp_conversion: CppConversionType::None,
- rust_conversion,
- }
+ TypeConversionPolicy::new(ty.clone(), CppConversionType::None, rust_conversion)
}
}
}
@@ -1870,8 +1924,19 @@
}
}
_ => {
- let was_reference = matches!(boxed_type.as_ref(), Type::Reference(_));
- let conversion = Some(TypeConversionPolicy::new_unconverted(ty.clone()));
+ let was_reference = references.ref_return;
+ let conversion = Some(
+ if was_reference
+ && matches!(
+ self.config.unsafe_policy,
+ UnsafePolicy::ReferencesWrappedAllFunctionsSafe
+ )
+ {
+ TypeConversionPolicy::return_reference_into_wrapper(ty.clone())
+ } else {
+ TypeConversionPolicy::new_unconverted(ty.clone())
+ },
+ );
ReturnTypeAnalysis {
rt: ReturnType::Type(*rarrow, boxed_type),
conversion,
@@ -2138,3 +2203,24 @@
}
}
}
+
+fn extract_type_from_pinned_mut_ref(ty: &TypePath) -> Type {
+ match ty
+ .path
+ .segments
+ .last()
+ .expect("was not std::pin::Pin")
+ .arguments
+ {
+ syn::PathArguments::AngleBracketed(ref ab) => {
+ match ab.args.first().expect("did not have angle bracketed args") {
+ syn::GenericArgument::Type(ref ty) => match ty {
+ Type::Reference(ref tyr) => tyr.elem.as_ref().clone(),
+ _ => panic!("pin did not contain a reference"),
+ },
+ _ => panic!("argument was not a type"),
+ }
+ }
+ _ => panic!("did not find angle bracketed args"),
+ }
+}
diff --git a/third_party/autocxx/engine/src/conversion/analysis/fun/subclass.rs b/third_party/autocxx/engine/src/conversion/analysis/fun/subclass.rs
index c017249..6383d2c 100644
--- a/third_party/autocxx/engine/src/conversion/analysis/fun/subclass.rs
+++ b/third_party/autocxx/engine/src/conversion/analysis/fun/subclass.rs
@@ -10,7 +10,7 @@
use syn::{parse_quote, FnArg, PatType, Type, TypePtr};
-use crate::conversion::analysis::fun::{FnKind, MethodKind, ReceiverMutability};
+use crate::conversion::analysis::fun::{FnKind, MethodKind, ReceiverMutability, UnsafePolicy};
use crate::conversion::analysis::pod::PodPhase;
use crate::conversion::api::{
CppVisibility, FuncToConvert, Provenance, RustSubclassFnDetails, SubclassConstructorDetails,
@@ -79,12 +79,18 @@
receiver_mutability: &ReceiverMutability,
receiver: QualifiedName,
is_pure_virtual: bool,
+ unsafe_policy: &UnsafePolicy,
) -> Api<FnPrePhase1> {
let param_names = analysis
.param_details
.iter()
.map(|pd| pd.name.clone())
.collect();
+ let requires_unsafe = if matches!(unsafe_policy, UnsafePolicy::AllFunctionsUnsafe) {
+ UnsafetyNeeded::Always
+ } else {
+ UnsafetyNeeded::from_param_details(&analysis.param_details, false)
+ };
Api::SubclassTraitItem {
name,
details: SuperclassMethod {
@@ -93,7 +99,7 @@
ret_type: analysis.ret_type.clone(),
param_names,
receiver_mutability: receiver_mutability.clone(),
- requires_unsafe: UnsafetyNeeded::from_param_details(&analysis.param_details, false),
+ requires_unsafe,
is_pure_virtual,
receiver,
},
@@ -107,6 +113,7 @@
receiver_mutability: &ReceiverMutability,
superclass: &QualifiedName,
dependencies: Vec<QualifiedName>,
+ unsafe_policy: &UnsafePolicy,
) -> Api<FnPrePhase1> {
let cpp = sub.cpp();
let holder_name = sub.holder();
@@ -131,6 +138,11 @@
.skip(1)
.map(|p| p.conversion.clone())
.collect();
+ let requires_unsafe = if matches!(unsafe_policy, UnsafePolicy::AllFunctionsUnsafe) {
+ UnsafetyNeeded::Always
+ } else {
+ UnsafetyNeeded::from_param_details(&analysis.param_details, false)
+ };
Api::RustSubclassFn {
name: ApiName::new_in_root_namespace(rust_call_name.clone()),
subclass: sub.clone(),
@@ -151,7 +163,7 @@
superclass: superclass.clone(),
receiver_mutability: receiver_mutability.clone(),
dependencies,
- requires_unsafe: UnsafetyNeeded::from_param_details(&analysis.param_details, false),
+ requires_unsafe,
is_pure_virtual: matches!(
analysis.kind,
FnKind::Method {
diff --git a/third_party/autocxx/engine/src/conversion/analysis/pod/mod.rs b/third_party/autocxx/engine/src/conversion/analysis/pod/mod.rs
index 6722c23..eeb5051 100644
--- a/third_party/autocxx/engine/src/conversion/analysis/pod/mod.rs
+++ b/third_party/autocxx/engine/src/conversion/analysis/pod/mod.rs
@@ -18,7 +18,7 @@
use crate::{
conversion::{
analysis::type_converter::{self, add_analysis, TypeConversionContext, TypeConverter},
- api::{AnalysisPhase, Api, ApiName, CppVisibility, NullPhase, StructDetails, TypeKind},
+ api::{AnalysisPhase, Api, ApiName, NullPhase, StructDetails, TypeKind},
apivec::ApiVec,
convert_error::{ConvertErrorWithContext, ErrorContext},
error_reporter::convert_apis,
@@ -134,12 +134,6 @@
config: &IncludeCppConfig,
) -> Result<Box<dyn Iterator<Item = Api<PodPhase>>>, ConvertErrorWithContext> {
let id = name.name.get_final_ident();
- if details.vis != CppVisibility::Public {
- return Err(ConvertErrorWithContext(
- ConvertError::NonPublicNestedType,
- Some(ErrorContext::new_for_item(id)),
- ));
- }
let metadata = BindgenSemanticAttributes::new_retaining_others(&mut details.item.attrs);
metadata.check_for_fatal_attrs(&id)?;
let bases = get_bases(&details.item);
@@ -208,9 +202,14 @@
extra_apis: &mut ApiVec<NullPhase>,
) -> Vec<ConvertError> {
let mut convert_errors = Vec::new();
+ let struct_type_params = s
+ .generics
+ .type_params()
+ .map(|tp| tp.ident.clone())
+ .collect();
+ let type_conversion_context = TypeConversionContext::WithinStructField { struct_type_params };
for f in &s.fields {
- let annotated =
- type_converter.convert_type(f.ty.clone(), ns, &TypeConversionContext::WithinReference);
+ let annotated = type_converter.convert_type(f.ty.clone(), ns, &type_conversion_context);
match annotated {
Ok(mut r) => {
extra_apis.append(&mut r.extra_apis);
diff --git a/third_party/autocxx/engine/src/conversion/analysis/type_converter.rs b/third_party/autocxx/engine/src/conversion/analysis/type_converter.rs
index afdda8a..7e2d2bd 100644
--- a/third_party/autocxx/engine/src/conversion/analysis/type_converter.rs
+++ b/third_party/autocxx/engine/src/conversion/analysis/type_converter.rs
@@ -91,6 +91,7 @@
/// from [TypeConverter] _might_ be used in the [cxx::bridge].
pub(crate) enum TypeConversionContext {
WithinReference,
+ WithinStructField { struct_type_params: HashSet<Ident> },
WithinContainer,
OuterType { pointer_treatment: PointerTreatment },
}
@@ -98,13 +99,25 @@
impl TypeConversionContext {
fn pointer_treatment(&self) -> PointerTreatment {
match self {
- Self::WithinReference | Self::WithinContainer => PointerTreatment::Pointer,
+ Self::WithinReference | Self::WithinContainer | Self::WithinStructField { .. } => {
+ PointerTreatment::Pointer
+ }
Self::OuterType { pointer_treatment } => *pointer_treatment,
}
}
fn allow_instantiation_of_forward_declaration(&self) -> bool {
matches!(self, Self::WithinReference)
}
+ fn allowed_generic_type(&self, ident: &Ident) -> bool {
+ match self {
+ Self::WithinStructField { struct_type_params }
+ if struct_type_params.contains(ident) =>
+ {
+ false
+ }
+ _ => true,
+ }
+ }
}
/// A type which can convert from a type encountered in `bindgen`
@@ -156,7 +169,7 @@
) -> Result<Annotated<Type>, ConvertError> {
let result = match ty {
Type::Path(p) => {
- let newp = self.convert_type_path(p, ns)?;
+ let newp = self.convert_type_path(p, ns, ctx)?;
if let Type::Path(newpp) = &newp.ty {
let qn = QualifiedName::from_type_path(newpp);
if !ctx.allow_instantiation_of_forward_declaration()
@@ -216,6 +229,7 @@
&mut self,
mut typ: TypePath,
ns: &Namespace,
+ ctx: &TypeConversionContext,
) -> Result<Annotated<Type>, ConvertError> {
// First, qualify any unqualified paths.
if typ.path.segments.iter().next().unwrap().ident != "root" {
@@ -323,7 +337,24 @@
// Oh poop. It's a generic type which cxx won't be able to handle.
// We'll have to come up with a concrete type in both the cxx::bridge (in Rust)
// and a corresponding typedef in C++.
- // Let's first see if this is a concrete version of a templated type
+ // First let's see if this actually depends on a generic type
+ // param of the surrounding struct.
+ for seg in &typ.path.segments {
+ if let PathArguments::AngleBracketed(args) = &seg.arguments {
+ for arg in args.args.iter() {
+ if let GenericArgument::Type(Type::Path(typ)) = arg {
+ if let Some(seg) = typ.path.segments.last() {
+ if typ.path.segments.len() == 1
+ && !ctx.allowed_generic_type(&seg.ident)
+ {
+ return Err(ConvertError::ReferringToGenericTypeParam);
+ }
+ }
+ }
+ }
+ }
+ }
+ // Secondly, let's see if this is a concrete version of a templated type
// which we already rejected. Some, but possibly not all, of the reasons
// for its rejection would also apply to any concrete types we
// make. Err on the side of caution. In future we may be able to relax
@@ -393,6 +424,14 @@
if encountered.contains(&new_tn) {
return Err(ConvertError::InfinitelyRecursiveTypedef(tn.clone()));
}
+ if typ
+ .path
+ .segments
+ .iter()
+ .any(|seg| seg.ident.to_string().starts_with("_bindgen_mod"))
+ {
+ return Err(ConvertError::TypedefToTypeInAnonymousNamespace);
+ }
encountered.insert(new_tn.clone());
tn = new_tn;
}
diff --git a/third_party/autocxx/engine/src/conversion/api.rs b/third_party/autocxx/engine/src/conversion/api.rs
index 3d04674..c5a1b60 100644
--- a/third_party/autocxx/engine/src/conversion/api.rs
+++ b/third_party/autocxx/engine/src/conversion/api.rs
@@ -54,7 +54,6 @@
/// Details about a C++ struct.
pub(crate) struct StructDetails {
- pub(crate) vis: CppVisibility,
pub(crate) item: ItemStruct,
pub(crate) layout: Option<Layout>,
pub(crate) has_rvalue_reference_fields: bool,
@@ -713,3 +712,10 @@
Ok(Box::new(std::iter::once(Api::Enum { name, item })))
}
}
+
+/// Whether a type is a pointer of some kind.
+pub(crate) enum Pointerness {
+ Not,
+ ConstPtr,
+ MutPtr,
+}
diff --git a/third_party/autocxx/engine/src/conversion/codegen_cpp/function_wrapper_cpp.rs b/third_party/autocxx/engine/src/conversion/codegen_cpp/function_wrapper_cpp.rs
index b2e1ea3..5367626 100644
--- a/third_party/autocxx/engine/src/conversion/codegen_cpp/function_wrapper_cpp.rs
+++ b/third_party/autocxx/engine/src/conversion/codegen_cpp/function_wrapper_cpp.rs
@@ -6,8 +6,11 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use syn::{Type, TypePtr};
+
use crate::conversion::{
analysis::fun::function_wrapper::{CppConversionType, TypeConversionPolicy},
+ api::Pointerness,
ConvertError,
};
@@ -30,12 +33,39 @@
pub(super) fn converted_type(&self, cpp_name_map: &CppNameMap) -> Result<String, ConvertError> {
match self.cpp_conversion {
CppConversionType::FromValueToUniquePtr => self.unique_ptr_wrapped_type(cpp_name_map),
+ CppConversionType::FromReferenceToPointer => {
+ let (const_string, ty) = match self.cxxbridge_type() {
+ Type::Ptr(TypePtr {
+ mutability: Some(_),
+ elem,
+ ..
+ }) => ("", elem.as_ref()),
+ Type::Ptr(TypePtr { elem, .. }) => ("const ", elem.as_ref()),
+ _ => panic!("Not a pointer"),
+ };
+ Ok(format!(
+ "{}{}*",
+ const_string,
+ type_to_cpp(ty, cpp_name_map)?
+ ))
+ }
_ => self.unwrapped_type_as_string(cpp_name_map),
}
}
fn unwrapped_type_as_string(&self, cpp_name_map: &CppNameMap) -> Result<String, ConvertError> {
- type_to_cpp(&self.unwrapped_type, cpp_name_map)
+ type_to_cpp(self.cxxbridge_type(), cpp_name_map)
+ }
+
+ pub(crate) fn is_a_pointer(&self) -> Pointerness {
+ match self.cxxbridge_type() {
+ Type::Ptr(TypePtr {
+ mutability: Some(_),
+ ..
+ }) => Pointerness::MutPtr,
+ Type::Ptr(_) => Pointerness::ConstPtr,
+ _ => Pointerness::Not,
+ }
}
fn unique_ptr_wrapped_type(
@@ -60,6 +90,7 @@
CppConversionType::None | CppConversionType::FromReturnValueToPlacementPtr => {
Some(var_name.to_string())
}
+ CppConversionType::FromPointerToReference { .. } => Some(format!("(*{})", var_name)),
CppConversionType::Move => Some(format!("std::move({})", var_name)),
CppConversionType::FromUniquePtrToValue | CppConversionType::FromPtrToMove => {
Some(format!("std::move(*{})", var_name))
@@ -78,6 +109,7 @@
})
}
CppConversionType::IgnoredPlacementPtrParameter => None,
+ CppConversionType::FromReferenceToPointer { .. } => Some(format!("&{}", var_name)),
})
}
}
diff --git a/third_party/autocxx/engine/src/conversion/codegen_cpp/mod.rs b/third_party/autocxx/engine/src/conversion/codegen_cpp/mod.rs
index 02e92b2..0e6dcce 100644
--- a/third_party/autocxx/engine/src/conversion/codegen_cpp/mod.rs
+++ b/third_party/autocxx/engine/src/conversion/codegen_cpp/mod.rs
@@ -559,7 +559,7 @@
underlying_function_call = match placement_param {
Some(placement_param) => {
- let tyname = type_to_cpp(&ret.unwrapped_type, &self.original_name_map)?;
+ let tyname = type_to_cpp(ret.cxxbridge_type(), &self.original_name_map)?;
format!("new({}) {}({})", placement_param, tyname, call_itself)
}
None => format!("return {}", call_itself),
@@ -684,6 +684,13 @@
"{}& As_{}_mut() {{ return *this; }}",
super_name, super_name
));
+ self.additional_functions.push(ExtraCpp {
+ declaration: Some(format!(
+ "inline std::unique_ptr<{}> {}_As_{}_UniquePtr(std::unique_ptr<{}> u) {{ return std::unique_ptr<{}>(u.release()); }}",
+ superclass.to_cpp_name(), subclass.cpp(), super_name, subclass.cpp(), superclass.to_cpp_name(),
+ )),
+ ..Default::default()
+ });
// And now constructors
let mut constructor_decls: Vec<String> = Vec::new();
for constructor in constructors {
@@ -700,7 +707,7 @@
}
self.additional_functions.push(ExtraCpp {
type_definition: Some(format!(
- "class {} : {}\n{{\npublic:\n{}\n{}\nvoid {}() const;\nprivate:rust::Box<{}> obs;\nvoid really_remove_ownership();\n\n}};",
+ "class {} : public {}\n{{\npublic:\n{}\n{}\nvoid {}() const;\nprivate:rust::Box<{}> obs;\nvoid really_remove_ownership();\n\n}};",
subclass.cpp(),
superclass.to_cpp_name(),
constructor_decls.join("\n"),
diff --git a/third_party/autocxx/engine/src/conversion/codegen_rs/fun_codegen.rs b/third_party/autocxx/engine/src/conversion/codegen_rs/fun_codegen.rs
index 7c32b1b..db222a6 100644
--- a/third_party/autocxx/engine/src/conversion/codegen_rs/fun_codegen.rs
+++ b/third_party/autocxx/engine/src/conversion/codegen_rs/fun_codegen.rs
@@ -6,6 +6,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use autocxx_parser::IncludeCppConfig;
use indexmap::set::IndexSet as HashSet;
use std::borrow::Cow;
@@ -23,15 +24,15 @@
function_wrapper_rs::RustParamConversion,
maybe_unsafes_to_tokens,
unqualify::{unqualify_params, unqualify_ret_type},
- ImplBlockDetails, MaybeUnsafeStmt, RsCodegenResult, TraitImplBlockDetails, Use,
+ ImplBlockDetails, ImplBlockKey, MaybeUnsafeStmt, RsCodegenResult, TraitImplBlockDetails, Use,
};
use crate::{
conversion::{
analysis::fun::{
- ArgumentAnalysis, FnAnalysis, FnKind, MethodKind, RustRenameStrategy,
- TraitMethodDetails,
+ function_wrapper::TypeConversionPolicy, ArgumentAnalysis, FnAnalysis, FnKind,
+ MethodKind, RustRenameStrategy, TraitMethodDetails,
},
- api::UnsafetyNeeded,
+ api::{Pointerness, UnsafetyNeeded},
},
types::{Namespace, QualifiedName},
};
@@ -89,6 +90,7 @@
analysis: FnAnalysis,
cpp_call_name: String,
non_pod_types: &HashSet<QualifiedName>,
+ config: &IncludeCppConfig,
) -> RsCodegenResult {
if analysis.ignore_reason.is_err() || !analysis.externally_callable {
return RsCodegenResult::default();
@@ -96,6 +98,7 @@
let cxxbridge_name = analysis.cxxbridge_name;
let rust_name = &analysis.rust_name;
let ret_type = analysis.ret_type;
+ let ret_conversion = analysis.ret_conversion;
let param_details = analysis.param_details;
let wrapper_function_needed = analysis.cpp_wrapper.is_some();
let params = analysis.params;
@@ -119,6 +122,9 @@
always_unsafe_due_to_trait_definition,
doc_attrs: &doc_attrs,
non_pod_types,
+ ret_type: &ret_type,
+ ret_conversion: &ret_conversion,
+ reference_wrappers: config.unsafe_policy.requires_cpprefs(),
};
// In rare occasions, we might need to give an explicit lifetime.
let (lifetime_tokens, params, ret_type) = add_explicit_lifetime_if_necessary(
@@ -148,15 +154,14 @@
impl_entry = Some(fn_generator.generate_method_impl(
matches!(method_kind, MethodKind::Constructor { .. }),
impl_for,
- &ret_type,
));
}
FnKind::TraitMethod { ref details, .. } => {
- trait_impl_entry = Some(fn_generator.generate_trait_impl(details, &ret_type));
+ trait_impl_entry = Some(fn_generator.generate_trait_impl(details));
}
_ => {
// Generate plain old function
- bindgen_mod_items.push(fn_generator.generate_function_impl(&ret_type));
+ bindgen_mod_items.push(fn_generator.generate_function_impl());
}
}
}
@@ -225,20 +230,23 @@
#[derive(Clone)]
struct FnGenerator<'a> {
param_details: &'a [ArgumentAnalysis],
+ ret_conversion: &'a Option<TypeConversionPolicy>,
+ ret_type: &'a ReturnType,
cxxbridge_name: &'a Ident,
rust_name: &'a str,
unsafety: &'a UnsafetyNeeded,
always_unsafe_due_to_trait_definition: bool,
doc_attrs: &'a Vec<Attribute>,
non_pod_types: &'a HashSet<QualifiedName>,
+ reference_wrappers: bool,
}
impl<'a> FnGenerator<'a> {
fn common_parts<'b>(
- &self,
+ &'b self,
avoid_self: bool,
parameter_reordering: &Option<Vec<usize>>,
- ret_type: &'b ReturnType,
+ ret_type: Option<ReturnType>,
) -> (
Option<TokenStream>,
Punctuated<FnArg, Comma>,
@@ -249,15 +257,20 @@
let mut local_variables = Vec::new();
let mut arg_list = Vec::new();
let mut ptr_arg_name = None;
- let mut ret_type = Cow::Borrowed(ret_type);
+ let mut ret_type: Cow<'a, _> = ret_type
+ .map(Cow::Owned)
+ .unwrap_or(Cow::Borrowed(self.ret_type));
let mut any_conversion_requires_unsafe = false;
+ let mut variable_counter = 0usize;
for pd in self.param_details {
let wrapper_arg_name = if pd.self_type.is_some() && !avoid_self {
parse_quote!(self)
} else {
pd.name.clone()
};
- let rust_for_param = pd.conversion.rust_conversion(wrapper_arg_name.clone());
+ let rust_for_param = pd
+ .conversion
+ .rust_conversion(parse_quote! { #wrapper_arg_name }, &mut variable_counter);
match rust_for_param {
RustParamConversion::Param {
ty,
@@ -305,6 +318,39 @@
},
any_conversion_requires_unsafe || matches!(self.unsafety, UnsafetyNeeded::JustBridge),
);
+ let context_is_unsafe = matches!(self.unsafety, UnsafetyNeeded::Always)
+ || self.always_unsafe_due_to_trait_definition;
+ let (call_body, ret_type) = match self.ret_conversion {
+ Some(ret_conversion) if ret_conversion.rust_work_needed() => {
+ let expr = maybe_unsafes_to_tokens(vec![call_body], context_is_unsafe);
+ let conv =
+ ret_conversion.rust_conversion(parse_quote! { #expr }, &mut variable_counter);
+ let (conversion, requires_unsafe, ty) = match conv {
+ RustParamConversion::Param {
+ local_variables, ..
+ } if !local_variables.is_empty() => panic!("return type required variables"),
+ RustParamConversion::Param {
+ conversion,
+ conversion_requires_unsafe,
+ ty,
+ ..
+ } => (conversion, conversion_requires_unsafe, ty),
+ _ => panic!(
+ "Unexpected - return type is supposed to be converted to a return type"
+ ),
+ };
+ (
+ if requires_unsafe {
+ MaybeUnsafeStmt::NeedsUnsafe(conversion)
+ } else {
+ MaybeUnsafeStmt::Normal(conversion)
+ },
+ Cow::Owned(parse_quote! { -> #ty }),
+ )
+ }
+ _ => (call_body, ret_type),
+ };
+
let call_stmts = if let Some(ptr_arg_name) = ptr_arg_name {
let mut closure_stmts = local_variables;
closure_stmts.push(MaybeUnsafeStmt::binary(
@@ -323,8 +369,6 @@
call_stmts.push(call_body);
call_stmts
};
- let context_is_unsafe = matches!(self.unsafety, UnsafetyNeeded::Always)
- || self.always_unsafe_due_to_trait_definition;
let call_body = maybe_unsafes_to_tokens(call_stmts, context_is_unsafe);
(lifetime_tokens, wrapper_params, ret_type, call_body)
}
@@ -334,13 +378,44 @@
&self,
avoid_self: bool,
impl_block_type_name: &QualifiedName,
- ret_type: &ReturnType,
) -> Box<ImplBlockDetails> {
let (lifetime_tokens, wrapper_params, ret_type, call_body) =
- self.common_parts(avoid_self, &None, ret_type);
+ self.common_parts(avoid_self, &None, None);
let rust_name = make_ident(self.rust_name);
let unsafety = self.unsafety.wrapper_token();
let doc_attrs = self.doc_attrs;
+ let receiver_pointerness = self
+ .param_details
+ .iter()
+ .next()
+ .map(|pd| pd.conversion.is_a_pointer())
+ .unwrap_or(Pointerness::Not);
+ let ty = impl_block_type_name.get_final_ident();
+ let ty = if self.reference_wrappers {
+ match receiver_pointerness {
+ Pointerness::MutPtr => ImplBlockKey {
+ ty: parse_quote! {
+ CppMutRef< 'a, #ty>
+ },
+ lifetime: Some(parse_quote! { 'a }),
+ },
+ Pointerness::ConstPtr => ImplBlockKey {
+ ty: parse_quote! {
+ CppRef< 'a, #ty>
+ },
+ lifetime: Some(parse_quote! { 'a }),
+ },
+ Pointerness::Not => ImplBlockKey {
+ ty: parse_quote! { # ty },
+ lifetime: None,
+ },
+ }
+ } else {
+ ImplBlockKey {
+ ty: parse_quote! { # ty },
+ lifetime: None,
+ }
+ };
Box::new(ImplBlockDetails {
item: ImplItem::Method(parse_quote! {
#(#doc_attrs)*
@@ -348,18 +423,14 @@
#call_body
}
}),
- ty: impl_block_type_name.get_final_ident(),
+ ty,
})
}
/// Generate an 'impl Trait for Type { methods-go-here }' in its entirety.
- fn generate_trait_impl(
- &self,
- details: &TraitMethodDetails,
- ret_type: &ReturnType,
- ) -> Box<TraitImplBlockDetails> {
+ fn generate_trait_impl(&self, details: &TraitMethodDetails) -> Box<TraitImplBlockDetails> {
let (lifetime_tokens, wrapper_params, ret_type, call_body) =
- self.common_parts(details.avoid_self, &details.parameter_reordering, ret_type);
+ self.common_parts(details.avoid_self, &details.parameter_reordering, None);
let doc_attrs = self.doc_attrs;
let unsafety = self.unsafety.wrapper_token();
let key = details.trt.clone();
@@ -381,25 +452,28 @@
) -> Box<ImplBlockDetails> {
let ret_type: ReturnType = parse_quote! { -> impl autocxx::moveit::new::New<Output=Self> };
let (lifetime_tokens, wrapper_params, ret_type, call_body) =
- self.common_parts(true, &None, &ret_type);
+ self.common_parts(true, &None, Some(ret_type));
let rust_name = make_ident(&self.rust_name);
let doc_attrs = self.doc_attrs;
let unsafety = self.unsafety.wrapper_token();
- Box::new(ImplBlockDetails {
- item: ImplItem::Method(parse_quote! {
+ let ty = impl_block_type_name.get_final_ident();
+ let ty = parse_quote! { #ty };
+ let stuff = quote! {
#(#doc_attrs)*
pub #unsafety fn #rust_name #lifetime_tokens ( #wrapper_params ) #ret_type {
#call_body
}
- }),
- ty: impl_block_type_name.get_final_ident(),
+ };
+ Box::new(ImplBlockDetails {
+ item: ImplItem::Method(parse_quote! { #stuff }),
+ ty: ImplBlockKey { ty, lifetime: None },
})
}
/// Generate a function call wrapper
- fn generate_function_impl(&self, ret_type: &ReturnType) -> Item {
+ fn generate_function_impl(&self) -> Item {
let (lifetime_tokens, wrapper_params, ret_type, call_body) =
- self.common_parts(false, &None, ret_type);
+ self.common_parts(false, &None, None);
let rust_name = make_ident(self.rust_name);
let doc_attrs = self.doc_attrs;
let unsafety = self.unsafety.wrapper_token();
diff --git a/third_party/autocxx/engine/src/conversion/codegen_rs/function_wrapper_rs.rs b/third_party/autocxx/engine/src/conversion/codegen_rs/function_wrapper_rs.rs
index a3fc71f..708d41c 100644
--- a/third_party/autocxx/engine/src/conversion/codegen_rs/function_wrapper_rs.rs
+++ b/third_party/autocxx/engine/src/conversion/codegen_rs/function_wrapper_rs.rs
@@ -7,7 +7,7 @@
// except according to those terms.
use proc_macro2::TokenStream;
-use syn::{Pat, Type, TypePtr};
+use syn::{Expr, Type, TypePtr};
use crate::{
conversion::analysis::fun::function_wrapper::{RustConversionType, TypeConversionPolicy},
@@ -32,8 +32,7 @@
}
impl TypeConversionPolicy {
- /// If returns `None` then this parameter should be omitted entirely.
- pub(super) fn rust_conversion(&self, var: Pat) -> RustParamConversion {
+ pub(super) fn rust_conversion(&self, var: Expr, counter: &mut usize) -> RustParamConversion {
match self.rust_conversion {
RustConversionType::None => RustParamConversion::Param {
ty: self.converted_rust_type(),
@@ -63,7 +62,7 @@
}
}
RustConversionType::FromPinMaybeUninitToPtr => {
- let ty = match &self.unwrapped_type {
+ let ty = match self.cxxbridge_type() {
Type::Ptr(TypePtr { elem, .. }) => &*elem,
_ => panic!("Not a ptr"),
};
@@ -80,7 +79,7 @@
}
}
RustConversionType::FromPinMoveRefToPtr => {
- let ty = match &self.unwrapped_type {
+ let ty = match self.cxxbridge_type() {
Type::Ptr(TypePtr { elem, .. }) => &*elem,
_ => panic!("Not a ptr"),
};
@@ -99,7 +98,7 @@
}
}
RustConversionType::FromTypeToPtr => {
- let ty = match &self.unwrapped_type {
+ let ty = match self.cxxbridge_type() {
Type::Ptr(TypePtr { elem, .. }) => &*elem,
_ => panic!("Not a ptr"),
};
@@ -123,13 +122,11 @@
};
let handler_type = make_ident(handler_type);
let param_trait = make_ident(param_trait);
- let var_name = if let Pat::Ident(pti) = &var {
- &pti.ident
- } else {
- panic!("Unexpected non-ident parameter name");
- };
- let space_var_name = make_ident(format!("{}_space", var_name));
- let ty = &self.unwrapped_type;
+ let var_counter = *counter;
+ *counter += 1;
+ let space_var_name = format!("space{}", var_counter);
+ let space_var_name = make_ident(space_var_name);
+ let ty = self.cxxbridge_type();
let ty = parse_quote! { impl autocxx::#param_trait<#ty> };
// This is the usual trick to put something on the stack, then
// immediately shadow the variable name so it can't be accessed or moved.
@@ -148,7 +145,7 @@
},
),
MaybeUnsafeStmt::needs_unsafe(
- quote! { #space_var_name.as_mut().populate(#var_name); },
+ quote! { #space_var_name.as_mut().populate(#var); },
),
],
conversion: quote! {
@@ -161,12 +158,55 @@
// but not in the arguments for the wrapper function, because instead we return an
// impl New which uses the cxx::bridge function's pointer parameter.
RustConversionType::FromPlacementParamToNewReturn => {
- let ty = match &self.unwrapped_type {
+ let ty = match self.cxxbridge_type() {
Type::Ptr(TypePtr { elem, .. }) => *(*elem).clone(),
_ => panic!("Not a ptr"),
};
RustParamConversion::ReturnValue { ty }
}
+ RustConversionType::FromPointerToReferenceWrapper => {
+ let (is_mut, ty) = match self.cxxbridge_type() {
+ Type::Ptr(TypePtr {
+ mutability, elem, ..
+ }) => (mutability.is_some(), elem.as_ref()),
+ _ => panic!("Not a pointer"),
+ };
+ let (ty, wrapper_name) = if is_mut {
+ (parse_quote! { CppMutRef<'a, #ty> }, "CppMutRef")
+ } else {
+ (parse_quote! { CppRef<'a, #ty> }, "CppRef")
+ };
+ let wrapper_name = make_ident(wrapper_name);
+ RustParamConversion::Param {
+ ty,
+ local_variables: Vec::new(),
+ conversion: quote! {
+ #wrapper_name (#var, std::marker::PhantomData)
+ },
+ conversion_requires_unsafe: false,
+ }
+ }
+ RustConversionType::FromReferenceWrapperToPointer => {
+ let (is_mut, ty) = match self.cxxbridge_type() {
+ Type::Ptr(TypePtr {
+ mutability, elem, ..
+ }) => (mutability.is_some(), elem.as_ref()),
+ _ => panic!("Not a pointer"),
+ };
+ let ty = if is_mut {
+ parse_quote! { &mut CppMutRef<'a, #ty> }
+ } else {
+ parse_quote! { &CppRef<'a, #ty> }
+ };
+ RustParamConversion::Param {
+ ty,
+ local_variables: Vec::new(),
+ conversion: quote! {
+ #var .0
+ },
+ conversion_requires_unsafe: false,
+ }
+ }
}
}
}
diff --git a/third_party/autocxx/engine/src/conversion/codegen_rs/mod.rs b/third_party/autocxx/engine/src/conversion/codegen_rs/mod.rs
index d488d52..5dd4cbb 100644
--- a/third_party/autocxx/engine/src/conversion/codegen_rs/mod.rs
+++ b/third_party/autocxx/engine/src/conversion/codegen_rs/mod.rs
@@ -17,13 +17,14 @@
use indexmap::map::IndexMap as HashMap;
use indexmap::set::IndexSet as HashSet;
-use autocxx_parser::{ExternCppType, IncludeCppConfig, RustFun};
+use autocxx_parser::{ExternCppType, IncludeCppConfig, RustFun, UnsafePolicy};
use itertools::Itertools;
use proc_macro2::{Span, TokenStream};
use syn::{
parse_quote, punctuated::Punctuated, token::Comma, Attribute, Expr, FnArg, ForeignItem,
- ForeignItemFn, Ident, ImplItem, Item, ItemForeignMod, ItemMod, TraitItem, TypePath,
+ ForeignItemFn, Ident, ImplItem, Item, ItemForeignMod, ItemMod, Lifetime, TraitItem, Type,
+ TypePath,
};
use crate::{
@@ -61,10 +62,16 @@
use super::{convert_error::ErrorContext, ConvertError};
use quote::quote;
+#[derive(Clone, Hash, PartialEq, Eq)]
+struct ImplBlockKey {
+ ty: Type,
+ lifetime: Option<Lifetime>,
+}
+
/// An entry which needs to go into an `impl` block for a given type.
struct ImplBlockDetails {
item: ImplItem,
- ty: Ident,
+ ty: ImplBlockKey,
}
struct TraitImplBlockDetails {
@@ -130,10 +137,96 @@
.to_vec()
}
+fn get_cppref_items() -> Vec<Item> {
+ [
+ Item::Struct(parse_quote! {
+ #[repr(transparent)]
+ pub struct CppRef<'a, T>(pub *const T, pub ::std::marker::PhantomData<&'a T>);
+ }),
+ Item::Impl(parse_quote! {
+ impl<'a, T> autocxx::CppRef<'a, T> for CppRef<'a, T> {
+ fn as_ptr(&self) -> *const T {
+ self.0
+ }
+ }
+ }),
+ Item::Struct(parse_quote! {
+ #[repr(transparent)]
+ pub struct CppMutRef<'a, T>(pub *mut T, pub ::std::marker::PhantomData<&'a T>);
+ }),
+ Item::Impl(parse_quote! {
+ impl<'a, T> autocxx::CppRef<'a, T> for CppMutRef<'a, T> {
+ fn as_ptr(&self) -> *const T {
+ self.0
+ }
+ }
+ }),
+ Item::Impl(parse_quote! {
+ impl<'a, T> autocxx::CppMutRef<'a, T> for CppMutRef<'a, T> {
+ fn as_mut_ptr(&self) -> *mut T {
+ self.0
+ }
+ }
+ }),
+ Item::Impl(parse_quote! {
+ impl<'a, T: ::cxx::private::UniquePtrTarget> CppMutRef<'a, T> {
+ /// Create a const C++ reference from this mutable C++ reference.
+ pub fn as_cpp_ref(&self) -> CppRef<'a, T> {
+ use autocxx::CppRef;
+ CppRef(self.as_ptr(), ::std::marker::PhantomData)
+ }
+ }
+ }),
+ Item::Struct(parse_quote! {
+ /// "Pins" a `UniquePtr` to an object, so that C++-compatible references can be created.
+ /// See [`::autocxx::CppPin`]
+ #[repr(transparent)]
+ pub struct CppUniquePtrPin<T: ::cxx::private::UniquePtrTarget>(::cxx::UniquePtr<T>);
+ }),
+ Item::Impl(parse_quote! {
+ impl<'a, T: 'a + ::cxx::private::UniquePtrTarget> autocxx::CppPin<'a, T> for CppUniquePtrPin<T>
+ {
+ type CppRef = CppRef<'a, T>;
+ type CppMutRef = CppMutRef<'a, T>;
+ fn as_ptr(&self) -> *const T {
+ // TODO add as_ptr to cxx to avoid the ephemeral reference
+ self.0.as_ref().unwrap() as *const T
+ }
+ fn as_mut_ptr(&mut self) -> *mut T {
+ unsafe { ::std::pin::Pin::into_inner_unchecked(self.0.as_mut().unwrap()) as *mut T }
+ }
+ fn as_cpp_ref(&self) -> Self::CppRef {
+ CppRef(self.as_ptr(), ::std::marker::PhantomData)
+ }
+ fn as_cpp_mut_ref(&mut self) -> Self::CppMutRef {
+ CppMutRef(self.as_mut_ptr(), ::std::marker::PhantomData)
+ }
+ }
+ }),
+ Item::Impl(parse_quote! {
+ impl<T: ::cxx::private::UniquePtrTarget> CppUniquePtrPin<T> {
+ pub fn new(item: ::cxx::UniquePtr<T>) -> Self {
+ Self(item)
+ }
+ }
+ }),
+ Item::Fn(parse_quote! {
+ /// Pin this item so that we can create C++ references to it.
+ /// This makes it impossible to hold Rust references because Rust
+ /// references are fundamentally incompatible with C++ references.
+ pub fn cpp_pin_uniqueptr<T: ::cxx::private::UniquePtrTarget> (item: ::cxx::UniquePtr<T>) -> CppUniquePtrPin<T> {
+ CppUniquePtrPin::new(item)
+ }
+ })
+ ]
+ .to_vec()
+}
+
/// Type which handles generation of Rust code.
/// In practice, much of the "generation" involves connecting together
/// existing lumps of code within the Api structures.
pub(crate) struct RsCodeGenerator<'a> {
+ unsafe_policy: &'a UnsafePolicy,
include_list: &'a [String],
bindgen_mod: ItemMod,
original_name_map: CppNameMap,
@@ -145,12 +238,14 @@
/// Generate code for a set of APIs that was discovered during parsing.
pub(crate) fn generate_rs_code(
all_apis: ApiVec<FnPhase>,
+ unsafe_policy: &'a UnsafePolicy,
include_list: &'a [String],
bindgen_mod: ItemMod,
config: &'a IncludeCppConfig,
header_name: Option<String>,
) -> Vec<Item> {
let c = Self {
+ unsafe_policy,
include_list,
bindgen_mod,
original_name_map: original_name_map_from_apis(&all_apis),
@@ -219,6 +314,9 @@
let mut extern_rust_mod_items = extern_rust_mod_items.into_iter().flatten().collect();
// And a list of global items to include at the top level.
let mut all_items: Vec<Item> = all_items.into_iter().flatten().collect();
+ if self.config.unsafe_policy.requires_cpprefs() {
+ all_items.append(&mut get_cppref_items())
+ }
// And finally any C++ we need to generate. And by "we" I mean autocxx not cxx.
let has_additional_cpp_needs = additional_cpp_needs.into_iter().any(std::convert::identity);
extern_c_mod_items.extend(self.build_include_foreign_items(has_additional_cpp_needs));
@@ -357,23 +455,24 @@
}
fn append_uses_for_ns(&mut self, items: &mut Vec<Item>, ns: &Namespace) {
+ let mut imports_from_super = vec!["cxxbridge"];
+ if !self.config.exclude_utilities() {
+ imports_from_super.push("ToCppString");
+ }
+ if self.config.unsafe_policy.requires_cpprefs() {
+ imports_from_super.extend(["CppRef", "CppMutRef"]);
+ }
+ let imports_from_super = imports_from_super.into_iter().map(make_ident);
let super_duper = std::iter::repeat(make_ident("super")); // I'll get my coat
let supers = super_duper.clone().take(ns.depth() + 2);
items.push(Item::Use(parse_quote! {
#[allow(unused_imports)]
use self::
#(#supers)::*
- ::cxxbridge;
+ ::{
+ #(#imports_from_super),*
+ };
}));
- if !self.config.exclude_utilities() {
- let supers = super_duper.clone().take(ns.depth() + 2);
- items.push(Item::Use(parse_quote! {
- #[allow(unused_imports)]
- use self::
- #(#supers)::*
- ::ToCppString;
- }));
- }
let supers = super_duper.take(ns.depth() + 1);
items.push(Item::Use(parse_quote! {
#[allow(unused_imports)]
@@ -407,8 +506,10 @@
}
}
for (ty, entries) in impl_entries_by_type.into_iter() {
+ let lt = ty.lifetime.map(|lt| quote! { < #lt > });
+ let ty = ty.ty;
output_items.push(Item::Impl(parse_quote! {
- impl #ty {
+ impl #lt #ty {
#(#entries)*
}
}))
@@ -487,6 +588,7 @@
analysis,
cpp_call_name,
non_pod_types,
+ self.config,
),
Api::Const { const_item, .. } => RsCodegenResult {
bindgen_mod_items: vec![Item::Const(const_item)],
@@ -609,8 +711,11 @@
name, superclass, ..
} => {
let methods = associated_methods.get(&superclass);
- let generate_peer_constructor =
- subclasses_with_a_single_trivial_constructor.contains(&name.0.name);
+ let generate_peer_constructor = subclasses_with_a_single_trivial_constructor.contains(&name.0.name) &&
+ // TODO: Create an UnsafeCppPeerConstructor trait for calling an unsafe
+ // constructor instead? Need to create unsafe versions of everything that uses
+ // it too.
+ matches!(self.unsafe_policy, UnsafePolicy::AllFunctionsSafe);
self.generate_subclass(name, &superclass, methods, generate_peer_constructor)
}
Api::ExternCppType {
@@ -723,6 +828,10 @@
extern_c_mod_items.push(parse_quote! {
fn #as_mut_id(self: Pin<&mut #cpp_id>) -> Pin<&mut #super_cxxxbridge_id>;
});
+ let as_unique_ptr_id = make_ident(format!("{}_As_{}_UniquePtr", cpp_id, super_name));
+ extern_c_mod_items.push(parse_quote! {
+ fn #as_unique_ptr_id(u: UniquePtr<#cpp_id>) -> UniquePtr<#super_cxxxbridge_id>;
+ });
bindgen_mod_items.push(parse_quote! {
impl AsRef<#super_path> for super::super::super::#id {
fn as_ref(&self) -> &cxxbridge::#super_cxxxbridge_id {
@@ -740,6 +849,14 @@
}
}
});
+ let rs_as_unique_ptr_id = make_ident(format!("as_{}_unique_ptr", super_name));
+ bindgen_mod_items.push(parse_quote! {
+ impl super::super::super::#id {
+ pub fn #rs_as_unique_ptr_id(u: cxx::UniquePtr<#cpp_id>) -> cxx::UniquePtr<cxxbridge::#super_cxxxbridge_id> {
+ cxxbridge::#as_unique_ptr_id(u)
+ }
+ }
+ });
let remove_ownership = sub.remove_ownership();
global_items.push(parse_quote! {
#[allow(non_snake_case)]
@@ -813,7 +930,7 @@
.as_ref()
.#borrow()
.expect(#reentrancy_panic_msg);
- let r = std::ops::#deref_ty::#deref_call(& #mut_token b);
+ let r = ::std::ops::#deref_ty::#deref_call(& #mut_token b);
#methods_trait :: #method_name
(r,
#args)
@@ -1012,7 +1129,7 @@
rust_path: TypePath,
ns_depth: usize,
) -> RsCodegenResult {
- let id = name.get_final_ident();
+ let id = name.type_path_from_root();
let super_duper = std::iter::repeat(make_ident("super"));
let supers = super_duper.take(ns_depth + 2);
let use_statement = parse_quote! {
@@ -1057,7 +1174,10 @@
fn #method(_uhoh: autocxx::BindingGenerationFailure) {
}
},
- ty: self_ty,
+ ty: ImplBlockKey {
+ ty: parse_quote! { #self_ty },
+ lifetime: None,
+ },
})),
None,
None,
diff --git a/third_party/autocxx/engine/src/conversion/convert_error.rs b/third_party/autocxx/engine/src/conversion/convert_error.rs
index 0de4f19..ba8344d 100644
--- a/third_party/autocxx/engine/src/conversion/convert_error.rs
+++ b/third_party/autocxx/engine/src/conversion/convert_error.rs
@@ -125,6 +125,12 @@
ConcreteVersionOfIgnoredTemplate,
#[error("bindgen decided to call this type _bindgen_ty_N because it couldn't deduce the correct name for it. That means we can't generate C++ bindings to it.")]
BindgenTy,
+ #[error("This is a typedef to a type in an anonymous namespace, not currently supported.")]
+ TypedefToTypeInAnonymousNamespace,
+ #[error("This type refers to a generic type parameter of an outer type, which is not yet supported.")]
+ ReferringToGenericTypeParam,
+ #[error("This forward declaration was nested within another struct/class. autocxx is unable to represent inner types if they are forward declarations.")]
+ ForwardDeclaredNestedType,
}
/// Ensures that error contexts are always created using the constructors in this
diff --git a/third_party/autocxx/engine/src/conversion/mod.rs b/third_party/autocxx/engine/src/conversion/mod.rs
index 3043dcf..aa639a2 100644
--- a/third_party/autocxx/engine/src/conversion/mod.rs
+++ b/third_party/autocxx/engine/src/conversion/mod.rs
@@ -157,7 +157,7 @@
// parameterized by a richer set of metadata.
Self::dump_apis("adding casts", &analyzed_apis);
let analyzed_apis =
- FnAnalyzer::analyze_functions(analyzed_apis, unsafe_policy, self.config);
+ FnAnalyzer::analyze_functions(analyzed_apis, &unsafe_policy, self.config);
// If any of those functions turned out to be pure virtual, don't attempt
// to generate UniquePtr implementations for the type, since it can't
// be instantiated.
@@ -197,6 +197,7 @@
)?;
let rs = RsCodeGenerator::generate_rs_code(
analyzed_apis,
+ &unsafe_policy,
self.include_list,
bindgen_mod,
self.config,
diff --git a/third_party/autocxx/engine/src/conversion/parse/bindgen_semantic_attributes.rs b/third_party/autocxx/engine/src/conversion/parse/bindgen_semantic_attributes.rs
index 8b789ae..a8de9ce 100644
--- a/third_party/autocxx/engine/src/conversion/parse/bindgen_semantic_attributes.rs
+++ b/third_party/autocxx/engine/src/conversion/parse/bindgen_semantic_attributes.rs
@@ -61,6 +61,11 @@
ConvertError::UnusedTemplateParam,
Some(ErrorContext::new_for_item(id_for_context.clone())),
))
+ } else if self.get_cpp_visibility() != CppVisibility::Public {
+ Err(ConvertErrorWithContext(
+ ConvertError::NonPublicNestedType,
+ Some(ErrorContext::new_for_item(id_for_context.clone())),
+ ))
} else {
Ok(())
}
diff --git a/third_party/autocxx/engine/src/conversion/parse/parse_bindgen.rs b/third_party/autocxx/engine/src/conversion/parse/parse_bindgen.rs
index 0818aa5..2d4e3de 100644
--- a/third_party/autocxx/engine/src/conversion/parse/parse_bindgen.rs
+++ b/third_party/autocxx/engine/src/conversion/parse/parse_bindgen.rs
@@ -207,7 +207,7 @@
// cxx::bridge can't cope with type aliases to generic
// types at the moment.
let name = api_name_qualified(ns, s.ident.clone(), &annotations)?;
- let err = annotations.check_for_fatal_attrs(&s.ident).err();
+ let mut err = annotations.check_for_fatal_attrs(&s.ident).err();
let api = if ns.is_empty() && self.config.is_rust_type(&s.ident) {
None
} else if Self::spot_forward_declaration(&s.fields)
@@ -219,6 +219,15 @@
// we spot in the previous clause) but instead with an _address field.
// So, solely in the case where we're storing up an error about such
// a templated type, we'll also treat such cases as forward declarations.
+ //
+ // We'll also at this point check for one specific problem with
+ // forward declarations.
+ if err.is_none() && name.cpp_name().contains("::") {
+ err = Some(ConvertErrorWithContext(
+ ConvertError::ForwardDeclaredNestedType,
+ Some(ErrorContext::new_for_item(s.ident)),
+ ));
+ }
Some(UnanalyzedApi::ForwardDeclaration { name, err })
} else {
let has_rvalue_reference_fields = s.fields.iter().any(|f| {
@@ -227,7 +236,6 @@
Some(UnanalyzedApi::Struct {
name,
details: Box::new(StructDetails {
- vis: annotations.get_cpp_visibility(),
layout: annotations.get_layout(),
item: s,
has_rvalue_reference_fields,
diff --git a/third_party/autocxx/engine/src/lib.rs b/third_party/autocxx/engine/src/lib.rs
index 4edc4a4..86a31ea 100644
--- a/third_party/autocxx/engine/src/lib.rs
+++ b/third_party/autocxx/engine/src/lib.rs
@@ -304,6 +304,7 @@
.default_enum_style(bindgen::EnumVariation::Rust {
non_exhaustive: false,
})
+ .rustfmt_bindings(log::log_enabled!(log::Level::Info))
.size_t_is_usize(true)
.enable_cxx_namespaces()
.generate_inline_functions(true)
diff --git a/third_party/autocxx/engine/src/types.rs b/third_party/autocxx/engine/src/types.rs
index 0d11895..337da14 100644
--- a/third_party/autocxx/engine/src/types.rs
+++ b/third_party/autocxx/engine/src/types.rs
@@ -195,6 +195,16 @@
}
}
+ pub(crate) fn type_path_from_root(&self) -> TypePath {
+ let segs = self
+ .ns_segment_iter()
+ .chain(std::iter::once(&self.1))
+ .map(make_ident);
+ parse_quote! {
+ #(#segs)::*
+ }
+ }
+
/// Iterator over segments in the namespace of this name.
pub(crate) fn ns_segment_iter(&self) -> impl Iterator<Item = &String> {
self.0.iter()
diff --git a/third_party/autocxx/examples/chromium-fake-render-frame-host/Cargo.toml b/third_party/autocxx/examples/chromium-fake-render-frame-host/Cargo.toml
index a961708..3862f2c 100644
--- a/third_party/autocxx/examples/chromium-fake-render-frame-host/Cargo.toml
+++ b/third_party/autocxx/examples/chromium-fake-render-frame-host/Cargo.toml
@@ -13,9 +13,9 @@
edition = "2021"
[dependencies]
-cxx = "1.0.54"
-autocxx = { path = "../..", version="0.22.0" }
+cxx = "1.0.68"
+autocxx = { path = "../..", version="0.22.3" }
[build-dependencies]
-autocxx-build = { path = "../../gen/build", version="0.22.0" }
+autocxx-build = { path = "../../gen/build", version="0.22.3" }
miette = { version="4.3", features = [ "fancy" ] }
diff --git a/third_party/autocxx/examples/llvm/Cargo.toml b/third_party/autocxx/examples/llvm/Cargo.toml
index d8806c8..4c03db7 100644
--- a/third_party/autocxx/examples/llvm/Cargo.toml
+++ b/third_party/autocxx/examples/llvm/Cargo.toml
@@ -13,7 +13,7 @@
edition = "2021"
[dependencies]
-cxx = "1.0.54"
+cxx = "1.0.68"
autocxx = { path = "../..", version="0.17.2" }
[build-dependencies]
diff --git a/third_party/autocxx/examples/non-trivial-type-on-stack/Cargo.toml b/third_party/autocxx/examples/non-trivial-type-on-stack/Cargo.toml
index ff227d4..110bd1b 100644
--- a/third_party/autocxx/examples/non-trivial-type-on-stack/Cargo.toml
+++ b/third_party/autocxx/examples/non-trivial-type-on-stack/Cargo.toml
@@ -13,9 +13,9 @@
edition = "2021"
[dependencies]
-cxx = "1.0.54"
-autocxx = { path = "../..", version="0.22.0" }
+cxx = "1.0.68"
+autocxx = { path = "../..", version="0.22.3" }
[build-dependencies]
-autocxx-build = { path = "../../gen/build", version="0.22.0" }
+autocxx-build = { path = "../../gen/build", version="0.22.3" }
miette = { version="4.3", features = [ "fancy" ] }
diff --git a/third_party/autocxx/examples/pod/Cargo.toml b/third_party/autocxx/examples/pod/Cargo.toml
index 71bd5a0..9f4ee5a 100644
--- a/third_party/autocxx/examples/pod/Cargo.toml
+++ b/third_party/autocxx/examples/pod/Cargo.toml
@@ -13,9 +13,9 @@
edition = "2021"
[dependencies]
-cxx = "1.0.54"
-autocxx = { path = "../..", version="0.22.0" }
+cxx = "1.0.68"
+autocxx = { path = "../..", version="0.22.3" }
[build-dependencies]
-autocxx-build = { path = "../../gen/build", version="0.22.0" }
+autocxx-build = { path = "../../gen/build", version="0.22.3" }
miette = { version="4.3", features = [ "fancy" ] }
diff --git a/third_party/autocxx/examples/reference-wrappers/Cargo.toml b/third_party/autocxx/examples/reference-wrappers/Cargo.toml
new file mode 100644
index 0000000..cb85e80
--- /dev/null
+++ b/third_party/autocxx/examples/reference-wrappers/Cargo.toml
@@ -0,0 +1,21 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+[package]
+name = "autocxx-reference-wrapper-example"
+version = "0.22.1"
+authors = ["Adrian Taylor <adetaylor@chromium.org>"]
+edition = "2021"
+
+[dependencies]
+cxx = "1.0.68"
+autocxx = { path = "../..", version="0.22.3" }
+
+[build-dependencies]
+autocxx-build = { path = "../../gen/build", version="0.22.3" }
+miette = { version="4.3", features=["fancy"]}
diff --git a/third_party/autocxx/examples/reference-wrappers/build.rs b/third_party/autocxx/examples/reference-wrappers/build.rs
new file mode 100644
index 0000000..64c573d
--- /dev/null
+++ b/third_party/autocxx/examples/reference-wrappers/build.rs
@@ -0,0 +1,18 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() -> miette::Result<()> {
+ let path = std::path::PathBuf::from("src");
+ let mut b = autocxx_build::Builder::new("src/main.rs", &[&path]).build()?;
+ b.flag_if_supported("-std=c++14")
+ .file("src/input.cc").compile("autocxx-reference-wrapper-example");
+
+ println!("cargo:rerun-if-changed=src/main.rs");
+ println!("cargo:rerun-if-changed=src/input.h");
+ Ok(())
+}
diff --git a/third_party/autocxx/examples/reference-wrappers/src/input.cc b/third_party/autocxx/examples/reference-wrappers/src/input.cc
new file mode 100644
index 0000000..afb7eb2
--- /dev/null
+++ b/third_party/autocxx/examples/reference-wrappers/src/input.cc
@@ -0,0 +1,11 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#include "input.h"
+
+Goat the_goat;
diff --git a/third_party/autocxx/examples/reference-wrappers/src/input.h b/third_party/autocxx/examples/reference-wrappers/src/input.h
new file mode 100644
index 0000000..5e3c6e9
--- /dev/null
+++ b/third_party/autocxx/examples/reference-wrappers/src/input.h
@@ -0,0 +1,42 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#pragma once
+
+#include <cstdint>
+#include <sstream>
+#include <stdint.h>
+#include <string>
+
+class Goat {
+public:
+ Goat() : horns(0) {}
+ void add_a_horn();
+ std::string describe() const;
+private:
+ uint32_t horns;
+};
+
+
+inline void Goat::add_a_horn() { horns++; }
+inline std::string Goat::describe() const {
+ std::ostringstream oss;
+ std::string plural = horns == 1 ? "" : "s";
+ oss << "This goat has " << horns << " horn" << plural << ".";
+ return oss.str();
+}
+
+class Field {
+public:
+ const Goat& get_goat() const {
+ return the_goat;
+ }
+
+private:
+ Goat the_goat;
+};
diff --git a/third_party/autocxx/examples/reference-wrappers/src/main.rs b/third_party/autocxx/examples/reference-wrappers/src/main.rs
new file mode 100644
index 0000000..6bea2ff
--- /dev/null
+++ b/third_party/autocxx/examples/reference-wrappers/src/main.rs
@@ -0,0 +1,67 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This example serves to demonstrate the experimental C++
+// reference wrappers. They exist because C++ references are not
+// the same as Rust references: C++ references may alias, whereas
+// Rust references may not.
+//
+// Standard autocxx behavior therefore introduces unsoundness when
+// C++ references are encountered and treated like Rust references.
+// (cxx has this soundness problem for Trivial types; autocxx
+// makes it worse in that the same problem applies even for
+// opaque types, because we make them sized such that we can allocate
+// them on the stack).
+//
+// Reference wrappers solve that problem because internally, they're
+// just pointers. On the other hand, they're awkward to use,
+// especially in the absence of the Rust "arbitrary self types"
+// feature.
+
+use autocxx::prelude::*;
+
+include_cpp! {
+ #include "input.h"
+ // This next line enables C++ reference wrappers
+ safety!(unsafe_references_wrapped)
+ generate!("Goat")
+ generate!("Field")
+}
+
+fn main() {
+ // Create a cxx::UniquePtr as normal for a Field object.
+ let field = ffi::Field::new().within_unique_ptr();
+ // We assume at this point that C++ has had no opportunity
+ // to retain any reference to the Field. That's not strictly
+ // true, due to RVO, but under all reasonable circumstances
+ // Rust currently has exclusive ownership of the Field we've
+ // been given.
+ // Therefore, at this point in the program, it's still
+ // OK to take Rust references to this Field.
+ let _field_rust_ref = field.as_ref();
+ // However, as soon as we want to pass a reference to the field
+ // back to C++, we have to ensure we have no Rust references
+ // in existence. So: we imprison the object in a "CppPin":
+ let field = ffi::cpp_pin_uniqueptr(field);
+ // We can no longer take Rust references to the field...
+ // let _field_rust_ref = field.as_ref();
+ // However, we can take C++ references. And use such references
+ // to call methods...
+ let another_goat = field.as_cpp_ref().get_goat();
+ // The 'get_goat' method in C++ returns a reference, so this is
+ // another CppRef, not a Rust reference.
+ assert_eq!(
+ another_goat
+ .describe() // returns a UniquePtr<CxxString>, there
+ // are no Rust or C++ references involved at this point.
+ .as_ref()
+ .unwrap()
+ .to_string_lossy(),
+ "This goat has 0 horns."
+ );
+}
diff --git a/third_party/autocxx/examples/s2/Cargo.toml b/third_party/autocxx/examples/s2/Cargo.toml
index 994a381..04524b1 100644
--- a/third_party/autocxx/examples/s2/Cargo.toml
+++ b/third_party/autocxx/examples/s2/Cargo.toml
@@ -15,9 +15,9 @@
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
-cxx = "1.0.54"
-autocxx = { path = "../..", version="0.22.0" }
+cxx = "1.0.68"
+autocxx = { path = "../..", version="0.22.3" }
[build-dependencies]
-autocxx-build = { path = "../../gen/build", version="0.22.0" }
+autocxx-build = { path = "../../gen/build", version="0.22.3" }
miette = { version="4.3", features = [ "fancy" ] }
diff --git a/third_party/autocxx/examples/steam-mini/Cargo.toml b/third_party/autocxx/examples/steam-mini/Cargo.toml
index b292f25..ceb81ff 100644
--- a/third_party/autocxx/examples/steam-mini/Cargo.toml
+++ b/third_party/autocxx/examples/steam-mini/Cargo.toml
@@ -15,9 +15,9 @@
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
-cxx = "1.0.54"
-autocxx = { path = "../..", version="0.22.0" }
+cxx = "1.0.68"
+autocxx = { path = "../..", version="0.22.3" }
[build-dependencies]
-autocxx-build = { path = "../../gen/build", version="0.22.0" }
+autocxx-build = { path = "../../gen/build", version="0.22.3" }
miette = { version="4.3", features = [ "fancy" ] }
diff --git a/third_party/autocxx/examples/subclass/Cargo.toml b/third_party/autocxx/examples/subclass/Cargo.toml
index 8a01205..dc93f53 100644
--- a/third_party/autocxx/examples/subclass/Cargo.toml
+++ b/third_party/autocxx/examples/subclass/Cargo.toml
@@ -15,13 +15,13 @@
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
-cxx = "1.0.54"
-autocxx = { path = "../..", version="0.22.0" }
+cxx = "1.0.68"
+autocxx = { path = "../..", version="0.22.3" }
uwuify = "0.2.2"
textwrap = "0.14"
fastrand = "1.5.0"
[build-dependencies]
-autocxx-build = { path = "../../gen/build", version="0.22.0" }
+autocxx-build = { path = "../../gen/build", version="0.22.3" }
regex = "1.5.4"
miette = { version="4.3", features = [ "fancy" ] }
diff --git a/third_party/autocxx/gen/build/Cargo.toml b/third_party/autocxx/gen/build/Cargo.toml
index 2895526..678a844 100644
--- a/third_party/autocxx/gen/build/Cargo.toml
+++ b/third_party/autocxx/gen/build/Cargo.toml
@@ -8,7 +8,7 @@
[package]
name = "autocxx-build"
-version = "0.22.0"
+version = "0.22.3"
authors = ["Adrian Taylor <adetaylor@chromium.org>"]
edition = "2021"
license = "MIT OR Apache-2.0"
@@ -22,7 +22,7 @@
static = [ "autocxx-engine/static" ]
[dependencies]
-autocxx-engine = { version="=0.22.0", path="../../engine", features = ["build"] }
+autocxx-engine = { version="=0.22.3", path="../../engine", features = ["build"] }
env_logger = "0.9.0"
indexmap = "1.8"
diff --git a/third_party/autocxx/gen/cmd/Cargo.toml b/third_party/autocxx/gen/cmd/Cargo.toml
index 744d29a..51eb56c 100644
--- a/third_party/autocxx/gen/cmd/Cargo.toml
+++ b/third_party/autocxx/gen/cmd/Cargo.toml
@@ -8,7 +8,7 @@
[package]
name = "autocxx-gen"
-version = "0.22.0"
+version = "0.22.3"
authors = ["Adrian Taylor <adetaylor@chromium.org>"]
edition = "2021"
license = "MIT OR Apache-2.0"
@@ -22,7 +22,7 @@
static = [ "autocxx-engine/static" ]
[dependencies]
-autocxx-engine = { version="=0.22.0", path="../../engine" }
+autocxx-engine = { version="=0.22.3", path="../../engine" }
clap = { version = "3.1.2", features = ["cargo"] }
proc-macro2 = "1.0"
env_logger = "0.9.0"
@@ -32,10 +32,10 @@
[dev-dependencies]
assert_cmd = "1.0.3"
-tempdir = "0.3.7"
-autocxx-integration-tests = { path = "../../integration-tests", version="=0.22.0" }
+tempfile = "3.1"
+autocxx-integration-tests = { path = "../../integration-tests", version="=0.22.3" }
# This is necessary for building the projects created
# by the trybuild test system...
autocxx = { path="../.." }
-cxx = "1.0.54"
+cxx = "1.0.68"
itertools = "0.10.3"
\ No newline at end of file
diff --git a/third_party/autocxx/gen/cmd/src/depfile.rs b/third_party/autocxx/gen/cmd/src/depfile.rs
index 5bb7a66..d8d3626 100644
--- a/third_party/autocxx/gen/cmd/src/depfile.rs
+++ b/third_party/autocxx/gen/cmd/src/depfile.rs
@@ -63,13 +63,13 @@
mod tests {
use std::{fs::File, io::Read};
- use tempdir::TempDir;
+ use tempfile::tempdir;
use super::Depfile;
#[test]
fn test_simple_depfile() {
- let tmp_dir = TempDir::new("depfile-test").unwrap();
+ let tmp_dir = tempdir().unwrap();
let f = tmp_dir.path().join("depfile.d");
let mut df = Depfile::new(&f).unwrap();
df.add_output(&tmp_dir.path().join("a/b"));
@@ -85,7 +85,7 @@
#[test]
fn test_multiple_outputs() {
- let tmp_dir = TempDir::new("depfile-test").unwrap();
+ let tmp_dir = tempdir().unwrap();
let f = tmp_dir.path().join("depfile.d");
let mut df = Depfile::new(&f).unwrap();
df.add_output(&tmp_dir.path().join("a/b"));
diff --git a/third_party/autocxx/gen/cmd/src/main.rs b/third_party/autocxx/gen/cmd/src/main.rs
index 20f278b..8b109c3 100644
--- a/third_party/autocxx/gen/cmd/src/main.rs
+++ b/third_party/autocxx/gen/cmd/src/main.rs
@@ -316,27 +316,33 @@
name_autocxxgen_h,
)?;
}
- //writer.write_placeholders(header_counter.into_inner(), desired_number, "h")?;
if matches.is_present("gen-rs-include") {
+ if !matches.is_present("fix-rs-include-name") && desired_number.is_some() {
+ return Err(miette::Report::msg(
+ "gen-rs-include and generate-exact requires fix-rs-include-name.",
+ ));
+ }
+ let mut counter = 0usize;
let rust_buildables = parsed_files
.iter()
.flat_map(|parsed_file| parsed_file.get_rs_outputs());
- for (counter, include_cxx) in rust_buildables.enumerate() {
+ for include_cxx in rust_buildables {
let rs_code = generate_rs_single(include_cxx);
let fname = if matches.is_present("fix-rs-include-name") {
- format!("gen{}.include.rs", counter)
+ name_include_rs(counter)
} else {
rs_code.filename
};
writer.write_to_file(fname, rs_code.code.as_bytes())?;
+ counter += 1;
}
+ writer.write_placeholders(counter, desired_number, name_include_rs)?;
}
if matches.is_present("gen-rs-archive") {
let rust_buildables = parsed_files
.iter()
.flat_map(|parsed_file| parsed_file.get_rs_outputs());
let json = generate_rs_archive(rust_buildables);
- eprintln!("Writing to gen.rs.json in {:?}", outdir);
writer.write_to_file("gen.rs.json".into(), json.as_bytes())?;
}
if let Some(depfile) = depfile {
@@ -353,6 +359,10 @@
format!("gen{}.h", counter)
}
+fn name_include_rs(counter: usize) -> String {
+ format!("gen{}.include.rs", counter)
+}
+
fn get_dependency_recorder(depfile: Rc<RefCell<Depfile>>) -> Box<dyn RebuildDependencyRecorder> {
Box::new(RecordIntoDepfile(depfile))
}
diff --git a/third_party/autocxx/gen/cmd/tests/cmd_test.rs b/third_party/autocxx/gen/cmd/tests/cmd_test.rs
index d0e671e..7e455a2 100644
--- a/third_party/autocxx/gen/cmd/tests/cmd_test.rs
+++ b/third_party/autocxx/gen/cmd/tests/cmd_test.rs
@@ -13,7 +13,7 @@
use assert_cmd::Command;
use autocxx_integration_tests::{build_from_folder, RsFindMode};
use itertools::Itertools;
-use tempdir::TempDir;
+use tempfile::{tempdir, TempDir};
static MAIN_RS: &str = concat!(
include_str!("../../../demo/src/main.rs"),
@@ -31,7 +31,7 @@
static INPUT2_H: &str = include_str!("data/input2.h");
static INPUT3_H: &str = include_str!("data/input3.h");
-const KEEP_TEMPDIRS: bool = false;
+const KEEP_TEMPDIRS: bool = true;
#[test]
fn test_help() -> Result<(), Box<dyn std::error::Error>> {
@@ -107,7 +107,7 @@
#[test]
fn test_gen() -> Result<(), Box<dyn std::error::Error>> {
- let tmp_dir = TempDir::new("example")?;
+ let tmp_dir = tempdir()?;
base_test(&tmp_dir, RsGenMode::Single, |_| {})?;
File::create(tmp_dir.path().join("cxx.h"))
.and_then(|mut cxx_h| cxx_h.write_all(autocxx_engine::HEADER.as_bytes()))?;
@@ -128,7 +128,7 @@
#[test]
fn test_gen_archive() -> Result<(), Box<dyn std::error::Error>> {
- let tmp_dir = TempDir::new("example")?;
+ let tmp_dir = tempdir()?;
base_test(&tmp_dir, RsGenMode::Archive, |_| {})?;
File::create(tmp_dir.path().join("cxx.h"))
.and_then(|mut cxx_h| cxx_h.write_all(autocxx_engine::HEADER.as_bytes()))?;
@@ -148,7 +148,7 @@
#[test]
fn test_gen_multiple_in_archive() -> Result<(), Box<dyn std::error::Error>> {
- let tmp_dir = TempDir::new("example")?;
+ let tmp_dir = tempdir()?;
let mut files = HashMap::new();
files.insert("input2.h", INPUT2_H.as_bytes());
@@ -186,14 +186,15 @@
#[test]
fn test_include_prefixes() -> Result<(), Box<dyn std::error::Error>> {
- let tmp_dir = TempDir::new("example")?;
+ let tmp_dir = tempdir()?;
base_test(&tmp_dir, RsGenMode::Single, |cmd| {
cmd.arg("--cxx-h-path")
.arg("foo/")
.arg("--cxxgen-h-path")
.arg("bar/")
.arg("--generate-exact")
- .arg("3");
+ .arg("3")
+ .arg("--fix-rs-include-name");
})?;
assert_contains(&tmp_dir, "autocxxgen0.h", "foo/cxx.h");
// Currently we don't test cxxgen-h-path because we build the demo code
@@ -203,11 +204,12 @@
#[test]
fn test_gen_fixed_num() -> Result<(), Box<dyn std::error::Error>> {
- let tmp_dir = TempDir::new("example")?;
+ let tmp_dir = tempdir()?;
let depfile = tmp_dir.path().join("test.d");
base_test(&tmp_dir, RsGenMode::Single, |cmd| {
cmd.arg("--generate-exact")
.arg("2")
+ .arg("--fix-rs-include-name")
.arg("--depfile")
.arg(depfile);
})?;
@@ -217,16 +219,16 @@
assert_contentful(&tmp_dir, "autocxxgen0.h");
assert_not_contentful(&tmp_dir, "gen1.h");
assert_not_contentful(&tmp_dir, "autocxxgen1.h");
- assert_contentful(&tmp_dir, "autocxx-ffi-default-gen.rs");
+ assert_contentful(&tmp_dir, "gen0.include.rs");
assert_contentful(&tmp_dir, "test.d");
File::create(tmp_dir.path().join("cxx.h"))
.and_then(|mut cxx_h| cxx_h.write_all(autocxx_engine::HEADER.as_bytes()))?;
let r = build_from_folder(
tmp_dir.path(),
&tmp_dir.path().join("demo/main.rs"),
- vec![tmp_dir.path().join("autocxx-ffi-default-gen.rs")],
+ vec![tmp_dir.path().join("gen0.include.rs")],
&["gen0.cc"],
- RsFindMode::AutocxxRs,
+ RsFindMode::AutocxxRsFile,
);
if KEEP_TEMPDIRS {
println!("Tempdir: {:?}", tmp_dir.into_path().to_str());
@@ -237,7 +239,7 @@
#[test]
fn test_gen_preprocess() -> Result<(), Box<dyn std::error::Error>> {
- let tmp_dir = TempDir::new("example")?;
+ let tmp_dir = tempdir()?;
let prepro_path = tmp_dir.path().join("preprocessed.h");
base_test(&tmp_dir, RsGenMode::Single, |cmd| {
cmd.env("AUTOCXX_PREPROCESS", prepro_path.to_str().unwrap());
@@ -251,7 +253,7 @@
#[test]
fn test_gen_repro() -> Result<(), Box<dyn std::error::Error>> {
- let tmp_dir = TempDir::new("example")?;
+ let tmp_dir = tempdir()?;
let repro_path = tmp_dir.path().join("repro.json");
base_test(&tmp_dir, RsGenMode::Single, |cmd| {
cmd.env("AUTOCXX_REPRO_CASE", repro_path.to_str().unwrap());
diff --git a/third_party/autocxx/integration-tests/Cargo.toml b/third_party/autocxx/integration-tests/Cargo.toml
index c139479..7791ea5 100644
--- a/third_party/autocxx/integration-tests/Cargo.toml
+++ b/third_party/autocxx/integration-tests/Cargo.toml
@@ -8,7 +8,7 @@
[package]
name = "autocxx-integration-tests"
-version = "0.22.0"
+version = "0.22.3"
autotests = false
edition = "2021"
authors = ["Adrian Taylor <adetaylor@chromium.org>"]
@@ -27,15 +27,15 @@
cc = "1.0"
quote = "1.0"
once_cell = "1.7"
-autocxx-engine = { version="=0.22.0", path="../engine", features = ["build"] }
+autocxx-engine = { version="=0.22.3", path="../engine", features = ["build"] }
# This is necessary for building the projects created
# by the trybuild test system...
-autocxx = { path="..", version="=0.22.0" }
+autocxx = { path="..", version="=0.22.3" }
link-cplusplus = "1.0"
tempfile = "3.1"
indoc = "1.0"
log = "0.4"
-cxx = "1.0.54"
+cxx = "1.0.68"
itertools = "0.10"
[dependencies.syn]
diff --git a/third_party/autocxx/integration-tests/src/lib.rs b/third_party/autocxx/integration-tests/src/lib.rs
index 3499af8..352c8ab 100644
--- a/third_party/autocxx/integration-tests/src/lib.rs
+++ b/third_party/autocxx/integration-tests/src/lib.rs
@@ -21,7 +21,7 @@
use log::info;
use once_cell::sync::OnceCell;
use proc_macro2::{Span, TokenStream};
-use quote::{quote, TokenStreamExt};
+use quote::{format_ident, quote, TokenStreamExt};
use syn::Token;
use tempfile::{tempdir, TempDir};
@@ -56,6 +56,7 @@
pub enum RsFindMode {
AutocxxRs,
AutocxxRsArchive,
+ AutocxxRsFile,
}
/// API to test building pre-generated files.
@@ -169,6 +170,10 @@
"AUTOCXX_RS_JSON_ARCHIVE",
self.temp_dir.path().join("gen.rs.json"),
),
+ RsFindMode::AutocxxRsFile => std::env::set_var(
+ "AUTOCXX_RS_FILE",
+ self.temp_dir.path().join("gen0.include.rs"),
+ ),
};
std::panic::catch_unwind(|| {
let test_cases = trybuild::TestCases::new();
@@ -200,6 +205,7 @@
None,
None,
None,
+ "unsafe_ffi",
)
.unwrap()
}
@@ -254,10 +260,23 @@
builder_modifier,
code_checker,
extra_rust,
+ "unsafe_ffi",
)
.unwrap()
}
+pub fn run_generate_all_test(header_code: &str) {
+ run_test_ex(
+ "",
+ header_code,
+ quote! {},
+ quote! { generate_all!() },
+ None,
+ None,
+ None,
+ );
+}
+
pub fn run_test_expect_fail(
cxx_code: &str,
header_code: &str,
@@ -273,6 +292,7 @@
None,
None,
None,
+ "unsafe_ffi",
)
.expect_err("Unexpected success");
}
@@ -294,6 +314,7 @@
builder_modifier,
code_checker,
extra_rust,
+ "unsafe_ffi",
)
.expect_err("Unexpected success");
}
@@ -343,14 +364,16 @@
builder_modifier: Option<BuilderModifier>,
rust_code_checker: Option<CodeChecker>,
extra_rust: Option<TokenStream>,
+ safety_policy: &str,
) -> Result<(), TestError> {
let hexathorpe = Token![#](Span::call_site());
+ let safety_policy = format_ident!("{}", safety_policy);
let unexpanded_rust = quote! {
use autocxx::prelude::*;
include_cpp!(
#hexathorpe include "input.h"
- safety!(unsafe_ffi)
+ safety!(#safety_policy)
#directives
);
diff --git a/third_party/autocxx/integration-tests/tests/cpprefs_test.rs b/third_party/autocxx/integration-tests/tests/cpprefs_test.rs
new file mode 100644
index 0000000..4241dec
--- /dev/null
+++ b/third_party/autocxx/integration-tests/tests/cpprefs_test.rs
@@ -0,0 +1,96 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Tests specific to reference wrappers.
+
+use autocxx_integration_tests::{directives_from_lists, do_run_test};
+use indoc::indoc;
+use proc_macro2::TokenStream;
+use quote::quote;
+
+/// A positive test, we expect to pass.
+fn run_cpprefs_test(
+ cxx_code: &str,
+ header_code: &str,
+ rust_code: TokenStream,
+ generate: &[&str],
+ generate_pods: &[&str],
+) {
+ do_run_test(
+ cxx_code,
+ header_code,
+ rust_code,
+ directives_from_lists(generate, generate_pods, None),
+ None,
+ None,
+ None,
+ "unsafe_references_wrapped",
+ )
+ .unwrap()
+}
+
+#[test]
+fn test_method_call_mut() {
+ run_cpprefs_test(
+ "",
+ indoc! {"
+ #include <string>
+ #include <sstream>
+
+ class Goat {
+ public:
+ Goat() : horns(0) {}
+ void add_a_horn();
+ private:
+ uint32_t horns;
+ };
+
+ inline void Goat::add_a_horn() { horns++; }
+ "},
+ quote! {
+ let goat = ffi::Goat::new().within_unique_ptr();
+ let mut goat = ffi::CppUniquePtrPin::new(goat);
+ goat.as_cpp_mut_ref().add_a_horn();
+ },
+ &["Goat"],
+ &[],
+ )
+}
+
+#[test]
+fn test_method_call_const() {
+ run_cpprefs_test(
+ "",
+ indoc! {"
+ #include <string>
+ #include <sstream>
+
+ class Goat {
+ public:
+ Goat() : horns(0) {}
+ std::string describe() const;
+ private:
+ uint32_t horns;
+ };
+
+ inline std::string Goat::describe() const {
+ std::ostringstream oss;
+ std::string plural = horns == 1 ? \"\" : \"s\";
+ oss << \"This goat has \" << horns << \" horn\" << plural << \".\";
+ return oss.str();
+ }
+ "},
+ quote! {
+ let goat = ffi::Goat::new().within_unique_ptr();
+ let goat = ffi::cpp_pin_uniqueptr(goat);
+ goat.as_cpp_ref().describe();
+ },
+ &["Goat"],
+ &[],
+ )
+}
diff --git a/third_party/autocxx/integration-tests/tests/integration_test.rs b/third_party/autocxx/integration-tests/tests/integration_test.rs
index 2a3340a..16c792b8 100644
--- a/third_party/autocxx/integration-tests/tests/integration_test.rs
+++ b/third_party/autocxx/integration-tests/tests/integration_test.rs
@@ -16,8 +16,8 @@
},
};
use autocxx_integration_tests::{
- directives_from_lists, do_run_test, do_run_test_manual, run_test, run_test_ex,
- run_test_expect_fail, run_test_expect_fail_ex, TestError,
+ directives_from_lists, do_run_test, do_run_test_manual, run_generate_all_test, run_test,
+ run_test_ex, run_test_expect_fail, run_test_expect_fail_ex, TestError,
};
use indoc::indoc;
use itertools::Itertools;
@@ -3541,20 +3541,7 @@
template <class _Ty>
using _Remove_cvref_t = remove_cv_t<remove_reference_t<_Ty>>;
"};
-
- let rs = quote! {};
-
- run_test_ex(
- "",
- hdr,
- rs,
- quote! {
- generate_all!()
- },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -5044,15 +5031,7 @@
inline void a() {}
};
"};
- run_test_ex(
- "",
- hdr,
- quote! {},
- quote! { generate_all!() },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -5941,6 +5920,7 @@
None,
None,
None,
+ "unsafe_ffi",
) {
Err(TestError::CppBuild(_)) => {} // be sure this fails due to a static_assert
// rather than some runtime problem
@@ -6232,20 +6212,7 @@
typedef bitset<1> mybitset;
"};
-
- let rs = quote! {};
-
- run_test_ex(
- "",
- hdr,
- rs,
- quote! {
- generate_all!()
- },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -6433,18 +6400,7 @@
typedef std::a<b<d>::c, int, int> e;
};
"};
- let rs = quote! {};
- run_test_ex(
- "",
- hdr,
- rs,
- quote! {
- generate_all!()
- },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -6505,18 +6461,7 @@
}
typedef char daft;
"};
- let rs = quote! {};
- run_test_ex(
- "",
- hdr,
- rs,
- quote! {
- generate_all!()
- },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -6738,6 +6683,50 @@
}
#[test]
+/// Tests extern_cpp_type with a type inside a namespace.
+fn test_extern_cpp_type_namespace() {
+ let hdr = indoc! {"
+ #include <cstdint>
+ namespace b {
+ struct B {
+ B() {}
+ };
+ } // namespace b
+ struct A {
+ A() {}
+ b::B make_b() { return b::B(); }
+ };
+ "};
+ let hexathorpe = Token![#](Span::call_site());
+ let rs = quote! {
+ pub mod b {
+ autocxx::include_cpp! {
+ #hexathorpe include "input.h"
+ safety!(unsafe_ffi)
+ name!(ffi_b)
+ generate_pod!("b::B")
+ }
+ pub use ffi_b::b::B;
+ }
+ pub mod a {
+ autocxx::include_cpp! {
+ #hexathorpe include "input.h"
+ safety!(unsafe_ffi)
+ name!(ffi_a)
+ generate_pod!("A")
+ extern_cpp_type!("b::B", crate::b::B)
+ }
+ pub use ffi_a::A;
+ }
+ fn main() {
+ use autocxx::prelude::*;
+ let _ = crate::a::A::new().within_unique_ptr().as_mut().unwrap().make_b();
+ }
+ };
+ do_run_test_manual("", hdr, rs, None, None).unwrap();
+}
+
+#[test]
#[ignore] // because we currently require UniquePtrTarget which this can't implement
fn test_extern_cpp_type_manual() {
let hdr = indoc! {"
@@ -7600,6 +7589,49 @@
}
#[test]
+/// Tests the Rust code generated for subclasses when there's a `std` module in scope representing
+/// the C++ `std` namespace. This breaks if any of the generated Rust code fails to fully qualify
+/// its references to the Rust `std`.
+fn test_subclass_with_std() {
+ let hdr = indoc! {"
+ #include <cstdint>
+ #include <chrono>
+
+ class Observer {
+ public:
+ Observer() {}
+ virtual void foo() const {}
+ virtual ~Observer() {}
+
+ void unused(std::chrono::nanoseconds) {}
+ };
+ "};
+ run_test_ex(
+ "",
+ hdr,
+ quote! {
+ let obs = MyObserver::new_rust_owned(MyObserver { a: 3, cpp_peer: Default::default() });
+ obs.borrow().foo();
+ },
+ quote! {
+ subclass!("Observer",MyObserver)
+ },
+ None,
+ None,
+ Some(quote! {
+ use autocxx::subclass::CppSubclass;
+ use ffi::Observer_methods;
+ #[autocxx::subclass::subclass]
+ pub struct MyObserver {
+ a: u32
+ }
+ impl Observer_methods for MyObserver {
+ }
+ }),
+ );
+}
+
+#[test]
fn test_two_subclasses() {
let hdr = indoc! {"
#include <cstdint>
@@ -7704,6 +7736,54 @@
}
#[test]
+fn test_subclass_no_safety() {
+ let hdr = indoc! {"
+ #include <cstdint>
+
+ class Observer {
+ public:
+ Observer() {}
+ virtual void foo() = 0;
+ virtual ~Observer() {}
+ };
+ "};
+ let hexathorpe = Token![#](Span::call_site());
+ let unexpanded_rust = quote! {
+ use autocxx::prelude::*;
+
+ include_cpp!(
+ #hexathorpe include "input.h"
+ subclass!("Observer",MyObserver)
+ );
+
+ use ffi::Observer_methods;
+ #hexathorpe [autocxx::subclass::subclass]
+ pub struct MyObserver;
+ impl Observer_methods for MyObserver {
+ unsafe fn foo(&mut self) {}
+ }
+
+ use autocxx::subclass::{CppSubclass, CppPeerConstructor, CppSubclassRustPeerHolder};
+ use cxx::UniquePtr;
+ impl CppPeerConstructor<ffi::MyObserverCpp> for MyObserver {
+ fn make_peer(
+ &mut self,
+ peer_holder: CppSubclassRustPeerHolder<Self>,
+ ) -> UniquePtr<ffi::MyObserverCpp> {
+ UniquePtr::emplace(unsafe { ffi::MyObserverCpp::new(peer_holder) })
+ }
+ }
+
+ fn main() {
+ let obs = MyObserver::new_rust_owned(MyObserver { cpp_peer: Default::default() });
+ unsafe { obs.borrow_mut().foo() };
+ }
+ };
+
+ do_run_test_manual("", hdr, unexpanded_rust, None, None).unwrap()
+}
+
+#[test]
fn test_pv_protected_constructor() {
let hdr = indoc! {"
#include <cstdint>
@@ -8291,6 +8371,74 @@
}
#[test]
+fn test_pv_subclass_as_superclass() {
+ let hdr = indoc! {"
+ #include <cstdint>
+ #include <memory>
+
+ class TestObserver {
+ public:
+ TestObserver() {}
+ virtual void a() const = 0;
+ virtual ~TestObserver() {}
+ };
+
+ inline void call_observer(std::unique_ptr<TestObserver> obs) { obs->a(); }
+ "};
+ run_test_ex(
+ "",
+ hdr,
+ quote! {
+ use autocxx::subclass::CppSubclass;
+ let obs = MyTestObserver::new_cpp_owned(
+ MyTestObserver::default()
+ );
+ let obs = MyTestObserver::as_TestObserver_unique_ptr(obs);
+ assert!(!Lazy::force(&STATUS).lock().unwrap().dropped);
+ ffi::call_observer(obs);
+ assert!(Lazy::force(&STATUS).lock().unwrap().sub_a_called);
+ assert!(Lazy::force(&STATUS).lock().unwrap().dropped);
+ *Lazy::force(&STATUS).lock().unwrap() = Default::default();
+ },
+ quote! {
+ generate!("call_observer")
+ subclass!("TestObserver",MyTestObserver)
+ },
+ None,
+ None,
+ Some(quote! {
+ use once_cell::sync::Lazy;
+ use std::sync::Mutex;
+
+ use ffi::TestObserver_methods;
+ #[autocxx::subclass::subclass]
+ #[derive(Default)]
+ pub struct MyTestObserver {
+ }
+ impl TestObserver_methods for MyTestObserver {
+ fn a(&self) {
+ assert!(!Lazy::force(&STATUS).lock().unwrap().dropped);
+ Lazy::force(&STATUS).lock().unwrap().sub_a_called = true;
+ }
+ }
+ impl Drop for MyTestObserver {
+ fn drop(&mut self) {
+ Lazy::force(&STATUS).lock().unwrap().dropped = true;
+ }
+ }
+
+ #[derive(Default)]
+ struct Status {
+ sub_a_called: bool,
+ dropped: bool,
+ }
+
+ static STATUS: Lazy<Mutex<Status>> = Lazy::new(|| Mutex::new(Status::default()));
+ }),
+ );
+}
+
+#[test]
fn test_cycle_nonpod_simple() {
let hdr = indoc! {"
#include <string>
@@ -10925,15 +11073,7 @@
d e();
};
"};
- run_test_ex(
- "",
- hdr,
- quote! {},
- quote! { generate_all!() },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -10948,15 +11088,7 @@
};
} // namespace
"};
- run_test_ex(
- "",
- hdr,
- quote! {},
- quote! { generate_all!() },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -11015,15 +11147,7 @@
};
} // namespace a
"};
- run_test_ex(
- "",
- hdr,
- quote! {},
- quote! { generate_all!() },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -11044,15 +11168,7 @@
_CharT b;
};
"};
- run_test_ex(
- "",
- hdr,
- quote! {},
- quote! { generate_all!() },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -11063,15 +11179,7 @@
b c;
};
"};
- run_test_ex(
- "",
- hdr,
- quote! {},
- quote! { generate_all!() },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -11086,15 +11194,7 @@
}
}
"};
- run_test_ex(
- "",
- hdr,
- quote! {},
- quote! { generate_all!() },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
}
#[test]
@@ -11116,15 +11216,171 @@
} // namespace
} // namespace a
"};
- run_test_ex(
- "",
- hdr,
- quote! {},
- quote! { generate_all!() },
- None,
- None,
- None,
- );
+ run_generate_all_test(hdr);
+}
+
+/// The problem here is that 'g' doesn't get annotated with
+/// the unused_template semantic attribute.
+/// This seems to be because both g and f have template
+/// parameters, so they're all "used", but effectively cancel
+/// out and thus bindgen generates
+/// pub type g = root::b::f;
+/// So, what we should do here is spot any typedef depending
+/// on a template which takes template args, and reject that too.
+/// Probably.
+#[test]
+#[ignore] // https://github.com/google/autocxx/pull/1094
+fn test_issue_1094() {
+ let hdr = indoc! {"
+ namespace {
+ typedef int a;
+ }
+ namespace b {
+ template <typename> struct c;
+ template <typename d, d e> using f = __make_integer_seq<c, d, e>;
+ template <a e> using g = f<a, e>;
+ } // namespace b
+ "};
+ run_generate_all_test(hdr);
+}
+
+#[test]
+fn test_issue_1096a() {
+ let hdr = indoc! {"
+ namespace a {
+ class b {
+ class c;
+ };
+ } // namespace a
+ "};
+ run_generate_all_test(hdr);
+}
+
+#[test]
+fn test_issue_1096b() {
+ let hdr = indoc! {"
+ namespace a {
+ class b {
+ public:
+ class c;
+ };
+ } // namespace a
+ "};
+ run_generate_all_test(hdr);
+}
+
+#[test]
+fn test_issue_1096c() {
+ let hdr = indoc! {"
+ namespace a {
+ class b {
+ public:
+ class c {
+ public:
+ int d;
+ };
+ };
+ } // namespace a
+ "};
+ run_generate_all_test(hdr);
+}
+
+#[test]
+fn test_issue_1096d() {
+ let hdr = indoc! {"
+ namespace a {
+ class b {
+ private:
+ class c {
+ public:
+ int d;
+ };
+ };
+ } // namespace a
+ "};
+ run_generate_all_test(hdr);
+}
+
+#[test]
+fn test_issue_1096e() {
+ let hdr = indoc! {"
+ namespace a {
+ class b {
+ private:
+ enum c {
+ D,
+ };
+ };
+ } // namespace a
+ "};
+ run_generate_all_test(hdr);
+}
+
+/// Unclear why minimization resulted in this particular test case.
+#[test]
+#[ignore] // https://github.com/google/autocxx/pull/1097
+fn test_issue_1097() {
+ let hdr = indoc! {"
+ namespace rust {
+ inline namespace a {
+ class Str {
+ public:
+ ~Str();
+ };
+ } // namespace a
+ } // namespace rust
+ "};
+ run_generate_all_test(hdr);
+}
+
+#[test]
+fn test_issue_1098a() {
+ let hdr = indoc! {"
+ namespace {
+ namespace {
+ template <typename _CharT> class a {
+ typedef _CharT b;
+ b c;
+ };
+ template <typename _CharT> class d : a<_CharT> {};
+ } // namespace
+ } // namespace
+ "};
+ run_generate_all_test(hdr);
+}
+
+/// Need to spot structs like this:
+/// pub struct d<_CharT> {
+/// _base: root::a<_CharT>,
+/// }
+/// and not create concrete types where the inner type is something from
+/// the outer context.
+#[test]
+fn test_issue_1098b() {
+ let hdr = indoc! {"
+ template <typename _CharT> class a {
+ typedef _CharT b;
+ b c;
+ };
+ template <typename _CharT> class d : a<_CharT> {};
+ "};
+ run_generate_all_test(hdr);
+}
+
+#[test]
+fn test_issue_1098c() {
+ let hdr = indoc! {"
+ namespace {
+ namespace {
+ struct A {
+ int a;
+ };
+ typedef A B;
+ } // namespace
+ } // namespace
+ inline void take_b(const B&) {}
+ "};
+ run_generate_all_test(hdr);
}
#[test]
@@ -11185,6 +11441,58 @@
run_test("", hdr, rs, &["RenderFrameHost"], &[]);
}
+#[test]
+fn test_issue_1081() {
+ let hdr = indoc! {"
+ namespace libtorrent {
+ char version;
+ }
+ namespace libtorrent {
+ struct session;
+ }
+ "};
+ let rs = quote! {};
+ run_test("", hdr, rs, &["libtorrent::session"], &[]);
+}
+
+#[test]
+#[ignore] // This test passes under all normal builds. However
+ // it triggers a stack use-after-return in older versions of
+ // libclang which is only detected under ASAN (obviously it
+ // sometimes causes crashes the rest of the time).
+ // This UaR does not occur when the same code is processed
+ // with a HEAD version of clang itself as of June 2022. This
+ // may mean that the UaR has been fixed in later versions of
+ // the clang code, or that it only occurs when the code is used
+ // in a libclang context (not a plain clang compilation context).
+ // If the problem recurs, we should work out which of these is
+ // the case.
+fn test_issue_1125() {
+ let hdr = indoc! {"
+ namespace {
+ namespace {
+ template <class a> class b {
+ typedef a c;
+ struct {
+ c : sizeof(c);
+ };
+ };
+ } // namespace
+ } // namespace
+ "};
+ run_test_ex(
+ "",
+ hdr,
+ quote! {},
+ quote! {
+ generate_all!()
+ },
+ make_cpp17_adder(),
+ None,
+ None,
+ );
+}
+
// Yet to test:
// - Ifdef
// - Out param pointers
diff --git a/third_party/autocxx/integration-tests/tests/lib.rs b/third_party/autocxx/integration-tests/tests/lib.rs
index 17f076a..8d9eba9 100644
--- a/third_party/autocxx/integration-tests/tests/lib.rs
+++ b/third_party/autocxx/integration-tests/tests/lib.rs
@@ -8,4 +8,5 @@
mod builder_modifiers;
mod code_checkers;
+mod cpprefs_test;
mod integration_test;
diff --git a/third_party/autocxx/macro/Cargo.toml b/third_party/autocxx/macro/Cargo.toml
index d1d633a..1f81ab1 100644
--- a/third_party/autocxx/macro/Cargo.toml
+++ b/third_party/autocxx/macro/Cargo.toml
@@ -8,7 +8,7 @@
[package]
name = "autocxx-macro"
-version = "0.22.0"
+version = "0.22.3"
authors = ["Adrian Taylor <adetaylor@chromium.org>"]
license = "MIT OR Apache-2.0"
description = "Safe autogenerated interop between Rust and C++"
@@ -21,7 +21,7 @@
proc-macro = true
[dependencies]
-autocxx-parser = { path="../parser", version="=0.22.0" }
+autocxx-parser = { path="../parser", version="=0.22.3" }
proc-macro-error = "1.0"
proc-macro2 = "1.0.11"
quote = "1.0"
diff --git a/third_party/autocxx/parser/Cargo.toml b/third_party/autocxx/parser/Cargo.toml
index 481f21d..3668e65 100644
--- a/third_party/autocxx/parser/Cargo.toml
+++ b/third_party/autocxx/parser/Cargo.toml
@@ -8,7 +8,7 @@
[package]
name = "autocxx-parser"
-version = "0.22.0"
+version = "0.22.3"
authors = ["Adrian Taylor <adetaylor@chromium.org>"]
license = "MIT OR Apache-2.0"
description = "Safe autogenerated interop between Rust and C++"
diff --git a/third_party/autocxx/parser/src/config.rs b/third_party/autocxx/parser/src/config.rs
index 4204acf..8d30249 100644
--- a/third_party/autocxx/parser/src/config.rs
+++ b/third_party/autocxx/parser/src/config.rs
@@ -33,6 +33,7 @@
pub enum UnsafePolicy {
AllFunctionsSafe,
AllFunctionsUnsafe,
+ ReferencesWrappedAllFunctionsSafe,
}
impl Default for UnsafePolicy {
@@ -50,8 +51,13 @@
Some(id) => {
if id == "unsafe_ffi" {
Ok(UnsafePolicy::AllFunctionsSafe)
+ } else if id == "unsafe_references_wrapped" {
+ Ok(UnsafePolicy::ReferencesWrappedAllFunctionsSafe)
} else {
- Err(syn::Error::new(id.span(), "expected unsafe_ffi"))
+ Err(syn::Error::new(
+ id.span(),
+ "expected unsafe_ffi or unsafe_references_wrapped",
+ ))
}
}
None => Ok(UnsafePolicy::AllFunctionsUnsafe),
@@ -70,10 +76,20 @@
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
if *self == UnsafePolicy::AllFunctionsSafe {
tokens.extend(quote! { unsafe })
+ } else if *self == UnsafePolicy::ReferencesWrappedAllFunctionsSafe {
+ tokens.extend(quote! { unsafe_references_wrapped })
}
}
}
+impl UnsafePolicy {
+ /// Whether we are treating C++ references as a different thing from Rust
+ /// references and therefore have to generate lots of code for a CppRef type
+ pub fn requires_cpprefs(&self) -> bool {
+ matches!(self, Self::ReferencesWrappedAllFunctionsSafe)
+ }
+}
+
/// An entry in the allowlist.
#[derive(Hash, Debug)]
pub enum AllowlistEntry {
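
As a quick illustration of the config.rs change above, here is a sketch (not part of this change) of how the new policy variant parses and what `requires_cpprefs()` reports; it assumes `UnsafePolicy`'s existing `syn::parse::Parse` implementation, from which the parsing code shown above is taken.

    use autocxx_parser::UnsafePolicy;

    fn main() {
        // `safety!(unsafe_references_wrapped)` parses to the new variant...
        let policy: UnsafePolicy =
            syn::parse_str("unsafe_references_wrapped").expect("should parse");
        assert!(policy == UnsafePolicy::ReferencesWrappedAllFunctionsSafe);
        // ...and only that variant asks the engine to generate CppRef wrappers.
        assert!(policy.requires_cpprefs());
        assert!(!UnsafePolicy::AllFunctionsSafe.requires_cpprefs());
    }
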
diff --git a/third_party/autocxx/parser/src/directives.rs b/third_party/autocxx/parser/src/directives.rs
index 160829e..70c88fc 100644
--- a/third_party/autocxx/parser/src/directives.rs
+++ b/third_party/autocxx/parser/src/directives.rs
@@ -268,10 +268,8 @@
) -> Box<dyn Iterator<Item = TokenStream> + 'a> {
let policy = &config.unsafe_policy;
match config.unsafe_policy {
- crate::UnsafePolicy::AllFunctionsSafe => {
- Box::new(std::iter::once(policy.to_token_stream()))
- }
crate::UnsafePolicy::AllFunctionsUnsafe => Box::new(std::iter::empty()),
+ _ => Box::new(std::iter::once(policy.to_token_stream())),
}
}
}
diff --git a/third_party/autocxx/src/lib.rs b/third_party/autocxx/src/lib.rs
index 6592011..4b9f26b 100644
--- a/third_party/autocxx/src/lib.rs
+++ b/third_party/autocxx/src/lib.rs
@@ -14,10 +14,13 @@
// do anything - all the magic is handled entirely by
// autocxx_macro::include_cpp_impl.
+mod reference_wrapper;
mod rvalue_param;
pub mod subclass;
mod value_param;
+pub use reference_wrapper::{CppMutRef, CppPin, CppRef};
+
#[cfg_attr(doc, aquamarine::aquamarine)]
/// Include some C++ headers in your Rust project.
///
@@ -257,6 +260,14 @@
///
/// Generated C++ APIs which use raw pointers remain `unsafe`
/// no matter what policy you choose.
+///
+/// There's an additional possible experimental safety
+/// policy available here:
+/// `safety!(unsafe_references_wrapped)`
+/// This policy treats C++ references as scary and requires
+/// them to be wrapped in a `CppRef` type. This `CppRef`
+/// type is implemented within the generated bindings but
+/// follows the contract of [`CppRef`].
#[macro_export]
macro_rules! safety {
($($tt:tt)*) => { $crate::usage!{$($tt)*} };
@@ -613,6 +624,9 @@
pub use crate::c_void;
pub use crate::cpp_semantics;
pub use crate::include_cpp;
+ pub use crate::CppMutRef;
+ pub use crate::CppPin;
+ pub use crate::CppRef;
pub use crate::PinMut;
pub use crate::RValueParam;
pub use crate::ValueParam;
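
For reference, user-facing code enabling the new policy looks roughly like the sketch below. It mirrors the new cpprefs test added later in this change; the `goat.h` header (declaring a `Goat` with an `add_a_horn` method) and the usual autocxx build setup are assumed, and `CppUniquePtrPin`/`as_cpp_mut_ref` come from the generated bindings rather than this crate.

    use autocxx::prelude::*; // now re-exports CppRef, CppMutRef and CppPin

    include_cpp! {
        #include "goat.h"                  // hypothetical header
        safety!(unsafe_references_wrapped) // the new experimental policy
        generate!("Goat")
    }

    fn demo() {
        // Construction still yields a UniquePtr; pinning it gives a CppPin
        // from which only C++-style references can be taken.
        let goat = ffi::Goat::new().within_unique_ptr();
        let mut goat = ffi::CppUniquePtrPin::new(goat);
        goat.as_cpp_mut_ref().add_a_horn();
    }
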
diff --git a/third_party/autocxx/src/reference_wrapper.rs b/third_party/autocxx/src/reference_wrapper.rs
new file mode 100644
index 0000000..ed943d3
--- /dev/null
+++ b/third_party/autocxx/src/reference_wrapper.rs
@@ -0,0 +1,109 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// A C++ const reference. These are different from Rust's `&T` in that
+/// these may exist even while the object is mutated elsewhere.
+///
+/// This is a trait, not a struct, due to the nuances of Rust's orphan rule:
+/// implementations of this trait are found in each set of generated bindings,
+/// but they are essentially the same.
+pub trait CppRef<'a, T> {
+ /// Retrieve the underlying C++ pointer.
+ fn as_ptr(&self) -> *const T;
+
+ /// Get a regular Rust reference out of this C++ reference.
+ ///
+ /// # Safety
+ ///
+ /// Callers must guarantee that the referent is not modified by any other
+ /// C++ or Rust code while the returned reference exists. Callers must
+ /// also guarantee that no mutable Rust reference is created to the
+ /// referent while the returned reference exists.
+ unsafe fn as_ref(&self) -> &T {
+ &*self.as_ptr()
+ }
+}
+
+/// A C++ non-const reference. These are different from Rust's `&mut T` in that
+/// several C++ references can exist to the same underlying data ("aliasing")
+/// and that's not permitted in Rust.
+///
+/// This is a trait, not a struct, due to the nuances of Rust's orphan rule:
+/// implementations of this trait are found in each set of generated bindings,
+/// but they are essentially the same.
+pub trait CppMutRef<'a, T>: CppRef<'a, T> {
+ /// Retrieve the underlying C++ pointer.
+ fn as_mut_ptr(&self) -> *mut T;
+
+ /// Get a regular Rust mutable reference out of this C++ reference.
+ ///
+ /// # Safety
+ ///
+ /// Callers must guarantee that the referent is not modified by any other
+ /// C++ or Rust code while the returned reference exists. Callers must
+ /// also guarantee that no other Rust reference is created to the referent
+ /// while the returned reference exists.
+ unsafe fn as_mut(&mut self) -> &mut T {
+ &mut *self.as_mut_ptr()
+ }
+}
+
+/// Any newtype wrapper which causes the contained object to obey C++ reference
+/// semantics rather than Rust reference semantics.
+///
+/// The complex generics here are working around the orphan rule - the only
+/// important generic is `T` which is the underlying stored type.
+///
+/// C++ references are permitted to alias one another, and commonly do.
+/// Rust references must alias according only to the narrow rules of the
+/// borrow checker.
+///
+/// If you need C++ to access your Rust object, first imprison it in one of these
+/// objects, then use [`Self::as_cpp_ref`] to obtain C++ references to it.
+pub trait CppPin<'a, T: 'a> {
+ /// The type of C++ reference created to the contained object.
+ type CppRef: CppRef<'a, T>;
+
+ /// The type of C++ mutable reference created to the contained object.
+ type CppMutRef: CppMutRef<'a, T>;
+
+ /// Get an immutable pointer to the underlying object.
+ fn as_ptr(&self) -> *const T;
+
+ /// Get a mutable pointer to the underlying object.
+ fn as_mut_ptr(&mut self) -> *mut T;
+
+ /// Returns a reference which obeys C++ reference semantics
+ fn as_cpp_ref(&self) -> Self::CppRef;
+
+ /// Returns a mutable reference which obeys C++ reference semantics.
+ ///
+ /// Note that this requires exclusive access to `self` (a mutable borrow),
+ /// but this is advisory, since the resulting reference can be cloned.
+ fn as_cpp_mut_ref(&mut self) -> Self::CppMutRef;
+
+ /// Get a normal Rust reference to the underlying object. This is unsafe.
+ ///
+ /// # Safety
+ ///
+ /// You must guarantee that C++ will not mutate the object while the
+ /// reference exists.
+ unsafe fn as_ref(&self) -> &T {
+ &*self.as_ptr()
+ }
+
+ /// Get a normal Rust mutable reference to the underlying object. This is unsafe.
+ ///
+ /// # Safety
+ ///
+ /// You must guarantee that C++ will not mutate the object while the
+ /// reference exists.
+ unsafe fn as_mut(&mut self) -> &mut T {
+ &mut *self.as_mut_ptr()
+ }
+}
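
To make the orphan-rule point in the doc comments concrete, here is a minimal sketch of how a generated bindings module might satisfy these traits for some C++ type. The `Widget` type and the wrapper structs are invented for illustration; this is not the code the autocxx engine actually emits.

    use autocxx::{CppMutRef, CppPin, CppRef};
    use std::marker::PhantomData;

    // Stand-in for a C++ type; in real use it would come from include_cpp!.
    pub struct Widget {
        pub value: u32,
    }

    // Thin pointer wrappers: several may alias the same Widget, as C++ permits.
    pub struct WidgetRef<'a>(*const Widget, PhantomData<&'a Widget>);
    pub struct WidgetMutRef<'a>(*mut Widget, PhantomData<&'a Widget>);

    // Owns the Widget; Rust references to it are only available through the
    // unsafe accessors inherited from CppPin.
    pub struct WidgetPin(Widget);

    impl<'a> CppRef<'a, Widget> for WidgetRef<'a> {
        fn as_ptr(&self) -> *const Widget { self.0 }
    }

    impl<'a> CppRef<'a, Widget> for WidgetMutRef<'a> {
        fn as_ptr(&self) -> *const Widget { self.0 }
    }

    impl<'a> CppMutRef<'a, Widget> for WidgetMutRef<'a> {
        fn as_mut_ptr(&self) -> *mut Widget { self.0 }
    }

    impl<'a> CppPin<'a, Widget> for WidgetPin {
        type CppRef = WidgetRef<'a>;
        type CppMutRef = WidgetMutRef<'a>;

        fn as_ptr(&self) -> *const Widget { &self.0 }
        fn as_mut_ptr(&mut self) -> *mut Widget { &mut self.0 }
        fn as_cpp_ref(&self) -> Self::CppRef {
            WidgetRef(self.as_ptr(), PhantomData)
        }
        fn as_cpp_mut_ref(&mut self) -> Self::CppMutRef {
            WidgetMutRef(self.as_mut_ptr(), PhantomData)
        }
    }
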
diff --git a/third_party/autocxx/src/subclass.rs b/third_party/autocxx/src/subclass.rs
index d2827ca..6c6ee31 100644
--- a/third_party/autocxx/src/subclass.rs
+++ b/third_party/autocxx/src/subclass.rs
@@ -207,8 +207,9 @@
/// * You _may_ need to implement [`CppPeerConstructor`] for your subclass,
/// but only if autocxx determines that there are multiple possible superclass
/// constructors so you need to call one explicitly (or if there's a single
-/// non-trivial superclass constructor.) autocxx will implemente this trait
-/// for you if there's no ambiguity.
+/// non-trivial superclass constructor.) autocxx will implement this trait
+/// for you if there's no ambiguity and FFI functions are safe to call due to
+/// `autocxx::safety!` being used.
///
/// # How to access your Rust structure from outside
///
diff --git a/third_party/autocxx/tools/mdbook-preprocessor/Cargo.toml b/third_party/autocxx/tools/mdbook-preprocessor/Cargo.toml
index 19d775d..f88e833 100644
--- a/third_party/autocxx/tools/mdbook-preprocessor/Cargo.toml
+++ b/third_party/autocxx/tools/mdbook-preprocessor/Cargo.toml
@@ -8,7 +8,7 @@
[package]
name = "autocxx-mdbook-preprocessor"
-version = "0.22.0"
+version = "0.22.3"
authors = ["adetaylor <adetaylor@chromium.org>"]
edition = "2021"
@@ -18,7 +18,7 @@
itertools = "0.10"
anyhow = "1"
regex = "1"
-autocxx-integration-tests = { path = "../../integration-tests", version="=0.22.0"}
+autocxx-integration-tests = { path = "../../integration-tests", version="=0.22.3"}
rayon = "1.5"
gag = "1.0"
env_logger = "0.9.0"
diff --git a/third_party/autocxx/tools/reduce/Cargo.toml b/third_party/autocxx/tools/reduce/Cargo.toml
index 092ccac..83e3916 100644
--- a/third_party/autocxx/tools/reduce/Cargo.toml
+++ b/third_party/autocxx/tools/reduce/Cargo.toml
@@ -8,15 +8,15 @@
[package]
name = "autocxx-reduce"
-version = "0.22.0"
+version = "0.22.3"
authors = ["adetaylor <adetaylor@chromium.org>"]
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
-autocxx-engine = { version = "=0.22.0", path="../../engine" }
-autocxx-parser = { version = "=0.22.0", path="../../parser", features = [ "reproduction_case" ] }
+autocxx-engine = { version = "=0.22.3", path="../../engine" }
+autocxx-parser = { version = "=0.22.3", path="../../parser", features = [ "reproduction_case" ] }
clap = { version = "3.1.2", features = ["cargo"] }
tempfile = "3.1"
indoc = "1.0"
@@ -26,12 +26,12 @@
serde_derive = "1.0"
syn = "1.0.39"
quote = "1.0"
-cxx-gen = "0.7.54"
+cxx-gen = "0.7.68"
regex = "1.5"
indexmap = "1.8"
[dev-dependencies]
assert_cmd = "1.0.3"
-tempdir = "0.3.7"
+tempfile = "3.1"
indoc = "1.0"
proc-macro2 = "1.0"
\ No newline at end of file
diff --git a/third_party/autocxx/tools/reduce/src/main.rs b/third_party/autocxx/tools/reduce/src/main.rs
index 17c5bf0..06110cd 100644
--- a/third_party/autocxx/tools/reduce/src/main.rs
+++ b/third_party/autocxx/tools/reduce/src/main.rs
@@ -482,7 +482,7 @@
mv concat.h concat-body.h
echo Codegen
(echo \"#ifndef __CONCAT_H__\"; echo \"#define __CONCAT_H__\"; echo '#include \"concat-body.h\"'; echo \"#endif\") > concat.h
- ({} {} 2>&1 && cat autocxx-ffi-default-gen.rs && cat autocxxgen*.h && {} 2>&1 ) {}
+ (trap \"if [[ \\$? -eq 139 ]]; then echo Segfault; fi\" CHLD; {} {} 2>&1 && cat autocxx-ffi-default-gen.rs && cat autocxxgen*.h && {} 2>&1 ) {}
echo Remove
rm concat.h
echo Swap back
diff --git a/third_party/autocxx/tools/reduce/tests/reduce_test.rs b/third_party/autocxx/tools/reduce/tests/reduce_test.rs
index ceac5f4..b277585 100644
--- a/third_party/autocxx/tools/reduce/tests/reduce_test.rs
+++ b/third_party/autocxx/tools/reduce/tests/reduce_test.rs
@@ -16,7 +16,7 @@
path::{Path, PathBuf},
};
use syn::Token;
-use tempdir::TempDir;
+use tempfile::tempdir;
static INPUT_H: &str = indoc::indoc! {"
inline int DoMath(int a) {
@@ -169,7 +169,7 @@
if creduce_is_broken() {
return Ok(());
}
- let tmp_dir = TempDir::new("example")?;
+ let tmp_dir = tempdir()?;
let demo_code_dir = tmp_dir.path().join("demo");
std::fs::create_dir(&demo_code_dir).unwrap();
let input_header = if include_cxx_h {
diff --git a/third_party/autocxx/tools/stress-test/Cargo.toml b/third_party/autocxx/tools/stress-test/Cargo.toml
index 6f15e42..a651f60 100644
--- a/third_party/autocxx/tools/stress-test/Cargo.toml
+++ b/third_party/autocxx/tools/stress-test/Cargo.toml
@@ -8,14 +8,14 @@
[package]
name = "autocxx-stress-test"
-version = "0.22.0"
+version = "0.22.3"
authors = ["Adrian Taylor <adetaylor@chromium.org>"]
edition = "2021"
[dependencies]
-cxx = "1.0.54"
-autocxx = { path = "../..", version="0.22.0" }
+cxx = "1.0.68"
+autocxx = { path = "../..", version="0.22.3" }
[build-dependencies]
-autocxx-build = { path = "../../gen/build", version="0.22.0" }
+autocxx-build = { path = "../../gen/build", version="0.22.3" }
miette = { version="4.3", features=["fancy"]}
diff --git a/third_party/cargo/BUILD.bazel b/third_party/cargo/BUILD.bazel
index 1ab9728..417ffae 100644
--- a/third_party/cargo/BUILD.bazel
+++ b/third_party/cargo/BUILD.bazel
@@ -14,7 +14,7 @@
# Aliased targets
alias(
name = "anyhow",
- actual = "@raze__anyhow__1_0_57//:anyhow",
+ actual = "@raze__anyhow__1_0_58//:anyhow",
tags = [
"cargo-raze",
"manual",
@@ -41,7 +41,7 @@
alias(
name = "autocxx_bindgen",
- actual = "@raze__autocxx_bindgen__0_59_16//:autocxx_bindgen",
+ actual = "@raze__autocxx_bindgen__0_59_17//:autocxx_bindgen",
tags = [
"cargo-raze",
"manual",
@@ -86,7 +86,7 @@
alias(
name = "clap",
- actual = "@raze__clap__3_1_18//:clap",
+ actual = "@raze__clap__3_2_11//:clap",
tags = [
"cargo-raze",
"manual",
@@ -95,7 +95,7 @@
alias(
name = "cxx",
- actual = "@raze__cxx__1_0_68//:cxx",
+ actual = "@raze__cxx__1_0_71//:cxx",
tags = [
"cargo-raze",
"manual",
@@ -104,7 +104,7 @@
alias(
name = "cxx_cc",
- actual = "@raze__cxx__1_0_68//:cxx_cc",
+ actual = "@raze__cxx__1_0_71//:cxx_cc",
tags = [
"cargo-raze",
"manual",
@@ -113,7 +113,7 @@
alias(
name = "cxx_gen",
- actual = "@raze__cxx_gen__0_7_68//:cxx_gen",
+ actual = "@raze__cxx_gen__0_7_71//:cxx_gen",
tags = [
"cargo-raze",
"manual",
@@ -122,7 +122,7 @@
alias(
name = "cargo_bin_cxxbridge",
- actual = "@raze__cxxbridge_cmd__1_0_68//:cargo_bin_cxxbridge",
+ actual = "@raze__cxxbridge_cmd__1_0_71//:cargo_bin_cxxbridge",
tags = [
"cargo-raze",
"manual",
@@ -131,7 +131,7 @@
alias(
name = "cxxbridge_cmd",
- actual = "@raze__cxxbridge_cmd__1_0_68//:cxxbridge_cmd",
+ actual = "@raze__cxxbridge_cmd__1_0_71//:cxxbridge_cmd",
tags = [
"cargo-raze",
"manual",
@@ -140,7 +140,7 @@
alias(
name = "cxxbridge_macro",
- actual = "@raze__cxxbridge_macro__1_0_68//:cxxbridge_macro",
+ actual = "@raze__cxxbridge_macro__1_0_71//:cxxbridge_macro",
tags = [
"cargo-raze",
"manual",
@@ -158,7 +158,7 @@
alias(
name = "indexmap",
- actual = "@raze__indexmap__1_8_1//:indexmap",
+ actual = "@raze__indexmap__1_9_1//:indexmap",
tags = [
"cargo-raze",
"manual",
@@ -230,7 +230,7 @@
alias(
name = "once_cell",
- actual = "@raze__once_cell__1_10_0//:once_cell",
+ actual = "@raze__once_cell__1_13_0//:once_cell",
tags = [
"cargo-raze",
"manual",
@@ -248,7 +248,7 @@
alias(
name = "proc_macro2",
- actual = "@raze__proc_macro2__1_0_39//:proc_macro2",
+ actual = "@raze__proc_macro2__1_0_40//:proc_macro2",
tags = [
"cargo-raze",
"manual",
@@ -266,7 +266,7 @@
alias(
name = "quote",
- actual = "@raze__quote__1_0_18//:quote",
+ actual = "@raze__quote__1_0_20//:quote",
tags = [
"cargo-raze",
"manual",
@@ -275,7 +275,7 @@
alias(
name = "regex",
- actual = "@raze__regex__1_5_5//:regex",
+ actual = "@raze__regex__1_6_0//:regex",
tags = [
"cargo-raze",
"manual",
@@ -293,7 +293,7 @@
alias(
name = "serde",
- actual = "@raze__serde__1_0_137//:serde",
+ actual = "@raze__serde__1_0_139//:serde",
tags = [
"cargo-raze",
"manual",
@@ -302,7 +302,7 @@
alias(
name = "serde_json",
- actual = "@raze__serde_json__1_0_81//:serde_json",
+ actual = "@raze__serde_json__1_0_82//:serde_json",
tags = [
"cargo-raze",
"manual",
@@ -311,7 +311,7 @@
alias(
name = "smallvec",
- actual = "@raze__smallvec__1_8_0//:smallvec",
+ actual = "@raze__smallvec__1_9_0//:smallvec",
tags = [
"cargo-raze",
"manual",
@@ -320,7 +320,7 @@
alias(
name = "strum_macros",
- actual = "@raze__strum_macros__0_24_0//:strum_macros",
+ actual = "@raze__strum_macros__0_24_2//:strum_macros",
tags = [
"cargo-raze",
"manual",
@@ -329,16 +329,7 @@
alias(
name = "syn",
- actual = "@raze__syn__1_0_95//:syn",
- tags = [
- "cargo-raze",
- "manual",
- ],
-)
-
-alias(
- name = "tempdir",
- actual = "@raze__tempdir__0_3_7//:tempdir",
+ actual = "@raze__syn__1_0_98//:syn",
tags = [
"cargo-raze",
"manual",
@@ -383,7 +374,7 @@
alias(
name = "trybuild",
- actual = "@raze__trybuild__1_0_61//:trybuild",
+ actual = "@raze__trybuild__1_0_63//:trybuild",
tags = [
"cargo-raze",
"manual",
@@ -392,7 +383,7 @@
alias(
name = "uuid",
- actual = "@raze__uuid__1_0_0//:uuid",
+ actual = "@raze__uuid__1_1_2//:uuid",
tags = [
"cargo-raze",
"manual",
diff --git a/third_party/cargo/Cargo.raze.lock b/third_party/cargo/Cargo.raze.lock
index 9b8251b..58310ac 100644
--- a/third_party/cargo/Cargo.raze.lock
+++ b/third_party/cargo/Cargo.raze.lock
@@ -35,9 +35,9 @@
[[package]]
name = "anyhow"
-version = "1.0.57"
+version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc"
+checksum = "bb07d2053ccdbe10e2af2995a2f116c1330396493dc1269f6a91d0ae82e19704"
[[package]]
name = "aquamarine"
@@ -85,7 +85,7 @@
[[package]]
name = "autocxx"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"aquamarine",
"autocxx-macro",
@@ -95,9 +95,9 @@
[[package]]
name = "autocxx-bindgen"
-version = "0.59.16"
+version = "0.59.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "435723e14bf88f198322f8555a4fdb108363021d97a47bb6492891ca86055e79"
+checksum = "f9a9a26dd38d385d23b1bf61bd231b77f690c4368aef4c77cee1b7a6da2e2042"
dependencies = [
"bitflags",
"cexpr 0.6.0",
@@ -119,7 +119,7 @@
[[package]]
name = "autocxx-engine"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"aquamarine",
"autocxx-bindgen",
@@ -145,13 +145,13 @@
[[package]]
name = "autocxx-gen"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"assert_cmd",
"autocxx",
"autocxx-engine",
"autocxx-integration-tests",
- "clap 3.1.18",
+ "clap 3.2.11",
"cxx",
"env_logger 0.9.0",
"indexmap",
@@ -159,12 +159,12 @@
"miette",
"pathdiff",
"proc-macro2",
- "tempdir",
+ "tempfile",
]
[[package]]
name = "autocxx-integration-tests"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"autocxx",
"autocxx-engine",
@@ -187,7 +187,7 @@
[[package]]
name = "autocxx-macro"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"autocxx-parser",
"proc-macro-error",
@@ -198,7 +198,7 @@
[[package]]
name = "autocxx-parser"
-version = "0.22.0"
+version = "0.22.3"
dependencies = [
"indexmap",
"itertools 0.10.3",
@@ -214,9 +214,9 @@
[[package]]
name = "backtrace"
-version = "0.3.65"
+version = "0.3.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61"
+checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
dependencies = [
"addr2line",
"cc",
@@ -305,9 +305,9 @@
[[package]]
name = "clang-sys"
-version = "1.3.2"
+version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf6b561dcf059c85bbe388e0a7b0a1469acb3934cc0cfa148613a830629e3049"
+checksum = "5a050e2153c5be08febd6734e29298e844fdb0fa21aeddd63b4eb7baa106c69b"
dependencies = [
"glob",
"libc",
@@ -331,15 +331,15 @@
[[package]]
name = "clap"
-version = "3.1.18"
+version = "3.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2dbdf4bdacb33466e854ce889eee8dfd5729abf7ccd7664d0a2d60cd384440b"
+checksum = "d646c7ade5eb07c4aa20e907a922750df0c448892513714fd3e4acbc7130829f"
dependencies = [
"atty",
"bitflags",
"clap_lex",
"indexmap",
- "lazy_static",
+ "once_cell",
"strsim 0.10.0",
"termcolor",
"textwrap 0.15.0",
@@ -347,9 +347,9 @@
[[package]]
name = "clap_lex"
-version = "0.2.0"
+version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a37c35f1112dad5e6e0b1adaff798507497a18fceeb30cceb3bae7d1427b9213"
+checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
dependencies = [
"os_str_bytes",
]
@@ -390,9 +390,9 @@
[[package]]
name = "cxx"
-version = "1.0.68"
+version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e599641dff337570f6aa9c304ecca92341d30bf72e1c50287869ed6a36615a6"
+checksum = "5469a6f42296f4fd40789b397383718f9a0bd75d2f9b7cedbb249996811fba27"
dependencies = [
"cc",
"cxxbridge-flags",
@@ -402,9 +402,9 @@
[[package]]
name = "cxx-gen"
-version = "0.7.68"
+version = "0.7.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e2c726d93799c3129c65224ab09eae1a31276bc593d4f7344be1c592c16a1ec"
+checksum = "c606d018d5f8298464809ab01eb1aaf3efc2d6a984f527477cabda650f9f8688"
dependencies = [
"codespan-reporting",
"proc-macro2",
@@ -414,11 +414,11 @@
[[package]]
name = "cxxbridge-cmd"
-version = "1.0.68"
+version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08bf87cef93c3987aab9316b83fbf041a9a6fd19d0e08b0e9deb79321a58f766"
+checksum = "384d7699599cc149694e38151d20820e8ab5550037526870bee8a27b069ed922"
dependencies = [
- "clap 3.1.18",
+ "clap 3.2.11",
"codespan-reporting",
"proc-macro2",
"quote",
@@ -427,15 +427,15 @@
[[package]]
name = "cxxbridge-flags"
-version = "1.0.68"
+version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3894ad0c6d517cb5a4ce8ec20b37cd0ea31b480fe582a104c5db67ae21270853"
+checksum = "0fef2b4ffdc935c973bc7817d541fc936fdc8a85194cfdd9c761aca8387edd48"
[[package]]
name = "cxxbridge-macro"
-version = "1.0.68"
+version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34fa7e395dc1c001083c7eed28c8f0f0b5a225610f3b6284675f444af6fab86b"
+checksum = "9d3a240a54f5526967ffae81fdcda1fc80564964220d90816960b2eae2eab7f4"
dependencies = [
"proc-macro2",
"quote",
@@ -456,9 +456,9 @@
[[package]]
name = "either"
-version = "1.6.1"
+version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be"
[[package]]
name = "env_logger"
@@ -508,12 +508,6 @@
]
[[package]]
-name = "fuchsia-cprng"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
-
-[[package]]
name = "gimli"
version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -527,9 +521,9 @@
[[package]]
name = "hashbrown"
-version = "0.11.2"
+version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
+checksum = "607c8a29735385251a339424dd462993c0fed8fa09d378f259377df08c126022"
[[package]]
name = "heck"
@@ -554,9 +548,9 @@
[[package]]
name = "indexmap"
-version = "1.8.1"
+version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee"
+checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
dependencies = [
"autocfg",
"hashbrown",
@@ -709,9 +703,9 @@
[[package]]
name = "miniz_oxide"
-version = "0.5.1"
+version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082"
+checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc"
dependencies = [
"adler",
]
@@ -747,24 +741,24 @@
[[package]]
name = "object"
-version = "0.28.4"
+version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424"
+checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
-version = "1.10.0"
+version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
+checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"
[[package]]
name = "os_str_bytes"
-version = "6.0.1"
+version = "6.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "029d8d0b2f198229de29dca79676f2738ff952edf3fde542eb8bf94d8c21b435"
+checksum = "21326818e99cfe6ce1e524c2a805c189a99b5ae555a35d19f9a284b427d86afa"
[[package]]
name = "owo-colors"
@@ -837,60 +831,23 @@
[[package]]
name = "proc-macro2"
-version = "1.0.39"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f"
+checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.18"
+version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1"
+checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804"
dependencies = [
"proc-macro2",
]
[[package]]
-name = "rand"
-version = "0.4.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293"
-dependencies = [
- "fuchsia-cprng",
- "libc",
- "rand_core 0.3.1",
- "rdrand",
- "winapi",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
-dependencies = [
- "rand_core 0.4.2",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.4.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
-
-[[package]]
-name = "rdrand"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
-dependencies = [
- "rand_core 0.3.1",
-]
-
-[[package]]
name = "redox_syscall"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -901,9 +858,9 @@
[[package]]
name = "regex"
-version = "1.5.5"
+version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
+checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
dependencies = [
"aho-corasick",
"memchr",
@@ -918,9 +875,9 @@
[[package]]
name = "regex-syntax"
-version = "0.6.25"
+version = "0.6.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
[[package]]
name = "remove_dir_all"
@@ -951,9 +908,9 @@
[[package]]
name = "rustversion"
-version = "1.0.6"
+version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
+checksum = "a0a5f7c728f5d284929a1cccb5bc19884422bfe6ef4d6c409da2c41838983fcf"
[[package]]
name = "ryu"
@@ -963,18 +920,18 @@
[[package]]
name = "serde"
-version = "1.0.137"
+version = "1.0.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1"
+checksum = "0171ebb889e45aa68b44aee0859b3eede84c6f5f5c228e6f140c0b2a0a46cad6"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.137"
+version = "1.0.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be"
+checksum = "dc1d3230c1de7932af58ad8ffbe1d784bd55efd5a9d84ac24f69c72d83543dfb"
dependencies = [
"proc-macro2",
"quote",
@@ -983,9 +940,9 @@
[[package]]
name = "serde_json"
-version = "1.0.81"
+version = "1.0.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c"
+checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7"
dependencies = [
"itoa",
"ryu",
@@ -1000,9 +957,9 @@
[[package]]
name = "smallvec"
-version = "1.8.0"
+version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
+checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
[[package]]
name = "smawk"
@@ -1024,9 +981,9 @@
[[package]]
name = "strum_macros"
-version = "0.24.0"
+version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6878079b17446e4d3eba6192bb0a2950d5b14f0ed8424b852310e5a94345d0ef"
+checksum = "4faebde00e8ff94316c01800f9054fd2ba77d30d9e922541913051d1d978918b"
dependencies = [
"heck",
"proc-macro2",
@@ -1065,9 +1022,9 @@
[[package]]
name = "syn"
-version = "1.0.95"
+version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fbaf6116ab8924f39d52792136fb74fd60a80194cf1b1c6ffa6453eef1c3f942"
+checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"
dependencies = [
"proc-macro2",
"quote",
@@ -1075,16 +1032,6 @@
]
[[package]]
-name = "tempdir"
-version = "0.3.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8"
-dependencies = [
- "rand",
- "remove_dir_all",
-]
-
-[[package]]
name = "tempfile"
version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1206,9 +1153,9 @@
[[package]]
name = "trybuild"
-version = "1.0.61"
+version = "1.0.63"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fc92f558afb6d1d7c6f175eb8d615b8ef49c227543e68e19c123d4ee43d8a7d"
+checksum = "764b9e244b482a9b81bde596aa37aa6f1347bf8007adab25e59f901b32b4e0a0"
dependencies = [
"glob",
"once_cell",
@@ -1221,9 +1168,9 @@
[[package]]
name = "unicode-ident"
-version = "1.0.0"
+version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee"
+checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c"
[[package]]
name = "unicode-linebreak"
@@ -1242,9 +1189,9 @@
[[package]]
name = "uuid"
-version = "1.0.0"
+version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8cfcd319456c4d6ea10087ed423473267e1a071f3bc0aa89f80d60997843c6f0"
+checksum = "dd6469f4314d5f1ffec476e05f17cc9a78bc7a27a6a857842170bdf8d6f98d2f"
[[package]]
name = "vec_map"
diff --git a/third_party/cargo/crates.bzl b/third_party/cargo/crates.bzl
index 8549a4d..515b67a 100644
--- a/third_party/cargo/crates.bzl
+++ b/third_party/cargo/crates.bzl
@@ -53,12 +53,12 @@
maybe(
http_archive,
- name = "raze__anyhow__1_0_57",
- url = "https://crates.io/api/v1/crates/anyhow/1.0.57/download",
+ name = "raze__anyhow__1_0_58",
+ url = "https://crates.io/api/v1/crates/anyhow/1.0.58/download",
type = "tar.gz",
- sha256 = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc",
- strip_prefix = "anyhow-1.0.57",
- build_file = Label("//third_party/cargo/remote:BUILD.anyhow-1.0.57.bazel"),
+ sha256 = "bb07d2053ccdbe10e2af2995a2f116c1330396493dc1269f6a91d0ae82e19704",
+ strip_prefix = "anyhow-1.0.58",
+ build_file = Label("//third_party/cargo/remote:BUILD.anyhow-1.0.58.bazel"),
)
maybe(
@@ -103,22 +103,22 @@
maybe(
http_archive,
- name = "raze__autocxx_bindgen__0_59_16",
- url = "https://crates.io/api/v1/crates/autocxx-bindgen/0.59.16/download",
+ name = "raze__autocxx_bindgen__0_59_17",
+ url = "https://crates.io/api/v1/crates/autocxx-bindgen/0.59.17/download",
type = "tar.gz",
- sha256 = "435723e14bf88f198322f8555a4fdb108363021d97a47bb6492891ca86055e79",
- strip_prefix = "autocxx-bindgen-0.59.16",
- build_file = Label("//third_party/cargo/remote:BUILD.autocxx-bindgen-0.59.16.bazel"),
+ sha256 = "f9a9a26dd38d385d23b1bf61bd231b77f690c4368aef4c77cee1b7a6da2e2042",
+ strip_prefix = "autocxx-bindgen-0.59.17",
+ build_file = Label("//third_party/cargo/remote:BUILD.autocxx-bindgen-0.59.17.bazel"),
)
maybe(
http_archive,
- name = "raze__backtrace__0_3_65",
- url = "https://crates.io/api/v1/crates/backtrace/0.3.65/download",
+ name = "raze__backtrace__0_3_66",
+ url = "https://crates.io/api/v1/crates/backtrace/0.3.66/download",
type = "tar.gz",
- sha256 = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61",
- strip_prefix = "backtrace-0.3.65",
- build_file = Label("//third_party/cargo/remote:BUILD.backtrace-0.3.65.bazel"),
+ sha256 = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7",
+ strip_prefix = "backtrace-0.3.66",
+ build_file = Label("//third_party/cargo/remote:BUILD.backtrace-0.3.66.bazel"),
)
maybe(
@@ -203,12 +203,12 @@
maybe(
http_archive,
- name = "raze__clang_sys__1_3_2",
- url = "https://crates.io/api/v1/crates/clang-sys/1.3.2/download",
+ name = "raze__clang_sys__1_3_3",
+ url = "https://crates.io/api/v1/crates/clang-sys/1.3.3/download",
type = "tar.gz",
- sha256 = "bf6b561dcf059c85bbe388e0a7b0a1469acb3934cc0cfa148613a830629e3049",
- strip_prefix = "clang-sys-1.3.2",
- build_file = Label("//third_party/cargo/remote:BUILD.clang-sys-1.3.2.bazel"),
+ sha256 = "5a050e2153c5be08febd6734e29298e844fdb0fa21aeddd63b4eb7baa106c69b",
+ strip_prefix = "clang-sys-1.3.3",
+ build_file = Label("//third_party/cargo/remote:BUILD.clang-sys-1.3.3.bazel"),
)
maybe(
@@ -223,22 +223,22 @@
maybe(
http_archive,
- name = "raze__clap__3_1_18",
- url = "https://crates.io/api/v1/crates/clap/3.1.18/download",
+ name = "raze__clap__3_2_11",
+ url = "https://crates.io/api/v1/crates/clap/3.2.11/download",
type = "tar.gz",
- sha256 = "d2dbdf4bdacb33466e854ce889eee8dfd5729abf7ccd7664d0a2d60cd384440b",
- strip_prefix = "clap-3.1.18",
- build_file = Label("//third_party/cargo/remote:BUILD.clap-3.1.18.bazel"),
+ sha256 = "d646c7ade5eb07c4aa20e907a922750df0c448892513714fd3e4acbc7130829f",
+ strip_prefix = "clap-3.2.11",
+ build_file = Label("//third_party/cargo/remote:BUILD.clap-3.2.11.bazel"),
)
maybe(
http_archive,
- name = "raze__clap_lex__0_2_0",
- url = "https://crates.io/api/v1/crates/clap_lex/0.2.0/download",
+ name = "raze__clap_lex__0_2_4",
+ url = "https://crates.io/api/v1/crates/clap_lex/0.2.4/download",
type = "tar.gz",
- sha256 = "a37c35f1112dad5e6e0b1adaff798507497a18fceeb30cceb3bae7d1427b9213",
- strip_prefix = "clap_lex-0.2.0",
- build_file = Label("//third_party/cargo/remote:BUILD.clap_lex-0.2.0.bazel"),
+ sha256 = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5",
+ strip_prefix = "clap_lex-0.2.4",
+ build_file = Label("//third_party/cargo/remote:BUILD.clap_lex-0.2.4.bazel"),
)
maybe(
@@ -253,52 +253,52 @@
maybe(
http_archive,
- name = "raze__cxx__1_0_68",
- url = "https://crates.io/api/v1/crates/cxx/1.0.68/download",
+ name = "raze__cxx__1_0_71",
+ url = "https://crates.io/api/v1/crates/cxx/1.0.71/download",
type = "tar.gz",
- sha256 = "7e599641dff337570f6aa9c304ecca92341d30bf72e1c50287869ed6a36615a6",
- strip_prefix = "cxx-1.0.68",
- build_file = Label("//third_party/cargo/remote:BUILD.cxx-1.0.68.bazel"),
+ sha256 = "5469a6f42296f4fd40789b397383718f9a0bd75d2f9b7cedbb249996811fba27",
+ strip_prefix = "cxx-1.0.71",
+ build_file = Label("//third_party/cargo/remote:BUILD.cxx-1.0.71.bazel"),
)
maybe(
http_archive,
- name = "raze__cxx_gen__0_7_68",
- url = "https://crates.io/api/v1/crates/cxx-gen/0.7.68/download",
+ name = "raze__cxx_gen__0_7_71",
+ url = "https://crates.io/api/v1/crates/cxx-gen/0.7.71/download",
type = "tar.gz",
- sha256 = "1e2c726d93799c3129c65224ab09eae1a31276bc593d4f7344be1c592c16a1ec",
- strip_prefix = "cxx-gen-0.7.68",
- build_file = Label("//third_party/cargo/remote:BUILD.cxx-gen-0.7.68.bazel"),
+ sha256 = "c606d018d5f8298464809ab01eb1aaf3efc2d6a984f527477cabda650f9f8688",
+ strip_prefix = "cxx-gen-0.7.71",
+ build_file = Label("//third_party/cargo/remote:BUILD.cxx-gen-0.7.71.bazel"),
)
maybe(
http_archive,
- name = "raze__cxxbridge_cmd__1_0_68",
- url = "https://crates.io/api/v1/crates/cxxbridge-cmd/1.0.68/download",
+ name = "raze__cxxbridge_cmd__1_0_71",
+ url = "https://crates.io/api/v1/crates/cxxbridge-cmd/1.0.71/download",
type = "tar.gz",
- sha256 = "08bf87cef93c3987aab9316b83fbf041a9a6fd19d0e08b0e9deb79321a58f766",
- strip_prefix = "cxxbridge-cmd-1.0.68",
- build_file = Label("//third_party/cargo/remote:BUILD.cxxbridge-cmd-1.0.68.bazel"),
+ sha256 = "384d7699599cc149694e38151d20820e8ab5550037526870bee8a27b069ed922",
+ strip_prefix = "cxxbridge-cmd-1.0.71",
+ build_file = Label("//third_party/cargo/remote:BUILD.cxxbridge-cmd-1.0.71.bazel"),
)
maybe(
http_archive,
- name = "raze__cxxbridge_flags__1_0_68",
- url = "https://crates.io/api/v1/crates/cxxbridge-flags/1.0.68/download",
+ name = "raze__cxxbridge_flags__1_0_71",
+ url = "https://crates.io/api/v1/crates/cxxbridge-flags/1.0.71/download",
type = "tar.gz",
- sha256 = "3894ad0c6d517cb5a4ce8ec20b37cd0ea31b480fe582a104c5db67ae21270853",
- strip_prefix = "cxxbridge-flags-1.0.68",
- build_file = Label("//third_party/cargo/remote:BUILD.cxxbridge-flags-1.0.68.bazel"),
+ sha256 = "0fef2b4ffdc935c973bc7817d541fc936fdc8a85194cfdd9c761aca8387edd48",
+ strip_prefix = "cxxbridge-flags-1.0.71",
+ build_file = Label("//third_party/cargo/remote:BUILD.cxxbridge-flags-1.0.71.bazel"),
)
maybe(
http_archive,
- name = "raze__cxxbridge_macro__1_0_68",
- url = "https://crates.io/api/v1/crates/cxxbridge-macro/1.0.68/download",
+ name = "raze__cxxbridge_macro__1_0_71",
+ url = "https://crates.io/api/v1/crates/cxxbridge-macro/1.0.71/download",
type = "tar.gz",
- sha256 = "34fa7e395dc1c001083c7eed28c8f0f0b5a225610f3b6284675f444af6fab86b",
- strip_prefix = "cxxbridge-macro-1.0.68",
- build_file = Label("//third_party/cargo/remote:BUILD.cxxbridge-macro-1.0.68.bazel"),
+ sha256 = "9d3a240a54f5526967ffae81fdcda1fc80564964220d90816960b2eae2eab7f4",
+ strip_prefix = "cxxbridge-macro-1.0.71",
+ build_file = Label("//third_party/cargo/remote:BUILD.cxxbridge-macro-1.0.71.bazel"),
)
maybe(
@@ -323,12 +323,12 @@
maybe(
http_archive,
- name = "raze__either__1_6_1",
- url = "https://crates.io/api/v1/crates/either/1.6.1/download",
+ name = "raze__either__1_7_0",
+ url = "https://crates.io/api/v1/crates/either/1.7.0/download",
type = "tar.gz",
- sha256 = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457",
- strip_prefix = "either-1.6.1",
- build_file = Label("//third_party/cargo/remote:BUILD.either-1.6.1.bazel"),
+ sha256 = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be",
+ strip_prefix = "either-1.7.0",
+ build_file = Label("//third_party/cargo/remote:BUILD.either-1.7.0.bazel"),
)
maybe(
@@ -363,16 +363,6 @@
maybe(
http_archive,
- name = "raze__fuchsia_cprng__0_1_1",
- url = "https://crates.io/api/v1/crates/fuchsia-cprng/0.1.1/download",
- type = "tar.gz",
- sha256 = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba",
- strip_prefix = "fuchsia-cprng-0.1.1",
- build_file = Label("//third_party/cargo/remote:BUILD.fuchsia-cprng-0.1.1.bazel"),
- )
-
- maybe(
- http_archive,
name = "raze__gimli__0_26_1",
url = "https://crates.io/api/v1/crates/gimli/0.26.1/download",
type = "tar.gz",
@@ -393,12 +383,12 @@
maybe(
http_archive,
- name = "raze__hashbrown__0_11_2",
- url = "https://crates.io/api/v1/crates/hashbrown/0.11.2/download",
+ name = "raze__hashbrown__0_12_2",
+ url = "https://crates.io/api/v1/crates/hashbrown/0.12.2/download",
type = "tar.gz",
- sha256 = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e",
- strip_prefix = "hashbrown-0.11.2",
- build_file = Label("//third_party/cargo/remote:BUILD.hashbrown-0.11.2.bazel"),
+ sha256 = "607c8a29735385251a339424dd462993c0fed8fa09d378f259377df08c126022",
+ strip_prefix = "hashbrown-0.12.2",
+ build_file = Label("//third_party/cargo/remote:BUILD.hashbrown-0.12.2.bazel"),
)
maybe(
@@ -433,12 +423,12 @@
maybe(
http_archive,
- name = "raze__indexmap__1_8_1",
- url = "https://crates.io/api/v1/crates/indexmap/1.8.1/download",
+ name = "raze__indexmap__1_9_1",
+ url = "https://crates.io/api/v1/crates/indexmap/1.9.1/download",
type = "tar.gz",
- sha256 = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee",
- strip_prefix = "indexmap-1.8.1",
- build_file = Label("//third_party/cargo/remote:BUILD.indexmap-1.8.1.bazel"),
+ sha256 = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e",
+ strip_prefix = "indexmap-1.9.1",
+ build_file = Label("//third_party/cargo/remote:BUILD.indexmap-1.9.1.bazel"),
)
maybe(
@@ -613,12 +603,12 @@
maybe(
http_archive,
- name = "raze__miniz_oxide__0_5_1",
- url = "https://crates.io/api/v1/crates/miniz_oxide/0.5.1/download",
+ name = "raze__miniz_oxide__0_5_3",
+ url = "https://crates.io/api/v1/crates/miniz_oxide/0.5.3/download",
type = "tar.gz",
- sha256 = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082",
- strip_prefix = "miniz_oxide-0.5.1",
- build_file = Label("//third_party/cargo/remote:BUILD.miniz_oxide-0.5.1.bazel"),
+ sha256 = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc",
+ strip_prefix = "miniz_oxide-0.5.3",
+ build_file = Label("//third_party/cargo/remote:BUILD.miniz_oxide-0.5.3.bazel"),
)
maybe(
@@ -653,32 +643,32 @@
maybe(
http_archive,
- name = "raze__object__0_28_4",
- url = "https://crates.io/api/v1/crates/object/0.28.4/download",
+ name = "raze__object__0_29_0",
+ url = "https://crates.io/api/v1/crates/object/0.29.0/download",
type = "tar.gz",
- sha256 = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424",
- strip_prefix = "object-0.28.4",
- build_file = Label("//third_party/cargo/remote:BUILD.object-0.28.4.bazel"),
+ sha256 = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53",
+ strip_prefix = "object-0.29.0",
+ build_file = Label("//third_party/cargo/remote:BUILD.object-0.29.0.bazel"),
)
maybe(
http_archive,
- name = "raze__once_cell__1_10_0",
- url = "https://crates.io/api/v1/crates/once_cell/1.10.0/download",
+ name = "raze__once_cell__1_13_0",
+ url = "https://crates.io/api/v1/crates/once_cell/1.13.0/download",
type = "tar.gz",
- sha256 = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9",
- strip_prefix = "once_cell-1.10.0",
- build_file = Label("//third_party/cargo/remote:BUILD.once_cell-1.10.0.bazel"),
+ sha256 = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1",
+ strip_prefix = "once_cell-1.13.0",
+ build_file = Label("//third_party/cargo/remote:BUILD.once_cell-1.13.0.bazel"),
)
maybe(
http_archive,
- name = "raze__os_str_bytes__6_0_1",
- url = "https://crates.io/api/v1/crates/os_str_bytes/6.0.1/download",
+ name = "raze__os_str_bytes__6_1_0",
+ url = "https://crates.io/api/v1/crates/os_str_bytes/6.1.0/download",
type = "tar.gz",
- sha256 = "029d8d0b2f198229de29dca79676f2738ff952edf3fde542eb8bf94d8c21b435",
- strip_prefix = "os_str_bytes-6.0.1",
- build_file = Label("//third_party/cargo/remote:BUILD.os_str_bytes-6.0.1.bazel"),
+ sha256 = "21326818e99cfe6ce1e524c2a805c189a99b5ae555a35d19f9a284b427d86afa",
+ strip_prefix = "os_str_bytes-6.1.0",
+ build_file = Label("//third_party/cargo/remote:BUILD.os_str_bytes-6.1.0.bazel"),
)
maybe(
@@ -763,62 +753,22 @@
maybe(
http_archive,
- name = "raze__proc_macro2__1_0_39",
- url = "https://crates.io/api/v1/crates/proc-macro2/1.0.39/download",
+ name = "raze__proc_macro2__1_0_40",
+ url = "https://crates.io/api/v1/crates/proc-macro2/1.0.40/download",
type = "tar.gz",
- sha256 = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f",
- strip_prefix = "proc-macro2-1.0.39",
- build_file = Label("//third_party/cargo/remote:BUILD.proc-macro2-1.0.39.bazel"),
+ sha256 = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7",
+ strip_prefix = "proc-macro2-1.0.40",
+ build_file = Label("//third_party/cargo/remote:BUILD.proc-macro2-1.0.40.bazel"),
)
maybe(
http_archive,
- name = "raze__quote__1_0_18",
- url = "https://crates.io/api/v1/crates/quote/1.0.18/download",
+ name = "raze__quote__1_0_20",
+ url = "https://crates.io/api/v1/crates/quote/1.0.20/download",
type = "tar.gz",
- sha256 = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1",
- strip_prefix = "quote-1.0.18",
- build_file = Label("//third_party/cargo/remote:BUILD.quote-1.0.18.bazel"),
- )
-
- maybe(
- http_archive,
- name = "raze__rand__0_4_6",
- url = "https://crates.io/api/v1/crates/rand/0.4.6/download",
- type = "tar.gz",
- sha256 = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293",
- strip_prefix = "rand-0.4.6",
- build_file = Label("//third_party/cargo/remote:BUILD.rand-0.4.6.bazel"),
- )
-
- maybe(
- http_archive,
- name = "raze__rand_core__0_3_1",
- url = "https://crates.io/api/v1/crates/rand_core/0.3.1/download",
- type = "tar.gz",
- sha256 = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b",
- strip_prefix = "rand_core-0.3.1",
- build_file = Label("//third_party/cargo/remote:BUILD.rand_core-0.3.1.bazel"),
- )
-
- maybe(
- http_archive,
- name = "raze__rand_core__0_4_2",
- url = "https://crates.io/api/v1/crates/rand_core/0.4.2/download",
- type = "tar.gz",
- sha256 = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc",
- strip_prefix = "rand_core-0.4.2",
- build_file = Label("//third_party/cargo/remote:BUILD.rand_core-0.4.2.bazel"),
- )
-
- maybe(
- http_archive,
- name = "raze__rdrand__0_4_0",
- url = "https://crates.io/api/v1/crates/rdrand/0.4.0/download",
- type = "tar.gz",
- sha256 = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2",
- strip_prefix = "rdrand-0.4.0",
- build_file = Label("//third_party/cargo/remote:BUILD.rdrand-0.4.0.bazel"),
+ sha256 = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804",
+ strip_prefix = "quote-1.0.20",
+ build_file = Label("//third_party/cargo/remote:BUILD.quote-1.0.20.bazel"),
)
maybe(
@@ -833,12 +783,12 @@
maybe(
http_archive,
- name = "raze__regex__1_5_5",
- url = "https://crates.io/api/v1/crates/regex/1.5.5/download",
+ name = "raze__regex__1_6_0",
+ url = "https://crates.io/api/v1/crates/regex/1.6.0/download",
type = "tar.gz",
- sha256 = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286",
- strip_prefix = "regex-1.5.5",
- build_file = Label("//third_party/cargo/remote:BUILD.regex-1.5.5.bazel"),
+ sha256 = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b",
+ strip_prefix = "regex-1.6.0",
+ build_file = Label("//third_party/cargo/remote:BUILD.regex-1.6.0.bazel"),
)
maybe(
@@ -853,12 +803,12 @@
maybe(
http_archive,
- name = "raze__regex_syntax__0_6_25",
- url = "https://crates.io/api/v1/crates/regex-syntax/0.6.25/download",
+ name = "raze__regex_syntax__0_6_27",
+ url = "https://crates.io/api/v1/crates/regex-syntax/0.6.27/download",
type = "tar.gz",
- sha256 = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b",
- strip_prefix = "regex-syntax-0.6.25",
- build_file = Label("//third_party/cargo/remote:BUILD.regex-syntax-0.6.25.bazel"),
+ sha256 = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244",
+ strip_prefix = "regex-syntax-0.6.27",
+ build_file = Label("//third_party/cargo/remote:BUILD.regex-syntax-0.6.27.bazel"),
)
maybe(
@@ -903,12 +853,12 @@
maybe(
http_archive,
- name = "raze__rustversion__1_0_6",
- url = "https://crates.io/api/v1/crates/rustversion/1.0.6/download",
+ name = "raze__rustversion__1_0_7",
+ url = "https://crates.io/api/v1/crates/rustversion/1.0.7/download",
type = "tar.gz",
- sha256 = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f",
- strip_prefix = "rustversion-1.0.6",
- build_file = Label("//third_party/cargo/remote:BUILD.rustversion-1.0.6.bazel"),
+ sha256 = "a0a5f7c728f5d284929a1cccb5bc19884422bfe6ef4d6c409da2c41838983fcf",
+ strip_prefix = "rustversion-1.0.7",
+ build_file = Label("//third_party/cargo/remote:BUILD.rustversion-1.0.7.bazel"),
)
maybe(
@@ -923,32 +873,32 @@
maybe(
http_archive,
- name = "raze__serde__1_0_137",
- url = "https://crates.io/api/v1/crates/serde/1.0.137/download",
+ name = "raze__serde__1_0_139",
+ url = "https://crates.io/api/v1/crates/serde/1.0.139/download",
type = "tar.gz",
- sha256 = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1",
- strip_prefix = "serde-1.0.137",
- build_file = Label("//third_party/cargo/remote:BUILD.serde-1.0.137.bazel"),
+ sha256 = "0171ebb889e45aa68b44aee0859b3eede84c6f5f5c228e6f140c0b2a0a46cad6",
+ strip_prefix = "serde-1.0.139",
+ build_file = Label("//third_party/cargo/remote:BUILD.serde-1.0.139.bazel"),
)
maybe(
http_archive,
- name = "raze__serde_derive__1_0_137",
- url = "https://crates.io/api/v1/crates/serde_derive/1.0.137/download",
+ name = "raze__serde_derive__1_0_139",
+ url = "https://crates.io/api/v1/crates/serde_derive/1.0.139/download",
type = "tar.gz",
- sha256 = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be",
- strip_prefix = "serde_derive-1.0.137",
- build_file = Label("//third_party/cargo/remote:BUILD.serde_derive-1.0.137.bazel"),
+ sha256 = "dc1d3230c1de7932af58ad8ffbe1d784bd55efd5a9d84ac24f69c72d83543dfb",
+ strip_prefix = "serde_derive-1.0.139",
+ build_file = Label("//third_party/cargo/remote:BUILD.serde_derive-1.0.139.bazel"),
)
maybe(
http_archive,
- name = "raze__serde_json__1_0_81",
- url = "https://crates.io/api/v1/crates/serde_json/1.0.81/download",
+ name = "raze__serde_json__1_0_82",
+ url = "https://crates.io/api/v1/crates/serde_json/1.0.82/download",
type = "tar.gz",
- sha256 = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c",
- strip_prefix = "serde_json-1.0.81",
- build_file = Label("//third_party/cargo/remote:BUILD.serde_json-1.0.81.bazel"),
+ sha256 = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7",
+ strip_prefix = "serde_json-1.0.82",
+ build_file = Label("//third_party/cargo/remote:BUILD.serde_json-1.0.82.bazel"),
)
maybe(
@@ -963,12 +913,12 @@
maybe(
http_archive,
- name = "raze__smallvec__1_8_0",
- url = "https://crates.io/api/v1/crates/smallvec/1.8.0/download",
+ name = "raze__smallvec__1_9_0",
+ url = "https://crates.io/api/v1/crates/smallvec/1.9.0/download",
type = "tar.gz",
- sha256 = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83",
- strip_prefix = "smallvec-1.8.0",
- build_file = Label("//third_party/cargo/remote:BUILD.smallvec-1.8.0.bazel"),
+ sha256 = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1",
+ strip_prefix = "smallvec-1.9.0",
+ build_file = Label("//third_party/cargo/remote:BUILD.smallvec-1.9.0.bazel"),
)
maybe(
@@ -1003,12 +953,12 @@
maybe(
http_archive,
- name = "raze__strum_macros__0_24_0",
- url = "https://crates.io/api/v1/crates/strum_macros/0.24.0/download",
+ name = "raze__strum_macros__0_24_2",
+ url = "https://crates.io/api/v1/crates/strum_macros/0.24.2/download",
type = "tar.gz",
- sha256 = "6878079b17446e4d3eba6192bb0a2950d5b14f0ed8424b852310e5a94345d0ef",
- strip_prefix = "strum_macros-0.24.0",
- build_file = Label("//third_party/cargo/remote:BUILD.strum_macros-0.24.0.bazel"),
+ sha256 = "4faebde00e8ff94316c01800f9054fd2ba77d30d9e922541913051d1d978918b",
+ strip_prefix = "strum_macros-0.24.2",
+ build_file = Label("//third_party/cargo/remote:BUILD.strum_macros-0.24.2.bazel"),
)
maybe(
@@ -1043,22 +993,12 @@
maybe(
http_archive,
- name = "raze__syn__1_0_95",
- url = "https://crates.io/api/v1/crates/syn/1.0.95/download",
+ name = "raze__syn__1_0_98",
+ url = "https://crates.io/api/v1/crates/syn/1.0.98/download",
type = "tar.gz",
- sha256 = "fbaf6116ab8924f39d52792136fb74fd60a80194cf1b1c6ffa6453eef1c3f942",
- strip_prefix = "syn-1.0.95",
- build_file = Label("//third_party/cargo/remote:BUILD.syn-1.0.95.bazel"),
- )
-
- maybe(
- http_archive,
- name = "raze__tempdir__0_3_7",
- url = "https://crates.io/api/v1/crates/tempdir/0.3.7/download",
- type = "tar.gz",
- sha256 = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8",
- strip_prefix = "tempdir-0.3.7",
- build_file = Label("//third_party/cargo/remote:BUILD.tempdir-0.3.7.bazel"),
+ sha256 = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd",
+ strip_prefix = "syn-1.0.98",
+ build_file = Label("//third_party/cargo/remote:BUILD.syn-1.0.98.bazel"),
)
maybe(
@@ -1163,22 +1103,22 @@
maybe(
http_archive,
- name = "raze__trybuild__1_0_61",
- url = "https://crates.io/api/v1/crates/trybuild/1.0.61/download",
+ name = "raze__trybuild__1_0_63",
+ url = "https://crates.io/api/v1/crates/trybuild/1.0.63/download",
type = "tar.gz",
- sha256 = "7fc92f558afb6d1d7c6f175eb8d615b8ef49c227543e68e19c123d4ee43d8a7d",
- strip_prefix = "trybuild-1.0.61",
- build_file = Label("//third_party/cargo/remote:BUILD.trybuild-1.0.61.bazel"),
+ sha256 = "764b9e244b482a9b81bde596aa37aa6f1347bf8007adab25e59f901b32b4e0a0",
+ strip_prefix = "trybuild-1.0.63",
+ build_file = Label("//third_party/cargo/remote:BUILD.trybuild-1.0.63.bazel"),
)
maybe(
http_archive,
- name = "raze__unicode_ident__1_0_0",
- url = "https://crates.io/api/v1/crates/unicode-ident/1.0.0/download",
+ name = "raze__unicode_ident__1_0_1",
+ url = "https://crates.io/api/v1/crates/unicode-ident/1.0.1/download",
type = "tar.gz",
- sha256 = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee",
- strip_prefix = "unicode-ident-1.0.0",
- build_file = Label("//third_party/cargo/remote:BUILD.unicode-ident-1.0.0.bazel"),
+ sha256 = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c",
+ strip_prefix = "unicode-ident-1.0.1",
+ build_file = Label("//third_party/cargo/remote:BUILD.unicode-ident-1.0.1.bazel"),
)
maybe(
@@ -1203,12 +1143,12 @@
maybe(
http_archive,
- name = "raze__uuid__1_0_0",
- url = "https://crates.io/api/v1/crates/uuid/1.0.0/download",
+ name = "raze__uuid__1_1_2",
+ url = "https://crates.io/api/v1/crates/uuid/1.1.2/download",
type = "tar.gz",
- sha256 = "8cfcd319456c4d6ea10087ed423473267e1a071f3bc0aa89f80d60997843c6f0",
- strip_prefix = "uuid-1.0.0",
- build_file = Label("//third_party/cargo/remote:BUILD.uuid-1.0.0.bazel"),
+ sha256 = "dd6469f4314d5f1ffec476e05f17cc9a78bc7a27a6a857842170bdf8d6f98d2f",
+ strip_prefix = "uuid-1.1.2",
+ build_file = Label("//third_party/cargo/remote:BUILD.uuid-1.1.2.bazel"),
)
maybe(
diff --git a/third_party/cargo/remote/BUILD.anyhow-1.0.57.bazel b/third_party/cargo/remote/BUILD.anyhow-1.0.58.bazel
similarity index 97%
rename from third_party/cargo/remote/BUILD.anyhow-1.0.57.bazel
rename to third_party/cargo/remote/BUILD.anyhow-1.0.58.bazel
index 82466d9..f7d5b14 100644
--- a/third_party/cargo/remote/BUILD.anyhow-1.0.57.bazel
+++ b/third_party/cargo/remote/BUILD.anyhow-1.0.58.bazel
@@ -56,7 +56,7 @@
"cargo-raze",
"manual",
],
- version = "1.0.57",
+ version = "1.0.58",
visibility = ["//visibility:private"],
deps = [
],
@@ -80,7 +80,7 @@
"crate-name=anyhow",
"manual",
],
- version = "1.0.57",
+ version = "1.0.58",
# buildifier: leave-alone
deps = [
":anyhow_build_script",
diff --git a/third_party/cargo/remote/BUILD.aquamarine-0.1.11.bazel b/third_party/cargo/remote/BUILD.aquamarine-0.1.11.bazel
index 87dc642..f24a8c1 100644
--- a/third_party/cargo/remote/BUILD.aquamarine-0.1.11.bazel
+++ b/third_party/cargo/remote/BUILD.aquamarine-0.1.11.bazel
@@ -51,9 +51,9 @@
# buildifier: leave-alone
deps = [
"@raze__itertools__0_9_0//:itertools",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
"@raze__proc_macro_error__1_0_4//:proc_macro_error",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
diff --git a/third_party/cargo/remote/BUILD.autocxx-bindgen-0.59.16.bazel b/third_party/cargo/remote/BUILD.autocxx-bindgen-0.59.17.bazel
similarity index 88%
rename from third_party/cargo/remote/BUILD.autocxx-bindgen-0.59.16.bazel
rename to third_party/cargo/remote/BUILD.autocxx-bindgen-0.59.17.bazel
index 34dc60a..20770f5 100644
--- a/third_party/cargo/remote/BUILD.autocxx-bindgen-0.59.16.bazel
+++ b/third_party/cargo/remote/BUILD.autocxx-bindgen-0.59.17.bazel
@@ -62,10 +62,10 @@
"cargo-raze",
"manual",
],
- version = "0.59.16",
+ version = "0.59.17",
visibility = ["//visibility:private"],
deps = [
- "@raze__clang_sys__1_3_2//:clang_sys",
+ "@raze__clang_sys__1_3_3//:clang_sys",
],
)
@@ -95,14 +95,14 @@
"crate-name=autocxx-bindgen",
"manual",
],
- version = "0.59.16",
+ version = "0.59.17",
# buildifier: leave-alone
deps = [
":autocxx_bindgen",
":autocxx_bindgen_build_script",
"@raze__bitflags__1_3_2//:bitflags",
"@raze__cexpr__0_6_0//:cexpr",
- "@raze__clang_sys__1_3_2//:clang_sys",
+ "@raze__clang_sys__1_3_3//:clang_sys",
"@raze__clap__2_34_0//:clap",
"@raze__env_logger__0_9_0//:env_logger",
"@raze__itertools__0_10_3//:itertools",
@@ -110,9 +110,9 @@
"@raze__lazycell__1_3_0//:lazycell",
"@raze__log__0_4_17//:log",
"@raze__peeking_take_while__0_1_2//:peeking_take_while",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__regex__1_5_5//:regex",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__regex__1_6_0//:regex",
"@raze__rustc_hash__1_1_0//:rustc_hash",
"@raze__shlex__1_1_0//:shlex",
"@raze__which__4_2_5//:which",
@@ -143,13 +143,13 @@
"crate-name=autocxx-bindgen",
"manual",
],
- version = "0.59.16",
+ version = "0.59.17",
# buildifier: leave-alone
deps = [
":autocxx_bindgen_build_script",
"@raze__bitflags__1_3_2//:bitflags",
"@raze__cexpr__0_6_0//:cexpr",
- "@raze__clang_sys__1_3_2//:clang_sys",
+ "@raze__clang_sys__1_3_3//:clang_sys",
"@raze__clap__2_34_0//:clap",
"@raze__env_logger__0_9_0//:env_logger",
"@raze__itertools__0_10_3//:itertools",
@@ -157,9 +157,9 @@
"@raze__lazycell__1_3_0//:lazycell",
"@raze__log__0_4_17//:log",
"@raze__peeking_take_while__0_1_2//:peeking_take_while",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__regex__1_5_5//:regex",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__regex__1_6_0//:regex",
"@raze__rustc_hash__1_1_0//:rustc_hash",
"@raze__shlex__1_1_0//:shlex",
"@raze__which__4_2_5//:which",
diff --git a/third_party/cargo/remote/BUILD.backtrace-0.3.65.bazel b/third_party/cargo/remote/BUILD.backtrace-0.3.66.bazel
similarity index 94%
rename from third_party/cargo/remote/BUILD.backtrace-0.3.65.bazel
rename to third_party/cargo/remote/BUILD.backtrace-0.3.66.bazel
index 4e77d85..21cd9bf 100644
--- a/third_party/cargo/remote/BUILD.backtrace-0.3.65.bazel
+++ b/third_party/cargo/remote/BUILD.backtrace-0.3.66.bazel
@@ -56,7 +56,7 @@
"cargo-raze",
"manual",
],
- version = "0.3.65",
+ version = "0.3.66",
visibility = ["//visibility:private"],
deps = [
"@raze__cc__1_0_73//:cc",
@@ -87,15 +87,15 @@
"crate-name=backtrace",
"manual",
],
- version = "0.3.65",
+ version = "0.3.66",
# buildifier: leave-alone
deps = [
":backtrace_build_script",
"@raze__addr2line__0_17_0//:addr2line",
"@raze__cfg_if__1_0_0//:cfg_if",
"@raze__libc__0_2_126//:libc",
- "@raze__miniz_oxide__0_5_1//:miniz_oxide",
- "@raze__object__0_28_4//:object",
+ "@raze__miniz_oxide__0_5_3//:miniz_oxide",
+ "@raze__object__0_29_0//:object",
"@raze__rustc_demangle__0_1_21//:rustc_demangle",
],
)
diff --git a/third_party/cargo/remote/BUILD.bindgen-0.58.1.bazel b/third_party/cargo/remote/BUILD.bindgen-0.58.1.bazel
index afa92f1..c79629a 100644
--- a/third_party/cargo/remote/BUILD.bindgen-0.58.1.bazel
+++ b/third_party/cargo/remote/BUILD.bindgen-0.58.1.bazel
@@ -65,7 +65,7 @@
version = "0.58.1",
visibility = ["//visibility:private"],
deps = [
- "@raze__clang_sys__1_3_2//:clang_sys",
+ "@raze__clang_sys__1_3_3//:clang_sys",
],
)
@@ -102,16 +102,16 @@
":bindgen_build_script",
"@raze__bitflags__1_3_2//:bitflags",
"@raze__cexpr__0_4_0//:cexpr",
- "@raze__clang_sys__1_3_2//:clang_sys",
+ "@raze__clang_sys__1_3_3//:clang_sys",
"@raze__clap__2_34_0//:clap",
"@raze__env_logger__0_8_4//:env_logger",
"@raze__lazy_static__1_4_0//:lazy_static",
"@raze__lazycell__1_3_0//:lazycell",
"@raze__log__0_4_17//:log",
"@raze__peeking_take_while__0_1_2//:peeking_take_while",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__regex__1_5_5//:regex",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__regex__1_6_0//:regex",
"@raze__rustc_hash__1_1_0//:rustc_hash",
"@raze__shlex__1_1_0//:shlex",
"@raze__which__3_1_1//:which",
@@ -148,16 +148,16 @@
":bindgen_build_script",
"@raze__bitflags__1_3_2//:bitflags",
"@raze__cexpr__0_4_0//:cexpr",
- "@raze__clang_sys__1_3_2//:clang_sys",
+ "@raze__clang_sys__1_3_3//:clang_sys",
"@raze__clap__2_34_0//:clap",
"@raze__env_logger__0_8_4//:env_logger",
"@raze__lazy_static__1_4_0//:lazy_static",
"@raze__lazycell__1_3_0//:lazycell",
"@raze__log__0_4_17//:log",
"@raze__peeking_take_while__0_1_2//:peeking_take_while",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__regex__1_5_5//:regex",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__regex__1_6_0//:regex",
"@raze__rustc_hash__1_1_0//:rustc_hash",
"@raze__shlex__1_1_0//:shlex",
"@raze__which__3_1_1//:which",
diff --git a/third_party/cargo/remote/BUILD.clang-sys-1.3.2.bazel b/third_party/cargo/remote/BUILD.clang-sys-1.3.3.bazel
similarity index 97%
rename from third_party/cargo/remote/BUILD.clang-sys-1.3.2.bazel
rename to third_party/cargo/remote/BUILD.clang-sys-1.3.3.bazel
index 049c6dd..19592f5 100644
--- a/third_party/cargo/remote/BUILD.clang-sys-1.3.2.bazel
+++ b/third_party/cargo/remote/BUILD.clang-sys-1.3.3.bazel
@@ -65,7 +65,7 @@
"cargo-raze",
"manual",
],
- version = "1.3.2",
+ version = "1.3.3",
visibility = ["//visibility:private"],
deps = [
"@raze__glob__0_3_0//:glob",
@@ -98,7 +98,7 @@
"crate-name=clang-sys",
"manual",
],
- version = "1.3.2",
+ version = "1.3.3",
# buildifier: leave-alone
deps = [
":clang_sys_build_script",
diff --git a/third_party/cargo/remote/BUILD.clap-3.1.18.bazel b/third_party/cargo/remote/BUILD.clap-3.2.11.bazel
similarity index 87%
rename from third_party/cargo/remote/BUILD.clap-3.1.18.bazel
rename to third_party/cargo/remote/BUILD.clap-3.2.11.bazel
index 872a85e..0ae48b7 100644
--- a/third_party/cargo/remote/BUILD.clap-3.1.18.bazel
+++ b/third_party/cargo/remote/BUILD.clap-3.2.11.bazel
@@ -31,18 +31,6 @@
# Generated Targets
-# Unsupported target "01_default" with type "bench" omitted
-
-# Unsupported target "02_simple" with type "bench" omitted
-
-# Unsupported target "03_complex" with type "bench" omitted
-
-# Unsupported target "04_new_help" with type "bench" omitted
-
-# Unsupported target "05_ripgrep" with type "bench" omitted
-
-# Unsupported target "06_rustup" with type "bench" omitted
-
rust_binary(
# Prefix bin name to disambiguate from (probable) collision with lib name
# N.B.: The exact form of this is subject to change.
@@ -53,7 +41,7 @@
"cargo",
"color",
"default",
- "lazy_static",
+ "once_cell",
"std",
"strsim",
"suggestions",
@@ -61,7 +49,7 @@
],
crate_root = "src/bin/stdio-fixture.rs",
data = [],
- edition = "2018",
+ edition = "2021",
rustc_flags = [
"--cap-lints=allow",
],
@@ -70,15 +58,15 @@
"crate-name=stdio-fixture",
"manual",
],
- version = "3.1.18",
+ version = "3.2.11",
# buildifier: leave-alone
deps = [
":clap",
"@raze__atty__0_2_14//:atty",
"@raze__bitflags__1_3_2//:bitflags",
- "@raze__clap_lex__0_2_0//:clap_lex",
- "@raze__indexmap__1_8_1//:indexmap",
- "@raze__lazy_static__1_4_0//:lazy_static",
+ "@raze__clap_lex__0_2_4//:clap_lex",
+ "@raze__indexmap__1_9_1//:indexmap",
+ "@raze__once_cell__1_13_0//:once_cell",
"@raze__strsim__0_10_0//:strsim",
"@raze__termcolor__1_1_3//:termcolor",
"@raze__textwrap__0_15_0//:textwrap",
@@ -195,7 +183,7 @@
"cargo",
"color",
"default",
- "lazy_static",
+ "once_cell",
"std",
"strsim",
"suggestions",
@@ -203,7 +191,7 @@
],
crate_root = "src/lib.rs",
data = [],
- edition = "2018",
+ edition = "2021",
rustc_flags = [
"--cap-lints=allow",
],
@@ -212,14 +200,14 @@
"crate-name=clap",
"manual",
],
- version = "3.1.18",
+ version = "3.2.11",
# buildifier: leave-alone
deps = [
"@raze__atty__0_2_14//:atty",
"@raze__bitflags__1_3_2//:bitflags",
- "@raze__clap_lex__0_2_0//:clap_lex",
- "@raze__indexmap__1_8_1//:indexmap",
- "@raze__lazy_static__1_4_0//:lazy_static",
+ "@raze__clap_lex__0_2_4//:clap_lex",
+ "@raze__indexmap__1_9_1//:indexmap",
+ "@raze__once_cell__1_13_0//:once_cell",
"@raze__strsim__0_10_0//:strsim",
"@raze__termcolor__1_1_3//:termcolor",
"@raze__textwrap__0_15_0//:textwrap",
diff --git a/third_party/cargo/remote/BUILD.clap_lex-0.2.0.bazel b/third_party/cargo/remote/BUILD.clap_lex-0.2.4.bazel
similarity index 90%
rename from third_party/cargo/remote/BUILD.clap_lex-0.2.0.bazel
rename to third_party/cargo/remote/BUILD.clap_lex-0.2.4.bazel
index 3627766..7ad903b 100644
--- a/third_party/cargo/remote/BUILD.clap_lex-0.2.0.bazel
+++ b/third_party/cargo/remote/BUILD.clap_lex-0.2.4.bazel
@@ -38,7 +38,7 @@
],
crate_root = "src/lib.rs",
data = [],
- edition = "2018",
+ edition = "2021",
rustc_flags = [
"--cap-lints=allow",
],
@@ -47,9 +47,9 @@
"crate-name=clap_lex",
"manual",
],
- version = "0.2.0",
+ version = "0.2.4",
# buildifier: leave-alone
deps = [
- "@raze__os_str_bytes__6_0_1//:os_str_bytes",
+ "@raze__os_str_bytes__6_1_0//:os_str_bytes",
],
)
diff --git a/third_party/cargo/remote/BUILD.cxx-1.0.68.bazel b/third_party/cargo/remote/BUILD.cxx-1.0.71.bazel
similarity index 95%
rename from third_party/cargo/remote/BUILD.cxx-1.0.68.bazel
rename to third_party/cargo/remote/BUILD.cxx-1.0.71.bazel
index 363747f..4158d4d 100644
--- a/third_party/cargo/remote/BUILD.cxx-1.0.68.bazel
+++ b/third_party/cargo/remote/BUILD.cxx-1.0.71.bazel
@@ -45,7 +45,7 @@
data = [],
edition = "2018",
proc_macro_deps = [
- "@raze__cxxbridge_macro__1_0_68//:cxxbridge_macro",
+ "@raze__cxxbridge_macro__1_0_71//:cxxbridge_macro",
],
rustc_flags = [
"--cap-lints=allow",
@@ -55,7 +55,7 @@
"crate-name=cxx",
"manual",
],
- version = "1.0.68",
+ version = "1.0.71",
# buildifier: leave-alone
deps = [
"@raze__link_cplusplus__1_0_6//:link_cplusplus",
diff --git a/third_party/cargo/remote/BUILD.cxx-gen-0.7.68.bazel b/third_party/cargo/remote/BUILD.cxx-gen-0.7.71.bazel
similarity index 88%
rename from third_party/cargo/remote/BUILD.cxx-gen-0.7.68.bazel
rename to third_party/cargo/remote/BUILD.cxx-gen-0.7.71.bazel
index 2b29949..011b877 100644
--- a/third_party/cargo/remote/BUILD.cxx-gen-0.7.68.bazel
+++ b/third_party/cargo/remote/BUILD.cxx-gen-0.7.71.bazel
@@ -48,13 +48,13 @@
"crate-name=cxx-gen",
"manual",
],
- version = "0.7.68",
+ version = "0.7.71",
# buildifier: leave-alone
deps = [
"@raze__codespan_reporting__0_11_1//:codespan_reporting",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
diff --git a/third_party/cargo/remote/BUILD.cxxbridge-cmd-1.0.68.bazel b/third_party/cargo/remote/BUILD.cxxbridge-cmd-1.0.71.bazel
similarity index 82%
rename from third_party/cargo/remote/BUILD.cxxbridge-cmd-1.0.68.bazel
rename to third_party/cargo/remote/BUILD.cxxbridge-cmd-1.0.71.bazel
index 399187a..5a5fae3 100644
--- a/third_party/cargo/remote/BUILD.cxxbridge-cmd-1.0.68.bazel
+++ b/third_party/cargo/remote/BUILD.cxxbridge-cmd-1.0.71.bazel
@@ -50,15 +50,15 @@
"crate-name=cxxbridge",
"manual",
],
- version = "1.0.68",
+ version = "1.0.71",
# buildifier: leave-alone
deps = [
":cxxbridge_cmd",
- "@raze__clap__3_1_18//:clap",
+ "@raze__clap__3_2_11//:clap",
"@raze__codespan_reporting__0_11_1//:codespan_reporting",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
@@ -79,13 +79,13 @@
"crate-name=cxxbridge-cmd",
"manual",
],
- version = "1.0.68",
+ version = "1.0.71",
# buildifier: leave-alone
deps = [
- "@raze__clap__3_1_18//:clap",
+ "@raze__clap__3_2_11//:clap",
"@raze__codespan_reporting__0_11_1//:codespan_reporting",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
diff --git a/third_party/cargo/remote/BUILD.cxxbridge-flags-1.0.68.bazel b/third_party/cargo/remote/BUILD.cxxbridge-flags-1.0.71.bazel
similarity index 97%
rename from third_party/cargo/remote/BUILD.cxxbridge-flags-1.0.68.bazel
rename to third_party/cargo/remote/BUILD.cxxbridge-flags-1.0.71.bazel
index 05f205c..dc02110 100644
--- a/third_party/cargo/remote/BUILD.cxxbridge-flags-1.0.68.bazel
+++ b/third_party/cargo/remote/BUILD.cxxbridge-flags-1.0.71.bazel
@@ -48,7 +48,7 @@
"crate-name=cxxbridge-flags",
"manual",
],
- version = "1.0.68",
+ version = "1.0.71",
# buildifier: leave-alone
deps = [
],
diff --git a/third_party/cargo/remote/BUILD.cxxbridge-macro-1.0.68.bazel b/third_party/cargo/remote/BUILD.cxxbridge-macro-1.0.71.bazel
similarity index 87%
rename from third_party/cargo/remote/BUILD.cxxbridge-macro-1.0.68.bazel
rename to third_party/cargo/remote/BUILD.cxxbridge-macro-1.0.71.bazel
index 8e3bf0c..e41298e 100644
--- a/third_party/cargo/remote/BUILD.cxxbridge-macro-1.0.68.bazel
+++ b/third_party/cargo/remote/BUILD.cxxbridge-macro-1.0.71.bazel
@@ -47,11 +47,11 @@
"crate-name=cxxbridge-macro",
"manual",
],
- version = "1.0.68",
+ version = "1.0.71",
# buildifier: leave-alone
deps = [
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
diff --git a/third_party/cargo/remote/BUILD.either-1.6.1.bazel b/third_party/cargo/remote/BUILD.either-1.7.0.bazel
similarity index 95%
rename from third_party/cargo/remote/BUILD.either-1.6.1.bazel
rename to third_party/cargo/remote/BUILD.either-1.7.0.bazel
index 8807190..834d2c3 100644
--- a/third_party/cargo/remote/BUILD.either-1.6.1.bazel
+++ b/third_party/cargo/remote/BUILD.either-1.7.0.bazel
@@ -40,7 +40,7 @@
],
crate_root = "src/lib.rs",
data = [],
- edition = "2015",
+ edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
@@ -49,7 +49,7 @@
"crate-name=either",
"manual",
],
- version = "1.6.1",
+ version = "1.7.0",
# buildifier: leave-alone
deps = [
],
diff --git a/third_party/cargo/remote/BUILD.env_logger-0.8.4.bazel b/third_party/cargo/remote/BUILD.env_logger-0.8.4.bazel
index 14baebe..a9eb981 100644
--- a/third_party/cargo/remote/BUILD.env_logger-0.8.4.bazel
+++ b/third_party/cargo/remote/BUILD.env_logger-0.8.4.bazel
@@ -58,7 +58,7 @@
"@raze__atty__0_2_14//:atty",
"@raze__humantime__2_1_0//:humantime",
"@raze__log__0_4_17//:log",
- "@raze__regex__1_5_5//:regex",
+ "@raze__regex__1_6_0//:regex",
"@raze__termcolor__1_1_3//:termcolor",
],
)
diff --git a/third_party/cargo/remote/BUILD.env_logger-0.9.0.bazel b/third_party/cargo/remote/BUILD.env_logger-0.9.0.bazel
index d439a05..0f4e8d1 100644
--- a/third_party/cargo/remote/BUILD.env_logger-0.9.0.bazel
+++ b/third_party/cargo/remote/BUILD.env_logger-0.9.0.bazel
@@ -58,7 +58,7 @@
"@raze__atty__0_2_14//:atty",
"@raze__humantime__2_1_0//:humantime",
"@raze__log__0_4_17//:log",
- "@raze__regex__1_5_5//:regex",
+ "@raze__regex__1_6_0//:regex",
"@raze__termcolor__1_1_3//:termcolor",
],
)
diff --git a/third_party/cargo/remote/BUILD.fuchsia-cprng-0.1.1.bazel b/third_party/cargo/remote/BUILD.fuchsia-cprng-0.1.1.bazel
deleted file mode 100644
index 6dd24c5..0000000
--- a/third_party/cargo/remote/BUILD.fuchsia-cprng-0.1.1.bazel
+++ /dev/null
@@ -1,54 +0,0 @@
-"""
-@generated
-cargo-raze crate build file.
-
-DO NOT EDIT! Replaced on runs of cargo-raze
-"""
-
-# buildifier: disable=load
-load("@bazel_skylib//lib:selects.bzl", "selects")
-
-# buildifier: disable=load
-load(
- "@rules_rust//rust:defs.bzl",
- "rust_binary",
- "rust_library",
- "rust_proc_macro",
- "rust_test",
-)
-
-package(default_visibility = [
- # Public for visibility by "@raze__crate__version//" targets.
- #
- # Prefer access through "//third_party/cargo", which limits external
- # visibility to explicit Cargo.toml dependencies.
- "//visibility:public",
-])
-
-licenses([
- "restricted", # no license
-])
-
-# Generated Targets
-
-rust_library(
- name = "fuchsia_cprng",
- srcs = glob(["**/*.rs"]),
- crate_features = [
- ],
- crate_root = "src/lib.rs",
- data = [],
- edition = "2018",
- rustc_flags = [
- "--cap-lints=allow",
- ],
- tags = [
- "cargo-raze",
- "crate-name=fuchsia-cprng",
- "manual",
- ],
- version = "0.1.1",
- # buildifier: leave-alone
- deps = [
- ],
-)
diff --git a/third_party/cargo/remote/BUILD.hashbrown-0.11.2.bazel b/third_party/cargo/remote/BUILD.hashbrown-0.12.2.bazel
similarity index 87%
rename from third_party/cargo/remote/BUILD.hashbrown-0.11.2.bazel
rename to third_party/cargo/remote/BUILD.hashbrown-0.12.2.bazel
index 6ed9c49..7d61548 100644
--- a/third_party/cargo/remote/BUILD.hashbrown-0.11.2.bazel
+++ b/third_party/cargo/remote/BUILD.hashbrown-0.12.2.bazel
@@ -26,13 +26,15 @@
])
licenses([
- "notice", # Apache-2.0 from expression "Apache-2.0 OR MIT"
+ "notice", # MIT from expression "MIT OR Apache-2.0"
])
# Generated Targets
# Unsupported target "bench" with type "bench" omitted
+# Unsupported target "insert_unique_unchecked" with type "bench" omitted
+
rust_library(
name = "hashbrown",
srcs = glob(["**/*.rs"]),
@@ -41,7 +43,7 @@
],
crate_root = "src/lib.rs",
data = [],
- edition = "2018",
+ edition = "2021",
rustc_flags = [
"--cap-lints=allow",
],
@@ -50,7 +52,7 @@
"crate-name=hashbrown",
"manual",
],
- version = "0.11.2",
+ version = "0.12.2",
# buildifier: leave-alone
deps = [
],
diff --git a/third_party/cargo/remote/BUILD.indexmap-1.8.1.bazel b/third_party/cargo/remote/BUILD.indexmap-1.9.1.bazel
similarity index 91%
rename from third_party/cargo/remote/BUILD.indexmap-1.8.1.bazel
rename to third_party/cargo/remote/BUILD.indexmap-1.9.1.bazel
index f8a1ad2..baf2641 100644
--- a/third_party/cargo/remote/BUILD.indexmap-1.8.1.bazel
+++ b/third_party/cargo/remote/BUILD.indexmap-1.9.1.bazel
@@ -48,7 +48,7 @@
],
crate_root = "build.rs",
data = glob(["**"]),
- edition = "2018",
+ edition = "2021",
rustc_flags = [
"--cap-lints=allow",
],
@@ -56,7 +56,7 @@
"cargo-raze",
"manual",
],
- version = "1.8.1",
+ version = "1.9.1",
visibility = ["//visibility:private"],
deps = [
"@raze__autocfg__1_1_0//:autocfg",
@@ -76,7 +76,7 @@
],
crate_root = "src/lib.rs",
data = [],
- edition = "2018",
+ edition = "2021",
rustc_flags = [
"--cap-lints=allow",
],
@@ -85,12 +85,12 @@
"crate-name=indexmap",
"manual",
],
- version = "1.8.1",
+ version = "1.9.1",
# buildifier: leave-alone
deps = [
":indexmap_build_script",
- "@raze__hashbrown__0_11_2//:hashbrown",
- "@raze__serde__1_0_137//:serde",
+ "@raze__hashbrown__0_12_2//:hashbrown",
+ "@raze__serde__1_0_139//:serde",
],
)
diff --git a/third_party/cargo/remote/BUILD.itertools-0.10.3.bazel b/third_party/cargo/remote/BUILD.itertools-0.10.3.bazel
index 46ee0f9..d919774 100644
--- a/third_party/cargo/remote/BUILD.itertools-0.10.3.bazel
+++ b/third_party/cargo/remote/BUILD.itertools-0.10.3.bazel
@@ -71,7 +71,7 @@
version = "0.10.3",
# buildifier: leave-alone
deps = [
- "@raze__either__1_6_1//:either",
+ "@raze__either__1_7_0//:either",
],
)
diff --git a/third_party/cargo/remote/BUILD.itertools-0.9.0.bazel b/third_party/cargo/remote/BUILD.itertools-0.9.0.bazel
index ddc54c6..6dc4e8e 100644
--- a/third_party/cargo/remote/BUILD.itertools-0.9.0.bazel
+++ b/third_party/cargo/remote/BUILD.itertools-0.9.0.bazel
@@ -66,7 +66,7 @@
version = "0.9.0",
# buildifier: leave-alone
deps = [
- "@raze__either__1_6_1//:either",
+ "@raze__either__1_7_0//:either",
],
)
diff --git a/third_party/cargo/remote/BUILD.miette-4.7.1.bazel b/third_party/cargo/remote/BUILD.miette-4.7.1.bazel
index 0d176e6..1ce2130 100644
--- a/third_party/cargo/remote/BUILD.miette-4.7.1.bazel
+++ b/third_party/cargo/remote/BUILD.miette-4.7.1.bazel
@@ -65,8 +65,8 @@
# buildifier: leave-alone
deps = [
"@raze__atty__0_2_14//:atty",
- "@raze__backtrace__0_3_65//:backtrace",
- "@raze__once_cell__1_10_0//:once_cell",
+ "@raze__backtrace__0_3_66//:backtrace",
+ "@raze__once_cell__1_13_0//:once_cell",
"@raze__owo_colors__3_4_0//:owo_colors",
"@raze__supports_color__1_3_0//:supports_color",
"@raze__supports_hyperlinks__1_2_0//:supports_hyperlinks",
diff --git a/third_party/cargo/remote/BUILD.miette-derive-4.7.1.bazel b/third_party/cargo/remote/BUILD.miette-derive-4.7.1.bazel
index 6e2b8ee..1f97719 100644
--- a/third_party/cargo/remote/BUILD.miette-derive-4.7.1.bazel
+++ b/third_party/cargo/remote/BUILD.miette-derive-4.7.1.bazel
@@ -50,8 +50,8 @@
version = "4.7.1",
# buildifier: leave-alone
deps = [
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
diff --git a/third_party/cargo/remote/BUILD.miniz_oxide-0.5.1.bazel b/third_party/cargo/remote/BUILD.miniz_oxide-0.5.3.bazel
similarity index 97%
rename from third_party/cargo/remote/BUILD.miniz_oxide-0.5.1.bazel
rename to third_party/cargo/remote/BUILD.miniz_oxide-0.5.3.bazel
index 0d2eae2..70f3666 100644
--- a/third_party/cargo/remote/BUILD.miniz_oxide-0.5.1.bazel
+++ b/third_party/cargo/remote/BUILD.miniz_oxide-0.5.3.bazel
@@ -47,7 +47,7 @@
"crate-name=miniz_oxide",
"manual",
],
- version = "0.5.1",
+ version = "0.5.3",
# buildifier: leave-alone
deps = [
"@raze__adler__1_0_2//:adler",
diff --git a/third_party/cargo/remote/BUILD.moveit-0.5.0.bazel b/third_party/cargo/remote/BUILD.moveit-0.5.0.bazel
index c81ec4a..38f2da3 100644
--- a/third_party/cargo/remote/BUILD.moveit-0.5.0.bazel
+++ b/third_party/cargo/remote/BUILD.moveit-0.5.0.bazel
@@ -53,6 +53,6 @@
version = "0.5.0",
# buildifier: leave-alone
deps = [
- "@raze__cxx__1_0_68//:cxx",
+ "@raze__cxx__1_0_71//:cxx",
],
)
diff --git a/third_party/cargo/remote/BUILD.object-0.28.4.bazel b/third_party/cargo/remote/BUILD.object-0.29.0.bazel
similarity index 97%
rename from third_party/cargo/remote/BUILD.object-0.28.4.bazel
rename to third_party/cargo/remote/BUILD.object-0.29.0.bazel
index e04c45c..273891f 100644
--- a/third_party/cargo/remote/BUILD.object-0.28.4.bazel
+++ b/third_party/cargo/remote/BUILD.object-0.29.0.bazel
@@ -54,7 +54,7 @@
"crate-name=object",
"manual",
],
- version = "0.28.4",
+ version = "0.29.0",
# buildifier: leave-alone
deps = [
"@raze__memchr__2_5_0//:memchr",
diff --git a/third_party/cargo/remote/BUILD.once_cell-1.10.0.bazel b/third_party/cargo/remote/BUILD.once_cell-1.13.0.bazel
similarity index 98%
rename from third_party/cargo/remote/BUILD.once_cell-1.10.0.bazel
rename to third_party/cargo/remote/BUILD.once_cell-1.13.0.bazel
index dc4d474..741f97e 100644
--- a/third_party/cargo/remote/BUILD.once_cell-1.10.0.bazel
+++ b/third_party/cargo/remote/BUILD.once_cell-1.13.0.bazel
@@ -65,7 +65,7 @@
"crate-name=once_cell",
"manual",
],
- version = "1.10.0",
+ version = "1.13.0",
# buildifier: leave-alone
deps = [
],
diff --git a/third_party/cargo/remote/BUILD.os_str_bytes-6.0.1.bazel b/third_party/cargo/remote/BUILD.os_str_bytes-6.1.0.bazel
similarity index 97%
rename from third_party/cargo/remote/BUILD.os_str_bytes-6.0.1.bazel
rename to third_party/cargo/remote/BUILD.os_str_bytes-6.1.0.bazel
index ac08769..cbbf9c5 100644
--- a/third_party/cargo/remote/BUILD.os_str_bytes-6.0.1.bazel
+++ b/third_party/cargo/remote/BUILD.os_str_bytes-6.1.0.bazel
@@ -48,7 +48,7 @@
"crate-name=os_str_bytes",
"manual",
],
- version = "6.0.1",
+ version = "6.1.0",
# buildifier: leave-alone
deps = [
],
diff --git a/third_party/cargo/remote/BUILD.proc-macro-error-1.0.4.bazel b/third_party/cargo/remote/BUILD.proc-macro-error-1.0.4.bazel
index d571524..66f5bfa 100644
--- a/third_party/cargo/remote/BUILD.proc-macro-error-1.0.4.bazel
+++ b/third_party/cargo/remote/BUILD.proc-macro-error-1.0.4.bazel
@@ -90,9 +90,9 @@
# buildifier: leave-alone
deps = [
":proc_macro_error_build_script",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
diff --git a/third_party/cargo/remote/BUILD.proc-macro-error-attr-1.0.4.bazel b/third_party/cargo/remote/BUILD.proc-macro-error-attr-1.0.4.bazel
index b737055..fdca1a4 100644
--- a/third_party/cargo/remote/BUILD.proc-macro-error-attr-1.0.4.bazel
+++ b/third_party/cargo/remote/BUILD.proc-macro-error-attr-1.0.4.bazel
@@ -81,7 +81,7 @@
# buildifier: leave-alone
deps = [
":proc_macro_error_attr_build_script",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
],
)
diff --git a/third_party/cargo/remote/BUILD.proc-macro2-1.0.39.bazel b/third_party/cargo/remote/BUILD.proc-macro2-1.0.40.bazel
similarity index 94%
rename from third_party/cargo/remote/BUILD.proc-macro2-1.0.39.bazel
rename to third_party/cargo/remote/BUILD.proc-macro2-1.0.40.bazel
index 82d43bd..72d4c9a 100644
--- a/third_party/cargo/remote/BUILD.proc-macro2-1.0.39.bazel
+++ b/third_party/cargo/remote/BUILD.proc-macro2-1.0.40.bazel
@@ -57,7 +57,7 @@
"cargo-raze",
"manual",
],
- version = "1.0.39",
+ version = "1.0.40",
visibility = ["//visibility:private"],
deps = [
],
@@ -82,11 +82,11 @@
"crate-name=proc-macro2",
"manual",
],
- version = "1.0.39",
+ version = "1.0.40",
# buildifier: leave-alone
deps = [
":proc_macro2_build_script",
- "@raze__unicode_ident__1_0_0//:unicode_ident",
+ "@raze__unicode_ident__1_0_1//:unicode_ident",
],
)
diff --git a/third_party/cargo/remote/BUILD.quote-1.0.18.bazel b/third_party/cargo/remote/BUILD.quote-1.0.18.bazel
deleted file mode 100644
index daa4fcb..0000000
--- a/third_party/cargo/remote/BUILD.quote-1.0.18.bazel
+++ /dev/null
@@ -1,61 +0,0 @@
-"""
-@generated
-cargo-raze crate build file.
-
-DO NOT EDIT! Replaced on runs of cargo-raze
-"""
-
-# buildifier: disable=load
-load("@bazel_skylib//lib:selects.bzl", "selects")
-
-# buildifier: disable=load
-load(
- "@rules_rust//rust:defs.bzl",
- "rust_binary",
- "rust_library",
- "rust_proc_macro",
- "rust_test",
-)
-
-package(default_visibility = [
- # Public for visibility by "@raze__crate__version//" targets.
- #
- # Prefer access through "//third_party/cargo", which limits external
- # visibility to explicit Cargo.toml dependencies.
- "//visibility:public",
-])
-
-licenses([
- "notice", # MIT from expression "MIT OR Apache-2.0"
-])
-
-# Generated Targets
-
-rust_library(
- name = "quote",
- srcs = glob(["**/*.rs"]),
- crate_features = [
- "default",
- "proc-macro",
- ],
- crate_root = "src/lib.rs",
- data = [],
- edition = "2018",
- rustc_flags = [
- "--cap-lints=allow",
- ],
- tags = [
- "cargo-raze",
- "crate-name=quote",
- "manual",
- ],
- version = "1.0.18",
- # buildifier: leave-alone
- deps = [
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- ],
-)
-
-# Unsupported target "compiletest" with type "test" omitted
-
-# Unsupported target "test" with type "test" omitted
diff --git a/third_party/cargo/remote/BUILD.serde-1.0.137.bazel b/third_party/cargo/remote/BUILD.quote-1.0.20.bazel
similarity index 78%
copy from third_party/cargo/remote/BUILD.serde-1.0.137.bazel
copy to third_party/cargo/remote/BUILD.quote-1.0.20.bazel
index f171109..d23d8a4 100644
--- a/third_party/cargo/remote/BUILD.serde-1.0.137.bazel
+++ b/third_party/cargo/remote/BUILD.quote-1.0.20.bazel
@@ -38,19 +38,17 @@
)
cargo_build_script(
- name = "serde_build_script",
+ name = "quote_build_script",
srcs = glob(["**/*.rs"]),
build_script_env = {
},
crate_features = [
"default",
- "derive",
- "serde_derive",
- "std",
+ "proc-macro",
],
crate_root = "build.rs",
data = glob(["**"]),
- edition = "2015",
+ edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
@@ -58,38 +56,38 @@
"cargo-raze",
"manual",
],
- version = "1.0.137",
+ version = "1.0.20",
visibility = ["//visibility:private"],
deps = [
],
)
rust_library(
- name = "serde",
+ name = "quote",
srcs = glob(["**/*.rs"]),
crate_features = [
"default",
- "derive",
- "serde_derive",
- "std",
+ "proc-macro",
],
crate_root = "src/lib.rs",
data = [],
- edition = "2015",
- proc_macro_deps = [
- "@raze__serde_derive__1_0_137//:serde_derive",
- ],
+ edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-raze",
- "crate-name=serde",
+ "crate-name=quote",
"manual",
],
- version = "1.0.137",
+ version = "1.0.20",
# buildifier: leave-alone
deps = [
- ":serde_build_script",
+ ":quote_build_script",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
],
)
+
+# Unsupported target "compiletest" with type "test" omitted
+
+# Unsupported target "test" with type "test" omitted
diff --git a/third_party/cargo/remote/BUILD.rand-0.4.6.bazel b/third_party/cargo/remote/BUILD.rand-0.4.6.bazel
deleted file mode 100644
index aa44832..0000000
--- a/third_party/cargo/remote/BUILD.rand-0.4.6.bazel
+++ /dev/null
@@ -1,75 +0,0 @@
-"""
-@generated
-cargo-raze crate build file.
-
-DO NOT EDIT! Replaced on runs of cargo-raze
-"""
-
-# buildifier: disable=load
-load("@bazel_skylib//lib:selects.bzl", "selects")
-
-# buildifier: disable=load
-load(
- "@rules_rust//rust:defs.bzl",
- "rust_binary",
- "rust_library",
- "rust_proc_macro",
- "rust_test",
-)
-
-package(default_visibility = [
- # Public for visibility by "@raze__crate__version//" targets.
- #
- # Prefer access through "//third_party/cargo", which limits external
- # visibility to explicit Cargo.toml dependencies.
- "//visibility:public",
-])
-
-licenses([
- "notice", # MIT from expression "MIT OR Apache-2.0"
-])
-
-# Generated Targets
-
-# Unsupported target "bench" with type "bench" omitted
-
-# Unsupported target "generators" with type "bench" omitted
-
-# Unsupported target "misc" with type "bench" omitted
-
-rust_library(
- name = "rand",
- srcs = glob(["**/*.rs"]),
- aliases = {
- },
- crate_features = [
- "default",
- "libc",
- "std",
- ],
- crate_root = "src/lib.rs",
- data = [],
- edition = "2015",
- rustc_flags = [
- "--cap-lints=allow",
- ],
- tags = [
- "cargo-raze",
- "crate-name=rand",
- "manual",
- ],
- version = "0.4.6",
- # buildifier: leave-alone
- deps = [
- ] + selects.with_or({
- # cfg(unix)
- (
- "@rules_rust//rust/platform:x86_64-unknown-linux-gnu",
- "@rules_rust//rust/platform:arm-unknown-linux-gnueabi",
- "@rules_rust//rust/platform:armv7-unknown-linux-gnueabihf",
- ): [
- "@raze__libc__0_2_126//:libc",
- ],
- "//conditions:default": [],
- }),
-)
diff --git a/third_party/cargo/remote/BUILD.rand_core-0.3.1.bazel b/third_party/cargo/remote/BUILD.rand_core-0.3.1.bazel
deleted file mode 100644
index c2649f2..0000000
--- a/third_party/cargo/remote/BUILD.rand_core-0.3.1.bazel
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-@generated
-cargo-raze crate build file.
-
-DO NOT EDIT! Replaced on runs of cargo-raze
-"""
-
-# buildifier: disable=load
-load("@bazel_skylib//lib:selects.bzl", "selects")
-
-# buildifier: disable=load
-load(
- "@rules_rust//rust:defs.bzl",
- "rust_binary",
- "rust_library",
- "rust_proc_macro",
- "rust_test",
-)
-
-package(default_visibility = [
- # Public for visibility by "@raze__crate__version//" targets.
- #
- # Prefer access through "//third_party/cargo", which limits external
- # visibility to explicit Cargo.toml dependencies.
- "//visibility:public",
-])
-
-licenses([
- "notice", # MIT from expression "MIT OR Apache-2.0"
-])
-
-# Generated Targets
-
-rust_library(
- name = "rand_core",
- srcs = glob(["**/*.rs"]),
- crate_features = [
- ],
- crate_root = "src/lib.rs",
- data = [],
- edition = "2015",
- rustc_flags = [
- "--cap-lints=allow",
- ],
- tags = [
- "cargo-raze",
- "crate-name=rand_core",
- "manual",
- ],
- version = "0.3.1",
- # buildifier: leave-alone
- deps = [
- "@raze__rand_core__0_4_2//:rand_core",
- ],
-)
diff --git a/third_party/cargo/remote/BUILD.rand_core-0.4.2.bazel b/third_party/cargo/remote/BUILD.rand_core-0.4.2.bazel
deleted file mode 100644
index ea89b87..0000000
--- a/third_party/cargo/remote/BUILD.rand_core-0.4.2.bazel
+++ /dev/null
@@ -1,54 +0,0 @@
-"""
-@generated
-cargo-raze crate build file.
-
-DO NOT EDIT! Replaced on runs of cargo-raze
-"""
-
-# buildifier: disable=load
-load("@bazel_skylib//lib:selects.bzl", "selects")
-
-# buildifier: disable=load
-load(
- "@rules_rust//rust:defs.bzl",
- "rust_binary",
- "rust_library",
- "rust_proc_macro",
- "rust_test",
-)
-
-package(default_visibility = [
- # Public for visibility by "@raze__crate__version//" targets.
- #
- # Prefer access through "//third_party/cargo", which limits external
- # visibility to explicit Cargo.toml dependencies.
- "//visibility:public",
-])
-
-licenses([
- "notice", # MIT from expression "MIT OR Apache-2.0"
-])
-
-# Generated Targets
-
-rust_library(
- name = "rand_core",
- srcs = glob(["**/*.rs"]),
- crate_features = [
- ],
- crate_root = "src/lib.rs",
- data = [],
- edition = "2015",
- rustc_flags = [
- "--cap-lints=allow",
- ],
- tags = [
- "cargo-raze",
- "crate-name=rand_core",
- "manual",
- ],
- version = "0.4.2",
- # buildifier: leave-alone
- deps = [
- ],
-)
diff --git a/third_party/cargo/remote/BUILD.rdrand-0.4.0.bazel b/third_party/cargo/remote/BUILD.rdrand-0.4.0.bazel
deleted file mode 100644
index 768dc0b..0000000
--- a/third_party/cargo/remote/BUILD.rdrand-0.4.0.bazel
+++ /dev/null
@@ -1,63 +0,0 @@
-"""
-@generated
-cargo-raze crate build file.
-
-DO NOT EDIT! Replaced on runs of cargo-raze
-"""
-
-# buildifier: disable=load
-load("@bazel_skylib//lib:selects.bzl", "selects")
-
-# buildifier: disable=load
-load(
- "@rules_rust//rust:defs.bzl",
- "rust_binary",
- "rust_library",
- "rust_proc_macro",
- "rust_test",
-)
-
-package(default_visibility = [
- # Public for visibility by "@raze__crate__version//" targets.
- #
- # Prefer access through "//third_party/cargo", which limits external
- # visibility to explicit Cargo.toml dependencies.
- "//visibility:public",
-])
-
-licenses([
- "notice", # ISC from expression "ISC"
-])
-
-# Generated Targets
-
-# Unsupported target "rdrand" with type "bench" omitted
-
-# Unsupported target "rdseed" with type "bench" omitted
-
-# Unsupported target "std" with type "bench" omitted
-
-rust_library(
- name = "rdrand",
- srcs = glob(["**/*.rs"]),
- crate_features = [
- "default",
- "std",
- ],
- crate_root = "src/lib.rs",
- data = [],
- edition = "2015",
- rustc_flags = [
- "--cap-lints=allow",
- ],
- tags = [
- "cargo-raze",
- "crate-name=rdrand",
- "manual",
- ],
- version = "0.4.0",
- # buildifier: leave-alone
- deps = [
- "@raze__rand_core__0_3_1//:rand_core",
- ],
-)
diff --git a/third_party/cargo/remote/BUILD.regex-1.5.5.bazel b/third_party/cargo/remote/BUILD.regex-1.6.0.bazel
similarity index 96%
rename from third_party/cargo/remote/BUILD.regex-1.5.5.bazel
rename to third_party/cargo/remote/BUILD.regex-1.6.0.bazel
index 6180973..af9b051 100644
--- a/third_party/cargo/remote/BUILD.regex-1.5.5.bazel
+++ b/third_party/cargo/remote/BUILD.regex-1.6.0.bazel
@@ -76,12 +76,12 @@
"crate-name=regex",
"manual",
],
- version = "1.5.5",
+ version = "1.6.0",
# buildifier: leave-alone
deps = [
"@raze__aho_corasick__0_7_18//:aho_corasick",
"@raze__memchr__2_5_0//:memchr",
- "@raze__regex_syntax__0_6_25//:regex_syntax",
+ "@raze__regex_syntax__0_6_27//:regex_syntax",
],
)
diff --git a/third_party/cargo/remote/BUILD.regex-syntax-0.6.25.bazel b/third_party/cargo/remote/BUILD.regex-syntax-0.6.27.bazel
similarity index 97%
rename from third_party/cargo/remote/BUILD.regex-syntax-0.6.25.bazel
rename to third_party/cargo/remote/BUILD.regex-syntax-0.6.27.bazel
index 29becb6..7dba3ce 100644
--- a/third_party/cargo/remote/BUILD.regex-syntax-0.6.25.bazel
+++ b/third_party/cargo/remote/BUILD.regex-syntax-0.6.27.bazel
@@ -58,7 +58,7 @@
"crate-name=regex-syntax",
"manual",
],
- version = "0.6.25",
+ version = "0.6.27",
# buildifier: leave-alone
deps = [
],
diff --git a/third_party/cargo/remote/BUILD.rustversion-1.0.6.bazel b/third_party/cargo/remote/BUILD.rustversion-1.0.7.bazel
similarity index 97%
rename from third_party/cargo/remote/BUILD.rustversion-1.0.6.bazel
rename to third_party/cargo/remote/BUILD.rustversion-1.0.7.bazel
index b106cae..f407ef6 100644
--- a/third_party/cargo/remote/BUILD.rustversion-1.0.6.bazel
+++ b/third_party/cargo/remote/BUILD.rustversion-1.0.7.bazel
@@ -54,7 +54,7 @@
"cargo-raze",
"manual",
],
- version = "1.0.6",
+ version = "1.0.7",
visibility = ["//visibility:private"],
deps = [
],
@@ -76,7 +76,7 @@
"crate-name=rustversion",
"manual",
],
- version = "1.0.6",
+ version = "1.0.7",
# buildifier: leave-alone
deps = [
":rustversion_build_script",
diff --git a/third_party/cargo/remote/BUILD.serde-1.0.137.bazel b/third_party/cargo/remote/BUILD.serde-1.0.139.bazel
similarity index 94%
rename from third_party/cargo/remote/BUILD.serde-1.0.137.bazel
rename to third_party/cargo/remote/BUILD.serde-1.0.139.bazel
index f171109..ecc80bc 100644
--- a/third_party/cargo/remote/BUILD.serde-1.0.137.bazel
+++ b/third_party/cargo/remote/BUILD.serde-1.0.139.bazel
@@ -58,7 +58,7 @@
"cargo-raze",
"manual",
],
- version = "1.0.137",
+ version = "1.0.139",
visibility = ["//visibility:private"],
deps = [
],
@@ -77,7 +77,7 @@
data = [],
edition = "2015",
proc_macro_deps = [
- "@raze__serde_derive__1_0_137//:serde_derive",
+ "@raze__serde_derive__1_0_139//:serde_derive",
],
rustc_flags = [
"--cap-lints=allow",
@@ -87,7 +87,7 @@
"crate-name=serde",
"manual",
],
- version = "1.0.137",
+ version = "1.0.139",
# buildifier: leave-alone
deps = [
":serde_build_script",
diff --git a/third_party/cargo/remote/BUILD.serde_derive-1.0.137.bazel b/third_party/cargo/remote/BUILD.serde_derive-1.0.139.bazel
similarity index 90%
rename from third_party/cargo/remote/BUILD.serde_derive-1.0.137.bazel
rename to third_party/cargo/remote/BUILD.serde_derive-1.0.139.bazel
index e6b7a2b..29ce18e 100644
--- a/third_party/cargo/remote/BUILD.serde_derive-1.0.137.bazel
+++ b/third_party/cargo/remote/BUILD.serde_derive-1.0.139.bazel
@@ -55,7 +55,7 @@
"cargo-raze",
"manual",
],
- version = "1.0.137",
+ version = "1.0.139",
visibility = ["//visibility:private"],
deps = [
],
@@ -78,12 +78,12 @@
"crate-name=serde_derive",
"manual",
],
- version = "1.0.137",
+ version = "1.0.139",
# buildifier: leave-alone
deps = [
":serde_derive_build_script",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
diff --git a/third_party/cargo/remote/BUILD.serde_json-1.0.81.bazel b/third_party/cargo/remote/BUILD.serde_json-1.0.82.bazel
similarity index 95%
rename from third_party/cargo/remote/BUILD.serde_json-1.0.81.bazel
rename to third_party/cargo/remote/BUILD.serde_json-1.0.82.bazel
index 1869171..beb9b62 100644
--- a/third_party/cargo/remote/BUILD.serde_json-1.0.81.bazel
+++ b/third_party/cargo/remote/BUILD.serde_json-1.0.82.bazel
@@ -56,7 +56,7 @@
"cargo-raze",
"manual",
],
- version = "1.0.81",
+ version = "1.0.82",
visibility = ["//visibility:private"],
deps = [
],
@@ -80,13 +80,13 @@
"crate-name=serde_json",
"manual",
],
- version = "1.0.81",
+ version = "1.0.82",
# buildifier: leave-alone
deps = [
":serde_json_build_script",
"@raze__itoa__1_0_2//:itoa",
"@raze__ryu__1_0_10//:ryu",
- "@raze__serde__1_0_137//:serde",
+ "@raze__serde__1_0_139//:serde",
],
)
diff --git a/third_party/cargo/remote/BUILD.smallvec-1.8.0.bazel b/third_party/cargo/remote/BUILD.smallvec-1.9.0.bazel
similarity index 97%
rename from third_party/cargo/remote/BUILD.smallvec-1.8.0.bazel
rename to third_party/cargo/remote/BUILD.smallvec-1.9.0.bazel
index 06f9025..d6861e5 100644
--- a/third_party/cargo/remote/BUILD.smallvec-1.8.0.bazel
+++ b/third_party/cargo/remote/BUILD.smallvec-1.9.0.bazel
@@ -49,7 +49,7 @@
"crate-name=smallvec",
"manual",
],
- version = "1.8.0",
+ version = "1.9.0",
# buildifier: leave-alone
deps = [
],
diff --git a/third_party/cargo/remote/BUILD.strum_macros-0.24.0.bazel b/third_party/cargo/remote/BUILD.strum_macros-0.24.2.bazel
similarity index 84%
rename from third_party/cargo/remote/BUILD.strum_macros-0.24.0.bazel
rename to third_party/cargo/remote/BUILD.strum_macros-0.24.2.bazel
index 6d97605..13aceb8 100644
--- a/third_party/cargo/remote/BUILD.strum_macros-0.24.0.bazel
+++ b/third_party/cargo/remote/BUILD.strum_macros-0.24.2.bazel
@@ -40,7 +40,7 @@
data = [],
edition = "2018",
proc_macro_deps = [
- "@raze__rustversion__1_0_6//:rustversion",
+ "@raze__rustversion__1_0_7//:rustversion",
],
rustc_flags = [
"--cap-lints=allow",
@@ -50,12 +50,12 @@
"crate-name=strum_macros",
"manual",
],
- version = "0.24.0",
+ version = "0.24.2",
# buildifier: leave-alone
deps = [
"@raze__heck__0_4_0//:heck",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
diff --git a/third_party/cargo/remote/BUILD.syn-1.0.95.bazel b/third_party/cargo/remote/BUILD.syn-1.0.98.bazel
similarity index 94%
rename from third_party/cargo/remote/BUILD.syn-1.0.95.bazel
rename to third_party/cargo/remote/BUILD.syn-1.0.98.bazel
index 2e805e7..4f04f19 100644
--- a/third_party/cargo/remote/BUILD.syn-1.0.95.bazel
+++ b/third_party/cargo/remote/BUILD.syn-1.0.98.bazel
@@ -63,7 +63,7 @@
"cargo-raze",
"manual",
],
- version = "1.0.95",
+ version = "1.0.98",
visibility = ["//visibility:private"],
deps = [
],
@@ -98,13 +98,13 @@
"crate-name=syn",
"manual",
],
- version = "1.0.95",
+ version = "1.0.98",
# buildifier: leave-alone
deps = [
":syn_build_script",
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__unicode_ident__1_0_0//:unicode_ident",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__unicode_ident__1_0_1//:unicode_ident",
],
)
diff --git a/third_party/cargo/remote/BUILD.tempdir-0.3.7.bazel b/third_party/cargo/remote/BUILD.tempdir-0.3.7.bazel
deleted file mode 100644
index a32d1f4..0000000
--- a/third_party/cargo/remote/BUILD.tempdir-0.3.7.bazel
+++ /dev/null
@@ -1,58 +0,0 @@
-"""
-@generated
-cargo-raze crate build file.
-
-DO NOT EDIT! Replaced on runs of cargo-raze
-"""
-
-# buildifier: disable=load
-load("@bazel_skylib//lib:selects.bzl", "selects")
-
-# buildifier: disable=load
-load(
- "@rules_rust//rust:defs.bzl",
- "rust_binary",
- "rust_library",
- "rust_proc_macro",
- "rust_test",
-)
-
-package(default_visibility = [
- # Public for visibility by "@raze__crate__version//" targets.
- #
- # Prefer access through "//third_party/cargo", which limits external
- # visibility to explicit Cargo.toml dependencies.
- "//visibility:public",
-])
-
-licenses([
- "notice", # MIT from expression "MIT OR Apache-2.0"
-])
-
-# Generated Targets
-
-rust_library(
- name = "tempdir",
- srcs = glob(["**/*.rs"]),
- crate_features = [
- ],
- crate_root = "src/lib.rs",
- data = [],
- edition = "2015",
- rustc_flags = [
- "--cap-lints=allow",
- ],
- tags = [
- "cargo-raze",
- "crate-name=tempdir",
- "manual",
- ],
- version = "0.3.7",
- # buildifier: leave-alone
- deps = [
- "@raze__rand__0_4_6//:rand",
- "@raze__remove_dir_all__0_5_3//:remove_dir_all",
- ],
-)
-
-# Unsupported target "smoke" with type "test" omitted
diff --git a/third_party/cargo/remote/BUILD.test-log-0.2.10.bazel b/third_party/cargo/remote/BUILD.test-log-0.2.10.bazel
index c1c1ac5..f67c730 100644
--- a/third_party/cargo/remote/BUILD.test-log-0.2.10.bazel
+++ b/third_party/cargo/remote/BUILD.test-log-0.2.10.bazel
@@ -52,8 +52,8 @@
version = "0.2.10",
# buildifier: leave-alone
deps = [
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
diff --git a/third_party/cargo/remote/BUILD.thiserror-impl-1.0.31.bazel b/third_party/cargo/remote/BUILD.thiserror-impl-1.0.31.bazel
index c357661..b2f0429 100644
--- a/third_party/cargo/remote/BUILD.thiserror-impl-1.0.31.bazel
+++ b/third_party/cargo/remote/BUILD.thiserror-impl-1.0.31.bazel
@@ -50,8 +50,8 @@
version = "1.0.31",
# buildifier: leave-alone
deps = [
- "@raze__proc_macro2__1_0_39//:proc_macro2",
- "@raze__quote__1_0_18//:quote",
- "@raze__syn__1_0_95//:syn",
+ "@raze__proc_macro2__1_0_40//:proc_macro2",
+ "@raze__quote__1_0_20//:quote",
+ "@raze__syn__1_0_98//:syn",
],
)
diff --git a/third_party/cargo/remote/BUILD.toml-0.5.9.bazel b/third_party/cargo/remote/BUILD.toml-0.5.9.bazel
index c78c445..ce029fa 100644
--- a/third_party/cargo/remote/BUILD.toml-0.5.9.bazel
+++ b/third_party/cargo/remote/BUILD.toml-0.5.9.bazel
@@ -57,7 +57,7 @@
version = "0.5.9",
# buildifier: leave-alone
deps = [
- "@raze__serde__1_0_137//:serde",
+ "@raze__serde__1_0_139//:serde",
],
)
diff --git a/third_party/cargo/remote/BUILD.trybuild-1.0.61.bazel b/third_party/cargo/remote/BUILD.trybuild-1.0.63.bazel
similarity index 88%
rename from third_party/cargo/remote/BUILD.trybuild-1.0.61.bazel
rename to third_party/cargo/remote/BUILD.trybuild-1.0.63.bazel
index 63092b8..9a9d516 100644
--- a/third_party/cargo/remote/BUILD.trybuild-1.0.61.bazel
+++ b/third_party/cargo/remote/BUILD.trybuild-1.0.63.bazel
@@ -54,7 +54,7 @@
"cargo-raze",
"manual",
],
- version = "1.0.61",
+ version = "1.0.63",
visibility = ["//visibility:private"],
deps = [
],
@@ -69,7 +69,7 @@
data = [],
edition = "2018",
proc_macro_deps = [
- "@raze__serde_derive__1_0_137//:serde_derive",
+ "@raze__serde_derive__1_0_139//:serde_derive",
],
rustc_flags = [
"--cap-lints=allow",
@@ -79,14 +79,14 @@
"crate-name=trybuild",
"manual",
],
- version = "1.0.61",
+ version = "1.0.63",
# buildifier: leave-alone
deps = [
":trybuild_build_script",
"@raze__glob__0_3_0//:glob",
- "@raze__once_cell__1_10_0//:once_cell",
- "@raze__serde__1_0_137//:serde",
- "@raze__serde_json__1_0_81//:serde_json",
+ "@raze__once_cell__1_13_0//:once_cell",
+ "@raze__serde__1_0_139//:serde",
+ "@raze__serde_json__1_0_82//:serde_json",
"@raze__termcolor__1_1_3//:termcolor",
"@raze__toml__0_5_9//:toml",
],
diff --git a/third_party/cargo/remote/BUILD.unicode-ident-1.0.0.bazel b/third_party/cargo/remote/BUILD.unicode-ident-1.0.1.bazel
similarity index 97%
rename from third_party/cargo/remote/BUILD.unicode-ident-1.0.0.bazel
rename to third_party/cargo/remote/BUILD.unicode-ident-1.0.1.bazel
index c3ec41f..c3676d5 100644
--- a/third_party/cargo/remote/BUILD.unicode-ident-1.0.0.bazel
+++ b/third_party/cargo/remote/BUILD.unicode-ident-1.0.1.bazel
@@ -49,7 +49,7 @@
"crate-name=unicode-ident",
"manual",
],
- version = "1.0.0",
+ version = "1.0.1",
# buildifier: leave-alone
deps = [
],
diff --git a/third_party/cargo/remote/BUILD.unicode-linebreak-0.1.2.bazel b/third_party/cargo/remote/BUILD.unicode-linebreak-0.1.2.bazel
index c1b4bf8..f2e3045 100644
--- a/third_party/cargo/remote/BUILD.unicode-linebreak-0.1.2.bazel
+++ b/third_party/cargo/remote/BUILD.unicode-linebreak-0.1.2.bazel
@@ -57,7 +57,7 @@
version = "0.1.2",
visibility = ["//visibility:private"],
deps = [
- "@raze__regex__1_5_5//:regex",
+ "@raze__regex__1_6_0//:regex",
],
)
diff --git a/third_party/cargo/remote/BUILD.uuid-1.0.0.bazel b/third_party/cargo/remote/BUILD.uuid-1.1.2.bazel
similarity index 98%
rename from third_party/cargo/remote/BUILD.uuid-1.0.0.bazel
rename to third_party/cargo/remote/BUILD.uuid-1.1.2.bazel
index 1121fea..7d633bb 100644
--- a/third_party/cargo/remote/BUILD.uuid-1.0.0.bazel
+++ b/third_party/cargo/remote/BUILD.uuid-1.1.2.bazel
@@ -61,7 +61,7 @@
"crate-name=uuid",
"manual",
],
- version = "1.0.0",
+ version = "1.1.2",
# buildifier: leave-alone
deps = [
],
diff --git a/third_party/cargo/remote/BUILD.which-4.2.5.bazel b/third_party/cargo/remote/BUILD.which-4.2.5.bazel
index f09777f..7dd0db4 100644
--- a/third_party/cargo/remote/BUILD.which-4.2.5.bazel
+++ b/third_party/cargo/remote/BUILD.which-4.2.5.bazel
@@ -50,7 +50,7 @@
version = "4.2.5",
# buildifier: leave-alone
deps = [
- "@raze__either__1_6_1//:either",
+ "@raze__either__1_7_0//:either",
"@raze__libc__0_2_126//:libc",
],
)
diff --git a/third_party/cargo/remote/BUILD.winapi-0.3.9.bazel b/third_party/cargo/remote/BUILD.winapi-0.3.9.bazel
index fef3c89..b162b5d 100644
--- a/third_party/cargo/remote/BUILD.winapi-0.3.9.bazel
+++ b/third_party/cargo/remote/BUILD.winapi-0.3.9.bazel
@@ -50,9 +50,7 @@
"libloaderapi",
"minwinbase",
"minwindef",
- "ntsecapi",
"processenv",
- "profileapi",
"std",
"winbase",
"wincon",
@@ -86,9 +84,7 @@
"libloaderapi",
"minwinbase",
"minwindef",
- "ntsecapi",
"processenv",
- "profileapi",
"std",
"winbase",
"wincon",