#define GST_USE_UNSTABLE_API
#define GST_DISABLE_REGISTRY 1

#include <glib-unix.h>
#include <glib.h>
#include <gst/app/app.h>
#include <gst/gst.h>
#include <gst/sdp/sdp.h>
#include <gst/webrtc/icetransport.h>
#include <gst/webrtc/webrtc.h>
#include <sys/stat.h>
#include <sys/types.h>

#include <map>
#include <thread>

#include "absl/strings/str_format.h"
#include "flatbuffers/flatbuffers.h"
#include "gflags/gflags.h"
#include "glog/logging.h"

#include "aos/events/glib_main_loop.h"
#include "aos/events/shm_event_loop.h"
#include "aos/init.h"
#include "aos/network/web_proxy_generated.h"
#include "aos/seasocks/seasocks_logger.h"
#include "frc971/vision/vision_generated.h"
#include "internal/Embedded.h"
#include "seasocks/Server.h"
#include "seasocks/StringUtil.h"
#include "seasocks/WebSocket.h"

DEFINE_string(config, "aos_config.json",
              "Path to the AOS config file to use.");
DEFINE_string(device, "/dev/video0",
              "Camera device path. Ignored if reading from a channel.");
DEFINE_string(data_dir, "image_streamer_www",
              "Directory to serve data files from");
DEFINE_bool(publish_images, true,
            "If true, publish images read from v4l2 to /camera.");
DEFINE_int32(width, 400, "Image width");
DEFINE_int32(height, 300, "Image height");
DEFINE_int32(framerate, 25, "Framerate (FPS)");
DEFINE_int32(brightness, 50, "Camera brightness");
DEFINE_int32(exposure, 300, "Manual exposure");
DEFINE_int32(bitrate, 500000, "H264 encode bitrate");
DEFINE_int32(streaming_port, 1180, "Port to stream images on with seasocks");
DEFINE_int32(min_port, 5800, "Min RTP port");
DEFINE_int32(max_port, 5810, "Max RTP port");
DEFINE_string(listen_on, "",
              "Channel to receive frames from. Used in place of the "
              "internal V4L2 reader. Note: width and height MUST match the "
              "expected size of channel images.");

class Connection;

using aos::web_proxy::Payload;
using aos::web_proxy::SdpType;
using aos::web_proxy::WebSocketIce;
using aos::web_proxy::WebSocketMessage;
using aos::web_proxy::WebSocketSdp;

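// Interface for sources that produce GstSamples and hand them to a callback,
// implemented below by V4L2Source (camera) and ChannelSource (AOS channel).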
class GstSampleSource {
 public:
  GstSampleSource() = default;

  virtual ~GstSampleSource() = default;

 private:
  GstSampleSource(const GstSampleSource &) = delete;
};

class V4L2Source : public GstSampleSource {
 public:
  V4L2Source(std::function<void(GstSample *)> callback)
      : callback_(std::move(callback)) {
    GError *error = NULL;

    // Create a pipeline that reads raw frames from the camera and delivers
    // them to an appsink, which hands each sample to the callback. The v4l2
    // device should already be configured with the correct settings via
    // v4l2-ctl. do-timestamp marks each frame with the time it was captured
    // so we can tell when it should be dropped if latency builds up.

    // With the Pi's hardware encoder, we can encode and package the stream
    // once and clients can jump in at any point without synchronizing. With
    // the stream from x264enc this doesn't seem to work. For now, just
    // re-encode for each client since we don't expect more than 1 or 2.

    std::string exposure;
    if (FLAGS_exposure > 0) {
      exposure = absl::StrFormat(",auto_exposure=1,exposure_time_absolute=%d",
                                 FLAGS_exposure);
    }

    pipeline_ = gst_parse_launch(
        absl::StrFormat("v4l2src device=%s do-timestamp=true "
                        "extra-controls=\"c,brightness=%d%s\" ! "
                        "video/x-raw,width=%d,height=%d,framerate=%d/"
                        "1,format=YUY2 ! appsink "
                        "name=appsink "
                        "emit-signals=true sync=false async=false "
                        "caps=video/x-raw,format=YUY2",
                        FLAGS_device, FLAGS_brightness, exposure, FLAGS_width,
                        FLAGS_height, FLAGS_framerate)
            .c_str(),
        &error);

    if (error != NULL) {
      LOG(FATAL) << "Could not create v4l2 pipeline: " << error->message;
    }

    appsink_ = gst_bin_get_by_name(GST_BIN(pipeline_), "appsink");
    if (appsink_ == NULL) {
      LOG(FATAL) << "Could not get appsink";
    }

    g_signal_connect(appsink_, "new-sample",
                     G_CALLBACK(V4L2Source::OnSampleCallback),
                     static_cast<gpointer>(this));

    gst_element_set_state(pipeline_, GST_STATE_PLAYING);
  }

  ~V4L2Source() {
    if (pipeline_ != NULL) {
      gst_element_set_state(GST_ELEMENT(pipeline_), GST_STATE_NULL);
      gst_object_unref(GST_OBJECT(pipeline_));
      gst_object_unref(GST_OBJECT(appsink_));
    }
  }

 private:
  static GstFlowReturn OnSampleCallback(GstElement *, gpointer user_data) {
    static_cast<V4L2Source *>(user_data)->OnSample();
    return GST_FLOW_OK;
  }

  void OnSample() {
    GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(appsink_));
    if (sample == NULL) {
      LOG(WARNING) << "Received null sample";
      return;
    }
    callback_(sample);
    gst_sample_unref(sample);
  }

  GstElement *pipeline_;
  GstElement *appsink_;

  std::function<void(GstSample *)> callback_;
};

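// Source that reads already-captured CameraImage frames from an AOS channel
// (FLAGS_listen_on) and wraps them in GstSamples for the rest of the pipeline.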
class ChannelSource : public GstSampleSource {
 public:
  ChannelSource(aos::ShmEventLoop *event_loop,
                std::function<void(GstSample *)> callback)
      : callback_(std::move(callback)) {
    event_loop->MakeWatcher(
        FLAGS_listen_on,
        [this](const frc971::vision::CameraImage &image) { OnImage(image); });
  }

 private:
  void OnImage(const frc971::vision::CameraImage &image) {
    if (!image.has_rows() || !image.has_cols() || !image.has_data()) {
      VLOG(2) << "Skipping CameraImage with no data";
      return;
    }
    CHECK_EQ(image.rows(), FLAGS_height);
    CHECK_EQ(image.cols(), FLAGS_width);

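    // Copy the image payload into a GBytes, wrap it in a GstBuffer, and attach
    // caps describing the raw YUY2 frame so downstream elements know how to
    // interpret it.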
    GBytes *bytes = g_bytes_new(image.data()->data(), image.data()->size());
    GstBuffer *buffer = gst_buffer_new_wrapped_bytes(bytes);

    GST_BUFFER_PTS(buffer) = image.monotonic_timestamp_ns();

    GstCaps *caps = CHECK_NOTNULL(gst_caps_new_simple(
        "video/x-raw", "width", G_TYPE_INT, image.cols(), "height", G_TYPE_INT,
        image.rows(), "format", G_TYPE_STRING, "YUY2", nullptr));

    GstSample *sample = gst_sample_new(buffer, caps, nullptr, nullptr);

    callback_(sample);

    gst_sample_unref(sample);
    gst_caps_unref(caps);
    gst_buffer_unref(buffer);
    g_bytes_unref(bytes);
  }

  std::function<void(GstSample *)> callback_;
};

// Basic class that handles receiving new websocket connections. Creates a new
// Connection to manage the rest of the negotiation and data passing. When the
// websocket closes, it deletes the Connection.
class WebsocketHandler : public ::seasocks::WebSocket::Handler {
 public:
  WebsocketHandler(aos::ShmEventLoop *event_loop, ::seasocks::Server *server);
  ~WebsocketHandler() override = default;

  void onConnect(::seasocks::WebSocket *sock) override;
  void onData(::seasocks::WebSocket *sock, const uint8_t *data,
              size_t size) override;
  void onDisconnect(::seasocks::WebSocket *sock) override;

 private:
  void OnSample(GstSample *sample);

  aos::ShmEventLoop *event_loop_;
  std::map<::seasocks::WebSocket *, std::unique_ptr<Connection>> connections_;
  ::seasocks::Server *server_;
  std::unique_ptr<GstSampleSource> source_;
  aos::TimerHandler *manual_restart_handle_;

  aos::Sender<frc971::vision::CameraImage> sender_;
};

// Seasocks requires that sends happen on the correct thread. This class takes
// a detached buffer to send on a specific websocket connection and sends it
// when seasocks is ready.
class UpdateData : public ::seasocks::Server::Runnable {
 public:
  UpdateData(::seasocks::WebSocket *websocket,
             flatbuffers::DetachedBuffer &&buffer)
      : sock_(websocket), buffer_(std::move(buffer)) {}
  ~UpdateData() override = default;
  UpdateData(const UpdateData &) = delete;
  UpdateData &operator=(const UpdateData &) = delete;

  void run() override { sock_->send(buffer_.data(), buffer_.size()); }

 private:
  ::seasocks::WebSocket *sock_;
  const flatbuffers::DetachedBuffer buffer_;
};

class Connection {
 public:
  Connection(::seasocks::WebSocket *sock, ::seasocks::Server *server);

  ~Connection();

  void HandleWebSocketData(const uint8_t *data, size_t size);

  void OnSample(GstSample *sample);

 private:
  static void OnOfferCreatedCallback(GstPromise *promise, gpointer user_data) {
    static_cast<Connection *>(user_data)->OnOfferCreated(promise);
  }

  static void OnNegotiationNeededCallback(GstElement *, gpointer user_data) {
    static_cast<Connection *>(user_data)->OnNegotiationNeeded();
  }

  static void OnIceCandidateCallback(GstElement *, guint mline_index,
                                     gchar *candidate, gpointer user_data) {
    static_cast<Connection *>(user_data)->OnIceCandidate(mline_index,
                                                         candidate);
  }

  void OnOfferCreated(GstPromise *promise);
  void OnNegotiationNeeded();
  void OnIceCandidate(guint mline_index, gchar *candidate);

  ::seasocks::WebSocket *sock_;
  ::seasocks::Server *server_;

  GstElement *pipeline_;
  GstElement *webrtcbin_;
  GstElement *appsrc_;

  bool first_sample_ = true;
};

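// The manual restart timer acts as a watchdog: it is scheduled once the event
// loop starts and rescheduled every time a sample arrives in OnSample(). If no
// frames show up for 10 seconds, the event loop exits.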
WebsocketHandler::WebsocketHandler(aos::ShmEventLoop *event_loop,
                                   ::seasocks::Server *server)
    : event_loop_(event_loop),
      server_(server),
      manual_restart_handle_(
          event_loop_->AddTimer([this]() { event_loop_->Exit(); })) {
  if (FLAGS_listen_on.empty()) {
    if (FLAGS_publish_images) {
      sender_ = event_loop->MakeSender<frc971::vision::CameraImage>("/camera");
    }
    source_ =
        std::make_unique<V4L2Source>([this](auto sample) { OnSample(sample); });
  } else {
    source_ = std::make_unique<ChannelSource>(
        event_loop, [this](auto sample) { OnSample(sample); });
  }
  event_loop_->OnRun([this]() {
    manual_restart_handle_->Schedule(event_loop_->monotonic_now() +
                                     std::chrono::seconds(10));
  });
}

void WebsocketHandler::onConnect(::seasocks::WebSocket *sock) {
  std::unique_ptr<Connection> conn =
      std::make_unique<Connection>(sock, server_);
  connections_.insert({sock, std::move(conn)});
}

void WebsocketHandler::onData(::seasocks::WebSocket *sock, const uint8_t *data,
                              size_t size) {
  connections_[sock]->HandleWebSocketData(data, size);
}

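// Fans each sample out to every active WebRTC connection, optionally
// republishes it as a CameraImage on /camera, and pushes the watchdog timer
// back another 10 seconds.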
void WebsocketHandler::OnSample(GstSample *sample) {
  for (auto iter = connections_.begin(); iter != connections_.end(); ++iter) {
    iter->second->OnSample(sample);
  }

  if (sender_.valid()) {
    const GstCaps *caps = CHECK_NOTNULL(gst_sample_get_caps(sample));
    CHECK_GT(gst_caps_get_size(caps), 0U);
    const GstStructure *str = gst_caps_get_structure(caps, 0);

    gint width;
    gint height;

    CHECK(gst_structure_get_int(str, "width", &width));
    CHECK(gst_structure_get_int(str, "height", &height));

    GstBuffer *buffer = CHECK_NOTNULL(gst_sample_get_buffer(sample));

    const gsize size = gst_buffer_get_size(buffer);

    auto builder = sender_.MakeBuilder();

    uint8_t *image_data;
    auto image_offset =
        builder.fbb()->CreateUninitializedVector(size, &image_data);
    gst_buffer_extract(buffer, 0, image_data, size);

    auto image_builder = builder.MakeBuilder<frc971::vision::CameraImage>();
    image_builder.add_rows(height);
    image_builder.add_cols(width);
    image_builder.add_data(image_offset);

    builder.CheckOk(builder.Send(image_builder.Finish()));
  }
  manual_restart_handle_->Schedule(event_loop_->monotonic_now() +
                                   std::chrono::seconds(10));
}

void WebsocketHandler::onDisconnect(::seasocks::WebSocket *sock) {
  connections_.erase(sock);
}

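// Each Connection owns its own encode + WebRTC pipeline: raw frames are pushed
// into an appsrc, encoded with x264enc, RTP-payloaded, and handed to webrtcbin
// for this one client.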
Connection::Connection(::seasocks::WebSocket *sock, ::seasocks::Server *server)
    : sock_(sock), server_(server) {
  GError *error = NULL;

  // Build a pipeline that reads raw frames from the appsrc, encodes and
  // RTP-packages them, and feeds them into webrtcbin for streaming.

  pipeline_ = gst_parse_launch(
      // aggregate-mode should be zero-latency but this drops the stream on
      // bitrate spikes for some reason - probably the weak CPU on the Pi.
      absl::StrFormat(
          "webrtcbin name=webrtcbin appsrc "
          "name=appsrc block=false "
          "is-live=true "
          "format=3 max-buffers=0 leaky-type=2 "
          "caps=video/x-raw,width=%d,height=%d,format=YUY2 ! videoconvert ! "
          "x264enc bitrate=%d speed-preset=ultrafast "
          "tune=zerolatency key-int-max=15 sliced-threads=true ! "
          "video/x-h264,profile=constrained-baseline ! h264parse ! "
          "rtph264pay "
          "config-interval=-1 name=payloader aggregate-mode=none ! "
          "application/"
          "x-rtp,media=video,encoding-name=H264,payload=96,clock-rate=90000 !"
          "webrtcbin. ",
          FLAGS_width, FLAGS_height, FLAGS_bitrate / 1000)
          .c_str(),
      &error);

  if (error != NULL) {
    LOG(FATAL) << "Could not create WebRTC pipeline: " << error->message;
  }

  webrtcbin_ = gst_bin_get_by_name(GST_BIN(pipeline_), "webrtcbin");
  if (webrtcbin_ == NULL) {
    LOG(FATAL) << "Could not initialize webrtcbin";
  }

  appsrc_ = gst_bin_get_by_name(GST_BIN(pipeline_), "appsrc");
  if (appsrc_ == NULL) {
    LOG(FATAL) << "Could not initialize appsrc";
  }

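  // The pipeline has a single video transceiver; mark it send-only since this
  // end never receives media from the browser.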
  {
    GArray *transceivers;
    g_signal_emit_by_name(webrtcbin_, "get-transceivers", &transceivers);
    if (transceivers == NULL || transceivers->len <= 0) {
      LOG(FATAL) << "Could not initialize transceivers";
    }

    GstWebRTCRTPTransceiver *trans =
        g_array_index(transceivers, GstWebRTCRTPTransceiver *, 0);
    g_object_set(trans, "direction",
                 GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY, nullptr);

    g_array_unref(transceivers);
  }

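  // Constrain the ICE agent to the configured RTP port range
  // (--min_port/--max_port) and disable UPnP on the underlying libnice agent.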
  {
    GstObject *ice = nullptr;
    g_object_get(G_OBJECT(webrtcbin_), "ice-agent", &ice, nullptr);
    CHECK_NOTNULL(ice);

    g_object_set(ice, "min-rtp-port", FLAGS_min_port, "max-rtp-port",
                 FLAGS_max_port, nullptr);

    // We don't need UPnP on a local network.
    {
      GstObject *nice = nullptr;
      g_object_get(ice, "agent", &nice, nullptr);
      CHECK_NOTNULL(nice);

      g_object_set(nice, "upnp", false, nullptr);
      g_object_unref(nice);
    }

    gst_object_unref(ice);
  }

  g_signal_connect(webrtcbin_, "on-negotiation-needed",
                   G_CALLBACK(Connection::OnNegotiationNeededCallback),
                   static_cast<gpointer>(this));

  g_signal_connect(webrtcbin_, "on-ice-candidate",
                   G_CALLBACK(Connection::OnIceCandidateCallback),
                   static_cast<gpointer>(this));

  gst_element_set_state(pipeline_, GST_STATE_READY);
  gst_element_set_state(pipeline_, GST_STATE_PLAYING);
}

Connection::~Connection() {
  if (pipeline_ != NULL) {
    gst_element_set_state(pipeline_, GST_STATE_NULL);

    gst_object_unref(GST_OBJECT(webrtcbin_));
    gst_object_unref(GST_OBJECT(pipeline_));
    gst_object_unref(GST_OBJECT(appsrc_));
  }
}

void Connection::OnSample(GstSample *sample) {
  GstFlowReturn response =
      gst_app_src_push_sample(GST_APP_SRC(appsrc_), sample);
  if (response != GST_FLOW_OK) {
    LOG(WARNING) << "Pushed sample to appsrc but did not receive GST_FLOW_OK";
  }

  // Since the stream is already running (the camera turns on with
  // image_streamer), we need to tell the new appsrc where we are starting in
  // the stream so it can catch up immediately.
  if (first_sample_) {
    GstPad *src = gst_element_get_static_pad(appsrc_, "src");
    if (src == NULL) {
      return;
    }

    GstSegment *segment = gst_sample_get_segment(sample);
    GstBuffer *buffer = gst_sample_get_buffer(sample);

    guint64 offset = gst_segment_to_running_time(segment, GST_FORMAT_TIME,
                                                 GST_BUFFER_PTS(buffer));
    LOG(INFO) << "Fixing offset " << offset;
    gst_pad_set_offset(src, -offset);

    gst_object_unref(GST_OBJECT(src));
    first_sample_ = false;
  }
}

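// Called once webrtcbin has produced an SDP offer: set it as the local
// description and forward it to the browser over the websocket for signaling.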
void Connection::OnOfferCreated(GstPromise *promise) {
  LOG(INFO) << "OnOfferCreated";

  GstWebRTCSessionDescription *offer = NULL;
  gst_structure_get(gst_promise_get_reply(promise), "offer",
                    GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &offer, NULL);
  gst_promise_unref(promise);

  {
    std::unique_ptr<GstPromise, decltype(&gst_promise_unref)>
        local_desc_promise(gst_promise_new(), &gst_promise_unref);
    g_signal_emit_by_name(webrtcbin_, "set-local-description", offer,
                          local_desc_promise.get());
    gst_promise_interrupt(local_desc_promise.get());
  }

  GstSDPMessage *sdp_msg = offer->sdp;
  std::string sdp_str(gst_sdp_message_as_text(sdp_msg));

  LOG(INFO) << "Negotiation offer created:\n" << sdp_str;

  flatbuffers::FlatBufferBuilder fbb(512);
  flatbuffers::Offset<WebSocketSdp> sdp_fb =
      CreateWebSocketSdpDirect(fbb, SdpType::OFFER, sdp_str.c_str());
  flatbuffers::Offset<WebSocketMessage> answer_message =
      CreateWebSocketMessage(fbb, Payload::WebSocketSdp, sdp_fb.Union());
  fbb.Finish(answer_message);

  server_->execute(std::make_shared<UpdateData>(sock_, fbb.Release()));
}

void Connection::OnNegotiationNeeded() {
  LOG(INFO) << "OnNegotiationNeeded";

  GstPromise *promise;
  promise = gst_promise_new_with_change_func(Connection::OnOfferCreatedCallback,
                                             static_cast<gpointer>(this), NULL);
  g_signal_emit_by_name(G_OBJECT(webrtcbin_), "create-offer", NULL, promise);
}

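// Sends each locally gathered ICE candidate to the browser over the websocket.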
void Connection::OnIceCandidate(guint mline_index, gchar *candidate) {
  LOG(INFO) << "OnIceCandidate";

  flatbuffers::FlatBufferBuilder fbb(512);

  flatbuffers::Offset<flatbuffers::String> sdp_mid_offset =
      fbb.CreateString("video0");
  flatbuffers::Offset<flatbuffers::String> candidate_offset =
      fbb.CreateString(static_cast<char *>(candidate));

  auto ice_fb_builder = WebSocketIce::Builder(fbb);
  ice_fb_builder.add_sdp_m_line_index(mline_index);
  ice_fb_builder.add_sdp_mid(sdp_mid_offset);
  ice_fb_builder.add_candidate(candidate_offset);
  flatbuffers::Offset<WebSocketIce> ice_fb = ice_fb_builder.Finish();

  flatbuffers::Offset<WebSocketMessage> ice_message =
      CreateWebSocketMessage(fbb, Payload::WebSocketIce, ice_fb.Union());
  fbb.Finish(ice_message);

  server_->execute(std::make_shared<UpdateData>(sock_, fbb.Release()));

  g_signal_emit_by_name(webrtcbin_, "add-ice-candidate", mline_index,
                        candidate);
}

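// Handles signaling messages from the browser: SDP answers are applied as the
// remote description, and remote ICE candidates are added to webrtcbin.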
void Connection::HandleWebSocketData(const uint8_t *data, size_t /*size*/) {
  LOG(INFO) << "HandleWebSocketData";

  const WebSocketMessage *message =
      flatbuffers::GetRoot<WebSocketMessage>(data);

  switch (message->payload_type()) {
    case Payload::WebSocketSdp: {
      const WebSocketSdp *offer = message->payload_as_WebSocketSdp();
      if (offer->type() != SdpType::ANSWER) {
        LOG(WARNING) << "Expected SDP message type \"answer\"";
        break;
      }
      const flatbuffers::String *sdp_string = offer->payload();

      LOG(INFO) << "Received SDP:\n" << sdp_string->c_str();

      GstSDPMessage *sdp;
      GstSDPResult status = gst_sdp_message_new(&sdp);
      if (status != GST_SDP_OK) {
        LOG(WARNING) << "Could not create SDP message";
        break;
      }

      status = gst_sdp_message_parse_buffer((const guint8 *)sdp_string->c_str(),
                                            sdp_string->size(), sdp);

      if (status != GST_SDP_OK) {
        LOG(WARNING) << "Could not parse SDP string";
        break;
      }

      std::unique_ptr<GstWebRTCSessionDescription,
                      decltype(&gst_webrtc_session_description_free)>
          answer(gst_webrtc_session_description_new(GST_WEBRTC_SDP_TYPE_ANSWER,
                                                    sdp),
                 &gst_webrtc_session_description_free);
      std::unique_ptr<GstPromise, decltype(&gst_promise_unref)> promise(
          gst_promise_new(), &gst_promise_unref);
      g_signal_emit_by_name(webrtcbin_, "set-remote-description", answer.get(),
                            promise.get());
      gst_promise_interrupt(promise.get());

      break;
    }
    case Payload::WebSocketIce: {
      const WebSocketIce *ice = message->payload_as_WebSocketIce();
      if (!ice->has_candidate() || ice->candidate()->size() == 0) {
        LOG(WARNING) << "Received ICE message without candidate";
        break;
      }

      const gchar *candidate =
          static_cast<const gchar *>(ice->candidate()->c_str());
      guint mline_index = ice->sdp_m_line_index();

      LOG(INFO) << "Received ICE candidate with mline index " << mline_index
                << "; candidate: " << candidate;

      g_signal_emit_by_name(webrtcbin_, "add-ice-candidate", mline_index,
                            candidate);

      break;
    }
    default:
      break;
  }
}

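// Sets up GStreamer, an AOS ShmEventLoop bridged into a GLib main loop, and a
// seasocks server that serves static content from --data_dir and the /ws
// signaling endpoint.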
int main(int argc, char **argv) {
  aos::InitGoogle(&argc, &argv);

  findEmbeddedContent("");

  std::string openssl_env = "OPENSSL_CONF=\"\"";
  putenv(const_cast<char *>(openssl_env.c_str()));

  gst_init(&argc, &argv);

  aos::FlatbufferDetachedBuffer<aos::Configuration> config =
      aos::configuration::ReadConfig(FLAGS_config);
  aos::ShmEventLoop event_loop(&config.message());

  {
    aos::GlibMainLoop main_loop(&event_loop);

    seasocks::Server server(::std::shared_ptr<seasocks::Logger>(
        new ::aos::seasocks::SeasocksLogger(seasocks::Logger::Level::Info)));

    LOG(INFO) << "Serving from " << FLAGS_data_dir;

    auto websocket_handler =
        std::make_shared<WebsocketHandler>(&event_loop, &server);
    server.addWebSocketHandler("/ws", websocket_handler);

    server.startListening(FLAGS_streaming_port);
    server.setStaticPath(FLAGS_data_dir.c_str());

    aos::internal::EPoll *epoll = event_loop.epoll();

    epoll->OnReadable(server.fd(), [&server] {
      CHECK(::seasocks::Server::PollResult::Continue == server.poll(0));
    });

    event_loop.Run();

    epoll->DeleteFd(server.fd());
    server.terminate();
  }

  gst_deinit();

  return 0;
}