blob: de56d55e9af0dbfd67313da2afc2bfc434bf29e4 [file] [log] [blame]
Tyler Chatowb3850c12020-02-26 20:55:48 -08001#define GST_USE_UNSTABLE_API
2#define GST_DISABLE_REGISTRY 1
3
4#include <glib-unix.h>
5#include <glib.h>
6#include <gst/app/app.h>
7#include <gst/gst.h>
8#include <gst/sdp/sdp.h>
9#include <gst/webrtc/icetransport.h>
10#include <gst/webrtc/webrtc.h>
11#include <sys/stat.h>
12#include <sys/types.h>
13
14#include <map>
15#include <thread>
16
17#include "absl/strings/str_format.h"
18#include "aos/events/glib_main_loop.h"
19#include "aos/events/shm_event_loop.h"
20#include "aos/init.h"
21#include "aos/network/web_proxy_generated.h"
22#include "aos/seasocks/seasocks_logger.h"
23#include "flatbuffers/flatbuffers.h"
24#include "frc971/vision/vision_generated.h"
25#include "gflags/gflags.h"
26#include "glog/logging.h"
27#include "internal/Embedded.h"
28#include "seasocks/Server.h"
29#include "seasocks/StringUtil.h"
30#include "seasocks/WebSocket.h"
31
32extern "C" {
33GST_PLUGIN_STATIC_DECLARE(app);
34GST_PLUGIN_STATIC_DECLARE(coreelements);
35GST_PLUGIN_STATIC_DECLARE(dtls);
36GST_PLUGIN_STATIC_DECLARE(nice);
37GST_PLUGIN_STATIC_DECLARE(rtp);
38GST_PLUGIN_STATIC_DECLARE(rtpmanager);
39GST_PLUGIN_STATIC_DECLARE(srtp);
40GST_PLUGIN_STATIC_DECLARE(webrtc);
41GST_PLUGIN_STATIC_DECLARE(video4linux2);
42GST_PLUGIN_STATIC_DECLARE(videoconvert);
43GST_PLUGIN_STATIC_DECLARE(videoparsersbad);
44GST_PLUGIN_STATIC_DECLARE(videorate);
45GST_PLUGIN_STATIC_DECLARE(videoscale);
46GST_PLUGIN_STATIC_DECLARE(videotestsrc);
47GST_PLUGIN_STATIC_DECLARE(x264);
48}
Tyler Chatow39b6a322022-04-15 00:03:58 -070049
// Command-line configuration for the image streamer.
DEFINE_string(config, "aos_config.json",
              "Name of the config file to replay using.");
DEFINE_string(device, "/dev/video0",
              "Camera fd. Ignored if reading from channel");
DEFINE_string(data_dir, "image_streamer_www",
              "Directory to serve data files from");
// Capture geometry and encode settings; these feed directly into the
// GStreamer pipeline caps below.
DEFINE_int32(width, 400, "Image width");
DEFINE_int32(height, 300, "Image height");
DEFINE_int32(framerate, 25, "Framerate (FPS)");
DEFINE_int32(brightness, 50, "Camera brightness");
DEFINE_int32(exposure, 300, "Manual exposure");
DEFINE_int32(bitrate, 500000, "H264 encode bitrate");
// Network ports: the seasocks HTTP/websocket port and the RTP port range
// handed to the ICE agent.
DEFINE_int32(streaming_port, 1180, "Port to stream images on with seasocks");
DEFINE_int32(min_port, 5800, "Min rtp port");
DEFINE_int32(max_port, 5810, "Max rtp port");
DEFINE_string(listen_on, "",
              "Channel on which to receive frames from. Used in place of "
              "internal V4L2 reader. Note: width and height MUST match the "
              "expected size of channel images.");
Tyler Chatowb3850c12020-02-26 20:55:48 -080069
70class Connection;
71
72using aos::web_proxy::Payload;
73using aos::web_proxy::SdpType;
74using aos::web_proxy::WebSocketIce;
75using aos::web_proxy::WebSocketMessage;
76using aos::web_proxy::WebSocketSdp;
77
// Interface for objects that produce GstSamples and hand them to a callback.
// Concrete sources (V4L2Source, ChannelSource) decide where frames come from;
// consumers only ever see the resulting samples.
class GstSampleSource {
 public:
  GstSampleSource() = default;

  virtual ~GstSampleSource() = default;

  // Sources own live pipelines/watchers that cannot be meaningfully
  // duplicated, so both copy operations are deleted (the original deleted
  // only the copy constructor, leaving the implicit copy assignment).
  GstSampleSource(const GstSampleSource &) = delete;
  GstSampleSource &operator=(const GstSampleSource &) = delete;
};
87
88class V4L2Source : public GstSampleSource {
89 public:
90 V4L2Source(std::function<void(GstSample *)> callback)
91 : callback_(std::move(callback)) {
92 GError *error = NULL;
93
94 // Create pipeline to read from camera, pack into rtp stream, and dump
95 // stream to callback. v4l2 device should already be configured with correct
96 // bitrate from v4l2-ctl. do-timestamp marks the time the frame was taken to
97 // track when it should be dropped under latency.
98
99 // With the Pi's hardware encoder, we can encode and package the stream once
100 // and the clients will jump in at any point unsynchronized. With the stream
101 // from x264enc this doesn't seem to work. For now, just reencode for each
102 // client since we don't expect more than 1 or 2.
103
104 pipeline_ = gst_parse_launch(
105 absl::StrFormat("v4l2src device=%s do-timestamp=true "
106 "extra-controls=\"c,brightness=%d,auto_exposure=1,"
107 "exposure_time_absolute=%d\" ! "
108 "video/x-raw,width=%d,height=%d,framerate=%d/"
109 "1,format=YUY2 ! appsink "
110 "name=appsink "
111 "emit-signals=true sync=false async=false "
112 "caps=video/x-raw,format=YUY2",
113 FLAGS_device, FLAGS_brightness, FLAGS_exposure,
114 FLAGS_width, FLAGS_height, FLAGS_framerate)
115 .c_str(),
116 &error);
117
118 if (error != NULL) {
119 LOG(FATAL) << "Could not create v4l2 pipeline: " << error->message;
120 }
121
122 appsink_ = gst_bin_get_by_name(GST_BIN(pipeline_), "appsink");
123 if (appsink_ == NULL) {
124 LOG(FATAL) << "Could not get appsink";
125 }
126
127 g_signal_connect(appsink_, "new-sample",
128 G_CALLBACK(V4L2Source::OnSampleCallback),
129 static_cast<gpointer>(this));
130
131 gst_element_set_state(pipeline_, GST_STATE_PLAYING);
132 }
133
134 ~V4L2Source() {
135 if (pipeline_ != NULL) {
136 gst_element_set_state(GST_ELEMENT(pipeline_), GST_STATE_NULL);
137 gst_object_unref(GST_OBJECT(pipeline_));
138 gst_object_unref(GST_OBJECT(appsink_));
139 }
140 }
141
142 private:
143 static GstFlowReturn OnSampleCallback(GstElement *, gpointer user_data) {
144 static_cast<V4L2Source *>(user_data)->OnSample();
145 return GST_FLOW_OK;
146 }
147
148 void OnSample() {
149 GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(appsink_));
150 if (sample == NULL) {
151 LOG(WARNING) << "Received null sample";
152 return;
153 }
154 callback_(sample);
155 gst_sample_unref(sample);
156 }
157
158 GstElement *pipeline_;
159 GstElement *appsink_;
160
161 std::function<void(GstSample *)> callback_;
162};
163
// Forwards images arriving on an AOS channel (FLAGS_listen_on) into the
// GStreamer world by wrapping each CameraImage as a GstSample.
class ChannelSource : public GstSampleSource {
 public:
  ChannelSource(aos::ShmEventLoop *event_loop,
                std::function<void(GstSample *)> callback)
      : callback_(std::move(callback)) {
    event_loop->MakeWatcher(
        FLAGS_listen_on,
        [this](const frc971::vision::CameraImage &image) { OnImage(image); });
  }

 private:
  // Converts one CameraImage into a GstSample and hands it to the callback.
  // All local references (sample, caps, buffer, bytes) are released before
  // returning; the callback must take its own ref to keep the sample longer.
  void OnImage(const frc971::vision::CameraImage &image) {
    if (!image.has_rows() || !image.has_cols() || !image.has_data()) {
      VLOG(2) << "Skipping CameraImage with no data";
      return;
    }
    // The downstream pipeline caps are built from the flags, so incoming
    // frames must match them exactly.
    CHECK_EQ(image.rows(), FLAGS_height);
    CHECK_EQ(image.cols(), FLAGS_width);

    // g_bytes_new() copies the pixel data, so the resulting buffer is
    // independent of the flatbuffer message's lifetime.
    GBytes *bytes = g_bytes_new(image.data()->data(), image.data()->size());
    GstBuffer *buffer = gst_buffer_new_wrapped_bytes(bytes);

    // Timestamp the buffer with capture time so latency handling works.
    GST_BUFFER_PTS(buffer) = image.monotonic_timestamp_ns();

    GstCaps *caps = CHECK_NOTNULL(gst_caps_new_simple(
        "video/x-raw", "width", G_TYPE_INT, image.cols(), "height", G_TYPE_INT,
        image.rows(), "format", G_TYPE_STRING, "YUY2", nullptr));

    GstSample *sample = gst_sample_new(buffer, caps, nullptr, nullptr);

    callback_(sample);

    // Drop our references; gst_sample_new took its own.
    gst_sample_unref(sample);
    gst_caps_unref(caps);
    gst_buffer_unref(buffer);
    g_bytes_unref(bytes);
  }

  std::function<void(GstSample *)> callback_;
};
204
Tyler Chatowb3850c12020-02-26 20:55:48 -0800205// Basic class that handles receiving new websocket connections. Creates a new
206// Connection to manage the rest of the negotiation and data passing. When the
207// websocket closes, it deletes the Connection.
class WebsocketHandler : public ::seasocks::WebSocket::Handler {
 public:
  WebsocketHandler(aos::ShmEventLoop *event_loop, ::seasocks::Server *server);
  ~WebsocketHandler() override = default;

  // seasocks connection lifecycle callbacks.
  void onConnect(::seasocks::WebSocket *sock) override;
  void onData(::seasocks::WebSocket *sock, const uint8_t *data,
              size_t size) override;
  void onDisconnect(::seasocks::WebSocket *sock) override;

 private:
  // Fans one frame out to every live Connection and, when sender_ is valid,
  // republishes it on the /camera channel.
  void OnSample(GstSample *sample);

  // One WebRTC negotiation/pipeline per connected websocket.
  std::map<::seasocks::WebSocket *, std::unique_ptr<Connection>> connections_;
  ::seasocks::Server *server_;  // Not owned.
  // Where frames come from: local V4L2 camera or an AOS channel.
  std::unique_ptr<GstSampleSource> source_;

  // Only made valid when reading from the camera (see the constructor).
  aos::Sender<frc971::vision::CameraImage> sender_;
};
227
228// Seasocks requires that sends happen on the correct thread. This class takes a
229// detached buffer to send on a specific websocket connection and sends it when
230// seasocks is ready.
class UpdateData : public ::seasocks::Server::Runnable {
 public:
  UpdateData(::seasocks::WebSocket *websocket,
             flatbuffers::DetachedBuffer &&buffer)
      : sock_(websocket), buffer_(std::move(buffer)) {}
  ~UpdateData() override = default;
  // One-shot payload bound to a single socket; copying makes no sense.
  UpdateData(const UpdateData &) = delete;
  UpdateData &operator=(const UpdateData &) = delete;

  // Invoked by seasocks once it is safe to send on this connection.
  void run() override { sock_->send(buffer_.data(), buffer_.size()); }

 private:
  ::seasocks::WebSocket *sock_;  // Not owned.
  const flatbuffers::DetachedBuffer buffer_;
};
246
// Manages one browser client: owns the encode + webrtcbin pipeline, drives
// SDP/ICE negotiation over the websocket, and pushes camera samples into the
// stream.
class Connection {
 public:
  Connection(::seasocks::WebSocket *sock, ::seasocks::Server *server);

  ~Connection();

  // Handles an SDP answer or ICE candidate received from the browser.
  void HandleWebSocketData(const uint8_t *data, size_t size);

  // Pushes one raw frame into this connection's appsrc.
  void OnSample(GstSample *sample);

 private:
  // C-style trampolines for the GStreamer signal callbacks.
  static void OnOfferCreatedCallback(GstPromise *promise, gpointer user_data) {
    static_cast<Connection *>(user_data)->OnOfferCreated(promise);
  }

  static void OnNegotiationNeededCallback(GstElement *, gpointer user_data) {
    static_cast<Connection *>(user_data)->OnNegotiationNeeded();
  }

  static void OnIceCandidateCallback(GstElement *, guint mline_index,
                                     gchar *candidate, gpointer user_data) {
    static_cast<Connection *>(user_data)->OnIceCandidate(mline_index,
                                                         candidate);
  }

  void OnOfferCreated(GstPromise *promise);
  void OnNegotiationNeeded();
  void OnIceCandidate(guint mline_index, gchar *candidate);

  ::seasocks::WebSocket *sock_;  // Not owned.
  ::seasocks::Server *server_;   // Not owned.

  GstElement *pipeline_;
  GstElement *webrtcbin_;
  GstElement *appsrc_;

  // True until the first sample is pushed; used to fix up the appsrc pad
  // offset so a late-joining client starts at the current stream position.
  bool first_sample_ = true;
};
285
286WebsocketHandler::WebsocketHandler(aos::ShmEventLoop *event_loop,
287 ::seasocks::Server *server)
Tyler Chatow39b6a322022-04-15 00:03:58 -0700288 : server_(server) {
289 if (FLAGS_listen_on.empty()) {
290 sender_ = event_loop->MakeSender<frc971::vision::CameraImage>("/camera");
291 source_ =
292 std::make_unique<V4L2Source>([this](auto sample) { OnSample(sample); });
293 } else {
294 source_ = std::make_unique<ChannelSource>(
295 event_loop, [this](auto sample) { OnSample(sample); });
Tyler Chatowb3850c12020-02-26 20:55:48 -0800296 }
297}
298
299void WebsocketHandler::onConnect(::seasocks::WebSocket *sock) {
300 std::unique_ptr<Connection> conn =
301 std::make_unique<Connection>(sock, server_);
302 connections_.insert({sock, std::move(conn)});
303}
304
305void WebsocketHandler::onData(::seasocks::WebSocket *sock, const uint8_t *data,
306 size_t size) {
307 connections_[sock]->HandleWebSocketData(data, size);
308}
309
Tyler Chatow39b6a322022-04-15 00:03:58 -0700310void WebsocketHandler::OnSample(GstSample *sample) {
Tyler Chatowb3850c12020-02-26 20:55:48 -0800311 for (auto iter = connections_.begin(); iter != connections_.end(); ++iter) {
312 iter->second->OnSample(sample);
313 }
314
Tyler Chatow39b6a322022-04-15 00:03:58 -0700315 if (sender_.valid()) {
Tyler Chatowb3850c12020-02-26 20:55:48 -0800316 const GstCaps *caps = CHECK_NOTNULL(gst_sample_get_caps(sample));
317 CHECK_GT(gst_caps_get_size(caps), 0U);
318 const GstStructure *str = gst_caps_get_structure(caps, 0);
319
320 gint width;
321 gint height;
322
323 CHECK(gst_structure_get_int(str, "width", &width));
324 CHECK(gst_structure_get_int(str, "height", &height));
325
326 GstBuffer *buffer = CHECK_NOTNULL(gst_sample_get_buffer(sample));
327
328 const gsize size = gst_buffer_get_size(buffer);
329
330 auto builder = sender_.MakeBuilder();
331
332 uint8_t *image_data;
333 auto image_offset =
334 builder.fbb()->CreateUninitializedVector(size, &image_data);
335 gst_buffer_extract(buffer, 0, image_data, size);
336
337 auto image_builder = builder.MakeBuilder<frc971::vision::CameraImage>();
338 image_builder.add_rows(height);
339 image_builder.add_cols(width);
340 image_builder.add_data(image_offset);
341
342 builder.CheckOk(builder.Send(image_builder.Finish()));
343 }
Tyler Chatowb3850c12020-02-26 20:55:48 -0800344}
345
346void WebsocketHandler::onDisconnect(::seasocks::WebSocket *sock) {
347 connections_.erase(sock);
348}
349
Connection::Connection(::seasocks::WebSocket *sock, ::seasocks::Server *server)
    : sock_(sock), server_(server) {
  GError *error = NULL;

  // Build pipeline to read data from application into pipeline, place in
  // webrtcbin group, and stream.

  pipeline_ = gst_parse_launch(
      // aggregate-mode should be zero-latency but this drops the stream on
      // bitrate spikes for some reason - probably the weak CPU on the pi.
      absl::StrFormat(
          "webrtcbin name=webrtcbin appsrc "
          "name=appsrc block=false "
          "is-live=true "
          "format=3 max-buffers=0 leaky-type=2 "
          "caps=video/x-raw,width=%d,height=%d,format=YUY2 ! videoconvert ! "
          "x264enc bitrate=%d speed-preset=ultrafast "
          "tune=zerolatency key-int-max=15 sliced-threads=true ! "
          "video/x-h264,profile=constrained-baseline ! h264parse ! "
          "rtph264pay "
          "config-interval=-1 name=payloader aggregate-mode=none ! "
          "application/"
          "x-rtp,media=video,encoding-name=H264,payload=96,clock-rate=90000 !"
          "webrtcbin. ",
          FLAGS_width, FLAGS_height, FLAGS_bitrate / 1000)
          .c_str(),
      &error);

  if (error != NULL) {
    LOG(FATAL) << "Could not create WebRTC pipeline: " << error->message;
  }

  // Grab the named elements out of the parsed pipeline.
  webrtcbin_ = gst_bin_get_by_name(GST_BIN(pipeline_), "webrtcbin");
  if (webrtcbin_ == NULL) {
    LOG(FATAL) << "Could not initialize webrtcbin";
  }

  appsrc_ = gst_bin_get_by_name(GST_BIN(pipeline_), "appsrc");
  if (appsrc_ == NULL) {
    LOG(FATAL) << "Could not initialize appsrc";
  }

  {
    // We only send video to the browser, so mark the first (and only)
    // transceiver send-only.
    GArray *transceivers;
    g_signal_emit_by_name(webrtcbin_, "get-transceivers", &transceivers);
    if (transceivers == NULL || transceivers->len <= 0) {
      LOG(FATAL) << "Could not initialize transceivers";
    }

    GstWebRTCRTPTransceiver *trans =
        g_array_index(transceivers, GstWebRTCRTPTransceiver *, 0);
    g_object_set(trans, "direction",
                 GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY, nullptr);

    g_array_unref(transceivers);
  }

  {
    // Constrain the ICE agent to the flag-configured RTP port range.
    GstObject *ice = nullptr;
    g_object_get(G_OBJECT(webrtcbin_), "ice-agent", &ice, nullptr);
    CHECK_NOTNULL(ice);

    g_object_set(ice, "min-rtp-port", FLAGS_min_port, "max-rtp-port",
                 FLAGS_max_port, nullptr);

    // We don't need upnp on a local network.
    {
      GstObject *nice = nullptr;
      g_object_get(ice, "agent", &nice, nullptr);
      CHECK_NOTNULL(nice);

      g_object_set(nice, "upnp", false, nullptr);
      g_object_unref(nice);
    }

    gst_object_unref(ice);
  }

  // Hook up negotiation: webrtcbin tells us when to create an offer and when
  // it discovers local ICE candidates to relay to the browser.
  g_signal_connect(webrtcbin_, "on-negotiation-needed",
                   G_CALLBACK(Connection::OnNegotiationNeededCallback),
                   static_cast<gpointer>(this));

  g_signal_connect(webrtcbin_, "on-ice-candidate",
                   G_CALLBACK(Connection::OnIceCandidateCallback),
                   static_cast<gpointer>(this));

  gst_element_set_state(pipeline_, GST_STATE_READY);
  gst_element_set_state(pipeline_, GST_STATE_PLAYING);
}
439
Connection::~Connection() {
  if (pipeline_ != NULL) {
    // Stop the pipeline before dropping our element references.
    gst_element_set_state(pipeline_, GST_STATE_NULL);

    gst_object_unref(GST_OBJECT(webrtcbin_));
    gst_object_unref(GST_OBJECT(pipeline_));
    gst_object_unref(GST_OBJECT(appsrc_));
  }
}
449
// Pushes one frame into this connection's appsrc, fixing up the pad offset on
// the first sample so the client joins at the current stream position.
void Connection::OnSample(GstSample *sample) {
  GstFlowReturn response =
      gst_app_src_push_sample(GST_APP_SRC(appsrc_), sample);
  if (response != GST_FLOW_OK) {
    LOG(WARNING) << "Sample pushed, did not receive OK";
  }

  // Since the stream is already running (the camera turns on with
  // image_streamer) we need to tell the new appsrc where
  // we are starting in the stream so it can catch up immediately.
  if (first_sample_) {
    GstPad *src = gst_element_get_static_pad(appsrc_, "src");
    if (src == NULL) {
      // Pad not available yet; first_sample_ stays set so we retry on the
      // next frame.
      return;
    }

    GstSegment *segment = gst_sample_get_segment(sample);
    GstBuffer *buffer = gst_sample_get_buffer(sample);

    // Shift the pad's running time back so this sample appears at time zero
    // for the newly-joined client.
    guint64 offset = gst_segment_to_running_time(segment, GST_FORMAT_TIME,
                                                 GST_BUFFER_PTS(buffer));
    LOG(INFO) << "Fixing offset " << offset;
    gst_pad_set_offset(src, -offset);

    gst_object_unref(GST_OBJECT(src));
    first_sample_ = false;
  }
}
478
479void Connection::OnOfferCreated(GstPromise *promise) {
480 LOG(INFO) << "OnOfferCreated";
481
482 GstWebRTCSessionDescription *offer = NULL;
483 gst_structure_get(gst_promise_get_reply(promise), "offer",
484 GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &offer, NULL);
485 gst_promise_unref(promise);
486
487 {
488 std::unique_ptr<GstPromise, decltype(&gst_promise_unref)>
489 local_desc_promise(gst_promise_new(), &gst_promise_unref);
490 g_signal_emit_by_name(webrtcbin_, "set-local-description", offer,
491 local_desc_promise.get());
492 gst_promise_interrupt(local_desc_promise.get());
493 }
494
495 GstSDPMessage *sdp_msg = offer->sdp;
496 std::string sdp_str(gst_sdp_message_as_text(sdp_msg));
497
498 LOG(INFO) << "Negotiation offer created:\n" << sdp_str;
499
500 flatbuffers::FlatBufferBuilder fbb(512);
501 flatbuffers::Offset<WebSocketSdp> sdp_fb =
502 CreateWebSocketSdpDirect(fbb, SdpType::OFFER, sdp_str.c_str());
503 flatbuffers::Offset<WebSocketMessage> answer_message =
504 CreateWebSocketMessage(fbb, Payload::WebSocketSdp, sdp_fb.Union());
505 fbb.Finish(answer_message);
506
507 server_->execute(std::make_shared<UpdateData>(sock_, fbb.Release()));
508}
509
510void Connection::OnNegotiationNeeded() {
511 LOG(INFO) << "OnNegotiationNeeded";
512
513 GstPromise *promise;
514 promise = gst_promise_new_with_change_func(Connection::OnOfferCreatedCallback,
515 static_cast<gpointer>(this), NULL);
516 g_signal_emit_by_name(G_OBJECT(webrtcbin_), "create-offer", NULL, promise);
517}
518
519void Connection::OnIceCandidate(guint mline_index, gchar *candidate) {
520 LOG(INFO) << "OnIceCandidate";
521
522 flatbuffers::FlatBufferBuilder fbb(512);
523
524 auto ice_fb_builder = WebSocketIce::Builder(fbb);
525 ice_fb_builder.add_sdp_m_line_index(mline_index);
526 ice_fb_builder.add_sdp_mid(fbb.CreateString("video0"));
527 ice_fb_builder.add_candidate(
528 fbb.CreateString(static_cast<char *>(candidate)));
529 flatbuffers::Offset<WebSocketIce> ice_fb = ice_fb_builder.Finish();
530
531 flatbuffers::Offset<WebSocketMessage> ice_message =
532 CreateWebSocketMessage(fbb, Payload::WebSocketIce, ice_fb.Union());
533 fbb.Finish(ice_message);
534
535 server_->execute(std::make_shared<UpdateData>(sock_, fbb.Release()));
536
537 g_signal_emit_by_name(webrtcbin_, "add-ice-candidate", mline_index,
538 candidate);
539}
540
541void Connection::HandleWebSocketData(const uint8_t *data, size_t /* size*/) {
542 LOG(INFO) << "HandleWebSocketData";
543
544 const WebSocketMessage *message =
545 flatbuffers::GetRoot<WebSocketMessage>(data);
546
547 switch (message->payload_type()) {
548 case Payload::WebSocketSdp: {
549 const WebSocketSdp *offer = message->payload_as_WebSocketSdp();
550 if (offer->type() != SdpType::ANSWER) {
551 LOG(WARNING) << "Expected SDP message type \"answer\"";
552 break;
553 }
554 const flatbuffers::String *sdp_string = offer->payload();
555
556 LOG(INFO) << "Received SDP:\n" << sdp_string->c_str();
557
558 GstSDPMessage *sdp;
559 GstSDPResult status = gst_sdp_message_new(&sdp);
560 if (status != GST_SDP_OK) {
561 LOG(WARNING) << "Could not create SDP message";
562 break;
563 }
564
565 status = gst_sdp_message_parse_buffer((const guint8 *)sdp_string->c_str(),
566 sdp_string->size(), sdp);
567
568 if (status != GST_SDP_OK) {
569 LOG(WARNING) << "Could not parse SDP string";
570 break;
571 }
572
573 std::unique_ptr<GstWebRTCSessionDescription,
574 decltype(&gst_webrtc_session_description_free)>
575 answer(gst_webrtc_session_description_new(GST_WEBRTC_SDP_TYPE_ANSWER,
576 sdp),
577 &gst_webrtc_session_description_free);
578 std::unique_ptr<GstPromise, decltype(&gst_promise_unref)> promise(
579 gst_promise_new(), &gst_promise_unref);
580 g_signal_emit_by_name(webrtcbin_, "set-remote-description", answer.get(),
581 promise.get());
582 gst_promise_interrupt(promise.get());
583
584 break;
585 }
586 case Payload::WebSocketIce: {
587 const WebSocketIce *ice = message->payload_as_WebSocketIce();
588 if (!ice->has_candidate() || ice->candidate()->size() == 0) {
589 LOG(WARNING) << "Received ICE message without candidate";
590 break;
591 }
592
593 const gchar *candidate =
594 static_cast<const gchar *>(ice->candidate()->c_str());
595 guint mline_index = ice->sdp_m_line_index();
596
597 LOG(INFO) << "Received ICE candidate with mline index " << mline_index
598 << "; candidate: " << candidate;
599
600 g_signal_emit_by_name(webrtcbin_, "add-ice-candidate", mline_index,
601 candidate);
602
603 break;
604 }
605 default:
606 break;
607 }
608}
609
// Registers the statically-linked GStreamer plugins declared at the top of
// this file; main() calls this right after gst_init().
void RegisterPlugins() {
  GST_PLUGIN_STATIC_REGISTER(app);
  GST_PLUGIN_STATIC_REGISTER(coreelements);
  GST_PLUGIN_STATIC_REGISTER(dtls);
  GST_PLUGIN_STATIC_REGISTER(nice);
  GST_PLUGIN_STATIC_REGISTER(rtp);
  GST_PLUGIN_STATIC_REGISTER(rtpmanager);
  GST_PLUGIN_STATIC_REGISTER(srtp);
  GST_PLUGIN_STATIC_REGISTER(webrtc);
  GST_PLUGIN_STATIC_REGISTER(video4linux2);
  GST_PLUGIN_STATIC_REGISTER(videoconvert);
  GST_PLUGIN_STATIC_REGISTER(videoparsersbad);
  GST_PLUGIN_STATIC_REGISTER(videorate);
  GST_PLUGIN_STATIC_REGISTER(videoscale);
  GST_PLUGIN_STATIC_REGISTER(videotestsrc);
  GST_PLUGIN_STATIC_REGISTER(x264);
}
627
int main(int argc, char **argv) {
  aos::InitGoogle(&argc, &argv);

  findEmbeddedContent("");

  // putenv() keeps a pointer to the string it is given, so openssl_env must
  // outlive all use of the environment; as a local in main it lives until
  // shutdown.
  std::string openssl_env = "OPENSSL_CONF=\"\"";
  putenv(const_cast<char *>(openssl_env.c_str()));

  // Keep gstreamer from touching an on-disk plugin registry; plugins are
  // registered statically below instead.
  putenv(const_cast<char *>("GST_REGISTRY_DISABLE=yes"));

  gst_init(&argc, &argv);
  RegisterPlugins();

  aos::FlatbufferDetachedBuffer<aos::Configuration> config =
      aos::configuration::ReadConfig(FLAGS_config);
  aos::ShmEventLoop event_loop(&config.message());

  {
    // Scoped so the glib main loop, server, and handlers are torn down
    // before gst_deinit() runs.
    aos::GlibMainLoop main_loop(&event_loop);

    seasocks::Server server(::std::shared_ptr<seasocks::Logger>(
        new ::aos::seasocks::SeasocksLogger(seasocks::Logger::Level::Info)));

    LOG(INFO) << "Serving from " << FLAGS_data_dir;

    auto websocket_handler =
        std::make_shared<WebsocketHandler>(&event_loop, &server);
    server.addWebSocketHandler("/ws", websocket_handler);

    server.startListening(FLAGS_streaming_port);
    server.setStaticPath(FLAGS_data_dir.c_str());

    // Drive seasocks from the AOS event loop's epoll instead of giving it
    // its own thread: poll the server whenever its fd becomes readable.
    aos::internal::EPoll *epoll = event_loop.epoll();

    epoll->OnReadable(server.fd(), [&server] {
      CHECK(::seasocks::Server::PollResult::Continue == server.poll(0));
    });

    event_loop.Run();

    epoll->DeleteFd(server.fd());
    server.terminate();
  }

  gst_deinit();

  return 0;
}