Music Hub
A session-wide music playback service
playbin.cpp
1 /*
2  * Copyright © 2013-2015 Canonical Ltd.
3  *
4  * This program is free software: you can redistribute it and/or modify it
5  * under the terms of the GNU Lesser General Public License version 3,
6  * as published by the Free Software Foundation.
7  *
8  * This program is distributed in the hope that it will be useful,
9  * but WITHOUT ANY WARRANTY; without even the implied warranty of
10  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11  * GNU Lesser General Public License for more details.
12  *
13  * You should have received a copy of the GNU Lesser General Public License
14  * along with this program. If not, see <http://www.gnu.org/licenses/>.
15  *
16  * Authored by: Thomas Voß <thomas.voss@canonical.com>
17  * Alfonso Sanchez-Beato <alfonso.sanchez-beato@canonical.com>
18  */
19 
23 
24 #include <gst/pbutils/missing-plugins.h>
25 
26 #include <hybris/media/surface_texture_client_hybris.h>
27 #include <hybris/media/media_codec_layer.h>
28 
31 
32 #include <sys/socket.h>
33 #include <sys/un.h>
34 
35 #include <utility>
36 #include <cstring>
37 
38 static const char *PULSE_SINK = "pulsesink";
39 static const char *HYBRIS_SINK = "hybrissink";
40 static const char *MIR_SINK = "mirsink";
41 
42 using namespace std;
43 
44 void gstreamer::Playbin::setup_video_sink_for_buffer_streaming()
45 {
46  IGBPWrapperHybris igbp;
47  SurfaceTextureClientHybris stc;
48  GstContext *context;
49  GstStructure *structure;
50 
51  switch (backend) {
52  case core::ubuntu::media::AVBackend::Backend::hybris:
53  // Get the service-side BufferQueue (IGraphicBufferProducer) and
54  // associate with it the SurfaceTextureClientHybris instance.
55  igbp = decoding_service_get_igraphicbufferproducer();
56  stc = surface_texture_client_create_by_igbp(igbp);
57 
58  // Because mirsink is being loaded, we are definitely doing hardware rendering.
59  surface_texture_client_set_hardware_rendering(stc, TRUE);
60 
61  context = gst_context_new("gst.mir.MirContext", TRUE);
62  structure = gst_context_writable_structure(context);
63  gst_structure_set(structure, "gst_mir_context", G_TYPE_POINTER, stc, NULL);
64 
65  /* Propagate context in pipeline (needed by amchybris and mirsink) */
66  gst_element_set_context(pipeline, context);
67  break;
68  case core::ubuntu::media::AVBackend::Backend::mir:
69  // Connect to buffer consumer socket
70  connect_to_consumer();
71  // Configure mirsink so it exports buffers (otherwise it would create
72  // its own window).
73  g_object_set (G_OBJECT (video_sink), "export-buffers", TRUE, nullptr);
74  break;
75  case core::ubuntu::media::AVBackend::Backend::none:
76  default:
77  throw core::ubuntu::media::Player::Errors::
78  OutOfProcessBufferStreamingNotSupported{};
79  }
80 }
81 
82 bool gstreamer::Playbin::is_supported_video_sink(void) const
83 {
84  if (video_sink_name == HYBRIS_SINK || video_sink_name == MIR_SINK)
85  return TRUE;
86 
87  return FALSE;
88 }
89 
90 // Uncomment to generate a dot file at the time that the pipeline
91 // goes to the PLAYING state. Make sure to export GST_DEBUG_DUMP_DOT_DIR
92 // before starting media-hub-server. To convert the dot file to something
93 // other image format, use: dot pipeline.dot -Tpng -o pipeline.png
94 //#define DEBUG_GST_PIPELINE
95 
96 namespace media = core::ubuntu::media;
98 
99 const std::string& gstreamer::Playbin::pipeline_name()
100 {
101  static const std::string s{"playbin"};
102  return s;
103 }
104 
105 void gstreamer::Playbin::about_to_finish(GstElement*, gpointer user_data)
106 {
107  auto thiz = static_cast<Playbin*>(user_data);
108  thiz->signals.about_to_finish();
109 }
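// Note: playbin emits "about-to-finish" shortly before the current track ends,
// so forwarding the signal here gives the player a chance to queue the next
// URI in time for a (near-)gapless transition.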
110 
111 void gstreamer::Playbin::source_setup(GstElement*,
112  GstElement *source,
113  gpointer user_data)
114 {
115  if (user_data == nullptr)
116  return;
117 
118  static_cast<Playbin*>(user_data)->setup_source(source);
119 }
120 
121 gstreamer::Playbin::Playbin(const core::ubuntu::media::Player::PlayerKey key_in)
122  : pipeline(gst_element_factory_make("playbin", pipeline_name().c_str())),
123  bus{gst_element_get_bus(pipeline)},
125  video_sink(nullptr),
126  audio_sink(nullptr),
127  on_new_message_connection_async(
128  bus.on_new_message_async.connect(
129  std::bind(
130  &Playbin::on_new_message_async,
131  this,
132  std::placeholders::_1))),
133  is_seeking(false),
138  player_lifetime(media::Player::Lifetime::normal),
141  is_missing_audio_codec(false),
142  is_missing_video_codec(false),
143  audio_stream_id(-1),
144  video_stream_id(-1),
145  current_new_state(GST_STATE_NULL),
146  key(key_in),
148  sock_consumer(-1)
149 {
150  if (!pipeline)
151  throw std::runtime_error("Could not create pipeline for playbin.");
152 
153  // Add audio and/or video sink elements depending on environment variables
154  // being set or not set
155  setup_pipeline_for_audio_video();
156 
157  about_to_finish_handler_id = g_signal_connect(
158  pipeline,
159  "about-to-finish",
160  G_CALLBACK(about_to_finish),
161  this
162  );
163 
164  source_setup_handler_id = g_signal_connect(
165  pipeline,
166  "source-setup",
167  G_CALLBACK(source_setup),
168  this
169  );
170 }
171 
172 // Note that we might be accessing freed memory here, so activate DEBUG_REFS
173 // only for debugging
174 //#define DEBUG_REFS
175 #ifdef DEBUG_REFS
176 static void print_refs(const gstreamer::Playbin &pb, const char *func)
177 {
178  using namespace std;
179 
180  MH_DEBUG("%s", func);
181  if (pb.pipeline)
182  MH_DEBUG("pipeline: %d", (const void *) GST_OBJECT_REFCOUNT(pb.pipeline));
183  if (pb.video_sink)
184  MH_DEBUG("video_sink: %d", (const void *) GST_OBJECT_REFCOUNT(pb.video_sink));
185  if (pb.audio_sink)
186  MH_DEBUG("audio_sink: %d", (const void *) GST_OBJECT_REFCOUNT(pb.audio_sink));
187 }
188 #endif
189 
190 gstreamer::Playbin::~Playbin()
191 {
192 #ifdef DEBUG_REFS
193  print_refs(*this, "gstreamer::Playbin::~Playbin pipeline");
194 #endif
195 
196  g_signal_handler_disconnect(pipeline, about_to_finish_handler_id);
197  g_signal_handler_disconnect(pipeline, source_setup_handler_id);
198 
199  if (pipeline)
200  gst_object_unref(pipeline);
201 
202  if (sock_consumer != -1) {
203  close(sock_consumer);
204  sock_consumer = -1;
205  }
206 
207 #ifdef DEBUG_REFS
208  print_refs(*this, "gstreamer::Playbin::~Playbin pipeline");
209 #endif
210 }
211 
213 {
214  MH_INFO("Client died, resetting pipeline");
215  // When the client dies, tear down the current pipeline and get it
216  // in a state that is ready for the next client that connects to the
217  // service
218 
219  // Don't reset the pipeline if we want to resume
220  if (player_lifetime != media::Player::Lifetime::resumable) {
221  reset_pipeline();
222  }
223  // Signal to the Player class that the client side has disconnected
224  signals.client_disconnected();
225 }
226 
227 void gstreamer::Playbin::reset_pipeline()
228 {
229  MH_TRACE("");
230  const auto ret = gst_element_set_state(pipeline, GST_STATE_NULL);
231  switch (ret)
232  {
233  case GST_STATE_CHANGE_FAILURE:
234  MH_WARNING("Failed to reset the pipeline state. Client reconnect may not function properly.");
235  break;
236  case GST_STATE_CHANGE_NO_PREROLL:
237  case GST_STATE_CHANGE_SUCCESS:
238  case GST_STATE_CHANGE_ASYNC:
239  break;
240  default:
241  MH_WARNING("Failed to reset the pipeline state. Client reconnect may not function properly.");
242  }
244  is_missing_audio_codec = false;
245  is_missing_video_codec = false;
246  audio_stream_id = -1;
247  video_stream_id = -1;
248  if (sock_consumer != -1) {
249  close(sock_consumer);
250  sock_consumer = -1;
251  }
252 }
253 
254 void gstreamer::Playbin::process_missing_plugin_message(GstMessage *message)
255 {
256  gchar *desc = gst_missing_plugin_message_get_description(message);
257  MH_WARNING("Missing plugin: %s", desc);
258  g_free(desc);
259 
260  const GstStructure *msg_data = gst_message_get_structure(message);
261  if (g_strcmp0("decoder", gst_structure_get_string(msg_data, "type")) != 0)
262  return;
263 
264  GstCaps *caps;
265  if (!gst_structure_get(msg_data, "detail", GST_TYPE_CAPS, &caps, NULL)) {
266  MH_ERROR("No detail");
267  return;
268  }
269 
270  GstStructure *caps_data = gst_caps_get_structure(caps, 0);
271  if (!caps_data) {
272  MH_ERROR("No caps data");
273  return;
274  }
275 
276  const gchar *mime = gst_structure_get_name(caps_data);
277  if (strstr(mime, "audio"))
278  is_missing_audio_codec = true;
279  else if (strstr(mime, "video"))
280  is_missing_video_codec = true;
281 
282  MH_ERROR("Missing decoder for %s", mime);
283 }
284 
285 void gstreamer::Playbin::process_message_element(GstMessage *message)
286 {
287  const GstStructure *msg_data = gst_message_get_structure(message);
288  const gchar *struct_name = gst_structure_get_name(msg_data);
289 
290  if (g_strcmp0("buffer-export-data", struct_name) == 0)
291  {
292  int fd;
294  if (!gst_structure_get(msg_data,
295  "fd", G_TYPE_INT, &fd,
296  "width", G_TYPE_INT, &meta.width,
297  "height", G_TYPE_INT, &meta.height,
298  "fourcc", G_TYPE_INT, &meta.fourcc,
299  "stride", G_TYPE_INT, &meta.stride,
300  "offset", G_TYPE_INT, &meta.offset,
301  NULL))
302  {
303  MH_ERROR("Bad buffer-export-data message: mirsink version mismatch?");
304  return;
305  }
306  MH_DEBUG("Exporting %dx%d buffer (fd %d)", meta.width, meta.height, fd);
307  send_buffer_data(fd, &meta, sizeof meta);
308  }
309  else if (g_strcmp0("frame-ready", struct_name) == 0)
310  {
311  send_frame_ready();
312  }
313  else
314  {
315  MH_ERROR("Unknown GST_MESSAGE_ELEMENT with struct %s", struct_name);
316  }
317 }
318 
319 void gstreamer::Playbin::on_new_message_async(const Bus::Message& message)
320 {
321  switch (message.type)
322  {
323  case GST_MESSAGE_ERROR:
324  signals.on_error(message.detail.error_warning_info);
325  break;
326  case GST_MESSAGE_WARNING:
327  signals.on_warning(message.detail.error_warning_info);
328  break;
329  case GST_MESSAGE_INFO:
330  signals.on_info(message.detail.error_warning_info);
331  break;
332  case GST_MESSAGE_STATE_CHANGED:
333  if (message.source == "playbin") {
334  g_object_get(G_OBJECT(pipeline), "current-audio", &audio_stream_id, NULL);
335  g_object_get(G_OBJECT(pipeline), "current-video", &video_stream_id, NULL);
336  }
337  signals.on_state_changed(std::make_pair(message.detail.state_changed, message.source));
338  break;
339  case GST_MESSAGE_ELEMENT:
340  if (gst_is_missing_plugin_message(message.message))
341  process_missing_plugin_message(message.message);
342  else
343  process_message_element(message.message);
344  break;
345  case GST_MESSAGE_TAG:
346  {
347  gchar *orientation;
348  if (gst_tag_list_get_string(message.detail.tag.tag_list, "image-orientation", &orientation))
349  {
350  // If the image-orientation tag is in the GstTagList, signal the Engine
351  signals.on_orientation_changed(orientation_lut(orientation));
352  g_free (orientation);
353  }
354 
355  signals.on_tag_available(message.detail.tag);
356  }
357  break;
358  case GST_MESSAGE_ASYNC_DONE:
359  if (is_seeking)
360  {
361  // FIXME: Pass the actual playback time position to the signal call
362  signals.on_seeked_to(0);
363  is_seeking = false;
364  }
365  break;
366  case GST_MESSAGE_EOS:
367  signals.on_end_of_stream();
368  break;
369  case GST_MESSAGE_BUFFERING:
370  signals.on_buffering_changed(message.detail.buffering.percent);
371  break;
372  default:
373  break;
374  }
375 }
376 
377 gstreamer::Bus& gstreamer::Playbin::message_bus()
378 {
379  return bus;
380 }
381 
382 void gstreamer::Playbin::setup_pipeline_for_audio_video()
383 {
384  gint flags;
385  g_object_get (pipeline, "flags", &flags, nullptr);
386  flags |= GST_PLAY_FLAG_AUDIO;
387  flags |= GST_PLAY_FLAG_VIDEO;
388  flags &= ~GST_PLAY_FLAG_TEXT;
389  g_object_set (pipeline, "flags", flags, nullptr);
390 
391  const char *asink_name = ::getenv("CORE_UBUNTU_MEDIA_SERVICE_AUDIO_SINK_NAME");
392 
393  if (asink_name == nullptr)
394  asink_name = PULSE_SINK;
395 
396  audio_sink = gst_element_factory_make (asink_name, "audio-sink");
397  if (audio_sink)
398  g_object_set (pipeline, "audio-sink", audio_sink, NULL);
399  else
400  MH_ERROR("Error trying to create audio sink %s", asink_name);
401 
402  const char *vsink_name = ::getenv("CORE_UBUNTU_MEDIA_SERVICE_VIDEO_SINK_NAME");
403 
404  if (vsink_name == nullptr) {
405  if (backend == core::ubuntu::media::AVBackend::Backend::hybris)
406  vsink_name = HYBRIS_SINK;
407  else if (backend == core::ubuntu::media::AVBackend::Backend::mir)
408  vsink_name = MIR_SINK;
409  }
410 
411  if (vsink_name) {
412  video_sink_name = vsink_name;
413  video_sink = gst_element_factory_make (vsink_name, "video-sink");
414  if (video_sink)
415  g_object_set (pipeline, "video-sink", video_sink, NULL);
416  else
417  MH_ERROR("Error trying to create video sink %s", vsink_name);
418  }
419 }
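// For example, a sketch of overriding the sinks for local testing (assumes a
// shell that then launches media-hub-server; "fakesink" is a stock GStreamer
// element):
//
//   $ export CORE_UBUNTU_MEDIA_SERVICE_AUDIO_SINK_NAME=fakesink
//   $ export CORE_UBUNTU_MEDIA_SERVICE_VIDEO_SINK_NAME=fakesink
//   $ media-hub-server
//
// With these set, gst_element_factory_make() above creates the named sinks
// instead of the defaults (pulsesink, and hybrissink/mirsink per backend).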
420 
421 void gstreamer::Playbin::create_video_sink(uint32_t texture_id)
422 {
423  if (not video_sink) throw std::logic_error
424  {
425  "No video sink configured for the current pipeline"
426  };
427 
428  setup_video_sink_for_buffer_streaming();
429 }
430 
431 void gstreamer::Playbin::set_volume(double new_volume)
432 {
433  g_object_set (pipeline, "volume", new_volume, NULL);
434 }
435 
437 std::string gstreamer::Playbin::get_audio_role_str(media::Player::AudioStreamRole audio_role)
438 {
439  switch (audio_role)
440  {
441  case media::Player::AudioStreamRole::alarm:
442  return "alarm";
443  break;
444  case media::Player::AudioStreamRole::alert:
445  return "alert";
446  break;
447  case media::Player::AudioStreamRole::multimedia:
448  return "multimedia";
449  break;
450  case media::Player::AudioStreamRole::phone:
451  return "phone";
452  break;
453  default:
454  return "multimedia";
455  break;
456  }
457 }
458 
459 media::Player::Orientation gstreamer::Playbin::orientation_lut(const gchar *orientation)
460 {
461  if (g_strcmp0(orientation, "rotate-0") == 0)
462  return media::Player::Orientation::rotate0;
463  else if (g_strcmp0(orientation, "rotate-90") == 0)
464  return media::Player::Orientation::rotate90;
465  else if (g_strcmp0(orientation, "rotate-180") == 0)
466  return media::Player::Orientation::rotate180;
467  else if (g_strcmp0(orientation, "rotate-270") == 0)
468  return media::Player::Orientation::rotate270;
469  else
470  return media::Player::Orientation::rotate0;
471 }
472 
474 void gstreamer::Playbin::set_audio_stream_role(media::Player::AudioStreamRole new_audio_role)
475 {
476  const std::string role_str("props,media.role=" + get_audio_role_str(new_audio_role));
477  MH_INFO("Audio stream role: %s", role_str);
478 
479  GstStructure *props = gst_structure_from_string (role_str.c_str(), NULL);
480  if (audio_sink != nullptr && props != nullptr)
481  {
482  g_object_set (audio_sink, "stream-properties", props, NULL);
483  }
484  else
485  {
486  MH_WARNING("Couldn't set audio stream role - couldn't get audio_sink from pipeline");
487  }
488 
489  gst_structure_free (props);
490 }
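// With the default multimedia role, the structure built above is
// "props,media.role=multimedia"; pulsesink forwards the entries of its
// "stream-properties" structure to PulseAudio as stream properties, which
// the audio policy (e.g. ducking or corking rules) can then act upon.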
491 
492 void gstreamer::Playbin::set_lifetime(media::Player::Lifetime lifetime)
493 {
494  player_lifetime = lifetime;
495 }
496 
497 uint64_t gstreamer::Playbin::position() const
498 {
499  int64_t pos = 0;
500  gst_element_query_position (pipeline, GST_FORMAT_TIME, &pos);
501 
502  // This prevents a 0 position from being reported to the app which happens while seeking.
503  // This is covering over a GStreamer issue
504  if ((static_cast<uint64_t>(pos) < duration()) && is_seeking && pos == 0)
505  {
506  return previous_position;
507  }
508 
509  // Save the current position to use just in case it's needed the next time position is
510  // requested
511  previous_position = static_cast<uint64_t>(pos);
512 
513  // FIXME: this should be int64_t, but dbus-cpp doesn't seem to handle it correctly
514  return static_cast<uint64_t>(pos);
515 }
516 
517 uint64_t gstreamer::Playbin::duration() const
518 {
519  int64_t dur = 0;
520  gst_element_query_duration (pipeline, GST_FORMAT_TIME, &dur);
521 
522  // FIXME: this should be int64_t, but dbus-cpp doesn't seem to handle it correctly
523  return static_cast<uint64_t>(dur);
524 }
525 
526 void gstreamer::Playbin::set_uri(
527  const std::string& uri,
528  const core::ubuntu::media::Player::HeadersType& headers,
529  bool do_pipeline_reset)
530 {
531  gchar *current_uri = nullptr;
532  g_object_get(pipeline, "current-uri", &current_uri, NULL);
533 
534  // Checking for a current_uri being set and not resetting the pipeline
535  // if there isn't a current_uri causes the first play to start playback
536  // sooner since reset_pipeline won't be called
537  if (current_uri and do_pipeline_reset)
538  reset_pipeline();
539 
540  std::string tmp_uri{uri};
541  media::UriCheck::Ptr uri_check{std::make_shared<media::UriCheck>(uri)};
542  if (uri_check->is_local_file())
543  {
544  if (uri_check->is_encoded())
545  {
546  // First decode the URI just in case it's partially encoded already
547  tmp_uri = decode_uri(uri);
548  MH_DEBUG("File URI was encoded, now decoded: %s", tmp_uri);
549  }
550  tmp_uri = encode_uri(tmp_uri);
551  }
552 
553  g_object_set(pipeline, "uri", tmp_uri.c_str(), NULL);
554  if (is_video_file(tmp_uri))
556  else if (is_audio_file(tmp_uri))
558 
559  request_headers = headers;
560 
561  g_free(current_uri);
562 }
563 
564 void gstreamer::Playbin::setup_source(GstElement *source)
565 {
566  if (source == NULL || request_headers.empty())
567  return;
568 
569  if (request_headers.find("Cookie") != request_headers.end()) {
570  if (g_object_class_find_property(G_OBJECT_GET_CLASS(source),
571  "cookies") != NULL) {
572  gchar ** cookies = g_strsplit(request_headers["Cookie"].c_str(), ";", 0);
573  g_object_set(source, "cookies", cookies, NULL);
574  g_strfreev(cookies);
575  }
576  }
577 
578  if (request_headers.find("User-Agent") != request_headers.end()) {
579  if (g_object_class_find_property(G_OBJECT_GET_CLASS(source),
580  "user-agent") != NULL) {
581  g_object_set(source, "user-agent", request_headers["User-Agent"].c_str(), NULL);
582  }
583  }
584 }
585 
586 std::string gstreamer::Playbin::uri() const
587 {
588  gchar* data = nullptr;
589  g_object_get(pipeline, "current-uri", &data, nullptr);
590 
591  std::string result((data == nullptr ? "" : data));
592  g_free(data);
593 
594  return result;
595 }
596 
597 gboolean gstreamer::Playbin::set_state_in_main_thread(gpointer user_data)
598 {
599  MH_TRACE("");
600  auto thiz = static_cast<Playbin*>(user_data);
601  if (thiz and thiz->pipeline)
602  gst_element_set_state(thiz->pipeline, thiz->current_new_state);
603 
604  // Always return false so this is a single shot function call
605  return false;
606 }
607 bool gstreamer::Playbin::set_state_and_wait(GstState new_state, bool use_main_thread)
608 {
609  static const std::chrono::nanoseconds state_change_timeout
610  {
611  // We choose a quite high value here as tests are run under valgrind
612  // and gstreamer pipeline setup/state changes take longer in that scenario.
613  // The value does not negatively impact runtime performance.
614  std::chrono::milliseconds{5000}
615  };
616 
617  bool result = false;
618  GstState current, pending;
619  if (use_main_thread)
620  {
621  // Cache this value for the static g_idle_add handler function
622  current_new_state = new_state;
623  g_idle_add((GSourceFunc) gstreamer::Playbin::set_state_in_main_thread, (gpointer) this);
624 
625  MH_DEBUG("Requested state change in main thread context.");
626 
627  GstState current, pending;
628  result = GST_STATE_CHANGE_SUCCESS == gst_element_get_state(
629  pipeline,
630  &current,
631  &pending,
632  state_change_timeout.count());
633  }
634  else
635  {
636  const auto ret = gst_element_set_state(pipeline, new_state);
637 
638  MH_DEBUG("Requested state change not using main thread context.");
639 
640  switch (ret)
641  {
642  case GST_STATE_CHANGE_FAILURE:
643  result = false; break;
644  case GST_STATE_CHANGE_NO_PREROLL:
645  case GST_STATE_CHANGE_SUCCESS:
646  result = true; break;
647  case GST_STATE_CHANGE_ASYNC:
648  result = GST_STATE_CHANGE_SUCCESS == gst_element_get_state(
649  pipeline,
650  &current,
651  &pending,
652  state_change_timeout.count());
653  break;
654  }
655  }
656 
657  // We only should query the pipeline if we actually succeeded in
658  // setting the requested state.
659  if (result && new_state == GST_STATE_PLAYING)
660  {
661  // Get the video height/width from the video sink
662  try
663  {
664  const auto new_dimensions = get_video_dimensions();
665  emit_video_dimensions_changed_if_changed(new_dimensions);
666  cached_video_dimensions = new_dimensions;
667  }
668  catch (const std::exception& e)
669  {
670  MH_WARNING("Problem querying video dimensions: %s", e.what());
671  }
672  catch (...)
673  {
674  MH_WARNING("Problem querying video dimensions.");
675  }
676 
677 #ifdef DEBUG_GST_PIPELINE
678  MH_DEBUG("Dumping pipeline dot file");
679  GST_DEBUG_BIN_TO_DOT_FILE((GstBin*)pipeline, GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
680 #endif
681  }
682 
683  return result;
684 }
685 
686 bool gstreamer::Playbin::seek(const std::chrono::microseconds& ms)
687 {
688  is_seeking = true;
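// GST_FORMAT_TIME positions are expressed in nanoseconds, hence the
// microsecond count below is scaled by 1000.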
689  return gst_element_seek_simple(
690  pipeline,
691  GST_FORMAT_TIME,
692  (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT),
693  ms.count() * 1000);
694 }
695 
696 core::ubuntu::media::video::Dimensions gstreamer::Playbin::get_video_dimensions() const
697 {
698  if (not video_sink || not is_supported_video_sink())
699  throw std::runtime_error
700  {
701  "Missing video sink or video sink does not support query of width and height."
702  };
703 
704  // Initialize to default value prior to querying actual values from the sink.
705  int video_width = 0, video_height = 0;
706 
707  // There should be only one pad actually
708  GstIterator *iter = gst_element_iterate_pads(video_sink);
709  for (GValue item{};
710  gst_iterator_next(iter, &item) == GST_ITERATOR_OK;
711  g_value_unset(&item))
712  {
713  GstPad *pad = GST_PAD(g_value_get_object(&item));
714  GstCaps *caps = gst_pad_get_current_caps(pad);
715 
716  if (caps) {
717  const GstStructure *s = gst_caps_get_structure(caps, 0);
718  gst_structure_get_int(s, "width", &video_width);
719  gst_structure_get_int(s, "height", &video_height);
720  MH_DEBUG("Video dimensions are %d x %d", video_width, video_height);
721 
722  gst_caps_unref(caps);
723  }
724  }
725  gst_iterator_free(iter);
726 
727  // TODO(tvoss): We should probably check here if width and height are valid.
728  return core::ubuntu::media::video::Dimensions
729  {
730  core::ubuntu::media::video::Height{video_height},
731  core::ubuntu::media::video::Width{video_width}
732  };
733 }
734 
735 void gstreamer::Playbin::emit_video_dimensions_changed_if_changed(const core::ubuntu::media::video::Dimensions& new_dimensions)
736 {
737  // Only signal the application layer if the dimensions have in fact changed. This might happen
738  // if reusing the same media-hub session to play two different video sources.
739  if (new_dimensions != cached_video_dimensions)
740  signals.on_video_dimensions_changed(new_dimensions);
741 }
742 
743 std::string gstreamer::Playbin::file_info_from_uri(const std::string& uri) const
744 {
745  GError *error = nullptr;
746  // Open the URI and get the mime type from it. This will currently only work for
747  // a local file
748  std::unique_ptr<GFile, void(*)(void *)> file(
749  g_file_new_for_uri(uri.c_str()), g_object_unref);
750  std::unique_ptr<GFileInfo, void(*)(void *)> info(
751  g_file_query_info(
752  file.get(), G_FILE_ATTRIBUTE_STANDARD_FAST_CONTENT_TYPE ","
753  G_FILE_ATTRIBUTE_ETAG_VALUE, G_FILE_QUERY_INFO_NONE,
754  /* cancellable */ NULL, &error),
755  g_object_unref);
756  if (!info)
757  return std::string();
758 
759  std::string content_type = g_file_info_get_attribute_string(
760  info.get(), G_FILE_ATTRIBUTE_STANDARD_FAST_CONTENT_TYPE);
761  if (content_type.empty())
762  return std::string();
763 
764  if (content_type == "application/octet-stream")
765  {
766  std::unique_ptr<GFileInfo, void(*)(void *)> full_info(
767  g_file_query_info(file.get(), G_FILE_ATTRIBUTE_STANDARD_CONTENT_TYPE,
768  G_FILE_QUERY_INFO_NONE,
769  /* cancellable */ NULL, &error),g_object_unref);
770 
771  if (!full_info)
772  return std::string();
773 
774  content_type = g_file_info_get_attribute_string(
775  full_info.get(), G_FILE_ATTRIBUTE_STANDARD_CONTENT_TYPE);
776  if (content_type.empty())
777  return std::string();
778  }
779  return content_type;
780 }
781 
782 std::string gstreamer::Playbin::encode_uri(const std::string& uri) const
783 {
784  if (uri.empty())
785  return std::string();
786 
787  std::string encoded_uri;
788  media::UriCheck::Ptr uri_check{std::make_shared<media::UriCheck>(uri)};
789  gchar *uri_scheme = g_uri_parse_scheme(uri.c_str());
790  // We have a URI and it is already percent encoded
791  if (uri_scheme and strlen(uri_scheme) > 0 and uri_check->is_encoded())
792  {
793  MH_DEBUG("Is a URI and is already percent encoded");
794  encoded_uri = uri;
795  }
796  // We have a URI but it's not already percent encoded
797  else if (uri_scheme and strlen(uri_scheme) > 0 and !uri_check->is_encoded())
798  {
799  MH_DEBUG("Is a URI and is not already percent encoded");
800  gchar *encoded = g_uri_escape_string(uri.c_str(),
801  "!$&'()*+,;=:/?[]@", // reserved chars
802  TRUE); // Allow UTF-8 chars
803  if (!encoded)
804  {
805  g_free(uri_scheme);
806  return std::string();
807  }
808  encoded_uri.assign(encoded);
809  g_free(encoded);
810  }
811  else // We have a path and not a URI. Turn it into a full URI and encode it
812  {
813  GError *error = nullptr;
814  MH_DEBUG("Is a path and is not already percent encoded");
815  gchar *str = g_filename_to_uri(uri.c_str(), nullptr, &error);
816  if (!str)
817  {
818  g_free(uri_scheme);
819  return std::string();
820  }
821  encoded_uri.assign(str);
822  g_free(str);
823  if (error != nullptr)
824  {
825  MH_WARNING("Failed to get actual track content type: %s", error->message);
826  g_error_free(error);
828  g_free(uri_scheme);
829  return std::string("audio/video/");
830  }
831  gchar *escaped = g_uri_escape_string(encoded_uri.c_str(),
832  "!$&'()*+,;=:/?[]@", // reserved chars
833  TRUE); // Allow UTF-8 chars
834  if (!escaped)
835  {
836  g_free(uri_scheme);
837  return std::string();
838  }
839  encoded_uri.assign(escaped);
840  g_free(escaped);
841  }
842 
843  g_free(uri_scheme);
844 
845  return encoded_uri;
846 }
847 
848 std::string gstreamer::Playbin::decode_uri(const std::string& uri) const
849 {
850  if (uri.empty())
851  return std::string();
852 
853  gchar *decoded_gchar = g_uri_unescape_string(uri.c_str(), nullptr);
854  if (!decoded_gchar)
855  return std::string();
856 
857  const std::string decoded{decoded_gchar};
858  g_free(decoded_gchar);
859  return decoded;
860 }
861 
862 std::string gstreamer::Playbin::get_file_content_type(const std::string& uri) const
863 {
864  if (uri.empty())
865  return std::string();
866 
867  const std::string encoded_uri{encode_uri(uri)};
868 
869  const std::string content_type {file_info_from_uri(encoded_uri)};
870  if (content_type.empty())
871  {
872  MH_WARNING("Failed to get actual track content type");
873  return std::string("audio/video/");
874  }
875 
876  MH_INFO("Found content type: %s", content_type);
877 
878  return content_type;
879 }
880 
881 bool gstreamer::Playbin::is_audio_file(const std::string& uri) const
882 {
883  if (uri.empty())
884  return false;
885 
886  if (get_file_content_type(uri).find("audio/") == 0)
887  {
888  MH_INFO("Found audio content");
889  return true;
890  }
891 
892  return false;
893 }
894 
895 bool gstreamer::Playbin::is_video_file(const std::string& uri) const
896 {
897  if (uri.empty())
898  return false;
899 
900  if (get_file_content_type(uri).find("video/") == 0)
901  {
902  MH_INFO("Found video content");
903  return true;
904  }
905 
906  return false;
907 }
908 
909 gstreamer::Playbin::MediaFileType gstreamer::Playbin::media_file_type() const
910 {
911  return file_type;
912 }
913 
914 bool gstreamer::Playbin::can_play_streams() const
915 {
916  /*
917  * We do not consider that we can play the video when
918  * 1. No audio stream selected due to missing decoder
919  * 2. No video stream selected due to missing decoder
920  * 3. No stream selected at all
921  * Note that if there are several, say, audio streams, we will play the file
922  * provided that we can decode just one of them, even if there are missing
923  * audio codecs. We will also play files with only one type of stream.
924  */
925  if ((is_missing_audio_codec && audio_stream_id == -1) ||
926  (is_missing_video_codec && video_stream_id == -1) ||
927  (audio_stream_id == -1 && video_stream_id == -1))
928  return false;
929  else
930  return true;
931 }
932 
933 bool gstreamer::Playbin::connect_to_consumer(void)
934 {
935  static const char *local_socket = "media-hub-server";
936  static const char *consumer_socket = "media-consumer";
937 
938  int len;
939  struct sockaddr_un local, remote;
940 
941  if (sock_consumer != -1) {
942  MH_DEBUG("Resetting socket");
943  close(sock_consumer);
944  }
945 
946  if ((sock_consumer = socket(AF_UNIX, SOCK_DGRAM, 0)) == -1)
947  {
948  MH_ERROR("Cannot create socket: %s (%d)", strerror(errno), errno);
949  return false;
950  }
951 
952  // Bind client to local -abstract- socket (media-hub-server<session>)
953  ostringstream local_ss;
954  local_ss << local_socket << key;
955  local.sun_family = AF_UNIX;
956  local.sun_path[0] = '\0';
957  strcpy(local.sun_path + 1, local_ss.str().c_str());
958  len = sizeof(local.sun_family) + local_ss.str().length() + 1;
959  if (bind(sock_consumer, (struct sockaddr *) &local, len) == -1)
960  {
961  MH_ERROR("Cannot bind socket: %s (%d)", strerror(errno), errno);
962  close(sock_consumer);
963  sock_consumer = -1;
964  return false;
965  }
966 
967  // Connect to buffer consumer (media-consumer<session>)
968  ostringstream remote_ss;
969  remote_ss << consumer_socket << key;
970  remote.sun_family = AF_UNIX;
971  remote.sun_path[0] = '\0';
972  strcpy(remote.sun_path + 1, remote_ss.str().c_str());
973  len = sizeof(remote.sun_family) + remote_ss.str().length() + 1;
974  if (connect(sock_consumer, (struct sockaddr *) &remote, len) == -1)
975  {
976  MH_ERROR("Cannot connect to consumer: %s (%d)", strerror(errno), errno);
977  close(sock_consumer);
978  sock_consumer = -1;
979  return false;
980  }
981 
982  MH_DEBUG("Connected to buffer consumer socket");
983 
984  return true;
985 }
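// Note: setting sun_path[0] to '\0' above places both endpoints in the Linux
// abstract socket namespace, so no filesystem entries are created; the
// effective names are "\0media-hub-server<key>" and "\0media-consumer<key>",
// where <key> is the player session key.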
986 
987 void gstreamer::Playbin::send_buffer_data(int fd, void *data, size_t len)
988 {
989  struct msghdr msg{};
990  char buf[CMSG_SPACE(sizeof fd)]{};
991  struct cmsghdr *cmsg;
992  struct iovec io = { .iov_base = data, .iov_len = len };
993 
994  msg.msg_iov = &io;
995  msg.msg_iovlen = 1;
996  msg.msg_control = buf;
997  msg.msg_controllen = sizeof buf;
998 
999  cmsg = CMSG_FIRSTHDR(&msg);
1000  cmsg->cmsg_level = SOL_SOCKET;
1001  cmsg->cmsg_type = SCM_RIGHTS;
1002  cmsg->cmsg_len = CMSG_LEN(sizeof fd);
1003 
1004  memmove(CMSG_DATA(cmsg), &fd, sizeof fd);
1005 
1006  msg.msg_controllen = cmsg->cmsg_len;
1007 
1008  if (sendmsg(sock_consumer, &msg, 0) < 0)
1009  MH_ERROR("Failed to send dma_buf fd to consumer: %s (%d)",
1010  strerror(errno), errno);
1011 }
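// Illustrative counterpart (not part of this file): a consumer bound to the
// matching "media-consumer<key>" socket would typically pick the dma-buf fd
// out of the SCM_RIGHTS ancillary data that travels with the metadata, e.g.:
//
//   struct msghdr msg{};
//   char ctrl[CMSG_SPACE(sizeof(int))]{};
//   char data[256]; // large enough for the exported metadata struct
//   struct iovec io = { .iov_base = data, .iov_len = sizeof data };
//   msg.msg_iov = &io;
//   msg.msg_iovlen = 1;
//   msg.msg_control = ctrl;
//   msg.msg_controllen = sizeof ctrl;
//   if (recvmsg(sock, &msg, 0) > 0) {
//       int fd = -1;
//       struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);
//       if (cmsg && cmsg->cmsg_level == SOL_SOCKET && cmsg->cmsg_type == SCM_RIGHTS)
//           memmove(&fd, CMSG_DATA(cmsg), sizeof fd);
//       // 'fd' now refers to the exported buffer; 'data' holds the metadata.
//   }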
1012 
1013 void gstreamer::Playbin::send_frame_ready(void)
1014 {
1015  const char ready = 'r';
1016 
1017  if (send (sock_consumer, &ready, sizeof ready, 0) == -1)
1018  MH_ERROR("Error when sending frame ready flag to client: %s (%d)",
1019  strerror(errno), errno);
1020 }