diff --git a/LayoutTests/ChangeLog b/LayoutTests/ChangeLog index ca8b2dcc9013f..760c02705e1ea 100644 --- a/LayoutTests/ChangeLog +++ b/LayoutTests/ChangeLog @@ -1,3 +1,42 @@ +2018-11-16 Thibault Saunier + + [GStreamer][MediaStream] Handle track addition and removal + https://bugs.webkit.org/show_bug.cgi?id=191599 + + Reviewed by Xabier Rodriguez-Calvar. + + * fast/mediastream/MediaStream-video-element-remove-track-expected.txt: Added. + * fast/mediastream/MediaStream-video-element-remove-track.html: Added. + +2018-10-11 Alejandro G. Castro + + [GTK][WPE] Add mediaDevices.enumerateDevices support + https://bugs.webkit.org/show_bug.cgi?id=185761 + + Reviewed by Youenn Fablet. + + Remove the failure expectation for the test + media-devices-enumerate-devices.html. Just for the record we have + also checked the following tests. + + fast/mediastream/media-devices-enumerate-devices.html + fast/mediastream/resources/enumerate-devices-frame.html + http/tests/media/media-stream/enumerate-devices-source-id.html + http/tests/media/media-stream/enumerate-devices-source-id-persistent.html + http/tests/media/media-stream/resources/enumerate-devices-source-id-frame.html + + * platform/gtk/TestExpectations: + +2018-10-02 Thibault Saunier + + [GTK][WPE] Blacklist more tests that are requiring webrtc <-> webaudio bridging + https://bugs.webkit.org/show_bug.cgi?id=189829 + + Reviewed by Alejandro G. Castro. + + * platform/gtk/TestExpectations: + * platform/wpe/TestExpectations: + 2018-10-01 Alicia Boya García [MSE][GStreamer] Reset running time in PlaybackPipeline::flush() diff --git a/LayoutTests/fast/mediastream/MediaStream-video-element-remove-track-expected.txt b/LayoutTests/fast/mediastream/MediaStream-video-element-remove-track-expected.txt new file mode 100644 index 0000000000000..606fa54f558c7 --- /dev/null +++ b/LayoutTests/fast/mediastream/MediaStream-video-element-remove-track-expected.txt @@ -0,0 +1,47 @@ +Tests checking removing MediaStream track applies to the video element. + +On success, you will see a series of "PASS" messages, followed by "TEST COMPLETE". + + + + +**** calling mediaDevices.getUserMedia() **** +PASS mediaDevices.getUserMedia succeeded. 
+ +**** setup video element **** +video.srcObject = mediaStream +Event 'canplay' + +*** start playback **** +video.play() +video.pause() + +**** check video element **** + +**** check video tracks **** +PASS video.videoTracks.length is 1 +PASS video.videoTracks[0].id is mediaStream.getVideoTracks()[0].id + +**** check audio tracks **** +PASS video.audioTracks.length is 1 +PASS video.audioTracks[0].id is mediaStream.getAudioTracks()[0].id + +**** removing audio track **** + +**** check video element **** +PASS video.videoWidth is mediaStream.getVideoTracks()[0].getSettings().width +PASS video.videoHeight is mediaStream.getVideoTracks()[0].getSettings().height + +**** check video tracks **** +PASS video.videoTracks.length is 1 +PASS video.videoTracks[0].id is mediaStream.getVideoTracks()[0].id +PASS video.videoTracks[0].language is "" +PASS video.videoTracks[0].kind is "main" + +**** check no audio track **** +PASS video.audioTracks.length is 0 +PASS mediaStream.getAudioTracks().length is 0 +PASS successfullyParsed is true + +TEST COMPLETE + diff --git a/LayoutTests/fast/mediastream/MediaStream-video-element-remove-track.html b/LayoutTests/fast/mediastream/MediaStream-video-element-remove-track.html new file mode 100644 index 0000000000000..2321e72db5291 --- /dev/null +++ b/LayoutTests/fast/mediastream/MediaStream-video-element-remove-track.html @@ -0,0 +1,112 @@ + + + + + + + +

+ +
+ + + diff --git a/LayoutTests/platform/gtk/TestExpectations b/LayoutTests/platform/gtk/TestExpectations index a4dc890e45161..643774f165a55 100644 --- a/LayoutTests/platform/gtk/TestExpectations +++ b/LayoutTests/platform/gtk/TestExpectations @@ -603,6 +603,7 @@ webkit.org/b/186933 webrtc/clone-audio-track.html webkit.org/b/186933 webrtc/audio-replace-track.html webkit.org/b/186933 webrtc/audio-peer-connection-webaudio.html webkit.org/b/186933 webrtc/audio-muted-stats.html +webkit.org/b/186933 webrtc/getUserMedia-webaudio-autoplay.html imported/w3c/web-platform-tests/webrtc/ [ Skip ] http/tests/webrtc [ Skip ] @@ -635,7 +636,6 @@ webkit.org/b/79203 imported/w3c/web-platform-tests/mediacapture-streams/MediaStr webkit.org/b/151344 fast/mediastream/MediaStream-add-ended-tracks.html [ Timeout ] # Crash is bug #176801 webkit.org/b/160996 fast/mediastream/MediaStream-video-element-video-tracks-disabled.html [ ImageOnlyFailure Crash ] -webkit.org/b/172269 fast/mediastream/media-devices-enumerate-devices.html [ Failure ] webkit.org/b/173257 fast/mediastream/getUserMedia-grant-persistency3.html [ Pass Failure ] webkit.org/b/176801 fast/mediastream/argument-types.html [ Crash Pass ] diff --git a/LayoutTests/platform/wpe/TestExpectations b/LayoutTests/platform/wpe/TestExpectations index 8748cf0830f5b..52d3aa9673d13 100644 --- a/LayoutTests/platform/wpe/TestExpectations +++ b/LayoutTests/platform/wpe/TestExpectations @@ -76,10 +76,11 @@ webkit.org/b/186933 webrtc/clone-audio-track.html webkit.org/b/186933 webrtc/audio-replace-track.html webkit.org/b/186933 webrtc/audio-peer-connection-webaudio.html webkit.org/b/186933 webrtc/audio-muted-stats.html +webkit.org/b/186933 webrtc/getUserMedia-webaudio-autoplay.html # The MediaStream implementation is still not completed webkit.org/b/79203 fast/mediastream/mock-media-source-webaudio.html [ Timeout ] -webkit.org/b/79203 fast/mediastream/getUserMedia-webaudio.html [ Failure ] +webkit.org/b/79203 fast/mediastream/getUserMedia-webaudio.html [ Failure Crash ] webkit.org/b/79203 fast/mediastream/MediaStream-video-element-track-stop.html [ Timeout ] webkit.org/b/79203 fast/mediastream/RTCPeerConnection-dtmf.html [ Timeout ] webkit.org/b/79203 fast/mediastream/RTCPeerConnection-icecandidate-event.html [ Pass Failure Crash ] diff --git a/Source/ThirdParty/libwebrtc/CMakeLists.txt b/Source/ThirdParty/libwebrtc/CMakeLists.txt index 550cf2a16de43..c167cd73a61a0 100644 --- a/Source/ThirdParty/libwebrtc/CMakeLists.txt +++ b/Source/ThirdParty/libwebrtc/CMakeLists.txt @@ -16,6 +16,11 @@ if (NOT ALSALIB_FOUND) message(FATAL_ERROR "alsa-lib is needed for USE_LIBWEBRTC.") endif () +find_package(Opus 1.3) +if (NOT LIBOPUS_FOUND) + message(FATAL_ERROR "libopus is needed for USE_LIBWEBRTC.") +endif () + set(webrtc_SOURCES Source/third_party/boringssl/err_data.c Source/third_party/boringssl/src/crypto/asn1/a_bitstr.c @@ -290,158 +295,31 @@ set(webrtc_SOURCES Source/third_party/libyuv/source/mjpeg_decoder.cc Source/third_party/libyuv/source/mjpeg_validate.cc Source/third_party/libyuv/source/planar_functions.cc + Source/third_party/libyuv/source/compare_neon.cc + Source/third_party/libyuv/source/compare_neon64.cc Source/third_party/libyuv/source/rotate.cc Source/third_party/libyuv/source/rotate_any.cc Source/third_party/libyuv/source/rotate_argb.cc Source/third_party/libyuv/source/rotate_common.cc Source/third_party/libyuv/source/rotate_dspr2.cc Source/third_party/libyuv/source/rotate_gcc.cc + Source/third_party/libyuv/source/rotate_neon.cc + 
Source/third_party/libyuv/source/rotate_neon64.cc Source/third_party/libyuv/source/row_any.cc Source/third_party/libyuv/source/row_common.cc Source/third_party/libyuv/source/row_dspr2.cc Source/third_party/libyuv/source/row_gcc.cc + Source/third_party/libyuv/source/row_neon.cc + Source/third_party/libyuv/source/row_neon64.cc Source/third_party/libyuv/source/scale.cc Source/third_party/libyuv/source/scale_any.cc Source/third_party/libyuv/source/scale_argb.cc Source/third_party/libyuv/source/scale_common.cc Source/third_party/libyuv/source/scale_dspr2.cc Source/third_party/libyuv/source/scale_gcc.cc + Source/third_party/libyuv/source/scale_neon.cc + Source/third_party/libyuv/source/scale_neon64.cc Source/third_party/libyuv/source/video_common.cc - Source/third_party/opus/src/celt/bands.c - Source/third_party/opus/src/celt/celt.c - Source/third_party/opus/src/celt/celt_decoder.c - Source/third_party/opus/src/celt/celt_encoder.c - Source/third_party/opus/src/celt/celt_lpc.c - Source/third_party/opus/src/celt/cwrs.c - Source/third_party/opus/src/celt/entcode.c - Source/third_party/opus/src/celt/entdec.c - Source/third_party/opus/src/celt/entenc.c - Source/third_party/opus/src/celt/kiss_fft.c - Source/third_party/opus/src/celt/laplace.c - Source/third_party/opus/src/celt/mathops.c - Source/third_party/opus/src/celt/mdct.c - Source/third_party/opus/src/celt/modes.c - Source/third_party/opus/src/celt/pitch.c - Source/third_party/opus/src/celt/quant_bands.c - Source/third_party/opus/src/celt/rate.c - Source/third_party/opus/src/celt/vq.c - Source/third_party/opus/src/silk/A2NLSF.c - Source/third_party/opus/src/silk/CNG.c - Source/third_party/opus/src/silk/HP_variable_cutoff.c - Source/third_party/opus/src/silk/LPC_analysis_filter.c - Source/third_party/opus/src/silk/LPC_inv_pred_gain.c - Source/third_party/opus/src/silk/LP_variable_cutoff.c - Source/third_party/opus/src/silk/NLSF2A.c - Source/third_party/opus/src/silk/NLSF_VQ.c - Source/third_party/opus/src/silk/NLSF_VQ_weights_laroia.c - Source/third_party/opus/src/silk/NLSF_decode.c - Source/third_party/opus/src/silk/NLSF_del_dec_quant.c - Source/third_party/opus/src/silk/NLSF_encode.c - Source/third_party/opus/src/silk/NLSF_stabilize.c - Source/third_party/opus/src/silk/NLSF_unpack.c - Source/third_party/opus/src/silk/NSQ.c - Source/third_party/opus/src/silk/NSQ_del_dec.c - Source/third_party/opus/src/silk/PLC.c - Source/third_party/opus/src/silk/VAD.c - Source/third_party/opus/src/silk/VQ_WMat_EC.c - Source/third_party/opus/src/silk/ana_filt_bank_1.c - Source/third_party/opus/src/silk/biquad_alt.c - Source/third_party/opus/src/silk/bwexpander.c - Source/third_party/opus/src/silk/bwexpander_32.c - Source/third_party/opus/src/silk/check_control_input.c - Source/third_party/opus/src/silk/code_signs.c - Source/third_party/opus/src/silk/control_SNR.c - Source/third_party/opus/src/silk/control_audio_bandwidth.c - Source/third_party/opus/src/silk/control_codec.c - Source/third_party/opus/src/silk/debug.c - Source/third_party/opus/src/silk/dec_API.c - Source/third_party/opus/src/silk/decode_core.c - Source/third_party/opus/src/silk/decode_frame.c - Source/third_party/opus/src/silk/decode_indices.c - Source/third_party/opus/src/silk/decode_parameters.c - Source/third_party/opus/src/silk/decode_pitch.c - Source/third_party/opus/src/silk/decode_pulses.c - Source/third_party/opus/src/silk/decoder_set_fs.c - Source/third_party/opus/src/silk/enc_API.c - Source/third_party/opus/src/silk/encode_indices.c - Source/third_party/opus/src/silk/encode_pulses.c - 
Source/third_party/opus/src/silk/float/LPC_analysis_filter_FLP.c - Source/third_party/opus/src/silk/float/LPC_inv_pred_gain_FLP.c - Source/third_party/opus/src/silk/float/LTP_analysis_filter_FLP.c - Source/third_party/opus/src/silk/float/LTP_scale_ctrl_FLP.c - Source/third_party/opus/src/silk/float/apply_sine_window_FLP.c - Source/third_party/opus/src/silk/float/autocorrelation_FLP.c - Source/third_party/opus/src/silk/float/burg_modified_FLP.c - Source/third_party/opus/src/silk/float/bwexpander_FLP.c - Source/third_party/opus/src/silk/float/corrMatrix_FLP.c - Source/third_party/opus/src/silk/float/encode_frame_FLP.c - Source/third_party/opus/src/silk/float/energy_FLP.c - Source/third_party/opus/src/silk/float/find_LPC_FLP.c - Source/third_party/opus/src/silk/float/find_LTP_FLP.c - Source/third_party/opus/src/silk/float/find_pitch_lags_FLP.c - Source/third_party/opus/src/silk/float/find_pred_coefs_FLP.c - Source/third_party/opus/src/silk/float/inner_product_FLP.c - Source/third_party/opus/src/silk/float/k2a_FLP.c - Source/third_party/opus/src/silk/float/levinsondurbin_FLP.c - Source/third_party/opus/src/silk/float/noise_shape_analysis_FLP.c - Source/third_party/opus/src/silk/float/pitch_analysis_core_FLP.c - Source/third_party/opus/src/silk/float/prefilter_FLP.c - Source/third_party/opus/src/silk/float/process_gains_FLP.c - Source/third_party/opus/src/silk/float/regularize_correlations_FLP.c - Source/third_party/opus/src/silk/float/residual_energy_FLP.c - Source/third_party/opus/src/silk/float/scale_copy_vector_FLP.c - Source/third_party/opus/src/silk/float/scale_vector_FLP.c - Source/third_party/opus/src/silk/float/schur_FLP.c - Source/third_party/opus/src/silk/float/solve_LS_FLP.c - Source/third_party/opus/src/silk/float/sort_FLP.c - Source/third_party/opus/src/silk/float/warped_autocorrelation_FLP.c - Source/third_party/opus/src/silk/float/wrappers_FLP.c - Source/third_party/opus/src/silk/gain_quant.c - Source/third_party/opus/src/silk/init_decoder.c - Source/third_party/opus/src/silk/init_encoder.c - Source/third_party/opus/src/silk/inner_prod_aligned.c - Source/third_party/opus/src/silk/interpolate.c - Source/third_party/opus/src/silk/lin2log.c - Source/third_party/opus/src/silk/log2lin.c - Source/third_party/opus/src/silk/pitch_est_tables.c - Source/third_party/opus/src/silk/process_NLSFs.c - Source/third_party/opus/src/silk/quant_LTP_gains.c - Source/third_party/opus/src/silk/resampler.c - Source/third_party/opus/src/silk/resampler_down2.c - Source/third_party/opus/src/silk/resampler_down2_3.c - Source/third_party/opus/src/silk/resampler_private_AR2.c - Source/third_party/opus/src/silk/resampler_private_IIR_FIR.c - Source/third_party/opus/src/silk/resampler_private_down_FIR.c - Source/third_party/opus/src/silk/resampler_private_up2_HQ.c - Source/third_party/opus/src/silk/resampler_rom.c - Source/third_party/opus/src/silk/shell_coder.c - Source/third_party/opus/src/silk/sigm_Q15.c - Source/third_party/opus/src/silk/sort.c - Source/third_party/opus/src/silk/stereo_LR_to_MS.c - Source/third_party/opus/src/silk/stereo_MS_to_LR.c - Source/third_party/opus/src/silk/stereo_decode_pred.c - Source/third_party/opus/src/silk/stereo_encode_pred.c - Source/third_party/opus/src/silk/stereo_find_predictor.c - Source/third_party/opus/src/silk/stereo_quant_pred.c - Source/third_party/opus/src/silk/sum_sqr_shift.c - Source/third_party/opus/src/silk/table_LSF_cos.c - Source/third_party/opus/src/silk/tables_LTP.c - Source/third_party/opus/src/silk/tables_NLSF_CB_NB_MB.c - 
Source/third_party/opus/src/silk/tables_NLSF_CB_WB.c - Source/third_party/opus/src/silk/tables_gain.c - Source/third_party/opus/src/silk/tables_other.c - Source/third_party/opus/src/silk/tables_pitch_lag.c - Source/third_party/opus/src/silk/tables_pulses_per_block.c - Source/third_party/opus/src/src/analysis.c - Source/third_party/opus/src/src/mlp.c - Source/third_party/opus/src/src/mlp_data.c - Source/third_party/opus/src/src/opus.c - Source/third_party/opus/src/src/opus_decoder.c - Source/third_party/opus/src/src/opus_encoder.c - Source/third_party/opus/src/src/opus_multistream.c - Source/third_party/opus/src/src/opus_multistream_decoder.c - Source/third_party/opus/src/src/opus_multistream_encoder.c - Source/third_party/opus/src/src/repacketizer.c Source/third_party/usrsctp/usrsctplib/netinet/sctp_asconf.c Source/third_party/usrsctp/usrsctplib/netinet/sctp_auth.c Source/third_party/usrsctp/usrsctplib/netinet/sctp_bsd_addr.c @@ -875,11 +753,7 @@ set(webrtc_SOURCES Source/webrtc/modules/audio_device/fine_audio_buffer.cc Source/webrtc/modules/audio_device/linux/alsasymboltable_linux.cc Source/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc - Source/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc - Source/webrtc/modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc - Source/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc Source/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc - Source/webrtc/modules/audio_device/linux/pulseaudiosymboltable_linux.cc Source/webrtc/modules/audio_mixer/audio_frame_manipulator.cc Source/webrtc/modules/audio_mixer/audio_mixer_impl.cc Source/webrtc/modules/audio_mixer/default_output_rate_calculator.cc @@ -1386,6 +1260,17 @@ if (WTF_CPU_X86_64 OR WTF_CPU_X86) ) endif() +find_package(Libpulse) +if (NOT LIBPULSE_FOUND) + message("libpulse is not found, not building support.") +else() + list(APPEND webrtc_SOURCES + Source/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc + Source/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc + Source/webrtc/modules/audio_device/linux/pulseaudiosymboltable_linux.cc + ) +endif () + add_library(webrtc STATIC ${webrtc_SOURCES}) target_compile_options(webrtc PRIVATE @@ -1425,6 +1310,7 @@ target_compile_definitions(webrtc PRIVATE WEBRTC_NS_FLOAT WEBRTC_OPUS_SUPPORT_120MS_PTIME=0 WEBRTC_OPUS_VARIABLE_COMPLEXITY=0 + WEBRTC_USE_BUILTIN_OPUS=1 WEBRTC_POSIX WEBRTC_USE_BUILTIN_ISAC_FIX=1 WEBRTC_USE_BUILTIN_ISAC_FLOAT=0 @@ -1434,6 +1320,16 @@ target_compile_definitions(webrtc PRIVATE __Userspace_os_Linux ) +if (WTF_CPU_ARM) + target_compile_definitions(webrtc PRIVATE + WEBRTC_ARCH_ARM=1 + ) +elseif (WTF_CPU_ARM64) + target_compile_definitions(webrtc PRIVATE + WEBRTC_ARCH_ARM64=1 + ) +endif() + target_include_directories(webrtc PRIVATE Source Source/third_party/boringssl/src/include @@ -1462,6 +1358,8 @@ target_link_libraries(webrtc ${VPX_LIBRARY}) target_link_libraries(webrtc ${EVENT_LIBRARY}) +target_link_libraries(webrtc ${OPUS_LIBRARY}) + # libsrtp package compilation set(libsrtp_SOURCES Source/third_party/libsrtp/crypto/cipher/aes_gcm_ossl.c diff --git a/Source/ThirdParty/libwebrtc/ChangeLog b/Source/ThirdParty/libwebrtc/ChangeLog index 8f6aebb7b6a31..2a21e6d100221 100644 --- a/Source/ThirdParty/libwebrtc/ChangeLog +++ b/Source/ThirdParty/libwebrtc/ChangeLog @@ -1,3 +1,52 @@ +2018-11-27 Thibault Saunier + + [GStreamer][WebRTC] Use LibWebRTC provided vp8 decoders and encoders + https://bugs.webkit.org/show_bug.cgi?id=191861 + + Reviewed by 
Philippe Normand. + + * CMakeLists.txt: Build LibVPX vp8 encoder and decoders. + +2018-10-16 Alejandro G. Castro + + [GTK][WPE] Make libwebrtc compile using the system opus library + https://bugs.webkit.org/show_bug.cgi?id=190573 + + Reviewed by Philippe Normand. + + We found some situations where gstreamer gets confused when it + tries to use opus because it finds opus symbols compiled for + libwebrtc. We are going to try the option to use the system opus + library also for libwebrtc. + + * CMakeLists.txt: Added opus dependency. + * cmake/FindOpus.cmake: Added the hints to find the opus library + in the compilation. + +2018-09-21 Thibault Saunier + + [libwebrtc] Allow IP mismatch for local connections on localhost + https://bugs.webkit.org/show_bug.cgi?id=189828 + + Reviewed by Alejandro G. Castro. + + The rest of the code allows it, but there was an unnecessary assert + + See Bug 187302 + + * Source/webrtc/p2p/base/tcpport.cc: + +2018-12-01 Thibault Saunier + + [GStreamer][WebRTC] Build opus decoder support in libwebrtc + https://bugs.webkit.org/show_bug.cgi?id=192226 + + Reviewed by Philippe Normand. + + Somehow that was overlooked at some point (it used to work). + + * CMakeLists.txt: + 2018-08-06 David Kilzer [libwebrtc] SafeSetError() in peerconnection.cc contains use-after-move of webrtc::RTCError variable diff --git a/Source/ThirdParty/libwebrtc/Source/webrtc/p2p/base/tcpport.cc b/Source/ThirdParty/libwebrtc/Source/webrtc/p2p/base/tcpport.cc index 89d60d8d1b1f2..a715c96d3d8ca 100644 --- a/Source/ThirdParty/libwebrtc/Source/webrtc/p2p/base/tcpport.cc +++ b/Source/ThirdParty/libwebrtc/Source/webrtc/p2p/base/tcpport.cc @@ -343,9 +343,21 @@ TCPConnection::TCPConnection(TCPPort* port, << ", port() Network:" << port->Network()->ToString(); const std::vector& desired_addresses = port_->Network()->GetIPs(); + +#if defined(WEBRTC_WEBKIT_BUILD) RTC_DCHECK(socket->GetLocalAddress().IsLoopbackIP() || - std::find(desired_addresses.begin(), desired_addresses.end(), - socket_->GetLocalAddress().ipaddr()) != desired_addresses.end()); + (std::find_if(desired_addresses.begin(), desired_addresses.end(), + [this](const rtc::InterfaceAddress& addr) { + return socket_->GetLocalAddress().ipaddr() == + addr; + }) != desired_addresses.end())); +#else + RTC_DCHECK(std::find_if(desired_addresses.begin(), desired_addresses.end(), + [this](const rtc::InterfaceAddress& addr) { + return socket_->GetLocalAddress().ipaddr() == + addr; + }) != desired_addresses.end()); +#endif ConnectSocketSignals(socket); } } diff --git a/Source/ThirdParty/libwebrtc/cmake/FindLibpulse.cmake b/Source/ThirdParty/libwebrtc/cmake/FindLibpulse.cmake new file mode 100644 index 0000000000000..07a7fc9749a38 --- /dev/null +++ b/Source/ThirdParty/libwebrtc/cmake/FindLibpulse.cmake @@ -0,0 +1,34 @@ +# - Try to find libpulse library. +# Once done, this will define +# +# LIBPULSE_FOUND - system has libpulse library. +# +# Copyright (C) 2018 Igalia S.L. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution.
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND ITS CONTRIBUTORS ``AS +# IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR ITS +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +find_package(PkgConfig) +pkg_check_modules(PC_LIBPULSE libpulse) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(libpulse DEFAULT_MSG PC_LIBPULSE_VERSION) + diff --git a/Source/ThirdParty/libwebrtc/cmake/FindOpus.cmake b/Source/ThirdParty/libwebrtc/cmake/FindOpus.cmake new file mode 100644 index 0000000000000..4d49aa7ce657c --- /dev/null +++ b/Source/ThirdParty/libwebrtc/cmake/FindOpus.cmake @@ -0,0 +1,46 @@ +# - Try to find opus. +# Once done, this will define +# +# LIBOPUS_FOUND - system has opus. +# OPUS_LIBRARY - link this to use opus. +# +# Copyright (C) 2018 Igalia S.L. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND ITS CONTRIBUTORS ``AS +# IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR ITS +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +find_package(PkgConfig) +pkg_check_modules(PC_LIBOPUS opus) + +find_library(OPUS_LIBRARY + NAME opus + HINTS ${PC_LIBOPUS_LIBDIR} +) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(LibOpus + REQUIRED_VARS OPUS_LIBRARY + FOUND_VAR LIBOPUS_FOUND + VERSION_VAR PC_LIBOPUS_VERSION) + +mark_as_advanced( + OPUS_LIBRARY +) diff --git a/Source/WebCore/ChangeLog b/Source/WebCore/ChangeLog index 25a5c78bc478d..61f7991f745da 100644 --- a/Source/WebCore/ChangeLog +++ b/Source/WebCore/ChangeLog @@ -1,3 +1,426 @@ +2018-11-28 Thibault Saunier + + [WebRTC][GStreamer] Make sure to have the default microphone on the top of the list + https://bugs.webkit.org/show_bug.cgi?id=192026 + + Reviewed by Philippe Normand. 
+ + Otherwise we might end up picking a useless one in some applications + (not sure what those applications do, though). + + GStreamer patch proposed as https://gitlab.freedesktop.org/gstreamer/gst-plugins-good/merge_requests/34/diffs + + * platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.cpp: + (WebCore::sortDevices): + (WebCore::GStreamerCaptureDeviceManager::addDevice): + (WebCore::GStreamerCaptureDeviceManager::refreshCaptureDevices): + * platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.h: + +2018-11-28 Thibault Saunier + + [WebRTC][GStreamer] Tag all cameras as 'Unknown' facing mode + https://bugs.webkit.org/show_bug.cgi?id=192028 + + Reviewed by Philippe Normand. + + * platform/mediastream/gstreamer/GStreamerVideoCaptureSource.cpp: + (WebCore::GStreamerVideoCaptureSource::capabilities): + +2018-11-28 Thibault Saunier + + [WebRTC][GStreamer] Use a GUniquePtr to hold the GstAudioConverter in our OutgoingAudioSource + https://bugs.webkit.org/show_bug.cgi?id=192027 + + Reviewed by Xabier Rodriguez-Calvar. + + Cleaning up the code a bit. + + It is a minor refactoring, no new test required. + + * platform/graphics/gstreamer/GUniquePtrGStreamer.h: + * platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.cpp: + (WebCore::RealtimeOutgoingAudioSourceLibWebRTC::~RealtimeOutgoingAudioSourceLibWebRTC): + (WebCore::RealtimeOutgoingAudioSourceLibWebRTC::audioSamplesAvailable): + (WebCore::RealtimeOutgoingAudioSourceLibWebRTC::pullAudioData): + * platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.h: + +2018-11-28 Thibault Saunier + + [GStreamer][WebRTC] Do not run device monitor for device types we do not handle + https://bugs.webkit.org/show_bug.cgi?id=191904 + + This is useless work and it throws a warning about using GstDeviceMonitor without filters. + + Reviewed by Philippe Normand. + + * platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.cpp: + (WebCore::GStreamerCaptureDeviceManager::refreshCaptureDevices): + +2018-11-20 Thibault Saunier + + [GStreamer] Enhance debugging by making sure to print the pipeline in MediaPlayerPrivateGStreamer + https://bugs.webkit.org/show_bug.cgi?id=191586 + + Reviewed by Xabier Rodriguez-Calvar. + + These are minor changes that do not require tests.
+ + * platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp: + (WebCore::MediaPlayerPrivateGStreamer::setAudioStreamProperties): + (WebCore::MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer): + (WebCore::MediaPlayerPrivateGStreamer::setPlaybinURL): + (WebCore::MediaPlayerPrivateGStreamer::loadFull): + (WebCore::MediaPlayerPrivateGStreamer::commitLoad): + (WebCore::MediaPlayerPrivateGStreamer::readyTimerFired): + (WebCore::MediaPlayerPrivateGStreamer::changePipelineState): + (WebCore::MediaPlayerPrivateGStreamer::prepareToPlay): + (WebCore::MediaPlayerPrivateGStreamer::play): + (WebCore::MediaPlayerPrivateGStreamer::pause): + (WebCore::MediaPlayerPrivateGStreamer::durationMediaTime const): + (WebCore::MediaPlayerPrivateGStreamer::seek): + (WebCore::MediaPlayerPrivateGStreamer::updatePlaybackRate): + (WebCore::MediaPlayerPrivateGStreamer::paused const): + (WebCore::MediaPlayerPrivateGStreamer::enableTrack): + (WebCore::MediaPlayerPrivateGStreamer::notifyPlayerOfVideo): + (WebCore::MediaPlayerPrivateGStreamer::notifyPlayerOfAudio): + (WebCore::MediaPlayerPrivateGStreamer::notifyPlayerOfText): + (WebCore::MediaPlayerPrivateGStreamer::handleMessage): + (WebCore::MediaPlayerPrivateGStreamer::processBufferingStats): + (WebCore::MediaPlayerPrivateGStreamer::fillTimerFired): + (WebCore::MediaPlayerPrivateGStreamer::maxMediaTimeSeekable const): + (WebCore::MediaPlayerPrivateGStreamer::totalBytes const): + (WebCore::MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback): + (WebCore::MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback): + (WebCore::MediaPlayerPrivateGStreamer::sourceSetup): + (WebCore::MediaPlayerPrivateGStreamer::asyncStateChangeDone): + (WebCore::MediaPlayerPrivateGStreamer::updateStates): + (WebCore::MediaPlayerPrivateGStreamer::loadNextLocation): + (WebCore::MediaPlayerPrivateGStreamer::didEnd): + (WebCore::MediaPlayerPrivateGStreamer::setDownloadBuffering): + (WebCore::MediaPlayerPrivateGStreamer::setPreload): + (WebCore::MediaPlayerPrivateGStreamer::createGSTPlayBin): + +2018-11-27 Thibault Saunier + + [GStreamer][WebRTC] Use LibWebRTC provided vp8 decoders and encoders + https://bugs.webkit.org/show_bug.cgi?id=191861 + + The GStreamer implementations are less feature full and less tested, now that Apple + also use the LibWebRTC provided implementations it makes a lot of sense for us to + do the same. + + Basically everything related to temporal scalability is not implemented in GStreamer. + + We should make sure to use GStreamer elements on low powered platforms and for + accelerated encoders and decoders. + + Reviewed by Philippe Normand. + + This is mostly refactoring, no new test required. + + * platform/graphics/gstreamer/GStreamerCommon.h: Added GstMappedFrame similar to GstMappedBuffer but for video frames. 
+ (WebCore::GstMappedFrame::GstMappedFrame): + (WebCore::GstMappedFrame::get): + (WebCore::GstMappedFrame::ComponentData): + (WebCore::GstMappedFrame::ComponentStride): + (WebCore::GstMappedFrame::info): + (WebCore::GstMappedFrame::width): + (WebCore::GstMappedFrame::height): + (WebCore::GstMappedFrame::format): + (WebCore::GstMappedFrame::~GstMappedFrame): + (WebCore::GstMappedFrame::operator bool const): + * platform/graphics/gstreamer/GUniquePtrGStreamer.h: + * platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp: + (WebCore::GStreamerVideoFrameLibWebRTC::ToI420): Implemented support for converting frame formats with the GstVideoConverter API + * platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp: + (WebCore::GStreamerVideoDecoder::GstDecoderFactory): + (WebCore::GStreamerVideoDecoder::HasGstDecoder): + (WebCore::VP8Decoder::Create): Creates a `webrtc::LibvpxVp8Decoder()` if the GStreamer decoder would be the LibVPX based one. + (WebCore::GStreamerVideoDecoderFactory::CreateVideoDecoder): + * platform/mediastream/libwebrtc/GStreamerVideoEncoder.cpp: + (gst_webrtc_video_encoder_class_init): + * platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp: Stop using vp8enc and use the LibWebRTC based implementation + (WebCore::GStreamerH264Encoder::GStreamerH264Encoder): Renamed H264Encoder to GStreamerH264Encoder to be more coherent with what is done in LibVPX + (WebCore::GStreamerVP8Encoder::GStreamerVP8Encoder): Renamed VP8Encoder to GStreamerVP8Encoder to be more coherent with what is done in LibVPX + (WebCore::GStreamerVideoEncoderFactory::CreateVideoEncoder): + (WebCore::GStreamerVideoEncoderFactory::GetSupportedFormats const): + +2018-11-16 Thibault Saunier + + [GStreamer][WebRTC] Add API to enable/disable device mocks + https://bugs.webkit.org/show_bug.cgi?id=191699 + + This basically allows us to test MediaStream/WebRTC support without + requiring cameras or microphones and is quite useful. + + Also fix the GStreamerAudioMock by: + - Stop setting `leaky-upstream` on the GStreamerCapturer queue, + this was useful when we were trying to bring the MediaStream + sources inside the main pipeline, which is not the case anymore + (and not doable with the latest version of LibWebRTC). + - Use a 'ticks' wave on the gstreamer audiotestsrc so the test + stream is similar to what the Apple port does. + + Reviewed by Xabier Rodriguez-Calvar. + + The mocks are already tested and the API is really simple. + + * platform/mediastream/gstreamer/GStreamerAudioCapturer.cpp: + (WebCore::GStreamerAudioCapturer::createSource): + * platform/mediastream/gstreamer/GStreamerAudioCapturer.h: + * platform/mediastream/gstreamer/GStreamerCapturer.cpp: + (WebCore::GStreamerCapturer::addSink): + * platform/mediastream/gstreamer/GStreamerCapturer.h: + +2018-11-16 Thibault Saunier + + [GStreamer][MediaStream] Handle track addition and removal + https://bugs.webkit.org/show_bug.cgi?id=191599 + + Reviewed by Xabier Rodriguez-Calvar.
+ + Test: fast/mediastream/MediaStream-video-element-remove-track.html + + * platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp: + (WebCore::WebKitMediaStreamObserver::~WebKitMediaStreamObserver): + (WebCore::WebKitMediaStreamObserver::WebKitMediaStreamObserver): + (WebCore::webkitMediaStreamSrcFinalize): + (WebCore::webkitMediaStreamSrcChangeState): + (WebCore::webkit_media_stream_src_init): + (WebCore::webkitMediaStreamSrcSetupSrc): + (WebCore::webkitMediaStreamSrcAddTrack): + (WebCore::webkitMediaStreamSrcRemoveTrackByType): + (WebCore::webkitMediaStreamSrcSetStream): + +2018-11-15 Thibault Saunier + + [GStreamer][WebRTC] Add support for sending silence or silencing an incoming track + https://bugs.webkit.org/show_bug.cgi?id=191631 + + Reviewed by Xabier Rodriguez-Calvar. + + This will be tested once webkit.org/b/186933 is implemented. + + * platform/mediastream/gstreamer/RealtimeIncomingAudioSourceLibWebRTC.cpp: + (WebCore::RealtimeIncomingAudioSourceLibWebRTC::OnData): + * platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.cpp: + (WebCore::RealtimeOutgoingAudioSourceLibWebRTC::pullAudioData): + +2018-11-06 Thibault Saunier + + [GStreamer][WebRTC] Handle setting max number of frames between keyframes + https://bugs.webkit.org/show_bug.cgi?id=190682 + + Reviewed by Philippe Normand. + + That has been manually checked. + + * platform/mediastream/libwebrtc/GStreamerVideoEncoder.cpp: + (gst_webrtc_video_encoder_get_property): + (gst_webrtc_video_encoder_set_property): + (register_known_encoder): + (gst_webrtc_video_encoder_class_init): + * platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp: + (WebCore::GStreamerVideoEncoder::InitEncode): + +2018-11-06 Thibault Saunier + + [GStreamer][WebRTC] Do not try to handle framerate modulation in the encoder + https://bugs.webkit.org/show_bug.cgi?id=190683 + + Reviewed by Philippe Normand. + + This has to already be handled in the capturing pipeline or in libwebrtc itself. + + No other encoder implementation does that, and libwebrtc is not happy with encoders that do not output the exact number of frames that have been passed in. + + No regressions detected, and libwebrtc is happier this way: less warning output and no more frame corruption found in H264 streams. + + * platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp: + (WebCore::GStreamerVideoEncoder::InitEncode): + +2018-11-05 Thibault Saunier + + [GStreamer] Fix EncodedImage timestamps to match what libWebRTC expects + https://bugs.webkit.org/show_bug.cgi?id=190035 + + Reviewed by Philippe Normand. + + We can't rely on GStreamer timestamps to pass to EncodedImages after encoding + because libWebRTC doesn't use the timestamps we feed it but does + some computation on the input timestamps of the images we pass in before it passes + them back to the encoder. Then internally LibWebRTC relies on those exact timestamps + passed into the encoder to do checks and compute RTP timestamps, so we need to carefully + pass the exact timestamps to LibWebRTC (in practice we still use GStreamer timestamps in + all the GStreamer processing pipelines, as the WebRTC object basically wraps the "same" + `GstSample` all around, but we are not synced on the clock anyway).
+ + * platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp: + * platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp: + (WebCore::LibWebRTCVideoFrameFromGStreamerSample): + * platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h: + * platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp: + (WebCore::GStreamerVideoDecoder::newSampleCallback): + * platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp: + (WebCore::GStreamerVideoEncoder::GStreamerVideoEncoder): + (WebCore::GStreamerVideoEncoder::newSampleCallback): + +2018-11-05 Thibault Saunier + + [GStreamer][WebRTC] Add webrtcencoder bin to clean up and refactor the way we set encoders + https://bugs.webkit.org/show_bug.cgi?id=190674 + + Reviewed by Philippe Normand. + + webrtcencoder is a simple GstBin with a set of well-known GStreamer encoders which + it can use to implement encoding for different formats, exposing a trimmed down unified API. + + It also adds proper handling of H264 profiles. + + The added files follow GStreamer coding style as we aim at upstreaming the element + in the future. + + This is a refactoring so current tests already cover it. + + * platform/GStreamer.cmake: + * platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp: + (WebCore::MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements): + * platform/mediastream/libwebrtc/GStreamerVideoEncoder.cpp: Added. + (gst_webrtc_video_encoder_get_property): + (gst_webrtc_video_encoder_set_bitrate): + (gst_webrtc_video_encoder_set_format): + (gst_webrtc_video_encoder_set_property): + (register_known_encoder): + (setup_x264enc): + (setup_vp8enc): + (setup_openh264enc): + (set_bitrate_kbit_per_sec): + (set_bitrate_bit_per_sec): + (gst_webrtc_video_encoder_class_init): + (gst_webrtc_video_encoder_init): + * platform/mediastream/libwebrtc/GStreamerVideoEncoder.h: Added. + * platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp: + (WebCore::GStreamerVideoEncoder::GStreamerVideoEncoder): + (WebCore::GStreamerVideoEncoder::InitEncode): + (WebCore::GStreamerVideoEncoder::createEncoder): + (WebCore::GStreamerVideoEncoder::AddCodecIfSupported): + (WebCore::GStreamerVideoEncoder::ImplementationName const): + (WebCore::GStreamerVideoEncoder::SetRestrictionCaps): + +2018-11-05 Thibault Saunier + + [GStreamer][WebRTC] properly mark H.264 stream type in the "decoder" + https://bugs.webkit.org/show_bug.cgi?id=190676 + + Reviewed by Philippe Normand. + + This avoids having h264parse make assumptions (which might be wrong at some + point). + + * platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp: + (WebCore::GStreamerVideoDecoder::GetCapsForFrame): + +2018-11-05 Thibault Saunier + + [GStreamer] Do not sync libwebrtc stream on the clock + https://bugs.webkit.org/show_bug.cgi?id=190677 + + The approach here is basically to let libwebrtc do all the + synchronisation for us, and the same way as it is done in the Apple ports, + we basically try to display what libwebrtc outputs as fast as possible. + + Reviewed by Philippe Normand.
+ + Manually tested + + * platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp: + (WebCore::setSyncOnClock): + (WebCore::MediaPlayerPrivateGStreamer::syncOnClock): + (WebCore::MediaPlayerPrivateGStreamer::loadFull): + (WebCore::MediaPlayerPrivateGStreamer::load): + * platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h: + +2018-10-11 Thibault Saunier + + [GStreamer] Fix race condition in GStreamerVideoDecoder + https://bugs.webkit.org/show_bug.cgi?id=190470 + + The GStreamerVideoDecoder.m_dtsPtsMap field is accessed from + the main thread and some GStreamer streaming thread, so make sure + to protect its access. + + And use WTF::StdMap instead of std::map. + + Reviewed by Philippe Normand. + + Manually tested and a random crash is gone. + + * platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp: + (WebCore::GStreamerVideoDecoder::newSampleCallback): + +2018-10-11 Alejandro G. Castro + + [GTK][WPE] Add mediaDevices.enumerateDevices support + https://bugs.webkit.org/show_bug.cgi?id=185761 + + Reviewed by Youenn Fablet. + + We are adopting the same policy COCOA is using when returning the + list of media devices if the user does not have persistent + access. Namely, we just return the first media device for audio + and video capture. + + * Modules/mediastream/MediaDevicesRequest.cpp: + (WebCore::MediaDevicesRequest::filterDeviceList): Add support for + other platforms when filtering devices if there is no persistent + access to the origin. + +2018-09-25 Thibault Saunier + + [WPE][GTK][WebRTC] Fixup VP8 encoding support + https://bugs.webkit.org/show_bug.cgi?id=189921 + + A previous leak-fixing commit introduced a regression in + the way the encoded buffers were prepared in the default + GStreamerVideoEncoder::Fragmentize implementation (when + encoding with VP8 basically). + + + Fix a build warning in the decoder. + + Fix some wrong object member namings. + + Properly move the caps reference when setting restriction caps. + + Do not raise a GStreamer error when GStreamerVideoEncoder::OnEncodedImage + fails - this might be a network issue and other encoders do not consider that + fatal. + + Use GstMappedBuffer where appropriate. + + Reviewed by Philippe Normand. + + * platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp: + * platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp: + (WebCore::GStreamerVideoEncoder::InitEncode): + (WebCore::GStreamerVideoEncoder::newSampleCallback): + (WebCore::GStreamerVideoEncoder::Fragmentize): + (WebCore::GStreamerVideoEncoder::SetRestrictionCaps): + +2018-09-24 Thibault Saunier + + [WPE][GTK][WebRTC] Fix leaks in the libwebrtc Decoder and Encoder + https://bugs.webkit.org/show_bug.cgi?id=189835 + + Reviewed by Philippe Normand. + + - Rework memory management to avoid leaking encoded frames (basically use the same + strategy as other libwebrtc encoder implementations). + - Plug a GstCaps leak.
+ + * platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp: + * platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp: + * platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp: + (WebCore::GStreamerVideoEncoder::InitEncode): + (WebCore::GStreamerVideoEncoder::newSampleCallback): + (WebCore::GStreamerVideoEncoder::Fragmentize): + (WebCore::GStreamerVideoEncoder::SetRestrictionCaps): + 2018-10-11 Enrique Ocaña González [GStreamer][MSE] Fix height calculation for streams with source aspect ratio diff --git a/Source/WebCore/Modules/mediastream/MediaDevicesRequest.cpp b/Source/WebCore/Modules/mediastream/MediaDevicesRequest.cpp index e2c5bf70a2672..b6507b10575eb 100644 --- a/Source/WebCore/Modules/mediastream/MediaDevicesRequest.cpp +++ b/Source/WebCore/Modules/mediastream/MediaDevicesRequest.cpp @@ -81,16 +81,12 @@ void MediaDevicesRequest::contextDestroyed() void MediaDevicesRequest::filterDeviceList(Vector>& devices) { -#if !PLATFORM(COCOA) - UNUSED_PARAM(devices); -#else - #if PLATFORM(IOS) static const int defaultCameraCount = 2; -#endif -#if PLATFORM(MAC) +#else static const int defaultCameraCount = 1; #endif + static const int defaultMicrophoneCount = 1; int cameraCount = 0; @@ -103,8 +99,6 @@ void MediaDevicesRequest::filterDeviceList(Vector>& devices return false; }); - -#endif } void MediaDevicesRequest::start() diff --git a/Source/WebCore/platform/GStreamer.cmake b/Source/WebCore/platform/GStreamer.cmake index 83c91502f3f98..063b577b204ac 100644 --- a/Source/WebCore/platform/GStreamer.cmake +++ b/Source/WebCore/platform/GStreamer.cmake @@ -33,6 +33,7 @@ if (ENABLE_VIDEO OR ENABLE_WEB_AUDIO) platform/graphics/gstreamer/mse/WebKitMediaSourceGStreamer.cpp platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp + platform/mediastream/libwebrtc/GStreamerVideoEncoder.cpp platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp platform/mediastream/libwebrtc/LibWebRTCAudioModule.cpp diff --git a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h index 9691efff93820..04bfcaada04f1 100644 --- a/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h +++ b/Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h @@ -110,9 +110,92 @@ class GstMappedBuffer { bool m_isValid { false }; }; +class GstMappedFrame { + WTF_MAKE_NONCOPYABLE(GstMappedFrame); +public: + + GstMappedFrame(GstBuffer* buffer, GstVideoInfo info, GstMapFlags flags) + { + m_isValid = gst_video_frame_map(&m_frame, &info, buffer, flags); + } + + GstMappedFrame(GRefPtr sample, GstMapFlags flags) + { + GstVideoInfo info; + + if (!gst_video_info_from_caps(&info, gst_sample_get_caps(sample.get()))) { + m_isValid = false; + return; + } + + m_isValid = gst_video_frame_map(&m_frame, &info, gst_sample_get_buffer(sample.get()), flags); + } + + GstVideoFrame* get() + { + if (!m_isValid) { + GST_INFO("Invalid frame, returning NULL"); + + return nullptr; + } + + return &m_frame; + } + + uint8_t* ComponentData(int comp) + { + return GST_VIDEO_FRAME_COMP_DATA(&m_frame, comp); + } + + int ComponentStride(int stride) + { + return GST_VIDEO_FRAME_COMP_STRIDE(&m_frame, stride); + } + + GstVideoInfo* info() + { + if (!m_isValid) { + GST_INFO("Invalid frame, returning NULL"); + + return nullptr; + } + + return &m_frame.info; + } + + int width() + { + return m_isValid ? GST_VIDEO_FRAME_WIDTH(&m_frame) : -1; + } + + int height() + { + return m_isValid ? 
GST_VIDEO_FRAME_HEIGHT(&m_frame) : -1; + } + + int format() + { + return m_isValid ? GST_VIDEO_FRAME_FORMAT(&m_frame) : GST_VIDEO_FORMAT_UNKNOWN; + } + + ~GstMappedFrame() + { + if (m_isValid) + gst_video_frame_unmap(&m_frame); + m_isValid = false; + } + + explicit operator bool() const { return m_isValid; } + +private: + GstVideoFrame m_frame; + bool m_isValid { false }; +}; + + bool gstRegistryHasElementForMediaType(GList* elementFactories, const char* capsString); -void connectSimpleBusMessageCallback(GstElement *pipeline); -void disconnectSimpleBusMessageCallback(GstElement *pipeline); +void connectSimpleBusMessageCallback(GstElement* pipeline); +void disconnectSimpleBusMessageCallback(GstElement* pipeline); } diff --git a/Source/WebCore/platform/graphics/gstreamer/GUniquePtrGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/GUniquePtrGStreamer.h index 9127b6b7c424a..9ce383818d9da 100644 --- a/Source/WebCore/platform/graphics/gstreamer/GUniquePtrGStreamer.h +++ b/Source/WebCore/platform/graphics/gstreamer/GUniquePtrGStreamer.h @@ -21,11 +21,13 @@ #define GUniquePtrGStreamer_h #if USE(GSTREAMER) +#include #include #include #include #include #include +#include #include namespace WTF { @@ -36,6 +38,8 @@ WTF_DEFINE_GPTR_DELETER(GstInstallPluginsContext, gst_install_plugins_context_fr WTF_DEFINE_GPTR_DELETER(GstSegment, gst_segment_free) WTF_DEFINE_GPTR_DELETER(GstFlowCombiner, gst_flow_combiner_free) WTF_DEFINE_GPTR_DELETER(GstIterator, gst_iterator_free) +WTF_DEFINE_GPTR_DELETER(GstVideoConverter, gst_video_converter_free) +WTF_DEFINE_GPTR_DELETER(GstAudioConverter, gst_audio_converter_free) } diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp index 027ecec9f17b0..7420157104f5b 100644 --- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp +++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp @@ -109,7 +109,7 @@ void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object) g_object_set(object, "stream-properties", structure, nullptr); gst_structure_free(structure); GUniquePtr elementName(gst_element_get_name(GST_ELEMENT(object))); - GST_DEBUG("Set media.role as %s at %s", role, elementName.get()); + GST_DEBUG_OBJECT(pipeline(), "Set media.role as %s at %s", role, elementName.get()); } void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar) @@ -173,7 +173,7 @@ MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player) MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer() { - GST_DEBUG("Disposing player"); + GST_DEBUG_OBJECT(pipeline(), "Disposing player"); #if ENABLE(VIDEO_TRACK) for (auto& track : m_audioTracks.values()) @@ -245,7 +245,7 @@ void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url) m_url = URL(URL(), cleanURLString); convertToInternalProtocol(m_url); - GST_INFO("Load %s", m_url.string().utf8().data()); + GST_INFO_OBJECT(pipeline(), "Load %s", m_url.string().utf8().data()); g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr); } @@ -254,6 +254,28 @@ void MediaPlayerPrivateGStreamer::load(const String& urlString) loadFull(urlString, nullptr, String()); } +static void setSyncOnClock(GstElement *element, bool sync) +{ + if (!GST_IS_BIN(element)) { + g_object_set(element, "sync", sync, NULL); + return; + } + + GstIterator* it = gst_bin_iterate_sinks(GST_BIN(element)); + while (gst_iterator_foreach(it, 
(GstIteratorForeachFunction)([](const GValue* item, void* syncPtr) { + bool* sync = static_cast(syncPtr); + setSyncOnClock(GST_ELEMENT(g_value_get_object(item)), *sync); + }), &sync) == GST_ITERATOR_RESYNC) + gst_iterator_resync(it); + gst_iterator_free(it); +} + +void MediaPlayerPrivateGStreamer::syncOnClock(bool sync) +{ + setSyncOnClock(videoSink(), sync); + setSyncOnClock(audioSink(), sync); +} + void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const gchar* playbinName, const String& pipelineName) { @@ -276,7 +298,7 @@ void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const gchar* if (!m_pipeline) createGSTPlayBin(isMediaSource() ? "playbin" : playbinName, pipelineName); - + syncOnClock(true); if (m_fillTimer.isActive()) m_fillTimer.stop(); @@ -284,9 +306,9 @@ void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const gchar* setPlaybinURL(url); - GST_DEBUG("preload: %s", convertEnumerationToString(m_preload).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "preload: %s", convertEnumerationToString(m_preload).utf8().data()); if (m_preload == MediaPlayer::None) { - GST_INFO("Delaying load."); + GST_INFO_OBJECT(pipeline(), "Delaying load."); m_delayingLoad = true; } @@ -322,6 +344,8 @@ void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate& stream) (stream.hasCaptureVideoSource() || stream.hasCaptureAudioSource()) ? "Local" : "Remote", this); loadFull(String("mediastream://") + stream.id(), "playbin3", pipelineName); + syncOnClock(false); + #if USE(GSTREAMER_GL) ensureGLVideoSinkContext(); #endif @@ -338,7 +362,7 @@ void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate& stream) void MediaPlayerPrivateGStreamer::commitLoad() { ASSERT(!m_delayingLoad); - GST_DEBUG("Committing load."); + GST_DEBUG_OBJECT(pipeline(), "Committing load."); // GStreamer needs to have the pipeline set to a paused state to // start providing anything useful. @@ -416,7 +440,7 @@ MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const if (!GST_IS_ELEMENT(positionElement)) { g_object_get(m_pipeline.get(), "audio-sink", &positionElement, nullptr); if(!GST_IS_ELEMENT(positionElement)) { - GST_DEBUG("Returning zero time"); + GST_DEBUG_OBJECT(pipeline(), "Returning zero time"); return MediaTime::zeroTime(); } } @@ -445,7 +469,7 @@ MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const g_object_get(videoDec, videoPtsPropertyName, ¤tPts, nullptr); if (currentPts > -1) { playbackPosition = MediaTime(((currentPts * GST_MSECOND) / 45), GST_SECOND); - GST_DEBUG("Using position reported by the video decoder: %s", toString(playbackPosition).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "Using position reported by the video decoder: %s", toString(playbackPosition).utf8().data()); } if (!playbackPosition && m_seekTime.isValid()) playbackPosition = m_seekTime; @@ -468,7 +492,7 @@ GstSeekFlags MediaPlayerPrivateGStreamer::hardwareDependantSeekFlags() void MediaPlayerPrivateGStreamer::readyTimerFired() { - GST_DEBUG("In READY for too long. Releasing pipeline resources."); + GST_DEBUG_OBJECT(pipeline(), "In READY for too long. 
Releasing pipeline resources."); changePipelineState(GST_STATE_NULL); } @@ -481,12 +505,12 @@ bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState) gst_element_get_state(m_pipeline.get(), ¤tState, &pending, 0); if (currentState == newState || pending == newState) { - GST_DEBUG("Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState), + GST_DEBUG_OBJECT(pipeline(), "Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState), gst_element_state_get_name(currentState), gst_element_state_get_name(pending)); return true; } - GST_DEBUG("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState), + GST_DEBUG_OBJECT(pipeline(), "Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState), gst_element_state_get_name(currentState), gst_element_state_get_name(pending)); #if USE(GSTREAMER_GL) @@ -516,7 +540,7 @@ bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState) void MediaPlayerPrivateGStreamer::prepareToPlay() { - GST_DEBUG("Prepare to play"); + GST_DEBUG_OBJECT(pipeline(), "Prepare to play"); m_preload = MediaPlayer::Auto; if (m_delayingLoad) { m_delayingLoad = false; @@ -539,7 +563,7 @@ void MediaPlayerPrivateGStreamer::play() if (!isMediaSource()) totalBytes(); setDownloadBuffering(); - GST_INFO("Play"); + GST_INFO_OBJECT(pipeline(), "Play"); } else loadingFailed(MediaPlayer::Empty); } @@ -554,7 +578,7 @@ void MediaPlayerPrivateGStreamer::pause() if (changePipelineState(GST_STATE_PAUSED)) { m_paused = true; - GST_INFO("Pause"); + GST_INFO_OBJECT(pipeline(), "Pause"); } else loadingFailed(MediaPlayer::Empty); } @@ -574,7 +598,7 @@ MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const gint64 timeLength = 0; if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &timeLength) || !GST_CLOCK_TIME_IS_VALID(timeLength)) { - GST_DEBUG("Time duration query failed for %s", m_url.string().utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "Time duration query failed for %s", m_url.string().utf8().data()); // For some mp3 files duration query fails even at EOS. // Below is workaround for the case when we reach EOS and duration query still fails // then we return the cached position if it is valid. @@ -586,7 +610,7 @@ MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const return MediaTime::positiveInfiniteTime(); } - GST_LOG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength)); + GST_LOG_OBJECT(pipeline(), "Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength)); return MediaTime(timeLength, GST_SECOND); // FIXME: handle 3.14.9.5 properly @@ -619,7 +643,7 @@ void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime) if (m_errorOccured) return; - GST_INFO("[Seek] seek attempt to %s", toString(mediaTime).utf8().data()); + GST_INFO_OBJECT(pipeline(), "[Seek] seek attempt to %s", toString(mediaTime).utf8().data()); // Avoid useless seeking. 
if (mediaTime == currentMediaTime()) @@ -630,7 +654,7 @@ void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime) if (isLiveStream()) return; - GST_INFO("[Seek] seeking to %s", toString(time).utf8().data()); + GST_INFO_OBJECT(pipeline(), "[Seek] seeking to %s", toString(time).utf8().data()); if (m_seeking) { m_timeOfOverlappingSeek = time; @@ -644,7 +668,7 @@ void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime) GstState newState; GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0); if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) { - GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult)); + GST_DEBUG_OBJECT(pipeline(), "[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult)); return; } @@ -654,10 +678,10 @@ void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime) else if (state < GST_STATE_PAUSED) reason = "State less than PAUSED"; else if (m_isEndReached) reason = "End reached"; - GST_DEBUG("Delaying the seek: %s", reason.data()); + GST_DEBUG_OBJECT(pipeline(), "Delaying the seek: %s", reason.data()); m_seekIsPending = true; if (m_isEndReached) { - GST_DEBUG("[Seek] reset pipeline"); + GST_DEBUG_OBJECT(pipeline(), "[Seek] reset pipeline"); m_resetPipeline = true; if (!changePipelineState(GST_STATE_PAUSED)) loadingFailed(MediaPlayer::Empty); @@ -665,7 +689,7 @@ void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime) } else { // We can seek now. if (!doSeek(time, m_player->rate(), static_cast(GST_SEEK_FLAG_FLUSH | hardwareDependantSeekFlags()))) { - GST_DEBUG("[Seek] seeking to %s failed", toString(time).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(time).utf8().data()); return; } } @@ -707,12 +731,12 @@ void MediaPlayerPrivateGStreamer::updatePlaybackRate() if (!m_changingRate) return; - GST_INFO("Set Rate to %f", m_playbackRate); + GST_INFO_OBJECT(pipeline(), "Set Rate to %f", m_playbackRate); // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted. bool mute = m_playbackRate <= 0 || (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2)); - GST_INFO(mute ? "Need to mute audio" : "Do not need to mute audio"); + GST_INFO_OBJECT(pipeline(), mute ? 
"Need to mute audio" : "Do not need to mute audio"); if (doSeek(playbackPosition(), m_playbackRate, static_cast(GST_SEEK_FLAG_FLUSH | hardwareDependantSeekFlags()))) { g_object_set(m_pipeline.get(), "mute", mute, nullptr); @@ -739,12 +763,12 @@ void MediaPlayerPrivateGStreamer::updatePlaybackRate() bool MediaPlayerPrivateGStreamer::paused() const { if (m_isEndReached) { - GST_DEBUG("Ignoring pause at EOS"); + GST_DEBUG_OBJECT(pipeline(), "Ignoring pause at EOS"); return true; } if (m_playbackRatePause) { - GST_DEBUG("Playback rate is 0, simulating PAUSED state"); + GST_DEBUG_OBJECT(pipeline(), "Playback rate is 0, simulating PAUSED state"); return false; } @@ -755,7 +779,7 @@ bool MediaPlayerPrivateGStreamer::paused() const state = pending; bool paused = state <= GST_STATE_PAUSED; - GST_DEBUG("Paused: %s", toString(paused).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "Paused: %s", toString(paused).utf8().data()); return state <= GST_STATE_PAUSED; } @@ -928,7 +952,7 @@ void MediaPlayerPrivateGStreamer::enableTrack(TrackPrivateBaseGStreamer::TrackTy ASSERT_NOT_REACHED(); } - GST_INFO("Enabling %s track with index: %u", trackTypeAsString, index); + GST_INFO_OBJECT(pipeline(), "Enabling %s track with index: %u", trackTypeAsString, index); if (m_isLegacyPlaybin) g_object_set(m_pipeline.get(), propertyName, index, nullptr); #if GST_CHECK_VERSION(1, 10, 0) @@ -964,7 +988,7 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo() GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get(); g_object_get(element, "n-video", &numTracks, nullptr); - GST_DEBUG("Stream has %d video tracks", numTracks); + GST_DEBUG_OBJECT(pipeline(), "Stream has %d video tracks", numTracks); bool oldHasVideo = m_hasVideo; m_hasVideo = numTracks > 0; @@ -975,7 +999,7 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo() m_player->sizeChanged(); if (useMediaSource) { - GST_DEBUG("Tracks managed by source element. Bailing out now."); + GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now."); m_player->client().mediaPlayerEngineUpdated(m_player); return; } @@ -1043,14 +1067,14 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio() GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get(); g_object_get(element, "n-audio", &numTracks, nullptr); - GST_DEBUG("Stream has %d audio tracks", numTracks); + GST_DEBUG_OBJECT(pipeline(), "Stream has %d audio tracks", numTracks); bool oldHasAudio = m_hasAudio; m_hasAudio = numTracks > 0; if (oldHasAudio != m_hasAudio) m_player->characteristicChanged(); if (useMediaSource) { - GST_DEBUG("Tracks managed by source element. Bailing out now."); + GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now."); m_player->client().mediaPlayerEngineUpdated(m_player); return; } @@ -1105,10 +1129,10 @@ void MediaPlayerPrivateGStreamer::notifyPlayerOfText() GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get(); g_object_get(element, "n-text", &numTracks, nullptr); - GST_INFO("Media has %d text tracks", numTracks); + GST_INFO_OBJECT(pipeline(), "Media has %d text tracks", numTracks); if (useMediaSource) { - GST_DEBUG("Tracks managed by source element. Bailing out now."); + GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now."); return; } @@ -1296,13 +1320,13 @@ void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message) // We ignore state changes from internal elements. They are forwarded to playbin2 anyway. 
bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get()); - GST_LOG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message)); + GST_LOG_OBJECT(pipeline(), "Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message)); switch (GST_MESSAGE_TYPE(message)) { case GST_MESSAGE_ERROR: if (m_resetPipeline || !m_missingPluginCallbacks.isEmpty() || m_errorOccured) break; gst_message_parse_error(message, &err.outPtr(), &debug.outPtr()); - GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data()); + GST_ERROR_OBJECT(pipeline(), "Error %d: %s (url=%s) - %s", err->code, err->message, m_url.string().utf8().data(), debug.get()); GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error"); @@ -1373,11 +1397,11 @@ void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message) CString dotFileName = String::format("%s.%s_%s", GST_OBJECT_NAME(m_pipeline.get()), gst_element_state_get_name(currentState), gst_element_state_get_name(newState)).utf8(); GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data()); - GST_INFO("Playbin changed %s --> %s", gst_element_state_get_name(currentState), gst_element_state_get_name(newState)); + GST_INFO_OBJECT(pipeline(), "Playbin changed %s --> %s", gst_element_state_get_name(currentState), gst_element_state_get_name(newState)); break; } case GST_MESSAGE_BUFFERING: - GST_DEBUG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message)); + GST_DEBUG_OBJECT(pipeline(), "Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message)); processBufferingStats(message); break; case GST_MESSAGE_DURATION_CHANGED: @@ -1390,7 +1414,7 @@ void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message) gst_message_parse_request_state(message, &requestedState); gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND); if (requestedState < currentState) { - GST_INFO("Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message), + GST_INFO_OBJECT(pipeline(), "Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message), gst_element_state_get_name(requestedState)); m_requestedState = requestedState; if (!changePipelineState(requestedState)) @@ -1450,12 +1474,12 @@ void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message) #endif #if 0 && ENABLE(ENCRYPTED_MEDIA) else if (gst_structure_has_name(structure, "drm-key-needed")) { - GST_DEBUG("drm-key-needed message from %s", GST_MESSAGE_SRC_NAME(message)); + GST_DEBUG_OBJECT(pipeline(), "drm-key-needed message from %s", GST_MESSAGE_SRC_NAME(message)); GRefPtr<GstEvent> event; gst_structure_get(structure, "event", GST_TYPE_EVENT, &event.outPtr(), nullptr); handleProtectionEvent(event.get()); } else if (gst_structure_has_name(structure, "decrypt-key-needed")) { - GST_DEBUG("decrypt-key-needed message from %s", GST_MESSAGE_SRC_NAME(message)); + GST_DEBUG_OBJECT(pipeline(), "decrypt-key-needed message from %s", GST_MESSAGE_SRC_NAME(message)); MediaPlayerPrivateGStreamerBase::dispatchCDMInstance(); } #endif @@ -1463,7 +1487,7 @@ void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message) GstStructure* responseHeaders; if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders, nullptr)) { if (!gst_structure_has_field(responseHeaders,
httpHeaderNameString(HTTPHeaderName::ContentLength).utf8().data())) { - GST_INFO("Live stream detected. Disabling on-disk buffering"); + GST_INFO_OBJECT(pipeline(), "Live stream detected. Disabling on-disk buffering"); m_isStreaming = true; setDownloadBuffering(); } @@ -1474,7 +1498,7 @@ void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message) GUniqueOutPtr uri; GstClockTime time; gst_structure_get(structure, "uri", G_TYPE_STRING, &uri.outPtr(), "fragment-download-time", GST_TYPE_CLOCK_TIME, &time, nullptr); - GST_TRACE("Fragment %s download time %" GST_TIME_FORMAT, uri.get(), GST_TIME_ARGS(time)); + GST_TRACE_OBJECT(pipeline(), "Fragment %s download time %" GST_TIME_FORMAT, uri.get(), GST_TIME_ARGS(time)); } else if (gst_structure_has_name(structure, "GstCacheDownloadComplete")) { m_downloadFinished = true; m_buffering = false; @@ -1482,18 +1506,18 @@ void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message) } #if ENABLE(ENCRYPTED_MEDIA) else if (gst_structure_has_name(structure, "drm-initialization-data-encountered")) { - GST_DEBUG("drm-initialization-data-encountered message from %s", GST_MESSAGE_SRC_NAME(message)); + GST_DEBUG_OBJECT(pipeline(), "drm-initialization-data-encountered message from %s", GST_MESSAGE_SRC_NAME(message)); handleProtectionStructure(structure); } else if (gst_structure_has_name(structure, "drm-waiting-for-key")) { - GST_DEBUG("drm-waiting-for-key message from %s", GST_MESSAGE_SRC_NAME(message)); + GST_DEBUG_OBJECT(pipeline(), "drm-waiting-for-key message from %s", GST_MESSAGE_SRC_NAME(message)); m_player->waitingForKey(); } else if (gst_structure_has_name(structure, "drm-cdm-instance-needed")) { - GST_DEBUG("drm-cdm-instance-needed message from %s", GST_MESSAGE_SRC_NAME(message)); + GST_DEBUG_OBJECT(pipeline(), "drm-cdm-instance-needed message from %s", GST_MESSAGE_SRC_NAME(message)); dispatchLocalCDMInstance(); } #endif else - GST_DEBUG("Unhandled element message: %" GST_PTR_FORMAT, structure); + GST_DEBUG_OBJECT(pipeline(), "Unhandled element message: %" GST_PTR_FORMAT, structure); } break; #if ENABLE(VIDEO_TRACK) @@ -1538,7 +1562,7 @@ void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message) GstStreamType type = gst_stream_get_stream_type(stream.get()); String streamId(gst_stream_get_stream_id(stream.get())); - GST_DEBUG("Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data()); // Playbin3 can send more than one selected stream of the same type // but there's no priority or ordering system in place, so we assume // the selected stream is the last one as reported by playbin3. 
@@ -1561,7 +1585,7 @@ void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message) } #endif default: - GST_DEBUG("Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message)); + GST_DEBUG_OBJECT(pipeline(), "Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message)); break; } } @@ -1571,7 +1595,7 @@ void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message) m_buffering = true; gst_message_parse_buffering(message, &m_bufferingPercentage); - GST_DEBUG("[Buffering] Buffering: %d%%.", m_bufferingPercentage); + GST_DEBUG_OBJECT(pipeline(), "[Buffering] Buffering: %d%%.", m_bufferingPercentage); if (m_bufferingPercentage == 100) updateStates(); @@ -1756,7 +1780,7 @@ void MediaPlayerPrivateGStreamer::fillTimerFired() if (stop != -1) fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX; - GST_DEBUG("[Buffering] Download buffer filled up to %f%%", fillStatus); + GST_DEBUG_OBJECT(pipeline(), "[Buffering] Download buffer filled up to %f%%", fillStatus); MediaTime mediaDuration = durationMediaTime(); @@ -1767,7 +1791,7 @@ void MediaPlayerPrivateGStreamer::fillTimerFired() m_maxTimeLoaded = mediaDuration; else m_maxTimeLoaded = MediaTime(fillStatus * static_cast(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND); - GST_DEBUG("[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data()); } m_downloadFinished = fillStatus == 100.0; @@ -1789,7 +1813,7 @@ MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const return MediaTime::zeroTime(); MediaTime duration = durationMediaTime(); - GST_DEBUG("maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data()); // infinite duration means live stream if (duration.isPositiveInfinite()) return MediaTime::zeroTime(); @@ -1810,7 +1834,7 @@ MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const loaded = m_durationAtEOS; } if (m_isEndReached && m_durationAtEOS.isValid() && m_durationAtEOS) { - GST_DEBUG("maxTimeLoaded at EOS: %s", toString(loaded).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "maxTimeLoaded at EOS: %s", toString(loaded).utf8().data()); loaded = m_durationAtEOS; } GST_TRACE("maxTimeLoaded: %s", toString(loaded).utf8().data()); @@ -1831,7 +1855,7 @@ bool MediaPlayerPrivateGStreamer::didLoadingProgress() const MediaTime currentMaxTimeLoaded = maxTimeLoaded(); bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress; m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded; - GST_LOG("didLoadingProgress: %s", toString(didLoadingProgress).utf8().data()); + GST_LOG_OBJECT(pipeline(), "didLoadingProgress: %s", toString(didLoadingProgress).utf8().data()); return didLoadingProgress; } @@ -1852,7 +1876,7 @@ unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const GstFormat fmt = GST_FORMAT_BYTES; gint64 length = 0; if (gst_element_query_duration(m_source.get(), fmt, &length)) { - GST_INFO("totalBytes %" G_GINT64_FORMAT, length); + GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length); m_totalBytes = static_cast(length); m_isStreaming = !length; return m_totalBytes; @@ -1887,7 +1911,7 @@ unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const gst_iterator_free(iter); - GST_INFO("totalBytes %" G_GINT64_FORMAT, length); + GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, 
length); m_totalBytes = static_cast(length); m_isStreaming = !length; return m_totalBytes; @@ -1916,7 +1940,7 @@ void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GUniquePtr newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, mediaDiskCachePath, "WebKit-Media-XXXXXX", nullptr)); g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr); - GST_DEBUG("Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get()); + GST_DEBUG_OBJECT(player->pipeline(), "Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get()); player->purgeOldDownloadFiles(oldDownloadTemplate.get()); } @@ -1936,7 +1960,7 @@ void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerP return; } - GST_DEBUG("Unlinked media temporary file %s after creation", downloadFile.get()); + GST_DEBUG_OBJECT(player->pipeline(), "Unlinked media temporary file %s after creation", downloadFile.get()); } void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate) @@ -1960,7 +1984,7 @@ void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFile void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement) { - GST_DEBUG("Source element set-up for %s", GST_ELEMENT_NAME(sourceElement)); + GST_DEBUG_OBJECT(pipeline(), "Source element set-up for %s", GST_ELEMENT_NAME(sourceElement)); if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get())) g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast(uriDecodeBinElementAddedCallback), this); @@ -2023,7 +2047,7 @@ void MediaPlayerPrivateGStreamer::asyncStateChangeDone() if (m_seekIsPending) updateStates(); else { - GST_DEBUG("[Seek] seeked to %s", toString(m_seekTime).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "[Seek] seeked to %s", toString(m_seekTime).utf8().data()); m_seeking = false; m_cachedPosition = MediaTime::invalidTime(); if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) { @@ -2066,7 +2090,7 @@ void MediaPlayerPrivateGStreamer::updateStates() bool shouldUpdatePlaybackState = false; switch (getStateResult) { case GST_STATE_CHANGE_SUCCESS: { - GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending)); + GST_DEBUG_OBJECT(pipeline(), "State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending)); // Do nothing if on EOS and state changed to READY to avoid recreating the player // on HTMLMediaElement and properly generate the video 'ended' event. @@ -2091,14 +2115,14 @@ void MediaPlayerPrivateGStreamer::updateStates() case GST_STATE_PLAYING: if (m_buffering) { if (m_bufferingPercentage == 100) { - GST_DEBUG("[Buffering] Complete."); + GST_DEBUG_OBJECT(pipeline(), "[Buffering] Complete."); m_buffering = false; m_readyState = MediaPlayer::HaveEnoughData; m_networkState = m_downloadFinished ? 
MediaPlayer::Idle : MediaPlayer::Loading; if (!m_fillTimer.isActive() && (state == GST_STATE_PAUSED)) m_networkState = MediaPlayer::Idle; } else { - GST_DEBUG("[Buffering] Stream still downloading."); + GST_DEBUG_OBJECT(pipeline(), "[Buffering] Stream still downloading."); m_readyState = MediaPlayer::HaveCurrentData; m_networkState = MediaPlayer::Loading; } @@ -2127,7 +2151,7 @@ void MediaPlayerPrivateGStreamer::updateStates() } if (didBuffering && !m_buffering && !m_paused && m_playbackRate) { - GST_DEBUG("[Buffering] Restarting playback."); + GST_DEBUG_OBJECT(pipeline(), "[Buffering] Restarting playback."); changePipelineState(GST_STATE_PLAYING); } } else if (m_currentState == GST_STATE_PLAYING) { @@ -2137,10 +2161,10 @@ void MediaPlayerPrivateGStreamer::updateStates() } else m_paused = true; - GST_DEBUG("Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState)); + GST_DEBUG_OBJECT(pipeline(), "Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState)); if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) { shouldUpdatePlaybackState = true; - GST_INFO("Requested state change to %s was completed", gst_element_state_get_name(m_currentState)); + GST_INFO_OBJECT(pipeline(), "Requested state change to %s was completed", gst_element_state_get_name(m_currentState)); } // Emit play state change notification only when going to PLAYING so that @@ -2148,22 +2172,22 @@ void MediaPlayerPrivateGStreamer::updateStates() // Emitting this notification in more cases triggers unwanted code paths // and test timeouts. if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) { - GST_INFO("Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState)); + GST_INFO_OBJECT(pipeline(), "Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState)); shouldUpdatePlaybackState = true; } break; } case GST_STATE_CHANGE_ASYNC: - GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending)); + GST_DEBUG_OBJECT(pipeline(), "Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending)); // Change in progress. break; case GST_STATE_CHANGE_FAILURE: - GST_DEBUG("Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending)); + GST_DEBUG_OBJECT(pipeline(), "Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending)); // Change failed return; case GST_STATE_CHANGE_NO_PREROLL: - GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending)); + GST_DEBUG_OBJECT(pipeline(), "No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending)); // Live pipelines go in PAUSED without prerolling. 
m_isStreaming = true; @@ -2183,7 +2207,7 @@ void MediaPlayerPrivateGStreamer::updateStates() m_networkState = MediaPlayer::Loading; break; default: - GST_DEBUG("Else : %d", getStateResult); + GST_DEBUG_OBJECT(pipeline(), "Else : %d", getStateResult); break; } @@ -2193,23 +2217,23 @@ void MediaPlayerPrivateGStreamer::updateStates() m_player->playbackStateChanged(); if (m_networkState != oldNetworkState) { - GST_DEBUG("Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data()); m_player->networkStateChanged(); } if (m_readyState != oldReadyState) { - GST_DEBUG("Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data()); m_player->readyStateChanged(); } if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) { updatePlaybackRate(); if (m_seekIsPending) { - GST_DEBUG("[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data()); m_seekIsPending = false; m_seeking = doSeek(m_seekTime, m_player->rate(), static_cast(GST_SEEK_FLAG_FLUSH | hardwareDependantSeekFlags())); if (!m_seeking) { m_cachedPosition = MediaTime::invalidTime(); - GST_DEBUG("[Seek] seeking to %s failed", toString(m_seekTime).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(m_seekTime).utf8().data()); } } } @@ -2296,7 +2320,7 @@ bool MediaPlayerPrivateGStreamer::loadNextLocation() RefPtr securityOrigin = SecurityOrigin::create(m_url); if (securityOrigin->canRequest(newUrl)) { - GST_INFO("New media url: %s", newUrl.string().utf8().data()); + GST_INFO_OBJECT(pipeline(), "New media url: %s", newUrl.string().utf8().data()); // Reset player states. m_networkState = MediaPlayer::Loading; @@ -2317,7 +2341,7 @@ bool MediaPlayerPrivateGStreamer::loadNextLocation() return true; } } else - GST_INFO("Not allowed to load new media location: %s", newUrl.string().utf8().data()); + GST_INFO_OBJECT(pipeline(), "Not allowed to load new media location: %s", newUrl.string().utf8().data()); } m_mediaLocationCurrentIndex--; return false; @@ -2336,7 +2360,7 @@ void MediaPlayerPrivateGStreamer::timeChanged() void MediaPlayerPrivateGStreamer::didEnd() { - GST_INFO("Playback ended"); + GST_INFO_OBJECT(pipeline(), "Playback ended"); // Synchronize position and duration values to not confuse the // HTMLMediaElement. 
In some cases like reverse playback the @@ -2362,7 +2386,7 @@ void MediaPlayerPrivateGStreamer::durationChanged() MediaTime newDuration = durationMediaTime(); if (newDuration != m_previousDuration) { - GST_DEBUG("Triggering durationChanged"); + GST_DEBUG_OBJECT(pipeline(), "Triggering durationChanged"); m_player->durationChanged(); } m_previousDuration = newDuration; @@ -2590,11 +2614,11 @@ void MediaPlayerPrivateGStreamer::setDownloadBuffering() bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto && !isMediaDiskCacheDisabled(); if (shouldDownload) { - GST_INFO("Enabling on-disk buffering"); + GST_INFO_OBJECT(pipeline(), "Enabling on-disk buffering"); g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr); m_fillTimer.startRepeating(200_ms); } else { - GST_INFO("Disabling on-disk buffering"); + GST_INFO_OBJECT(pipeline(), "Disabling on-disk buffering"); g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr); m_fillTimer.stop(); } @@ -2602,7 +2626,7 @@ void MediaPlayerPrivateGStreamer::setDownloadBuffering() void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload) { - GST_DEBUG("Setting preload to %s", convertEnumerationToString(preload).utf8().data()); + GST_DEBUG_OBJECT(pipeline(), "Setting preload to %s", convertEnumerationToString(preload).utf8().data()); if (preload == MediaPlayer::Auto && isLiveStream()) return; @@ -2744,7 +2768,7 @@ void MediaPlayerPrivateGStreamer::createGSTPlayBin(const gchar* playbinName, con pipelineName.isEmpty() ? String::format("play_%p", this).utf8().data() : pipelineName.utf8().data())); setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get())); - GST_INFO("Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin)); + GST_INFO_OBJECT(pipeline(), "Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin)); #if ENABLE(TEXT_SINK) unsigned flagText = getGstPlayFlag("text"); diff --git a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h index f41d250dfb9c4..ad5b545491daf 100644 --- a/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h +++ b/Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h @@ -140,6 +140,8 @@ class MediaPlayerPrivateGStreamer : public MediaPlayerPrivateGStreamerBase { static bool isAvailable(); + void syncOnClock(bool sync); + GstElement* createAudioSink() override; MediaTime playbackPosition() const; diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerAudioCapturer.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerAudioCapturer.cpp index 64157ef70a6a0..8df758a109fee 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerAudioCapturer.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerAudioCapturer.cpp @@ -40,6 +40,16 @@ GStreamerAudioCapturer::GStreamerAudioCapturer() { } +GstElement* GStreamerAudioCapturer::createSource() +{ + GstElement* source = GStreamerCapturer::createSource(); + + if (!m_device) + gst_util_set_object_arg(G_OBJECT(m_src.get()), "wave", "ticks"); + + return source; +} + GstElement* GStreamerAudioCapturer::createConverter() { auto converter = gst_parse_bin_from_description("audioconvert ! 
audioresample", TRUE, nullptr); diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerAudioCapturer.h b/Source/WebCore/platform/mediastream/gstreamer/GStreamerAudioCapturer.h index 9aaf761d6c4b4..7b279ee344e5b 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerAudioCapturer.h +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerAudioCapturer.h @@ -32,6 +32,7 @@ class GStreamerAudioCapturer : public GStreamerCapturer { GStreamerAudioCapturer(GStreamerCaptureDevice); GStreamerAudioCapturer(); + GstElement* createSource() final; GstElement* createConverter() final; const char* name() final { return "Audio"; } diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.cpp index 3b78287e7aaf5..c7cf33b8bd6d9 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.cpp @@ -28,6 +28,26 @@ namespace WebCore { +static gint sortDevices(gconstpointer a, gconstpointer b) +{ + GstDevice* adev = GST_DEVICE(a), *bdev = GST_DEVICE(b); + GUniquePtr aprops(gst_device_get_properties(adev)); + GUniquePtr bprops(gst_device_get_properties(bdev)); + gboolean aIsDefault = FALSE, bIsDefault = FALSE; + + gst_structure_get_boolean(aprops.get(), "is-default", &aIsDefault); + gst_structure_get_boolean(bprops.get(), "is-default", &bIsDefault); + + if (aIsDefault == bIsDefault) { + GUniquePtr aName(gst_device_get_display_name(adev)); + GUniquePtr bName(gst_device_get_display_name(bdev)); + + return g_strcmp0(aName.get(), bName.get()); + } + + return aIsDefault > bIsDefault ? -1 : 1; +} + GStreamerAudioCaptureDeviceManager& GStreamerAudioCaptureDeviceManager::singleton() { static NeverDestroyed manager; @@ -66,7 +86,7 @@ const Vector& GStreamerCaptureDeviceManager::captureDevices() return m_devices; } -void GStreamerCaptureDeviceManager::deviceAdded(GRefPtr&& device) +void GStreamerCaptureDeviceManager::addDevice(GRefPtr&& device) { GUniquePtr properties(gst_device_get_properties(device.get())); const char* klass = gst_structure_get_string(properties.get(), "device.class"); @@ -85,7 +105,10 @@ void GStreamerCaptureDeviceManager::deviceAdded(GRefPtr&& device) // This isn't really a UID but should be good enough (libwebrtc // itself does that at least for pulseaudio devices). GUniquePtr deviceName(gst_device_get_display_name(device.get())); - String identifier = String::fromUTF8(deviceName.get()); + gboolean isDefault = FALSE; + gst_structure_get_boolean(properties.get(), "is-default", &isDefault); + + String identifier = String::format("%s%s", isDefault ? "default: " : "", deviceName.get()); auto gstCaptureDevice = GStreamerCaptureDevice(WTFMove(device), identifier, type, identifier); gstCaptureDevice.setEnabled(true); @@ -108,10 +131,11 @@ void GStreamerCaptureDeviceManager::refreshCaptureDevices() } else if (type == CaptureDevice::DeviceType::Microphone) { GRefPtr caps = adoptGRef(gst_caps_new_empty_simple("audio/x-raw")); gst_device_monitor_add_filter(m_deviceMonitor.get(), "Audio/Source", caps.get()); - } - // FIXME: Add monitor for added/removed messages on the bus. + } else + return; } + // FIXME: Add monitor for added/removed messages on the bus. 
if (!gst_device_monitor_start(m_deviceMonitor.get())) { GST_WARNING_OBJECT(m_deviceMonitor.get(), "Could not start device monitor"); m_deviceMonitor = nullptr; @@ -119,10 +143,11 @@ void GStreamerCaptureDeviceManager::refreshCaptureDevices() return; } - GList* devices = gst_device_monitor_get_devices(m_deviceMonitor.get()); + GList* devices = g_list_sort(gst_device_monitor_get_devices(m_deviceMonitor.get()), sortDevices); while (devices) { GRefPtr device = adoptGRef(GST_DEVICE_CAST(devices->data)); - deviceAdded(WTFMove(device)); + + addDevice(WTFMove(device)); devices = g_list_delete_link(devices, devices); } diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.h b/Source/WebCore/platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.h index b1fd7828d41ff..76fbe24a874c5 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.h +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerCaptureDeviceManager.h @@ -37,7 +37,7 @@ class GStreamerCaptureDeviceManager : public CaptureDeviceManager { virtual CaptureDevice::DeviceType deviceType() = 0; private: - void deviceAdded(GRefPtr&&); + void addDevice(GRefPtr&&); void refreshCaptureDevices(); GRefPtr m_deviceMonitor; diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.cpp index 92bfa415a2d62..2933eea450c36 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.cpp @@ -155,13 +155,6 @@ void GStreamerCapturer::addSink(GstElement* newSink) return; } - if (newSink == m_sink.get()) { - GST_INFO_OBJECT(m_pipeline.get(), "Setting queue as leaky upstream" - " so that the player can set the sink to PAUSED without " - " setting the whole capturer to PAUSED"); - g_object_set(queue, "leaky", 2 /* upstream */, nullptr); - } - GST_INFO_OBJECT(pipeline(), "Adding sink: %" GST_PTR_FORMAT, newSink); GUniquePtr dumpName(g_strdup_printf("%s_sink_%s_added", GST_OBJECT_NAME(pipeline()), GST_OBJECT_NAME(newSink))); diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.h b/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.h index 7bfcedad801df..bbe5ae73954ae 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.h +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerCapturer.h @@ -43,7 +43,7 @@ class GStreamerCapturer { GstCaps* caps(); void addSink(GstElement *newSink); GstElement* makeElement(const char* factoryName); - GstElement* createSource(); + virtual GstElement* createSource(); GstElement* source() { return m_src.get(); } virtual const char* name() = 0; diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp index ffdae4eb508ee..2513ca94be9c7 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.cpp @@ -41,6 +41,8 @@ namespace WebCore { static void webkitMediaStreamSrcPushVideoSample(WebKitMediaStreamSrc* self, GstSample* gstsample); static void webkitMediaStreamSrcPushAudioSample(WebKitMediaStreamSrc* self, GstSample* gstsample); static void webkitMediaStreamSrcTrackEnded(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate&); +static void webkitMediaStreamSrcAddTrack(WebKitMediaStreamSrc* self, 
MediaStreamTrackPrivate*); +static void webkitMediaStreamSrcRemoveTrackByType(WebKitMediaStreamSrc* self, RealtimeMediaSource::Type trackType); static GstStaticPadTemplate videoSrcTemplate = GST_STATIC_PAD_TEMPLATE("video_src", GST_PAD_SRC, @@ -93,7 +95,7 @@ GstStream* webkitMediaStreamNew(MediaStreamTrackPrivate* track) caps = adoptGRef(gst_static_pad_template_get_caps(&videoSrcTemplate)); type = GST_STREAM_TYPE_VIDEO; } else { - GST_FIXME("Handle %d type", (gint) track->type()); + GST_FIXME("Handle %d type", static_cast(track->type())); return nullptr; } @@ -142,6 +144,30 @@ class WebKitMediaStreamTrackObserver WebKitMediaStreamSrc* m_mediaStreamSrc; }; +class WebKitMediaStreamObserver + : public MediaStreamPrivate::Observer { +public: + virtual ~WebKitMediaStreamObserver() { }; + WebKitMediaStreamObserver(WebKitMediaStreamSrc* src) + : m_mediaStreamSrc(src) { } + + void characteristicsChanged() final { GST_DEBUG_OBJECT(m_mediaStreamSrc.get(), "renegotiation should happen"); } + void activeStatusChanged() final { } + + void didAddTrack(MediaStreamTrackPrivate& track) final + { + webkitMediaStreamSrcAddTrack(m_mediaStreamSrc.get(), &track); + } + + void didRemoveTrack(MediaStreamTrackPrivate& track) final + { + webkitMediaStreamSrcRemoveTrackByType(m_mediaStreamSrc.get(), track.type()); + } + +private: + GRefPtr m_mediaStreamSrc; +}; + typedef struct _WebKitMediaStreamSrcClass WebKitMediaStreamSrcClass; struct _WebKitMediaStreamSrc { GstBin parent_instance; @@ -153,8 +179,8 @@ struct _WebKitMediaStreamSrc { GstElement* videoSrc; GstClockTime firstFramePts; - std::unique_ptr observer; - String videoTrackID; + std::unique_ptr mediaStreamTrackObserver; + std::unique_ptr mediaStreamObserver; volatile gint npads; gulong probeid; RefPtr stream; @@ -267,8 +293,13 @@ static void webkitMediaStreamSrcFinalize(GObject* object) WebKitMediaStreamSrc* self = WEBKIT_MEDIA_STREAM_SRC(object); GST_OBJECT_LOCK(self); - for (auto& track : self->stream->tracks()) - track->removeObserver(*self->observer.get()); + if (self->stream) { + for (auto& track : self->stream->tracks()) + track->removeObserver(*self->mediaStreamTrackObserver.get()); + + self->stream->removeObserver(*self->mediaStreamObserver); + self->stream = nullptr; + } GST_OBJECT_UNLOCK(self); g_clear_pointer(&self->uri, g_free); @@ -284,7 +315,7 @@ static GstStateChangeReturn webkitMediaStreamSrcChangeState(GstElement* element, GST_OBJECT_LOCK(self); for (auto& track : self->stream->tracks()) - track->removeObserver(*self->observer.get()); + track->removeObserver(*self->mediaStreamTrackObserver.get()); GST_OBJECT_UNLOCK(self); } @@ -320,7 +351,8 @@ static void webkit_media_stream_src_class_init(WebKitMediaStreamSrcClass* klass) static void webkit_media_stream_src_init(WebKitMediaStreamSrc* self) { - self->observer = std::make_unique(self); + self->mediaStreamTrackObserver = std::make_unique(self); + self->mediaStreamObserver = std::make_unique(self); self->flowCombiner = gst_flow_combiner_new(); self->firstAudioBufferPts = GST_CLOCK_TIME_NONE; self->firstFramePts = GST_CLOCK_TIME_NONE; @@ -421,7 +453,7 @@ static gboolean webkitMediaStreamSrcSetupSrc(WebKitMediaStreamSrc* self, }); if (observe_track) - track->addObserver(*self->observer.get()); + track->addObserver(*self->mediaStreamTrackObserver.get()); gst_element_sync_state_with_parent(element); return TRUE; @@ -452,37 +484,47 @@ static void webkitMediaStreamSrcPostStreamCollection(WebKitMediaStreamSrc* self, gst_message_new_stream_collection(GST_OBJECT(self), self->streamCollection.get())); } 
-gboolean webkitMediaStreamSrcSetStream(WebKitMediaStreamSrc* self, MediaStreamPrivate* stream) +static void webkitMediaStreamSrcAddTrack(WebKitMediaStreamSrc* self, MediaStreamTrackPrivate* track) { - g_return_val_if_fail(WEBKIT_IS_MEDIA_STREAM_SRC(self), FALSE); + if (track->type() == RealtimeMediaSource::Type::Audio) + webkitMediaStreamSrcSetupAppSrc(self, track, &self->audioSrc, &audioSrcTemplate); + else if (track->type() == RealtimeMediaSource::Type::Video) + webkitMediaStreamSrcSetupAppSrc(self, track, &self->videoSrc, &videoSrcTemplate); + else + GST_INFO("Unsupported track type: %d", static_cast(track->type())); +} - if (self->audioSrc) { - gst_element_set_state(self->audioSrc, GST_STATE_NULL); - gst_bin_remove(GST_BIN(self), self->audioSrc); - self->audioSrc = nullptr; - } +static void webkitMediaStreamSrcRemoveTrackByType(WebKitMediaStreamSrc* self, RealtimeMediaSource::Type trackType) +{ + if (trackType == RealtimeMediaSource::Type::Audio) { + if (self->audioSrc) { + gst_element_set_state(self->audioSrc, GST_STATE_NULL); + gst_bin_remove(GST_BIN(self), self->audioSrc); + self->audioSrc = nullptr; + } + } else if (trackType == RealtimeMediaSource::Type::Video) { + if (self->videoSrc) { + gst_element_set_state(self->videoSrc, GST_STATE_NULL); + gst_bin_remove(GST_BIN(self), self->videoSrc); + self->videoSrc = nullptr; + } + } else + GST_INFO("Unsupported track type: %d", static_cast(trackType)); +} - if (self->videoSrc) { - gst_element_set_state(self->videoSrc, GST_STATE_NULL); - gst_bin_remove(GST_BIN(self), self->videoSrc); - self->videoSrc = nullptr; - } +bool webkitMediaStreamSrcSetStream(WebKitMediaStreamSrc* self, MediaStreamPrivate* stream) +{ + ASSERT(WEBKIT_IS_MEDIA_STREAM_SRC(self)); + + webkitMediaStreamSrcRemoveTrackByType(self, RealtimeMediaSource::Type::Audio); + webkitMediaStreamSrcRemoveTrackByType(self, RealtimeMediaSource::Type::Video); webkitMediaStreamSrcPostStreamCollection(self, stream); self->stream = stream; - for (auto& track : stream->tracks()) { - if (track->type() == RealtimeMediaSource::Type::Audio) { - webkitMediaStreamSrcSetupAppSrc(self, track.get(), &self->audioSrc, - &audioSrcTemplate); - } else if (track->type() == RealtimeMediaSource::Type::Video) { - webkitMediaStreamSrcSetupAppSrc(self, track.get(), &self->videoSrc, - &videoSrcTemplate); - } else { - GST_INFO("Unsuported track type: %d", (gint) track->type()); - continue; - } - } + self->stream->addObserver(*self->mediaStreamObserver.get()); + for (auto& track : stream->tracks()) + webkitMediaStreamSrcAddTrack(self, track.get()); return TRUE; } diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.h b/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.h index eb4112e707f57..1599bae786271 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.h +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerMediaStreamSource.h @@ -40,7 +40,7 @@ typedef struct _WebKitMediaStreamSrc WebKitMediaStreamSrc; #define WEBKIT_IS_MEDIA_STREAM_SRC(o) (G_TYPE_CHECK_INSTANCE_TYPE((o), WEBKIT_TYPE_MEDIA_STREAM_SRC)) #define WEBKIT_TYPE_MEDIA_STREAM_SRC (webkit_media_stream_src_get_type()) GType webkit_media_stream_src_get_type(void) G_GNUC_CONST; -gboolean webkitMediaStreamSrcSetStream(WebKitMediaStreamSrc*, MediaStreamPrivate*); +bool webkitMediaStreamSrcSetStream(WebKitMediaStreamSrc*, MediaStreamPrivate*); } // WebCore #endif // ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) diff --git 
a/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCaptureSource.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCaptureSource.cpp index 5a9dfb517fa24..c781ed3058a10 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCaptureSource.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCaptureSource.cpp @@ -233,6 +233,7 @@ const RealtimeMediaSourceCapabilities& GStreamerVideoCaptureSource::capabilities capabilities.setWidth(CapabilityValueOrRange(minWidth, maxWidth)); capabilities.setHeight(CapabilityValueOrRange(minHeight, maxHeight)); capabilities.setFrameRate(CapabilityValueOrRange(minFramerate, maxFramerate)); + capabilities.addFacingMode(RealtimeMediaSourceSettings::Unknown); m_capabilities = WTFMove(capabilities); } diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp index 88b6050216f39..67cc5f3b24169 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp @@ -24,8 +24,6 @@ #if ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER) #include "GStreamerVideoCapturer.h" -#include - namespace WebCore { GStreamerVideoCapturer::GStreamerVideoCapturer(GStreamerCaptureDevice device) diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp b/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp index 57c48d2e081de..14b4822cc7e6d 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.cpp @@ -21,6 +21,8 @@ #if USE(GSTREAMER) && USE(LIBWEBRTC) #include "GStreamerVideoFrameLibWebRTC.h" +#include + namespace WebCore { const GRefPtr GStreamerSampleFromLibWebRTCVideoFrame(const webrtc::VideoFrame& frame) @@ -69,12 +71,13 @@ rtc::scoped_refptr GStreamerVideoFrameLibWebRTC::creat return rtc::scoped_refptr(new GStreamerVideoFrameLibWebRTC(sample, info)); } -std::unique_ptr LibWebRTCVideoFrameFromGStreamerSample(GstSample* sample, webrtc::VideoRotation rotation) +std::unique_ptr LibWebRTCVideoFrameFromGStreamerSample(GstSample* sample, webrtc::VideoRotation rotation, + int64_t timestamp, int64_t renderTimeMs) { auto frameBuffer(GStreamerVideoFrameLibWebRTC::create(sample)); - auto buffer = gst_sample_get_buffer(sample); - return std::unique_ptr(new webrtc::VideoFrame(frameBuffer, GST_BUFFER_DTS(buffer), GST_BUFFER_PTS(buffer), rotation)); + return std::unique_ptr( + new webrtc::VideoFrame(frameBuffer, timestamp, renderTimeMs, rotation)); } webrtc::VideoFrameBuffer::Type GStreamerVideoFrameLibWebRTC::type() const @@ -89,37 +92,55 @@ GRefPtr GStreamerVideoFrameLibWebRTC::getSample() rtc::scoped_refptr GStreamerVideoFrameLibWebRTC::ToI420() { - GstVideoInfo info; - GstVideoFrame frame; + GstMappedFrame inFrame(m_sample, GST_MAP_READ); - if (!gst_video_info_from_caps(&info, gst_sample_get_caps(m_sample.get()))) - ASSERT_NOT_REACHED(); + if (!inFrame) { + GST_WARNING("Could not map frame"); - if (GST_VIDEO_INFO_FORMAT(&info) != GST_VIDEO_FORMAT_I420) return nullptr; + } - gst_video_frame_map(&frame, &info, gst_sample_get_buffer(m_sample.get()), GST_MAP_READ); - - auto newBuffer = m_bufferPool.CreateBuffer(GST_VIDEO_FRAME_WIDTH(&frame), - GST_VIDEO_FRAME_HEIGHT(&frame)); - + auto newBuffer = m_bufferPool.CreateBuffer(inFrame.width(), inFrame.height()); 
ASSERT(newBuffer); if (!newBuffer) { - gst_video_frame_unmap(&frame); GST_WARNING("RealtimeOutgoingVideoSourceGStreamer::videoSampleAvailable unable to allocate buffer for conversion to YUV"); return nullptr; } + if (inFrame.format() != GST_VIDEO_FORMAT_I420) { + GstVideoInfo outInfo; + + gst_video_info_set_format(&outInfo, GST_VIDEO_FORMAT_I420, inFrame.width(), + inFrame.height()); + auto info = inFrame.info(); + outInfo.fps_n = info->fps_n; + outInfo.fps_d = info->fps_d; + + GRefPtr buffer = adoptGRef(gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_NO_SHARE, newBuffer->MutableDataY(), + outInfo.size, 0, outInfo.size, nullptr, nullptr)); + + GstMappedFrame outFrame(buffer.get(), outInfo, GST_MAP_WRITE); + + GUniquePtr videoConverter(gst_video_converter_new(inFrame.info(), + &outInfo, gst_structure_new("GstVideoConvertConfig", + GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT, std::thread::hardware_concurrency() || 1 , nullptr))); + + ASSERT(videoConverter); + + gst_video_converter_frame(videoConverter.get(), inFrame.get(), outFrame.get()); + + return newBuffer; + } + newBuffer->Copy( - GST_VIDEO_FRAME_WIDTH(&frame), - GST_VIDEO_FRAME_HEIGHT(&frame), - GST_VIDEO_FRAME_COMP_DATA(&frame, 0), - GST_VIDEO_FRAME_COMP_STRIDE(&frame, 0), - GST_VIDEO_FRAME_COMP_DATA(&frame, 1), - GST_VIDEO_FRAME_COMP_STRIDE(&frame, 1), - GST_VIDEO_FRAME_COMP_DATA(&frame, 2), - GST_VIDEO_FRAME_COMP_STRIDE(&frame, 2)); - gst_video_frame_unmap(&frame); + inFrame.width(), + inFrame.height(), + inFrame.ComponentData(0), + inFrame.ComponentStride(0), + inFrame.ComponentData(1), + inFrame.ComponentStride(1), + inFrame.ComponentData(2), + inFrame.ComponentStride(2)); return newBuffer; } diff --git a/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h b/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h index 53ada42361bc8..9e53e96da2b62 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h +++ b/Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoFrameLibWebRTC.h @@ -32,7 +32,7 @@ namespace WebCore { const GRefPtr GStreamerSampleFromLibWebRTCVideoFrame(const webrtc::VideoFrame&); -std::unique_ptr LibWebRTCVideoFrameFromGStreamerSample(GstSample*, webrtc::VideoRotation); +std::unique_ptr LibWebRTCVideoFrameFromGStreamerSample(GstSample*, webrtc::VideoRotation, int64_t timestamp, int64_t renderTimeMs); class GStreamerVideoFrameLibWebRTC : public rtc::RefCountedObject { public: diff --git a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceLibWebRTC.cpp b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceLibWebRTC.cpp index 541829ed120cc..ef0e1c24354c8 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceLibWebRTC.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/RealtimeIncomingAudioSourceLibWebRTC.cpp @@ -64,9 +64,14 @@ void RealtimeIncomingAudioSourceLibWebRTC::OnData(const void* audioData, int, in gst_audio_info_set_format(&info, format, sampleRate, numberOfChannels, NULL); - auto buffer = adoptGRef(gst_buffer_new_wrapped( - g_memdup(audioData, GST_AUDIO_INFO_BPF(&info) * numberOfFrames), - GST_AUDIO_INFO_BPF(&info) * numberOfFrames)); + auto bufferSize = GST_AUDIO_INFO_BPF(&info) * numberOfFrames; + gpointer bufferData = g_malloc(bufferSize); + if (muted()) + gst_audio_format_fill_silence(info.finfo, bufferData, bufferSize); + else + memcpy(bufferData, audioData, bufferSize); + + auto buffer = adoptGRef(gst_buffer_new_wrapped(bufferData, 
bufferSize)); GRefPtr caps = adoptGRef(gst_audio_info_to_caps(&info)); auto sample = adoptGRef(gst_sample_new(buffer.get(), caps.get(), nullptr, nullptr)); auto data(std::unique_ptr(new GStreamerAudioData(WTFMove(sample), info))); diff --git a/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.cpp b/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.cpp index 1115d06912038..c9cb1df426acc 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.cpp +++ b/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.cpp @@ -73,17 +73,16 @@ void RealtimeOutgoingAudioSourceLibWebRTC::audioSamplesAvailable(const MediaTime if (m_sampleConverter && !gst_audio_info_is_equal(m_inputStreamDescription->getInfo(), desc.getInfo())) { GST_ERROR_OBJECT(this, "FIXME - Audio format renegotiation is not possible yet!"); - g_clear_pointer(&m_sampleConverter, gst_audio_converter_free); + m_sampleConverter = nullptr; } if (!m_sampleConverter) { m_inputStreamDescription = std::unique_ptr(new GStreamerAudioStreamDescription(desc.getInfo())); m_outputStreamDescription = libwebrtcAudioFormat(LibWebRTCAudioFormat::sampleRate, streamDescription.numberOfChannels()); - - m_sampleConverter = gst_audio_converter_new(GST_AUDIO_CONVERTER_FLAG_IN_WRITABLE, + m_sampleConverter.reset(gst_audio_converter_new(GST_AUDIO_CONVERTER_FLAG_IN_WRITABLE, m_inputStreamDescription->getInfo(), m_outputStreamDescription->getInfo(), - nullptr); + nullptr)); } LockHolder locker(m_adapterMutex); @@ -106,7 +105,7 @@ void RealtimeOutgoingAudioSourceLibWebRTC::pullAudioData() size_t outBufferSize = outChunkSampleCount * m_outputStreamDescription->getInfo()->bpf; LockHolder locker(m_adapterMutex); - size_t inChunkSampleCount = gst_audio_converter_get_in_frames(m_sampleConverter, outChunkSampleCount); + size_t inChunkSampleCount = gst_audio_converter_get_in_frames(m_sampleConverter.get(), outChunkSampleCount); size_t inBufferSize = inChunkSampleCount * m_inputStreamDescription->getInfo()->bpf; auto available = gst_adapter_available(m_adapter.get()); @@ -117,29 +116,30 @@ void RealtimeOutgoingAudioSourceLibWebRTC::pullAudioData() return; } - auto inbuf = adoptGRef(gst_adapter_take_buffer(m_adapter.get(), inBufferSize)); - GstMapInfo inmap; - gst_buffer_map(inbuf.get(), &inmap, static_cast(GST_MAP_READ)); - - GstMapInfo outmap; - auto outbuf = adoptGRef(gst_buffer_new_allocate(nullptr, outBufferSize, 0)); - gst_buffer_map(outbuf.get(), &outmap, static_cast(GST_MAP_WRITE)); - - gpointer in[1] = { inmap.data }; - gpointer out[1] = { outmap.data }; - if (gst_audio_converter_samples(m_sampleConverter, static_cast(0), in, inChunkSampleCount, out, outChunkSampleCount)) { - for (auto sink : m_sinks) { - sink->OnData(outmap.data, - LibWebRTCAudioFormat::sampleSize, - static_cast(m_outputStreamDescription->sampleRate()), - static_cast(m_outputStreamDescription->numberOfChannels()), - outChunkSampleCount); + auto inBuffer = adoptGRef(gst_adapter_take_buffer(m_adapter.get(), inBufferSize)); + auto outBuffer = adoptGRef(gst_buffer_new_allocate(nullptr, outBufferSize, 0)); + GstMappedBuffer outMap(outBuffer.get(), GST_MAP_WRITE); + if (isSilenced()) + gst_audio_format_fill_silence(m_outputStreamDescription->getInfo()->finfo, outMap.data(), outMap.size()); + else { + GstMappedBuffer inMap(inBuffer.get(), GST_MAP_READ); + + gpointer in[1] = { inMap.data() }; + gpointer out[1] = { outMap.data() }; + if 
(!gst_audio_converter_samples(m_sampleConverter.get(), static_cast(0), in, inChunkSampleCount, out, outChunkSampleCount)) { + GST_ERROR("Could not convert samples."); + + return; } - } else - GST_ERROR("Could not convert samples."); + } - gst_buffer_unmap(inbuf.get(), &inmap); - gst_buffer_unmap(outbuf.get(), &outmap); + for (auto sink : m_sinks) { + sink->OnData(outMap.data(), + LibWebRTCAudioFormat::sampleSize, + static_cast(m_outputStreamDescription->sampleRate()), + static_cast(m_outputStreamDescription->numberOfChannels()), + outChunkSampleCount); + } } bool RealtimeOutgoingAudioSourceLibWebRTC::isReachingBufferedAudioDataHighLimit() diff --git a/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.h b/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.h index 53d15cb8e091b..4609e9d9bad25 100644 --- a/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.h +++ b/Source/WebCore/platform/mediastream/gstreamer/RealtimeOutgoingAudioSourceLibWebRTC.h @@ -48,7 +48,7 @@ class RealtimeOutgoingAudioSourceLibWebRTC final : public RealtimeOutgoingAudioS void pullAudioData() final; - GstAudioConverter* m_sampleConverter; + GUniquePtr m_sampleConverter; std::unique_ptr m_inputStreamDescription; std::unique_ptr m_outputStreamDescription; diff --git a/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp b/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp index 0ec2f754e7ce2..7ef57e44bb928 100644 --- a/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp +++ b/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoDecoderFactory.cpp @@ -34,6 +34,8 @@ #include #include #include +#include +#include #include #include @@ -42,6 +44,11 @@ GST_DEBUG_CATEGORY(webkit_webrtcdec_debug); namespace WebCore { +typedef struct { + uint64_t timestamp; + int64_t renderTimeMs; +} InputTimestamps; + class GStreamerVideoDecoder : public webrtc::VideoDecoder { public: GStreamerVideoDecoder() @@ -72,7 +79,7 @@ class GStreamerVideoDecoder : public webrtc::VideoDecoder { return gst_element_factory_make(factoryName, name.get()); } - int32_t InitDecode(const webrtc::VideoCodec*, int32_t) + int32_t InitDecode(const webrtc::VideoCodec*, int32_t) override { m_src = makeElement("appsrc"); @@ -160,15 +167,19 @@ class GStreamerVideoDecoder : public webrtc::VideoDecoder { } // FIXME- Use a GstBufferPool. 
- auto buffer = gst_buffer_new_wrapped(g_memdup(inputImage._buffer, inputImage._size), - inputImage._size); - GST_BUFFER_DTS(buffer) = (static_cast(inputImage._timeStamp) * GST_MSECOND) - m_firstBufferDts; - GST_BUFFER_PTS(buffer) = (static_cast(renderTimeMs) * GST_MSECOND) - m_firstBufferPts; - m_dtsPtsMap[GST_BUFFER_PTS(buffer)] = inputImage._timeStamp; - - GST_LOG_OBJECT(pipeline(), "%ld Decoding: %" GST_PTR_FORMAT, renderTimeMs, buffer); - switch (gst_app_src_push_sample(GST_APP_SRC(m_src), - gst_sample_new(buffer, GetCapsForFrame(inputImage), nullptr, nullptr))) { + auto buffer = adoptGRef(gst_buffer_new_wrapped(g_memdup(inputImage._buffer, inputImage._size), + inputImage._size)); + GST_BUFFER_DTS(buffer.get()) = (static_cast(inputImage._timeStamp) * GST_MSECOND) - m_firstBufferDts; + GST_BUFFER_PTS(buffer.get()) = (static_cast(renderTimeMs) * GST_MSECOND) - m_firstBufferPts; + { + auto locker = holdLock(m_bufferMapLock); + InputTimestamps timestamps = {inputImage._timeStamp, renderTimeMs}; + m_dtsPtsMap[GST_BUFFER_PTS(buffer.get())] = timestamps; + } + + GST_LOG_OBJECT(pipeline(), "%ld Decoding: %" GST_PTR_FORMAT, renderTimeMs, buffer.get()); + auto sample = adoptGRef(gst_sample_new(buffer.get(), GetCapsForFrame(inputImage), nullptr, nullptr)); + switch (gst_app_src_push_sample(GST_APP_SRC(m_src), sample.get())) { case GST_FLOW_OK: return WEBRTC_VIDEO_CODEC_OK; case GST_FLOW_FLUSHING: @@ -178,7 +189,7 @@ class GStreamerVideoDecoder : public webrtc::VideoDecoder { } } - GstCaps* GetCapsForFrame(const webrtc::EncodedImage& image) + virtual GstCaps* GetCapsForFrame(const webrtc::EncodedImage& image) { if (!m_caps) { m_caps = adoptGRef(gst_caps_new_simple(Caps(), @@ -204,19 +215,26 @@ class GStreamerVideoDecoder : public webrtc::VideoDecoder { return webrtc::SdpVideoFormat(Name()); } - bool HasGstDecoder() + static GRefPtr GstDecoderFactory(const char *capsStr) { - auto all_decoders = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER, GST_RANK_MARGINAL); - auto caps = adoptGRef(gst_caps_from_string(Caps())); + auto caps = adoptGRef(gst_caps_from_string(capsStr)); auto decoders = gst_element_factory_list_filter(all_decoders, caps.get(), GST_PAD_SINK, FALSE); gst_plugin_feature_list_free(all_decoders); + GRefPtr res; + if (decoders) + res = GST_ELEMENT_FACTORY(decoders->data); gst_plugin_feature_list_free(decoders); - return decoders != nullptr; + return res; + } + + bool HasGstDecoder() + { + return GstDecoderFactory(Caps()); } GstFlowReturn newSampleCallback(GstElement* sink) @@ -224,11 +242,16 @@ class GStreamerVideoDecoder : public webrtc::VideoDecoder { auto sample = gst_app_sink_pull_sample(GST_APP_SINK(sink)); auto buffer = gst_sample_get_buffer(sample); + m_bufferMapLock.lock(); // Make sure that the frame.timestamp == previsouly input_frame._timeStamp // as it is required by the VideoDecoder baseclass. - GST_BUFFER_DTS(buffer) = m_dtsPtsMap[GST_BUFFER_PTS(buffer)]; + auto timestamps = m_dtsPtsMap[GST_BUFFER_PTS(buffer)]; m_dtsPtsMap.erase(GST_BUFFER_PTS(buffer)); - auto frame(LibWebRTCVideoFrameFromGStreamerSample(sample, webrtc::kVideoRotation_0)); + m_bufferMapLock.unlock(); + + auto frame(LibWebRTCVideoFrameFromGStreamerSample(sample, webrtc::kVideoRotation_0, + timestamps.timestamp, timestamps.renderTimeMs)); + GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE; GST_LOG_OBJECT(pipeline(), "Output decoded frame! 
%d -> %" GST_PTR_FORMAT, frame->timestamp(), buffer); @@ -259,7 +282,8 @@ class GStreamerVideoDecoder : public webrtc::VideoDecoder { GstVideoInfo m_info; webrtc::DecodedImageCallback* m_imageReadyCb; - std::map m_dtsPtsMap; + Lock m_bufferMapLock; + StdMap m_dtsPtsMap; GstClockTime m_firstBufferPts; GstClockTime m_firstBufferDts; }; @@ -267,9 +291,58 @@ class GStreamerVideoDecoder : public webrtc::VideoDecoder { class H264Decoder : public GStreamerVideoDecoder { public: H264Decoder() { } + + int32_t InitDecode(const webrtc::VideoCodec* codecInfo, int32_t nCores) final + { + if (codecInfo && codecInfo->codecType != webrtc::kVideoCodecH264) + return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; + + m_profile = nullptr; + if (codecInfo) { + auto h264Info = codecInfo->H264(); + + switch (h264Info.profile) { + case webrtc::H264::kProfileConstrainedBaseline: + m_profile = "constrained-baseline"; + break; + case webrtc::H264::kProfileBaseline: + m_profile = "baseline"; + break; + case webrtc::H264::kProfileMain: + m_profile = "main"; + break; + case webrtc::H264::kProfileConstrainedHigh: + m_profile = "constrained-high"; + break; + case webrtc::H264::kProfileHigh: + m_profile = "high"; + break; + } + } + + return GStreamerVideoDecoder::InitDecode(codecInfo, nCores); + } + + GstCaps* GetCapsForFrame(const webrtc::EncodedImage& image) final + { + if (!m_caps) { + m_caps = adoptGRef(gst_caps_new_simple(Caps(), + "width", G_TYPE_INT, image._encodedWidth, + "height", G_TYPE_INT, image._encodedHeight, + "profile", G_TYPE_STRING, m_profile ? m_profile : "baseline", + "stream-format", G_TYPE_STRING, "byte-stream", + "alignment", G_TYPE_STRING, "au", + nullptr)); + } + + return m_caps.get(); + } const gchar* Caps() final { return "video/x-h264"; } const gchar* Name() final { return cricket::kH264CodecName; } webrtc::VideoCodecType CodecType() final { return webrtc::kVideoCodecH264; } + +private: + const gchar* m_profile; }; class VP8Decoder : public GStreamerVideoDecoder { @@ -278,16 +351,30 @@ class VP8Decoder : public GStreamerVideoDecoder { const gchar* Caps() final { return "video/x-vp8"; } const gchar* Name() final { return cricket::kVp8CodecName; } webrtc::VideoCodecType CodecType() final { return webrtc::kVideoCodecVP8; } + static std::unique_ptr Create() + { + auto factory = GstDecoderFactory("video/x-vp8"); + + if (factory && !g_strcmp0(GST_OBJECT_NAME(GST_OBJECT(factory.get())), "vp8dec") && + // FIXME - Fix omxvp8dec usage, it work "sometimes" only right now. + !g_strcmp0(GST_OBJECT_NAME(GST_OBJECT(factory.get())), "omxvp8dec")) { + GST_INFO("Our best GStreamer VP8 decoder is vp8dec, better use the one from LibWebRTC"); + + return webrtc::VP8Decoder::Create(); + } + + return std::unique_ptr(new VP8Decoder()); + } }; std::unique_ptr GStreamerVideoDecoderFactory::CreateVideoDecoder(const webrtc::SdpVideoFormat& format) { - GStreamerVideoDecoder* dec; + webrtc::VideoDecoder* dec; if (format.name == cricket::kH264CodecName) dec = new H264Decoder(); else if (format.name == cricket::kVp8CodecName) - dec = new VP8Decoder(); + return VP8Decoder::Create(); else { GST_ERROR("Could not create decoder for %s", format.name.c_str()); diff --git a/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoder.cpp b/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoder.cpp new file mode 100644 index 0000000000000..11839f9d019e4 --- /dev/null +++ b/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoder.cpp @@ -0,0 +1,369 @@ +/* + * Copyright (C) 2018 Metrological Group B.V. 
+ * Copyright (C) 2018 Igalia S.L. All rights reserved. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public License + * aint with this library; see the file COPYING.LIB. If not, write to + * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +/* NOTE: This file respects GStreamer coding style as we might want to upstream + * that element in the future */ + +#include "config.h" + +#if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER) +#include "GStreamerVideoEncoder.h" + +GST_DEBUG_CATEGORY (gst_webrtcenc_debug); +#define GST_CAT_DEFAULT gst_webrtcenc_debug + +#define KBIT_TO_BIT 1024 + +static GstStaticPadTemplate sinkTemplate = GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("video/x-raw(ANY);")); + +static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("video/x-h264;video/x-vp8")); + +typedef void (*SetBitrateFunc) (GObject * encoder, const gchar * propname, + gint bitrate); +typedef void (*SetupEncoder) (GObject * encoder); +typedef struct +{ + gboolean avalaible; + GstCaps *caps; + const gchar *name; + const gchar *parser_name; + GstCaps *encoded_format; + SetBitrateFunc setBitrate; + SetupEncoder setupEncoder; + const gchar *bitrate_propname; + const gchar *keyframe_interval_propname; +} EncoderDefinition; + +typedef enum +{ + ENCODER_NONE = 0, + ENCODER_X264, + ENCODER_OPENH264, + ENCODER_OMXH264, + ENCODER_VP8, + ENCODER_LAST, +} EncoderId; + +EncoderDefinition encoders[ENCODER_LAST] = { + FALSE, + NULL, + NULL, + NULL, + NULL, + NULL, + NULL, + NULL, + NULL, +}; + +typedef struct +{ + EncoderId encoderId; + GstElement *encoder; + GstElement *parser; + GstElement *capsfilter; + guint bitrate; +} GstWebrtcVideoEncoderPrivate; + +/* *INDENT-OFF* */ +G_DEFINE_TYPE_WITH_PRIVATE (GstWebrtcVideoEncoder, gst_webrtc_video_encoder, + GST_TYPE_BIN) +#define PRIV(self) ((GstWebrtcVideoEncoderPrivate*)gst_webrtc_video_encoder_get_instance_private(self)) +/* *INDENT-ON* */ + +enum +{ + PROP_0, + PROP_FORMAT, + PROP_ENCODER, + PROP_BITRATE, + PROP_KEYFRAME_INTERVAL, + N_PROPS +}; + +static void +gst_webrtc_video_encoder_finalize (GObject * object) +{ + G_OBJECT_CLASS (gst_webrtc_video_encoder_parent_class)->finalize (object); +} + +static void +gst_webrtc_video_encoder_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) +{ + GstWebrtcVideoEncoder *self = GST_WEBRTC_VIDEO_ENCODER (object); + GstWebrtcVideoEncoderPrivate *priv = PRIV (self); + + switch (prop_id) { + case PROP_FORMAT: + if (priv->encoderId != ENCODER_NONE) + g_value_set_boxed (value, encoders[priv->encoderId].caps); + else + g_value_set_boxed (value, NULL); + break; + case PROP_ENCODER: + g_value_set_object (value, priv->encoder); + break; + case PROP_BITRATE: + g_value_set_uint (value, priv->bitrate); + break; + case PROP_KEYFRAME_INTERVAL: + if (priv->encoder) + 
g_object_get_property (G_OBJECT (priv->encoder), + encoders[priv->encoderId].keyframe_interval_propname, value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +gst_webrtc_video_encoder_set_bitrate (GstWebrtcVideoEncoder * self, + guint bitrate) +{ + GstWebrtcVideoEncoderPrivate *priv = PRIV (self); + + priv->bitrate = bitrate; + if (priv->encoder) { + encoders[priv->encoderId].setBitrate (G_OBJECT (priv->encoder), + encoders[priv->encoderId].bitrate_propname, priv->bitrate); + } +} + +static void +gst_webrtc_video_encoder_set_format (GstWebrtcVideoEncoder * self, + const GstCaps * caps) +{ + gint i; + GstWebrtcVideoEncoderPrivate *priv = PRIV (self); + g_return_if_fail (priv->encoderId == ENCODER_NONE); + g_return_if_fail (caps); + + for (i = 1; i < ENCODER_LAST; i++) { + if (encoders[i].avalaible + && gst_caps_can_intersect (encoders[i].caps, caps)) { + GstPad *tmppad; + priv->encoderId = (EncoderId) i; + priv->encoder = gst_element_factory_make (encoders[i].name, NULL); + encoders[priv->encoderId].setupEncoder (G_OBJECT (priv->encoder)); + + if (encoders[i].parser_name) + priv->parser = gst_element_factory_make (encoders[i].parser_name, NULL); + + if (encoders[i].encoded_format) { + priv->capsfilter = gst_element_factory_make ("capsfilter", NULL); + g_object_set (priv->capsfilter, "caps", encoders[i].encoded_format, + NULL); + } + + gst_bin_add (GST_BIN (self), priv->encoder); + + tmppad = gst_element_get_static_pad (priv->encoder, "sink"); + gst_ghost_pad_set_target (GST_GHOST_PAD (GST_ELEMENT (self)-> + sinkpads->data), tmppad); + gst_object_unref (tmppad); + + tmppad = gst_element_get_static_pad (priv->encoder, "src"); + if (priv->parser) { + gst_bin_add (GST_BIN (self), priv->parser); + gst_element_link (priv->encoder, priv->parser); + gst_object_unref (tmppad); + tmppad = gst_element_get_static_pad (priv->parser, "src"); + } + + if (priv->capsfilter) { + GstPad *tmppad2 = gst_element_get_static_pad (priv->capsfilter, "sink"); + + gst_bin_add (GST_BIN (self), priv->capsfilter); + gst_pad_link (tmppad, tmppad2); + gst_object_unref (tmppad); + tmppad = gst_element_get_static_pad (priv->capsfilter, "src"); + gst_object_unref (tmppad2); + } + + g_assert (gst_ghost_pad_set_target (GST_GHOST_PAD (GST_ELEMENT + (self)->srcpads->data), tmppad)); + gst_object_unref (tmppad); + + gst_webrtc_video_encoder_set_bitrate (self, priv->bitrate); + return; + } + } + + GST_ERROR ("No encoder found for format %" GST_PTR_FORMAT, caps); +} + +static void +gst_webrtc_video_encoder_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) +{ + GstWebrtcVideoEncoder *self = GST_WEBRTC_VIDEO_ENCODER (object); + GstWebrtcVideoEncoderPrivate *priv = PRIV (self); + + switch (prop_id) { + case PROP_FORMAT: + gst_webrtc_video_encoder_set_format (self, gst_value_get_caps (value)); + break; + case PROP_BITRATE: + gst_webrtc_video_encoder_set_bitrate (self, g_value_get_uint (value)); + break; + case PROP_KEYFRAME_INTERVAL: + if (priv->encoder) + g_object_set (priv->encoder, + encoders[priv->encoderId].keyframe_interval_propname, + g_value_get_uint (value), NULL); + + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +register_known_encoder (EncoderId encId, const gchar * name, + const gchar * parser_name, const gchar * caps, const gchar * encoded_format, + SetupEncoder setupEncoder, const gchar * bitrate_propname, + SetBitrateFunc setBitrate, const gchar * 
keyframe_interval_propname) +{ + GstPluginFeature *feature = + gst_registry_lookup_feature (gst_registry_get (), name); + if (!feature) { + GST_WARNING ("Could not find %s", name); + encoders[encId].avalaible = FALSE; + + return; + } + gst_object_unref (feature); + + encoders[encId].avalaible = TRUE; + encoders[encId].name = name; + encoders[encId].parser_name = parser_name; + encoders[encId].caps = gst_caps_from_string (caps); + if (encoded_format) + encoders[encId].encoded_format = gst_caps_from_string (encoded_format); + else + encoders[encId].encoded_format = NULL; + encoders[encId].setupEncoder = setupEncoder; + encoders[encId].bitrate_propname = bitrate_propname; + encoders[encId].setBitrate = setBitrate; + encoders[encId].keyframe_interval_propname = keyframe_interval_propname; +} + +static void +setup_x264enc (GObject * encoder) +{ + gst_util_set_object_arg (encoder, "tune", "zerolatency"); +} + +static void +setup_openh264enc (GObject *) +{ +} + +static void +setup_omxh264enc (GObject * encoder) +{ + gst_util_set_object_arg (encoder, "control-rate", "constant"); +} + +static void +set_bitrate_kbit_per_sec (GObject * encoder, const gchar * prop_name, + gint bitrate) +{ + g_object_set (encoder, prop_name, bitrate, NULL); +} + +static void +set_bitrate_bit_per_sec (GObject * encoder, const gchar * prop_name, + gint bitrate) +{ + g_object_set (encoder, prop_name, bitrate * KBIT_TO_BIT, NULL); +} + +static void +gst_webrtc_video_encoder_class_init (GstWebrtcVideoEncoderClass * klass) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + + GST_DEBUG_CATEGORY_INIT (gst_webrtcenc_debug, "webrtcencoder", 0, + "Video encoder for WebRTC"); + + object_class->finalize = gst_webrtc_video_encoder_finalize; + object_class->get_property = gst_webrtc_video_encoder_get_property; + object_class->set_property = gst_webrtc_video_encoder_set_property; + + g_object_class_install_property (object_class, PROP_FORMAT, + g_param_spec_boxed ("format", "Format as caps", + "Set the caps of the format to be used.", + GST_TYPE_CAPS, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property (object_class, PROP_ENCODER, + g_param_spec_object ("encoder", "The actual encoder element", + "The encoder element", GST_TYPE_ELEMENT, + (GParamFlags) (G_PARAM_READABLE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property (object_class, PROP_BITRATE, + g_param_spec_uint ("bitrate", "Bitrate", + "The bitrate in kbit per second", 0, G_MAXINT, 2048, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property (object_class, PROP_KEYFRAME_INTERVAL, + g_param_spec_uint ("keyframe-interval", "Keyframe interval", + "The interval between keyframes", 0, G_MAXINT, 0, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + register_known_encoder (ENCODER_X264, "x264enc", "h264parse", "video/x-h264", + "video/x-h264,alignment=au,stream-format=byte-stream,profile=baseline", + setup_x264enc, "bitrate", set_bitrate_kbit_per_sec, "key-int-max"); + register_known_encoder (ENCODER_OPENH264, "openh264enc", "h264parse", + "video/x-h264", + "video/x-h264,alignment=au,stream-format=byte-stream,profile=baseline", + setup_openh264enc, "bitrate", set_bitrate_kbit_per_sec, "gop-size"); + register_known_encoder (ENCODER_OMXH264, "omxh264enc", "h264parse", + "video/x-h264", + "video/x-h264,alignment=au,stream-format=byte-stream,profile=baseline", + setup_omxh264enc, "target-bitrate", 
set_bitrate_bit_per_sec, "interval-intraframes"); +} + +static void +gst_webrtc_video_encoder_init (GstWebrtcVideoEncoder * self) +{ + GstWebrtcVideoEncoderPrivate *priv = PRIV (self); + + priv->encoderId = ENCODER_NONE; + gst_element_add_pad (GST_ELEMENT (self), + gst_ghost_pad_new_no_target_from_template ("sink", + gst_static_pad_template_get (&sinkTemplate))); + + gst_element_add_pad (GST_ELEMENT (self), + gst_ghost_pad_new_no_target_from_template ("src", + gst_static_pad_template_get (&srcTemplate))); +} + +#endif // ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER) diff --git a/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoder.h b/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoder.h new file mode 100644 index 0000000000000..10ea25c4eee3e --- /dev/null +++ b/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoder.h @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2018 Metrological Group B.V. + * Copyright (C) 2018 Igalia S.L. All rights reserved. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public License + * aint with this library; see the file COPYING.LIB. If not, write to + * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#pragma once + +#include + +G_BEGIN_DECLS + +#define GST_TYPE_WEBRTC_VIDEO_ENCODER (gst_webrtc_video_encoder_get_type()) + +G_DECLARE_DERIVABLE_TYPE (GstWebrtcVideoEncoder, gst_webrtc_video_encoder, GST, WEBRTC_VIDEO_ENCODER, GstBin) + +struct _GstWebrtcVideoEncoderClass +{ + GstBinClass parent_class; +}; + +G_END_DECLS diff --git a/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp b/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp index 21649c25664bc..206753b77b935 100644 --- a/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp +++ b/Source/WebCore/platform/mediastream/libwebrtc/GStreamerVideoEncoderFactory.cpp @@ -23,6 +23,7 @@ #if ENABLE(VIDEO) && ENABLE(MEDIA_STREAM) && USE(LIBWEBRTC) && USE(GSTREAMER) #include "GStreamerVideoEncoderFactory.h" +#include "GStreamerVideoEncoder.h" #include "GStreamerVideoFrameLibWebRTC.h" #include "webrtc/common_video/h264/h264_common.h" #include "webrtc/common_video/h264/profile_level_id.h" @@ -38,8 +39,9 @@ #undef GST_USE_UNSTABLE_API #include #include - -#include +#include +#include +#include // Required for unified builds #ifdef GST_CAT_DEFAULT @@ -53,10 +55,11 @@ GST_DEBUG_CATEGORY(webkit_webrtcenc_debug); namespace WebCore { -typedef void (*BitrateSetter)(GstElement* encoder, uint32_t bitrate); -static GRefPtr targetBitrateBitPerSec; -static GRefPtr bitrateBitPerSec; -static GRefPtr bitrateKBitPerSec; +typedef struct { + uint64_t rtpTimestamp; + int64_t captureTimeMs; + webrtc::CodecSpecificInfo codecInfo; +} FrameData; class GStreamerVideoEncoder : public webrtc::VideoEncoder { public: @@ -64,14 +67,14 @@ class GStreamerVideoEncoder : public webrtc::VideoEncoder { : m_pictureId(0) , m_firstFramePts(GST_CLOCK_TIME_NONE) , m_restrictionCaps(adoptGRef(gst_caps_new_empty_simple("video/x-raw"))) - , m_bitrateSetter(nullptr) + , m_adapter(adoptGRef(gst_adapter_new())) { } GStreamerVideoEncoder() : m_pictureId(0) , m_firstFramePts(GST_CLOCK_TIME_NONE) , m_restrictionCaps(adoptGRef(gst_caps_new_empty_simple("video/x-raw"))) - , m_bitrateSetter(nullptr) + , m_adapter(adoptGRef(gst_adapter_new())) { } @@ -80,13 +83,12 @@ class GStreamerVideoEncoder : public webrtc::VideoEncoder { GST_INFO_OBJECT(m_pipeline.get(), "New bitrate: %d - framerate is %d", newBitrate, frameRate); - auto caps = gst_caps_make_writable(m_restrictionCaps.get()); - gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, frameRate, 1, nullptr); + auto caps = adoptGRef(gst_caps_copy(m_restrictionCaps.get())); - SetRestrictionCaps(caps); + SetRestrictionCaps(WTFMove(caps)); - if (m_bitrateSetter && m_encoder) - m_bitrateSetter(m_encoder, newBitrate); + if (m_encoder) + g_object_set(m_encoder, "bitrate", newBitrate, nullptr); return WEBRTC_VIDEO_CODEC_OK; } @@ -109,23 +111,38 @@ class GStreamerVideoEncoder : public webrtc::VideoEncoder { g_return_val_if_fail(codecSettings, WEBRTC_VIDEO_CODEC_ERR_PARAMETER); g_return_val_if_fail(codecSettings->codecType == CodecType(), WEBRTC_VIDEO_CODEC_ERR_PARAMETER); + m_encodedFrame._size = codecSettings->width * codecSettings->height * 3; + m_encodedFrame._buffer = new uint8_t[m_encodedFrame._size]; + m_encodedImageBuffer.reset(m_encodedFrame._buffer); + m_encodedFrame._completeFrame = true; + m_encodedFrame._encodedWidth = 0; + m_encodedFrame._encodedHeight = 0; + m_encodedFrame._length = 0; + m_pipeline = makeElement("pipeline"); connectSimpleBusMessageCallback(m_pipeline.get()); - auto encodebin = createEncoder(&m_encoder).leakRef(); - ASSERT(m_encoder); 
- m_bitrateSetter = getBitrateSetter(gst_element_get_factory(m_encoder)); + auto encoder = createEncoder(); + ASSERT(encoder); + m_encoder = encoder.get(); + + g_object_set(m_encoder, "keyframe-interval", KeyframeInterval(codecSettings), nullptr); m_src = makeElement("appsrc"); g_object_set(m_src, "is-live", true, "format", GST_FORMAT_TIME, nullptr); - auto capsfilter = CreateFilter(); + auto videoconvert = makeElement("videoconvert"); auto sink = makeElement("appsink"); gst_app_sink_set_emit_signals(GST_APP_SINK(sink), TRUE); g_signal_connect(sink, "new-sample", G_CALLBACK(newSampleCallbackTramp), this); - gst_bin_add_many(GST_BIN(m_pipeline.get()), m_src, encodebin, capsfilter, sink, nullptr); - if (!gst_element_link_many(m_src, encodebin, capsfilter, sink, nullptr)) + auto name = String::format("%s_enc_rawcapsfilter_%p", Name(), this); + m_capsFilter = gst_element_factory_make("capsfilter", name.utf8().data()); + if (m_restrictionCaps) + g_object_set(m_capsFilter, "caps", m_restrictionCaps.get(), nullptr); + + gst_bin_add_many(GST_BIN(m_pipeline.get()), m_src, videoconvert, m_capsFilter, encoder.leakRef(), sink, nullptr); + if (!gst_element_link_many(m_src, videoconvert, m_capsFilter, m_encoder, sink, nullptr)) ASSERT_NOT_REACHED(); gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING); @@ -138,11 +155,6 @@ class GStreamerVideoEncoder : public webrtc::VideoEncoder { return true; } - virtual GstElement* CreateFilter() - { - return makeElement("capsfilter"); - } - int32_t RegisterEncodeCompleteCallback(webrtc::EncodedImageCallback* callback) final { m_imageReadyCb = callback; @@ -152,12 +164,18 @@ class GStreamerVideoEncoder : public webrtc::VideoEncoder { int32_t Release() final { - GRefPtr bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get()))); - gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr); - - gst_element_set_state(m_pipeline.get(), GST_STATE_NULL); - m_src = nullptr; - m_pipeline = nullptr; + m_encodedFrame._buffer = nullptr; + m_encodedImageBuffer.reset(); + if (m_pipeline) { + GRefPtr bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get()))); + gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr); + + gst_element_set_state(m_pipeline.get(), GST_STATE_NULL); + m_src = nullptr; + m_encoder = nullptr; + m_capsFilter = nullptr; + m_pipeline = nullptr; + } return WEBRTC_VIDEO_CODEC_OK; } @@ -168,7 +186,7 @@ class GStreamerVideoEncoder : public webrtc::VideoEncoder { } int32_t Encode(const webrtc::VideoFrame& frame, - const webrtc::CodecSpecificInfo*, + const webrtc::CodecSpecificInfo* codecInfo, const std::vector* frameTypes) final { if (!m_imageReadyCb) { @@ -192,11 +210,19 @@ class GStreamerVideoEncoder : public webrtc::VideoEncoder { gst_pad_set_offset(pad.get(), -m_firstFramePts); } + webrtc::CodecSpecificInfo localCodecInfo; + FrameData frameData = { frame.timestamp(), frame.render_time_ms(), codecInfo ? 
*codecInfo : localCodecInfo }; + { + auto locker = holdLock(m_bufferMapLock); + m_framesData.append(frameData); + } + for (auto frame_type : *frameTypes) { if (frame_type == webrtc::kVideoFrameKey) { auto pad = adoptGRef(gst_element_get_static_pad(m_src, "src")); auto forceKeyUnit = gst_video_event_new_downstream_force_key_unit(GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, FALSE, 1); + GST_INFO_OBJECT(m_pipeline.get(), "Requesting KEYFRAME!"); if (!gst_pad_push_event(pad.get(), forceKeyUnit)) GST_WARNING_OBJECT(pipeline(), "Could not send ForceKeyUnit event"); @@ -221,124 +247,84 @@ class GStreamerVideoEncoder : public webrtc::VideoEncoder { auto buffer = gst_sample_get_buffer(sample.get()); auto caps = gst_sample_get_caps(sample.get()); - webrtc::RTPFragmentationHeader* fragmentationInfo; - auto frame = Fragmentize(buffer, &fragmentationInfo); - if (!frame._size) - return GST_FLOW_OK; - - gst_structure_get(gst_caps_get_structure(caps, 0), - "width", G_TYPE_INT, &frame._encodedWidth, - "height", G_TYPE_INT, &frame._encodedHeight, - nullptr); + webrtc::CodecSpecificInfo localCodecInfo; + FrameData frameData = { 0, 0, localCodecInfo}; + { + auto locker = holdLock(m_bufferMapLock); + if (!m_framesData.size()) { + gst_adapter_push(m_adapter.get(), gst_buffer_ref(buffer)); - frame._frameType = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT) ? webrtc::kVideoFrameDelta : webrtc::kVideoFrameKey; - frame._completeFrame = true; - frame.capture_time_ms_ = GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer)); - frame._timeStamp = GST_TIME_AS_MSECONDS(GST_BUFFER_DTS(buffer)); - GST_LOG_OBJECT(m_pipeline.get(), "Got buffer TS: %" GST_TIME_FORMAT, GST_TIME_ARGS(GST_BUFFER_PTS(buffer))); + return GST_FLOW_OK; + } - webrtc::CodecSpecificInfo codecSpecifiInfos; - PopulateCodecSpecific(&codecSpecifiInfos, buffer); + if (gst_adapter_available(m_adapter.get()) > 0) { + uint flags = GST_BUFFER_FLAGS(buffer); - webrtc::EncodedImageCallback::Result result = m_imageReadyCb->OnEncodedImage(frame, &codecSpecifiInfos, fragmentationInfo); - m_pictureId = (m_pictureId + 1) & 0x7FFF; - if (result.error != webrtc::EncodedImageCallback::Result::OK) { - GST_ELEMENT_ERROR(m_pipeline.get(), LIBRARY, FAILED, (nullptr), - ("Encode callback failed: %d", result.error)); + GST_INFO_OBJECT(m_pipeline.get(), "Got more buffer than pushed frame, trying to deal with it."); + gst_adapter_push(m_adapter.get(), gst_buffer_ref(buffer)); - return GST_FLOW_ERROR; + buffer = gst_adapter_take_buffer(m_adapter.get(), gst_adapter_available(m_adapter.get())); + GST_BUFFER_FLAGS(buffer) = flags; + } + frameData = m_framesData[0]; + m_framesData.remove(static_cast(0)); } - return GST_FLOW_OK; - } + webrtc::RTPFragmentationHeader fragmentationInfo; + Fragmentize(&m_encodedFrame, &m_encodedImageBuffer, &m_encodedImageBufferSize, buffer, &fragmentationInfo); + if (!m_encodedFrame._size) + return GST_FLOW_OK; -#define RETURN_BITRATE_SETTER_IF_MATCHES(regex, propertyName, bitrateMultiplier, unit) \ - if (g_regex_match(regex.get(), factoryName, static_cast(0), nullptr)) { \ - GST_INFO_OBJECT(encoderFactory, "Detected as having a " #propertyName " property in " unit); \ - return [](GstElement* encoder, uint32_t bitrate) \ - { \ - g_object_set(encoder, propertyName, bitrate * bitrateMultiplier, nullptr); \ - }; \ - } + gst_structure_get(gst_caps_get_structure(caps, 0), + "width", G_TYPE_INT, &m_encodedFrame._encodedWidth, + "height", G_TYPE_INT, &m_encodedFrame._encodedHeight, + nullptr); - // GStreamer doesn't have a unified encoder 
API and the encoders have their - // own semantics and naming to set the bitrate, this is a best effort to handle - // setting bitrate for the well known encoders. - // See https://bugzilla.gnome.org/show_bug.cgi?id=796716 - BitrateSetter getBitrateSetter(GstElementFactory* encoderFactory) - { - static std::once_flag regexRegisteredFlag; + m_encodedFrame._frameType = GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT) ? webrtc::kVideoFrameDelta : webrtc::kVideoFrameKey; + m_encodedFrame._completeFrame = false; + m_encodedFrame.capture_time_ms_ = frameData.captureTimeMs; + m_encodedFrame._timeStamp = frameData.rtpTimestamp; - std::call_once(regexRegisteredFlag, [] { - targetBitrateBitPerSec = g_regex_new("^vp.enc$|^omx.*enc$", static_cast(0), static_cast(0), nullptr); - bitrateBitPerSec = g_regex_new("^openh264enc$", static_cast(0), static_cast(0), nullptr); - bitrateKBitPerSec = g_regex_new("^x264enc$|vaapi.*enc$", static_cast(0), static_cast(0), nullptr); - ASSERT(targetBitrateBitPerSec.get() && bitrateBitPerSec.get() && bitrateKBitPerSec.get()); - }); + GST_LOG_OBJECT(m_pipeline.get(), "Got buffer capture_time_ms: %ld _timestamp: %ld", + m_encodedFrame.capture_time_ms_, m_encodedFrame._timeStamp); - auto factoryName = GST_OBJECT_NAME(encoderFactory); - RETURN_BITRATE_SETTER_IF_MATCHES(targetBitrateBitPerSec, "target-bitrate", KBIT_TO_BIT, "Bits Per Second") - RETURN_BITRATE_SETTER_IF_MATCHES(bitrateBitPerSec, "bitrate", KBIT_TO_BIT, "Bits Per Second") - RETURN_BITRATE_SETTER_IF_MATCHES(bitrateKBitPerSec, "bitrate", 1, "KBits Per Second") + PopulateCodecSpecific(&frameData.codecInfo, buffer); - GST_WARNING_OBJECT(encoderFactory, "unkonwn encoder, can't set bitrates on it"); - return nullptr; + webrtc::EncodedImageCallback::Result result = m_imageReadyCb->OnEncodedImage(m_encodedFrame, &frameData.codecInfo, &fragmentationInfo); + if (result.error != webrtc::EncodedImageCallback::Result::OK) + GST_ERROR_OBJECT(m_pipeline.get(), "Encode callback failed: %d", result.error); + + return GST_FLOW_OK; } -#undef RETURN_BITRATE_SETTER_IF_MATCHES - GRefPtr createEncoder(GstElement** encoder) + GRefPtr createEncoder(void) { - GstElement* enc = nullptr; - - m_profile = GST_ENCODING_PROFILE(gst_encoding_video_profile_new( - adoptGRef(gst_caps_from_string(Caps())).get(), - ProfileName(), - gst_caps_ref(m_restrictionCaps.get()), - 1)); - GRefPtr encodebin = makeElement("encodebin"); - - if (!encodebin.get()) { - GST_ERROR("No encodebin present... 
can't use GStreamer based encoders"); - return nullptr; - } - g_object_set(encodebin.get(), "profile", m_profile.get(), nullptr); + GRefPtr encoder = nullptr; + GstElement* webrtcencoder = GST_ELEMENT(g_object_ref_sink(gst_element_factory_make("webrtcvideoencoder", NULL))); - for (GList* tmp = GST_BIN_CHILDREN(encodebin.get()); tmp; tmp = tmp->next) { - GstElement* elem = GST_ELEMENT(tmp->data); - GstElementFactory* factory = gst_element_get_factory((elem)); + g_object_set(webrtcencoder, "format", adoptGRef(gst_caps_from_string(Caps())).get(), NULL); + g_object_get(webrtcencoder, "encoder", &encoder.outPtr(), NULL); - if (!factory || !gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_VIDEO_ENCODER)) - continue; + if (!encoder) { + GST_INFO("No encoder found for %s", Caps()); - enc = elem; - break; - } - - if (!enc) return nullptr; + } - if (encoder) - *encoder = enc; - - return encodebin; + return webrtcencoder; } void AddCodecIfSupported(std::vector* supportedFormats) { GstElement* encoder; - if (createEncoder(&encoder).get() != nullptr) { + if (createEncoder().get() != nullptr) { webrtc::SdpVideoFormat format = ConfigureSupportedCodec(encoder); supportedFormats->push_back(format); } } - virtual const gchar* ProfileName() - { - return nullptr; - } - virtual const gchar* Caps() { return nullptr; @@ -352,41 +338,46 @@ class GStreamerVideoEncoder : public webrtc::VideoEncoder { virtual void PopulateCodecSpecific(webrtc::CodecSpecificInfo*, GstBuffer*) = 0; - virtual webrtc::EncodedImage Fragmentize(GstBuffer* buffer, webrtc::RTPFragmentationHeader** outFragmentationInfo) + virtual void Fragmentize(webrtc::EncodedImage* encodedImage, std::unique_ptr* encodedImageBuffer, + size_t* bufferSize, GstBuffer* buffer, webrtc::RTPFragmentationHeader* fragmentationInfo) { - GstMapInfo map; + GstMappedBuffer map(buffer, GST_MAP_READ); - gst_buffer_map(buffer, &map, GST_MAP_READ); - webrtc::EncodedImage frame(map.data, map.size, map.size); - gst_buffer_unmap(buffer, &map); + if (*bufferSize < map.size()) { + encodedImage->_size = map.size(); + encodedImage->_buffer = new uint8_t[encodedImage->_size]; + encodedImageBuffer->reset(encodedImage->_buffer); + *bufferSize = map.size(); + } - // No fragmentation by default. 
- webrtc::RTPFragmentationHeader* fragmentationInfo = new webrtc::RTPFragmentationHeader(); + memcpy(encodedImage->_buffer, map.data(), map.size()); + encodedImage->_length = map.size(); + encodedImage->_size = map.size(); fragmentationInfo->VerifyAndAllocateFragmentationHeader(1); fragmentationInfo->fragmentationOffset[0] = 0; - fragmentationInfo->fragmentationLength[0] = gst_buffer_get_size(buffer); + fragmentationInfo->fragmentationLength[0] = map.size(); fragmentationInfo->fragmentationPlType[0] = 0; fragmentationInfo->fragmentationTimeDiff[0] = 0; - - *outFragmentationInfo = fragmentationInfo; - - return frame; } const char* ImplementationName() const { + GRefPtr encoderImplementation; g_return_val_if_fail(m_encoder, nullptr); - return GST_OBJECT_NAME(gst_element_get_factory(m_encoder)); + g_object_get(m_encoder, "encoder", &encoderImplementation.outPtr(), nullptr); + + return GST_OBJECT_NAME(gst_element_get_factory(encoderImplementation.get())); } virtual const gchar* Name() = 0; + virtual int KeyframeInterval(const webrtc::VideoCodec* codecSettings) = 0; - void SetRestrictionCaps(GstCaps* caps) + void SetRestrictionCaps(GRefPtr caps) { - if (caps && m_profile.get() && gst_caps_is_equal(m_restrictionCaps.get(), caps)) - g_object_set(m_profile.get(), "restriction-caps", caps, nullptr); + if (m_restrictionCaps) + g_object_set(m_capsFilter, "caps", m_restrictionCaps.get(), nullptr); m_restrictionCaps = caps; } @@ -403,19 +394,25 @@ class GStreamerVideoEncoder : public webrtc::VideoEncoder { GRefPtr m_pipeline; GstElement* m_src; GstElement* m_encoder; + GstElement* m_capsFilter; webrtc::EncodedImageCallback* m_imageReadyCb; GstClockTime m_firstFramePts; GRefPtr m_restrictionCaps; - GRefPtr m_profile; - BitrateSetter m_bitrateSetter; + webrtc::EncodedImage m_encodedFrame; + std::unique_ptr m_encodedImageBuffer; + size_t m_encodedImageBufferSize; + + Lock m_bufferMapLock; + GRefPtr m_adapter; + Vector m_framesData; }; -class H264Encoder : public GStreamerVideoEncoder { +class GStreamerH264Encoder : public GStreamerVideoEncoder { public: - H264Encoder() { } + GStreamerH264Encoder() { } - H264Encoder(const webrtc::SdpVideoFormat& format) + GStreamerH264Encoder(const webrtc::SdpVideoFormat& format) : m_parser(gst_h264_nal_parser_new()) , packetizationMode(webrtc::H264PacketizationMode::NonInterleaved) { @@ -425,10 +422,15 @@ class H264Encoder : public GStreamerVideoEncoder { packetizationMode = webrtc::H264PacketizationMode::NonInterleaved; } + int KeyframeInterval(const webrtc::VideoCodec* codecSettings) final + { + return codecSettings->H264().keyFrameInterval; + } + // FIXME - MT. safety! 
- webrtc::EncodedImage Fragmentize(GstBuffer* gstbuffer, webrtc::RTPFragmentationHeader** outFragmentationInfo) final + void Fragmentize(webrtc::EncodedImage* encodedImage, std::unique_ptr* encodedImageBuffer, size_t *bufferSize, + GstBuffer* gstbuffer, webrtc::RTPFragmentationHeader* fragmentationHeader) final { - GstMapInfo map; GstH264NalUnit nalu; auto parserResult = GST_H264_PARSER_OK; @@ -436,12 +438,11 @@ class H264Encoder : public GStreamerVideoEncoder { size_t requiredSize = 0; std::vector nals; - webrtc::EncodedImage encodedImage; const uint8_t startCode[4] = { 0, 0, 0, 1 }; - gst_buffer_map(gstbuffer, &map, GST_MAP_READ); + GstMappedBuffer map(gstbuffer, GST_MAP_READ); while (parserResult == GST_H264_PARSER_OK) { - parserResult = gst_h264_parser_identify_nalu(m_parser, map.data, offset, map.size, &nalu); + parserResult = gst_h264_parser_identify_nalu(m_parser, map.data(), offset, map.size(), &nalu); nalu.sc_offset = offset; nalu.offset = offset + sizeof(startCode); @@ -453,43 +454,31 @@ class H264Encoder : public GStreamerVideoEncoder { offset = nalu.offset + nalu.size; } - encodedImage._size = requiredSize; - encodedImage._buffer = new uint8_t[encodedImage._size]; + if (encodedImage->_size < requiredSize) { + encodedImage->_size = requiredSize; + encodedImage->_buffer = new uint8_t[encodedImage->_size]; + encodedImageBuffer->reset(encodedImage->_buffer); + *bufferSize = map.size(); + } + // Iterate nal units and fill the Fragmentation info. - webrtc::RTPFragmentationHeader* fragmentationHeader = new webrtc::RTPFragmentationHeader(); fragmentationHeader->VerifyAndAllocateFragmentationHeader(nals.size()); size_t fragmentIndex = 0; - encodedImage._length = 0; + encodedImage->_length = 0; for (std::vector::iterator nal = nals.begin(); nal != nals.end(); ++nal, fragmentIndex++) { - ASSERT(map.data[nal->sc_offset + 0] == startCode[0]); - ASSERT(map.data[nal->sc_offset + 1] == startCode[1]); - ASSERT(map.data[nal->sc_offset + 2] == startCode[2]); - ASSERT(map.data[nal->sc_offset + 3] == startCode[3]); + ASSERT(map.data()[nal->sc_offset + 0] == startCode[0]); + ASSERT(map.data()[nal->sc_offset + 1] == startCode[1]); + ASSERT(map.data()[nal->sc_offset + 2] == startCode[2]); + ASSERT(map.data()[nal->sc_offset + 3] == startCode[3]); fragmentationHeader->fragmentationOffset[fragmentIndex] = nal->offset; fragmentationHeader->fragmentationLength[fragmentIndex] = nal->size; - memcpy(encodedImage._buffer + encodedImage._length, &map.data[nal->sc_offset], + memcpy(encodedImage->_buffer + encodedImage->_length, &map.data()[nal->sc_offset], sizeof(startCode) + nal->size); - encodedImage._length += nal->size + sizeof(startCode); + encodedImage->_length += nal->size + sizeof(startCode); } - - *outFragmentationInfo = fragmentationHeader; - gst_buffer_unmap(gstbuffer, &map); - return encodedImage; - } - - GstElement* CreateFilter() final - { - GstElement* filter = makeElement("capsfilter"); - auto caps = adoptGRef(gst_caps_new_simple(Caps(), - "alignment", G_TYPE_STRING, "au", - "stream-format", G_TYPE_STRING, "byte-stream", - nullptr)); - g_object_set(filter, "caps", caps.get(), nullptr); - - return filter; } webrtc::SdpVideoFormat ConfigureSupportedCodec(GstElement*) final @@ -517,14 +506,18 @@ class H264Encoder : public GStreamerVideoEncoder { webrtc::H264PacketizationMode packetizationMode; }; -class VP8Encoder : public GStreamerVideoEncoder { +class GStreamerVP8Encoder : public GStreamerVideoEncoder { public: - VP8Encoder() { } - VP8Encoder(const webrtc::SdpVideoFormat&) { } + 
GStreamerVP8Encoder() { } + GStreamerVP8Encoder(const webrtc::SdpVideoFormat&) { } const gchar* Caps() final { return "video/x-vp8"; } const gchar* Name() final { return cricket::kVp8CodecName; } webrtc::VideoCodecType CodecType() final { return webrtc::kVideoCodecVP8; } - virtual const gchar* ProfileName() { return "Profile Realtime"; } + + int KeyframeInterval(const webrtc::VideoCodec* codecSettings) final + { + return codecSettings->VP8().keyFrameInterval; + } void PopulateCodecSpecific(webrtc::CodecSpecificInfo* codecSpecifiInfos, GstBuffer* buffer) final { @@ -543,11 +536,22 @@ class VP8Encoder : public GStreamerVideoEncoder { std::unique_ptr GStreamerVideoEncoderFactory::CreateVideoEncoder(const webrtc::SdpVideoFormat& format) { - if (format.name == cricket::kVp8CodecName) - return std::make_unique(format); + if (format.name == cricket::kVp8CodecName) { + GRefPtr webrtcencoder = adoptGRef(GST_ELEMENT(g_object_ref_sink(gst_element_factory_make("webrtcvideoencoder", NULL)))); + GRefPtr encoder = nullptr; + + g_object_set(webrtcencoder.get(), "format", adoptGRef(gst_caps_from_string("video/x-vp8")).get(), NULL); + g_object_get(webrtcencoder.get(), "encoder", &encoder.outPtr(), NULL); + + if (encoder) + return std::make_unique(format); + + GST_INFO("Using VP8 Encoder from LibWebRTC."); + return webrtc::VP8Encoder::Create(); + } if (format.name == cricket::kH264CodecName) - return std::make_unique(format); + return std::make_unique(format); return nullptr; } @@ -558,6 +562,7 @@ GStreamerVideoEncoderFactory::GStreamerVideoEncoderFactory() std::call_once(debugRegisteredFlag, [] { GST_DEBUG_CATEGORY_INIT(webkit_webrtcenc_debug, "webkitlibwebrtcvideoencoder", 0, "WebKit WebRTC video encoder"); + gst_element_register(nullptr, "webrtcvideoencoder", GST_RANK_PRIMARY, GST_TYPE_WEBRTC_VIDEO_ENCODER); }); } @@ -565,8 +570,8 @@ std::vector GStreamerVideoEncoderFactory::GetSupportedFo { std::vector supportedCodecs; - VP8Encoder().AddCodecIfSupported(&supportedCodecs); - H264Encoder().AddCodecIfSupported(&supportedCodecs); + supportedCodecs.push_back(webrtc::SdpVideoFormat(cricket::kVp8CodecName)); + GStreamerH264Encoder().AddCodecIfSupported(&supportedCodecs); return supportedCodecs; } diff --git a/Source/WebKit/ChangeLog b/Source/WebKit/ChangeLog index f37ba739169cf..e4d5167759a1f 100644 --- a/Source/WebKit/ChangeLog +++ b/Source/WebKit/ChangeLog @@ -1,3 +1,187 @@ +2018-11-16 Thibault Saunier + + [GStreamer][WebRTC] Add API to enable/disable device mocks + https://bugs.webkit.org/show_bug.cgi?id=191699 + + This basically allows us to test MediaStream/WebRTC support without + requiring cameras or microphones and is quite useful. + + Also fix the GStreamerAudioMock by: + - Stop setting `leaky-upstream` on the GStreamerCapturer queue, + this was useful when we were trying to bring the MediaStream + sources inside the main pipeline, it is not the case anymore + (and not doable with the latest version of LibWebRTC). + - Use a 'ticks' wave on the gstreamer audiotestsrc so the test + stream is similar to what the Apple port does. + + Reviewed by Xabier Rodriguez-Calvar. + + The mocks are already tested and the API is really simple. 
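[Editor's illustration, not part of the patch] A minimal sketch of how an embedder might turn the new setting on from application code. It assumes an existing WebKitWebView named web_view; webkit_web_view_get_settings() is pre-existing WebKitGTK API, and the setter is the one introduced by this change.

/* Hypothetical embedder code: expose mock microphone/camera devices for testing. */
#include <webkit2/webkit2.h>

static void enable_mock_capture_devices(WebKitWebView *web_view)
{
    WebKitSettings *settings = webkit_web_view_get_settings(web_view);

    /* New setter added by this patch; the matching property is
     * "enable-mock-capture-devices" and defaults to FALSE. */
    webkit_settings_set_enable_mock_capture_devices(settings, TRUE);
}

With the setting enabled, getUserMedia() and enumerateDevices() can be exercised in tests without real capture hardware, as described above.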
+ + * UIProcess/API/glib/WebKitSettings.cpp: + (webKitSettingsSetProperty): + (webKitSettingsGetProperty): + (webkit_settings_class_init): + (webkit_settings_get_enable_mock_capture_devices): + (webkit_settings_set_enable_mock_capture_devices): + * UIProcess/API/gtk/WebKitSettings.h: + * UIProcess/API/gtk/docs/webkit2gtk-4.0-sections.txt: + +2018-11-16 Thibault Saunier + + [GTK][WPE] Add "WebKitDeviceInfoPermissionRequest.h" into webkit2.h + https://bugs.webkit.org/show_bug.cgi?id=191744 + + It should always have been there. + + Reviewed by Carlos Garcia Campos. + + * UIProcess/API/gtk/webkit2.h: + +2018-10-11 Alejandro G. Castro + + [GTK][WPE] Add mediaDevices.enumerateDevices support + https://bugs.webkit.org/show_bug.cgi?id=185761 + + Reviewed by Youenn Fablet. + + Implemented the enumerateDevices API using a new WebKit class + (DeviceIdHashSaltStorage) that generates and stores the device ID + hash salts per origin, used to avoid fingerprinting in the + webpages. + + The patch also adds a new type of data for the WebsiteDataStore. + That way the users can call the WebsiteDataStore to show what + origins have a hash salt generated and remove them at some point. + + For the moment just the GTK+ and WPE ports are using this class to + generate the hash salts. The patch adds code to the + checkUserMediaPermissionForOrigin API implementation, which was empty + until now for these ports. In this function we create an instance + of a new class WebKitDeviceInfoPermissionRequest that implements + the WebKitPermissionRequestIface interface, which allows the ports + to use the current permission managers implemented in the + embedders to handle this new kind of request the way they like + it. The default implementation is deny. + + The class WebKitDeviceInfoPermissionRequest takes care of + contacting the DeviceIdHashSaltStorage and requesting/regenerating the + hash salts accordingly. + + Persistency is still not implemented; we will add it in a + future patch. + + * Shared/WebsiteData/WebsiteDataType.h: Added the new type of + website data: DeviceIdHashSalt. + * Sources.txt: Added new files to compilation. + * SourcesGTK.txt: Ditto. + * SourcesWPE.txt: Ditto. + * UIProcess/API/glib/WebKitDeviceInfoPermissionRequest.cpp: Added + this class to represent a request of permission to access the + device information. This is specific to the glib ports and implements + the policies regarding the hash salts when the embedder allows or + denies the access using the DeviceIdHashSaltStorage class. + (webkitDeviceInfoPermissionRequestAllow): Get the device hash salt + when the origin is allowed and set it in the original request. + (webkitDeviceInfoPermissionRequestDeny): Regenerate the device id + hash salt when the user does not allow access to the device information. + (webkit_permission_request_interface_init): + (webkitDeviceInfoPermissionRequestDispose): + (webkit_device_info_permission_request_class_init): + (webkitDeviceInfoPermissionRequestCreate): Create the class using + the proxy request from the webprocess and a reference to the + DeviceIdHashSaltStorage. + * UIProcess/API/glib/WebKitDeviceInfoPermissionRequestPrivate.h: + Ditto. + * UIProcess/API/glib/WebKitUIClient.cpp: Added the implementation + for the checkUserMediaPermissionForOrigin API for the glib ports; + it creates the device info request and calls the permission + request API. + * UIProcess/API/glib/WebKitWebsiteData.cpp: + (recordContainsSupportedDataTypes): Added the DeviceIdHashSalt + type. 
+ (toWebKitWebsiteDataTypes): Added the conversion from the WebKit + types to the glib types for the WebsiteDataType::DeviceIdHashSalt. + * UIProcess/API/glib/WebKitWebsiteDataManager.cpp: + (toWebsiteDataTypes): Added the conversion from the glib type + WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT to the WebKit type. + (webkit_website_data_manager_remove): Make sure we remove the + DeviceIdHashSalt if the Cookies are selected. + * UIProcess/API/gtk/WebKitDeviceInfoPermissionRequest.h: Added + this new class that represents a permission request for the device + information in the GTK+ port. + * UIProcess/API/gtk/WebKitWebsiteData.h: Added the new type of + website data in the GTK+ port: + WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT. + * UIProcess/API/wpe/WebKitDeviceInfoPermissionRequest.h: Added + this new class that represents a permission request for the device + information in the WPE port. + * UIProcess/API/wpe/WebKitWebsiteData.h: Added the new type of + website data in the WPE port: + WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT. + * UIProcess/DeviceIdHashSaltStorage.cpp: Added this new class that + handles how to generate and store the hash salts inside + WebKit. Persistency is still not implemented; we will add it in a + future patch. + (WebKit::DeviceIdHashSaltStorage::create): + (WebKit::DeviceIdHashSaltStorage::deviceIdentifierHashSaltForOrigin): + Check the map to get the hash salt for an origin; if there is none, + create a new random one. + (WebKit::DeviceIdHashSaltStorage::regenerateDeviceIdentifierHashSaltForOrigin): + Delete the hash salt of an origin if it already exists and create a new one. + (WebKit::DeviceIdHashSaltStorage::getDeviceIdHashSaltOrigins): + Returns the list of origins that have a hash salt generated for + them. It is used in the WebsiteDataStore. + (WebKit::DeviceIdHashSaltStorage::deleteDeviceIdHashSaltForOrigins): + Deletes the hash salts in the map for an origin. + (WebKit::DeviceIdHashSaltStorage::deleteDeviceIdHashSaltOriginsModifiedSince): + Delete the origins that have not been modified since a given time. + * UIProcess/DeviceIdHashSaltStorage.h: Ditto. + * UIProcess/UserMediaPermissionCheckProxy.h: Remove unneeded class + definition. + * UIProcess/WebsiteData/WebsiteDataStore.cpp: Added support to + return and remove the origins with a generated hash salt used to + generate the device ids. + (WebKit::WebsiteDataStore::WebsiteDataStore): Added the new + reference to the DeviceIdHashSaltStorage class, used to handle the + hash salts. + (WebKit::WebsiteDataStore::fetchDataAndApply): Get the list of + origins with a hash salt in the DeviceIdHashSaltStorage. + (WebKit::WebsiteDataStore::removeData): Remove the hash salts in + DeviceIdHashSaltStorage for an origin. + * UIProcess/WebsiteData/WebsiteDataStore.h: Ditto. + (WebKit::WebsiteDataStore::deviceIdHashSaltStorage): + * WebKit.xcodeproj/project.pbxproj: Added the + DeviceIdHashSaltStorage to the compilation. + +2018-09-20 Thibault Saunier + + [GTK][WPE] Make sure MediaDeviceEnabled and PeerConnectionEnabled are always synced with enable-media-stream + https://bugs.webkit.org/show_bug.cgi?id=188704 + + By default m_isMediaDevicesEnabled and m_isScreenCaptureEnabled are false but + m_isMediaStreamEnabled is true, meaning that in the WPE minibrowser getUserMedia + was disabled even if we explicitly set `"enable-media-stream", TRUE`. + + Reviewed by Michael Catanzaro. 
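[Editor's illustration, not part of the patch] A short sketch of what the fix described above means for embedders that create their own WebKitSettings; webkit_settings_new_with_settings() is pre-existing WebKitGTK API, and the helper name is only an example.

/* Hypothetical embedder code: one construct-time switch now covers WebRTC too. */
#include <webkit2/webkit2.h>

static WebKitSettings *create_settings_with_webrtc(void)
{
    /* webKitSettingsConstructed() now mirrors enable-media-stream into the
     * MediaDevicesEnabled and PeerConnectionEnabled preferences as well. */
    return webkit_settings_new_with_settings("enable-media-stream", TRUE, NULL);
}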
+ + * UIProcess/API/glib/WebKitSettings.cpp: + (webKitSettingsConstructed): + 2018-10-02 Adrian Perez de Castro Unreviewed. Update OptionsWPE.cmake and NEWS for the 2.22.0 release. diff --git a/Source/WebKit/Shared/WebsiteData/WebsiteDataType.h b/Source/WebKit/Shared/WebsiteData/WebsiteDataType.h index 9d50361e81a37..601bb44367bf1 100644 --- a/Source/WebKit/Shared/WebsiteData/WebsiteDataType.h +++ b/Source/WebKit/Shared/WebsiteData/WebsiteDataType.h @@ -49,6 +49,7 @@ enum class WebsiteDataType { ServiceWorkerRegistrations = 1 << 14, #endif DOMCache = 1 << 15, + DeviceIdHashSalt = 1 << 16, }; }; diff --git a/Source/WebKit/Sources.txt b/Source/WebKit/Sources.txt index d081eea403598..0c9c358455738 100644 --- a/Source/WebKit/Sources.txt +++ b/Source/WebKit/Sources.txt @@ -233,6 +233,7 @@ UIProcess/FrameLoadState.cpp UIProcess/GeolocationPermissionRequestManagerProxy.cpp UIProcess/GeolocationPermissionRequestProxy.cpp UIProcess/PageLoadState.cpp +UIProcess/DeviceIdHashSaltStorage.cpp UIProcess/ProcessAssertion.cpp UIProcess/ProcessThrottler.cpp UIProcess/RemoteWebInspectorProxy.cpp diff --git a/Source/WebKit/SourcesGTK.txt b/Source/WebKit/SourcesGTK.txt index 442996d91bf2e..ef470f4d6168d 100644 --- a/Source/WebKit/SourcesGTK.txt +++ b/Source/WebKit/SourcesGTK.txt @@ -135,6 +135,7 @@ UIProcess/API/glib/WebKitContextMenuClient.cpp @no-unify UIProcess/API/glib/WebKitCookieManager.cpp @no-unify UIProcess/API/glib/WebKitCredential.cpp @no-unify UIProcess/API/glib/WebKitCustomProtocolManagerClient.cpp @no-unify +UIProcess/API/glib/WebKitDeviceInfoPermissionRequest.cpp @no-unify UIProcess/API/glib/WebKitDownload.cpp @no-unify UIProcess/API/glib/WebKitDownloadClient.cpp @no-unify UIProcess/API/glib/WebKitEditorState.cpp @no-unify diff --git a/Source/WebKit/SourcesWPE.txt b/Source/WebKit/SourcesWPE.txt index 14a34d9998332..b629993630643 100644 --- a/Source/WebKit/SourcesWPE.txt +++ b/Source/WebKit/SourcesWPE.txt @@ -125,6 +125,7 @@ UIProcess/API/glib/WebKitContextMenuClient.cpp @no-unify UIProcess/API/glib/WebKitCookieManager.cpp @no-unify UIProcess/API/glib/WebKitCredential.cpp @no-unify UIProcess/API/glib/WebKitCustomProtocolManagerClient.cpp @no-unify +UIProcess/API/glib/WebKitDeviceInfoPermissionRequest.cpp @no-unify UIProcess/API/glib/WebKitDownload.cpp @no-unify UIProcess/API/glib/WebKitDownloadClient.cpp @no-unify UIProcess/API/glib/WebKitEditorState.cpp @no-unify diff --git a/Source/WebKit/UIProcess/API/glib/WebKitDeviceInfoPermissionRequest.cpp b/Source/WebKit/UIProcess/API/glib/WebKitDeviceInfoPermissionRequest.cpp new file mode 100644 index 0000000000000..3f9485ef2d09b --- /dev/null +++ b/Source/WebKit/UIProcess/API/glib/WebKitDeviceInfoPermissionRequest.cpp @@ -0,0 +1,126 @@ +/* + * Copyright (C) 2018 Igalia S.L + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any 
later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#include "config.h" +#include "WebKitDeviceInfoPermissionRequest.h" + +#include "DeviceIdHashSaltStorage.h" +#include "UserMediaPermissionCheckProxy.h" +#include "WebKitDeviceInfoPermissionRequestPrivate.h" +#include "WebKitPermissionRequest.h" +#include "WebsiteDataStore.h" +#include +#include + +using namespace WebKit; + +/** + * SECTION: WebKitDeviceInfoPermissionRequest + * @Short_description: A permission request for accessing the user's audio/video devices. + * @Title: WebKitDeviceInfoPermissionRequest + * @See_also: #WebKitPermissionRequest, #WebKitWebView + * + * WebKitDeviceInfoPermissionRequest represents a request for + * permission to decide whether WebKit should be allowed to access the user's + * device information when requested through the enumerateDevices API. + * + * When a WebKitDeviceInfoPermissionRequest is not handled by the user, + * it is denied by default. + * + * Since: 2.22 + */ + +static void webkit_permission_request_interface_init(WebKitPermissionRequestIface*); + +struct _WebKitDeviceInfoPermissionRequestPrivate { + RefPtr request; + RefPtr deviceIdHashSaltStorage; + bool madeDecision; +}; + +WEBKIT_DEFINE_TYPE_WITH_CODE( + WebKitDeviceInfoPermissionRequest, webkit_device_info_permission_request, G_TYPE_OBJECT, + G_IMPLEMENT_INTERFACE(WEBKIT_TYPE_PERMISSION_REQUEST, webkit_permission_request_interface_init)) + +static void webkitDeviceInfoPermissionRequestAllow(WebKitPermissionRequest* request) +{ + ASSERT(WEBKIT_IS_DEVICE_INFO_PERMISSION_REQUEST(request)); + + auto& priv = WEBKIT_DEVICE_INFO_PERMISSION_REQUEST(request)->priv; + + if (!priv->deviceIdHashSaltStorage) { + priv->request->setUserMediaAccessInfo(String(), false); + return; + } + + // Only one decision at a time. + if (priv->madeDecision) + return; + + priv->madeDecision = true; + auto salt = priv->deviceIdHashSaltStorage->deviceIdHashSaltForOrigin(priv->request->topLevelDocumentSecurityOrigin()); + priv->request->setUserMediaAccessInfo(WTFMove(salt), true); +} + +static void webkitDeviceInfoPermissionRequestDeny(WebKitPermissionRequest* request) +{ + ASSERT(WEBKIT_IS_DEVICE_INFO_PERMISSION_REQUEST(request)); + + auto& priv = WEBKIT_DEVICE_INFO_PERMISSION_REQUEST(request)->priv; + + if (!priv->deviceIdHashSaltStorage) { + priv->request->setUserMediaAccessInfo(String(), false); + return; + } + + // Only one decision at a time. + if (priv->madeDecision) + return; + + priv->madeDecision = true; + auto salt = priv->deviceIdHashSaltStorage->regenerateDeviceIdHashSaltForOrigin(*priv->request); + priv->request->setUserMediaAccessInfo(WTFMove(salt), false); +} + +static void webkit_permission_request_interface_init(WebKitPermissionRequestIface* iface) +{ + iface->allow = webkitDeviceInfoPermissionRequestAllow; + iface->deny = webkitDeviceInfoPermissionRequestDeny; +} + +static void webkitDeviceInfoPermissionRequestDispose(GObject* object) +{ + // Default behaviour when no decision has been made is denying the request. 
+ webkitDeviceInfoPermissionRequestDeny(WEBKIT_PERMISSION_REQUEST(object)); + G_OBJECT_CLASS(webkit_device_info_permission_request_parent_class)->dispose(object); +} + +static void webkit_device_info_permission_request_class_init(WebKitDeviceInfoPermissionRequestClass* klass) +{ + GObjectClass* objectClass = G_OBJECT_CLASS(klass); + objectClass->dispose = webkitDeviceInfoPermissionRequestDispose; +} + +WebKitDeviceInfoPermissionRequest* webkitDeviceInfoPermissionRequestCreate(UserMediaPermissionCheckProxy& request, DeviceIdHashSaltStorage* deviceIdHashSaltStorage) +{ + auto* deviceInfoPermissionRequest = WEBKIT_DEVICE_INFO_PERMISSION_REQUEST(g_object_new(WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST, nullptr)); + + deviceInfoPermissionRequest->priv->request = &request; + deviceInfoPermissionRequest->priv->deviceIdHashSaltStorage = deviceIdHashSaltStorage; + return deviceInfoPermissionRequest; +} diff --git a/Source/WebKit/UIProcess/API/glib/WebKitDeviceInfoPermissionRequestPrivate.h b/Source/WebKit/UIProcess/API/glib/WebKitDeviceInfoPermissionRequestPrivate.h new file mode 100644 index 0000000000000..c060b2e0ccd6b --- /dev/null +++ b/Source/WebKit/UIProcess/API/glib/WebKitDeviceInfoPermissionRequestPrivate.h @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2018 Igalia S.L + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#pragma once + +#include "APISecurityOrigin.h" + +typedef struct _WebKitDeviceInfoPermissionRequest WebKitDeviceInfoPermissionRequest; + +namespace WebKit { +class DeviceIdHashSaltStorage; +}; + +WebKitDeviceInfoPermissionRequest* webkitDeviceInfoPermissionRequestCreate(WebKit::UserMediaPermissionCheckProxy&, WebKit::DeviceIdHashSaltStorage*); diff --git a/Source/WebKit/UIProcess/API/glib/WebKitSettings.cpp b/Source/WebKit/UIProcess/API/glib/WebKitSettings.cpp index 1af8554af8585..eb303901e0319 100644 --- a/Source/WebKit/UIProcess/API/glib/WebKitSettings.cpp +++ b/Source/WebKit/UIProcess/API/glib/WebKitSettings.cpp @@ -152,6 +152,7 @@ enum { PROP_ENABLE_ACCELERATED_2D_CANVAS, PROP_ENABLE_WRITE_CONSOLE_MESSAGES_TO_STDOUT, PROP_ENABLE_MEDIA_STREAM, + PROP_ENABLE_MOCK_CAPTURE_DEVICES, PROP_ENABLE_SPATIAL_NAVIGATION, PROP_ENABLE_MEDIASOURCE, PROP_ENABLE_ENCRYPTED_MEDIA, @@ -180,6 +181,10 @@ static void webKitSettingsConstructed(GObject* object) WebPreferences* prefs = settings->priv->preferences.get(); prefs->setShouldRespectImageOrientation(true); + bool mediaStreamEnabled = prefs->mediaStreamEnabled(); + prefs->setMediaDevicesEnabled(mediaStreamEnabled); + prefs->setPeerConnectionEnabled(mediaStreamEnabled); + settings->priv->screenDpi = WebCore::screenDPI(); WebCore::setScreenDPIObserverHandler([settings]() { auto newScreenDpi = WebCore::screenDPI(); @@ -351,6 +356,9 @@ static void webKitSettingsSetProperty(GObject* object, guint propId, const GValu case PROP_ENABLE_MEDIA_STREAM: webkit_settings_set_enable_media_stream(settings, g_value_get_boolean(value)); break; + case PROP_ENABLE_MOCK_CAPTURE_DEVICES: + webkit_settings_set_enable_mock_capture_devices(settings, g_value_get_boolean(value)); + break; case PROP_ENABLE_SPATIAL_NAVIGATION: webkit_settings_set_enable_spatial_navigation(settings, g_value_get_boolean(value)); break; @@ -532,6 +540,9 @@ static void webKitSettingsGetProperty(GObject* object, guint propId, GValue* val case PROP_ENABLE_MEDIA_STREAM: g_value_set_boolean(value, webkit_settings_get_enable_media_stream(settings)); break; + case PROP_ENABLE_MOCK_CAPTURE_DEVICES: + g_value_set_boolean(value, webkit_settings_get_enable_mock_capture_devices(settings)); + break; case PROP_ENABLE_SPATIAL_NAVIGATION: g_value_set_boolean(value, webkit_settings_get_enable_spatial_navigation(settings)); break; @@ -1278,6 +1289,23 @@ static void webkit_settings_class_init(WebKitSettingsClass* klass) FALSE, readWriteConstructParamFlags)); + /** + * WebKitSettings:enable-mock-capture-devices: + * + * Enable or disable the Mock Capture Devices. Those are fake + * Microphone and Camera devices to be used as MediaStream + * sources. 
+ * + * Since: 2.24 + */ + g_object_class_install_property(gObjectClass, + PROP_ENABLE_MOCK_CAPTURE_DEVICES, + g_param_spec_boolean("enable-mock-capture-devices", + _("Enable mock capture devices"), + _("Whether we expose mock capture devices or not"), + FALSE, + readWriteConstructParamFlags)); + /** * WebKitSettings:enable-spatial-navigation: * @@ -3156,6 +3184,45 @@ void webkit_settings_set_enable_media_stream(WebKitSettings* settings, gboolean g_object_notify(G_OBJECT(settings), "enable-media-stream"); } +/** + * webkit_settings_get_enable_mock_capture_devices: + * @settings: a #WebKitSettings + * + * Get the #WebKitSettings:enable-mock-capture-devices property. + * + * Returns: %TRUE if mock capture devices are enabled or %FALSE otherwise. + * + * Since: 2.24 + */ +gboolean webkit_settings_get_enable_mock_capture_devices(WebKitSettings* settings) +{ + g_return_val_if_fail(WEBKIT_IS_SETTINGS(settings), FALSE); + + return settings->priv->preferences->mockCaptureDevicesEnabled(); +} + +/** + * webkit_settings_set_enable_mock_capture_devices: + * @settings: a #WebKitSettings + * @enabled: Value to be set + * + * Set the #WebKitSettings:enable-mock-capture-devices property. + * + * Since: 2.24 + */ +void webkit_settings_set_enable_mock_capture_devices(WebKitSettings* settings, gboolean enabled) +{ + g_return_if_fail(WEBKIT_IS_SETTINGS(settings)); + + WebKitSettingsPrivate* priv = settings->priv; + bool currentValue = priv->preferences->mockCaptureDevicesEnabled(); + if (currentValue == enabled) + return; + + priv->preferences->setMockCaptureDevicesEnabled(enabled); + g_object_notify(G_OBJECT(settings), "enable-mock-capture-devices"); +} + /** * webkit_settings_set_enable_spatial_navigation: * @settings: a #WebKitSettings diff --git a/Source/WebKit/UIProcess/API/glib/WebKitUIClient.cpp b/Source/WebKit/UIProcess/API/glib/WebKitUIClient.cpp index c6c21d0d8bba2..ff3fe41d6acc1 100644 --- a/Source/WebKit/UIProcess/API/glib/WebKitUIClient.cpp +++ b/Source/WebKit/UIProcess/API/glib/WebKitUIClient.cpp @@ -22,6 +22,7 @@ #include "APIUIClient.h" #include "DrawingAreaProxy.h" +#include "WebKitDeviceInfoPermissionRequestPrivate.h" #include "WebKitFileChooserRequestPrivate.h" #include "WebKitGeolocationPermissionRequestPrivate.h" #include "WebKitNavigationActionPrivate.h" @@ -32,6 +33,7 @@ #include "WebKitWindowPropertiesPrivate.h" #include "WebPageProxy.h" #include "WebProcessProxy.h" +#include "WebsiteDataStore.h" #include #if PLATFORM(GTK) @@ -196,6 +198,13 @@ class UIClient : public API::UIClient { return true; } + bool checkUserMediaPermissionForOrigin(WebPageProxy& page, WebFrameProxy&, API::SecurityOrigin& userMediaDocumentOrigin, API::SecurityOrigin& topLevelDocumentOrigin, UserMediaPermissionCheckProxy& permissionRequest) override + { + auto deviceInfoPermissionRequest = adoptGRef(webkitDeviceInfoPermissionRequestCreate(permissionRequest, page.websiteDataStore().deviceIdHashSaltStorage())); + webkitWebViewMakePermissionRequest(m_webView, WEBKIT_PERMISSION_REQUEST(deviceInfoPermissionRequest.get())); + return true; + } + void decidePolicyForNotificationPermissionRequest(WebPageProxy&, API::SecurityOrigin&, Function&& completionHandler) final { GRefPtr notificationPermissionRequest = adoptGRef(webkitNotificationPermissionRequestCreate(NotificationPermissionRequest::create(WTFMove(completionHandler)).ptr())); diff --git a/Source/WebKit/UIProcess/API/glib/WebKitWebsiteData.cpp b/Source/WebKit/UIProcess/API/glib/WebKitWebsiteData.cpp index a24da4d6e1cc8..7c97e0b1b24a1 100644 --- 
a/Source/WebKit/UIProcess/API/glib/WebKitWebsiteData.cpp +++ b/Source/WebKit/UIProcess/API/glib/WebKitWebsiteData.cpp @@ -76,7 +76,8 @@ static bool recordContainsSupportedDataTypes(const WebsiteDataRecord& record) #if ENABLE(NETSCAPE_PLUGIN_API) WebsiteDataType::PlugInData, #endif - WebsiteDataType::Cookies + WebsiteDataType::Cookies, + WebsiteDataType::DeviceIdHashSalt }); } @@ -103,6 +104,8 @@ static WebKitWebsiteDataTypes toWebKitWebsiteDataTypes(OptionSet(returnValue); } diff --git a/Source/WebKit/UIProcess/API/glib/WebKitWebsiteDataManager.cpp b/Source/WebKit/UIProcess/API/glib/WebKitWebsiteDataManager.cpp index 3020ecbb24ea4..7f489200f71b6 100644 --- a/Source/WebKit/UIProcess/API/glib/WebKitWebsiteDataManager.cpp +++ b/Source/WebKit/UIProcess/API/glib/WebKitWebsiteDataManager.cpp @@ -649,6 +649,8 @@ static OptionSet toWebsiteDataTypes(WebKitWebsiteDataTypes type #endif if (types & WEBKIT_WEBSITE_DATA_COOKIES) returnValue |= WebsiteDataType::Cookies; + if (types & WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT) + returnValue |= WebsiteDataType::DeviceIdHashSalt; return returnValue; } @@ -732,7 +734,12 @@ void webkit_website_data_manager_remove(WebKitWebsiteDataManager* manager, WebKi for (GList* item = websiteData; item; item = g_list_next(item)) { WebKitWebsiteData* data = static_cast(item->data); - if (webkit_website_data_get_types(data) & types) + // We have to remove the hash salts when cookies are removed. + auto dataTypes = webkit_website_data_get_types(data); + if (dataTypes & WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT) + dataTypes = static_cast(dataTypes | WEBKIT_WEBSITE_DATA_COOKIES); + + if (dataTypes & types) records.append(webkitWebsiteDataGetRecord(data)); } diff --git a/Source/WebKit/UIProcess/API/gtk/WebKitDeviceInfoPermissionRequest.h b/Source/WebKit/UIProcess/API/gtk/WebKitDeviceInfoPermissionRequest.h new file mode 100644 index 0000000000000..1f8c42ecb6084 --- /dev/null +++ b/Source/WebKit/UIProcess/API/gtk/WebKitDeviceInfoPermissionRequest.h @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2018 Igalia S.L + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#if !defined(__WEBKIT2_H_INSIDE__) && !defined(WEBKIT2_COMPILATION) +#error "Only can be included directly." 
+#endif + +#ifndef WebKitDeviceInfoPermissionRequest_h +#define WebKitDeviceInfoPermissionRequest_h + +#include +#include + +G_BEGIN_DECLS + +#define WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST (webkit_device_info_permission_request_get_type()) +#define WEBKIT_DEVICE_INFO_PERMISSION_REQUEST(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST, WebKitDeviceInfoPermissionRequest)) +#define WEBKIT_DEVICE_INFO_PERMISSION_REQUEST_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST, WebKitDeviceInfoPermissionRequestClass)) +#define WEBKIT_IS_DEVICE_INFO_PERMISSION_REQUEST(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST)) +#define WEBKIT_IS_DEVICE_INFO_PERMISSION_REQUEST_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST)) +#define WEBKIT_DEVICE_INFO_PERMISSION_REQUEST_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST, WebKitDeviceInfoPermissionRequestClass)) + +typedef struct _WebKitDeviceInfoPermissionRequest WebKitDeviceInfoPermissionRequest; +typedef struct _WebKitDeviceInfoPermissionRequestClass WebKitDeviceInfoPermissionRequestClass; +typedef struct _WebKitDeviceInfoPermissionRequestPrivate WebKitDeviceInfoPermissionRequestPrivate; + +struct _WebKitDeviceInfoPermissionRequest { + GObject parent; + + /*< private >*/ + WebKitDeviceInfoPermissionRequestPrivate *priv; +}; + +struct _WebKitDeviceInfoPermissionRequestClass { + GObjectClass parent_class; + + void (*_webkit_reserved0) (void); + void (*_webkit_reserved1) (void); + void (*_webkit_reserved2) (void); + void (*_webkit_reserved3) (void); +}; + +WEBKIT_API GType +webkit_device_info_permission_request_get_type (void); + +G_END_DECLS + +#endif diff --git a/Source/WebKit/UIProcess/API/gtk/WebKitSettings.h b/Source/WebKit/UIProcess/API/gtk/WebKitSettings.h index 84999ed30a929..cb2c206e2f687 100644 --- a/Source/WebKit/UIProcess/API/gtk/WebKitSettings.h +++ b/Source/WebKit/UIProcess/API/gtk/WebKitSettings.h @@ -416,6 +416,13 @@ WEBKIT_API void webkit_settings_set_enable_media_stream (WebKitSettings *settings, gboolean enabled); +WEBKIT_API gboolean +webkit_settings_get_enable_mock_capture_devices (WebKitSettings *settings); + +WEBKIT_API void +webkit_settings_set_enable_mock_capture_devices (WebKitSettings *settings, + gboolean enabled); + WEBKIT_API gboolean webkit_settings_get_enable_spatial_navigation (WebKitSettings *settings); diff --git a/Source/WebKit/UIProcess/API/gtk/WebKitWebsiteData.h b/Source/WebKit/UIProcess/API/gtk/WebKitWebsiteData.h index 065047fe17c58..674305cc01f0d 100644 --- a/Source/WebKit/UIProcess/API/gtk/WebKitWebsiteData.h +++ b/Source/WebKit/UIProcess/API/gtk/WebKitWebsiteData.h @@ -44,6 +44,7 @@ typedef struct _WebKitWebsiteData WebKitWebsiteData; * @WEBKIT_WEBSITE_DATA_INDEXEDDB_DATABASES: IndexedDB databases. * @WEBKIT_WEBSITE_DATA_PLUGIN_DATA: Plugins data. * @WEBKIT_WEBSITE_DATA_COOKIES: Cookies. + * @WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT: Hash salt used to generate the device ids used by webpages. * @WEBKIT_WEBSITE_DATA_ALL: All types. * * Enum values with flags representing types of Website data. 
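The UIClient change above routes enumerateDevices() permission checks to the embedder as a WebKitDeviceInfoPermissionRequest. A hedged sketch of an application-side handler for the existing "permission-request" signal, using the type declared in this new header (the callback name is hypothetical):

```c
#include <webkit2/webkit2.h>

/* Sketch: allow device-info (enumerateDevices) and user-media (getUserMedia)
 * requests, deny everything else. */
static gboolean on_permission_request(WebKitWebView *web_view,
                                      WebKitPermissionRequest *request,
                                      gpointer user_data)
{
    if (WEBKIT_IS_DEVICE_INFO_PERMISSION_REQUEST(request)
        || WEBKIT_IS_USER_MEDIA_PERMISSION_REQUEST(request))
        webkit_permission_request_allow(request);
    else
        webkit_permission_request_deny(request);
    return TRUE; /* The request has been handled. */
}

/* Connected with:
 * g_signal_connect(web_view, "permission-request",
 *                  G_CALLBACK(on_permission_request), NULL); */
```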
@@ -60,7 +61,8 @@ typedef enum { WEBKIT_WEBSITE_DATA_INDEXEDDB_DATABASES = 1 << 6, WEBKIT_WEBSITE_DATA_PLUGIN_DATA = 1 << 7, WEBKIT_WEBSITE_DATA_COOKIES = 1 << 8, - WEBKIT_WEBSITE_DATA_ALL = (1 << 9) - 1 + WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT = 1 << 9, + WEBKIT_WEBSITE_DATA_ALL = (1 << 10) - 1 } WebKitWebsiteDataTypes; WEBKIT_API GType diff --git a/Source/WebKit/UIProcess/API/gtk/docs/webkit2gtk-4.0-sections.txt b/Source/WebKit/UIProcess/API/gtk/docs/webkit2gtk-4.0-sections.txt index 55605db4043f1..575ec5a0d0566 100644 --- a/Source/WebKit/UIProcess/API/gtk/docs/webkit2gtk-4.0-sections.txt +++ b/Source/WebKit/UIProcess/API/gtk/docs/webkit2gtk-4.0-sections.txt @@ -466,6 +466,8 @@ webkit_settings_get_enable_write_console_messages_to_stdout webkit_settings_set_enable_write_console_messages_to_stdout webkit_settings_get_enable_media_stream webkit_settings_set_enable_media_stream +webkit_settings_get_enable_mock_capture_devices +webkit_settings_set_enable_mock_capture_devices webkit_settings_get_enable_spatial_navigation webkit_settings_set_enable_spatial_navigation webkit_settings_get_enable_mediasource diff --git a/Source/WebKit/UIProcess/API/gtk/webkit2.h b/Source/WebKit/UIProcess/API/gtk/webkit2.h index fb0476f1b179b..b9c000970a04d 100644 --- a/Source/WebKit/UIProcess/API/gtk/webkit2.h +++ b/Source/WebKit/UIProcess/API/gtk/webkit2.h @@ -38,6 +38,7 @@ #include #include #include +#include #include #include #include diff --git a/Source/WebKit/UIProcess/API/wpe/WebKitDeviceInfoPermissionRequest.h b/Source/WebKit/UIProcess/API/wpe/WebKitDeviceInfoPermissionRequest.h new file mode 100644 index 0000000000000..a77d18d691803 --- /dev/null +++ b/Source/WebKit/UIProcess/API/wpe/WebKitDeviceInfoPermissionRequest.h @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2018 Igalia S.L + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + */ + +#if !defined(__WEBKIT_H_INSIDE__) && !defined(WEBKIT2_COMPILATION) +#error "Only can be included directly." 
+#endif + +#ifndef WebKitDeviceInfoPermissionRequest_h +#define WebKitDeviceInfoPermissionRequest_h + +#include +#include + +G_BEGIN_DECLS + +#define WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST (webkit_device_info_permission_request_get_type()) +#define WEBKIT_DEVICE_INFO_PERMISSION_REQUEST(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST, WebKitDeviceInfoPermissionRequest)) +#define WEBKIT_DEVICE_INFO_PERMISSION_REQUEST_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST, WebKitDeviceInfoPermissionRequestClass)) +#define WEBKIT_IS_DEVICE_INFO_PERMISSION_REQUEST(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST)) +#define WEBKIT_IS_DEVICE_INFO_PERMISSION_REQUEST_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST)) +#define WEBKIT_DEVICE_INFO_PERMISSION_REQUEST_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj), WEBKIT_TYPE_DEVICE_INFO_PERMISSION_REQUEST, WebKitDeviceInfoPermissionRequestClass)) + +typedef struct _WebKitDeviceInfoPermissionRequest WebKitDeviceInfoPermissionRequest; +typedef struct _WebKitDeviceInfoPermissionRequestClass WebKitDeviceInfoPermissionRequestClass; +typedef struct _WebKitDeviceInfoPermissionRequestPrivate WebKitDeviceInfoPermissionRequestPrivate; + +struct _WebKitDeviceInfoPermissionRequest { + GObject parent; + + /*< private >*/ + WebKitDeviceInfoPermissionRequestPrivate *priv; +}; + +struct _WebKitDeviceInfoPermissionRequestClass { + GObjectClass parent_class; + + void (*_webkit_reserved0) (void); + void (*_webkit_reserved1) (void); + void (*_webkit_reserved2) (void); + void (*_webkit_reserved3) (void); +}; + +WEBKIT_API GType +webkit_device_info_permission_request_get_type (void); + +G_END_DECLS + +#endif diff --git a/Source/WebKit/UIProcess/API/wpe/WebKitSettings.h b/Source/WebKit/UIProcess/API/wpe/WebKitSettings.h index 49fd6f14031b4..da3a7f97a11b9 100644 --- a/Source/WebKit/UIProcess/API/wpe/WebKitSettings.h +++ b/Source/WebKit/UIProcess/API/wpe/WebKitSettings.h @@ -393,6 +393,13 @@ WEBKIT_API void webkit_settings_set_enable_media_stream (WebKitSettings *settings, gboolean enabled); +WEBKIT_API gboolean +webkit_settings_get_enable_mock_capture_devices (WebKitSettings *settings); + +WEBKIT_API void +webkit_settings_set_enable_mock_capture_devices (WebKitSettings *settings, + gboolean enabled); + WEBKIT_API gboolean webkit_settings_get_enable_spatial_navigation (WebKitSettings *settings); diff --git a/Source/WebKit/UIProcess/API/wpe/WebKitWebsiteData.h b/Source/WebKit/UIProcess/API/wpe/WebKitWebsiteData.h index 3a462aaff9b87..03dd585d1265a 100644 --- a/Source/WebKit/UIProcess/API/wpe/WebKitWebsiteData.h +++ b/Source/WebKit/UIProcess/API/wpe/WebKitWebsiteData.h @@ -44,6 +44,7 @@ typedef struct _WebKitWebsiteData WebKitWebsiteData; * @WEBKIT_WEBSITE_DATA_INDEXEDDB_DATABASES: IndexedDB databases. * @WEBKIT_WEBSITE_DATA_PLUGIN_DATA: Plugins data. * @WEBKIT_WEBSITE_DATA_COOKIES: Cookies. + * @WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT: Hash salt used to generate the device ids used by webpages. * @WEBKIT_WEBSITE_DATA_ALL: All types. * * Enum values with flags representing types of Website data. 
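With the new WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT flag, applications can fetch and clear the per-origin hash salts like any other website data type; as arranged in the WebKitWebsiteDataManager change above, clearing cookies drops the salts as well. A minimal sketch, assuming an existing WebKitWebContext named `context`:

```c
#include <webkit2/webkit2.h>

static void clear_cb(GObject *object, GAsyncResult *result, gpointer user_data)
{
    GError *error = NULL;
    if (!webkit_website_data_manager_clear_finish(WEBKIT_WEBSITE_DATA_MANAGER(object), result, &error)) {
        g_warning("Failed to clear device id hash salts: %s", error->message);
        g_error_free(error);
    }
}

/* Sketch: drop all per-origin device id hash salts so the ids returned by
 * enumerateDevices() change on the next visit. A timespan of 0 removes all data. */
static void clear_device_id_hash_salts(WebKitWebContext *context)
{
    WebKitWebsiteDataManager *manager = webkit_web_context_get_website_data_manager(context);
    webkit_website_data_manager_clear(manager, WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT,
                                      0, NULL, clear_cb, NULL);
}
```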
@@ -60,7 +61,8 @@ typedef enum { WEBKIT_WEBSITE_DATA_INDEXEDDB_DATABASES = 1 << 6, WEBKIT_WEBSITE_DATA_PLUGIN_DATA = 1 << 7, WEBKIT_WEBSITE_DATA_COOKIES = 1 << 8, - WEBKIT_WEBSITE_DATA_ALL = (1 << 9) - 1 + WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT = 1 << 9, + WEBKIT_WEBSITE_DATA_ALL = (1 << 10) - 1 } WebKitWebsiteDataTypes; WEBKIT_API GType diff --git a/Source/WebKit/UIProcess/API/wpe/webkit.h b/Source/WebKit/UIProcess/API/wpe/webkit.h index 943ade2afa819..4c272d2ec8666 100644 --- a/Source/WebKit/UIProcess/API/wpe/webkit.h +++ b/Source/WebKit/UIProcess/API/wpe/webkit.h @@ -38,6 +38,7 @@ #include #include #include +#include #include #include #include diff --git a/Source/WebKit/UIProcess/DeviceIdHashSaltStorage.cpp b/Source/WebKit/UIProcess/DeviceIdHashSaltStorage.cpp new file mode 100644 index 0000000000000..12c100d54ceaa --- /dev/null +++ b/Source/WebKit/UIProcess/DeviceIdHashSaltStorage.cpp @@ -0,0 +1,112 @@ +/* + * Copyright (C) 2018 Igalia S.L. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ + +#include "config.h" +#include "DeviceIdHashSaltStorage.h" + +#include +#include +#include +#include +#include +#include + +// FIXME: Implement persistency. 
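The DeviceIdHashSaltStorage implementation that follows keeps one salt per top-level origin and builds it by hex-encoding three cryptographically random 64-bit words into a 48-character string. A standalone sketch of that encoding scheme, reading from /dev/urandom instead of WTF's cryptographicallyRandomValues() and using a hypothetical helper name:

```c
#include <stdint.h>
#include <stdio.h>

/* Sketch: produce a 48-character hex salt (three 64-bit words, 16 hex chars each).
 * `out` must hold at least 49 bytes (48 chars + NUL). Returns 0 on success. */
static int generate_device_id_hash_salt(char out[49])
{
    uint64_t words[3];
    FILE *urandom = fopen("/dev/urandom", "rb");
    if (!urandom || fread(words, sizeof(words), 1, urandom) != 1) {
        if (urandom)
            fclose(urandom);
        return -1;
    }
    fclose(urandom);
    for (int i = 0; i < 3; i++)
        snprintf(out + i * 16, 17, "%016llx", (unsigned long long)words[i]);
    return 0;
}
```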
+ +namespace WebKit { +using namespace WebCore; + +static const int hashSaltSize = 48; +static const int randomDataSize = hashSaltSize / 16; + +Ref DeviceIdHashSaltStorage::create() +{ + return adoptRef(*new DeviceIdHashSaltStorage()); +} + +const String& DeviceIdHashSaltStorage::regenerateDeviceIdHashSaltForOrigin(UserMediaPermissionCheckProxy& request) +{ + auto& documentOrigin = request.topLevelDocumentSecurityOrigin(); + + auto documentOriginData = documentOrigin.data(); + m_deviceIdHashSaltForOrigins.removeIf([&documentOriginData](auto& keyAndValue) { + return keyAndValue.value->documentOrigin == documentOriginData; + }); + + return deviceIdHashSaltForOrigin(documentOrigin); +} + +const String& DeviceIdHashSaltStorage::deviceIdHashSaltForOrigin(SecurityOrigin& documentOrigin) +{ + auto& deviceIdHashSalt = m_deviceIdHashSaltForOrigins.ensure(documentOrigin.toRawString(), [&documentOrigin] () { + uint64_t randomData[randomDataSize]; + cryptographicallyRandomValues(reinterpret_cast(randomData), sizeof(randomData)); + + StringBuilder builder; + builder.reserveCapacity(hashSaltSize); + for (int i = 0; i < randomDataSize; i++) + appendUnsigned64AsHex(randomData[i], builder); + + String deviceIdHashSalt = builder.toString(); + + return std::make_unique(documentOrigin.data().isolatedCopy(), WTFMove(deviceIdHashSalt)); + }).iterator->value; + + deviceIdHashSalt->lastTimeUsed = WallTime::now(); + + return deviceIdHashSalt->deviceIdHashSalt; +} + +void DeviceIdHashSaltStorage::getDeviceIdHashSaltOrigins(CompletionHandler&&)>&& completionHandler) +{ + HashSet origins; + + for (auto& hashSaltForOrigin : m_deviceIdHashSaltForOrigins) + origins.add(hashSaltForOrigin.value->documentOrigin); + + RunLoop::main().dispatch([origins = WTFMove(origins), completionHandler = WTFMove(completionHandler)]() mutable { + completionHandler(WTFMove(origins)); + }); +} + +void DeviceIdHashSaltStorage::deleteDeviceIdHashSaltForOrigins(const Vector& origins, CompletionHandler&& completionHandler) +{ + m_deviceIdHashSaltForOrigins.removeIf([&origins](auto& keyAndValue) { + return origins.contains(keyAndValue.value->documentOrigin); + }); + + RunLoop::main().dispatch(WTFMove(completionHandler)); +} + +void DeviceIdHashSaltStorage::deleteDeviceIdHashSaltOriginsModifiedSince(WallTime time, CompletionHandler&& completionHandler) +{ + m_deviceIdHashSaltForOrigins.removeIf([time](auto& keyAndValue) { + return keyAndValue.value->lastTimeUsed > time; + }); + + RunLoop::main().dispatch(WTFMove(completionHandler)); +} + +} // namespace WebKit diff --git a/Source/WebKit/UIProcess/DeviceIdHashSaltStorage.h b/Source/WebKit/UIProcess/DeviceIdHashSaltStorage.h new file mode 100644 index 0000000000000..e8ab83206ccc7 --- /dev/null +++ b/Source/WebKit/UIProcess/DeviceIdHashSaltStorage.h @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2018 Igalia S.L. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC.
AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ + +#pragma once + +#include "UserMediaPermissionCheckProxy.h" +#include +#include +#include +#include + +namespace WebKit { + +class DeviceIdHashSaltStorage : public RefCounted { +public: + static Ref create(); + ~DeviceIdHashSaltStorage() = default; + + const String& deviceIdHashSaltForOrigin(WebCore::SecurityOrigin&); + const String& regenerateDeviceIdHashSaltForOrigin(UserMediaPermissionCheckProxy&); + + void getDeviceIdHashSaltOrigins(CompletionHandler&&)>&&); + void deleteDeviceIdHashSaltForOrigins(const Vector&, CompletionHandler&&); + void deleteDeviceIdHashSaltOriginsModifiedSince(WallTime, CompletionHandler&&); + +private: + struct HashSaltForOrigin { + HashSaltForOrigin(WebCore::SecurityOriginData&& securityOrigin, String&& deviceIdHashSalt) + : documentOrigin(securityOrigin) + , deviceIdHashSalt(WTFMove(deviceIdHashSalt)) + , lastTimeUsed(WallTime::now()) + { }; + + WebCore::SecurityOriginData documentOrigin; + String deviceIdHashSalt; + WallTime lastTimeUsed; + }; + + DeviceIdHashSaltStorage() = default; + + HashMap> m_deviceIdHashSaltForOrigins; +}; + +} // namespace WebKit diff --git a/Source/WebKit/UIProcess/UserMediaPermissionCheckProxy.h b/Source/WebKit/UIProcess/UserMediaPermissionCheckProxy.h index f37be7ab21f3f..fbcc4cfee3a5b 100644 --- a/Source/WebKit/UIProcess/UserMediaPermissionCheckProxy.h +++ b/Source/WebKit/UIProcess/UserMediaPermissionCheckProxy.h @@ -36,8 +36,6 @@ class SecurityOrigin; namespace WebKit { -class UserMediaPermissionRequestManagerProxy; - class UserMediaPermissionCheckProxy : public API::ObjectImpl { public: diff --git a/Source/WebKit/UIProcess/WebsiteData/WebsiteDataStore.cpp b/Source/WebKit/UIProcess/WebsiteData/WebsiteDataStore.cpp index ec483361ff7ce..8ce64c6fe0be4 100644 --- a/Source/WebKit/UIProcess/WebsiteData/WebsiteDataStore.cpp +++ b/Source/WebKit/UIProcess/WebsiteData/WebsiteDataStore.cpp @@ -29,6 +29,7 @@ #include "APIProcessPoolConfiguration.h" #include "APIWebsiteDataRecord.h" #include "APIWebsiteDataStore.h" +#include "DeviceIdHashSaltStorage.h" #include "NetworkProcessMessages.h" #include "StorageManager.h" #include "StorageProcessCreationParameters.h" @@ -95,6 +96,7 @@ WebsiteDataStore::WebsiteDataStore(Configuration configuration, PAL::SessionID s : m_sessionID(sessionID) , m_configuration(WTFMove(configuration)) , m_storageManager(StorageManager::create(m_configuration.localStorageDirectory, m_configuration.localStorageQuota)) + , m_deviceIdHashSaltStorage(DeviceIdHashSaltStorage::create()) , m_queue(WorkQueue::create("com.apple.WebKit.WebsiteDataStore")) { WTF::setProcessPrivileges(allPrivileges()); @@ -472,6 +474,19 @@ void WebsiteDataStore::fetchDataAndApply(OptionSet dataTypes, O }); } + if (m_deviceIdHashSaltStorage && dataTypes.contains(WebsiteDataType::DeviceIdHashSalt)) { + 
callbackAggregator->addPendingCallback(); + + m_deviceIdHashSaltStorage->getDeviceIdHashSaltOrigins([callbackAggregator](auto&& origins) { + WebsiteData websiteData; + + while (!origins.isEmpty()) + websiteData.entries.append(WebsiteData::Entry { origins.takeAny(), WebsiteDataType::DeviceIdHashSalt, 0 }); + + callbackAggregator->removePendingCallback(WTFMove(websiteData)); + }); + } + if (dataTypes.contains(WebsiteDataType::OfflineWebApplicationCache) && isPersistent()) { callbackAggregator->addPendingCallback(); @@ -794,6 +809,14 @@ void WebsiteDataStore::removeData(OptionSet dataTypes, WallTime }); } + if (m_deviceIdHashSaltStorage && (dataTypes.contains(WebsiteDataType::DeviceIdHashSalt) || (dataTypes.contains(WebsiteDataType::Cookies)))) { + callbackAggregator->addPendingCallback(); + + m_deviceIdHashSaltStorage->deleteDeviceIdHashSaltOriginsModifiedSince(modifiedSince, [callbackAggregator] { + callbackAggregator->removePendingCallback(); + }); + } + if (dataTypes.contains(WebsiteDataType::OfflineWebApplicationCache) && isPersistent()) { callbackAggregator->addPendingCallback(); @@ -1083,6 +1106,14 @@ void WebsiteDataStore::removeData(OptionSet dataTypes, const Ve }); } + if (m_deviceIdHashSaltStorage && (dataTypes.contains(WebsiteDataType::DeviceIdHashSalt) || (dataTypes.contains(WebsiteDataType::Cookies)))) { + callbackAggregator->addPendingCallback(); + + m_deviceIdHashSaltStorage->deleteDeviceIdHashSaltForOrigins(origins, [callbackAggregator] { + callbackAggregator->removePendingCallback(); + }); + } + if (dataTypes.contains(WebsiteDataType::OfflineWebApplicationCache) && isPersistent()) { HashSet origins; for (const auto& dataRecord : dataRecords) { diff --git a/Source/WebKit/UIProcess/WebsiteData/WebsiteDataStore.h b/Source/WebKit/UIProcess/WebsiteData/WebsiteDataStore.h index ebd884fbe2110..2a58e7766fc93 100644 --- a/Source/WebKit/UIProcess/WebsiteData/WebsiteDataStore.h +++ b/Source/WebKit/UIProcess/WebsiteData/WebsiteDataStore.h @@ -53,6 +53,7 @@ namespace WebKit { class SecKeyProxyStore; class StorageManager; +class DeviceIdHashSaltStorage; class WebPageProxy; class WebProcessPool; class WebResourceLoadStatisticsStore; @@ -91,6 +92,7 @@ class WebsiteDataStore : public RefCounted, public WebProcessL String localStorageDirectory; uint32_t localStorageQuota; String mediaKeysStorageDirectory; + String deviceIdHashSaltsStorageDirectory; String resourceLoadStatisticsDirectory; String javaScriptConfigurationDirectory; String cookieStorageFile; @@ -156,6 +158,8 @@ class WebsiteDataStore : public RefCounted, public WebProcessL StorageManager* storageManager() { return m_storageManager.get(); } + DeviceIdHashSaltStorage* deviceIdHashSaltStorage() { return m_deviceIdHashSaltStorage.get(); } + WebProcessPool* processPoolForCookieStorageOperations(); bool isAssociatedProcessPool(WebProcessPool&) const; @@ -228,6 +232,7 @@ class WebsiteDataStore : public RefCounted, public WebProcessL bool m_hasResolvedDirectories { false }; const RefPtr m_storageManager; + const RefPtr m_deviceIdHashSaltStorage; RefPtr m_resourceLoadStatistics; bool m_resourceLoadStatisticsDebugMode { false }; diff --git a/Tools/ChangeLog b/Tools/ChangeLog index 4de76d3f6b989..07d06a3cf950d 100644 --- a/Tools/ChangeLog +++ b/Tools/ChangeLog @@ -1,3 +1,84 @@ +2018-11-28 Thibault Saunier + + [WebRTC][GStreamer] Make sure to have the default microphone on the top of the list + https://bugs.webkit.org/show_bug.cgi?id=192026 + + Reviewed by Philippe Normand. 
+ + Otherwise we might end up picking a useless one in some applications + (not sure what those applications do, though). + + GStreamer patch proposed as https://gitlab.freedesktop.org/gstreamer/gst-plugins-good/merge_requests/34/diffs + + * flatpak/org.webkit.CommonModules.yaml: + * gstreamer/jhbuild.modules: + * gstreamer/patches/gst-plugins-good-0014-pulse-Mark-default-devices-as-default.patch: Added. + +2018-11-05 Thibault Saunier + + [GStreamer][WebRTC] Add webrtcencoder bin to cleanup and refactor the way we set encoders + https://bugs.webkit.org/show_bug.cgi?id=190674 + + Reviewed by Philippe Normand. + + webrtcencoder is a simple GstBin with a set of well-known GStreamer encoders which + it can use to implement encoding for different formats, exposing a trimmed down unified API. + + It also adds proper handling of H264 profiles. + + The added files follow the GStreamer coding style as we aim at upstreaming the element + in the future. + + * Scripts/webkitpy/style/checker.py: + +2018-10-11 Alejandro G. Castro + + [GTK][WPE] Add mediaDevices.enumerateDevices support + https://bugs.webkit.org/show_bug.cgi?id=185761 + + Reviewed by Youenn Fablet. + + Added a new API test for mediaDevices.enumerateDevices: + usermedia-enumeratedevices-permission-check, and a new API test + for the WebsiteDataStore: testWebsiteDataDeviceIdHashSalt. + + * MiniBrowser/gtk/main.c: + (gotWebsiteDataCallback): Added a new section to the about:data + webpage to include the information about the hash salt. + * TestWebKitAPI/Tests/WebKitGLib/TestUIClient.cpp: + (testWebViewUserMediaEnumerateDevicesPermissionCheck): Added the + new test checking the enumerateDevices API when permission is + denied and when permission is allowed for the origin. + (beforeAll): Defined the new test. + * TestWebKitAPI/Tests/WebKitGLib/TestWebsiteData.cpp: + (serverCallback): Register a new URI for the enumerateDevices. + (testWebsiteDataConfiguration): Remove the hash salts from the + directories. + (testWebsiteDataDeviceIdHashSalt): New test checking the + enumerateDevices API. + (beforeAll): Added the new test testWebsiteDataDeviceIdHashSalt to + the init structure. + * TestWebKitAPI/glib/WebKitGLib/WebViewTest.cpp: + (WebViewTest::initializeWebView): Make sure the media-stream is + activated in the webView. + +2018-09-22 Thibault Saunier + + [WPE] Be very permissive in the MiniBrowser. + https://bugs.webkit.org/show_bug.cgi?id=189800 + + This is just a test tool and we should make + it just work; security and privacy are not really + a primary focus here. + + Reviewed by Žan Doberšek.
+ + * MiniBrowser/wpe/main.cpp: + (decidePermissionRequest): + (main): + 2018-09-24 Alicia Boya García [MSE][GStreamer] Use no-more-pads event for noticing initialization segments diff --git a/Tools/MiniBrowser/gtk/main.c b/Tools/MiniBrowser/gtk/main.c index 58701fc842ce5..5133a4dd61b92 100644 --- a/Tools/MiniBrowser/gtk/main.c +++ b/Tools/MiniBrowser/gtk/main.c @@ -402,6 +402,7 @@ static void gotWebsiteDataCallback(WebKitWebsiteDataManager *manager, GAsyncResu guint64 pageID = webkit_web_view_get_page_id(webkit_uri_scheme_request_get_web_view(dataRequest->request)); aboutDataFillTable(result, dataRequest, dataList, "Cookies", WEBKIT_WEBSITE_DATA_COOKIES, NULL, pageID); + aboutDataFillTable(result, dataRequest, dataList, "Device Id Hash Salt", WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT, NULL, pageID); aboutDataFillTable(result, dataRequest, dataList, "Memory Cache", WEBKIT_WEBSITE_DATA_MEMORY_CACHE, NULL, pageID); aboutDataFillTable(result, dataRequest, dataList, "Disk Cache", WEBKIT_WEBSITE_DATA_DISK_CACHE, webkit_website_data_manager_get_disk_cache_directory(manager), pageID); aboutDataFillTable(result, dataRequest, dataList, "Session Storage", WEBKIT_WEBSITE_DATA_SESSION_STORAGE, NULL, pageID); diff --git a/Tools/MiniBrowser/wpe/main.cpp b/Tools/MiniBrowser/wpe/main.cpp index 3f76942a46d20..2088cd425b2ae 100644 --- a/Tools/MiniBrowser/wpe/main.cpp +++ b/Tools/MiniBrowser/wpe/main.cpp @@ -92,6 +92,14 @@ static void automationStartedCallback(WebKitWebContext*, WebKitAutomationSession g_signal_connect(session, "create-web-view", G_CALLBACK(createWebViewForAutomationCallback), view); } +static gboolean decidePermissionRequest(WebKitWebView *webView, WebKitPermissionRequest *request, gpointer unused_udata) +{ + g_print("Accepting %s request\n", G_OBJECT_TYPE_NAME(request)); + webkit_permission_request_allow(request); + + return TRUE; +} + static std::unique_ptr createViewBackend(uint32_t width, uint32_t height) { if (headlessMode) @@ -178,6 +186,7 @@ int main(int argc, char *argv[]) webkit_web_context_set_automation_allowed(webContext, automationMode); g_signal_connect(webContext, "automation-started", G_CALLBACK(automationStartedCallback), webView); + g_signal_connect(webView, "permission-request", G_CALLBACK(decidePermissionRequest), NULL); if (ignoreTLSErrors) webkit_web_context_set_tls_errors_policy(webContext, WEBKIT_TLS_ERRORS_POLICY_IGNORE); diff --git a/Tools/Scripts/webkitpy/style/checker.py b/Tools/Scripts/webkitpy/style/checker.py index a5908e6016a21..17424934e187f 100644 --- a/Tools/Scripts/webkitpy/style/checker.py +++ b/Tools/Scripts/webkitpy/style/checker.py @@ -237,6 +237,19 @@ "-whitespace/declaration", "-whitespace/indent"]), + ([ # Files following GStreamer coding style (for a simpler upstreaming process for example) + os.path.join('Source', 'WebCore', 'platform', 'mediastream', 'libwebrtc', 'GStreamerVideoEncoder.cpp'), + os.path.join('Source', 'WebCore', 'platform', 'mediastream', 'libwebrtc', 'GStreamerVideoEncoder.h'), + ], + ["-whitespace/indent", + "-whitespace/declaration", + "-whitespace/parens", + "-readability/null", + "-whitespace/braces", + "-readability/naming/underscores", + "-readability/enum_casing", + ]), + ([ # There is no way to avoid the symbols __jit_debug_register_code # and __jit_debug_descriptor when integrating with gdb. 
diff --git a/Tools/TestWebKitAPI/Tests/WebKitGLib/TestUIClient.cpp b/Tools/TestWebKitAPI/Tests/WebKitGLib/TestUIClient.cpp index 6939102fd318d..e9a2f92fdea03 100644 --- a/Tools/TestWebKitAPI/Tests/WebKitGLib/TestUIClient.cpp +++ b/Tools/TestWebKitAPI/Tests/WebKitGLib/TestUIClient.cpp @@ -818,6 +818,47 @@ static void testWebViewGeolocationPermissionRequests(UIClientTest* test, gconstp #endif // ENABLE(GEOLOCATION) #if ENABLE(MEDIA_STREAM) +static void testWebViewUserMediaEnumerateDevicesPermissionCheck(UIClientTest* test, gconstpointer) +{ + WebKitSettings* settings = webkit_web_view_get_settings(test->m_webView); + gboolean enabled = webkit_settings_get_enable_media_stream(settings); + webkit_settings_set_enable_media_stream(settings, TRUE); + +#if PLATFORM(GTK) + test->showInWindowAndWaitUntilMapped(); +#endif + static const char* userMediaRequestHTML = + "" + " " + " " + ""; + + test->m_verifyMediaTypes = TRUE; + + // Test denying a permission request. + test->m_allowPermissionRequests = false; + test->loadHtml(userMediaRequestHTML, nullptr); + test->waitUntilTitleChangedTo("Permission denied"); + + // Test allowing a permission request. + test->m_allowPermissionRequests = true; + test->loadHtml(userMediaRequestHTML, nullptr); + test->waitUntilTitleChangedTo("OK"); + + webkit_settings_set_enable_media_stream(settings, enabled); +} + static void testWebViewUserMediaPermissionRequests(UIClientTest* test, gconstpointer) { WebKitSettings* settings = webkit_web_view_get_settings(test->m_webView); @@ -1140,6 +1181,7 @@ void beforeAll() UIClientTest::add("WebKitWebView", "geolocation-permission-requests", testWebViewGeolocationPermissionRequests); #endif #if ENABLE(MEDIA_STREAM) + UIClientTest::add("WebKitWebView", "usermedia-enumeratedevices-permission-check", testWebViewUserMediaEnumerateDevicesPermissionCheck); UIClientTest::add("WebKitWebView", "usermedia-permission-requests", testWebViewUserMediaPermissionRequests); UIClientTest::add("WebKitWebView", "audio-usermedia-permission-request", testWebViewAudioOnlyUserMediaPermissionRequests); #endif diff --git a/Tools/TestWebKitAPI/Tests/WebKitGLib/TestWebsiteData.cpp b/Tools/TestWebKitAPI/Tests/WebKitGLib/TestWebsiteData.cpp index 97e189b6f8239..d6a32b7464c78 100644 --- a/Tools/TestWebKitAPI/Tests/WebKitGLib/TestWebsiteData.cpp +++ b/Tools/TestWebKitAPI/Tests/WebKitGLib/TestWebsiteData.cpp @@ -21,6 +21,7 @@ #include "WebKitTestServer.h" #include "WebViewTest.h" +#include static WebKitTestServer* kServer; @@ -61,6 +62,11 @@ static void serverCallback(SoupServer* server, SoupMessage* message, const char* soup_message_body_append(message->response_body, SOUP_MEMORY_STATIC, localStorageHTML, strlen(localStorageHTML)); soup_message_body_complete(message->response_body); soup_message_set_status(message, SOUP_STATUS_OK); + } else if (g_str_equal(path, "/enumeratedevices")) { + const char* enumerateDevicesHTML = ""; + soup_message_body_append(message->response_body, SOUP_MEMORY_STATIC, enumerateDevicesHTML, strlen(enumerateDevicesHTML)); + soup_message_body_complete(message->response_body); + soup_message_set_status(message, SOUP_STATUS_OK); } else soup_message_set_status(message, SOUP_STATUS_NOT_FOUND); } @@ -163,7 +169,7 @@ static void testWebsiteDataConfiguration(WebsiteDataTest* test, gconstpointer) // Clear all persistent caches, since the data dir is common to all test cases. 
static const WebKitWebsiteDataTypes persistentCaches = static_cast(WEBKIT_WEBSITE_DATA_DISK_CACHE | WEBKIT_WEBSITE_DATA_LOCAL_STORAGE - | WEBKIT_WEBSITE_DATA_WEBSQL_DATABASES | WEBKIT_WEBSITE_DATA_INDEXEDDB_DATABASES | WEBKIT_WEBSITE_DATA_OFFLINE_APPLICATION_CACHE); + | WEBKIT_WEBSITE_DATA_WEBSQL_DATABASES | WEBKIT_WEBSITE_DATA_INDEXEDDB_DATABASES | WEBKIT_WEBSITE_DATA_OFFLINE_APPLICATION_CACHE | WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT); test->clear(persistentCaches, 0); g_assert(!test->fetch(persistentCaches)); @@ -517,6 +523,53 @@ static void testWebsiteDataCookies(WebsiteDataTest* test, gconstpointer) g_assert(!dataList); } +static void testWebsiteDataDeviceIdHashSalt(WebsiteDataTest* test, gconstpointer) +{ + test->clear(WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT, 0); + + GList* dataList = test->fetch(WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT); + g_assert(!dataList); + + test->loadURI(kServer->getURIForPath("/enumeratedevices").data()); + test->waitUntilTitleChangedTo("Finished"); + + dataList = test->fetch(WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT); + g_assert(dataList); + + g_assert_cmpuint(g_list_length(dataList), ==, 1); + WebKitWebsiteData* data = static_cast(dataList->data); + g_assert(data); + WebKitSecurityOrigin* origin = webkit_security_origin_new_for_uri(kServer->getURIForPath("/").data()); + g_assert_cmpstr(webkit_website_data_get_name(data), ==, webkit_security_origin_get_host(origin)); + webkit_security_origin_unref(origin); + g_assert_cmpuint(webkit_website_data_get_types(data), ==, WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT); + + GList removeList = { data, nullptr, nullptr }; + test->remove(WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT, &removeList); + dataList = test->fetch(WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT); + g_assert(!dataList); + + // Test removing the cookies. + test->loadURI(kServer->getURIForPath("/enumeratedevices").data()); + test->waitUntilTitleChangedTo("Finished"); + + dataList = test->fetch(WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT); + g_assert(dataList); + data = static_cast(dataList->data); + g_assert(data); + + GList removeCookieList = { data, nullptr, nullptr }; + test->remove(WEBKIT_WEBSITE_DATA_COOKIES, &removeCookieList); + dataList = test->fetch(WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT); + g_assert(!dataList); + + // Clear all. 
+ static const WebKitWebsiteDataTypes cacheAndAppcacheTypes = static_cast(WEBKIT_WEBSITE_DATA_DEVICE_ID_HASH_SALT); + test->clear(cacheAndAppcacheTypes, 0); + dataList = test->fetch(cacheAndAppcacheTypes); + g_assert(!dataList); +} + void beforeAll() { kServer = new WebKitTestServer(); @@ -529,6 +582,7 @@ void beforeAll() WebsiteDataTest::add("WebKitWebsiteData", "databases", testWebsiteDataDatabases); WebsiteDataTest::add("WebKitWebsiteData", "appcache", testWebsiteDataAppcache); WebsiteDataTest::add("WebKitWebsiteData", "cookies", testWebsiteDataCookies); + WebsiteDataTest::add("WebKitWebsiteData", "deviceidhashsalt", testWebsiteDataDeviceIdHashSalt); } void afterAll() diff --git a/Tools/TestWebKitAPI/glib/WebKitGLib/WebViewTest.cpp b/Tools/TestWebKitAPI/glib/WebKitGLib/WebViewTest.cpp index 3c095c37b42f3..7e3540b781c29 100644 --- a/Tools/TestWebKitAPI/glib/WebKitGLib/WebViewTest.cpp +++ b/Tools/TestWebKitAPI/glib/WebKitGLib/WebViewTest.cpp @@ -48,10 +48,15 @@ WebViewTest::~WebViewTest() void WebViewTest::initializeWebView() { g_assert(!m_webView); + + WebKitSettings* webkitSettings = webkit_settings_new(); + webkit_settings_set_enable_media_stream(webkitSettings, TRUE); + m_webView = WEBKIT_WEB_VIEW(g_object_new(WEBKIT_TYPE_WEB_VIEW, #if PLATFORM(WPE) "backend", Test::createWebViewBackend(), #endif + "settings", webkitSettings, "web-context", m_webContext.get(), "user-content-manager", m_userContentManager.get(), nullptr)); diff --git a/Tools/gstreamer/patches/gst-plugins-good-0014-pulse-Mark-default-devices-as-default.patch b/Tools/gstreamer/patches/gst-plugins-good-0014-pulse-Mark-default-devices-as-default.patch new file mode 100644 index 0000000000000..435b6d7d97f84 --- /dev/null +++ b/Tools/gstreamer/patches/gst-plugins-good-0014-pulse-Mark-default-devices-as-default.patch @@ -0,0 +1,318 @@ +From 32c833ebb42b8e8f46c5a72a16a874bc00fc5553 Mon Sep 17 00:00:00 2001 +From: Thibault Saunier +Date: Mon, 26 Nov 2018 13:48:56 -0300 +Subject: [PATCH] pulse: Mark default devices as "default" + +--- + ext/pulse/pulsedeviceprovider.c | 117 +++++++++++++++++++++----------- + ext/pulse/pulsedeviceprovider.h | 3 + + 2 files changed, 79 insertions(+), 41 deletions(-) + +diff --git a/ext/pulse/pulsedeviceprovider.c b/ext/pulse/pulsedeviceprovider.c +index a1964dab2..9b66a0e33 100644 +--- a/ext/pulse/pulsedeviceprovider.c ++++ b/ext/pulse/pulsedeviceprovider.c +@@ -40,7 +40,7 @@ GST_DEBUG_CATEGORY_EXTERN (pulse_debug); + + static GstDevice *gst_pulse_device_new (guint id, + const gchar * device_name, GstCaps * caps, const gchar * internal_name, +- GstPulseDeviceType type, GstStructure * properties); ++ GstPulseDeviceType type, GstStructure * properties, gboolean is_default); + + G_DEFINE_TYPE (GstPulseDeviceProvider, gst_pulse_device_provider, + GST_TYPE_DEVICE_PROVIDER); +@@ -65,6 +65,12 @@ enum + }; + + ++typedef struct ++{ ++ GList *devices; ++ GstPulseDeviceProvider *self; ++} ListDevicesData; ++ + static void + gst_pulse_device_provider_class_init (GstPulseDeviceProviderClass * klass) + { +@@ -114,6 +120,8 @@ gst_pulse_device_provider_finalize (GObject * object) + + g_free (self->client_name); + g_free (self->server); ++ g_free (self->default_sink_name); ++ g_free (self->default_source_name); + + G_OBJECT_CLASS (gst_pulse_device_provider_parent_class)->finalize (object); + } +@@ -186,7 +194,7 @@ context_state_cb (pa_context * c, void *userdata) + } + + static GstDevice * +-new_source (const pa_source_info * info) ++new_source (GstPulseDeviceProvider * self, const pa_source_info * info) + { + 
GstCaps *caps; + GstStructure *props; +@@ -200,11 +208,12 @@ new_source (const pa_source_info * info) + props = gst_pulse_make_structure (info->proplist); + + return gst_pulse_device_new (info->index, info->description, +- caps, info->name, GST_PULSE_DEVICE_TYPE_SOURCE, props); ++ caps, info->name, GST_PULSE_DEVICE_TYPE_SOURCE, props, ++ !g_strcmp0 (info->name, self->default_source_name)); + } + + static GstDevice * +-new_sink (const pa_sink_info * info) ++new_sink (GstPulseDeviceProvider * self, const pa_sink_info * info) + { + GstCaps *caps; + GstStructure *props; +@@ -218,7 +227,8 @@ new_sink (const pa_sink_info * info) + props = gst_pulse_make_structure (info->proplist); + + return gst_pulse_device_new (info->index, info->description, +- caps, info->name, GST_PULSE_DEVICE_TYPE_SINK, props); ++ caps, info->name, GST_PULSE_DEVICE_TYPE_SINK, props, ++ !g_strcmp0 (info->name, self->default_sink_name)); + } + + static void +@@ -233,12 +243,26 @@ get_source_info_cb (pa_context * context, + return; + } + +- dev = new_source (info); ++ dev = new_source (self, info); + + if (dev) + gst_device_provider_device_add (GST_DEVICE_PROVIDER (self), dev); + } + ++static void ++get_server_info_cb (pa_context * context, const pa_server_info * info, ++ void *userdata) ++{ ++ GstPulseDeviceProvider *self = userdata; ++ ++ g_free (self->default_sink_name); ++ g_free (self->default_source_name); ++ self->default_sink_name = g_strdup (info->default_sink_name); ++ self->default_source_name = g_strdup (info->default_source_name); ++ ++ pa_threaded_mainloop_signal (self->mainloop, 0); ++} ++ + static void + get_sink_info_cb (pa_context * context, + const pa_sink_info * info, int eol, void *userdata) +@@ -251,7 +275,7 @@ get_sink_info_cb (pa_context * context, + return; + } + +- dev = new_sink (info); ++ dev = new_sink (self, info); + + if (dev) + gst_device_provider_device_add (GST_DEVICE_PROVIDER (self), dev); +@@ -312,34 +336,38 @@ static void + get_source_info_list_cb (pa_context * context, const pa_source_info * info, + int eol, void *userdata) + { +- GList **devices = userdata; ++ ListDevicesData *data = userdata; + + if (eol) + return; + +- *devices = g_list_prepend (*devices, gst_object_ref_sink (new_source (info))); ++ data->devices = ++ g_list_prepend (data->devices, ++ gst_object_ref_sink (new_source (data->self, info))); + } + + static void + get_sink_info_list_cb (pa_context * context, const pa_sink_info * info, + int eol, void *userdata) + { +- GList **devices = userdata; ++ ListDevicesData *data = userdata; + + if (eol) + return; + +- *devices = g_list_prepend (*devices, gst_object_ref_sink (new_sink (info))); ++ data->devices = ++ g_list_prepend (data->devices, gst_object_ref_sink (new_sink (data->self, ++ info))); + } + + static GList * + gst_pulse_device_provider_probe (GstDeviceProvider * provider) + { + GstPulseDeviceProvider *self = GST_PULSE_DEVICE_PROVIDER (provider); +- GList *devices = NULL; + pa_mainloop *m = NULL; + pa_context *c = NULL; + pa_operation *o; ++ ListDevicesData data = { NULL, self }; + + if (!(m = pa_mainloop_new ())) + return NULL; +@@ -376,7 +404,7 @@ gst_pulse_device_provider_probe (GstDeviceProvider * provider) + } + GST_DEBUG_OBJECT (self, "connected"); + +- o = pa_context_get_sink_info_list (c, get_sink_info_list_cb, &devices); ++ o = pa_context_get_sink_info_list (c, get_sink_info_list_cb, &data); + while (pa_operation_get_state (o) == PA_OPERATION_RUNNING && + pa_operation_get_state (o) == PA_OPERATION_RUNNING) { + if (pa_mainloop_iterate (m, TRUE, NULL) < 0) +@@ 
-384,7 +412,7 @@ gst_pulse_device_provider_probe (GstDeviceProvider * provider) + } + pa_operation_unref (o); + +- o = pa_context_get_source_info_list (c, get_source_info_list_cb, &devices); ++ o = pa_context_get_source_info_list (c, get_source_info_list_cb, &data); + while (pa_operation_get_state (o) == PA_OPERATION_RUNNING && + pa_operation_get_state (o) == PA_OPERATION_RUNNING) { + if (pa_mainloop_iterate (m, TRUE, NULL) < 0) +@@ -395,18 +423,38 @@ gst_pulse_device_provider_probe (GstDeviceProvider * provider) + pa_context_disconnect (c); + pa_mainloop_free (m); + +- return devices; ++ return data.devices; + + failed: + + return NULL; + } + ++static gboolean ++run_pulse_operation (GstPulseDeviceProvider * self, pa_operation * operation) ++{ ++ if (!operation) ++ return FALSE; ++ ++ while (pa_operation_get_state (operation) == PA_OPERATION_RUNNING) { ++ if (!PA_CONTEXT_IS_GOOD (pa_context_get_state ((self->context)))) { ++ pa_operation_cancel (operation); ++ pa_operation_unref (operation); ++ return FALSE; ++ } ++ ++ pa_threaded_mainloop_wait (self->mainloop); ++ } ++ ++ pa_operation_unref (operation); ++ ++ return TRUE; ++} ++ + static gboolean + gst_pulse_device_provider_start (GstDeviceProvider * provider) + { + GstPulseDeviceProvider *self = GST_PULSE_DEVICE_PROVIDER (provider); +- pa_operation *initial_operation; + + if (!(self->mainloop = pa_threaded_mainloop_new ())) { + GST_ERROR_OBJECT (self, "Could not create pulseaudio mainloop"); +@@ -462,27 +510,18 @@ gst_pulse_device_provider_start (GstDeviceProvider * provider) + pa_context_subscribe (self->context, + PA_SUBSCRIPTION_MASK_SOURCE | PA_SUBSCRIPTION_MASK_SINK, NULL, NULL); + +- initial_operation = pa_context_get_source_info_list (self->context, +- get_source_info_cb, self); +- while (pa_operation_get_state (initial_operation) == PA_OPERATION_RUNNING) { +- if (!PA_CONTEXT_IS_GOOD (pa_context_get_state ((self->context)))) +- goto cancel_and_fail; +- +- pa_threaded_mainloop_wait (self->mainloop); +- } +- pa_operation_unref (initial_operation); ++ if (!run_pulse_operation (self, pa_context_get_server_info (self->context, ++ get_server_info_cb, self))) ++ goto unlock_and_fail; + +- initial_operation = pa_context_get_sink_info_list (self->context, +- get_sink_info_cb, self); +- if (!initial_operation) ++ if (!run_pulse_operation (self, ++ pa_context_get_source_info_list (self->context, get_source_info_cb, ++ self))) + goto unlock_and_fail; +- while (pa_operation_get_state (initial_operation) == PA_OPERATION_RUNNING) { +- if (!PA_CONTEXT_IS_GOOD (pa_context_get_state ((self->context)))) +- goto cancel_and_fail; + +- pa_threaded_mainloop_wait (self->mainloop); +- } +- pa_operation_unref (initial_operation); ++ if (!run_pulse_operation (self, pa_context_get_sink_info_list (self->context, ++ get_sink_info_cb, self))) ++ goto unlock_and_fail; + + pa_threaded_mainloop_unlock (self->mainloop); + +@@ -495,11 +534,6 @@ unlock_and_fail: + + mainloop_failed: + return FALSE; +- +-cancel_and_fail: +- pa_operation_cancel (initial_operation); +- pa_operation_unref (initial_operation); +- goto unlock_and_fail; + } + + static void +@@ -611,7 +645,7 @@ gst_pulse_device_reconfigure_element (GstDevice * device, GstElement * element) + static GstDevice * + gst_pulse_device_new (guint device_index, const gchar * device_name, + GstCaps * caps, const gchar * internal_name, GstPulseDeviceType type, +- GstStructure * props) ++ GstStructure * props, gboolean is_default) + { + GstPulseDevice *gstdev; + const gchar *element = NULL; +@@ -636,7 +670,7 @@ 
gst_pulse_device_new (guint device_index, const gchar * device_name, + break; + } + +- ++ gst_structure_set (props, "is-default", G_TYPE_BOOLEAN, is_default, NULL); + gstdev = g_object_new (GST_TYPE_PULSE_DEVICE, + "display-name", device_name, "caps", caps, "device-class", klass, + "internal-name", internal_name, "properties", props, NULL); +@@ -644,6 +678,7 @@ gst_pulse_device_new (guint device_index, const gchar * device_name, + gstdev->type = type; + gstdev->device_index = device_index; + gstdev->element = element; ++ gstdev->is_default = is_default; + + gst_structure_free (props); + gst_caps_unref (caps); +diff --git a/ext/pulse/pulsedeviceprovider.h b/ext/pulse/pulsedeviceprovider.h +index 0892ad586..7bcd1bc47 100644 +--- a/ext/pulse/pulsedeviceprovider.h ++++ b/ext/pulse/pulsedeviceprovider.h +@@ -50,6 +50,8 @@ struct _GstPulseDeviceProvider { + + gchar *server; + gchar *client_name; ++ gchar *default_source_name; ++ gchar *default_sink_name; + + pa_threaded_mainloop *mainloop; + pa_context *context; +@@ -84,6 +86,7 @@ struct _GstPulseDevice { + GstPulseDeviceType type; + guint device_index; + gchar *internal_name; ++ gboolean is_default; + const gchar *element; + }; + +-- +2.19.1 +
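The patch above makes the PulseAudio device provider publish an "is-default" boolean in each GstDevice's properties. A sketch of how a client could rely on that to prefer the default microphone when probing devices; it assumes gst_init() has already been called and that the provider exposes the property as the patch does:

```c
#include <gst/gst.h>

/* Sketch: return the default audio capture device, or NULL if none is marked
 * as default. The caller owns the returned reference. */
static GstDevice *find_default_audio_source(void)
{
    GstDeviceMonitor *monitor = gst_device_monitor_new();
    gst_device_monitor_add_filter(monitor, "Audio/Source", NULL);
    gst_device_monitor_start(monitor);

    GstDevice *default_device = NULL;
    GList *devices = gst_device_monitor_get_devices(monitor);
    for (GList *l = devices; l && !default_device; l = l->next) {
        GstDevice *device = GST_DEVICE(l->data);
        GstStructure *props = gst_device_get_properties(device);
        gboolean is_default = FALSE;
        if (props && gst_structure_get_boolean(props, "is-default", &is_default) && is_default)
            default_device = gst_object_ref(device);
        if (props)
            gst_structure_free(props);
    }
    g_list_free_full(devices, gst_object_unref);
    gst_device_monitor_stop(monitor);
    gst_object_unref(monitor);
    return default_device;
}
```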