Bug 1376873 - Rollup conflict fixes for audio/video code; r=pehrsons

MozReview-Commit-ID: 1T8mgqdkzq3

Differential Revision: https://phabricator.services.mozilla.com/D7427

--HG--
extra : rebase_source : 2ca9a77c98b703f849073c92b6b2448de9b8e1f5
This commit is contained in:
Dan Minor 2018-01-22 15:04:26 -05:00
parent 31794753d6
commit 593c290d35
131 changed files with 2046 additions and 620 deletions

View File

@ -592,7 +592,7 @@ MediaEngineRemoteVideoSource::DeliverFrame(uint8_t* aBuffer,
}
rtc::Callback0<void> callback_unused;
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
rtc::scoped_refptr<webrtc::I420BufferInterface> buffer =
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
aProps.width(),
aProps.height(),
@ -831,11 +831,9 @@ LogCapability(const char* aHeader,
"Unknown codec"
};
LOG(("%s: %4u x %4u x %2u maxFps, %s, %s. Distance = %" PRIu32,
LOG(("%s: %4u x %4u x %2u maxFps, %s. Distance = %" PRIu32,
aHeader, aCapability.width, aCapability.height, aCapability.maxFPS,
types[std::min(std::max(uint32_t(0), uint32_t(aCapability.rawType)),
uint32_t(sizeof(types) / sizeof(*types) - 1))],
codec[std::min(std::max(uint32_t(0), uint32_t(aCapability.codecType)),
codec[std::min(std::max(uint32_t(0), uint32_t(aCapability.videoType)),
uint32_t(sizeof(codec) / sizeof(*codec) - 1))],
aDistance));
}
@ -1003,24 +1001,7 @@ MediaEngineRemoteVideoSource::ChooseCapability(
TrimLessFitCandidates(candidateSet);
}
// Any remaining multiples all have the same distance, but may vary on
// format. Some formats are more desirable for certain use like WebRTC.
// E.g. I420 over RGB24 can remove a needless format conversion.
bool found = false;
for (auto& candidate : candidateSet) {
const webrtc::CaptureCapability& cap = candidate.mCapability;
if (cap.rawType == webrtc::RawVideoType::kVideoI420 ||
cap.rawType == webrtc::RawVideoType::kVideoYUY2 ||
cap.rawType == webrtc::RawVideoType::kVideoYV12) {
aCapability = cap;
found = true;
break;
}
}
if (!found) {
aCapability = candidateSet[0].mCapability;
}
aCapability = candidateSet[0].mCapability;
LogCapability("Chosen capability", aCapability, sameDistance);
return true;

View File

@ -115,12 +115,32 @@ const int RtpExtension::kVideoTimingDefaultId = 8;
const char RtpExtension::kEncryptHeaderExtensionsUri[] =
"urn:ietf:params:rtp-hdrext:encrypt";
const char* RtpExtension::kRtpStreamIdUri =
"urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id";
const int RtpExtension::kRtpStreamIdDefaultId = 9;
const char* RtpExtension::kRepairedRtpStreamIdUri =
"urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id";
const int RtpExtension::kRepairedRtpStreamIdDefaultId = 10;
const char* RtpExtension::kMIdUri =
"urn:ietf:params:rtp-hdrext:sdes:mid";
const int RtpExtension::kMIdDefaultId = 11;
const char* RtpExtension::kCsrcAudioLevelUri =
"urn:ietf:params:rtp-hdrext:csrc-audio-level";
const int RtpExtension::kCsrcAudioLevelDefaultId = 12;
const int RtpExtension::kMinId = 1;
const int RtpExtension::kMaxId = 14;
bool RtpExtension::IsSupportedForAudio(const std::string& uri) {
return uri == webrtc::RtpExtension::kAudioLevelUri ||
uri == webrtc::RtpExtension::kTransportSequenceNumberUri;
uri == webrtc::RtpExtension::kTransportSequenceNumberUri ||
uri == webrtc::RtpExtension::kRtpStreamIdUri ||
uri == webrtc::RtpExtension::kRepairedRtpStreamIdUri ||
uri == webrtc::RtpExtension::kMIdUri ||
uri == webrtc::RtpExtension::kCsrcAudioLevelUri;
}
bool RtpExtension::IsSupportedForVideo(const std::string& uri) {
@ -130,7 +150,10 @@ bool RtpExtension::IsSupportedForVideo(const std::string& uri) {
uri == webrtc::RtpExtension::kTransportSequenceNumberUri ||
uri == webrtc::RtpExtension::kPlayoutDelayUri ||
uri == webrtc::RtpExtension::kVideoContentTypeUri ||
uri == webrtc::RtpExtension::kVideoTimingUri;
uri == webrtc::RtpExtension::kVideoTimingUri ||
uri == webrtc::RtpExtension::kRtpStreamIdUri ||
uri == webrtc::RtpExtension::kRepairedRtpStreamIdUri ||
uri == webrtc::RtpExtension::kMIdUri;
}
bool RtpExtension::IsEncryptionSupported(const std::string& uri) {

View File

@ -280,6 +280,18 @@ struct RtpExtension {
// https://tools.ietf.org/html/rfc6904
static const char kEncryptHeaderExtensionsUri[];
static const char* kRtpStreamIdUri;
static const int kRtpStreamIdDefaultId;
static const char* kRepairedRtpStreamIdUri;
static const int kRepairedRtpStreamIdDefaultId;
static const char* kMIdUri;
static const int kMIdDefaultId;
static const char* kCsrcAudioLevelUri;
static const int kCsrcAudioLevelDefaultId;
// Inclusive min and max IDs for one-byte header extensions, per RFC5285.
static const int kMinId;
static const int kMaxId;

View File

@ -208,7 +208,7 @@ webrtc::AudioReceiveStream::Stats AudioReceiveStream::GetStats() const {
stats.accelerate_rate = Q14ToFloat(ns.currentAccelerateRate);
stats.preemptive_expand_rate = Q14ToFloat(ns.currentPreemptiveRate);
auto ds = channel_proxy_->GetDecodingCallStatistics();
auto ds(channel_proxy_->GetDecodingCallStatistics());
stats.decoding_calls_to_silence_generator = ds.calls_to_silence_generator;
stats.decoding_calls_to_neteq = ds.calls_to_neteq;
stats.decoding_normal = ds.decoded_normal;

View File

@ -238,6 +238,15 @@ class Call : public webrtc::Call,
void OnAllocationLimitsChanged(uint32_t min_send_bitrate_bps,
uint32_t max_padding_bitrate_bps) override;
// Returns the VoiceEngine owned by the configured audio state, or nullptr
// when no audio state was supplied in the Call config.
VoiceEngine* voice_engine() override {
  auto* audio_state =
      static_cast<internal::AudioState*>(config_.audio_state.get());
  return audio_state ? audio_state->voice_engine() : nullptr;
}
private:
DeliveryStatus DeliverRtcp(MediaType media_type, const uint8_t* packet,
size_t length);

View File

@ -23,7 +23,9 @@ VideoStream::VideoStream()
min_bitrate_bps(-1),
target_bitrate_bps(-1),
max_bitrate_bps(-1),
max_qp(-1) {}
max_qp(-1) {
rid[0] = '\0';
}
VideoStream::~VideoStream() = default;
@ -36,6 +38,7 @@ std::string VideoStream::ToString() const {
ss << ", target_bitrate_bps:" << target_bitrate_bps;
ss << ", max_bitrate_bps:" << max_bitrate_bps;
ss << ", max_qp: " << max_qp;
ss << ", rid: " << rid;
ss << ", temporal_layer_thresholds_bps: [";
for (size_t i = 0; i < temporal_layer_thresholds_bps.size(); ++i) {

View File

@ -38,6 +38,20 @@ struct VideoStream {
int max_qp;
char rid[kRIDSize+1];
// Returns the RID as an owned std::string. The rid buffer is always
// null-terminated (the constructor writes rid[0] = '\0' and SetRid
// terminates after copying), so construction from the char array is safe.
const std::string Rid() const {
  std::string result(&rid[0]);
  return result;
}
// Copies aRid into the fixed-size rid buffer, truncating to at most
// kRIDSize characters and always null-terminating.
void SetRid(const std::string & aRid) {
  static_assert(sizeof(rid) > kRIDSize,
                "rid must be large enough to hold a RID + null termination");
  // rid holds kRIDSize + 1 bytes, so kRIDSize characters plus the
  // terminator always fit. The previous bound of kRIDSize - 1 needlessly
  // dropped one usable character from the RID.
  auto len = std::min(static_cast<size_t>(kRIDSize), aRid.length());
  strncpy(&rid[0], aRid.c_str(), len);
  rid[len] = 0;
}
// Bitrate thresholds for enabling additional temporal layers. Since these are
// thresholds in between layers, we have one additional layer. One threshold
// gives two temporal layers, one below the threshold and one above, two give

View File

@ -23,6 +23,7 @@
#include "common_video/include/frame_callback.h"
#include "media/base/videosinkinterface.h"
#include "rtc_base/platform_file.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
namespace webrtc {
@ -160,9 +161,14 @@ class VideoReceiveStream {
// See draft-alvestrand-rmcat-remb for information.
bool remb = false;
bool tmmbr = false;
// See draft-holmer-rmcat-transport-wide-cc-extensions for details.
bool transport_cc = false;
// TODO(jesup) - there should be a kKeyFrameReqNone
KeyFrameRequestMethod keyframe_method = kKeyFrameReqPliRtcp;
// See NackConfig for description.
NackConfig nack;
@ -229,6 +235,9 @@ class VideoReceiveStream {
// TODO(pbos): Add info on currently-received codec to Stats.
virtual Stats GetStats() const = 0;
//TODO: find replacement for this using call interface
//virtual void SetSyncChannel(VoiceEngine* voice_engine, int audio_channel_id) = 0;
// Takes ownership of the file, is responsible for closing it later.
// Calling this method will close and finalize any current log.
// Giving rtc::kInvalidPlatformFileValue disables logging.

View File

@ -193,6 +193,9 @@ class VideoSendStream {
// RTCP CNAME, see RFC 3550.
std::string c_name;
std::vector<std::string> rids;
std::string mid;
} rtp;
// Transport for outgoing packets.
@ -262,6 +265,10 @@ class VideoSendStream {
rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
const DegradationPreference& degradation_preference) = 0;
// Gets interface used to signal the current CPU work level to the encoder.
// Valid as long as the VideoSendStream is valid.
virtual CPULoadStateObserver* LoadStateObserver() = 0;
// Set which streams to send. Must have at least as many SSRCs as configured
// in the config. Encoder settings are passed on to the encoder instance along
// with the VideoStream settings.

View File

@ -118,8 +118,6 @@ double SincScaleFactor(double io_ratio) {
} // namespace
namespace webrtc {
const size_t SincResampler::kKernelSize;
// If we know the minimum architecture at compile time, avoid CPU detection.

View File

@ -55,7 +55,8 @@ RTPHeaderExtension::RTPHeaderExtension()
videoRotation(kVideoRotation_0),
hasVideoContentType(false),
videoContentType(VideoContentType::UNSPECIFIED),
has_video_timing(false) {}
has_video_timing(false),
csrcAudioLevels() {}
RTPHeaderExtension::RTPHeaderExtension(const RTPHeaderExtension& other) =
default;

View File

@ -11,6 +11,7 @@
#ifndef COMMON_TYPES_H_
#define COMMON_TYPES_H_
#include <atomic>
#include <stddef.h>
#include <string.h>
#include <ostream>
@ -366,14 +367,36 @@ struct AudioDecodingCallStats {
decoded_plc_cng(0),
decoded_muted_output(0) {}
int calls_to_silence_generator; // Number of calls where silence generated,
// and NetEq was disengaged from decoding.
int calls_to_neteq; // Number of calls to NetEq.
int decoded_normal; // Number of calls where audio RTP packet decoded.
int decoded_plc; // Number of calls resulted in PLC.
int decoded_cng; // Number of calls where comfort noise generated due to DTX.
int decoded_plc_cng; // Number of calls resulted where PLC faded to CNG.
int decoded_muted_output; // Number of calls returning a muted state output.
AudioDecodingCallStats(const AudioDecodingCallStats& other)
{
calls_to_silence_generator = other.calls_to_silence_generator.load();
calls_to_neteq = other.calls_to_neteq.load();
decoded_normal = other.decoded_normal.load();
decoded_plc = other.decoded_plc.load();
decoded_cng = other.decoded_cng.load();
decoded_plc_cng = other.decoded_plc_cng.load();
decoded_muted_output = other.decoded_muted_output.load();
}
AudioDecodingCallStats& operator=(const AudioDecodingCallStats& other)
{
calls_to_silence_generator = other.calls_to_silence_generator.load();
calls_to_neteq = other.calls_to_neteq.load();
decoded_normal = other.decoded_normal.load();
decoded_plc = other.decoded_plc.load();
decoded_cng = other.decoded_cng.load();
decoded_plc_cng = other.decoded_plc_cng.load();
decoded_muted_output = other.decoded_muted_output.load();
return *this;
}
std::atomic<int> calls_to_silence_generator; // Number of calls where silence generated,
std::atomic<int> calls_to_neteq; // Number of calls to NetEq.
std::atomic<int> decoded_normal; // Number of calls where audio RTP packet decoded.
std::atomic<int> decoded_plc; // Number of calls resulted in PLC.
std::atomic<int> decoded_cng; // Number of calls where comfort noise generated due to DTX.
std::atomic<int> decoded_plc_cng; // Number of calls resulted where PLC faded to CNG.
std::atomic<int> decoded_muted_output; // Number of calls returning a muted state output.
};
// ==================================================================
@ -406,6 +429,7 @@ enum { kPayloadNameSize = 32 };
enum { kMaxSimulcastStreams = 4 };
enum { kMaxSpatialLayers = 5 };
enum { kMaxTemporalStreams = 4 };
enum { kRIDSize = 32};
enum VideoCodecComplexity {
kComplexityNormal = 0,
@ -758,6 +782,17 @@ typedef StringRtpHeaderExtension StreamId;
// Mid represents RtpMid which is a string.
typedef StringRtpHeaderExtension Mid;
// Audio level of CSRCs See:
// https://tools.ietf.org/html/rfc6465
struct CsrcAudioLevelList {
CsrcAudioLevelList() : numAudioLevels(0) { }
CsrcAudioLevelList(const CsrcAudioLevelList&) = default;
CsrcAudioLevelList& operator=(const CsrcAudioLevelList&) = default;
uint8_t numAudioLevels;
// arrOfAudioLevels has the same ordering as RTPHeader.arrOfCSRCs
uint8_t arrOfAudioLevels[kRtpCsrcSize];
};
struct RTPHeaderExtension {
RTPHeaderExtension();
RTPHeaderExtension(const RTPHeaderExtension& other);
@ -801,6 +836,7 @@ struct RTPHeaderExtension {
// For identifying the media section used to interpret this RTP packet. See
// https://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-38
Mid mid;
CsrcAudioLevelList csrcAudioLevels;
};
struct RTPHeader {

View File

@ -9,8 +9,10 @@
*/
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "libyuv/planar_functions.h"
#include <string.h>
#include <limits>
#include "rtc_base/checks.h"
#include "api/video/i420_buffer.h"
@ -23,6 +25,7 @@ namespace webrtc {
size_t CalcBufferSize(VideoType type, int width, int height) {
RTC_DCHECK_GE(width, 0);
RTC_DCHECK_GE(height, 0);
size_t buffer_size = 0;
switch (type) {
case VideoType::kI420:

View File

@ -168,6 +168,16 @@ bool VideoAdapter::AdaptFrameResolution(int in_width,
// The max output pixel count is the minimum of the requests from
// OnOutputFormatRequest and OnResolutionRequest.
int max_pixel_count = resolution_request_max_pixel_count_;
if (scale_) {
// We calculate the scaled pixel count from the in_width and in_height,
// which is the input resolution. We then take the minimum of the scaled
// resolution and the current max_pixel_count. This will allow the
// quality scaler to reduce the resolution in response to load, but we
// will never go above the requested scaled resolution.
int scaled_pixel_count = (in_width*in_height/scale_resolution_by_)/scale_resolution_by_;
max_pixel_count = std::min(max_pixel_count, scaled_pixel_count);
}
if (requested_format_) {
max_pixel_count = std::min(
max_pixel_count, requested_format_->width * requested_format_->height);
@ -236,8 +246,8 @@ bool VideoAdapter::AdaptFrameResolution(int in_width,
if (scale.numerator != scale.denominator)
++frames_scaled_;
if (previous_width_ && (previous_width_ != *out_width ||
previous_height_ != *out_height)) {
if ((previous_width_ || scale_) && (previous_width_ != *out_width ||
previous_height_ != *out_height)) {
++adaption_changes_;
RTC_LOG(LS_INFO) << "Frame size changed: scaled " << frames_scaled_
<< " / out " << frames_out_ << " / in " << frames_in_
@ -272,4 +282,12 @@ void VideoAdapter::OnResolutionFramerateRequest(
max_framerate_request_ = max_framerate_fps;
}
// Records an externally requested downscale factor used by
// AdaptFrameResolution: the output pixel count is capped to roughly
// (input pixels / factor^2). An empty Optional disables scaling.
void VideoAdapter::OnScaleResolutionBy(
rtc::Optional<float> scale_resolution_by) {
rtc::CritScope cs(&critical_section_);
// Treat "no request" as a neutral factor of 1.0 for the stored value.
scale_resolution_by_ = scale_resolution_by.value_or(1.0);
RTC_DCHECK_GE(scale_resolution_by_, 1.0);
// scale_ records whether a request was present at all, so an explicit
// request of exactly 1.0 is still tracked as "scaling requested".
scale_ = static_cast<bool>(scale_resolution_by);
}
} // namespace cricket

View File

@ -62,6 +62,10 @@ class VideoAdapter {
int max_pixel_count,
int max_framerate_fps);
// Requests the output frame size from |AdaptFrameResolution| be scaled
// down from the input by a factor of scale_resolution_by (min 1.0)
virtual void OnScaleResolutionBy(rtc::Optional<float> scale_resolution_by);
private:
// Determine if frame should be dropped based on input fps and requested fps.
bool KeepFrame(int64_t in_timestamp_ns);
@ -86,6 +90,8 @@ class VideoAdapter {
int resolution_request_target_pixel_count_ RTC_GUARDED_BY(critical_section_);
int resolution_request_max_pixel_count_ RTC_GUARDED_BY(critical_section_);
int max_framerate_request_ RTC_GUARDED_BY(critical_section_);
float scale_resolution_by_ RTC_GUARDED_BY(critical_section_);
bool scale_ RTC_GUARDED_BY(critical_section_);
// The critical section to protect the above variables.
rtc::CriticalSection critical_section_;

View File

@ -102,6 +102,7 @@ int AcmReceiver::InsertPacket(const WebRtcRTPHeader& rtp_header,
} else {
last_audio_decoder_ = ci;
last_audio_format_ = neteq_->GetDecoderFormat(ci->pltype);
last_audio_format_clockrate_hz_ = last_audio_format_->clockrate_hz;
RTC_DCHECK(last_audio_format_);
last_packet_sample_rate_hz_ = ci->plfreq;
}
@ -121,8 +122,6 @@ int AcmReceiver::GetAudio(int desired_freq_hz,
AudioFrame* audio_frame,
bool* muted) {
RTC_DCHECK(muted);
// Accessing members, take the lock.
rtc::CritScope lock(&crit_sect_);
if (neteq_->GetAudio(audio_frame, muted) != NetEq::kOK) {
RTC_LOG(LERROR) << "AcmReceiver::GetAudio - NetEq Failed.";

View File

@ -15,6 +15,7 @@
#include <memory>
#include <string>
#include <vector>
#include <atomic>
#include "api/array_view.h"
#include "api/optional.h"
@ -284,12 +285,16 @@ class AcmReceiver {
rtc::Optional<CodecInst> last_audio_decoder_ RTC_GUARDED_BY(crit_sect_);
rtc::Optional<SdpAudioFormat> last_audio_format_ RTC_GUARDED_BY(crit_sect_);
ACMResampler resampler_ RTC_GUARDED_BY(crit_sect_);
std::unique_ptr<int16_t[]> last_audio_buffer_ RTC_GUARDED_BY(crit_sect_);
// After construction, this is only ever touched on the thread that calls
// AcmReceiver::GetAudio, and only modified in this method.
std::unique_ptr<int16_t[]> last_audio_buffer_;
CallStatistics call_stats_ RTC_GUARDED_BY(crit_sect_);
const std::unique_ptr<NetEq> neteq_; // NetEq is thread-safe; no lock needed.
const Clock* const clock_;
bool resampled_last_output_frame_ RTC_GUARDED_BY(crit_sect_);
std::atomic<bool> resampled_last_output_frame_;
rtc::Optional<int> last_packet_sample_rate_hz_ RTC_GUARDED_BY(crit_sect_);
std::atomic<int> last_audio_format_clockrate_hz_;
};
} // namespace acm2

View File

@ -1068,6 +1068,10 @@ rtc::Optional<SdpAudioFormat> AudioCodingModuleImpl::ReceiveFormat() const {
return receiver_.LastAudioFormat();
}
// Returns the sample rate (Hz) of the last audio payload received, as
// tracked by the ACM receiver.
int AudioCodingModuleImpl::ReceiveSampleRate() const {
  const int sample_rate_hz = receiver_.LastAudioSampleRate();
  return sample_rate_hz;
}
// Incoming packet from network parsed and ready for decode.
int AudioCodingModuleImpl::IncomingPacket(const uint8_t* incoming_payload,
const size_t payload_length,

View File

@ -36,54 +36,62 @@
namespace webrtc {
// The filter coefficient a = 2*cos(2*pi*f/fs) for the low frequency tone, for
// sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0 through 15.
// sample rates fs = {8000, 16000, 32000, 44100, 48000} Hz, and events 0 through 15.
// Values are in Q14.
const int DtmfToneGenerator::kCoeff1[4][16] = {
const int DtmfToneGenerator::kCoeff1[NumDtmfSampleRates][16] = {
{ 24219, 27980, 27980, 27980, 26956, 26956, 26956, 25701, 25701, 25701,
24219, 24219, 27980, 26956, 25701, 24219 },
{ 30556, 31548, 31548, 31548, 31281, 31281, 31281, 30951, 30951, 30951,
30556, 30556, 31548, 31281, 30951, 30556 },
{ 32210, 32462, 32462, 32462, 32394, 32394, 32394, 32311, 32311, 32311,
32210, 32210, 32462, 32394, 32311, 32210 },
{ 32474, 32607, 32607, 32607, 32571, 32571, 32571, 32527, 32527, 32527,
32474, 32474, 32607, 32571, 32527, 32474 },
{ 32520, 32632, 32632, 32632, 32602, 32602, 32602, 32564, 32564, 32564,
32520, 32520, 32632, 32602, 32564, 32520 } };
// The filter coefficient a = 2*cos(2*pi*f/fs) for the high frequency tone, for
// sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0 through 15.
// sample rates fs = {8000, 16000, 32000, 44100, 48000} Hz, and events 0 through 15.
// Values are in Q14.
const int DtmfToneGenerator::kCoeff2[4][16] = {
const int DtmfToneGenerator::kCoeff2[NumDtmfSampleRates][16] = {
{ 16325, 19073, 16325, 13085, 19073, 16325, 13085, 19073, 16325, 13085,
19073, 13085, 9315, 9315, 9315, 9315},
{ 28361, 29144, 28361, 27409, 29144, 28361, 27409, 29144, 28361, 27409,
29144, 27409, 26258, 26258, 26258, 26258},
{ 31647, 31849, 31647, 31400, 31849, 31647, 31400, 31849, 31647, 31400,
31849, 31400, 31098, 31098, 31098, 31098},
{ 32176, 32283, 32176, 32045, 32283, 32176, 32045, 32283, 32176, 32045,
32283, 32045, 31885, 31885, 31885, 31885},
{ 32268, 32359, 32268, 32157, 32359, 32268, 32157, 32359, 32268, 32157,
32359, 32157, 32022, 32022, 32022, 32022} };
// The initialization value x[-2] = sin(2*pi*f/fs) for the low frequency tone,
// for sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0-15.
// for sample rates fs = {8000, 16000, 32000, 44100, 48000} Hz, and events 0-15.
// Values are in Q14.
const int DtmfToneGenerator::kInitValue1[4][16] = {
const int DtmfToneGenerator::kInitValue1[NumDtmfSampleRates][16] = {
{ 11036, 8528, 8528, 8528, 9315, 9315, 9315, 10163, 10163, 10163, 11036,
11036, 8528, 9315, 10163, 11036},
{ 5918, 4429, 4429, 4429, 4879, 4879, 4879, 5380, 5380, 5380, 5918, 5918,
4429, 4879, 5380, 5918},
{ 3010, 2235, 2235, 2235, 2468, 2468, 2468, 2728, 2728, 2728, 3010, 3010,
2235, 2468, 2728, 3010},
{ 2190, 1624, 1624, 1624, 1794, 1794, 1794, 1984, 1984, 1984, 2190, 2190,
1624, 1794, 1984, 2190},
{ 2013, 1493, 1493, 1493, 1649, 1649, 1649, 1823, 1823, 1823, 2013, 2013,
1493, 1649, 1823, 2013 } };
// The initialization value x[-2] = sin(2*pi*f/fs) for the high frequency tone,
// for sample rates fs = {8000, 16000, 32000, 48000} Hz, and events 0-15.
// for sample rates fs = {8000, 16000, 32000, 44100, 48000} Hz, and events 0-15.
// Values are in Q14.
const int DtmfToneGenerator::kInitValue2[4][16] = {
const int DtmfToneGenerator::kInitValue2[NumDtmfSampleRates][16] = {
{ 14206, 13323, 14206, 15021, 13323, 14206, 15021, 13323, 14206, 15021,
13323, 15021, 15708, 15708, 15708, 15708},
{ 8207, 7490, 8207, 8979, 7490, 8207, 8979, 7490, 8207, 8979, 7490, 8979,
9801, 9801, 9801, 9801},
{ 4249, 3853, 4249, 4685, 3853, 4249, 4685, 3853, 4249, 4685, 3853, 4685,
5164, 5164, 5164, 5164},
{ 3100, 2808, 3100, 3422, 3778, 2808, 3100, 3422, 3778, 2808, 3100, 3422,
3778, 3778, 3778, 3778},
{ 2851, 2582, 2851, 3148, 2582, 2851, 3148, 2582, 2851, 3148, 2582, 3148,
3476, 3476, 3476, 3476} };

View File

@ -17,6 +17,15 @@
namespace webrtc {
// Number of distinct sample rates for which the DTMF oscillator tables
// (kCoeff1/kCoeff2/kInitValue1/kInitValue2) provide a row of entries.
const int NumDtmfSampleRates = 5;

// The supported sample rates in Hz, ordered to match the rows of the
// oscillator tables.
const int DtmfSampleRates[NumDtmfSampleRates] = {8000, 16000, 32000, 44100,
                                                 48000};
// This class provides a generator for DTMF tones.
class DtmfToneGenerator {
public:
@ -33,10 +42,11 @@ class DtmfToneGenerator {
virtual bool initialized() const;
private:
static const int kCoeff1[4][16]; // 1st oscillator model coefficient table.
static const int kCoeff2[4][16]; // 2nd oscillator model coefficient table.
static const int kInitValue1[4][16]; // Initialization for 1st oscillator.
static const int kInitValue2[4][16]; // Initialization for 2nd oscillator.
static const int kCoeff1[NumDtmfSampleRates][16]; // 1st oscillator model coefficient table.
static const int kCoeff2[NumDtmfSampleRates][16]; // 2nd oscillator model coefficient table.
static const int kInitValue1[NumDtmfSampleRates][16]; // Initialization for 1st oscillator.
static const int kInitValue2[NumDtmfSampleRates][16]; // Initialization for 2nd oscillator.
static const int kAmplitude[64]; // Amplitude for 0 through -63 dBm0.
static const int16_t kAmpMultiplier = 23171; // 3 dB attenuation (in Q15).

View File

@ -214,6 +214,12 @@ int16_t Merge::SignalScaling(const int16_t* input, size_t input_length,
// Adjust muting factor if new vector is more or less of the BGN energy.
const auto mod_input_length = rtc::SafeMin<size_t>(
64 * rtc::dchecked_cast<size_t>(fs_mult_), input_length);
// Missing input, do no muting
if (mod_input_length == 0) {
return 16384;
}
const int16_t expanded_max =
WebRtcSpl_MaxAbsValueW16(expanded_signal, mod_input_length);
int32_t factor = (expanded_max * expanded_max) /

View File

@ -429,7 +429,6 @@ rtc::Optional<uint32_t> NetEqImpl::GetPlayoutTimestamp() const {
}
int NetEqImpl::last_output_sample_rate_hz() const {
rtc::CritScope lock(&crit_sect_);
return last_output_sample_rate_hz_;
}

View File

@ -412,7 +412,7 @@ class NetEqImpl : public webrtc::NetEq {
StatisticsCalculator stats_ RTC_GUARDED_BY(crit_sect_);
int fs_hz_ RTC_GUARDED_BY(crit_sect_);
int fs_mult_ RTC_GUARDED_BY(crit_sect_);
int last_output_sample_rate_hz_ RTC_GUARDED_BY(crit_sect_);
std::atomic<int> last_output_sample_rate_hz_;
size_t output_size_samples_ RTC_GUARDED_BY(crit_sect_);
size_t decoder_frame_length_ RTC_GUARDED_BY(crit_sect_);
Modes last_mode_ RTC_GUARDED_BY(crit_sect_);

View File

@ -11,8 +11,8 @@
#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_SINGLE_RW_FIFO_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_SINGLE_RW_FIFO_H_
#include "webrtc/system_wrappers/include/atomic32.h"
#include "webrtc/typedefs.h"
#include "system_wrappers/include/atomic32.h"
#include "typedefs.h"
namespace webrtc {

View File

@ -33,7 +33,7 @@ namespace webrtc {
// Functor used to use as a custom deleter in the map of file pointers to raw
// files.
struct RawFileCloseFunctor {
void operator()(FILE* f) const { fclose(f); }
void operator()(FILE* f) const { if (f) fclose(f); }
};
#endif
@ -52,6 +52,7 @@ class ApmDataDumper {
void InitiateNewSetOfRecordings() {
#if WEBRTC_APM_DEBUG_DUMP == 1
++recording_set_index_;
debug_written_ = 0;
#endif
}
@ -59,15 +60,23 @@ class ApmDataDumper {
// various formats.
void DumpRaw(const char* name, double v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
fwrite(&v, sizeof(v), 1, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
fwrite(&v, sizeof(v), 1, file);
}
}
#endif
}
void DumpRaw(const char* name, size_t v_length, const double* v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
fwrite(v, sizeof(v[0]), v_length, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
fwrite(v, sizeof(v[0]), v_length, file);
}
}
#endif
}
@ -79,15 +88,23 @@ class ApmDataDumper {
void DumpRaw(const char* name, float v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
fwrite(&v, sizeof(v), 1, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
fwrite(&v, sizeof(v), 1, file);
}
}
#endif
}
void DumpRaw(const char* name, size_t v_length, const float* v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
fwrite(v, sizeof(v[0]), v_length, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
fwrite(v, sizeof(v[0]), v_length, file);
}
}
#endif
}
@ -105,10 +122,14 @@ class ApmDataDumper {
void DumpRaw(const char* name, size_t v_length, const bool* v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
for (size_t k = 0; k < v_length; ++k) {
int16_t value = static_cast<int16_t>(v[k]);
fwrite(&value, sizeof(value), 1, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
for (int k = 0; k < v_length; ++k) {
int16_t value = static_cast<int16_t>(v[k]);
fwrite(&value, sizeof(value), 1, file);
}
}
}
#endif
}
@ -121,15 +142,23 @@ class ApmDataDumper {
void DumpRaw(const char* name, int16_t v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
fwrite(&v, sizeof(v), 1, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
fwrite(&v, sizeof(v), 1, file);
}
}
#endif
}
void DumpRaw(const char* name, size_t v_length, const int16_t* v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
fwrite(v, sizeof(v[0]), v_length, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
fwrite(v, sizeof(v[0]), v_length, file);
}
}
#endif
}
@ -141,29 +170,45 @@ class ApmDataDumper {
void DumpRaw(const char* name, int32_t v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
fwrite(&v, sizeof(v), 1, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
fwrite(&v, sizeof(v), 1, file);
}
}
#endif
}
void DumpRaw(const char* name, size_t v_length, const int32_t* v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
fwrite(v, sizeof(v[0]), v_length, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
fwrite(v, sizeof(v[0]), v_length, file);
}
}
#endif
}
void DumpRaw(const char* name, size_t v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
fwrite(&v, sizeof(v), 1, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
fwrite(&v, sizeof(v), 1, file);
}
}
#endif
}
void DumpRaw(const char* name, size_t v_length, const size_t* v) {
#if WEBRTC_APM_DEBUG_DUMP == 1
FILE* file = GetRawFile(name);
fwrite(v, sizeof(v[0]), v_length, file);
if (webrtc::Trace::aec_debug()) {
FILE* file = GetRawFile(name);
if (file) {
fwrite(v, sizeof(v[0]), v_length, file);
}
}
#endif
}
@ -179,8 +224,16 @@ class ApmDataDumper {
int sample_rate_hz,
int num_channels) {
#if WEBRTC_APM_DEBUG_DUMP == 1
WavWriter* file = GetWavFile(name, sample_rate_hz, num_channels);
file->WriteSamples(v, v_length);
if (webrtc::Trace::aec_debug()) {
WavWriter* file = GetWavFile(name, sample_rate_hz, num_channels);
file->WriteSamples(v, v_length);
// Cheat and use aec_near as a stand-in for "size of the largest file"
// in the dump. We're looking to limit the total time, and that's a
// reasonable stand-in.
if (strcmp(name, "aec_near") == 0) {
updateDebugWritten(v_length * sizeof(float));
}
}
#endif
}

View File

@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/desktop_capture/app_capturer.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/app_capturer.h"
#include "modules/desktop_capture/desktop_capture_options.h"
namespace webrtc {

View File

@ -14,9 +14,9 @@
#include <vector>
#include <string>
#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
#include "webrtc/typedefs.h"
#include "modules/desktop_capture/desktop_capture_types.h"
#include "modules/desktop_capture/desktop_capturer.h"
#include "typedefs.h"
namespace webrtc {

View File

@ -8,18 +8,17 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/desktop_capture/app_capturer.h"
#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
#include "webrtc/modules/desktop_capture/win/win_shared.h"
#include "modules/desktop_capture/app_capturer.h"
#include "modules/desktop_capture/shared_desktop_frame.h"
#include "modules/desktop_capture/win/win_shared.h"
#include <windows.h>
#include <vector>
#include <cassert>
#include "webrtc/modules/desktop_capture/desktop_capturer.h"
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame_win.h"
#include "webrtc/system_wrappers/include/logging.h"
#include "modules/desktop_capture/desktop_capturer.h"
#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_frame_win.h"
namespace webrtc {

View File

@ -7,9 +7,9 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/desktop_capture/app_capturer.h"
#include "webrtc/modules/desktop_capture/shared_desktop_frame.h"
#include "webrtc/modules/desktop_capture/x11/shared_x_util.h"
#include "modules/desktop_capture/app_capturer.h"
#include "modules/desktop_capture/shared_desktop_frame.h"
#include "modules/desktop_capture/x11/shared_x_util.h"
#include <assert.h>
#include <string.h>
@ -21,12 +21,12 @@
#include <algorithm>
#include "webrtc/modules/desktop_capture/desktop_capture_options.h"
#include "webrtc/modules/desktop_capture/desktop_frame.h"
#include "webrtc/modules/desktop_capture/x11/shared_x_display.h"
#include "webrtc/modules/desktop_capture/x11/x_error_trap.h"
#include "webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h"
#include "webrtc/system_wrappers/include/logging.h"
#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_frame.h"
#include "modules/desktop_capture/x11/shared_x_display.h"
#include "modules/desktop_capture/x11/x_error_trap.h"
#include "modules/desktop_capture/x11/x_server_pixel_buffer.h"
#include "rtc_base/logging.h"
namespace webrtc {
@ -220,7 +220,7 @@ bool AppCapturerLinux::UpdateRegions() {
unsigned int num_children;
int status = XQueryTree(GetDisplay(), root_window, &root_return, &parent, &children, &num_children);
if (status == 0) {
LOG(LS_ERROR) << "Failed to query for child windows for screen " << screen;
RTC_LOG(LS_ERROR) << "Failed to query for child windows for screen " << screen;
continue;
}
for (unsigned int i = 0; i < num_children; ++i) {

View File

@ -36,6 +36,12 @@ void BlankDetectorDesktopCapturerWrapper::Start(
callback_ = callback;
}
void BlankDetectorDesktopCapturerWrapper::Stop()
{
capturer_->Stop();
callback_ = nullptr;
}
void BlankDetectorDesktopCapturerWrapper::SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory));

View File

@ -36,6 +36,7 @@ class BlankDetectorDesktopCapturerWrapper final
// DesktopCapturer interface.
void Start(DesktopCapturer::Callback* callback) override;
void Stop() override;
void SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void CaptureFrame() override;

View File

@ -159,6 +159,13 @@ void DesktopAndCursorComposer::Start(DesktopCapturer::Callback* callback) {
desktop_capturer_->Start(this);
}
void DesktopAndCursorComposer::Stop() {
desktop_capturer_->Stop();
if (mouse_monitor_.get())
mouse_monitor_->Stop();
callback_ = NULL;
}
void DesktopAndCursorComposer::SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
desktop_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory));
@ -217,4 +224,8 @@ void DesktopAndCursorComposer::OnMouseCursorPosition(
}
}
bool DesktopAndCursorComposer::FocusOnSelectedSource() {
return desktop_capturer_->FocusOnSelectedSource();
}
} // namespace webrtc

View File

@ -47,10 +47,12 @@ class DesktopAndCursorComposer : public DesktopCapturer,
// DesktopCapturer interface.
void Start(DesktopCapturer::Callback* callback) override;
void Stop() override;
void SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void CaptureFrame() override;
void SetExcludedWindow(WindowId window) override;
bool FocusOnSelectedSource() override;
private:
// Allows test cases to use a fake MouseCursorMonitor implementation.

View File

@ -112,7 +112,7 @@ class FakeMouseMonitor : public MouseCursorMonitor {
hotspot_ = hotspot;
}
void Start(Callback* callback, Mode mode) { callback_ = callback; }
void Init(Callback* callback, Mode mode) { callback_ = callback; }
void Stop() override {};
void Capture() override {

View File

@ -11,6 +11,9 @@
#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_
#define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_
#ifndef XP_WIN
#include <sys/types.h> // pid_t
#endif
#include <stdint.h>
#include "typedefs.h" // NOLINT(build/include)
@ -38,6 +41,13 @@ const ScreenId kFullDesktopScreenId = -1;
const ScreenId kInvalidScreenId = -2;
typedef intptr_t ProcessId;
const ProcessId DesktopProcessId = 0;
#ifdef XP_WIN
typedef int pid_t;
#endif
// An integer to attach to each DesktopFrame to differentiate the generator of
// the frame.
namespace DesktopCapturerId {

View File

@ -2,7 +2,7 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "webrtc/modules/desktop_capture/desktop_device_info.h"
#include "modules/desktop_capture/desktop_device_info.h"
#include <cstddef>
#include <cstdlib>

View File

@ -6,7 +6,7 @@
#define WEBRTC_MODULES_DESKTOP_CAPTURE_DEVICE_INFO_H_
#include <map>
#include "webrtc/modules/desktop_capture/desktop_capture_types.h"
#include "modules/desktop_capture/desktop_capture_types.h"
namespace webrtc {

View File

@ -12,7 +12,7 @@
#include <string.h>
#include "third_party/libyuv/include/libyuv/rotate_argb.h"
#include "libyuv/rotate_argb.h"
#include "rtc_base/checks.h"
namespace webrtc {

View File

@ -53,6 +53,7 @@ class FakeDesktopCapturer : public DesktopCapturer {
// DesktopCapturer interface
void Start(DesktopCapturer::Callback* callback) override;
void Stop() override {};
void CaptureFrame() override;
void SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;

View File

@ -89,6 +89,11 @@ void FallbackDesktopCapturerWrapper::Start(
callback_ = callback;
}
void FallbackDesktopCapturerWrapper::Stop()
{
callback_ = nullptr;
}
void FallbackDesktopCapturerWrapper::SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
shared_memory_factory_ = std::move(shared_memory_factory);

View File

@ -37,6 +37,7 @@ class FallbackDesktopCapturerWrapper final : public DesktopCapturer,
// DesktopCapturer interface.
void Start(DesktopCapturer::Callback* callback) override;
void Stop() override;
void SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void CaptureFrame() override;

View File

@ -14,16 +14,6 @@
#include <algorithm>
#include <Cocoa/Cocoa.h>
#if !defined(MAC_OS_X_VERSION_10_7) || \
MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_7
@interface NSScreen (LionAPI)
- (CGFloat)backingScaleFactor;
- (NSRect)convertRectToBacking:(NSRect)aRect;
@end
#endif // MAC_OS_X_VERSION_10_7
namespace webrtc {
namespace {

View File

@ -148,6 +148,19 @@ bool GetWindowList(rtc::FunctionView<bool(CFDictionaryRef)> on_window,
continue;
}
//Skip windows of zero area
CFDictionaryRef bounds_ref = reinterpret_cast<CFDictionaryRef>(
CFDictionaryGetValue(window, kCGWindowBounds));
CGRect bounds_rect;
if(!(bounds_ref) ||
!(CGRectMakeWithDictionaryRepresentation(bounds_ref, &bounds_rect))){
continue;
}
bounds_rect = CGRectStandardize(bounds_rect);
if((bounds_rect.size.width <= 0) || (bounds_rect.size.height <= 0)){
continue;
}
// Skip windows with layer=0 (menu, dock).
// TODO(zijiehe): The windows with layer != 0 are skipped, is this a bug in
// code (not likely) or a bug in comments? What's the meaning of window

View File

@ -97,6 +97,9 @@ class MouseCursorMonitor {
// capturer is destroyed.
virtual void Init(Callback* callback, Mode mode) = 0;
// clears the callback
virtual void Stop() = 0;
// Captures current cursor shape and position (depending on the |mode| passed
// to Init()). Calls Callback::OnMouseCursor() if cursor shape has
// changed since the last call (or when Capture() is called for the first

View File

@ -62,7 +62,7 @@ class MouseCursorMonitorMac : public MouseCursorMonitor {
ScreenId screen_id);
~MouseCursorMonitorMac() override;
void Start(Callback* callback, Mode mode) override;
void Init(Callback* callback, Mode mode) override;
void Stop() override;
void Capture() override;
@ -109,6 +109,10 @@ void MouseCursorMonitorMac::Init(Callback* callback, Mode mode) {
mode_ = mode;
}
void MouseCursorMonitorMac::Stop() {
callback_ = NULL;
}
void MouseCursorMonitorMac::Capture() {
assert(callback_);

View File

@ -66,7 +66,7 @@ TEST_F(MouseCursorMonitorTest, MAYBE(FromScreen)) {
DesktopCaptureOptions::CreateDefault(),
webrtc::kFullDesktopScreenId));
assert(capturer.get());
capturer->Start(this, MouseCursorMonitor::SHAPE_AND_POSITION);
capturer->Init(this, MouseCursorMonitor::SHAPE_AND_POSITION);
capturer->Capture();
EXPECT_TRUE(cursor_image_.get());
@ -105,7 +105,7 @@ TEST_F(MouseCursorMonitorTest, MAYBE(FromWindow)) {
DesktopCaptureOptions::CreateDefault(), sources[i].id));
assert(capturer.get());
capturer->Start(this, MouseCursorMonitor::SHAPE_AND_POSITION);
capturer->Init(this, MouseCursorMonitor::SHAPE_AND_POSITION);
capturer->Capture();
EXPECT_TRUE(cursor_image_.get());
@ -120,7 +120,7 @@ TEST_F(MouseCursorMonitorTest, MAYBE(ShapeOnly)) {
DesktopCaptureOptions::CreateDefault(),
webrtc::kFullDesktopScreenId));
assert(capturer.get());
capturer->Start(this, MouseCursorMonitor::SHAPE_ONLY);
capturer->Init(this, MouseCursorMonitor::SHAPE_ONLY);
capturer->Capture();
EXPECT_TRUE(cursor_image_.get());

View File

@ -24,6 +24,8 @@
#include "modules/desktop_capture/win/window_capture_utils.h"
#include "rtc_base/logging.h"
#include <windows.h>
namespace webrtc {
namespace {
@ -43,7 +45,7 @@ class MouseCursorMonitorWin : public MouseCursorMonitor {
explicit MouseCursorMonitorWin(ScreenId screen);
~MouseCursorMonitorWin() override;
void Start(Callback* callback, Mode mode) override;
void Init(Callback* callback, Mode mode) override;
void Stop() override;
void Capture() override;
@ -92,6 +94,7 @@ MouseCursorMonitorWin::~MouseCursorMonitorWin() {
void MouseCursorMonitorWin::Init(Callback* callback, Mode mode) {
assert(!callback_);
assert(callback);
assert(IsGUIThread(false));
callback_ = callback;
mode_ = mode;
@ -99,7 +102,16 @@ void MouseCursorMonitorWin::Init(Callback* callback, Mode mode) {
desktop_dc_ = GetDC(NULL);
}
void MouseCursorMonitorWin::Stop() {
callback_ = NULL;
if (desktop_dc_)
ReleaseDC(NULL, desktop_dc_);
desktop_dc_ = NULL;
}
void MouseCursorMonitorWin::Capture() {
assert(IsGUIThread(false));
assert(callback_);
CURSORINFO cursor_info;
@ -111,7 +123,8 @@ void MouseCursorMonitorWin::Capture() {
}
if (!IsSameCursorShape(cursor_info, last_cursor_)) {
if (cursor_info.flags == CURSOR_SUPPRESSED) {
// Mozilla - CURSOR_SUPPRESSED is win8 and above; so we seem not to be able to see the symbol
if (cursor_info.flags != CURSOR_SHOWING) {
// The cursor is intentionally hidden now, send an empty bitmap.
last_cursor_ = cursor_info;
callback_->OnMouseCursor(new MouseCursor(
@ -172,6 +185,7 @@ void MouseCursorMonitorWin::Capture() {
}
DesktopRect MouseCursorMonitorWin::GetScreenRect() {
assert(IsGUIThread(false));
assert(screen_ != kInvalidScreenId);
if (screen_ == kFullDesktopScreenId) {
return DesktopRect::MakeXYWH(

View File

@ -66,7 +66,7 @@ class MouseCursorMonitorX11 : public MouseCursorMonitor,
MouseCursorMonitorX11(const DesktopCaptureOptions& options, Window window, Window inner_window);
~MouseCursorMonitorX11() override;
void Start(Callback* callback, Mode mode) override;
void Init(Callback* callback, Mode mode) override;
void Stop() override;
void Capture() override;
@ -128,14 +128,11 @@ MouseCursorMonitorX11::MouseCursorMonitorX11(
}
MouseCursorMonitorX11::~MouseCursorMonitorX11() {
if (have_xfixes_) {
x_display_->RemoveEventHandler(xfixes_event_base_ + XFixesCursorNotify,
this);
}
Stop();
}
void MouseCursorMonitorX11::Init(Callback* callback, Mode mode) {
// Init can be called only once per instance of MouseCursorMonitor.
// Init can be called only if not started
RTC_DCHECK(!callback_);
RTC_DCHECK(callback);
@ -147,6 +144,7 @@ void MouseCursorMonitorX11::Init(Callback* callback, Mode mode) {
if (have_xfixes_) {
// Register for changes to the cursor shape.
XErrorTrap error_trap(display());
XFixesSelectCursorInput(display(), window_, XFixesDisplayCursorNotifyMask);
x_display_->AddEventHandler(xfixes_event_base_ + XFixesCursorNotify, this);
@ -156,6 +154,14 @@ void MouseCursorMonitorX11::Init(Callback* callback, Mode mode) {
}
}
void MouseCursorMonitorX11::Stop() {
callback_ = NULL;
if (have_xfixes_) {
x_display_->RemoveEventHandler(xfixes_event_base_ + XFixesCursorNotify,
this);
}
}
void MouseCursorMonitorX11::Capture() {
RTC_DCHECK(callback_);

View File

@ -38,6 +38,7 @@ std::unique_ptr<DesktopCapturer> CreateScreenCapturerWinDirectx() {
std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateRawScreenCapturer(
const DesktopCaptureOptions& options) {
std::unique_ptr<DesktopCapturer> capturer(new ScreenCapturerWinGdi(options));
#ifdef CAPTURE_ALLOW_DIRECTX
if (options.allow_directx_capturer()) {
// |dxgi_duplicator_controller| should be alive in this scope to ensure it
// won't unload DxgiDuplicatorController.
@ -47,6 +48,7 @@ std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateRawScreenCapturer(
CreateScreenCapturerWinDirectx(), std::move(capturer)));
}
}
#endif
if (options.allow_use_magnification_api()) {
// ScreenCapturerWinMagnifier cannot work on Windows XP or earlier, as well

View File

@ -418,7 +418,7 @@ std::unique_ptr<DesktopCapturer> DesktopCapturer::CreateRawScreenCapturer(
return nullptr;
}
return std::move(capturer);
return capturer;
}
} // namespace webrtc

View File

@ -2,9 +2,9 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "webrtc/modules/desktop_capture/win/desktop_device_info_win.h"
#include "webrtc/modules/desktop_capture/win/screen_capture_utils.h"
#include "webrtc/modules/desktop_capture/win/win_shared.h"
#include "modules/desktop_capture/win/desktop_device_info_win.h"
#include "modules/desktop_capture/win/screen_capture_utils.h"
#include "modules/desktop_capture/win/win_shared.h"
#include <inttypes.h>
#include <stdio.h>
#include <VersionHelpers.h>

View File

@ -5,8 +5,8 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_DEVICE_INFO_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_DEVICE_INFO_H_
#include "webrtc/typedefs.h"
#include "webrtc/modules/desktop_capture/desktop_device_info.h"
#include "typedefs.h"
#include "modules/desktop_capture/desktop_device_info.h"
namespace webrtc {

View File

@ -42,7 +42,7 @@ bool GetScreenList(DesktopCapturer::SourceList* screens,
if (!(device.StateFlags & DISPLAY_DEVICE_ACTIVE))
continue;
screens->push_back({device_index, std::string()});
screens->push_back({device_index, 0, std::string()});
if (device_names) {
device_names->push_back(rtc::ToUtf8(device.DeviceName));
}
@ -74,6 +74,7 @@ DesktopRect GetFullscreenRect() {
DesktopRect GetScreenRect(DesktopCapturer::SourceId screen,
const std::wstring& device_key) {
RTC_DCHECK(IsGUIThread(false));
if (screen == kFullDesktopScreenId) {
return GetFullscreenRect();
}

View File

@ -110,6 +110,10 @@ void ScreenCapturerWinDirectx::Start(Callback* callback) {
callback_ = callback;
}
void ScreenCapturerWinDirectx::Stop() {
callback_ = nullptr;
}
void ScreenCapturerWinDirectx::SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
shared_memory_factory_ = std::move(shared_memory_factory);

View File

@ -74,6 +74,7 @@ class ScreenCapturerWinDirectx : public DesktopCapturer {
// DesktopCapturer implementation.
void Start(Callback* callback) override;
void Stop() override;
void SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void CaptureFrame() override;

View File

@ -113,8 +113,28 @@ void ScreenCapturerWinGdi::Start(Callback* callback) {
// Vote to disable Aero composited desktop effects while capturing. Windows
// will restore Aero automatically if the process exits. This has no effect
// under Windows 8 or higher. See crbug.com/124018.
if (composition_func_)
(*composition_func_)(DWM_EC_DISABLECOMPOSITION);
if (disable_composition_) {
if (composition_func_)
(*composition_func_)(DWM_EC_DISABLECOMPOSITION);
}
}
void ScreenCapturerWinGdi::Stop() {
if (desktop_dc_) {
ReleaseDC(NULL, desktop_dc_);
desktop_dc_ = NULL;
}
if (memory_dc_) {
DeleteDC(memory_dc_);
memory_dc_ = NULL;
}
if (disable_composition_) {
// Restore Aero.
if (composition_func_)
(*composition_func_)(DWM_EC_ENABLECOMPOSITION);
}
callback_ = NULL;
}
void ScreenCapturerWinGdi::PrepareCaptureResources() {

View File

@ -76,6 +76,9 @@ class ScreenCapturerWinGdi : public DesktopCapturer {
HMODULE dwmapi_library_ = NULL;
DwmEnableCompositionFunc composition_func_ = nullptr;
DwmIsCompositionEnabledFunc composition_enabled_func_;
bool disable_composition_;
RTC_DISALLOW_COPY_AND_ASSIGN(ScreenCapturerWinGdi);
};

View File

@ -44,19 +44,7 @@ static LPCTSTR kMagnifierWindowName = L"MagnifierWindow";
ScreenCapturerWinMagnifier::ScreenCapturerWinMagnifier() = default;
ScreenCapturerWinMagnifier::~ScreenCapturerWinMagnifier() {
// DestroyWindow must be called before MagUninitialize. magnifier_window_ is
// destroyed automatically when host_window_ is destroyed.
if (host_window_)
DestroyWindow(host_window_);
if (magnifier_initialized_)
mag_uninitialize_func_();
if (mag_lib_handle_)
FreeLibrary(mag_lib_handle_);
if (desktop_dc_)
ReleaseDC(NULL, desktop_dc_);
Stop();
}
void ScreenCapturerWinMagnifier::Start(Callback* callback) {
@ -69,6 +57,32 @@ void ScreenCapturerWinMagnifier::Start(Callback* callback) {
}
}
void ScreenCapturerWinMagnifier::Stop() {
callback_ = NULL;
// DestroyWindow must be called before MagUninitialize. magnifier_window_ is
// destroyed automatically when host_window_ is destroyed.
if (host_window_) {
DestroyWindow(host_window_);
host_window_ = NULL;
}
if (magnifier_initialized_) {
mag_uninitialize_func_();
magnifier_initialized_ = false;
}
if (mag_lib_handle_) {
FreeLibrary(mag_lib_handle_);
mag_lib_handle_ = NULL;
}
if (desktop_dc_) {
ReleaseDC(NULL, desktop_dc_);
desktop_dc_ = NULL;
}
}
void ScreenCapturerWinMagnifier::SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
shared_memory_factory_ = std::move(shared_memory_factory);

View File

@ -44,6 +44,7 @@ class ScreenCapturerWinMagnifier : public DesktopCapturer {
// Overridden from ScreenCapturer:
void Start(Callback* callback) override;
void Stop() override;
void SetSharedMemoryFactory(
std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
void CaptureFrame() override;

View File

@ -4,7 +4,7 @@
#include <windows.h>
#include <assert.h>
#include "webrtc/modules/desktop_capture/win/win_shared.h"
#include "modules/desktop_capture/win/win_shared.h"
namespace webrtc {

View File

@ -7,8 +7,11 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURE_UTILS_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_WIN_WINDOW_CAPTURE_UTILS_H_
#include <windows.h>
#include <dwmapi.h>
#include "modules/desktop_capture/desktop_geometry.h"
#include "rtc_base/constructormagic.h"
@ -57,7 +60,7 @@ bool GetDcSize(HDC hdc, DesktopSize* size);
// function returns false if native APIs fail.
bool IsWindowMaximized(HWND window, bool* result);
typedef HRESULT (WINAPI *DwmIsCompositionEnabledFunc)(BOOL* enabled);
typedef HRESULT (WINAPI *DwmIsCompositionEnabledFunc)(BOOL*);
class AeroChecker {
public:
AeroChecker();
@ -73,3 +76,5 @@ class AeroChecker {
};
} // namespace webrtc
#endif

View File

@ -55,6 +55,7 @@ class WindowCapturerMac : public DesktopCapturer {
// DesktopCapturer interface.
void Start(Callback* callback) override;
void Stop() override;
void CaptureFrame() override;
bool GetSourceList(SourceList* sources) override;
bool SelectSource(SourceId id) override;

View File

@ -21,12 +21,14 @@
#include "rtc_base/constructormagic.h"
#include "rtc_base/logging.h"
#include "rtc_base/win32.h"
#include <VersionHelpers.h>
namespace webrtc {
namespace {
BOOL CALLBACK WindowsEnumerationHandler(HWND hwnd, LPARAM param) {
assert(IsGUIThread(false));
DesktopCapturer::SourceList* list =
reinterpret_cast<DesktopCapturer::SourceList*>(param);
@ -66,6 +68,10 @@ BOOL CALLBACK WindowsEnumerationHandler(HWND hwnd, LPARAM param) {
DesktopCapturer::Source window;
window.id = reinterpret_cast<WindowId>(hwnd);
DWORD pid;
GetWindowThreadProcessId(hwnd, &pid);
window.pid = (pid_t)pid;
const size_t kTitleLength = 500;
WCHAR window_title[kTitleLength];
// Truncate the title if it's longer than kTitleLength.
@ -76,6 +82,13 @@ BOOL CALLBACK WindowsEnumerationHandler(HWND hwnd, LPARAM param) {
if (window.title.empty())
return TRUE;
// Skip windows of zero visible area, except IconicWindows
RECT bounds;
if(GetClientRect(hwnd,&bounds) && !IsIconic(hwnd)
&& IsRectEmpty(&bounds)){
return TRUE;
}
list->push_back(window);
return TRUE;
@ -116,6 +129,7 @@ class WindowCapturerWin : public DesktopCapturer {
// DesktopCapturer interface.
void Start(Callback* callback) override;
void Stop() override;
void CaptureFrame() override;
bool GetSourceList(SourceList* sources) override;
bool SelectSource(SourceId id) override;
@ -146,6 +160,7 @@ WindowCapturerWin::WindowCapturerWin() {}
WindowCapturerWin::~WindowCapturerWin() {}
bool WindowCapturerWin::GetSourceList(SourceList* sources) {
assert(IsGUIThread(false));
SourceList result;
LPARAM param = reinterpret_cast<LPARAM>(&result);
// EnumWindows only enumerates root windows.
@ -200,7 +215,12 @@ void WindowCapturerWin::Start(Callback* callback) {
callback_ = callback;
}
void WindowCapturerWin::Stop() {
callback_ = NULL;
}
void WindowCapturerWin::CaptureFrame() {
assert(IsGUIThread(false));
if (!window_) {
RTC_LOG(LS_ERROR) << "Window hasn't been selected: " << GetLastError();
callback_->OnCaptureResult(Result::ERROR_PERMANENT, nullptr);

View File

@ -28,6 +28,7 @@
#include "rtc_base/constructormagic.h"
#include "rtc_base/logging.h"
#include "rtc_base/scoped_ref_ptr.h"
#include "modules/desktop_capture/x11/shared_x_util.h"
namespace webrtc {
@ -41,6 +42,7 @@ class WindowCapturerLinux : public DesktopCapturer,
// DesktopCapturer interface.
void Start(Callback* callback) override;
void Stop() override;
void CaptureFrame() override;
bool GetSourceList(SourceList* sources) override;
bool SelectSource(SourceId id) override;
@ -56,6 +58,9 @@ class WindowCapturerLinux : public DesktopCapturer,
// Returns window title for the specified X |window|.
bool GetWindowTitle(::Window window, std::string* title);
// Returns the id of the owning process.
int GetWindowProcessID(::Window window);
Callback* callback_ = nullptr;
rtc::scoped_refptr<SharedXDisplay> x_display_;
@ -96,6 +101,7 @@ bool WindowCapturerLinux::GetSourceList(SourceList* sources) {
[this, sources](::Window window) {
Source w;
w.id = window;
w.pid = (pid_t)GetWindowProcessID(window);
if (this->GetWindowTitle(window, &w.title)) {
sources->push_back(w);
}
@ -180,15 +186,19 @@ void WindowCapturerLinux::Start(Callback* callback) {
callback_ = callback;
}
void WindowCapturerLinux::Stop() {
callback_ = NULL;
}
void WindowCapturerLinux::CaptureFrame() {
x_display_->ProcessPendingXEvents();
if (!x_server_pixel_buffer_.IsWindowValid()) {
RTC_LOG(LS_INFO) << "The window is no longer valid.";
callback_->OnCaptureResult(Result::ERROR_PERMANENT, nullptr);
return;
}
x_display_->ProcessPendingXEvents();
if (!has_composite_extension_) {
// Without the Xcomposite extension we capture when the whole window is
// visible on screen and not covered by any other window. This is not

View File

@ -2,14 +2,13 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "webrtc/modules/desktop_capture/x11/desktop_device_info_x11.h"
#include "webrtc/system_wrappers/include/logging.h"
#include "modules/desktop_capture/x11/desktop_device_info_x11.h"
#include <inttypes.h>
#include <unistd.h>
#include <stdio.h>
#include "webrtc/modules/desktop_capture/x11/shared_x_util.h"
#include "webrtc/modules/desktop_capture/x11/x_error_trap.h"
#include "webrtc/modules/desktop_capture/x11/x_server_pixel_buffer.h"
#include "modules/desktop_capture/x11/shared_x_util.h"
#include "modules/desktop_capture/x11/x_error_trap.h"
#include "modules/desktop_capture/x11/x_server_pixel_buffer.h"
namespace webrtc {
@ -61,7 +60,7 @@ void DesktopDeviceInfoX11::InitializeApplicationList() {
int status = XQueryTree(SharedDisplay->display(), root_window, &root_window, &parent,
&children, &num_children);
if (status == 0) {
LOG(LS_ERROR) << "Failed to query for child windows for screen " << screen;
RTC_LOG(LS_ERROR) << "Failed to query for child windows for screen " << screen;
continue;
}

View File

@ -9,8 +9,8 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_X11_DEVICE_INFO_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_X11_DEVICE_INFO_H_
#include "webrtc/typedefs.h"
#include "webrtc/modules/desktop_capture/desktop_device_info.h"
#include "typedefs.h"
#include "modules/desktop_capture/desktop_device_info.h"
namespace webrtc {

View File

@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/desktop_capture/x11/shared_x_util.h"
#include "modules/desktop_capture/x11/shared_x_util.h"
namespace webrtc {
@ -44,7 +44,7 @@ WindowUtilX11::~WindowUtilX11() {
::Window *children;
unsigned int num_children;
if (!XQueryTree(display(), window, &root, &parent, &children, &num_children)) {
LOG(LS_ERROR) << "Failed to query for child windows although window"
RTC_LOG(LS_ERROR) << "Failed to query for child windows although window"
<< "does not have a valid WM_STATE.";
return 0;
}
@ -118,7 +118,7 @@ bool WindowUtilX11::GetWindowTitle(::Window window, std::string* title) {
&cnt);
if (status >= Success && cnt && *list) {
if (cnt > 1) {
LOG(LS_INFO) << "Window has " << cnt << " text properties, only using the first one.";
RTC_LOG(LS_INFO) << "Window has " << cnt << " text properties, only using the first one.";
}
*title = *list;
result = true;
@ -147,7 +147,7 @@ bool WindowUtilX11::BringWindowToFront(::Window window) {
// Find the root window to pass event to.
int status = XQueryTree(display(), window, &root, &parent, &children, &num_children);
if (status == 0) {
LOG(LS_ERROR) << "Failed to query for the root window.";
RTC_LOG(LS_ERROR) << "Failed to query for the root window.";
return false;
}

View File

@ -11,9 +11,8 @@
#ifndef WEBRTC_MODULES_DESKTOP_CAPTURE_X11_SHARED_X_UTIL_H_
#define WEBRTC_MODULES_DESKTOP_CAPTURE_X11_SHARED_X_UTIL_H_
#include "webrtc/system_wrappers/include/atomic32.h"
#include "webrtc/modules/desktop_capture/x11/shared_x_display.h"
#include "webrtc/system_wrappers/include/logging.h"
#include "system_wrappers/include/atomic32.h"
#include "modules/desktop_capture/x11/shared_x_display.h"
#include <unistd.h>
#include <string.h>

View File

@ -166,7 +166,7 @@ bool IsDesktopElement(XAtomCache* cache, ::Window window) {
int32_t GetWindowState(XAtomCache* cache, ::Window window) {
// Get WM_STATE property of the window.
XWindowProperty<uint32_t> window_state(
webrtc::XWindowProperty<uint32_t> window_state(
cache->display(), window, cache->WmState());
// WM_STATE is considered to be set to WithdrawnState when it missing.
@ -209,6 +209,15 @@ bool GetWindowList(XAtomCache* cache,
::Window app_window =
GetApplicationWindow(cache, children[num_children - 1 - i]);
if (app_window && !IsDesktopElement(cache, app_window)) {
XWindowAttributes window_attr;
if(!XGetWindowAttributes(display, app_window, &window_attr)) {
RTC_LOG(LS_ERROR)<<"Bad request for attributes for window ID:" << app_window;
continue;
}
if((window_attr.width <= 0) || (window_attr.height <=0)){
continue;
}
if (!on_window(app_window)) {
return true;
}

View File

@ -12,53 +12,55 @@
#include <assert.h>
#if defined(TOOLKIT_GTK)
#include <gdk/gdk.h>
#endif // !defined(TOOLKIT_GTK)
#include <limits>
namespace webrtc {
namespace {
#if !defined(TOOLKIT_GTK)
// TODO(sergeyu): This code is not thread safe. Fix it. Bug 2202.
static bool g_xserver_error_trap_enabled = false;
static int g_last_xserver_error_code = 0;
int XServerErrorHandler(Display* display, XErrorEvent* error_event) {
assert(g_xserver_error_trap_enabled);
g_last_xserver_error_code = error_event->error_code;
return 0;
Bool XErrorTrap::XServerErrorHandler(Display* display, xReply* rep,
char* /* buf */, int /* len */,
XPointer data) {
XErrorTrap* self = reinterpret_cast<XErrorTrap*>(data);
if (rep->generic.type != X_Error ||
// Overflow-safe last_request_read <= last_ignored_request_ for skipping
// async replies from requests before XErrorTrap was created.
self->last_ignored_request_ - display->last_request_read <
std::numeric_limits<unsigned long>::max() >> 1)
return False;
self->last_xserver_error_code_ = rep->error.errorCode;
return True;
}
#endif // !defined(TOOLKIT_GTK)
} // namespace
XErrorTrap::XErrorTrap(Display* display)
: original_error_handler_(NULL),
: display_(display),
last_xserver_error_code_(0),
enabled_(true) {
#if defined(TOOLKIT_GTK)
gdk_error_trap_push();
#else // !defined(TOOLKIT_GTK)
assert(!g_xserver_error_trap_enabled);
original_error_handler_ = XSetErrorHandler(&XServerErrorHandler);
g_xserver_error_trap_enabled = true;
g_last_xserver_error_code = 0;
#endif // !defined(TOOLKIT_GTK)
// Use async_handlers instead of XSetErrorHandler(). async_handlers can
// remain in place and then be safely removed at the right time even if a
// handler change happens concurrently on another thread. async_handlers
// are processed first and so can prevent errors reaching the global
// XSetErrorHandler handler. They also will not see errors from or affect
// handling of errors on other Displays, which may be processed on other
// threads.
LockDisplay(display);
async_handler_.next = display->async_handlers;
async_handler_.handler = XServerErrorHandler;
async_handler_.data = reinterpret_cast<XPointer>(this);
display->async_handlers = &async_handler_;
last_ignored_request_ = display->request;
UnlockDisplay(display);
}
int XErrorTrap::GetLastErrorAndDisable() {
assert(enabled_);
enabled_ = false;
#if defined(TOOLKIT_GTK)
return gdk_error_trap_push();
#else // !defined(TOOLKIT_GTK)
assert(g_xserver_error_trap_enabled);
XSetErrorHandler(original_error_handler_);
g_xserver_error_trap_enabled = false;
return g_last_xserver_error_code;
#endif // !defined(TOOLKIT_GTK)
LockDisplay(display_);
DeqAsyncHandler(display_, &async_handler_);
UnlockDisplay(display_);
return last_xserver_error_code_;
}
XErrorTrap::~XErrorTrap() {

View File

@ -11,7 +11,9 @@
#ifndef MODULES_DESKTOP_CAPTURE_X11_X_ERROR_TRAP_H_
#define MODULES_DESKTOP_CAPTURE_X11_X_ERROR_TRAP_H_
#include <X11/Xlib.h>
#include <X11/Xlibint.h>
#undef max // Xlibint.h defines this and it breaks std::max
#undef min // Xlibint.h defines this and it breaks std::min
#include "rtc_base/constructormagic.h"
@ -19,16 +21,27 @@ namespace webrtc {
// Helper class that registers X Window error handler. Caller can use
// GetLastErrorAndDisable() to get the last error that was caught, if any.
// An XErrorTrap may be constructed on any thread, but errors are collected
// from all threads and so |display| should be used only on one thread.
// Other Displays are unaffected.
class XErrorTrap {
public:
explicit XErrorTrap(Display* display);
~XErrorTrap();
// Returns last error and removes unregisters the error handler.
// Must not be called more than once.
int GetLastErrorAndDisable();
private:
XErrorHandler original_error_handler_;
static Bool XServerErrorHandler(Display* display, xReply* rep,
char* /* buf */, int /* len */,
XPointer data);
_XAsyncHandler async_handler_;
Display* display_;
unsigned long last_ignored_request_;
int last_xserver_error_code_;
bool enabled_;
RTC_DISALLOW_COPY_AND_ASSIGN(XErrorTrap);

View File

@ -592,6 +592,11 @@ struct PacedPacketInfo {
int probe_cluster_min_bytes = -1;
};
inline bool IsNewerOrSameTimestamp(uint32_t timestamp, uint32_t prev_timestamp) {
return timestamp == prev_timestamp ||
static_cast<uint32_t>(timestamp - prev_timestamp) < 0x80000000;
}
} // namespace webrtc
#endif // MODULES_INCLUDE_MODULE_COMMON_TYPES_H_

View File

@ -258,8 +258,8 @@ int32_t ModuleFileUtility::InitWavCodec(uint32_t samplesPerSec,
} else if (samplesPerSec == 44100) {
strcpy(codec_info_.plname, "L16");
_codecId = kCodecL16_16kHz;
codec_info_.pacsize = 440;
codec_info_.plfreq = 44000;
codec_info_.pacsize = 441;
codec_info_.plfreq = 44100;
} else if (samplesPerSec == 48000) {
strcpy(codec_info_.plname, "L16");
_codecId = kCodecL16_16kHz;

View File

@ -76,7 +76,7 @@ class RemoteEstimatorProxyTest : public ::testing::Test {
}
SimulatedClock clock_;
testing::StrictMock<MockTransportFeedbackSender> router_;
::testing::StrictMock<MockTransportFeedbackSender> router_;
RemoteEstimatorProxy proxy_;
};

View File

@ -16,6 +16,12 @@
#include "rtc_base/checks.h"
namespace mozilla {
namespace jni {
jclass GetClassRef(JNIEnv* aEnv, const char* aClassName);
}
}
#define TAG "JVM"
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
@ -216,7 +222,9 @@ std::string JNIEnvironment::JavaToStdString(const jstring& j_string) {
// static
void JVM::Initialize(JavaVM* jvm) {
ALOGD("JVM::Initialize%s", GetThreadInfo().c_str());
RTC_CHECK(!g_jvm);
if (g_jvm) {
return;
}
g_jvm = new JVM(jvm);
}

View File

@ -117,7 +117,7 @@ void ProcessThreadImpl::PostTask(std::unique_ptr<rtc::QueuedTask> task) {
void ProcessThreadImpl::RegisterModule(Module* module,
const rtc::Location& from) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
// RTC_DCHECK(thread_checker_.CalledOnValidThread()); Not really needed
RTC_DCHECK(module) << from.ToString();
#if RTC_DCHECK_IS_ON

View File

@ -41,7 +41,7 @@ int32_t DeviceInfoImpl::NumberOfCapabilities(const char* deviceUniqueIdUTF8) {
if (_lastUsedDeviceNameLength == strlen((char*)deviceUniqueIdUTF8)) {
// Is it the same device that is asked for again.
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
if (strncasecmp((char*)_lastUsedDeviceName, (char*)deviceUniqueIdUTF8,
_lastUsedDeviceNameLength) == 0)
#else
@ -70,7 +70,7 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
ReadLockScoped cs(_apiLock);
if ((_lastUsedDeviceNameLength != strlen((char*)deviceUniqueIdUTF8))
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
|| (strncasecmp((char*)_lastUsedDeviceName, (char*)deviceUniqueIdUTF8,
_lastUsedDeviceNameLength) != 0))
#else
@ -92,7 +92,7 @@ int32_t DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
// Make sure the number is valid
if (deviceCapabilityNumber >= (unsigned int)_captureCapabilities.size()) {
RTC_LOG(LS_ERROR) << "Invalid deviceCapabilityNumber "
RTC_LOG(LS_ERROR) << deviceUniqueIdUTF8 << " Invalid deviceCapabilityNumber "
<< deviceCapabilityNumber << ">= number of capabilities ("
<< _captureCapabilities.size() << ").";
return -1;
@ -111,7 +111,7 @@ int32_t DeviceInfoImpl::GetBestMatchedCapability(
ReadLockScoped cs(_apiLock);
if ((_lastUsedDeviceNameLength != strlen((char*)deviceUniqueIdUTF8))
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
|| (strncasecmp((char*)_lastUsedDeviceName, (char*)deviceUniqueIdUTF8,
_lastUsedDeviceNameLength) != 0))
#else

View File

@ -42,6 +42,7 @@ protected:
/* Initialize this object*/
virtual int32_t Init()=0;
virtual int32_t Refresh() { return 0; }
/*
* Fills the member variable _captureCapabilities with capabilities for the given device name.
*/

View File

@ -22,19 +22,195 @@
#include "rtc_base/logging.h"
#ifdef WEBRTC_LINUX
#define EVENT_SIZE ( sizeof (struct inotify_event) )
#define BUF_LEN ( 1024 * ( EVENT_SIZE + 16 ) )
#endif
namespace webrtc {
namespace videocapturemodule {
VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() {
return new videocapturemodule::DeviceInfoLinux();
}
DeviceInfoLinux::DeviceInfoLinux() : DeviceInfoImpl() {}
#ifdef WEBRTC_LINUX
void DeviceInfoLinux::HandleEvent(inotify_event* event, int fd)
{
if (event->mask & IN_CREATE) {
if (fd == _fd_v4l || fd == _fd_snd) {
DeviceChange();
} else if ((event->mask & IN_ISDIR) && (fd == _fd_dev)) {
if (_wd_v4l < 0) {
// Sometimes inotify_add_watch failed if we call it immediately after receiving this event
// Adding 5ms delay to let file system settle down
usleep(5*1000);
_wd_v4l = inotify_add_watch(_fd_v4l, "/dev/v4l/by-path/", IN_CREATE | IN_DELETE | IN_DELETE_SELF);
if (_wd_v4l >= 0) {
DeviceChange();
}
}
if (_wd_snd < 0) {
usleep(5*1000);
_wd_snd = inotify_add_watch(_fd_snd, "/dev/snd/by-path/", IN_CREATE | IN_DELETE | IN_DELETE_SELF);
if (_wd_snd >= 0) {
DeviceChange();
}
}
}
} else if (event->mask & IN_DELETE) {
if (fd == _fd_v4l || fd == _fd_snd) {
DeviceChange();
}
} else if (event->mask & IN_DELETE_SELF) {
if (fd == _fd_v4l) {
inotify_rm_watch(_fd_v4l, _wd_v4l);
_wd_v4l = -1;
} else if (fd == _fd_snd) {
inotify_rm_watch(_fd_snd, _wd_snd);
_wd_snd = -1;
} else {
assert(false);
}
}
}
int DeviceInfoLinux::EventCheck(int fd)
{
struct timeval timeout;
fd_set rfds;
timeout.tv_sec = 0;
timeout.tv_usec = 100000;
FD_ZERO(&rfds);
FD_SET(fd, &rfds);
return select(fd+1, &rfds, NULL, NULL, &timeout);
}
int DeviceInfoLinux::HandleEvents(int fd)
{
char buffer[BUF_LEN];
ssize_t r = read(fd, buffer, BUF_LEN);
if (r <= 0) {
return r;
}
ssize_t buffer_i = 0;
inotify_event* pevent;
size_t eventSize;
int count = 0;
while (buffer_i < r)
{
pevent = (inotify_event *) (&buffer[buffer_i]);
eventSize = sizeof(inotify_event) + pevent->len;
char event[sizeof(inotify_event) + FILENAME_MAX + 1] // null-terminated
__attribute__ ((aligned(__alignof__(struct inotify_event))));
memcpy(event, pevent, eventSize);
HandleEvent((inotify_event*)(event), fd);
buffer_i += eventSize;
count++;
}
return count;
}
int DeviceInfoLinux::ProcessInotifyEvents()
{
while (0 == _isShutdown.Value()) {
if (EventCheck(_fd_dev) > 0) {
if (HandleEvents(_fd_dev) < 0) {
break;
}
}
if (EventCheck(_fd_v4l) > 0) {
if (HandleEvents(_fd_v4l) < 0) {
break;
}
}
if (EventCheck(_fd_snd) > 0) {
if (HandleEvents(_fd_snd) < 0) {
break;
}
}
}
return 0;
}
bool DeviceInfoLinux::InotifyEventThread(void* obj)
{
return static_cast<DeviceInfoLinux*> (obj)->InotifyProcess();
}
bool DeviceInfoLinux::InotifyProcess()
{
_fd_v4l = inotify_init();
_fd_snd = inotify_init();
_fd_dev = inotify_init();
if (_fd_v4l >= 0 && _fd_snd >= 0 && _fd_dev >= 0) {
_wd_v4l = inotify_add_watch(_fd_v4l, "/dev/v4l/by-path/", IN_CREATE | IN_DELETE | IN_DELETE_SELF);
_wd_snd = inotify_add_watch(_fd_snd, "/dev/snd/by-path/", IN_CREATE | IN_DELETE | IN_DELETE_SELF);
_wd_dev = inotify_add_watch(_fd_dev, "/dev/", IN_CREATE);
ProcessInotifyEvents();
if (_wd_v4l >= 0) {
inotify_rm_watch(_fd_v4l, _wd_v4l);
}
if (_wd_snd >= 0) {
inotify_rm_watch(_fd_snd, _wd_snd);
}
if (_wd_dev >= 0) {
inotify_rm_watch(_fd_dev, _wd_dev);
}
close(_fd_v4l);
close(_fd_snd);
close(_fd_dev);
return true;
} else {
return false;
}
}
#endif
DeviceInfoLinux::DeviceInfoLinux() : DeviceInfoImpl()
#ifdef WEBRTC_LINUX
, _inotifyEventThread(new rtc::PlatformThread(
InotifyEventThread, this, "InotifyEventThread"))
, _isShutdown(0)
#endif
{
#ifdef WEBRTC_LINUX
if (_inotifyEventThread)
{
_inotifyEventThread->Start();
_inotifyEventThread->SetPriority(rtc::kHighPriority);
}
}
#endif
int32_t DeviceInfoLinux::Init() {
return 0;
}
DeviceInfoLinux::~DeviceInfoLinux() {}
DeviceInfoLinux::~DeviceInfoLinux() {
#ifdef WEBRTC_LINUX
++_isShutdown;
if (_inotifyEventThread) {
_inotifyEventThread->Stop();
_inotifyEventThread = nullptr;
}
#endif
}
uint32_t DeviceInfoLinux::NumberOfDevices() {
RTC_LOG(LS_INFO) << __FUNCTION__;
@ -61,7 +237,8 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber,
char* deviceUniqueIdUTF8,
uint32_t deviceUniqueIdUTF8Length,
char* /*productUniqueIdUTF8*/,
uint32_t /*productUniqueIdUTF8Length*/) {
uint32_t /*productUniqueIdUTF8Length*/,
pid_t* /*pid*/) {
RTC_LOG(LS_INFO) << __FUNCTION__;
// Travel through /dev/video [0-63]
@ -69,8 +246,9 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber,
char device[20];
int fd = -1;
bool found = false;
for (int n = 0; n < 64; n++) {
sprintf(device, "/dev/video%d", n);
int device_index;
for (device_index = 0; device_index < 64; device_index++) {
sprintf(device, "/dev/video%d", device_index);
if ((fd = open(device, O_RDONLY)) != -1) {
if (count == deviceNumber) {
// Found the device
@ -119,8 +297,15 @@ int32_t DeviceInfoLinux::GetDeviceName(uint32_t deviceNumber,
RTC_LOG(LS_INFO) << "buffer passed is too small";
return -1;
}
} else {
// if there's no bus info to use for uniqueId, invent one - and it has to be repeatable
if (snprintf(deviceUniqueIdUTF8,
deviceUniqueIdUTF8Length, "fake_%u", device_index) >=
(int) deviceUniqueIdUTF8Length)
{
return -1;
}
}
return 0;
}

View File

@ -13,6 +13,13 @@
#include "modules/video_capture/device_info_impl.h"
#include "modules/video_capture/video_capture_impl.h"
#ifdef WEBRTC_LINUX
#include <memory>
#include "rtc_base/platform_thread.h"
#include "system_wrappers/include/atomic32.h"
#include <sys/inotify.h>
#endif
namespace webrtc
{

View File

@ -61,6 +61,13 @@ int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) {
memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
}
int device_index;
if (sscanf(deviceUniqueIdUTF8,"fake_%d", &device_index) == 1)
{
_deviceId = device_index;
return 0;
}
int fd;
char device[32];
bool found = false;

View File

@ -83,7 +83,8 @@ int32_t DeviceInfoIos::GetDeviceName(uint32_t deviceNumber,
char* deviceUniqueIdUTF8,
uint32_t deviceUniqueIdUTF8Length,
char* productUniqueIdUTF8,
uint32_t productUniqueIdUTF8Length) {
uint32_t productUniqueIdUTF8Length,
pid_t* pid) {
NSString* deviceName = [DeviceInfoIosObjC deviceNameForIndex:deviceNumber];
NSString* deviceUniqueId = [DeviceInfoIosObjC deviceUniqueIdForIndex:deviceNumber];

View File

@ -398,7 +398,8 @@ TEST_F(VideoCaptureExternalTest, TestExternalCapture) {
EXPECT_TRUE(capture_callback_.CompareLastFrame(*test_frame_));
}
TEST_F(VideoCaptureExternalTest, Rotation) {
// Disabled, see Bug 1368816
TEST_F(VideoCaptureExternalTest, DISABLED_Rotation) {
EXPECT_EQ(0, capture_module_->SetCaptureRotation(webrtc::kVideoRotation_0));
size_t length = webrtc::CalcBufferSize(
webrtc::VideoType::kI420, test_frame_->width(), test_frame_->height());

View File

@ -11,19 +11,94 @@
#ifndef MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_
#define MODULES_VIDEO_CAPTURE_VIDEO_CAPTURE_H_
#include "modules/audio_processing/include/config.h"
#include "api/video/video_rotation.h"
#include "media/base/videosinkinterface.h"
#include "modules/include/module.h"
#include "modules/video_capture/video_capture_defines.h"
#include <set>
#if defined(ANDROID)
#include <jni.h>
#endif
namespace webrtc {
// Mozilla addition
enum class CaptureDeviceType {
Camera = 0,
Screen = 1,
Application = 2,
Window = 3,
Browser = 4
};
// Mozilla addition
struct CaptureDeviceInfo {
CaptureDeviceType type;
CaptureDeviceInfo() : type(CaptureDeviceType::Camera) {}
CaptureDeviceInfo(CaptureDeviceType t) : type(t) {}
static const ConfigOptionID identifier = ConfigOptionID::kCaptureDeviceInfo;
const char * TypeName() const
{
switch(type) {
case CaptureDeviceType::Camera: {
return "Camera";
}
case CaptureDeviceType::Screen: {
return "Screen";
}
case CaptureDeviceType::Application: {
return "Application";
}
case CaptureDeviceType::Window: {
return "Window";
}
case CaptureDeviceType::Browser: {
return "Browser";
}
}
assert(false);
return "UNKOWN-CaptureDeviceType!";
}
};
class VideoInputFeedBack
{
public:
virtual void OnDeviceChange() = 0;
protected:
virtual ~VideoInputFeedBack(){}
};
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
int32_t SetCaptureAndroidVM(JavaVM* javaVM);
#endif
class VideoCaptureModule: public rtc::RefCountInterface {
public:
// Interface for receiving information about available camera devices.
class DeviceInfo {
public:
virtual uint32_t NumberOfDevices() = 0;
virtual int32_t Refresh() = 0;
virtual void DeviceChange() {
for (auto inputCallBack : _inputCallBacks) {
inputCallBack->OnDeviceChange();
}
}
virtual void RegisterVideoInputFeedBack(VideoInputFeedBack* callBack) {
_inputCallBacks.insert(callBack);
}
virtual void DeRegisterVideoInputFeedBack(VideoInputFeedBack* callBack) {
auto it = _inputCallBacks.find(callBack);
if (it != _inputCallBacks.end()) {
_inputCallBacks.erase(it);
}
}
// Returns the available capture devices.
// deviceNumber - Index of capture device.
@ -39,7 +114,8 @@ class VideoCaptureModule: public rtc::RefCountInterface {
char* deviceUniqueIdUTF8,
uint32_t deviceUniqueIdUTF8Length,
char* productUniqueIdUTF8 = 0,
uint32_t productUniqueIdUTF8Length = 0) = 0;
uint32_t productUniqueIdUTF8Length = 0,
pid_t* pid = 0) = 0;
// Returns the number of capabilities this device.

View File

@ -15,6 +15,10 @@
#include "modules/include/module_common_types.h"
#include "typedefs.h" // NOLINT(build/include)
#ifdef XP_WIN
typedef int pid_t;
#endif
namespace webrtc
{
// Defines

View File

@ -16,11 +16,7 @@ namespace webrtc {
rtc::scoped_refptr<VideoCaptureModule> VideoCaptureFactory::Create(
const char* deviceUniqueIdUTF8) {
#if defined(WEBRTC_ANDROID)
return nullptr;
#else
return videocapturemodule::VideoCaptureImpl::Create(deviceUniqueIdUTF8);
#endif
}
rtc::scoped_refptr<VideoCaptureModule> VideoCaptureFactory::Create(
@ -29,11 +25,7 @@ rtc::scoped_refptr<VideoCaptureModule> VideoCaptureFactory::Create(
}
VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo() {
#if defined(WEBRTC_ANDROID)
return nullptr;
#else
return videocapturemodule::VideoCaptureImpl::CreateDeviceInfo();
#endif
}
} // namespace webrtc

View File

@ -85,7 +85,6 @@ VideoCaptureImpl::VideoCaptureImpl()
_requestedCapability(),
_lastProcessTimeNanos(rtc::TimeNanos()),
_lastFrameRateCallbackTimeNanos(rtc::TimeNanos()),
_dataCallBack(NULL),
_lastProcessFrameTimeNanos(rtc::TimeNanos()),
_rotateFrame(kVideoRotation_0),
apply_rotation_(false) {
@ -97,7 +96,6 @@ VideoCaptureImpl::VideoCaptureImpl()
}
VideoCaptureImpl::~VideoCaptureImpl() {
DeRegisterCaptureDataCallback();
if (_deviceUniqueId)
delete[] _deviceUniqueId;
}
@ -105,18 +103,31 @@ VideoCaptureImpl::~VideoCaptureImpl() {
void VideoCaptureImpl::RegisterCaptureDataCallback(
rtc::VideoSinkInterface<VideoFrame>* dataCallBack) {
rtc::CritScope cs(&_apiCs);
_dataCallBack = dataCallBack;
_dataCallBacks.insert(dataCallBack);
}
void VideoCaptureImpl::DeRegisterCaptureDataCallback() {
void VideoCaptureImpl::DeRegisterCaptureDataCallback(
rtc::VideoSinkInterface<VideoFrame>* dataCallBack) {
rtc::CritScope cs(&_apiCs);
_dataCallBack = NULL;
auto it = _dataCallBacks.find(dataCallBack);
if (it != _dataCallBacks.end()) {
_dataCallBacks.erase(it);
}
}
int32_t VideoCaptureImpl::StopCaptureIfAllClientsClose() {
if (_dataCallBacks.empty()) {
return StopCapture();
} else {
return 0;
}
}
int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) {
UpdateFrameCount(); // frame count used for local frame rate callback.
if (_dataCallBack) {
_dataCallBack->OnFrame(captureFrame);
for (auto dataCallBack : _dataCallBacks) {
dataCallBack->OnFrame(captureFrame);
}
return 0;
@ -149,13 +160,11 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
// SetApplyRotation doesn't take any lock. Make a local copy here.
bool apply_rotation = apply_rotation_;
if (apply_rotation) {
// Rotating resolution when for 90/270 degree rotations.
if (_rotateFrame == kVideoRotation_90 ||
_rotateFrame == kVideoRotation_270) {
target_width = abs(height);
target_height = width;
}
if (apply_rotation &&
(_rotateFrame == kVideoRotation_90 ||
_rotateFrame == kVideoRotation_270)) {
target_width = abs(height);
target_height = width;
}
// Setting absolute height (in case it was negative).
@ -191,7 +200,7 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
buffer.get()->StrideV(), 0, 0, // No Cropping
width, height, target_width, target_height, rotation_mode,
ConvertVideoType(frameInfo.videoType));
if (conversionResult < 0) {
if (conversionResult != 0) {
RTC_LOG(LS_ERROR) << "Failed to convert capture frame from type "
<< static_cast<int>(frameInfo.videoType) << "to I420.";
return -1;
@ -201,6 +210,13 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
!apply_rotation ? _rotateFrame : kVideoRotation_0);
captureFrame.set_ntp_time_ms(captureTime);
// This is one ugly hack to let CamerasParent know what rotation
// the frame was captured at. Note that this goes against the intended
// meaning of rotation of the frame (how to rotate it before rendering).
// We do this so CamerasChild can scale to the proper dimensions
// later on in the pipe.
captureFrame.set_rotation(_rotateFrame);
DeliverCapturedFrame(captureFrame);
return 0;

View File

@ -17,11 +17,51 @@
#include "rtc_base/logging.h"
#include <Dvdmedia.h>
#include <Streams.h>
#include <dbt.h>
#include <ks.h>
namespace webrtc {
namespace videocapturemodule {
LRESULT CALLBACK WndProc(HWND hWnd, UINT uiMsg, WPARAM wParam, LPARAM lParam)
{
DeviceInfoDS* pParent;
if (uiMsg == WM_CREATE)
{
pParent = (DeviceInfoDS*)((LPCREATESTRUCT)lParam)->lpCreateParams;
SetWindowLongPtr(hWnd, GWLP_USERDATA, (LONG_PTR)pParent);
}
else if (uiMsg == WM_DESTROY)
{
SetWindowLongPtr(hWnd, GWLP_USERDATA, NULL);
}
else if (uiMsg == WM_DEVICECHANGE)
{
pParent = (DeviceInfoDS*)GetWindowLongPtr(hWnd, GWLP_USERDATA);
if (pParent)
{
pParent->DeviceChange();
}
}
return DefWindowProc(hWnd, uiMsg, wParam, lParam);
}
void _FreeMediaType(AM_MEDIA_TYPE& mt)
{
if (mt.cbFormat != 0)
{
CoTaskMemFree((PVOID)mt.pbFormat);
mt.cbFormat = 0;
mt.pbFormat = NULL;
}
if (mt.pUnk != NULL)
{
// pUnk should not be used.
mt.pUnk->Release();
mt.pUnk = NULL;
}
}
// static
DeviceInfoDS* DeviceInfoDS::Create() {
DeviceInfoDS* dsInfo = new DeviceInfoDS();
@ -33,8 +73,7 @@ DeviceInfoDS* DeviceInfoDS::Create() {
}
DeviceInfoDS::DeviceInfoDS()
: _dsDevEnum(NULL),
_dsMonikerDevEnum(NULL),
: DeviceInfoImpl(), _dsDevEnum(NULL),
_CoUninitializeIsRequired(true) {
// 1) Initialize the COM library (make Windows load the DLLs).
//
@ -78,14 +117,31 @@ DeviceInfoDS::DeviceInfoDS()
<< " => RPC_E_CHANGED_MODE, error 0x" << std::hex << hr;
}
}
_hInstance = reinterpret_cast<HINSTANCE>(GetModuleHandle(NULL));
_wndClass = {0};
_wndClass.lpfnWndProc = &WndProc;
_wndClass.lpszClassName = TEXT("DeviceInfoDS");
_wndClass.hInstance = _hInstance;
if (RegisterClass(&_wndClass))
{
_hwnd = CreateWindow(_wndClass.lpszClassName, NULL, 0, CW_USEDEFAULT,
CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, NULL,
NULL, _hInstance, this);
}
}
DeviceInfoDS::~DeviceInfoDS() {
RELEASE_AND_CLEAR(_dsMonikerDevEnum);
RELEASE_AND_CLEAR(_dsDevEnum);
if (_CoUninitializeIsRequired) {
CoUninitialize();
}
if (_hwnd != NULL)
{
DestroyWindow(_hwnd);
}
UnregisterClass(_wndClass.lpszClassName, _hInstance);
}
int32_t DeviceInfoDS::Init() {
@ -100,7 +156,7 @@ int32_t DeviceInfoDS::Init() {
}
uint32_t DeviceInfoDS::NumberOfDevices() {
ReadLockScoped cs(_apiLock);
return GetDeviceInfo(0, 0, 0, 0, 0, 0, 0);
return GetDeviceInfo(0, 0, 0, 0, 0, 0, 0, 0);
}
int32_t DeviceInfoDS::GetDeviceName(uint32_t deviceNumber,
@ -109,11 +165,14 @@ int32_t DeviceInfoDS::GetDeviceName(uint32_t deviceNumber,
char* deviceUniqueIdUTF8,
uint32_t deviceUniqueIdUTF8Length,
char* productUniqueIdUTF8,
uint32_t productUniqueIdUTF8Length) {
uint32_t productUniqueIdUTF8Length,
pid_t* pid)
{
ReadLockScoped cs(_apiLock);
const int32_t result = GetDeviceInfo(
deviceNumber, deviceNameUTF8, deviceNameLength, deviceUniqueIdUTF8,
deviceUniqueIdUTF8Length, productUniqueIdUTF8, productUniqueIdUTF8Length);
deviceUniqueIdUTF8Length, productUniqueIdUTF8, productUniqueIdUTF8Length,
pid);
return result > (int32_t)deviceNumber ? 0 : -1;
}
@ -123,16 +182,17 @@ int32_t DeviceInfoDS::GetDeviceInfo(uint32_t deviceNumber,
char* deviceUniqueIdUTF8,
uint32_t deviceUniqueIdUTF8Length,
char* productUniqueIdUTF8,
uint32_t productUniqueIdUTF8Length)
uint32_t productUniqueIdUTF8Length,
pid_t* pid)
{
// enumerate all video capture devices
RELEASE_AND_CLEAR(_dsMonikerDevEnum);
IEnumMoniker* _dsMonikerDevEnum = NULL;
HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
&_dsMonikerDevEnum, 0);
if (hr != NOERROR) {
RTC_LOG(LS_INFO) << "Failed to enumerate CLSID_SystemDeviceEnum, error 0x"
<< std::hex << hr << ". No webcam exist?";
RELEASE_AND_CLEAR(_dsMonikerDevEnum);
return 0;
}
@ -165,6 +225,7 @@ int32_t DeviceInfoDS::GetDeviceInfo(uint32_t deviceNumber,
if (convResult == 0) {
RTC_LOG(LS_INFO) << "Failed to convert device name to UTF8, "
<< "error = " << GetLastError();
RELEASE_AND_CLEAR(_dsMonikerDevEnum);
return -1;
}
}
@ -184,6 +245,7 @@ int32_t DeviceInfoDS::GetDeviceInfo(uint32_t deviceNumber,
RTC_LOG(LS_INFO)
<< "Failed to convert device "
<< "name to UTF8, error = " << GetLastError();
RELEASE_AND_CLEAR(_dsMonikerDevEnum);
return -1;
}
if (productUniqueIdUTF8 && productUniqueIdUTF8Length > 0) {
@ -202,8 +264,9 @@ int32_t DeviceInfoDS::GetDeviceInfo(uint32_t deviceNumber,
}
}
if (deviceNameLength) {
RTC_LOG(LS_INFO) << __FUNCTION__ << " " << deviceNameUTF8;
RTC_LOG(LS_INFO) << __FUNCTION__ << ": deviceName: " << deviceNameUTF8;
}
RELEASE_AND_CLEAR(_dsMonikerDevEnum);
return index;
}
@ -218,12 +281,13 @@ IBaseFilter* DeviceInfoDS::GetDeviceFilter(const char* deviceUniqueIdUTF8,
}
// enumerate all video capture devices
RELEASE_AND_CLEAR(_dsMonikerDevEnum);
IEnumMoniker* _dsMonikerDevEnum = NULL;
HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
&_dsMonikerDevEnum, 0);
if (hr != NOERROR) {
RTC_LOG(LS_INFO) << "Failed to enumerate CLSID_SystemDeviceEnum, error 0x"
<< std::hex << hr << ". No webcam exist?";
RELEASE_AND_CLEAR(_dsMonikerDevEnum);
return 0;
}
_dsMonikerDevEnum->Reset();
@ -279,6 +343,7 @@ IBaseFilter* DeviceInfoDS::GetDeviceFilter(const char* deviceUniqueIdUTF8,
pM->Release();
}
}
RELEASE_AND_CLEAR(_dsMonikerDevEnum);
return captureFilter;
}
@ -297,7 +362,6 @@ int32_t DeviceInfoDS::GetWindowsCapability(
}
int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8)
{
// Reset old capability list
_captureCapabilities.clear();
@ -371,7 +435,8 @@ int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8)
GUID preferedVideoFormat = FORMAT_VideoInfo;
for (int32_t tmp = 0; tmp < count; ++tmp) {
hr = streamConfig->GetStreamCaps(tmp, &pmt, reinterpret_cast<BYTE*>(&caps));
if (!FAILED(hr)) {
// Bug 1181265 - perhaps a helper dll returns success with nullptr
if (!FAILED(hr) && pmt) {
if (pmt->majortype == MEDIATYPE_Video &&
pmt->formattype == FORMAT_VideoInfo2) {
RTC_LOG(LS_INFO) << "Device support FORMAT_VideoInfo2";
@ -436,7 +501,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8)
}
if (hrVC == S_OK) {
LONGLONG* frameDurationList;
LONGLONG* frameDurationList = NULL;
LONGLONG maxFPS;
long listSize;
SIZE size;
@ -453,7 +518,9 @@ int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8)
// On some odd cameras, you may get a 0 for duration.
// GetMaxOfFrameArray returns the lowest duration (highest FPS)
if (hrVC == S_OK && listSize > 0 &&
// Initialize and check the returned list for null since
// some broken drivers don't modify it.
if (hrVC == S_OK && listSize > 0 && frameDurationList &&
0 != (maxFPS = GetMaxOfFrameArray(frameDurationList, listSize))) {
capability.maxFPS = static_cast<int>(10000000 / maxFPS);
capability.supportFrameRateControl = true;
@ -465,6 +532,9 @@ int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8)
else
capability.maxFPS = 0;
}
if (frameDurationList) {
CoTaskMemFree((PVOID)frameDurationList); // NULL not safe
}
} else // use existing method in case IAMVideoControl is not supported
{
if (avgTimePerFrame > 0)
@ -510,7 +580,6 @@ int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8)
RTC_LOG(LS_WARNING)
<< "Device support unknown media type " << strGuid << ", width "
<< capability.width << ", height " << capability.height;
continue;
}
_captureCapabilities.push_back(capability);
@ -520,7 +589,7 @@ int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8)
<< " type:" << static_cast<int>(capability.videoType)
<< " fps:" << capability.maxFPS;
}
DeleteMediaType(pmt);
_FreeMediaType(*pmt);
pmt = NULL;
}
RELEASE_AND_CLEAR(streamConfig);

View File

@ -14,7 +14,8 @@
#include "modules/video_capture/device_info_impl.h"
#include "modules/video_capture/video_capture_impl.h"
#include <Dshow.h>
#include <dshow.h>
#include <windows.h>
namespace webrtc
{

View File

@ -13,7 +13,7 @@
namespace webrtc {
namespace videocapturemodule {
DeviceInfoMF::DeviceInfoMF() {
DeviceInfoMF::DeviceInfoMF() : DeviceInfoImpl() {
}
DeviceInfoMF::~DeviceInfoMF() {

View File

@ -17,7 +17,7 @@
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include <Dvdmedia.h> // VIDEOINFOHEADER2
#include <dvdmedia.h> // VIDEOINFOHEADER2
#include <initguid.h>
#define DELETE_RESET(p) \
@ -39,6 +39,9 @@ DEFINE_GUID(CLSID_SINKFILTER,
0x12,
0xc3);
using namespace mozilla::media;
using namespace mozilla;
namespace webrtc {
namespace videocapturemodule {
@ -49,21 +52,23 @@ typedef struct tagTHREADNAME_INFO {
DWORD dwFlags; // reserved for future use, must be zero
} THREADNAME_INFO;
CaptureInputPin::CaptureInputPin(IN TCHAR* szName,
CaptureInputPin::CaptureInputPin(int32_t moduleId,
IN TCHAR * szName,
IN CaptureSinkFilter* pFilter,
IN CCritSec* pLock,
IN CriticalSection * pLock,
OUT HRESULT* pHr,
IN LPCWSTR pszName)
: CBaseInputPin(szName, pFilter, pLock, pHr, pszName),
: BaseInputPin(szName, pFilter, pLock, pHr, pszName),
_requestedCapability(),
_resultingCapability() {
_moduleId=moduleId;
_threadHandle = NULL;
}
CaptureInputPin::~CaptureInputPin() {}
HRESULT
CaptureInputPin::GetMediaType(IN int iPosition, OUT CMediaType* pmt) {
CaptureInputPin::GetMediaType(IN int iPosition, OUT MediaType* pmt) {
// reset the thread handle
_threadHandle = NULL;
@ -153,7 +158,7 @@ CaptureInputPin::GetMediaType(IN int iPosition, OUT CMediaType* pmt) {
}
HRESULT
CaptureInputPin::CheckMediaType(IN const CMediaType* pMediaType) {
CaptureInputPin::CheckMediaType(IN const MediaType* pMediaType) {
// reset the thread handle
_threadHandle = NULL;
@ -284,7 +289,7 @@ HRESULT
CaptureInputPin::Receive(IN IMediaSample* pIMediaSample) {
HRESULT hr = S_OK;
RTC_DCHECK(m_pFilter);
RTC_DCHECK(mFilter);
RTC_DCHECK(pIMediaSample);
// get the thread handle of the delivering thread inc its priority
@ -292,28 +297,42 @@ CaptureInputPin::Receive(IN IMediaSample* pIMediaSample) {
HANDLE handle = GetCurrentThread();
SetThreadPriority(handle, THREAD_PRIORITY_HIGHEST);
_threadHandle = handle;
// See http://msdn.microsoft.com/en-us/library/xcb2z8hs(VS.71).aspx for details on the code
// in this function. Name of article is "Setting a Thread Name (Unmanaged)".
rtc::SetCurrentThreadName("webrtc_video_capture");
THREADNAME_INFO info;
info.dwType = 0x1000;
info.szName = "capture_thread";
info.dwThreadID = (DWORD)-1;
info.dwFlags = 0;
__try
{
RaiseException( 0x406D1388, 0, sizeof(info)/sizeof(DWORD),
(DWORD_PTR*)&info );
}
__except (EXCEPTION_CONTINUE_EXECUTION)
{
}
}
reinterpret_cast<CaptureSinkFilter*>(m_pFilter)->LockReceive();
hr = CBaseInputPin::Receive(pIMediaSample);
reinterpret_cast<CaptureSinkFilter*>(mFilter)->LockReceive();
hr = BaseInputPin::Receive(pIMediaSample);
if (SUCCEEDED(hr)) {
const LONG length = pIMediaSample->GetActualDataLength();
RTC_DCHECK(length >= 0);
const int32_t length = pIMediaSample->GetActualDataLength();
unsigned char* pBuffer = NULL;
if (S_OK != pIMediaSample->GetPointer(&pBuffer)) {
reinterpret_cast<CaptureSinkFilter*>(m_pFilter)->UnlockReceive();
reinterpret_cast <CaptureSinkFilter *>(mFilter)->UnlockReceive();
return S_FALSE;
}
// NOTE: filter unlocked within Send call
reinterpret_cast<CaptureSinkFilter*>(m_pFilter)->ProcessCapturedFrame(
pBuffer, static_cast<size_t>(length), _resultingCapability);
reinterpret_cast <CaptureSinkFilter *> (mFilter)->ProcessCapturedFrame(
pBuffer,length,_resultingCapability);
} else {
reinterpret_cast<CaptureSinkFilter*>(m_pFilter)->UnlockReceive();
reinterpret_cast<CaptureSinkFilter*>(mFilter)->UnlockReceive();
}
return hr;
@ -330,12 +349,17 @@ HRESULT CaptureInputPin::SetMatchingMediaType(
CaptureSinkFilter::CaptureSinkFilter(IN TCHAR* tszName,
IN LPUNKNOWN punk,
OUT HRESULT* phr,
VideoCaptureExternal& captureObserver)
: CBaseFilter(tszName, punk, &m_crtFilter, CLSID_SINKFILTER),
VideoCaptureExternal& captureObserver,
int32_t moduleId)
: BaseFilter(tszName, CLSID_SINKFILTER),
m_crtFilter("CaptureSinkFilter::m_crtFilter"),
m_crtRecv("CaptureSinkFilter::m_crtRecv"),
m_pInput(NULL),
_captureObserver(captureObserver) {
_captureObserver(captureObserver),
_moduleId(moduleId) {
(*phr) = S_OK;
m_pInput = new CaptureInputPin(NAME("VideoCaptureInputPin"), this,
m_pInput = new CaptureInputPin(moduleId, L"VideoCaptureInputPin",
this,
&m_crtFilter, phr, L"VideoCapture");
if (m_pInput == NULL || FAILED(*phr)) {
(*phr) = FAILED(*phr) ? (*phr) : E_OUTOFMEMORY;
@ -353,8 +377,8 @@ int CaptureSinkFilter::GetPinCount() {
return 1;
}
CBasePin* CaptureSinkFilter::GetPin(IN int Index) {
CBasePin* pPin;
BasePin* CaptureSinkFilter::GetPin(IN int Index) {
BasePin* pPin;
LockFilter();
if (Index == 0) {
pPin = m_pInput;
@ -368,17 +392,17 @@ CBasePin* CaptureSinkFilter::GetPin(IN int Index) {
STDMETHODIMP CaptureSinkFilter::Pause() {
LockReceive();
LockFilter();
if (m_State == State_Stopped) {
if (mState == State_Stopped) {
// change the state, THEN activate the input pin
m_State = State_Paused;
mState = State_Paused;
if (m_pInput && m_pInput->IsConnected()) {
m_pInput->Active();
}
if (m_pInput && !m_pInput->IsConnected()) {
m_State = State_Running;
mState = State_Running;
}
} else if (m_State == State_Running) {
m_State = State_Paused;
} else if (mState == State_Running) {
mState = State_Paused;
}
UnlockFilter();
UnlockReceive();
@ -390,7 +414,7 @@ STDMETHODIMP CaptureSinkFilter::Stop() {
LockFilter();
// set the state
m_State = State_Stopped;
mState = State_Stopped;
// inactivate the pins
if (m_pInput)
@ -403,16 +427,16 @@ STDMETHODIMP CaptureSinkFilter::Stop() {
void CaptureSinkFilter::SetFilterGraph(IGraphBuilder* graph) {
LockFilter();
m_pGraph = graph;
mGraph = graph;
UnlockFilter();
}
void CaptureSinkFilter::ProcessCapturedFrame(
unsigned char* pBuffer,
size_t length,
int32_t length,
const VideoCaptureCapability& frameInfo) {
// we have the receiver lock
if (m_State == State_Running) {
if (mState == State_Running) {
_captureObserver.IncomingFrame(pBuffer, length, frameInfo);
// trying to hold it since it's only a memcpy

View File

@ -11,9 +11,10 @@
#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
#include <Streams.h> // Include base DS filter header files
#include "modules/video_capture/video_capture_defines.h"
#include "BaseInputPin.h"
#include "BaseFilter.h"
#include "MediaType.h"
namespace webrtc
{
@ -26,69 +27,78 @@ class CaptureSinkFilter;
* input pin for camera input
*
*/
class CaptureInputPin: public CBaseInputPin
class CaptureInputPin: public mozilla::media::BaseInputPin
{
public:
int32_t _moduleId;
VideoCaptureCapability _requestedCapability;
VideoCaptureCapability _resultingCapability;
HANDLE _threadHandle;
CaptureInputPin(IN TCHAR* szName,
CaptureInputPin(int32_t moduleId,
IN TCHAR* szName,
IN CaptureSinkFilter* pFilter,
IN CCritSec * pLock,
IN mozilla::CriticalSection * pLock,
OUT HRESULT * pHr,
IN LPCWSTR pszName);
virtual ~CaptureInputPin();
HRESULT GetMediaType (IN int iPos, OUT CMediaType * pmt);
HRESULT CheckMediaType (IN const CMediaType * pmt);
HRESULT GetMediaType (IN int iPos, OUT mozilla::media::MediaType * pmt);
HRESULT CheckMediaType (IN const mozilla::media::MediaType * pmt);
STDMETHODIMP Receive (IN IMediaSample *);
HRESULT SetMatchingMediaType(const VideoCaptureCapability& capability);
};
class CaptureSinkFilter: public CBaseFilter
class CaptureSinkFilter: public mozilla::media::BaseFilter
{
public:
CaptureSinkFilter(IN TCHAR * tszName,
IN LPUNKNOWN punk,
OUT HRESULT * phr,
VideoCaptureExternal& captureObserver);
VideoCaptureExternal& captureObserver,
int32_t moduleId);
virtual ~CaptureSinkFilter();
// --------------------------------------------------------------------
// class methods
void ProcessCapturedFrame(unsigned char* pBuffer, size_t length,
void ProcessCapturedFrame(unsigned char* pBuffer, int32_t length,
const VideoCaptureCapability& frameInfo);
// explicit receiver lock aquisition and release
void LockReceive() { m_crtRecv.Lock();}
void UnlockReceive() {m_crtRecv.Unlock();}
void LockReceive() { m_crtRecv.Enter();}
void UnlockReceive() {m_crtRecv.Leave();}
// explicit filter lock aquisition and release
void LockFilter() {m_crtFilter.Lock();}
void UnlockFilter() { m_crtFilter.Unlock(); }
void LockFilter() {m_crtFilter.Enter();}
void UnlockFilter() { m_crtFilter.Leave(); }
void SetFilterGraph(IGraphBuilder* graph); // Used if EVR
// --------------------------------------------------------------------
// COM interfaces
DECLARE_IUNKNOWN ;
STDMETHODIMP QueryInterface(REFIID aIId, void **aInterface)
{
return mozilla::media::BaseFilter::QueryInterface(aIId, aInterface);
}
STDMETHODIMP SetMatchingMediaType(const VideoCaptureCapability& capability);
// --------------------------------------------------------------------
// CBaseFilter methods
int GetPinCount ();
CBasePin * GetPin ( IN int Index);
mozilla::media::BasePin * GetPin ( IN int Index);
STDMETHODIMP Pause ();
STDMETHODIMP Stop ();
STDMETHODIMP GetClassID ( OUT CLSID * pCLSID);
// --------------------------------------------------------------------
// class factory calls this
static CUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
static IUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
private:
CCritSec m_crtFilter; // filter lock
CCritSec m_crtRecv; // receiver lock; always acquire before filter lock
mozilla::CriticalSection m_crtFilter; // filter lock
mozilla::CriticalSection m_crtRecv; // receiver lock; always acquire before filter lock
CaptureInputPin * m_pInput;
VideoCaptureExternal& _captureObserver;
int32_t _moduleId;
};
} // namespace videocapturemodule
} // namespace webrtc

View File

@ -20,7 +20,7 @@
namespace webrtc {
namespace videocapturemodule {
VideoCaptureDS::VideoCaptureDS()
: _captureFilter(NULL),
: VideoCaptureImpl(), _dsInfo(), _captureFilter(NULL),
_graphBuilder(NULL),
_mediaControl(NULL),
_sinkFilter(NULL),
@ -102,7 +102,7 @@ int32_t VideoCaptureDS::Init(const char* deviceUniqueIdUTF8) {
}
// Create the sink filte used for receiving Captured frames.
_sinkFilter = new CaptureSinkFilter(SINK_FILTER_NAME, NULL, &hr, *this);
_sinkFilter = new CaptureSinkFilter(SINK_FILTER_NAME, NULL, &hr, *this, 0);
if (hr != S_OK) {
RTC_LOG(LS_INFO) << "Failed to create send filter";
return -1;
@ -122,7 +122,7 @@ int32_t VideoCaptureDS::Init(const char* deviceUniqueIdUTF8) {
}
// Temporary connect here.
// This is done so that no one else can use the capture device.
if (SetCameraOutput(_requestedCapability) != 0) {
if (SetCameraOutputIfNeeded(_requestedCapability) != 0) {
return -1;
}
hr = _mediaControl->Pause();
@ -139,12 +139,8 @@ int32_t VideoCaptureDS::Init(const char* deviceUniqueIdUTF8) {
int32_t VideoCaptureDS::StartCapture(const VideoCaptureCapability& capability) {
rtc::CritScope cs(&_apiCs);
if (capability != _requestedCapability) {
DisconnectGraph();
if (SetCameraOutput(capability) != 0) {
return -1;
}
if (SetCameraOutputIfNeeded(capability) != 0) {
return -1;
}
HRESULT hr = _mediaControl->Run();
if (FAILED(hr)) {
@ -157,7 +153,7 @@ int32_t VideoCaptureDS::StartCapture(const VideoCaptureCapability& capability) {
int32_t VideoCaptureDS::StopCapture() {
rtc::CritScope cs(&_apiCs);
HRESULT hr = _mediaControl->Pause();
HRESULT hr = _mediaControl->Stop();
if (FAILED(hr)) {
RTC_LOG(LS_INFO) << "Failed to stop the capture graph. " << hr;
return -1;
@ -178,8 +174,9 @@ int32_t VideoCaptureDS::CaptureSettings(VideoCaptureCapability& settings) {
return 0;
}
int32_t VideoCaptureDS::SetCameraOutput(
const VideoCaptureCapability& requestedCapability) {
int32_t VideoCaptureDS::SetCameraOutputIfNeeded(
const VideoCaptureCapability& requestedCapability)
{
// Get the best matching capability
VideoCaptureCapability capability;
int32_t capabilityIndex;
@ -191,6 +188,16 @@ int32_t VideoCaptureDS::SetCameraOutput(
_deviceUniqueId, _requestedCapability, capability)) < 0) {
return -1;
}
if (capability != _activeCapability) {
DisconnectGraph();
// Store the new mode the camera actually selected
_activeCapability = capability;
} else {
// Camera selected the same mode, nothing to do
return 0;
}
// Reduce the frame rate if possible.
if (capability.maxFPS > requestedCapability.maxFPS) {
capability.maxFPS = requestedCapability.maxFPS;
@ -198,6 +205,13 @@ int32_t VideoCaptureDS::SetCameraOutput(
capability.maxFPS = 30;
}
return SetCameraOutput(capability, capabilityIndex);
}
int32_t VideoCaptureDS::SetCameraOutput(const VideoCaptureCapability& capability,
int32_t capabilityIndex)
{
// Convert it to the windows capability index since they are not nexessary
// the same
VideoCaptureCapabilityWindows windowsCapability;

View File

@ -219,7 +219,6 @@ bool VCMCodecDataBase::RequiresEncoderReset(const VideoCodec& new_send_codec) {
new_send_codec.plType != send_codec_.plType ||
new_send_codec.width != send_codec_.width ||
new_send_codec.height != send_codec_.height ||
new_send_codec.resolution_divisor != send_codec_.resolution_divisor ||
new_send_codec.maxBitrate != send_codec_.maxBitrate ||
new_send_codec.minBitrate != send_codec_.minBitrate ||
new_send_codec.qpMax != send_codec_.qpMax ||

View File

@ -335,6 +335,7 @@ void VP8EncoderImpl::SetupTemporalLayers(int num_streams,
const VideoCodec& codec) {
RTC_DCHECK(codec.VP8().tl_factory != nullptr);
const TemporalLayersFactory* tl_factory = codec.VP8().tl_factory;
RTC_DCHECK(temporal_layers_.empty());
if (num_streams == 1) {
temporal_layers_.emplace_back(
tl_factory->Create(0, num_temporal_layers, tl0_pic_idx_[0]));
@ -366,7 +367,7 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (inst->width <= 1 || inst->height <= 1) {
if (inst->width < 1 || inst->height < 1) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (number_of_cores < 1) {
@ -435,8 +436,9 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
if (encoded_images_[i]._buffer != NULL) {
delete[] encoded_images_[i]._buffer;
}
// Reserve 100 extra bytes for overhead at small resolutions.
encoded_images_[i]._size =
CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
CalcBufferSize(VideoType::kI420, codec_.width, codec_.height) + 100;
encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size];
encoded_images_[i]._completeFrame = true;
}
@ -568,7 +570,9 @@ int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
SetStreamState(stream_bitrates[stream_idx] > 0, stream_idx);
configurations_[i].rc_target_bitrate = stream_bitrates[stream_idx];
temporal_layers_[stream_idx]->OnRatesUpdated(
stream_bitrates[stream_idx], inst->maxBitrate, inst->maxFramerate);
// here too - VP8 won't init if it thinks temporal layers have no bits
stream_bitrates[stream_idx] > 0 ? stream_bitrates[stream_idx] : inst->simulcastStream[stream_idx].minBitrate,
inst->maxBitrate, inst->maxFramerate);
temporal_layers_[stream_idx]->UpdateConfiguration(&configurations_[i]);
}

View File

@ -77,6 +77,7 @@ VP9EncoderImpl::VP9EncoderImpl()
frames_since_kf_(0),
num_temporal_layers_(0),
num_spatial_layers_(0),
num_cores_(0),
is_flexible_mode_(false),
frames_encoded_(0),
// Use two spatial when screensharing with flexible mode.
@ -500,13 +501,6 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
if (frame_types && frame_types->size() > 0) {
frame_type = (*frame_types)[0];
}
if (input_image.width() != codec_.width ||
input_image.height() != codec_.height) {
int ret = UpdateCodecFrameSize(input_image);
if (ret < 0) {
return ret;
}
}
RTC_DCHECK_EQ(input_image.width(), raw_->d_w);
RTC_DCHECK_EQ(input_image.height(), raw_->d_h);

View File

@ -122,6 +122,7 @@ class VP9EncoderImpl : public VP9Encoder {
size_t frames_since_kf_;
uint8_t num_temporal_layers_;
uint8_t num_spatial_layers_;
uint8_t num_cores_;
// Used for flexible mode.
bool is_flexible_mode_;

View File

@ -470,25 +470,6 @@ TEST_F(TestFrameBuffer2, LastContinuousFrameTwoLayers) {
EXPECT_EQ(pid + 3, InsertFrame(pid + 3, 1, ts, true, pid + 2));
}
TEST_F(TestFrameBuffer2, ForwardJumps) {
EXPECT_EQ(5453, InsertFrame(5453, 0, 1, false));
ExtractFrame();
EXPECT_EQ(5454, InsertFrame(5454, 0, 1, false, 5453));
ExtractFrame();
EXPECT_EQ(15670, InsertFrame(15670, 0, 1, false));
ExtractFrame();
EXPECT_EQ(29804, InsertFrame(29804, 0, 1, false));
ExtractFrame();
EXPECT_EQ(29805, InsertFrame(29805, 0, 1, false, 29804));
ExtractFrame();
EXPECT_EQ(29806, InsertFrame(29806, 0, 1, false, 29805));
ExtractFrame();
EXPECT_EQ(33819, InsertFrame(33819, 0, 1, false));
ExtractFrame();
EXPECT_EQ(41248, InsertFrame(41248, 0, 1, false));
ExtractFrame();
}
TEST_F(TestFrameBuffer2, PictureIdJumpBack) {
uint16_t pid = Rand();
uint32_t ts = Rand();

View File

@ -25,7 +25,7 @@ namespace webrtc {
class VCMReceiveCallback;
enum { kDecoderFrameMemoryLength = 10 };
enum { kDecoderFrameMemoryLength = 30 };
struct VCMFrameInformation {
int64_t renderTimeMs;

View File

@ -18,7 +18,7 @@
namespace webrtc {
class H264SpropParameterSetsTest : public testing::Test {
class H264SpropParameterSetsTest : public ::testing::Test {
public:
H264SpropParameterSets h264_sprop;
};

Some files were not shown because too many files have changed in this diff Show More