author    Moonchild <moonchild@palemoon.org>    2023-10-25 02:19:47 +0000
committer Moonchild <moonchild@palemoon.org>    2023-10-25 02:19:47 +0000
commit    c30be25773684b7182d41a94f880f7f29b947569 (patch)
tree      25c3558024b96b4406fd1912f60d77ad56873290
parent    72f30730e790ad800bc8912463eb33f39cf9418b (diff)
parent    f4d408dffe96b01530e00362e2e3b97021327428 (diff)
download  uxp-c30be25773684b7182d41a94f880f7f29b947569.tar.gz
Merge pull request 'Implement support for WebM transparency/alpha channel.' (#2358) from athenian200/UXP:webm_alpha_work into master
Reviewed-on: https://repo.palemoon.org/MoonchildProductions/UXP/pulls/2358
Reviewed-by: Travis W. <travawine@palemoon.org>
-rw-r--r--  dom/media/MediaData.cpp                            151
-rw-r--r--  dom/media/MediaData.h                               23
-rw-r--r--  dom/media/MediaInfo.h                               14
-rw-r--r--  dom/media/platforms/agnostic/VPXDecoder.cpp        153
-rw-r--r--  dom/media/platforms/agnostic/VPXDecoder.h            5
-rw-r--r--  dom/media/platforms/ffmpeg/FFmpegDecoderModule.h     7
-rw-r--r--  dom/media/platforms/wmf/WMFDecoderModule.cpp         8
-rw-r--r--  dom/media/webm/WebMDemuxer.cpp                      33
-rw-r--r--  gfx/layers/ipc/SharedRGBImage.cpp                   27
-rw-r--r--  gfx/layers/ipc/SharedRGBImage.h                      1
-rw-r--r--  gfx/ycbcr/YCbCrUtils.cpp                            21
-rw-r--r--  gfx/ycbcr/YCbCrUtils.h                              11
-rw-r--r--  gfx/ycbcr/yuv_convert.cpp                           21
-rw-r--r--  gfx/ycbcr/yuv_convert.h                             12
-rw-r--r--  layout/media/symbols.def.in                          1
-rw-r--r--  layout/reftests/webm-video/reftest.list              1
-rw-r--r--  layout/reftests/webm-video/webm-alpha-ref.html       5
-rw-r--r--  layout/reftests/webm-video/webm-alpha.html          10
-rw-r--r--  layout/reftests/webm-video/webm-alpha.webm          bin 0 -> 7643 bytes
19 files changed, 432 insertions, 72 deletions
diff --git a/dom/media/MediaData.cpp b/dom/media/MediaData.cpp
index 4852ea486b..9f1205a0e1 100644
--- a/dom/media/MediaData.cpp
+++ b/dom/media/MediaData.cpp
@@ -7,6 +7,8 @@
#include "MediaInfo.h"
#include "VideoUtils.h"
#include "ImageContainer.h"
+#include "mozilla/layers/SharedRGBImage.h"
+#include "YCbCrUtils.h"
#include <stdint.h>
@@ -89,6 +91,45 @@ ValidatePlane(const VideoData::YCbCrBuffer::Plane& aPlane)
aPlane.mStride > 0 && aPlane.mWidth <= aPlane.mStride;
}
+static bool ValidateBufferAndPicture(const VideoData::YCbCrBuffer& aBuffer,
+ const IntRect& aPicture)
+{
+ // The following situation should never happen unless there is a bug
+ // in the decoder
+ if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
+ aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
+ NS_ERROR("C planes with different sizes");
+ return false;
+ }
+
+ // The following situations could be triggered by invalid input
+ if (aPicture.width <= 0 || aPicture.height <= 0) {
+ // In debug mode, makes the error more noticeable
+ MOZ_ASSERT(false, "Empty picture rect");
+ return false;
+ }
+ if (!ValidatePlane(aBuffer.mPlanes[0]) ||
+ !ValidatePlane(aBuffer.mPlanes[1]) ||
+ !ValidatePlane(aBuffer.mPlanes[2])) {
+ NS_WARNING("Invalid plane size");
+ return false;
+ }
+
+ // Ensure the picture size specified in the headers can be extracted out of
+ // the frame we've been supplied without indexing out of bounds.
+ CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
+ CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
+ if (!xLimit.isValid() || xLimit.value() > aBuffer.mPlanes[0].mStride ||
+ !yLimit.isValid() || yLimit.value() > aBuffer.mPlanes[0].mHeight)
+ {
+ // The specified picture dimensions can't be contained inside the video
+ // frame, we'll stomp memory if we try to copy it. Fail.
+ NS_WARNING("Overflowing picture rect");
+ return false;
+ }
+ return true;
+}
+
VideoData::VideoData(int64_t aOffset,
int64_t aTime,
int64_t aDuration,
@@ -242,36 +283,7 @@ VideoData::CreateAndCopyData(const VideoInfo& aInfo,
return v.forget();
}
- // The following situation should never happen unless there is a bug
- // in the decoder
- if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
- aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
- NS_ERROR("C planes with different sizes");
- return nullptr;
- }
-
- // The following situations could be triggered by invalid input
- if (aPicture.width <= 0 || aPicture.height <= 0) {
- // In debug mode, makes the error more noticeable
- MOZ_ASSERT(false, "Empty picture rect");
- return nullptr;
- }
- if (!ValidatePlane(aBuffer.mPlanes[0]) || !ValidatePlane(aBuffer.mPlanes[1]) ||
- !ValidatePlane(aBuffer.mPlanes[2])) {
- NS_WARNING("Invalid plane size");
- return nullptr;
- }
-
- // Ensure the picture size specified in the headers can be extracted out of
- // the frame we've been supplied without indexing out of bounds.
- CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
- CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
- if (!xLimit.isValid() || xLimit.value() > aBuffer.mPlanes[0].mStride ||
- !yLimit.isValid() || yLimit.value() > aBuffer.mPlanes[0].mHeight)
- {
- // The specified picture dimensions can't be contained inside the video
- // frame, we'll stomp memory if we try to copy it. Fail.
- NS_WARNING("Overflowing picture rect");
+ if (!ValidateBufferAndPicture(aBuffer, aPicture)) {
return nullptr;
}
@@ -307,6 +319,73 @@ VideoData::CreateAndCopyData(const VideoInfo& aInfo,
/* static */
already_AddRefed<VideoData>
+VideoData::CreateAndCopyData(const VideoInfo& aInfo,
+ ImageContainer* aContainer,
+ int64_t aOffset,
+ int64_t aTime,
+ int64_t aDuration,
+ const YCbCrBuffer& aBuffer,
+ const YCbCrBuffer::Plane &aAlphaPlane,
+ bool aKeyframe,
+ int64_t aTimecode,
+ const IntRect& aPicture)
+{
+ if (!aContainer) {
+ // Create a dummy VideoData with no image. This gives us something to
+ // send to media streams if necessary.
+ RefPtr<VideoData> v(new VideoData(aOffset,
+ aTime,
+ aDuration,
+ aKeyframe,
+ aTimecode,
+ aInfo.mDisplay,
+ 0));
+ return v.forget();
+ }
+
+ if (!ValidateBufferAndPicture(aBuffer, aPicture)) {
+ return nullptr;
+ }
+
+ RefPtr<VideoData> v(new VideoData(aOffset,
+ aTime,
+ aDuration,
+ aKeyframe,
+ aTimecode,
+ aInfo.mDisplay,
+ 0));
+
+ // Convert from YUVA to BGRA format on the software side.
+ RefPtr<layers::SharedRGBImage> videoImage =
+ aContainer->CreateSharedRGBImage();
+ v->mImage = videoImage;
+
+ if (!v->mImage) {
+ return nullptr;
+ }
+ if (!videoImage->Allocate(IntSize(aBuffer.mPlanes[0].mWidth,
+ aBuffer.mPlanes[0].mHeight),
+ SurfaceFormat::B8G8R8A8)) {
+ return nullptr;
+ }
+ uint8_t* argb_buffer = videoImage->GetBuffer();
+ IntSize size = videoImage->GetSize();
+
+ // The naming convention for libyuv and associated utils is word-order.
+ // The naming convention in the gfx stack is byte-order.
+ ConvertYCbCrAToARGB(aBuffer.mPlanes[0].mData,
+ aBuffer.mPlanes[1].mData,
+ aBuffer.mPlanes[2].mData,
+ aAlphaPlane.mData,
+ aBuffer.mPlanes[0].mStride, aBuffer.mPlanes[1].mStride,
+ argb_buffer, size.width * 4,
+ size.width, size.height);
+
+ return v.forget();
+}
+
+/* static */
+already_AddRefed<VideoData>
VideoData::CreateFromImage(const VideoInfo& aInfo,
int64_t aOffset,
int64_t aTime,
@@ -340,6 +419,15 @@ MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize)
{
}
+MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize,
+ const uint8_t* aAlphaData, size_t aAlphaSize)
+ : MediaData(RAW_DATA, 0)
+ , mCrypto(mCryptoInternal)
+ , mBuffer(aData, aSize)
+ , mAlphaBuffer(aAlphaData, aAlphaSize)
+{
+}
+
already_AddRefed<MediaRawData>
MediaRawData::Clone() const
{
@@ -356,6 +444,9 @@ MediaRawData::Clone() const
if (!s->mBuffer.Append(mBuffer.Data(), mBuffer.Length())) {
return nullptr;
}
+ if (!s->mAlphaBuffer.Append(mAlphaBuffer.Data(), mAlphaBuffer.Length())) {
+ return nullptr;
+ }
return s.forget();
}
diff --git a/dom/media/MediaData.h b/dom/media/MediaData.h
index dc93c84872..02a7162f42 100644
--- a/dom/media/MediaData.h
+++ b/dom/media/MediaData.h
@@ -474,6 +474,17 @@ public:
int64_t aTimecode,
const IntRect& aPicture);
+ static already_AddRefed<VideoData> CreateAndCopyData(const VideoInfo& aInfo,
+ ImageContainer* aContainer,
+ int64_t aOffset,
+ int64_t aTime,
+ int64_t aDuration,
+ const YCbCrBuffer &aBuffer,
+ const YCbCrBuffer::Plane &aAlphaPlane,
+ bool aKeyframe,
+ int64_t aTimecode,
+ const IntRect& aPicture);
+
static already_AddRefed<VideoData> CreateAndCopyIntoTextureClient(const VideoInfo& aInfo,
int64_t aOffset,
int64_t aTime,
@@ -622,15 +633,22 @@ private:
class MediaRawData : public MediaData {
public:
MediaRawData();
- MediaRawData(const uint8_t* aData, size_t mSize);
+ MediaRawData(const uint8_t* aData, size_t aSize);
+ MediaRawData(const uint8_t* aData, size_t aSize,
+ const uint8_t* aAlphaData, size_t aAlphaSize);
// Pointer to data or null if not-yet allocated
const uint8_t* Data() const { return mBuffer.Data(); }
+ // Pointer to alpha data or null if not-yet allocated
+ const uint8_t* AlphaData() const { return mAlphaBuffer.Data(); }
// Size of buffer.
size_t Size() const { return mBuffer.Length(); }
+ size_t AlphaSize() const { return mAlphaBuffer.Length(); }
size_t ComputedSizeOfIncludingThis() const
{
- return sizeof(*this) + mBuffer.ComputedSizeOfExcludingThis();
+ return sizeof(*this)
+ + mBuffer.ComputedSizeOfExcludingThis()
+ + mAlphaBuffer.ComputedSizeOfExcludingThis();
}
// Access the buffer as a Span.
operator Span<const uint8_t>() { return MakeSpan(Data(), Size()); }
@@ -661,6 +679,7 @@ protected:
private:
friend class MediaRawDataWriter;
AlignedByteBuffer mBuffer;
+ AlignedByteBuffer mAlphaBuffer;
CryptoSample mCryptoInternal;
MediaRawData(const MediaRawData&); // Not implemented
};
diff --git a/dom/media/MediaInfo.h b/dom/media/MediaInfo.h
index 2ddaf5a43f..62477cabd8 100644
--- a/dom/media/MediaInfo.h
+++ b/dom/media/MediaInfo.h
@@ -215,6 +215,7 @@ public:
, mRotation(aOther.mRotation)
, mBitDepth(aOther.mBitDepth)
, mImageRect(aOther.mImageRect)
+ , mAlphaPresent(aOther.mAlphaPresent)
{
}
@@ -238,6 +239,16 @@ public:
return MakeUnique<VideoInfo>(*this);
}
+ void SetAlpha(bool aAlphaPresent)
+ {
+ mAlphaPresent = aAlphaPresent;
+ }
+
+ bool HasAlpha() const
+ {
+ return mAlphaPresent;
+ }
+
nsIntRect ImageRect() const
{
if (mImageRect.width < 0 || mImageRect.height < 0) {
@@ -312,6 +323,9 @@ private:
// mImage may be cropped; currently only used with the WebM container.
// A negative width or height indicate that no cropping is to occur.
nsIntRect mImageRect;
+
+ // Indicates whether or not frames may contain alpha information.
+ bool mAlphaPresent = false;
};
class AudioInfo : public TrackInfo {
diff --git a/dom/media/platforms/agnostic/VPXDecoder.cpp b/dom/media/platforms/agnostic/VPXDecoder.cpp
index 007ead0c7d..00cf7d85cc 100644
--- a/dom/media/platforms/agnostic/VPXDecoder.cpp
+++ b/dom/media/platforms/agnostic/VPXDecoder.cpp
@@ -35,6 +35,38 @@ static VPXDecoder::Codec MimeTypeToCodec(const nsACString& aMimeType)
return VPXDecoder::Codec::Unknown;
}
+static nsresult
+InitContext(vpx_codec_ctx_t* aCtx,
+ const VideoInfo& aInfo,
+ const VPXDecoder::Codec aCodec)
+{
+ int decode_threads = 2;
+
+ vpx_codec_iface_t* dx = nullptr;
+ if (aCodec == VPXDecoder::Codec::VP8) {
+ dx = vpx_codec_vp8_dx();
+ }
+ else if (aCodec == VPXDecoder::Codec::VP9) {
+ dx = vpx_codec_vp9_dx();
+ if (aInfo.mDisplay.width >= 2048) {
+ decode_threads = 8;
+ }
+ else if (aInfo.mDisplay.width >= 1024) {
+ decode_threads = 4;
+ }
+ }
+ decode_threads = std::min(decode_threads, PR_GetNumberOfProcessors());
+
+ vpx_codec_dec_cfg_t config;
+ config.threads = decode_threads;
+ config.w = config.h = 0; // set after decode
+
+ if (!dx || vpx_codec_dec_init(aCtx, dx, &config, 0)) {
+ return NS_ERROR_FAILURE;
+ }
+ return NS_OK;
+}
+
VPXDecoder::VPXDecoder(const CreateDecoderParams& aParams)
: mImageContainer(aParams.mImageContainer)
, mTaskQueue(aParams.mTaskQueue)
@@ -45,6 +77,7 @@ VPXDecoder::VPXDecoder(const CreateDecoderParams& aParams)
{
MOZ_COUNT_CTOR(VPXDecoder);
PodZero(&mVPX);
+ PodZero(&mVPXAlpha);
}
VPXDecoder::~VPXDecoder()
@@ -56,34 +89,24 @@ void
VPXDecoder::Shutdown()
{
vpx_codec_destroy(&mVPX);
+ vpx_codec_destroy(&mVPXAlpha);
}
RefPtr<MediaDataDecoder::InitPromise>
VPXDecoder::Init()
{
- int decode_threads = 2;
-
- vpx_codec_iface_t* dx = nullptr;
- if (mCodec == Codec::VP8) {
- dx = vpx_codec_vp8_dx();
- } else if (mCodec == Codec::VP9) {
- dx = vpx_codec_vp9_dx();
- if (mInfo.mDisplay.width >= 2048) {
- decode_threads = 8;
- } else if (mInfo.mDisplay.width >= 1024) {
- decode_threads = 4;
- }
+ if (NS_FAILED(InitContext(&mVPX, mInfo, mCodec))) {
+ return VPXDecoder::InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ __func__);
}
- decode_threads = std::min(decode_threads, PR_GetNumberOfProcessors());
-
- vpx_codec_dec_cfg_t config;
- config.threads = decode_threads;
- config.w = config.h = 0; // set after decode
-
- if (!dx || vpx_codec_dec_init(&mVPX, dx, &config, 0)) {
- return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
+ if (mInfo.HasAlpha()) {
+ if (NS_FAILED(InitContext(&mVPXAlpha, mInfo, mCodec))) {
+ return VPXDecoder::InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ __func__);
+ }
}
- return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
+ return VPXDecoder::InitPromise::CreateAndResolve(TrackInfo::kVideoTrack,
+ __func__);
}
void
@@ -115,14 +138,27 @@ VPXDecoder::DoDecode(MediaRawData* aSample)
RESULT_DETAIL("VPX error: %s", vpx_codec_err_to_string(r)));
}
- vpx_codec_iter_t iter = nullptr;
- vpx_image_t *img;
+ vpx_codec_iter_t iter = nullptr;
+ vpx_image_t *img;
+ vpx_image_t *img_alpha = nullptr;
+ bool alpha_decoded = false;
while ((img = vpx_codec_get_frame(&mVPX, &iter))) {
NS_ASSERTION(img->fmt == VPX_IMG_FMT_I420 ||
img->fmt == VPX_IMG_FMT_I444,
"WebM image format not I420 or I444");
-
+ NS_ASSERTION(!alpha_decoded,
+ "Multiple frames per packet that contains alpha");
+
+ if (aSample->AlphaSize() > 0) {
+ if(!alpha_decoded){
+ MediaResult rv = DecodeAlpha(&img_alpha, aSample);
+ if (NS_FAILED(rv)) {
+ return(rv);
+ }
+ alpha_decoded = true;
+ }
+ }
// Chroma shifts are rounded down as per the decoding examples in the SDK
VideoData::YCbCrBuffer b;
b.mPlanes[0].mData = img->planes[0];
@@ -174,17 +210,38 @@ VPXDecoder::DoDecode(MediaRawData* aSample)
}();
// TODO: need a newer libvpx to support full color range
- RefPtr<VideoData> v =
- VideoData::CreateAndCopyData(mInfo,
- mImageContainer,
- aSample->mOffset,
- aSample->mTime,
- aSample->mDuration,
- b,
- aSample->mKeyframe,
- aSample->mTimecode,
- mInfo.ScaledImageRect(img->d_w,
- img->d_h));
+ RefPtr<VideoData> v;
+ if (!img_alpha) {
+ v = VideoData::CreateAndCopyData(mInfo,
+ mImageContainer,
+ aSample->mOffset,
+ aSample->mTime,
+ aSample->mDuration,
+ b,
+ aSample->mKeyframe,
+ aSample->mTimecode,
+ mInfo.ScaledImageRect(img->d_w,
+ img->d_h));
+ } else {
+ VideoData::YCbCrBuffer::Plane alpha_plane;
+ alpha_plane.mData = img_alpha->planes[0];
+ alpha_plane.mStride = img_alpha->stride[0];
+ alpha_plane.mHeight = img_alpha->d_h;
+ alpha_plane.mWidth = img_alpha->d_w;
+ alpha_plane.mOffset = alpha_plane.mSkip = 0;
+ v = VideoData::CreateAndCopyData(mInfo,
+ mImageContainer,
+ aSample->mOffset,
+ aSample->mTime,
+ aSample->mDuration,
+ b,
+ alpha_plane,
+ aSample->mKeyframe,
+ aSample->mTimecode,
+ mInfo.ScaledImageRect(img->d_w,
+ img->d_h));
+
+ }
if (!v) {
LOG("Image allocation error source %ldx%ld display %ldx%ld picture %ldx%ld",
@@ -234,6 +291,32 @@ VPXDecoder::Drain()
mTaskQueue->Dispatch(NewRunnableMethod(this, &VPXDecoder::ProcessDrain));
}
+MediaResult
+VPXDecoder::DecodeAlpha(vpx_image_t** aImgAlpha,
+ MediaRawData* aSample)
+{
+ vpx_codec_err_t r = vpx_codec_decode(&mVPXAlpha,
+ aSample->AlphaData(),
+ aSample->AlphaSize(),
+ nullptr,
+ 0);
+ if (r) {
+ LOG("VPX decode alpha error: %s", vpx_codec_err_to_string(r));
+ return MediaResult(
+ NS_ERROR_DOM_MEDIA_DECODE_ERR,
+ RESULT_DETAIL("VPX decode alpha error: %s", vpx_codec_err_to_string(r)));
+ }
+
+ vpx_codec_iter_t iter = nullptr;
+
+ *aImgAlpha = vpx_codec_get_frame(&mVPXAlpha, &iter);
+ NS_ASSERTION((*aImgAlpha)->fmt == VPX_IMG_FMT_I420 ||
+ (*aImgAlpha)->fmt == VPX_IMG_FMT_I444,
+ "WebM image format not I420 or I444");
+
+ return NS_OK;
+}
+
/* static */
bool
VPXDecoder::IsVPX(const nsACString& aMimeType, uint8_t aCodecMask)
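The heart of the decoder change is that the alpha channel arrives as a second, independent VP8/VP9 bitstream, so VPXDecoder keeps two codec contexts (mVPX and mVPXAlpha) and feeds the packet's alpha side-data to the second one. The following standalone sketch (not part of the patch) illustrates the same two-context pattern against the public libvpx API only; the buffer names and the DecodeVP9WithAlpha helper are hypothetical, and in the real code the payloads come from the WebM demuxer shown further down.

```cpp
// Standalone sketch: decode a colour frame and its alpha side-data with two
// independent libvpx contexts, mirroring VPXDecoder's mVPX / mVPXAlpha.
// `colorData` / `alphaData` are hypothetical buffers already demuxed from a
// WebM packet.
#include <cstdint>
#include <cstdio>
#include <vector>
#include "vpx/vpx_decoder.h"
#include "vpx/vp8dx.h"

struct DecodedFrame {
  vpx_image_t* color = nullptr;  // I420/I444 colour planes
  vpx_image_t* alpha = nullptr;  // alpha lives in planes[0] of this image
};

static bool DecodeVP9WithAlpha(vpx_codec_ctx_t* colorCtx,
                               vpx_codec_ctx_t* alphaCtx,
                               const std::vector<uint8_t>& colorData,
                               const std::vector<uint8_t>& alphaData,
                               DecodedFrame* out) {
  if (vpx_codec_decode(colorCtx, colorData.data(),
                       static_cast<unsigned int>(colorData.size()),
                       nullptr, 0) != VPX_CODEC_OK) {
    return false;
  }
  vpx_codec_iter_t iter = nullptr;
  out->color = vpx_codec_get_frame(colorCtx, &iter);

  if (!alphaData.empty()) {
    // The alpha channel is an ordinary VP9 stream carried as BlockAdditional
    // data, so it is decoded exactly like the colour stream.
    if (vpx_codec_decode(alphaCtx, alphaData.data(),
                         static_cast<unsigned int>(alphaData.size()),
                         nullptr, 0) != VPX_CODEC_OK) {
      return false;
    }
    vpx_codec_iter_t alphaIter = nullptr;
    out->alpha = vpx_codec_get_frame(alphaCtx, &alphaIter);
  }
  return out->color != nullptr;
}

int main() {
  vpx_codec_dec_cfg_t cfg;
  cfg.threads = 2;
  cfg.w = cfg.h = 0;  // filled in after the first decode

  vpx_codec_ctx_t colorCtx, alphaCtx;
  if (vpx_codec_dec_init(&colorCtx, vpx_codec_vp9_dx(), &cfg, 0) ||
      vpx_codec_dec_init(&alphaCtx, vpx_codec_vp9_dx(), &cfg, 0)) {
    return 1;
  }

  std::vector<uint8_t> colorData, alphaData;  // filled by a demuxer in practice
  DecodedFrame frame;
  if (DecodeVP9WithAlpha(&colorCtx, &alphaCtx, colorData, alphaData, &frame) &&
      frame.alpha) {
    printf("alpha plane: %ux%u, stride %d\n",
           frame.alpha->d_w, frame.alpha->d_h, frame.alpha->stride[0]);
  }

  vpx_codec_destroy(&colorCtx);
  vpx_codec_destroy(&alphaCtx);
  return 0;
}
```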
diff --git a/dom/media/platforms/agnostic/VPXDecoder.h b/dom/media/platforms/agnostic/VPXDecoder.h
index f7e63e3a92..25d9bc1d33 100644
--- a/dom/media/platforms/agnostic/VPXDecoder.h
+++ b/dom/media/platforms/agnostic/VPXDecoder.h
@@ -57,6 +57,8 @@ private:
void ProcessDecode(MediaRawData* aSample);
MediaResult DoDecode(MediaRawData* aSample);
void ProcessDrain();
+ MediaResult DecodeAlpha(vpx_image_t** aImgAlpha,
+ MediaRawData* aSample);
const RefPtr<ImageContainer> mImageContainer;
const RefPtr<TaskQueue> mTaskQueue;
@@ -66,6 +68,9 @@ private:
// VPx decoder state
vpx_codec_ctx_t mVPX;
+ // VPx alpha decoder state
+ vpx_codec_ctx_t mVPXAlpha;
+
const VideoInfo& mInfo;
const Codec mCodec;
diff --git a/dom/media/platforms/ffmpeg/FFmpegDecoderModule.h b/dom/media/platforms/ffmpeg/FFmpegDecoderModule.h
index c27f61aad9..95b156ff96 100644
--- a/dom/media/platforms/ffmpeg/FFmpegDecoderModule.h
+++ b/dom/media/platforms/ffmpeg/FFmpegDecoderModule.h
@@ -32,6 +32,13 @@ public:
already_AddRefed<MediaDataDecoder>
CreateVideoDecoder(const CreateDecoderParams& aParams) override
{
+ // Temporary - forces use of VPXDecoder when alpha is present.
+ // Bug 1263836 will handle alpha scenario once implemented. It will shift
+ // the check for alpha to PDMFactory but not itself remove the need for a
+ // check.
+ if (aParams.VideoConfig().HasAlpha()) {
+ return nullptr;
+ }
RefPtr<MediaDataDecoder> decoder =
new FFmpegVideoDecoder<V>(mLib,
aParams.mTaskQueue,
diff --git a/dom/media/platforms/wmf/WMFDecoderModule.cpp b/dom/media/platforms/wmf/WMFDecoderModule.cpp
index 98412a80a2..46e78fb17f 100644
--- a/dom/media/platforms/wmf/WMFDecoderModule.cpp
+++ b/dom/media/platforms/wmf/WMFDecoderModule.cpp
@@ -83,6 +83,14 @@ WMFDecoderModule::Startup()
already_AddRefed<MediaDataDecoder>
WMFDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
+ // Temporary - forces use of VPXDecoder when alpha is present.
+ // Bug 1263836 will handle alpha scenario once implemented. It will shift
+ // the check for alpha to PDMFactory but not itself remove the need for a
+ // check.
+ if (aParams.VideoConfig().HasAlpha()) {
+ return nullptr;
+ }
+
nsAutoPtr<WMFVideoMFTManager> manager(
new WMFVideoMFTManager(aParams.VideoConfig(),
aParams.mKnowsCompositor,
diff --git a/dom/media/webm/WebMDemuxer.cpp b/dom/media/webm/WebMDemuxer.cpp
index 090229365e..1009fedc69 100644
--- a/dom/media/webm/WebMDemuxer.cpp
+++ b/dom/media/webm/WebMDemuxer.cpp
@@ -378,6 +378,7 @@ WebMDemuxer::ReadMetadata()
mInfo.mVideo.mDisplay = displaySize;
mInfo.mVideo.mImage = frameSize;
mInfo.mVideo.SetImageRect(pictureRect);
+ mInfo.mVideo.SetAlpha(params.alpha_mode);
switch (params.stereo_mode) {
case NESTEGG_VIDEO_MONO:
@@ -656,6 +657,21 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
WEBM_DEBUG("nestegg_packet_data failed r=%d", r);
return NS_ERROR_DOM_MEDIA_DEMUXER_ERR;
}
+ unsigned char* alphaData;
+ size_t alphaLength = 0;
+ // Check packets for alpha information if file has declared alpha frames
+ // may be present.
+ if (mInfo.mVideo.HasAlpha()) {
+ r = nestegg_packet_additional_data(holder->Packet(),
+ 1,
+ &alphaData,
+ &alphaLength);
+ if (r == -1) {
+ WEBM_DEBUG(
+ "nestegg_packet_additional_data failed to retrieve alpha data r=%d",
+ r);
+ }
+ }
bool isKeyframe = false;
if (aType == TrackInfo::kAudioTrack) {
isKeyframe = true;
@@ -713,10 +729,19 @@ WebMDemuxer::GetNextPacket(TrackInfo::TrackType aType, MediaRawDataQueue *aSampl
WEBM_DEBUG("push sample tstamp: %ld next_tstamp: %ld length: %ld kf: %d",
tstamp, next_tstamp, length, isKeyframe);
- RefPtr<MediaRawData> sample = new MediaRawData(data, length);
- if (length && !sample->Data()) {
- // OOM.
- return NS_ERROR_OUT_OF_MEMORY;
+ RefPtr<MediaRawData> sample;
+ if (mInfo.mVideo.HasAlpha() && alphaLength != 0) {
+ sample = new MediaRawData(data, length, alphaData, alphaLength);
+ if ((length && !sample->Data()) || (alphaLength && !sample->AlphaData())) {
+ // OOM.
+ return NS_ERROR_OUT_OF_MEMORY;
+ }
+ } else {
+ sample = new MediaRawData(data, length);
+ if (length && !sample->Data()) {
+ // OOM.
+ return NS_ERROR_OUT_OF_MEMORY;
+ }
}
sample->mTimecode = tstamp;
sample->mTime = tstamp;
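On the demuxer side, the alpha payload is carried per packet as a Matroska BlockAdditional with BlockAddID 1, which nestegg exposes through the newly exported nestegg_packet_additional_data symbol. Below is a minimal sketch of that call outside the UXP tree, assuming an already initialised nestegg context and a packet obtained from it; the ExtractPayload helper and its struct are hypothetical.

```cpp
// Hypothetical helper (not part of the patch): pull the colour payload and,
// when present, the alpha BlockAdditional (ID 1) out of a nestegg packet.
#include <cstddef>
#include "nestegg/nestegg.h"

struct PacketPayload {
  unsigned char* data = nullptr;
  size_t length = 0;
  unsigned char* alphaData = nullptr;
  size_t alphaLength = 0;
};

static bool ExtractPayload(nestegg_packet* pkt, bool trackHasAlpha,
                           PacketPayload* out) {
  // Chunk 0 of the packet is the ordinary (colour) frame.
  if (nestegg_packet_data(pkt, 0, &out->data, &out->length) == -1) {
    return false;
  }
  if (trackHasAlpha) {
    // ID 1 is the BlockAddID used for alpha; -1 here simply means this
    // particular packet carries no additional data, which is not fatal.
    if (nestegg_packet_additional_data(pkt, 1, &out->alphaData,
                                       &out->alphaLength) == -1) {
      out->alphaData = nullptr;
      out->alphaLength = 0;
    }
  }
  return true;
}
```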
diff --git a/gfx/layers/ipc/SharedRGBImage.cpp b/gfx/layers/ipc/SharedRGBImage.cpp
index bb3bb968cd..2db2ef105a 100644
--- a/gfx/layers/ipc/SharedRGBImage.cpp
+++ b/gfx/layers/ipc/SharedRGBImage.cpp
@@ -106,7 +106,32 @@ SharedRGBImage::GetTextureClient(KnowsCompositor* aForwarder)
already_AddRefed<gfx::SourceSurface>
SharedRGBImage::GetAsSourceSurface()
{
- return nullptr;
+ NS_ASSERTION(NS_IsMainThread(), "Must be main thread");
+
+ if (mSourceSurface) {
+ RefPtr<gfx::SourceSurface> surface(mSourceSurface);
+ return surface.forget();
+ }
+
+ RefPtr<gfx::SourceSurface> surface;
+ {
+ // We are 'borrowing' the DrawTarget and retaining a permanent reference to
+ // the underlying data (via the surface). This is safe in this instance
+ // because we know the TextureClient is always wrapping a BufferTextureData
+ // and therefore it won't go away underneath us.
+ BufferTextureData* decoded_buffer =
+ mTextureClient->GetInternalData()->AsBufferTextureData();
+ RefPtr<gfx::DrawTarget> drawTarget = decoded_buffer->BorrowDrawTarget();
+
+ if (!drawTarget) {
+ return nullptr;
+ }
+
+ surface = drawTarget->Snapshot();
+ }
+
+ mSourceSurface = surface;
+ return surface.forget();
}
} // namespace layers
diff --git a/gfx/layers/ipc/SharedRGBImage.h b/gfx/layers/ipc/SharedRGBImage.h
index 2c6009c19c..7122b27bc4 100644
--- a/gfx/layers/ipc/SharedRGBImage.h
+++ b/gfx/layers/ipc/SharedRGBImage.h
@@ -51,6 +51,7 @@ private:
gfx::IntSize mSize;
RefPtr<ImageClient> mCompositable;
RefPtr<TextureClient> mTextureClient;
+ nsCountedRef<nsMainThreadSourceSurfaceRef> mSourceSurface;
};
} // namespace layers
diff --git a/gfx/ycbcr/YCbCrUtils.cpp b/gfx/ycbcr/YCbCrUtils.cpp
index f5a4353e2f..0f9c2c8ebc 100644
--- a/gfx/ycbcr/YCbCrUtils.cpp
+++ b/gfx/ycbcr/YCbCrUtils.cpp
@@ -155,5 +155,26 @@ ConvertYCbCrToRGB(const layers::PlanarYCbCrData& aData,
}
}
+void
+ConvertYCbCrAToARGB(const uint8_t* aSrcY,
+ const uint8_t* aSrcU,
+ const uint8_t* aSrcV,
+ const uint8_t* aSrcA,
+ int aSrcStrideYA, int aSrcStrideUV,
+ uint8_t* aDstARGB, int aDstStrideARGB,
+ int aWidth, int aHeight) {
+
+ ConvertYCbCrAToARGB32(aSrcY,
+ aSrcU,
+ aSrcV,
+ aSrcA,
+ aDstARGB,
+ aWidth,
+ aHeight,
+ aSrcStrideYA,
+ aSrcStrideUV,
+ aDstStrideARGB);
+}
+
} // namespace gfx
} // namespace mozilla
diff --git a/gfx/ycbcr/YCbCrUtils.h b/gfx/ycbcr/YCbCrUtils.h
index 1cd2e1c4fd..dcc7b5e9aa 100644
--- a/gfx/ycbcr/YCbCrUtils.h
+++ b/gfx/ycbcr/YCbCrUtils.h
@@ -24,6 +24,17 @@ ConvertYCbCrToRGB(const layers::PlanarYCbCrData& aData,
unsigned char* aDestBuffer,
int32_t aStride);
+// Currently this function only has support for I420 type.
+void
+ConvertYCbCrAToARGB(const uint8_t* aSrcY,
+ const uint8_t* aSrcU,
+ const uint8_t* aSrcV,
+ const uint8_t* aSrcA,
+ int aSrcStrideYA, int aSrcStrideUV,
+ uint8_t* aDstARGB, int aDstStrideARGB,
+ int aWidth, int aHeight);
+
+
} // namespace gfx
} // namespace mozilla
diff --git a/gfx/ycbcr/yuv_convert.cpp b/gfx/ycbcr/yuv_convert.cpp
index d3a8c53312..89f6dcfa06 100644
--- a/gfx/ycbcr/yuv_convert.cpp
+++ b/gfx/ycbcr/yuv_convert.cpp
@@ -550,6 +550,27 @@ void ScaleYCbCrToRGB32_deprecated(const uint8_t* y_buf,
if (has_mmx)
EMMS();
}
+void ConvertYCbCrAToARGB32(const uint8_t* y_buf,
+ const uint8_t* u_buf,
+ const uint8_t* v_buf,
+ const uint8_t* a_buf,
+ uint8_t* argb_buf,
+ int pic_width,
+ int pic_height,
+ int ya_pitch,
+ int uv_pitch,
+ int argb_pitch) {
+
+ // The downstream graphics stack expects an attenuated input, hence why the
+ // attenuation parameter is set.
+ DebugOnly<int> err = libyuv::I420AlphaToARGB(y_buf, ya_pitch,
+ u_buf, uv_pitch,
+ v_buf, uv_pitch,
+ a_buf, ya_pitch,
+ argb_buf, argb_pitch,
+ pic_width, pic_height, 1);
+ MOZ_ASSERT(!err);
+}
} // namespace gfx
} // namespace mozilla
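ConvertYCbCrAToARGB32 is a thin wrapper around libyuv::I420AlphaToARGB with the attenuate flag set, so the BGRA output comes back premultiplied the way the layers code expects. A small standalone example of the same libyuv call follows; it assumes only the public libyuv header, and the buffer sizes and fill values are illustrative.

```cpp
// Standalone example (not part of the patch): the same libyuv call with
// caller-allocated buffers. attenuate=1 asks libyuv to premultiply the
// colour channels by alpha.
#include <cstdint>
#include <cstdio>
#include <vector>
#include "libyuv/convert_argb.h"

int main() {
  const int width = 4, height = 4;
  const int yaStride = width;            // Y and A planes share a stride here
  const int uvStride = (width + 1) / 2;
  const int argbStride = width * 4;

  std::vector<uint8_t> y(yaStride * height, 235);              // "white" luma
  std::vector<uint8_t> u(uvStride * ((height + 1) / 2), 128);  // neutral chroma
  std::vector<uint8_t> v(uvStride * ((height + 1) / 2), 128);
  std::vector<uint8_t> a(yaStride * height, 128);              // ~50% alpha
  std::vector<uint8_t> argb(argbStride * height);

  int err = libyuv::I420AlphaToARGB(y.data(), yaStride,
                                    u.data(), uvStride,
                                    v.data(), uvStride,
                                    a.data(), yaStride,
                                    argb.data(), argbStride,
                                    width, height,
                                    /* attenuate */ 1);
  if (err != 0) {
    return 1;
  }
  // libyuv "ARGB" is byte-order B, G, R, A in memory; with attenuation the
  // colour bytes end up scaled by the alpha value.
  printf("first pixel BGRA: %d %d %d %d\n",
         argb[0], argb[1], argb[2], argb[3]);
  return 0;
}
```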
diff --git a/gfx/ycbcr/yuv_convert.h b/gfx/ycbcr/yuv_convert.h
index 108e14b679..2e85ada0a3 100644
--- a/gfx/ycbcr/yuv_convert.h
+++ b/gfx/ycbcr/yuv_convert.h
@@ -106,6 +106,18 @@ void ScaleYCbCrToRGB32_deprecated(const uint8_t* yplane,
Rotate view_rotate,
ScaleFilter filter);
+void ConvertYCbCrAToARGB32(const uint8_t* yplane,
+ const uint8_t* uplane,
+ const uint8_t* vplane,
+ const uint8_t* aplane,
+ uint8_t* argbframe,
+ int pic_width,
+ int pic_height,
+ int yastride,
+ int uvstride,
+ int argbstride);
+
+
} // namespace gfx
} // namespace mozilla
diff --git a/layout/media/symbols.def.in b/layout/media/symbols.def.in
index 2ad49cbfcf..140d8e7480 100644
--- a/layout/media/symbols.def.in
+++ b/layout/media/symbols.def.in
@@ -9,6 +9,7 @@ nestegg_duration
nestegg_free_packet
nestegg_init
nestegg_offset_seek
+nestegg_packet_additional_data
nestegg_packet_count
nestegg_packet_discard_padding
nestegg_packet_data
diff --git a/layout/reftests/webm-video/reftest.list b/layout/reftests/webm-video/reftest.list
index 91532af2bb..a178f3e886 100644
--- a/layout/reftests/webm-video/reftest.list
+++ b/layout/reftests/webm-video/reftest.list
@@ -32,6 +32,7 @@ random == poster-12.html poster-ref-blue140x100.html
== poster-13.html poster-ref-blue400x300.html
== poster-15.html poster-ref-green70x30.html
random-if(winWidget) random-if(cocoaWidget) == bug686957.html bug686957-ref.html # bug 922951 for OS X
+== webm-alpha.html webm-alpha-ref.html
# Tests for <video src> with 'object-fit' & 'object-position':
# These tests should be very similar to tests in our w3c-css/submitted/images3
diff --git a/layout/reftests/webm-video/webm-alpha-ref.html b/layout/reftests/webm-video/webm-alpha-ref.html
new file mode 100644
index 0000000000..6675a23a13
--- /dev/null
+++ b/layout/reftests/webm-video/webm-alpha-ref.html
@@ -0,0 +1,5 @@
+<!DOCTYPE HTML>
+<html>
+<body style="background:black;">
+</body>
+</html>
diff --git a/layout/reftests/webm-video/webm-alpha.html b/layout/reftests/webm-video/webm-alpha.html
new file mode 100644
index 0000000000..8c613b473d
--- /dev/null
+++ b/layout/reftests/webm-video/webm-alpha.html
@@ -0,0 +1,10 @@
+<!DOCTYPE HTML>
+<html>
+<body style="background:black;">
+<!--
+ Test that if a WebM video has an alpha channel, it is displayed correctly.
+-->
+<video src="webm-alpha.webm">
+</video>
+</body>
+</html>
diff --git a/layout/reftests/webm-video/webm-alpha.webm b/layout/reftests/webm-video/webm-alpha.webm
new file mode 100644
index 0000000000..b6fe4d7b68
--- /dev/null
+++ b/layout/reftests/webm-video/webm-alpha.webm
Binary files differ