author      Maxim Baz    2019-10-12 14:55:18 +0200
committer   Maxim Baz    2019-10-14 20:56:47 +0200
commit      69a605164f02e93654915aaa3387885789a4d43e (patch)
tree        cc27cfcb42bcf2ee53c4f5f409d2058cfe149412
parent      98a8fb3cf37c91cc0033fc37910f0c9551986665 (diff)
download    aur-69a605164f02e93654915aaa3387885789a4d43e.tar.gz
chromium-vaapi: switch to a better patch
-rw-r--r--  .SRCINFO           4
-rw-r--r--  PKGBUILD           4
-rw-r--r--  vaapi-fix.patch    176
3 files changed, 42 insertions, 142 deletions
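
Summary of the change: the previous vaapi-fix.patch disabled the VAProfileNone/VPP code paths on desktop Linux with broad #ifdef blocks and reworked VADisplayState to fall back to a DRM display. The replacement patch is much smaller: in DecideBufferAllocationMode() it detects a VDPAU-backed VA-API driver (NVIDIA's vdpau-va-driver, vendor string starting with "Splitted-Desktop Systems VDPAU"), returns a new BufferAllocationMode::kWrapVdpau, and in that mode creates VaapiPictures directly on |vaapi_wrapper_| instead of the VAProfileNone |vpp_vaapi_wrapper_| that this driver cannot provide. The full details are in the diff below.

The following minimal standalone sketch illustrates the prefix check the new patch relies on. It uses simplified, hypothetical names (DecideMode, a two-value enum); the real code lives in VaapiVideoDecodeAccelerator::DecideBufferAllocationMode and uses base::StartsWith with the vendor string obtained through VaapiWrapper.

    #include <string>

    enum class BufferAllocationMode { kNormal, kWrapVdpau };

    // "vendor" stands for the string reported by the VA-API driver
    // (vaQueryVendorString()); the VDPAU-backed NVIDIA driver reports
    // something like "Splitted-Desktop Systems VDPAU backend for VA-API".
    BufferAllocationMode DecideMode(const std::string& vendor) {
      // Plain case-sensitive prefix match, equivalent to
      // base::StartsWith(vendor, "...", base::CompareCase::SENSITIVE).
      if (vendor.rfind("Splitted-Desktop Systems VDPAU", 0) == 0)
        return BufferAllocationMode::kWrapVdpau;  // skip the VAProfileNone VPP wrapper
      return BufferAllocationMode::kNormal;
    }
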
diff --git a/.SRCINFO b/.SRCINFO
index ec716e5c34d9..d790657bdcf1 100644
--- a/.SRCINFO
+++ b/.SRCINFO
@@ -1,7 +1,7 @@
pkgbase = chromium-vaapi
pkgdesc = Chromium with VA-API support to enable hardware acceleration
pkgver = 77.0.3865.120
- pkgrel = 1
+ pkgrel = 2
url = https://www.chromium.org/Home
install = chromium.install
arch = x86_64
@@ -72,7 +72,7 @@ pkgbase = chromium-vaapi
sha256sums = d792f9b09b1dcfd64e68f47a611c540dd1383dd9abd78ca1e06b2a7e2ff06af8
sha256sums = 04917e3cd4307d8e31bfb0027a5dce6d086edb10ff8a716024fbb8bb0c7dccf1
sha256sums = babda4f5c1179825797496898d77334ac067149cac03d797ab27ac69671a7feb
- sha256sums = 333b1e0997ad8831906f66550efc73f51b8650ec3436a247d920b5d12e2169de
+ sha256sums = 7496762a1953b15a48d3e5503fb76d9835940afd850a45b7de976de9f51479f9
sha256sums = 33a5bcd1df2cc7aa7467fa882790ef143a4497d2b704c9e1ea86c8ede90c2d90
sha256sums = ab986e4b723dfcedab1bc8dcada07526facae28a8a7ff3345f658532c1d99987
sha256sums = 840f555020751ec284dca35b9317a9dd7dc69fcb910ea1cae2dd7cc9b237dfb7
diff --git a/PKGBUILD b/PKGBUILD
index eb2186da0167..049f5468dabb 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -11,7 +11,7 @@
pkgname=chromium-vaapi
pkgver=77.0.3865.120
-pkgrel=1
+pkgrel=2
_launcher_ver=6
pkgdesc="Chromium with VA-API support to enable hardware acceleration"
arch=('x86_64')
@@ -45,7 +45,7 @@ source=(https://commondatastorage.googleapis.com/chromium-browser-official/chrom
sha256sums=('d792f9b09b1dcfd64e68f47a611c540dd1383dd9abd78ca1e06b2a7e2ff06af8'
'04917e3cd4307d8e31bfb0027a5dce6d086edb10ff8a716024fbb8bb0c7dccf1'
'babda4f5c1179825797496898d77334ac067149cac03d797ab27ac69671a7feb'
- '333b1e0997ad8831906f66550efc73f51b8650ec3436a247d920b5d12e2169de'
+ '7496762a1953b15a48d3e5503fb76d9835940afd850a45b7de976de9f51479f9'
'33a5bcd1df2cc7aa7467fa882790ef143a4497d2b704c9e1ea86c8ede90c2d90'
'ab986e4b723dfcedab1bc8dcada07526facae28a8a7ff3345f658532c1d99987'
'840f555020751ec284dca35b9317a9dd7dc69fcb910ea1cae2dd7cc9b237dfb7'
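
The .SRCINFO and PKGBUILD hunks above only bump pkgrel to 2 and refresh the sha256 checksum of the rewritten vaapi-fix.patch. For an AUR package these values are typically regenerated with updpkgsums (from pacman-contrib) and makepkg --printsrcinfo > .SRCINFO; the exact workflow used for this commit is not shown here.
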
diff --git a/vaapi-fix.patch b/vaapi-fix.patch
index b298a78446a1..db9d6082756d 100644
--- a/vaapi-fix.patch
+++ b/vaapi-fix.patch
@@ -1,154 +1,54 @@
-From fe61d25580bd22d68b20323e7cd684dcba0cd3f4 Mon Sep 17 00:00:00 2001
-From: Akarshan Biswas <akarshanbiswas@fedoraproject.org>
-Date: Tue, 9 Jul 2019 18:39:54 +0530
-Subject: [PATCH] Disable VAProfileNone on Linux and fix vaapi drm
-
----
- .../gpu_video_decode_accelerator_factory.cc | 2 ++
- .../vaapi/vaapi_video_decode_accelerator.cc | 21 ++++++++++++++++
- media/gpu/vaapi/vaapi_wrapper.cc | 24 +++++++++++++++----
- 3 files changed, 42 insertions(+), 5 deletions(-)
-
-diff --git a/media/gpu/gpu_video_decode_accelerator_factory.cc b/media/gpu/gpu_video_decode_accelerator_factory.cc
-index dc9ff3fb3..53189759c 100644
---- a/media/gpu/gpu_video_decode_accelerator_factory.cc
-+++ b/media/gpu/gpu_video_decode_accelerator_factory.cc
-@@ -183,6 +183,8 @@ GpuVideoDecodeAcceleratorFactory::CreateVDA(
- vda = (this->*create_vda_function)(workarounds, gpu_preferences, media_log);
- if (vda && vda->Initialize(config, client))
- return vda;
-+ else
-+ LOG(ERROR) << "Initialization of one or more VDAs failed.";
- }
-
- return nullptr;
-diff --git a/media/gpu/vaapi/vaapi_video_decode_accelerator.cc b/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
-index 3c1316dca..a5fa64b5a 100644
--- a/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
+++ b/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
-@@ -64,6 +64,9 @@ void ReportToUMA(VAVDADecoderFailure failure) {
- VAVDA_DECODER_FAILURES_MAX + 1);
- }
-
-+// Move this thing to chromeos only as build can fail if treat warnings as errors been set to true
-+#if defined(OS_ANDROID) || defined(OS_CHROMEOS)
-+
- // Returns true if the CPU is an Intel Gemini Lake or later (including Kaby
- // Lake) Cpu platform id's are referenced from the following file in kernel
- // source arch/x86/include/asm/intel-family.h
-@@ -77,6 +80,8 @@ bool IsGeminiLakeOrLater() {
- return is_geminilake_or_later;
- }
-
-+#endif
-+
- } // namespace
-
- #define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret) \
-@@ -627,6 +632,13 @@ void VaapiVideoDecodeAccelerator::AssignPictureBuffers(
- const unsigned int va_format = GetVaFormatForVideoCodecProfile(profile_);
- std::vector<VASurfaceID> va_surface_ids;
-
-+ // Nvidia doesn't support VAProfileNone, so don't try to create a temporary
-+ // copy buffer there. It's not needed anyways for hardware video decoding
-+ // to work.
-+
-+ #if defined(OS_ANDROID) || defined(OS_CHROMEOS)
-+
-+
- // If we aren't in BufferAllocationMode::kNone, we have to allocate a
+@@ -635,6 +635,7 @@
// |vpp_vaapi_wrapper_| for VaapiPicture to DownloadFromSurface() the VA's
// internal decoded frame.
-@@ -640,6 +652,7 @@ void VaapiVideoDecodeAccelerator::AssignPictureBuffers(
- NotifyError(PLATFORM_FAILURE);
- }
- }
-+ #endif
-
- for (size_t i = 0; i < buffers.size(); ++i) {
- DCHECK(requested_pic_size_ == buffers[i].size());
-@@ -648,9 +661,13 @@ void VaapiVideoDecodeAccelerator::AssignPictureBuffers(
+ if (buffer_allocation_mode_ != BufferAllocationMode::kNone &&
++ buffer_allocation_mode_ != BufferAllocationMode::kWrapVdpau &&
+ !vpp_vaapi_wrapper_) {
+ vpp_vaapi_wrapper_ = VaapiWrapper::Create(
+ VaapiWrapper::kVideoProcess, VAProfileNone,
+@@ -650,7 +651,8 @@
// only used as a copy destination. Therefore, the VaapiWrapper used and
// owned by |picture| is |vpp_vaapi_wrapper_|.
std::unique_ptr<VaapiPicture> picture = vaapi_picture_factory_->Create(
-+ #if defined(OS_LINUX) && !defined(OS_ANDROID) && !defined(OS_CHROMEOS)
-+ vaapi_wrapper_,
-+ #else
- (buffer_allocation_mode_ == BufferAllocationMode::kNone)
+- (buffer_allocation_mode_ == BufferAllocationMode::kNone)
++ ((buffer_allocation_mode_ == BufferAllocationMode::kNone) ||
++ (buffer_allocation_mode_ == BufferAllocationMode::kWrapVdpau))
? vaapi_wrapper_
: vpp_vaapi_wrapper_,
-+ #endif
make_context_current_cb_, bind_image_cb_, buffers[i]);
- RETURN_AND_NOTIFY_ON_FAILURE(picture, "Failed creating a VaapiPicture",
- PLATFORM_FAILURE, );
-@@ -1063,6 +1080,9 @@ VaapiVideoDecodeAccelerator::GetSupportedProfiles() {
-
+@@ -1077,6 +1079,14 @@
+
VaapiVideoDecodeAccelerator::BufferAllocationMode
VaapiVideoDecodeAccelerator::DecideBufferAllocationMode() {
-+ #if defined(OS_LINUX) && !defined(OS_ANDROID) && !defined(OS_CHROMEOS)
-+ return BufferAllocationMode::kNormal;
-+ #else
++ // NVIDIA blobs use VDPAU
++ if (base::StartsWith(VaapiWrapper::GetVendorStringForTesting(),
++ "Splitted-Desktop Systems VDPAU",
++ base::CompareCase::SENSITIVE)) {
++ LOG(INFO) << "VA-API driver on VDPAU backend";
++ return BufferAllocationMode::kWrapVdpau;
++ }
++
// TODO(crbug.com/912295): Enable a better BufferAllocationMode for IMPORT
// |output_mode_| as well.
if (output_mode_ == VideoDecodeAccelerator::Config::OutputMode::IMPORT)
-@@ -1095,6 +1115,7 @@ VaapiVideoDecodeAccelerator::DecideBufferAllocationMode() {
- return BufferAllocationMode::kReduced;
-
- return BufferAllocationMode::kSuperReduced;
-+ #endif
- }
-
- bool VaapiVideoDecodeAccelerator::IsBufferAllocationModeReducedOrSuperReduced()
-diff --git a/media/gpu/vaapi/vaapi_wrapper.cc b/media/gpu/vaapi/vaapi_wrapper.cc
-index f6008d288..d829582a6 100644
---- a/media/gpu/vaapi/vaapi_wrapper.cc
-+++ b/media/gpu/vaapi/vaapi_wrapper.cc
-@@ -266,6 +266,10 @@ void VADisplayState::PreSandboxInitialization() {
- base::File::FLAG_OPEN | base::File::FLAG_READ | base::File::FLAG_WRITE);
- if (drm_file.IsValid())
- VADisplayState::Get()->SetDrmFd(drm_file.GetPlatformFile());
-+ const char kNvidiaPath[] = "/dev/dri/nvidiactl";
-+ base::File nvidia_file = base::File(
-+ base::FilePath::FromUTF8Unsafe(kNvidiaPath),
-+ base::File::FLAG_OPEN | base::File::FLAG_READ | base::File::FLAG_WRITE);
- }
-
- VADisplayState::VADisplayState()
-@@ -303,10 +307,11 @@ bool VADisplayState::InitializeOnce() {
- case gl::kGLImplementationDesktopGL:
- #if defined(USE_X11)
- va_display_ = vaGetDisplay(gfx::GetXDisplay());
--#else
-- LOG(WARNING) << "VAAPI video acceleration not available without "
-- "DesktopGL (GLX).";
-+ if (vaDisplayIsValid(va_display_))
-+ break;
-+
- #endif // USE_X11
-+ va_display_ = vaGetDisplayDRM(drm_fd_.get());
- break;
- // Cannot infer platform from GL, try all available displays
- case gl::kGLImplementationNone:
-@@ -339,8 +344,17 @@ bool VADisplayState::InitializeOnce() {
- int major_version, minor_version;
- VAStatus va_res = vaInitialize(va_display_, &major_version, &minor_version);
- if (va_res != VA_STATUS_SUCCESS) {
-- LOG(ERROR) << "vaInitialize failed: " << vaErrorStr(va_res);
-- return false;
-+ LOG(ERROR) << "vaInitialize failed (ignore if using Wayland desktop environment, refer:(Github)akarshanbiswas/chromium-vaapi/issues/7): " << vaErrorStr(va_res);
-+ va_display_ = vaGetDisplayDRM(drm_fd_.get());
-+ if (!vaDisplayIsValid(va_display_)) {
-+ LOG(ERROR) << "Could not get a valid DRM VA display";
-+ return false;
-+ }
-+ va_res = vaInitialize(va_display_, &major_version, &minor_version);
-+ if (va_res != VA_STATUS_SUCCESS) {
-+ LOG(ERROR) << "vaInitialize failed using DRM: " << vaErrorStr(va_res);
-+ return false;
-+ }
- }
-
- va_initialized_ = true;
---
-2.21.0
+@@ -1089,7 +1099,7 @@
+ // depends on the bitstream and sometimes it's not enough to cover the amount
+ // of frames needed by the client pipeline (see b/133733739).
+ // TODO(crbug.com/911754): Enable for VP9 Profile 2.
+- if (IsGeminiLakeOrLater() &&
++ if (false && IsGeminiLakeOrLater() &&
+ (profile_ == VP9PROFILE_PROFILE0 || profile_ == VP8PROFILE_ANY)) {
+ // Add one to the reference frames for the one being currently egressed, and
+ // an extra allocation for both |client_| and |decoder_|, see
+--- a/media/gpu/vaapi/vaapi_video_decode_accelerator.h
++++ b/media/gpu/vaapi/vaapi_video_decode_accelerator.h
+@@ -204,6 +204,7 @@
+ // Using |client_|s provided PictureBuffers and as many internally
+ // allocated.
+ kNormal,
++ kWrapVdpau,
+ };
+ // Decides the concrete buffer allocation mode, depending on the hardware
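
A possible way to check that the new code path applies after rebuilding: with the VDPAU-backed NVIDIA driver, tools such as vainfo (from libva-utils) report a driver/vendor string beginning with "Splitted-Desktop Systems VDPAU", which is exactly the prefix the patched DecideBufferAllocationMode() matches before returning kWrapVdpau.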