From 8d70cc2dde81aad0f484a2f0d0c5b90e6e1b97cd Mon Sep 17 00:00:00 2001
From: Joe Rogers <1337joe@gmail.com>
Date: Thu, 7 Oct 2021 22:37:59 +0200
Subject: Add support for non-jpg image extractions
---
MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
(limited to 'MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs')
diff --git a/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs b/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
index c5522bc3c..638588560 100644
--- a/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
+++ b/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
@@ -95,9 +95,10 @@ namespace MediaBrowser.Controller.MediaEncoding
/// <param name="mediaSource">Media source information.</param>
/// <param name="imageStream">Media stream information.</param>
/// <param name="imageStreamIndex">Index of the stream to extract from.</param>
+ /// <param name="outputExtension">The extension of the file to write.</param>
/// <param name="cancellationToken">CancellationToken to use for operation.</param>
/// <returns>Location of video image.</returns>
- Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream imageStream, int? imageStreamIndex, CancellationToken cancellationToken);
+ Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream imageStream, int? imageStreamIndex, string outputExtension, CancellationToken cancellationToken);
/// <summary>
/// Extracts the video images on interval.
From e3eee10d05e9ecc7e3fac1f8fdad92329d38a4db Mon Sep 17 00:00:00 2001
From: Joe Rogers <1337joe@gmail.com>
Date: Mon, 11 Oct 2021 12:34:18 +0200
Subject: Add image provider tests and clean up
---
.../MediaEncoding/IMediaEncoder.cs | 2 +-
MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs | 6 +-
.../MediaInfo/EmbeddedImageProvider.cs | 41 ++--
.../MediaInfo/VideoImageProvider.cs | 9 +-
.../MediaInfo/EmbeddedImageProviderTests.cs | 211 +++++++++++++++++++++
.../MediaInfo/VideoImageProviderTests.cs | 168 ++++++++++++++++
6 files changed, 408 insertions(+), 29 deletions(-)
create mode 100644 tests/Jellyfin.Providers.Tests/MediaInfo/EmbeddedImageProviderTests.cs
create mode 100644 tests/Jellyfin.Providers.Tests/MediaInfo/VideoImageProviderTests.cs
(limited to 'MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs')
diff --git a/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs b/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
index 638588560..e6511ca8d 100644
--- a/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
+++ b/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
@@ -95,7 +95,7 @@ namespace MediaBrowser.Controller.MediaEncoding
/// <param name="mediaSource">Media source information.</param>
/// <param name="imageStream">Media stream information.</param>
/// <param name="imageStreamIndex">Index of the stream to extract from.</param>
- /// <param name="outputExtension">The extension of the file to write.</param>
+ /// <param name="outputExtension">The extension of the file to write, including the '.'.</param>
/// <param name="cancellationToken">CancellationToken to use for operation.</param>
/// <returns>Location of video image.</returns>
Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream imageStream, int? imageStreamIndex, string outputExtension, CancellationToken cancellationToken);
diff --git a/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs b/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs
index 30bc7125d..dac2c6a26 100644
--- a/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs
+++ b/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs
@@ -468,12 +468,12 @@ namespace MediaBrowser.MediaEncoding.Encoder
Protocol = MediaProtocol.File
};
- return ExtractImage(path, null, null, imageStreamIndex, mediaSource, true, null, null, "jpg", cancellationToken);
+ return ExtractImage(path, null, null, imageStreamIndex, mediaSource, true, null, null, ".jpg", cancellationToken);
}
public Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream videoStream, Video3DFormat? threedFormat, TimeSpan? offset, CancellationToken cancellationToken)
{
- return ExtractImage(inputFile, container, videoStream, null, mediaSource, false, threedFormat, offset, "jpg", cancellationToken);
+ return ExtractImage(inputFile, container, videoStream, null, mediaSource, false, threedFormat, offset, ".jpg", cancellationToken);
}
public Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream imageStream, int? imageStreamIndex, string outputExtension, CancellationToken cancellationToken)
@@ -548,7 +548,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
throw new ArgumentNullException(nameof(inputPath));
}
- var tempExtractPath = Path.Combine(_configurationManager.ApplicationPaths.TempDirectory, Guid.NewGuid() + "." + outputExtension);
+ var tempExtractPath = Path.Combine(_configurationManager.ApplicationPaths.TempDirectory, Guid.NewGuid() + outputExtension);
Directory.CreateDirectory(Path.GetDirectoryName(tempExtractPath));
// apply some filters to thumbnail extracted below (below) crop any black lines that we made and get the correct ar.
diff --git a/MediaBrowser.Providers/MediaInfo/EmbeddedImageProvider.cs b/MediaBrowser.Providers/MediaInfo/EmbeddedImageProvider.cs
index ad95cdb06..df87f2d49 100644
--- a/MediaBrowser.Providers/MediaInfo/EmbeddedImageProvider.cs
+++ b/MediaBrowser.Providers/MediaInfo/EmbeddedImageProvider.cs
@@ -1,9 +1,7 @@
-#nullable enable
#pragma warning disable CS1591
using System;
using System.Collections.Generic;
-using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Threading;
@@ -17,7 +15,6 @@ using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Net;
-using Microsoft.Extensions.Logging;
namespace MediaBrowser.Providers.MediaInfo
{
@@ -48,12 +45,10 @@ namespace MediaBrowser.Providers.MediaInfo
};
private readonly IMediaEncoder _mediaEncoder;
- private readonly ILogger _logger;
- public EmbeddedImageProvider(IMediaEncoder mediaEncoder, ILogger logger)
+ public EmbeddedImageProvider(IMediaEncoder mediaEncoder)
{
_mediaEncoder = mediaEncoder;
- _logger = logger;
}
/// <inheritdoc />
@@ -84,7 +79,7 @@ namespace MediaBrowser.Providers.MediaInfo
};
}
- return ImmutableList<ImageType>.Empty;
+ return new List<ImageType>();
}
/// <inheritdoc />
@@ -98,13 +93,6 @@ namespace MediaBrowser.Providers.MediaInfo
return Task.FromResult(new DynamicImageResponse { HasImage = false });
}
- // Can't extract if we didn't find any video streams in the file
- if (!video.DefaultVideoStreamIndex.HasValue)
- {
- _logger.LogInformation("Skipping image extraction due to missing DefaultVideoStreamIndex for {Path}.", video.Path ?? string.Empty);
- return Task.FromResult(new DynamicImageResponse { HasImage = false });
- }
-
return GetEmbeddedImage(video, type, cancellationToken);
}
@@ -128,24 +116,29 @@ namespace MediaBrowser.Providers.MediaInfo
// Try attachments first
var attachmentSources = item.GetMediaSources(false).SelectMany(source => source.MediaAttachments).ToList();
var attachmentStream = attachmentSources
- .Where(stream => !string.IsNullOrEmpty(stream.FileName))
- .First(stream => imageFileNames.Any(name => stream.FileName.Contains(name, StringComparison.OrdinalIgnoreCase)));
+ .Where(attachment => !string.IsNullOrEmpty(attachment.FileName))
+ .FirstOrDefault(attachment => imageFileNames.Any(name => attachment.FileName.Contains(name, StringComparison.OrdinalIgnoreCase)));
if (attachmentStream != null)
{
- var extension = (string.IsNullOrEmpty(attachmentStream.MimeType) ?
+ var extension = string.IsNullOrEmpty(attachmentStream.MimeType) ?
Path.GetExtension(attachmentStream.FileName) :
- MimeTypes.ToExtension(attachmentStream.MimeType)) ?? "jpg";
+ MimeTypes.ToExtension(attachmentStream.MimeType);
+
+ if (string.IsNullOrEmpty(extension))
+ {
+ extension = ".jpg";
+ }
string extractedAttachmentPath = await _mediaEncoder.ExtractVideoImage(item.Path, item.Container, mediaSource, null, attachmentStream.Index, extension, cancellationToken).ConfigureAwait(false);
ImageFormat format = extension switch
{
- "bmp" => ImageFormat.Bmp,
- "gif" => ImageFormat.Gif,
- "jpg" => ImageFormat.Jpg,
- "png" => ImageFormat.Png,
- "webp" => ImageFormat.Webp,
+ ".bmp" => ImageFormat.Bmp,
+ ".gif" => ImageFormat.Gif,
+ ".jpg" => ImageFormat.Jpg,
+ ".png" => ImageFormat.Png,
+ ".webp" => ImageFormat.Webp,
_ => ImageFormat.Jpg
};
@@ -170,7 +163,7 @@ namespace MediaBrowser.Providers.MediaInfo
// Extract first stream containing an element of imageFileNames
var imageStream = imageStreams
.Where(stream => !string.IsNullOrEmpty(stream.Comment))
- .First(stream => imageFileNames.Any(name => stream.Comment.Contains(name, StringComparison.OrdinalIgnoreCase)));
+ .FirstOrDefault(stream => imageFileNames.Any(name => stream.Comment.Contains(name, StringComparison.OrdinalIgnoreCase)));
// Primary type only: default to first image if none found by label
if (imageStream == null)
diff --git a/MediaBrowser.Providers/MediaInfo/VideoImageProvider.cs b/MediaBrowser.Providers/MediaInfo/VideoImageProvider.cs
index 8f2009950..60739f156 100644
--- a/MediaBrowser.Providers/MediaInfo/VideoImageProvider.cs
+++ b/MediaBrowser.Providers/MediaInfo/VideoImageProvider.cs
@@ -81,7 +81,14 @@ namespace MediaBrowser.Providers.MediaInfo
? TimeSpan.FromTicks(item.RunTimeTicks.Value / 10)
: TimeSpan.FromSeconds(10);
- var videoStream = item.GetMediaStreams().FirstOrDefault(i => i.Type == MediaStreamType.Video);
+ var videoStream = item.GetDefaultVideoStream() ?? item.GetMediaStreams().FirstOrDefault(i => i.Type == MediaStreamType.Video);
+
+ if (videoStream == null)
+ {
+ _logger.LogInformation("Skipping image extraction: no video stream found for {Path}.", item.Path ?? string.Empty);
+ return new DynamicImageResponse { HasImage = false };
+ }
+
string extractedImagePath = await _mediaEncoder.ExtractVideoImage(item.Path, item.Container, mediaSource, videoStream, item.Video3DFormat, imageOffset, cancellationToken).ConfigureAwait(false);
return new DynamicImageResponse
diff --git a/tests/Jellyfin.Providers.Tests/MediaInfo/EmbeddedImageProviderTests.cs b/tests/Jellyfin.Providers.Tests/MediaInfo/EmbeddedImageProviderTests.cs
new file mode 100644
index 000000000..fcea1532a
--- /dev/null
+++ b/tests/Jellyfin.Providers.Tests/MediaInfo/EmbeddedImageProviderTests.cs
@@ -0,0 +1,211 @@
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using MediaBrowser.Controller.Entities;
+using MediaBrowser.Controller.Entities.Movies;
+using MediaBrowser.Controller.Entities.TV;
+using MediaBrowser.Controller.MediaEncoding;
+using MediaBrowser.Model.Drawing;
+using MediaBrowser.Model.Dto;
+using MediaBrowser.Model.Entities;
+using MediaBrowser.Providers.MediaInfo;
+using Moq;
+using Xunit;
+
+namespace Jellyfin.Providers.Tests.MediaInfo
+{
+ public class EmbeddedImageProviderTests
+ {
+ public static TheoryData<BaseItem> GetSupportedImages_Empty_TestData =>
+ new ()
+ {
+ new AudioBook(),
+ new BoxSet(),
+ new Series(),
+ new Season(),
+ };
+
+ public static TheoryData<BaseItem, IEnumerable<ImageType>> GetSupportedImages_Populated_TestData =>
+ new TheoryData<BaseItem, IEnumerable<ImageType>>
+ {
+ { new Episode(), new List<ImageType> { ImageType.Primary } },
+ { new Movie(), new List<ImageType> { ImageType.Logo, ImageType.Backdrop, ImageType.Primary } },
+ };
+
+ private EmbeddedImageProvider GetEmbeddedImageProvider(IMediaEncoder? mediaEncoder)
+ {
+ return new EmbeddedImageProvider(mediaEncoder);
+ }
+
+ [Theory]
+ [MemberData(nameof(GetSupportedImages_Empty_TestData))]
+ public void GetSupportedImages_Empty(BaseItem item)
+ {
+ var embeddedImageProvider = GetEmbeddedImageProvider(null);
+ Assert.False(embeddedImageProvider.GetSupportedImages(item).Any());
+ }
+
+ [Theory]
+ [MemberData(nameof(GetSupportedImages_Populated_TestData))]
+ public void GetSupportedImages_Populated(BaseItem item, IEnumerable<ImageType> expected)
+ {
+ var embeddedImageProvider = GetEmbeddedImageProvider(null);
+ var actual = embeddedImageProvider.GetSupportedImages(item);
+ Assert.Equal(expected.OrderBy(i => i.ToString()), actual.OrderBy(i => i.ToString()));
+ }
+
+ [Fact]
+ public async void GetImage_Empty_NoStreams()
+ {
+ var embeddedImageProvider = GetEmbeddedImageProvider(null);
+
+ var input = new Mock<Movie>();
+ input.Setup(movie => movie.GetMediaSources(It.IsAny<bool>()))
+ .Returns(new List<MediaSourceInfo>());
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream>());
+
+ var actual = await embeddedImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+ Assert.NotNull(actual);
+ Assert.False(actual.HasImage);
+ }
+
+ [Fact]
+ public async void GetImage_Empty_NoLabeledAttachments()
+ {
+ var embeddedImageProvider = GetEmbeddedImageProvider(null);
+
+ var input = new Mock<Movie>();
+ // add an attachment without a filename - has a list to look through but finds nothing
+ input.Setup(movie => movie.GetMediaSources(It.IsAny<bool>()))
+ .Returns(new List<MediaSourceInfo> { new () { MediaAttachments = new List<MediaAttachment> { new () } } });
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream>());
+
+ var actual = await embeddedImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+ Assert.NotNull(actual);
+ Assert.False(actual.HasImage);
+ }
+
+ [Fact]
+ public async void GetImage_Empty_NoEmbeddedLabeledBackdrop()
+ {
+ var embeddedImageProvider = GetEmbeddedImageProvider(null);
+
+ var input = new Mock<Movie>();
+ input.Setup(movie => movie.GetMediaSources(It.IsAny<bool>()))
+ .Returns(new List<MediaSourceInfo>());
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream> { new () { Type = MediaStreamType.EmbeddedImage } });
+
+ var actual = await embeddedImageProvider.GetImage(input.Object, ImageType.Backdrop, CancellationToken.None);
+ Assert.NotNull(actual);
+ Assert.False(actual.HasImage);
+ }
+
+ [Fact]
+ public async void GetImage_Attached()
+ {
+ // first tests file extension detection, second uses mimetype, third defaults to jpg
+ MediaAttachment sampleAttachment1 = new () { FileName = "clearlogo.png", Index = 1 };
+ MediaAttachment sampleAttachment2 = new () { FileName = "backdrop", MimeType = "image/bmp", Index = 2 };
+ MediaAttachment sampleAttachment3 = new () { FileName = "poster", Index = 3 };
+ string targetPath1 = "path1.png";
+ string targetPath2 = "path2.bmp";
+ string targetPath3 = "path2.jpg";
+
+ var mediaEncoder = new Mock<IMediaEncoder>(MockBehavior.Strict);
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), It.IsAny<MediaStream>(), 1, ".png", CancellationToken.None))
+ .Returns(Task.FromResult(targetPath1));
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), It.IsAny<MediaStream>(), 2, ".bmp", CancellationToken.None))
+ .Returns(Task.FromResult(targetPath2));
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), It.IsAny<MediaStream>(), 3, ".jpg", CancellationToken.None))
+ .Returns(Task.FromResult(targetPath3));
+ var embeddedImageProvider = GetEmbeddedImageProvider(mediaEncoder.Object);
+
+ var input = new Mock<Movie>();
+ input.Setup(movie => movie.GetMediaSources(It.IsAny<bool>()))
+ .Returns(new List<MediaSourceInfo> { new () { MediaAttachments = new List<MediaAttachment> { sampleAttachment1, sampleAttachment2, sampleAttachment3 } } });
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream>());
+
+ var actualLogo = await embeddedImageProvider.GetImage(input.Object, ImageType.Logo, CancellationToken.None);
+ Assert.NotNull(actualLogo);
+ Assert.True(actualLogo.HasImage);
+ Assert.Equal(targetPath1, actualLogo.Path);
+ Assert.Equal(ImageFormat.Png, actualLogo.Format);
+
+ var actualBackdrop = await embeddedImageProvider.GetImage(input.Object, ImageType.Backdrop, CancellationToken.None);
+ Assert.NotNull(actualBackdrop);
+ Assert.True(actualBackdrop.HasImage);
+ Assert.Equal(targetPath2, actualBackdrop.Path);
+ Assert.Equal(ImageFormat.Bmp, actualBackdrop.Format);
+
+ var actualPrimary = await embeddedImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+ Assert.NotNull(actualPrimary);
+ Assert.True(actualPrimary.HasImage);
+ Assert.Equal(targetPath3, actualPrimary.Path);
+ Assert.Equal(ImageFormat.Jpg, actualPrimary.Format);
+ }
+
+ [Fact]
+ public async void GetImage_EmbeddedDefault()
+ {
+ MediaStream sampleStream = new () { Type = MediaStreamType.EmbeddedImage, Index = 1 };
+ string targetPath = "path";
+
+ var mediaEncoder = new Mock<IMediaEncoder>(MockBehavior.Strict);
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), sampleStream, 1, "jpg", CancellationToken.None))
+ .Returns(Task.FromResult(targetPath));
+ var embeddedImageProvider = GetEmbeddedImageProvider(mediaEncoder.Object);
+
+ var input = new Mock<Movie>();
+ input.Setup(movie => movie.GetMediaSources(It.IsAny<bool>()))
+ .Returns(new List<MediaSourceInfo>());
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream>() { sampleStream });
+
+ var actual = await embeddedImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+ Assert.NotNull(actual);
+ Assert.True(actual.HasImage);
+ Assert.Equal(targetPath, actual.Path);
+ Assert.Equal(ImageFormat.Jpg, actual.Format);
+ }
+
+ [Fact]
+ public async void GetImage_EmbeddedSelection()
+ {
+ // primary is second stream to ensure it's not defaulting, backdrop is first
+ MediaStream sampleStream1 = new () { Type = MediaStreamType.EmbeddedImage, Index = 1, Comment = "backdrop" };
+ MediaStream sampleStream2 = new () { Type = MediaStreamType.EmbeddedImage, Index = 2, Comment = "cover" };
+ string targetPath1 = "path1.jpg";
+ string targetPath2 = "path2.jpg";
+
+ var mediaEncoder = new Mock<IMediaEncoder>(MockBehavior.Strict);
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), sampleStream1, 1, "jpg", CancellationToken.None))
+ .Returns(Task.FromResult(targetPath1));
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), sampleStream2, 2, "jpg", CancellationToken.None))
+ .Returns(Task.FromResult(targetPath2));
+ var embeddedImageProvider = GetEmbeddedImageProvider(mediaEncoder.Object);
+
+ var input = new Mock<Movie>();
+ input.Setup(movie => movie.GetMediaSources(It.IsAny<bool>()))
+ .Returns(new List<MediaSourceInfo>());
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream> { sampleStream1, sampleStream2 });
+
+ var actualPrimary = await embeddedImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+ Assert.NotNull(actualPrimary);
+ Assert.True(actualPrimary.HasImage);
+ Assert.Equal(targetPath2, actualPrimary.Path);
+ Assert.Equal(ImageFormat.Jpg, actualPrimary.Format);
+
+ var actualBackdrop = await embeddedImageProvider.GetImage(input.Object, ImageType.Backdrop, CancellationToken.None);
+ Assert.NotNull(actualBackdrop);
+ Assert.True(actualBackdrop.HasImage);
+ Assert.Equal(targetPath1, actualBackdrop.Path);
+ Assert.Equal(ImageFormat.Jpg, actualBackdrop.Format);
+ }
+ }
+}
diff --git a/tests/Jellyfin.Providers.Tests/MediaInfo/VideoImageProviderTests.cs b/tests/Jellyfin.Providers.Tests/MediaInfo/VideoImageProviderTests.cs
new file mode 100644
index 000000000..9a5cd79bb
--- /dev/null
+++ b/tests/Jellyfin.Providers.Tests/MediaInfo/VideoImageProviderTests.cs
@@ -0,0 +1,168 @@
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using MediaBrowser.Controller.Entities.Movies;
+using MediaBrowser.Controller.MediaEncoding;
+using MediaBrowser.Model.Drawing;
+using MediaBrowser.Model.Dto;
+using MediaBrowser.Model.Entities;
+using MediaBrowser.Providers.MediaInfo;
+using Microsoft.Extensions.Logging.Abstractions;
+using Moq;
+using Xunit;
+
+namespace Jellyfin.Providers.Tests.MediaInfo
+{
+ public class VideoImageProviderTests
+ {
+ private VideoImageProvider GetVideoImageProvider(IMediaEncoder? mediaEncoder)
+ {
+ // strict to ensure this isn't accidentally used where a prepared mock is intended
+ mediaEncoder ??= new Mock<IMediaEncoder>(MockBehavior.Strict).Object;
+ return new VideoImageProvider(mediaEncoder, new NullLogger<VideoImageProvider>());
+ }
+
+ [Fact]
+ public async void GetImage_Empty_IsPlaceholder()
+ {
+ var videoImageProvider = GetVideoImageProvider(null);
+
+ var input = new Mock<Movie>();
+ input.Object.IsPlaceHolder = true;
+
+ var actual = await videoImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+ Assert.NotNull(actual);
+ Assert.False(actual.HasImage);
+ }
+
+ [Fact]
+ public async void GetImage_Empty_NoDefaultVideoStream()
+ {
+ var videoImageProvider = GetVideoImageProvider(null);
+
+ var input = new Mock<Movie>();
+
+ var actual = await videoImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+ Assert.NotNull(actual);
+ Assert.False(actual.HasImage);
+ }
+
+ [Fact]
+ public async void GetImage_Empty_DefaultSet_NoVideoStream()
+ {
+ var videoImageProvider = GetVideoImageProvider(null);
+
+ var input = new Mock<Movie>();
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream>());
+ // set a default index but don't put anything there (invalid input, but provider shouldn't break)
+ input.Object.DefaultVideoStreamIndex = 1;
+
+ var actual = await videoImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+ Assert.NotNull(actual);
+ Assert.False(actual.HasImage);
+ }
+
+ [Fact]
+ public async void GetImage_Extract_DefaultStream()
+ {
+ MediaStream firstStream = new () { Type = MediaStreamType.Video, Index = 0 };
+ MediaStream targetStream = new () { Type = MediaStreamType.Video, Index = 1 };
+ string targetPath = "path.jpg";
+
+ var mediaEncoder = new Mock<IMediaEncoder>(MockBehavior.Strict);
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), firstStream, It.IsAny<Video3DFormat?>(), It.IsAny<TimeSpan?>(), CancellationToken.None))
+ .Returns(Task.FromResult("wrong stream called!"));
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), targetStream, It.IsAny<Video3DFormat?>(), It.IsAny<TimeSpan?>(), CancellationToken.None))
+ .Returns(Task.FromResult(targetPath));
+ var videoImageProvider = GetVideoImageProvider(mediaEncoder.Object);
+
+ var input = new Mock<Movie>();
+ input.Setup(movie => movie.GetDefaultVideoStream())
+ .Returns(targetStream);
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream>() { firstStream, targetStream });
+ input.Object.DefaultVideoStreamIndex = 1;
+
+ var actual = await videoImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+ Assert.NotNull(actual);
+ Assert.True(actual.HasImage);
+ Assert.Equal(targetPath, actual.Path);
+ Assert.Equal(ImageFormat.Jpg, actual.Format);
+ }
+
+ [Fact]
+ public async void GetImage_Extract_FallbackToFirstVideoStream()
+ {
+ MediaStream targetStream = new () { Type = MediaStreamType.Video, Index = 0 };
+ string targetPath = "path.jpg";
+
+ var mediaEncoder = new Mock<IMediaEncoder>(MockBehavior.Strict);
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), targetStream, It.IsAny<Video3DFormat?>(), It.IsAny<TimeSpan?>(), CancellationToken.None))
+ .Returns(Task.FromResult(targetPath));
+ var videoImageProvider = GetVideoImageProvider(mediaEncoder.Object);
+
+ var input = new Mock<Movie>();
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream>() { targetStream });
+ // default must be set, ensure a stream is still found if not pointed at a video
+ input.Object.DefaultVideoStreamIndex = 5;
+
+ var actual = await videoImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+ Assert.NotNull(actual);
+ Assert.True(actual.HasImage);
+ Assert.Equal(targetPath, actual.Path);
+ Assert.Equal(ImageFormat.Jpg, actual.Format);
+ }
+
+ [Fact]
+ public async void GetImage_Time_Default()
+ {
+ MediaStream targetStream = new () { Type = MediaStreamType.Video, Index = 0 };
+
+ TimeSpan? actualTimeSpan = null;
+ var mediaEncoder = new Mock<IMediaEncoder>(MockBehavior.Strict);
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), It.IsAny<MediaStream>(), It.IsAny<Video3DFormat?>(), It.IsAny<TimeSpan?>(), CancellationToken.None))
+ .Callback<string, string, MediaSourceInfo, MediaStream, Video3DFormat?, TimeSpan?, CancellationToken>((_, _, _, _, _, timeSpan, _) => actualTimeSpan = timeSpan)
+ .Returns(Task.FromResult("path"));
+ var videoImageProvider = GetVideoImageProvider(mediaEncoder.Object);
+
+ var input = new Mock<Movie>();
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream>() { targetStream });
+ // default must be set
+ input.Object.DefaultVideoStreamIndex = 0;
+
+ // not testing return, just verifying what gets requested for time span
+ await videoImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+
+ Assert.Equal(TimeSpan.FromSeconds(10), actualTimeSpan);
+ }
+
+ [Fact]
+ public async void GetImage_Time_Calculated()
+ {
+ MediaStream targetStream = new () { Type = MediaStreamType.Video, Index = 0 };
+
+ TimeSpan? actualTimeSpan = null;
+ var mediaEncoder = new Mock<IMediaEncoder>(MockBehavior.Strict);
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<MediaSourceInfo>(), It.IsAny<MediaStream>(), It.IsAny<Video3DFormat?>(), It.IsAny<TimeSpan?>(), CancellationToken.None))
+ .Callback<string, string, MediaSourceInfo, MediaStream, Video3DFormat?, TimeSpan?, CancellationToken>((_, _, _, _, _, timeSpan, _) => actualTimeSpan = timeSpan)
+ .Returns(Task.FromResult("path"));
+ var videoImageProvider = GetVideoImageProvider(mediaEncoder.Object);
+
+ var input = new Mock<Movie>();
+ input.Setup(movie => movie.GetMediaStreams())
+ .Returns(new List<MediaStream>() { targetStream });
+ // default must be set
+ input.Object.DefaultVideoStreamIndex = 0;
+ input.Object.RunTimeTicks = 5000;
+
+ // not testing return, just verifying what gets requested for time span
+ await videoImageProvider.GetImage(input.Object, ImageType.Primary, CancellationToken.None);
+
+ Assert.Equal(TimeSpan.FromTicks(500), actualTimeSpan);
+ }
+ }
+}
From 1d19a5be617c191a731b76e556fae1e395eb3788 Mon Sep 17 00:00:00 2001
From: Bond_009
Date: Tue, 9 Nov 2021 22:29:33 +0100
Subject: Fix some warnings
down to 580
---
Emby.Dlna/DlnaManager.cs | 19 +---
Emby.Dlna/Main/DlnaEntryPoint.cs | 7 +-
Emby.Server.Implementations/Dto/DtoService.cs | 7 +-
Emby.Server.Implementations/IO/LibraryMonitor.cs | 2 +-
.../Library/LibraryManager.cs | 3 +-
.../Library/Resolvers/Audio/MusicAlbumResolver.cs | 2 +-
.../LiveTv/EmbyTV/EmbyTV.cs | 2 +-
.../LiveTv/EmbyTV/EncodedRecorder.cs | 5 +-
.../ScheduledTasks/ScheduledTaskWorker.cs | 10 +-
.../Updates/InstallationManager.cs | 2 +-
Jellyfin.Api/Controllers/DynamicHlsController.cs | 2 +-
Jellyfin.Api/Helpers/TranscodingJobHelper.cs | 6 +-
.../Models/PlaybackDtos/TranscodingThrottler.cs | 2 +-
MediaBrowser.Controller/Entities/Folder.cs | 4 +-
.../MediaEncoding/IMediaEncoder.cs | 26 -----
.../Parsers/BaseItemXmlParser.cs | 2 +-
.../Attachments/AttachmentExtractor.cs | 7 +-
MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs | 117 +--------------------
.../Subtitles/SubtitleEncoder.cs | 11 +-
.../Parsers/SeriesNfoParser.cs | 2 +-
jellyfin.ruleset | 4 +
21 files changed, 38 insertions(+), 204 deletions(-)
(limited to 'MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs')
diff --git a/Emby.Dlna/DlnaManager.cs b/Emby.Dlna/DlnaManager.cs
index f37d2d7d7..277a0e678 100644
--- a/Emby.Dlna/DlnaManager.cs
+++ b/Emby.Dlna/DlnaManager.cs
@@ -112,7 +112,7 @@ namespace Emby.Dlna
if (profile == null)
{
- LogUnmatchedProfile(deviceInfo);
+ _logger.LogInformation("No matching device profile found. The default will need to be used. \n{@Profile}", deviceInfo);
}
else
{
@@ -122,23 +122,6 @@ namespace Emby.Dlna
return profile;
}
- private void LogUnmatchedProfile(DeviceIdentification profile)
- {
- var builder = new StringBuilder();
-
- builder.AppendLine("No matching device profile found. The default will need to be used.");
- builder.Append("FriendlyName: ").AppendLine(profile.FriendlyName);
- builder.Append("Manufacturer: ").AppendLine(profile.Manufacturer);
- builder.Append("ManufacturerUrl: ").AppendLine(profile.ManufacturerUrl);
- builder.Append("ModelDescription: ").AppendLine(profile.ModelDescription);
- builder.Append("ModelName: ").AppendLine(profile.ModelName);
- builder.Append("ModelNumber: ").AppendLine(profile.ModelNumber);
- builder.Append("ModelUrl: ").AppendLine(profile.ModelUrl);
- builder.Append("SerialNumber: ").AppendLine(profile.SerialNumber);
-
- _logger.LogInformation(builder.ToString());
- }
-
/// <summary>
/// Attempts to match a device with a profile.
/// Rules:
diff --git a/Emby.Dlna/Main/DlnaEntryPoint.cs b/Emby.Dlna/Main/DlnaEntryPoint.cs
index 8e89d9ae6..722428c73 100644
--- a/Emby.Dlna/Main/DlnaEntryPoint.cs
+++ b/Emby.Dlna/Main/DlnaEntryPoint.cs
@@ -218,11 +218,6 @@ namespace Emby.Dlna.Main
}
}
- private void LogMessage(string msg)
- {
- _logger.LogDebug(msg);
- }
-
private void StartDeviceDiscovery(ISsdpCommunicationsServer communicationsServer)
{
try
@@ -272,7 +267,7 @@ namespace Emby.Dlna.Main
Environment.OSVersion.VersionString,
_config.GetDlnaConfiguration().SendOnlyMatchedHost)
{
- LogFunction = LogMessage,
+ LogFunction = (msg) => _logger.LogDebug("{Msg}", msg),
SupportPnpRootDevice = false
};
diff --git a/Emby.Server.Implementations/Dto/DtoService.cs b/Emby.Server.Implementations/Dto/DtoService.cs
index c6b32a52c..67ecd04e0 100644
--- a/Emby.Server.Implementations/Dto/DtoService.cs
+++ b/Emby.Server.Implementations/Dto/DtoService.cs
@@ -134,14 +134,11 @@ namespace Emby.Server.Implementations.Dto
var dto = GetBaseItemDtoInternal(item, options, user, owner);
if (item is LiveTvChannel tvChannel)
{
- var list = new List<(BaseItemDto, LiveTvChannel)>(1) { (dto, tvChannel) };
- LivetvManager.AddChannelInfo(list, options, user);
+ LivetvManager.AddChannelInfo(new[] { (dto, tvChannel) }, options, user);
}
else if (item is LiveTvProgram)
{
- var list = new List<(BaseItem, BaseItemDto)>(1) { (item, dto) };
- var task = LivetvManager.AddInfoToProgramDto(list, options.Fields, user);
- Task.WaitAll(task);
+ LivetvManager.AddInfoToProgramDto(new[] { (item, dto) }, options.Fields, user).GetAwaiter().GetResult();
}
if (item is IItemByName itemByName
diff --git a/Emby.Server.Implementations/IO/LibraryMonitor.cs b/Emby.Server.Implementations/IO/LibraryMonitor.cs
index 7ebc800b9..b525f5a2f 100644
--- a/Emby.Server.Implementations/IO/LibraryMonitor.cs
+++ b/Emby.Server.Implementations/IO/LibraryMonitor.cs
@@ -267,7 +267,7 @@ namespace Emby.Server.Implementations.IO
if (_fileSystemWatchers.TryAdd(path, newWatcher))
{
newWatcher.EnableRaisingEvents = true;
- _logger.LogInformation("Watching directory " + path);
+ _logger.LogInformation("Watching directory {Path}", path);
}
else
{
diff --git a/Emby.Server.Implementations/Library/LibraryManager.cs b/Emby.Server.Implementations/Library/LibraryManager.cs
index 2dbb569c6..559da7f5c 100644
--- a/Emby.Server.Implementations/Library/LibraryManager.cs
+++ b/Emby.Server.Implementations/Library/LibraryManager.cs
@@ -333,8 +333,7 @@ namespace Emby.Server.Implementations.Library
{
try
{
- var task = BaseItem.ChannelManager.DeleteItem(item);
- Task.WaitAll(task);
+ BaseItem.ChannelManager.DeleteItem(item).GetAwaiter().GetResult();
}
catch (ArgumentException)
{
diff --git a/Emby.Server.Implementations/Library/Resolvers/Audio/MusicAlbumResolver.cs b/Emby.Server.Implementations/Library/Resolvers/Audio/MusicAlbumResolver.cs
index 60720dd2f..9e3f62276 100644
--- a/Emby.Server.Implementations/Library/Resolvers/Audio/MusicAlbumResolver.cs
+++ b/Emby.Server.Implementations/Library/Resolvers/Audio/MusicAlbumResolver.cs
@@ -151,7 +151,7 @@ namespace Emby.Server.Implementations.Library.Resolvers.Audio
{
if (parser.IsMultiPart(path))
{
- logger.LogDebug("Found multi-disc folder: " + path);
+ logger.LogDebug("Found multi-disc folder: {Path}", path);
Interlocked.Increment(ref discSubfolderCount);
}
else
diff --git a/Emby.Server.Implementations/LiveTv/EmbyTV/EmbyTV.cs b/Emby.Server.Implementations/LiveTv/EmbyTV/EmbyTV.cs
index 367f3cb9e..644f9050d 100644
--- a/Emby.Server.Implementations/LiveTv/EmbyTV/EmbyTV.cs
+++ b/Emby.Server.Implementations/LiveTv/EmbyTV/EmbyTV.cs
@@ -957,7 +957,7 @@ namespace Emby.Server.Implementations.LiveTv.EmbyTV
public async Task GetChannelStreamWithDirectStreamProvider(string channelId, string streamId, List currentLiveStreams, CancellationToken cancellationToken)
{
- _logger.LogInformation("Streaming Channel " + channelId);
+ _logger.LogInformation("Streaming Channel {Id}", channelId);
var result = string.IsNullOrEmpty(streamId) ?
null :
diff --git a/Emby.Server.Implementations/LiveTv/EmbyTV/EncodedRecorder.cs b/Emby.Server.Implementations/LiveTv/EmbyTV/EncodedRecorder.cs
index 8688688e9..5726d7158 100644
--- a/Emby.Server.Implementations/LiveTv/EmbyTV/EncodedRecorder.cs
+++ b/Emby.Server.Implementations/LiveTv/EmbyTV/EncodedRecorder.cs
@@ -87,8 +87,7 @@ namespace Emby.Server.Implementations.LiveTv.EmbyTV
ErrorDialog = false
};
- var commandLineLogMessage = processStartInfo.FileName + " " + processStartInfo.Arguments;
- _logger.LogInformation(commandLineLogMessage);
+ _logger.LogInformation("{Filename} {Arguments}", processStartInfo.FileName, processStartInfo.Arguments);
var logFilePath = Path.Combine(_appPaths.LogDirectoryPath, "record-transcode-" + Guid.NewGuid() + ".txt");
Directory.CreateDirectory(Path.GetDirectoryName(logFilePath));
@@ -97,7 +96,7 @@ namespace Emby.Server.Implementations.LiveTv.EmbyTV
_logFileStream = new FileStream(logFilePath, FileMode.CreateNew, FileAccess.Write, FileShare.Read, IODefaults.FileStreamBufferSize, FileOptions.Asynchronous);
await JsonSerializer.SerializeAsync(_logFileStream, mediaSource, _jsonOptions, cancellationToken).ConfigureAwait(false);
- await _logFileStream.WriteAsync(Encoding.UTF8.GetBytes(Environment.NewLine + Environment.NewLine + commandLineLogMessage + Environment.NewLine + Environment.NewLine), cancellationToken).ConfigureAwait(false);
+ await _logFileStream.WriteAsync(Encoding.UTF8.GetBytes(Environment.NewLine + Environment.NewLine + processStartInfo.FileName + " " + processStartInfo.Arguments + Environment.NewLine + Environment.NewLine), cancellationToken).ConfigureAwait(false);
_process = new Process
{
diff --git a/Emby.Server.Implementations/ScheduledTasks/ScheduledTaskWorker.cs b/Emby.Server.Implementations/ScheduledTasks/ScheduledTaskWorker.cs
index f2cdfeb16..21a7f4f5f 100644
--- a/Emby.Server.Implementations/ScheduledTasks/ScheduledTaskWorker.cs
+++ b/Emby.Server.Implementations/ScheduledTasks/ScheduledTaskWorker.cs
@@ -638,7 +638,7 @@ namespace Emby.Server.Implementations.ScheduledTasks
{
try
{
- _logger.LogInformation(Name + ": Cancelling");
+ _logger.LogInformation("{Name}: Cancelling", Name);
token.Cancel();
}
catch (Exception ex)
@@ -652,16 +652,16 @@ namespace Emby.Server.Implementations.ScheduledTasks
{
try
{
- _logger.LogInformation(Name + ": Waiting on Task");
+ _logger.LogInformation("{Name}: Waiting on Task", Name);
var exited = task.Wait(2000);
if (exited)
{
- _logger.LogInformation(Name + ": Task exited");
+ _logger.LogInformation("{Name}: Task exited", Name);
}
else
{
- _logger.LogInformation(Name + ": Timed out waiting for task to stop");
+ _logger.LogInformation("{Name}: Timed out waiting for task to stop", Name);
}
}
catch (Exception ex)
@@ -674,7 +674,7 @@ namespace Emby.Server.Implementations.ScheduledTasks
{
try
{
- _logger.LogDebug(Name + ": Disposing CancellationToken");
+ _logger.LogDebug("{Name}: Disposing CancellationToken", Name);
token.Dispose();
}
catch (Exception ex)
diff --git a/Emby.Server.Implementations/Updates/InstallationManager.cs b/Emby.Server.Implementations/Updates/InstallationManager.cs
index 4a022c5db..ef95ebf94 100644
--- a/Emby.Server.Implementations/Updates/InstallationManager.cs
+++ b/Emby.Server.Implementations/Updates/InstallationManager.cs
@@ -571,7 +571,7 @@ namespace Emby.Server.Implementations.Updates
?? _pluginManager.Plugins.FirstOrDefault(p => p.Name.Equals(package.Name, StringComparison.OrdinalIgnoreCase) && p.Version.Equals(package.Version));
await PerformPackageInstallation(package, plugin?.Manifest.Status ?? PluginStatus.Active, cancellationToken).ConfigureAwait(false);
- _logger.LogInformation(plugin == null ? "New plugin installed: {PluginName} {PluginVersion}" : "Plugin updated: {PluginName} {PluginVersion}", package.Name, package.Version);
+ _logger.LogInformation("Plugin {Action}: {PluginName} {PluginVersion}", plugin == null ? "installed" : "updated", package.Name, package.Version);
return plugin != null;
}
diff --git a/Jellyfin.Api/Controllers/DynamicHlsController.cs b/Jellyfin.Api/Controllers/DynamicHlsController.cs
index 049fd503b..caa3d2368 100644
--- a/Jellyfin.Api/Controllers/DynamicHlsController.cs
+++ b/Jellyfin.Api/Controllers/DynamicHlsController.cs
@@ -1391,7 +1391,7 @@ namespace Jellyfin.Api.Controllers
}
else
{
- _logger.LogError("Invalid HLS segment container: " + segmentFormat);
+ _logger.LogError("Invalid HLS segment container: {SegmentFormat}", segmentFormat);
}
var maxMuxingQueueSize = _encodingOptions.MaxMuxingQueueSize > 128
diff --git a/Jellyfin.Api/Helpers/TranscodingJobHelper.cs b/Jellyfin.Api/Helpers/TranscodingJobHelper.cs
index f435bbf00..9d80070eb 100644
--- a/Jellyfin.Api/Helpers/TranscodingJobHelper.cs
+++ b/Jellyfin.Api/Helpers/TranscodingJobHelper.cs
@@ -543,8 +543,7 @@ namespace Jellyfin.Api.Helpers
state,
cancellationTokenSource);
- var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
- _logger.LogInformation(commandLineLogMessage);
+ _logger.LogInformation("{Filename} {Arguments}", process.StartInfo.FileName, process.StartInfo.Arguments);
var logFilePrefix = "FFmpeg.Transcode-";
if (state.VideoRequest != null
@@ -562,8 +561,9 @@ namespace Jellyfin.Api.Helpers
// FFmpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory.
Stream logStream = new FileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, IODefaults.FileStreamBufferSize, FileOptions.Asynchronous);
+ var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(request.Path + Environment.NewLine + Environment.NewLine + JsonSerializer.Serialize(state.MediaSource) + Environment.NewLine + Environment.NewLine + commandLineLogMessage + Environment.NewLine + Environment.NewLine);
- await logStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationTokenSource.Token).ConfigureAwait(false);
+ await logStream.WriteAsync(commandLineLogMessageBytes, cancellationTokenSource.Token).ConfigureAwait(false);
process.Exited += (sender, args) => OnFfMpegProcessExited(process, transcodingJob, state);
diff --git a/Jellyfin.Api/Models/PlaybackDtos/TranscodingThrottler.cs b/Jellyfin.Api/Models/PlaybackDtos/TranscodingThrottler.cs
index 7b32d76ba..0136d9f86 100644
--- a/Jellyfin.Api/Models/PlaybackDtos/TranscodingThrottler.cs
+++ b/Jellyfin.Api/Models/PlaybackDtos/TranscodingThrottler.cs
@@ -197,7 +197,7 @@ namespace Jellyfin.Api.Models.PlaybackDtos
}
}
- _logger.LogDebug("No throttle data for " + path);
+ _logger.LogDebug("No throttle data for {Path}", path);
return false;
}
diff --git a/MediaBrowser.Controller/Entities/Folder.cs b/MediaBrowser.Controller/Entities/Folder.cs
index ffd1c7f0a..ec1ebaabe 100644
--- a/MediaBrowser.Controller/Entities/Folder.cs
+++ b/MediaBrowser.Controller/Entities/Folder.cs
@@ -425,7 +425,7 @@ namespace MediaBrowser.Controller.Entities
{
if (item.IsFileProtocol)
{
- Logger.LogDebug("Removed item: " + item.Path);
+ Logger.LogDebug("Removed item: {Path}", item.Path);
item.SetParent(null);
LibraryManager.DeleteItem(item, new DeleteOptions { DeleteFileLocation = false }, this, false);
@@ -807,7 +807,7 @@ namespace MediaBrowser.Controller.Entities
{
if (this is not ICollectionFolder)
{
- Logger.LogDebug("Query requires post-filtering due to LinkedChildren. Type: " + GetType().Name);
+ Logger.LogDebug("{Type}: Query requires post-filtering due to LinkedChildren.", GetType().Name);
return true;
}
}
diff --git a/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs b/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
index e6511ca8d..7d62fb6e1 100644
--- a/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
+++ b/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
@@ -100,32 +100,6 @@ namespace MediaBrowser.Controller.MediaEncoding
/// <returns>Location of video image.</returns>
Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream imageStream, int? imageStreamIndex, string outputExtension, CancellationToken cancellationToken);
- /// <summary>
- /// Extracts the video images on interval.
- /// </summary>
- /// <param name="inputFile">Input file.</param>
- /// <param name="container">Video container type.</param>
- /// <param name="videoStream">Media stream information.</param>
- /// <param name="mediaSource">Media source information.</param>
- /// <param name="threedFormat">Video 3D format.</param>
- /// <param name="interval">Time interval.</param>
- /// <param name="targetDirectory">Directory to write images.</param>
- /// <param name="filenamePrefix">Filename prefix to use.</param>
- /// <param name="maxWidth">Maximum width of image.</param>
- /// <param name="cancellationToken">CancellationToken to use for operation.</param>
- /// <returns>A task.</returns>
- Task ExtractVideoImagesOnInterval(
- string inputFile,
- string container,
- MediaStream videoStream,
- MediaSourceInfo mediaSource,
- Video3DFormat? threedFormat,
- TimeSpan interval,
- string targetDirectory,
- string filenamePrefix,
- int? maxWidth,
- CancellationToken cancellationToken);
-
/// <summary>
/// Gets the media info.
/// </summary>
diff --git a/MediaBrowser.LocalMetadata/Parsers/BaseItemXmlParser.cs b/MediaBrowser.LocalMetadata/Parsers/BaseItemXmlParser.cs
index 80eb45423..777fe6774 100644
--- a/MediaBrowser.LocalMetadata/Parsers/BaseItemXmlParser.cs
+++ b/MediaBrowser.LocalMetadata/Parsers/BaseItemXmlParser.cs
@@ -149,7 +149,7 @@ namespace MediaBrowser.LocalMetadata.Parsers
}
else
{
- Logger.LogWarning("Invalid Added value found: " + val);
+ Logger.LogWarning("Invalid Added value found: {Value}", val);
}
}
diff --git a/MediaBrowser.MediaEncoding/Attachments/AttachmentExtractor.cs b/MediaBrowser.MediaEncoding/Attachments/AttachmentExtractor.cs
index a524aeaa9..9ebc0d0cf 100644
--- a/MediaBrowser.MediaEncoding/Attachments/AttachmentExtractor.cs
+++ b/MediaBrowser.MediaEncoding/Attachments/AttachmentExtractor.cs
@@ -223,11 +223,10 @@ namespace MediaBrowser.MediaEncoding.Attachments
if (failed)
{
- var msg = $"ffmpeg attachment extraction failed for {inputPath} to {outputPath}";
+ _logger.LogError("ffmpeg attachment extraction failed for {InputPath} to {OutputPath}", inputPath, outputPath);
- _logger.LogError(msg);
-
- throw new InvalidOperationException(msg);
+ throw new InvalidOperationException(
+ string.Format(CultureInfo.InvariantCulture, "ffmpeg attachment extraction failed for {0} to {1}", inputPath, outputPath));
}
else
{
diff --git a/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs b/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs
index fbc7ba72f..a2bac7b49 100644
--- a/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs
+++ b/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs
@@ -682,11 +682,9 @@ namespace MediaBrowser.MediaEncoding.Encoder
if (exitCode == -1 || !file.Exists || file.Length == 0)
{
- var msg = string.Format(CultureInfo.InvariantCulture, "ffmpeg image extraction failed for {0}", inputPath);
+ _logger.LogError("ffmpeg image extraction failed for {Path}", inputPath);
- _logger.LogError(msg);
-
- throw new FfmpegException(msg);
+ throw new FfmpegException(string.Format(CultureInfo.InvariantCulture, "ffmpeg image extraction failed for {0}", inputPath));
}
return tempExtractPath;
@@ -705,117 +703,6 @@ namespace MediaBrowser.MediaEncoding.Encoder
return time.ToString(@"hh\:mm\:ss\.fff", CultureInfo.InvariantCulture);
}
- public async Task ExtractVideoImagesOnInterval(
- string inputFile,
- string container,
- MediaStream videoStream,
- MediaSourceInfo mediaSource,
- Video3DFormat? threedFormat,
- TimeSpan interval,
- string targetDirectory,
- string filenamePrefix,
- int? maxWidth,
- CancellationToken cancellationToken)
- {
- var inputArgument = GetInputArgument(inputFile, mediaSource);
-
- var vf = "fps=fps=1/" + interval.TotalSeconds.ToString(CultureInfo.InvariantCulture);
-
- if (maxWidth.HasValue)
- {
- var maxWidthParam = maxWidth.Value.ToString(CultureInfo.InvariantCulture);
-
- vf += string.Format(CultureInfo.InvariantCulture, ",scale=min(iw\\,{0}):trunc(ow/dar/2)*2", maxWidthParam);
- }
-
- Directory.CreateDirectory(targetDirectory);
- var outputPath = Path.Combine(targetDirectory, filenamePrefix + "%05d.jpg");
-
- var args = string.Format(CultureInfo.InvariantCulture, "-i {0} -threads {3} -v quiet {2} -f image2 \"{1}\"", inputArgument, outputPath, vf, _threads);
-
- if (!string.IsNullOrWhiteSpace(container))
- {
- var inputFormat = EncodingHelper.GetInputFormat(container);
- if (!string.IsNullOrWhiteSpace(inputFormat))
- {
- args = "-f " + inputFormat + " " + args;
- }
- }
-
- var processStartInfo = new ProcessStartInfo
- {
- CreateNoWindow = true,
- UseShellExecute = false,
- FileName = _ffmpegPath,
- Arguments = args,
- WindowStyle = ProcessWindowStyle.Hidden,
- ErrorDialog = false
- };
-
- _logger.LogInformation(processStartInfo.FileName + " " + processStartInfo.Arguments);
-
- await _thumbnailResourcePool.WaitAsync(cancellationToken).ConfigureAwait(false);
-
- bool ranToCompletion = false;
-
- var process = new Process
- {
- StartInfo = processStartInfo,
- EnableRaisingEvents = true
- };
- using (var processWrapper = new ProcessWrapper(process, this))
- {
- try
- {
- StartProcess(processWrapper);
-
- // Need to give ffmpeg enough time to make all the thumbnails, which could be a while,
- // but we still need to detect if the process hangs.
- // Making the assumption that as long as new jpegs are showing up, everything is good.
-
- bool isResponsive = true;
- int lastCount = 0;
-
- while (isResponsive)
- {
- if (await process.WaitForExitAsync(TimeSpan.FromSeconds(30)).ConfigureAwait(false))
- {
- ranToCompletion = true;
- break;
- }
-
- cancellationToken.ThrowIfCancellationRequested();
-
- var jpegCount = _fileSystem.GetFilePaths(targetDirectory)
- .Count(i => string.Equals(Path.GetExtension(i), ".jpg", StringComparison.OrdinalIgnoreCase));
-
- isResponsive = jpegCount > lastCount;
- lastCount = jpegCount;
- }
-
- if (!ranToCompletion)
- {
- StopProcess(processWrapper, 1000);
- }
- }
- finally
- {
- _thumbnailResourcePool.Release();
- }
-
- var exitCode = ranToCompletion ? processWrapper.ExitCode ?? 0 : -1;
-
- if (exitCode == -1)
- {
- var msg = string.Format(CultureInfo.InvariantCulture, "ffmpeg image extraction failed for {0}", inputArgument);
-
- _logger.LogError(msg);
-
- throw new FfmpegException(msg);
- }
- }
- }
-
private void StartProcess(ProcessWrapper process)
{
process.Process.Start();
diff --git a/MediaBrowser.MediaEncoding/Subtitles/SubtitleEncoder.cs b/MediaBrowser.MediaEncoding/Subtitles/SubtitleEncoder.cs
index 2b2de2ff6..89365a516 100644
--- a/MediaBrowser.MediaEncoding/Subtitles/SubtitleEncoder.cs
+++ b/MediaBrowser.MediaEncoding/Subtitles/SubtitleEncoder.cs
@@ -636,17 +636,14 @@ namespace MediaBrowser.MediaEncoding.Subtitles
if (failed)
{
- var msg = $"ffmpeg subtitle extraction failed for {inputPath} to {outputPath}";
+ _logger.LogError("ffmpeg subtitle extraction failed for {InputPath} to {OutputPath}", inputPath, outputPath);
- _logger.LogError(msg);
-
- throw new FfmpegException(msg);
+ throw new FfmpegException(
+ string.Format(CultureInfo.InvariantCulture, "ffmpeg subtitle extraction failed for {0} to {1}", inputPath, outputPath));
}
else
{
- var msg = $"ffmpeg subtitle extraction completed for {inputPath} to {outputPath}";
-
- _logger.LogInformation(msg);
+ _logger.LogInformation("ffmpeg subtitle extraction completed for {InputPath} to {OutputPath}", inputPath, outputPath);
}
if (string.Equals(outputCodec, "ass", StringComparison.OrdinalIgnoreCase))
diff --git a/MediaBrowser.XbmcMetadata/Parsers/SeriesNfoParser.cs b/MediaBrowser.XbmcMetadata/Parsers/SeriesNfoParser.cs
index 2c893ac9f..3011d65a6 100644
--- a/MediaBrowser.XbmcMetadata/Parsers/SeriesNfoParser.cs
+++ b/MediaBrowser.XbmcMetadata/Parsers/SeriesNfoParser.cs
@@ -103,7 +103,7 @@ namespace MediaBrowser.XbmcMetadata.Parsers
}
else
{
- Logger.LogInformation("Unrecognized series status: " + status);
+ Logger.LogInformation("Unrecognized series status: {Status}", status);
}
}
diff --git a/jellyfin.ruleset b/jellyfin.ruleset
index 3bced438c..e14c1c427 100644
--- a/jellyfin.ruleset
+++ b/jellyfin.ruleset
@@ -44,9 +44,13 @@
+
+
+
+
From f73a7a6ed8554a188809c955ddccb48445f4dd71 Mon Sep 17 00:00:00 2001
From: Joe Rogers <1337joe@gmail.com>
Date: Fri, 12 Nov 2021 16:11:15 +0100
Subject: Use ImageFormat instead of string for extension
---
.../MediaEncoding/IMediaEncoder.cs | 5 +--
MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs | 36 ++++++++++++----------
.../MediaInfo/EmbeddedImageProvider.cs | 13 ++++----
.../MediaInfo/EmbeddedImageProviderTests.cs | 18 +++++------
4 files changed, 38 insertions(+), 34 deletions(-)
(limited to 'MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs')
diff --git a/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs b/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
index 7d62fb6e1..1418e583e 100644
--- a/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
+++ b/MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs
@@ -7,6 +7,7 @@ using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using MediaBrowser.Model.Dlna;
+using MediaBrowser.Model.Drawing;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.MediaInfo;
@@ -95,10 +96,10 @@ namespace MediaBrowser.Controller.MediaEncoding
/// <param name="mediaSource">Media source information.</param>
/// <param name="imageStream">Media stream information.</param>
/// <param name="imageStreamIndex">Index of the stream to extract from.</param>
- /// <param name="outputExtension">The extension of the file to write, including the '.'.</param>
+ /// <param name="targetFormat">The format of the file to write.</param>
/// <param name="cancellationToken">CancellationToken to use for operation.</param>
/// <returns>Location of video image.</returns>
- Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream imageStream, int? imageStreamIndex, string outputExtension, CancellationToken cancellationToken);
+ Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream imageStream, int? imageStreamIndex, ImageFormat? targetFormat, CancellationToken cancellationToken);
/// <summary>
/// Gets the media info.
diff --git a/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs b/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs
index a2bac7b49..1c97a1982 100644
--- a/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs
+++ b/MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs
@@ -19,6 +19,7 @@ using MediaBrowser.Controller.Configuration;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.MediaEncoding.Probing;
using MediaBrowser.Model.Dlna;
+using MediaBrowser.Model.Drawing;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.Globalization;
@@ -478,17 +479,17 @@ namespace MediaBrowser.MediaEncoding.Encoder
Protocol = MediaProtocol.File
};
- return ExtractImage(path, null, null, imageStreamIndex, mediaSource, true, null, null, ".jpg", cancellationToken);
+ return ExtractImage(path, null, null, imageStreamIndex, mediaSource, true, null, null, ImageFormat.Jpg, cancellationToken);
}
public Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream videoStream, Video3DFormat? threedFormat, TimeSpan? offset, CancellationToken cancellationToken)
{
- return ExtractImage(inputFile, container, videoStream, null, mediaSource, false, threedFormat, offset, ".jpg", cancellationToken);
+ return ExtractImage(inputFile, container, videoStream, null, mediaSource, false, threedFormat, offset, ImageFormat.Jpg, cancellationToken);
}
- public Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream imageStream, int? imageStreamIndex, string outputExtension, CancellationToken cancellationToken)
+ public Task<string> ExtractVideoImage(string inputFile, string container, MediaSourceInfo mediaSource, MediaStream imageStream, int? imageStreamIndex, ImageFormat? targetFormat, CancellationToken cancellationToken)
{
- return ExtractImage(inputFile, container, imageStream, imageStreamIndex, mediaSource, false, null, null, outputExtension, cancellationToken);
+ return ExtractImage(inputFile, container, imageStream, imageStreamIndex, mediaSource, false, null, null, targetFormat, cancellationToken);
}
private async Task<string> ExtractImage(
@@ -500,7 +501,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
bool isAudio,
Video3DFormat? threedFormat,
TimeSpan? offset,
- string outputExtension,
+ ImageFormat? targetFormat,
CancellationToken cancellationToken)
{
var inputArgument = GetInputArgument(inputFile, mediaSource);
@@ -510,7 +511,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
// The failure of HDR extraction usually occurs when using custom ffmpeg that does not contain the zscale filter.
try
{
- return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, true, true, outputExtension, cancellationToken).ConfigureAwait(false);
+ return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, true, true, targetFormat, cancellationToken).ConfigureAwait(false);
}
catch (ArgumentException)
{
@@ -523,7 +524,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
try
{
- return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, false, true, outputExtension, cancellationToken).ConfigureAwait(false);
+ return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, false, true, targetFormat, cancellationToken).ConfigureAwait(false);
}
catch (ArgumentException)
{
@@ -536,7 +537,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
try
{
- return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, true, false, outputExtension, cancellationToken).ConfigureAwait(false);
+ return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, true, false, targetFormat, cancellationToken).ConfigureAwait(false);
}
catch (ArgumentException)
{
@@ -548,24 +549,25 @@ namespace MediaBrowser.MediaEncoding.Encoder
}
}
- return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, false, false, outputExtension, cancellationToken).ConfigureAwait(false);
+ return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, false, false, targetFormat, cancellationToken).ConfigureAwait(false);
}
- private async Task<string> ExtractImageInternal(string inputPath, string container, MediaStream videoStream, int? imageStreamIndex, Video3DFormat? threedFormat, TimeSpan? offset, bool useIFrame, bool allowTonemap, string outputExtension, CancellationToken cancellationToken)
+ private async Task<string> ExtractImageInternal(string inputPath, string container, MediaStream videoStream, int? imageStreamIndex, Video3DFormat? threedFormat, TimeSpan? offset, bool useIFrame, bool allowTonemap, ImageFormat? targetFormat, CancellationToken cancellationToken)
{
if (string.IsNullOrEmpty(inputPath))
{
throw new ArgumentNullException(nameof(inputPath));
}
- if (string.IsNullOrEmpty(outputExtension))
+ var outputExtension = targetFormat switch
{
- outputExtension = ".jpg";
- }
- else if (outputExtension[0] != '.')
- {
- outputExtension = "." + outputExtension;
- }
+ ImageFormat.Bmp => ".bmp",
+ ImageFormat.Gif => ".gif",
+ ImageFormat.Jpg => ".jpg",
+ ImageFormat.Png => ".png",
+ ImageFormat.Webp => ".webp",
+ _ => ".jpg"
+ };
var tempExtractPath = Path.Combine(_configurationManager.ApplicationPaths.TempDirectory, Guid.NewGuid() + outputExtension);
Directory.CreateDirectory(Path.GetDirectoryName(tempExtractPath));
diff --git a/MediaBrowser.Providers/MediaInfo/EmbeddedImageProvider.cs b/MediaBrowser.Providers/MediaInfo/EmbeddedImageProvider.cs
index ca0e72e49..79189416e 100644
--- a/MediaBrowser.Providers/MediaInfo/EmbeddedImageProvider.cs
+++ b/MediaBrowser.Providers/MediaInfo/EmbeddedImageProvider.cs
@@ -156,13 +156,14 @@ namespace MediaBrowser.Providers.MediaInfo
}
}
+ var format = ImageFormat.Jpg;
string extractedImagePath =
- await _mediaEncoder.ExtractVideoImage(item.Path, item.Container, mediaSource, imageStream, imageStream.Index, ".jpg", cancellationToken)
+ await _mediaEncoder.ExtractVideoImage(item.Path, item.Container, mediaSource, imageStream, imageStream.Index, format, cancellationToken)
.ConfigureAwait(false);
return new DynamicImageResponse
{
- Format = ImageFormat.Jpg,
+ Format = format,
HasImage = true,
Path = extractedImagePath,
Protocol = MediaProtocol.File
@@ -180,10 +181,6 @@ namespace MediaBrowser.Providers.MediaInfo
extension = ".jpg";
}
- string extractedAttachmentPath =
- await _mediaEncoder.ExtractVideoImage(item.Path, item.Container, mediaSource, null, attachmentStream.Index, extension, cancellationToken)
- .ConfigureAwait(false);
-
ImageFormat format = extension switch
{
".bmp" => ImageFormat.Bmp,
@@ -194,6 +191,10 @@ namespace MediaBrowser.Providers.MediaInfo
_ => ImageFormat.Jpg
};
+ string extractedAttachmentPath =
+ await _mediaEncoder.ExtractVideoImage(item.Path, item.Container, mediaSource, null, attachmentStream.Index, format, cancellationToken)
+ .ConfigureAwait(false);
+
return new DynamicImageResponse
{
Format = format,
diff --git a/tests/Jellyfin.Providers.Tests/MediaInfo/EmbeddedImageProviderTests.cs b/tests/Jellyfin.Providers.Tests/MediaInfo/EmbeddedImageProviderTests.cs
index 19391ba68..b6d6c3b25 100644
--- a/tests/Jellyfin.Providers.Tests/MediaInfo/EmbeddedImageProviderTests.cs
+++ b/tests/Jellyfin.Providers.Tests/MediaInfo/EmbeddedImageProviderTests.cs
@@ -57,7 +57,7 @@ namespace Jellyfin.Providers.Tests.MediaInfo
for (int i = 1; i <= targetIndex; i++)
{
var name = i == targetIndex ? filename : "unmatched";
- attachments.Add(new()
+ attachments.Add(new ()
{
FileName = name,
MimeType = mimetype,
@@ -66,8 +66,8 @@ namespace Jellyfin.Providers.Tests.MediaInfo
}
var mediaEncoder = new Mock(MockBehavior.Strict);
- mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()))
- .Returns((_, _, _, _, index, ext, _) => Task.FromResult(pathPrefix + index + ext));
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()))
+ .Returns((_, _, _, _, index, ext, _) => Task.FromResult(pathPrefix + index + "." + ext));
var embeddedImageProvider = new EmbeddedImageProvider(mediaEncoder.Object);
var input = GetMovie(attachments, new List());
@@ -81,7 +81,7 @@ namespace Jellyfin.Providers.Tests.MediaInfo
else
{
Assert.True(actual.HasImage);
- Assert.Equal(pathPrefix + targetIndex + "." + format, actual.Path, StringComparer.InvariantCultureIgnoreCase);
+ Assert.Equal(pathPrefix + targetIndex + "." + format, actual.Path, StringComparer.OrdinalIgnoreCase);
Assert.Equal(format, actual.Format);
}
}
@@ -97,7 +97,7 @@ namespace Jellyfin.Providers.Tests.MediaInfo
for (int i = 1; i <= targetIndex; i++)
{
var comment = i == targetIndex ? label : "unmatched";
- streams.Add(new()
+ streams.Add(new ()
{
Type = MediaStreamType.EmbeddedImage,
Index = i,
@@ -107,11 +107,11 @@ namespace Jellyfin.Providers.Tests.MediaInfo
var pathPrefix = "path";
var mediaEncoder = new Mock(MockBehavior.Strict);
- mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()))
- .Returns((_, _, _, stream, index, ext, _) =>
+ mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()))
+ .Returns((_, _, _, stream, index, ext, _) =>
{
Assert.Equal(streams[index - 1], stream);
- return Task.FromResult(pathPrefix + index + ext);
+ return Task.FromResult(pathPrefix + index + "." + ext);
});
var embeddedImageProvider = new EmbeddedImageProvider(mediaEncoder.Object);
@@ -122,7 +122,7 @@ namespace Jellyfin.Providers.Tests.MediaInfo
Assert.Equal(hasImage, actual.HasImage);
if (hasImage)
{
- Assert.Equal(pathPrefix + targetIndex + ".jpg", actual.Path);
+ Assert.Equal(pathPrefix + targetIndex + ".jpg", actual.Path, StringComparer.OrdinalIgnoreCase);
Assert.Equal(ImageFormat.Jpg, actual.Format);
}
}
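Note that this dump has lost angle-bracketed generic arguments (hence the bare It.IsAny() and new Mock(...) calls above). A hedged reconstruction of how the Moq setup plausibly reads with the generics restored; the parameter types are inferred from the seven-argument callback and may not match the repository exactly:

// Hedged reconstruction, not a quote from the patch; pathPrefix comes from the surrounding test.
var mediaEncoder = new Mock<IMediaEncoder>(MockBehavior.Strict);
mediaEncoder.Setup(encoder => encoder.ExtractVideoImage(
        It.IsAny<string>(),           // inputFile
        It.IsAny<string>(),           // container
        It.IsAny<MediaSourceInfo>(),  // mediaSource
        It.IsAny<MediaStream>(),      // imageStream
        It.IsAny<int?>(),             // imageStreamIndex
        It.IsAny<ImageFormat>(),      // target image format (new in this series)
        It.IsAny<CancellationToken>()))
    .Returns<string, string, MediaSourceInfo, MediaStream, int?, ImageFormat, CancellationToken>(
        (_, _, _, _, index, ext, _) => Task.FromResult(pathPrefix + index + "." + ext));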
--
cgit v1.2.3
From 4b9c84c52e884e6d35d9bfdc41cbfc04f77b156c Mon Sep 17 00:00:00 2001
From: nyanmisaka
Date: Thu, 2 Dec 2021 00:49:50 +0800
Subject: EncodingHelper hwaccel pipelines refactor
Separate the HW pipelines by HWA method for better maintainability.
---
Jellyfin.Api/Controllers/DynamicHlsController.cs | 22 +-
Jellyfin.Api/Controllers/VideoHlsController.cs | 24 +-
.../MediaEncoding/EncodingHelper.cs | 4195 +++++++++++++-------
.../MediaEncoding/FilterOptionType.cs | 12 +-
.../MediaEncoding/IMediaEncoder.cs | 18 +
.../Encoder/EncoderValidator.cs | 82 +-
MediaBrowser.MediaEncoding/Encoder/MediaEncoder.cs | 51 +-
.../Probing/ProbeResultNormalizer.cs | 27 +-
.../Configuration/EncodingOptions.cs | 14 +-
9 files changed, 2841 insertions(+), 1604 deletions(-)
(limited to 'MediaBrowser.Controller/MediaEncoding/IMediaEncoder.cs')
diff --git a/Jellyfin.Api/Controllers/DynamicHlsController.cs b/Jellyfin.Api/Controllers/DynamicHlsController.cs
index caa3d2368..769f3372d 100644
--- a/Jellyfin.Api/Controllers/DynamicHlsController.cs
+++ b/Jellyfin.Api/Controllers/DynamicHlsController.cs
@@ -1567,24 +1567,18 @@ namespace Jellyfin.Api.Controllers
// args += " -mixed-refs 0 -refs 3 -x264opts b_pyramid=0:weightb=0:weightp=0";
- var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
-
- if (hasGraphicalSubs)
- {
- // Graphical subs overlay and resolution params.
- args += _encodingHelper.GetGraphicalSubtitleParam(state, _encodingOptions, codec);
- }
- else
- {
- // Resolution params.
- args += _encodingHelper.GetOutputSizeParam(state, _encodingOptions, codec);
- }
+ // video processing filters.
+ args += _encodingHelper.GetVideoProcessingFilterParam(state, _encodingOptions, codec);
// -start_at_zero is necessary to use with -ss when seeking,
// otherwise the target position cannot be determined.
- if (!(state.SubtitleStream != null && state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream))
+ if (state.SubtitleStream != null)
{
- args += " -start_at_zero";
+ // Disable start_at_zero for external graphical subs
+ if (!(state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream))
+ {
+ args += " -start_at_zero";
+ }
}
// args += " -flags -global_header";
diff --git a/Jellyfin.Api/Controllers/VideoHlsController.cs b/Jellyfin.Api/Controllers/VideoHlsController.cs
index ef25db8c9..2f9565497 100644
--- a/Jellyfin.Api/Controllers/VideoHlsController.cs
+++ b/Jellyfin.Api/Controllers/VideoHlsController.cs
@@ -552,22 +552,18 @@ namespace Jellyfin.Api.Controllers
args += " -bf 0";
}
- var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
+ // video processing filters.
+ args += _encodingHelper.GetVideoProcessingFilterParam(state, _encodingOptions, codec);
- if (hasGraphicalSubs)
+ // -start_at_zero is necessary to use with -ss when seeking,
+ // otherwise the target position cannot be determined.
+ if (state.SubtitleStream != null)
{
- // Graphical subs overlay and resolution params.
- args += _encodingHelper.GetGraphicalSubtitleParam(state, _encodingOptions, codec);
- }
- else
- {
- // Resolution params.
- args += _encodingHelper.GetOutputSizeParam(state, _encodingOptions, codec);
- }
-
- if (state.SubtitleStream == null || !state.SubtitleStream.IsExternal || state.SubtitleStream.IsTextSubtitleStream)
- {
- args += " -start_at_zero";
+ // Disable start_at_zero for external graphical subs
+ if (!(state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream))
+ {
+ args += " -start_at_zero";
+ }
}
}
diff --git a/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs b/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs
index 5715194b8..75a36d815 100644
--- a/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs
+++ b/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs
@@ -24,7 +24,7 @@ namespace MediaBrowser.Controller.MediaEncoding
private readonly IMediaEncoder _mediaEncoder;
private readonly ISubtitleEncoder _subtitleEncoder;
- private static readonly string[] _videoProfiles = new[]
+ private static readonly string[] _videoProfilesH264 = new[]
{
"ConstrainedBaseline",
"Baseline",
@@ -32,10 +32,22 @@ namespace MediaBrowser.Controller.MediaEncoding
"Main",
"High",
"ProgressiveHigh",
- "ConstrainedHigh"
+ "ConstrainedHigh",
+ "High10"
};
- private static readonly Version _minVersionForCudaOverlay = new Version(4, 4);
+ private static readonly string[] _videoProfilesH265 = new[]
+ {
+ "Main",
+ "Main10"
+ };
+
+ private static readonly string _qsvAlias = "qs";
+ private static readonly string _vaapiAlias = "va";
+ private static readonly string _d3d11vaAlias = "dx11";
+ private static readonly string _videotoolboxAlias = "vt";
+ private static readonly string _openclAlias = "ocl";
+ private static readonly string _cudaAlias = "cu";
public EncodingHelper(
IMediaEncoder mediaEncoder,
@@ -62,15 +74,13 @@ namespace MediaBrowser.Controller.MediaEncoding
var codecMap = new Dictionary(StringComparer.OrdinalIgnoreCase)
{
- { "qsv", hwEncoder + "_qsv" },
- { hwEncoder + "_qsv", hwEncoder + "_qsv" },
- { "nvenc", hwEncoder + "_nvenc" },
{ "amf", hwEncoder + "_amf" },
- { "omx", hwEncoder + "_omx" },
- { hwEncoder + "_v4l2m2m", hwEncoder + "_v4l2m2m" },
- { "mediacodec", hwEncoder + "_mediacodec" },
+ { "nvenc", hwEncoder + "_nvenc" },
+ { "qsv", hwEncoder + "_qsv" },
{ "vaapi", hwEncoder + "_vaapi" },
- { "videotoolbox", hwEncoder + "_videotoolbox" }
+ { "videotoolbox", hwEncoder + "_videotoolbox" },
+ { "v4l2m2m", hwEncoder + "_v4l2m2m" },
+ { "omx", hwEncoder + "_omx" },
};
if (!string.IsNullOrEmpty(hwType)
@@ -91,11 +101,9 @@ namespace MediaBrowser.Controller.MediaEncoding
private bool IsVaapiSupported(EncodingJobInfo state)
{
- var videoStream = state.VideoStream;
-
// vaapi will throw an error with this input
// [vaapi @ 0x7faed8000960] No VAAPI support for codec mpeg4 profile -99.
- if (string.Equals(videoStream?.Codec, "mpeg4", StringComparison.OrdinalIgnoreCase))
+ if (string.Equals(state.VideoStream?.Codec, "mpeg4", StringComparison.OrdinalIgnoreCase))
{
return false;
}
@@ -103,82 +111,59 @@ namespace MediaBrowser.Controller.MediaEncoding
return _mediaEncoder.SupportsHwaccel("vaapi");
}
- private bool IsCudaSupported()
+ private bool IsVaapiFullSupported()
{
- return _mediaEncoder.SupportsHwaccel("cuda")
- && _mediaEncoder.SupportsFilter("scale_cuda")
- && _mediaEncoder.SupportsFilter("yadif_cuda")
- && _mediaEncoder.SupportsFilter("hwupload_cuda");
+ return _mediaEncoder.SupportsHwaccel("vaapi")
+ && _mediaEncoder.SupportsFilter("scale_vaapi")
+ && _mediaEncoder.SupportsFilter("deinterlace_vaapi")
+ && _mediaEncoder.SupportsFilter("tonemap_vaapi")
+ && _mediaEncoder.SupportsFilterWithOption(FilterOptionType.OverlayVaapiFrameSync)
+ && _mediaEncoder.SupportsFilter("hwupload_vaapi");
}
- private bool IsOpenclTonemappingSupported(EncodingJobInfo state, EncodingOptions options)
+ private bool IsOpenclFullSupported()
{
- var videoStream = state.VideoStream;
- if (videoStream == null)
- {
- return false;
- }
+ return _mediaEncoder.SupportsHwaccel("opencl")
+ && _mediaEncoder.SupportsFilter("scale_opencl")
+ && _mediaEncoder.SupportsFilterWithOption(FilterOptionType.TonemapOpenclBt2390)
+ && _mediaEncoder.SupportsFilterWithOption(FilterOptionType.OverlayOpenclFrameSync);
+ }
- return options.EnableTonemapping
- && (string.Equals(videoStream.ColorTransfer, "smpte2084", StringComparison.OrdinalIgnoreCase)
- || string.Equals(videoStream.ColorTransfer, "arib-std-b67", StringComparison.OrdinalIgnoreCase))
- && IsColorDepth10(state)
- && _mediaEncoder.SupportsHwaccel("opencl")
- && _mediaEncoder.SupportsFilter("tonemap_opencl");
+ private bool IsCudaFullSupported()
+ {
+ return _mediaEncoder.SupportsHwaccel("cuda")
+ && _mediaEncoder.SupportsFilterWithOption(FilterOptionType.ScaleCudaFormat)
+ && _mediaEncoder.SupportsFilter("yadif_cuda")
+ && _mediaEncoder.SupportsFilterWithOption(FilterOptionType.TonemapCudaName)
+ && _mediaEncoder.SupportsFilter("overlay_cuda")
+ && _mediaEncoder.SupportsFilter("hwupload_cuda");
}
- private bool IsCudaTonemappingSupported(EncodingJobInfo state, EncodingOptions options)
+ private bool IsHwTonemapAvailable(EncodingJobInfo state, EncodingOptions options)
{
- var videoStream = state.VideoStream;
- if (videoStream == null)
+ if (state.VideoStream == null)
{
return false;
}
return options.EnableTonemapping
- && (string.Equals(videoStream.ColorTransfer, "smpte2084", StringComparison.OrdinalIgnoreCase)
- || string.Equals(videoStream.ColorTransfer, "arib-std-b67", StringComparison.OrdinalIgnoreCase))
- && IsColorDepth10(state)
- && _mediaEncoder.SupportsHwaccel("cuda")
- && _mediaEncoder.SupportsFilterWithOption(FilterOptionType.TonemapCudaName);
+ && (string.Equals(state.VideoStream.ColorTransfer, "smpte2084", StringComparison.OrdinalIgnoreCase)
+ || string.Equals(state.VideoStream.ColorTransfer, "arib-std-b67", StringComparison.OrdinalIgnoreCase))
+ && GetVideoColorBitDepth(state) == 10;
}
- private bool IsVppTonemappingSupported(EncodingJobInfo state, EncodingOptions options)
+ private bool IsVaapiVppTonemapAvailable(EncodingJobInfo state, EncodingOptions options)
{
- var videoStream = state.VideoStream;
- if (videoStream == null)
+ if (state.VideoStream == null)
{
- // Remote stream doesn't have media info, disable vpp tonemapping.
return false;
}
- var codec = videoStream.Codec;
- if (string.Equals(options.HardwareAccelerationType, "vaapi", StringComparison.OrdinalIgnoreCase))
- {
- // Limited to HEVC for now since the filter doesn't accept master data from VP9.
- return options.EnableVppTonemapping
- && string.Equals(videoStream.ColorTransfer, "smpte2084", StringComparison.OrdinalIgnoreCase)
- && IsColorDepth10(state)
- && string.Equals(codec, "hevc", StringComparison.OrdinalIgnoreCase)
- && _mediaEncoder.SupportsHwaccel("vaapi")
- && _mediaEncoder.SupportsFilter("tonemap_vaapi");
- }
-
- // Hybrid VPP tonemapping for QSV with VAAPI
- if (OperatingSystem.IsLinux() && string.Equals(options.HardwareAccelerationType, "qsv", StringComparison.OrdinalIgnoreCase))
- {
- // Limited to HEVC for now since the filter doesn't accept master data from VP9.
- return options.EnableVppTonemapping
- && string.Equals(videoStream.ColorTransfer, "smpte2084", StringComparison.OrdinalIgnoreCase)
- && IsColorDepth10(state)
- && string.Equals(codec, "hevc", StringComparison.OrdinalIgnoreCase)
- && _mediaEncoder.SupportsHwaccel("vaapi")
- && _mediaEncoder.SupportsFilter("tonemap_vaapi")
- && _mediaEncoder.SupportsHwaccel("qsv");
- }
-
// Native VPP tonemapping may come to QSV in the future.
- return false;
+
+ return options.EnableVppTonemapping
+ && string.Equals(state.VideoStream.ColorTransfer, "smpte2084", StringComparison.OrdinalIgnoreCase)
+ && GetVideoColorBitDepth(state) == 10;
}
///
@@ -463,11 +448,20 @@ namespace MediaBrowser.Controller.MediaEncoding
return "copy";
}
- public int GetVideoProfileScore(string profile)
+ public int GetVideoProfileScore(string videoCodec, string videoProfile)
{
// strip spaces because they may be stripped out on the query string
- profile = profile.Replace(" ", string.Empty, StringComparison.Ordinal);
- return Array.FindIndex(_videoProfiles, x => string.Equals(x, profile, StringComparison.OrdinalIgnoreCase));
+ string profile = videoProfile.Replace(" ", string.Empty, StringComparison.Ordinal);
+ if (string.Equals("h264", videoCodec, StringComparison.OrdinalIgnoreCase))
+ {
+ return Array.FindIndex(_videoProfilesH264, x => string.Equals(x, profile, StringComparison.OrdinalIgnoreCase));
+ }
+ else if (string.Equals("hevc", videoCodec, StringComparison.OrdinalIgnoreCase))
+ {
+ return Array.FindIndex(_videoProfilesH265, x => string.Equals(x, profile, StringComparison.OrdinalIgnoreCase));
+ }
+
+ return -1;
}
public string GetInputPathArgument(EncodingJobInfo state)
@@ -526,161 +520,359 @@ namespace MediaBrowser.Controller.MediaEncoding
return codec.ToLowerInvariant();
}
+ public string GetVideoToolboxDeviceArgs(string alias)
+ {
+ alias ??= _videotoolboxAlias;
+
+ // device selection in vt is not supported.
+ return " -init_hw_device videotoolbox=" + alias;
+ }
+
+ public string GetCudaDeviceArgs(int deviceIndex, string alias)
+ {
+ alias ??= _cudaAlias;
+ deviceIndex = deviceIndex >= 0
+ ? deviceIndex
+ : 0;
+
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ " -init_hw_device cuda={0}:{1}",
+ alias, deviceIndex);
+ }
+
+ public string GetOpenclDeviceArgs(int deviceIndex, string deviceVendorName, string srcDeviceAlias, string alias)
+ {
+ alias ??= _openclAlias;
+ deviceIndex = deviceIndex >= 0
+ ? deviceIndex
+ : 0;
+ var vendorOpts = !string.IsNullOrEmpty(deviceVendorName)
+ ? (":." + deviceIndex + ",device_vendor=\"" + deviceVendorName + "\"")
+ : ":0.0";
+ var options = !string.IsNullOrEmpty(srcDeviceAlias)
+ ? ("@" + srcDeviceAlias)
+ : vendorOpts;
+
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ " -init_hw_device opencl={0}{1}",
+ alias, options);
+ }
+
+ public string GetD3d11vaDeviceArgs(int deviceIndex, string deviceVendorId, string alias)
+ {
+ alias ??= _d3d11vaAlias;
+ deviceIndex = deviceIndex >= 0 ? deviceIndex : 0;
+ var options = !string.IsNullOrEmpty(deviceVendorId)
+ ? (",vendor=" + deviceVendorId)
+ : Convert.ToString(deviceIndex, CultureInfo.InvariantCulture);
+
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ " -init_hw_device d3d11va={0}:{1}",
+ alias, options);
+ }
+
+ public string GetVaapiDeviceArgs(string renderNodePath, string kernelDriver, string driver, string alias)
+ {
+ alias ??= _vaapiAlias;
+ renderNodePath = renderNodePath ?? "/dev/dri/renderD128";
+ var options = (!string.IsNullOrEmpty(kernelDriver) && !string.IsNullOrEmpty(driver))
+ ? (",kernel_driver=" + kernelDriver + ",driver=" + driver)
+ : renderNodePath;
+
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ " -init_hw_device vaapi={0}:{1}",
+ alias, options);
+ }
+
+ public string GetQsvDeviceArgs(string alias)
+ {
+ var arg = " -init_hw_device qsv=" + (alias ?? _qsvAlias);
+ var args = new StringBuilder();
+ var isWindows = OperatingSystem.IsWindows();
+ var isLinux = OperatingSystem.IsLinux();
+ if (isLinux)
+ {
+ // derive qsv from vaapi device
+ string srcAlias = _vaapiAlias;
+ args.Append(GetVaapiDeviceArgs(null, "i915", "iHD", srcAlias))
+ .Append(arg + "@" + srcAlias);
+ }
+ else if (isWindows)
+ {
+ // derive qsv from d3d11va device
+ string srcAlias = _d3d11vaAlias;
+ args.Append(GetD3d11vaDeviceArgs(0, "0x8086", srcAlias))
+ .Append(arg + "@" + srcAlias);
+ }
+ else
+ {
+ return null;
+ }
+
+ return args.ToString();
+ }
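For reference, the strings the QSV helper above produces, read directly off the code (the render-node and vendor details are the defaults hard-coded here):

// Illustrative output of GetQsvDeviceArgs("qs"):
// Linux   - QSV derived from an Intel VAAPI device:
//   " -init_hw_device vaapi=va:,kernel_driver=i915,driver=iHD -init_hw_device qsv=qs@va"
// Windows - QSV derived from a D3D11VA device limited to Intel (vendor 0x8086):
//   " -init_hw_device d3d11va=dx11:,vendor=0x8086 -init_hw_device qsv=qs@dx11"
// Any other OS: the method returns null.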
+
+ public string GetFilterHwDeviceArgs(string alias)
+ {
+ return !string.IsNullOrEmpty(alias)
+ ? (" -filter_hw_device " + alias)
+ : string.Empty;
+ }
+
+ public string GetGraphicalSubCanvasSize(EncodingJobInfo state)
+ {
+ if (state.SubtitleStream != null
+ && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode
+ && !state.SubtitleStream.IsTextSubtitleStream)
+ {
+ var inW = state.VideoStream?.Width;
+ var inH = state.VideoStream?.Height;
+ var reqW = state.BaseRequest.Width;
+ var reqH = state.BaseRequest.Height;
+ var reqMaxW = state.BaseRequest.MaxWidth;
+ var reqMaxH = state.BaseRequest.MaxHeight;
+
+ // set up a relatively small canvas_size for overlay_qsv to reduce transfer overhead
+ var (overlayW, overlayH) = GetFixedOutputSize(inW, inH, reqW, reqH, reqMaxW, 1080);
+
+ if (overlayW.HasValue && overlayH.HasValue)
+ {
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ " -canvas_size {0}x{1}",
+ overlayW.Value,
+ overlayH.Value);
+ }
+ }
+
+ return string.Empty;
+ }
+
///
- /// Gets the input argument.
+ /// Gets the input video hwaccel argument.
///
/// Encoding state.
/// Encoding options.
- /// Input arguments.
- public string GetInputArgument(EncodingJobInfo state, EncodingOptions options)
+ /// Input video hwaccel arguments.
+ public string GetInputVideoHwaccelArgs(EncodingJobInfo state, EncodingOptions options)
{
- var arg = new StringBuilder();
- var videoDecoder = GetHardwareAcceleratedVideoDecoder(state, options) ?? string.Empty;
- var outputVideoCodec = GetVideoEncoder(state, options) ?? string.Empty;
+ if (!state.IsVideoRequest)
+ {
+ return null;
+ }
+
+ var vidEncoder = GetVideoEncoder(state, options) ?? string.Empty;
+ if (IsCopyCodec(vidEncoder))
+ {
+ return null;
+ }
+
+ var args = new StringBuilder();
var isWindows = OperatingSystem.IsWindows();
var isLinux = OperatingSystem.IsLinux();
var isMacOS = OperatingSystem.IsMacOS();
+ var optHwaccelType = options.HardwareAccelerationType;
+ var vidDecoder = GetHardwareVideoDecoder(state, options) ?? string.Empty;
#pragma warning disable CA1508 // Defaults to string.Empty
- var isSwDecoder = string.IsNullOrEmpty(videoDecoder);
+ var isSwVidDecoder = string.IsNullOrEmpty(vidDecoder);
#pragma warning restore CA1508
- var isD3d11vaDecoder = videoDecoder.IndexOf("d3d11va", StringComparison.OrdinalIgnoreCase) != -1;
- var isVaapiDecoder = videoDecoder.IndexOf("vaapi", StringComparison.OrdinalIgnoreCase) != -1;
- var isVaapiEncoder = outputVideoCodec.IndexOf("vaapi", StringComparison.OrdinalIgnoreCase) != -1;
- var isQsvDecoder = videoDecoder.IndexOf("qsv", StringComparison.OrdinalIgnoreCase) != -1;
- var isQsvEncoder = outputVideoCodec.IndexOf("qsv", StringComparison.OrdinalIgnoreCase) != -1;
- var isNvdecDecoder = videoDecoder.Contains("cuda", StringComparison.OrdinalIgnoreCase);
- var isCuvidHevcDecoder = videoDecoder.Contains("hevc_cuvid", StringComparison.OrdinalIgnoreCase);
- var isCuvidVp9Decoder = videoDecoder.Contains("vp9_cuvid", StringComparison.OrdinalIgnoreCase);
- var isOpenclTonemappingSupported = IsOpenclTonemappingSupported(state, options);
- var isVppTonemappingSupported = IsVppTonemappingSupported(state, options);
- var isCudaTonemappingSupported = IsCudaTonemappingSupported(state, options);
-
- if (!IsCopyCodec(outputVideoCodec))
- {
- if (state.IsVideoRequest
- && _mediaEncoder.SupportsHwaccel("vaapi")
- && string.Equals(options.HardwareAccelerationType, "vaapi", StringComparison.OrdinalIgnoreCase))
- {
- if (isVaapiDecoder)
+ var isHwTonemapAvailable = IsHwTonemapAvailable(state, options);
+
+ if (string.Equals(optHwaccelType, "vaapi", StringComparison.OrdinalIgnoreCase))
+ {
+ if (!isLinux || !_mediaEncoder.SupportsHwaccel("vaapi"))
+ {
+ return string.Empty;
+ }
+
+ var isVaapiDecoder = vidDecoder.Contains("vaapi", StringComparison.OrdinalIgnoreCase);
+ var isVaapiEncoder = vidEncoder.Contains("vaapi", StringComparison.OrdinalIgnoreCase);
+ if (!isVaapiDecoder && !isVaapiEncoder)
+ {
+ return string.Empty;
+ }
+
+ var vaArgs = GetVaapiDeviceArgs(options.VaapiDevice, null, null, _vaapiAlias);
+ var filterDevArgs = GetFilterHwDeviceArgs(_vaapiAlias);
+
+ if (isHwTonemapAvailable && IsOpenclFullSupported())
+ {
+ if (_mediaEncoder.IsVaapiDeviceInteliHD() || _mediaEncoder.IsVaapiDeviceInteli965())
{
- if (isOpenclTonemappingSupported && !isVppTonemappingSupported)
+ if (!isVaapiDecoder)
{
- arg.Append("-init_hw_device vaapi=va:")
- .Append(options.VaapiDevice)
- .Append(" -init_hw_device opencl=ocl@va ")
- .Append("-hwaccel_device va ")
- .Append("-hwaccel_output_format vaapi ")
- .Append("-filter_hw_device ocl ");
- }
- else
- {
- arg.Append("-hwaccel_output_format vaapi ")
- .Append("-vaapi_device ")
- .Append(options.VaapiDevice)
- .Append(' ');
+ vaArgs += GetOpenclDeviceArgs(0, null, _vaapiAlias, _openclAlias);
+ filterDevArgs = GetFilterHwDeviceArgs(_openclAlias);
}
}
- else if (!isVaapiDecoder && isVaapiEncoder)
+ else if (_mediaEncoder.IsVaapiDeviceAmd())
+ {
+ vaArgs += GetOpenclDeviceArgs(0, "Advanced Micro Devices", null, _openclAlias);
+ filterDevArgs = GetFilterHwDeviceArgs(_openclAlias);
+ }
+ else
{
- arg.Append("-vaapi_device ")
- .Append(options.VaapiDevice)
- .Append(' ');
+ vaArgs += GetOpenclDeviceArgs(0, null, null, _openclAlias);
+ filterDevArgs = GetFilterHwDeviceArgs(_openclAlias);
}
+ }
- arg.Append("-autorotate 0 ");
+ args.Append(vaArgs)
+ .Append(filterDevArgs);
+ }
+ else if (string.Equals(optHwaccelType, "qsv", StringComparison.OrdinalIgnoreCase))
+ {
+ if ((!isLinux && !isWindows) || !_mediaEncoder.SupportsHwaccel("qsv"))
+ {
+ return string.Empty;
}
- if (state.IsVideoRequest
- && string.Equals(options.HardwareAccelerationType, "qsv", StringComparison.OrdinalIgnoreCase))
+ var isD3d11vaDecoder = vidDecoder.Contains("d3d11va", StringComparison.OrdinalIgnoreCase);
+ var isVaapiDecoder = vidDecoder.Contains("vaapi", StringComparison.OrdinalIgnoreCase);
+ var isQsvDecoder = vidDecoder.Contains("qsv", StringComparison.OrdinalIgnoreCase);
+ var isQsvEncoder = vidEncoder.Contains("qsv", StringComparison.OrdinalIgnoreCase);
+ var isHwDecoder = isQsvDecoder || isVaapiDecoder || isD3d11vaDecoder;
+ if (!isHwDecoder && !isQsvEncoder)
{
- var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
+ return string.Empty;
+ }
- if (isQsvEncoder)
+ var qsvArgs = GetQsvDeviceArgs(_qsvAlias);
+ var filterDevArgs = GetFilterHwDeviceArgs(_qsvAlias);
+ // child device used by qsv.
+ if (_mediaEncoder.SupportsHwaccel("vaapi") || _mediaEncoder.SupportsHwaccel("d3d11va"))
+ {
+ if (isHwTonemapAvailable && IsOpenclFullSupported())
{
- if (isQsvDecoder)
+ var srcAlias = isLinux ? _vaapiAlias : _d3d11vaAlias;
+ qsvArgs += GetOpenclDeviceArgs(0, null, srcAlias, _openclAlias);
+ if (!isHwDecoder)
{
- if (isLinux)
- {
- if (hasGraphicalSubs)
- {
- arg.Append("-init_hw_device qsv=hw -filter_hw_device hw ");
- }
- else
- {
- arg.Append("-hwaccel qsv ");
- }
- }
-
- if (isWindows)
- {
- arg.Append("-hwaccel qsv ");
- }
+ filterDevArgs = GetFilterHwDeviceArgs(_openclAlias);
}
+ }
+ }
- // While using SW decoder
- else if (isSwDecoder)
- {
- arg.Append("-init_hw_device qsv=hw -filter_hw_device hw ");
- }
+ args.Append(qsvArgs)
+ .Append(filterDevArgs);
+ }
+ else if (string.Equals(optHwaccelType, "nvenc", StringComparison.OrdinalIgnoreCase))
+ {
+ if ((!isLinux && !isWindows) || !IsCudaFullSupported())
+ {
+ return string.Empty;
+ }
- // Hybrid VPP tonemapping with VAAPI
- else if (isVaapiDecoder && isVppTonemappingSupported)
- {
- arg.Append("-init_hw_device vaapi=va:")
- .Append(options.VaapiDevice)
- .Append(" -init_hw_device qsv@va ")
- .Append("-hwaccel_output_format vaapi ");
- }
+ var isCuvidDecoder = vidDecoder.Contains("cuvid", StringComparison.OrdinalIgnoreCase);
+ var isNvdecDecoder = vidDecoder.Contains("cuda", StringComparison.OrdinalIgnoreCase);
+ var isNvencEncoder = vidEncoder.Contains("nvenc", StringComparison.OrdinalIgnoreCase);
+ var isHwDecoder = isNvdecDecoder || isCuvidDecoder;
+ if (!isHwDecoder && !isNvencEncoder)
+ {
+ return string.Empty;
+ }
- arg.Append("-autorotate 0 ");
- }
+ var cuArgs = GetCudaDeviceArgs(0, _cudaAlias);
+ var filterDevArgs = GetFilterHwDeviceArgs(_cudaAlias);
+
+ // workaround for "No decoder surfaces left" error,
+ // but will increase vram usage. https://trac.ffmpeg.org/ticket/7562
+ var extraHwFramesArgs = " -extra_hw_frames 3";
+
+ args.Append(cuArgs)
+ .Append(filterDevArgs)
+ .Append(extraHwFramesArgs);
+ }
+ else if (string.Equals(optHwaccelType, "amf", StringComparison.OrdinalIgnoreCase))
+ {
+ if (!isWindows || !_mediaEncoder.SupportsHwaccel("d3d11va"))
+ {
+ return string.Empty;
}
- if (state.IsVideoRequest
- && string.Equals(options.HardwareAccelerationType, "nvenc", StringComparison.OrdinalIgnoreCase)
- && isNvdecDecoder)
+ var isD3d11vaDecoder = vidDecoder.Contains("d3d11va", StringComparison.OrdinalIgnoreCase);
+ var isAmfEncoder = vidEncoder.Contains("amf", StringComparison.OrdinalIgnoreCase);
+ if (!isD3d11vaDecoder && !isAmfEncoder)
{
- // Fix for 'No decoder surfaces left' error. https://trac.ffmpeg.org/ticket/7562
- arg.Append("-hwaccel_output_format cuda -extra_hw_frames 3 -autorotate 0 ");
+ return string.Empty;
}
- if (state.IsVideoRequest
- && string.Equals(options.HardwareAccelerationType, "nvenc", StringComparison.OrdinalIgnoreCase)
- && (isNvdecDecoder || isCuvidHevcDecoder || isCuvidVp9Decoder || isSwDecoder))
+ // no dxva video processor hw filter.
+ var dx11Args = GetD3d11vaDeviceArgs(0, "0x1002", _d3d11vaAlias);
+ var filterDevArgs = string.Empty;
+ if (IsOpenclFullSupported())
{
- if (!isCudaTonemappingSupported && isOpenclTonemappingSupported)
- {
- arg.Append("-init_hw_device opencl=ocl:")
- .Append(options.OpenclDevice)
- .Append(" -filter_hw_device ocl ");
- }
+ dx11Args += GetOpenclDeviceArgs(0, null, _d3d11vaAlias, _openclAlias);
+ filterDevArgs = GetFilterHwDeviceArgs(_openclAlias);
}
- if (state.IsVideoRequest
- && string.Equals(options.HardwareAccelerationType, "amf", StringComparison.OrdinalIgnoreCase)
- && (isD3d11vaDecoder || isSwDecoder))
+ args.Append(dx11Args)
+ .Append(filterDevArgs);
+ }
+ else if (string.Equals(optHwaccelType, "videotoolbox", StringComparison.OrdinalIgnoreCase))
+ {
+ if (!isMacOS || !_mediaEncoder.SupportsHwaccel("videotoolbox"))
{
- if (isOpenclTonemappingSupported)
- {
- arg.Append("-init_hw_device opencl=ocl:")
- .Append(options.OpenclDevice)
- .Append(" -filter_hw_device ocl ");
- }
+ return string.Empty;
}
- if (state.IsVideoRequest
- && string.Equals(options.HardwareAccelerationType, "videotoolbox", StringComparison.OrdinalIgnoreCase))
+ var isVideotoolboxDecoder = vidDecoder.Contains("videotoolbox", StringComparison.OrdinalIgnoreCase);
+ var isVideotoolboxEncoder = vidEncoder.Contains("videotoolbox", StringComparison.OrdinalIgnoreCase);
+ if (!isVideotoolboxDecoder && !isVideotoolboxEncoder)
{
- arg.Append("-hwaccel videotoolbox ");
+ return string.Empty;
}
+
+ // no videotoolbox hw filter.
+ var vtArgs = GetVideoToolboxDeviceArgs(_videotoolboxAlias);
+ args.Append(vtArgs);
}
- arg.Append("-i ")
+ if (!string.IsNullOrEmpty(vidDecoder))
+ {
+ args.Append(vidDecoder);
+ }
+
+ // hw transpose filters should be added manually.
+ args.Append(" -autorotate 0");
+
+ return args.ToString().Trim();
+ }
+
+ ///
+ /// Gets the input argument.
+ ///
+ /// Encoding state.
+ /// Encoding options.
+ /// Input arguments.
+ public string GetInputArgument(EncodingJobInfo state, EncodingOptions options)
+ {
+ var arg = new StringBuilder();
+ var inputVidHwaccelArgs = GetInputVideoHwaccelArgs(state, options);
+
+ if (!string.IsNullOrEmpty(inputVidHwaccelArgs))
+ {
+ arg.Append(inputVidHwaccelArgs);
+ }
+
+ var canvasArgs = GetGraphicalSubCanvasSize(state);
+ if (!string.IsNullOrEmpty(canvasArgs))
+ {
+ arg.Append(canvasArgs);
+ }
+
+ arg.Append(" -i ")
.Append(GetInputPathArgument(state));
+ // sub2video for external graphical subtitles
if (state.SubtitleStream != null
&& state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode
- && state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream)
+ && !state.SubtitleStream.IsTextSubtitleStream
+ && state.SubtitleStream.IsExternal)
{
var subtitlePath = state.SubtitleStream.Path;
@@ -693,7 +885,19 @@ namespace MediaBrowser.Controller.MediaEncoding
}
}
- arg.Append(" -i \"").Append(subtitlePath).Append('\"');
+ // Also seek the external subtitles stream.
+ var seekSubParam = GetFastSeekCommandLineParameter(state, options);
+ if (!string.IsNullOrEmpty(seekSubParam))
+ {
+ arg.Append(' ').Append(seekSubParam);
+ }
+
+ if (!string.IsNullOrEmpty(canvasArgs))
+ {
+ arg.Append(canvasArgs);
+ }
+
+ arg.Append(" -i file:\"").Append(subtitlePath).Append('\"');
}
return arg.ToString();
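A rough, hedged example of what GetInputArgument can now assemble for a VAAPI job that burns in an external PGS subtitle (paths and sizes are invented; the exact input-path formatting comes from GetInputPathArgument and is simplified here):

// -init_hw_device vaapi=va:/dev/dri/renderD128 -filter_hw_device va -autorotate 0
// -canvas_size 1920x1080 -i "/media/movie.mkv"
// -ss 00:05:00 -canvas_size 1920x1080 -i file:"/media/movie.eng.sup"
//
// The -canvas_size argument precedes both inputs so graphical subtitles are rendered at the
// overlay resolution, and the external subtitle input is fast-seeked with the same offset as
// the video.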
@@ -809,6 +1013,26 @@ namespace MediaBrowser.Controller.MediaEncoding
return FormattableString.Invariant($" -maxrate {bitrate} -bufsize {bufsize}");
}
+ if (string.Equals(videoCodec, "h264_amf", StringComparison.OrdinalIgnoreCase)
+ || string.Equals(videoCodec, "hevc_amf", StringComparison.OrdinalIgnoreCase))
+ {
+ return FormattableString.Invariant($" -qmin 18 -qmax 32 -b:v {bitrate} -maxrate {bitrate} -bufsize {bufsize}");
+ }
+
+ if (string.Equals(videoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase)
+ || string.Equals(videoCodec, "hevc_vaapi", StringComparison.OrdinalIgnoreCase))
+ {
+ // VBR in i965 driver may result in pixelated output.
+ if (_mediaEncoder.IsVaapiDeviceInteli965())
+ {
+ return FormattableString.Invariant($" -rc_mode CBR -b:v {bitrate} -maxrate {bitrate} -bufsize {bufsize}");
+ }
+ else
+ {
+ return FormattableString.Invariant($" -rc_mode VBR -b:v {bitrate} -maxrate {bitrate} -bufsize {bufsize}");
+ }
+ }
+
return FormattableString.Invariant($" -b:v {bitrate} -maxrate {bitrate} -bufsize {bufsize}");
}
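Illustrative rate-control fragments produced by the additions above for a 6 Mbps target (the bufsize value is supplied by the caller; the 12 Mbps shown is only an assumption):

// h264_amf / hevc_amf                        : " -qmin 18 -qmax 32 -b:v 6000000 -maxrate 6000000 -bufsize 12000000"
// h264_vaapi / hevc_vaapi (iHD, other drivers): " -rc_mode VBR -b:v 6000000 -maxrate 6000000 -bufsize 12000000"
// h264_vaapi / hevc_vaapi on the i965 driver  : " -rc_mode CBR -b:v 6000000 -maxrate 6000000 -bufsize 12000000"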
@@ -845,8 +1069,10 @@ namespace MediaBrowser.Controller.MediaEncoding
/// Gets the text subtitle param.
///
/// The state.
+ /// Enable alpha processing.
+ /// Enable sub2video mode.
/// System.String.
- public string GetTextSubtitleParam(EncodingJobInfo state)
+ public string GetTextSubtitlesFilter(EncodingJobInfo state, bool enableAlpha, bool enableSub2video)
{
var seconds = Math.Round(TimeSpan.FromTicks(state.StartTimeTicks ?? 0).TotalSeconds);
@@ -855,6 +1081,9 @@ namespace MediaBrowser.Controller.MediaEncoding
? string.Empty
: string.Format(CultureInfo.InvariantCulture, ",setpts=PTS -{0}/TB", seconds);
+ var alphaParam = enableAlpha ? (":alpha=" + Convert.ToInt32(enableAlpha)) : string.Empty;
+ var sub2videoParam = enableSub2video ? (":sub2video=" + Convert.ToInt32(enableSub2video)) : string.Empty;
+
// TODO
// var fallbackFontPath = Path.Combine(_appPaths.ProgramDataPath, "fonts", "DroidSansFallback.ttf");
// string fallbackFontParam = string.Empty;
@@ -876,7 +1105,6 @@ namespace MediaBrowser.Controller.MediaEncoding
if (state.SubtitleStream.IsExternal)
{
var subtitlePath = state.SubtitleStream.Path;
-
var charsetParam = string.Empty;
if (!string.IsNullOrEmpty(state.SubtitleStream.Language))
@@ -896,9 +1124,11 @@ namespace MediaBrowser.Controller.MediaEncoding
// TODO: Perhaps also use original_size=1920x800 ??
return string.Format(
CultureInfo.InvariantCulture,
- "subtitles=filename='{0}'{1}{2}",
+ "subtitles=f='{0}'{1}{2}{3}{4}",
_mediaEncoder.EscapeSubtitleFilterPath(subtitlePath),
charsetParam,
+ alphaParam,
+ sub2videoParam,
// fallbackFontParam,
setPtsParam);
}
@@ -907,9 +1137,11 @@ namespace MediaBrowser.Controller.MediaEncoding
return string.Format(
CultureInfo.InvariantCulture,
- "subtitles='{0}:si={1}'{2}",
+ "subtitles='{0}:si={1}{2}{3}'{4}",
_mediaEncoder.EscapeSubtitleFilterPath(mediaPath),
state.InternalSubtitleStreamOffset.ToString(CultureInfo.InvariantCulture),
+ alphaParam,
+ sub2videoParam,
// fallbackFontParam,
setPtsParam);
}
@@ -994,11 +1226,11 @@ namespace MediaBrowser.Controller.MediaEncoding
|| string.Equals(codec, "h264_vaapi", StringComparison.OrdinalIgnoreCase)
|| string.Equals(codec, "hevc_vaapi", StringComparison.OrdinalIgnoreCase))
{
- args += " " + keyFrameArg;
+ args += keyFrameArg;
}
else
{
- args += " " + keyFrameArg + gopArg;
+ args += keyFrameArg + gopArg;
}
return args;
@@ -1016,54 +1248,49 @@ namespace MediaBrowser.Controller.MediaEncoding
{
var param = string.Empty;
- if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase)
- && !string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase)
- && !string.Equals(videoEncoder, "h264_vaapi", StringComparison.OrdinalIgnoreCase)
- && !string.Equals(videoEncoder, "h264_nvenc", StringComparison.OrdinalIgnoreCase)
- && !string.Equals(videoEncoder, "h264_amf", StringComparison.OrdinalIgnoreCase)
- && !string.Equals(videoEncoder, "h264_v4l2m2m", StringComparison.OrdinalIgnoreCase)
- && !string.Equals(videoEncoder, "hevc_qsv", StringComparison.OrdinalIgnoreCase)
- && !string.Equals(videoEncoder, "hevc_vaapi", StringComparison.OrdinalIgnoreCase)
- && !string.Equals(videoEncoder, "hevc_nvenc", StringComparison.OrdinalIgnoreCase)
- && !string.Equals(videoEncoder, "hevc_amf", StringComparison.OrdinalIgnoreCase))
- {
- param += " -pix_fmt yuv420p";
- }
-
- if (string.Equals(videoEncoder, "h264_nvenc", StringComparison.OrdinalIgnoreCase)
- || string.Equals(videoEncoder, "h264_amf", StringComparison.OrdinalIgnoreCase)
- || string.Equals(videoEncoder, "hevc_nvenc", StringComparison.OrdinalIgnoreCase)
- || string.Equals(videoEncoder, "hevc_amf", StringComparison.OrdinalIgnoreCase))
- {
- var videoStream = state.VideoStream;
- var isColorDepth10 = IsColorDepth10(state);
- var videoDecoder = GetHardwareAcceleratedVideoDecoder(state, encodingOptions) ?? string.Empty;
- var isNvdecDecoder = videoDecoder.Contains("cuda", StringComparison.OrdinalIgnoreCase);
-
- if (!isNvdecDecoder)
- {
- if (isColorDepth10
- && _mediaEncoder.SupportsHwaccel("opencl")
- && encodingOptions.EnableTonemapping
- && !string.IsNullOrEmpty(videoStream.VideoRange)
- && videoStream.VideoRange.Contains("HDR", StringComparison.OrdinalIgnoreCase))
- {
- param += " -pix_fmt nv12";
- }
- else
- {
- param += " -pix_fmt yuv420p";
- }
+ // Tutorials: Enable Intel GuC / HuC firmware loading for Low Power Encoding.
+ // https://01.org/linuxgraphics/downloads/firmware
+ // https://wiki.archlinux.org/title/intel_graphics#Enable_GuC_/_HuC_firmware_loading
+ // Intel Low Power Encoding can save unnecessary CPU-GPU synchronization,
+ // which will reduce overhead in performance intensive tasks such as 4k transcoding and tonemapping.
+ var intelLowPowerHwEncoding = false;
+
+ if (string.Equals(encodingOptions.HardwareAccelerationType, "vaapi", StringComparison.OrdinalIgnoreCase))
+ {
+ var isIntelVaapiDriver = _mediaEncoder.IsVaapiDeviceInteliHD() || _mediaEncoder.IsVaapiDeviceInteli965();
+
+ if (string.Equals(videoEncoder, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
+ {
+ intelLowPowerHwEncoding = encodingOptions.EnableIntelLowPowerH264HwEncoder && isIntelVaapiDriver;
+ }
+ else if (string.Equals(videoEncoder, "hevc_vaapi", StringComparison.OrdinalIgnoreCase))
+ {
+ intelLowPowerHwEncoding = encodingOptions.EnableIntelLowPowerHevcHwEncoder && isIntelVaapiDriver;
+ }
+ }
+ else if (string.Equals(encodingOptions.HardwareAccelerationType, "qsv", StringComparison.OrdinalIgnoreCase))
+ {
+ if (string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase))
+ {
+ intelLowPowerHwEncoding = encodingOptions.EnableIntelLowPowerH264HwEncoder;
+ }
+ else if (string.Equals(videoEncoder, "hevc_qsv", StringComparison.OrdinalIgnoreCase))
+ {
+ intelLowPowerHwEncoding = encodingOptions.EnableIntelLowPowerHevcHwEncoder;
}
}
+ if (intelLowPowerHwEncoding)
+ {
+ param += " -low_power 1";
+ }
+
if (string.Equals(videoEncoder, "h264_v4l2m2m", StringComparison.OrdinalIgnoreCase))
{
param += " -pix_fmt nv21";
}
- var isVc1 = state.VideoStream != null &&
- string.Equals(state.VideoStream.Codec, "vc1", StringComparison.OrdinalIgnoreCase);
+ var isVc1 = string.Equals(state.VideoStream.Codec ?? string.Empty, "vc1", StringComparison.OrdinalIgnoreCase);
var isLibX265 = string.Equals(videoEncoder, "libx265", StringComparison.OrdinalIgnoreCase);
if (string.Equals(videoEncoder, "libx264", StringComparison.OrdinalIgnoreCase) || isLibX265)
@@ -1174,19 +1401,6 @@ namespace MediaBrowser.Controller.MediaEncoding
break;
}
- var videoStream = state.VideoStream;
- var isColorDepth10 = IsColorDepth10(state);
-
- if (isColorDepth10
- && _mediaEncoder.SupportsHwaccel("opencl")
- && encodingOptions.EnableTonemapping
- && !string.IsNullOrEmpty(videoStream.VideoRange)
- && videoStream.VideoRange.Contains("HDR", StringComparison.OrdinalIgnoreCase))
- {
- // Enhance workload when tone mapping with AMF on some APUs
- param += " -preanalysis true";
- }
-
if (string.Equals(videoEncoder, "hevc_amf", StringComparison.OrdinalIgnoreCase))
{
param += " -header_insertion_mode gop -gops_per_idr 1";
@@ -1359,13 +1573,6 @@ namespace MediaBrowser.Controller.MediaEncoding
profile = "constrained_high";
}
- // Currently hevc_amf only support encoding HEVC Main Profile, otherwise force Main Profile.
- if (string.Equals(videoEncoder, "hevc_amf", StringComparison.OrdinalIgnoreCase)
- && profile.Contains("main10", StringComparison.OrdinalIgnoreCase))
- {
- profile = "main";
- }
-
if (!string.IsNullOrEmpty(profile))
{
if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase)
@@ -1382,7 +1589,7 @@ namespace MediaBrowser.Controller.MediaEncoding
{
level = NormalizeTranscodingLevel(state, level);
- // libx264, QSV, AMF, VAAPI can adjust the given level to match the output.
+ // libx264, QSV, AMF can adjust the given level to match the output.
if (string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase)
|| string.Equals(videoEncoder, "libx264", StringComparison.OrdinalIgnoreCase))
{
@@ -1407,6 +1614,11 @@ namespace MediaBrowser.Controller.MediaEncoding
// level option may cause NVENC to fail.
// NVENC cannot adjust the given level, just throw an error.
}
+ else if (string.Equals(videoEncoder, "h264_vaapi", StringComparison.OrdinalIgnoreCase)
+ || string.Equals(videoEncoder, "hevc_vaapi", StringComparison.OrdinalIgnoreCase))
+ {
+ // level option may cause corrupted frames on AMD VAAPI.
+ }
else if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase)
|| !string.Equals(videoEncoder, "libx265", StringComparison.OrdinalIgnoreCase))
{
@@ -1491,8 +1703,8 @@ namespace MediaBrowser.Controller.MediaEncoding
if (!string.IsNullOrEmpty(videoStream.Profile)
&& !requestedProfiles.Contains(videoStream.Profile.Replace(" ", string.Empty, StringComparison.Ordinal), StringComparer.OrdinalIgnoreCase))
{
- var currentScore = GetVideoProfileScore(videoStream.Profile);
- var requestedScore = GetVideoProfileScore(requestedProfile);
+ var currentScore = GetVideoProfileScore(videoStream.Codec, videoStream.Profile);
+ var requestedScore = GetVideoProfileScore(videoStream.Codec, requestedProfile);
if (currentScore == -1 || currentScore > requestedScore)
{
@@ -1703,7 +1915,8 @@ namespace MediaBrowser.Controller.MediaEncoding
{
if (string.Equals(codec, "h265", StringComparison.OrdinalIgnoreCase)
|| string.Equals(codec, "hevc", StringComparison.OrdinalIgnoreCase)
- || string.Equals(codec, "vp9", StringComparison.OrdinalIgnoreCase))
+ || string.Equals(codec, "vp9", StringComparison.OrdinalIgnoreCase)
+ || string.Equals(codec, "av1", StringComparison.OrdinalIgnoreCase))
{
return .6;
}
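A worked example of the 0.6 factor returned above (the enclosing method's signature is outside this hunk, so this only illustrates the arithmetic): an 8 Mbps H.264-class budget scales to roughly 4.8 Mbps for HEVC, VP9, or, newly, AV1.

// Arithmetic illustration only; variable names are not from the patch.
const int h264Bitrate = 8_000_000;
const double hevcFactor = .6;
var scaledBitrate = (int)Math.Round(h264Bitrate * hevcFactor); // 4_800_000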
@@ -1940,19 +2153,35 @@ namespace MediaBrowser.Controller.MediaEncoding
///
/// Gets the fast seek command line parameter.
///
- /// The request.
+ /// The state.
+ /// The options.
/// System.String.
/// The fast seek command line parameter.
- public string GetFastSeekCommandLineParameter(BaseEncodingJobOptions request)
+ public string GetFastSeekCommandLineParameter(EncodingJobInfo state, EncodingOptions options)
{
- var time = request.StartTimeTicks ?? 0;
+ var time = state.BaseRequest.StartTimeTicks ?? 0;
+ var seekParam = string.Empty;
if (time > 0)
{
- return string.Format(CultureInfo.InvariantCulture, "-ss {0}", _mediaEncoder.GetTimeParameter(time));
+ seekParam += string.Format(CultureInfo.InvariantCulture, "-ss {0}", _mediaEncoder.GetTimeParameter(time));
+
+ if (state.IsVideoRequest)
+ {
+ var outputVideoCodec = GetVideoEncoder(state, options);
+
+ // Important: If this is ever re-enabled, make sure not to use it with wtv because it breaks seeking
+ if (!string.Equals(state.InputContainer, "wtv", StringComparison.OrdinalIgnoreCase)
+ && state.TranscodingType != TranscodingJobType.Progressive
+ && !state.EnableBreakOnNonKeyFrames(outputVideoCodec)
+ && (state.BaseRequest.StartTimeTicks ?? 0) > 0)
+ {
+ seekParam += " -noaccurate_seek";
+ }
+ }
}
- return string.Empty;
+ return seekParam;
}
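Example of what the reworked fast-seek parameter yields for a 2.5-minute seek into a non-progressive (HLS) job; the exact time formatting comes from GetTimeParameter and is approximated here:

// StartTimeTicks = TimeSpan.FromMinutes(2.5).Ticks
//   => roughly "-ss 00:02:30 -noaccurate_seek"
// The -noaccurate_seek suffix is omitted for wtv input, progressive transcodes, and jobs
// that enable break-on-non-keyframes, per the guards above.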
///
@@ -2061,180 +2290,84 @@ namespace MediaBrowser.Controller.MediaEncoding
return returnFirstIfNoIndex ? streams.FirstOrDefault() : null;
}
- ///
- /// Gets the graphical subtitle parameter.
- ///
- /// Encoding state.
- /// Encoding options.
- /// Video codec to use.
- /// Graphical subtitle parameter.
- public string GetGraphicalSubtitleParam(
- EncodingJobInfo state,
- EncodingOptions options,
- string outputVideoCodec)
+ public static (int? width, int? height) GetFixedOutputSize(
+ int? videoWidth,
+ int? videoHeight,
+ int? requestedWidth,
+ int? requestedHeight,
+ int? requestedMaxWidth,
+ int? requestedMaxHeight)
{
- outputVideoCodec ??= string.Empty;
-
- var outputSizeParam = ReadOnlySpan.Empty;
- var request = state.BaseRequest;
-
- outputSizeParam = GetOutputSizeParamInternal(state, options, outputVideoCodec);
-
- var videoSizeParam = string.Empty;
- var videoDecoder = GetHardwareAcceleratedVideoDecoder(state, options) ?? string.Empty;
- var isLinux = OperatingSystem.IsLinux();
-
- var isVaapiDecoder = videoDecoder.IndexOf("vaapi", StringComparison.OrdinalIgnoreCase) != -1;
- var isVaapiH264Encoder = outputVideoCodec.IndexOf("h264_vaapi", StringComparison.OrdinalIgnoreCase) != -1;
- var isVaapiHevcEncoder = outputVideoCodec.IndexOf("hevc_vaapi", StringComparison.OrdinalIgnoreCase) != -1;
- var isQsvH264Encoder = outputVideoCodec.Contains("h264_qsv", StringComparison.OrdinalIgnoreCase);
- var isQsvHevcEncoder = outputVideoCodec.Contains("hevc_qsv", StringComparison.OrdinalIgnoreCase);
- var isNvdecDecoder = videoDecoder.Contains("cuda", StringComparison.OrdinalIgnoreCase);
- var isNvencEncoder = outputVideoCodec.Contains("nvenc", StringComparison.OrdinalIgnoreCase);
- var isTonemappingSupportedOnVaapi = string.Equals(options.HardwareAccelerationType, "vaapi", StringComparison.OrdinalIgnoreCase) && isVaapiDecoder && (isVaapiH264Encoder || isVaapiHevcEncoder);
- var isTonemappingSupportedOnQsv = string.Equals(options.HardwareAccelerationType, "qsv", StringComparison.OrdinalIgnoreCase) && isVaapiDecoder && (isQsvH264Encoder || isQsvHevcEncoder);
- var isOpenclTonemappingSupported = IsOpenclTonemappingSupported(state, options);
- var isVppTonemappingSupported = IsVppTonemappingSupported(state, options);
-
- var mediaEncoderVersion = _mediaEncoder.GetMediaEncoderVersion();
- var isCudaOverlaySupported = _mediaEncoder.SupportsFilter("overlay_cuda") && mediaEncoderVersion != null && mediaEncoderVersion >= _minVersionForCudaOverlay;
- var isCudaFormatConversionSupported = _mediaEncoder.SupportsFilterWithOption(FilterOptionType.ScaleCudaFormat);
-
- // Tonemapping and burn-in graphical subtitles requires overlay_vaapi.
- // But it's still in ffmpeg mailing list. Disable it for now.
- if (isTonemappingSupportedOnVaapi && isOpenclTonemappingSupported && !isVppTonemappingSupported)
+ if (!videoWidth.HasValue && !requestedWidth.HasValue)
{
- return GetOutputSizeParam(state, options, outputVideoCodec);
+ return (null, null);
}
- // Setup subtitle scaling
- if (state.VideoStream != null && state.VideoStream.Width.HasValue && state.VideoStream.Height.HasValue)
+ if (!videoHeight.HasValue && !requestedHeight.HasValue)
{
- // Adjust the size of graphical subtitles to fit the video stream.
- var videoStream = state.VideoStream;
- var inputWidth = videoStream.Width;
- var inputHeight = videoStream.Height;
- var (width, height) = GetFixedOutputSize(inputWidth, inputHeight, request.Width, request.Height, request.MaxWidth, request.MaxHeight);
-
- if (width.HasValue && height.HasValue)
- {
- videoSizeParam = string.Format(
- CultureInfo.InvariantCulture,
- "scale={0}x{1}",
- width.Value,
- height.Value);
- }
+ return (null, null);
+ }
- if (!string.IsNullOrEmpty(videoSizeParam)
- && !(isTonemappingSupportedOnQsv && isVppTonemappingSupported))
- {
- // upload graphical subtitle to QSV
- if (isLinux && (string.Equals(outputVideoCodec, "h264_qsv", StringComparison.OrdinalIgnoreCase)
- || string.Equals(outputVideoCodec, "hevc_qsv", StringComparison.OrdinalIgnoreCase)))
- {
- videoSizeParam += ",hwupload=extra_hw_frames=64";
- }
- }
+ decimal inputWidth = Convert.ToDecimal(videoWidth ?? requestedWidth, CultureInfo.InvariantCulture);
+ decimal inputHeight = Convert.ToDecimal(videoHeight ?? requestedHeight, CultureInfo.InvariantCulture);
+ decimal outputWidth = requestedWidth.HasValue ? Convert.ToDecimal(requestedWidth.Value) : inputWidth;
+ decimal outputHeight = requestedHeight.HasValue ? Convert.ToDecimal(requestedHeight.Value) : inputHeight;
+ decimal maximumWidth = requestedMaxWidth.HasValue ? Convert.ToDecimal(requestedMaxWidth.Value) : outputWidth;
+ decimal maximumHeight = requestedMaxHeight.HasValue ? Convert.ToDecimal(requestedMaxHeight.Value) : outputHeight;
- if (!string.IsNullOrEmpty(videoSizeParam))
- {
- // upload graphical subtitle to cuda
- if (isNvdecDecoder && isNvencEncoder && isCudaOverlaySupported && isCudaFormatConversionSupported)
- {
- videoSizeParam += ",hwupload_cuda";
- }
- }
+ if (outputWidth > maximumWidth || outputHeight > maximumHeight)
+ {
+ var scale = Math.Min(maximumWidth / outputWidth, maximumHeight / outputHeight);
+ outputWidth = Math.Min(maximumWidth, Math.Truncate(outputWidth * scale));
+ outputHeight = Math.Min(maximumHeight, Math.Truncate(outputHeight * scale));
}
- var mapPrefix = state.SubtitleStream.IsExternal ?
- 1 :
- 0;
+ outputWidth = 2 * Math.Truncate(outputWidth / 2);
+ outputHeight = 2 * Math.Truncate(outputHeight / 2);
- var subtitleStreamIndex = state.SubtitleStream.IsExternal
- ? 0
- : state.SubtitleStream.Index;
+ return (Convert.ToInt32(outputWidth), Convert.ToInt32(outputHeight));
+ }
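A worked example for the GetFixedOutputSize helper above, using its actual parameter names; the call is shown as a comment since the method is a static member of EncodingHelper in the patch:

// var (width, height) = EncodingHelper.GetFixedOutputSize(
//     videoWidth: 1920, videoHeight: 1080,
//     requestedWidth: null, requestedHeight: null,
//     requestedMaxWidth: 1280, requestedMaxHeight: null);
//
// scale = min(1280/1920, 1080/1080) = 2/3, so 1920x1080 is truncated to 1280x720; both
// values are already even, so the method returns (1280, 720).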
- // Setup default filtergraph utilizing FFMpeg overlay() and FFMpeg scale() (see the return of this function for index reference)
- // Always put the scaler before the overlay for better performance
- var retStr = outputSizeParam.IsEmpty
- ? " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}][sub]overlay\""
- : " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}]{3}[base];[base][sub]overlay\"";
+ public static string GetHwScaleFilter(
+ string hwScaleSuffix,
+ string videoFormat,
+ int? videoWidth,
+ int? videoHeight,
+ int? requestedWidth,
+ int? requestedHeight,
+ int? requestedMaxWidth,
+ int? requestedMaxHeight)
+ {
+ var (outWidth, outHeight) = GetFixedOutputSize(videoWidth, videoHeight,
+ requestedWidth, requestedHeight,
+ requestedMaxWidth, requestedMaxHeight);
+ var isFormatFixed = !string.IsNullOrEmpty(videoFormat);
+ var isSizeFixed = !videoWidth.HasValue
+ || outWidth.Value != videoWidth.Value
+ || !videoHeight.HasValue
+ || outHeight.Value != videoHeight.Value;
- // When the input may or may not be hardware VAAPI decodable
- if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase)
- || string.Equals(outputVideoCodec, "hevc_vaapi", StringComparison.OrdinalIgnoreCase))
+ var arg1 = isSizeFixed ? ("=w=" + outWidth.Value + ":h=" + outHeight.Value) : string.Empty;
+ var arg2 = isFormatFixed ? ("format=" + videoFormat) : string.Empty;
+ if (isFormatFixed)
{
- /*
- [base]: HW scaling video to OutputSize
- [sub]: SW scaling subtitle to FixedOutputSize
- [base][sub]: SW overlay
- */
- retStr = outputSizeParam.IsEmpty
- ? " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}]hwdownload[base];[base][sub]overlay,format=nv12,hwupload\""
- : " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}]{3},hwdownload[base];[base][sub]overlay,format=nv12,hwupload\"";
+ arg2 = (isSizeFixed ? ':' : '=') + arg2;
}
- // If we're hardware VAAPI decoding and software encoding, download frames from the decoder first
- else if (_mediaEncoder.SupportsHwaccel("vaapi") && videoDecoder.IndexOf("vaapi", StringComparison.OrdinalIgnoreCase) != -1
- && (string.Equals(outputVideoCodec, "libx264", StringComparison.OrdinalIgnoreCase)
- || string.Equals(outputVideoCodec, "libx265", StringComparison.OrdinalIgnoreCase)))
- {
- /*
- [base]: SW scaling video to OutputSize
- [sub]: SW scaling subtitle to FixedOutputSize
- [base][sub]: SW overlay
- */
- retStr = outputSizeParam.IsEmpty
- ? " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}][sub]overlay\""
- : " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}]{3}[base];[base][sub]overlay\"";
- }
- else if (string.Equals(outputVideoCodec, "h264_qsv", StringComparison.OrdinalIgnoreCase)
- || string.Equals(outputVideoCodec, "hevc_qsv", StringComparison.OrdinalIgnoreCase))
- {
- /*
- QSV in FFMpeg can now setup hardware overlay for transcodes.
- For software decoding and hardware encoding option, frames must be hwuploaded into hardware
- with fixed frame size.
- Currently only supports linux.
- */
- if (isTonemappingSupportedOnQsv && isVppTonemappingSupported)
- {
- retStr = " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}]{3},hwdownload,format=nv12[base];[base][sub]overlay\"";
- }
- else if (isLinux)
- {
- retStr = outputSizeParam.IsEmpty
- ? " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}][sub]overlay_qsv\""
- : " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}]{3}[base];[base][sub]overlay_qsv\"";
- }
- }
- else if (isNvdecDecoder && isNvencEncoder)
+ if (!string.IsNullOrEmpty(hwScaleSuffix) && (isSizeFixed || isFormatFixed))
{
- if (isCudaOverlaySupported && isCudaFormatConversionSupported)
- {
- retStr = outputSizeParam.IsEmpty
- ? " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}]scale_cuda=format=yuv420p[base];[base][sub]overlay_cuda\""
- : " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}]{3}[base];[base][sub]overlay_cuda\"";
- }
- else
- {
- retStr = outputSizeParam.IsEmpty
- ? " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}][sub]overlay,format=nv12|yuv420p,hwupload_cuda\""
- : " -filter_complex \"[{0}:{1}]{4}[sub];[0:{2}]{3}[base];[base][sub]overlay,format=nv12|yuv420p,hwupload_cuda\"";
- }
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ "scale_{0}{1}{2}",
+ hwScaleSuffix,
+ arg1,
+ arg2);
}
- return string.Format(
- CultureInfo.InvariantCulture,
- retStr,
- mapPrefix,
- subtitleStreamIndex,
- state.VideoStream.Index,
- outputSizeParam.ToString(),
- videoSizeParam);
+ return string.Empty;
}
- public static (int? width, int? height) GetFixedOutputSize(
+ public static string GetCustomSwScaleFilter(
int? videoWidth,
int? videoHeight,
int? requestedWidth,
@@ -2242,332 +2375,165 @@ namespace MediaBrowser.Controller.MediaEncoding
int? requestedMaxWidth,
int? requestedMaxHeight)
{
- if (!videoWidth.HasValue && !requestedWidth.HasValue)
- {
- return (null, null);
- }
-
- if (!videoHeight.HasValue && !requestedHeight.HasValue)
+ var (outWidth, outHeight) = GetFixedOutputSize(videoWidth, videoHeight,
+ requestedWidth, requestedHeight,
+ requestedMaxWidth, requestedMaxHeight);
+ if (outWidth.HasValue && outHeight.HasValue)
{
- return (null, null);
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ "scale=s={0}x{1}:flags=fast_bilinear",
+ outWidth.Value,
+ outHeight.Value);
}
- decimal inputWidth = Convert.ToDecimal(videoWidth ?? requestedWidth, CultureInfo.InvariantCulture);
- decimal inputHeight = Convert.ToDecimal(videoHeight ?? requestedHeight, CultureInfo.InvariantCulture);
- decimal outputWidth = requestedWidth.HasValue ? Convert.ToDecimal(requestedWidth.Value) : inputWidth;
- decimal outputHeight = requestedHeight.HasValue ? Convert.ToDecimal(requestedHeight.Value) : inputHeight;
- decimal maximumWidth = requestedMaxWidth.HasValue ? Convert.ToDecimal(requestedMaxWidth.Value) : outputWidth;
- decimal maximumHeight = requestedMaxHeight.HasValue ? Convert.ToDecimal(requestedMaxHeight.Value) : outputHeight;
+ return string.Empty;
+ }
- if (outputWidth > maximumWidth || outputHeight > maximumHeight)
+ public static string GetAlphaSrcFilter(
+ EncodingJobInfo state,
+ int? videoWidth,
+ int? videoHeight,
+ int? requestedWidth,
+ int? requestedHeight,
+ int? requestedMaxWidth,
+ int? requestedMaxHeight,
+ int? framerate)
+ {
+ var reqTicks = state.BaseRequest.StartTimeTicks ?? 0;
+ var startTime = TimeSpan.FromTicks(reqTicks).ToString(@"hh\\\:mm\\\:ss\\\.fff", CultureInfo.InvariantCulture);
+ var (outWidth, outHeight) = GetFixedOutputSize(videoWidth, videoHeight,
+ requestedWidth, requestedHeight,
+ requestedMaxWidth, requestedMaxHeight);
+ if (outWidth.HasValue && outHeight.HasValue)
{
- var scale = Math.Min(maximumWidth / outputWidth, maximumHeight / outputHeight);
- outputWidth = Math.Min(maximumWidth, Math.Truncate(outputWidth * scale));
- outputHeight = Math.Min(maximumHeight, Math.Truncate(outputHeight * scale));
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ "alphasrc=s={0}x{1}:r={2}:start='{3}'",
+ outWidth.Value,
+ outHeight.Value,
+ framerate ?? 10,
+ reqTicks > 0 ? startTime : 0);
}
- outputWidth = 2 * Math.Truncate(outputWidth / 2);
- outputHeight = 2 * Math.Truncate(outputHeight / 2);
-
- return (Convert.ToInt32(outputWidth), Convert.ToInt32(outputHeight));
+ return string.Empty;
}
- public List GetScalingFilters(
+ public static List GetSwScaleFilter(
EncodingJobInfo state,
EncodingOptions options,
+ string videoEncoder,
int? videoWidth,
int? videoHeight,
Video3DFormat? threedFormat,
- string videoDecoder,
- string videoEncoder,
int? requestedWidth,
int? requestedHeight,
int? requestedMaxWidth,
int? requestedMaxHeight)
{
var filters = new List();
- var (width, height) = GetFixedOutputSize(
- videoWidth,
- videoHeight,
- requestedWidth,
- requestedHeight,
- requestedMaxWidth,
- requestedMaxHeight);
-
- if ((string.Equals(videoEncoder, "h264_vaapi", StringComparison.OrdinalIgnoreCase)
- || string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase)
- || string.Equals(videoEncoder, "hevc_vaapi", StringComparison.OrdinalIgnoreCase)
- || string.Equals(videoEncoder, "hevc_qsv", StringComparison.OrdinalIgnoreCase))
- && width.HasValue
- && height.HasValue)
- {
- // Given the input dimensions (inputWidth, inputHeight), determine the output dimensions
- // (outputWidth, outputHeight). The user may request precise output dimensions or maximum
- // output dimensions. Output dimensions are guaranteed to be even.
- var outputWidth = width.Value;
- var outputHeight = height.Value;
- var qsv_or_vaapi = string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase)
- || string.Equals(videoEncoder, "hevc_qsv", StringComparison.OrdinalIgnoreCase);
- var isDeintEnabled = state.DeInterlace("h264", true)
- || state.DeInterlace("avc", true)
- || state.DeInterlace("h265", true)
- || state.DeInterlace("hevc", true);
-
- var isVaapiDecoder = videoDecoder.Contains("vaapi", StringComparison.OrdinalIgnoreCase);
- var isVaapiH264Encoder = videoEncoder.Contains("h264_vaapi", StringComparison.OrdinalIgnoreCase);
- var isVaapiHevcEncoder = videoEncoder.Contains("hevc_vaapi", StringComparison.OrdinalIgnoreCase);
- var isQsvH264Encoder = videoEncoder.Contains("h264_qsv", StringComparison.OrdinalIgnoreCase);
- var isQsvHevcEncoder = videoEncoder.Contains("hevc_qsv", StringComparison.OrdinalIgnoreCase);
- var isOpenclTonemappingSupported = IsOpenclTonemappingSupported(state, options);
- var isVppTonemappingSupported = IsVppTonemappingSupported(state, options);
- var isTonemappingSupportedOnVaapi = string.Equals(options.HardwareAccelerationType, "vaapi", StringComparison.OrdinalIgnoreCase) && isVaapiDecoder && (isVaapiH264Encoder || isVaapiHevcEncoder);
- var isTonemappingSupportedOnQsv = string.Equals(options.HardwareAccelerationType, "qsv", StringComparison.OrdinalIgnoreCase) && isVaapiDecoder && (isQsvH264Encoder || isQsvHevcEncoder);
- var isP010PixFmtRequired = (isTonemappingSupportedOnVaapi && (isOpenclTonemappingSupported || isVppTonemappingSupported))
- || (isTonemappingSupportedOnQsv && isVppTonemappingSupported);
-
- var outputPixFmt = "format=nv12";
- if (isP010PixFmtRequired)
- {
- outputPixFmt = "format=p010";
- }
-
- if (isTonemappingSupportedOnQsv && isVppTonemappingSupported)
- {
- qsv_or_vaapi = false;
- }
-
- if (!videoWidth.HasValue
- || outputWidth != videoWidth.Value
- || !videoHeight.HasValue
- || outputHeight != videoHeight.Value)
- {
- // Force nv12 pixel format to enable 10-bit to 8-bit colour conversion.
- // use vpp_qsv filter to avoid green bar when the fixed output size is requested.
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "{0}=w={1}:h={2}{3}{4}",
- qsv_or_vaapi ? "vpp_qsv" : "scale_vaapi",
- outputWidth,
- outputHeight,
- ":" + outputPixFmt,
- (qsv_or_vaapi && isDeintEnabled) ? ":deinterlace=1" : string.Empty));
- }
-
- // Assert 10-bit is P010 so as we can avoid the extra scaler to get a bit more fps on high res HDR videos.
- else if (!isP010PixFmtRequired)
- {
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "{0}={1}{2}",
- qsv_or_vaapi ? "vpp_qsv" : "scale_vaapi",
- outputPixFmt,
- (qsv_or_vaapi && isDeintEnabled) ? ":deinterlace=1" : string.Empty));
- }
- }
- else if ((videoDecoder ?? string.Empty).Contains("cuda", StringComparison.OrdinalIgnoreCase)
- && width.HasValue
- && height.HasValue)
- {
- var outputWidth = width.Value;
- var outputHeight = height.Value;
-
- var isNvencEncoder = videoEncoder.Contains("nvenc", StringComparison.OrdinalIgnoreCase);
- var isOpenclTonemappingSupported = IsOpenclTonemappingSupported(state, options);
- var isCudaTonemappingSupported = IsCudaTonemappingSupported(state, options);
- var isTonemappingSupportedOnNvenc = string.Equals(options.HardwareAccelerationType, "nvenc", StringComparison.OrdinalIgnoreCase);
- var mediaEncoderVersion = _mediaEncoder.GetMediaEncoderVersion();
- var isCudaOverlaySupported = _mediaEncoder.SupportsFilter("overlay_cuda") && mediaEncoderVersion != null && mediaEncoderVersion >= _minVersionForCudaOverlay;
- var isCudaFormatConversionSupported = _mediaEncoder.SupportsFilterWithOption(FilterOptionType.ScaleCudaFormat);
- var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
+ var isV4l2 = string.Equals(videoEncoder, "h264_v4l2m2m", StringComparison.OrdinalIgnoreCase);
+ var scaleVal = isV4l2 ? 64 : 2;
- var outputPixFmt = string.Empty;
- if (isCudaFormatConversionSupported)
+ // If fixed dimensions were supplied
+ if (requestedWidth.HasValue && requestedHeight.HasValue)
+ {
+ if (isV4l2)
{
- outputPixFmt = (hasGraphicalSubs && isCudaOverlaySupported && isNvencEncoder)
- ? "format=yuv420p"
- : "format=nv12";
- if ((isOpenclTonemappingSupported || isCudaTonemappingSupported)
- && isTonemappingSupportedOnNvenc)
- {
- outputPixFmt = "format=p010";
- }
- }
+ var widthParam = requestedWidth.Value.ToString(CultureInfo.InvariantCulture);
+ var heightParam = requestedHeight.Value.ToString(CultureInfo.InvariantCulture);
- if (!videoWidth.HasValue
- || outputWidth != videoWidth.Value
- || !videoHeight.HasValue
- || outputHeight != videoHeight.Value)
- {
filters.Add(
string.Format(
CultureInfo.InvariantCulture,
- "scale_cuda=w={0}:h={1}{2}",
- outputWidth,
- outputHeight,
- isCudaFormatConversionSupported ? (":" + outputPixFmt) : string.Empty));
+ "scale=trunc({0}/64)*64:trunc({1}/2)*2",
+ widthParam,
+ heightParam));
}
- else if (isCudaFormatConversionSupported)
+ else
{
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale_cuda={0}",
- outputPixFmt));
+ filters.Add(GetFixedSwScaleFilter(threedFormat, requestedWidth.Value, requestedHeight.Value));
}
}
- else if ((videoDecoder ?? string.Empty).IndexOf("cuvid", StringComparison.OrdinalIgnoreCase) != -1
- && width.HasValue
- && height.HasValue)
- {
- // Nothing to do, it's handled as an input resize filter
- }
- else
+
+ // If max dimensions were supplied: clamp width and height to the smaller of the input size (accounting for display aspect ratio) and the requested max, truncating the width to a multiple of 64 for V4L2 (2 otherwise) and the height to an even value
+ else if (requestedMaxWidth.HasValue && requestedMaxHeight.HasValue)
{
- var isExynosV4L2 = string.Equals(videoEncoder, "h264_v4l2m2m", StringComparison.OrdinalIgnoreCase);
+ var maxWidthParam = requestedMaxWidth.Value.ToString(CultureInfo.InvariantCulture);
+ var maxHeightParam = requestedMaxHeight.Value.ToString(CultureInfo.InvariantCulture);
- // If fixed dimensions were supplied
- if (requestedWidth.HasValue && requestedHeight.HasValue)
- {
- if (isExynosV4L2)
- {
- var widthParam = requestedWidth.Value.ToString(CultureInfo.InvariantCulture);
- var heightParam = requestedHeight.Value.ToString(CultureInfo.InvariantCulture);
-
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale=trunc({0}/64)*64:trunc({1}/2)*2",
- widthParam,
- heightParam));
- }
- else
- {
- filters.Add(GetFixedSizeScalingFilter(threedFormat, requestedWidth.Value, requestedHeight.Value));
- }
- }
+ filters.Add(
+ string.Format(
+ CultureInfo.InvariantCulture,
+ "scale=trunc(min(max(iw\\,ih*dar)\\,min({0}\\,{1}*dar))/{2})*{2}:trunc(min(max(iw/dar\\,ih)\\,min({0}/dar\\,{1}))/2)*2",
+ maxWidthParam,
+ maxHeightParam,
+ scaleVal));
+ }
- // If Max dimensions were supplied, for width selects lowest even number between input width and width req size and selects lowest even number from in width*display aspect and requested size
- else if (requestedMaxWidth.HasValue && requestedMaxHeight.HasValue)
+ // If a fixed width was requested
+ else if (requestedWidth.HasValue)
+ {
+ if (threedFormat.HasValue)
{
- var maxWidthParam = requestedMaxWidth.Value.ToString(CultureInfo.InvariantCulture);
- var maxHeightParam = requestedMaxHeight.Value.ToString(CultureInfo.InvariantCulture);
-
- if (isExynosV4L2)
- {
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale=trunc(min(max(iw\\,ih*dar)\\,min({0}\\,{1}*dar))/64)*64:trunc(min(max(iw/dar\\,ih)\\,min({0}/dar\\,{1}))/2)*2",
- maxWidthParam,
- maxHeightParam));
- }
- else
- {
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale=trunc(min(max(iw\\,ih*dar)\\,min({0}\\,{1}*dar))/2)*2:trunc(min(max(iw/dar\\,ih)\\,min({0}/dar\\,{1}))/2)*2",
- maxWidthParam,
- maxHeightParam));
- }
+ // This method can handle 0 being passed in for the requested height
+ filters.Add(GetFixedSwScaleFilter(threedFormat, requestedWidth.Value, 0));
}
-
- // If a fixed width was requested
- else if (requestedWidth.HasValue)
+ else
{
- if (threedFormat.HasValue)
- {
- // This method can handle 0 being passed in for the requested height
- filters.Add(GetFixedSizeScalingFilter(threedFormat, requestedWidth.Value, 0));
- }
- else
- {
- var widthParam = requestedWidth.Value.ToString(CultureInfo.InvariantCulture);
+ var widthParam = requestedWidth.Value.ToString(CultureInfo.InvariantCulture);
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale={0}:trunc(ow/a/2)*2",
- widthParam));
- }
+ filters.Add(
+ string.Format(
+ CultureInfo.InvariantCulture,
+ "scale={0}:trunc(ow/a/2)*2",
+ widthParam));
}
+ }
- // If a fixed height was requested
- else if (requestedHeight.HasValue)
- {
- var heightParam = requestedHeight.Value.ToString(CultureInfo.InvariantCulture);
+ // If a fixed height was requested
+ else if (requestedHeight.HasValue)
+ {
+ var heightParam = requestedHeight.Value.ToString(CultureInfo.InvariantCulture);
- if (isExynosV4L2)
- {
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale=trunc(oh*a/64)*64:{0}",
- heightParam));
- }
- else
- {
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale=trunc(oh*a/2)*2:{0}",
- heightParam));
- }
- }
+ filters.Add(
+ string.Format(
+ CultureInfo.InvariantCulture,
+ "scale=trunc(oh*a/{1})*{1}:{0}",
+ heightParam,
+ scaleVal));
+ }
- // If a max width was requested
- else if (requestedMaxWidth.HasValue)
- {
- var maxWidthParam = requestedMaxWidth.Value.ToString(CultureInfo.InvariantCulture);
+ // If a max width was requested
+ else if (requestedMaxWidth.HasValue)
+ {
+ var maxWidthParam = requestedMaxWidth.Value.ToString(CultureInfo.InvariantCulture);
- if (isExynosV4L2)
- {
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale=trunc(min(max(iw\\,ih*dar)\\,{0})/64)*64:trunc(ow/dar/2)*2",
- maxWidthParam));
- }
- else
- {
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale=trunc(min(max(iw\\,ih*dar)\\,{0})/2)*2:trunc(ow/dar/2)*2",
- maxWidthParam));
- }
- }
+ filters.Add(
+ string.Format(
+ CultureInfo.InvariantCulture,
+ "scale=trunc(min(max(iw\\,ih*dar)\\,{0})/{1})*{1}:trunc(ow/dar/2)*2",
+ maxWidthParam,
+ scaleVal));
+ }
- // If a max height was requested
- else if (requestedMaxHeight.HasValue)
- {
- var maxHeightParam = requestedMaxHeight.Value.ToString(CultureInfo.InvariantCulture);
+ // If a max height was requested
+ else if (requestedMaxHeight.HasValue)
+ {
+ var maxHeightParam = requestedMaxHeight.Value.ToString(CultureInfo.InvariantCulture);
- if (isExynosV4L2)
- {
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale=trunc(oh*a/64)*64:min(max(iw/dar\\,ih)\\,{0})",
- maxHeightParam));
- }
- else
- {
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "scale=trunc(oh*a/2)*2:min(max(iw/dar\\,ih)\\,{0})",
- maxHeightParam));
- }
- }
+ filters.Add(
+ string.Format(
+ CultureInfo.InvariantCulture,
+ "scale=trunc(oh*a/{1})*{1}:min(max(iw/dar\\,ih)\\,{0})",
+ maxHeightParam,
+ scaleVal));
}
return filters;
}
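To make the max-dimension branch above concrete, a minimal sketch (hypothetical limits, not part of the patch) of the filter string it emits for MaxWidth=1280 and MaxHeight=720 on a non-V4L2 encoder; ffmpeg then evaluates the expression per input to an even size that fits inside the box while preserving aspect ratio.

using System.Globalization;

class SwScaleExample
{
    static void Main()
    {
        // Same format string as the max-dimension branch above, filled in for
        // requestedMaxWidth=1280, requestedMaxHeight=720 and scaleVal=2 (non-V4L2).
        var filter = string.Format(
            CultureInfo.InvariantCulture,
            "scale=trunc(min(max(iw\\,ih*dar)\\,min({0}\\,{1}*dar))/{2})*{2}:trunc(min(max(iw/dar\\,ih)\\,min({0}/dar\\,{1}))/2)*2",
            1280, 720, 2);
        // scale=trunc(min(max(iw\,ih*dar)\,min(1280\,720*dar))/2)*2:trunc(min(max(iw/dar\,ih)\,min(1280/dar\,720))/2)*2
        System.Console.WriteLine(filter);
    }
}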
- private string GetFixedSizeScalingFilter(Video3DFormat? threedFormat, int requestedWidth, int requestedHeight)
+ private static string GetFixedSwScaleFilter(Video3DFormat? threedFormat, int requestedWidth, int requestedHeight)
{
var widthParam = requestedWidth.ToString(CultureInfo.InvariantCulture);
var heightParam = requestedHeight.ToString(CultureInfo.InvariantCulture);
@@ -2615,572 +2581,2144 @@ namespace MediaBrowser.Controller.MediaEncoding
return string.Format(CultureInfo.InvariantCulture, filter, widthParam, heightParam);
}
- /// <summary>
- /// Gets the output size parameter.
- /// </summary>
- /// <param name="state">Encoding state.</param>
- /// <param name="options">Encoding options.</param>
- /// <param name="outputVideoCodec">Video codec to use.</param>
- /// <returns>The output size parameter.</returns>
- public string GetOutputSizeParam(
- EncodingJobInfo state,
- EncodingOptions options,
- string outputVideoCodec)
+ public static string GetSwDeinterlaceFilter(EncodingJobInfo state, EncodingOptions options)
+ {
+ var doubleRateDeint = options.DeinterlaceDoubleRate && (state.VideoStream?.AverageFrameRate ?? 60) <= 30;
+ if (string.Equals(options.DeinterlaceMethod, "bwdif", StringComparison.OrdinalIgnoreCase))
+ {
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ "bwdif={0}:-1:0",
+ doubleRateDeint ? "1" : "0");
+ }
+ else
+ {
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ "yadif={0}:-1:0",
+ doubleRateDeint ? "1" : "0");
+ }
+ }
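A self-contained sketch (not part of the patch) of the same selection logic, written without the Jellyfin types, to show the strings GetSwDeinterlaceFilter is expected to return:

using System;

class SwDeintExample
{
    static string Pick(string method, float avgFps, bool doubleRateEnabled)
    {
        // Double-rate output only when enabled and the input is 30 fps or below.
        var doubleRate = doubleRateEnabled && avgFps <= 30 ? "1" : "0";
        return string.Equals(method, "bwdif", StringComparison.OrdinalIgnoreCase)
            ? $"bwdif={doubleRate}:-1:0"
            : $"yadif={doubleRate}:-1:0";
    }

    static void Main()
    {
        Console.WriteLine(Pick("bwdif", 23.976f, true)); // bwdif=1:-1:0
        Console.WriteLine(Pick("yadif", 59.94f, true));  // yadif=0:-1:0
    }
}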
+
+ public static string GetHwDeinterlaceFilter(EncodingJobInfo state, EncodingOptions options, string hwDeintSuffix)
+ {
+ var doubleRateDeint = options.DeinterlaceDoubleRate && (state.VideoStream?.AverageFrameRate ?? 60) <= 30;
+ if (hwDeintSuffix.Contains("cuda", StringComparison.OrdinalIgnoreCase))
+ {
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ "yadif_cuda={0}:-1:0",
+ doubleRateDeint ? "1" : "0");
+ }
+ else if (hwDeintSuffix.Contains("vaapi", StringComparison.OrdinalIgnoreCase))
+ {
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ "deinterlace_vaapi=rate={0}",
+ doubleRateDeint ? "field" : "frame");
+ }
+ else if (hwDeintSuffix.Contains("qsv", StringComparison.OrdinalIgnoreCase))
+ {
+ return "deinterlace_qsv=mode=2";
+ }
+
+ return string.Empty;
+ }
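For quick reference, a compact sketch (not part of the patch) listing the per-vendor deinterlacers chosen above, assuming double-rate is enabled and the input is at or below 30 fps:

using System;
using System.Collections.Generic;

class HwDeintExample
{
    static void Main()
    {
        var bySuffix = new Dictionary<string, string>
        {
            ["cuda"] = "yadif_cuda=1:-1:0",
            ["vaapi"] = "deinterlace_vaapi=rate=field",
            ["qsv"] = "deinterlace_qsv=mode=2"
        };
        foreach (var (suffix, filter) in bySuffix)
        {
            Console.WriteLine($"{suffix}: {filter}");
        }
    }
}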
+
+ public static string GetHwTonemapFilter(EncodingOptions options, string hwTonemapSuffix, string videoFormat)
{
- string filters = GetOutputSizeParamInternal(state, options, outputVideoCodec);
- return string.IsNullOrEmpty(filters) ? string.Empty : " -vf \"" + filters + "\"";
+ if (string.IsNullOrEmpty(hwTonemapSuffix))
+ {
+ return string.Empty;
+ }
+
+ var args = "tonemap_{0}=format={1}:p=bt709:t=bt709:m=bt709";
+
+ if (!hwTonemapSuffix.Contains("vaapi", StringComparison.OrdinalIgnoreCase))
+ {
+ args += ":tonemap={2}:peak={3}:desat={4}";
+
+ if (options.TonemappingParam != 0)
+ {
+ args += ":param={5}";
+ }
+
+ if (!string.Equals(options.TonemappingRange, "auto", StringComparison.OrdinalIgnoreCase))
+ {
+ args += ":range={6}";
+ }
+ }
+
+ return string.Format(
+ CultureInfo.InvariantCulture,
+ args,
+ hwTonemapSuffix,
+ videoFormat ?? "nv12",
+ options.TonemappingAlgorithm,
+ options.TonemappingPeak,
+ options.TonemappingDesat,
+ options.TonemappingParam,
+ options.TonemappingRange);
}
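A minimal sketch (hypothetical option values, not part of the patch) of the tonemap string this method produces on the CUDA path, assuming the 'hable' algorithm, peak=100, desat=0, param unset and range left at auto, so the param and range keys are omitted:

using System.Globalization;

class HwTonemapExample
{
    static void Main()
    {
        var filter = string.Format(
            CultureInfo.InvariantCulture,
            "tonemap_{0}=format={1}:p=bt709:t=bt709:m=bt709:tonemap={2}:peak={3}:desat={4}",
            "cuda", "yuv420p", "hable", 100, 0);
        // tonemap_cuda=format=yuv420p:p=bt709:t=bt709:m=bt709:tonemap=hable:peak=100:desat=0
        System.Console.WriteLine(filter);
    }
}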
/// <summary>
- /// Gets the output size parameter.
- /// If we're going to put a fixed size on the command line, this will calculate it.
+ /// Gets the parameter of software filter chain.
/// </summary>
/// <param name="state">Encoding state.</param>
/// <param name="options">Encoding options.</param>
- /// <param name="outputVideoCodec">Video codec to use.</param>
- /// <returns>The output size parameter.</returns>
- public string GetOutputSizeParamInternal(
+ /// <param name="vidEncoder">Video encoder to use.</param>
+ /// <returns>The tuple contains three lists: main, sub and overlay filters.</returns>
+ public Tuple<List<string>, List<string>, List<string>> GetSwVidFilterChain(
EncodingJobInfo state,
EncodingOptions options,
- string outputVideoCodec)
+ string vidEncoder)
{
- // http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/
-
- var request = state.BaseRequest;
- var videoStream = state.VideoStream;
- var filters = new List<string>();
-
- var videoDecoder = GetHardwareAcceleratedVideoDecoder(state, options) ?? string.Empty;
- var inputWidth = videoStream?.Width;
- var inputHeight = videoStream?.Height;
+ var inW = state.VideoStream?.Width;
+ var inH = state.VideoStream?.Height;
+ var reqW = state.BaseRequest.Width;
+ var reqH = state.BaseRequest.Height;
+ var reqMaxW = state.BaseRequest.MaxWidth;
+ var reqMaxH = state.BaseRequest.MaxHeight;
var threeDFormat = state.MediaSource.Video3DFormat;
- var isSwDecoder = string.IsNullOrEmpty(videoDecoder);
- var isD3d11vaDecoder = videoDecoder.IndexOf("d3d11va", StringComparison.OrdinalIgnoreCase) != -1;
- var isVaapiDecoder = videoDecoder.IndexOf("vaapi", StringComparison.OrdinalIgnoreCase) != -1;
- var isVaapiEncoder = outputVideoCodec.IndexOf("vaapi", StringComparison.OrdinalIgnoreCase) != -1;
- var isVaapiH264Encoder = outputVideoCodec.IndexOf("h264_vaapi", StringComparison.OrdinalIgnoreCase) != -1;
- var isVaapiHevcEncoder = outputVideoCodec.IndexOf("hevc_vaapi", StringComparison.OrdinalIgnoreCase) != -1;
- var isQsvH264Encoder = outputVideoCodec.IndexOf("h264_qsv", StringComparison.OrdinalIgnoreCase) != -1;
- var isQsvHevcEncoder = outputVideoCodec.IndexOf("hevc_qsv", StringComparison.OrdinalIgnoreCase) != -1;
- var isNvdecDecoder = videoDecoder.Contains("cuda", StringComparison.OrdinalIgnoreCase);
- var isNvencEncoder = outputVideoCodec.Contains("nvenc", StringComparison.OrdinalIgnoreCase);
- var isCuvidH264Decoder = videoDecoder.Contains("h264_cuvid", StringComparison.OrdinalIgnoreCase);
- var isCuvidHevcDecoder = videoDecoder.Contains("hevc_cuvid", StringComparison.OrdinalIgnoreCase);
- var isCuvidVp9Decoder = videoDecoder.Contains("vp9_cuvid", StringComparison.OrdinalIgnoreCase);
- var isLibX264Encoder = outputVideoCodec.IndexOf("libx264", StringComparison.OrdinalIgnoreCase) != -1;
- var isLibX265Encoder = outputVideoCodec.IndexOf("libx265", StringComparison.OrdinalIgnoreCase) != -1;
- var isLinux = OperatingSystem.IsLinux();
- var isColorDepth10 = IsColorDepth10(state);
-
- var isTonemappingSupportedOnNvenc = string.Equals(options.HardwareAccelerationType, "nvenc", StringComparison.OrdinalIgnoreCase) && (isNvdecDecoder || isCuvidHevcDecoder || isCuvidVp9Decoder || isSwDecoder);
- var isTonemappingSupportedOnAmf = string.Equals(options.HardwareAccelerationType, "amf", StringComparison.OrdinalIgnoreCase) && (isD3d11vaDecoder || isSwDecoder);
- var isTonemappingSupportedOnVaapi = string.Equals(options.HardwareAccelerationType, "vaapi", StringComparison.OrdinalIgnoreCase) && isVaapiDecoder && (isVaapiH264Encoder || isVaapiHevcEncoder);
- var isTonemappingSupportedOnQsv = string.Equals(options.HardwareAccelerationType, "qsv", StringComparison.OrdinalIgnoreCase) && isVaapiDecoder && (isQsvH264Encoder || isQsvHevcEncoder);
- var isOpenclTonemappingSupported = IsOpenclTonemappingSupported(state, options);
- var isVppTonemappingSupported = IsVppTonemappingSupported(state, options);
- var isCudaTonemappingSupported = IsCudaTonemappingSupported(state, options);
- var mediaEncoderVersion = _mediaEncoder.GetMediaEncoderVersion();
- var isCudaOverlaySupported = _mediaEncoder.SupportsFilter("overlay_cuda") && mediaEncoderVersion != null && mediaEncoderVersion >= _minVersionForCudaOverlay;
+ var vidDecoder = GetHardwareVideoDecoder(state, options) ?? string.Empty;
+ var isSwDecoder = string.IsNullOrEmpty(vidDecoder);
+ var isVaapiEncoder = vidEncoder.Contains("vaapi", StringComparison.OrdinalIgnoreCase);
+
+ var doDeintH264 = state.DeInterlace("h264", true) || state.DeInterlace("avc", true);
+ var doDeintHevc = state.DeInterlace("h265", true) || state.DeInterlace("hevc", true);
+ var doDeintH2645 = doDeintH264 || doDeintHevc;
var hasSubs = state.SubtitleStream != null && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
- var hasTextSubs = state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
- var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
-
- // If double rate deinterlacing is enabled and the input framerate is 30fps or below, otherwise the output framerate will be too high for many devices
- var doubleRateDeinterlace = options.DeinterlaceDoubleRate && (videoStream?.AverageFrameRate ?? 60) <= 30;
-
- var isScalingInAdvance = false;
- var isCudaDeintInAdvance = false;
- var isHwuploadCudaRequired = false;
- var isNoTonemapFilterApplied = true;
- var isDeinterlaceH264 = state.DeInterlace("h264", true) || state.DeInterlace("avc", true);
- var isDeinterlaceHevc = state.DeInterlace("h265", true) || state.DeInterlace("hevc", true);
-
- // Add OpenCL tonemapping filter for NVENC/AMF/VAAPI.
- if ((isTonemappingSupportedOnNvenc && !isCudaTonemappingSupported) || isTonemappingSupportedOnAmf || (isTonemappingSupportedOnVaapi && !isVppTonemappingSupported))
- {
- // NVIDIA Pascal and Turing or higher are recommended.
- // AMD Polaris and Vega or higher are recommended.
- // Intel Kaby Lake or newer is required.
- if (isOpenclTonemappingSupported)
- {
- isNoTonemapFilterApplied = false;
- var inputHdrParams = GetInputHdrParams(videoStream.ColorTransfer);
- if (!string.IsNullOrEmpty(inputHdrParams))
- {
- filters.Add(inputHdrParams);
- }
+ var hasTextSubs = hasSubs && state.SubtitleStream.IsTextSubtitleStream;
+ var hasGraphicalSubs = hasSubs && !state.SubtitleStream.IsTextSubtitleStream;
- var parameters = "tonemap_opencl=format=nv12:primaries=bt709:transfer=bt709:matrix=bt709:tonemap={0}:desat={1}:threshold={2}:peak={3}";
+ /* Make main filters for video stream */
+ var mainFilters = new List<string>();
- if (options.TonemappingParam != 0)
- {
- parameters += ":param={4}";
- }
+ mainFilters.Add(GetOverwriteColorPropertiesParam(state, false));
- if (!string.Equals(options.TonemappingRange, "auto", StringComparison.OrdinalIgnoreCase))
- {
- parameters += ":range={5}";
- }
+ // INPUT sw surface(memory/copy-back from vram)
+ var outFormat = isSwDecoder ? "yuv420p" : "nv12";
+ var swScaleFilter = GetSwScaleFilter(state, options, vidEncoder, inW, inH, threeDFormat, reqW, reqH, reqMaxW, reqMaxH);
+ if (isVaapiEncoder)
+ {
+ outFormat = "nv12";
+ }
+ // sw scale
+ mainFilters.AddRange(swScaleFilter);
+ mainFilters.Add("format=" + outFormat);
- if (isSwDecoder || isD3d11vaDecoder)
- {
- isScalingInAdvance = true;
- // Add zscale filter before tone mapping filter for performance.
- var (width, height) = GetFixedOutputSize(inputWidth, inputHeight, request.Width, request.Height, request.MaxWidth, request.MaxHeight);
- if (width.HasValue && height.HasValue)
- {
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "zscale=s={0}x{1}",
- width.Value,
- height.Value));
- }
+ if (doDeintH2645)
+ {
+ var deintFilter = GetSwDeinterlaceFilter(state, options);
+ // sw deint
+ mainFilters.Add(deintFilter);
+ }
- // Convert to hardware pixel format p010 when using SW decoder.
- filters.Add("format=p010");
- }
+ // sw tonemap <= TODO: finish the fast tonemap filter
- if ((isDeinterlaceH264 || isDeinterlaceHevc) && isNvdecDecoder)
- {
- isCudaDeintInAdvance = true;
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "yadif_cuda={0}:-1:0",
- doubleRateDeinterlace ? "1" : "0"));
- }
+ // OUTPUT yuv420p/nv12 surface(memory)
- if (isVaapiDecoder || isNvdecDecoder)
- {
- isScalingInAdvance = true;
- filters.AddRange(
- GetScalingFilters(
- state,
- options,
- inputWidth,
- inputHeight,
- threeDFormat,
- videoDecoder,
- outputVideoCodec,
- request.Width,
- request.Height,
- request.MaxWidth,
- request.MaxHeight));
- }
+ /* Make sub and overlay filters for subtitle stream */
+ var subFilters = new List<string>();
+ var overlayFilters = new List<string>();
+ if (hasTextSubs)
+ {
+ // subtitles=f='*.ass':alpha=0
+ var textSubtitlesFilter = GetTextSubtitlesFilter(state, false, false);
+ mainFilters.Add(textSubtitlesFilter);
+ }
+ else if (hasGraphicalSubs)
+ {
+ // [0:s]scale=s=1280x720
+ var subSwScaleFilter = GetCustomSwScaleFilter(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ subFilters.Add(subSwScaleFilter);
+ overlayFilters.Add("overlay=eof_action=endall:shortest=1:repeatlast=0");
+ }
- // hwmap the HDR data to opencl device by cl-va p010 interop.
- if (isVaapiDecoder)
- {
- filters.Add("hwmap");
- }
+ return new Tuple<List<string>, List<string>, List<string>>(mainFilters, subFilters, overlayFilters);
+ }
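The code that consumes this tuple is outside this hunk; a hedged sketch (hypothetical filter values) of one plausible way the three lists map onto an ffmpeg -filter_complex graph when graphical subtitles are burned in:

using System;
using System.Collections.Generic;

class SwChainAssemblyExample
{
    static void Main()
    {
        var main = new List<string> { "format=yuv420p", "yadif=0:-1:0" };
        var sub = new List<string> { "scale=s=1280x720" };
        var overlay = new List<string> { "overlay=eof_action=endall:shortest=1:repeatlast=0" };

        // [0:v] is the video stream, [0:s] the graphical subtitle stream.
        var graph = $"[0:v]{string.Join(",", main)}[main];" +
                    $"[0:s]{string.Join(",", sub)}[sub];" +
                    $"[main][sub]{string.Join(",", overlay)}";
        Console.WriteLine($"-filter_complex \"{graph}\"");
    }
}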
- // convert cuda device data to p010 host data.
- if (isNvdecDecoder)
- {
- filters.Add("hwdownload,format=p010");
- }
+ /// <summary>
+ /// Gets the parameter of Nvidia NVENC filter chain.
+ /// </summary>
+ /// <param name="state">Encoding state.</param>
+ /// <param name="options">Encoding options.</param>
+ /// <param name="vidEncoder">Video encoder to use.</param>
+ /// <returns>The tuple contains three lists: main, sub and overlay filters.</returns>
+ public Tuple<List<string>, List<string>, List<string>> GetNvidiaVidFilterChain(
+ EncodingJobInfo state,
+ EncodingOptions options,
+ string vidEncoder)
+ {
+ if (!string.Equals(options.HardwareAccelerationType, "nvenc", StringComparison.OrdinalIgnoreCase))
+ {
+ return new Tuple<List<string>, List<string>, List<string>>(null, null, null);
+ }
- if (isNvdecDecoder
- || isCuvidHevcDecoder
- || isCuvidVp9Decoder
- || isSwDecoder
- || isD3d11vaDecoder)
- {
- // Upload the HDR10 or HLG data to the OpenCL device,
- // use tonemap_opencl filter for tone mapping,
- // and then download the SDR data to memory.
- filters.Add("hwupload");
- }
+ var vidDecoder = GetHardwareVideoDecoder(state, options) ?? string.Empty;
+ var isSwDecoder = string.IsNullOrEmpty(vidDecoder);
+ var isSwEncoder = !vidEncoder.Contains("nvenc", StringComparison.OrdinalIgnoreCase);
- // Fallback to hable if bt2390 is chosen but not supported in tonemap_opencl.
- var isBt2390SupportedInOpenclTonemap = _mediaEncoder.SupportsFilterWithOption(FilterOptionType.TonemapOpenclBt2390);
- if (string.Equals(options.TonemappingAlgorithm, "bt2390", StringComparison.OrdinalIgnoreCase)
- && !isBt2390SupportedInOpenclTonemap)
- {
- options.TonemappingAlgorithm = "hable";
- }
+ // legacy cuvid(resize/deint/sw) pipeline(copy-back)
+ if ((isSwDecoder && isSwEncoder)
+ || !IsCudaFullSupported()
+ || !options.EnableEnhancedNvdecDecoder
+ || !_mediaEncoder.SupportsFilter("alphasrc"))
+ {
+ return GetSwVidFilterChain(state, options, vidEncoder);
+ }
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- parameters,
- options.TonemappingAlgorithm,
- options.TonemappingDesat,
- options.TonemappingThreshold,
- options.TonemappingPeak,
- options.TonemappingParam,
- options.TonemappingRange));
-
- if (isNvdecDecoder
- || isCuvidHevcDecoder
- || isCuvidVp9Decoder
- || isSwDecoder
- || isD3d11vaDecoder)
- {
- filters.Add("hwdownload");
- filters.Add("format=nv12");
- }
+ // preferred nvdec + cuda filters + nvenc pipeline
+ return GetNvidiaVidFiltersPrefered(state, options, vidDecoder, vidEncoder);
+ }
- if (isNvdecDecoder && isNvencEncoder)
- {
- isHwuploadCudaRequired = true;
- }
+ public Tuple<List<string>, List<string>, List<string>> GetNvidiaVidFiltersPrefered(
+ EncodingJobInfo state,
+ EncodingOptions options,
+ string vidDecoder,
+ string vidEncoder)
+ {
+ var inW = state.VideoStream?.Width;
+ var inH = state.VideoStream?.Height;
+ var reqW = state.BaseRequest.Width;
+ var reqH = state.BaseRequest.Height;
+ var reqMaxW = state.BaseRequest.MaxWidth;
+ var reqMaxH = state.BaseRequest.MaxHeight;
+ var threeDFormat = state.MediaSource.Video3DFormat;
- if (isVaapiDecoder)
- {
- // Reverse the data route from opencl to vaapi.
- filters.Add("hwmap=derive_device=vaapi:reverse=1");
- }
+ var isNvdecDecoder = vidDecoder.Contains("cuda", StringComparison.OrdinalIgnoreCase);
+ var isNvencEncoder = vidEncoder.Contains("nvenc", StringComparison.OrdinalIgnoreCase);
+ var isSwDecoder = string.IsNullOrEmpty(vidDecoder);
+ var isSwEncoder = !isNvencEncoder;
+ var isCuInCuOut = isNvdecDecoder && isNvencEncoder;
- var outputSdrParams = GetOutputSdrParams(options.TonemappingRange);
- if (!string.IsNullOrEmpty(outputSdrParams))
- {
- filters.Add(outputSdrParams);
- }
+ var doubleRateDeint = options.DeinterlaceDoubleRate && (state.VideoStream?.AverageFrameRate ?? 60) <= 30;
+ var doDeintH264 = state.DeInterlace("h264", true) || state.DeInterlace("avc", true);
+ var doDeintHevc = state.DeInterlace("h265", true) || state.DeInterlace("hevc", true);
+ var doDeintH2645 = doDeintH264 || doDeintHevc;
+ var doCuTonemap = IsHwTonemapAvailable(state, options);
+
+ var hasSubs = state.SubtitleStream != null && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
+ var hasTextSubs = hasSubs && state.SubtitleStream.IsTextSubtitleStream;
+ var hasGraphicalSubs = hasSubs && !state.SubtitleStream.IsTextSubtitleStream;
+ var hasAssSubs = hasSubs
+ && (string.Equals(state.SubtitleStream.Codec, "ass", StringComparison.OrdinalIgnoreCase)
+ || string.Equals(state.SubtitleStream.Codec, "ssa", StringComparison.OrdinalIgnoreCase));
+
+ /* Make main filters for video stream */
+ var mainFilters = new List<string>();
+
+ mainFilters.Add(GetOverwriteColorPropertiesParam(state, doCuTonemap));
+
+ if (isSwDecoder)
+ {
+ // INPUT sw surface(memory)
+ var outFormat = doCuTonemap ? "yuv420p10" : "yuv420p";
+ var swScaleFilter = GetSwScaleFilter(state, options, vidEncoder, inW, inH, threeDFormat, reqW, reqH, reqMaxW, reqMaxH);
+ // sw scale
+ mainFilters.AddRange(swScaleFilter);
+ mainFilters.Add("format=" + outFormat);
+ // sw deint
+ if (doDeintH2645)
+ {
+ var swDeintFilter = GetSwDeinterlaceFilter(state, options);
+ mainFilters.Add(swDeintFilter);
+ }
+ // sw => hw
+ if (doCuTonemap)
+ {
+ mainFilters.Add("hwupload");
}
}
-
- // When the input may or may not be hardware VAAPI decodable.
- if ((isVaapiH264Encoder || isVaapiHevcEncoder)
- && !(isTonemappingSupportedOnVaapi && (isOpenclTonemappingSupported || isVppTonemappingSupported)))
+ if (isNvdecDecoder)
{
- filters.Add("format=nv12|vaapi");
- filters.Add("hwupload");
+ // INPUT cuda surface(vram)
+ var outFormat = doCuTonemap ? string.Empty : "yuv420p";
+ var hwScaleFilter = GetHwScaleFilter("cuda", outFormat, inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ // hw scale
+ mainFilters.Add(hwScaleFilter);
+ // hw deint
+ if (doDeintH2645)
+ {
+ var deintFilter = GetHwDeinterlaceFilter(state, options, "cuda");
+ mainFilters.Add(deintFilter);
+ }
}
- // When burning in graphical subtitles using overlay_qsv, upload videostream to the same qsv context.
- else if (isLinux && hasGraphicalSubs && (isQsvH264Encoder || isQsvHevcEncoder)
- && !(isTonemappingSupportedOnQsv && isVppTonemappingSupported))
+ // hw tonemap
+ if (doCuTonemap)
{
- filters.Add("hwupload=extra_hw_frames=64");
+ var tonemapFilter = GetHwTonemapFilter(options, "cuda", "yuv420p");
+ mainFilters.Add(tonemapFilter);
}
- // If we're hardware VAAPI decoding and software encoding, download frames from the decoder first.
- else if ((IsVaapiSupported(state) && isVaapiDecoder) && (isLibX264Encoder || isLibX265Encoder)
- && !(isTonemappingSupportedOnQsv && isVppTonemappingSupported))
+ var memoryOutput = false;
+ var isUploadForOclTonemap = isSwDecoder && doCuTonemap;
+ if ((isNvdecDecoder && isSwEncoder) || isUploadForOclTonemap)
{
- var codec = videoStream.Codec;
+ memoryOutput = true;
- // Assert 10-bit hardware VAAPI decodable
- if (isColorDepth10 && (string.Equals(codec, "hevc", StringComparison.OrdinalIgnoreCase)
- || string.Equals(codec, "h265", StringComparison.OrdinalIgnoreCase)
- || string.Equals(codec, "vp9", StringComparison.OrdinalIgnoreCase)))
- {
- /*
- Download data from GPU to CPU as p010le format.
- Colorspace conversion is unnecessary here as libx264 will handle it.
- If this step is missing, it will fail on AMD but not on intel.
- */
- filters.Add("hwdownload");
- filters.Add("format=p010le");
- }
+ // OUTPUT yuv420p surface(memory)
+ mainFilters.Add("hwdownload");
+ mainFilters.Add("format=yuv420p");
+ }
+
+ // OUTPUT yuv420p surface(memory)
+ if (isSwDecoder && isNvencEncoder)
+ {
+ memoryOutput = true;
+ }
- // Assert 8-bit hardware VAAPI decodable
- else if (!isColorDepth10)
+ if (memoryOutput)
+ {
+ // text subtitles
+ if (hasTextSubs)
{
- filters.Add("hwdownload");
- filters.Add("format=nv12");
+ var textSubtitlesFilter = GetTextSubtitlesFilter(state, false, false);
+ mainFilters.Add(textSubtitlesFilter);
}
}
- // Add hardware deinterlace filter before scaling filter.
- if (isDeinterlaceH264 || isDeinterlaceHevc)
+ // OUTPUT cuda(yuv420p) surface(vram)
+
+ /* Make sub and overlay filters for subtitle stream */
+ var subFilters = new List<string>();
+ var overlayFilters = new List<string>();
+ if (isCuInCuOut)
{
- if (isVaapiEncoder
- || (isTonemappingSupportedOnQsv && isVppTonemappingSupported))
+ if (hasSubs)
{
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "deinterlace_vaapi=rate={0}",
- doubleRateDeinterlace ? "field" : "frame"));
+ if (hasGraphicalSubs)
+ {
+ // scale=s=1280x720,format=yuva420p,hwupload
+ var subSwScaleFilter = GetCustomSwScaleFilter(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ subFilters.Add(subSwScaleFilter);
+ subFilters.Add("format=yuva420p");
+ }
+ else if (hasTextSubs)
+ {
+ // alphasrc=s=1280x720:r=10:start=0,format=yuva420p,subtitles,hwupload
+ var alphaSrcFilter = GetAlphaSrcFilter(state, inW, inH, reqW, reqH, reqMaxW, reqMaxH, hasAssSubs ? 10 : 5);
+ var subTextSubtitlesFilter = GetTextSubtitlesFilter(state, true, true);
+ subFilters.Add(alphaSrcFilter);
+ subFilters.Add("format=yuva420p");
+ subFilters.Add(subTextSubtitlesFilter);
+ }
+
+ subFilters.Add("hwupload");
+ overlayFilters.Add("overlay_cuda=eof_action=endall:shortest=1:repeatlast=0");
}
- else if (isNvdecDecoder && !isCudaDeintInAdvance)
+ }
+ else
+ {
+ if (hasGraphicalSubs)
{
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "yadif_cuda={0}:-1:0",
- doubleRateDeinterlace ? "1" : "0"));
+ var subSwScaleFilter = GetCustomSwScaleFilter(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ subFilters.Add(subSwScaleFilter);
+ overlayFilters.Add("overlay=eof_action=endall:shortest=1:repeatlast=0");
}
}
- // Add software deinterlace filter before scaling filter.
- if ((isDeinterlaceH264 || isDeinterlaceHevc)
- && !isVaapiH264Encoder
- && !isVaapiHevcEncoder
- && !isQsvH264Encoder
- && !isQsvHevcEncoder
- && !isNvdecDecoder
- && !isCuvidH264Decoder)
+ return new Tuple<List<string>, List<string>, List<string>>(mainFilters, subFilters, overlayFilters);
+ }
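A hedged sketch, based on the in-code comments above, of the sub and overlay lists built for burning in ASS text subtitles on the CUDA path; the subtitles entry below is hypothetical, since the real string comes from GetTextSubtitlesFilter:

using System;
using System.Collections.Generic;

class NvidiaSubOverlayExample
{
    static void Main()
    {
        var subFilters = new List<string>
        {
            "alphasrc=s=1280x720:r=10:start='0'",
            "format=yuva420p",
            "subtitles=f='sub.ass':alpha=1", // hypothetical; produced by GetTextSubtitlesFilter
            "hwupload"
        };
        var overlayFilters = new List<string>
        {
            "overlay_cuda=eof_action=endall:shortest=1:repeatlast=0"
        };

        Console.WriteLine(string.Join(",", subFilters));
        Console.WriteLine(string.Join(",", overlayFilters));
    }
}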
+
+ /// <summary>
+ /// Gets the parameter of AMD AMF filter chain.
+ /// </summary>
+ /// <param name="state">Encoding state.</param>
+ /// <param name="options">Encoding options.</param>
+ /// <param name="vidEncoder">Video encoder to use.</param>
+ /// <returns>The tuple contains three lists: main, sub and overlay filters.</returns>
+ public Tuple<List<string>, List<string>, List<string>> GetAmdVidFilterChain(
+ EncodingJobInfo state,
+ EncodingOptions options,
+ string vidEncoder)
+ {
+ if (!string.Equals(options.HardwareAccelerationType, "amf", StringComparison.OrdinalIgnoreCase))
+ {
+ return new Tuple<List<string>, List<string>, List<string>>(null, null, null);
+ }
+
+ var isWindows = OperatingSystem.IsWindows();
+ var vidDecoder = GetHardwareVideoDecoder(state, options) ?? string.Empty;
+ var isSwDecoder = string.IsNullOrEmpty(vidDecoder);
+ var isSwEncoder = !vidEncoder.Contains("amf", StringComparison.OrdinalIgnoreCase);
+ var isAmfDx11OclSupported = isWindows && _mediaEncoder.SupportsHwaccel("d3d11va") && IsOpenclFullSupported();
+
+ // legacy d3d11va pipeline(copy-back)
+ if ((isSwDecoder && isSwEncoder)
+ || !isAmfDx11OclSupported
+ || !_mediaEncoder.SupportsFilter("alphasrc"))
+ {
+ return GetSwVidFilterChain(state, options, vidEncoder);
+ }
+
+ // preferred d3d11va + opencl filters + amf pipeline
+ return GetAmdDx11VidFiltersPrefered(state, options, vidDecoder, vidEncoder);
+ }
+
+ public Tuple<List<string>, List<string>, List<string>> GetAmdDx11VidFiltersPrefered(
+ EncodingJobInfo state,
+ EncodingOptions options,
+ string vidDecoder,
+ string vidEncoder)
+ {
+ var inW = state.VideoStream?.Width;
+ var inH = state.VideoStream?.Height;
+ var reqW = state.BaseRequest.Width;
+ var reqH = state.BaseRequest.Height;
+ var reqMaxW = state.BaseRequest.MaxWidth;
+ var reqMaxH = state.BaseRequest.MaxHeight;
+ var threeDFormat = state.MediaSource.Video3DFormat;
+
+ var isD3d11vaDecoder = vidDecoder.Contains("d3d11va", StringComparison.OrdinalIgnoreCase);
+ var isAmfEncoder = vidEncoder.Contains("amf", StringComparison.OrdinalIgnoreCase);
+ var isSwDecoder = string.IsNullOrEmpty(vidDecoder);
+ var isSwEncoder = !isAmfEncoder;
+ var isDxInDxOut = isD3d11vaDecoder && isAmfEncoder;
+
+ var doDeintH264 = state.DeInterlace("h264", true) || state.DeInterlace("avc", true);
+ var doDeintHevc = state.DeInterlace("h265", true) || state.DeInterlace("hevc", true);
+ var doDeintH2645 = doDeintH264 || doDeintHevc;
+ var doOclTonemap = IsHwTonemapAvailable(state, options);
+
+ var hasSubs = state.SubtitleStream != null && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
+ var hasTextSubs = hasSubs && state.SubtitleStream.IsTextSubtitleStream;
+ var hasGraphicalSubs = hasSubs && !state.SubtitleStream.IsTextSubtitleStream;
+ var hasAssSubs = hasSubs
+ && (string.Equals(state.SubtitleStream.Codec, "ass", StringComparison.OrdinalIgnoreCase)
+ || string.Equals(state.SubtitleStream.Codec, "ssa", StringComparison.OrdinalIgnoreCase));
+
+ /* Make main filters for video stream */
+ var mainFilters = new List<string>();
+
+ mainFilters.Add(GetOverwriteColorPropertiesParam(state, doOclTonemap));
+
+ if (isSwDecoder)
{
- if (string.Equals(options.DeinterlaceMethod, "bwdif", StringComparison.OrdinalIgnoreCase))
+ // INPUT sw surface(memory)
+ var outFormat = doOclTonemap ? "yuv420p10" : "yuv420p";
+ var swScaleFilter = GetSwScaleFilter(state, options, vidEncoder, inW, inH, threeDFormat, reqW, reqH, reqMaxW, reqMaxH);
+ // sw scale
+ mainFilters.AddRange(swScaleFilter);
+ mainFilters.Add("format=" + outFormat);
+ // sw deint
+ if (doDeintH2645)
{
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "bwdif={0}:-1:0",
- doubleRateDeinterlace ? "1" : "0"));
+ var swDeintFilter = GetSwDeinterlaceFilter(state, options);
+ mainFilters.Add(swDeintFilter);
}
- else
+
+ // keep the video in system memory unless ocl tonemapping is needed,
+ // since the overhead of hwupload far outweighs using a sw filter.
+ // sw => hw
+ if (doOclTonemap)
{
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- "yadif={0}:-1:0",
- doubleRateDeinterlace ? "1" : "0"));
+ mainFilters.Add("hwupload");
}
}
- // Add scaling filter: scale_*=format=nv12 or scale_*=w=*:h=*:format=nv12 or scale=expr
- if (!isScalingInAdvance)
+ if (isD3d11vaDecoder)
{
- filters.AddRange(
- GetScalingFilters(
- state,
- options,
- inputWidth,
- inputHeight,
- threeDFormat,
- videoDecoder,
- outputVideoCodec,
- request.Width,
- request.Height,
- request.MaxWidth,
- request.MaxHeight));
+ // INPUT d3d11 surface(vram)
+ // map from d3d11va to opencl via d3d11-opencl interop.
+ mainFilters.Add("hwmap=derive_device=opencl");
+ var outFormat = doOclTonemap ? string.Empty : "nv12";
+ var hwScaleFilter = GetHwScaleFilter("opencl", outFormat, inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ // hw scale
+ mainFilters.Add(hwScaleFilter);
+
+ // hw deint <= TODO: finish the 'yadif_opencl' filter
}
- // Add Cuda tonemapping filter.
- if (isNvdecDecoder && isCudaTonemappingSupported)
+ // hw tonemap
+ if (doOclTonemap)
{
- isNoTonemapFilterApplied = false;
- var inputHdrParams = GetInputHdrParams(videoStream.ColorTransfer);
- if (!string.IsNullOrEmpty(inputHdrParams))
- {
- filters.Add(inputHdrParams);
- }
+ var tonemapFilter = GetHwTonemapFilter(options, "opencl", "nv12");
+ mainFilters.Add(tonemapFilter);
+ }
- var parameters = (hasGraphicalSubs && isCudaOverlaySupported && isNvencEncoder)
- ? "tonemap_cuda=format=yuv420p:primaries=bt709:transfer=bt709:matrix=bt709:tonemap={0}:peak={1}:desat={2}"
- : "tonemap_cuda=format=nv12:primaries=bt709:transfer=bt709:matrix=bt709:tonemap={0}:peak={1}:desat={2}";
+ var memoryOutput = false;
+ var isUploadForOclTonemap = isSwDecoder && doOclTonemap;
+ if ((isD3d11vaDecoder && isSwEncoder) || isUploadForOclTonemap)
+ {
+ memoryOutput = true;
- if (options.TonemappingParam != 0)
- {
- parameters += ":param={3}";
- }
+ // OUTPUT nv12 surface(memory)
+ // prefer hwmap to hwdownload on opencl.
+ var hwTransferFilter = hasGraphicalSubs ? "hwdownload" : "hwmap";
+ mainFilters.Add(hwTransferFilter);
+ mainFilters.Add("format=nv12");
+ }
- if (!string.Equals(options.TonemappingRange, "auto", StringComparison.OrdinalIgnoreCase))
+ // OUTPUT yuv420p surface
+ if (isSwDecoder && isAmfEncoder)
+ {
+ memoryOutput = true;
+ }
+
+ if (memoryOutput)
+ {
+ // text subtitles
+ if (hasTextSubs)
{
- parameters += ":range={4}";
+ var textSubtitlesFilter = GetTextSubtitlesFilter(state, false, false);
+ mainFilters.Add(textSubtitlesFilter);
}
+ }
- filters.Add(
- string.Format(
- CultureInfo.InvariantCulture,
- parameters,
- options.TonemappingAlgorithm,
- options.TonemappingPeak,
- options.TonemappingDesat,
- options.TonemappingParam,
- options.TonemappingRange));
-
- if (isLibX264Encoder
- || isLibX265Encoder
- || hasTextSubs
- || (hasGraphicalSubs && !isCudaOverlaySupported && isNvencEncoder))
- {
- if (isNvencEncoder)
+ if (isDxInDxOut && !hasSubs)
+ {
+ // OUTPUT d3d11(nv12) surface(vram)
+ // reverse-mapping via d3d11-opencl interop.
+ mainFilters.Add("hwmap=derive_device=d3d11va:reverse=1");
+ mainFilters.Add("format=d3d11");
+ }
+
+ /* Make sub and overlay filters for subtitle stream */
+ var subFilters = new List<string>();
+ var overlayFilters = new List<string>();
+ if (isDxInDxOut)
+ {
+ if (hasSubs)
+ {
+ if (hasGraphicalSubs)
{
- isHwuploadCudaRequired = true;
+ // scale=s=1280x720,format=yuva420p,hwupload
+ var subSwScaleFilter = GetCustomSwScaleFilter(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ subFilters.Add(subSwScaleFilter);
+ subFilters.Add("format=yuva420p");
+ }
+ else if (hasTextSubs)
+ {
+ // alphasrc=s=1280x720:r=10:start=0,format=yuva420p,subtitles,hwupload
+ var alphaSrcFilter = GetAlphaSrcFilter(state, inW, inH, reqW, reqH, reqMaxW, reqMaxH, hasAssSubs ? 10 : 5);
+ var subTextSubtitlesFilter = GetTextSubtitlesFilter(state, true, true);
+ subFilters.Add(alphaSrcFilter);
+ subFilters.Add("format=yuva420p");
+ subFilters.Add(subTextSubtitlesFilter);
}
- filters.Add("hwdownload");
- filters.Add("format=nv12");
+ subFilters.Add("hwupload");
+ overlayFilters.Add("overlay_opencl=eof_action=endall:shortest=1:repeatlast=0");
+ overlayFilters.Add("hwmap=derive_device=d3d11va:reverse=1");
+ overlayFilters.Add("format=d3d11");
}
-
- var outputSdrParams = GetOutputSdrParams(options.TonemappingRange);
- if (!string.IsNullOrEmpty(outputSdrParams))
+ }
+ else if (memoryOutput)
+ {
+ if (hasGraphicalSubs)
{
- filters.Add(outputSdrParams);
+ var subSwScaleFilter = GetCustomSwScaleFilter(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ subFilters.Add(subSwScaleFilter);
+ overlayFilters.Add("overlay=eof_action=endall:shortest=1:repeatlast=0");
}
}
- // Add VPP tonemapping filter for VAAPI.
- // Full hardware based video post processing, faster than OpenCL but lacks fine tuning options.
- if ((isTonemappingSupportedOnVaapi || isTonemappingSupportedOnQsv)
- && isVppTonemappingSupported)
+ return new Tuple<List<string>, List<string>, List<string>>(mainFilters, subFilters, overlayFilters);
+ }
+
+ /// <summary>
+ /// Gets the parameter of Intel QSV filter chain.
+ /// </summary>
+ /// <param name="state">Encoding state.</param>
+ /// <param name="options">Encoding options.</param>
+ /// <param name="vidEncoder">Video encoder to use.</param>
+ /// <returns>The tuple contains three lists: main, sub and overlay filters.</returns>
+ public Tuple<List<string>, List<string>, List<string>> GetIntelVidFilterChain(
+ EncodingJobInfo state,
+ EncodingOptions options,
+ string vidEncoder)
+ {
+ if (!string.Equals(options.HardwareAccelerationType, "qsv", StringComparison.OrdinalIgnoreCase))
{
- filters.Add("tonemap_vaapi=format=nv12:transfer=bt709:matrix=bt709:primaries=bt709");
+ return new Tuple<List<string>, List<string>, List<string>>(null, null, null);
}
- // Another case is when using Nvenc decoder.
- if (isNvdecDecoder && !isOpenclTonemappingSupported && !isCudaTonemappingSupported)
+ var isWindows = OperatingSystem.IsWindows();
+ var isLinux = OperatingSystem.IsLinux();
+ var vidDecoder = GetHardwareVideoDecoder(state, options) ?? string.Empty;
+ var isSwDecoder = string.IsNullOrEmpty(vidDecoder);
+ var isSwEncoder = !vidEncoder.Contains("qsv", StringComparison.OrdinalIgnoreCase);
+ var isQsvOclSupported = _mediaEncoder.SupportsHwaccel("qsv") && IsOpenclFullSupported();
+ var isIntelDx11OclSupported = isWindows
+ && _mediaEncoder.SupportsHwaccel("d3d11va")
+ && isQsvOclSupported;
+ var isIntelVaapiOclSupported = isLinux
+ && IsVaapiSupported(state)
+ && isQsvOclSupported;
+
+ // legacy qsv pipeline(copy-back)
+ if ((isSwDecoder && isSwEncoder)
+ || (!isIntelVaapiOclSupported && !isIntelDx11OclSupported)
+ || !_mediaEncoder.SupportsFilter("alphasrc"))
{
- var codec = videoStream.Codec;
- var isCudaFormatConversionSupported = _mediaEncoder.SupportsFilterWithOption(FilterOptionType.ScaleCudaFormat);
+ return GetSwVidFilterChain(state, options, vidEncoder);
+ }
- // Assert 10-bit hardware decodable
- if (isColorDepth10 && (string.Equals(codec, "hevc", StringComparison.OrdinalIgnoreCase)
- || string.Equals(codec, "h265", StringComparison.OrdinalIgnoreCase)
- || string.Equals(codec, "vp9", StringComparison.OrdinalIgnoreCase)))
- {
- if (isCudaFormatConversionSupported)
- {
- if (isLibX264Encoder
- || isLibX265Encoder
- || hasTextSubs
- || (hasGraphicalSubs && !isCudaOverlaySupported && isNvencEncoder))
- {
- if (isNvencEncoder)
- {
- isHwuploadCudaRequired = true;
- }
+ // preferred qsv(vaapi) + opencl filters pipeline
+ if (isIntelVaapiOclSupported)
+ {
+ return GetIntelQsvVaapiVidFiltersPrefered(state, options, vidDecoder, vidEncoder);
+ }
- filters.Add("hwdownload");
- filters.Add("format=nv12");
- }
- }
- else
- {
- // Download data from GPU to CPU as p010 format.
- filters.Add("hwdownload");
- filters.Add("format=p010");
+ // preferred qsv(d3d11) + opencl filters pipeline
+ if (isIntelDx11OclSupported)
+ {
+ return GetIntelQsvDx11VidFiltersPrefered(state, options, vidDecoder, vidEncoder);
+ }
- // Cuda lacks of a pixel format converter.
- if (isNvencEncoder)
- {
- isHwuploadCudaRequired = true;
- filters.Add("format=yuv420p");
- }
- }
+ return new Tuple<List<string>, List<string>, List<string>>(null, null, null);
+ }
+
+ public Tuple<List<string>, List<string>, List<string>> GetIntelQsvDx11VidFiltersPrefered(
+ EncodingJobInfo state,
+ EncodingOptions options,
+ string vidDecoder,
+ string vidEncoder)
+ {
+ var inW = state.VideoStream?.Width;
+ var inH = state.VideoStream?.Height;
+ var reqW = state.BaseRequest.Width;
+ var reqH = state.BaseRequest.Height;
+ var reqMaxW = state.BaseRequest.MaxWidth;
+ var reqMaxH = state.BaseRequest.MaxHeight;
+ var threeDFormat = state.MediaSource.Video3DFormat;
+
+ var isD3d11vaDecoder = vidDecoder.Contains("d3d11va", StringComparison.OrdinalIgnoreCase);
+ var isQsvDecoder = vidDecoder.Contains("qsv", StringComparison.OrdinalIgnoreCase);
+ var isQsvEncoder = vidEncoder.Contains("qsv", StringComparison.OrdinalIgnoreCase);
+ var isHwDecoder = isD3d11vaDecoder || isQsvDecoder;
+ var isSwDecoder = string.IsNullOrEmpty(vidDecoder);
+ var isSwEncoder = !isQsvEncoder;
+ var isQsvInQsvOut = isHwDecoder && isQsvEncoder;
+
+ var doDeintH264 = state.DeInterlace("h264", true) || state.DeInterlace("avc", true);
+ var doDeintHevc = state.DeInterlace("h265", true) || state.DeInterlace("hevc", true);
+ var doDeintH2645 = doDeintH264 || doDeintHevc;
+ var doOclTonemap = IsHwTonemapAvailable(state, options);
+
+ var hasSubs = state.SubtitleStream != null && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
+ var hasTextSubs = hasSubs && state.SubtitleStream.IsTextSubtitleStream;
+ var hasGraphicalSubs = hasSubs && !state.SubtitleStream.IsTextSubtitleStream;
+ var hasAssSubs = hasSubs
+ && (string.Equals(state.SubtitleStream.Codec, "ass", StringComparison.OrdinalIgnoreCase)
+ || string.Equals(state.SubtitleStream.Codec, "ssa", StringComparison.OrdinalIgnoreCase));
+
+ /* Make main filters for video stream */
+ var mainFilters = new List<string>();
+
+ mainFilters.Add(GetOverwriteColorPropertiesParam(state, doOclTonemap));
+
+ if (isSwDecoder)
+ {
+ // INPUT sw surface(memory)
+ var outFormat = doOclTonemap ? "yuv420p10" : "yuv420p";
+ var swScaleFilter = GetSwScaleFilter(state, options, vidEncoder, inW, inH, threeDFormat, reqW, reqH, reqMaxW, reqMaxH);
+ // sw scale
+ mainFilters.AddRange(swScaleFilter);
+ mainFilters.Add("format=" + outFormat);
+ // sw deint
+ if (doDeintH2645)
+ {
+ var swDeintFilter = GetSwDeinterlaceFilter(state, options);
+ mainFilters.Add(swDeintFilter);
}
- // Assert 8-bit hardware decodable
- else if (!isColorDepth10
- && (isLibX264Encoder
- || isLibX265Encoder
- || hasTextSubs
- || (hasGraphicalSubs && !isCudaOverlaySupported && isNvencEncoder)))
+ // keep the video in system memory unless ocl tonemapping is needed,
+ // since the overhead of hwupload far outweighs using a sw filter.
+ // sw => hw
+ if (doOclTonemap)
+ {
+ mainFilters.Add("hwupload");
+ }
+ }
+ else if (isD3d11vaDecoder || isQsvDecoder)
+ {
+ var outFormat = doOclTonemap ? string.Empty : "nv12";
+ var hwScaleFilter = GetHwScaleFilter("qsv", outFormat, inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+
+ if (isD3d11vaDecoder)
{
- if (isNvencEncoder)
+ if (!string.IsNullOrEmpty(hwScaleFilter) || doDeintH2645)
{
- isHwuploadCudaRequired = true;
+ // INPUT d3d11 surface(vram)
+ // map from d3d11va to qsv.
+ mainFilters.Add("hwmap=derive_device=qsv");
}
+ }
- filters.Add("hwdownload");
- filters.Add("format=nv12");
+ // hw scale
+ mainFilters.Add(hwScaleFilter);
+
+ // hw deint
+ if (doDeintH2645)
+ {
+ var deintFilter = GetHwDeinterlaceFilter(state, options, "qsv");
+ mainFilters.Add(deintFilter);
}
}
- // Add parameters to use VAAPI with burn-in text subtitles (GH issue #642)
- if (isVaapiH264Encoder
- || isVaapiHevcEncoder
- || (isTonemappingSupportedOnQsv && isVppTonemappingSupported))
+ if (doOclTonemap && isHwDecoder)
+ {
+ // map from qsv to opencl via qsv(d3d11)-opencl interop.
+ mainFilters.Add("hwmap=derive_device=opencl");
+ }
+
+ // hw tonemap
+ if (doOclTonemap)
+ {
+ var tonemapFilter = GetHwTonemapFilter(options, "opencl", "nv12");
+ mainFilters.Add(tonemapFilter);
+ }
+
+ var memoryOutput = false;
+ var isUploadForOclTonemap = isSwDecoder && doOclTonemap;
+ var isHwmapUsable = isSwEncoder && doOclTonemap;
+ if ((isHwDecoder && isSwEncoder) || isUploadForOclTonemap)
+ {
+ memoryOutput = true;
+
+ // OUTPUT nv12 surface(memory)
+ // prefer hwmap to hwdownload on opencl.
+ // qsv hwmap is not fully implemented for the time being.
+ mainFilters.Add(isHwmapUsable ? "hwmap" : "hwdownload");
+ mainFilters.Add("format=nv12");
+ }
+
+ // OUTPUT nv12 surface(memory)
+ if (isSwDecoder && isQsvEncoder)
+ {
+ memoryOutput = true;
+ }
+
+ if (memoryOutput)
{
+ // text subtitles
if (hasTextSubs)
{
- // Convert hw context from ocl to va.
- // For tonemapping and text subs burn-in.
- if (isTonemappingSupportedOnVaapi && isOpenclTonemappingSupported && !isVppTonemappingSupported)
+ var textSubtitlesFilter = GetTextSubtitlesFilter(state, false, false);
+ mainFilters.Add(textSubtitlesFilter);
+ }
+ }
+
+ if (isQsvInQsvOut && doOclTonemap)
+ {
+ // OUTPUT qsv(nv12) surface(vram)
+ // reverse-mapping via qsv(d3d11)-opencl interop.
+ mainFilters.Add("hwmap=derive_device=qsv:reverse=1:extra_hw_frames=16");
+ mainFilters.Add("format=qsv");
+ }
+
+ /* Make sub and overlay filters for subtitle stream */
+ var subFilters = new List<string>();
+ var overlayFilters = new List<string>();
+ if (isQsvInQsvOut)
+ {
+ if (hasSubs)
+ {
+ if (hasGraphicalSubs)
+ {
+ // scale,format=bgra,hwupload
+ // overlay_qsv can handle overlay scaling,
+ // add a dummy scale filter to pair with -canvas_size.
+ subFilters.Add("scale=flags=fast_bilinear");
+ subFilters.Add("format=bgra");
+ }
+ else if (hasTextSubs)
{
- filters.Add("scale_vaapi");
+ // alphasrc=s=1280x720:r=10:start=0,format=bgra,subtitles,hwupload
+ var alphaSrcFilter = GetAlphaSrcFilter(state, inW, inH, reqW, reqH, reqMaxW, 1080, hasAssSubs ? 10 : 5);
+ var subTextSubtitlesFilter = GetTextSubtitlesFilter(state, true, true);
+ subFilters.Add(alphaSrcFilter);
+ subFilters.Add("format=bgra");
+ subFilters.Add(subTextSubtitlesFilter);
}
- // Test passed on Intel and AMD gfx
- filters.Add("hwmap=mode=read+write");
- filters.Add("format=nv12");
+ // qsv requires a fixed pool size.
+ subFilters.Add("hwupload=extra_hw_frames=32");
+
+ var (overlayW, overlayH) = GetFixedOutputSize(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ var overlaySize = (overlayW.HasValue && overlayH.HasValue)
+ ? (":w=" + overlayW.Value + ":h=" + overlayH.Value)
+ : string.Empty;
+ var overlayQsvFilter = string.Format(
+ CultureInfo.InvariantCulture,
+ "overlay_qsv=eof_action=endall:shortest=1:repeatlast=0{0}",
+ overlaySize);
+ overlayFilters.Add(overlayQsvFilter);
}
}
-
- if (hasTextSubs)
+ else if (memoryOutput)
{
- var subParam = GetTextSubtitleParam(state);
+ if (hasGraphicalSubs)
+ {
+ var subSwScaleFilter = GetCustomSwScaleFilter(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ subFilters.Add(subSwScaleFilter);
+ overlayFilters.Add("overlay=eof_action=endall:shortest=1:repeatlast=0");
+ }
+ }
+
+ return new Tuple<List<string>, List<string>, List<string>>(mainFilters, subFilters, overlayFilters);
+ }
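A worked example (hypothetical dimensions, not part of the patch) of the overlay_qsv sizing above: GetFixedOutputSize fits a 1920x1080 input into MaxWidth=1280/MaxHeight=720 and truncates to even values, so the overlay filter ends up as overlay_qsv=eof_action=endall:shortest=1:repeatlast=0:w=1280:h=720.

using System;

class OverlayQsvSizeExample
{
    static void Main()
    {
        // Mirrors the decimal scaling used by GetFixedOutputSize for a downscale case.
        decimal inW = 1920, inH = 1080, maxW = 1280, maxH = 720;
        var scale = Math.Min(maxW / inW, maxH / inH);
        var outW = (int)(2 * Math.Truncate(Math.Min(maxW, Math.Truncate(inW * scale)) / 2));
        var outH = (int)(2 * Math.Truncate(Math.Min(maxH, Math.Truncate(inH * scale)) / 2));
        Console.WriteLine($"overlay_qsv=eof_action=endall:shortest=1:repeatlast=0:w={outW}:h={outH}");
    }
}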
+
+ public Tuple<List<string>, List<string>, List<string>> GetIntelQsvVaapiVidFiltersPrefered(
+ EncodingJobInfo state,
+ EncodingOptions options,
+ string vidDecoder,
+ string vidEncoder)
+ {
+ var inW = state.VideoStream?.Width;
+ var inH = state.VideoStream?.Height;
+ var reqW = state.BaseRequest.Width;
+ var reqH = state.BaseRequest.Height;
+ var reqMaxW = state.BaseRequest.MaxWidth;
+ var reqMaxH = state.BaseRequest.MaxHeight;
+ var threeDFormat = state.MediaSource.Video3DFormat;
+
+ var isVaapiDecoder = vidDecoder.Contains("vaapi", StringComparison.OrdinalIgnoreCase);
+ var isQsvDecoder = vidDecoder.Contains("qsv", StringComparison.OrdinalIgnoreCase);
+ var isQsvEncoder = vidEncoder.Contains("qsv", StringComparison.OrdinalIgnoreCase);
+ var isHwDecoder = isVaapiDecoder || isQsvDecoder;
+ var isSwDecoder = string.IsNullOrEmpty(vidDecoder);
+ var isSwEncoder = !isQsvEncoder;
+ var isQsvInQsvOut = isHwDecoder && isQsvEncoder;
+
+ var doDeintH264 = state.DeInterlace("h264", true) || state.DeInterlace("avc", true);
+ var doDeintHevc = state.DeInterlace("h265", true) || state.DeInterlace("hevc", true);
+ var doVaVppTonemap = IsVaapiVppTonemapAvailable(state, options);
+ var doOclTonemap = !doVaVppTonemap && IsHwTonemapAvailable(state, options);
+ var doTonemap = doVaVppTonemap || doOclTonemap;
+ var doDeintH2645 = doDeintH264 || doDeintHevc;
+
+ var hasSubs = state.SubtitleStream != null && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
+ var hasTextSubs = hasSubs && state.SubtitleStream.IsTextSubtitleStream;
+ var hasGraphicalSubs = hasSubs && !state.SubtitleStream.IsTextSubtitleStream;
+ var hasAssSubs = hasSubs
+ && (string.Equals(state.SubtitleStream.Codec, "ass", StringComparison.OrdinalIgnoreCase)
+ || string.Equals(state.SubtitleStream.Codec, "ssa", StringComparison.OrdinalIgnoreCase));
+
+ /* Make main filters for video stream */
+ var mainFilters = new List<string>();
- filters.Add(subParam);
+ mainFilters.Add(GetOverwriteColorPropertiesParam(state, doTonemap));
- // Ensure proper filters are passed to ffmpeg in case of hardware acceleration via VA-API
- // Reference: https://trac.ffmpeg.org/wiki/Hardware/VAAPI
- if (isVaapiH264Encoder || isVaapiHevcEncoder)
+ if (isSwDecoder)
+ {
+ // INPUT sw surface(memory)
+ var outFormat = doOclTonemap ? "yuv420p10" : "yuv420p";
+ var swScaleFilter = GetSwScaleFilter(state, options, vidEncoder, inW, inH, threeDFormat, reqW, reqH, reqMaxW, reqMaxH);
+ // sw scale
+ mainFilters.AddRange(swScaleFilter);
+ mainFilters.Add("format=" + outFormat);
+ // sw deint
+ if (doDeintH2645)
{
- filters.Add("hwmap");
+ var swDeintFilter = GetSwDeinterlaceFilter(state, options);
+ mainFilters.Add(swDeintFilter);
}
- if (isTonemappingSupportedOnQsv && isVppTonemappingSupported)
- {
- filters.Add("hwmap,format=vaapi");
- }
+ // keep the video in system memory unless ocl tonemapping is needed,
+ // since the overhead of hwupload far outweighs using a sw filter.
+ // sw => hw
+ if (doOclTonemap)
+ {
+ mainFilters.Add("hwupload");
+ }
+ }
+ else if (isVaapiDecoder || isQsvDecoder)
+ {
+ // INPUT vaapi/qsv surface(vram)
+ var outFormat = doTonemap ? string.Empty : "nv12";
+ var hwScaleFilter = GetHwScaleFilter(isVaapiDecoder ? "vaapi" : "qsv", outFormat, inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ // hw scale
+ mainFilters.Add(hwScaleFilter);
+ }
+
+ // hw deint
+ if (doDeintH2645 && isHwDecoder)
+ {
+ var deintFilter = string.Empty;
+ if (isVaapiDecoder)
+ {
+ deintFilter = GetHwDeinterlaceFilter(state, options, "vaapi");
+ }
+ else if (isQsvDecoder)
+ {
+ deintFilter = GetHwDeinterlaceFilter(state, options, "qsv");
+ }
+
+ mainFilters.Add(deintFilter);
+ }
+
+ // vaapi vpp tonemap
+ if (doVaVppTonemap && isHwDecoder)
+ {
+ if (isQsvDecoder)
+ {
+ // map from qsv to vaapi.
+ mainFilters.Add("hwmap=derive_device=vaapi");
+ }
+
+ var tonemapFilter = GetHwTonemapFilter(options, "vaapi", "nv12");
+ mainFilters.Add(tonemapFilter);
+
+ if (isQsvDecoder)
+ {
+ // map from vaapi to qsv.
+ mainFilters.Add("hwmap=derive_device=qsv");
+ }
+ }
+
+ if (doOclTonemap && isHwDecoder)
+ {
+ // map from qsv to opencl via qsv(vaapi)-opencl interop.
+ mainFilters.Add("hwmap=derive_device=opencl");
+ }
+
+ // ocl tonemap
+ if (doOclTonemap)
+ {
+ var tonemapFilter = GetHwTonemapFilter(options, "opencl", "nv12");
+ mainFilters.Add(tonemapFilter);
+ }
+
+ var memoryOutput = false;
+ var isUploadForOclTonemap = isSwDecoder && doOclTonemap;
+ var isHwmapUsable = isSwEncoder && (doOclTonemap || isVaapiDecoder);
+ if ((isHwDecoder && isSwEncoder) || isUploadForOclTonemap)
+ {
+ memoryOutput = true;
+
+ // OUTPUT nv12 surface(memory)
+ // prefer hwmap to hwdownload on opencl/vaapi.
+ // qsv hwmap is not fully implemented for the time being.
+ mainFilters.Add(isHwmapUsable ? "hwmap" : "hwdownload");
+ mainFilters.Add("format=nv12");
+ }
+
+ // OUTPUT nv12 surface(memory)
+ if (isSwDecoder && isQsvEncoder)
+ {
+ memoryOutput = true;
+ }
+
+ if (memoryOutput)
+ {
+ // text subtitles
+ if (hasTextSubs)
+ {
+ var textSubtitlesFilter = GetTextSubtitlesFilter(state, false, false);
+ mainFilters.Add(textSubtitlesFilter);
+ }
+ }
+
+ if (isQsvInQsvOut)
+ {
+ if (doOclTonemap)
+ {
+ // OUTPUT qsv(nv12) surface(vram)
+ // reverse-mapping via qsv(vaapi)-opencl interop.
+ mainFilters.Add("hwmap=derive_device=qsv:reverse=1:extra_hw_frames=16");
+ mainFilters.Add("format=qsv");
+ }
+ else if (isVaapiDecoder)
+ {
+ mainFilters.Add("hwmap=derive_device=qsv");
+ mainFilters.Add("format=qsv");
+ }
+ }
+
+ /* Make sub and overlay filters for subtitle stream */
+ var subFilters = new List<string>();
+ var overlayFilters = new List<string>();
+ if (isQsvInQsvOut)
+ {
+ if (hasSubs)
+ {
+ if (hasGraphicalSubs)
+ {
+ subFilters.Add("scale=flags=fast_bilinear");
+ subFilters.Add("format=bgra");
+ }
+ else if (hasTextSubs)
+ {
+ var alphaSrcFilter = GetAlphaSrcFilter(state, inW, inH, reqW, reqH, reqMaxW, 1080, hasAssSubs ? 10 : 5);
+ var subTextSubtitlesFilter = GetTextSubtitlesFilter(state, true, true);
+ subFilters.Add(alphaSrcFilter);
+ subFilters.Add("format=bgra");
+ subFilters.Add(subTextSubtitlesFilter);
+ }
+
+ // qsv requires a fixed pool size.
+ subFilters.Add("hwupload=extra_hw_frames=32");
+
+ var (overlayW, overlayH) = GetFixedOutputSize(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ var overlaySize = (overlayW.HasValue && overlayH.HasValue)
+ ? (":w=" + overlayW.Value + ":h=" + overlayH.Value)
+ : string.Empty;
+ var overlayQsvFilter = string.Format(
+ CultureInfo.InvariantCulture,
+ "overlay_qsv=eof_action=endall:shortest=1:repeatlast=0{0}",
+ overlaySize);
+ overlayFilters.Add(overlayQsvFilter);
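+ // "eof_action=endall:shortest=1:repeatlast=0" makes the overlay stop as soon as
+ // either input ends instead of repeating the last subtitle frame, while the optional
+ // w/h arguments size the overlay to the fixed output dimensions computed above.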
+ }
+ }
+ else if (memoryOutput)
+ {
+ if (hasGraphicalSubs)
+ {
+ var subSwScaleFilter = GetCustomSwScaleFilter(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ subFilters.Add(subSwScaleFilter);
+ overlayFilters.Add("overlay=eof_action=pass:shortest=1:repeatlast=0");
+ }
+ }
+
+ return new Tuple<List<string>, List<string>, List<string>>(mainFilters, subFilters, overlayFilters);
+ }
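+ // A caller is expected to join these three lists into a -filter_complex graph
+ // elsewhere; conceptually (hypothetical labels, the actual wiring is done by the
+ // calling code) the result looks like:
+ //   [0:v]<main filters>[main]; [0:s]<sub filters>[sub]; [main][sub]<overlay filters>
+ // with the sub/overlay parts present only when subtitles are burned in.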
+
+ /// <summary>
+ /// Gets the parameters of the Intel/AMD VAAPI filter chain.
+ /// </summary>
+ /// <param name="state">Encoding state.</param>
+ /// <param name="options">Encoding options.</param>
+ /// <param name="vidEncoder">Video encoder to use.</param>
+ /// <returns>The tuple contains three lists: main, sub and overlay filters.</returns>
+ public Tuple<List<string>, List<string>, List<string>> GetVaapiVidFilterChain(
+ EncodingJobInfo state,
+ EncodingOptions options,
+ string vidEncoder)
+ {
+ if (!string.Equals(options.HardwareAccelerationType, "vaapi", StringComparison.OrdinalIgnoreCase))
+ {
+ return new Tuple<List<string>, List<string>, List<string>>(null, null, null);
+ }
+
+ var isLinux = OperatingSystem.IsLinux();
+ var vidDecoder = GetHardwareVideoDecoder(state, options) ?? string.Empty;
+ var isSwDecoder = string.IsNullOrEmpty(vidDecoder);
+ var isSwEncoder = !vidEncoder.Contains("vaapi", StringComparison.OrdinalIgnoreCase);
+ var isVaapiOclSupported = isLinux && IsVaapiSupported(state) && IsVaapiFullSupported() && IsOpenclFullSupported();
+
+ // legacy vaapi pipeline(copy-back)
+ if ((isSwDecoder && isSwEncoder)
+ || !isVaapiOclSupported
+ || !_mediaEncoder.SupportsFilter("alphasrc"))
+ {
+ var swFilterChain = GetSwVidFilterChain(state, options, vidEncoder);
+
+ if (!isSwEncoder)
+ {
+ var newfilters = new List<string>();
+ var noOverlay = swFilterChain.Item3.Count == 0;
+ newfilters.AddRange(noOverlay ? swFilterChain.Item1 : swFilterChain.Item3);
+ newfilters.Add("hwupload");
+
+ var mainFilters = noOverlay ? newfilters : swFilterChain.Item1;
+ var overlayFilters = noOverlay ? swFilterChain.Item3 : newfilters;
+ return new Tuple<List<string>, List<string>, List<string>>(mainFilters, swFilterChain.Item2, overlayFilters);
+ }
+
+ return swFilterChain;
+ }
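+ // In this fallback the frames stay in system memory until the very end, so for a
+ // vaapi encoder "hwupload" has to be appended to whichever list runs last:
+ // the overlay list when graphical subs are burned in, otherwise the main list,
+ // e.g. "scale=...,format=...,hwupload" vs. "overlay=...,hwupload".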
+
+ // preferred vaapi + opencl filters pipeline
+ if (_mediaEncoder.IsVaapiDeviceInteliHD())
+ {
+ // Intel iHD path, with extra vpp tonemap and overlay support.
+ return GetVaapiFullVidFiltersPrefered(state, options, vidDecoder, vidEncoder);
+ }
+
+ // Intel i965 and AMD radeonsi/r600 path, featuring only scale and deinterlace support.
+ return GetVaapiLimitedVidFiltersPrefered(state, options, vidDecoder, vidEncoder);
+ }
+
+ public Tuple<List<string>, List<string>, List<string>> GetVaapiFullVidFiltersPrefered(
+ EncodingJobInfo state,
+ EncodingOptions options,
+ string vidDecoder,
+ string vidEncoder)
+ {
+ var inW = state.VideoStream?.Width;
+ var inH = state.VideoStream?.Height;
+ var reqW = state.BaseRequest.Width;
+ var reqH = state.BaseRequest.Height;
+ var reqMaxW = state.BaseRequest.MaxWidth;
+ var reqMaxH = state.BaseRequest.MaxHeight;
+ var threeDFormat = state.MediaSource.Video3DFormat;
+
+ var isVaapiDecoder = vidDecoder.Contains("vaapi", StringComparison.OrdinalIgnoreCase);
+ var isVaapiEncoder = vidEncoder.Contains("vaapi", StringComparison.OrdinalIgnoreCase);
+ var isSwDecoder = string.IsNullOrEmpty(vidDecoder);
+ var isSwEncoder = !isVaapiEncoder;
+ var isVaInVaOut = isVaapiDecoder && isVaapiEncoder;
+
+ var doDeintH264 = state.DeInterlace("h264", true) || state.DeInterlace("avc", true);
+ var doDeintHevc = state.DeInterlace("h265", true) || state.DeInterlace("hevc", true);
+ var doVaVppTonemap = isVaapiDecoder && IsVaapiVppTonemapAvailable(state, options);
+ var doOclTonemap = !doVaVppTonemap && IsHwTonemapAvailable(state, options);
+ var doTonemap = doVaVppTonemap || doOclTonemap;
+ var doDeintH2645 = doDeintH264 || doDeintHevc;
+
+ var hasSubs = state.SubtitleStream != null && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
+ var hasTextSubs = hasSubs && state.SubtitleStream.IsTextSubtitleStream;
+ var hasGraphicalSubs = hasSubs && !state.SubtitleStream.IsTextSubtitleStream;
+ var hasAssSubs = hasSubs
+ && (string.Equals(state.SubtitleStream.Codec, "ass", StringComparison.OrdinalIgnoreCase)
+ || string.Equals(state.SubtitleStream.Codec, "ssa", StringComparison.OrdinalIgnoreCase));
+
+ /* Make main filters for video stream */
+ var mainFilters = new List<string>();
+
+ mainFilters.Add(GetOverwriteColorPropertiesParam(state, doTonemap));
+
+ if (isSwDecoder)
+ {
+ // INPUT sw surface(memory)
+ var outFormat = doOclTonemap ? "yuv420p10" : "nv12";
+ var swScaleFilter = GetSwScaleFilter(state, options, vidEncoder, inW, inH, threeDFormat, reqW, reqH, reqMaxW, reqMaxH);
+ // sw scale
+ mainFilters.AddRange(swScaleFilter);
+ mainFilters.Add("format=" + outFormat);
+ // sw deint
+ if (doDeintH2645)
+ {
+ var swDeintFilter = GetSwDeinterlaceFilter(state, options);
+ mainFilters.Add(swDeintFilter);
+ }
+
+ // keep the video in memory unless ocl tonemap is needed,
+ // since the overhead caused by hwupload far outweighs using a sw filter.
+ // sw => hw
+ if (doOclTonemap)
+ {
+ mainFilters.Add("hwupload");
+ }
+ }
+ else if (isVaapiDecoder)
+ {
+ // INPUT vaapi surface(vram)
+ var outFormat = doTonemap ? string.Empty : "nv12";
+ var hwScaleFilter = GetHwScaleFilter("vaapi", outFormat, inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ // hw scale
+ mainFilters.Add(hwScaleFilter);
+ }
+
+ // hw deint
+ if (doDeintH2645 && isVaapiDecoder)
+ {
+ var deintFilter = GetHwDeinterlaceFilter(state, options, "vaapi");
+ mainFilters.Add(deintFilter);
+ }
+
+ // vaapi vpp tonemap
+ if (doVaVppTonemap && isVaapiDecoder)
+ {
+ var tonemapFilter = GetHwTonemapFilter(options, "vaapi", "nv12");
+ mainFilters.Add(tonemapFilter);
+ }
+
+ if (doOclTonemap && isVaapiDecoder)
+ {
+ // map from vaapi to opencl via vaapi-opencl interop(Intel only).
+ mainFilters.Add("hwmap=derive_device=opencl");
+ }
+
+ // ocl tonemap
+ if (doOclTonemap)
+ {
+ var tonemapFilter = GetHwTonemapFilter(options, "opencl", "nv12");
+ mainFilters.Add(tonemapFilter);
+ }
+
+ if (doOclTonemap && isVaInVaOut)
+ {
+ // OUTPUT vaapi(nv12) surface(vram)
+ // reverse-mapping via vaapi-opencl interop.
+ mainFilters.Add("hwmap=derive_device=vaapi:reverse=1");
+ mainFilters.Add("format=vaapi");
+ }
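+ // As with the qsv path, "reverse=1" maps the opencl output back onto vaapi surfaces,
+ // so the vaapi encoder can take them without a hwdownload/hwupload round trip.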
+
+ var memoryOutput = false;
+ var isUploadForOclTonemap = isSwDecoder && doOclTonemap;
+ var isHwmapNotUsable = isUploadForOclTonemap && isVaapiEncoder;
+ if ((isVaapiDecoder && isSwEncoder) || isUploadForOclTonemap)
+ {
+ memoryOutput = true;
+
+ // OUTPUT nv12 surface(memory)
+ // prefer hwmap to hwdownload on opencl/vaapi.
+ mainFilters.Add(isHwmapNotUsable ? "hwdownload" : "hwmap");
+ mainFilters.Add("format=nv12");
+ }
+
+ // OUTPUT nv12 surface(memory)
+ if (isSwDecoder && isVaapiEncoder)
+ {
+ memoryOutput = true;
+ }
+
+ if (memoryOutput)
+ {
+ // text subtitles
+ if (hasTextSubs)
+ {
+ var textSubtitlesFilter = GetTextSubtitlesFilter(state, false, false);
+ mainFilters.Add(textSubtitlesFilter);
+ }
+ }
+
+ if (memoryOutput && isVaapiEncoder)
+ {
+ if (!hasGraphicalSubs)
+ {
+ mainFilters.Add("hwupload_vaapi");
+ }
+ }
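+ // When graphical subs are burned in, the upload to vaapi happens at the end of the
+ // overlay chain instead (see the "hwupload_vaapi" added to overlayFilters below).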
+
+ /* Make sub and overlay filters for subtitle stream */
+ var subFilters = new List<string>();
+ var overlayFilters = new List<string>();
+ if (isVaInVaOut)
+ {
+ if (hasSubs)
+ {
+ if (hasGraphicalSubs)
+ {
+ subFilters.Add("scale=flags=fast_bilinear");
+ subFilters.Add("format=bgra");
+ }
+ else if (hasTextSubs)
+ {
+ var alphaSrcFilter = GetAlphaSrcFilter(state, inW, inH, reqW, reqH, reqMaxW, 1080, hasAssSubs ? 10 : 5);
+ var subTextSubtitlesFilter = GetTextSubtitlesFilter(state, true, true);
+ subFilters.Add(alphaSrcFilter);
+ subFilters.Add("format=bgra");
+ subFilters.Add(subTextSubtitlesFilter);
+ }
+
+ subFilters.Add("hwupload");
+
+ var (overlayW, overlayH) = GetFixedOutputSize(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ var overlaySize = (overlayW.HasValue && overlayH.HasValue)
+ ? (":w=" + overlayW.Value + ":h=" + overlayH.Value)
+ : string.Empty;
+ var overlayVaapiFilter = string.Format(
+ CultureInfo.InvariantCulture,
+ "overlay_vaapi=eof_action=endall:shortest=1:repeatlast=0{0}",
+ overlaySize);
+ overlayFilters.Add(overlayVaapiFilter);
+ }
+ }
+ else if (memoryOutput)
+ {
+ if (hasGraphicalSubs)
+ {
+ var subSwScaleFilter = GetCustomSwScaleFilter(inW, inH, reqW, reqH, reqMaxW, reqMaxH);
+ subFilters.Add(subSwScaleFilter);
+ overlayFilters.Add("overlay=eof_action=pass:shortest=1:repeatlast=0");
+
+ if (isVaapiEncoder)
+ {
+ overlayFilters.Add("hwupload_vaapi");
+ }
+ }
+ }
+
+ return new Tuple<List<string>, List<string>, List<string>