diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c6d3529..94a858f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,7 +10,7 @@ on: - FFMpegCore.Test/** pull_request: branches: - - master + - main - release paths: - .github/workflows/ci.yml @@ -45,7 +45,8 @@ jobs: - name: Test with dotnet run: dotnet test FFMpegCore.sln --collect "XPlat Code Coverage" --logger GitHubActions - - name: Upload coverage reports to Codecov + - if: matrix.os == 'windows-latest' + name: Upload coverage reports to Codecov uses: codecov/codecov-action@v3 with: directory: FFMpegCore.Test/TestResults diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0c7725b..00a1ea7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -16,8 +16,8 @@ jobs: dotnet-version: '7.0.x' - name: Build solution - run: dotnet pack FFMpegCore.sln --output build -c Release + run: dotnet pack FFMpegCore.sln -c Release - name: Publish NuGet package - run: dotnet nuget push build/*.nupkg --source nuget.org --api-key ${{ secrets.NUGET_TOKEN }} + run: dotnet nuget push nupkg/*.nupkg --skip-duplicate --source nuget.org --api-key ${{ secrets.NUGET_TOKEN }} diff --git a/FFMpegCore.Examples/FFMpegCore.Examples.csproj b/FFMpegCore.Examples/FFMpegCore.Examples.csproj index db3c66e..f4a2b9b 100644 --- a/FFMpegCore.Examples/FFMpegCore.Examples.csproj +++ b/FFMpegCore.Examples/FFMpegCore.Examples.csproj @@ -1,14 +1,15 @@ - - Exe - net6.0 - false - + + Exe + net6.0 + false + - - - - + + + + + diff --git a/FFMpegCore.Examples/Program.cs b/FFMpegCore.Examples/Program.cs index ac4bce5..d7abde4 100644 --- a/FFMpegCore.Examples/Program.cs +++ b/FFMpegCore.Examples/Program.cs @@ -1,8 +1,11 @@ using System.Drawing; using FFMpegCore; using FFMpegCore.Enums; +using FFMpegCore.Extensions.SkiaSharp; using FFMpegCore.Extensions.System.Drawing.Common; using FFMpegCore.Pipes; +using SkiaSharp; +using FFMpegImage = FFMpegCore.Extensions.System.Drawing.Common.FFMpegImage; var inputPath = "/path/to/input"; var outputPath = "/path/to/output"; @@ -77,11 +80,14 @@ await FFMpegArguments var inputImagePath = "/path/to/input/image"; { FFMpeg.PosterWithAudio(inputPath, inputAudioPath, outputPath); - // or + // or using FFMpegCore.Extensions.System.Drawing.Common #pragma warning disable CA1416 using var image = Image.FromFile(inputImagePath); image.AddAudio(inputAudioPath, outputPath); #pragma warning restore CA1416 + // or using FFMpegCore.Extensions.SkiaSharp + using var skiaSharpImage = SKBitmap.Decode(inputImagePath); + skiaSharpImage.AddAudio(inputAudioPath, outputPath); } IVideoFrame GetNextFrame() => throw new NotImplementedException(); diff --git a/FFMpegCore.Extensions.SkiaSharp/BitmapExtensions.cs b/FFMpegCore.Extensions.SkiaSharp/BitmapExtensions.cs new file mode 100644 index 0000000..34e303a --- /dev/null +++ b/FFMpegCore.Extensions.SkiaSharp/BitmapExtensions.cs @@ -0,0 +1,28 @@ +using SkiaSharp; + +namespace FFMpegCore.Extensions.SkiaSharp +{ + public static class BitmapExtensions + { + public static bool AddAudio(this SKBitmap poster, string audio, string output) + { + var destination = $"{Environment.TickCount}.png"; + using (var fileStream = File.OpenWrite(destination)) + { + poster.Encode(fileStream, SKEncodedImageFormat.Png, default); // PNG does not respect the quality parameter + } + + try + { + return FFMpeg.PosterWithAudio(destination, audio, output); + } + finally + { + if (File.Exists(destination)) + { + File.Delete(destination); + } + } + } + } +} diff 
--git a/FFMpegCore.Extensions.SkiaSharp/BitmapVideoFrameWrapper.cs b/FFMpegCore.Extensions.SkiaSharp/BitmapVideoFrameWrapper.cs new file mode 100644 index 0000000..7bb98fb --- /dev/null +++ b/FFMpegCore.Extensions.SkiaSharp/BitmapVideoFrameWrapper.cs @@ -0,0 +1,59 @@ +using FFMpegCore.Pipes; +using SkiaSharp; + +namespace FFMpegCore.Extensions.SkiaSharp +{ + public class BitmapVideoFrameWrapper : IVideoFrame, IDisposable + { + public int Width => Source.Width; + + public int Height => Source.Height; + + public string Format { get; private set; } + + public SKBitmap Source { get; private set; } + + public BitmapVideoFrameWrapper(SKBitmap bitmap) + { + Source = bitmap ?? throw new ArgumentNullException(nameof(bitmap)); + Format = ConvertStreamFormat(bitmap.ColorType); + } + + public void Serialize(Stream stream) + { + var data = Source.Bytes; + stream.Write(data, 0, data.Length); + } + + public async Task SerializeAsync(Stream stream, CancellationToken token) + { + var data = Source.Bytes; + await stream.WriteAsync(data, 0, data.Length, token).ConfigureAwait(false); + } + + public void Dispose() + { + Source.Dispose(); + } + + private static string ConvertStreamFormat(SKColorType fmt) + { + // TODO: Add support for additional formats + switch (fmt) + { + case SKColorType.Gray8: + return "gray8"; + case SKColorType.Bgra8888: + return "bgra"; + case SKColorType.Rgb888x: + return "rgb"; + case SKColorType.Rgba8888: + return "rgba"; + case SKColorType.Rgb565: + return "rgb565"; + default: + throw new NotSupportedException($"Not supported pixel format {fmt}"); + } + } + } +} diff --git a/FFMpegCore.Extensions.SkiaSharp/FFMpegCore.Extensions.SkiaSharp.csproj b/FFMpegCore.Extensions.SkiaSharp/FFMpegCore.Extensions.SkiaSharp.csproj new file mode 100644 index 0000000..d15a7bd --- /dev/null +++ b/FFMpegCore.Extensions.SkiaSharp/FFMpegCore.Extensions.SkiaSharp.csproj @@ -0,0 +1,23 @@ + + + + true + Image extension for FFMpegCore using SkiaSharp + 5.0.0 + ../nupkg + + + ffmpeg ffprobe convert video audio mediafile resize analyze muxing skiasharp + Malte Rosenbjerg, Vlad Jerca, Max Bagryantsev, Dimitri Vranken + + + + + + + + + + + + diff --git a/FFMpegCore.Extensions.SkiaSharp/FFMpegImage.cs b/FFMpegCore.Extensions.SkiaSharp/FFMpegImage.cs new file mode 100644 index 0000000..69929d3 --- /dev/null +++ b/FFMpegCore.Extensions.SkiaSharp/FFMpegImage.cs @@ -0,0 +1,57 @@ +using System.Drawing; +using FFMpegCore.Pipes; +using SkiaSharp; + +namespace FFMpegCore.Extensions.SkiaSharp +{ + public static class FFMpegImage + { + /// + /// Saves a 'png' thumbnail to an in-memory bitmap + /// + /// Source video file. + /// Seek position where the thumbnail should be taken. + /// Thumbnail size. If width or height equal 0, the other will be computed automatically. + /// Selected video stream index. + /// Input file index + /// Bitmap with the requested snapshot. + public static SKBitmap Snapshot(string input, Size? size = null, TimeSpan? captureTime = null, int? 
streamIndex = null, int inputFileIndex = 0) + { + var source = FFProbe.Analyse(input); + var (arguments, outputOptions) = SnapshotArgumentBuilder.BuildSnapshotArguments(input, source, size, captureTime, streamIndex, inputFileIndex); + using var ms = new MemoryStream(); + + arguments + .OutputToPipe(new StreamPipeSink(ms), options => outputOptions(options + .ForceFormat("rawvideo"))) + .ProcessSynchronously(); + + ms.Position = 0; + using var bitmap = SKBitmap.Decode(ms); + return bitmap.Copy(); + } + /// + /// Saves a 'png' thumbnail to an in-memory bitmap + /// + /// Source video file. + /// Seek position where the thumbnail should be taken. + /// Thumbnail size. If width or height equal 0, the other will be computed automatically. + /// Selected video stream index. + /// Input file index + /// Bitmap with the requested snapshot. + public static async Task SnapshotAsync(string input, Size? size = null, TimeSpan? captureTime = null, int? streamIndex = null, int inputFileIndex = 0) + { + var source = await FFProbe.AnalyseAsync(input).ConfigureAwait(false); + var (arguments, outputOptions) = SnapshotArgumentBuilder.BuildSnapshotArguments(input, source, size, captureTime, streamIndex, inputFileIndex); + using var ms = new MemoryStream(); + + await arguments + .OutputToPipe(new StreamPipeSink(ms), options => outputOptions(options + .ForceFormat("rawvideo"))) + .ProcessAsynchronously(); + + ms.Position = 0; + return SKBitmap.Decode(ms); + } + } +} diff --git a/FFMpegCore.Extensions.System.Drawing.Common/FFMpegCore.Extensions.System.Drawing.Common.csproj b/FFMpegCore.Extensions.System.Drawing.Common/FFMpegCore.Extensions.System.Drawing.Common.csproj index aafb577..13cdc1a 100644 --- a/FFMpegCore.Extensions.System.Drawing.Common/FFMpegCore.Extensions.System.Drawing.Common.csproj +++ b/FFMpegCore.Extensions.System.Drawing.Common/FFMpegCore.Extensions.System.Drawing.Common.csproj @@ -1,21 +1,22 @@ - - true - Image extension for FFMpegCore using System.Common.Drawing - 5.0.0 - - - ffmpeg ffprobe convert video audio mediafile resize analyze muxing - Malte Rosenbjerg, Vlad Jerca, Max Bagryantsev - + + true + Image extension for FFMpegCore using System.Common.Drawing + 5.0.0 + ../nupkg + + + ffmpeg ffprobe convert video audio mediafile resize analyze muxing + Malte Rosenbjerg, Vlad Jerca, Max Bagryantsev + - - - + + + - - - + + + diff --git a/FFMpegCore.Extensions.System.Drawing.Common/FFMpegImage.cs b/FFMpegCore.Extensions.System.Drawing.Common/FFMpegImage.cs index f36f83d..c946507 100644 --- a/FFMpegCore.Extensions.System.Drawing.Common/FFMpegImage.cs +++ b/FFMpegCore.Extensions.System.Drawing.Common/FFMpegImage.cs @@ -29,6 +29,7 @@ public static Bitmap Snapshot(string input, Size? size = null, TimeSpan? 
capture using var bitmap = new Bitmap(ms); return bitmap.Clone(new Rectangle(0, 0, bitmap.Width, bitmap.Height), bitmap.PixelFormat); } + /// /// Saves a 'png' thumbnail to an in-memory bitmap /// diff --git a/FFMpegCore.Test/ArgumentBuilderTest.cs b/FFMpegCore.Test/ArgumentBuilderTest.cs index f676a44..30adabd 100644 --- a/FFMpegCore.Test/ArgumentBuilderTest.cs +++ b/FFMpegCore.Test/ArgumentBuilderTest.cs @@ -1,4 +1,5 @@ -using FFMpegCore.Arguments; +using System.Drawing; +using FFMpegCore.Arguments; using FFMpegCore.Enums; using Microsoft.VisualStudio.TestTools.UnitTesting; @@ -258,6 +259,13 @@ public void Builder_BuildString_Seek() Assert.AreEqual("-ss 00:00:10.000 -i \"input.mp4\" -ss 00:00:10.000 \"output.mp4\"", str); } + [TestMethod] + public void Builder_BuildString_EndSeek() + { + var str = FFMpegArguments.FromFileInput("input.mp4", false, opt => opt.EndSeek(TimeSpan.FromSeconds(10))).OutputToFile("output.mp4", false, opt => opt.EndSeek(TimeSpan.FromSeconds(10))).Arguments; + Assert.AreEqual("-to 00:00:10.000 -i \"input.mp4\" -to 00:00:10.000 \"output.mp4\"", str); + } + [TestMethod] public void Builder_BuildString_Shortest() { @@ -530,5 +538,38 @@ public void Builder_BuildString_PadFilter_Alt() "-i \"input.mp4\" -vf \"pad=aspect=4/3:x=(ow-iw)/2:y=(oh-ih)/2:color=violet:eval=frame\" \"output.mp4\"", str); } + + [TestMethod] + public void Builder_BuildString_GifPalette() + { + var streamIndex = 0; + var size = new Size(640, 480); + + var str = FFMpegArguments + .FromFileInput("input.mp4") + .OutputToFile("output.gif", false, opt => opt + .WithGifPaletteArgument(streamIndex, size)) + .Arguments; + + Assert.AreEqual($""" + -i "input.mp4" -filter_complex "[0:v] fps=12,scale=w={size.Width}:h={size.Height},split [a][b];[a] palettegen=max_colors=32 [p];[b][p] paletteuse=dither=bayer" "output.gif" + """, str); + } + + [TestMethod] + public void Builder_BuildString_GifPalette_NullSize_FpsSupplied() + { + var streamIndex = 1; + + var str = FFMpegArguments + .FromFileInput("input.mp4") + .OutputToFile("output.gif", false, opt => opt + .WithGifPaletteArgument(streamIndex, null, 10)) + .Arguments; + + Assert.AreEqual($""" + -i "input.mp4" -filter_complex "[{streamIndex}:v] fps=10,split [a][b];[a] palettegen=max_colors=32 [p];[b][p] paletteuse=dither=bayer" "output.gif" + """, str); + } } } diff --git a/FFMpegCore.Test/FFMpegCore.Test.csproj b/FFMpegCore.Test/FFMpegCore.Test.csproj index def07d2..b78af1b 100644 --- a/FFMpegCore.Test/FFMpegCore.Test.csproj +++ b/FFMpegCore.Test/FFMpegCore.Test.csproj @@ -12,17 +12,19 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive - + all runtime; build; native; contentfiles; analyzers; buildtransitive - + + + diff --git a/FFMpegCore.Test/Resources/TestResources.cs b/FFMpegCore.Test/Resources/TestResources.cs index de84080..b958b80 100644 --- a/FFMpegCore.Test/Resources/TestResources.cs +++ b/FFMpegCore.Test/Resources/TestResources.cs @@ -1,15 +1,5 @@ namespace FFMpegCore.Test.Resources { - public enum AudioType - { - Mp3 - } - - public enum ImageType - { - Png - } - public static class TestResources { public static readonly string Mp4Video = "./Resources/input_3sec.mp4"; diff --git a/FFMpegCore.Test/Utilities/BitmapSources.cs b/FFMpegCore.Test/Utilities/BitmapSources.cs index b7ecb45..f3b657a 100644 --- a/FFMpegCore.Test/Utilities/BitmapSources.cs +++ b/FFMpegCore.Test/Utilities/BitmapSources.cs @@ -2,14 +2,14 @@ using System.Drawing.Imaging; using System.Numerics; using System.Runtime.Versioning; -using 
FFMpegCore.Extensions.System.Drawing.Common; using FFMpegCore.Pipes; +using SkiaSharp; namespace FFMpegCore.Test.Utilities { - [SupportedOSPlatform("windows")] internal static class BitmapSource { + [SupportedOSPlatform("windows")] public static IEnumerable CreateBitmaps(int count, PixelFormat fmt, int w, int h) { for (var i = 0; i < count; i++) @@ -21,10 +21,44 @@ public static IEnumerable CreateBitmaps(int count, PixelFormat fmt, } } - public static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset) + public static IEnumerable CreateBitmaps(int count, SKColorType fmt, int w, int h) + { + for (var i = 0; i < count; i++) + { + using (var frame = CreateVideoFrame(i, fmt, w, h, 0.025f, 0.025f * w * 0.03f)) + { + yield return frame; + } + } + } + + [SupportedOSPlatform("windows")] + public static Extensions.System.Drawing.Common.BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset) { var bitmap = new Bitmap(w, h, fmt); + foreach (var (x, y, red, green, blue) in GenerateVideoFramePixels(index, w, h, scaleNoise, offset)) + { + var color = Color.FromArgb(red, blue, green); + bitmap.SetPixel(x, y, color); + } + + return new Extensions.System.Drawing.Common.BitmapVideoFrameWrapper(bitmap); + } + + public static Extensions.SkiaSharp.BitmapVideoFrameWrapper CreateVideoFrame(int index, SKColorType fmt, int w, int h, float scaleNoise, float offset) + { + var bitmap = new SKBitmap(w, h, fmt, SKAlphaType.Opaque); + + bitmap.Pixels = GenerateVideoFramePixels(index, w, h, scaleNoise, offset) + .Select(args => new SKColor(args.red, args.blue, args.green)) + .ToArray(); + + return new Extensions.SkiaSharp.BitmapVideoFrameWrapper(bitmap); + } + + private static IEnumerable<(int x, int y, byte red, byte green, byte blue)> GenerateVideoFramePixels(int index, int w, int h, float scaleNoise, float offset) + { offset = offset * index; for (var y = 0; y < h; y++) @@ -36,15 +70,11 @@ public static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fm var nx = x * scaleNoise + offset; var ny = y * scaleNoise + offset; - var value = (int)((Perlin.Noise(nx, ny) + 1.0f) / 2.0f * 255); + var value = (byte)((Perlin.Noise(nx, ny) + 1.0f) / 2.0f * 255); - var color = Color.FromArgb((int)(value * xf), (int)(value * yf), value); - - bitmap.SetPixel(x, y, color); + yield return ((x, y, (byte)(value * xf), (byte)(value * yf), value)); } } - - return new BitmapVideoFrameWrapper(bitmap); } // diff --git a/FFMpegCore.Test/VideoTest.cs b/FFMpegCore.Test/VideoTest.cs index e3e4b6b..5071a48 100644 --- a/FFMpegCore.Test/VideoTest.cs +++ b/FFMpegCore.Test/VideoTest.cs @@ -1,10 +1,10 @@ -using System.Drawing.Imaging; +using System.Drawing; +using System.Drawing.Imaging; using System.Runtime.Versioning; using System.Text; using FFMpegCore.Arguments; using FFMpegCore.Enums; using FFMpegCore.Exceptions; -using FFMpegCore.Extensions.System.Drawing.Common; using FFMpegCore.Pipes; using FFMpegCore.Test.Resources; using FFMpegCore.Test.Utilities; @@ -15,7 +15,9 @@ namespace FFMpegCore.Test [TestClass] public class VideoTest { - [TestMethod, Timeout(10000)] + private const int BaseTimeoutMilliseconds = 15_000; + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_ToOGV() { using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}"); @@ -27,7 +29,7 @@ public void Video_ToOGV() Assert.IsTrue(success); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] 
public void Video_ToMP4() { using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}"); @@ -39,7 +41,7 @@ public void Video_ToMP4() Assert.IsTrue(success); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_ToMP4_YUV444p() { using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}"); @@ -55,7 +57,7 @@ public void Video_ToMP4_YUV444p() Assert.IsTrue(analysis.VideoStreams.First().PixelFormat == "yuv444p"); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_ToMP4_Args() { using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}"); @@ -68,7 +70,7 @@ public void Video_ToMP4_Args() Assert.IsTrue(success); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_ToH265_MKV_Args() { using var outputFile = new TemporaryFile($"out.mkv"); @@ -82,10 +84,17 @@ public void Video_ToH265_MKV_Args() } [SupportedOSPlatform("windows")] - [WindowsOnlyDataTestMethod, Timeout(10000)] + [WindowsOnlyDataTestMethod, Timeout(BaseTimeoutMilliseconds)] [DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)] [DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)] - public void Video_ToMP4_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat) + public void Video_ToMP4_Args_Pipe_WindowsOnly(System.Drawing.Imaging.PixelFormat pixelFormat) => Video_ToMP4_Args_Pipe_Internal(pixelFormat); + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + [DataRow(SkiaSharp.SKColorType.Rgb565)] + [DataRow(SkiaSharp.SKColorType.Bgra8888)] + public void Video_ToMP4_Args_Pipe(SkiaSharp.SKColorType pixelFormat) => Video_ToMP4_Args_Pipe_Internal(pixelFormat); + + private static void Video_ToMP4_Args_Pipe_Internal(dynamic pixelFormat) { using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}"); @@ -99,15 +108,20 @@ public void Video_ToMP4_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat } [SupportedOSPlatform("windows")] - [WindowsOnlyTestMethod, Timeout(10000)] - public void Video_ToMP4_Args_Pipe_DifferentImageSizes() + [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)] + public void Video_ToMP4_Args_Pipe_DifferentImageSizes_WindowsOnly() => Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal(System.Drawing.Imaging.PixelFormat.Format24bppRgb); + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + public void Video_ToMP4_Args_Pipe_DifferentImageSizes() => Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal(SkiaSharp.SKColorType.Rgb565); + + private static void Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal(dynamic pixelFormat) { using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}"); var frames = new List { - BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0), - BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0) + BitmapSource.CreateVideoFrame(0, pixelFormat, 255, 255, 1, 0), + BitmapSource.CreateVideoFrame(0, pixelFormat, 256, 256, 1, 0) }; var videoFramesSource = new RawVideoPipeSource(frames); @@ -119,15 +133,20 @@ public void Video_ToMP4_Args_Pipe_DifferentImageSizes() } [SupportedOSPlatform("windows")] - [WindowsOnlyTestMethod, Timeout(10000)] - public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Async() + [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)] + public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_WindowsOnly_Async() => await 
Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal_Async(System.Drawing.Imaging.PixelFormat.Format24bppRgb); + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Async() => await Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal_Async(SkiaSharp.SKColorType.Rgb565); + + private static async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Internal_Async(dynamic pixelFormat) { using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}"); var frames = new List { - BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0), - BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0) + BitmapSource.CreateVideoFrame(0, pixelFormat, 255, 255, 1, 0), + BitmapSource.CreateVideoFrame(0, pixelFormat, 256, 256, 1, 0) }; var videoFramesSource = new RawVideoPipeSource(frames); @@ -139,15 +158,21 @@ public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Async() } [SupportedOSPlatform("windows")] - [WindowsOnlyTestMethod, Timeout(10000)] - public void Video_ToMP4_Args_Pipe_DifferentPixelFormats() + [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)] + public void Video_ToMP4_Args_Pipe_DifferentPixelFormats_WindowsOnly() => + Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal(System.Drawing.Imaging.PixelFormat.Format24bppRgb, System.Drawing.Imaging.PixelFormat.Format32bppRgb); + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + public void Video_ToMP4_Args_Pipe_DifferentPixelFormats() => Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal(SkiaSharp.SKColorType.Rgb565, SkiaSharp.SKColorType.Bgra8888); + + private static void Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal(dynamic pixelFormatFrame1, dynamic pixelFormatFrame2) { using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}"); var frames = new List { - BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0), - BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format32bppRgb, 255, 255, 1, 0) + BitmapSource.CreateVideoFrame(0, pixelFormatFrame1, 255, 255, 1, 0), + BitmapSource.CreateVideoFrame(0, pixelFormatFrame2, 255, 255, 1, 0) }; var videoFramesSource = new RawVideoPipeSource(frames); @@ -159,15 +184,21 @@ public void Video_ToMP4_Args_Pipe_DifferentPixelFormats() } [SupportedOSPlatform("windows")] - [WindowsOnlyTestMethod, Timeout(10000)] - public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Async() + [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)] + public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_WindowsOnly_Async() => + await Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal_Async(System.Drawing.Imaging.PixelFormat.Format24bppRgb, System.Drawing.Imaging.PixelFormat.Format32bppRgb); + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Async() => await Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal_Async(SkiaSharp.SKColorType.Rgb565, SkiaSharp.SKColorType.Bgra8888); + + private static async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Internal_Async(dynamic pixelFormatFrame1, dynamic pixelFormatFrame2) { using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}"); var frames = new List { - BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0), - BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format32bppRgb, 255, 
255, 1, 0) + BitmapSource.CreateVideoFrame(0, pixelFormatFrame1, 255, 255, 1, 0), + BitmapSource.CreateVideoFrame(0, pixelFormatFrame2, 255, 255, 1, 0) }; var videoFramesSource = new RawVideoPipeSource(frames); @@ -178,7 +209,7 @@ public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Async() .ProcessAsynchronously()); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_ToMP4_Args_StreamPipe() { using var input = File.OpenRead(TestResources.WebmVideo); @@ -192,7 +223,7 @@ public void Video_ToMP4_Args_StreamPipe() Assert.IsTrue(success); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public async Task Video_ToMP4_Args_StreamOutputPipe_Async_Failure() { await Assert.ThrowsExceptionAsync(async () => @@ -206,7 +237,7 @@ await FFMpegArguments }); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_StreamFile_OutputToMemoryStream() { var output = new MemoryStream(); @@ -223,7 +254,7 @@ public void Video_StreamFile_OutputToMemoryStream() Console.WriteLine(result.Duration); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_ToMP4_Args_StreamOutputPipe_Failure() { Assert.ThrowsException(() => @@ -237,7 +268,7 @@ public void Video_ToMP4_Args_StreamOutputPipe_Failure() }); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public async Task Video_ToMP4_Args_StreamOutputPipe_Async() { await using var ms = new MemoryStream(); @@ -250,7 +281,7 @@ await FFMpegArguments .ProcessAsynchronously(); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public async Task TestDuplicateRun() { FFMpegArguments @@ -266,7 +297,7 @@ await FFMpegArguments File.Delete("temporary.mp4"); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void TranscodeToMemoryStream_Success() { using var output = new MemoryStream(); @@ -284,7 +315,7 @@ public void TranscodeToMemoryStream_Success() Assert.AreEqual(inputAnalysis.Duration.TotalSeconds, outputAnalysis.Duration.TotalSeconds, 0.3); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_ToTS() { using var outputFile = new TemporaryFile($"out{VideoType.MpegTs.Extension}"); @@ -296,7 +327,7 @@ public void Video_ToTS() Assert.IsTrue(success); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_ToTS_Args() { using var outputFile = new TemporaryFile($"out{VideoType.MpegTs.Extension}"); @@ -312,10 +343,17 @@ public void Video_ToTS_Args() } [SupportedOSPlatform("windows")] - [WindowsOnlyDataTestMethod, Timeout(10000)] + [WindowsOnlyDataTestMethod, Timeout(BaseTimeoutMilliseconds)] [DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)] [DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)] - public async Task Video_ToTS_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat) + public async Task Video_ToTS_Args_Pipe_WindowsOnly(System.Drawing.Imaging.PixelFormat pixelFormat) => await Video_ToTS_Args_Pipe_Internal(pixelFormat); + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + [DataRow(SkiaSharp.SKColorType.Rgb565)] + [DataRow(SkiaSharp.SKColorType.Bgra8888)] + public async Task Video_ToTS_Args_Pipe(SkiaSharp.SKColorType pixelFormat) => await Video_ToTS_Args_Pipe_Internal(pixelFormat); + + private static async Task Video_ToTS_Args_Pipe_Internal(dynamic pixelFormat) { using var output = new 
TemporaryFile($"out{VideoType.Ts.Extension}"); var input = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256)); @@ -331,7 +369,7 @@ public async Task Video_ToTS_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelF Assert.AreEqual(VideoType.Ts.Name, analysis.Format.FormatName); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public async Task Video_ToOGV_Resize() { using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}"); @@ -345,11 +383,10 @@ public async Task Video_ToOGV_Resize() } [SupportedOSPlatform("windows")] - [WindowsOnlyDataTestMethod, Timeout(10000)] - [DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)] - [DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)] - [DataRow(System.Drawing.Imaging.PixelFormat.Format48bppRgb)] - public void RawVideoPipeSource_Ogv_Scale(System.Drawing.Imaging.PixelFormat pixelFormat) + [WindowsOnlyDataTestMethod, Timeout(BaseTimeoutMilliseconds)] + [DataRow(SkiaSharp.SKColorType.Rgb565)] + [DataRow(SkiaSharp.SKColorType.Bgra8888)] + public void RawVideoPipeSource_Ogv_Scale(SkiaSharp.SKColorType pixelFormat) { using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}"); var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256)); @@ -366,7 +403,7 @@ public void RawVideoPipeSource_Ogv_Scale(System.Drawing.Imaging.PixelFormat pixe Assert.AreEqual((int)VideoSize.Ed, analysis.PrimaryVideoStream!.Width); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Scale_Mp4_Multithreaded() { using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}"); @@ -381,11 +418,18 @@ public void Scale_Mp4_Multithreaded() } [SupportedOSPlatform("windows")] - [WindowsOnlyDataTestMethod, Timeout(10000)] + [WindowsOnlyDataTestMethod, Timeout(BaseTimeoutMilliseconds)] [DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)] [DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)] // [DataRow(PixelFormat.Format48bppRgb)] - public void Video_ToMP4_Resize_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat) + public void Video_ToMP4_Resize_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat) => Video_ToMP4_Resize_Args_Pipe_Internal(pixelFormat); + + [DataTestMethod, Timeout(BaseTimeoutMilliseconds)] + [DataRow(SkiaSharp.SKColorType.Rgb565)] + [DataRow(SkiaSharp.SKColorType.Bgra8888)] + public void Video_ToMP4_Resize_Args_Pipe(SkiaSharp.SKColorType pixelFormat) => Video_ToMP4_Resize_Args_Pipe_Internal(pixelFormat); + + private static void Video_ToMP4_Resize_Args_Pipe_Internal(dynamic pixelFormat) { using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}"); var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256)); @@ -399,10 +443,10 @@ public void Video_ToMP4_Resize_Args_Pipe(System.Drawing.Imaging.PixelFormat pixe } [SupportedOSPlatform("windows")] - [WindowsOnlyTestMethod, Timeout(10000)] - public void Video_Snapshot_InMemory() + [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)] + public void Video_Snapshot_InMemory_SystemDrawingCommon() { - using var bitmap = FFMpegImage.Snapshot(TestResources.Mp4Video); + using var bitmap = Extensions.System.Drawing.Common.FFMpegImage.Snapshot(TestResources.Mp4Video); var input = FFProbe.Analyse(TestResources.Mp4Video); Assert.AreEqual(input.PrimaryVideoStream!.Width, bitmap.Width); @@ -410,10 +454,22 @@ public void Video_Snapshot_InMemory() 
Assert.AreEqual(bitmap.RawFormat, ImageFormat.Png); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + public void Video_Snapshot_InMemory_SkiaSharp() + { + using var bitmap = Extensions.SkiaSharp.FFMpegImage.Snapshot(TestResources.Mp4Video); + + var input = FFProbe.Analyse(TestResources.Mp4Video); + Assert.AreEqual(input.PrimaryVideoStream!.Width, bitmap.Width); + Assert.AreEqual(input.PrimaryVideoStream.Height, bitmap.Height); + // Note: The resulting ColorType is dependent on the execution environment and therefore not assessed, + // e.g. Bgra8888 on Windows and Rgba8888 on macOS. + } + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_Snapshot_PersistSnapshot() { - var outputPath = new TemporaryFile("out.png"); + using var outputPath = new TemporaryFile("out.png"); var input = FFProbe.Analyse(TestResources.Mp4Video); FFMpeg.Snapshot(TestResources.Mp4Video, outputPath); @@ -424,13 +480,71 @@ public void Video_Snapshot_PersistSnapshot() Assert.AreEqual("png", analysis.PrimaryVideoStream!.CodecName); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + public void Video_GifSnapshot_PersistSnapshot() + { + using var outputPath = new TemporaryFile("out.gif"); + var input = FFProbe.Analyse(TestResources.Mp4Video); + + FFMpeg.GifSnapshot(TestResources.Mp4Video, outputPath, captureTime: TimeSpan.FromSeconds(0)); + + var analysis = FFProbe.Analyse(outputPath); + Assert.AreNotEqual(input.PrimaryVideoStream!.Width, analysis.PrimaryVideoStream!.Width); + Assert.AreNotEqual(input.PrimaryVideoStream.Height, analysis.PrimaryVideoStream!.Height); + Assert.AreEqual("gif", analysis.PrimaryVideoStream!.CodecName); + } + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + public void Video_GifSnapshot_PersistSnapshot_SizeSupplied() + { + using var outputPath = new TemporaryFile("out.gif"); + var input = FFProbe.Analyse(TestResources.Mp4Video); + var desiredGifSize = new Size(320, 240); + + FFMpeg.GifSnapshot(TestResources.Mp4Video, outputPath, desiredGifSize, captureTime: TimeSpan.FromSeconds(0)); + + var analysis = FFProbe.Analyse(outputPath); + Assert.AreNotEqual(input.PrimaryVideoStream!.Width, desiredGifSize.Width); + Assert.AreNotEqual(input.PrimaryVideoStream.Height, desiredGifSize.Height); + Assert.AreEqual("gif", analysis.PrimaryVideoStream!.CodecName); + } + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + public async Task Video_GifSnapshot_PersistSnapshotAsync() + { + using var outputPath = new TemporaryFile("out.gif"); + var input = FFProbe.Analyse(TestResources.Mp4Video); + + await FFMpeg.GifSnapshotAsync(TestResources.Mp4Video, outputPath, captureTime: TimeSpan.FromSeconds(0)); + + var analysis = FFProbe.Analyse(outputPath); + Assert.AreNotEqual(input.PrimaryVideoStream!.Width, analysis.PrimaryVideoStream!.Width); + Assert.AreNotEqual(input.PrimaryVideoStream.Height, analysis.PrimaryVideoStream!.Height); + Assert.AreEqual("gif", analysis.PrimaryVideoStream!.CodecName); + } + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + public async Task Video_GifSnapshot_PersistSnapshotAsync_SizeSupplied() + { + using var outputPath = new TemporaryFile("out.gif"); + var input = FFProbe.Analyse(TestResources.Mp4Video); + var desiredGifSize = new Size(320, 240); + + await FFMpeg.GifSnapshotAsync(TestResources.Mp4Video, outputPath, desiredGifSize, captureTime: TimeSpan.FromSeconds(0)); + + var analysis = FFProbe.Analyse(outputPath); + Assert.AreNotEqual(input.PrimaryVideoStream!.Width, desiredGifSize.Width); + 
Assert.AreNotEqual(input.PrimaryVideoStream.Height, desiredGifSize.Height); + Assert.AreEqual("gif", analysis.PrimaryVideoStream!.CodecName); + } + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_Join() { - var inputCopy = new TemporaryFile("copy-input.mp4"); + using var inputCopy = new TemporaryFile("copy-input.mp4"); File.Copy(TestResources.Mp4Video, inputCopy); - var outputPath = new TemporaryFile("out.mp4"); + using var outputPath = new TemporaryFile("out.mp4"); var input = FFProbe.Analyse(TestResources.Mp4Video); var success = FFMpeg.Join(outputPath, TestResources.Mp4Video, inputCopy); Assert.IsTrue(success); @@ -446,7 +560,7 @@ public void Video_Join() Assert.AreEqual(input.PrimaryVideoStream.Width, result.PrimaryVideoStream.Width); } - [TestMethod, Timeout(20000)] + [TestMethod, Timeout(2 * BaseTimeoutMilliseconds)] public void Video_Join_Image_Sequence() { var imageSet = new List(); @@ -461,7 +575,7 @@ public void Video_Join_Image_Sequence() }); var imageAnalysis = FFProbe.Analyse(imageSet.First()); - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new TemporaryFile("out.mp4"); var success = FFMpeg.JoinImageSequence(outputFile, frameRate: 10, images: imageSet.ToArray()); Assert.IsTrue(success); var result = FFProbe.Analyse(outputFile); @@ -471,7 +585,7 @@ public void Video_Join_Image_Sequence() Assert.AreEqual(imageAnalysis.PrimaryVideoStream!.Height, result.PrimaryVideoStream.Height); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_With_Only_Audio_Should_Extract_Metadata() { var video = FFProbe.Analyse(TestResources.Mp4WithoutVideo); @@ -480,11 +594,11 @@ public void Video_With_Only_Audio_Should_Extract_Metadata() Assert.AreEqual(10, video.Duration.TotalSeconds, 0.5); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_Duration() { var video = FFProbe.Analyse(TestResources.Mp4Video); - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new TemporaryFile("out.mp4"); FFMpegArguments .FromFileInput(TestResources.Mp4Video) @@ -500,10 +614,10 @@ public void Video_Duration() Assert.AreEqual(video.Duration.Seconds - 2, outputVideo.Duration.Seconds); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_UpdatesProgress() { - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new TemporaryFile("out.mp4"); var percentageDone = 0.0; var timeDone = TimeSpan.Zero; @@ -541,10 +655,10 @@ void OnTimeProgess(TimeSpan time) Assert.AreNotEqual(analysis.Duration, timeDone); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_OutputsData() { - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new TemporaryFile("out.mp4"); var dataReceived = false; GlobalFFOptions.Configure(opt => opt.Encoding = Encoding.UTF8); @@ -563,12 +677,17 @@ public void Video_OutputsData() } [SupportedOSPlatform("windows")] - [WindowsOnlyTestMethod, Timeout(10000)] - public void Video_TranscodeInMemory() + [WindowsOnlyTestMethod, Timeout(BaseTimeoutMilliseconds)] + public void Video_TranscodeInMemory_WindowsOnly() => Video_TranscodeInMemory_Internal(System.Drawing.Imaging.PixelFormat.Format24bppRgb); + + [TestMethod, Timeout(BaseTimeoutMilliseconds)] + public void Video_TranscodeInMemory() => Video_TranscodeInMemory_Internal(SkiaSharp.SKColorType.Rgb565); + + private static void Video_TranscodeInMemory_Internal(dynamic pixelFormat) 
{ using var resStream = new MemoryStream(); var reader = new StreamPipeSink(resStream); - var writer = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 128, 128)); + var writer = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 128, 128)); FFMpegArguments .FromPipeInput(writer) @@ -583,7 +702,7 @@ public void Video_TranscodeInMemory() Assert.AreEqual(vi.PrimaryVideoStream.Height, 128); } - [TestMethod, Timeout(20000)] + [TestMethod, Timeout(2 * BaseTimeoutMilliseconds)] public void Video_TranscodeToMemory() { using var memoryStream = new MemoryStream(); @@ -601,10 +720,10 @@ public void Video_TranscodeToMemory() Assert.AreEqual(vi.PrimaryVideoStream.Height, 360); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public async Task Video_Cancel_Async() { - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new TemporaryFile("out.mp4"); var task = FFMpegArguments .FromFileInput("testsrc2=size=320x240[out0]; sine[out1]", false, args => args @@ -625,10 +744,10 @@ public async Task Video_Cancel_Async() Assert.IsFalse(result); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_Cancel() { - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new TemporaryFile("out.mp4"); var task = FFMpegArguments .FromFileInput("testsrc2=size=320x240[out0]; sine[out1]", false, args => args .WithCustomArgument("-re") @@ -646,10 +765,10 @@ public void Video_Cancel() Assert.IsFalse(result); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public async Task Video_Cancel_Async_With_Timeout() { - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new TemporaryFile("out.mp4"); var task = FFMpegArguments .FromFileInput("testsrc2=size=320x240[out0]; sine[out1]", false, args => args @@ -676,10 +795,10 @@ public async Task Video_Cancel_Async_With_Timeout() Assert.AreEqual("aac", outputInfo.PrimaryAudioStream!.CodecName); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public async Task Video_Cancel_CancellationToken_Async() { - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new TemporaryFile("out.mp4"); var cts = new CancellationTokenSource(); @@ -701,10 +820,10 @@ public async Task Video_Cancel_CancellationToken_Async() Assert.IsFalse(result); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public async Task Video_Cancel_CancellationToken_Async_Throws() { - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new TemporaryFile("out.mp4"); var cts = new CancellationTokenSource(); @@ -724,10 +843,10 @@ public async Task Video_Cancel_CancellationToken_Async_Throws() await Assert.ThrowsExceptionAsync(() => task); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public void Video_Cancel_CancellationToken_Throws() { - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new TemporaryFile("out.mp4"); var cts = new CancellationTokenSource(); @@ -746,10 +865,10 @@ public void Video_Cancel_CancellationToken_Throws() Assert.ThrowsException(() => task.ProcessSynchronously()); } - [TestMethod, Timeout(10000)] + [TestMethod, Timeout(BaseTimeoutMilliseconds)] public async Task Video_Cancel_CancellationToken_Async_With_Timeout() { - var outputFile = new TemporaryFile("out.mp4"); + using var outputFile = new 
TemporaryFile("out.mp4"); var cts = new CancellationTokenSource(); diff --git a/FFMpegCore.sln b/FFMpegCore.sln index 5a9faa8..7ab0929 100644 --- a/FFMpegCore.sln +++ b/FFMpegCore.sln @@ -9,7 +9,9 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Test", "FFMpegCo EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Examples", "FFMpegCore.Examples\FFMpegCore.Examples.csproj", "{3125CF91-FFBD-4E4E-8930-247116AFE772}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FFMpegCore.Extensions.System.Drawing.Common", "FFMpegCore.Extensions.System.Drawing.Common\FFMpegCore.Extensions.System.Drawing.Common.csproj", "{9C1A4930-9369-4A18-AD98-929A2A510D80}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Extensions.System.Drawing.Common", "FFMpegCore.Extensions.System.Drawing.Common\FFMpegCore.Extensions.System.Drawing.Common.csproj", "{9C1A4930-9369-4A18-AD98-929A2A510D80}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Extensions.SkiaSharp", "FFMpegCore.Extensions.SkiaSharp\FFMpegCore.Extensions.SkiaSharp.csproj", "{5A76F9B7-3681-4551-A9B6-8D3AC5DA1090}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -33,6 +35,10 @@ Global {9C1A4930-9369-4A18-AD98-929A2A510D80}.Debug|Any CPU.Build.0 = Debug|Any CPU {9C1A4930-9369-4A18-AD98-929A2A510D80}.Release|Any CPU.ActiveCfg = Release|Any CPU {9C1A4930-9369-4A18-AD98-929A2A510D80}.Release|Any CPU.Build.0 = Release|Any CPU + {5A76F9B7-3681-4551-A9B6-8D3AC5DA1090}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5A76F9B7-3681-4551-A9B6-8D3AC5DA1090}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5A76F9B7-3681-4551-A9B6-8D3AC5DA1090}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5A76F9B7-3681-4551-A9B6-8D3AC5DA1090}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/FFMpegCore/Extend/TimeSpanExtensions.cs b/FFMpegCore/Extend/TimeSpanExtensions.cs new file mode 100644 index 0000000..3e70d5c --- /dev/null +++ b/FFMpegCore/Extend/TimeSpanExtensions.cs @@ -0,0 +1,15 @@ +namespace FFMpegCore.Extend; + +public static class TimeSpanExtensions +{ + public static string ToLongString(this TimeSpan timeSpan) + { + var hours = timeSpan.Hours; + if (timeSpan.Days > 0) + { + hours += timeSpan.Days * 24; + } + + return $"{hours:00}:{timeSpan.Minutes:00}:{timeSpan.Seconds:00}.{timeSpan.Milliseconds:000}"; + } +} diff --git a/FFMpegCore/FFMpeg/Arguments/EndSeekArgument.cs b/FFMpegCore/FFMpeg/Arguments/EndSeekArgument.cs new file mode 100644 index 0000000..e4e8f5d --- /dev/null +++ b/FFMpegCore/FFMpeg/Arguments/EndSeekArgument.cs @@ -0,0 +1,19 @@ +using FFMpegCore.Extend; + +namespace FFMpegCore.Arguments +{ + /// + /// Represents seek parameter + /// + public class EndSeekArgument : IArgument + { + public readonly TimeSpan? SeekTo; + + public EndSeekArgument(TimeSpan? seekTo) + { + SeekTo = seekTo; + } + + public string Text => SeekTo.HasValue ? $"-to {SeekTo.Value.ToLongString()}" : string.Empty; + } +} diff --git a/FFMpegCore/FFMpeg/Arguments/GifPaletteArgument.cs b/FFMpegCore/FFMpeg/Arguments/GifPaletteArgument.cs new file mode 100644 index 0000000..ac67fcd --- /dev/null +++ b/FFMpegCore/FFMpeg/Arguments/GifPaletteArgument.cs @@ -0,0 +1,24 @@ +using System.Drawing; + +namespace FFMpegCore.Arguments +{ + public class GifPaletteArgument : IArgument + { + private readonly int _streamIndex; + + private readonly int _fps; + + private readonly Size? 
_size; + + public GifPaletteArgument(int streamIndex, int fps, Size? size) + { + _streamIndex = streamIndex; + _fps = fps; + _size = size; + } + + private string ScaleText => _size.HasValue ? $"scale=w={_size.Value.Width}:h={_size.Value.Height}," : string.Empty; + + public string Text => $"-filter_complex \"[{_streamIndex}:v] fps={_fps},{ScaleText}split [a][b];[a] palettegen=max_colors=32 [p];[b][p] paletteuse=dither=bayer\""; + } +} diff --git a/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs b/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs index 8862e76..29cda7f 100644 --- a/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs +++ b/FFMpegCore/FFMpeg/Arguments/SeekArgument.cs @@ -1,4 +1,6 @@ -namespace FFMpegCore.Arguments +using FFMpegCore.Extend; + +namespace FFMpegCore.Arguments { /// /// Represents seek parameter @@ -12,25 +14,6 @@ public SeekArgument(TimeSpan? seekTo) SeekTo = seekTo; } - public string Text - { - get - { - if (SeekTo.HasValue) - { - var hours = SeekTo.Value.Hours; - if (SeekTo.Value.Days > 0) - { - hours += SeekTo.Value.Days * 24; - } - - return $"-ss {hours.ToString("00")}:{SeekTo.Value.Minutes.ToString("00")}:{SeekTo.Value.Seconds.ToString("00")}.{SeekTo.Value.Milliseconds.ToString("000")}"; - } - else - { - return string.Empty; - } - } - } + public string Text => SeekTo.HasValue ? $"-ss {SeekTo.Value.ToLongString()}" : string.Empty; } } diff --git a/FFMpegCore/FFMpeg/Enums/FileExtension.cs b/FFMpegCore/FFMpeg/Enums/FileExtension.cs index b5e775d..f3067ba 100644 --- a/FFMpegCore/FFMpeg/Enums/FileExtension.cs +++ b/FFMpegCore/FFMpeg/Enums/FileExtension.cs @@ -20,5 +20,6 @@ public static string Extension(this Codec type) public static readonly string WebM = VideoType.WebM.Extension; public static readonly string Png = ".png"; public static readonly string Mp3 = ".mp3"; + public static readonly string Gif = ".gif"; } } diff --git a/FFMpegCore/FFMpeg/FFMpeg.cs b/FFMpegCore/FFMpeg/FFMpeg.cs index 58526b8..a8de12b 100644 --- a/FFMpegCore/FFMpeg/FFMpeg.cs +++ b/FFMpegCore/FFMpeg/FFMpeg.cs @@ -57,6 +57,36 @@ public static async Task SnapshotAsync(string input, string output, Size? .ProcessAsynchronously(); } + public static bool GifSnapshot(string input, string output, Size? size = null, TimeSpan? captureTime = null, TimeSpan? duration = null, int? streamIndex = null) + { + if (Path.GetExtension(output)?.ToLower() != FileExtension.Gif) + { + output = Path.Combine(Path.GetDirectoryName(output), Path.GetFileNameWithoutExtension(output) + FileExtension.Gif); + } + + var source = FFProbe.Analyse(input); + var (arguments, outputOptions) = SnapshotArgumentBuilder.BuildGifSnapshotArguments(input, source, size, captureTime, duration, streamIndex); + + return arguments + .OutputToFile(output, true, outputOptions) + .ProcessSynchronously(); + } + + public static async Task GifSnapshotAsync(string input, string output, Size? size = null, TimeSpan? captureTime = null, TimeSpan? duration = null, int? streamIndex = null) + { + if (Path.GetExtension(output)?.ToLower() != FileExtension.Gif) + { + output = Path.Combine(Path.GetDirectoryName(output), Path.GetFileNameWithoutExtension(output) + FileExtension.Gif); + } + + var source = await FFProbe.AnalyseAsync(input).ConfigureAwait(false); + var (arguments, outputOptions) = SnapshotArgumentBuilder.BuildGifSnapshotArguments(input, source, size, captureTime, duration, streamIndex); + + return await arguments + .OutputToFile(output, true, outputOptions) + .ProcessAsynchronously(); + } + /// /// Converts an image sequence to a video. 
/// @@ -66,25 +96,34 @@ public static async Task SnapshotAsync(string input, string output, Size? /// Output video information. public static bool JoinImageSequence(string output, double frameRate = 30, params string[] images) { - int? width = null, height = null; - var tempFolderName = Path.Combine(GlobalFFOptions.Current.TemporaryFilesFolder, Guid.NewGuid().ToString()); - var temporaryImageFiles = images.Select((imagePath, index) => + var fileExtensions = images.Select(Path.GetExtension).Distinct().ToArray(); + if (fileExtensions.Length != 1) { - var analysis = FFProbe.Analyse(imagePath); - FFMpegHelper.ConversionSizeExceptionCheck(analysis.PrimaryVideoStream!.Width, analysis.PrimaryVideoStream!.Height); - width ??= analysis.PrimaryVideoStream.Width; - height ??= analysis.PrimaryVideoStream.Height; + throw new ArgumentException("All images must have the same extension", nameof(images)); + } - var destinationPath = Path.Combine(tempFolderName, $"{index.ToString().PadLeft(9, '0')}{Path.GetExtension(imagePath)}"); - Directory.CreateDirectory(tempFolderName); - File.Copy(imagePath, destinationPath); - return destinationPath; - }).ToArray(); + var fileExtension = fileExtensions[0].ToLowerInvariant(); + int? width = null, height = null; + + var tempFolderName = Path.Combine(GlobalFFOptions.Current.TemporaryFilesFolder, Guid.NewGuid().ToString()); + Directory.CreateDirectory(tempFolderName); try { + var index = 0; + foreach (var imagePath in images) + { + var analysis = FFProbe.Analyse(imagePath); + FFMpegHelper.ConversionSizeExceptionCheck(analysis.PrimaryVideoStream!.Width, analysis.PrimaryVideoStream!.Height); + width ??= analysis.PrimaryVideoStream.Width; + height ??= analysis.PrimaryVideoStream.Height; + + var destinationPath = Path.Combine(tempFolderName, $"{index++.ToString().PadLeft(9, '0')}{fileExtension}"); + File.Copy(imagePath, destinationPath); + } + return FFMpegArguments - .FromFileInput(Path.Combine(tempFolderName, "%09d.png"), false) + .FromFileInput(Path.Combine(tempFolderName, $"%09d{fileExtension}"), false) .OutputToFile(output, true, options => options .ForcePixelFormat("yuv420p") .Resize(width!.Value, height!.Value) @@ -93,8 +132,7 @@ public static bool JoinImageSequence(string output, double frameRate = 30, param } finally { - Cleanup(temporaryImageFiles); - Directory.Delete(tempFolderName); + Directory.Delete(tempFolderName, true); } } @@ -239,6 +277,46 @@ public static bool Join(string output, params string[] videos) } } + private static FFMpegArgumentProcessor BaseSubVideo(string input, string output, TimeSpan startTime, TimeSpan endTime) + { + if (Path.GetExtension(input) != Path.GetExtension(output)) + { + output = Path.Combine(Path.GetDirectoryName(output), Path.GetFileNameWithoutExtension(output), Path.GetExtension(input)); + } + + return FFMpegArguments + .FromFileInput(input, true, options => options.Seek(startTime).EndSeek(endTime)) + .OutputToFile(output, true, options => options.CopyChannel()); + } + + /// + /// Creates a new video starting and ending at the specified times + /// + /// Input video file. + /// Output video file. + /// The start time of when the sub video needs to start + /// The end time of where the sub video needs to end + /// Output video information. + public static bool SubVideo(string input, string output, TimeSpan startTime, TimeSpan endTime) + { + return BaseSubVideo(input, output, startTime, endTime) + .ProcessSynchronously(); + } + + /// + /// Creates a new video starting and ending at the specified times + /// + /// Input video file. 
+ /// Output video file. + /// The start time of when the sub video needs to start + /// The end time of where the sub video needs to end + /// Output video information. + public static async Task SubVideoAsync(string input, string output, TimeSpan startTime, TimeSpan endTime) + { + return await BaseSubVideo(input, output, startTime, endTime) + .ProcessAsynchronously(); + } + /// /// Records M3U8 streams to the specified output. /// diff --git a/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs b/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs index 0f54b8c..4930b52 100644 --- a/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs +++ b/FFMpegCore/FFMpeg/FFMpegArgumentOptions.cs @@ -54,6 +54,7 @@ public FFMpegArgumentOptions WithAudioFilters(Action audioFi public FFMpegArgumentOptions WithCustomArgument(string argument) => WithArgument(new CustomArgument(argument)); public FFMpegArgumentOptions Seek(TimeSpan? seekTo) => WithArgument(new SeekArgument(seekTo)); + public FFMpegArgumentOptions EndSeek(TimeSpan? seekTo) => WithArgument(new EndSeekArgument(seekTo)); public FFMpegArgumentOptions Loop(int times) => WithArgument(new LoopArgument(times)); public FFMpegArgumentOptions OverwriteExisting() => WithArgument(new OverwriteArgument()); public FFMpegArgumentOptions SelectStream(int streamIndex, int inputFileIndex = 0, @@ -75,6 +76,7 @@ public FFMpegArgumentOptions DeselectStreams(IEnumerable streamIndices, int public FFMpegArgumentOptions WithAudibleEncryptionKeys(string key, string iv) => WithArgument(new AudibleEncryptionKeyArgument(key, iv)); public FFMpegArgumentOptions WithAudibleActivationBytes(string activationBytes) => WithArgument(new AudibleEncryptionKeyArgument(activationBytes)); public FFMpegArgumentOptions WithTagVersion(int id3v2Version = 3) => WithArgument(new ID3V2VersionArgument(id3v2Version)); + public FFMpegArgumentOptions WithGifPaletteArgument(int streamIndex, Size? size, int fps = 12) => WithArgument(new GifPaletteArgument(streamIndex, fps, size)); public FFMpegArgumentOptions WithArgument(IArgument argument) { diff --git a/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs b/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs index fe4c881..2f3028f 100644 --- a/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs +++ b/FFMpegCore/FFMpeg/Pipes/RawVideoPipeSource.cs @@ -15,13 +15,11 @@ public class RawVideoPipeSource : IPipeSource private bool _formatInitialized; private readonly IEnumerator _framesEnumerator; - public RawVideoPipeSource(IEnumerator framesEnumerator) + public RawVideoPipeSource(IEnumerable framesEnumerator) { - _framesEnumerator = framesEnumerator; + _framesEnumerator = framesEnumerator.GetEnumerator(); } - public RawVideoPipeSource(IEnumerable framesEnumerator) : this(framesEnumerator.GetEnumerator()) { } - public string GetStreamArguments() { if (!_formatInitialized) diff --git a/FFMpegCore/FFMpeg/SnapshotArgumentBuilder.cs b/FFMpegCore/FFMpeg/SnapshotArgumentBuilder.cs index 4456837..0d9b414 100644 --- a/FFMpegCore/FFMpeg/SnapshotArgumentBuilder.cs +++ b/FFMpegCore/FFMpeg/SnapshotArgumentBuilder.cs @@ -31,6 +31,31 @@ public static (FFMpegArguments, Action outputOptions) Bui .Resize(size)); } + public static (FFMpegArguments, Action outputOptions) BuildGifSnapshotArguments( + string input, + IMediaAnalysis source, + Size? size = null, + TimeSpan? captureTime = null, + TimeSpan? duration = null, + int? 
streamIndex = null, + int fps = 12) + { + var defaultGifOutputSize = new Size(480, -1); + + captureTime ??= TimeSpan.FromSeconds(source.Duration.TotalSeconds / 3); + size = PrepareSnapshotSize(source, size) ?? defaultGifOutputSize; + streamIndex ??= source.PrimaryVideoStream?.Index + ?? source.VideoStreams.FirstOrDefault()?.Index + ?? 0; + + return (FFMpegArguments + .FromFileInput(input, false, options => options + .Seek(captureTime) + .WithDuration(duration)), + options => options + .WithGifPaletteArgument((int)streamIndex, size, fps)); + } + private static Size? PrepareSnapshotSize(IMediaAnalysis source, Size? wantedSize) { if (wantedSize == null || (wantedSize.Value.Height <= 0 && wantedSize.Value.Width <= 0) || source.PrimaryVideoStream == null) diff --git a/FFMpegCore/FFMpegCore.csproj b/FFMpegCore/FFMpegCore.csproj index 7c3f7bb..2af7f16 100644 --- a/FFMpegCore/FFMpegCore.csproj +++ b/FFMpegCore/FFMpegCore.csproj @@ -1,23 +1,24 @@  - - true - A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and conversion into your .NET applications - 5.0.0 - - - ffmpeg ffprobe convert video audio mediafile resize analyze muxing - Malte Rosenbjerg, Vlad Jerca, Max Bagryantsev - README.md - + + true + A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and conversion into your .NET applications + 5.1.0 + ../nupkg + + + ffmpeg ffprobe convert video audio mediafile resize analyze muxing + Malte Rosenbjerg, Vlad Jerca, Max Bagryantsev + README.md + - - - + + + - - - - + + + + diff --git a/FFMpegCore/FFProbe/MediaAnalysis.cs b/FFMpegCore/FFProbe/MediaAnalysis.cs index 53943dc..9fce0fe 100644 --- a/FFMpegCore/FFProbe/MediaAnalysis.cs +++ b/FFMpegCore/FFProbe/MediaAnalysis.cs @@ -50,7 +50,7 @@ private MediaFormat ParseFormat(Format analysisFormat) { var bitDepth = int.TryParse(stream.BitsPerRawSample, out var bprs) ? bprs : stream.BitsPerSample; - return bitDepth == 0 ? null : (int?)bitDepth; + return bitDepth == 0 ? null : bitDepth; } private VideoStream ParseVideoStream(FFProbeStream stream) @@ -126,7 +126,7 @@ public static class MediaAnalysisUtils { private static readonly Regex DurationRegex = new(@"^(\d+):(\d{1,2}):(\d{1,2})\.(\d{1,3})", RegexOptions.Compiled); - internal static Dictionary? ToCaseInsensitive(this Dictionary? dictionary) + internal static Dictionary ToCaseInsensitive(this Dictionary? dictionary) { return dictionary?.ToDictionary(tag => tag.Key, tag => tag.Value, StringComparer.OrdinalIgnoreCase) ?? new Dictionary(); } @@ -195,11 +195,6 @@ public static TimeSpan ParseDuration(string duration) } } - public static TimeSpan ParseDuration(FFProbeStream ffProbeStream) - { - return ParseDuration(ffProbeStream.Duration); - } - public static int ParseRotation(FFProbeStream fFProbeStream) { var displayMatrixSideData = fFProbeStream.SideData?.Find(item => item.TryGetValue("side_data_type", out var rawSideDataType) && rawSideDataType.ToString() == "Display Matrix"); diff --git a/FFMpegCore/Helpers/FFProbeHelper.cs b/FFMpegCore/Helpers/FFProbeHelper.cs index 0c44ab6..ff1ff20 100644 --- a/FFMpegCore/Helpers/FFProbeHelper.cs +++ b/FFMpegCore/Helpers/FFProbeHelper.cs @@ -3,27 +3,10 @@ namespace FFMpegCore.Helpers { - public class FFProbeHelper + public static class FFProbeHelper { private static bool _ffprobeVerified; - public static int Gcd(int first, int second) - { - while (first != 0 && second != 0) - { - if (first > second) - { - first -= second; - } - else - { - second -= first; - } - } - - return first == 0 ? 
second : first; - } - public static void RootExceptionCheck() { if (GlobalFFOptions.Current.BinaryFolder == null) diff --git a/README.md b/README.md index 990361e..365d0be 100644 --- a/README.md +++ b/README.md @@ -63,6 +63,17 @@ var bitmap = FFMpeg.Snapshot(inputPath, new Size(200, 400), TimeSpan.FromMinutes FFMpeg.Snapshot(inputPath, outputPath, new Size(200, 400), TimeSpan.FromMinutes(1)); ``` +### You can also capture GIF snapshots from a video file: +```csharp +FFMpeg.GifSnapshot(inputPath, outputPath, new Size(200, 400), TimeSpan.FromSeconds(10)); + +// or async +await FFMpeg.GifSnapshotAsync(inputPath, outputPath, new Size(200, 400), TimeSpan.FromSeconds(10)); + +// you can also supply -1 to either of the Width/Height Size properties if you'd like FFMPEG to resize while maintaining the aspect ratio +await FFMpeg.GifSnapshotAsync(inputPath, outputPath, new Size(480, -1), TimeSpan.FromSeconds(10)); +``` + ### Join video parts into one single file: ```csharp FFMpeg.Join(@"..\joined_video.mp4", @@ -72,6 +83,15 @@ FFMpeg.Join(@"..\joined_video.mp4", ); ``` + +### Create a sub video +```csharp +FFMpeg.SubVideo(inputPath, + outputPath, + TimeSpan.FromSeconds(0), + TimeSpan.FromSeconds(30) +); +``` + ### Join images into a video: ```csharp FFMpeg.JoinImageSequence(@"..\joined_video.mp4", frameRate: 1, diff --git a/testenvironments.json b/testenvironments.json new file mode 100644 index 0000000..14b2763 --- /dev/null +++ b/testenvironments.json @@ -0,0 +1,10 @@ +{ + "version": "1", + "environments": [ + { + "name": "Ubuntu", + "type": "wsl", + "wslDistribution": "Ubuntu" + } + ] +} \ No newline at end of file
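
For reference, a minimal usage sketch of the public surface this changeset introduces: `FFMpeg.SubVideo`, `FFMpeg.GifSnapshot`/`GifSnapshotAsync`, the new `EndSeek` (`-to`) option on the argument builder, and the SkiaSharp snapshot extension. The calls mirror the signatures added above; the paths, sizes, and timestamps are placeholder values and not part of the changeset.

```csharp
using System.Drawing;
using FFMpegCore;
using FFMpegCore.Extensions.SkiaSharp;
using SkiaSharp;

var inputPath = "/path/to/input.mp4";   // placeholder paths
var clipPath = "/path/to/clip.mp4";
var gifPath = "/path/to/snapshot.gif";

// Cut a clip from 00:05 to 00:15; SubVideo pairs Seek (-ss) with the new EndSeek (-to) and stream-copies.
FFMpeg.SubVideo(inputPath, clipPath, TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15));

// Animated GIF snapshot via the palette filter; height -1 preserves the aspect ratio.
await FFMpeg.GifSnapshotAsync(inputPath, gifPath, new Size(480, -1),
    captureTime: TimeSpan.FromSeconds(2), duration: TimeSpan.FromSeconds(3));

// The same seek window can be applied directly on the argument builder.
await FFMpegArguments
    .FromFileInput(inputPath, true, options => options
        .Seek(TimeSpan.FromSeconds(5))
        .EndSeek(TimeSpan.FromSeconds(15)))
    .OutputToFile(clipPath, true, options => options.CopyChannel())
    .ProcessAsynchronously();

// In-memory snapshot without System.Drawing, using the new SkiaSharp extension.
using SKBitmap frame = FFMpegImage.Snapshot(inputPath, captureTime: TimeSpan.FromSeconds(1));
```

Note that, per the added code, `SubVideo` forces the output extension to match the input's, and `GifSnapshot`/`GifSnapshotAsync` rewrite the output extension to `.gif` when it differs.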