Merge branch 'master' into master

Former-commit-id: 5a2ebcf0f3
Malte Rosenbjerg 2021-03-15 23:20:27 +01:00 committed by GitHub
commit 8817b4c765
41 changed files with 1145 additions and 759 deletions

View file

@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net5.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\FFMpegCore\FFMpegCore.csproj" />
</ItemGroup>
</Project>

View file

@ -0,0 +1,124 @@
using System;
using System.Collections.Generic;
using System.Drawing;
using System.IO;
using FFMpegCore;
using FFMpegCore.Enums;
using FFMpegCore.Pipes;
using FFMpegCore.Extend;
var inputPath = "/path/to/input";
var outputPath = "/path/to/output";
{
var mediaInfo = FFProbe.Analyse(inputPath);
}
{
var mediaInfo = await FFProbe.AnalyseAsync(inputPath);
}
{
    // convert to H.264 (CRF 21) with AAC audio, scaled to 720p, faststart enabled
FFMpegArguments
.FromFileInput(inputPath)
.OutputToFile(outputPath, false, options => options
.WithVideoCodec(VideoCodec.LibX264)
.WithConstantRateFactor(21)
.WithAudioCodec(AudioCodec.Aac)
.WithVariableBitrate(4)
.WithVideoFilters(filterOptions => filterOptions
.Scale(VideoSize.Hd))
.WithFastStart())
.ProcessSynchronously();
}
{
// process the snapshot in-memory and use the Bitmap directly
var bitmap = FFMpeg.Snapshot(inputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
// or persist the image to disk
FFMpeg.Snapshot(inputPath, outputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
}
var inputStream = new MemoryStream();
var outputStream = new MemoryStream();
{
    // transcode between streams: pipe input to a VP9/WebM pipe output
await FFMpegArguments
.FromPipeInput(new StreamPipeSource(inputStream))
.OutputToPipe(new StreamPipeSink(outputStream), options => options
.WithVideoCodec("vp9")
.ForceFormat("webm"))
.ProcessAsynchronously();
}
{
FFMpeg.Join(@"..\joined_video.mp4",
@"..\part1.mp4",
@"..\part2.mp4",
@"..\part3.mp4"
);
}
{
FFMpeg.JoinImageSequence(@"..\joined_video.mp4", frameRate: 1,
ImageInfo.FromPath(@"..\1.png"),
ImageInfo.FromPath(@"..\2.png"),
ImageInfo.FromPath(@"..\3.png")
);
}
{
FFMpeg.Mute(inputPath, outputPath);
}
{
FFMpeg.ExtractAudio(inputPath, outputPath);
}
var inputAudioPath = "/path/to/input/audio";
{
FFMpeg.ReplaceAudio(inputPath, inputAudioPath, outputPath);
}
var inputImagePath = "/path/to/input/image";
{
FFMpeg.PosterWithAudio(inputImagePath, inputAudioPath, outputPath);
// or
var image = Image.FromFile(inputImagePath);
image.AddAudio(inputAudioPath, outputPath);
}
IVideoFrame GetNextFrame() => throw new NotImplementedException();
{
IEnumerable<IVideoFrame> CreateFrames(int count)
{
for(int i = 0; i < count; i++)
{
yield return GetNextFrame(); //method of generating new frames
}
}
var videoFramesSource = new RawVideoPipeSource(CreateFrames(64)) //pass IEnumerable<IVideoFrame> or IEnumerator<IVideoFrame> to constructor of RawVideoPipeSource
{
FrameRate = 30 //set source frame rate
};
await FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputPath, false, options => options
.WithVideoCodec(VideoCodec.LibVpx))
.ProcessAsynchronously();
}
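{
    // Illustrative sketch (an assumption, not part of the original example file):
    // one way GetNextFrame could produce frames is by wrapping a System.Drawing
    // bitmap in the library's BitmapVideoFrameWrapper; the size and colour used
    // here are arbitrary demonstration values.
    var bitmap = new Bitmap(320, 240);
    using (var graphics = Graphics.FromImage(bitmap))
    {
        graphics.Clear(Color.DarkSlateBlue);
    }
    IVideoFrame frame = new BitmapVideoFrameWrapper(bitmap); // ready to yield from CreateFrames
}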
{
// setting global options
GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
// or
GlobalFFOptions.Configure(options => options.BinaryFolder = "./bin");
// or individual, per-run options
await FFMpegArguments
.FromFileInput(inputPath)
.OutputToFile(outputPath)
.ProcessAsynchronously(true, new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
}

View file

@ -21,28 +21,35 @@ public void Builder_BuildString_IO_1()
[TestMethod]
public void Builder_BuildString_Scale()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", true, opt => opt.Scale(VideoSize.Hd)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -vf scale=-1:720 \"output.mp4\" -y", str);
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", true, opt => opt
.WithVideoFilters(filterOptions => filterOptions
.Scale(VideoSize.Hd)))
.Arguments;
Assert.AreEqual("-i \"input.mp4\" -vf \"scale=-1:720\" \"output.mp4\" -y", str);
}
[TestMethod]
public void Builder_BuildString_AudioCodec()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", true, opt => opt.WithAudioCodec(AudioCodec.Aac)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", true, opt => opt.WithAudioCodec(AudioCodec.Aac)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -c:a aac \"output.mp4\" -y", str);
}
[TestMethod]
public void Builder_BuildString_AudioBitrate()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", true, opt => opt.WithAudioBitrate(AudioQuality.Normal)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", true, opt => opt.WithAudioBitrate(AudioQuality.Normal)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -b:a 128k \"output.mp4\" -y", str);
}
[TestMethod]
public void Builder_BuildString_Quiet()
{
var str = FFMpegArguments.FromFileInput("input.mp4").WithGlobalOptions(opt => opt.WithVerbosityLevel()).OutputToFile("output.mp4", false).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4").WithGlobalOptions(opt => opt.WithVerbosityLevel())
.OutputToFile("output.mp4", false).Arguments;
Assert.AreEqual("-hide_banner -loglevel error -i \"input.mp4\" \"output.mp4\"", str);
}
@ -50,27 +57,32 @@ public void Builder_BuildString_Quiet()
[TestMethod]
public void Builder_BuildString_AudioCodec_Fluent()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithAudioCodec(AudioCodec.Aac).WithAudioBitrate(128)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false,
opt => opt.WithAudioCodec(AudioCodec.Aac).WithAudioBitrate(128)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -c:a aac -b:a 128k \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_BitStream()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithBitStreamFilter(Channel.Audio, Filter.H264_Mp4ToAnnexB)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false,
opt => opt.WithBitStreamFilter(Channel.Audio, Filter.H264_Mp4ToAnnexB)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -bsf:a h264_mp4toannexb \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_HardwareAcceleration_Auto()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithHardwareAcceleration()).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithHardwareAcceleration()).Arguments;
Assert.AreEqual("-i \"input.mp4\" -hwaccel \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_HardwareAcceleration_Specific()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithHardwareAcceleration(HardwareAccelerationDevice.CUVID)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false,
opt => opt.WithHardwareAcceleration(HardwareAccelerationDevice.CUVID)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -hwaccel cuvid \"output.mp4\"", str);
}
@ -84,112 +96,143 @@ public void Builder_BuildString_Concat()
[TestMethod]
public void Builder_BuildString_Copy_Audio()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.CopyChannel(Channel.Audio)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.CopyChannel(Channel.Audio)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -c:a copy \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Copy_Video()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.CopyChannel(Channel.Video)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.CopyChannel(Channel.Video)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -c:v copy \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Copy_Both()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.CopyChannel()).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.CopyChannel()).Arguments;
Assert.AreEqual("-i \"input.mp4\" -c copy \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_DisableChannel_Audio()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.DisableChannel(Channel.Audio)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.DisableChannel(Channel.Audio)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -an \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_DisableChannel_Video()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.DisableChannel(Channel.Video)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.DisableChannel(Channel.Video)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -vn \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_AudioSamplingRate_Default()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithAudioSamplingRate()).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithAudioSamplingRate()).Arguments;
Assert.AreEqual("-i \"input.mp4\" -ar 48000 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_AudioSamplingRate()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithAudioSamplingRate(44000)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithAudioSamplingRate(44000)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -ar 44000 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_VariableBitrate()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithVariableBitrate(5)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithVariableBitrate(5)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -vbr 5 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Faststart()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithFastStart()).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithFastStart()).Arguments;
Assert.AreEqual("-i \"input.mp4\" -movflags faststart \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Overwrite()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.OverwriteExisting()).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.OverwriteExisting()).Arguments;
Assert.AreEqual("-i \"input.mp4\" -y \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_RemoveMetadata()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithoutMetadata()).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithoutMetadata()).Arguments;
Assert.AreEqual("-i \"input.mp4\" -map_metadata -1 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Transpose()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.Transpose(Transposition.CounterClockwise90)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt
.WithVideoFilters(filterOptions => filterOptions
.Transpose(Transposition.CounterClockwise90)))
.Arguments;
Assert.AreEqual("-i \"input.mp4\" -vf \"transpose=2\" \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_TransposeScale()
{
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt
.WithVideoFilters(filterOptions => filterOptions
.Transpose(Transposition.CounterClockwise90)
.Scale(200, 300)))
.Arguments;
Assert.AreEqual("-i \"input.mp4\" -vf \"transpose=2, scale=200:300\" \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_ForceFormat()
{
var str = FFMpegArguments.FromFileInput("input.mp4", false, opt => opt.ForceFormat(VideoType.Mp4)).OutputToFile("output.mp4", false, opt => opt.ForceFormat(VideoType.Mp4)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4", false, opt => opt.ForceFormat(VideoType.Mp4))
.OutputToFile("output.mp4", false, opt => opt.ForceFormat(VideoType.Mp4)).Arguments;
Assert.AreEqual("-f mp4 -i \"input.mp4\" -f mp4 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_FrameOutputCount()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithFrameOutputCount(50)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithFrameOutputCount(50)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -vframes 50 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_FrameRate()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithFramerate(50)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithFramerate(50)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -r 50 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Loop()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.Loop(50)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.Loop(50))
.Arguments;
Assert.AreEqual("-i \"input.mp4\" -loop 50 \"output.mp4\"", str);
}
@ -203,21 +246,24 @@ public void Builder_BuildString_Seek()
[TestMethod]
public void Builder_BuildString_Shortest()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.UsingShortest()).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.UsingShortest()).Arguments;
Assert.AreEqual("-i \"input.mp4\" -shortest \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Size()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.Resize(1920, 1080)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.Resize(1920, 1080)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -s 1920x1080 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Speed()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithSpeedPreset(Speed.Fast)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithSpeedPreset(Speed.Fast)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -preset fast \"output.mp4\"", str);
}
@ -227,6 +273,7 @@ public void Builder_BuildString_DrawtextFilter()
var str = FFMpegArguments
.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt
.WithVideoFilters(filterOptions => filterOptions
.DrawText(DrawTextOptions
.Create("Stack Overflow", "/path/to/font.ttf")
.WithParameter("fontcolor", "white")
@ -235,10 +282,12 @@ public void Builder_BuildString_DrawtextFilter()
.WithParameter("boxcolor", "black@0.5")
.WithParameter("boxborderw", "5")
.WithParameter("x", "(w-text_w)/2")
.WithParameter("y", "(h-text_h)/2")))
.WithParameter("y", "(h-text_h)/2"))))
.Arguments;
Assert.AreEqual("-i \"input.mp4\" -vf drawtext=\"text='Stack Overflow':fontfile=/path/to/font.ttf:fontcolor=white:fontsize=24:box=1:boxcolor=black@0.5:boxborderw=5:x=(w-text_w)/2:y=(h-text_h)/2\" \"output.mp4\"", str);
Assert.AreEqual(
"-i \"input.mp4\" -vf \"drawtext=text='Stack Overflow':fontfile=/path/to/font.ttf:fontcolor=white:fontsize=24:box=1:boxcolor=black@0.5:boxborderw=5:x=(w-text_w)/2:y=(h-text_h)/2\" \"output.mp4\"",
str);
}
[TestMethod]
@ -247,45 +296,53 @@ public void Builder_BuildString_DrawtextFilter_Alt()
var str = FFMpegArguments
.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt
.WithVideoFilters(filterOptions => filterOptions
.DrawText(DrawTextOptions
.Create("Stack Overflow", "/path/to/font.ttf", ("fontcolor", "white"), ("fontsize", "24"))))
.Create("Stack Overflow", "/path/to/font.ttf", ("fontcolor", "white"), ("fontsize", "24")))))
.Arguments;
Assert.AreEqual("-i \"input.mp4\" -vf drawtext=\"text='Stack Overflow':fontfile=/path/to/font.ttf:fontcolor=white:fontsize=24\" \"output.mp4\"", str);
Assert.AreEqual(
"-i \"input.mp4\" -vf \"drawtext=text='Stack Overflow':fontfile=/path/to/font.ttf:fontcolor=white:fontsize=24\" \"output.mp4\"",
str);
}
[TestMethod]
public void Builder_BuildString_StartNumber()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithStartNumber(50)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithStartNumber(50)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -start_number 50 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Threads_1()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.UsingThreads(50)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.UsingThreads(50)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -threads 50 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Threads_2()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.UsingMultithreading(true)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.UsingMultithreading(true)).Arguments;
Assert.AreEqual($"-i \"input.mp4\" -threads {Environment.ProcessorCount} \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Codec()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithVideoCodec(VideoCodec.LibX264)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithVideoCodec(VideoCodec.LibX264)).Arguments;
Assert.AreEqual("-i \"input.mp4\" -c:v libx264 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Codec_Override()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", true, opt => opt.WithVideoCodec(VideoCodec.LibX264).ForcePixelFormat("yuv420p")).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", true,
opt => opt.WithVideoCodec(VideoCodec.LibX264).ForcePixelFormat("yuv420p")).Arguments;
Assert.AreEqual("-i \"input.mp4\" -c:v libx264 -pix_fmt yuv420p \"output.mp4\" -y", str);
}
@ -293,17 +350,20 @@ public void Builder_BuildString_Codec_Override()
[TestMethod]
public void Builder_BuildString_Duration()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithDuration(TimeSpan.FromSeconds(20))).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithDuration(TimeSpan.FromSeconds(20))).Arguments;
Assert.AreEqual("-i \"input.mp4\" -t 00:00:20 \"output.mp4\"", str);
}
[TestMethod]
public void Builder_BuildString_Raw()
{
var str = FFMpegArguments.FromFileInput("input.mp4", false, opt => opt.WithCustomArgument(null!)).OutputToFile("output.mp4", false, opt => opt.WithCustomArgument(null!)).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4", false, opt => opt.WithCustomArgument(null!))
.OutputToFile("output.mp4", false, opt => opt.WithCustomArgument(null!)).Arguments;
Assert.AreEqual(" -i \"input.mp4\" \"output.mp4\"", str);
str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.WithCustomArgument("-acodec copy")).Arguments;
str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.WithCustomArgument("-acodec copy")).Arguments;
Assert.AreEqual("-i \"input.mp4\" -acodec copy \"output.mp4\"", str);
}
@ -311,7 +371,8 @@ public void Builder_BuildString_Raw()
[TestMethod]
public void Builder_BuildString_ForcePixelFormat()
{
var str = FFMpegArguments.FromFileInput("input.mp4").OutputToFile("output.mp4", false, opt => opt.ForcePixelFormat("yuv444p")).Arguments;
var str = FFMpegArguments.FromFileInput("input.mp4")
.OutputToFile("output.mp4", false, opt => opt.ForcePixelFormat("yuv444p")).Arguments;
Assert.AreEqual("-i \"input.mp4\" -pix_fmt yuv444p \"output.mp4\"", str);
}
}

View file

@ -39,10 +39,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="GitHubActionsTestLogger" Version="1.1.2" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.8.3" />
<PackageReference Include="MSTest.TestAdapter" Version="2.1.2" />
<PackageReference Include="MSTest.TestFramework" Version="2.1.2" />
<PackageReference Include="GitHubActionsTestLogger" Version="1.2.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.9.1" />
<PackageReference Include="MSTest.TestAdapter" Version="2.2.1" />
<PackageReference Include="MSTest.TestFramework" Version="2.2.1" />
</ItemGroup>
<ItemGroup>

View file

@ -10,39 +10,39 @@ public class FFMpegOptionsTest
[TestMethod]
public void Options_Initialized()
{
Assert.IsNotNull(FFMpegOptions.Options);
Assert.IsNotNull(GlobalFFOptions.Current);
}
[TestMethod]
public void Options_Defaults_Configured()
{
Assert.AreEqual(new FFMpegOptions().RootDirectory, $"");
Assert.AreEqual(new FFOptions().BinaryFolder, $"");
}
[TestMethod]
public void Options_Loaded_From_File()
{
Assert.AreEqual(
FFMpegOptions.Options.RootDirectory,
JsonConvert.DeserializeObject<FFMpegOptions>(File.ReadAllText("ffmpeg.config.json")).RootDirectory
GlobalFFOptions.Current.BinaryFolder,
JsonConvert.DeserializeObject<FFOptions>(File.ReadAllText("ffmpeg.config.json")).BinaryFolder
);
}
[TestMethod]
public void Options_Set_Programmatically()
{
var original = FFMpegOptions.Options;
var original = GlobalFFOptions.Current;
try
{
FFMpegOptions.Configure(new FFMpegOptions { RootDirectory = "Whatever" });
GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "Whatever" });
Assert.AreEqual(
FFMpegOptions.Options.RootDirectory,
GlobalFFOptions.Current.BinaryFolder,
"Whatever"
);
}
finally
{
FFMpegOptions.Configure(original);
GlobalFFOptions.Configure(original);
}
}
}

View file

@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
using System.Threading.Tasks;
using FFMpegCore.Test.Resources;
using Microsoft.VisualStudio.TestTools.UnitTesting;
@ -40,15 +41,20 @@ public void MediaAnalysis_ParseDuration()
Assert.IsTrue(testdurationHHMMSS.Days == 0 && testshortDuration.Hours == 0 && testshortDuration.Minutes == 0 && testshortDuration.Seconds == 0 && testshortDuration.Milliseconds == 830);
}
[TestMethod]
public async Task Uri_Duration()
{
var fileAnalysis = await FFProbe.AnalyseAsync(new Uri("https://github.com/rosenbjerg/FFMpegCore/raw/master/FFMpegCore.Test/Resources/input_3sec.webm"));
Assert.IsNotNull(fileAnalysis);
}
[TestMethod]
public void Probe_Success()
{
var info = FFProbe.Analyse(TestResources.Mp4Video);
Assert.AreEqual(3, info.Duration.Seconds);
Assert.AreEqual(".mp4", info.Extension);
Assert.AreEqual(TestResources.Mp4Video, info.Path);
Assert.AreEqual("5.1", info.PrimaryAudioStream.ChannelLayout);
Assert.AreEqual("5.1", info.PrimaryAudioStream!.ChannelLayout);
Assert.AreEqual(6, info.PrimaryAudioStream.Channels);
Assert.AreEqual("AAC (Advanced Audio Coding)", info.PrimaryAudioStream.CodecLongName);
Assert.AreEqual("aac", info.PrimaryAudioStream.CodecName);
@ -56,7 +62,7 @@ public void Probe_Success()
Assert.AreEqual(377351, info.PrimaryAudioStream.BitRate);
Assert.AreEqual(48000, info.PrimaryAudioStream.SampleRateHz);
Assert.AreEqual(1471810, info.PrimaryVideoStream.BitRate);
Assert.AreEqual(1471810, info.PrimaryVideoStream!.BitRate);
Assert.AreEqual(16, info.PrimaryVideoStream.DisplayAspectRatio.Width);
Assert.AreEqual(9, info.PrimaryVideoStream.DisplayAspectRatio.Height);
Assert.AreEqual("yuv420p", info.PrimaryVideoStream.PixelFormat);

View file

@ -1,10 +0,0 @@
using System.Threading.Tasks;
namespace FFMpegCore.Test
{
static class TasksExtensions
{
public static T WaitForResult<T>(this Task<T> task) =>
task.ConfigureAwait(false).GetAwaiter().GetResult();
}
}

View file

@ -21,7 +21,7 @@ public static IEnumerable<IVideoFrame> CreateBitmaps(int count, PixelFormat fmt,
}
}
private static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset)
public static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset)
{
var bitmap = new Bitmap(w, h, fmt);

View file

@ -18,239 +18,57 @@ namespace FFMpegCore.Test
[TestClass]
public class VideoTest
{
public bool Convert(ContainerFormat type, bool multithreaded = false, VideoSize size = VideoSize.Original)
[TestMethod, Timeout(10000)]
public void Video_ToOGV()
{
using var outputFile = new TemporaryFile($"out{type.Extension}");
var input = FFProbe.Analyse(TestResources.Mp4Video);
FFMpeg.Convert(input, outputFile, type, size: size, multithreaded: multithreaded);
var outputVideo = FFProbe.Analyse(outputFile);
Assert.IsTrue(File.Exists(outputFile));
Assert.AreEqual(outputVideo.Duration.TotalSeconds, input.Duration.TotalSeconds, 0.1);
if (size == VideoSize.Original)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, input.PrimaryVideoStream.Width);
Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, input.PrimaryVideoStream.Height);
}
else
{
Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Width, input.PrimaryVideoStream.Width);
Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Height, input.PrimaryVideoStream.Height);
Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, (int)size);
}
return File.Exists(outputFile) &&
outputVideo.Duration == input.Duration &&
(
(
size == VideoSize.Original &&
outputVideo.PrimaryVideoStream.Width == input.PrimaryVideoStream.Width &&
outputVideo.PrimaryVideoStream.Height == input.PrimaryVideoStream.Height
) ||
(
size != VideoSize.Original &&
outputVideo.PrimaryVideoStream.Width != input.PrimaryVideoStream.Width &&
outputVideo.PrimaryVideoStream.Height != input.PrimaryVideoStream.Height &&
outputVideo.PrimaryVideoStream.Height == (int)size
)
);
}
private void ConvertFromStreamPipe(ContainerFormat type, params IArgument[] arguments)
{
using var outputFile = new TemporaryFile($"out{type.Extension}");
var input = FFProbe.Analyse(TestResources.WebmVideo);
using var inputStream = File.OpenRead(input.Path);
var processor = FFMpegArguments
.FromPipeInput(new StreamPipeSource(inputStream))
.OutputToFile(outputFile, false, opt =>
{
foreach (var arg in arguments)
opt.WithArgument(arg);
});
var scaling = arguments.OfType<ScaleArgument>().FirstOrDefault();
var success = processor.ProcessSynchronously();
var outputVideo = FFProbe.Analyse(outputFile);
using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}");
var success = FFMpegArguments
.FromFileInput(TestResources.WebmVideo)
.OutputToFile(outputFile, false)
.ProcessSynchronously();
Assert.IsTrue(success);
Assert.IsTrue(File.Exists(outputFile));
Assert.IsTrue(Math.Abs((outputVideo.Duration - input.Duration).TotalMilliseconds) < 1000.0 / input.PrimaryVideoStream.FrameRate);
if (scaling?.Size == null)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, input.PrimaryVideoStream.Width);
Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, input.PrimaryVideoStream.Height);
}
else
{
if (scaling.Size.Value.Width != -1)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, scaling.Size.Value.Width);
}
if (scaling.Size.Value.Height != -1)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, scaling.Size.Value.Height);
}
Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Width, input.PrimaryVideoStream.Width);
Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Height, input.PrimaryVideoStream.Height);
}
}
private void ConvertToStreamPipe(params IArgument[] arguments)
{
using var ms = new MemoryStream();
var processor = FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToPipe(new StreamPipeSink(ms), opt =>
{
foreach (var arg in arguments)
opt.WithArgument(arg);
});
var scaling = arguments.OfType<ScaleArgument>().FirstOrDefault();
processor.ProcessSynchronously();
ms.Position = 0;
var outputVideo = FFProbe.Analyse(ms);
var input = FFProbe.Analyse(TestResources.Mp4Video);
// Assert.IsTrue(Math.Abs((outputVideo.Duration - input.Duration).TotalMilliseconds) < 1000.0 / input.PrimaryVideoStream.FrameRate);
if (scaling?.Size == null)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, input.PrimaryVideoStream.Width);
Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, input.PrimaryVideoStream.Height);
}
else
{
if (scaling.Size.Value.Width != -1)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, scaling.Size.Value.Width);
}
if (scaling.Size.Value.Height != -1)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, scaling.Size.Value.Height);
}
Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Width, input.PrimaryVideoStream.Width);
Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Height, input.PrimaryVideoStream.Height);
}
}
public void Convert(ContainerFormat type, Action<IMediaAnalysis> validationMethod, params IArgument[] arguments)
{
using var outputFile = new TemporaryFile($"out{type.Extension}");
var input = FFProbe.Analyse(TestResources.Mp4Video);
var processor = FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToFile(outputFile, false, opt =>
{
foreach (var arg in arguments)
opt.WithArgument(arg);
});
var scaling = arguments.OfType<ScaleArgument>().FirstOrDefault();
processor.ProcessSynchronously();
var outputVideo = FFProbe.Analyse(outputFile);
Assert.IsTrue(File.Exists(outputFile));
Assert.AreEqual(outputVideo.Duration.TotalSeconds, input.Duration.TotalSeconds, 0.1);
validationMethod?.Invoke(outputVideo);
if (scaling?.Size == null)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, input.PrimaryVideoStream.Width);
Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, input.PrimaryVideoStream.Height);
}
else
{
if (scaling.Size.Value.Width != -1)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, scaling.Size.Value.Width);
}
if (scaling.Size.Value.Height != -1)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, scaling.Size.Value.Height);
}
Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Width, input.PrimaryVideoStream.Width);
Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Height, input.PrimaryVideoStream.Height);
}
}
public void Convert(ContainerFormat type, params IArgument[] inputArguments)
{
Convert(type, null, inputArguments);
}
public void ConvertFromPipe(ContainerFormat type, System.Drawing.Imaging.PixelFormat fmt, params IArgument[] arguments)
{
using var outputFile = new TemporaryFile($"out{type.Extension}");
var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, fmt, 256, 256));
var processor = FFMpegArguments.FromPipeInput(videoFramesSource).OutputToFile(outputFile, false, opt =>
{
foreach (var arg in arguments)
opt.WithArgument(arg);
});
var scaling = arguments.OfType<ScaleArgument>().FirstOrDefault();
processor.ProcessSynchronously();
var outputVideo = FFProbe.Analyse(outputFile);
Assert.IsTrue(File.Exists(outputFile));
if (scaling?.Size == null)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, videoFramesSource.Width);
Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, videoFramesSource.Height);
}
else
{
if (scaling.Size.Value.Width != -1)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Width, scaling.Size.Value.Width);
}
if (scaling.Size.Value.Height != -1)
{
Assert.AreEqual(outputVideo.PrimaryVideoStream.Height, scaling.Size.Value.Height);
}
Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Width, videoFramesSource.Width);
Assert.AreNotEqual(outputVideo.PrimaryVideoStream.Height, videoFramesSource.Height);
}
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4()
{
Convert(VideoType.Mp4);
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var success = FFMpegArguments
.FromFileInput(TestResources.WebmVideo)
.OutputToFile(outputFile, false)
.ProcessSynchronously();
Assert.IsTrue(success);
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_YUV444p()
{
Convert(VideoType.Mp4, (a) => Assert.IsTrue(a.VideoStreams.First().PixelFormat == "yuv444p"),
new ForcePixelFormat("yuv444p"));
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var success = FFMpegArguments
.FromFileInput(TestResources.WebmVideo)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264)
.ForcePixelFormat("yuv444p"))
.ProcessSynchronously();
Assert.IsTrue(success);
var analysis = FFProbe.Analyse(outputFile);
Assert.IsTrue(analysis.VideoStreams.First().PixelFormat == "yuv444p");
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Args()
{
Convert(VideoType.Mp4, new VideoCodecArgument(VideoCodec.LibX264));
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var success = FFMpegArguments
.FromFileInput(TestResources.WebmVideo)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessSynchronously();
Assert.IsTrue(success);
}
[DataTestMethod, Timeout(10000)]
@ -258,13 +76,115 @@ public void Video_ToMP4_Args()
[DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
public void Video_ToMP4_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
{
ConvertFromPipe(VideoType.Mp4, pixelFormat, new VideoCodecArgument(VideoCodec.LibX264));
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
var success = FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessSynchronously();
Assert.IsTrue(success);
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Args_Pipe_DifferentImageSizes()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
var ex = Assert.ThrowsException<FFMpegException>(() => FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessSynchronously());
Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
}
[TestMethod, Timeout(10000)]
public async Task Video_ToMP4_Args_Pipe_DifferentImageSizes_Async()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 256, 256, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
var ex = await Assert.ThrowsExceptionAsync<FFMpegException>(() => FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessAsynchronously());
Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Args_Pipe_DifferentPixelFormats()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format32bppRgb, 255, 255, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
var ex = Assert.ThrowsException<FFMpegException>(() => FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessSynchronously());
Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
}
[TestMethod, Timeout(10000)]
public async Task Video_ToMP4_Args_Pipe_DifferentPixelFormats_Async()
{
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var frames = new List<IVideoFrame>
{
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format24bppRgb, 255, 255, 1, 0),
BitmapSource.CreateVideoFrame(0, System.Drawing.Imaging.PixelFormat.Format32bppRgb, 255, 255, 1, 0)
};
var videoFramesSource = new RawVideoPipeSource(frames);
var ex = await Assert.ThrowsExceptionAsync<FFMpegException>(() => FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessAsynchronously());
Assert.IsInstanceOfType(ex.GetBaseException(), typeof(FFMpegStreamFormatException));
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Args_StreamPipe()
{
ConvertFromStreamPipe(VideoType.Mp4, new VideoCodecArgument(VideoCodec.LibX264));
using var input = File.OpenRead(TestResources.WebmVideo);
using var output = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var success = FFMpegArguments
.FromPipeInput(new StreamPipeSource(input))
.OutputToFile(output, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessSynchronously();
Assert.IsTrue(success);
}
[TestMethod, Timeout(10000)]
@ -276,18 +196,21 @@ await Assert.ThrowsExceptionAsync<FFMpegException>(async () =>
var pipeSource = new StreamPipeSink(ms);
await FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToPipe(pipeSource, opt => opt.ForceFormat("mkv"))
.OutputToPipe(pipeSource, opt => opt.ForceFormat("mp4"))
.ProcessAsynchronously();
});
}
[TestMethod, Timeout(10000)]
public void Video_StreamFile_OutputToMemoryStream()
{
var output = new MemoryStream();
FFMpegArguments
.FromPipeInput(new StreamPipeSource(File.OpenRead(TestResources.WebmVideo)), options => options.ForceFormat("webm"))
.OutputToPipe(new StreamPipeSink(output), options => options
.FromPipeInput(new StreamPipeSource(File.OpenRead(TestResources.WebmVideo)), opt => opt
.ForceFormat("webm"))
.OutputToPipe(new StreamPipeSink(output), opt => opt
.ForceFormat("mpegts"))
.ProcessSynchronously();
@ -299,32 +222,41 @@ public void Video_StreamFile_OutputToMemoryStream()
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Args_StreamOutputPipe_Failure()
{
Assert.ThrowsException<FFMpegException>(() => ConvertToStreamPipe(new ForceFormatArgument("mkv")));
Assert.ThrowsException<FFMpegException>(() =>
{
using var ms = new MemoryStream();
FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToPipe(new StreamPipeSink(ms), opt => opt
.ForceFormat("mkv"))
.ProcessSynchronously();
});
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Args_StreamOutputPipe_Async()
public async Task Video_ToMP4_Args_StreamOutputPipe_Async()
{
using var ms = new MemoryStream();
await using var ms = new MemoryStream();
var pipeSource = new StreamPipeSink(ms);
FFMpegArguments
await FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToPipe(pipeSource, opt => opt
.WithVideoCodec(VideoCodec.LibX264)
.ForceFormat("matroska"))
.ProcessAsynchronously()
.WaitForResult();
.ProcessAsynchronously();
}
[TestMethod, Timeout(10000)]
public async Task TestDuplicateRun()
{
FFMpegArguments.FromFileInput(TestResources.Mp4Video)
FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToFile("temporary.mp4")
.ProcessSynchronously();
await FFMpegArguments.FromFileInput(TestResources.Mp4Video)
await FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToFile("temporary.mp4")
.ProcessAsynchronously();
@ -332,65 +264,115 @@ await FFMpegArguments.FromFileInput(TestResources.Mp4Video)
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Args_StreamOutputPipe()
public void TranscodeToMemoryStream_Success()
{
ConvertToStreamPipe(new VideoCodecArgument(VideoCodec.LibX264), new ForceFormatArgument("matroska"));
using var output = new MemoryStream();
var success = FFMpegArguments
.FromFileInput(TestResources.WebmVideo)
.OutputToPipe(new StreamPipeSink(output), opt => opt
.WithVideoCodec(VideoCodec.LibVpx)
.ForceFormat("matroska"))
.ProcessSynchronously();
Assert.IsTrue(success);
output.Position = 0;
var inputAnalysis = FFProbe.Analyse(TestResources.WebmVideo);
var outputAnalysis = FFProbe.Analyse(output);
Assert.AreEqual(inputAnalysis.Duration.TotalSeconds, outputAnalysis.Duration.TotalSeconds, 0.3);
}
[TestMethod, Timeout(10000)]
public void Video_ToTS()
{
Convert(VideoType.Ts);
using var outputFile = new TemporaryFile($"out{VideoType.MpegTs.Extension}");
var success = FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToFile(outputFile, false)
.ProcessSynchronously();
Assert.IsTrue(success);
}
[TestMethod, Timeout(10000)]
public void Video_ToTS_Args()
{
Convert(VideoType.Ts,
new CopyArgument(),
new BitStreamFilterArgument(Channel.Video, Filter.H264_Mp4ToAnnexB),
new ForceFormatArgument(VideoType.MpegTs));
using var outputFile = new TemporaryFile($"out{VideoType.MpegTs.Extension}");
var success = FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToFile(outputFile, false, opt => opt
.CopyChannel()
.WithBitStreamFilter(Channel.Video, Filter.H264_Mp4ToAnnexB)
.ForceFormat(VideoType.MpegTs))
.ProcessSynchronously();
Assert.IsTrue(success);
}
[DataTestMethod, Timeout(10000)]
[DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)]
[DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
public void Video_ToTS_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
public async Task Video_ToTS_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
{
ConvertFromPipe(VideoType.Ts, pixelFormat, new ForceFormatArgument(VideoType.Ts));
using var output = new TemporaryFile($"out{VideoType.Ts.Extension}");
var input = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
var success = await FFMpegArguments
.FromPipeInput(input)
.OutputToFile(output, false, opt => opt
.ForceFormat(VideoType.Ts))
.ProcessAsynchronously();
Assert.IsTrue(success);
var analysis = await FFProbe.AnalyseAsync(output);
Assert.AreEqual(VideoType.Ts.Name, analysis.Format.FormatName);
}
[TestMethod, Timeout(10000)]
public void Video_ToOGV_Resize()
public async Task Video_ToOGV_Resize()
{
Convert(VideoType.Ogv, true, VideoSize.Ed);
}
[TestMethod, Timeout(10000)]
public void Video_ToOGV_Resize_Args()
{
Convert(VideoType.Ogv, new ScaleArgument(VideoSize.Ed), new VideoCodecArgument(VideoCodec.LibTheora));
using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}");
var success = await FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToFile(outputFile, false, opt => opt
.Resize(200, 200)
.WithVideoCodec(VideoCodec.LibTheora))
.ProcessAsynchronously();
Assert.IsTrue(success);
}
[DataTestMethod, Timeout(10000)]
[DataRow(System.Drawing.Imaging.PixelFormat.Format24bppRgb)]
[DataRow(System.Drawing.Imaging.PixelFormat.Format32bppArgb)]
// [DataRow(PixelFormat.Format48bppRgb)]
public void Video_ToOGV_Resize_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
public void RawVideoPipeSource_Ogv_Scale(System.Drawing.Imaging.PixelFormat pixelFormat)
{
ConvertFromPipe(VideoType.Ogv, pixelFormat, new ScaleArgument(VideoSize.Ed), new VideoCodecArgument(VideoCodec.LibTheora));
using var outputFile = new TemporaryFile($"out{VideoType.Ogv.Extension}");
var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoFilters(filterOptions => filterOptions
.Scale(VideoSize.Ed))
.WithVideoCodec(VideoCodec.LibTheora))
.ProcessSynchronously();
var analysis = FFProbe.Analyse(outputFile);
Assert.AreEqual((int)VideoSize.Ed, analysis.PrimaryVideoStream!.Width);
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Resize()
public void Scale_Mp4_Multithreaded()
{
Convert(VideoType.Mp4, true, VideoSize.Ed);
}
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
[TestMethod, Timeout(10000)]
public void Video_ToMP4_Resize_Args()
{
Convert(VideoType.Mp4, new ScaleArgument(VideoSize.Ld), new VideoCodecArgument(VideoCodec.LibX264));
var success = FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.OutputToFile(outputFile, false, opt => opt
.UsingMultithreading(true)
.WithVideoCodec(VideoCodec.LibX264))
.ProcessSynchronously();
Assert.IsTrue(success);
}
[DataTestMethod, Timeout(10000)]
@ -399,40 +381,24 @@ public void Video_ToMP4_Resize_Args()
// [DataRow(PixelFormat.Format48bppRgb)]
public void Video_ToMP4_Resize_Args_Pipe(System.Drawing.Imaging.PixelFormat pixelFormat)
{
ConvertFromPipe(VideoType.Mp4, pixelFormat, new ScaleArgument(VideoSize.Ld), new VideoCodecArgument(VideoCodec.LibX264));
}
using var outputFile = new TemporaryFile($"out{VideoType.Mp4.Extension}");
var videoFramesSource = new RawVideoPipeSource(BitmapSource.CreateBitmaps(128, pixelFormat, 256, 256));
[TestMethod, Timeout(10000)]
public void Video_ToOGV()
{
Convert(VideoType.Ogv);
}
[TestMethod, Timeout(10000)]
public void Video_ToMP4_MultiThread()
{
Convert(VideoType.Mp4, true);
}
[TestMethod, Timeout(10000)]
public void Video_ToTS_MultiThread()
{
Convert(VideoType.Ts, true);
}
[TestMethod, Timeout(10000)]
public void Video_ToOGV_MultiThread()
{
Convert(VideoType.Ogv, true);
var success = FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputFile, false, opt => opt
.WithVideoCodec(VideoCodec.LibX264))
.ProcessSynchronously();
Assert.IsTrue(success);
}
[TestMethod, Timeout(10000)]
public void Video_Snapshot_InMemory()
{
var input = FFProbe.Analyse(TestResources.Mp4Video);
using var bitmap = FFMpeg.Snapshot(input);
using var bitmap = FFMpeg.Snapshot(TestResources.Mp4Video);
Assert.AreEqual(input.PrimaryVideoStream.Width, bitmap.Width);
Assert.AreEqual(input.PrimaryVideoStream!.Width, bitmap.Width);
Assert.AreEqual(input.PrimaryVideoStream.Height, bitmap.Height);
Assert.AreEqual(bitmap.RawFormat, ImageFormat.Png);
}
@ -443,10 +409,10 @@ public void Video_Snapshot_PersistSnapshot()
var outputPath = new TemporaryFile("out.png");
var input = FFProbe.Analyse(TestResources.Mp4Video);
FFMpeg.Snapshot(input, outputPath);
FFMpeg.Snapshot(TestResources.Mp4Video, outputPath);
using var bitmap = Image.FromFile(outputPath);
Assert.AreEqual(input.PrimaryVideoStream.Width, bitmap.Width);
Assert.AreEqual(input.PrimaryVideoStream!.Width, bitmap.Width);
Assert.AreEqual(input.PrimaryVideoStream.Height, bitmap.Height);
Assert.AreEqual(bitmap.RawFormat, ImageFormat.Png);
}
@ -469,7 +435,7 @@ public void Video_Join()
Assert.AreEqual(expectedDuration.Hours, result.Duration.Hours);
Assert.AreEqual(expectedDuration.Minutes, result.Duration.Minutes);
Assert.AreEqual(expectedDuration.Seconds, result.Duration.Seconds);
Assert.AreEqual(input.PrimaryVideoStream.Height, result.PrimaryVideoStream.Height);
Assert.AreEqual(input.PrimaryVideoStream!.Height, result.PrimaryVideoStream!.Height);
Assert.AreEqual(input.PrimaryVideoStream.Width, result.PrimaryVideoStream.Width);
}
@ -493,7 +459,7 @@ public void Video_Join_Image_Sequence()
Assert.IsTrue(success);
var result = FFProbe.Analyse(outputFile);
Assert.AreEqual(3, result.Duration.Seconds);
Assert.AreEqual(imageSet.First().Width, result.PrimaryVideoStream.Width);
Assert.AreEqual(imageSet.First().Width, result.PrimaryVideoStream!.Width);
Assert.AreEqual(imageSet.First().Height, result.PrimaryVideoStream.Height);
}
@ -502,7 +468,7 @@ public void Video_With_Only_Audio_Should_Extract_Metadata()
{
var video = FFProbe.Analyse(TestResources.Mp4WithoutVideo);
Assert.AreEqual(null, video.PrimaryVideoStream);
Assert.AreEqual("aac", video.PrimaryAudioStream.CodecName);
Assert.AreEqual("aac", video.PrimaryAudioStream!.CodecName);
Assert.AreEqual(10, video.Duration.TotalSeconds, 0.5);
}
@ -557,7 +523,7 @@ public void Video_OutputsData()
var outputFile = new TemporaryFile("out.mp4");
var dataReceived = false;
FFMpegOptions.Configure(opt => opt.Encoding = Encoding.UTF8);
GlobalFFOptions.Configure(opt => opt.Encoding = Encoding.UTF8);
var success = FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.WithGlobalOptions(options => options
@ -588,7 +554,7 @@ public void Video_TranscodeInMemory()
resStream.Position = 0;
var vi = FFProbe.Analyse(resStream);
Assert.AreEqual(vi.PrimaryVideoStream.Width, 128);
Assert.AreEqual(vi.PrimaryVideoStream!.Width, 128);
Assert.AreEqual(vi.PrimaryVideoStream.Height, 128);
}
@ -598,14 +564,13 @@ public async Task Video_Cancel_Async()
var outputFile = new TemporaryFile("out.mp4");
var task = FFMpegArguments
.FromFileInput(TestResources.Mp4Video)
.FromFileInput("testsrc2=size=320x240[out0]; sine[out1]", false, args => args
.WithCustomArgument("-re")
.ForceFormat("lavfi"))
.OutputToFile(outputFile, false, opt => opt
.Resize(new Size(1000, 1000))
.WithAudioCodec(AudioCodec.Aac)
.WithVideoCodec(VideoCodec.LibX264)
.WithConstantRateFactor(14)
.WithSpeedPreset(Speed.VerySlow)
.Loop(3))
.WithSpeedPreset(Speed.VeryFast))
.CancellableThrough(out var cancel)
.ProcessAsynchronously(false);
@ -613,7 +578,39 @@ public async Task Video_Cancel_Async()
cancel();
var result = await task;
Assert.IsFalse(result);
}
[TestMethod, Timeout(10000)]
public async Task Video_Cancel_Async_With_Timeout()
{
var outputFile = new TemporaryFile("out.mp4");
var task = FFMpegArguments
.FromFileInput("testsrc2=size=320x240[out0]; sine[out1]", false, args => args
.WithCustomArgument("-re")
.ForceFormat("lavfi"))
.OutputToFile(outputFile, false, opt => opt
.WithAudioCodec(AudioCodec.Aac)
.WithVideoCodec(VideoCodec.LibX264)
.WithSpeedPreset(Speed.VeryFast))
.CancellableThrough(out var cancel, 10000)
.ProcessAsynchronously(false);
await Task.Delay(300);
cancel();
var result = await task;
var outputInfo = await FFProbe.AnalyseAsync(outputFile);
Assert.IsTrue(result);
Assert.IsNotNull(outputInfo);
Assert.AreEqual(320, outputInfo.PrimaryVideoStream!.Width);
Assert.AreEqual(240, outputInfo.PrimaryVideoStream.Height);
Assert.AreEqual("h264", outputInfo.PrimaryVideoStream.CodecName);
Assert.AreEqual("aac", outputInfo.PrimaryAudioStream!.CodecName);
}
}
}

View file

@ -7,6 +7,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FFMpegCore", "FFMpegCore\FF
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMpegCore.Test", "FFMpegCore.Test\FFMpegCore.Test.csproj", "{F20C8353-72D9-454B-9F16-3624DBAD2328}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FFMpegCore.Examples", "FFMpegCore.Examples\FFMpegCore.Examples.csproj", "{3125CF91-FFBD-4E4E-8930-247116AFE772}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@ -21,6 +23,10 @@ Global
{F20C8353-72D9-454B-9F16-3624DBAD2328}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F20C8353-72D9-454B-9F16-3624DBAD2328}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F20C8353-72D9-454B-9F16-3624DBAD2328}.Release|Any CPU.Build.0 = Release|Any CPU
{3125CF91-FFBD-4E4E-8930-247116AFE772}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{3125CF91-FFBD-4E4E-8930-247116AFE772}.Debug|Any CPU.Build.0 = Debug|Any CPU
{3125CF91-FFBD-4E4E-8930-247116AFE772}.Release|Any CPU.ActiveCfg = Release|Any CPU
{3125CF91-FFBD-4E4E-8930-247116AFE772}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

View file

@ -6,7 +6,7 @@ namespace FFMpegCore.Extend
{
public static class BitmapExtensions
{
public static bool AddAudio(this Bitmap poster, string audio, string output)
public static bool AddAudio(this Image poster, string audio, string output)
{
var destination = $"{Environment.TickCount}.png";
poster.Save(destination);

View file

@ -18,7 +18,7 @@ public DemuxConcatArgument(IEnumerable<string> values)
{
Values = values.Select(value => $"file '{value}'");
}
private readonly string _tempFileName = Path.Combine(FFMpegOptions.Options.TempDirectory, Guid.NewGuid() + ".txt");
private readonly string _tempFileName = Path.Combine(GlobalFFOptions.Current.TemporaryFilesFolder, $"concat_{Guid.NewGuid()}.txt");
public void Pre() => File.WriteAllLines(_tempFileName, Values);
public Task During(CancellationToken cancellationToken = default) => Task.CompletedTask;

View file

@ -6,7 +6,7 @@ namespace FFMpegCore.Arguments
/// <summary>
/// Drawtext video filter argument
/// </summary>
public class DrawTextArgument : IArgument
public class DrawTextArgument : IVideoFilterArgument
{
public readonly DrawTextOptions Options;
@ -15,7 +15,8 @@ public DrawTextArgument(DrawTextOptions options)
Options = options;
}
public string Text => $"-vf drawtext=\"{Options.TextInternal}\"";
public string Key { get; } = "drawtext";
public string Value => Options.TextInternal;
}
public class DrawTextOptions

View file

@ -0,0 +1,26 @@
using System.Threading;
using System.Threading.Tasks;
namespace FFMpegCore.Arguments
{
/// <summary>
/// Represents an input device parameter
/// </summary>
public class InputDeviceArgument : IInputArgument
{
private readonly string Device;
public InputDeviceArgument(string device)
{
Device = device;
}
public Task During(CancellationToken cancellationToken = default) => Task.CompletedTask;
public void Pre() { }
public void Post() { }
public string Text => $"-i {Device}";
}
}
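// Illustrative usage sketch (not part of this commit): the argument renders plainly
// as "-i <device>"; the device name below is hypothetical, and any required input
// format (e.g. "-f dshow" on Windows) would have to be supplied separately.
var webcam = new InputDeviceArgument("video=\"Integrated Camera\"");
Console.WriteLine(webcam.Text); // prints: -i video="Integrated Camera"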

View file

@ -0,0 +1,27 @@
using System.Threading;
using System.Threading.Tasks;
namespace FFMpegCore.Arguments
{
/// <summary>
/// Represents outputting to a URL using one of the supported protocols
/// See http://ffmpeg.org/ffmpeg-protocols.html
/// </summary>
public class OutputUrlArgument : IOutputArgument
{
public readonly string Url;
public OutputUrlArgument(string url)
{
Url = url;
}
public void Post() { }
public Task During(CancellationToken cancellationToken = default) => Task.CompletedTask;
public void Pre() { }
public string Text => Url;
}
}
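// Illustrative usage sketch (not part of this commit): the output target is the URL
// itself, so streaming to a hypothetical local RTMP endpoint would render as:
var rtmpTarget = new OutputUrlArgument("rtmp://127.0.0.1/live/stream");
Console.WriteLine(rtmpTarget.Text); // prints: rtmp://127.0.0.1/live/stream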

View file

@ -41,13 +41,16 @@ public async Task During(CancellationToken cancellationToken = default)
try
{
await ProcessDataAsync(cancellationToken);
Debug.WriteLine($"Disconnecting NamedPipeServerStream on {GetType().Name}");
Pipe?.Disconnect();
}
catch (TaskCanceledException)
{
Debug.WriteLine($"ProcessDataAsync on {GetType().Name} cancelled");
}
finally
{
Debug.WriteLine($"Disconnecting NamedPipeServerStream on {GetType().Name}");
Pipe?.Disconnect();
}
}
protected abstract Task ProcessDataAsync(CancellationToken token);

View file

@ -6,7 +6,7 @@ namespace FFMpegCore.Arguments
/// <summary>
/// Represents scale parameter
/// </summary>
public class ScaleArgument : IArgument
public class ScaleArgument : IVideoFilterArgument
{
public readonly Size? Size;
public ScaleArgument(Size? size)
@ -18,9 +18,10 @@ public ScaleArgument(int width, int height) : this(new Size(width, height)) { }
public ScaleArgument(VideoSize videosize)
{
Size = videosize == VideoSize.Original ? new Size(-1, -1) : new Size(-1, (int)videosize);
Size = videosize == VideoSize.Original ? null : (Size?)new Size(-1, (int)videosize);
}
public virtual string Text => Size.HasValue ? $"-vf scale={Size.Value.Width}:{Size.Value.Height}" : string.Empty;
public string Key { get; } = "scale";
public string Value => Size == null ? string.Empty : $"{Size.Value.Width}:{Size.Value.Height}";
}
}

View file

@ -6,14 +6,16 @@ namespace FFMpegCore.Arguments
/// <summary>
/// Represents size parameter
/// </summary>
public class SizeArgument : ScaleArgument
public class SizeArgument : IArgument
{
public SizeArgument(Size? value) : base(value) { }
public readonly Size? Size;
public SizeArgument(Size? size)
{
Size = size;
}
public SizeArgument(VideoSize videosize) : base(videosize) { }
public SizeArgument(int width, int height) : this(new Size(width, height)) { }
public SizeArgument(int width, int height) : base(width, height) { }
public override string Text => Size.HasValue ? $"-s {Size.Value.Width}x{Size.Value.Height}" : string.Empty;
public string Text => Size == null ? string.Empty : $"-s {Size.Value.Width}x{Size.Value.Height}";
}
}
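// Illustrative usage sketch (not part of this commit): SizeArgument now emits the
// "-s" output option directly, whereas ScaleArgument has become a "-vf" video filter.
var size = new SizeArgument(1920, 1080);
Console.WriteLine(size.Text); // prints: -s 1920x1080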

View file

@ -9,7 +9,7 @@ namespace FFMpegCore.Arguments
/// 2 = 90 CounterClockwise
/// 3 = 90 Clockwise and Vertical Flip
/// </summary>
public class TransposeArgument : IArgument
public class TransposeArgument : IVideoFilterArgument
{
public readonly Transposition Transposition;
public TransposeArgument(Transposition transposition)
@ -17,6 +17,7 @@ public TransposeArgument(Transposition transposition)
Transposition = transposition;
}
public string Text => $"-vf \"transpose={(int)Transposition}\"";
public string Key { get; } = "transpose";
public string Value => ((int)Transposition).ToString();
}
}

View file

@ -0,0 +1,51 @@
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using FFMpegCore.Enums;
using FFMpegCore.Exceptions;
namespace FFMpegCore.Arguments
{
public class VideoFiltersArgument : IArgument
{
public readonly VideoFilterOptions Options;
public VideoFiltersArgument(VideoFilterOptions options)
{
Options = options;
}
public string Text => GetText();
public string GetText()
{
if (!Options.Arguments.Any())
throw new FFMpegArgumentException("No video-filter arguments provided");
return $"-vf \"{string.Join(", ", Options.Arguments.Where(arg => !string.IsNullOrEmpty(arg.Value)).Select(arg => $"{arg.Key}={arg.Value.Replace(",", "\\,")}"))}\"";
}
}
public interface IVideoFilterArgument
{
public string Key { get; }
public string Value { get; }
}
public class VideoFilterOptions
{
public List<IVideoFilterArgument> Arguments { get; } = new List<IVideoFilterArgument>();
public VideoFilterOptions Scale(VideoSize videoSize) => WithArgument(new ScaleArgument(videoSize));
public VideoFilterOptions Scale(int width, int height) => WithArgument(new ScaleArgument(width, height));
public VideoFilterOptions Scale(Size size) => WithArgument(new ScaleArgument(size));
public VideoFilterOptions Transpose(Transposition transposition) => WithArgument(new TransposeArgument(transposition));
public VideoFilterOptions DrawText(DrawTextOptions drawTextOptions) => WithArgument(new DrawTextArgument(drawTextOptions));
private VideoFilterOptions WithArgument(IVideoFilterArgument argument)
{
Arguments.Add(argument);
return this;
}
}
}
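
The filter options above compose into a single `-vf` argument, with commas inside individual filter values escaped. A sketch combining two of the filters (paths are placeholders; the transposition value follows the enum documentation shown earlier):

```csharp
// Produces something like: -vf "scale=640:480, transpose=2"
// (Transposition)2 corresponds to "90 CounterClockwise" per the enum docs above.
FFMpegArguments
    .FromFileInput("/path/to/input.mp4")
    .OutputToFile("/path/to/output.mp4", true, options => options
        .WithVideoFilters(filterOptions => filterOptions
            .Scale(640, 480)
            .Transpose((Transposition)2)))
    .ProcessSynchronously();
```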

View file

@ -15,8 +15,8 @@ public string Extension
{
get
{
if (FFMpegOptions.Options.ExtensionOverrides.ContainsKey(Name))
return FFMpegOptions.Options.ExtensionOverrides[Name];
if (GlobalFFOptions.Current.ExtensionOverrides.ContainsKey(Name))
return GlobalFFOptions.Current.ExtensionOverrides[Name];
return "." + Name;
}
}
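
Extension lookups now go through GlobalFFOptions, so overrides can be added at startup; a sketch (the "matroska" mapping is purely illustrative):

```csharp
// Illustrative override: make the "matroska" container resolve to ".mkv"
// instead of the default "." + format name.
GlobalFFOptions.Configure(options => options.ExtensionOverrides["matroska"] = ".mkv");
```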

View file

@ -4,7 +4,6 @@ namespace FFMpegCore.Exceptions
{
public enum FFMpegExceptionType
{
Dependency,
Conversion,
File,
Operation,
@ -13,16 +12,49 @@ public enum FFMpegExceptionType
public class FFMpegException : Exception
{
public FFMpegException(FFMpegExceptionType type, string? message = null, Exception? innerException = null, string ffmpegErrorOutput = "", string ffmpegOutput = "")
public FFMpegException(FFMpegExceptionType type, string message, Exception? innerException = null, string ffMpegErrorOutput = "")
: base(message, innerException)
{
FfmpegOutput = ffmpegOutput;
FfmpegErrorOutput = ffmpegErrorOutput;
FFMpegErrorOutput = ffMpegErrorOutput;
Type = type;
}
public FFMpegException(FFMpegExceptionType type, string message, string ffMpegErrorOutput = "")
: base(message)
{
FFMpegErrorOutput = ffMpegErrorOutput;
Type = type;
}
public FFMpegException(FFMpegExceptionType type, string message)
: base(message)
{
FFMpegErrorOutput = string.Empty;
Type = type;
}
public FFMpegExceptionType Type { get; }
public string FfmpegOutput { get; }
public string FfmpegErrorOutput { get; }
public string FFMpegErrorOutput { get; }
}
public class FFOptionsException : Exception
{
public FFOptionsException(string message, Exception? innerException = null)
: base(message, innerException)
{
}
}
public class FFMpegArgumentException : Exception
{
public FFMpegArgumentException(string? message = null, Exception? innerException = null)
: base(message, innerException)
{
}
}
public class FFMpegStreamFormatException : FFMpegException
{
public FFMpegStreamFormatException(FFMpegExceptionType type, string message, Exception? innerException = null)
: base(type, message, innerException)
{
}
}
}
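
With the reworked exception types, a failed conversion can be caught and inspected through FFMpegErrorOutput; a minimal sketch (paths are placeholders, and a `using FFMpegCore.Exceptions;` directive is assumed):

```csharp
try
{
    FFMpegArguments
        .FromFileInput("/path/to/input.mp4")
        .OutputToFile("/path/to/output.mp4")
        .ProcessSynchronously();
}
catch (FFMpegException ex)
{
    // The raw stderr captured from ffmpeg is exposed via FFMpegErrorOutput
    Console.WriteLine($"{ex.Type}: {ex.Message}");
    Console.WriteLine(ex.FFMpegErrorOutput);
}
```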

View file

@ -16,17 +16,18 @@ public static class FFMpeg
/// <summary>
/// Saves a 'png' thumbnail from the input video to drive
/// </summary>
/// <param name="source">Source video analysis</param>
/// <param name="input">Source video analysis</param>
/// <param name="output">Output video file path</param>
/// <param name="captureTime">Seek position where the thumbnail should be taken.</param>
/// <param name="size">Thumbnail size. If width or height equal 0, the other will be computed automatically.</param>
/// <returns>Bitmap with the requested snapshot.</returns>
public static bool Snapshot(IMediaAnalysis source, string output, Size? size = null, TimeSpan? captureTime = null)
public static bool Snapshot(string input, string output, Size? size = null, TimeSpan? captureTime = null)
{
if (Path.GetExtension(output) != FileExtension.Png)
output = Path.GetFileNameWithoutExtension(output) + FileExtension.Png;
var (arguments, outputOptions) = BuildSnapshotArguments(source, size, captureTime);
var source = FFProbe.Analyse(input);
var (arguments, outputOptions) = BuildSnapshotArguments(input, source, size, captureTime);
return arguments
.OutputToFile(output, true, outputOptions)
@ -35,32 +36,35 @@ public static bool Snapshot(IMediaAnalysis source, string output, Size? size = n
/// <summary>
/// Saves a 'png' thumbnail from the input video to drive
/// </summary>
/// <param name="source">Source video analysis</param>
/// <param name="input">Source video analysis</param>
/// <param name="output">Output video file path</param>
/// <param name="captureTime">Seek position where the thumbnail should be taken.</param>
/// <param name="size">Thumbnail size. If width or height equal 0, the other will be computed automatically.</param>
/// <returns>Bitmap with the requested snapshot.</returns>
public static Task<bool> SnapshotAsync(IMediaAnalysis source, string output, Size? size = null, TimeSpan? captureTime = null)
public static async Task<bool> SnapshotAsync(string input, string output, Size? size = null, TimeSpan? captureTime = null)
{
if (Path.GetExtension(output) != FileExtension.Png)
output = Path.GetFileNameWithoutExtension(output) + FileExtension.Png;
var (arguments, outputOptions) = BuildSnapshotArguments(source, size, captureTime);
var source = await FFProbe.AnalyseAsync(input);
var (arguments, outputOptions) = BuildSnapshotArguments(input, source, size, captureTime);
return arguments
return await arguments
.OutputToFile(output, true, outputOptions)
.ProcessAsynchronously();
}
/// <summary>
/// Saves a 'png' thumbnail to an in-memory bitmap
/// </summary>
/// <param name="source">Source video file.</param>
/// <param name="input">Source video file.</param>
/// <param name="captureTime">Seek position where the thumbnail should be taken.</param>
/// <param name="size">Thumbnail size. If width or height equal 0, the other will be computed automatically.</param>
/// <returns>Bitmap with the requested snapshot.</returns>
public static Bitmap Snapshot(IMediaAnalysis source, Size? size = null, TimeSpan? captureTime = null)
public static Bitmap Snapshot(string input, Size? size = null, TimeSpan? captureTime = null)
{
var (arguments, outputOptions) = BuildSnapshotArguments(source, size, captureTime);
var source = FFProbe.Analyse(input);
var (arguments, outputOptions) = BuildSnapshotArguments(input, source, size, captureTime);
using var ms = new MemoryStream();
arguments
@ -75,13 +79,14 @@ public static Bitmap Snapshot(IMediaAnalysis source, Size? size = null, TimeSpan
/// <summary>
/// Saves a 'png' thumbnail to an in-memory bitmap
/// </summary>
/// <param name="source">Source video file.</param>
/// <param name="input">Source video file.</param>
/// <param name="captureTime">Seek position where the thumbnail should be taken.</param>
/// <param name="size">Thumbnail size. If width or height equal 0, the other will be computed automatically.</param>
/// <returns>Bitmap with the requested snapshot.</returns>
public static async Task<Bitmap> SnapshotAsync(IMediaAnalysis source, Size? size = null, TimeSpan? captureTime = null)
public static async Task<Bitmap> SnapshotAsync(string input, Size? size = null, TimeSpan? captureTime = null)
{
var (arguments, outputOptions) = BuildSnapshotArguments(source, size, captureTime);
var source = await FFProbe.AnalyseAsync(input);
var (arguments, outputOptions) = BuildSnapshotArguments(input, source, size, captureTime);
using var ms = new MemoryStream();
await arguments
@ -93,13 +98,13 @@ await arguments
return new Bitmap(ms);
}
private static (FFMpegArguments, Action<FFMpegArgumentOptions> outputOptions) BuildSnapshotArguments(IMediaAnalysis source, Size? size = null, TimeSpan? captureTime = null)
private static (FFMpegArguments, Action<FFMpegArgumentOptions> outputOptions) BuildSnapshotArguments(string input, IMediaAnalysis source, Size? size = null, TimeSpan? captureTime = null)
{
captureTime ??= TimeSpan.FromSeconds(source.Duration.TotalSeconds / 3);
size = PrepareSnapshotSize(source, size);
return (FFMpegArguments
.FromFileInput(source, options => options
.FromFileInput(input, false, options => options
.Seek(captureTime)),
options => options
.WithVideoCodec(VideoCodec.Png)
@ -109,7 +114,7 @@ private static (FFMpegArguments, Action<FFMpegArgumentOptions> outputOptions) Bu
private static Size? PrepareSnapshotSize(IMediaAnalysis source, Size? wantedSize)
{
if (wantedSize == null || (wantedSize.Value.Height <= 0 && wantedSize.Value.Width <= 0))
if (wantedSize == null || (wantedSize.Value.Height <= 0 && wantedSize.Value.Width <= 0) || source.PrimaryVideoStream == null)
return null;
var currentSize = new Size(source.PrimaryVideoStream.Width, source.PrimaryVideoStream.Height);
@ -146,7 +151,7 @@ private static (FFMpegArguments, Action<FFMpegArgumentOptions> outputOptions) Bu
/// <param name="multithreaded">Is encoding multithreaded.</param>
/// <returns>Output video information.</returns>
public static bool Convert(
IMediaAnalysis source,
string input,
string output,
ContainerFormat format,
Speed speed = Speed.SuperFast,
@ -155,6 +160,7 @@ public static bool Convert(
bool multithreaded = false)
{
FFMpegHelper.ExtensionExceptionCheck(output, format.Extension);
var source = FFProbe.Analyse(input);
FFMpegHelper.ConversionSizeExceptionCheck(source);
var scale = VideoSize.Original == size ? 1 : (double)source.PrimaryVideoStream.Height / (int)size;
@ -166,41 +172,44 @@ public static bool Convert(
return format.Name switch
{
"mp4" => FFMpegArguments
.FromFileInput(source)
.FromFileInput(input)
.OutputToFile(output, true, options => options
.UsingMultithreading(multithreaded)
.WithVideoCodec(VideoCodec.LibX264)
.WithVideoBitrate(2400)
.Scale(outputSize)
.WithVideoFilters(filterOptions => filterOptions
.Scale(outputSize))
.WithSpeedPreset(speed)
.WithAudioCodec(AudioCodec.Aac)
.WithAudioBitrate(audioQuality))
.ProcessSynchronously(),
"ogv" => FFMpegArguments
.FromFileInput(source)
.FromFileInput(input)
.OutputToFile(output, true, options => options
.UsingMultithreading(multithreaded)
.WithVideoCodec(VideoCodec.LibTheora)
.WithVideoBitrate(2400)
.Scale(outputSize)
.WithVideoFilters(filterOptions => filterOptions
.Scale(outputSize))
.WithSpeedPreset(speed)
.WithAudioCodec(AudioCodec.LibVorbis)
.WithAudioBitrate(audioQuality))
.ProcessSynchronously(),
"mpegts" => FFMpegArguments
.FromFileInput(source)
.FromFileInput(input)
.OutputToFile(output, true, options => options
.CopyChannel()
.WithBitStreamFilter(Channel.Video, Filter.H264_Mp4ToAnnexB)
.ForceFormat(VideoType.Ts))
.ProcessSynchronously(),
"webm" => FFMpegArguments
.FromFileInput(source)
.FromFileInput(input)
.OutputToFile(output, true, options => options
.UsingMultithreading(multithreaded)
.WithVideoCodec(VideoCodec.LibVpx)
.WithVideoBitrate(2400)
.Scale(outputSize)
.WithVideoFilters(filterOptions => filterOptions
.Scale(outputSize))
.WithSpeedPreset(speed)
.WithAudioCodec(AudioCodec.LibVorbis)
.WithAudioBitrate(audioQuality))
@ -239,14 +248,15 @@ public static bool PosterWithAudio(string image, string audio, string output)
/// <param name="output">Output video file.</param>
/// <param name="videos">List of vides that need to be joined together.</param>
/// <returns>Output video information.</returns>
public static bool Join(string output, params IMediaAnalysis[] videos)
public static bool Join(string output, params string[] videos)
{
var temporaryVideoParts = videos.Select(video =>
var temporaryVideoParts = videos.Select(videoPath =>
{
var video = FFProbe.Analyse(videoPath);
FFMpegHelper.ConversionSizeExceptionCheck(video);
var destinationPath = Path.Combine(FFMpegOptions.Options.TempDirectory, $"{Path.GetFileNameWithoutExtension(video.Path)}{FileExtension.Ts}");
Directory.CreateDirectory(FFMpegOptions.Options.TempDirectory);
Convert(video, destinationPath, VideoType.Ts);
var destinationPath = Path.Combine(GlobalFFOptions.Current.TemporaryFilesFolder, $"{Path.GetFileNameWithoutExtension(videoPath)}{FileExtension.Ts}");
Directory.CreateDirectory(GlobalFFOptions.Current.TemporaryFilesFolder);
Convert(videoPath, destinationPath, VideoType.Ts);
return destinationPath;
}).ToArray();
@ -264,16 +274,6 @@ public static bool Join(string output, params IMediaAnalysis[] videos)
Cleanup(temporaryVideoParts);
}
}
/// <summary>
/// Joins a list of video files.
/// </summary>
/// <param name="output">Output video file.</param>
/// <param name="videos">List of vides that need to be joined together.</param>
/// <returns>Output video information.</returns>
public static bool Join(string output, params string[] videos)
{
return Join(output, videos.Select(videoPath => FFProbe.Analyse(videoPath)).ToArray());
}
/// <summary>
/// Converts an image sequence to a video.
@ -284,7 +284,7 @@ public static bool Join(string output, params string[] videos)
/// <returns>Output video information.</returns>
public static bool JoinImageSequence(string output, double frameRate = 30, params ImageInfo[] images)
{
var tempFolderName = Path.Combine(FFMpegOptions.Options.TempDirectory, Guid.NewGuid().ToString());
var tempFolderName = Path.Combine(GlobalFFOptions.Current.TemporaryFilesFolder, Guid.NewGuid().ToString());
var temporaryImageFiles = images.Select((image, index) =>
{
FFMpegHelper.ConversionSizeExceptionCheck(Image.FromFile(image.FullName));
@ -340,10 +340,10 @@ public static bool Mute(string input, string output)
{
var source = FFProbe.Analyse(input);
FFMpegHelper.ConversionSizeExceptionCheck(source);
FFMpegHelper.ExtensionExceptionCheck(output, source.Extension);
// FFMpegHelper.ExtensionExceptionCheck(output, source.Extension);
return FFMpegArguments
.FromFileInput(source)
.FromFileInput(input)
.OutputToFile(output, true, options => options
.CopyChannel(Channel.Video)
.DisableChannel(Channel.Audio))
@ -379,10 +379,10 @@ public static bool ReplaceAudio(string input, string inputAudio, string output,
{
var source = FFProbe.Analyse(input);
FFMpegHelper.ConversionSizeExceptionCheck(source);
FFMpegHelper.ExtensionExceptionCheck(output, source.Extension);
// FFMpegHelper.ExtensionExceptionCheck(output, source.Format.);
return FFMpegArguments
.FromFileInput(source)
.FromFileInput(input)
.AddFileInput(inputAudio)
.OutputToFile(output, true, options => options
.CopyChannel()
@ -398,7 +398,7 @@ internal static IReadOnlyList<PixelFormat> GetPixelFormatsInternal()
FFMpegHelper.RootExceptionCheck();
var list = new List<PixelFormat>();
using var instance = new Instances.Instance(FFMpegOptions.Options.FFmpegBinary(), "-pix_fmts");
using var instance = new Instances.Instance(GlobalFFOptions.GetFFMpegBinaryPath(), "-pix_fmts");
instance.DataReceived += (e, args) =>
{
if (PixelFormat.TryParse(args.Data, out var format))
@ -413,14 +413,14 @@ internal static IReadOnlyList<PixelFormat> GetPixelFormatsInternal()
public static IReadOnlyList<PixelFormat> GetPixelFormats()
{
if (!FFMpegOptions.Options.UseCache)
if (!GlobalFFOptions.Current.UseCache)
return GetPixelFormatsInternal();
return FFMpegCache.PixelFormats.Values.ToList().AsReadOnly();
}
public static bool TryGetPixelFormat(string name, out PixelFormat fmt)
{
if (!FFMpegOptions.Options.UseCache)
if (!GlobalFFOptions.Current.UseCache)
{
fmt = GetPixelFormatsInternal().FirstOrDefault(x => x.Name == name.ToLowerInvariant().Trim());
return fmt != null;
@ -443,7 +443,7 @@ private static void ParsePartOfCodecs(Dictionary<string, Codec> codecs, string a
{
FFMpegHelper.RootExceptionCheck();
using var instance = new Instances.Instance(FFMpegOptions.Options.FFmpegBinary(), arguments);
using var instance = new Instances.Instance(GlobalFFOptions.GetFFMpegBinaryPath(), arguments);
instance.DataReceived += (e, args) =>
{
var codec = parser(args.Data);
@ -485,14 +485,14 @@ internal static Dictionary<string, Codec> GetCodecsInternal()
public static IReadOnlyList<Codec> GetCodecs()
{
if (!FFMpegOptions.Options.UseCache)
if (!GlobalFFOptions.Current.UseCache)
return GetCodecsInternal().Values.ToList().AsReadOnly();
return FFMpegCache.Codecs.Values.ToList().AsReadOnly();
}
public static IReadOnlyList<Codec> GetCodecs(CodecType type)
{
if (!FFMpegOptions.Options.UseCache)
if (!GlobalFFOptions.Current.UseCache)
return GetCodecsInternal().Values.Where(x => x.Type == type).ToList().AsReadOnly();
return FFMpegCache.Codecs.Values.Where(x=>x.Type == type).ToList().AsReadOnly();
}
@ -504,7 +504,7 @@ public static IReadOnlyList<Codec> GetCodecs(CodecType type)
public static bool TryGetCodec(string name, out Codec codec)
{
if (!FFMpegOptions.Options.UseCache)
if (!GlobalFFOptions.Current.UseCache)
{
codec = GetCodecsInternal().Values.FirstOrDefault(x => x.Name == name.ToLowerInvariant().Trim());
return codec != null;
@ -527,7 +527,7 @@ internal static IReadOnlyList<ContainerFormat> GetContainersFormatsInternal()
FFMpegHelper.RootExceptionCheck();
var list = new List<ContainerFormat>();
using var instance = new Instances.Instance(FFMpegOptions.Options.FFmpegBinary(), "-formats");
using var instance = new Instances.Instance(GlobalFFOptions.GetFFMpegBinaryPath(), "-formats");
instance.DataReceived += (e, args) =>
{
if (ContainerFormat.TryParse(args.Data, out var fmt))
@ -542,14 +542,14 @@ internal static IReadOnlyList<ContainerFormat> GetContainersFormatsInternal()
public static IReadOnlyList<ContainerFormat> GetContainerFormats()
{
if (!FFMpegOptions.Options.UseCache)
if (!GlobalFFOptions.Current.UseCache)
return GetContainersFormatsInternal();
return FFMpegCache.ContainerFormats.Values.ToList().AsReadOnly();
}
public static bool TryGetContainerFormat(string name, out ContainerFormat fmt)
{
if (!FFMpegOptions.Options.UseCache)
if (!GlobalFFOptions.Current.UseCache)
{
fmt = GetContainersFormatsInternal().FirstOrDefault(x => x.Name == name.ToLowerInvariant().Trim());
return fmt != null;

View file

@ -5,7 +5,7 @@
namespace FFMpegCore
{
public class FFMpegArgumentOptions : FFMpegOptionsBase
public class FFMpegArgumentOptions : FFMpegArgumentsBase
{
internal FFMpegArgumentOptions() { }
@ -15,14 +15,10 @@ internal FFMpegArgumentOptions() { }
public FFMpegArgumentOptions WithAudioBitrate(int bitrate) => WithArgument(new AudioBitrateArgument(bitrate));
public FFMpegArgumentOptions WithAudioSamplingRate(int samplingRate = 48000) => WithArgument(new AudioSamplingRateArgument(samplingRate));
public FFMpegArgumentOptions WithVariableBitrate(int vbr) => WithArgument(new VariableBitRateArgument(vbr));
public FFMpegArgumentOptions Resize(VideoSize videoSize) => WithArgument(new SizeArgument(videoSize));
public FFMpegArgumentOptions Resize(int width, int height) => WithArgument(new SizeArgument(width, height));
public FFMpegArgumentOptions Resize(Size? size) => WithArgument(new SizeArgument(size));
public FFMpegArgumentOptions Scale(VideoSize videoSize) => WithArgument(new ScaleArgument(videoSize));
public FFMpegArgumentOptions Scale(int width, int height) => WithArgument(new ScaleArgument(width, height));
public FFMpegArgumentOptions Scale(Size size) => WithArgument(new ScaleArgument(size));
public FFMpegArgumentOptions WithBitStreamFilter(Channel channel, Filter filter) => WithArgument(new BitStreamFilterArgument(channel, filter));
public FFMpegArgumentOptions WithConstantRateFactor(int crf) => WithArgument(new ConstantRateFactorArgument(crf));
@ -40,6 +36,13 @@ internal FFMpegArgumentOptions() { }
public FFMpegArgumentOptions WithVideoCodec(Codec videoCodec) => WithArgument(new VideoCodecArgument(videoCodec));
public FFMpegArgumentOptions WithVideoCodec(string videoCodec) => WithArgument(new VideoCodecArgument(videoCodec));
public FFMpegArgumentOptions WithVideoBitrate(int bitrate) => WithArgument(new VideoBitrateArgument(bitrate));
public FFMpegArgumentOptions WithVideoFilters(Action<VideoFilterOptions> videoFilterOptions)
{
var videoFilterOptionsObj = new VideoFilterOptions();
videoFilterOptions(videoFilterOptionsObj);
return WithArgument(new VideoFiltersArgument(videoFilterOptionsObj));
}
public FFMpegArgumentOptions WithFramerate(double framerate) => WithArgument(new FrameRateArgument(framerate));
public FFMpegArgumentOptions WithoutMetadata() => WithArgument(new RemoveMetadataArgument());
public FFMpegArgumentOptions WithSpeedPreset(Speed speed) => WithArgument(new SpeedPresetArgument(speed));
@ -47,7 +50,6 @@ internal FFMpegArgumentOptions() { }
public FFMpegArgumentOptions WithCustomArgument(string argument) => WithArgument(new CustomArgument(argument));
public FFMpegArgumentOptions Seek(TimeSpan? seekTo) => WithArgument(new SeekArgument(seekTo));
public FFMpegArgumentOptions Transpose(Transposition transposition) => WithArgument(new TransposeArgument(transposition));
public FFMpegArgumentOptions Loop(int times) => WithArgument(new LoopArgument(times));
public FFMpegArgumentOptions OverwriteExisting() => WithArgument(new OverwriteArgument());
@ -56,8 +58,6 @@ internal FFMpegArgumentOptions() { }
public FFMpegArgumentOptions ForcePixelFormat(string pixelFormat) => WithArgument(new ForcePixelFormat(pixelFormat));
public FFMpegArgumentOptions ForcePixelFormat(PixelFormat pixelFormat) => WithArgument(new ForcePixelFormat(pixelFormat));
public FFMpegArgumentOptions DrawText(DrawTextOptions drawTextOptions) => WithArgument(new DrawTextArgument(drawTextOptions));
public FFMpegArgumentOptions WithArgument(IArgument argument)
{
Arguments.Add(argument);

View file

@ -27,7 +27,7 @@ internal FFMpegArgumentProcessor(FFMpegArguments ffMpegArguments)
public string Arguments => _ffMpegArguments.Text;
private event EventHandler CancelEvent = null!;
private event EventHandler<int> CancelEvent = null!;
public FFMpegArgumentProcessor NotifyOnProgress(Action<double> onPercentageProgress, TimeSpan totalTimeSpan)
{
@ -45,22 +45,26 @@ public FFMpegArgumentProcessor NotifyOnOutput(Action<string, DataType> onOutput)
_onOutput = onOutput;
return this;
}
public FFMpegArgumentProcessor CancellableThrough(out Action cancel)
public FFMpegArgumentProcessor CancellableThrough(out Action cancel, int timeout = 0)
{
cancel = () => CancelEvent?.Invoke(this, EventArgs.Empty);
cancel = () => CancelEvent?.Invoke(this, timeout);
return this;
}
public bool ProcessSynchronously(bool throwOnError = true)
public bool ProcessSynchronously(bool throwOnError = true, FFOptions? ffMpegOptions = null)
{
using var instance = PrepareInstance(out var cancellationTokenSource);
using var instance = PrepareInstance(ffMpegOptions ?? GlobalFFOptions.Current, out var cancellationTokenSource);
var errorCode = -1;
void OnCancelEvent(object sender, EventArgs args)
void OnCancelEvent(object sender, int timeout)
{
instance.SendInput("q");
if (!cancellationTokenSource.Token.WaitHandle.WaitOne(timeout, true))
{
cancellationTokenSource.Cancel();
instance.Started = false;
}
}
CancelEvent += OnCancelEvent;
instance.Exited += delegate { cancellationTokenSource.Cancel(); };
@ -76,38 +80,31 @@ void OnCancelEvent(object sender, EventArgs args)
}
catch (Exception e)
{
if (!HandleException(throwOnError, e, instance.ErrorData, instance.OutputData)) return false;
if (!HandleException(throwOnError, e, instance.ErrorData)) return false;
}
finally
{
CancelEvent -= OnCancelEvent;
}
return HandleCompletion(throwOnError, errorCode, instance.ErrorData, instance.OutputData);
return HandleCompletion(throwOnError, errorCode, instance.ErrorData);
}
private bool HandleCompletion(bool throwOnError, int errorCode, IReadOnlyList<string> errorData, IReadOnlyList<string> outputData)
public async Task<bool> ProcessAsynchronously(bool throwOnError = true, FFOptions? ffMpegOptions = null)
{
if (throwOnError && errorCode != 0)
throw new FFMpegException(FFMpegExceptionType.Conversion, "FFMpeg exited with non-zero exitcode.", null, string.Join("\n", errorData), string.Join("\n", outputData));
_onPercentageProgress?.Invoke(100.0);
if (_totalTimespan.HasValue) _onTimeProgress?.Invoke(_totalTimespan.Value);
return errorCode == 0;
}
public async Task<bool> ProcessAsynchronously(bool throwOnError = true)
{
using var instance = PrepareInstance(out var cancellationTokenSource);
using var instance = PrepareInstance(ffMpegOptions ?? GlobalFFOptions.Current, out var cancellationTokenSource);
var errorCode = -1;
void OnCancelEvent(object sender, EventArgs args)
void OnCancelEvent(object sender, int timeout)
{
instance.SendInput("q");
if (!cancellationTokenSource.Token.WaitHandle.WaitOne(timeout, true))
{
cancellationTokenSource.Cancel();
instance.Started = false;
}
}
CancelEvent += OnCancelEvent;
try
@ -122,26 +119,38 @@ await Task.WhenAll(instance.FinishedRunning().ContinueWith(t =>
}
catch (Exception e)
{
if (!HandleException(throwOnError, e, instance.ErrorData, instance.OutputData)) return false;
if (!HandleException(throwOnError, e, instance.ErrorData)) return false;
}
finally
{
CancelEvent -= OnCancelEvent;
}
return HandleCompletion(throwOnError, errorCode, instance.ErrorData, instance.OutputData);
return HandleCompletion(throwOnError, errorCode, instance.ErrorData);
}
private Instance PrepareInstance(out CancellationTokenSource cancellationTokenSource)
private bool HandleCompletion(bool throwOnError, int exitCode, IReadOnlyList<string> errorData)
{
if (throwOnError && exitCode != 0)
throw new FFMpegException(FFMpegExceptionType.Process, $"ffmpeg exited with non-zero exit-code ({exitCode} - {string.Join("\n", errorData)})", null, string.Join("\n", errorData));
_onPercentageProgress?.Invoke(100.0);
if (_totalTimespan.HasValue) _onTimeProgress?.Invoke(_totalTimespan.Value);
return exitCode == 0;
}
private Instance PrepareInstance(FFOptions ffMpegOptions,
out CancellationTokenSource cancellationTokenSource)
{
FFMpegHelper.RootExceptionCheck();
FFMpegHelper.VerifyFFMpegExists();
FFMpegHelper.VerifyFFMpegExists(ffMpegOptions);
var startInfo = new ProcessStartInfo
{
FileName = FFMpegOptions.Options.FFmpegBinary(),
FileName = GlobalFFOptions.GetFFMpegBinaryPath(ffMpegOptions),
Arguments = _ffMpegArguments.Text,
StandardOutputEncoding = FFMpegOptions.Options.Encoding,
StandardErrorEncoding = FFMpegOptions.Options.Encoding,
StandardOutputEncoding = ffMpegOptions.Encoding,
StandardErrorEncoding = ffMpegOptions.Encoding,
};
var instance = new Instance(startInfo);
cancellationTokenSource = new CancellationTokenSource();
@ -153,12 +162,12 @@ private Instance PrepareInstance(out CancellationTokenSource cancellationTokenSo
}
private static bool HandleException(bool throwOnError, Exception e, IReadOnlyList<string> errorData, IReadOnlyList<string> outputData)
private static bool HandleException(bool throwOnError, Exception e, IReadOnlyList<string> errorData)
{
if (!throwOnError)
return false;
throw new FFMpegException(FFMpegExceptionType.Process, "Exception thrown during processing", e, string.Join("\n", errorData), string.Join("\n", outputData));
throw new FFMpegException(FFMpegExceptionType.Process, "Exception thrown during processing", e, string.Join("\n", errorData));
}
private void OutputData(object sender, (DataType Type, string Data) msg)
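
A sketch of the extended cancellation: the timeout is in milliseconds, and ffmpeg is first asked to quit cleanly via "q" before the run is force-cancelled. Paths and the 10-second grace period are placeholders:

```csharp
var processor = FFMpegArguments
    .FromFileInput("/path/to/input.mp4")
    .OutputToFile("/path/to/output.mp4", true, options => options
        .WithVideoCodec(VideoCodec.LibX264))
    .CancellableThrough(out var cancel, timeout: 10000); // allow up to 10 s for a graceful shutdown

var processing = processor.ProcessAsynchronously(throwOnError: false);

// later, e.g. from a UI handler or another task:
cancel();
var completedSuccessfully = await processing;
```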

View file

@ -9,26 +9,26 @@
namespace FFMpegCore
{
public sealed class FFMpegArguments : FFMpegOptionsBase
public sealed class FFMpegArguments : FFMpegArgumentsBase
{
private readonly FFMpegGlobalOptions _globalOptions = new FFMpegGlobalOptions();
private readonly FFMpegGlobalArguments _globalArguments = new FFMpegGlobalArguments();
private FFMpegArguments() { }
public string Text => string.Join(" ", _globalOptions.Arguments.Concat(Arguments).Select(arg => arg.Text));
public string Text => string.Join(" ", _globalArguments.Arguments.Concat(Arguments).Select(arg => arg.Text));
public static FFMpegArguments FromConcatInput(IEnumerable<string> filePaths, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new ConcatArgument(filePaths), addArguments);
public static FFMpegArguments FromDemuxConcatInput(IEnumerable<string> filePaths, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new DemuxConcatArgument(filePaths), addArguments);
public static FFMpegArguments FromFileInput(string filePath, bool verifyExists = true, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputArgument(verifyExists, filePath), addArguments);
public static FFMpegArguments FromFileInput(FileInfo fileInfo, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputArgument(fileInfo.FullName, false), addArguments);
public static FFMpegArguments FromFileInput(IMediaAnalysis mediaAnalysis, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputArgument(mediaAnalysis.Path, false), addArguments);
public static FFMpegArguments FromUrlInput(Uri uri, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputArgument(uri.AbsoluteUri, false), addArguments);
public static FFMpegArguments FromDeviceInput(string device, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputDeviceArgument(device), addArguments);
public static FFMpegArguments FromPipeInput(IPipeSource sourcePipe, Action<FFMpegArgumentOptions>? addArguments = null) => new FFMpegArguments().WithInput(new InputPipeArgument(sourcePipe), addArguments);
public FFMpegArguments WithGlobalOptions(Action<FFMpegGlobalOptions> configureOptions)
public FFMpegArguments WithGlobalOptions(Action<FFMpegGlobalArguments> configureOptions)
{
configureOptions(_globalOptions);
configureOptions(_globalArguments);
return this;
}
@ -36,7 +36,6 @@ public FFMpegArguments WithGlobalOptions(Action<FFMpegGlobalOptions> configureOp
public FFMpegArguments AddDemuxConcatInput(IEnumerable<string> filePaths, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new DemuxConcatArgument(filePaths), addArguments);
public FFMpegArguments AddFileInput(string filePath, bool verifyExists = true, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new InputArgument(verifyExists, filePath), addArguments);
public FFMpegArguments AddFileInput(FileInfo fileInfo, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new InputArgument(fileInfo.FullName, false), addArguments);
public FFMpegArguments AddFileInput(IMediaAnalysis mediaAnalysis, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new InputArgument(mediaAnalysis.Path, false), addArguments);
public FFMpegArguments AddUrlInput(Uri uri, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new InputArgument(uri.AbsoluteUri, false), addArguments);
public FFMpegArguments AddPipeInput(IPipeSource sourcePipe, Action<FFMpegArgumentOptions>? addArguments = null) => WithInput(new InputPipeArgument(sourcePipe), addArguments);
@ -50,7 +49,8 @@ private FFMpegArguments WithInput(IInputArgument inputArgument, Action<FFMpegArg
}
public FFMpegArgumentProcessor OutputToFile(string file, bool overwrite = true, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputArgument(file, overwrite), addArguments);
public FFMpegArgumentProcessor OutputToFile(Uri uri, bool overwrite = true, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputArgument(uri.AbsolutePath, overwrite), addArguments);
public FFMpegArgumentProcessor OutputToUrl(string uri, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputUrlArgument(uri), addArguments);
public FFMpegArgumentProcessor OutputToUrl(Uri uri, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputUrlArgument(uri.ToString()), addArguments);
public FFMpegArgumentProcessor OutputToPipe(IPipeSink reader, Action<FFMpegArgumentOptions>? addArguments = null) => ToProcessor(new OutputPipeArgument(reader), addArguments);
private FFMpegArgumentProcessor ToProcessor(IOutputArgument argument, Action<FFMpegArgumentOptions>? addArguments)

View file

@ -3,7 +3,7 @@
namespace FFMpegCore
{
public abstract class FFMpegOptionsBase
public abstract class FFMpegArgumentsBase
{
internal readonly List<IArgument> Arguments = new List<IArgument>();
}

View file

@ -0,0 +1,18 @@
using FFMpegCore.Arguments;
namespace FFMpegCore
{
public sealed class FFMpegGlobalArguments : FFMpegArgumentsBase
{
internal FFMpegGlobalArguments() { }
public FFMpegGlobalArguments WithVerbosityLevel(VerbosityLevel verbosityLevel = VerbosityLevel.Error) => WithOption(new VerbosityLevelArgument(verbosityLevel));
private FFMpegGlobalArguments WithOption(IArgument argument)
{
Arguments.Add(argument);
return this;
}
}
}
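
A sketch of supplying the renamed global arguments, here lowering ffmpeg's log level (paths are placeholders):

```csharp
// Global arguments are emitted before the input/output arguments.
FFMpegArguments
    .FromFileInput("/path/to/input.mp4")
    .WithGlobalOptions(globalOptions => globalOptions
        .WithVerbosityLevel(VerbosityLevel.Error))
    .OutputToFile("/path/to/output.mp4")
    .ProcessSynchronously();
```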

View file

@ -1,18 +0,0 @@
using FFMpegCore.Arguments;
namespace FFMpegCore
{
public sealed class FFMpegGlobalOptions : FFMpegOptionsBase
{
internal FFMpegGlobalOptions() { }
public FFMpegGlobalOptions WithVerbosityLevel(VerbosityLevel verbosityLevel = VerbosityLevel.Error) => WithOption(new VerbosityLevelArgument(verbosityLevel));
private FFMpegGlobalOptions WithOption(IArgument argument)
{
Arguments.Add(argument);
return this;
}
}
}

View file

@ -1,66 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.Json;
namespace FFMpegCore
{
public class FFMpegOptions
{
private static readonly string ConfigFile = "ffmpeg.config.json";
private static readonly string DefaultRoot = "";
private static readonly string DefaultTemp = Path.GetTempPath();
private static readonly Dictionary<string, string> DefaultExtensionsOverrides = new Dictionary<string, string>
{
{ "mpegts", ".ts" },
};
public static FFMpegOptions Options { get; private set; } = new FFMpegOptions();
public static void Configure(Action<FFMpegOptions> optionsAction)
{
optionsAction?.Invoke(Options);
}
public static void Configure(FFMpegOptions options)
{
Options = options ?? throw new ArgumentNullException(nameof(options));
}
static FFMpegOptions()
{
if (File.Exists(ConfigFile))
{
Options = JsonSerializer.Deserialize<FFMpegOptions>(File.ReadAllText(ConfigFile))!;
foreach (var pair in DefaultExtensionsOverrides)
if (!Options.ExtensionOverrides.ContainsKey(pair.Key)) Options.ExtensionOverrides.Add(pair.Key, pair.Value);
}
}
public string RootDirectory { get; set; } = DefaultRoot;
public string TempDirectory { get; set; } = DefaultTemp;
public bool UseCache { get; set; } = true;
public Encoding Encoding { get; set; } = Encoding.Default;
public string FFmpegBinary() => FFBinary("FFMpeg");
public string FFProbeBinary() => FFBinary("FFProbe");
public Dictionary<string, string> ExtensionOverrides { get; private set; } = new Dictionary<string, string>();
private static string FFBinary(string name)
{
var ffName = name.ToLowerInvariant();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
ffName += ".exe";
var target = Environment.Is64BitProcess ? "x64" : "x86";
if (Directory.Exists(Path.Combine(Options.RootDirectory, target)))
ffName = Path.Combine(target, ffName);
return Path.Combine(Options.RootDirectory, ffName);
}
}
}

View file

@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Threading;
using System.Threading.Tasks;
using FFMpegCore.Exceptions;
@ -14,7 +15,7 @@ public class RawVideoPipeSource : IPipeSource
public string StreamFormat { get; private set; } = null!;
public int Width { get; private set; }
public int Height { get; private set; }
public int FrameRate { get; set; } = 25;
public double FrameRate { get; set; } = 25;
private bool _formatInitialized;
private readonly IEnumerator<IVideoFrame> _framesEnumerator;
@ -42,7 +43,7 @@ public string GetStreamArguments()
_formatInitialized = true;
}
return $"-f rawvideo -r {FrameRate} -pix_fmt {StreamFormat} -s {Width}x{Height}";
return $"-f rawvideo -r {FrameRate.ToString(CultureInfo.InvariantCulture)} -pix_fmt {StreamFormat} -s {Width}x{Height}";
}
public async Task WriteAsync(System.IO.Stream outputStream, CancellationToken cancellationToken)
@ -63,7 +64,7 @@ public async Task WriteAsync(System.IO.Stream outputStream, CancellationToken ca
private void CheckFrameAndThrow(IVideoFrame frame)
{
if (frame.Width != Width || frame.Height != Height || frame.Format != StreamFormat)
throw new FFMpegException(FFMpegExceptionType.Operation, "Video frame is not the same format as created raw video stream\r\n" +
throw new FFMpegStreamFormatException(FFMpegExceptionType.Operation, "Video frame is not the same format as created raw video stream\r\n" +
$"Frame format: {frame.Width}x{frame.Height} pix_fmt: {frame.Format}\r\n" +
$"Stream format: {Width}x{Height} pix_fmt: {StreamFormat}");
}
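
Frame/stream format mismatches now surface as the dedicated FFMpegStreamFormatException, so they can be handled separately from other failures; a sketch (the frame source is assumed to exist):

```csharp
// "framesSource" is assumed to be an existing RawVideoPipeSource whose frames
// might not all share the same size or pixel format.
try
{
    await FFMpegArguments
        .FromPipeInput(framesSource)
        .OutputToFile("/path/to/output.webm", true, options => options
            .WithVideoCodec(VideoCodec.LibVpx))
        .ProcessAsynchronously();
}
catch (FFMpegStreamFormatException ex)
{
    // Thrown when a frame's width/height/pixel format differs from the stream's initial format
    Console.WriteLine(ex.Message);
}
```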

View file

@ -5,16 +5,16 @@
<RepositoryUrl>https://github.com/rosenbjerg/FFMpegCore</RepositoryUrl>
<PackageProjectUrl>https://github.com/rosenbjerg/FFMpegCore</PackageProjectUrl>
<Copyright></Copyright>
<Description>A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and conversion into your C# applications</Description>
<Description>A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and conversion into your .NET applications</Description>
<Version>3.0.0.0</Version>
<AssemblyVersion>3.0.0.0</AssemblyVersion>
<FileVersion>3.0.0.0</FileVersion>
<PackageReleaseNotes>- return null from FFProbe.Analyse* when no media format was detected
- Expose tags as string dictionary on IMediaAnalysis (thanks hey-red)</PackageReleaseNotes>
<PackageReleaseNotes>- Fixes for RawVideoPipeSource hanging (thanks to max619)
- Added .OutputToUrl(..) method for outputting to url using supported protocol (thanks to TFleury) </PackageReleaseNotes>
<LangVersion>8</LangVersion>
<PackageVersion>3.4.0</PackageVersion>
<PackageVersion>4.1.0</PackageVersion>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
<Authors>Malte Rosenbjerg, Vlad Jerca</Authors>
<Authors>Malte Rosenbjerg, Vlad Jerca, Max Bagryantsev</Authors>
<PackageTags>ffmpeg ffprobe convert video audio mediafile resize analyze muxing</PackageTags>
<RepositoryType>GitHub</RepositoryType>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
@ -30,7 +30,7 @@
<ItemGroup>
<PackageReference Include="Instances" Version="1.6.0" />
<PackageReference Include="System.Drawing.Common" Version="5.0.0" />
<PackageReference Include="System.Drawing.Common" Version="5.0.2" />
<PackageReference Include="System.Text.Json" Version="5.0.1" />
</ItemGroup>

View file

@ -1,2 +1,3 @@
<wpf:ResourceDictionary xml:space="preserve" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml" xmlns:s="clr-namespace:System;assembly=mscorlib" xmlns:ss="urn:shemas-jetbrains-com:settings-storage-xaml" xmlns:wpf="http://schemas.microsoft.com/winfx/2006/xaml/presentation">
<s:Boolean x:Key="/Default/CodeInspection/NamespaceProvider/NamespaceFoldersToSkip/=ffmpeg/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>
<s:Boolean x:Key="/Default/CodeInspection/NamespaceProvider/NamespaceFoldersToSkip/=ffmpeg/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeInspection/NamespaceProvider/NamespaceFoldersToSkip/=ffprobe/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>

FFMpegCore/FFOptions.cs Normal file
View file

@ -0,0 +1,37 @@
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace FFMpegCore
{
public class FFOptions
{
/// <summary>
/// Folder containing the ffmpeg and ffprobe binaries. Leave empty if ffmpeg and ffprobe are present in PATH
/// </summary>
public string BinaryFolder { get; set; } = string.Empty;
/// <summary>
/// Folder used for temporary files necessary for static methods on FFMpeg class
/// </summary>
public string TemporaryFilesFolder { get; set; } = Path.GetTempPath();
/// <summary>
/// Encoding used for parsing stdout/stderr on ffmpeg and ffprobe processes
/// </summary>
public Encoding Encoding { get; set; } = Encoding.Default;
/// <summary>
/// File extension overrides, mapping container format names to file extensions (e.g. mpegts -> .ts)
/// </summary>
public Dictionary<string, string> ExtensionOverrides { get; set; } = new Dictionary<string, string>
{
{ "mpegts", ".ts" },
};
/// <summary>
/// Whether to cache calls to get ffmpeg codec, pixel- and container-formats
/// </summary>
public bool UseCache { get; set; } = true;
}
}

View file

@ -12,26 +12,32 @@ namespace FFMpegCore
{
public static class FFProbe
{
public static IMediaAnalysis? Analyse(string filePath, int outputCapacity = int.MaxValue)
public static IMediaAnalysis Analyse(string filePath, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
{
if (!File.Exists(filePath))
throw new FFMpegException(FFMpegExceptionType.File, $"No file found at '{filePath}'");
using var instance = PrepareInstance(filePath, outputCapacity);
instance.BlockUntilFinished();
return ParseOutput(filePath, instance);
using var instance = PrepareInstance(filePath, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
var exitCode = instance.BlockUntilFinished();
if (exitCode != 0)
throw new FFMpegException(FFMpegExceptionType.Process, $"ffprobe exited with non-zero exit-code ({exitCode} - {string.Join("\n", instance.ErrorData)})", null, string.Join("\n", instance.ErrorData));
return ParseOutput(instance);
}
public static IMediaAnalysis? Analyse(Uri uri, int outputCapacity = int.MaxValue)
public static IMediaAnalysis Analyse(Uri uri, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
{
using var instance = PrepareInstance(uri.AbsoluteUri, outputCapacity);
instance.BlockUntilFinished();
return ParseOutput(uri.AbsoluteUri, instance);
using var instance = PrepareInstance(uri.AbsoluteUri, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
var exitCode = instance.BlockUntilFinished();
if (exitCode != 0)
throw new FFMpegException(FFMpegExceptionType.Process, $"ffprobe exited with non-zero exit-code ({exitCode} - {string.Join("\n", instance.ErrorData)})", null, string.Join("\n", instance.ErrorData));
return ParseOutput(instance);
}
public static IMediaAnalysis? Analyse(Stream stream, int outputCapacity = int.MaxValue)
public static IMediaAnalysis Analyse(Stream stream, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
{
var streamPipeSource = new StreamPipeSource(stream);
var pipeArgument = new InputPipeArgument(streamPipeSource);
using var instance = PrepareInstance(pipeArgument.PipePath, outputCapacity);
using var instance = PrepareInstance(pipeArgument.PipePath, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
pipeArgument.Pre();
var task = instance.FinishedRunning();
@ -46,36 +52,36 @@ public static class FFProbe
}
var exitCode = task.ConfigureAwait(false).GetAwaiter().GetResult();
if (exitCode != 0)
throw new FFMpegException(FFMpegExceptionType.Process, $"FFProbe process returned exit status {exitCode}", null, string.Join("\n", instance.ErrorData), string.Join("\n", instance.OutputData));
throw new FFMpegException(FFMpegExceptionType.Process, $"ffprobe exited with non-zero exit-code ({exitCode} - {string.Join("\n", instance.ErrorData)})", null, string.Join("\n", instance.ErrorData));
return ParseOutput(pipeArgument.PipePath, instance);
return ParseOutput(instance);
}
public static async Task<IMediaAnalysis?> AnalyseAsync(string filePath, int outputCapacity = int.MaxValue)
public static async Task<IMediaAnalysis> AnalyseAsync(string filePath, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
{
if (!File.Exists(filePath))
throw new FFMpegException(FFMpegExceptionType.File, $"No file found at '{filePath}'");
using var instance = PrepareInstance(filePath, outputCapacity);
await instance.FinishedRunning();
return ParseOutput(filePath, instance);
using var instance = PrepareInstance(filePath, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
await instance.FinishedRunning().ConfigureAwait(false);
return ParseOutput(instance);
}
public static async Task<IMediaAnalysis?> AnalyseAsync(Uri uri, int outputCapacity = int.MaxValue)
public static async Task<IMediaAnalysis> AnalyseAsync(Uri uri, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
{
using var instance = PrepareInstance(uri.AbsoluteUri, outputCapacity);
await instance.FinishedRunning();
return ParseOutput(uri.AbsoluteUri, instance);
using var instance = PrepareInstance(uri.AbsoluteUri, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
await instance.FinishedRunning().ConfigureAwait(false);
return ParseOutput(instance);
}
public static async Task<IMediaAnalysis?> AnalyseAsync(Stream stream, int outputCapacity = int.MaxValue)
public static async Task<IMediaAnalysis> AnalyseAsync(Stream stream, int outputCapacity = int.MaxValue, FFOptions? ffOptions = null)
{
var streamPipeSource = new StreamPipeSource(stream);
var pipeArgument = new InputPipeArgument(streamPipeSource);
using var instance = PrepareInstance(pipeArgument.PipePath, outputCapacity);
using var instance = PrepareInstance(pipeArgument.PipePath, outputCapacity, ffOptions ?? GlobalFFOptions.Current);
pipeArgument.Pre();
var task = instance.FinishedRunning();
try
{
await pipeArgument.During();
await pipeArgument.During().ConfigureAwait(false);
}
catch(IOException)
{
@ -84,31 +90,34 @@ public static class FFProbe
{
pipeArgument.Post();
}
var exitCode = await task;
var exitCode = await task.ConfigureAwait(false);
if (exitCode != 0)
throw new FFMpegException(FFMpegExceptionType.Process, $"FFProbe process returned exit status {exitCode}", null, string.Join("\n", instance.ErrorData), string.Join("\n", instance.OutputData));
throw new FFMpegException(FFMpegExceptionType.Process, $"FFProbe process returned exit status {exitCode}", null, string.Join("\n", instance.ErrorData));
pipeArgument.Post();
return ParseOutput(pipeArgument.PipePath, instance);
return ParseOutput(instance);
}
private static IMediaAnalysis? ParseOutput(string filePath, Instance instance)
private static IMediaAnalysis ParseOutput(Instance instance)
{
var json = string.Join(string.Empty, instance.OutputData);
var ffprobeAnalysis = JsonSerializer.Deserialize<FFProbeAnalysis>(json, new JsonSerializerOptions
{
PropertyNameCaseInsensitive = true
})!;
if (ffprobeAnalysis?.Format == null) return null;
return new MediaAnalysis(filePath, ffprobeAnalysis);
});
if (ffprobeAnalysis?.Format == null)
throw new Exception();
return new MediaAnalysis(ffprobeAnalysis);
}
private static Instance PrepareInstance(string filePath, int outputCapacity)
private static Instance PrepareInstance(string filePath, int outputCapacity, FFOptions ffOptions)
{
FFProbeHelper.RootExceptionCheck();
FFProbeHelper.VerifyFFProbeExists();
FFProbeHelper.VerifyFFProbeExists(ffOptions);
var arguments = $"-loglevel error -print_format json -show_format -sexagesimal -show_streams \"{filePath}\"";
var instance = new Instance(FFMpegOptions.Options.FFProbeBinary(), arguments) {DataBufferCapacity = outputCapacity};
var instance = new Instance(GlobalFFOptions.GetFFProbeBinaryPath(), arguments) {DataBufferCapacity = outputCapacity};
return instance;
}
}
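
Analyse and AnalyseAsync now accept optional per-call FFOptions and throw on a non-zero ffprobe exit code instead of returning null; a sketch with a placeholder binary folder:

```csharp
// Per-call options override GlobalFFOptions.Current for just this invocation.
var mediaInfo = await FFProbe.AnalyseAsync(
    "/path/to/input.mp4",
    ffOptions: new FFOptions { BinaryFolder = "/opt/ffmpeg/bin" });
```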

View file

@ -5,12 +5,10 @@ namespace FFMpegCore
{
public interface IMediaAnalysis
{
string Path { get; }
string Extension { get; }
TimeSpan Duration { get; }
MediaFormat Format { get; }
AudioStream PrimaryAudioStream { get; }
VideoStream PrimaryVideoStream { get; }
AudioStream? PrimaryAudioStream { get; }
VideoStream? PrimaryVideoStream { get; }
List<VideoStream> VideoStreams { get; }
List<AudioStream> AudioStreams { get; }
}
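
Because PrimaryVideoStream and PrimaryAudioStream are now nullable, callers should guard against audio-only or video-only media; a sketch:

```csharp
var mediaInfo = FFProbe.Analyse("/path/to/input");
if (mediaInfo.PrimaryVideoStream is { } video)
    Console.WriteLine($"{video.Width}x{video.Height} @ {video.FrameRate} fps");
else
    Console.WriteLine("No video stream found");
```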

View file

@ -7,16 +7,11 @@ namespace FFMpegCore
{
internal class MediaAnalysis : IMediaAnalysis
{
private static readonly Regex DurationRegex = new Regex(@"^(\d+):(\d{1,2}):(\d{1,2})\.(\d{1,3})", RegexOptions.Compiled);
internal MediaAnalysis(string path, FFProbeAnalysis analysis)
internal MediaAnalysis(FFProbeAnalysis analysis)
{
Format = ParseFormat(analysis.Format);
VideoStreams = analysis.Streams.Where(stream => stream.CodecType == "video").Select(ParseVideoStream).ToList();
AudioStreams = analysis.Streams.Where(stream => stream.CodecType == "audio").Select(ParseAudioStream).ToList();
PrimaryVideoStream = VideoStreams.OrderBy(stream => stream.Index).FirstOrDefault();
PrimaryAudioStream = AudioStreams.OrderBy(stream => stream.Index).FirstOrDefault();
Path = path;
}
private MediaFormat ParseFormat(Format analysisFormat)
@ -33,9 +28,6 @@ private MediaFormat ParseFormat(Format analysisFormat)
};
}
public string Path { get; }
public string Extension => System.IO.Path.GetExtension(Path);
public TimeSpan Duration => new[]
{
Format.Duration,
@ -44,9 +36,9 @@ private MediaFormat ParseFormat(Format analysisFormat)
}.Max();
public MediaFormat Format { get; }
public AudioStream PrimaryAudioStream { get; }
public AudioStream? PrimaryAudioStream => AudioStreams.OrderBy(stream => stream.Index).FirstOrDefault();
public VideoStream PrimaryVideoStream { get; }
public VideoStream? PrimaryVideoStream => VideoStreams.OrderBy(stream => stream.Index).FirstOrDefault();
public List<VideoStream> VideoStreams { get; }
public List<AudioStream> AudioStreams { get; }
@ -56,14 +48,14 @@ private VideoStream ParseVideoStream(FFProbeStream stream)
return new VideoStream
{
Index = stream.Index,
AvgFrameRate = DivideRatio(ParseRatioDouble(stream.AvgFrameRate, '/')),
BitRate = !string.IsNullOrEmpty(stream.BitRate) ? ParseIntInvariant(stream.BitRate) : default,
BitsPerRawSample = !string.IsNullOrEmpty(stream.BitsPerRawSample) ? ParseIntInvariant(stream.BitsPerRawSample) : default,
AvgFrameRate = MediaAnalysisUtils.DivideRatio(MediaAnalysisUtils.ParseRatioDouble(stream.AvgFrameRate, '/')),
BitRate = !string.IsNullOrEmpty(stream.BitRate) ? MediaAnalysisUtils.ParseIntInvariant(stream.BitRate) : default,
BitsPerRawSample = !string.IsNullOrEmpty(stream.BitsPerRawSample) ? MediaAnalysisUtils.ParseIntInvariant(stream.BitsPerRawSample) : default,
CodecName = stream.CodecName,
CodecLongName = stream.CodecLongName,
DisplayAspectRatio = ParseRatioInt(stream.DisplayAspectRatio, ':'),
Duration = ParseDuration(stream),
FrameRate = DivideRatio(ParseRatioDouble(stream.FrameRate, '/')),
DisplayAspectRatio = MediaAnalysisUtils.ParseRatioInt(stream.DisplayAspectRatio, ':'),
Duration = MediaAnalysisUtils.ParseDuration(stream),
FrameRate = MediaAnalysisUtils.DivideRatio(MediaAnalysisUtils.ParseRatioDouble(stream.FrameRate, '/')),
Height = stream.Height ?? 0,
Width = stream.Width ?? 0,
Profile = stream.Profile,
@ -74,7 +66,55 @@ private VideoStream ParseVideoStream(FFProbeStream stream)
};
}
internal static TimeSpan ParseDuration(string duration)
private AudioStream ParseAudioStream(FFProbeStream stream)
{
return new AudioStream
{
Index = stream.Index,
BitRate = !string.IsNullOrEmpty(stream.BitRate) ? MediaAnalysisUtils.ParseIntInvariant(stream.BitRate) : default,
CodecName = stream.CodecName,
CodecLongName = stream.CodecLongName,
Channels = stream.Channels ?? default,
ChannelLayout = stream.ChannelLayout,
Duration = MediaAnalysisUtils.ParseDuration(stream),
SampleRateHz = !string.IsNullOrEmpty(stream.SampleRate) ? MediaAnalysisUtils.ParseIntInvariant(stream.SampleRate) : default,
Profile = stream.Profile,
Language = stream.GetLanguage(),
Tags = stream.Tags,
};
}
}
public static class MediaAnalysisUtils
{
private static readonly Regex DurationRegex = new Regex("^(\\d{1,5}:\\d{1,2}:\\d{1,2}(.\\d{1,7})?)", RegexOptions.Compiled);
public static double DivideRatio((double, double) ratio) => ratio.Item1 / ratio.Item2;
public static (int, int) ParseRatioInt(string input, char separator)
{
if (string.IsNullOrEmpty(input)) return (0, 0);
var ratio = input.Split(separator);
return (ParseIntInvariant(ratio[0]), ParseIntInvariant(ratio[1]));
}
public static (double, double) ParseRatioDouble(string input, char separator)
{
if (string.IsNullOrEmpty(input)) return (0, 0);
var ratio = input.Split(separator);
return (ratio.Length > 0 ? ParseDoubleInvariant(ratio[0]) : 0, ratio.Length > 1 ? ParseDoubleInvariant(ratio[1]) : 0);
}
public static double ParseDoubleInvariant(string line) =>
double.Parse(line, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture);
public static int ParseIntInvariant(string line) =>
int.Parse(line, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture);
public static TimeSpan ParseDuration(string duration)
{
if (!string.IsNullOrEmpty(duration))
{
@ -106,49 +146,15 @@ internal static TimeSpan ParseDuration(string duration)
}
}
internal static TimeSpan ParseDuration(FFProbeStream ffProbeStream)
public static TimeSpan ParseDuration(FFProbeStream ffProbeStream)
{
return ParseDuration(ffProbeStream.Duration);
}
private AudioStream ParseAudioStream(FFProbeStream stream)
private static string? TrimTimeSpan(string? durationTag)
{
return new AudioStream
{
Index = stream.Index,
BitRate = !string.IsNullOrEmpty(stream.BitRate) ? ParseIntInvariant(stream.BitRate) : default,
CodecName = stream.CodecName,
CodecLongName = stream.CodecLongName,
Channels = stream.Channels ?? default,
ChannelLayout = stream.ChannelLayout,
Duration = ParseDuration(stream),
SampleRateHz = !string.IsNullOrEmpty(stream.SampleRate) ? ParseIntInvariant(stream.SampleRate) : default,
Profile = stream.Profile,
Language = stream.GetLanguage(),
Tags = stream.Tags,
};
}
private static double DivideRatio((double, double) ratio) => ratio.Item1 / ratio.Item2;
private static (int, int) ParseRatioInt(string input, char separator)
{
if (string.IsNullOrEmpty(input)) return (0, 0);
var ratio = input.Split(separator);
return (ParseIntInvariant(ratio[0]), ParseIntInvariant(ratio[1]));
}
private static (double, double) ParseRatioDouble(string input, char separator)
{
if (string.IsNullOrEmpty(input)) return (0, 0);
var ratio = input.Split(separator);
return (ratio.Length > 0 ? ParseDoubleInvariant(ratio[0]) : 0, ratio.Length > 1 ? ParseDoubleInvariant(ratio[1]) : 0);
}
private static double ParseDoubleInvariant(string line) =>
double.Parse(line, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture);
private static int ParseIntInvariant(string line) =>
int.Parse(line, System.Globalization.NumberStyles.Any, System.Globalization.CultureInfo.InvariantCulture);
var durationMatch = DurationRegex.Match(durationTag ?? "");
return durationMatch.Success ? durationMatch.Groups[1].Value : null;
}
}
}
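
The parsing helpers moved to the public MediaAnalysisUtils class, so the sexagesimal duration strings ffprobe emits can be parsed directly; a small sketch:

```csharp
// ffprobe reports durations in sexagesimal form, e.g. "0:00:09.552000"
TimeSpan duration = MediaAnalysisUtils.ParseDuration("0:00:09.552000");
```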

View file

@ -0,0 +1,52 @@
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Text.Json;
namespace FFMpegCore
{
public static class GlobalFFOptions
{
private static readonly string ConfigFile = "ffmpeg.config.json";
public static FFOptions Current { get; private set; }
static GlobalFFOptions()
{
if (File.Exists(ConfigFile))
{
Current = JsonSerializer.Deserialize<FFOptions>(File.ReadAllText(ConfigFile))!;
}
else
{
Current = new FFOptions();
}
}
public static void Configure(Action<FFOptions> optionsAction)
{
optionsAction?.Invoke(Current);
}
public static void Configure(FFOptions ffOptions)
{
Current = ffOptions ?? throw new ArgumentNullException(nameof(ffOptions));
}
public static string GetFFMpegBinaryPath(FFOptions? ffOptions = null) => GetFFBinaryPath("FFMpeg", ffOptions ?? Current);
public static string GetFFProbeBinaryPath(FFOptions? ffOptions = null) => GetFFBinaryPath("FFProbe", ffOptions ?? Current);
private static string GetFFBinaryPath(string name, FFOptions ffOptions)
{
var ffName = name.ToLowerInvariant();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
ffName += ".exe";
var target = Environment.Is64BitProcess ? "x64" : "x86";
if (Directory.Exists(Path.Combine(ffOptions.BinaryFolder, target)))
ffName = Path.Combine(target, ffName);
return Path.Combine(ffOptions.BinaryFolder, ffName);
}
}
}

View file

@ -11,22 +11,16 @@ public static class FFMpegHelper
private static bool _ffmpegVerified;
public static void ConversionSizeExceptionCheck(Image image)
{
ConversionSizeExceptionCheck(image.Size);
}
=> ConversionSizeExceptionCheck(image.Size.Width, image.Size.Height);
public static void ConversionSizeExceptionCheck(IMediaAnalysis info)
{
ConversionSizeExceptionCheck(new Size(info.PrimaryVideoStream.Width, info.PrimaryVideoStream.Height));
}
=> ConversionSizeExceptionCheck(info.PrimaryVideoStream!.Width, info.PrimaryVideoStream.Height);
private static void ConversionSizeExceptionCheck(Size size)
{
if (size.Height % 2 != 0 || size.Width % 2 != 0 )
private static void ConversionSizeExceptionCheck(int width, int height)
{
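// yuv420p subsamples chroma over 2x2 pixel blocks, so both dimensions must be even.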
if (height % 2 != 0 || width % 2 != 0)
throw new ArgumentException("FFMpeg yuv420p encoding requires the width and height to be a multiple of 2!");
}
}
public static void ExtensionExceptionCheck(string filename, string extension)
{
@ -37,17 +31,17 @@ public static void ExtensionExceptionCheck(string filename, string extension)
public static void RootExceptionCheck()
{
if (FFMpegOptions.Options.RootDirectory == null)
throw new FFMpegException(FFMpegExceptionType.Dependency,
"FFMpeg root is not configured in app config. Missing key 'ffmpegRoot'.");
if (GlobalFFOptions.Current.BinaryFolder == null)
throw new FFOptionsException("FFMpeg root is not configured in app config. Missing key 'BinaryFolder'.");
}
public static void VerifyFFMpegExists()
public static void VerifyFFMpegExists(FFOptions ffMpegOptions)
{
if (_ffmpegVerified) return;
var (exitCode, _) = Instance.Finish(FFMpegOptions.Options.FFmpegBinary(), "-version");
var (exitCode, _) = Instance.Finish(GlobalFFOptions.GetFFMpegBinaryPath(ffMpegOptions), "-version");
_ffmpegVerified = exitCode == 0;
if (!_ffmpegVerified) throw new FFMpegException(FFMpegExceptionType.Operation, "ffmpeg was not found on your system");
if (!_ffmpegVerified)
throw new FFMpegException(FFMpegExceptionType.Operation, "ffmpeg was not found on your system");
}
}
}

View file

@ -20,18 +20,17 @@ public static int Gcd(int first, int second)
public static void RootExceptionCheck()
{
if (FFMpegOptions.Options.RootDirectory == null)
throw new FFMpegException(FFMpegExceptionType.Dependency,
"FFProbe root is not configured in app config. Missing key 'ffmpegRoot'.");
if (GlobalFFOptions.Current.BinaryFolder == null)
throw new FFOptionsException("FFProbe root is not configured in app config. Missing key 'BinaryFolder'.");
}
public static void VerifyFFProbeExists()
public static void VerifyFFProbeExists(FFOptions ffMpegOptions)
{
if (_ffprobeVerified) return;
var (exitCode, _) = Instance.Finish(FFMpegOptions.Options.FFProbeBinary(), "-version");
var (exitCode, _) = Instance.Finish(GlobalFFOptions.GetFFProbeBinaryPath(ffMpegOptions), "-version");
_ffprobeVerified = exitCode == 0;
if (!_ffprobeVerified) throw new FFMpegException(FFMpegExceptionType.Operation, "ffprobe was not found on your system");
if (!_ffprobeVerified)
throw new FFMpegException(FFMpegExceptionType.Operation, "ffprobe was not found on your system");
}
}
}

View file

@ -22,11 +22,11 @@ A .NET Standard FFMpeg/FFProbe wrapper for easily integrating media analysis and
FFProbe is used to gather media information:
```csharp
var mediaInfo = FFProbe.Analyse(inputFile);
var mediaInfo = FFProbe.Analyse(inputPath);
```
or
```csharp
var mediaInfo = await FFProbe.AnalyseAsync(inputFile);
var mediaInfo = await FFProbe.AnalyseAsync(inputPath);
```
@ -43,20 +43,19 @@ FFMpegArguments
.WithConstantRateFactor(21)
.WithAudioCodec(AudioCodec.Aac)
.WithVariableBitrate(4)
.WithFastStart()
.WithVideoFilters(filterOptions => filterOptions
.Scale(VideoSize.Hd))
.WithFastStart())
.ProcessSynchronously();
```
Easily capture screens from your videos:
```csharp
var mediaFileAnalysis = FFProbe.Analyse(inputPath);
// process the snapshot in-memory and use the Bitmap directly
var bitmap = FFMpeg.Snapshot(mediaFileAnalysis, new Size(200, 400), TimeSpan.FromMinutes(1));
var bitmap = FFMpeg.Snapshot(inputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
// or persists the image on the drive
FFMpeg.Snapshot(mediaFileAnalysis, outputPath, new Size(200, 400), TimeSpan.FromMinutes(1))
FFMpeg.Snapshot(inputPath, outputPath, new Size(200, 400), TimeSpan.FromMinutes(1));
```
Convert to and/or from streams
@ -89,25 +88,25 @@ FFMpeg.JoinImageSequence(@"..\joined_video.mp4", frameRate: 1,
Mute videos:
```csharp
FFMpeg.Mute(inputFilePath, outputFilePath);
FFMpeg.Mute(inputPath, outputPath);
```
Save audio track from video:
```csharp
FFMpeg.ExtractAudio(inputVideoFilePath, outputAudioFilePath);
FFMpeg.ExtractAudio(inputPath, outputPath);
```
Add or replace audio track on video:
```csharp
FFMpeg.ReplaceAudio(inputVideoFilePath, inputAudioFilePath, outputVideoFilePath);
FFMpeg.ReplaceAudio(inputPath, inputAudioPath, outputPath);
```
Add poster image to audio file (good for YouTube videos):
```csharp
FFMpeg.PosterWithAudio(inputImageFilePath, inputAudioFilePath, outputVideoFilePath);
FFMpeg.PosterWithAudio(inputPath, inputAudioPath, outputPath);
// or
var image = Image.FromFile(inputImageFile);
image.AddAudio(inputAudioFilePath, outputVideoFilePath);
var image = Image.FromFile(inputImagePath);
image.AddAudio(inputAudioPath, outputPath);
```
Other available arguments can be found in the `FFMpegCore.Arguments` namespace.
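For instance, trimming a clip while passing a raw ffmpeg flag might look like the following sketch. `Seek`, `WithDuration` and `WithCustomArgument` are assumed to be among the available argument builders; they are not demonstrated elsewhere in this document:
```csharp
await FFMpegArguments
    .FromFileInput(inputPath)
    .OutputToFile(outputPath, false, options => options
        .Seek(TimeSpan.FromSeconds(10))         // assumed builder: start 10 seconds into the input
        .WithDuration(TimeSpan.FromSeconds(30)) // assumed builder: keep 30 seconds of output
        .WithVideoCodec(VideoCodec.LibX264)
        .WithCustomArgument("-an"))             // assumed escape hatch for raw ffmpeg arguments (here: drop audio)
    .ProcessAsynchronously();
```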
@ -135,10 +134,11 @@ var videoFramesSource = new RawVideoPipeSource(CreateFrames(64)) //pass IEnumera
{
FrameRate = 30 //set source frame rate
};
FFMpegArguments
.FromPipeInput(videoFramesSource, <input_stream_options>)
.OutputToFile("temporary.mp4", false, <output_options>)
.ProcessSynchronously();
await FFMpegArguments
.FromPipeInput(videoFramesSource)
.OutputToFile(outputPath, false, options => options
.WithVideoCodec(VideoCodec.LibVpx))
.ProcessAsynchronously();
```
If you want to use `System.Drawing.Bitmap` as an `IVideoFrame`, there is a `BitmapVideoFrameWrapper` wrapper class, as sketched below.
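A minimal sketch of that approach, assuming `BitmapVideoFrameWrapper` accepts a `Bitmap` in its constructor (`myBitmaps` is a placeholder for your own bitmap sequence):
```csharp
IEnumerable<IVideoFrame> CreateBitmapFrames(IEnumerable<Bitmap> bitmaps)
{
    foreach (var bitmap in bitmaps)
        yield return new BitmapVideoFrameWrapper(bitmap); // wrap each Bitmap as an IVideoFrame
}

var bitmapFramesSource = new RawVideoPipeSource(CreateBitmapFrames(myBitmaps))
{
    FrameRate = 30
};
await FFMpegArguments
    .FromPipeInput(bitmapFramesSource)
    .OutputToFile(outputPath, false, options => options
        .WithVideoCodec(VideoCodec.LibVpx))
    .ProcessAsynchronously();
```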
@ -179,13 +179,19 @@ If these folders are not defined, it will try to find the binaries in `/root/(ff
#### Option 1
The default value (`\\FFMPEG\\bin`) can be overwritten via the `FFMpegOptions` class:
The default value of an empty string (expecting ffmpeg to be found through PATH) can be overwritten via the `FFOptions` class:
```csharp
public Startup()
{
FFMpegOptions.Configure(new FFMpegOptions { RootDirectory = "./bin", TempDirectory = "/tmp" });
}
// setting global options
GlobalFFOptions.Configure(new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
// or
GlobalFFOptions.Configure(options => options.BinaryFolder = "./bin");
// or individual, per-run options
await FFMpegArguments
.FromFileInput(inputPath)
.OutputToFile(outputPath)
.ProcessAsynchronously(true, new FFOptions { BinaryFolder = "./bin", TemporaryFilesFolder = "/tmp" });
```
#### Option 2
@ -194,8 +200,8 @@ The root and temp directory for the ffmpeg binaries can be configured via the `f
```json
{
"RootDirectory": "./bin",
"TempDirectory": "/tmp"
"BinaryFolder": "./bin",
"TemporaryFilesFolder": "/tmp"
}
```
@ -217,6 +223,6 @@ The root and temp directory for the ffmpeg binaries can be configured via the `f
### License
Copyright © 2020
Copyright © 2021
Released under [MIT license](https://github.com/rosenbjerg/FFMpegCore/blob/master/LICENSE)