Merge pull request #46 from max619/feature/piping

Added input and output piping support
Malte Rosenbjerg 2020-05-06 20:55:10 +02:00 committed by GitHub
commit 001f12c915
24 changed files with 1337 additions and 65 deletions


@ -0,0 +1,219 @@
using FFMpegCore.Extend;
using FFMpegCore.FFMPEG.Pipes;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.Numerics;
using System.Text;
namespace FFMpegCore.Test
{
static class BitmapSource
{
public static IEnumerable<IVideoFrame> CreateBitmaps(int count, PixelFormat fmt, int w, int h)
{
for (int i = 0; i < count; i++)
{
using (var frame = CreateVideoFrame(i, fmt, w, h, 0.025f, 0.025f * w * 0.03f))
{
yield return frame;
}
}
}
private static BitmapVideoFrameWrapper CreateVideoFrame(int index, PixelFormat fmt, int w, int h, float scaleNoise, float offset)
{
var bitmap = new Bitmap(w, h, fmt);
offset = offset * index;
for (int y = 0; y < h; y++)
for (int x = 0; x < w; x++)
{
var nx = x * scaleNoise + offset;
var ny = y * scaleNoise + offset;
var value = (int)((Perlin.Noise(nx, ny) + 1.0f) / 2.0f * 255);
var color = Color.FromArgb(value, value, value);
bitmap.SetPixel(x, y, color);
}
return new BitmapVideoFrameWrapper(bitmap);
}
//
// Perlin noise generator for Unity
// Keijiro Takahashi, 2013, 2015
// https://github.com/keijiro/PerlinNoise
//
// Based on the original implementation by Ken Perlin
// http://mrl.nyu.edu/~perlin/noise/
//
static class Perlin
{
#region Noise functions
public static float Noise(float x)
{
var X = (int)MathF.Floor(x) & 0xff;
x -= MathF.Floor(x);
var u = Fade(x);
return Lerp(u, Grad(perm[X], x), Grad(perm[X + 1], x - 1)) * 2;
}
public static float Noise(float x, float y)
{
var X = (int)MathF.Floor(x) & 0xff;
var Y = (int)MathF.Floor(y) & 0xff;
x -= MathF.Floor(x);
y -= MathF.Floor(y);
var u = Fade(x);
var v = Fade(y);
var A = (perm[X] + Y) & 0xff;
var B = (perm[X + 1] + Y) & 0xff;
return Lerp(v, Lerp(u, Grad(perm[A], x, y), Grad(perm[B], x - 1, y)),
Lerp(u, Grad(perm[A + 1], x, y - 1), Grad(perm[B + 1], x - 1, y - 1)));
}
public static float Noise(Vector2 coord)
{
return Noise(coord.X, coord.Y);
}
public static float Noise(float x, float y, float z)
{
var X = (int)MathF.Floor(x) & 0xff;
var Y = (int)MathF.Floor(y) & 0xff;
var Z = (int)MathF.Floor(z) & 0xff;
x -= MathF.Floor(x);
y -= MathF.Floor(y);
z -= MathF.Floor(z);
var u = Fade(x);
var v = Fade(y);
var w = Fade(z);
var A = (perm[X] + Y) & 0xff;
var B = (perm[X + 1] + Y) & 0xff;
var AA = (perm[A] + Z) & 0xff;
var BA = (perm[B] + Z) & 0xff;
var AB = (perm[A + 1] + Z) & 0xff;
var BB = (perm[B + 1] + Z) & 0xff;
return Lerp(w, Lerp(v, Lerp(u, Grad(perm[AA], x, y, z), Grad(perm[BA], x - 1, y, z)),
Lerp(u, Grad(perm[AB], x, y - 1, z), Grad(perm[BB], x - 1, y - 1, z))),
Lerp(v, Lerp(u, Grad(perm[AA + 1], x, y, z - 1), Grad(perm[BA + 1], x - 1, y, z - 1)),
Lerp(u, Grad(perm[AB + 1], x, y - 1, z - 1), Grad(perm[BB + 1], x - 1, y - 1, z - 1))));
}
public static float Noise(Vector3 coord)
{
return Noise(coord.X, coord.Y, coord.Z);
}
#endregion
#region fBm functions
public static float Fbm(float x, int octave)
{
var f = 0.0f;
var w = 0.5f;
for (var i = 0; i < octave; i++)
{
f += w * Noise(x);
x *= 2.0f;
w *= 0.5f;
}
return f;
}
public static float Fbm(Vector2 coord, int octave)
{
var f = 0.0f;
var w = 0.5f;
for (var i = 0; i < octave; i++)
{
f += w * Noise(coord);
coord *= 2.0f;
w *= 0.5f;
}
return f;
}
public static float Fbm(float x, float y, int octave)
{
return Fbm(new Vector2(x, y), octave);
}
public static float Fbm(Vector3 coord, int octave)
{
var f = 0.0f;
var w = 0.5f;
for (var i = 0; i < octave; i++)
{
f += w * Noise(coord);
coord *= 2.0f;
w *= 0.5f;
}
return f;
}
public static float Fbm(float x, float y, float z, int octave)
{
return Fbm(new Vector3(x, y, z), octave);
}
#endregion
#region Private functions
static float Fade(float t)
{
return t * t * t * (t * (t * 6 - 15) + 10);
}
static float Lerp(float t, float a, float b)
{
return a + t * (b - a);
}
static float Grad(int hash, float x)
{
return (hash & 1) == 0 ? x : -x;
}
static float Grad(int hash, float x, float y)
{
return ((hash & 1) == 0 ? x : -x) + ((hash & 2) == 0 ? y : -y);
}
static float Grad(int hash, float x, float y, float z)
{
var h = hash & 15;
var u = h < 8 ? x : y;
var v = h < 4 ? y : (h == 12 || h == 14 ? x : z);
return ((h & 1) == 0 ? u : -u) + ((h & 2) == 0 ? v : -v);
}
static int[] perm = {
151,160,137,91,90,15,
131,13,201,95,96,53,194,233,7,225,140,36,103,30,69,142,8,99,37,240,21,10,23,
190, 6,148,247,120,234,75,0,26,197,62,94,252,219,203,117,35,11,32,57,177,33,
88,237,149,56,87,174,20,125,136,171,168, 68,175,74,165,71,134,139,48,27,166,
77,146,158,231,83,111,229,122,60,211,133,230,220,105,92,41,55,46,245,40,244,
102,143,54, 65,25,63,161, 1,216,80,73,209,76,132,187,208, 89,18,169,200,196,
135,130,116,188,159,86,164,100,109,198,173,186, 3,64,52,217,226,250,124,123,
5,202,38,147,118,126,255,82,85,212,207,206,59,227,47,16,58,17,182,189,28,42,
223,183,170,213,119,248,152, 2,44,154,163, 70,221,153,101,155,167, 43,172,9,
129,22,39,253, 19,98,108,110,79,113,224,232,178,185, 112,104,218,246,97,228,
251,34,242,193,238,210,144,12,191,179,162,241, 81,51,145,235,249,14,239,107,
49,192,214, 31,181,199,106,157,184, 84,204,176,115,121,50,45,127, 4,150,254,
138,236,205,93,222,114,67,29,24,72,243,141,128,195,78,66,215,61,156,180,
151
};
#endregion
}
}
}


@ -12,7 +12,7 @@
<ItemGroup>
<Content Include="ffmpeg.config.json">
- <CopyToOutputDirectory>Always</CopyToOutputDirectory>
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
</ItemGroup>


@ -31,5 +31,30 @@ public void Probe_Success()
Assert.AreEqual(13, info.Duration.Seconds);
}
[TestMethod]
public void Probe_Success_FromStream()
{
var output = new FFProbe();
using (var stream = File.OpenRead(VideoLibrary.LocalVideo.FullName))
{
var info = output.ParseVideoInfo(stream);
Assert.AreEqual(13, info.Duration.Seconds);
}
}
[TestMethod]
public void Probe_Success_FromStream_Async()
{
var output = new FFProbe();
using (var stream = File.OpenRead(VideoLibrary.LocalVideo.FullName))
{
var info = output.ParseVideoInfoAsync(stream).WaitForResult();
Assert.AreEqual(13, info.Duration.Seconds);
}
}
}
}


@ -17,6 +17,7 @@ public enum ImageType
public static class VideoLibrary
{
public static readonly FileInfo LocalVideo = new FileInfo($".{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}input.mp4");
public static readonly FileInfo LocalVideoWebm = new FileInfo($".{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}input.webm");
public static readonly FileInfo LocalVideoAudioOnly = new FileInfo($".{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}audio_only.mp4");
public static readonly FileInfo LocalVideoNoAudio = new FileInfo($".{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}mute.mp4");
public static readonly FileInfo LocalAudio = new FileInfo($".{Path.DirectorySeparatorChar}Resources{Path.DirectorySeparatorChar}audio.mp3");


@ -0,0 +1,13 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
namespace FFMpegCore.Test
{
static class TasksExtensions
{
public static T WaitForResult<T>(this Task<T> task) =>
task.ConfigureAwait(false).GetAwaiter().GetResult();
}
}


@ -1,6 +1,8 @@
using FFMpegCore.Enums;
using FFMpegCore.FFMPEG.Argument;
using FFMpegCore.FFMPEG.Enums;
using FFMpegCore.FFMPEG.Exceptions;
using FFMpegCore.FFMPEG.Pipes;
using FFMpegCore.Test.Resources;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
@ -62,6 +64,109 @@ public bool Convert(VideoType type, bool multithreaded = false, VideoSize size =
}
}
private void ConvertFromStreamPipe(VideoType type, ArgumentContainer container)
{
var output = Input.OutputLocation(type);
try
{
var input = VideoInfo.FromFileInfo(VideoLibrary.LocalVideoWebm);
using (var inputStream = System.IO.File.OpenRead(input.FullName))
{
var pipeSource = new StreamPipeDataWriter(inputStream);
var arguments = new ArgumentContainer { new InputPipeArgument(pipeSource) };
foreach (var arg in container)
{
arguments.Add(arg.Value);
}
arguments.Add(new OutputArgument(output));
var scaling = container.Find<ScaleArgument>();
Encoder.Convert(arguments);
var outputVideo = new VideoInfo(output.FullName);
Assert.IsTrue(File.Exists(output.FullName));
Assert.IsTrue(Math.Abs((outputVideo.Duration - input.Duration).TotalMilliseconds) < 1000.0 / input.FrameRate);
if (scaling == null)
{
Assert.AreEqual(outputVideo.Width, input.Width);
Assert.AreEqual(outputVideo.Height, input.Height);
}
else
{
if (scaling.Value.Width != -1)
{
Assert.AreEqual(outputVideo.Width, scaling.Value.Width);
}
if (scaling.Value.Height != -1)
{
Assert.AreEqual(outputVideo.Height, scaling.Value.Height);
}
Assert.AreNotEqual(outputVideo.Width, input.Width);
Assert.AreNotEqual(outputVideo.Height, input.Height);
}
}
}
finally
{
if (File.Exists(output.FullName))
File.Delete(output.FullName);
}
}
private void ConvertToStreamPipe(VideoType type, ArgumentContainer container)
{
using (var ms = new MemoryStream())
{
var input = VideoInfo.FromFileInfo(VideoLibrary.LocalVideo);
var arguments = new ArgumentContainer { new InputArgument(input) };
foreach (var arg in container)
{
arguments.Add(arg.Value);
}
var streamPipeDataReader = new StreamPipeDataReader(ms);
streamPipeDataReader.BlockSize = streamPipeDataReader.BlockSize * 16;
arguments.Add(new OutputPipeArgument(streamPipeDataReader));
var scaling = container.Find<ScaleArgument>();
Encoder.Convert(arguments);
ms.Position = 0;
var outputVideo = VideoInfo.FromStream(ms);
//Assert.IsTrue(Math.Abs((outputVideo.Duration - input.Duration).TotalMilliseconds) < 1000.0 / input.FrameRate);
if (scaling == null)
{
Assert.AreEqual(outputVideo.Width, input.Width);
Assert.AreEqual(outputVideo.Height, input.Height);
}
else
{
if (scaling.Value.Width != -1)
{
Assert.AreEqual(outputVideo.Width, scaling.Value.Width);
}
if (scaling.Value.Height != -1)
{
Assert.AreEqual(outputVideo.Height, scaling.Value.Height);
}
Assert.AreNotEqual(outputVideo.Width, input.Width);
Assert.AreNotEqual(outputVideo.Height, input.Height);
}
}
}
public void Convert(VideoType type, ArgumentContainer container)
{
var output = Input.OutputLocation(type);
@ -70,7 +175,7 @@ public void Convert(VideoType type, ArgumentContainer container)
{
var input = VideoInfo.FromFileInfo(Input);
var arguments = new ArgumentContainer { new InputArgument(input) };
foreach (var arg in container)
{
arguments.Add(arg.Value);
@ -114,6 +219,64 @@ public void Convert(VideoType type, ArgumentContainer container)
}
}
public void ConvertFromPipe(VideoType type, ArgumentContainer container)
{
ConvertFromPipe(type, container, PixelFormat.Format24bppRgb);
ConvertFromPipe(type, container, PixelFormat.Format32bppArgb);
ConvertFromPipe(type, container, PixelFormat.Format48bppRgb);
}
public void ConvertFromPipe(VideoType type, ArgumentContainer container, PixelFormat fmt)
{
var output = Input.OutputLocation(type);
try
{
var videoFramesSource = new RawVideoPipeDataWriter(BitmapSource.CreateBitmaps(128, fmt, 256, 256));
var arguments = new ArgumentContainer { new InputPipeArgument(videoFramesSource) };
foreach (var arg in container)
{
arguments.Add(arg.Value);
}
arguments.Add(new OutputArgument(output));
var scaling = container.Find<ScaleArgument>();
Encoder.Convert(arguments);
var outputVideo = new VideoInfo(output.FullName);
Assert.IsTrue(File.Exists(output.FullName));
if (scaling == null)
{
Assert.AreEqual(outputVideo.Width, videoFramesSource.Width);
Assert.AreEqual(outputVideo.Height, videoFramesSource.Height);
}
else
{
if (scaling.Value.Width != -1)
{
Assert.AreEqual(outputVideo.Width, scaling.Value.Width);
}
if (scaling.Value.Height != -1)
{
Assert.AreEqual(outputVideo.Height, scaling.Value.Height);
}
Assert.AreNotEqual(outputVideo.Width, videoFramesSource.Width);
Assert.AreNotEqual(outputVideo.Height, videoFramesSource.Height);
}
}
finally
{
if (File.Exists(output.FullName))
File.Delete(output.FullName);
}
}
[TestMethod]
public void Video_ToMP4()
{
@ -123,10 +286,90 @@ public void Video_ToMP4()
[TestMethod]
public void Video_ToMP4_Args()
{
var container = new ArgumentContainer { new VideoCodecArgument(VideoCodec.LibX264) };
Convert(VideoType.Mp4, container);
}
[TestMethod]
public void Video_ToMP4_Args_Pipe()
{
var container = new ArgumentContainer { new VideoCodecArgument(VideoCodec.LibX264) };
ConvertFromPipe(VideoType.Mp4, container);
}
[TestMethod]
public void Video_ToMP4_Args_StreamPipe()
{
var container = new ArgumentContainer { new VideoCodecArgument(VideoCodec.LibX264) };
ConvertFromStreamPipe(VideoType.Mp4, container);
}
[TestMethod]
public void Video_ToMP4_Args_StreamOutputPipe_Async_Failure()
{
Assert.ThrowsException<FFMpegException>(() =>
{
using (var ms = new MemoryStream())
{
var pipeSource = new StreamPipeDataReader(ms);
var container = new ArgumentContainer
{
new InputArgument(VideoLibrary.LocalVideo),
new VideoCodecArgument(VideoCodec.LibX264),
new ForceFormatArgument("mkv"),
new OutputPipeArgument(pipeSource)
};
var input = VideoInfo.FromFileInfo(VideoLibrary.LocalVideoWebm);
Encoder.ConvertAsync(container).WaitForResult();
}
});
}
[TestMethod]
public void Video_ToMP4_Args_StreamOutputPipe_Failure()
{
Assert.ThrowsException<FFMpegException>(() =>
{
var container = new ArgumentContainer
{
new ForceFormatArgument("mkv")
};
ConvertToStreamPipe(VideoType.Mp4, container);
});
}
[TestMethod]
public void Video_ToMP4_Args_StreamOutputPipe_Async()
{
using (var ms = new MemoryStream())
{
var pipeSource = new StreamPipeDataReader(ms);
var container = new ArgumentContainer
{
new InputArgument(VideoLibrary.LocalVideo),
new VideoCodecArgument(VideoCodec.LibX264),
new ForceFormatArgument("matroska"),
new OutputPipeArgument(pipeSource)
};
var input = VideoInfo.FromFileInfo(VideoLibrary.LocalVideoWebm);
Encoder.ConvertAsync(container).WaitForResult();
}
}
[TestMethod]
public void Video_ToMP4_Args_StreamOutputPipe()
{
var container = new ArgumentContainer
{
new VideoCodecArgument(VideoCodec.LibX264),
new ForceFormatArgument("matroska")
};
ConvertToStreamPipe(VideoType.Mp4, container);
}
[TestMethod]
public void Video_ToTS()
{
@ -145,6 +388,15 @@ public void Video_ToTS_Args()
Convert(VideoType.Ts, container);
}
[TestMethod]
public void Video_ToTS_Args_Pipe()
{
var container = new ArgumentContainer
{
new ForceFormatArgument(VideoCodec.MpegTs)
};
ConvertFromPipe(VideoType.Ts, container);
}
[TestMethod]
public void Video_ToOGV_Resize()
@ -163,6 +415,17 @@ public void Video_ToOGV_Resize_Args()
Convert(VideoType.Ogv, container);
}
[TestMethod]
public void Video_ToOGV_Resize_Args_Pipe()
{
var container = new ArgumentContainer
{
new ScaleArgument(VideoSize.Ed),
new VideoCodecArgument(VideoCodec.LibTheora)
};
ConvertFromPipe(VideoType.Ogv, container);
}
[TestMethod]
public void Video_ToMP4_Resize()
{
@ -180,6 +443,17 @@ public void Video_ToMP4_Resize_Args()
Convert(VideoType.Mp4, container);
}
[TestMethod]
public void Video_ToMP4_Resize_Args_Pipe()
{
var container = new ArgumentContainer
{
new ScaleArgument(VideoSize.Ld),
new VideoCodecArgument(VideoCodec.LibX264)
};
ConvertFromPipe(VideoType.Mp4, container);
}
[TestMethod]
public void Video_ToOGV()
{
@ -325,7 +599,8 @@ public void Video_With_Only_Audio_Should_Extract_Metadata()
}
[TestMethod]
public void Video_Duration()
{
var video = VideoInfo.FromFileInfo(VideoLibrary.LocalVideo);
var output = Input.OutputLocation(VideoType.Mp4);
@ -336,7 +611,8 @@ public void Video_Duration() {
new OutputArgument(output)
};
try
{
Encoder.Convert(arguments);
Assert.IsTrue(File.Exists(output.FullName));
@ -346,14 +622,17 @@ public void Video_Duration() {
Assert.AreEqual(video.Duration.Hours, outputVideo.Duration.Hours);
Assert.AreEqual(video.Duration.Minutes, outputVideo.Duration.Minutes);
Assert.AreEqual(video.Duration.Seconds - 5, outputVideo.Duration.Seconds);
}
finally
{
if (File.Exists(output.FullName))
output.Delete();
}
}
[TestMethod]
public void Video_UpdatesProgress()
{
var output = Input.OutputLocation(VideoType.Mp4);
var percentageDone = 0.0;
@ -367,16 +646,44 @@ public void Video_UpdatesProgress() {
new OutputArgument(output)
};
try
{
Encoder.Convert(arguments);
Encoder.OnProgress -= OnProgess;
Assert.IsTrue(File.Exists(output.FullName));
Assert.AreNotEqual(0.0, percentageDone);
}
finally
{
if (File.Exists(output.FullName))
output.Delete();
}
}
[TestMethod]
public void Video_TranscodeInMemory()
{
using (var resStream = new MemoryStream())
{
var reader = new StreamPipeDataReader(resStream);
var writer = new RawVideoPipeDataWriter(BitmapSource.CreateBitmaps(128, PixelFormat.Format24bppRgb, 128, 128));
var container = new ArgumentContainer
{
new InputPipeArgument(writer),
new VideoCodecArgument("vp9"),
new ForceFormatArgument("webm"),
new OutputPipeArgument(reader)
};
Encoder.Convert(container);
resStream.Position = 0;
var vi = VideoInfo.FromStream(resStream);
Assert.AreEqual(vi.Width, 128);
Assert.AreEqual(vi.Height, 128);
}
}
}
}


@ -0,0 +1,89 @@
using FFMpegCore.FFMPEG.Pipes;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
namespace FFMpegCore.Extend
{
public class BitmapVideoFrameWrapper : IVideoFrame, IDisposable
{
public int Width => Source.Width;
public int Height => Source.Height;
public string Format { get; private set; }
public Bitmap Source { get; private set; }
public BitmapVideoFrameWrapper(Bitmap bitmap)
{
Source = bitmap ?? throw new ArgumentNullException(nameof(bitmap));
Format = ConvertStreamFormat(bitmap.PixelFormat);
}
public void Serialize(System.IO.Stream stream)
{
var data = Source.LockBits(new Rectangle(0, 0, Width, Height), ImageLockMode.ReadOnly, Source.PixelFormat);
try
{
var buffer = new byte[data.Stride * data.Height];
Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);
stream.Write(buffer, 0, buffer.Length);
}
finally
{
Source.UnlockBits(data);
}
}
public async Task SerializeAsync(System.IO.Stream stream)
{
var data = Source.LockBits(new Rectangle(0, 0, Width, Height), ImageLockMode.ReadOnly, Source.PixelFormat);
try
{
var buffer = new byte[data.Stride * data.Height];
Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);
await stream.WriteAsync(buffer, 0, buffer.Length);
}
finally
{
Source.UnlockBits(data);
}
}
public void Dispose()
{
Source.Dispose();
}
private static string ConvertStreamFormat(PixelFormat fmt)
{
switch (fmt)
{
case PixelFormat.Format16bppGrayScale:
return "gray16le";
case PixelFormat.Format16bppRgb565:
return "bgr565le";
case PixelFormat.Format24bppRgb:
return "rgb24";
case PixelFormat.Format32bppArgb:
return "rgba";
case PixelFormat.Format32bppPArgb:
// This is not really the same as argb32
return "argb";
case PixelFormat.Format32bppRgb:
return "rgba";
case PixelFormat.Format48bppRgb:
return "rgb48le";
default:
throw new NotSupportedException($"Not supported pixel format {fmt}");
}
}
}
}


@ -1,6 +1,7 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
namespace FFMpegCore.FFMPEG.Argument
{
@ -15,7 +16,7 @@ public ArgumentContainer(params Argument[] arguments)
{
_args = new Dictionary<Type, Argument>();
foreach (var argument in arguments)
{
Add(argument);
}
@ -28,7 +29,7 @@ public bool TryGetArgument<T>(out T output)
{
if (_args.TryGetValue(typeof(T), out var arg))
{
output = (T)arg;
return true;
}
@ -90,7 +91,7 @@ public bool Contains(KeyValuePair<Type, Argument> item)
/// <param name="value">Argument that should be added to collection</param>
public void Add(params Argument[] values)
{
foreach (var value in values)
{
_args.Add(value.GetType(), value);
}
@ -102,9 +103,8 @@ public void Add(params Argument[] values)
/// <returns></returns>
public bool ContainsInputOutput()
{
- return ((ContainsKey(typeof(InputArgument)) && !ContainsKey(typeof(ConcatArgument))) ||
- (!ContainsKey(typeof(InputArgument)) && ContainsKey(typeof(ConcatArgument))))
- && ContainsKey(typeof(OutputArgument));
+ return ContainsOnlyOneOf(typeof(InputArgument), typeof(ConcatArgument), typeof(InputPipeArgument)) &&
+ ContainsOnlyOneOf(typeof(OutputArgument), typeof(OutputPipeArgument));
}
/// <summary>
@ -117,6 +117,11 @@ public bool ContainsKey(Type key)
return _args.ContainsKey(key);
}
public bool ContainsOnlyOneOf(params Type[] types)
{
return types.Count(t => _args.ContainsKey(t)) == 1;
}
public void CopyTo(KeyValuePair<Type, Argument>[] array, int arrayIndex)
{
_args.CopyTo(array, arrayIndex);


@ -5,16 +5,17 @@ namespace FFMpegCore.FFMPEG.Argument
/// <summary>
/// Represents force format parameter
/// </summary>
- public class ForceFormatArgument : Argument<VideoCodec>
+ public class ForceFormatArgument : Argument<string>
{
public ForceFormatArgument() { }
public ForceFormatArgument(string format) : base(format) { }
- public ForceFormatArgument(VideoCodec value) : base(value) { }
+ public ForceFormatArgument(VideoCodec value) : base(value.ToString().ToLower()) { }
/// <inheritdoc/>
public override string GetStringValue()
{
- return $"-f {Value.ToString().ToLower()}";
+ return $"-f {Value}";
}
}
}

View file

@ -0,0 +1,39 @@
using FFMpegCore.FFMPEG.Pipes;
using Instances;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.IO.Pipes;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace FFMpegCore.FFMPEG.Argument
{
/// <summary>
/// Represents input parameter for a named pipe
/// </summary>
public class InputPipeArgument : PipeArgument
{
public IPipeDataWriter Writer { get; private set; }
public InputPipeArgument(IPipeDataWriter writer) : base(PipeDirection.Out)
{
Writer = writer;
}
public override string GetStringValue()
{
return $"-y {Writer.GetFormat()} -i \"{PipePath}\"";
}
public override async Task ProcessDataAsync(CancellationToken token)
{
await Pipe.WaitForConnectionAsync(token).ConfigureAwait(false);
if (!Pipe.IsConnected)
throw new TaskCanceledException();
await Writer.WriteDataAsync(Pipe).ConfigureAwait(false);
}
}
}

View file

@ -0,0 +1,33 @@
using FFMpegCore.FFMPEG.Pipes;
using System;
using System.Collections.Generic;
using System.IO.Pipes;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace FFMpegCore.FFMPEG.Argument
{
public class OutputPipeArgument : PipeArgument
{
public IPipeDataReader Reader { get; private set; }
public OutputPipeArgument(IPipeDataReader reader) : base(PipeDirection.In)
{
Reader = reader;
}
public override string GetStringValue()
{
return $"\"{PipePath}\" -y";
}
public override async Task ProcessDataAsync(CancellationToken token)
{
await Pipe.WaitForConnectionAsync(token).ConfigureAwait(false);
if (!Pipe.IsConnected)
throw new TaskCanceledException();
await Reader.ReadDataAsync(Pipe).ConfigureAwait(false);
}
}
}

View file

@ -0,0 +1,45 @@
using FFMpegCore.FFMPEG.Pipes;
using System;
using System.Collections.Generic;
using System.IO.Pipes;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace FFMpegCore.FFMPEG.Argument
{
public abstract class PipeArgument : Argument
{
public string PipeName { get; private set; }
public string PipePath => PipeHelpers.GetPipePath(PipeName);
protected NamedPipeServerStream Pipe { get; private set; }
private PipeDirection direction;
protected PipeArgument(PipeDirection direction)
{
PipeName = PipeHelpers.GetUnqiuePipeName();
this.direction = direction;
}
public void OpenPipe()
{
if (Pipe != null)
throw new InvalidOperationException("Pipe already has been opened");
Pipe = new NamedPipeServerStream(PipeName, direction, 1, PipeTransmissionMode.Byte, PipeOptions.Asynchronous);
}
public void ClosePipe()
{
Pipe?.Dispose();
Pipe = null;
}
public Task ProcessDataAsync()
{
return ProcessDataAsync(CancellationToken.None);
}
public abstract Task ProcessDataAsync(CancellationToken token);
}
}

View file

@ -5,15 +5,17 @@ namespace FFMpegCore.FFMPEG.Argument
/// <summary>
/// Represents video codec parameter
/// </summary>
- public class VideoCodecArgument : Argument<VideoCodec>
+ public class VideoCodecArgument : Argument<string>
{
public int Bitrate { get; protected set; } = 0;
public VideoCodecArgument() { }
- public VideoCodecArgument(VideoCodec value) : base(value) { }
- public VideoCodecArgument(VideoCodec value, int bitrate) : base(value)
+ public VideoCodecArgument(string codec) : base(codec) { }
+ public VideoCodecArgument(VideoCodec value) : base(value.ToString().ToLower()) { }
+ public VideoCodecArgument(VideoCodec value, int bitrate) : base(value.ToString().ToLower())
{
Bitrate = bitrate;
}
@ -21,7 +23,7 @@ public VideoCodecArgument(VideoCodec value, int bitrate) : base(value)
/// <inheritdoc/>
public override string GetStringValue()
{
- var video = $"-c:v {Value.ToString().ToLower()} -pix_fmt yuv420p";
+ var video = $"-c:v {Value} -pix_fmt yuv420p";
if (Bitrate != default)
{


@ -15,6 +15,8 @@
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using Instances;
using System.Runtime.CompilerServices;
using System.Threading;
namespace FFMpegCore.FFMPEG
{
@ -65,16 +67,16 @@ public Bitmap Snapshot(VideoInfo source, FileInfo output, Size? size = null, Tim
{
if (size.Value.Width == 0)
{
var ratio = source.Width / (double)size.Value.Width;
size = new Size((int)(source.Width * ratio), (int)(source.Height * ratio));
}
if (size.Value.Height == 0)
{
var ratio = source.Height / (double)size.Value.Height;
size = new Size((int)(source.Width * ratio), (int)(source.Height * ratio));
}
}
@ -96,7 +98,7 @@ public Bitmap Snapshot(VideoInfo source, FileInfo output, Size? size = null, Tim
output.Refresh();
Bitmap result;
using (var bmp = (Bitmap)Image.FromFile(output.FullName))
{
using var ms = new MemoryStream();
bmp.Save(ms, ImageFormat.Png);
@ -135,8 +137,8 @@ public VideoInfo Convert(
FFMpegHelper.ExtensionExceptionCheck(output, FileExtension.ForType(type));
FFMpegHelper.ConversionSizeExceptionCheck(source);
var scale = VideoSize.Original == size ? 1 : (double)source.Height / (int)size;
var outputSize = new Size((int)(source.Width / scale), (int)(source.Height / scale));
if (outputSize.Width % 2 != 0)
outputSize.Width += 1;
@ -389,34 +391,47 @@ public VideoInfo ReplaceAudio(VideoInfo source, FileInfo audio, FileInfo output,
public VideoInfo Convert(ArgumentContainer arguments, bool skipExistsCheck = false)
{
var (sources, output) = GetInputOutput(arguments);
if (sources != null)
_totalTime = TimeSpan.FromSeconds(sources.Sum(source => source.Duration.TotalSeconds));
if (!RunProcess(arguments, output, skipExistsCheck))
throw new FFMpegException(FFMpegExceptionType.Conversion, "Could not process file without error");
_totalTime = TimeSpan.MinValue;
- return output.Exists ? new VideoInfo(output) : null;
+ return output != null && output.Exists ? new VideoInfo(output) : null;
}
public async Task<VideoInfo> ConvertAsync(ArgumentContainer arguments, bool skipExistsCheck = false)
{
var (sources, output) = GetInputOutput(arguments);
if (sources != null)
_totalTime = TimeSpan.FromSeconds(sources.Sum(source => source.Duration.TotalSeconds));
if (!await RunProcessAsync(arguments, output, skipExistsCheck))
throw new FFMpegException(FFMpegExceptionType.Conversion, "Could not process file without error");
_totalTime = TimeSpan.MinValue;
- return output.Exists ? new VideoInfo(output) : null;
+ return output != null && output.Exists ? new VideoInfo(output) : null;
}
private static (VideoInfo[] Input, FileInfo Output) GetInputOutput(ArgumentContainer arguments)
{
- var output = ((OutputArgument) arguments[typeof(OutputArgument)]).GetAsFileInfo();
+ FileInfo output;
if (arguments.TryGetArgument<OutputArgument>(out var outputArg))
output = outputArg.GetAsFileInfo();
else if (arguments.TryGetArgument<OutputPipeArgument>(out var outputPipeArg))
output = null;
else
throw new FFMpegException(FFMpegExceptionType.Operation, "No output argument found");
VideoInfo[] sources;
if (arguments.TryGetArgument<InputArgument>(out var input))
sources = input.GetAsVideoInfo();
else if (arguments.TryGetArgument<ConcatArgument>(out var concat))
sources = concat.GetAsVideoInfo();
else if (arguments.TryGetArgument<InputPipeArgument>(out var pipe))
sources = null;
else
throw new FFMpegException(FFMpegExceptionType.Operation, "No input or concat argument found");
return (sources, output);
@ -447,12 +462,71 @@ private bool RunProcess(ArgumentContainer container, FileInfo output, bool skipE
{
_instance?.Dispose();
var arguments = ArgumentBuilder.BuildArguments(container);
var exitCode = -1;
if (container.TryGetArgument<InputPipeArgument>(out var inputPipeArgument))
{
inputPipeArgument.OpenPipe();
}
if (container.TryGetArgument<OutputPipeArgument>(out var outputPipeArgument))
{
outputPipeArgument.OpenPipe();
}
_instance = new Instance(_ffmpegPath, arguments);
_instance.DataReceived += OutputData;
- var exitCode = _instance.BlockUntilFinished();
- if (!skipExistsCheck && (!File.Exists(output.FullName) || new FileInfo(output.FullName).Length == 0))
if (inputPipeArgument != null || outputPipeArgument != null)
{
try
{
using (var tokenSource = new CancellationTokenSource())
{
var concurrentTasks = new List<Task>();
concurrentTasks.Add(_instance.FinishedRunning()
.ContinueWith((t =>
{
exitCode = t.Result;
if (exitCode != 0)
tokenSource.Cancel();
})));
if (inputPipeArgument != null)
concurrentTasks.Add(inputPipeArgument.ProcessDataAsync(tokenSource.Token)
.ContinueWith((t) =>
{
inputPipeArgument.ClosePipe();
if (t.Exception != null)
throw t.Exception;
}));
if (outputPipeArgument != null)
concurrentTasks.Add(outputPipeArgument.ProcessDataAsync(tokenSource.Token)
.ContinueWith((t) =>
{
outputPipeArgument.ClosePipe();
if (t.Exception != null)
throw t.Exception;
}));
Task.WaitAll(concurrentTasks.ToArray()/*, tokenSource.Token*/);
}
}
catch (Exception ex)
{
inputPipeArgument?.ClosePipe();
outputPipeArgument?.ClosePipe();
throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData), ex);
}
}
else
{
exitCode = _instance.BlockUntilFinished();
}
if(exitCode != 0)
throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData));
if (outputPipeArgument == null && !skipExistsCheck && (!File.Exists(output.FullName) || new FileInfo(output.FullName).Length == 0))
throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData));
return exitCode == 0;
@ -461,12 +535,71 @@ private async Task<bool> RunProcessAsync(ArgumentContainer container, FileInfo o
{
_instance?.Dispose();
var arguments = ArgumentBuilder.BuildArguments(container);
var exitCode = -1;
if (container.TryGetArgument<InputPipeArgument>(out var inputPipeArgument))
{
inputPipeArgument.OpenPipe();
}
if (container.TryGetArgument<OutputPipeArgument>(out var outputPipeArgument))
{
outputPipeArgument.OpenPipe();
}
_instance = new Instance(_ffmpegPath, arguments);
_instance.DataReceived += OutputData;
- var exitCode = await _instance.FinishedRunning();
- if (!skipExistsCheck && (!File.Exists(output.FullName) || new FileInfo(output.FullName).Length == 0))
if (inputPipeArgument != null || outputPipeArgument != null)
{
try
{
using (var tokenSource = new CancellationTokenSource())
{
var concurrentTasks = new List<Task>();
concurrentTasks.Add(_instance.FinishedRunning()
.ContinueWith((t =>
{
exitCode = t.Result;
if (exitCode != 0)
tokenSource.Cancel();
})));
if (inputPipeArgument != null)
concurrentTasks.Add(inputPipeArgument.ProcessDataAsync(tokenSource.Token)
.ContinueWith((t) =>
{
inputPipeArgument.ClosePipe();
if (t.Exception != null)
throw t.Exception;
}));
if (outputPipeArgument != null)
concurrentTasks.Add(outputPipeArgument.ProcessDataAsync(tokenSource.Token)
.ContinueWith((t) =>
{
outputPipeArgument.ClosePipe();
if (t.Exception != null)
throw t.Exception;
}));
await Task.WhenAll(concurrentTasks);
}
}
catch (Exception ex)
{
inputPipeArgument?.ClosePipe();
outputPipeArgument?.ClosePipe();
throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData), ex);
}
}
else
{
exitCode = await _instance.FinishedRunning();
}
if (exitCode != 0)
throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData));
if (outputPipeArgument == null && !skipExistsCheck && (!File.Exists(output.FullName) || new FileInfo(output.FullName).Length == 0))
throw new FFMpegException(FFMpegExceptionType.Process, string.Join("\n", _instance.ErrorData));
return exitCode == 0;


@ -5,6 +5,9 @@
using System.Globalization;
using System.Threading.Tasks;
using Instances;
using FFMpegCore.FFMPEG.Argument;
using FFMpegCore.FFMPEG.Pipes;
using System.IO;
namespace FFMpegCore.FFMPEG
{
@ -47,7 +50,7 @@ public Task<VideoInfo> ParseVideoInfoAsync(string source)
/// <returns>A video info object containing all details necessary.</returns>
public VideoInfo ParseVideoInfo(VideoInfo info)
{
- var instance = new Instance(_ffprobePath, BuildFFProbeArguments(info)) {DataBufferCapacity = _outputCapacity};
+ var instance = new Instance(_ffprobePath, BuildFFProbeArguments(info.FullName)) {DataBufferCapacity = _outputCapacity};
instance.BlockUntilFinished();
var output = string.Join("", instance.OutputData);
return ParseVideoInfoInternal(info, output);
@ -59,14 +62,86 @@ public VideoInfo ParseVideoInfo(VideoInfo info)
/// <returns>A video info object containing all details necessary.</returns>
public async Task<VideoInfo> ParseVideoInfoAsync(VideoInfo info)
{
- var instance = new Instance(_ffprobePath, BuildFFProbeArguments(info)) {DataBufferCapacity = _outputCapacity};
+ var instance = new Instance(_ffprobePath, BuildFFProbeArguments(info.FullName)) {DataBufferCapacity = _outputCapacity};
await instance.FinishedRunning();
var output = string.Join("", instance.OutputData);
return ParseVideoInfoInternal(info, output);
}
- private static string BuildFFProbeArguments(VideoInfo info) =>
- $"-v quiet -print_format json -show_streams \"{info.FullName}\"";
/// <summary>
/// Probes the targeted video stream and retrieves all available details.
/// </summary>
/// <param name="stream">Encoded video stream.</param>
/// <returns>A video info object containing all details necessary.</returns>
public VideoInfo ParseVideoInfo(System.IO.Stream stream)
{
var info = new VideoInfo();
var streamPipeSource = new StreamPipeDataWriter(stream);
var pipeArgument = new InputPipeArgument(streamPipeSource);
var instance = new Instance(_ffprobePath, BuildFFProbeArguments(pipeArgument.PipePath)) { DataBufferCapacity = _outputCapacity };
pipeArgument.OpenPipe();
var task = instance.FinishedRunning();
try
{
pipeArgument.ProcessDataAsync().ConfigureAwait(false).GetAwaiter().GetResult();
pipeArgument.ClosePipe();
}
catch(IOException)
{
}
finally
{
pipeArgument.ClosePipe();
}
var exitCode = task.ConfigureAwait(false).GetAwaiter().GetResult();
if (exitCode != 0)
throw new FFMpegException(FFMpegExceptionType.Process, "FFProbe process returned exit status " + exitCode);
var output = string.Join("", instance.OutputData);
return ParseVideoInfoInternal(info, output);
}
/// <summary>
/// Probes the targeted video stream asynchronously and retrieves all available details.
/// </summary>
/// <param name="stream">Encoded video stream.</param>
/// <returns>A video info object containing all details necessary.</returns>
public async Task<VideoInfo> ParseVideoInfoAsync(System.IO.Stream stream)
{
var info = new VideoInfo();
var streamPipeSource = new StreamPipeDataWriter(stream);
var pipeArgument = new InputPipeArgument(streamPipeSource);
var instance = new Instance(_ffprobePath, BuildFFProbeArguments(pipeArgument.PipePath)) { DataBufferCapacity = _outputCapacity };
pipeArgument.OpenPipe();
var task = instance.FinishedRunning();
try
{
await pipeArgument.ProcessDataAsync();
pipeArgument.ClosePipe();
}
catch (IOException)
{
}
finally
{
pipeArgument.ClosePipe();
}
var exitCode = await task;
if (exitCode != 0)
throw new FFMpegException(FFMpegExceptionType.Process, "FFProbe process returned exit status " + exitCode);
var output = string.Join("", instance.OutputData);
return ParseVideoInfoInternal(info, output);
}
private static string BuildFFProbeArguments(string fullPath) =>
$"-v quiet -print_format json -show_streams \"{fullPath}\"";
private VideoInfo ParseVideoInfoInternal(VideoInfo info, string probeOutput)
{
@ -133,5 +208,21 @@ private VideoInfo ParseVideoInfoInternal(VideoInfo info, string probeOutput)
return info;
}
internal FFMpegStreamMetadata GetMetadata(string path)
{
var instance = new Instance(_ffprobePath, BuildFFProbeArguments(path)) { DataBufferCapacity = _outputCapacity };
instance.BlockUntilFinished();
var output = string.Join("", instance.OutputData);
return JsonConvert.DeserializeObject<FFMpegStreamMetadata>(output);
}
internal async Task<FFMpegStreamMetadata> GetMetadataAsync(string path)
{
var instance = new Instance(_ffprobePath, BuildFFProbeArguments(path)) { DataBufferCapacity = _outputCapacity };
await instance.FinishedRunning();
var output = string.Join("", instance.OutputData);
return JsonConvert.DeserializeObject<FFMpegStreamMetadata>(output);
}
}
}


@ -0,0 +1,14 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
namespace FFMpegCore.FFMPEG.Pipes
{
public interface IPipeDataReader
{
void ReadData(System.IO.Stream stream);
Task ReadDataAsync(System.IO.Stream stream);
string GetFormat();
}
}


@ -0,0 +1,18 @@
using FFMpegCore.FFMPEG.Argument;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
namespace FFMpegCore.FFMPEG.Pipes
{
/// <summary>
/// Interface for ffmpeg pipe source data IO
/// </summary>
public interface IPipeDataWriter
{
string GetFormat();
void WriteData(System.IO.Stream pipe);
Task WriteDataAsync(System.IO.Stream pipe);
}
}


@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
namespace FFMpegCore.FFMPEG.Pipes
{
/// <summary>
/// Interface for Video frame
/// </summary>
public interface IVideoFrame
{
int Width { get; }
int Height { get; }
string Format { get; }
void Serialize(System.IO.Stream pipe);
Task SerializeAsync(System.IO.Stream pipe);
}
}


@ -0,0 +1,16 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace FFMpegCore.FFMPEG.Pipes
{
static class PipeHelpers
{
public static string GetUnqiuePipeName() => "FFMpegCore_Pipe_" + Guid.NewGuid();
public static string GetPipePath(string pipeName)
{
return $@"\\.\pipe\{pipeName}";
}
}
}


@ -0,0 +1,85 @@
using FFMpegCore.FFMPEG.Argument;
using FFMpegCore.FFMPEG.Exceptions;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
namespace FFMpegCore.FFMPEG.Pipes
{
/// <summary>
/// Implementation of <see cref="IPipeDataWriter"/> for a raw video stream that is gathered from <see cref="IEnumerator{IVideoFrame}"/>
/// </summary>
public class RawVideoPipeDataWriter : IPipeDataWriter
{
public string StreamFormat { get; private set; }
public int Width { get; private set; }
public int Height { get; private set; }
public int FrameRate { get; set; } = 25;
private bool formatInitialized = false;
private IEnumerator<IVideoFrame> framesEnumerator;
public RawVideoPipeDataWriter(IEnumerator<IVideoFrame> framesEnumerator)
{
this.framesEnumerator = framesEnumerator;
}
public RawVideoPipeDataWriter(IEnumerable<IVideoFrame> framesEnumerator) : this(framesEnumerator.GetEnumerator()) { }
public string GetFormat()
{
if (!formatInitialized)
{
//see input format references https://lists.ffmpeg.org/pipermail/ffmpeg-user/2012-July/007742.html
if (framesEnumerator.Current == null)
{
if (!framesEnumerator.MoveNext())
throw new InvalidOperationException("Enumerator is empty, unable to get frame");
}
StreamFormat = framesEnumerator.Current.Format;
Width = framesEnumerator.Current.Width;
Height = framesEnumerator.Current.Height;
formatInitialized = true;
}
return $"-f rawvideo -r {FrameRate} -pix_fmt {StreamFormat} -s {Width}x{Height}";
}
public void WriteData(System.IO.Stream stream)
{
if (framesEnumerator.Current != null)
{
CheckFrameAndThrow(framesEnumerator.Current);
framesEnumerator.Current.Serialize(stream);
}
while (framesEnumerator.MoveNext())
{
CheckFrameAndThrow(framesEnumerator.Current);
framesEnumerator.Current.Serialize(stream);
}
}
public async Task WriteDataAsync(System.IO.Stream stream)
{
if (framesEnumerator.Current != null)
{
await framesEnumerator.Current.SerializeAsync(stream);
}
while (framesEnumerator.MoveNext())
{
await framesEnumerator.Current.SerializeAsync(stream);
}
}
private void CheckFrameAndThrow(IVideoFrame frame)
{
if (frame.Width != Width || frame.Height != Height || frame.Format != StreamFormat)
throw new FFMpegException(FFMpegExceptionType.Operation, "Video frame is not the same format as created raw video stream\r\n" +
$"Frame format: {frame.Width}x{frame.Height} pix_fmt: {frame.Format}\r\n" +
$"Stream format: {Width}x{Height} pix_fmt: {StreamFormat}");
}
}
}


@ -0,0 +1,30 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
namespace FFMpegCore.FFMPEG.Pipes
{
public class StreamPipeDataReader : IPipeDataReader
{
public System.IO.Stream DestanationStream { get; private set; }
public int BlockSize { get; set; } = 4096;
public string Format { get; set; } = string.Empty;
public StreamPipeDataReader(System.IO.Stream destanationStream)
{
DestanationStream = destanationStream;
}
public void ReadData(System.IO.Stream stream) =>
stream.CopyTo(DestanationStream, BlockSize);
public Task ReadDataAsync(System.IO.Stream stream) =>
stream.CopyToAsync(DestanationStream, BlockSize);
public string GetFormat()
{
return Format;
}
}
}


@ -0,0 +1,33 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
namespace FFMpegCore.FFMPEG.Pipes
{
/// <summary>
/// Implementation of <see cref="IPipeDataWriter"/> used for stream redirection
/// </summary>
public class StreamPipeDataWriter : IPipeDataWriter
{
public System.IO.Stream Source { get; private set; }
public int BlockSize { get; set; } = 4096;
public string StreamFormat { get; set; } = string.Empty;
public StreamPipeDataWriter(System.IO.Stream stream)
{
Source = stream;
}
public void WriteData(System.IO.Stream pipe)=>
Source.CopyTo(pipe, BlockSize);
public Task WriteDataAsync(System.IO.Stream pipe) =>
Source.CopyToAsync(pipe, BlockSize);
public string GetFormat()
{
return StreamFormat;
}
}
}


@ -1,4 +1,6 @@
using FFMpegCore.FFMPEG;
using FFMpegCore.FFMPEG.Argument;
using FFMpegCore.FFMPEG.Pipes;
using System;
using System.IO;
@ -6,8 +8,13 @@ namespace FFMpegCore
{
public class VideoInfo
{
private const string NoVideoPlaceholder = "NULL";
private FileInfo _file;
internal VideoInfo()
{
}
/// <summary>
/// Create a video information object from a file information object.
/// </summary>
@ -74,37 +81,37 @@ public VideoInfo(string path, int outputCapacity = int.MaxValue) : this(new File
/// <summary>
/// Gets the name of the file.
/// </summary>
- public string Name => _file.Name;
+ public string Name => _file != null ? _file.Name : throw new FileNotFoundException();
/// <summary>
/// Gets the full path of the file.
/// </summary>
- public string FullName => _file.FullName;
+ public string FullName => _file != null ? _file.FullName : throw new FileNotFoundException();
/// <summary>
/// Gets the file extension.
/// </summary>
- public string Extension => _file.Extension;
+ public string Extension => _file != null ? _file.Extension : throw new FileNotFoundException();
/// <summary>
/// Gets a flag indicating if the file is read-only.
/// </summary>
- public bool IsReadOnly => _file.IsReadOnly;
+ public bool IsReadOnly => _file != null ? _file.IsReadOnly : throw new FileNotFoundException();
/// <summary>
/// Gets a flag indicating if the file exists (no cache, per call verification).
/// </summary>
- public bool Exists => File.Exists(FullName);
+ public bool Exists => _file != null ? File.Exists(FullName) : false;
/// <summary>
/// Gets the creation date.
/// </summary>
- public DateTime CreationTime => _file.CreationTime;
+ public DateTime CreationTime => _file != null ? _file.CreationTime : throw new FileNotFoundException();
/// <summary>
/// Gets the parent directory information.
/// </summary>
- public DirectoryInfo Directory => _file.Directory;
+ public DirectoryInfo Directory => _file != null ? _file.Directory : throw new FileNotFoundException();
/// <summary>
/// Create a video information object from a file information object.
@ -126,16 +133,26 @@ public static VideoInfo FromPath(string path)
return new VideoInfo(path);
}
/// <summary>
/// Create a video information object from a encoded stream.
/// </summary>
/// <param name="stream">Encoded video stream.</param>
/// <returns></returns>
public static VideoInfo FromStream(System.IO.Stream stream)
{
return new FFProbe().ParseVideoInfo(stream);
}
/// <summary>
/// Pretty prints the video information.
/// </summary>
/// <returns></returns>
public override string ToString()
{
- return "Video Path : " + FullName + Environment.NewLine +
- "Video Root : " + Directory.FullName + Environment.NewLine +
- "Video Name: " + Name + Environment.NewLine +
- "Video Extension : " + Extension + Environment.NewLine +
+ return "Video Path : " + (_file != null ? FullName : NoVideoPlaceholder) + Environment.NewLine +
+ "Video Root : " + (_file != null ? Directory.FullName : NoVideoPlaceholder) + Environment.NewLine +
+ "Video Name: " + (_file != null ? Name : NoVideoPlaceholder) + Environment.NewLine +
+ "Video Extension : " + (_file != null ? Extension : NoVideoPlaceholder) + Environment.NewLine +
"Video Duration : " + Duration + Environment.NewLine +
"Audio Format : " + AudioFormat + Environment.NewLine +
"Video Format : " + VideoFormat + Environment.NewLine +


@ -381,6 +381,42 @@ public class OverrideArgument : Argument
}
}
```
### Input piping
With input piping it is possible to write video frames directly from program memory, without first saving them as JPEG or PNG files and passing the file paths to ffmpeg. This also makes it possible to convert video on-the-fly while the frames are being generated or processed.
The `IPipeDataWriter` interface is the source of piped data, which can be either an already-encoded video stream or a stream of raw frames. This PR adds two implementations: `RawVideoPipeDataWriter` for raw frame streams (shown below) and `StreamPipeDataWriter` for redirecting an existing encoded stream (see the sketch after the walkthrough).
For example, a method that generates bitmap frames:
```csharp
IEnumerable<IVideoFrame> CreateFrames(int count)
{
for(int i = 0; i < count; i++)
{
yield return GetNextFrame(); // your method for producing the next frame
}
}
```
Then create an `ArgumentContainer` with an `InputPipeArgument`:
```csharp
var videoFramesSource = new RawVideoPipeDataWriter(CreateFrames(64)) // pass an IEnumerable<IVideoFrame> or IEnumerator<IVideoFrame> to the constructor
{
FrameRate = 30 //set source frame rate
};
var container = new ArgumentContainer
{
new InputPipeArgument(videoFramesSource),
... //Other encoding arguments
new OutputArgument("temporary.mp4")
};
var ffmpeg = new FFMpeg();
var result = ffmpeg.Convert(container);
```
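For piping an already-encoded stream instead of raw frames, `StreamPipeDataWriter` copies any readable `System.IO.Stream` into the input pipe. A minimal sketch along the lines of the `ConvertFromStreamPipe` test added in this PR (the file names are placeholders):
```csharp
using (var inputStream = File.OpenRead("input.webm")) // any readable stream works as the source
{
    var pipeSource = new StreamPipeDataWriter(inputStream);
    var container = new ArgumentContainer
    {
        new InputPipeArgument(pipeSource),
        new VideoCodecArgument(VideoCodec.LibX264),
        new OutputArgument("output.mp4")
    };
    var ffmpeg = new FFMpeg();
    ffmpeg.Convert(container);
}
```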
If you want to use `System.Drawing.Bitmap` as an `IVideoFrame`, the `BitmapVideoFrameWrapper` class wraps a bitmap, as sketched below.
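A minimal sketch of wrapping bitmaps, mirroring the `BitmapSource` helper used by the tests in this PR (each wrapper is disposed once the consumer has moved on to the next frame):
```csharp
IEnumerable<IVideoFrame> WrapFrames(IEnumerable<Bitmap> bitmaps)
{
    foreach (var bitmap in bitmaps)
    {
        // BitmapVideoFrameWrapper exposes the bitmap's size and pixel format to the
        // raw video pipe and serializes the locked bitmap bits into the pipe.
        using (var frame = new BitmapVideoFrameWrapper(bitmap))
        {
            yield return frame;
        }
    }
}
```
The wrapped frames can then be passed to `RawVideoPipeDataWriter` exactly as above; note that every frame in one stream must share the same size and pixel format, otherwise the writer throws an `FFMpegException`.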
## Contributors
<a href="https://github.com/vladjerca"><img src="https://avatars.githubusercontent.com/u/6339681?v=4" title="vladjerca" width="80" height="80"></a>