diff --git a/.editorconfig b/.editorconfig
index 863b800..51e768e 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -33,6 +33,7 @@ csharp_style_prefer_switch_expression = false:warning
csharp_style_prefer_pattern_matching = false:warning
csharp_style_implicit_object_creation_when_type_is_apparent = false:warning
csharp_prefer_braces = when_multiline:warning
+csharp_style_namespace_declarations = file_scoped:warning
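+# File-scoped namespaces (C# 10) remove one level of indentation from every file.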
# Naming Rules
diff --git a/SeeShark.Example.Ascii/Program.cs b/SeeShark.Example.Ascii/Program.cs
index 9a6de10..1d68996 100644
--- a/SeeShark.Example.Ascii/Program.cs
+++ b/SeeShark.Example.Ascii/Program.cs
@@ -10,230 +10,229 @@
using SeeShark.Device;
using static SeeShark.FFmpeg.FFmpegManager;
-namespace SeeShark.Example.Ascii
+namespace SeeShark.Example.Ascii;
+
+class Program
{
- class Program
+ static Camera? karen;
+ static CameraManager? manager;
+ static FrameConverter? converter;
+
+ static void Main(string[] args)
{
- static Camera? karen;
- static CameraManager? manager;
- static FrameConverter? converter;
+ // Casually displaying "Oof :(" when exiting the program with force.
+ Console.CancelKeyPress += (object? _sender, ConsoleCancelEventArgs e) =>
+ {
+ Console.Error.WriteLine("\n\n");
+ Console.ForegroundColor = ConsoleColor.Red;
+ Console.Error.WriteLine("Oof :(");
+ Console.ResetColor();
+ Dispose();
+ };
+
+ // You can add your own path for FFmpeg libraries here!
+ SetupFFmpeg(
+ FFmpeg.FFmpegLogLevel.Info,
+ ConsoleColor.Yellow,
+ AppDomain.CurrentDomain.BaseDirectory,
+ "/usr/lib",
+ "/usr/lib64"
+ );
+
+ Console.WriteLine($"Current directory: {Environment.CurrentDirectory}");
+ Console.WriteLine("Running in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");
+ Console.WriteLine($"FFmpeg version info: {FFmpegVersion}");
+
+ manager = new CameraManager();
- static void Main(string[] args)
+ CameraInfo device;
+ if (args.Length < 1)
{
- // Casually displaying "Oof :(" when exiting the program with force.
- Console.CancelKeyPress += (object? _sender, ConsoleCancelEventArgs e) =>
+ /// Select an available camera device.
+ /// The manager's Devices list only gets filled when the camera manager is instantiated,
+ /// since it is not watching devices by default.
+ while (true)
{
- Console.Error.WriteLine("\n\n");
- Console.ForegroundColor = ConsoleColor.Red;
- Console.Error.WriteLine("Oof :(");
- Console.ResetColor();
- Dispose();
- };
-
- // You can add your own path for FFmpeg libraries here!
- SetupFFmpeg(
- FFmpeg.FFmpegLogLevel.Info,
- ConsoleColor.Yellow,
- AppDomain.CurrentDomain.BaseDirectory,
- "/usr/lib",
- "/usr/lib64"
- );
-
- Console.WriteLine($"Current directory: {Environment.CurrentDirectory}");
- Console.WriteLine("Running in {0}-bit mode.", Environment.Is64BitProcess ? "64" : "32");
- Console.WriteLine($"FFmpeg version info: {FFmpegVersion}");
-
- manager = new CameraManager();
-
- CameraInfo device;
- if (args.Length < 1)
- {
- /// Select an available camera device.
- /// The manager's Devices list only gets filled when the camera manager is instantiated,
- /// since it is not watching devices by default.
- while (true)
+ Console.WriteLine("\nDevices available:");
+ for (int i = 0; i < manager.Devices.Count; i++)
+ Console.WriteLine($"| #{i}: {manager.Devices[i]}");
+
+ Console.Write("\nChoose a camera by index: ");
+ Console.Out.Flush();
+ if (int.TryParse(Console.ReadLine(), out int index) && index < manager.Devices.Count && index >= 0)
{
- Console.WriteLine("\nDevices available:");
- for (int i = 0; i < manager.Devices.Count; i++)
- Console.WriteLine($"| #{i}: {manager.Devices[i]}");
-
- Console.Write("\nChoose a camera by index: ");
- Console.Out.Flush();
- if (int.TryParse(Console.ReadLine(), out int index) && index < manager.Devices.Count && index >= 0)
- {
- device = manager.Devices[index];
- break;
- }
+ device = manager.Devices[index];
+ break;
}
}
- else
- {
- device = manager.Devices.First((ci) => ci.Path == args[0]);
- }
+ }
+ else
+ {
+ device = manager.Devices.First((ci) => ci.Path == args[0]);
+ }
- /// Select video input options for the given device path.
- VideoInputOptions? vios = null;
- if (device.AvailableVideoInputOptions != null)
+ /// Select video input options for the given device path.
+ VideoInputOptions? vios = null;
+ if (device.AvailableVideoInputOptions != null)
+ {
+ while (true)
{
- while (true)
+ Console.WriteLine("\nVideo input options available:");
+ for (int i = 0; i < device.AvailableVideoInputOptions.Length; i++)
+ Console.WriteLine($"| #{i}: {device.AvailableVideoInputOptions[i]}");
+
+ Console.Write("\nChoose an input option by index: ");
+ Console.Out.Flush();
+ if (int.TryParse(Console.ReadLine(), out int index) && index < device.AvailableVideoInputOptions.Length && index >= 0)
{
- Console.WriteLine("\nVideo input options available:");
- for (int i = 0; i < device.AvailableVideoInputOptions.Length; i++)
- Console.WriteLine($"| #{i}: {device.AvailableVideoInputOptions[i]}");
-
- Console.Write("\nChoose an input option by index: ");
- Console.Out.Flush();
- if (int.TryParse(Console.ReadLine(), out int index) && index < device.AvailableVideoInputOptions.Length && index >= 0)
- {
- vios = device.AvailableVideoInputOptions[index];
- break;
- }
+ vios = device.AvailableVideoInputOptions[index];
+ break;
}
}
+ }
- /// You can get a Camera from either a string
- /// representing the device path, or a CameraInfo.
+ /// You can get a Camera from either a string
+ /// representing the device path, or a CameraInfo.
- // Unfortunately, she saw the manager
- karen = manager.GetDevice(device, vios);
+ // Unfortunately, she saw the manager
+ karen = manager.GetDevice(device, vios);
- /// Attach our method to the camera's frame event handler,
- /// so that we can process every coming frame the way we want.
- karen.OnFrame += OnFrameEventHandler;
+ /// Attach our method to the camera's frame event handler,
+ /// so that we can process every coming frame the way we want.
+ karen.OnFrame += OnFrameEventHandler;
- Console.WriteLine($"Camera chosen: {karen.Info}");
- Console.WriteLine("Press Space or P to play/pause the camera.");
- Console.WriteLine("Press Enter or Q or Escape to exit the program.");
+ Console.WriteLine($"Camera chosen: {karen.Info}");
+ Console.WriteLine("Press Space or P to play/pause the camera.");
+ Console.WriteLine("Press Enter or Q or Escape to exit the program.");
- // I could have written a simple `while (true)`, but `break` inside the `switch` below
- // would only exit the switch, not the loop. If only C# had labelled loops like Rust :(
- for (var loop = true; loop;)
+ // I could have written a simple `while (true)`, but `break` inside the `switch` below
+ // would only exit the switch, not the loop. If only C# had labelled loops like Rust :(
+ for (var loop = true; loop;)
+ {
+ Console.WriteLine("\x1b[2K\rCamera is {0}", karen.IsPlaying ? "Playing" : "Paused");
+ var cki = Console.ReadKey(true);
+ switch (cki.Key)
{
- Console.WriteLine("\x1b[2K\rCamera is {0}", karen.IsPlaying ? "Playing" : "Paused");
- var cki = Console.ReadKey(true);
- switch (cki.Key)
- {
- case ConsoleKey.P:
- case ConsoleKey.Spacebar:
- if (karen.IsPlaying)
- karen.StopCapture();
- else
- karen.StartCapture();
- Console.CursorVisible = !karen.IsPlaying;
- break;
-
- case ConsoleKey.Q:
- case ConsoleKey.Enter:
- case ConsoleKey.Escape:
- Console.CursorVisible = true;
- loop = false;
- break;
- }
+ case ConsoleKey.P:
+ case ConsoleKey.Spacebar:
+ if (karen.IsPlaying)
+ karen.StopCapture();
+ else
+ karen.StartCapture();
+ Console.CursorVisible = !karen.IsPlaying;
+ break;
+
+ case ConsoleKey.Q:
+ case ConsoleKey.Enter:
+ case ConsoleKey.Escape:
+ Console.CursorVisible = true;
+ loop = false;
+ break;
}
-
- Dispose();
-
- // Unless you filmed a shark with your camera, no.
- Console.WriteLine("\n\nDid you SeeShark? :)");
}
- static long frameCount = 0;
-
- /// <summary>
- /// Our custom frame event callback.
- /// Each time it is triggered, it will draw a new ASCII frame on the screen
- /// and update the terminal window title.
- /// </summary>
- public static void OnFrameEventHandler(object? _sender, FrameEventArgs e)
- {
- // Don't redraw the frame if it's not new, unless it's resized.
- if (e.Status != DecodeStatus.NewFrame)
- return;
+ Dispose();
- var frame = e.Frame;
- if (converter == null || Console.WindowWidth != converter.DstWidth ||
- Console.WindowHeight != converter.DstHeight)
- {
- // We can't just override the FrameConverter's DstWidth and DstHeight, due to how FFmpeg works.
- // We have to dispose the previous one and instantiate a new one with the new window size.
- converter?.Dispose();
- converter = new FrameConverter(frame, Console.WindowWidth, Console.WindowHeight, PixelFormat.Gray8);
- }
+ // Unless you filmed a shark with your camera, no.
+ Console.WriteLine("\n\nDid you SeeShark? :)");
+ }
- // Resize the frame to the size of the terminal window, then draw it in ASCII.
- Frame cFrame = converter.Convert(frame);
- DrawAsciiFrame(cFrame);
- DisplayTitle(frameCount, cFrame.Width, cFrame.Height);
+ static long frameCount = 0;
- frameCount++;
- }
+ /// <summary>
+ /// Our custom frame event callback.
+ /// Each time it is triggered, it will draw a new ASCII frame on the screen
+ /// and update the terminal window title.
+ /// </summary>
+ public static void OnFrameEventHandler(object? _sender, FrameEventArgs e)
+ {
+ // Don't redraw the frame if it's not new, unless it's resized.
+ if (e.Status != DecodeStatus.NewFrame)
+ return;
- /// <summary>
- /// Dispose our objects.
- /// </summary>
- public static void Dispose()
+ var frame = e.Frame;
+ if (converter == null || Console.WindowWidth != converter.DstWidth ||
+ Console.WindowHeight != converter.DstHeight)
{
- karen?.StopCapture();
- karen?.Dispose();
- manager?.Dispose();
+ // We can't just override the FrameConverter's DstWidth and DstHeight, due to how FFmpeg works.
+ // We have to dispose the previous one and instantiate a new one with the new window size.
converter?.Dispose();
+ converter = new FrameConverter(frame, Console.WindowWidth, Console.WindowHeight, PixelFormat.Gray8);
}
- static readonly StringBuilder builder = new StringBuilder();
- static readonly char[] asciiPixels = " `'.,-~:;<>\"^=+*!?|\\/(){}[]#&$@".ToCharArray();
-
- /// <summary>
- /// Draw a frame in ASCII art.
- /// </summary>
- /// <remarks>
- /// In this particular example we know that the frame has the Gray8 pixel format
- /// and that it has been resized to have the exact size of the terminal window.
- /// </remarks>
- /// <param name="frame">Frame containing raw Gray8 pixel data.</param>
- public static void DrawAsciiFrame(Frame frame)
+ // Resize the frame to the size of the terminal window, then draw it in ASCII.
+ Frame cFrame = converter.Convert(frame);
+ DrawAsciiFrame(cFrame);
+ DisplayTitle(frameCount, cFrame.Width, cFrame.Height);
+
+ frameCount++;
+ }
+
+ /// <summary>
+ /// Dispose our objects.
+ /// </summary>
+ public static void Dispose()
+ {
+ karen?.StopCapture();
+ karen?.Dispose();
+ manager?.Dispose();
+ converter?.Dispose();
+ }
+
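+ // The StringBuilder is reused across frames to avoid reallocating a large buffer on every frame,
+ // and the gradient below runs from visually sparse to dense so brighter pixels map to denser glyphs.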
+ static readonly StringBuilder builder = new StringBuilder();
+ static readonly char[] asciiPixels = " `'.,-~:;<>\"^=+*!?|\\/(){}[]#&$@".ToCharArray();
+
+ /// <summary>
+ /// Draw a frame in ASCII art.
+ /// </summary>
+ /// <remarks>
+ /// In this particular example we know that the frame has the Gray8 pixel format
+ /// and that it has been resized to have the exact size of the terminal window.
+ /// </remarks>
+ /// <param name="frame">Frame containing raw Gray8 pixel data.</param>
+ public static void DrawAsciiFrame(Frame frame)
+ {
+ // We don't call Console.Clear() here because it actually adds stutter.
+ // Go ahead and try this example in Alacritty to see how smooth it is!
+ builder.Clear();
+ Console.SetCursorPosition(0, 0);
+ int length = frame.Width * frame.Height;
+
+ // Since we know that the frame has the exact size of the terminal window,
+ // we have no need to add any newline characters. Thus we can just go through
+ // the entire byte array to build the ASCII converted string.
+ for (int i = 0; i < length; i++)
+ builder.Append(asciiPixels[RangeMap(frame.RawData[i], 0, 255, 0, asciiPixels.Length - 1)]);
+
+ Console.Write(builder.ToString());
+ Console.Out.Flush();
+ }
+
+ /// <summary>
+ /// Stopwatch used to measure the FPS.
+ /// </summary>
+ static readonly Stopwatch watch = new Stopwatch();
+
+ /// <summary>
+ /// Updates the title of the terminal window to display width, height and FPS information.
+ /// </summary>
+ /// <param name="frameCount">Number of frames decoded so far.</param>
+ /// <param name="width">Current terminal window width.</param>
+ /// <param name="height">Current terminal window height.</param>
+ public static void DisplayTitle(long frameCount, int width, int height)
+ {
+ if (frameCount == 0)
{
- // We don't call Console.Clear() here because it actually adds stutter.
- // Go ahead and try this example in Alacritty to see how smooth it is!
- builder.Clear();
- Console.SetCursorPosition(0, 0);
- int length = frame.Width * frame.Height;
-
- // Since we know that the frame has the exact size of the terminal window,
- // we have no need to add any newline characters. Thus we can just go through
- // the entire byte array to build the ASCII converted string.
- for (int i = 0; i < length; i++)
- builder.Append(asciiPixels[RangeMap(frame.RawData[i], 0, 255, 0, asciiPixels.Length - 1)]);
-
- Console.Write(builder.ToString());
- Console.Out.Flush();
+ watch.Start();
}
-
- /// <summary>
- /// Stopwatch used to measure the FPS.
- /// </summary>
- static readonly Stopwatch watch = new Stopwatch();
-
- /// <summary>
- /// Updates the title of the terminal window to display width, height and FPS information.
- /// </summary>
- /// <param name="frameCount">Number of frames decoded so far.</param>
- /// <param name="width">Current terminal window width.</param>
- /// <param name="height">Current terminal window height.</param>
- public static void DisplayTitle(long frameCount, int width, int height)
+ else if (frameCount % 10 == 0)
{
- if (frameCount == 0)
- {
- watch.Start();
- }
- else if (frameCount % 10 == 0)
- {
- var fps = 10_000f / watch.ElapsedMilliseconds; // 10 frames have elapsed since the last restart
- Console.Title = $"{width}x{height}@{fps:#.##}fps";
- watch.Restart();
- }
+ var fps = 10_000f / watch.ElapsedMilliseconds; // 10 frames have elapsed since the last restart
+ Console.Title = $"{width}x{height}@{fps:#.##}fps";
+ watch.Restart();
}
-
- public static int RangeMap(int x, int in_min, int in_max, int out_min, int out_max)
- => (x - in_min) * (out_max - out_min) / (in_max - in_min) + out_min;
}
+
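+ /// <summary>
+ /// Linearly remaps x from [in_min, in_max] to [out_min, out_max] using integer math.
+ /// For example, RangeMap(128, 0, 255, 0, 29) = 14: mid-gray lands near the middle of the 30-character gradient.
+ /// </summary>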
+ public static int RangeMap(int x, int in_min, int in_max, int out_min, int out_max)
+ => (x - in_min) * (out_max - out_min) / (in_max - in_min) + out_min;
}
diff --git a/SeeShark.Example.Stats/Program.cs b/SeeShark.Example.Stats/Program.cs
index 0584ea9..9bd0c00 100644
--- a/SeeShark.Example.Stats/Program.cs
+++ b/SeeShark.Example.Stats/Program.cs
@@ -8,155 +8,154 @@
using SeeShark.Device;
using static SeeShark.FFmpeg.FFmpegManager;
-namespace SeeShark.Example.Stats
+namespace SeeShark.Example.Stats;
+
+class Program
{
- class Program
+ static Camera? karen;
+ static CameraManager? manager;
+
+ static void Main(string[] args)
{
- static Camera? karen;
- static CameraManager? manager;
+ // Casually displaying "Oof :(" when exiting the program with force.
+ Console.CancelKeyPress += (object? _sender, ConsoleCancelEventArgs e) =>
+ {
+ Console.Error.WriteLine("\n\n");
+ Console.ForegroundColor = ConsoleColor.Red;
+ Console.Error.WriteLine("Oof :(");
+ Console.ResetColor();
+ Dispose();
+ };
+
+ // You can add your own path for FFmpeg libraries here!
+ SetupFFmpeg(
+ AppDomain.CurrentDomain.BaseDirectory,
+ "/usr/lib",
+ "/usr/lib64"
+ );
- static void Main(string[] args)
+ manager = new CameraManager();
+
+ string devicePath;
+ if (args.Length < 1)
{
- // Casually displaying "Oof :(" when exiting the program with force.
- Console.CancelKeyPress += (object? _sender, ConsoleCancelEventArgs e) =>
- {
- Console.Error.WriteLine("\n\n");
- Console.ForegroundColor = ConsoleColor.Red;
- Console.Error.WriteLine("Oof :(");
- Console.ResetColor();
- Dispose();
- };
-
- // You can add your own path for FFmpeg libraries here!
- SetupFFmpeg(
- AppDomain.CurrentDomain.BaseDirectory,
- "/usr/lib",
- "/usr/lib64"
- );
-
- manager = new CameraManager();
-
- string devicePath;
- if (args.Length < 1)
+ /// Select an available camera device.
+ /// The manager's Devices list only gets filled when the camera manager is instantiated,
+ /// since it is not watching devices by default.
+ while (true)
{
- /// Select an available camera device.
- /// The manager's Devices list only gets filled when the camera manager is instantiated,
- /// since it is not watching devices by default.
- while (true)
+ Console.WriteLine("\nDevices available:");
+ for (int i = 0; i < manager.Devices.Count; i++)
+ Console.WriteLine($"| #{i}: {manager.Devices[i]}");
+
+ Console.Write("\nChoose a camera by index: ");
+ Console.Out.Flush();
+ if (int.TryParse(Console.ReadLine(), out int index) && index < manager.Devices.Count && index >= 0)
{
- Console.WriteLine("\nDevices available:");
- for (int i = 0; i < manager.Devices.Count; i++)
- Console.WriteLine($"| #{i}: {manager.Devices[i]}");
-
- Console.Write("\nChoose a camera by index: ");
- Console.Out.Flush();
- if (int.TryParse(Console.ReadLine(), out int index) && index < manager.Devices.Count && index >= 0)
- {
- devicePath = manager.Devices[index].Path;
- break;
- }
+ devicePath = manager.Devices[index].Path;
+ break;
}
}
- else
- {
- devicePath = args[0];
- }
+ }
+ else
+ {
+ devicePath = args[0];
+ }
- /// You can get a Camera from either a string
- /// representing the device path, or a CameraInfo.
+ /// You can get a Camera from either a string
+ /// representing the device path, or a CameraInfo.
- // Unfortunately, she saw the manager
- karen = manager.GetDevice(devicePath);
+ // Unfortunately, she saw the manager
+ karen = manager.GetDevice(devicePath);
- /// Attach our method to the camera's frame event handler,
- /// so that we can process every coming frame the way we want.
- karen.OnFrame += OnFrameEventHandler;
+ /// Attach our method to the camera's frame event handler,
+ /// so that we can process every coming frame the way we want.
+ karen.OnFrame += OnFrameEventHandler;
- Console.WriteLine($"Camera chosen: {karen.Info}");
- Console.WriteLine("Press Space or P to play/pause the camera.");
- Console.WriteLine("Press Enter or Q or Escape to exit the program.");
+ Console.WriteLine($"Camera chosen: {karen.Info}");
+ Console.WriteLine("Press Space or P to play/pause the camera.");
+ Console.WriteLine("Press Enter or Q or Escape to exit the program.");
- // I could have written a simple `while (true)`, but `break` inside the `switch` below
- // would only exit the switch, not the loop. If only C# had labelled loops like Rust :(
- for (var loop = true; loop;)
+ // I could have written a simple `while (true)`, but `break` inside the `switch` below
+ // would only exit the switch, not the loop. If only C# had labelled loops like Rust :(
+ for (var loop = true; loop;)
+ {
+ Console.WriteLine("\x1b[2K\rCamera is {0}", karen.IsPlaying ? "Playing" : "Paused");
+ var cki = Console.ReadKey(true);
+ switch (cki.Key)
{
- Console.WriteLine("\x1b[2K\rCamera is {0}", karen.IsPlaying ? "Playing" : "Paused");
- var cki = Console.ReadKey(true);
- switch (cki.Key)
- {
- case ConsoleKey.P:
- case ConsoleKey.Spacebar:
- if (karen.IsPlaying)
- karen.StopCapture();
- else
- karen.StartCapture();
- Console.CursorVisible = !karen.IsPlaying;
- break;
-
- case ConsoleKey.Q:
- case ConsoleKey.Enter:
- case ConsoleKey.Escape:
- Console.CursorVisible = true;
- loop = false;
- break;
- }
+ case ConsoleKey.P:
+ case ConsoleKey.Spacebar:
+ if (karen.IsPlaying)
+ karen.StopCapture();
+ else
+ karen.StartCapture();
+ Console.CursorVisible = !karen.IsPlaying;
+ break;
+
+ case ConsoleKey.Q:
+ case ConsoleKey.Enter:
+ case ConsoleKey.Escape:
+ Console.CursorVisible = true;
+ loop = false;
+ break;
}
-
- Dispose();
-
- // Unless you filmed a shark with your camera, no.
- Console.WriteLine("\n\nDid you SeeShark? :)");
}
- static long frameCount = 0;
- static double fps = 0;
- static double minFps = double.PositiveInfinity;
- static double maxFps = double.NegativeInfinity;
+ Dispose();
+ // Unless you filmed a shark with your camera, no.
+ Console.WriteLine("\n\nDid you SeeShark? :)");
+ }
- /// <summary>
- /// Stopwatch used to measure the FPS.
- /// </summary>
- static readonly Stopwatch watch = new Stopwatch();
+ static long frameCount = 0;
+ static double fps = 0;
+ static double minFps = double.PositiveInfinity;
+ static double maxFps = double.NegativeInfinity;
- /// <summary>
- /// Our custom frame event callback.
- /// Each time it is triggered, it will display some simple stats in the console.
- /// </summary>
- public static void OnFrameEventHandler(object? _sender, FrameEventArgs e)
- {
- // Don't redraw the frame if it's not new.
- if (e.Status != DecodeStatus.NewFrame)
- return;
- var frame = e.Frame;
+ /// <summary>
+ /// Stopwatch used to measure the FPS.
+ /// </summary>
+ static readonly Stopwatch watch = new Stopwatch();
- #region Stats
- if (frameCount == 0)
- {
- watch.Start();
- }
- else
- {
- fps = Stopwatch.Frequency / (double)watch.ElapsedTicks; // ElapsedTicks is in Stopwatch.Frequency units
- minFps = fps < minFps ? fps : minFps;
- maxFps = fps > maxFps ? fps : maxFps;
- Console.WriteLine($"\x1b[2K\r{frame.Width}x{frame.Height} @ {fps:#.##} fps [{minFps} - {maxFps}]");
- watch.Restart();
- }
- #endregion
+ /// <summary>
+ /// Our custom frame event callback.
+ /// Each time it is triggered, it will display some simple stats in the console.
+ /// </summary>
+ public static void OnFrameEventHandler(object? _sender, FrameEventArgs e)
+ {
+ // Don't redraw the frame if it's not new.
+ if (e.Status != DecodeStatus.NewFrame)
+ return;
- frameCount++;
- }
+ var frame = e.Frame;
- /// <summary>
- /// Dispose our objects.
- /// </summary>
- public static void Dispose()
+ #region Stats
+ if (frameCount == 0)
+ {
+ watch.Start();
+ }
+ else
{
- karen?.StopCapture();
- karen?.Dispose();
- manager?.Dispose();
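+ // The stopwatch is restarted every frame, so this measures an instantaneous per-frame rate.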
+ fps = Stopwatch.Frequency / (double)watch.ElapsedTicks; // ElapsedTicks is in Stopwatch.Frequency units
+ minFps = fps < minFps ? fps : minFps;
+ maxFps = fps > maxFps ? fps : maxFps;
+ Console.WriteLine($"\x1b[2K\r{frame.Width}x{frame.Height} @ {fps:#.##} fps [{minFps} - {maxFps}]");
+ watch.Restart();
}
+ #endregion
+
+ frameCount++;
+ }
+
+ /// <summary>
+ /// Dispose our objects.
+ /// </summary>
+ public static void Dispose()
+ {
+ karen?.StopCapture();
+ karen?.Dispose();
+ manager?.Dispose();
}
}
diff --git a/SeeShark/Decode/DecodeStatus.cs b/SeeShark/Decode/DecodeStatus.cs
index a0a5166..5ce3d0e 100644
--- a/SeeShark/Decode/DecodeStatus.cs
+++ b/SeeShark/Decode/DecodeStatus.cs
@@ -2,27 +2,26 @@
// This file is part of SeeShark.
// SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
-namespace SeeShark.Decode
+namespace SeeShark.Decode;
+
+/// <summary>
+/// Describes the decoding status of a given <see cref="VideoStreamDecoder"/>
+/// after calling its <see cref="VideoStreamDecoder.TryDecodeNextFrame"/> method.
+/// </summary>
+public enum DecodeStatus
{
/// <summary>
- /// Describes the decoding status of a given <see cref="VideoStreamDecoder"/>
- /// after calling its <see cref="VideoStreamDecoder.TryDecodeNextFrame"/> method.
+ /// A new frame has been returned.
/// </summary>
- public enum DecodeStatus
- {
- /// <summary>
- /// A new frame has been returned.
- /// </summary>
- NewFrame,
- /// <summary>
- /// No new frame is available at the moment.
- /// The given <see cref="VideoStreamDecoder"/> is expected to try decoding a new frame again.
- /// </summary>
- NoFrameAvailable,
- /// <summary>
- /// Decoder reached the end of the stream.
- /// The given <see cref="VideoStreamDecoder"/> is expected to stop decoding.
- /// </summary>
- EndOfStream,
- }
+ NewFrame,
+ /// <summary>
+ /// No new frame is available at the moment.
+ /// The given <see cref="VideoStreamDecoder"/> is expected to try decoding a new frame again.
+ /// </summary>
+ NoFrameAvailable,
+ /// <summary>
+ /// Decoder reached the end of the stream.
+ /// The given <see cref="VideoStreamDecoder"/> is expected to stop decoding.
+ /// </summary>
+ EndOfStream,
}
diff --git a/SeeShark/Decode/HardwareAccelDevice.cs b/SeeShark/Decode/HardwareAccelDevice.cs
index cd95ded..c1ebc9a 100644
--- a/SeeShark/Decode/HardwareAccelDevice.cs
+++ b/SeeShark/Decode/HardwareAccelDevice.cs
@@ -4,43 +4,42 @@
using FFmpeg.AutoGen;
-namespace SeeShark.Decode
+namespace SeeShark.Decode;
+
+public enum HardwareAccelDevice : int
{
- public enum HardwareAccelDevice : int
- {
- None = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE,
- Vdpau = AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU,
- Cuda = AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
- Vaapi = AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI,
- Dxva2 = AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2,
- Qsv = AVHWDeviceType.AV_HWDEVICE_TYPE_QSV,
- Videotoolbox = AVHWDeviceType.AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
- D3D11Va = AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA,
- Drm = AVHWDeviceType.AV_HWDEVICE_TYPE_DRM,
- Opencl = AVHWDeviceType.AV_HWDEVICE_TYPE_OPENCL,
- Mediacodec = AVHWDeviceType.AV_HWDEVICE_TYPE_MEDIACODEC,
- Vulkan = AVHWDeviceType.AV_HWDEVICE_TYPE_VULKAN
- }
+ None = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE,
+ Vdpau = AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU,
+ Cuda = AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
+ Vaapi = AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI,
+ Dxva2 = AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2,
+ Qsv = AVHWDeviceType.AV_HWDEVICE_TYPE_QSV,
+ Videotoolbox = AVHWDeviceType.AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
+ D3D11Va = AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA,
+ Drm = AVHWDeviceType.AV_HWDEVICE_TYPE_DRM,
+ Opencl = AVHWDeviceType.AV_HWDEVICE_TYPE_OPENCL,
+ Mediacodec = AVHWDeviceType.AV_HWDEVICE_TYPE_MEDIACODEC,
+ Vulkan = AVHWDeviceType.AV_HWDEVICE_TYPE_VULKAN
+}
- public static class HardwareAccelDeviceExtension
+public static class HardwareAccelDeviceExtension
+{
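+ /// <summary>
+ /// Maps a hardware acceleration device type to the pixel format its decoded frames are stored in.
+ /// </summary>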
+ public static PixelFormat ToPixelFormat(this HardwareAccelDevice hwAccelDevice)
{
- public static PixelFormat ToPixelFormat(this HardwareAccelDevice hwAccelDevice)
+ return hwAccelDevice switch
{
- return hwAccelDevice switch
- {
- HardwareAccelDevice.Vdpau => PixelFormat.Vdpau,
- HardwareAccelDevice.Cuda => PixelFormat.Cuda,
- HardwareAccelDevice.Vaapi => PixelFormat.Vaapi,
- HardwareAccelDevice.Dxva2 => PixelFormat.Dxva2Vld,
- HardwareAccelDevice.Qsv => PixelFormat.Qsv,
- HardwareAccelDevice.Videotoolbox => PixelFormat.Videotoolbox,
- HardwareAccelDevice.D3D11Va => PixelFormat.D3D11VaVld,
- HardwareAccelDevice.Drm => PixelFormat.DrmPrime,
- HardwareAccelDevice.Opencl => PixelFormat.Opencl,
- HardwareAccelDevice.Mediacodec => PixelFormat.Mediacodec,
- HardwareAccelDevice.Vulkan => PixelFormat.Vulkan,
- _ => PixelFormat.None
- };
- }
+ HardwareAccelDevice.Vdpau => PixelFormat.Vdpau,
+ HardwareAccelDevice.Cuda => PixelFormat.Cuda,
+ HardwareAccelDevice.Vaapi => PixelFormat.Vaapi,
+ HardwareAccelDevice.Dxva2 => PixelFormat.Dxva2Vld,
+ HardwareAccelDevice.Qsv => PixelFormat.Qsv,
+ HardwareAccelDevice.Videotoolbox => PixelFormat.Videotoolbox,
+ HardwareAccelDevice.D3D11Va => PixelFormat.D3D11VaVld,
+ HardwareAccelDevice.Drm => PixelFormat.DrmPrime,
+ HardwareAccelDevice.Opencl => PixelFormat.Opencl,
+ HardwareAccelDevice.Mediacodec => PixelFormat.Mediacodec,
+ HardwareAccelDevice.Vulkan => PixelFormat.Vulkan,
+ _ => PixelFormat.None
+ };
}
}
diff --git a/SeeShark/Decode/HardwareAccelStreamDecoder.cs b/SeeShark/Decode/HardwareAccelStreamDecoder.cs
index 83213b7..a6c2bf2 100644
--- a/SeeShark/Decode/HardwareAccelStreamDecoder.cs
+++ b/SeeShark/Decode/HardwareAccelStreamDecoder.cs
@@ -5,44 +5,43 @@
using FFmpeg.AutoGen;
using SeeShark.FFmpeg;
-namespace SeeShark.Decode
+namespace SeeShark.Decode;
+
+/// <summary>
+/// Decodes a video stream using hardware acceleration.
+/// This may not be needed at all for the library; we will have to investigate that later.
+/// </summary>
+public unsafe class HardwareAccelVideoStreamDecoder : VideoStreamDecoder
{
- /// <summary>
- /// Decodes a video stream using hardware acceleration.
- /// This may not be needed at all for the library; we will have to investigate that later.
- /// </summary>
- public unsafe class HardwareAccelVideoStreamDecoder : VideoStreamDecoder
- {
- protected readonly Frame HwFrame;
+ protected readonly Frame HwFrame;
- public HardwareAccelVideoStreamDecoder(string url,
- HardwareAccelDevice hwAccelDevice, AVInputFormat* inputFormat = null)
- : base(url, inputFormat)
- {
- HwFrame = new Frame();
+ public HardwareAccelVideoStreamDecoder(string url,
+ HardwareAccelDevice hwAccelDevice, AVInputFormat* inputFormat = null)
+ : base(url, inputFormat)
+ {
+ HwFrame = new Frame();
- ffmpeg.av_hwdevice_ctx_create(
- &CodecContext->hw_device_ctx,
- (AVHWDeviceType)hwAccelDevice,
- null, null, 0
- ).ThrowExceptionIfError();
- }
+ ffmpeg.av_hwdevice_ctx_create(
+ &CodecContext->hw_device_ctx,
+ (AVHWDeviceType)hwAccelDevice,
+ null, null, 0
+ ).ThrowExceptionIfError();
+ }
- public new DecodeStatus TryDecodeNextFrame(out Frame nextFrame)
- {
- DecodeStatus ret = base.TryDecodeNextFrame(out var frame);
+ public new DecodeStatus TryDecodeNextFrame(out Frame nextFrame)
+ {
+ DecodeStatus ret = base.TryDecodeNextFrame(out var frame);
- frame.HardwareAccelCopyTo(HwFrame).ThrowExceptionIfError();
- nextFrame = HwFrame;
+ frame.HardwareAccelCopyTo(HwFrame).ThrowExceptionIfError();
+ nextFrame = HwFrame;
- return ret;
- }
+ return ret;
+ }
- protected override void DisposeManaged()
- {
- base.DisposeManaged();
+ protected override void DisposeManaged()
+ {
+ base.DisposeManaged();
- HwFrame.Dispose();
- }
+ HwFrame.Dispose();
}
}
diff --git a/SeeShark/Decode/VideoStreamDecoder.cs b/SeeShark/Decode/VideoStreamDecoder.cs
index 6497b78..0b1a950 100644
--- a/SeeShark/Decode/VideoStreamDecoder.cs
+++ b/SeeShark/Decode/VideoStreamDecoder.cs
@@ -11,161 +11,160 @@
using SeeShark.FFmpeg;
using static SeeShark.FFmpeg.FFmpegManager;
-namespace SeeShark.Decode
+namespace SeeShark.Decode;
+
+/// <summary>
+/// Decodes a video stream.
+/// Based on https://github.com/Ruslan-B/FFmpeg.AutoGen/blob/master/FFmpeg.AutoGen.Example/VideoStreamDecoder.cs.
+/// </summary>
+public unsafe class VideoStreamDecoder : Disposable
{
- /// <summary>
- /// Decodes a video stream.
- /// Based on https://github.com/Ruslan-B/FFmpeg.AutoGen/blob/master/FFmpeg.AutoGen.Example/VideoStreamDecoder.cs.
- /// </summary>
- public unsafe class VideoStreamDecoder : Disposable
+ protected readonly AVCodecContext* CodecContext;
+ protected readonly AVFormatContext* FormatContext;
+ protected readonly Frame Frame;
+ protected readonly AVPacket* Packet;
+ protected readonly AVStream* Stream;
+ protected readonly int StreamIndex;
+
+ public readonly string CodecName;
+ public readonly int FrameWidth;
+ public readonly int FrameHeight;
+ public readonly PixelFormat PixelFormat;
+ public AVRational Framerate => Stream->r_frame_rate;
+
+ private bool isFormatContextOpen = false;
+
+ public VideoStreamDecoder(string url, DeviceInputFormat inputFormat, IDictionary<string, string>? options = null)
+ : this(url, ffmpeg.av_find_input_format(inputFormat.ToString()), options)
{
- protected readonly AVCodecContext* CodecContext;
- protected readonly AVFormatContext* FormatContext;
- protected readonly Frame Frame;
- protected readonly AVPacket* Packet;
- protected readonly AVStream* Stream;
- protected readonly int StreamIndex;
-
- public readonly string CodecName;
- public readonly int FrameWidth;
- public readonly int FrameHeight;
- public readonly PixelFormat PixelFormat;
- public AVRational Framerate => Stream->r_frame_rate;
-
- private bool isFormatContextOpen = false;
-
- public VideoStreamDecoder(string url, DeviceInputFormat inputFormat, IDictionary<string, string>? options = null)
- : this(url, ffmpeg.av_find_input_format(inputFormat.ToString()), options)
- {
- }
-
- public VideoStreamDecoder(string url, AVInputFormat* inputFormat = null, IDictionary<string, string>? options = null)
- {
- SetupFFmpeg();
+ }
- FormatContext = ffmpeg.avformat_alloc_context();
- FormatContext->flags = ffmpeg.AVFMT_FLAG_NONBLOCK;
+ public VideoStreamDecoder(string url, AVInputFormat* inputFormat = null, IDictionary<string, string>? options = null)
+ {
+ SetupFFmpeg();
- var formatContext = FormatContext;
- AVDictionary* dict = null;
+ FormatContext = ffmpeg.avformat_alloc_context();
+ FormatContext->flags = ffmpeg.AVFMT_FLAG_NONBLOCK;
- if (options != null)
- {
- foreach (KeyValuePair<string, string> pair in options)
- ffmpeg.av_dict_set(&dict, pair.Key, pair.Value, 0);
- }
+ var formatContext = FormatContext;
+ AVDictionary* dict = null;
- int openInputErr = ffmpeg.avformat_open_input(&formatContext, url, inputFormat, &dict);
- ffmpeg.av_dict_free(&dict);
- openInputErr.ThrowExceptionIfError();
- isFormatContextOpen = true;
-
- AVCodec* codec = null;
- StreamIndex = ffmpeg
- .av_find_best_stream(formatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0)
- .ThrowExceptionIfError();
- Stream = formatContext->streams[StreamIndex];
- CodecContext = ffmpeg.avcodec_alloc_context3(codec);
-
- ffmpeg.avcodec_parameters_to_context(CodecContext, Stream->codecpar)
- .ThrowExceptionIfError();
- ffmpeg.avcodec_open2(CodecContext, codec, null).ThrowExceptionIfError();
-
- CodecName = ffmpeg.avcodec_get_name(codec->id);
- FrameWidth = CodecContext->width;
- FrameHeight = CodecContext->height;
- PixelFormat = (PixelFormat)CodecContext->pix_fmt;
-
- Packet = ffmpeg.av_packet_alloc();
- Frame = new Frame();
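+ // Caller-supplied options (such as "framerate" or "video_size") are handed to FFmpeg as an AVDictionary.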
+ if (options != null)
+ {
+ foreach (KeyValuePair<string, string> pair in options)
+ ffmpeg.av_dict_set(&dict, pair.Key, pair.Value, 0);
}
- public DecodeStatus TryDecodeNextFrame(out Frame nextFrame)
+ int openInputErr = ffmpeg.avformat_open_input(&formatContext, url, inputFormat, &dict);
+ ffmpeg.av_dict_free(&dict);
+ openInputErr.ThrowExceptionIfError();
+ isFormatContextOpen = true;
+
+ AVCodec* codec = null;
+ StreamIndex = ffmpeg
+ .av_find_best_stream(formatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0)
+ .ThrowExceptionIfError();
+ Stream = formatContext->streams[StreamIndex];
+ CodecContext = ffmpeg.avcodec_alloc_context3(codec);
+
+ ffmpeg.avcodec_parameters_to_context(CodecContext, Stream->codecpar)
+ .ThrowExceptionIfError();
+ ffmpeg.avcodec_open2(CodecContext, codec, null).ThrowExceptionIfError();
+
+ CodecName = ffmpeg.avcodec_get_name(codec->id);
+ FrameWidth = CodecContext->width;
+ FrameHeight = CodecContext->height;
+ PixelFormat = (PixelFormat)CodecContext->pix_fmt;
+
+ Packet = ffmpeg.av_packet_alloc();
+ Frame = new Frame();
+ }
+
+ public DecodeStatus TryDecodeNextFrame(out Frame nextFrame)
+ {
+ int eagain = ffmpeg.AVERROR(ffmpeg.EAGAIN);
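+ // Because the format context is opened with AVFMT_FLAG_NONBLOCK, av_read_frame
+ // returns AVERROR(EAGAIN) when no packet is ready yet instead of blocking.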
+ int error;
+
+ do
{
- int eagain = ffmpeg.AVERROR(ffmpeg.EAGAIN);
- int error;
+ #region Read frame
+ // Manually wait for a new frame instead of letting it block
+ ffmpeg.av_packet_unref(Packet);
+ error = ffmpeg.av_read_frame(FormatContext, Packet);
- do
+ if (error < 0)
{
- #region Read frame
- // Manually wait for a new frame instead of letting it block
- ffmpeg.av_packet_unref(Packet);
- error = ffmpeg.av_read_frame(FormatContext, Packet);
-
- if (error < 0)
- {
- nextFrame = Frame;
- GC.Collect();
-
- // We only wait longer once to make sure we catch the frame on time.
- return error == eagain
- ? DecodeStatus.NoFrameAvailable
- : DecodeStatus.EndOfStream;
- }
-
- error.ThrowExceptionIfError();
- #endregion
-
- #region Decode packet
- if (Packet->stream_index != StreamIndex)
- throw new InvalidOperationException("Packet does not belong to the decoder's video stream");
-
- ffmpeg.avcodec_send_packet(CodecContext, Packet).ThrowExceptionIfError();
-
- Frame.Unref();
- error = Frame.Receive(CodecContext);
- #endregion
+ nextFrame = Frame;
+ GC.Collect();
+
+ // We only wait longer once to make sure we catch the frame on time.
+ return error == eagain
+ ? DecodeStatus.NoFrameAvailable
+ : DecodeStatus.EndOfStream;
}
- while (error == eagain);
+
error.ThrowExceptionIfError();
+ #endregion
- nextFrame = Frame;
- GC.Collect();
- return DecodeStatus.NewFrame;
- }
+ #region Decode packet
+ if (Packet->stream_index != StreamIndex)
+ throw new InvalidOperationException("Packet does not belong to the decoder's video stream");
- public IReadOnlyDictionary<string, string> GetContextInfo()
- {
- AVDictionaryEntry* tag = null;
- var result = new Dictionary<string, string>();
+ ffmpeg.avcodec_send_packet(CodecContext, Packet).ThrowExceptionIfError();
- while ((tag = ffmpeg.av_dict_get(FormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
- {
- var key = Marshal.PtrToStringAnsi((IntPtr)tag->key);
- var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
+ Frame.Unref();
+ error = Frame.Receive(CodecContext);
+ #endregion
+ }
+ while (error == eagain);
+ error.ThrowExceptionIfError();
- if (key != null && value != null)
- result.Add(key, value);
- }
+ nextFrame = Frame;
+ GC.Collect();
+ return DecodeStatus.NewFrame;
+ }
- return result;
- }
+ public IReadOnlyDictionary<string, string> GetContextInfo()
+ {
+ AVDictionaryEntry* tag = null;
+ var result = new Dictionary<string, string>();
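+ // An empty key with AV_DICT_IGNORE_SUFFIX matches every entry, so this loop walks all metadata tags.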
- protected override void DisposeManaged()
+ while ((tag = ffmpeg.av_dict_get(FormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
{
- Frame.Dispose();
+ var key = Marshal.PtrToStringAnsi((IntPtr)tag->key);
+ var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
+
+ if (key != null && value != null)
+ result.Add(key, value);
}
- protected override void DisposeUnmanaged()
- {
- // Constructor initialization can fail at some points,
- // so we need to null check everything.
- // See https://github.com/vignetteapp/SeeShark/issues/27
+ return result;
+ }
- if (CodecContext != null && ffmpeg.avcodec_is_open(CodecContext) > 0)
- ffmpeg.avcodec_close(CodecContext);
+ protected override void DisposeManaged()
+ {
+ Frame.Dispose();
+ }
- if (FormatContext != null && isFormatContextOpen)
- {
- AVFormatContext* formatContext = FormatContext;
- ffmpeg.avformat_close_input(&formatContext);
- }
+ protected override void DisposeUnmanaged()
+ {
+ // Constructor initialization can fail at some points,
+ // so we need to null check everything.
+ // See https://github.com/vignetteapp/SeeShark/issues/27
- if (Packet != null)
- {
- AVPacket* packet = Packet;
- ffmpeg.av_packet_free(&packet);
- }
+ if (CodecContext != null && ffmpeg.avcodec_is_open(CodecContext) > 0)
+ ffmpeg.avcodec_close(CodecContext);
+
+ if (FormatContext != null && isFormatContextOpen)
+ {
+ AVFormatContext* formatContext = FormatContext;
+ ffmpeg.avformat_close_input(&formatContext);
+ }
+
+ if (Packet != null)
+ {
+ AVPacket* packet = Packet;
+ ffmpeg.av_packet_free(&packet);
}
}
}
diff --git a/SeeShark/Device/Camera.cs b/SeeShark/Device/Camera.cs
index 1c21e61..6e8cdf2 100644
--- a/SeeShark/Device/Camera.cs
+++ b/SeeShark/Device/Camera.cs
@@ -2,13 +2,12 @@
// This file is part of SeeShark.
// SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
-namespace SeeShark.Device
+namespace SeeShark.Device;
+
+public class Camera : VideoDevice
{
- public class Camera : VideoDevice
+ public Camera(VideoDeviceInfo info, DeviceInputFormat inputFormat, VideoInputOptions? options = null)
+ : base(info, inputFormat, options)
{
- public Camera(VideoDeviceInfo info, DeviceInputFormat inputFormat, VideoInputOptions? options = null)
- : base(info, inputFormat, options)
- {
- }
}
}
diff --git a/SeeShark/Device/CameraInfo.cs b/SeeShark/Device/CameraInfo.cs
index ca429d4..9dbed0b 100644
--- a/SeeShark/Device/CameraInfo.cs
+++ b/SeeShark/Device/CameraInfo.cs
@@ -2,9 +2,8 @@
// This file is part of SeeShark.
// SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
-namespace SeeShark.Device
+namespace SeeShark.Device;
+
+public class CameraInfo : VideoDeviceInfo
{
- public class CameraInfo : VideoDeviceInfo
- {
- }
}
diff --git a/SeeShark/Device/CameraManager.cs b/SeeShark/Device/CameraManager.cs
index dbc54ca..5eecf64 100644
--- a/SeeShark/Device/CameraManager.cs
+++ b/SeeShark/Device/CameraManager.cs
@@ -6,64 +6,63 @@
using System.Runtime.InteropServices;
using SeeShark.Utils;
-namespace SeeShark.Device
+namespace SeeShark.Device;
+
+/// <summary>
+/// Manages your camera devices. Is able to enumerate them and create new <see cref="Camera"/>s.
+/// It can also watch for available devices, firing events when they are connected or disconnected.
+/// </summary>
+public sealed unsafe class CameraManager : VideoDeviceManager<CameraInfo, Camera>
{
- /// <summary>
- /// Manages your camera devices. Is able to enumerate them and create new <see cref="Camera"/>s.
- /// It can also watch for available devices, firing events when they are connected or disconnected.
- /// </summary>
- public sealed unsafe class CameraManager : VideoDeviceManager<CameraInfo, Camera>
+ public static DeviceInputFormat DefaultInputFormat
{
- public static DeviceInputFormat DefaultInputFormat
+ get
{
- get
- {
- return OperatingSystem.IsWindows() ? DeviceInputFormat.DShow
- : OperatingSystem.IsLinux() ? DeviceInputFormat.V4l2
- : OperatingSystem.IsMacOS() ? DeviceInputFormat.AVFoundation
- : throw new NotSupportedException(
- $"Cannot find adequate camera input format for RID '{RuntimeInformation.RuntimeIdentifier}'.");
- }
+ return OperatingSystem.IsWindows() ? DeviceInputFormat.DShow
+ : OperatingSystem.IsLinux() ? DeviceInputFormat.V4l2
+ : OperatingSystem.IsMacOS() ? DeviceInputFormat.AVFoundation
+ : throw new NotSupportedException(
+ $"Cannot find adequate camera input format for RID '{RuntimeInformation.RuntimeIdentifier}'.");
}
+ }
- /// <summary>
- /// Creates a new <see cref="CameraManager"/>.
- /// It will enumerate available devices once, but won't be in a watching state.
- /// </summary>
- /// <remarks>
- /// If you don't specify any input format, it will attempt to choose one suitable for your OS platform.
- /// </remarks>
- /// <param name="inputFormat">
- /// Input format used to enumerate devices and create cameras.
- /// </param>
- public CameraManager(DeviceInputFormat? inputFormat = null) : base(inputFormat ?? DefaultInputFormat)
- {
- }
+ /// <summary>
+ /// Creates a new <see cref="CameraManager"/>.
+ /// It will enumerate available devices once, but won't be in a watching state.
+ /// </summary>
+ /// <remarks>
+ /// If you don't specify any input format, it will attempt to choose one suitable for your OS platform.
+ /// </remarks>
+ /// <param name="inputFormat">
+ /// Input format used to enumerate devices and create cameras.
+ /// </param>
+ public CameraManager(DeviceInputFormat? inputFormat = null) : base(inputFormat ?? DefaultInputFormat)
+ {
+ }
- public override Camera GetDevice(CameraInfo info, VideoInputOptions? options = null) =>
- new Camera(info, InputFormat, options);
+ public override Camera GetDevice(CameraInfo info, VideoInputOptions? options = null) =>
+ new Camera(info, InputFormat, options);
- ///
- /// <summary>
- /// Enumerates available devices.
- /// </summary>
- {
- // FFmpeg doesn't implement avdevice_list_input_sources() for the DShow input format yet.
- // See first SeeShark issue: https://github.com/vignetteapp/SeeShark/issues/1
+ /// <summary>
+ /// Enumerates available devices.
+ /// </summary>
+ protected override CameraInfo[] EnumerateDevices()
+ {
+ // FFmpeg doesn't implement avdevice_list_input_sources() for the DShow input format yet.
+ // See first SeeShark issue: https://github.com/vignetteapp/SeeShark/issues/1
- // Supported formats won't use the default method to allow better video input options handling.
- switch (InputFormat)
- {
- case DeviceInputFormat.DShow:
- return DShowUtils.EnumerateDevices();
- case DeviceInputFormat.V4l2:
- CameraInfo[] devices = base.EnumerateDevices();
- V4l2Utils.FillDeviceOptions(devices);
- return devices;
- default:
- return base.EnumerateDevices();
- }
+ // Supported formats won't use the default method to allow better video input options handling.
+ switch (InputFormat)
+ {
+ case DeviceInputFormat.DShow:
+ return DShowUtils.EnumerateDevices();
+ case DeviceInputFormat.V4l2:
+ CameraInfo[] devices = base.EnumerateDevices();
+ V4l2Utils.FillDeviceOptions(devices);
+ return devices;
+ default:
+ return base.EnumerateDevices();
}
}
}
diff --git a/SeeShark/Device/DeviceInputFormat.cs b/SeeShark/Device/DeviceInputFormat.cs
index 267830d..3cecd8d 100644
--- a/SeeShark/Device/DeviceInputFormat.cs
+++ b/SeeShark/Device/DeviceInputFormat.cs
@@ -4,80 +4,79 @@
using System;
-namespace SeeShark.Device
+namespace SeeShark.Device;
+
+/// <summary>
+/// Input format for camera devices.
+/// </summary>
+public enum DeviceInputFormat
{
+ #region Windows compatible
/// <summary>
- /// Input format for camera devices.
+ /// dshow (DirectShow) input device, the preferred option for Windows users.
+ /// See the wiki article about DirectShow
+ /// and the dshow input device documentation for more information.
/// </summary>
- public enum DeviceInputFormat
- {
- #region Windows compatible
- /// <summary>
- /// dshow (DirectShow) input device, the preferred option for Windows users.
- /// See the wiki article about DirectShow
- /// and the dshow input device documentation for more information.
- /// </summary>
- DShow,
- /// <summary>
- /// vfwcap input device.
- /// See the vfwcap input device documentation for more information.
- /// </summary>
- /// <remarks>
- /// vfwcap is outdated. Use dshow instead if possible.
- /// See <see cref="DShow"/> for more information.
- /// </remarks>
- VfWCap,
- GdiGrab,
- #endregion
+ DShow,
+ ///
+ /// <summary>
+ /// vfwcap input device.
+ /// See the vfwcap input device documentation for more information.
+ /// </summary>
+ /// <remarks>
+ /// vfwcap is outdated. Use dshow instead if possible.
+ /// See <see cref="DShow"/> for more information.
+ /// </remarks>
+ GdiGrab,
+ #endregion
- #region Linux compatible
- /// <summary>
- /// Uses the video4linux2 (or simply v4l2) input device to capture live input such as from a webcam.
- /// See the v4l2 input device documentation for more information.
- /// </summary>
- V4l2,
- X11Grab,
- #endregion
+ #region Linux compatible
+ /// <summary>
+ /// Uses the video4linux2 (or simply v4l2) input device to capture live input such as from a webcam.
+ /// See the v4l2 input device documentation for more information.
+ /// </summary>
+ V4l2,
+ X11Grab,
+ #endregion
- #region MacOS compatible
- /// <summary>
- /// OS X users can use the avfoundation
- /// and qtkit input devices for grabbing
- /// integrated iSight cameras as well as cameras connected via USB or FireWire.
- /// </summary>
- /// <remarks>
- /// AVFoundation is available on Mac OS X 10.7 (Lion) and later.
- /// Since then, Apple recommends AVFoundation for stream grabbing on OS X and iOS devices.
- /// </remarks>
- AVFoundation,
- /// <summary>
- /// OS X users can use the avfoundation
- /// and qtkit input devices for grabbing
- /// integrated iSight cameras as well as cameras connected via USB or FireWire.
- /// </summary>
- /// <remarks>
- /// QTKit is available on Mac OS X 10.4 (Tiger) and later.
- /// QTKit has been marked deprecated since OS X 10.7 (Lion) and may not be available on future releases.
- /// </remarks>
- QTKit,
- #endregion
- }
+ #region MacOS compatible
+ /// <summary>
+ /// OS X users can use the avfoundation
+ /// and qtkit input devices for grabbing
+ /// integrated iSight cameras as well as cameras connected via USB or FireWire.
+ /// </summary>
+ /// <remarks>
+ /// AVFoundation is available on Mac OS X 10.7 (Lion) and later.
+ /// Since then, Apple recommends AVFoundation for stream grabbing on OS X and iOS devices.
+ /// </remarks>
+ AVFoundation,
+ /// <summary>
+ /// OS X users can use the avfoundation
+ /// and qtkit input devices for grabbing
+ /// integrated iSight cameras as well as cameras connected via USB or FireWire.
+ /// </summary>
+ /// <remarks>
+ /// QTKit is available on Mac OS X 10.4 (Tiger) and later.
+ /// QTKit has been marked deprecated since OS X 10.7 (Lion) and may not be available on future releases.
+ /// </remarks>
+ QTKit,
+ #endregion
+}
- public static class DeviceInputFormatExtension
+public static class DeviceInputFormatExtension
+{
+ public static string ToString(this DeviceInputFormat deviceInputFormat)
{
- public static string ToString(this DeviceInputFormat deviceInputFormat)
+ return deviceInputFormat switch
{
- return deviceInputFormat switch
- {
- DeviceInputFormat.DShow => "dshow",
- DeviceInputFormat.VfWCap => "vfwcap",
- DeviceInputFormat.GdiGrab => "gdigrab",
- DeviceInputFormat.V4l2 => "v4l2",
- DeviceInputFormat.X11Grab => "x11grab",
- DeviceInputFormat.AVFoundation => "avfoundation",
- DeviceInputFormat.QTKit => "qtkit",
- _ => throw new ArgumentException("Unknown device input format"),
- };
- }
+ DeviceInputFormat.DShow => "dshow",
+ DeviceInputFormat.VfWCap => "vfwcap",
+ DeviceInputFormat.GdiGrab => "gdigrab",
+ DeviceInputFormat.V4l2 => "v4l2",
+ DeviceInputFormat.X11Grab => "x11grab",
+ DeviceInputFormat.AVFoundation => "avfoundation",
+ DeviceInputFormat.QTKit => "qtkit",
+ _ => throw new ArgumentException("Unknown device input format"),
+ };
}
}
diff --git a/SeeShark/Device/Display.cs b/SeeShark/Device/Display.cs
index a54aa30..72eb27c 100644
--- a/SeeShark/Device/Display.cs
+++ b/SeeShark/Device/Display.cs
@@ -2,13 +2,12 @@
// This file is part of SeeShark.
// SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
-namespace SeeShark.Device
+namespace SeeShark.Device;
+
+public class Display : VideoDevice
{
- public class Display : VideoDevice
+ public Display(VideoDeviceInfo info, DeviceInputFormat inputFormat, VideoInputOptions? options = null)
+ : base(info, inputFormat, options)
{
- public Display(VideoDeviceInfo info, DeviceInputFormat inputFormat, VideoInputOptions? options = null)
- : base(info, inputFormat, options)
- {
- }
}
}
diff --git a/SeeShark/Device/DisplayInfo.cs b/SeeShark/Device/DisplayInfo.cs
index c54dd98..bbaa56b 100644
--- a/SeeShark/Device/DisplayInfo.cs
+++ b/SeeShark/Device/DisplayInfo.cs
@@ -2,16 +2,15 @@
// This file is part of SeeShark.
// SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
-namespace SeeShark.Device
+namespace SeeShark.Device;
+
+public class DisplayInfo : VideoDeviceInfo
{
- public class DisplayInfo : VideoDeviceInfo
- {
- public int X { get; init; }
- public int Y { get; init; }
- public int Width { get; init; }
- public int Height { get; init; }
- public bool Primary { get; init; }
+ public int X { get; init; }
+ public int Y { get; init; }
+ public int Width { get; init; }
+ public int Height { get; init; }
+ public bool Primary { get; init; }
- public bool IsComposite { get; init; }
- }
+ public bool IsComposite { get; init; }
}
diff --git a/SeeShark/Device/DisplayManager.cs b/SeeShark/Device/DisplayManager.cs
index 2b24801..138dd13 100644
--- a/SeeShark/Device/DisplayManager.cs
+++ b/SeeShark/Device/DisplayManager.cs
@@ -8,203 +8,202 @@
using SeeShark.Interop.Windows;
using SeeShark.Interop.X11;
-namespace SeeShark.Device
+namespace SeeShark.Device;
+
+public class DisplayManager : VideoDeviceManager<DisplayInfo, Display>
{
- public class DisplayManager : VideoDeviceManager<DisplayInfo, Display>
+ public static DeviceInputFormat DefaultInputFormat
{
- public static DeviceInputFormat DefaultInputFormat
+ get
{
- get
- {
- return OperatingSystem.IsWindows() ? DeviceInputFormat.GdiGrab
- : OperatingSystem.IsLinux() ? DeviceInputFormat.X11Grab
- : OperatingSystem.IsMacOS() ? DeviceInputFormat.AVFoundation
- : throw new NotSupportedException(
- $"Cannot find adequate display input format for RID '{RuntimeInformation.RuntimeIdentifier}'.");
- }
+ return OperatingSystem.IsWindows() ? DeviceInputFormat.GdiGrab
+ : OperatingSystem.IsLinux() ? DeviceInputFormat.X11Grab
+ : OperatingSystem.IsMacOS() ? DeviceInputFormat.AVFoundation
+ : throw new NotSupportedException(
+ $"Cannot find adequate display input format for RID '{RuntimeInformation.RuntimeIdentifier}'.");
}
+ }
- public DisplayManager(DeviceInputFormat? inputFormat = null) : base(inputFormat ?? DefaultInputFormat)
- {
- }
+ public DisplayManager(DeviceInputFormat? inputFormat = null) : base(inputFormat ?? DefaultInputFormat)
+ {
+ }
- public override Display GetDevice(DisplayInfo info, VideoInputOptions? options = null)
+ public override Display GetDevice(DisplayInfo info, VideoInputOptions? options = null)
+ {
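+ // If no options were provided, derive them from the display's own size and position.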
+ if (options is { } o)
{
- if (options is { } o)
- {
- return new Display(info, InputFormat, o);
- }
- else
- {
- return new Display(info, InputFormat, generateInputOptions(info));
- }
+ return new Display(info, InputFormat, o);
}
-
- private VideoInputOptions generateInputOptions(DisplayInfo info)
+ else
{
- return new VideoInputOptions
- {
- VideoSize = (info.Width, info.Height),
- VideoPosition = (info.X, info.Y)
- };
+ return new Display(info, InputFormat, generateInputOptions(info));
}
+ }
- /// <summary>
- /// Enumerates available devices.
- /// </summary>
- protected override DisplayInfo[] EnumerateDevices()
+ private VideoInputOptions generateInputOptions(DisplayInfo info)
+ {
+ return new VideoInputOptions
{
- switch (InputFormat)
- {
- case DeviceInputFormat.X11Grab:
- return enumerateDevicesX11();
- case DeviceInputFormat.GdiGrab:
- return enumerateDevicesGdi();
- default:
- return base.EnumerateDevices();
- }
- }
+ VideoSize = (info.Width, info.Height),
+ VideoPosition = (info.X, info.Y)
+ };
+ }
- private DisplayInfo[] enumerateDevicesX11()
+ /// <summary>
+ /// Enumerates available devices.
+ /// </summary>
+ protected override DisplayInfo[] EnumerateDevices()
+ {
+ switch (InputFormat)
{
- unsafe
- {
- IntPtr display = XLib.XOpenDisplay(null);
- IntPtr rootWindow = XLib.XDefaultRootWindow(display);
- XRRMonitorInfo[] monitors = getXRandrDisplays(display, rootWindow);
-
- DisplayInfo[] info = new DisplayInfo[monitors.Length + 1];
-
- int compositeLeft = int.MaxValue;
- int compositeRight = int.MinValue;
- int compositeTop = int.MaxValue;
- int compositeBottom = int.MinValue;
-
- for (int i = 0; i < monitors.Length; i++)
- {
- XRRMonitorInfo monitor = monitors[i];
- var monitorNamePtr = XLib.XGetAtomName(display, monitor.Name);
- var monitorName = Marshal.PtrToStringAnsi(monitorNamePtr)!;
-
- string nameAddition = string.IsNullOrEmpty(monitorName) ? "" : $" ({monitorName})";
- info[i + 1] = new DisplayInfo
- {
- Name = $"Display {i}{nameAddition}",
- Path = ":0",
- X = monitor.X,
- Y = monitor.Y,
- Width = monitor.Width,
- Height = monitor.Height,
- Primary = monitor.Primary > 0,
- };
-
- if (monitor.X < compositeLeft)
- compositeLeft = monitor.X;
-
- if (monitor.X + monitor.Width > compositeRight)
- compositeRight = monitor.X + monitor.Width;
-
- if (monitor.Y < compositeTop)
- compositeTop = monitor.Y;
-
- if (monitor.Y + monitor.Height > compositeBottom)
- compositeBottom = monitor.Y + monitor.Height;
- }
-
- info[0] = new DisplayInfo
- {
- Name = $"Composite X11 Display",
- Path = ":0",
- X = compositeLeft,
- Y = compositeTop,
- Width = compositeRight - compositeLeft,
- Height = compositeBottom - compositeTop,
- Primary = false,
- IsComposite = true
- };
- return info;
- }
+ case DeviceInputFormat.X11Grab:
+ return enumerateDevicesX11();
+ case DeviceInputFormat.GdiGrab:
+ return enumerateDevicesGdi();
+ default:
+ return base.EnumerateDevices();
}
+ }
- private DisplayInfo[] enumerateDevicesGdi()
+ private DisplayInfo[] enumerateDevicesX11()
+ {
+ unsafe
{
- var displayInfo = new List<DisplayInfo>();
+ IntPtr display = XLib.XOpenDisplay(null);
+ IntPtr rootWindow = XLib.XDefaultRootWindow(display);
+ XRRMonitorInfo[] monitors = getXRandrDisplays(display, rootWindow);
- int count = 0;
+ DisplayInfo[] info = new DisplayInfo[monitors.Length + 1];
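+ // Slot 0 is reserved for a composite display covering the bounding box of all monitors.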
int compositeLeft = int.MaxValue;
int compositeRight = int.MinValue;
int compositeTop = int.MaxValue;
int compositeBottom = int.MinValue;
- bool MonitorDelegate(IntPtr hMonitor, IntPtr hdcMonitor, ref Rect lprcMonitor, IntPtr dwData)
+ for (int i = 0; i < monitors.Length; i++)
{
- var monitorInfo = new MonitorInfoEx();
- monitorInfo.size = (uint)Marshal.SizeOf(monitorInfo);
+ XRRMonitorInfo monitor = monitors[i];
+ var monitorNamePtr = XLib.XGetAtomName(display, monitor.Name);
+ var monitorName = Marshal.PtrToStringAnsi(monitorNamePtr)!;
- if (User32.GetMonitorInfo(hMonitor, ref monitorInfo))
+ string nameAddition = string.IsNullOrEmpty(monitorName) ? "" : $" ({monitorName})";
+ info[i + 1] = new DisplayInfo
{
- var info = new DevMode();
- User32.EnumDisplaySettings(monitorInfo.deviceName, -1, ref info);
-
- var d = new DISPLAY_DEVICE();
- d.cb = Marshal.SizeOf(d);
- User32.EnumDisplayDevices(monitorInfo.deviceName, 0, ref d, 0);
-
- string nameAddition = string.IsNullOrEmpty(d.DeviceString) ? "" : $" ({d.DeviceString})";
- displayInfo.Add(new DisplayInfo
- {
- Name = $"Display {count}{nameAddition}",
- Path = "desktop",
- X = info.dmPositionX,
- Y = info.dmPositionY,
- Width = info.dmPelsWidth,
- Height = info.dmPelsHeight,
- Primary = count == 0
- });
- count++;
-
- if (info.dmPositionX < compositeLeft)
- compositeLeft = info.dmPositionX;
-
- if (info.dmPositionX + info.dmPelsWidth > compositeRight)
- compositeRight = (info.dmPositionX + info.dmPelsWidth);
-
- if (info.dmPositionY < compositeTop)
- compositeTop = info.dmPositionY;
-
- if (info.dmPositionY + info.dmPelsHeight > compositeBottom)
- compositeBottom = (info.dmPositionY + info.dmPelsHeight);
-
- }
- return true;
- }
+ Name = $"Display {i}{nameAddition}",
+ Path = ":0",
+ X = monitor.X,
+ Y = monitor.Y,
+ Width = monitor.Width,
+ Height = monitor.Height,
+ Primary = monitor.Primary > 0,
+ };
- User32.EnumDisplayMonitors(IntPtr.Zero, IntPtr.Zero, MonitorDelegate, IntPtr.Zero);
+ if (monitor.X < compositeLeft)
+ compositeLeft = monitor.X;
+
+ if (monitor.X + monitor.Width > compositeRight)
+ compositeRight = monitor.X + monitor.Width;
+
+ if (monitor.Y < compositeTop)
+ compositeTop = monitor.Y;
+
+ if (monitor.Y + monitor.Height > compositeBottom)
+ compositeBottom = monitor.Y + monitor.Height;
+ }
- displayInfo.Insert(0, new DisplayInfo
+ info[0] = new DisplayInfo
{
- Name = $"Composite GDI Display",
- Path = "desktop",
+ Name = $"Composite X11 Display",
+ Path = ":0",
X = compositeLeft,
Y = compositeTop,
Width = compositeRight - compositeLeft,
Height = compositeBottom - compositeTop,
Primary = false,
IsComposite = true
- });
-
- // TODO: using a list and converting to array is ugly, try to find alternative
- return displayInfo.ToArray();
+ };
+ return info;
}
+ }
+
+ private DisplayInfo[] enumerateDevicesGdi()
+ {
+ var displayInfo = new List<DisplayInfo>();
+
+ int count = 0;
- private unsafe XRRMonitorInfo[] getXRandrDisplays(IntPtr display, IntPtr rootWindow)
+ int compositeLeft = int.MaxValue;
+ int compositeRight = int.MinValue;
+ int compositeTop = int.MaxValue;
+ int compositeBottom = int.MinValue;
+
+ bool MonitorDelegate(IntPtr hMonitor, IntPtr hdcMonitor, ref Rect lprcMonitor, IntPtr dwData)
{
- XRRMonitorInfo* xRandrMonitors = XRandr.XRRGetMonitors(display, rootWindow, true, out int count);
- XRRMonitorInfo[] monitors = new XRRMonitorInfo[count];
- for (int i = 0; i < count; i++)
- monitors[i] = xRandrMonitors[i];
- return monitors;
+ var monitorInfo = new MonitorInfoEx();
+ monitorInfo.size = (uint)Marshal.SizeOf(monitorInfo);
+
+ if (User32.GetMonitorInfo(hMonitor, ref monitorInfo))
+ {
+ var info = new DevMode();
+ User32.EnumDisplaySettings(monitorInfo.deviceName, -1, ref info);
+
+ var d = new DISPLAY_DEVICE();
+ d.cb = Marshal.SizeOf(d);
+ User32.EnumDisplayDevices(monitorInfo.deviceName, 0, ref d, 0);
+
+ string nameAddition = string.IsNullOrEmpty(d.DeviceString) ? "" : $" ({d.DeviceString})";
+ displayInfo.Add(new DisplayInfo
+ {
+ Name = $"Display {count}{nameAddition}",
+ Path = "desktop",
+ X = info.dmPositionX,
+ Y = info.dmPositionY,
+ Width = info.dmPelsWidth,
+ Height = info.dmPelsHeight,
+ Primary = count == 0
+ });
+ count++;
+
+ if (info.dmPositionX < compositeLeft)
+ compositeLeft = info.dmPositionX;
+
+ if (info.dmPositionX + info.dmPelsWidth > compositeRight)
+ compositeRight = (info.dmPositionX + info.dmPelsWidth);
+
+ if (info.dmPositionY < compositeTop)
+ compositeTop = info.dmPositionY;
+
+ if (info.dmPositionY + info.dmPelsHeight > compositeBottom)
+ compositeBottom = (info.dmPositionY + info.dmPelsHeight);
+
+ }
+ return true;
}
+
+ User32.EnumDisplayMonitors(IntPtr.Zero, IntPtr.Zero, MonitorDelegate, IntPtr.Zero);
+
+ displayInfo.Insert(0, new DisplayInfo
+ {
+ Name = $"Composite GDI Display",
+ Path = "desktop",
+ X = compositeLeft,
+ Y = compositeTop,
+ Width = compositeRight - compositeLeft,
+ Height = compositeBottom - compositeTop,
+ Primary = false,
+ IsComposite = true
+ });
+
+ // TODO: using a list and converting to array is ugly, try to find alternative
+ return displayInfo.ToArray();
+ }
+
+ private unsafe XRRMonitorInfo[] getXRandrDisplays(IntPtr display, IntPtr rootWindow)
+ {
+ XRRMonitorInfo* xRandrMonitors = XRandr.XRRGetMonitors(display, rootWindow, true, out int count);
+ XRRMonitorInfo[] monitors = new XRRMonitorInfo[count];
+ for (int i = 0; i < count; i++)
+ monitors[i] = xRandrMonitors[i];
+ return monitors;
}
}
diff --git a/SeeShark/Device/VideoDevice.cs b/SeeShark/Device/VideoDevice.cs
index f7e3578..d416d17 100644
--- a/SeeShark/Device/VideoDevice.cs
+++ b/SeeShark/Device/VideoDevice.cs
@@ -6,113 +6,112 @@
using System.Threading;
using SeeShark.Decode;
-namespace SeeShark.Device
+namespace SeeShark.Device;
+
+public class VideoDevice : Disposable
{
- public class VideoDevice : Disposable
- {
- private Thread? decodingThread;
- private readonly VideoStreamDecoder decoder;
+ private Thread? decodingThread;
+ private readonly VideoStreamDecoder decoder;
- public VideoDeviceInfo Info { get; }
- public bool IsPlaying { get; private set; }
+ public VideoDeviceInfo Info { get; }
+ public bool IsPlaying { get; private set; }
- public event EventHandler? OnFrame;
- public event EventHandler? OnEndOfStream;
+ public event EventHandler<FrameEventArgs>? OnFrame;
+ public event EventHandler<DecodeStatus>? OnEndOfStream;
- public VideoDevice(VideoDeviceInfo info, DeviceInputFormat inputFormat, VideoInputOptions? options = null)
- {
- Info = info;
- decoder = new VideoStreamDecoder(info.Path, inputFormat, options?.ToAVDictOptions(inputFormat));
- }
+ public VideoDevice(VideoDeviceInfo info, DeviceInputFormat inputFormat, VideoInputOptions? options = null)
+ {
+ Info = info;
+ decoder = new VideoStreamDecoder(info.Path, inputFormat, options?.ToAVDictOptions(inputFormat));
+ }
- protected void DecodeLoop()
+ protected void DecodeLoop()
+ {
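+ // Runs on the decoding thread until the stream ends or StopCapture() is called.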
+ DecodeStatus status;
+ while ((status = TryGetFrame(out var frame)) != DecodeStatus.EndOfStream)
{
- DecodeStatus status;
- while ((status = TryGetFrame(out var frame)) != DecodeStatus.EndOfStream)
- {
- OnFrame?.Invoke(this, new FrameEventArgs(frame, status));
+ OnFrame?.Invoke(this, new FrameEventArgs(frame, status));
- if (!IsPlaying)
- break;
- }
-
- // End of stream happened
- OnEndOfStream?.Invoke(this, status);
- IsPlaying = false;
+ if (!IsPlaying)
+ break;
}
- ///
- /// Decodes the next frame from the stream.
- ///
- ///
- /// This operation is blocking.
- /// If you want a synchronous non-blocking solution, use .
- /// If you want an asynchronous solution, use the event instead
- /// and toggle capture with and .
- ///
- /// The decoded frame.
- public Frame GetFrame()
+ // End of stream happened
+ OnEndOfStream?.Invoke(this, status);
+ IsPlaying = false;
+ }
+
+ /// <summary>
+ /// Decodes the next frame from the stream.
+ /// </summary>
+ /// <remarks>
+ /// This operation is blocking.
+ /// If you want a synchronous non-blocking solution, use <see cref="TryGetFrame"/>.
+ /// If you want an asynchronous solution, use the <see cref="OnFrame"/> event instead
+ /// and toggle capture with <see cref="StartCapture"/> and <see cref="StopCapture"/>.
+ /// </remarks>
+ /// <returns>The decoded frame.</returns>
+ public Frame GetFrame()
+ {
+ while (true)
{
- while (true)
+ switch (TryGetFrame(out var frame))
{
- switch (TryGetFrame(out var frame))
- {
- case DecodeStatus.NewFrame:
- return frame;
- case DecodeStatus.EndOfStream:
- throw new InvalidOperationException("End of stream");
- }
+ case DecodeStatus.NewFrame:
+ return frame;
+ case DecodeStatus.EndOfStream:
+ throw new InvalidOperationException("End of stream");
}
}
+ }
- ///
- /// Tries to decode the next frame from the stream.
- ///
- ///
- /// This operation is non-blocking.
- /// If you want a synchronous blocking solution, use .
- /// If you want an asynchronous solution, use the event instead
- /// and toggle capture with and .
- ///
- public DecodeStatus TryGetFrame(out Frame frame)
- {
- DecodeStatus status = decoder.TryDecodeNextFrame(out frame);
+ /// <summary>
+ /// Tries to decode the next frame from the stream.
+ /// </summary>
+ /// <remarks>
+ /// This operation is non-blocking.
+ /// If you want a synchronous blocking solution, use <see cref="GetFrame"/>.
+ /// If you want an asynchronous solution, use the <see cref="OnFrame"/> event instead
+ /// and toggle capture with <see cref="StartCapture"/> and <see cref="StopCapture"/>.
+ /// </remarks>
+ public DecodeStatus TryGetFrame(out Frame frame)
+ {
+ DecodeStatus status = decoder.TryDecodeNextFrame(out frame);
- // Wait 1/4 of configured FPS only if no frame is available.
- // This circumvents possible camera buffering issues.
- // Some cameras have adaptive FPS, so the previous solution isn't adapted.
- // See https://github.com/vignetteapp/SeeShark/issues/29
+ // Wait 1/4 of configured FPS only if no frame is available.
+ // This circumvents possible camera buffering issues.
+ // Some cameras have adaptive FPS, so the previous fixed-delay solution isn't suitable.
+ // See https://github.com/vignetteapp/SeeShark/issues/29
- // (RIP big brain move to avoid overloading the CPU...)
- if (status == DecodeStatus.NoFrameAvailable)
- Thread.Sleep(1000 * decoder.Framerate.den / (decoder.Framerate.num * 4));
+ // (RIP big brain move to avoid overloading the CPU...)
+ if (status == DecodeStatus.NoFrameAvailable)
+ Thread.Sleep(1000 * decoder.Framerate.den / (decoder.Framerate.num * 4));
- return status;
- }
+ return status;
+ }
- public void StopCapture()
- {
- if (!IsPlaying)
- return;
+ public void StopCapture()
+ {
+ if (!IsPlaying)
+ return;
- IsPlaying = false;
- decodingThread?.Join();
- }
+ IsPlaying = false;
+ decodingThread?.Join();
+ }
- public void StartCapture()
- {
- if (IsPlaying)
- return;
+ public void StartCapture()
+ {
+ if (IsPlaying)
+ return;
- IsPlaying = true;
- decodingThread = new Thread(DecodeLoop);
- decodingThread.Start();
- }
+ IsPlaying = true;
+ decodingThread = new Thread(DecodeLoop);
+ decodingThread.Start();
+ }
- protected override void DisposeManaged()
- {
- StopCapture();
- decoder.Dispose();
- }
+ protected override void DisposeManaged()
+ {
+ StopCapture();
+ decoder.Dispose();
}
}
diff --git a/SeeShark/Device/VideoDeviceInfo.cs b/SeeShark/Device/VideoDeviceInfo.cs
index 57761a9..d833470 100644
--- a/SeeShark/Device/VideoDeviceInfo.cs
+++ b/SeeShark/Device/VideoDeviceInfo.cs
@@ -4,33 +4,32 @@
using System;
-namespace SeeShark.Device
+namespace SeeShark.Device;
+
+/// <summary>
+/// Various information about the camera device.
+/// </summary>
+public class VideoDeviceInfo : IEquatable<VideoDeviceInfo>
{
///
- /// Various information about the camera device.
+ /// Name of the camera. Can be null.
///
- public class VideoDeviceInfo : IEquatable
- {
- ///
- /// Name of the camera. Can be null.
- ///
- public string? Name { get; internal set; }
- ///
- /// Path of the camera device. It can be anything from a file on the system (on Linux for instance) or a UUID (on Windows for example).
- ///
- public string Path { get; internal set; } = "";
- ///
- /// Available sets of video input options for this device.
- ///
- public VideoInputOptions[]? AvailableVideoInputOptions { get; internal set; }
+ public string? Name { get; internal set; }
+ /// <summary>
+ /// Path of the camera device. It can be anything from a file on the system (on Linux for instance) or a UUID (on Windows for example).
+ /// </summary>
+ public string Path { get; internal set; } = "";
+ /// <summary>
+ /// Available sets of video input options for this device.
+ /// </summary>
+ public VideoInputOptions[]? AvailableVideoInputOptions { get; internal set; }
- public bool Equals(VideoDeviceInfo? other) => Path == other?.Path;
- public override bool Equals(object? obj) => obj is VideoDeviceInfo info && Equals(info);
- public override int GetHashCode() => Path.GetHashCode();
+ public bool Equals(VideoDeviceInfo? other) => Path == other?.Path;
+ public override bool Equals(object? obj) => obj is VideoDeviceInfo info && Equals(info);
+ public override int GetHashCode() => Path.GetHashCode();
- public static bool operator ==(VideoDeviceInfo left, VideoDeviceInfo right) => left.Equals(right);
- public static bool operator !=(VideoDeviceInfo left, VideoDeviceInfo right) => !(left == right);
+ public static bool operator ==(VideoDeviceInfo left, VideoDeviceInfo right) => left.Equals(right);
+ public static bool operator !=(VideoDeviceInfo left, VideoDeviceInfo right) => !(left == right);
- public override string? ToString() => Name == null ? Path : $"{Name} ({Path})";
- }
+ public override string? ToString() => Name == null ? Path : $"{Name} ({Path})";
}
diff --git a/SeeShark/Device/VideoDeviceManager.cs b/SeeShark/Device/VideoDeviceManager.cs
index 7bb41ec..7a41b45 100644
--- a/SeeShark/Device/VideoDeviceManager.cs
+++ b/SeeShark/Device/VideoDeviceManager.cs
@@ -10,135 +10,134 @@
using SeeShark.FFmpeg;
using static SeeShark.FFmpeg.FFmpegManager;
-namespace SeeShark.Device
+namespace SeeShark.Device;
+
+/// <summary>
+/// Manages your video devices. Is able to enumerate them and create new <see cref="VideoDevice"/>s.
+/// It can also watch for available devices, and fire up <see cref="OnNewDevice"/> and
+/// <see cref="OnLostDevice"/> events when it happens.
+/// </summary>
+public abstract unsafe class VideoDeviceManager<T, TDeviceInfo> : Disposable
+ where T : VideoDevice
+ where TDeviceInfo : VideoDeviceInfo, new()
{
+ protected readonly AVInputFormat* AvInputFormat;
+ protected Timer DeviceWatcher;
+
+ /// <summary>
+ /// Whether this <see cref="VideoDeviceManager{T, TDeviceInfo}"/> is watching for devices.
+ /// </summary>
+ public bool IsWatching { get; protected set; }
+
+ /// <summary>
+ /// Input format used by this <see cref="VideoDeviceManager{T, TDeviceInfo}"/> to watch devices.
+ /// </summary>
+ public DeviceInputFormat InputFormat { get; protected set; }
+
+ /// <summary>
+ /// List of all the available video devices.
+ /// </summary>
+ public ImmutableList<TDeviceInfo> Devices { get; protected set; } = ImmutableList<TDeviceInfo>.Empty;
+
+ /// <summary>
+ /// Invoked when a video device has been connected.
+ /// </summary>
+ public event Action<TDeviceInfo>? OnNewDevice;
+
///
- /// Manages your video devices. Is able to enumerate them and create new s.
- /// It can also watch for available devices, and fire up and
- /// events when it happens.
+ /// Invoked when a video device has been disconnected.
///
- public abstract unsafe class VideoDeviceManager : Disposable
- where T : VideoDevice
- where TDeviceInfo : VideoDeviceInfo, new()
+ public event Action<TDeviceInfo>? OnLostDevice;
+
+ protected VideoDeviceManager(DeviceInputFormat inputFormat)
{
- protected readonly AVInputFormat* AvInputFormat;
- protected Timer DeviceWatcher;
-
- ///
- /// Whether this is watching for devices.
- ///
- public bool IsWatching { get; protected set; }
-
- ///
- /// Input format used by this to watch devices.
- ///
- public DeviceInputFormat InputFormat { get; protected set; }
-
- ///
- /// List of all the available video devices.
- ///
- public ImmutableList Devices { get; protected set; } = ImmutableList.Empty;
-
- ///
- /// Invoked when a video device has been connected.
- ///
- public event Action? OnNewDevice;
-
- ///
- /// Invoked when a video device has been disconnected.
- ///
- public event Action? OnLostDevice;
-
- protected VideoDeviceManager(DeviceInputFormat inputFormat)
- {
- SetupFFmpeg();
+ SetupFFmpeg();
- InputFormat = inputFormat;
- AvInputFormat = ffmpeg.av_find_input_format(InputFormat.ToString());
+ InputFormat = inputFormat;
+ AvInputFormat = ffmpeg.av_find_input_format(InputFormat.ToString());
- SyncDevices();
- DeviceWatcher = new Timer(
- (_state) => SyncDevices(),
- null, Timeout.InfiniteTimeSpan, Timeout.InfiniteTimeSpan
- );
+ SyncDevices();
+ DeviceWatcher = new Timer(
+ (_state) => SyncDevices(),
+ null, Timeout.InfiniteTimeSpan, Timeout.InfiniteTimeSpan
+ );
- IsWatching = false;
- }
+ IsWatching = false;
+ }
- public abstract T GetDevice(TDeviceInfo info, VideoInputOptions? options = null);
- public T GetDevice(int index = 0, VideoInputOptions? options = null) =>
- GetDevice(Devices[index], options);
- public T GetDevice(string path, VideoInputOptions? options = null) =>
- GetDevice(Devices.First((ci) => ci.Path == path), options);
+ public abstract T GetDevice(TDeviceInfo info, VideoInputOptions? options = null);
+ public T GetDevice(int index = 0, VideoInputOptions? options = null) =>
+ GetDevice(Devices[index], options);
+ public T GetDevice(string path, VideoInputOptions? options = null) =>
+ GetDevice(Devices.First((ci) => ci.Path == path), options);
- ///
- /// Starts watching for available devices.
- ///
- public void StartWatching()
- {
- DeviceWatcher.Change(TimeSpan.Zero, TimeSpan.FromSeconds(1));
- IsWatching = true;
- }
+ /// <summary>
+ /// Starts watching for available devices.
+ /// </summary>
+ public void StartWatching()
+ {
+ DeviceWatcher.Change(TimeSpan.Zero, TimeSpan.FromSeconds(1));
+ IsWatching = true;
+ }
- ///
- /// Stops watching for available devices.
- ///
- public void StopWatching()
- {
- DeviceWatcher.Change(Timeout.InfiniteTimeSpan, Timeout.InfiniteTimeSpan);
- IsWatching = false;
- }
+ /// <summary>
+ /// Stops watching for available devices.
+ /// </summary>
+ public void StopWatching()
+ {
+ DeviceWatcher.Change(Timeout.InfiniteTimeSpan, Timeout.InfiniteTimeSpan);
+ IsWatching = false;
+ }
+
+ protected virtual TDeviceInfo[] EnumerateDevices()
+ {
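+ // Ask FFmpeg's avdevice to list the input sources available for this input format.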
+ AVDeviceInfoList* avDeviceInfoList = null;
+ ffmpeg.avdevice_list_input_sources(AvInputFormat, null, null, &avDeviceInfoList).ThrowExceptionIfError();
+ int nDevices = avDeviceInfoList->nb_devices;
+ AVDeviceInfo** avDevices = avDeviceInfoList->devices;
- protected virtual TDeviceInfo[] EnumerateDevices()
+ TDeviceInfo[] devices = new TDeviceInfo[nDevices];
+ for (int i = 0; i < nDevices; i++)
{
- AVDeviceInfoList* avDeviceInfoList = null;
- ffmpeg.avdevice_list_input_sources(AvInputFormat, null, null, &avDeviceInfoList).ThrowExceptionIfError();
- int nDevices = avDeviceInfoList->nb_devices;
- AVDeviceInfo** avDevices = avDeviceInfoList->devices;
+ AVDeviceInfo* avDevice = avDevices[i];
+ string name = new string((sbyte*)avDevice->device_description);
+ string path = new string((sbyte*)avDevice->device_name);
- TDeviceInfo[] devices = new TDeviceInfo[nDevices];
- for (int i = 0; i < nDevices; i++)
+ if (path == null)
+ throw new InvalidOperationException($"Device at index {i} doesn't have a path!");
+
+ devices[i] = new TDeviceInfo
{
- AVDeviceInfo* avDevice = avDevices[i];
- string name = new string((sbyte*)avDevice->device_description);
- string path = new string((sbyte*)avDevice->device_name);
-
- if (path == null)
- throw new InvalidOperationException($"Device at index {i} doesn't have a path!");
-
- devices[i] = new TDeviceInfo
- {
- Name = name,
- Path = path,
- };
- }
-
- ffmpeg.avdevice_free_list_devices(&avDeviceInfoList);
- return devices;
+ Name = name,
+ Path = path,
+ };
}
- ///
- /// Looks for available devices and triggers and events.
- ///
- public void SyncDevices()
- {
- ImmutableList newDevices = EnumerateDevices().ToImmutableList();
+ ffmpeg.avdevice_free_list_devices(&avDeviceInfoList);
+ return devices;
+ }
- if (Devices.SequenceEqual(newDevices))
- return;
+ /// <summary>
+ /// Looks for available devices and triggers <see cref="OnNewDevice"/> and <see cref="OnLostDevice"/> events.
+ /// </summary>
+ public void SyncDevices()
+ {
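+ // Diff the previous and current device lists to raise the corresponding events.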
+ ImmutableList<TDeviceInfo> newDevices = EnumerateDevices().ToImmutableList();
- foreach (TDeviceInfo device in newDevices.Except(Devices))
- OnNewDevice?.Invoke(device);
+ if (Devices.SequenceEqual(newDevices))
+ return;
- foreach (TDeviceInfo device in Devices.Except(newDevices))
- OnLostDevice?.Invoke(device);
+ foreach (TDeviceInfo device in newDevices.Except(Devices))
+ OnNewDevice?.Invoke(device);
- Devices = newDevices;
- }
+ foreach (TDeviceInfo device in Devices.Except(newDevices))
+ OnLostDevice?.Invoke(device);
- protected override void DisposeManaged()
- {
- DeviceWatcher.Dispose();
- }
+ Devices = newDevices;
+ }
+
+ protected override void DisposeManaged()
+ {
+ DeviceWatcher.Dispose();
}
}
diff --git a/SeeShark/Disposable.cs b/SeeShark/Disposable.cs
index 984fa7e..3da3adb 100644
--- a/SeeShark/Disposable.cs
+++ b/SeeShark/Disposable.cs
@@ -5,57 +5,56 @@
using System;
using System.Threading;
-namespace SeeShark
+namespace SeeShark;
+
+/// <summary>
+/// Based on OpenCvSharp.
+/// </summary>
+public abstract class Disposable : IDisposable
{
- ///
- /// based on OpenCvSharp
- ///
- public abstract class Disposable : IDisposable
- {
- private volatile int disposeSignaled = 0;
+ private volatile int disposeSignaled = 0;
- public bool IsDisposed { get; protected set; }
- protected bool IsOwner { get; private set; }
+ public bool IsDisposed { get; protected set; }
+ protected bool IsOwner { get; private set; }
- protected Disposable(bool isOwner = true)
- {
- IsDisposed = false;
- IsOwner = isOwner;
- }
+ protected Disposable(bool isOwner = true)
+ {
+ IsDisposed = false;
+ IsOwner = isOwner;
+ }
- public void Dispose()
- {
- Dispose(true);
- GC.SuppressFinalize(this);
- }
+ public void Dispose()
+ {
+ Dispose(true);
+ GC.SuppressFinalize(this);
+ }
- protected virtual void Dispose(bool disposing)
- {
- if (Interlocked.Exchange(ref disposeSignaled, 1) != 0)
- return;
+ protected virtual void Dispose(bool disposing)
+ {
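+ // Interlocked.Exchange guarantees the dispose body runs at most once, even across threads.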
+ if (Interlocked.Exchange(ref disposeSignaled, 1) != 0)
+ return;
- IsDisposed = true;
+ IsDisposed = true;
- if (disposing)
- DisposeManaged();
+ if (disposing)
+ DisposeManaged();
- DisposeUnmanaged();
- }
+ DisposeUnmanaged();
+ }
- ~Disposable()
- {
- Dispose(false);
- }
+ ~Disposable()
+ {
+ Dispose(false);
+ }
- protected virtual void DisposeManaged() { }
- protected virtual void DisposeUnmanaged() { }
+ protected virtual void DisposeManaged() { }
+ protected virtual void DisposeUnmanaged() { }
- public void TransferOwnership() => IsOwner = false;
+ public void TransferOwnership() => IsOwner = false;
- public void ThrowIfDisposed()
- {
- if (IsDisposed)
- throw new ObjectDisposedException(GetType().FullName);
- }
+ public void ThrowIfDisposed()
+ {
+ if (IsDisposed)
+ throw new ObjectDisposedException(GetType().FullName);
}
}
diff --git a/SeeShark/FFmpeg/FFmpegHelper.cs b/SeeShark/FFmpeg/FFmpegHelper.cs
index db3f137..bd98415 100644
--- a/SeeShark/FFmpeg/FFmpegHelper.cs
+++ b/SeeShark/FFmpeg/FFmpegHelper.cs
@@ -6,24 +6,23 @@
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;
-namespace SeeShark.FFmpeg
+namespace SeeShark.FFmpeg;
+
+internal static class FFmpegHelper
{
- internal static class FFmpegHelper
+ public static unsafe string? AvStrerror(int error)
{
- public static unsafe string? AvStrerror(int error)
- {
- var bufferSize = 1024;
- var buffer = stackalloc byte[bufferSize];
- ffmpeg.av_strerror(error, buffer, (ulong)bufferSize);
- var message = Marshal.PtrToStringAnsi((IntPtr)buffer);
- return message;
- }
+ var bufferSize = 1024;
+ var buffer = stackalloc byte[bufferSize];
+ ffmpeg.av_strerror(error, buffer, (ulong)bufferSize);
+ var message = Marshal.PtrToStringAnsi((IntPtr)buffer);
+ return message;
+ }
- public static int ThrowExceptionIfError(this int error)
- {
- if (error < 0)
- throw new ApplicationException(AvStrerror(error));
- return error;
- }
+ public static int ThrowExceptionIfError(this int error)
+ {
+ if (error < 0)
+ throw new ApplicationException(AvStrerror(error));
+ return error;
}
}
diff --git a/SeeShark/FFmpeg/FFmpegLogLevel.cs b/SeeShark/FFmpeg/FFmpegLogLevel.cs
index 8f8bf84..b215365 100644
--- a/SeeShark/FFmpeg/FFmpegLogLevel.cs
+++ b/SeeShark/FFmpeg/FFmpegLogLevel.cs
@@ -4,24 +4,23 @@
using FFmpeg.AutoGen;
-namespace SeeShark.FFmpeg
+namespace SeeShark.FFmpeg;
+
+/// <summary>
+/// Log level for FFmpeg.
+/// </summary>
+public enum FFmpegLogLevel : int
{
- ///
- /// Log level for FFmpeg.
- ///
- public enum FFmpegLogLevel : int
- {
- Quiet = ffmpeg.AV_LOG_QUIET,
- Panic = ffmpeg.AV_LOG_PANIC,
- SkipRepeated = ffmpeg.AV_LOG_SKIP_REPEATED,
- PrintLevel = ffmpeg.AV_LOG_PRINT_LEVEL,
- Fatal = ffmpeg.AV_LOG_FATAL,
- Error = ffmpeg.AV_LOG_ERROR,
- Warning = ffmpeg.AV_LOG_WARNING,
- Info = ffmpeg.AV_LOG_INFO,
- Verbose = ffmpeg.AV_LOG_VERBOSE,
- Debug = ffmpeg.AV_LOG_DEBUG,
- Trace = ffmpeg.AV_LOG_TRACE,
- MaxOffset = ffmpeg.AV_LOG_MAX_OFFSET,
- }
+ Quiet = ffmpeg.AV_LOG_QUIET,
+ Panic = ffmpeg.AV_LOG_PANIC,
+ SkipRepeated = ffmpeg.AV_LOG_SKIP_REPEATED,
+ PrintLevel = ffmpeg.AV_LOG_PRINT_LEVEL,
+ Fatal = ffmpeg.AV_LOG_FATAL,
+ Error = ffmpeg.AV_LOG_ERROR,
+ Warning = ffmpeg.AV_LOG_WARNING,
+ Info = ffmpeg.AV_LOG_INFO,
+ Verbose = ffmpeg.AV_LOG_VERBOSE,
+ Debug = ffmpeg.AV_LOG_DEBUG,
+ Trace = ffmpeg.AV_LOG_TRACE,
+ MaxOffset = ffmpeg.AV_LOG_MAX_OFFSET,
}
diff --git a/SeeShark/FFmpeg/FFmpegManager.cs b/SeeShark/FFmpeg/FFmpegManager.cs
index a17c452..794392f 100644
--- a/SeeShark/FFmpeg/FFmpegManager.cs
+++ b/SeeShark/FFmpeg/FFmpegManager.cs
@@ -11,201 +11,200 @@
using LF = SeeShark.FFmpeg.LibraryFlags;
using LibraryLoader = FFmpeg.AutoGen.Native.LibraryLoader;
-namespace SeeShark.FFmpeg
+namespace SeeShark.FFmpeg;
+
+public static class FFmpegManager
{
- public static class FFmpegManager
- {
- ///
- /// Whether FFmpeg has been setup.
- ///
- public static bool IsFFmpegSetup { get; private set; } = false;
+ /// <summary>
+ /// Whether FFmpeg has been set up.
+ /// </summary>
+ public static bool IsFFmpegSetup { get; private set; } = false;
- ///
- /// Set that to true if you're struggling to setup FFmpeg properly.
- ///
- public static bool LogLibrarySearch { get; set; } = false;
+ /// <summary>
+ /// Set this to true if you're struggling to set up FFmpeg properly.
+ /// </summary>
+ public static bool LogLibrarySearch { get; set; } = false;
- private static void llsLog(string message)
+ private static void llsLog(string message)
+ {
+ if (LogLibrarySearch)
{
- if (LogLibrarySearch)
- {
- Console.ForegroundColor = ConsoleColor.DarkCyan;
- Console.Error.WriteLine(message);
- Console.ResetColor();
- }
+ Console.ForegroundColor = ConsoleColor.DarkCyan;
+ Console.Error.WriteLine(message);
+ Console.ResetColor();
}
+ }
- ///
- /// Informative version string. It is usually the actual release version number or a git commit description. It has no fixed format and can change any time. It should never be parsed by code.
- ///
- /// Note: fetching this value will setup FFmpeg if it hasn't been done before.
- ///
- public static string FFmpegVersion
+ /// <summary>
+ /// Informative version string. It is usually the actual release version number or a git commit description. It has no fixed format and can change any time. It should never be parsed by code.
+ ///
+ /// Note: fetching this value will set up FFmpeg if it hasn't been done before.
+ /// </summary>
+ public static string FFmpegVersion
+ {
+ get
{
- get
- {
- SetupFFmpeg();
- return ffmpeg.av_version_info();
- }
+ SetupFFmpeg();
+ return ffmpeg.av_version_info();
}
+ }
- ///
- /// Root path for loading FFmpeg libraries.
- ///
- public static string FFmpegRootPath
- {
- get => ffmpeg.RootPath;
- private set => ffmpeg.RootPath = value;
- }
+ /// <summary>
+ /// Root path for loading FFmpeg libraries.
+ /// </summary>
+ public static string FFmpegRootPath
+ {
+ get => ffmpeg.RootPath;
+ private set => ffmpeg.RootPath = value;
+ }
- private static av_log_set_callback_callback? logCallback;
-
- ///
- /// Setup FFmpeg: root path and logging.
- ///
- /// It will only setup FFmpeg once. Any non-first call will do nothing.
- ///
- /// SeeShark is designed such that this method is called whenever it is
- /// necessary to have FFmpeg ready.
- ///
- /// However, if you want to, you can still call it at the beginning of
- /// your program to customize your FFmpeg setup.
- ///
- /// Root path for loading FFmpeg libraries.
- /// Log level for FFmpeg.
- /// Color of the FFmpeg logs.
- public static void SetupFFmpeg(FFmpegLogLevel logLevel, ConsoleColor logColor, params string[] paths)
- {
- if (IsFFmpegSetup)
- return;
+ private static av_log_set_callback_callback? logCallback;
+
+ /// <summary>
+ /// Sets up FFmpeg: root path and logging.
+ ///
+ /// It will only set up FFmpeg once. Any non-first call will do nothing.
+ ///
+ /// SeeShark is designed such that this method is called whenever it is
+ /// necessary to have FFmpeg ready.
+ ///
+ /// However, if you want to, you can still call it at the beginning of
+ /// your program to customize your FFmpeg setup.
+ /// </summary>
+ /// <param name="logLevel">Log level for FFmpeg.</param>
+ /// <param name="logColor">Color of the FFmpeg logs.</param>
+ /// <param name="paths">Root path for loading FFmpeg libraries.</param>
+ public static void SetupFFmpeg(FFmpegLogLevel logLevel, ConsoleColor logColor, params string[] paths)
+ {
+ if (IsFFmpegSetup)
+ return;
- llsLog("Setting up FFmpeg\nRequired libraries:" +
- $"\n - avcodec (v{ffmpeg.LIBAVCODEC_VERSION_MAJOR})" +
- $"\n - avdevice (v{ffmpeg.LIBAVDEVICE_VERSION_MAJOR})" +
- $"\n - avformat (v{ffmpeg.LIBAVFORMAT_VERSION_MAJOR})" +
- $"\n - swscale (v{ffmpeg.LIBSWSCALE_VERSION_MAJOR})");
+ llsLog("Setting up FFmpeg\nRequired libraries:" +
+ $"\n - avcodec (v{ffmpeg.LIBAVCODEC_VERSION_MAJOR})" +
+ $"\n - avdevice (v{ffmpeg.LIBAVDEVICE_VERSION_MAJOR})" +
+ $"\n - avformat (v{ffmpeg.LIBAVFORMAT_VERSION_MAJOR})" +
+ $"\n - swscale (v{ffmpeg.LIBSWSCALE_VERSION_MAJOR})");
- var requiredLibs = LF.AVCodec | LF.AVDevice | LF.AVFormat | LF.SWScale;
+ var requiredLibs = LF.AVCodec | LF.AVDevice | LF.AVFormat | LF.SWScale;
- if (paths.Length == 0)
- TrySetRootPath(requiredLibs, AppDomain.CurrentDomain.BaseDirectory);
- else
- TrySetRootPath(requiredLibs, paths);
- SetupFFmpegLogging(logLevel, logColor);
- ffmpeg.avdevice_register_all();
+ if (paths.Length == 0)
+ TrySetRootPath(requiredLibs, AppDomain.CurrentDomain.BaseDirectory);
+ else
+ TrySetRootPath(requiredLibs, paths);
+ SetupFFmpegLogging(logLevel, logColor);
+ ffmpeg.avdevice_register_all();
- IsFFmpegSetup = true;
- }
+ IsFFmpegSetup = true;
+ }
- public static void SetupFFmpeg(params string[] paths) => SetupFFmpeg(FFmpegLogLevel.Panic, ConsoleColor.Yellow, paths);
+ public static void SetupFFmpeg(params string[] paths) => SetupFFmpeg(FFmpegLogLevel.Panic, ConsoleColor.Yellow, paths);
- internal static unsafe void SetupFFmpegLogging(FFmpegLogLevel logLevel, ConsoleColor logColor)
- {
- ffmpeg.av_log_set_level((int)logLevel);
+ internal static unsafe void SetupFFmpegLogging(FFmpegLogLevel logLevel, ConsoleColor logColor)
+ {
+ ffmpeg.av_log_set_level((int)logLevel);
- // Do not convert to local function!
- logCallback = (p0, level, format, vl) =>
- {
- if (level > ffmpeg.av_log_get_level())
- return;
+ // Do not convert to local function!
+ logCallback = (p0, level, format, vl) =>
+ {
+ if (level > ffmpeg.av_log_get_level())
+ return;
- var lineSize = 1024;
- var lineBuffer = stackalloc byte[lineSize];
- var printPrefix = 1;
+ var lineSize = 1024;
+ var lineBuffer = stackalloc byte[lineSize];
+ var printPrefix = 1;
- ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
- var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
+ ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
+ var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
- // TODO: maybe make it possible to log this in any stream?
- Console.ForegroundColor = logColor;
- Console.Write(line);
- Console.ResetColor();
- };
+ // TODO: maybe make it possible to log this in any stream?
+ Console.ForegroundColor = logColor;
+ Console.Write(line);
+ Console.ResetColor();
+ };
- ffmpeg.av_log_set_callback(logCallback);
- }
+ ffmpeg.av_log_set_callback(logCallback);
+ }
- ///
- /// Tries to set the RootPath to the first path in which it can find all the native libraries.
- /// Ideally, you would want to only call this function once, before doing anything with FFmpeg.
- ///
- ///
- /// This function will not load the native libraries but merely check if they exist.
- ///
- /// Every path to try out. It will set the RootPath to the first one that works.
- public static void TrySetRootPath(params string[] paths) => TrySetRootPath(paths);
-
- ///
- /// Tries to set the RootPath to the first path in which it can find all the required native libraries.
- /// Ideally, you would want to only call this function once, before doing anything with FFmpeg.
- ///
- ///
- /// This function will not load the native libraries but merely check if they exist.
- ///
- /// The required libraries. If you don't need all of them, you can specify them here.
- /// Every path to try out. It will set the RootPath to the first one that works.
- public static void TrySetRootPath(LF requiredLibraries, params string[] paths)
+ /// <summary>
+ /// Tries to set the RootPath to the first path in which it can find all the native libraries.
+ /// Ideally, you would want to only call this function once, before doing anything with FFmpeg.
+ /// </summary>
+ /// <remarks>
+ /// This function will not load the native libraries but merely check if they exist.
+ /// </remarks>
+ /// <param name="paths">Every path to try out. It will set the RootPath to the first one that works.</param>
+ public static void TrySetRootPath(params string[] paths) => TrySetRootPath(LF.All, paths);
+
+ /// <summary>
+ /// Tries to set the RootPath to the first path in which it can find all the required native libraries.
+ /// Ideally, you would want to only call this function once, before doing anything with FFmpeg.
+ /// </summary>
+ /// <remarks>
+ /// This function will not load the native libraries but merely check if they exist.
+ /// </remarks>
+ /// <param name="requiredLibraries">The required libraries. If you don't need all of them, you can specify them here.</param>
+ /// <param name="paths">Every path to try out. It will set the RootPath to the first one that works.</param>
+ public static void TrySetRootPath(LF requiredLibraries, params string[] paths)
+ {
+ try
{
- try
- {
- ffmpeg.RootPath = paths.First((path) => CanLoadLibraries(requiredLibraries, path));
- }
- catch (InvalidOperationException)
- {
- string pathList = "\n - " + string.Join("\n - ", paths);
- throw new InvalidOperationException(
- $"Couldn't find native libraries in the following paths:{pathList}" +
- "\nMake sure you installed the correct versions of the native libraries.");
- }
+ ffmpeg.RootPath = paths.First((path) => CanLoadLibraries(requiredLibraries, path));
}
-
- ///
- /// Tries to load the native libraries from the set root path.
- /// You can specify which libraries need to be loaded with LibraryFlags.
- /// It will try to load all librares by default.
- /// Ideally, you would want to only call this function once, before doing anything with FFmpeg.
- /// If you try to do that later, it might unload all of your already loaded libraries and fail to provide them again.
- ///
- /// Whether it succeeded in loading all the requested libraries.
- public static bool CanLoadLibraries(LF libraries = LF.All, string path = "")
+ catch (InvalidOperationException)
{
- var validated = new List();
- llsLog($"Searching for libraries in {path}");
- return libraries.ToStrings().All((lib) => canLoadLibrary(lib, path, validated));
+ string pathList = "\n - " + string.Join("\n - ", paths);
+ throw new InvalidOperationException(
+ $"Couldn't find native libraries in the following paths:{pathList}" +
+ "\nMake sure you installed the correct versions of the native libraries.");
}
+ }
- ///
- /// Note: dependencies are not checked as they are optional in FFmpeg.AutoGen.
- /// See the following commit
- /// and this function
- ///
- private static bool canLoadLibrary(string lib, string path, List validated)
- {
- if (validated.Contains(lib))
- return true;
-
- int version = ffmpeg.LibraryVersionMap[lib];
- if (!canLoadNativeLibrary(path, lib, version))
- return false;
+ /// <summary>
+ /// Tries to load the native libraries from the set root path.
+ /// You can specify which libraries need to be loaded with LibraryFlags.
+ /// It will try to load all libraries by default.
+ /// Ideally, you would want to only call this function once, before doing anything with FFmpeg.
+ /// If you try to do that later, it might unload all of your already loaded libraries and fail to provide them again.
+ /// </summary>
+ /// <returns>Whether it succeeded in loading all the requested libraries.</returns>
+ public static bool CanLoadLibraries(LF libraries = LF.All, string path = "")
+ {
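+ // Only checks that the native library files exist on disk; nothing is loaded here.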
+ var validated = new List<string>();
+ llsLog($"Searching for libraries in {path}");
+ return libraries.ToStrings().All((lib) => canLoadLibrary(lib, path, validated));
+ }
- validated.Add(lib);
+ /// <summary>
+ /// Note: dependencies are not checked as they are optional in FFmpeg.AutoGen.
+ /// See the following commit
+ /// and this function
+ /// </summary>
+ private static bool canLoadLibrary(string lib, string path, List<string> validated)
+ {
+ if (validated.Contains(lib))
return true;
- }
- ///
- /// Checks if it can load a native library using platform naming conventions.
- ///
- /// Path of the library.
- /// Name of the library.
- /// Version of the library.
- /// Whether it found the native library in the path.
- private static bool canLoadNativeLibrary(string path, string libraryName, int version)
- {
- string nativeLibraryName = LibraryLoader.GetNativeLibraryName(libraryName, version);
- string fullName = Path.Combine(path, nativeLibraryName);
- bool exists = File.Exists(fullName);
- llsLog($" {(exists ? "Found" : "Couldn't find")} library {nativeLibraryName}");
- return exists;
- }
+ int version = ffmpeg.LibraryVersionMap[lib];
+ if (!canLoadNativeLibrary(path, lib, version))
+ return false;
+
+ validated.Add(lib);
+ return true;
+ }
+
+ /// <summary>
+ /// Checks if it can load a native library using platform naming conventions.
+ /// </summary>
+ /// <param name="path">Path of the library.</param>
+ /// <param name="libraryName">Name of the library.</param>
+ /// <param name="version">Version of the library.</param>
+ /// <returns>Whether it found the native library in the path.</returns>
+ private static bool canLoadNativeLibrary(string path, string libraryName, int version)
+ {
+ string nativeLibraryName = LibraryLoader.GetNativeLibraryName(libraryName, version);
+ string fullName = Path.Combine(path, nativeLibraryName);
+ bool exists = File.Exists(fullName);
+ llsLog($" {(exists ? "Found" : "Couldn't find")} library {nativeLibraryName}");
+ return exists;
}
}
diff --git a/SeeShark/FFmpeg/LibraryFlags.cs b/SeeShark/FFmpeg/LibraryFlags.cs
index 5a871a7..33039f3 100644
--- a/SeeShark/FFmpeg/LibraryFlags.cs
+++ b/SeeShark/FFmpeg/LibraryFlags.cs
@@ -5,65 +5,64 @@
using System;
using System.Collections.Generic;
-namespace SeeShark.FFmpeg
+namespace SeeShark.FFmpeg;
+
+[Flags]
+public enum LibraryFlags : byte
{
- [Flags]
- public enum LibraryFlags : byte
- {
- /// No library.
- None = 0,
+ /// <summary>No library.</summary>
+ None = 0,
- /// avcodec library.
- AVCodec = 1 << 0,
- /// avdevice library.
- AVDevice = 1 << 1,
- /// avfilter library.
- AVFilter = 1 << 2,
- /// avformat library.
- AVFormat = 1 << 3,
- /// avutil library.
- AVUtil = 1 << 4,
- /// postproc library.
- PostProc = 1 << 5,
- /// swresample library.
- SWResample = 1 << 6,
- /// swscale library.
- SWScale = 1 << 7,
+ /// <summary>avcodec library.</summary>
+ AVCodec = 1 << 0,
+ /// <summary>avdevice library.</summary>
+ AVDevice = 1 << 1,
+ /// <summary>avfilter library.</summary>
+ AVFilter = 1 << 2,
+ /// <summary>avformat library.</summary>
+ AVFormat = 1 << 3,
+ /// <summary>avutil library.</summary>
+ AVUtil = 1 << 4,
+ /// <summary>postproc library.</summary>
+ PostProc = 1 << 5,
+ /// <summary>swresample library.</summary>
+ SWResample = 1 << 6,
+ /// <summary>swscale library.</summary>
+ SWScale = 1 << 7,
- /// All libav libraries: avcodec, avdevice, avfilter, avformat and avutil.
- AVAll = AVCodec | AVDevice | AVFilter | AVFormat | AVUtil,
- /// All libsw libraries: swresample and swscale.
- SWAll = SWResample | SWScale,
- /// All libraries: all libav libraries, all libsw libraries, and postproc.
- All = AVCodec | AVDevice | AVFilter | AVFormat | AVUtil | PostProc | SWResample | SWScale,
- }
+ /// <summary>All libav libraries: avcodec, avdevice, avfilter, avformat and avutil.</summary>
+ AVAll = AVCodec | AVDevice | AVFilter | AVFormat | AVUtil,
+ /// <summary>All libsw libraries: swresample and swscale.</summary>
+ SWAll = SWResample | SWScale,
+ /// <summary>All libraries: all libav libraries, all libsw libraries, and postproc.</summary>
+ All = AVCodec | AVDevice | AVFilter | AVFormat | AVUtil | PostProc | SWResample | SWScale,
+}
- public static class LibraryFlagsExtension
+public static class LibraryFlagsExtension
+{
+ /// <summary>
+ /// Gets the names of all the flagged libraries.
+ /// </summary>
+ /// <returns>A list of the names of all the flagged libraries.</returns>
+ public static List<string> ToStrings(this LibraryFlags libraryFlags)
{
- ///
- /// Gets the names of all the flagged libraries.
- ///
- /// A list of the names of all the flagged libraries.
- public static List ToStrings(this LibraryFlags libraryFlags)
- {
- var strings = new List();
- if (libraryFlags.HasFlag(LibraryFlags.AVCodec))
- strings.Add("avcodec");
- if (libraryFlags.HasFlag(LibraryFlags.AVDevice))
- strings.Add("avdevice");
- if (libraryFlags.HasFlag(LibraryFlags.AVFilter))
- strings.Add("avfilter");
- if (libraryFlags.HasFlag(LibraryFlags.AVFormat))
- strings.Add("avformat");
- if (libraryFlags.HasFlag(LibraryFlags.AVUtil))
- strings.Add("avutil");
- if (libraryFlags.HasFlag(LibraryFlags.PostProc))
- strings.Add("postproc");
- if (libraryFlags.HasFlag(LibraryFlags.SWResample))
- strings.Add("swresample");
- if (libraryFlags.HasFlag(LibraryFlags.SWScale))
- strings.Add("swscale");
- return strings;
- }
+ var strings = new List<string>();
+ if (libraryFlags.HasFlag(LibraryFlags.AVCodec))
+ strings.Add("avcodec");
+ if (libraryFlags.HasFlag(LibraryFlags.AVDevice))
+ strings.Add("avdevice");
+ if (libraryFlags.HasFlag(LibraryFlags.AVFilter))
+ strings.Add("avfilter");
+ if (libraryFlags.HasFlag(LibraryFlags.AVFormat))
+ strings.Add("avformat");
+ if (libraryFlags.HasFlag(LibraryFlags.AVUtil))
+ strings.Add("avutil");
+ if (libraryFlags.HasFlag(LibraryFlags.PostProc))
+ strings.Add("postproc");
+ if (libraryFlags.HasFlag(LibraryFlags.SWResample))
+ strings.Add("swresample");
+ if (libraryFlags.HasFlag(LibraryFlags.SWScale))
+ strings.Add("swscale");
+ return strings;
}
}
diff --git a/SeeShark/Frame.cs b/SeeShark/Frame.cs
index 3145c48..1f3b746 100644
--- a/SeeShark/Frame.cs
+++ b/SeeShark/Frame.cs
@@ -5,85 +5,84 @@
using System;
using FFmpeg.AutoGen;
-namespace SeeShark
+namespace SeeShark;
+
+public sealed unsafe class Frame : Disposable
{
- public sealed unsafe class Frame : Disposable
- {
- internal readonly AVFrame* AVFrame;
+ internal readonly AVFrame* AVFrame;
- ///
- /// Width of the frame in pixels.
- ///
- public int Width => AVFrame->width;
- ///
- /// Height of the frame in pixels.
- ///
- public int Height => AVFrame->height;
- ///
- /// Line size of the frame in pixels - is equal to its width multiplied by its pixel stride,
- /// that which is determined by its .
- ///
- public int WidthStep => AVFrame->linesize[0];
- ///
- /// Pixel format of the frame.
- ///
- ///
- public PixelFormat PixelFormat => (PixelFormat)AVFrame->format;
- ///
- /// Raw data of the frame in bytes.
- ///
- public ReadOnlySpan RawData => new ReadOnlySpan(AVFrame->data[0], WidthStep * Height);
+ /// <summary>
+ /// Width of the frame in pixels.
+ /// </summary>
+ public int Width => AVFrame->width;
+ /// <summary>
+ /// Height of the frame in pixels.
+ /// </summary>
+ public int Height => AVFrame->height;
+ /// <summary>
+ /// Line size of the frame in bytes - equal to its width multiplied by its pixel stride,
+ /// which is determined by its <see cref="PixelFormat"/>.
+ /// </summary>
+ public int WidthStep => AVFrame->linesize[0];
+ /// <summary>
+ /// Pixel format of the frame.
+ /// </summary>
+ public PixelFormat PixelFormat => (PixelFormat)AVFrame->format;
+ /// <summary>
+ /// Raw data of the frame in bytes.
+ /// </summary>
+ public ReadOnlySpan<byte> RawData => new ReadOnlySpan<byte>(AVFrame->data[0], WidthStep * Height);
- // This constructor is internal because the user of the library
- // is not supposed to deal with any actual FFmpeg type.
- internal Frame()
- {
- AVFrame = ffmpeg.av_frame_alloc();
- }
+ // This constructor is internal because the user of the library
+ // is not supposed to deal with any actual FFmpeg type.
+ internal Frame()
+ {
+ AVFrame = ffmpeg.av_frame_alloc();
+ }
- internal Frame(AVFrame* avFrame)
- {
- AVFrame = avFrame;
- }
+ internal Frame(AVFrame* avFrame)
+ {
+ AVFrame = avFrame;
+ }
- ///
- /// Copies data to a hardware accelerated frame.
- ///
- /// The frame it copies data to must have an AVHWFramesContext attached.
- /// Hardware accelerated frame.
- /// 0 on success, a negative AVERROR error code on failure.
- internal int HardwareAccelCopyTo(Frame hwFrame) => ffmpeg.av_hwframe_transfer_data(hwFrame.AVFrame, AVFrame, 0);
+ /// <summary>
+ /// Copies data to a hardware accelerated frame.
+ /// </summary>
+ /// <remarks>The frame it copies data to must have an AVHWFramesContext attached.</remarks>
+ /// <param name="hwFrame">Hardware accelerated frame.</param>
+ /// <returns>0 on success, a negative AVERROR error code on failure.</returns>
+ internal int HardwareAccelCopyTo(Frame hwFrame) => ffmpeg.av_hwframe_transfer_data(hwFrame.AVFrame, AVFrame, 0);
- ///
- /// Unreference all the buffers referenced by frame and reset the frame fields.
- ///
- internal void Unref() => ffmpeg.av_frame_unref(AVFrame);
+ /// <summary>
+ /// Unreference all the buffers referenced by frame and reset the frame fields.
+ /// </summary>
+ internal void Unref() => ffmpeg.av_frame_unref(AVFrame);
- ///
- /// Return decoded output data from a decoder.
- ///
- ///
- /// 0: success, a frame was returned
- /// AVERROR(EAGAIN): output is not available in this state - user must try to send new input
- /// AVERROR_EOF: the decoder has been fully flushed, and there will be no more output frames
- /// AVERROR(EINVAL): codec not opened, or it is an encoder
- /// AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame.
- /// Applicable when flag AV_CODEC_FLAG_DROPCHANGED is set.
- /// Other negative values: legitimate decoding errors
- ///
- internal int Receive(AVCodecContext* codecContext) => ffmpeg.avcodec_receive_frame(codecContext, AVFrame);
+ /// <summary>
+ /// Return decoded output data from a decoder.
+ /// </summary>
+ /// <returns>
+ /// 0: success, a frame was returned
+ /// AVERROR(EAGAIN): output is not available in this state - user must try to send new input
+ /// AVERROR_EOF: the decoder has been fully flushed, and there will be no more output frames
+ /// AVERROR(EINVAL): codec not opened, or it is an encoder
+ /// AVERROR_INPUT_CHANGED: current decoded frame has changed parameters with respect to first decoded frame.
+ /// Applicable when flag AV_CODEC_FLAG_DROPCHANGED is set.
+ /// Other negative values: legitimate decoding errors
+ /// </returns>
+ internal int Receive(AVCodecContext* codecContext) => ffmpeg.avcodec_receive_frame(codecContext, AVFrame);
- protected override void DisposeManaged()
- {
- }
+ protected override void DisposeManaged()
+ {
+ }
- protected override void DisposeUnmanaged()
+ protected override void DisposeUnmanaged()
+ {
+ if (AVFrame != null)
{
- if (AVFrame != null)
- {
- AVFrame* frame = AVFrame;
- ffmpeg.av_frame_free(&frame);
- }
+ AVFrame* frame = AVFrame;
+ ffmpeg.av_frame_free(&frame);
}
}
}
diff --git a/SeeShark/FrameConverter.cs b/SeeShark/FrameConverter.cs
index 5508b6c..921a33c 100644
--- a/SeeShark/FrameConverter.cs
+++ b/SeeShark/FrameConverter.cs
@@ -6,121 +6,120 @@
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;
-namespace SeeShark
+namespace SeeShark;
+
+/// <summary>
+/// Converts a frame into another pixel format and/or resizes it.
+/// </summary>
+public sealed unsafe class FrameConverter : Disposable
{
- ///
- /// Converts a frame into another pixel format and/or resizes it.
- ///
- public sealed unsafe class FrameConverter : Disposable
+ private readonly IntPtr convertedFrameBufferPtr;
+ private readonly SwsContext* convertContext;
+
+ public readonly Frame DstFrame;
+ public readonly int SrcWidth;
+ public readonly int SrcHeight;
+ public readonly int DstWidth;
+ public readonly int DstHeight;
+ public readonly PixelFormat SrcPixelFormat;
+ public readonly PixelFormat DstPixelFormat;
+
+ public FrameConverter(Frame frame, PixelFormat pixelFormat)
+ : this(frame, frame.Width, frame.Height, pixelFormat)
{
- private readonly IntPtr convertedFrameBufferPtr;
- private readonly SwsContext* convertContext;
-
- public readonly Frame DstFrame;
- public readonly int SrcWidth;
- public readonly int SrcHeight;
- public readonly int DstWidth;
- public readonly int DstHeight;
- public readonly PixelFormat SrcPixelFormat;
- public readonly PixelFormat DstPixelFormat;
-
- public FrameConverter(Frame frame, PixelFormat pixelFormat)
- : this(frame, frame.Width, frame.Height, pixelFormat)
- {
- }
-
- public FrameConverter(int width, int height,
- PixelFormat srcPixelFormat, PixelFormat dstPixelFormat)
- : this(width, height, srcPixelFormat, width, height, dstPixelFormat)
- {
- }
-
- public FrameConverter(Frame frame, int width, int height)
- : this(frame, width, height, frame.PixelFormat)
- {
- }
-
- public FrameConverter(int srcWidth, int srcHeight,
- PixelFormat srcPixelFormat, int dstWidth, int dstHeight)
- : this(srcWidth, srcHeight, srcPixelFormat, dstWidth, dstHeight, srcPixelFormat)
- {
- }
-
- public FrameConverter(Frame frame, int width, int height, PixelFormat pixelFormat)
- : this(frame.Width, frame.Height, frame.PixelFormat, width, height, pixelFormat)
- {
- }
-
- public FrameConverter(
- int srcWidth, int srcHeight, PixelFormat srcPixelFormat,
- int dstWidth, int dstHeight, PixelFormat dstPixelFormat)
- {
- if (srcWidth == 0 || srcHeight == 0 || dstWidth == 0 || dstHeight == 0)
- throw new ArgumentException("Source/Destination's Width/Height cannot be zero");
-
- SrcWidth = srcWidth;
- SrcHeight = srcHeight;
- DstWidth = dstWidth;
- DstHeight = dstHeight;
- SrcPixelFormat = srcPixelFormat;
- DstPixelFormat = dstPixelFormat;
-
- var srcPF = (AVPixelFormat)srcPixelFormat.RecycleDeprecated();
- var dstPF = (AVPixelFormat)dstPixelFormat.RecycleDeprecated();
-
- convertContext = ffmpeg.sws_getContext(
- srcWidth, srcHeight, srcPF,
- dstWidth, dstHeight, dstPF,
- ffmpeg.SWS_FAST_BILINEAR,
- null, null, null);
-
- if (convertContext == null)
- throw new ApplicationException("Could not initialize the conversion context.");
-
- var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(dstPF, dstWidth, dstHeight, 1);
- convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
-
- var dstData = new byte_ptrArray4();
- var dstLinesize = new int_array4();
- ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize,
- (byte*)convertedFrameBufferPtr, dstPF, dstWidth, dstHeight, 1);
-
- var dstFrame = ffmpeg.av_frame_alloc();
- dstFrame->width = DstWidth;
- dstFrame->height = DstHeight;
- dstFrame->data.UpdateFrom(dstData);
- dstFrame->linesize.UpdateFrom(dstLinesize);
- dstFrame->format = (int)dstPF;
- DstFrame = new Frame(dstFrame);
- }
-
- public Frame Convert(Frame srcFrame)
- {
- var srcAVFrame = srcFrame.AVFrame;
- var dstAVFrame = DstFrame.AVFrame;
- ffmpeg.sws_scale(convertContext,
- srcAVFrame->data, srcAVFrame->linesize, 0, srcAVFrame->height,
- dstAVFrame->data, dstAVFrame->linesize);
-
- return DstFrame;
- }
-
- protected override void DisposeManaged()
- {
- DstFrame.Dispose();
- }
-
- protected override void DisposeUnmanaged()
- {
- // Constructor initialization can fail at some points,
- // so we need to null check everything.
- // See https://github.com/vignetteapp/SeeShark/issues/27
-
- if (convertedFrameBufferPtr != IntPtr.Zero)
- Marshal.FreeHGlobal(convertedFrameBufferPtr);
-
- if (convertContext != null)
- ffmpeg.sws_freeContext(convertContext);
- }
+ }
+
+ public FrameConverter(int width, int height,
+ PixelFormat srcPixelFormat, PixelFormat dstPixelFormat)
+ : this(width, height, srcPixelFormat, width, height, dstPixelFormat)
+ {
+ }
+
+ public FrameConverter(Frame frame, int width, int height)
+ : this(frame, width, height, frame.PixelFormat)
+ {
+ }
+
+ public FrameConverter(int srcWidth, int srcHeight,
+ PixelFormat srcPixelFormat, int dstWidth, int dstHeight)
+ : this(srcWidth, srcHeight, srcPixelFormat, dstWidth, dstHeight, srcPixelFormat)
+ {
+ }
+
+ public FrameConverter(Frame frame, int width, int height, PixelFormat pixelFormat)
+ : this(frame.Width, frame.Height, frame.PixelFormat, width, height, pixelFormat)
+ {
+ }
+
+ public FrameConverter(
+ int srcWidth, int srcHeight, PixelFormat srcPixelFormat,
+ int dstWidth, int dstHeight, PixelFormat dstPixelFormat)
+ {
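+ // Every conversion reuses one destination frame backed by a single unmanaged buffer.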
+ if (srcWidth == 0 || srcHeight == 0 || dstWidth == 0 || dstHeight == 0)
+ throw new ArgumentException("Source/Destination's Width/Height cannot be zero");
+
+ SrcWidth = srcWidth;
+ SrcHeight = srcHeight;
+ DstWidth = dstWidth;
+ DstHeight = dstHeight;
+ SrcPixelFormat = srcPixelFormat;
+ DstPixelFormat = dstPixelFormat;
+
+ var srcPF = (AVPixelFormat)srcPixelFormat.RecycleDeprecated();
+ var dstPF = (AVPixelFormat)dstPixelFormat.RecycleDeprecated();
+
+ convertContext = ffmpeg.sws_getContext(
+ srcWidth, srcHeight, srcPF,
+ dstWidth, dstHeight, dstPF,
+ ffmpeg.SWS_FAST_BILINEAR,
+ null, null, null);
+
+ if (convertContext == null)
+ throw new ApplicationException("Could not initialize the conversion context.");
+
+ var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(dstPF, dstWidth, dstHeight, 1);
+ convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
+
+ var dstData = new byte_ptrArray4();
+ var dstLinesize = new int_array4();
+ ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize,
+ (byte*)convertedFrameBufferPtr, dstPF, dstWidth, dstHeight, 1);
+
+ var dstFrame = ffmpeg.av_frame_alloc();
+ dstFrame->width = DstWidth;
+ dstFrame->height = DstHeight;
+ dstFrame->data.UpdateFrom(dstData);
+ dstFrame->linesize.UpdateFrom(dstLinesize);
+ dstFrame->format = (int)dstPF;
+ DstFrame = new Frame(dstFrame);
+ }
+
+ public Frame Convert(Frame srcFrame)
+ {
+ var srcAVFrame = srcFrame.AVFrame;
+ var dstAVFrame = DstFrame.AVFrame;
+ ffmpeg.sws_scale(convertContext,
+ srcAVFrame->data, srcAVFrame->linesize, 0, srcAVFrame->height,
+ dstAVFrame->data, dstAVFrame->linesize);
+
+ return DstFrame;
+ }
+
+ protected override void DisposeManaged()
+ {
+ DstFrame.Dispose();
+ }
+
+ protected override void DisposeUnmanaged()
+ {
+ // Constructor initialization can fail at some points,
+ // so we need to null check everything.
+ // See https://github.com/vignetteapp/SeeShark/issues/27
+
+ if (convertedFrameBufferPtr != IntPtr.Zero)
+ Marshal.FreeHGlobal(convertedFrameBufferPtr);
+
+ if (convertContext != null)
+ ffmpeg.sws_freeContext(convertContext);
}
}
diff --git a/SeeShark/FrameEventArgs.cs b/SeeShark/FrameEventArgs.cs
index 15fc968..dcff0da 100644
--- a/SeeShark/FrameEventArgs.cs
+++ b/SeeShark/FrameEventArgs.cs
@@ -5,26 +5,25 @@
using System;
using SeeShark.Decode;
-namespace SeeShark
+namespace SeeShark;
+
+/// <summary>
+/// Contains event data for a camera frame - in other words, just a <see cref="Frame"/>.
+/// </summary>
+public class FrameEventArgs : EventArgs
{
///
- /// Contains event data for a camera frame - in other words, just a .
+ /// The frame sent from the camera.
///
- public class FrameEventArgs : EventArgs
- {
- ///
- /// The frame sent from the camera.
- ///
- public Frame Frame { get; private set; }
- ///
- /// The decode status when sending that frame.
- ///
- public DecodeStatus Status { get; private set; }
+ public Frame Frame { get; private set; }
+ /// <summary>
+ /// The decode status when sending that frame.
+ /// </summary>
+ public DecodeStatus Status { get; private set; }
- public FrameEventArgs(Frame frame, DecodeStatus status = DecodeStatus.NewFrame)
- {
- Frame = frame;
- Status = status;
- }
+ public FrameEventArgs(Frame frame, DecodeStatus status = DecodeStatus.NewFrame)
+ {
+ Frame = frame;
+ Status = status;
}
}
diff --git a/SeeShark/Interop/Libc/FileOpenFlags.cs b/SeeShark/Interop/Libc/FileOpenFlags.cs
index 274c214..c85b1c4 100644
--- a/SeeShark/Interop/Libc/FileOpenFlags.cs
+++ b/SeeShark/Interop/Libc/FileOpenFlags.cs
@@ -2,13 +2,12 @@
// This file is part of SeeShark.
// SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
-namespace SeeShark.Interop.Libc
+namespace SeeShark.Interop.Libc;
+
+internal enum FileOpenFlags
{
- internal enum FileOpenFlags
- {
- O_RDONLY = 0x00,
- O_RDWR = 0x02,
- O_NONBLOCK = 0x800,
- O_SYNC = 0x101000
- }
+ O_RDONLY = 0x00,
+ O_RDWR = 0x02,
+ O_NONBLOCK = 0x800,
+ O_SYNC = 0x101000
}
diff --git a/SeeShark/Interop/Libc/Ioctl.cs b/SeeShark/Interop/Libc/Ioctl.cs
index 24ead31..0b4ed5f 100644
--- a/SeeShark/Interop/Libc/Ioctl.cs
+++ b/SeeShark/Interop/Libc/Ioctl.cs
@@ -5,36 +5,35 @@
using System;
using System.Runtime.InteropServices;
-namespace SeeShark.Interop.Libc
+namespace SeeShark.Interop.Libc;
+
+internal partial class Ioctl
{
- internal partial class Ioctl
- {
- const int ioc_nrbits = 8;
- const int ioc_typebits = 8;
- const int ioc_sizebits = 14;
- // const int ioc_dirbits = 2;
+ const int ioc_nrbits = 8;
+ const int ioc_typebits = 8;
+ const int ioc_sizebits = 14;
+ // const int ioc_dirbits = 2;
- // const int ioc_nrmask = (1 << ioc_nrbits) - 1;
- // const int ioc_typemask = (1 << ioc_typebits) - 1;
- // const int ioc_sizemask = (1 << ioc_sizebits) - 1;
- // const int ioc_dirmask = (1 << ioc_dirbits) - 1;
+ // const int ioc_nrmask = (1 << ioc_nrbits) - 1;
+ // const int ioc_typemask = (1 << ioc_typebits) - 1;
+ // const int ioc_sizemask = (1 << ioc_sizebits) - 1;
+ // const int ioc_dirmask = (1 << ioc_dirbits) - 1;
- const int ioc_nrshift = 0;
- const int ioc_typeshift = ioc_nrshift + ioc_nrbits;
- const int ioc_sizeshift = ioc_typeshift + ioc_typebits;
- const int ioc_dirshift = ioc_sizeshift + ioc_sizebits;
+ const int ioc_nrshift = 0;
+ const int ioc_typeshift = ioc_nrshift + ioc_nrbits;
+ const int ioc_sizeshift = ioc_typeshift + ioc_typebits;
+ const int ioc_dirshift = ioc_sizeshift + ioc_sizebits;
- const int ioc_none = 0;
- const int ioc_write = 1;
- const int ioc_read = 2;
+ const int ioc_none = 0;
+ const int ioc_write = 1;
+ const int ioc_read = 2;
- internal static int IOC(int dir, int type, int nr, int size)
- => dir << ioc_dirshift | type << ioc_typeshift | nr << ioc_nrshift | size << ioc_sizeshift;
+ internal static int IOC(int dir, int type, int nr, int size)
+ => dir << ioc_dirshift | type << ioc_typeshift | nr << ioc_nrshift | size << ioc_sizeshift;
- internal static int IO(int type, int nr) => IOC(ioc_none, type, nr, 0);
- internal static int IOR(int type, int nr, Type size) => IOC(ioc_read, type, nr, IOC_TYPECHECK(size));
- internal static int IOW(int type, int nr, Type size) => IOC(ioc_write, type, nr, IOC_TYPECHECK(size));
- internal static int IOWR(int type, int nr, Type size) => IOC(ioc_read | ioc_write, type, nr, IOC_TYPECHECK(size));
- internal static int IOC_TYPECHECK(Type t) => Marshal.SizeOf(t);
- }
+ internal static int IO(int type, int nr) => IOC(ioc_none, type, nr, 0);
+ internal static int IOR(int type, int nr, Type size) => IOC(ioc_read, type, nr, IOC_TYPECHECK(size));
+ internal static int IOW(int type, int nr, Type size) => IOC(ioc_write, type, nr, IOC_TYPECHECK(size));
+ internal static int IOWR(int type, int nr, Type size) => IOC(ioc_read | ioc_write, type, nr, IOC_TYPECHECK(size));
+ internal static int IOC_TYPECHECK(Type t) => Marshal.SizeOf(t);
}
diff --git a/SeeShark/Interop/Libc/Libc.cs b/SeeShark/Interop/Libc/Libc.cs
index eb43c59..d45e645 100644
--- a/SeeShark/Interop/Libc/Libc.cs
+++ b/SeeShark/Interop/Libc/Libc.cs
@@ -5,40 +5,39 @@
using System;
using System.Runtime.InteropServices;
-namespace SeeShark.Interop.Libc
+namespace SeeShark.Interop.Libc;
+
+internal class Libc
{
- internal class Libc
- {
- private const string libc_library = "libc";
- private const string explain_library = "explain";
+ private const string libc_library = "libc";
+ private const string explain_library = "explain";
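+ // libexplain is used to produce human-readable explanations for failed ioctl calls.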
- [DllImport(libc_library, SetLastError = true)]
- internal static extern int open([MarshalAs(UnmanagedType.LPStr)] string pathname, FileOpenFlags flags);
+ [DllImport(libc_library, SetLastError = true)]
+ internal static extern int open([MarshalAs(UnmanagedType.LPStr)] string pathname, FileOpenFlags flags);
- [DllImport(libc_library)]
- internal static extern int close(int fd);
+ [DllImport(libc_library)]
+ internal static extern int close(int fd);
- [DllImport(libc_library, SetLastError = true)]
- internal static extern int read(int fd, IntPtr buf, int count);
+ [DllImport(libc_library, SetLastError = true)]
+ internal static extern int read(int fd, IntPtr buf, int count);
- [DllImport(libc_library, SetLastError = true)]
- internal static extern int write(int fd, IntPtr buf, int count);
+ [DllImport(libc_library, SetLastError = true)]
+ internal static extern int write(int fd, IntPtr buf, int count);
- #region ioctl
- [DllImport(libc_library, SetLastError = true)]
- internal static extern int ioctl(int fd, int request, IntPtr argp);
+ #region ioctl
+ [DllImport(libc_library, SetLastError = true)]
+ internal static extern int ioctl(int fd, int request, IntPtr argp);
- [DllImport(explain_library, SetLastError = true)]
- internal static extern unsafe sbyte* explain_ioctl(int fd, int request, IntPtr argp);
+ [DllImport(explain_library, SetLastError = true)]
+ internal static extern unsafe sbyte* explain_ioctl(int fd, int request, IntPtr argp);
- [DllImport(explain_library, SetLastError = true)]
- internal static extern unsafe sbyte* explain_errno_ioctl(int errno, int fd, int request, IntPtr argp);
- #endregion
+ [DllImport(explain_library, SetLastError = true)]
+ internal static extern unsafe sbyte* explain_errno_ioctl(int errno, int fd, int request, IntPtr argp);
+ #endregion
- [DllImport(libc_library, SetLastError = true)]
- internal static extern IntPtr mmap(IntPtr addr, int length, MemoryMappedProtections prot, MemoryMappedFlags flags, int fd, int offset);
+ [DllImport(libc_library, SetLastError = true)]
+ internal static extern IntPtr mmap(IntPtr addr, int length, MemoryMappedProtections prot, MemoryMappedFlags flags, int fd, int offset);
- [DllImport(libc_library)]
- internal static extern int munmap(IntPtr addr, int length);
- }
+ [DllImport(libc_library)]
+ internal static extern int munmap(IntPtr addr, int length);
}
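
For context, a typical call sequence over these bindings looks like the following sketch. It is illustrative only and assumes `FileOpenFlags` exposes the usual O_RDWR flag; the struct and request types are the ones defined later in this patch:

    using System;
    using System.IO;
    using System.Runtime.InteropServices;
    using SeeShark.Interop.Libc;

    int fd = Libc.open("/dev/video0", FileOpenFlags.O_RDWR); // O_RDWR assumed to exist
    if (fd < 0)
        throw new IOException("could not open /dev/video0");

    IntPtr ptr = Marshal.AllocHGlobal(Marshal.SizeOf<v4l2_capability>());
    try
    {
        // Ask the driver for its capabilities via VIDIOC_QUERYCAP.
        if (Libc.ioctl(fd, (int)VideoSettings.VIDIOC_QUERYCAP, ptr) < 0)
            throw new IOException("VIDIOC_QUERYCAP failed");

        var cap = Marshal.PtrToStructure<v4l2_capability>(ptr);
        Console.WriteLine($"driver: {cap.driver}, card: {cap.card}");
    }
    finally
    {
        Marshal.FreeHGlobal(ptr);
        Libc.close(fd);
    }
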
diff --git a/SeeShark/Interop/Libc/MemoryMappedFlags.cs b/SeeShark/Interop/Libc/MemoryMappedFlags.cs
index 09523de..dca45e4 100644
--- a/SeeShark/Interop/Libc/MemoryMappedFlags.cs
+++ b/SeeShark/Interop/Libc/MemoryMappedFlags.cs
@@ -4,13 +4,12 @@
using System;
-namespace SeeShark.Interop.Libc
+namespace SeeShark.Interop.Libc;
+
+[Flags]
+internal enum MemoryMappedFlags
{
- [Flags]
- internal enum MemoryMappedFlags
- {
- MAP_SHARED = 0x01,
- MAP_PRIVATE = 0x02,
- MAP_FIXED = 0x10
- }
+ MAP_SHARED = 0x01,
+ MAP_PRIVATE = 0x02,
+ MAP_FIXED = 0x10
}
diff --git a/SeeShark/Interop/Libc/MemoryMappedProtections.cs b/SeeShark/Interop/Libc/MemoryMappedProtections.cs
index 3924447..900fc40 100644
--- a/SeeShark/Interop/Libc/MemoryMappedProtections.cs
+++ b/SeeShark/Interop/Libc/MemoryMappedProtections.cs
@@ -4,14 +4,13 @@
using System;
-namespace SeeShark.Interop.Libc
+namespace SeeShark.Interop.Libc;
+
+[Flags]
+internal enum MemoryMappedProtections
{
- [Flags]
- internal enum MemoryMappedProtections
- {
- PROT_NONE = 0x0,
- PROT_READ = 0x1,
- PROT_WRITE = 0x2,
- PROT_EXEC = 0x4
- }
+ PROT_NONE = 0x0,
+ PROT_READ = 0x1,
+ PROT_WRITE = 0x2,
+ PROT_EXEC = 0x4
}
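
These two flag enums feed the `mmap` binding in Libc.cs. A sketch of the usual V4L2 pattern — mapping a driver-owned buffer into the process — where the file descriptor, length and offset are placeholders (real values come from VIDIOC_QUERYBUF):

    using System;
    using System.IO;
    using SeeShark.Interop.Libc;

    int fd = 3;           // placeholder: fd returned by Libc.open("/dev/video0", ...)
    int length = 614_400; // placeholder: e.g. 640 * 480 * 2 bytes for YUYV
    int offset = 0;       // placeholder: offset reported by VIDIOC_QUERYBUF

    IntPtr start = Libc.mmap(
        IntPtr.Zero,
        length,
        MemoryMappedProtections.PROT_READ | MemoryMappedProtections.PROT_WRITE,
        MemoryMappedFlags.MAP_SHARED, // writes are visible to the driver
        fd,
        offset);

    if (start == new IntPtr(-1)) // MAP_FAILED
        throw new IOException("mmap failed");

    // ... consume frames written into the mapped region ...
    Libc.munmap(start, length);
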
diff --git a/SeeShark/Interop/Libc/RawVideoSettings.cs b/SeeShark/Interop/Libc/RawVideoSettings.cs
index caa5f29..b0f4dea 100644
--- a/SeeShark/Interop/Libc/RawVideoSettings.cs
+++ b/SeeShark/Interop/Libc/RawVideoSettings.cs
@@ -4,77 +4,76 @@
using System;
-namespace SeeShark.Interop.Libc
+namespace SeeShark.Interop.Libc;
+
+/// <summary>
+/// videodev2.h Request Definition
+/// </summary>
+internal static class RawVideoSettings
{
- /// <summary>
- /// videodev2.h Request Definition
- /// </summary>
- internal static class RawVideoSettings
- {
- public static readonly int VIDIOC_QUERYCAP = Ioctl.IOR('V', 0, typeof(v4l2_capability));
- public static readonly int VIDIOC_ENUM_FMT = Ioctl.IOWR('V', 2, typeof(v4l2_fmtdesc));
- public static readonly int VIDIOC_G_FMT = Ioctl.IOWR('V', 4, typeof(v4l2_format));
- public static readonly int VIDIOC_S_FMT = Ioctl.IOWR('V', 5, typeof(v4l2_format));
- public static readonly int VIDIOC_REQBUFS = Ioctl.IOWR('V', 8, typeof(v4l2_requestbuffers));
- public static readonly int VIDIOC_QUERYBUF = Ioctl.IOWR('V', 9, typeof(v4l2_buffer));
- public static readonly int VIDIOC_OVERLAY = Ioctl.IOW('V', 14, typeof(int));
- public static readonly int VIDIOC_QBUF = Ioctl.IOWR('V', 15, typeof(v4l2_buffer));
- public static readonly int VIDIOC_DQBUF = Ioctl.IOWR('V', 17, typeof(v4l2_buffer));
- public static readonly int VIDIOC_STREAMON = Ioctl.IOW('V', 18, typeof(int));
- public static readonly int VIDIOC_STREAMOFF = Ioctl.IOW('V', 19, typeof(int));
- public static readonly int VIDIOC_G_PARM = Ioctl.IOWR('V', 21, typeof(v4l2_streamparm));
- public static readonly int VIDIOC_S_PARM = Ioctl.IOWR('V', 22, typeof(v4l2_streamparm));
- public static readonly int VIDIOC_G_CTRL = Ioctl.IOWR('V', 27, typeof(v4l2_control));
- public static readonly int VIDIOC_S_CTRL = Ioctl.IOWR('V', 28, typeof(v4l2_control));
- public static readonly int VIDIOC_QUERYCTRL = Ioctl.IOWR('V', 36, typeof(v4l2_queryctrl));
- public static readonly int VIDIOC_G_INPUT = Ioctl.IOR('V', 38, typeof(int));
- public static readonly int VIDIOC_S_INPUT = Ioctl.IOWR('V', 39, typeof(int));
- public static readonly int VIDIOC_G_OUTPUT = Ioctl.IOR('V', 46, typeof(int));
- public static readonly int VIDIOC_S_OUTPUT = Ioctl.IOWR('V', 47, typeof(int));
- public static readonly int VIDIOC_CROPCAP = Ioctl.IOWR('V', 58, typeof(v4l2_cropcap));
- public static readonly int VIDIOC_G_CROP = Ioctl.IOWR('V', 59, typeof(v4l2_crop));
- public static readonly int VIDIOC_S_CROP = Ioctl.IOW('V', 60, typeof(v4l2_crop));
- public static readonly int VIDIOC_TRY_FMT = Ioctl.IOWR('V', 64, typeof(v4l2_format));
- public static readonly int VIDIOC_G_PRIORITY = Ioctl.IOR('V', 67, typeof(uint));
- public static readonly int VIDIOC_S_PRIORITY = Ioctl.IOW('V', 68, typeof(uint));
- public static readonly int VIDIOC_ENUM_FRAMESIZES = Ioctl.IOWR('V', 74, typeof(v4l2_frmsizeenum));
- public static readonly int VIDIOC_ENUM_FRAMEINTERVALS = Ioctl.IOWR('V', 75, typeof(v4l2_frmivalenum));
- public static readonly int VIDIOC_PREPARE_BUF = Ioctl.IOWR('V', 93, typeof(v4l2_buffer));
+ public static readonly int VIDIOC_QUERYCAP = Ioctl.IOR('V', 0, typeof(v4l2_capability));
+ public static readonly int VIDIOC_ENUM_FMT = Ioctl.IOWR('V', 2, typeof(v4l2_fmtdesc));
+ public static readonly int VIDIOC_G_FMT = Ioctl.IOWR('V', 4, typeof(v4l2_format));
+ public static readonly int VIDIOC_S_FMT = Ioctl.IOWR('V', 5, typeof(v4l2_format));
+ public static readonly int VIDIOC_REQBUFS = Ioctl.IOWR('V', 8, typeof(v4l2_requestbuffers));
+ public static readonly int VIDIOC_QUERYBUF = Ioctl.IOWR('V', 9, typeof(v4l2_buffer));
+ public static readonly int VIDIOC_OVERLAY = Ioctl.IOW('V', 14, typeof(int));
+ public static readonly int VIDIOC_QBUF = Ioctl.IOWR('V', 15, typeof(v4l2_buffer));
+ public static readonly int VIDIOC_DQBUF = Ioctl.IOWR('V', 17, typeof(v4l2_buffer));
+ public static readonly int VIDIOC_STREAMON = Ioctl.IOW('V', 18, typeof(int));
+ public static readonly int VIDIOC_STREAMOFF = Ioctl.IOW('V', 19, typeof(int));
+ public static readonly int VIDIOC_G_PARM = Ioctl.IOWR('V', 21, typeof(v4l2_streamparm));
+ public static readonly int VIDIOC_S_PARM = Ioctl.IOWR('V', 22, typeof(v4l2_streamparm));
+ public static readonly int VIDIOC_G_CTRL = Ioctl.IOWR('V', 27, typeof(v4l2_control));
+ public static readonly int VIDIOC_S_CTRL = Ioctl.IOWR('V', 28, typeof(v4l2_control));
+ public static readonly int VIDIOC_QUERYCTRL = Ioctl.IOWR('V', 36, typeof(v4l2_queryctrl));
+ public static readonly int VIDIOC_G_INPUT = Ioctl.IOR('V', 38, typeof(int));
+ public static readonly int VIDIOC_S_INPUT = Ioctl.IOWR('V', 39, typeof(int));
+ public static readonly int VIDIOC_G_OUTPUT = Ioctl.IOR('V', 46, typeof(int));
+ public static readonly int VIDIOC_S_OUTPUT = Ioctl.IOWR('V', 47, typeof(int));
+ public static readonly int VIDIOC_CROPCAP = Ioctl.IOWR('V', 58, typeof(v4l2_cropcap));
+ public static readonly int VIDIOC_G_CROP = Ioctl.IOWR('V', 59, typeof(v4l2_crop));
+ public static readonly int VIDIOC_S_CROP = Ioctl.IOW('V', 60, typeof(v4l2_crop));
+ public static readonly int VIDIOC_TRY_FMT = Ioctl.IOWR('V', 64, typeof(v4l2_format));
+ public static readonly int VIDIOC_G_PRIORITY = Ioctl.IOR('V', 67, typeof(uint));
+ public static readonly int VIDIOC_S_PRIORITY = Ioctl.IOW('V', 68, typeof(uint));
+ public static readonly int VIDIOC_ENUM_FRAMESIZES = Ioctl.IOWR('V', 74, typeof(v4l2_frmsizeenum));
+ public static readonly int VIDIOC_ENUM_FRAMEINTERVALS = Ioctl.IOWR('V', 75, typeof(v4l2_frmivalenum));
+ public static readonly int VIDIOC_PREPARE_BUF = Ioctl.IOWR('V', 93, typeof(v4l2_buffer));
- public static void PrintConstants()
- {
- Console.WriteLine($" internal enum VideoSettings : int");
- Console.WriteLine($" {{");
- Console.WriteLine($" {nameof(VIDIOC_QUERYCAP)} = {VIDIOC_QUERYCAP},");
- Console.WriteLine($" {nameof(VIDIOC_ENUM_FMT)} = {VIDIOC_ENUM_FMT},");
- Console.WriteLine($" {nameof(VIDIOC_G_FMT)} = {VIDIOC_G_FMT},");
- Console.WriteLine($" {nameof(VIDIOC_S_FMT)} = {VIDIOC_S_FMT},");
- Console.WriteLine($" {nameof(VIDIOC_REQBUFS)} = {VIDIOC_REQBUFS},");
- Console.WriteLine($" {nameof(VIDIOC_QUERYBUF)} = {VIDIOC_QUERYBUF},");
- Console.WriteLine($" {nameof(VIDIOC_OVERLAY)} = {VIDIOC_OVERLAY},");
- Console.WriteLine($" {nameof(VIDIOC_QBUF)} = {VIDIOC_QBUF},");
- Console.WriteLine($" {nameof(VIDIOC_DQBUF)} = {VIDIOC_DQBUF},");
- Console.WriteLine($" {nameof(VIDIOC_STREAMON)} = {VIDIOC_STREAMON},");
- Console.WriteLine($" {nameof(VIDIOC_STREAMOFF)} = {VIDIOC_STREAMOFF},");
- Console.WriteLine($" {nameof(VIDIOC_G_PARM)} = {VIDIOC_G_PARM},");
- Console.WriteLine($" {nameof(VIDIOC_S_PARM)} = {VIDIOC_S_PARM},");
- Console.WriteLine($" {nameof(VIDIOC_G_CTRL)} = {VIDIOC_G_CTRL},");
- Console.WriteLine($" {nameof(VIDIOC_S_CTRL)} = {VIDIOC_S_CTRL},");
- Console.WriteLine($" {nameof(VIDIOC_QUERYCTRL)} = {VIDIOC_QUERYCTRL},");
- Console.WriteLine($" {nameof(VIDIOC_G_INPUT)} = {VIDIOC_G_INPUT},");
- Console.WriteLine($" {nameof(VIDIOC_S_INPUT)} = {VIDIOC_S_INPUT},");
- Console.WriteLine($" {nameof(VIDIOC_G_OUTPUT)} = {VIDIOC_G_OUTPUT},");
- Console.WriteLine($" {nameof(VIDIOC_S_OUTPUT)} = {VIDIOC_S_OUTPUT},");
- Console.WriteLine($" {nameof(VIDIOC_CROPCAP)} = {VIDIOC_CROPCAP},");
- Console.WriteLine($" {nameof(VIDIOC_G_CROP)} = {VIDIOC_G_CROP},");
- Console.WriteLine($" {nameof(VIDIOC_S_CROP)} = {VIDIOC_S_CROP},");
- Console.WriteLine($" {nameof(VIDIOC_TRY_FMT)} = {VIDIOC_TRY_FMT},");
- Console.WriteLine($" {nameof(VIDIOC_G_PRIORITY)} = {VIDIOC_G_PRIORITY},");
- Console.WriteLine($" {nameof(VIDIOC_S_PRIORITY)} = {VIDIOC_S_PRIORITY},");
- Console.WriteLine($" {nameof(VIDIOC_ENUM_FRAMESIZES)} = {VIDIOC_ENUM_FRAMESIZES},");
- Console.WriteLine($" {nameof(VIDIOC_ENUM_FRAMEINTERVALS)} = {VIDIOC_ENUM_FRAMEINTERVALS},");
- Console.WriteLine($" {nameof(VIDIOC_PREPARE_BUF)} = {VIDIOC_PREPARE_BUF},");
- Console.WriteLine($" }}");
- }
+ public static void PrintConstants()
+ {
+ Console.WriteLine($" internal enum VideoSettings : int");
+ Console.WriteLine($" {{");
+ Console.WriteLine($" {nameof(VIDIOC_QUERYCAP)} = {VIDIOC_QUERYCAP},");
+ Console.WriteLine($" {nameof(VIDIOC_ENUM_FMT)} = {VIDIOC_ENUM_FMT},");
+ Console.WriteLine($" {nameof(VIDIOC_G_FMT)} = {VIDIOC_G_FMT},");
+ Console.WriteLine($" {nameof(VIDIOC_S_FMT)} = {VIDIOC_S_FMT},");
+ Console.WriteLine($" {nameof(VIDIOC_REQBUFS)} = {VIDIOC_REQBUFS},");
+ Console.WriteLine($" {nameof(VIDIOC_QUERYBUF)} = {VIDIOC_QUERYBUF},");
+ Console.WriteLine($" {nameof(VIDIOC_OVERLAY)} = {VIDIOC_OVERLAY},");
+ Console.WriteLine($" {nameof(VIDIOC_QBUF)} = {VIDIOC_QBUF},");
+ Console.WriteLine($" {nameof(VIDIOC_DQBUF)} = {VIDIOC_DQBUF},");
+ Console.WriteLine($" {nameof(VIDIOC_STREAMON)} = {VIDIOC_STREAMON},");
+ Console.WriteLine($" {nameof(VIDIOC_STREAMOFF)} = {VIDIOC_STREAMOFF},");
+ Console.WriteLine($" {nameof(VIDIOC_G_PARM)} = {VIDIOC_G_PARM},");
+ Console.WriteLine($" {nameof(VIDIOC_S_PARM)} = {VIDIOC_S_PARM},");
+ Console.WriteLine($" {nameof(VIDIOC_G_CTRL)} = {VIDIOC_G_CTRL},");
+ Console.WriteLine($" {nameof(VIDIOC_S_CTRL)} = {VIDIOC_S_CTRL},");
+ Console.WriteLine($" {nameof(VIDIOC_QUERYCTRL)} = {VIDIOC_QUERYCTRL},");
+ Console.WriteLine($" {nameof(VIDIOC_G_INPUT)} = {VIDIOC_G_INPUT},");
+ Console.WriteLine($" {nameof(VIDIOC_S_INPUT)} = {VIDIOC_S_INPUT},");
+ Console.WriteLine($" {nameof(VIDIOC_G_OUTPUT)} = {VIDIOC_G_OUTPUT},");
+ Console.WriteLine($" {nameof(VIDIOC_S_OUTPUT)} = {VIDIOC_S_OUTPUT},");
+ Console.WriteLine($" {nameof(VIDIOC_CROPCAP)} = {VIDIOC_CROPCAP},");
+ Console.WriteLine($" {nameof(VIDIOC_G_CROP)} = {VIDIOC_G_CROP},");
+ Console.WriteLine($" {nameof(VIDIOC_S_CROP)} = {VIDIOC_S_CROP},");
+ Console.WriteLine($" {nameof(VIDIOC_TRY_FMT)} = {VIDIOC_TRY_FMT},");
+ Console.WriteLine($" {nameof(VIDIOC_G_PRIORITY)} = {VIDIOC_G_PRIORITY},");
+ Console.WriteLine($" {nameof(VIDIOC_S_PRIORITY)} = {VIDIOC_S_PRIORITY},");
+ Console.WriteLine($" {nameof(VIDIOC_ENUM_FRAMESIZES)} = {VIDIOC_ENUM_FRAMESIZES},");
+ Console.WriteLine($" {nameof(VIDIOC_ENUM_FRAMEINTERVALS)} = {VIDIOC_ENUM_FRAMEINTERVALS},");
+ Console.WriteLine($" {nameof(VIDIOC_PREPARE_BUF)} = {VIDIOC_PREPARE_BUF},");
+ Console.WriteLine($" }}");
}
}
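
`PrintConstants` appears to exist so the hard-coded `VideoSettings` enum (next file) can be regenerated on a Linux machine, since computing the codes needs `Marshal.SizeOf` on the native structs. An illustrative sanity check that the two stay in sync:

    // The runtime-computed code should equal the baked-in enum value.
    int computed = RawVideoSettings.VIDIOC_STREAMON; // Ioctl.IOW('V', 18, typeof(int))
    int baked = (int)VideoSettings.VIDIOC_STREAMON;  // 1074026002
    System.Console.WriteLine(computed == baked);     // expected: True
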
diff --git a/SeeShark/Interop/Libc/V4l2InputFormat.cs b/SeeShark/Interop/Libc/V4l2InputFormat.cs
index b7a9a2e..8da4274 100644
--- a/SeeShark/Interop/Libc/V4l2InputFormat.cs
+++ b/SeeShark/Interop/Libc/V4l2InputFormat.cs
@@ -2,1011 +2,1010 @@
// This file is part of SeeShark.
// SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
-namespace SeeShark.Interop.Libc
+namespace SeeShark.Interop.Libc;
+
+/// <summary>
+/// The pixel format or codec of a video device.
+/// </summary>
+internal enum V4l2InputFormat : uint
{
///
- /// The pixel format or codec of a video device.
- ///
- internal enum V4l2InputFormat : uint
- {
- ///
- /// RGB332
- ///
- RGB332 = 826427218,
-
- ///
- /// RGB444
- ///
- RGB444 = 875836498,
-
- ///
- /// ARGB444
- ///
- ARGB444 = 842093121,
-
- ///
- /// XRGB444
- ///
- XRGB444 = 842093144,
-
- ///
- /// RGBA444
- ///
- RGBA444 = 842088786,
-
- ///
- /// RGBX444
- ///
- RGBX444 = 842094674,
-
- ///
- /// ABGR444
- ///
- ABGR444 = 842089025,
-
- ///
- /// XBGR444
- ///
- XBGR444 = 842089048,
-
- ///
- /// BGRA444
- ///
- BGRA444 = 842088775,
-
- ///
- /// BGRX444
- ///
- BGRX444 = 842094658,
-
- ///
- /// RGB555
- ///
- RGB555 = 1329743698,
-
- ///
- /// ARGB555
- ///
- ARGB555 = 892424769,
-
- ///
- /// XRGB555
- ///
- XRGB555 = 892424792,
-
- ///
- /// RGBA555
- ///
- RGBA555 = 892420434,
-
- ///
- /// RGBX555
- ///
- RGBX555 = 892426322,
-
- ///
- /// ABGR555
- ///
- ABGR555 = 892420673,
-
- ///
- /// XBGR555
- ///
- XBGR555 = 892420696,
-
- ///
- /// BGRA555
- ///
- BGRA555 = 892420418,
-
- ///
- /// BGRX555
- ///
- BGRX555 = 892426306,
-
- ///
- /// RGB565
- ///
- RGB565 = 1346520914,
-
- ///
- /// RGB555X
- ///
- RGB555X = 1363298130,
-
- ///
- /// ARGB555X
- ///
- ARGB555X = 3039908417,
-
- ///
- /// XRGB555X
- ///
- XRGB555X = 3039908440,
-
- ///
- /// RGB565X
- ///
- RGB565X = 1380075346,
-
- ///
- /// BGR666
- ///
- BGR666 = 1213351746,
-
- ///
- /// BGR24
- ///
- BGR24 = 861030210,
-
- ///
- /// RGB24
- ///
- RGB24 = 859981650,
-
- ///
- /// BGR32
- ///
- BGR32 = 877807426,
-
- ///
- /// ABGR32
- ///
- ABGR32 = 875713089,
-
- ///
- /// XBGR32
- ///
- XBGR32 = 875713112,
-
- ///
- /// BGRA32
- ///
- BGRA32 = 875708754,
-
- ///
- /// BGRX32
- ///
- BGRX32 = 875714642,
-
- ///
- /// RGB32
- ///
- RGB32 = 876758866,
-
- ///
- /// RGBA32
- ///
- RGBA32 = 875708993,
-
- ///
- /// RGBX32
- ///
- RGBX32 = 875709016,
-
- ///
- /// ARGB32
- ///
- ARGB32 = 875708738,
-
- ///
- /// XRGB32
- ///
- XRGB32 = 875714626,
-
- ///
- /// GREY
- ///
- GREY = 1497715271,
-
- ///
- /// Y4
- ///
- Y4 = 540291161,
-
- ///
- /// Y6
- ///
- Y6 = 540422233,
-
- ///
- /// Y10
- ///
- Y10 = 540029273,
-
- ///
- /// Y12
- ///
- Y12 = 540160345,
-
- ///
- /// Y16
- ///
- Y16 = 540422489,
-
- ///
- /// Y16_BE
- ///
- Y16_BE = 2687906137,
-
- ///
- /// Y10BPACK
- ///
- Y10BPACK = 1110454617,
-
- ///
- /// Y10P
- ///
- Y10P = 1345335641,
-
- ///
- /// PAL8
- ///
- PAL8 = 944521552,
-
- ///
- /// UV8
- ///
- UV8 = 540563029,
-
- ///
- /// YUYV
- ///
- YUYV = 1448695129,
-
- ///
- /// YYUV
- ///
- YYUV = 1448434009,
-
- ///
- /// YVYU
- ///
- YVYU = 1431918169,
-
- ///
- /// UYVY
- ///
- UYVY = 1498831189,
-
- ///
- /// VYUY
- ///
- VYUY = 1498765654,
-
- ///
- /// Y41P
- ///
- Y41P = 1345401945,
-
- ///
- /// YUV444
- ///
- YUV444 = 875836505,
-
- ///
- /// YUV555
- ///
- YUV555 = 1331058009,
-
- ///
- /// YUV565
- ///
- YUV565 = 1347835225,
-
- ///
- /// YUV32
- ///
- YUV32 = 878073177,
-
- ///
- /// AYUV32
- ///
- AYUV32 = 1448433985,
-
- ///
- /// XYUV32
- ///
- XYUV32 = 1448434008,
-
- ///
- /// VUYA32
- ///
- VUYA32 = 1096373590,
-
- ///
- /// VUYX32
- ///
- VUYX32 = 1482249558,
-
- ///
- /// HI240
- ///
- HI240 = 875710792,
-
- ///
- /// HM12
- ///
- HM12 = 842091848,
-
- ///
- /// M420
- ///
- M420 = 808596557,
-
- ///
- /// NV12
- ///
- NV12 = 842094158,
-
- ///
- /// NV21
- ///
- NV21 = 825382478,
-
- ///
- /// NV16
- ///
- NV16 = 909203022,
-
- ///
- /// NV61
- ///
- NV61 = 825644622,
-
- ///
- /// NV24
- ///
- NV24 = 875714126,
-
- ///
- /// NV42
- ///
- NV42 = 842290766,
-
- ///
- /// NV12M
- ///
- NV12M = 842091854,
-
- ///
- /// NV21M
- ///
- NV21M = 825380174,
-
- ///
- /// NV16M
- ///
- NV16M = 909200718,
-
- ///
- /// NV61M
- ///
- NV61M = 825642318,
-
- ///
- /// NV12MT
- ///
- NV12MT = 842091860,
-
- ///
- /// NV12MT_16X16
- ///
- NV12MT_16X16 = 842091862,
-
- ///
- /// YUV410
- ///
- YUV410 = 961959257,
-
- ///
- /// YVU410
- ///
- YVU410 = 961893977,
-
- ///
- /// YUV411P
- ///
- YUV411P = 1345401140,
-
- ///
- /// YUV420
- ///
- YUV420 = 842093913,
-
- ///
- /// YVU420
- ///
- YVU420 = 842094169,
-
- ///
- /// YUV422P
- ///
- YUV422P = 1345466932,
-
- ///
- /// YUV420M
- ///
- YUV420M = 842091865,
-
- ///
- /// YVU420M
- ///
- YVU420M = 825380185,
-
- ///
- /// YUV422M
- ///
- YUV422M = 909200729,
-
- ///
- /// YVU422M
- ///
- YVU422M = 825642329,
-
- ///
- /// YUV444M
- ///
- YUV444M = 875711833,
-
- ///
- /// YVU444M
- ///
- YVU444M = 842288473,
-
- ///
- /// SBGGR8
- ///
- SBGGR8 = 825770306,
-
- ///
- /// SGBRG8
- ///
- SGBRG8 = 1196573255,
-
- ///
- /// SGRBG8
- ///
- SGRBG8 = 1195528775,
-
- ///
- /// SRGGB8
- ///
- SRGGB8 = 1111967570,
-
- ///
- /// SBGGR10
- ///
- SBGGR10 = 808535874,
-
- ///
- /// SGBRG10
- ///
- SGBRG10 = 808534599,
-
- ///
- /// SGRBG10
- ///
- SGRBG10 = 808534338,
-
- ///
- /// SRGGB10
- ///
- SRGGB10 = 808535890,
-
- ///
- /// SBGGR10P
- ///
- SBGGR10P = 1094795888,
-
- ///
- /// SGBRG10P
- ///
- SGBRG10P = 1094797168,
-
- ///
- /// SGRBG10P
- ///
- SGRBG10P = 1094805360,
-
- ///
- /// SRGGB10P
- ///
- SRGGB10P = 1094799984,
-
- ///
- /// SBGGR10ALAW8
- ///
- SBGGR10ALAW8 = 943800929,
-
- ///
- /// SGBRG10ALAW8
- ///
- SGBRG10ALAW8 = 943802209,
-
- ///
- /// SGRBG10ALAW8
- ///
- SGRBG10ALAW8 = 943810401,
-
- ///
- /// SRGGB10ALAW8
- ///
- SRGGB10ALAW8 = 943805025,
-
- ///
- /// SBGGR10DPCM8
- ///
- SBGGR10DPCM8 = 943800930,
-
- ///
- /// SGBRG10DPCM8
- ///
- SGBRG10DPCM8 = 943802210,
-
- ///
- /// SGRBG10DPCM8
- ///
- SGRBG10DPCM8 = 808535106,
-
- ///
- /// SRGGB10DPCM8
- ///
- SRGGB10DPCM8 = 943805026,
-
- ///
- /// SBGGR12
- ///
- SBGGR12 = 842090306,
-
- ///
- /// SGBRG12
- ///
- SGBRG12 = 842089031,
-
- ///
- /// SGRBG12
- ///
- SGRBG12 = 842088770,
-
- ///
- /// SRGGB12
- ///
- SRGGB12 = 842090322,
-
- ///
- /// SBGGR12P
- ///
- SBGGR12P = 1128481392,
-
- ///
- /// SGBRG12P
- ///
- SGBRG12P = 1128482672,
-
- ///
- /// SGRBG12P
- ///
- SGRBG12P = 1128490864,
-
- ///
- /// SRGGB12P
- ///
- SRGGB12P = 1128485488,
-
- ///
- /// SBGGR14P
- ///
- SBGGR14P = 1162166896,
-
- ///
- /// SGBRG14P
- ///
- SGBRG14P = 1162168176,
-
- ///
- /// SGRBG14P
- ///
- SGRBG14P = 1162176368,
-
- ///
- /// SRGGB14P
- ///
- SRGGB14P = 1162170992,
-
- ///
- /// SBGGR16
- ///
- SBGGR16 = 844257602,
-
- ///
- /// SGBRG16
- ///
- SGBRG16 = 909197895,
-
- ///
- /// SGRBG16
- ///
- SGRBG16 = 909201991,
-
- ///
- /// SRGGB16
- ///
- SRGGB16 = 909199186,
-
- ///
- /// HSV24
- ///
- HSV24 = 861295432,
-
- ///
- /// HSV32
- ///
- HSV32 = 878072648,
-
- ///
- /// MJPEG
- ///
- MJPEG = 1196444237,
-
- ///
- /// JPEG
- ///
- JPEG = 1195724874,
-
- ///
- /// DV
- ///
- DV = 1685288548,
-
- ///
- /// MPEG
- ///
- MPEG = 1195724877,
-
- ///
- /// H264
- ///
- H264 = 875967048,
-
- ///
- /// H264_NO_SC
- ///
- H264_NO_SC = 826496577,
-
- ///
- /// H264_MVC
- ///
- H264_MVC = 875967053,
-
- ///
- /// H263
- ///
- H263 = 859189832,
-
- ///
- /// MPEG1
- ///
- MPEG1 = 826757197,
-
- ///
- /// MPEG2
- ///
- MPEG2 = 843534413,
-
- ///
- /// MPEG2_SLICE
- ///
- MPEG2_SLICE = 1395803981,
-
- ///
- /// MPEG4
- ///
- MPEG4 = 877088845,
-
- ///
- /// XVID
- ///
- XVID = 1145656920,
-
- ///
- /// VC1_ANNEX_G
- ///
- VC1_ANNEX_G = 1194410838,
-
- ///
- /// VC1_ANNEX_L
- ///
- VC1_ANNEX_L = 1278296918,
-
- ///
- /// VP8
- ///
- VP8 = 808996950,
-
- ///
- /// VP9
- ///
- VP9 = 809062486,
-
- ///
- /// HEVC
- ///
- HEVC = 1129727304,
-
- ///
- /// FWHT
- ///
- FWHT = 1414027078,
-
- ///
- /// FWHT_STATELESS
- ///
- FWHT_STATELESS = 1213679187,
-
- ///
- /// CPIA1
- ///
- CPIA1 = 1095323715,
-
- ///
- /// WNVA
- ///
- WNVA = 1096175191,
-
- ///
- /// SN9C10X
- ///
- SN9C10X = 808532307,
-
- ///
- /// SN9C20X_I420
- ///
- SN9C20X_I420 = 808597843,
-
- ///
- /// PWC1
- ///
- PWC1 = 826496848,
-
- ///
- /// PWC2
- ///
- PWC2 = 843274064,
-
- ///
- /// ET61X251
- ///
- ET61X251 = 892483141,
-
- ///
- /// SPCA501
- ///
- SPCA501 = 825242963,
-
- ///
- /// SPCA505
- ///
- SPCA505 = 892351827,
-
- ///
- /// SPCA508
- ///
- SPCA508 = 942683475,
-
- ///
- /// SPCA561
- ///
- SPCA561 = 825636179,
-
- ///
- /// PAC207
- ///
- PAC207 = 925905488,
-
- ///
- /// MR97310A
- ///
- MR97310A = 808530765,
-
- ///
- /// JL2005BCD
- ///
- JL2005BCD = 808602698,
-
- ///
- /// SN9C2028
- ///
- SN9C2028 = 1481527123,
-
- ///
- /// SQ905C
- ///
- SQ905C = 1127559225,
-
- ///
- /// PJPG
- ///
- PJPG = 1196444240,
-
- ///
- /// OV511
- ///
- OV511 = 825308495,
-
- ///
- /// OV518
- ///
- OV518 = 942749007,
-
- ///
- /// STV0680
- ///
- STV0680 = 808990291,
-
- ///
- /// TM6000
- ///
- TM6000 = 808865108,
-
- ///
- /// CIT_YYVYUY
- ///
- CIT_YYVYUY = 1448364355,
-
- ///
- /// KONICA420
- ///
- KONICA420 = 1229868875,
-
- ///
- /// JPGL
- ///
- JPGL = 1279742026,
-
- ///
- /// SE401
- ///
- SE401 = 825242707,
-
- ///
- /// S5C_UYVY_JPG
- ///
- S5C_UYVY_JPG = 1229141331,
-
- ///
- /// Y8I
- ///
- Y8I = 541669465,
-
- ///
- /// Y12I
- ///
- Y12I = 1228026201,
-
- ///
- /// Z16
- ///
- Z16 = 540422490,
-
- ///
- /// MT21C
- ///
- MT21C = 825381965,
-
- ///
- /// INZI
- ///
- INZI = 1230655049,
-
- ///
- /// SUNXI_TILED_NV12
- ///
- SUNXI_TILED_NV12 = 842093651,
-
- ///
- /// CNF4
- ///
- CNF4 = 877022787,
-
- ///
- /// IPU3_SBGGR10
- ///
- IPU3_SBGGR10 = 1647538281,
-
- ///
- /// IPU3_SGBRG10
- ///
- IPU3_SGBRG10 = 1731424361,
-
- ///
- /// IPU3_SGRBG10
- ///
- IPU3_SGRBG10 = 1194553449,
-
- ///
- /// IPU3_SRGGB10
- ///
- IPU3_SRGGB10 = 1915973737,
-
- ///
- /// CU8
- ///
- CU8 = 942691651,
-
- ///
- /// CU16LE
- ///
- CU16LE = 909202755,
-
- ///
- /// CS8
- ///
- CS8 = 942691139,
-
- ///
- /// CS14LE
- ///
- CS14LE = 875647811,
-
- ///
- /// RU12LE
- ///
- RU12LE = 842093906,
-
- ///
- /// PCU16BE
- ///
- PCU16BE = 909198160,
-
- ///
- /// PCU18BE
- ///
- PCU18BE = 942752592,
-
- ///
- /// PCU20BE
- ///
- PCU20BE = 808600400,
-
- ///
- /// DELTA_TD16
- ///
- DELTA_TD16 = 909198420,
-
- ///
- /// DELTA_TD08
- ///
- DELTA_TD08 = 942687316,
-
- ///
- /// TU16
- ///
- TU16 = 909202772,
-
- ///
- /// TU08
- ///
- TU08 = 942691668,
-
- ///
- /// VSP1_HGO
- ///
- VSP1_HGO = 1213223766,
-
- ///
- /// VSP1_HGT
- ///
- VSP1_HGT = 1414550358,
-
- ///
- /// UVC
- ///
- UVC = 1212372565,
-
- ///
- /// D4XX
- ///
- D4XX = 1482175556,
- }
+ /// RGB332
+ ///
+ RGB332 = 826427218,
+
+ ///
+ /// RGB444
+ ///
+ RGB444 = 875836498,
+
+ ///
+ /// ARGB444
+ ///
+ ARGB444 = 842093121,
+
+ ///
+ /// XRGB444
+ ///
+ XRGB444 = 842093144,
+
+ ///
+ /// RGBA444
+ ///
+ RGBA444 = 842088786,
+
+ ///
+ /// RGBX444
+ ///
+ RGBX444 = 842094674,
+
+ ///
+ /// ABGR444
+ ///
+ ABGR444 = 842089025,
+
+ ///
+ /// XBGR444
+ ///
+ XBGR444 = 842089048,
+
+ ///
+ /// BGRA444
+ ///
+ BGRA444 = 842088775,
+
+ ///
+ /// BGRX444
+ ///
+ BGRX444 = 842094658,
+
+ ///
+ /// RGB555
+ ///
+ RGB555 = 1329743698,
+
+ ///
+ /// ARGB555
+ ///
+ ARGB555 = 892424769,
+
+ ///
+ /// XRGB555
+ ///
+ XRGB555 = 892424792,
+
+ ///
+ /// RGBA555
+ ///
+ RGBA555 = 892420434,
+
+ ///
+ /// RGBX555
+ ///
+ RGBX555 = 892426322,
+
+ ///
+ /// ABGR555
+ ///
+ ABGR555 = 892420673,
+
+ ///
+ /// XBGR555
+ ///
+ XBGR555 = 892420696,
+
+ ///
+ /// BGRA555
+ ///
+ BGRA555 = 892420418,
+
+ ///
+ /// BGRX555
+ ///
+ BGRX555 = 892426306,
+
+ ///
+ /// RGB565
+ ///
+ RGB565 = 1346520914,
+
+ ///
+ /// RGB555X
+ ///
+ RGB555X = 1363298130,
+
+ ///
+ /// ARGB555X
+ ///
+ ARGB555X = 3039908417,
+
+ ///
+ /// XRGB555X
+ ///
+ XRGB555X = 3039908440,
+
+ ///
+ /// RGB565X
+ ///
+ RGB565X = 1380075346,
+
+ ///
+ /// BGR666
+ ///
+ BGR666 = 1213351746,
+
+ ///
+ /// BGR24
+ ///
+ BGR24 = 861030210,
+
+ ///
+ /// RGB24
+ ///
+ RGB24 = 859981650,
+
+ ///
+ /// BGR32
+ ///
+ BGR32 = 877807426,
+
+ ///
+ /// ABGR32
+ ///
+ ABGR32 = 875713089,
+
+ ///
+ /// XBGR32
+ ///
+ XBGR32 = 875713112,
+
+ ///
+ /// BGRA32
+ ///
+ BGRA32 = 875708754,
+
+ ///
+ /// BGRX32
+ ///
+ BGRX32 = 875714642,
+
+ ///
+ /// RGB32
+ ///
+ RGB32 = 876758866,
+
+ ///
+ /// RGBA32
+ ///
+ RGBA32 = 875708993,
+
+ ///
+ /// RGBX32
+ ///
+ RGBX32 = 875709016,
+
+ ///
+ /// ARGB32
+ ///
+ ARGB32 = 875708738,
+
+ ///
+ /// XRGB32
+ ///
+ XRGB32 = 875714626,
+
+ ///
+ /// GREY
+ ///
+ GREY = 1497715271,
+
+ ///
+ /// Y4
+ ///
+ Y4 = 540291161,
+
+ ///
+ /// Y6
+ ///
+ Y6 = 540422233,
+
+ ///
+ /// Y10
+ ///
+ Y10 = 540029273,
+
+ ///
+ /// Y12
+ ///
+ Y12 = 540160345,
+
+ ///
+ /// Y16
+ ///
+ Y16 = 540422489,
+
+ ///
+ /// Y16_BE
+ ///
+ Y16_BE = 2687906137,
+
+ ///
+ /// Y10BPACK
+ ///
+ Y10BPACK = 1110454617,
+
+ ///
+ /// Y10P
+ ///
+ Y10P = 1345335641,
+
+ ///
+ /// PAL8
+ ///
+ PAL8 = 944521552,
+
+ ///
+ /// UV8
+ ///
+ UV8 = 540563029,
+
+ ///
+ /// YUYV
+ ///
+ YUYV = 1448695129,
+
+ ///
+ /// YYUV
+ ///
+ YYUV = 1448434009,
+
+ ///
+ /// YVYU
+ ///
+ YVYU = 1431918169,
+
+ ///
+ /// UYVY
+ ///
+ UYVY = 1498831189,
+
+ ///
+ /// VYUY
+ ///
+ VYUY = 1498765654,
+
+ ///
+ /// Y41P
+ ///
+ Y41P = 1345401945,
+
+ ///
+ /// YUV444
+ ///
+ YUV444 = 875836505,
+
+ ///
+ /// YUV555
+ ///
+ YUV555 = 1331058009,
+
+ ///
+ /// YUV565
+ ///
+ YUV565 = 1347835225,
+
+ ///
+ /// YUV32
+ ///
+ YUV32 = 878073177,
+
+ ///
+ /// AYUV32
+ ///
+ AYUV32 = 1448433985,
+
+ ///
+ /// XYUV32
+ ///
+ XYUV32 = 1448434008,
+
+ ///
+ /// VUYA32
+ ///
+ VUYA32 = 1096373590,
+
+ ///
+ /// VUYX32
+ ///
+ VUYX32 = 1482249558,
+
+ ///
+ /// HI240
+ ///
+ HI240 = 875710792,
+
+ ///
+ /// HM12
+ ///
+ HM12 = 842091848,
+
+ ///
+ /// M420
+ ///
+ M420 = 808596557,
+
+ ///
+ /// NV12
+ ///
+ NV12 = 842094158,
+
+ ///
+ /// NV21
+ ///
+ NV21 = 825382478,
+
+ ///
+ /// NV16
+ ///
+ NV16 = 909203022,
+
+ ///
+ /// NV61
+ ///
+ NV61 = 825644622,
+
+ ///
+ /// NV24
+ ///
+ NV24 = 875714126,
+
+ ///
+ /// NV42
+ ///
+ NV42 = 842290766,
+
+ ///
+ /// NV12M
+ ///
+ NV12M = 842091854,
+
+ ///
+ /// NV21M
+ ///
+ NV21M = 825380174,
+
+ ///
+ /// NV16M
+ ///
+ NV16M = 909200718,
+
+ ///
+ /// NV61M
+ ///
+ NV61M = 825642318,
+
+ ///
+ /// NV12MT
+ ///
+ NV12MT = 842091860,
+
+ ///
+ /// NV12MT_16X16
+ ///
+ NV12MT_16X16 = 842091862,
+
+ ///
+ /// YUV410
+ ///
+ YUV410 = 961959257,
+
+ ///
+ /// YVU410
+ ///
+ YVU410 = 961893977,
+
+ ///
+ /// YUV411P
+ ///
+ YUV411P = 1345401140,
+
+ ///
+ /// YUV420
+ ///
+ YUV420 = 842093913,
+
+ ///
+ /// YVU420
+ ///
+ YVU420 = 842094169,
+
+ ///
+ /// YUV422P
+ ///
+ YUV422P = 1345466932,
+
+ ///
+ /// YUV420M
+ ///
+ YUV420M = 842091865,
+
+ ///
+ /// YVU420M
+ ///
+ YVU420M = 825380185,
+
+ ///
+ /// YUV422M
+ ///
+ YUV422M = 909200729,
+
+ ///
+ /// YVU422M
+ ///
+ YVU422M = 825642329,
+
+ ///
+ /// YUV444M
+ ///
+ YUV444M = 875711833,
+
+ ///
+ /// YVU444M
+ ///
+ YVU444M = 842288473,
+
+ ///
+ /// SBGGR8
+ ///
+ SBGGR8 = 825770306,
+
+ ///
+ /// SGBRG8
+ ///
+ SGBRG8 = 1196573255,
+
+ ///
+ /// SGRBG8
+ ///
+ SGRBG8 = 1195528775,
+
+ ///
+ /// SRGGB8
+ ///
+ SRGGB8 = 1111967570,
+
+ ///
+ /// SBGGR10
+ ///
+ SBGGR10 = 808535874,
+
+ ///
+ /// SGBRG10
+ ///
+ SGBRG10 = 808534599,
+
+ ///
+ /// SGRBG10
+ ///
+ SGRBG10 = 808534338,
+
+ ///
+ /// SRGGB10
+ ///
+ SRGGB10 = 808535890,
+
+ ///
+ /// SBGGR10P
+ ///
+ SBGGR10P = 1094795888,
+
+ ///
+ /// SGBRG10P
+ ///
+ SGBRG10P = 1094797168,
+
+ ///
+ /// SGRBG10P
+ ///
+ SGRBG10P = 1094805360,
+
+ ///
+ /// SRGGB10P
+ ///
+ SRGGB10P = 1094799984,
+
+ ///
+ /// SBGGR10ALAW8
+ ///
+ SBGGR10ALAW8 = 943800929,
+
+ ///
+ /// SGBRG10ALAW8
+ ///
+ SGBRG10ALAW8 = 943802209,
+
+ ///
+ /// SGRBG10ALAW8
+ ///
+ SGRBG10ALAW8 = 943810401,
+
+ ///
+ /// SRGGB10ALAW8
+ ///
+ SRGGB10ALAW8 = 943805025,
+
+ ///
+ /// SBGGR10DPCM8
+ ///
+ SBGGR10DPCM8 = 943800930,
+
+ ///
+ /// SGBRG10DPCM8
+ ///
+ SGBRG10DPCM8 = 943802210,
+
+ ///
+ /// SGRBG10DPCM8
+ ///
+ SGRBG10DPCM8 = 808535106,
+
+ ///
+ /// SRGGB10DPCM8
+ ///
+ SRGGB10DPCM8 = 943805026,
+
+ ///
+ /// SBGGR12
+ ///
+ SBGGR12 = 842090306,
+
+ ///
+ /// SGBRG12
+ ///
+ SGBRG12 = 842089031,
+
+ ///
+ /// SGRBG12
+ ///
+ SGRBG12 = 842088770,
+
+ ///
+ /// SRGGB12
+ ///
+ SRGGB12 = 842090322,
+
+ ///
+ /// SBGGR12P
+ ///
+ SBGGR12P = 1128481392,
+
+ ///
+ /// SGBRG12P
+ ///
+ SGBRG12P = 1128482672,
+
+ ///
+ /// SGRBG12P
+ ///
+ SGRBG12P = 1128490864,
+
+ ///
+ /// SRGGB12P
+ ///
+ SRGGB12P = 1128485488,
+
+ ///
+ /// SBGGR14P
+ ///
+ SBGGR14P = 1162166896,
+
+ ///
+ /// SGBRG14P
+ ///
+ SGBRG14P = 1162168176,
+
+ ///
+ /// SGRBG14P
+ ///
+ SGRBG14P = 1162176368,
+
+ ///
+ /// SRGGB14P
+ ///
+ SRGGB14P = 1162170992,
+
+ ///
+ /// SBGGR16
+ ///
+ SBGGR16 = 844257602,
+
+ ///
+ /// SGBRG16
+ ///
+ SGBRG16 = 909197895,
+
+ ///
+ /// SGRBG16
+ ///
+ SGRBG16 = 909201991,
+
+ ///
+ /// SRGGB16
+ ///
+ SRGGB16 = 909199186,
+
+ ///
+ /// HSV24
+ ///
+ HSV24 = 861295432,
+
+ ///
+ /// HSV32
+ ///
+ HSV32 = 878072648,
+
+ ///
+ /// MJPEG
+ ///
+ MJPEG = 1196444237,
+
+ ///
+ /// JPEG
+ ///
+ JPEG = 1195724874,
+
+ ///
+ /// DV
+ ///
+ DV = 1685288548,
+
+ ///
+ /// MPEG
+ ///
+ MPEG = 1195724877,
+
+ ///
+ /// H264
+ ///
+ H264 = 875967048,
+
+ ///
+ /// H264_NO_SC
+ ///
+ H264_NO_SC = 826496577,
+
+ ///
+ /// H264_MVC
+ ///
+ H264_MVC = 875967053,
+
+ ///
+ /// H263
+ ///
+ H263 = 859189832,
+
+ ///
+ /// MPEG1
+ ///
+ MPEG1 = 826757197,
+
+ ///
+ /// MPEG2
+ ///
+ MPEG2 = 843534413,
+
+ ///
+ /// MPEG2_SLICE
+ ///
+ MPEG2_SLICE = 1395803981,
+
+ ///
+ /// MPEG4
+ ///
+ MPEG4 = 877088845,
+
+ ///
+ /// XVID
+ ///
+ XVID = 1145656920,
+
+ ///
+ /// VC1_ANNEX_G
+ ///
+ VC1_ANNEX_G = 1194410838,
+
+ ///
+ /// VC1_ANNEX_L
+ ///
+ VC1_ANNEX_L = 1278296918,
+
+ ///
+ /// VP8
+ ///
+ VP8 = 808996950,
+
+ ///
+ /// VP9
+ ///
+ VP9 = 809062486,
+
+ ///
+ /// HEVC
+ ///
+ HEVC = 1129727304,
+
+ ///
+ /// FWHT
+ ///
+ FWHT = 1414027078,
+
+ ///
+ /// FWHT_STATELESS
+ ///
+ FWHT_STATELESS = 1213679187,
+
+ ///
+ /// CPIA1
+ ///
+ CPIA1 = 1095323715,
+
+ ///
+ /// WNVA
+ ///
+ WNVA = 1096175191,
+
+ ///
+ /// SN9C10X
+ ///
+ SN9C10X = 808532307,
+
+ ///
+ /// SN9C20X_I420
+ ///
+ SN9C20X_I420 = 808597843,
+
+ ///
+ /// PWC1
+ ///
+ PWC1 = 826496848,
+
+ ///
+ /// PWC2
+ ///
+ PWC2 = 843274064,
+
+ ///
+ /// ET61X251
+ ///
+ ET61X251 = 892483141,
+
+ ///
+ /// SPCA501
+ ///
+ SPCA501 = 825242963,
+
+ ///
+ /// SPCA505
+ ///
+ SPCA505 = 892351827,
+
+ ///
+ /// SPCA508
+ ///
+ SPCA508 = 942683475,
+
+ ///
+ /// SPCA561
+ ///
+ SPCA561 = 825636179,
+
+ ///
+ /// PAC207
+ ///
+ PAC207 = 925905488,
+
+ ///
+ /// MR97310A
+ ///
+ MR97310A = 808530765,
+
+ ///
+ /// JL2005BCD
+ ///
+ JL2005BCD = 808602698,
+
+ ///
+ /// SN9C2028
+ ///
+ SN9C2028 = 1481527123,
+
+ ///
+ /// SQ905C
+ ///
+ SQ905C = 1127559225,
+
+ ///
+ /// PJPG
+ ///
+ PJPG = 1196444240,
+
+ ///
+ /// OV511
+ ///
+ OV511 = 825308495,
+
+ ///
+ /// OV518
+ ///
+ OV518 = 942749007,
+
+ ///
+ /// STV0680
+ ///
+ STV0680 = 808990291,
+
+ ///
+ /// TM6000
+ ///
+ TM6000 = 808865108,
+
+ ///
+ /// CIT_YYVYUY
+ ///
+ CIT_YYVYUY = 1448364355,
+
+ ///
+ /// KONICA420
+ ///
+ KONICA420 = 1229868875,
+
+ ///
+ /// JPGL
+ ///
+ JPGL = 1279742026,
+
+ ///
+ /// SE401
+ ///
+ SE401 = 825242707,
+
+ ///
+ /// S5C_UYVY_JPG
+ ///
+ S5C_UYVY_JPG = 1229141331,
+
+ ///
+ /// Y8I
+ ///
+ Y8I = 541669465,
+
+ ///
+ /// Y12I
+ ///
+ Y12I = 1228026201,
+
+ ///
+ /// Z16
+ ///
+ Z16 = 540422490,
+
+ ///
+ /// MT21C
+ ///
+ MT21C = 825381965,
+
+ ///
+ /// INZI
+ ///
+ INZI = 1230655049,
+
+ ///
+ /// SUNXI_TILED_NV12
+ ///
+ SUNXI_TILED_NV12 = 842093651,
+
+ ///
+ /// CNF4
+ ///
+ CNF4 = 877022787,
+
+ ///
+ /// IPU3_SBGGR10
+ ///
+ IPU3_SBGGR10 = 1647538281,
+
+ ///
+ /// IPU3_SGBRG10
+ ///
+ IPU3_SGBRG10 = 1731424361,
+
+ ///
+ /// IPU3_SGRBG10
+ ///
+ IPU3_SGRBG10 = 1194553449,
+
+ ///
+ /// IPU3_SRGGB10
+ ///
+ IPU3_SRGGB10 = 1915973737,
+
+ ///
+ /// CU8
+ ///
+ CU8 = 942691651,
+
+ ///
+ /// CU16LE
+ ///
+ CU16LE = 909202755,
+
+ ///
+ /// CS8
+ ///
+ CS8 = 942691139,
+
+ ///
+ /// CS14LE
+ ///
+ CS14LE = 875647811,
+
+ ///
+ /// RU12LE
+ ///
+ RU12LE = 842093906,
+
+ ///
+ /// PCU16BE
+ ///
+ PCU16BE = 909198160,
+
+ ///
+ /// PCU18BE
+ ///
+ PCU18BE = 942752592,
+
+ ///
+ /// PCU20BE
+ ///
+ PCU20BE = 808600400,
+
+ ///
+ /// DELTA_TD16
+ ///
+ DELTA_TD16 = 909198420,
+
+ ///
+ /// DELTA_TD08
+ ///
+ DELTA_TD08 = 942687316,
+
+ ///
+ /// TU16
+ ///
+ TU16 = 909202772,
+
+ ///
+ /// TU08
+ ///
+ TU08 = 942691668,
+
+ ///
+ /// VSP1_HGO
+ ///
+ VSP1_HGO = 1213223766,
+
+ ///
+ /// VSP1_HGT
+ ///
+ VSP1_HGT = 1414550358,
+
+ ///
+ /// UVC
+ ///
+ UVC = 1212372565,
+
+ ///
+ /// D4XX
+ ///
+ D4XX = 1482175556,
}
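
The magic numbers in this enum are V4L2 FourCC codes: four ASCII bytes packed little-endian, as `v4l2_fourcc()` does in videodev2.h. A small sketch that decodes a few of the values above:

    using System;

    static uint Fourcc(char a, char b, char c, char d)
        => (uint)a | (uint)b << 8 | (uint)c << 16 | (uint)d << 24;

    Console.WriteLine(Fourcc('Y', 'U', 'Y', 'V')); // 1448695129 == V4l2InputFormat.YUYV
    Console.WriteLine(Fourcc('M', 'J', 'P', 'G')); // 1196444237 == V4l2InputFormat.MJPEG

    // Big-endian variants set the top bit (v4l2_fourcc_be), e.g. Y16_BE:
    Console.WriteLine(Fourcc('Y', '1', '6', ' ') | 1u << 31); // 2687906137
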
diff --git a/SeeShark/Interop/Libc/VideoDeviceValueType.cs b/SeeShark/Interop/Libc/VideoDeviceValueType.cs
index c1d9454..6b7368a 100644
--- a/SeeShark/Interop/Libc/VideoDeviceValueType.cs
+++ b/SeeShark/Interop/Libc/VideoDeviceValueType.cs
@@ -1,93 +1,92 @@
-#pragma warning disable IDE0073
+#pragma warning disable IDE0073
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
-namespace SeeShark.Interop.Libc
+namespace SeeShark.Interop.Libc;
+
+/// <summary>
+/// The type of a video device's control.
+/// </summary>
+public enum VideoDeviceValueType : uint
{
///
- /// The type of a video device's control.
- ///
- public enum VideoDeviceValueType : uint
- {
- ///
- /// Exposure Type
- ///
- ExposureType = 10094849,
-
- ///
- /// Exposure Time
- ///
- ExposureTime = 10094850,
-
- ///
- /// Sharpness
- ///
- Sharpness = 9963803,
-
- ///
- /// Contrast
- ///
- Contrast = 9963777,
-
- ///
- /// Brightness
- ///
- Brightness = 9963776,
-
- ///
- /// Saturation
- ///
- Saturation = 9963778,
-
- ///
- /// Gamma
- ///
- Gamma = 9963792,
-
- ///
- /// Gain
- ///
- Gain = 9963795,
-
- ///
- /// Rotate
- ///
- Rotate = 9963810,
-
- ///
- /// Horizontal Flip
- ///
- HorizontalFlip = 9963796,
-
- ///
- /// Vertical Flip
- ///
- VerticalFlip = 9963797,
-
- ///
- /// Power Line Frequency
- ///
- PowerLineFrequency = 9963800,
-
- ///
- /// White Balance Temperature
- ///
- WhiteBalanceTemperature = 9963802,
-
- ///
- /// Color Effect
- ///
- ColorEffect = 9963807,
-
- ///
- /// White Balance Effect
- ///
- WhiteBalanceEffect = 10094868,
-
- ///
- /// Scene Mode
- ///
- SceneMode = 10094874,
- }
+ /// Exposure Type
+ ///
+ ExposureType = 10094849,
+
+ ///
+ /// Exposure Time
+ ///
+ ExposureTime = 10094850,
+
+ ///
+ /// Sharpness
+ ///
+ Sharpness = 9963803,
+
+ ///
+ /// Contrast
+ ///
+ Contrast = 9963777,
+
+ ///
+ /// Brightness
+ ///
+ Brightness = 9963776,
+
+ ///
+ /// Saturation
+ ///
+ Saturation = 9963778,
+
+ ///
+ /// Gamma
+ ///
+ Gamma = 9963792,
+
+ ///
+ /// Gain
+ ///
+ Gain = 9963795,
+
+ ///
+ /// Rotate
+ ///
+ Rotate = 9963810,
+
+ ///
+ /// Horizontal Flip
+ ///
+ HorizontalFlip = 9963796,
+
+ ///
+ /// Vertical Flip
+ ///
+ VerticalFlip = 9963797,
+
+ ///
+ /// Power Line Frequency
+ ///
+ PowerLineFrequency = 9963800,
+
+ ///
+ /// White Balance Temperature
+ ///
+ WhiteBalanceTemperature = 9963802,
+
+ ///
+ /// Color Effect
+ ///
+ ColorEffect = 9963807,
+
+ ///
+ /// White Balance Effect
+ ///
+ WhiteBalanceEffect = 10094868,
+
+ ///
+ /// Scene Mode
+ ///
+ SceneMode = 10094874,
}
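
These values are V4L2 control IDs: a control-class base plus a small offset, per videodev2.h. For example (`ExposureType` corresponds to upstream V4L2_CID_EXPOSURE_AUTO):

    const uint V4L2_CID_BASE = 0x00980900;              // user-class control base
    const uint V4L2_CID_CAMERA_CLASS_BASE = 0x009A0900; // camera-class control base

    System.Console.WriteLine(V4L2_CID_BASE + 0);              // 9963776  == Brightness
    System.Console.WriteLine(V4L2_CID_BASE + 1);              // 9963777  == Contrast
    System.Console.WriteLine(V4L2_CID_CAMERA_CLASS_BASE + 1); // 10094849 == ExposureType
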
diff --git a/SeeShark/Interop/Libc/VideoSettings.cs b/SeeShark/Interop/Libc/VideoSettings.cs
index 4605673..c827986 100644
--- a/SeeShark/Interop/Libc/VideoSettings.cs
+++ b/SeeShark/Interop/Libc/VideoSettings.cs
@@ -2,38 +2,37 @@
// This file is part of SeeShark.
// SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
-namespace SeeShark.Interop.Libc
+namespace SeeShark.Interop.Libc;
+
+internal enum VideoSettings : int
{
- internal enum VideoSettings : int
- {
- VIDIOC_QUERYCAP = -2140645888,
- VIDIOC_ENUM_FMT = -1069525502,
- VIDIOC_G_FMT = -1041213948, // previously -1060350460?
- VIDIOC_S_FMT = -1041213947, // previously -1060350459?
- VIDIOC_REQBUFS = -1072409080,
- VIDIOC_QUERYBUF = -1069263351,
- VIDIOC_OVERLAY = 1074025998,
- VIDIOC_QBUF = -1069263345,
- VIDIOC_DQBUF = -1069263343,
- VIDIOC_STREAMON = 1074026002,
- VIDIOC_STREAMOFF = 1074026003,
- VIDIOC_G_PARM = -1060350443,
- VIDIOC_S_PARM = -1060350442,
- VIDIOC_G_CTRL = -1073195493,
- VIDIOC_S_CTRL = -1073195492,
- VIDIOC_QUERYCTRL = -1069263324,
- VIDIOC_G_INPUT = -2147199450,
- VIDIOC_S_INPUT = -1073457625,
- VIDIOC_G_OUTPUT = -2147199442,
- VIDIOC_S_OUTPUT = -1073457617,
- VIDIOC_CROPCAP = -1070836166,
- VIDIOC_G_CROP = -1072409029,
- VIDIOC_S_CROP = 1075074620,
- VIDIOC_TRY_FMT = -1041213888,
- VIDIOC_G_PRIORITY = -2147199421,
- VIDIOC_S_PRIORITY = 1074026052,
- VIDIOC_ENUM_FRAMESIZES = -1070836150,
- VIDIOC_ENUM_FRAMEINTERVALS = -1070311861, // -1069787573 doesn't work
- VIDIOC_PREPARE_BUF = -1069263267,
- }
+ VIDIOC_QUERYCAP = -2140645888,
+ VIDIOC_ENUM_FMT = -1069525502,
+ VIDIOC_G_FMT = -1041213948, // previously -1060350460?
+ VIDIOC_S_FMT = -1041213947, // previously -1060350459?
+ VIDIOC_REQBUFS = -1072409080,
+ VIDIOC_QUERYBUF = -1069263351,
+ VIDIOC_OVERLAY = 1074025998,
+ VIDIOC_QBUF = -1069263345,
+ VIDIOC_DQBUF = -1069263343,
+ VIDIOC_STREAMON = 1074026002,
+ VIDIOC_STREAMOFF = 1074026003,
+ VIDIOC_G_PARM = -1060350443,
+ VIDIOC_S_PARM = -1060350442,
+ VIDIOC_G_CTRL = -1073195493,
+ VIDIOC_S_CTRL = -1073195492,
+ VIDIOC_QUERYCTRL = -1069263324,
+ VIDIOC_G_INPUT = -2147199450,
+ VIDIOC_S_INPUT = -1073457625,
+ VIDIOC_G_OUTPUT = -2147199442,
+ VIDIOC_S_OUTPUT = -1073457617,
+ VIDIOC_CROPCAP = -1070836166,
+ VIDIOC_G_CROP = -1072409029,
+ VIDIOC_S_CROP = 1075074620,
+ VIDIOC_TRY_FMT = -1041213888,
+ VIDIOC_G_PRIORITY = -2147199421,
+ VIDIOC_S_PRIORITY = 1074026052,
+ VIDIOC_ENUM_FRAMESIZES = -1070836150,
+ VIDIOC_ENUM_FRAMEINTERVALS = -1070311861, // -1069787573 doesn't work
+ VIDIOC_PREPARE_BUF = -1069263267,
}
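
The negative values here are not special: they are the same 32-bit ioctl codes that `RawVideoSettings` computes, reinterpreted as signed `int`. Any request whose direction includes "read" (dir = 2 or 3, shifted left by 30) sets bit 31 and therefore prints negative:

    // _IOR('V', 0, v4l2_capability): dir = read (2), size = 104 (0x68), type = 'V' (0x56).
    uint code = 2u << 30 | 104u << 16 | 86u << 8 | 0u;
    System.Console.WriteLine(unchecked((int)code)); // -2140645888 == VIDIOC_QUERYCAP
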
diff --git a/SeeShark/Interop/Libc/Videodev2.struct.cs b/SeeShark/Interop/Libc/Videodev2.struct.cs
index 05b5a2b..19286d4 100644
--- a/SeeShark/Interop/Libc/Videodev2.struct.cs
+++ b/SeeShark/Interop/Libc/Videodev2.struct.cs
@@ -5,439 +5,438 @@
using System;
using System.Runtime.InteropServices;
-namespace SeeShark.Interop.Libc
-{
- // internal struct V4l2FrameBuffer
- // {
- // public IntPtr Start;
- // public uint Length;
- // }
+namespace SeeShark.Interop.Libc;
+
+// internal struct V4l2FrameBuffer
+// {
+// public IntPtr Start;
+// public uint Length;
+// }
#pragma warning disable IDE1006
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_capability
- {
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 16)]
- public string driver;
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
- public string card;
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
- public string bus_info;
- public uint version;
- public uint capabilities;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
- public uint[] reserved;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_capability
+{
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 16)]
+ public string driver;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
+ public string card;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
+ public string bus_info;
+ public uint version;
+ public uint capabilities;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
+ public uint[] reserved;
+}
- internal enum v4l2_ctrl_type : uint
- {
- V4L2_CTRL_TYPE_INTEGER = 1,
- V4L2_CTRL_TYPE_BOOLEAN = 2,
- V4L2_CTRL_TYPE_MENU = 3,
- V4L2_CTRL_TYPE_BUTTON = 4,
- V4L2_CTRL_TYPE_INTEGER64 = 5,
- V4L2_CTRL_TYPE_CTRL_CLASS = 6,
- V4L2_CTRL_TYPE_STRING = 7,
- V4L2_CTRL_TYPE_BITMASK = 8,
- V4L2_CTRL_TYPE_INTEGER_MENU = 9,
- V4L2_CTRL_COMPOUND_TYPES = 0x0100,
- V4L2_CTRL_TYPE_U8 = 0x0100,
- V4L2_CTRL_TYPE_U16 = 0x0101,
- V4L2_CTRL_TYPE_U32 = 0x0102,
- };
+internal enum v4l2_ctrl_type : uint
+{
+ V4L2_CTRL_TYPE_INTEGER = 1,
+ V4L2_CTRL_TYPE_BOOLEAN = 2,
+ V4L2_CTRL_TYPE_MENU = 3,
+ V4L2_CTRL_TYPE_BUTTON = 4,
+ V4L2_CTRL_TYPE_INTEGER64 = 5,
+ V4L2_CTRL_TYPE_CTRL_CLASS = 6,
+ V4L2_CTRL_TYPE_STRING = 7,
+ V4L2_CTRL_TYPE_BITMASK = 8,
+ V4L2_CTRL_TYPE_INTEGER_MENU = 9,
+ V4L2_CTRL_COMPOUND_TYPES = 0x0100,
+ V4L2_CTRL_TYPE_U8 = 0x0100,
+ V4L2_CTRL_TYPE_U16 = 0x0101,
+ V4L2_CTRL_TYPE_U32 = 0x0102,
+};
+
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_queryctrl
+{
+ public VideoDeviceValueType id;
+ public v4l2_ctrl_type type;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
+ public string name;
+ public int minimum;
+ public int maximum;
+ public int step;
+ public int default_value;
+ public uint flags;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
+ public uint[] reserved;
+};
+
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_control
+{
+ public VideoDeviceValueType id;
+ public int value;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_queryctrl
- {
- public VideoDeviceValueType id;
- public v4l2_ctrl_type type;
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
- public string name;
- public int minimum;
- public int maximum;
- public int step;
- public int default_value;
- public uint flags;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
- public uint[] reserved;
- };
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_fmtdesc
+{
+ public uint index;
+ public v4l2_buf_type type;
+ public uint flags;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
+ public string description;
+ public V4l2InputFormat pixelformat;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
+ public uint[] reserved;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_control
- {
- public VideoDeviceValueType id;
- public int value;
- }
+internal enum v4l2_buf_type : uint
+{
+ V4L2_BUF_TYPE_VIDEO_CAPTURE = 1,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT = 2,
+ V4L2_BUF_TYPE_VIDEO_OVERLAY = 3,
+ V4L2_BUF_TYPE_VBI_CAPTURE = 4,
+ V4L2_BUF_TYPE_VBI_OUTPUT = 5,
+ V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6,
+ V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8,
+ V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE = 9,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE = 10,
+ V4L2_BUF_TYPE_SDR_CAPTURE = 11,
+ V4L2_BUF_TYPE_SDR_OUTPUT = 12,
+ V4L2_BUF_TYPE_META_CAPTURE = 13,
+ V4L2_BUF_TYPE_META_OUTPUT = 14,
+ V4L2_BUF_TYPE_PRIVATE = 0x80,
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_fmtdesc
- {
- public uint index;
- public v4l2_buf_type type;
- public uint flags;
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
- public string description;
- public V4l2InputFormat pixelformat;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
- public uint[] reserved;
- }
+internal enum v4l2_field : uint
+{
+ V4L2_FIELD_ANY = 0,
+ V4L2_FIELD_NONE = 1,
+ V4L2_FIELD_TOP = 2,
+ V4L2_FIELD_BOTTOM = 3,
+ V4L2_FIELD_INTERLACED = 4,
+ V4L2_FIELD_SEQ_TB = 5,
+ V4L2_FIELD_SEQ_BT = 6,
+ V4L2_FIELD_ALTERNATE = 7,
+ V4L2_FIELD_INTERLACED_TB = 8,
+ V4L2_FIELD_INTERLACED_BT = 9,
+}
- internal enum v4l2_buf_type : uint
- {
- V4L2_BUF_TYPE_VIDEO_CAPTURE = 1,
- V4L2_BUF_TYPE_VIDEO_OUTPUT = 2,
- V4L2_BUF_TYPE_VIDEO_OVERLAY = 3,
- V4L2_BUF_TYPE_VBI_CAPTURE = 4,
- V4L2_BUF_TYPE_VBI_OUTPUT = 5,
- V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6,
- V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7,
- V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8,
- V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE = 9,
- V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE = 10,
- V4L2_BUF_TYPE_SDR_CAPTURE = 11,
- V4L2_BUF_TYPE_SDR_OUTPUT = 12,
- V4L2_BUF_TYPE_META_CAPTURE = 13,
- V4L2_BUF_TYPE_META_OUTPUT = 14,
- V4L2_BUF_TYPE_PRIVATE = 0x80,
- }
+internal enum v4l2_colorspace : uint
+{
+ V4L2_COLORSPACE_DEFAULT = 0,
+ V4L2_COLORSPACE_SMPTE170M = 1,
+ V4L2_COLORSPACE_SMPTE240M = 2,
+ V4L2_COLORSPACE_REC709 = 3,
+ V4L2_COLORSPACE_BT878 = 4,
+ V4L2_COLORSPACE_470_SYSTEM_M = 5,
+ V4L2_COLORSPACE_470_SYSTEM_BG = 6,
+ V4L2_COLORSPACE_JPEG = 7,
+ V4L2_COLORSPACE_SRGB = 8,
+ V4L2_COLORSPACE_ADOBERGB = 9,
+ V4L2_COLORSPACE_BT2020 = 10,
+ V4L2_COLORSPACE_RAW = 11,
+}
- internal enum v4l2_field : uint
- {
- V4L2_FIELD_ANY = 0,
- V4L2_FIELD_NONE = 1,
- V4L2_FIELD_TOP = 2,
- V4L2_FIELD_BOTTOM = 3,
- V4L2_FIELD_INTERLACED = 4,
- V4L2_FIELD_SEQ_TB = 5,
- V4L2_FIELD_SEQ_BT = 6,
- V4L2_FIELD_ALTERNATE = 7,
- V4L2_FIELD_INTERLACED_TB = 8,
- V4L2_FIELD_INTERLACED_BT = 9,
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_pix_format
+{
+ public uint width;
+ public uint height;
+ public V4l2InputFormat pixelformat;
+ public v4l2_field field;
+ public uint bytesperline;
+ public uint sizeimage;
+ public v4l2_colorspace colorspace;
+ public uint priv;
+}
- internal enum v4l2_colorspace : uint
- {
- V4L2_COLORSPACE_DEFAULT = 0,
- V4L2_COLORSPACE_SMPTE170M = 1,
- V4L2_COLORSPACE_SMPTE240M = 2,
- V4L2_COLORSPACE_REC709 = 3,
- V4L2_COLORSPACE_BT878 = 4,
- V4L2_COLORSPACE_470_SYSTEM_M = 5,
- V4L2_COLORSPACE_470_SYSTEM_BG = 6,
- V4L2_COLORSPACE_JPEG = 7,
- V4L2_COLORSPACE_SRGB = 8,
- V4L2_COLORSPACE_ADOBERGB = 9,
- V4L2_COLORSPACE_BT2020 = 10,
- V4L2_COLORSPACE_RAW = 11,
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_rect
+{
+ public int left;
+ public int top;
+ public uint width;
+ public uint height;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_pix_format
- {
- public uint width;
- public uint height;
- public V4l2InputFormat pixelformat;
- public v4l2_field field;
- public uint bytesperline;
- public uint sizeimage;
- public v4l2_colorspace colorspace;
- public uint priv;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal unsafe struct v4l2_clip
+{
+ public v4l2_rect c;
+ public v4l2_clip* next;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_rect
- {
- public int left;
- public int top;
- public uint width;
- public uint height;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal unsafe struct v4l2_window
+{
+ public v4l2_rect w;
+ public v4l2_field field;
+ public uint chromakey;
+ public v4l2_clip* clips;
+ public uint clipcount;
+ public void* bitmap;
+ public byte global_alpha;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal unsafe struct v4l2_clip
- {
- public v4l2_rect c;
- public v4l2_clip* next;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_vbi_format
+{
+ public uint sampling_rate;
+ public uint offset;
+ public uint samples_per_line;
+ public uint sample_format;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
+ public uint[] start;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
+ public uint[] count;
+ public uint flags;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
+ public uint[] reserved;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal unsafe struct v4l2_window
- {
- public v4l2_rect w;
- public v4l2_field field;
- public uint chromakey;
- public v4l2_clip* clips;
- public uint clipcount;
- public void* bitmap;
- public byte global_alpha;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_sliced_vbi_format
+{
+ public uint service_set;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 48)]
+ public ushort[] service_lines;
+ public uint io_size;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
+ public uint[] reserved;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_vbi_format
- {
- public uint sampling_rate;
- public uint offset;
- public uint samples_per_line;
- public uint sample_format;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
- public uint[] start;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
- public uint[] count;
- public uint flags;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
- public uint[] reserved;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_sdr_format
+{
+ public V4l2InputFormat pixelformat;
+ public uint buffersize;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 24)]
+ public byte[] reserved;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_sliced_vbi_format
- {
- public uint service_set;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 48)]
- public ushort[] service_lines;
- public uint io_size;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
- public uint[] reserved;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_meta_format
+{
+ public uint dataformat;
+ public uint buffersize;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_sdr_format
- {
- public V4l2InputFormat pixelformat;
- public uint buffersize;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 24)]
- public byte[] reserved;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct fmt
+{
+ public v4l2_pix_format pix;
+ public v4l2_window win;
+ public v4l2_vbi_format vbi;
+ public v4l2_sliced_vbi_format sliced;
+ public v4l2_sdr_format sdr;
+ public v4l2_meta_format meta;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 200)]
+ public byte[] raw;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_meta_format
- {
- public uint dataformat;
- public uint buffersize;
- }
+[StructLayout(LayoutKind.Sequential, Size = 204)]
+internal struct v4l2_format
+{
+ public v4l2_buf_type type;
+ public fmt fmt;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct fmt
- {
- public v4l2_pix_format pix;
- public v4l2_window win;
- public v4l2_vbi_format vbi;
- public v4l2_sliced_vbi_format sliced;
- public v4l2_sdr_format sdr;
- public v4l2_meta_format meta;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 200)]
- public byte[] raw;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_captureparm
+{
+ public uint capability;
+ public uint capturemode;
+ public v4l2_fract timeperframe;
+ public uint extendedmode;
+ public uint readbuffers;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
+ public uint[] reserved;
+}
- [StructLayout(LayoutKind.Sequential, Size = 204)]
- internal struct v4l2_format
- {
- public v4l2_buf_type type;
- public fmt fmt;
- }
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_captureparm
- {
- public uint capability;
- public uint capturemode;
- public v4l2_fract timeperframe;
- public uint extendedmode;
- public uint readbuffers;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
- public uint[] reserved;
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_outputparm
+{
+ public uint capability;
+ public uint outputmode;
+ public v4l2_fract timeperframe;
+ public uint extendedmode;
+ public uint writebuffers;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
+ public uint[] reserved;
+}
+
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_streamparm
+{
+ public v4l2_buf_type type;
+ [StructLayout(LayoutKind.Explicit)]
+ internal struct parm_union
+ {
+ [FieldOffset(0)]
+ public v4l2_captureparm capture;
+ [FieldOffset(0)]
+ public v4l2_outputparm output;
+ [FieldOffset(0)]
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 200)]
+ public byte[] raw;
}
+ public parm_union parm;
+}
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_fract
+{
+ public uint numerator;
+ public uint denominator;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_outputparm
- {
- public uint capability;
- public uint outputmode;
- public v4l2_fract timeperframe;
- public uint extendedmode;
- public uint writebuffers;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
- public uint[] reserved;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_cropcap
+{
+ public v4l2_buf_type type;
+ public v4l2_rect bounds;
+ public v4l2_rect defrect;
+ public v4l2_fract pixelaspect;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_streamparm
- {
- public v4l2_buf_type type;
- [StructLayout(LayoutKind.Explicit)]
- internal struct parm_union
- {
- [FieldOffset(0)]
- public v4l2_captureparm capture;
- [FieldOffset(0)]
- public v4l2_outputparm output;
- [FieldOffset(0)]
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 200)]
- public byte[] raw;
- }
- public parm_union parm;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_crop
+{
+ public v4l2_buf_type type;
+ public v4l2_rect c;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_fract
- {
- public uint numerator;
- public uint denominator;
- }
+internal enum v4l2_memory : uint
+{
+ V4L2_MEMORY_MMAP = 1,
+ V4L2_MEMORY_USERPTR = 2,
+ V4L2_MEMORY_OVERLAY = 3,
+ V4L2_MEMORY_DMABUF = 4,
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_cropcap
- {
- public v4l2_buf_type type;
- public v4l2_rect bounds;
- public v4l2_rect defrect;
- public v4l2_fract pixelaspect;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_requestbuffers
+{
+ public uint count;
+ public v4l2_buf_type type;
+ public v4l2_memory memory;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
+ public uint[] reserved;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_crop
- {
- public v4l2_buf_type type;
- public v4l2_rect c;
- }
+[StructLayout(LayoutKind.Sequential)]
+public struct v4l2_timeval
+{
+ public uint tv_sec;
+ public uint tv_usec;
+}
- internal enum v4l2_memory : uint
- {
- V4L2_MEMORY_MMAP = 1,
- V4L2_MEMORY_USERPTR = 2,
- V4L2_MEMORY_OVERLAY = 3,
- V4L2_MEMORY_DMABUF = 4,
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_timecode
+{
+ public uint type;
+ public uint flags;
+ public byte frames;
+ public byte seconds;
+ public byte minutes;
+ public byte hours;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
+ public byte[] userbits;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_requestbuffers
- {
- public uint count;
- public v4l2_buf_type type;
- public v4l2_memory memory;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
- public uint[] reserved;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_buffer
+{
+ public uint index;
+ public v4l2_buf_type type;
+ public uint bytesused;
+ public uint flags;
+ public v4l2_field field;
[StructLayout(LayoutKind.Sequential)]
- public struct v4l2_timeval
+ public struct timeval
{
public uint tv_sec;
public uint tv_usec;
}
+ public timeval timestamp;
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_timecode
- {
- public uint type;
- public uint flags;
- public byte frames;
- public byte seconds;
- public byte minutes;
- public byte hours;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 4)]
- public byte[] userbits;
- }
+ public v4l2_timecode timecode;
+ public uint sequence;
+ public v4l2_memory memory;
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_buffer
+ [StructLayout(LayoutKind.Explicit)]
+ public struct m_union
{
- public uint index;
- public v4l2_buf_type type;
- public uint bytesused;
- public uint flags;
- public v4l2_field field;
-
- [StructLayout(LayoutKind.Sequential)]
- public struct timeval
- {
- public uint tv_sec;
- public uint tv_usec;
- }
- public timeval timestamp;
-
- public v4l2_timecode timecode;
- public uint sequence;
- public v4l2_memory memory;
-
- [StructLayout(LayoutKind.Explicit)]
- public struct m_union
- {
- [FieldOffset(0)]
- public uint offset;
- [FieldOffset(0)]
- public uint userptr;
- }
- public m_union m;
-
- public uint length;
- public uint input;
- public uint reserved;
+ [FieldOffset(0)]
+ public uint offset;
+ [FieldOffset(0)]
+ public uint userptr;
}
+ public m_union m;
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_frmsizeenum
- {
- public uint index;
- public V4l2InputFormat pixel_format;
- public v4l2_frmsizetypes type;
- public v4l2_frmsize_discrete discrete;
- public v4l2_frmsize_stepwise stepwise;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
- public uint[] reserved;
- }
+ public uint length;
+ public uint input;
+ public uint reserved;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_frmsize_discrete
- {
- public uint width;
- public uint height;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_frmsizeenum
+{
+ public uint index;
+ public V4l2InputFormat pixel_format;
+ public v4l2_frmsizetypes type;
+ public v4l2_frmsize_discrete discrete;
+ public v4l2_frmsize_stepwise stepwise;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
+ public uint[] reserved;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_frmsize_stepwise
- {
- public uint min_width;
- public uint max_width;
- public uint step_width;
- public uint min_height;
- public uint max_height;
- public uint step_height;
- }
-
- internal enum v4l2_frmsizetypes : uint
- {
- V4L2_FRMSIZE_TYPE_DISCRETE = 1,
- V4L2_FRMSIZE_TYPE_CONTINUOUS = 2,
- V4L2_FRMSIZE_TYPE_STEPWISE = 3,
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_frmsize_discrete
+{
+ public uint width;
+ public uint height;
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_frmivalenum
- {
- public uint index;
- public V4l2InputFormat pixel_format;
- public uint width;
- public uint height;
- public v4l2_frmivaltypes type;
- public v4l2_fract discrete;
- public v4l2_frmival_stepwise stepwise;
- [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
- public uint[] reserved;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_frmsize_stepwise
+{
+ public uint min_width;
+ public uint max_width;
+ public uint step_width;
+ public uint min_height;
+ public uint max_height;
+ public uint step_height;
+}
+
+internal enum v4l2_frmsizetypes : uint
+{
+ V4L2_FRMSIZE_TYPE_DISCRETE = 1,
+ V4L2_FRMSIZE_TYPE_CONTINUOUS = 2,
+ V4L2_FRMSIZE_TYPE_STEPWISE = 3,
+}
- [StructLayout(LayoutKind.Sequential)]
- internal struct v4l2_frmival_stepwise
- {
- public v4l2_fract min;
- public v4l2_fract max;
- public v4l2_fract step;
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_frmivalenum
+{
+ public uint index;
+ public V4l2InputFormat pixel_format;
+ public uint width;
+ public uint height;
+ public v4l2_frmivaltypes type;
+ public v4l2_fract discrete;
+ public v4l2_frmival_stepwise stepwise;
+ [MarshalAs(UnmanagedType.ByValArray, SizeConst = 2)]
+ public uint[] reserved;
+}
- internal enum v4l2_frmivaltypes : uint
- {
- V4L2_FRMIVAL_TYPE_DISCRETE = 1,
- V4L2_FRMIVAL_TYPE_CONTINUOUS = 2,
- V4L2_FRMIVAL_TYPE_STEPWISE = 3,
- }
+[StructLayout(LayoutKind.Sequential)]
+internal struct v4l2_frmival_stepwise
+{
+ public v4l2_fract min;
+ public v4l2_fract max;
+ public v4l2_fract step;
+}
+
+internal enum v4l2_frmivaltypes : uint
+{
+ V4L2_FRMIVAL_TYPE_DISCRETE = 1,
+ V4L2_FRMIVAL_TYPE_CONTINUOUS = 2,
+ V4L2_FRMIVAL_TYPE_STEPWISE = 3,
}
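
A note on the v4l2 marshaling above: the explicit-layout m_union reproduces the C union inside the kernel's v4l2_buffer, so offset (used with V4L2_MEMORY_MMAP) and userptr (used with V4L2_MEMORY_USERPTR) overlap in the same four bytes. Below is a minimal sketch of driving v4l2_requestbuffers through ioctl, assuming it sits in the same assembly and namespace as the structs; the libc ioctl binding, the VIDIOC_REQBUFS value, and the V4L2_BUF_TYPE_VIDEO_CAPTURE member name are illustrative assumptions, not declarations made in this diff.

using System;
using System.Runtime.InteropServices;

internal static class V4l2Sketch
{
    // Hypothetical libc binding, for illustration only.
    [DllImport("libc", SetLastError = true)]
    private static extern int ioctl(int fd, uint request, ref v4l2_requestbuffers argp);

    // _IOWR('V', 8, struct v4l2_requestbuffers) for the 20-byte layout above.
    private const uint VIDIOC_REQBUFS = 0xc0145608;

    public static uint RequestMmapBuffers(int fd)
    {
        var req = new v4l2_requestbuffers
        {
            count = 4,
            type = v4l2_buf_type.V4L2_BUF_TYPE_VIDEO_CAPTURE, // assumed member name
            memory = v4l2_memory.V4L2_MEMORY_MMAP,
            reserved = new uint[2], // ByValArray fields must be allocated before marshaling
        };

        if (ioctl(fd, VIDIOC_REQBUFS, ref req) < 0)
            throw new InvalidOperationException("VIDIOC_REQBUFS failed");

        // The driver may clamp count to what it can actually allocate.
        return req.count;
    }
}
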
diff --git a/SeeShark/Interop/Windows/User32.cs b/SeeShark/Interop/Windows/User32.cs
index a4d39d9..a3208e9 100644
--- a/SeeShark/Interop/Windows/User32.cs
+++ b/SeeShark/Interop/Windows/User32.cs
@@ -1,149 +1,148 @@
- // Copyright (c) The Vignette Authors
- // This file is part of SeeShark.
- // SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
-
- using System;
- using System.Runtime.InteropServices;
-
- namespace SeeShark.Interop.Windows
- {
- [StructLayout(LayoutKind.Sequential)]
- internal struct DevMode
- {
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 0x20)]
- internal string dmDeviceName;
- internal short dmSpecVersion;
- internal short dmDriverVersion;
- internal short dmSize;
- internal short dmDriverExtra;
- internal int dmFields;
- internal int dmPositionX;
- internal int dmPositionY;
- internal int dmDisplayOrientation;
- internal int dmDisplayFixedOutput;
- internal short dmColor;
- internal short dmDuplex;
- internal short dmYResolution;
- internal short dmTTOption;
- internal short dmCollate;
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 0x20)]
- internal string dmFormName;
- internal short dmLogPixels;
- internal int dmBitsPerPel;
- internal int dmPelsWidth;
- internal int dmPelsHeight;
- internal int dmDisplayFlags;
- internal int dmDisplayFrequency;
- internal int dmICMMethod;
- internal int dmICMIntent;
- internal int dmMediaType;
- internal int dmDitherType;
- internal int dmReserved1;
- internal int dmReserved2;
- internal int dmPanningWidth;
- internal int dmPanningHeight;
- }
-
- [StructLayout(LayoutKind.Sequential)]
- internal struct Rect
- {
- internal int left;
- internal int top;
- internal int right;
- internal int bottom;
- }
-
- internal enum MONITORINFOF : ulong
- {
- PRIMARY = 1
- }
-
- [StructLayout(LayoutKind.Sequential)]
- internal struct MonitorInfoEx
- {
- internal uint size;
- internal Rect monitor;
- internal Rect work;
- internal uint flags;
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
- internal string deviceName;
- }
-
- internal enum DpiType
- {
- Effective,
- Angular,
- Raw
- }
-
-
- [Flags]
- internal enum DisplayDeviceStateFlags : int
- {
- ///
- /// The device is part of the desktop.
- ///
- AttachedToDesktop = 0x1,
- MultiDriver = 0x2,
- /// The primary desktop is on the device.
- PrimaryDevice = 0x4,
- /// Represents a pseudo device used to mirror application drawing for remoting or other purposes.
- MirroringDriver = 0x8,
- /// The device is VGA compatible.
- VGACompatible = 0x10,
- /// The device is removable; it cannot be the primary display.
- Removable = 0x20,
- /// The device has more display modes than its output devices support.
- ModesPruned = 0x8000000,
- Remote = 0x4000000,
- Disconnect = 0x2000000
- }
-
-
- #pragma warning disable CA1815
-
- // This is never compared in the code, so we can suppress the warning.
- [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
- internal struct DISPLAY_DEVICE
- {
- [MarshalAs(UnmanagedType.U4)]
- internal int cb;
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
- internal string DeviceName;
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 128)]
- internal string DeviceString;
- [MarshalAs(UnmanagedType.U4)]
- internal DisplayDeviceStateFlags StateFlags;
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 128)]
- internal string DeviceID;
- [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 128)]
- internal string DeviceKey;
- }
- #pragma warning restore CA1815
-
- internal static partial class User32
- {
- internal delegate bool MonitorEnumDelegate(IntPtr hMonitor, IntPtr hdcMonitor, ref Rect lprcMonitor, IntPtr dwData);
-
- [DllImport("user32.dll")]
- internal static extern bool EnumDisplayMonitors(IntPtr hdc, IntPtr lprcClip, MonitorEnumDelegate lpfnEnum, IntPtr dwData);
-
- [DllImport("user32.dll")]
- internal static extern bool GetMonitorInfo(IntPtr hmon, ref MonitorInfoEx mi);
-
- #pragma warning disable CA2101
- [DllImport("user32.dll", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true)]
- internal static extern bool EnumDisplaySettings(string deviceName, int modeNum, ref DevMode devMode);
-
- [DllImport("user32.dll", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true)]
- internal static extern bool EnumDisplayDevices(string lpDevice, uint iDevNum, ref DISPLAY_DEVICE lpDisplayDevice, uint dwFlags);
-
- #pragma warning restore CA2101
-
- [DllImport("Shcore.dll")]
- internal static extern int GetDpiForMonitor(IntPtr hmon, DpiType dpiType, out uint dpiX, out uint dpiY);
-
- [DllImport("Shcore.dll")]
- internal static extern int SetProcessDpiAwareness(int awareness);
- }
- }
+// Copyright (c) The Vignette Authors
+// This file is part of SeeShark.
+// SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
+
+using System;
+using System.Runtime.InteropServices;
+
+namespace SeeShark.Interop.Windows;
+
+[StructLayout(LayoutKind.Sequential)]
+internal struct DevMode
+{
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 0x20)]
+ internal string dmDeviceName;
+ internal short dmSpecVersion;
+ internal short dmDriverVersion;
+ internal short dmSize;
+ internal short dmDriverExtra;
+ internal int dmFields;
+ internal int dmPositionX;
+ internal int dmPositionY;
+ internal int dmDisplayOrientation;
+ internal int dmDisplayFixedOutput;
+ internal short dmColor;
+ internal short dmDuplex;
+ internal short dmYResolution;
+ internal short dmTTOption;
+ internal short dmCollate;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 0x20)]
+ internal string dmFormName;
+ internal short dmLogPixels;
+ internal int dmBitsPerPel;
+ internal int dmPelsWidth;
+ internal int dmPelsHeight;
+ internal int dmDisplayFlags;
+ internal int dmDisplayFrequency;
+ internal int dmICMMethod;
+ internal int dmICMIntent;
+ internal int dmMediaType;
+ internal int dmDitherType;
+ internal int dmReserved1;
+ internal int dmReserved2;
+ internal int dmPanningWidth;
+ internal int dmPanningHeight;
+}
+
+[StructLayout(LayoutKind.Sequential)]
+internal struct Rect
+{
+ internal int left;
+ internal int top;
+ internal int right;
+ internal int bottom;
+}
+
+internal enum MONITORINFOF : ulong
+{
+ PRIMARY = 1
+}
+
+[StructLayout(LayoutKind.Sequential)]
+internal struct MonitorInfoEx
+{
+ internal uint size;
+ internal Rect monitor;
+ internal Rect work;
+ internal uint flags;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
+ internal string deviceName;
+}
+
+internal enum DpiType
+{
+ Effective,
+ Angular,
+ Raw
+}
+
+
+[Flags]
+internal enum DisplayDeviceStateFlags : int
+{
+ ///
+ /// The device is part of the desktop.
+ ///
+ AttachedToDesktop = 0x1,
+ MultiDriver = 0x2,
+ /// The primary desktop is on the device.
+ PrimaryDevice = 0x4,
+ /// Represents a pseudo device used to mirror application drawing for remoting or other purposes.
+ MirroringDriver = 0x8,
+ /// The device is VGA compatible.
+ VGACompatible = 0x10,
+ /// The device is removable; it cannot be the primary display.
+ Removable = 0x20,
+ /// The device has more display modes than its output devices support.
+ ModesPruned = 0x8000000,
+ Remote = 0x4000000,
+ Disconnect = 0x2000000
+}
+
+
+#pragma warning disable CA1815
+
+// This is never compared in the code, so we can suppress the warning.
+[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
+internal struct DISPLAY_DEVICE
+{
+ [MarshalAs(UnmanagedType.U4)]
+ internal int cb;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 32)]
+ internal string DeviceName;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 128)]
+ internal string DeviceString;
+ [MarshalAs(UnmanagedType.U4)]
+ internal DisplayDeviceStateFlags StateFlags;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 128)]
+ internal string DeviceID;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 128)]
+ internal string DeviceKey;
+}
+#pragma warning restore CA1815
+
+internal static partial class User32
+{
+ internal delegate bool MonitorEnumDelegate(IntPtr hMonitor, IntPtr hdcMonitor, ref Rect lprcMonitor, IntPtr dwData);
+
+ [DllImport("user32.dll")]
+ internal static extern bool EnumDisplayMonitors(IntPtr hdc, IntPtr lprcClip, MonitorEnumDelegate lpfnEnum, IntPtr dwData);
+
+ [DllImport("user32.dll")]
+ internal static extern bool GetMonitorInfo(IntPtr hmon, ref MonitorInfoEx mi);
+
+#pragma warning disable CA2101
+ [DllImport("user32.dll", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true)]
+ internal static extern bool EnumDisplaySettings(string deviceName, int modeNum, ref DevMode devMode);
+
+ [DllImport("user32.dll", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true)]
+ internal static extern bool EnumDisplayDevices(string lpDevice, uint iDevNum, ref DISPLAY_DEVICE lpDisplayDevice, uint dwFlags);
+
+#pragma warning restore CA2101
+
+ [DllImport("Shcore.dll")]
+ internal static extern int GetDpiForMonitor(IntPtr hmon, DpiType dpiType, out uint dpiX, out uint dpiY);
+
+ [DllImport("Shcore.dll")]
+ internal static extern int SetProcessDpiAwareness(int awareness);
+}
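
For orientation, the two central calls here compose as follows: EnumDisplayMonitors invokes the callback once per display, and GetMonitorInfo fails unless the size field (Win32's cbSize) is filled in before the call. A minimal sketch under those assumptions, placed in the same assembly as the internal bindings:

using System;
using System.Runtime.InteropServices;
using SeeShark.Interop.Windows;

internal static class MonitorSketch
{
    public static void ListMonitors()
    {
        User32.EnumDisplayMonitors(IntPtr.Zero, IntPtr.Zero,
            (IntPtr hMonitor, IntPtr hdcMonitor, ref Rect lprcMonitor, IntPtr dwData) =>
            {
                var info = new MonitorInfoEx();
                // Win32 rejects the call unless the structure size is set first.
                info.size = (uint)Marshal.SizeOf<MonitorInfoEx>();
                if (User32.GetMonitorInfo(hMonitor, ref info))
                {
                    int width = lprcMonitor.right - lprcMonitor.left;
                    int height = lprcMonitor.bottom - lprcMonitor.top;
                    Console.WriteLine($"{info.deviceName}: {width}x{height}");
                }
                return true; // keep enumerating
            },
            IntPtr.Zero);
    }
}
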
diff --git a/SeeShark/Interop/X11/XLib.cs b/SeeShark/Interop/X11/XLib.cs
index a1c8511..d150dfa 100644
--- a/SeeShark/Interop/X11/XLib.cs
+++ b/SeeShark/Interop/X11/XLib.cs
@@ -5,38 +5,37 @@
using System;
using System.Runtime.InteropServices;
-namespace SeeShark.Interop.X11
+namespace SeeShark.Interop.X11;
+
+using Atom = Int64;
+using Display = IntPtr;
+using Window = IntPtr;
+
+internal class XLib
{
- using Display = IntPtr;
- using Window = IntPtr;
- using Atom = Int64;
+ private const string lib_x11 = "libX11";
+ private static readonly object displayLock = new object();
- internal class XLib
+ [DllImport(lib_x11, EntryPoint = "XOpenDisplay")]
+ private static extern unsafe Display sys_XOpenDisplay(sbyte* display);
+ public static unsafe Display XOpenDisplay(sbyte* display)
{
- private const string lib_x11 = "libX11";
- private static readonly object displayLock = new object();
-
- [DllImport(lib_x11, EntryPoint = "XOpenDisplay")]
- private static extern unsafe Display sys_XOpenDisplay(sbyte* display);
- public static unsafe Display XOpenDisplay(sbyte* display)
- {
- lock (displayLock)
- return sys_XOpenDisplay(display);
- }
+ lock (displayLock)
+ return sys_XOpenDisplay(display);
+ }
- [DllImport(lib_x11, EntryPoint = "XCloseDisplay")]
- public static extern int XCloseDisplay(Display display);
+ [DllImport(lib_x11, EntryPoint = "XCloseDisplay")]
+ public static extern int XCloseDisplay(Display display);
- [DllImport(lib_x11, EntryPoint = "XDefaultRootWindow")]
- public static extern Window XDefaultRootWindow(Display display);
+ [DllImport(lib_x11, EntryPoint = "XDefaultRootWindow")]
+ public static extern Window XDefaultRootWindow(Display display);
- [DllImport(lib_x11, EntryPoint = "XDisplayWidth")]
- public static extern int XDisplayWidth(Display display, int screenNumber);
+ [DllImport(lib_x11, EntryPoint = "XDisplayWidth")]
+ public static extern int XDisplayWidth(Display display, int screenNumber);
- [DllImport(lib_x11, EntryPoint = "XDisplayHeight")]
- public static extern int XDisplayHeight(Display display, int screenNumber);
+ [DllImport(lib_x11, EntryPoint = "XDisplayHeight")]
+ public static extern int XDisplayHeight(Display display, int screenNumber);
- [DllImport(lib_x11, EntryPoint = "XGetAtomName")]
- public static extern IntPtr XGetAtomName(Display display, Atom atom);
- }
+ [DllImport(lib_x11, EntryPoint = "XGetAtomName")]
+ public static extern IntPtr XGetAtomName(Display display, Atom atom);
}
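
A usage note for these XLib bindings: passing null to XOpenDisplay opens the display named by the DISPLAY environment variable, and every successful XOpenDisplay should be paired with XCloseDisplay. A minimal sketch (unsafe only because of the sbyte* parameter), assuming it lives in the same assembly as the internal class:

using System;
using SeeShark.Interop.X11;

internal static class XLibSketch
{
    public static unsafe void PrintRootWindowSize()
    {
        // null means "use the DISPLAY environment variable".
        IntPtr display = XLib.XOpenDisplay(null);
        if (display == IntPtr.Zero)
            throw new InvalidOperationException("Cannot open X display");

        try
        {
            IntPtr root = XLib.XDefaultRootWindow(display);
            // Screen 0 is assumed here for illustration.
            int width = XLib.XDisplayWidth(display, 0);
            int height = XLib.XDisplayHeight(display, 0);
            Console.WriteLine($"Root window 0x{root.ToInt64():x}: {width}x{height}");
        }
        finally
        {
            XLib.XCloseDisplay(display);
        }
    }
}
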
diff --git a/SeeShark/Interop/X11/XRRMonitorInfo.cs b/SeeShark/Interop/X11/XRRMonitorInfo.cs
index 2d72f16..faf579f 100644
--- a/SeeShark/Interop/X11/XRRMonitorInfo.cs
+++ b/SeeShark/Interop/X11/XRRMonitorInfo.cs
@@ -5,23 +5,22 @@
using System;
using System.Runtime.InteropServices;
-namespace SeeShark.Interop.X11
-{
- using Atom = Int64;
+namespace SeeShark.Interop.X11;
+
+using Atom = Int64;
- [StructLayout(LayoutKind.Sequential)]
- public unsafe struct XRRMonitorInfo
- {
- public Atom Name;
- public int Primary;
- public int Automatic;
- public int NOutput;
- public int X;
- public int Y;
- public int Width;
- public int Height;
- public int MWidth;
- public int MHeight;
- public IntPtr Outputs;
- }
+[StructLayout(LayoutKind.Sequential)]
+public unsafe struct XRRMonitorInfo
+{
+ public Atom Name;
+ public int Primary;
+ public int Automatic;
+ public int NOutput;
+ public int X;
+ public int Y;
+ public int Width;
+ public int Height;
+ public int MWidth;
+ public int MHeight;
+ public IntPtr Outputs;
}
diff --git a/SeeShark/Interop/X11/XRandr.cs b/SeeShark/Interop/X11/XRandr.cs
index faa8562..f31128c 100644
--- a/SeeShark/Interop/X11/XRandr.cs
+++ b/SeeShark/Interop/X11/XRandr.cs
@@ -5,16 +5,15 @@
using System;
using System.Runtime.InteropServices;
-namespace SeeShark.Interop.X11
-{
- using Display = IntPtr;
- using Window = IntPtr;
+namespace SeeShark.Interop.X11;
+
+using Display = IntPtr;
+using Window = IntPtr;
- internal static class XRandr
- {
- private const string lib_x_randr = "libXrandr";
+internal static class XRandr
+{
+ private const string lib_x_randr = "libXrandr";
- [DllImport(lib_x_randr, EntryPoint = "XRRGetMonitors")]
- public static extern unsafe XRRMonitorInfo* XRRGetMonitors(Display dpy, Window window, bool getActive, out int nmonitors);
- }
+ [DllImport(lib_x_randr, EntryPoint = "XRRGetMonitors")]
+ public static extern unsafe XRRMonitorInfo* XRRGetMonitors(Display dpy, Window window, bool getActive, out int nmonitors);
}
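
Tying XRRGetMonitors to the XRRMonitorInfo struct above, a minimal enumeration sketch; note that libXrandr also provides XRRFreeMonitors to release the returned array, which this diff does not bind, so the sketch deliberately leaves that allocation unfreed rather than inventing a binding:

using System;
using SeeShark.Interop.X11;

internal static class XRandrSketch
{
    public static unsafe void ListMonitors()
    {
        IntPtr display = XLib.XOpenDisplay(null);
        if (display == IntPtr.Zero)
            throw new InvalidOperationException("Cannot open X display");

        IntPtr root = XLib.XDefaultRootWindow(display);

        // getActive = true restricts the result to active monitors.
        XRRMonitorInfo* monitors = XRandr.XRRGetMonitors(display, root, true, out int count);
        for (int i = 0; i < count; i++)
        {
            XRRMonitorInfo m = monitors[i];
            Console.WriteLine($"{m.Width}x{m.Height}+{m.X}+{m.Y} primary={m.Primary != 0}");
        }

        // XRRFreeMonitors (not bound in this diff) would free the array here.
        XLib.XCloseDisplay(display);
    }
}
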
diff --git a/SeeShark/PixelFormat.cs b/SeeShark/PixelFormat.cs
index 0f484a0..3809b97 100644
--- a/SeeShark/PixelFormat.cs
+++ b/SeeShark/PixelFormat.cs
@@ -5,907 +5,906 @@
using System;
using FFmpeg.AutoGen;
-namespace SeeShark
+namespace SeeShark;
+
+///
+/// Includes any pixel format that libav supports.
+///
+public enum PixelFormat : int
{
///
- /// Includes any pixel format that libav supports.
+ /// No pixel format.
///
- public enum PixelFormat : int
- {
- ///
- /// No pixel format.
- ///
- None = AVPixelFormat.AV_PIX_FMT_NONE,
- ///
- /// planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
- ///
- Yuv420P = AVPixelFormat.AV_PIX_FMT_YUV420P,
- ///
- /// packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
- ///
- Yuyv422 = AVPixelFormat.AV_PIX_FMT_YUYV422,
- ///
- /// packed RGB 8:8:8, 24bpp, RGBRGB...
- ///
- Rgb24 = AVPixelFormat.AV_PIX_FMT_RGB24,
- ///
- /// packed RGB 8:8:8, 24bpp, BGRBGR...
- ///
- Bgr24 = AVPixelFormat.AV_PIX_FMT_BGR24,
- ///
- /// planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
- ///
- Yuv422P = AVPixelFormat.AV_PIX_FMT_YUV422P,
- ///
- /// planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
- ///
- Yuv444P = AVPixelFormat.AV_PIX_FMT_YUV444P,
- ///
- /// planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
- ///
- Yuv410P = AVPixelFormat.AV_PIX_FMT_YUV410P,
- ///
- /// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
- ///
- Yuv411P = AVPixelFormat.AV_PIX_FMT_YUV411P,
- ///
- /// Y , 8bpp
- ///
- Gray8 = AVPixelFormat.AV_PIX_FMT_GRAY8,
- ///
- /// Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb
- /// to the lsb
- ///
- Monowhite = AVPixelFormat.AV_PIX_FMT_MONOWHITE,
- ///
- /// Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb
- /// to the lsb
- ///
- Monoblack = AVPixelFormat.AV_PIX_FMT_MONOBLACK,
- ///
- /// 8 bits with AV_PIX_FMT_RGB32 palette
- ///
- Pal8 = AVPixelFormat.AV_PIX_FMT_PAL8,
- ///
- /// planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P
- /// and setting color_range
- ///
- [Obsolete("Deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range")]
- Yuvj420P = AVPixelFormat.AV_PIX_FMT_YUVJ420P,
- ///
- /// planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P
- /// and setting color_range
- ///
- [Obsolete("Deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range")]
- Yuvj422P = AVPixelFormat.AV_PIX_FMT_YUVJ422P,
- ///
- /// planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P
- /// and setting color_range
- ///
- [Obsolete("Deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range")]
- Yuvj444P = AVPixelFormat.AV_PIX_FMT_YUVJ444P,
- ///
- /// packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
- ///
- Uyvy422 = AVPixelFormat.AV_PIX_FMT_UYVY422,
- ///
- /// packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
- ///
- Uyyvyy411 = AVPixelFormat.AV_PIX_FMT_UYYVYY411,
- ///
- /// packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
- ///
- Bgr8 = AVPixelFormat.AV_PIX_FMT_BGR8,
- ///
- /// packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels,
- /// the first pixel in the byte is the one composed by the 4 msb bits
- ///
- Bgr4 = AVPixelFormat.AV_PIX_FMT_BGR4,
- ///
- /// packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
- ///
- Bgr4Byte = AVPixelFormat.AV_PIX_FMT_BGR4_BYTE,
- ///
- /// packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
- ///
- Rgb8 = AVPixelFormat.AV_PIX_FMT_RGB8,
- ///
- /// packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels,
- /// the first pixel in the byte is the one composed by the 4 msb bits
- ///
- Rgb4 = AVPixelFormat.AV_PIX_FMT_RGB4,
- ///
- /// packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
- ///
- Rgb4Byte = AVPixelFormat.AV_PIX_FMT_RGB4_BYTE,
- ///
- /// planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which
- /// are interleaved (first byte U and the following byte V)
- ///
- Nv12 = AVPixelFormat.AV_PIX_FMT_NV12,
- ///
- /// as above, but U and V bytes are swapped
- ///
- Nv21 = AVPixelFormat.AV_PIX_FMT_NV21,
- ///
- /// packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
- ///
- Argb = AVPixelFormat.AV_PIX_FMT_ARGB,
- ///
- /// packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
- ///
- Rgba = AVPixelFormat.AV_PIX_FMT_RGBA,
- ///
- /// packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
- ///
- Abgr = AVPixelFormat.AV_PIX_FMT_ABGR,
- ///
- /// packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
- ///
- Bgra = AVPixelFormat.AV_PIX_FMT_BGRA,
- ///
- /// Y , 16bpp, big-endian
- ///
- Gray16Be = AVPixelFormat.AV_PIX_FMT_GRAY16BE,
- ///
- /// Y , 16bpp, little-endian
- ///
- Gray16Le = AVPixelFormat.AV_PIX_FMT_GRAY16LE,
- ///
- /// planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
- ///
- Yuv440P = AVPixelFormat.AV_PIX_FMT_YUV440P,
- ///
- /// planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P
- /// and setting color_range
- ///
- [Obsolete("Deprecated in favor of AV_PIX_FMT_YUV440P and setting color range")]
- Yuvj440P = AVPixelFormat.AV_PIX_FMT_YUVJ440P,
- ///
- /// planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
- ///
- Yuva420P = AVPixelFormat.AV_PIX_FMT_YUVA420P,
- ///
- /// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component
- /// is stored as big-endian
- ///
- Rgb48Be = AVPixelFormat.AV_PIX_FMT_RGB48BE,
- ///
- /// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component
- /// is stored as little-endian
- ///
- Rgb48Le = AVPixelFormat.AV_PIX_FMT_RGB48LE,
- ///
- /// packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
- ///
- Rgb565Be = AVPixelFormat.AV_PIX_FMT_RGB565BE,
- ///
- /// packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
- ///
- Rgb565Le = AVPixelFormat.AV_PIX_FMT_RGB565LE,
- ///
- /// packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined
- ///
- Rgb555Be = AVPixelFormat.AV_PIX_FMT_RGB555BE,
- ///
- /// packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined
- ///
- Rgb555Le = AVPixelFormat.AV_PIX_FMT_RGB555LE,
- ///
- /// packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
- ///
- Bgr565Be = AVPixelFormat.AV_PIX_FMT_BGR565BE,
- ///
- /// packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
- ///
- Bgr565Le = AVPixelFormat.AV_PIX_FMT_BGR565LE,
- ///
- /// packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), big-endian , X=unused/undefined
- ///
- Bgr555Be = AVPixelFormat.AV_PIX_FMT_BGR555BE,
- ///
- /// packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), little-endian, X=unused/undefined
- ///
- Bgr555Le = AVPixelFormat.AV_PIX_FMT_BGR555LE,
- ///
- /// Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
- ///
- Vaapi = AVPixelFormat.AV_PIX_FMT_VAAPI,
- ///
- /// planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- ///
- Yuv420P16Le = AVPixelFormat.AV_PIX_FMT_YUV420P16LE,
- ///
- /// planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- ///
- Yuv420P16Be = AVPixelFormat.AV_PIX_FMT_YUV420P16BE,
- ///
- /// planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- ///
- Yuv422P16Le = AVPixelFormat.AV_PIX_FMT_YUV422P16LE,
- ///
- /// planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- ///
- Yuv422P16Be = AVPixelFormat.AV_PIX_FMT_YUV422P16BE,
- ///
- /// planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- ///
- Yuv444P16Le = AVPixelFormat.AV_PIX_FMT_YUV444P16LE,
- ///
- /// planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- ///
- Yuv444P16Be = AVPixelFormat.AV_PIX_FMT_YUV444P16BE,
- ///
- /// HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
- ///
- Dxva2Vld = AVPixelFormat.AV_PIX_FMT_DXVA2_VLD,
- ///
- /// packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), little-endian, X=unused/undefined
- ///
- Rgb444Le = AVPixelFormat.AV_PIX_FMT_RGB444LE,
- ///
- /// packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), big-endian, X=unused/undefined
- ///
- Rgb444Be = AVPixelFormat.AV_PIX_FMT_RGB444BE,
- ///
- /// packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), little-endian, X=unused/undefined
- ///
- Bgr444Le = AVPixelFormat.AV_PIX_FMT_BGR444LE,
- ///
- /// packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), big-endian, X=unused/undefined
- ///
- Bgr444Be = AVPixelFormat.AV_PIX_FMT_BGR444BE,
- ///
- /// 8 bits gray, 8 bits alpha
- ///
- Ya8 = AVPixelFormat.AV_PIX_FMT_YA8,
- ///
- /// alias for AV_PIX_FMT_YA8
- ///
- Y400A = AVPixelFormat.AV_PIX_FMT_Y400A,
- ///
- /// alias for AV_PIX_FMT_YA8
- ///
- Gray8A = AVPixelFormat.AV_PIX_FMT_GRAY8A,
- ///
- /// packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component
- /// is stored as big-endian
- ///
- Bgr48Be = AVPixelFormat.AV_PIX_FMT_BGR48BE,
- ///
- /// packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component
- /// is stored as little-endian
- ///
- Bgr48Le = AVPixelFormat.AV_PIX_FMT_BGR48LE,
- ///
- /// planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- ///
- Yuv420P9Be = AVPixelFormat.AV_PIX_FMT_YUV420P9BE,
- ///
- /// planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- ///
- Yuv420P9Le = AVPixelFormat.AV_PIX_FMT_YUV420P9LE,
- ///
- /// planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- ///
- Yuv420P10Be = AVPixelFormat.AV_PIX_FMT_YUV420P10BE,
- ///
- /// planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- ///
- Yuv420P10Le = AVPixelFormat.AV_PIX_FMT_YUV420P10LE,
- ///
- /// planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- ///
- Yuv422P10Be = AVPixelFormat.AV_PIX_FMT_YUV422P10BE,
- ///
- /// planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- ///
- Yuv422P10Le = AVPixelFormat.AV_PIX_FMT_YUV422P10LE,
- ///
- /// planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- ///
- Yuv444P9Be = AVPixelFormat.AV_PIX_FMT_YUV444P9BE,
- ///
- /// planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- ///
- Yuv444P9Le = AVPixelFormat.AV_PIX_FMT_YUV444P9LE,
- ///
- /// planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- ///
- Yuv444P10Be = AVPixelFormat.AV_PIX_FMT_YUV444P10BE,
- ///
- /// planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- ///
- Yuv444P10Le = AVPixelFormat.AV_PIX_FMT_YUV444P10LE,
- ///
- /// planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- ///
- Yuv422P9Be = AVPixelFormat.AV_PIX_FMT_YUV422P9BE,
- ///
- /// planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- ///
- Yuv422P9Le = AVPixelFormat.AV_PIX_FMT_YUV422P9LE,
- ///
- /// planar GBR 4:4:4 24bpp
- ///
- Gbrp = AVPixelFormat.AV_PIX_FMT_GBRP,
- ///
- /// alias for GBRP
- ///
- Gbr24P = AVPixelFormat.AV_PIX_FMT_GBR24P,
- ///
- /// planar GBR 4:4:4 27bpp, big-endian
- ///
- Gbrp9Be = AVPixelFormat.AV_PIX_FMT_GBRP9BE,
- ///
- /// planar GBR 4:4:4 27bpp, little-endian
- ///
- Gbrp9Le = AVPixelFormat.AV_PIX_FMT_GBRP9LE,
- ///
- /// planar GBR 4:4:4 30bpp, big-endian
- ///
- Gbrp10Be = AVPixelFormat.AV_PIX_FMT_GBRP10BE,
- ///
- /// planar GBR 4:4:4 30bpp, little-endian
- ///
- Gbrp10Le = AVPixelFormat.AV_PIX_FMT_GBRP10LE,
- ///
- /// planar GBR 4:4:4 48bpp, big-endian
- ///
- Gbrp16Be = AVPixelFormat.AV_PIX_FMT_GBRP16BE,
- ///
- /// planar GBR 4:4:4 48bpp, little-endian
- ///
- Gbrp16Le = AVPixelFormat.AV_PIX_FMT_GBRP16LE,
- ///
- /// planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
- ///
- Yuva422P = AVPixelFormat.AV_PIX_FMT_YUVA422P,
- ///
- /// planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
- ///
- Yuva444P = AVPixelFormat.AV_PIX_FMT_YUVA444P,
- ///
- /// planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian
- ///
- Yuva420P9Be = AVPixelFormat.AV_PIX_FMT_YUVA420P9BE,
- ///
- /// planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian
- ///
- Yuva420P9Le = AVPixelFormat.AV_PIX_FMT_YUVA420P9LE,
- ///
- /// planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian
- ///
- Yuva422P9Be = AVPixelFormat.AV_PIX_FMT_YUVA422P9BE,
- ///
- /// planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian
- ///
- Yuva422P9Le = AVPixelFormat.AV_PIX_FMT_YUVA422P9LE,
- ///
- /// planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian
- ///
- Yuva444P9Be = AVPixelFormat.AV_PIX_FMT_YUVA444P9BE,
- ///
- /// planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian
- ///
- Yuva444P9Le = AVPixelFormat.AV_PIX_FMT_YUVA444P9LE,
- ///
- /// planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
- ///
- Yuva420P10Be = AVPixelFormat.AV_PIX_FMT_YUVA420P10BE,
- ///
- /// planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
- ///
- Yuva420P10Le = AVPixelFormat.AV_PIX_FMT_YUVA420P10LE,
- ///
- /// planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
- ///
- Yuva422P10Be = AVPixelFormat.AV_PIX_FMT_YUVA422P10BE,
- ///
- /// planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
- ///
- Yuva422P10Le = AVPixelFormat.AV_PIX_FMT_YUVA422P10LE,
- ///
- /// planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
- ///
- Yuva444P10Be = AVPixelFormat.AV_PIX_FMT_YUVA444P10BE,
- ///
- /// planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
- ///
- Yuva444P10Le = AVPixelFormat.AV_PIX_FMT_YUVA444P10LE,
- ///
- /// planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
- ///
- Yuva420P16Be = AVPixelFormat.AV_PIX_FMT_YUVA420P16BE,
- ///
- /// planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
- ///
- Yuva420P16Le = AVPixelFormat.AV_PIX_FMT_YUVA420P16LE,
- ///
- /// planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
- ///
- Yuva422P16Be = AVPixelFormat.AV_PIX_FMT_YUVA422P16BE,
- ///
- /// planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
- ///
- Yuva422P16Le = AVPixelFormat.AV_PIX_FMT_YUVA422P16LE,
- ///
- /// planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
- ///
- Yuva444P16Be = AVPixelFormat.AV_PIX_FMT_YUVA444P16BE,
- ///
- /// planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
- ///
- Yuva444P16Le = AVPixelFormat.AV_PIX_FMT_YUVA444P16LE,
- ///
- /// HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface
- ///
- Vdpau = AVPixelFormat.AV_PIX_FMT_VDPAU,
- ///
- /// packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each
- /// X/Y/Z is stored as little-endian, the 4 lower bits are set to 0
- ///
- Xyz12Le = AVPixelFormat.AV_PIX_FMT_XYZ12LE,
- ///
- /// packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each
- /// X/Y/Z is stored as big-endian, the 4 lower bits are set to 0
- ///
- Xyz12Be = AVPixelFormat.AV_PIX_FMT_XYZ12BE,
- ///
- /// interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
- ///
- Nv16 = AVPixelFormat.AV_PIX_FMT_NV16,
- ///
- /// interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- ///
- Nv20Le = AVPixelFormat.AV_PIX_FMT_NV20LE,
- ///
- /// interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- ///
- Nv20Be = AVPixelFormat.AV_PIX_FMT_NV20BE,
- ///
- /// packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each
- /// R/G/B/A component is stored as big-endian
- ///
- Rgba64Be = AVPixelFormat.AV_PIX_FMT_RGBA64BE,
- ///
- /// packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each
- /// R/G/B/A component is stored as little-endian
- ///
- Rgba64Le = AVPixelFormat.AV_PIX_FMT_RGBA64LE,
- ///
- /// packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each
- /// R/G/B/A component is stored as big-endian
- ///
- Bgra64Be = AVPixelFormat.AV_PIX_FMT_BGRA64BE,
- ///
- /// packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each
- /// R/G/B/A component is stored as little-endian
- ///
- Bgra64Le = AVPixelFormat.AV_PIX_FMT_BGRA64LE,
- ///
- /// packed YUV 4:2:2, 16bpp, Y0 Cr Y1 Cb
- ///
- Yvyu422 = AVPixelFormat.AV_PIX_FMT_YVYU422,
- ///
- /// 16 bits gray, 16 bits alpha (big-endian)
- ///
- Ya16Be = AVPixelFormat.AV_PIX_FMT_YA16BE,
- ///
- /// 16 bits gray, 16 bits alpha (little-endian)
- ///
- Ya16Le = AVPixelFormat.AV_PIX_FMT_YA16LE,
- ///
- /// planar GBRA 4:4:4:4 32bpp
- ///
- Gbrap = AVPixelFormat.AV_PIX_FMT_GBRAP,
- ///
- /// planar GBRA 4:4:4:4 64bpp, big-endian
- ///
- Gbrap16Be = AVPixelFormat.AV_PIX_FMT_GBRAP16BE,
- ///
- /// planar GBRA 4:4:4:4 64bpp, little-endian
- ///
- Gbrap16Le = AVPixelFormat.AV_PIX_FMT_GBRAP16LE,
- ///
- /// HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1
- /// structure.
- ///
- Qsv = AVPixelFormat.AV_PIX_FMT_QSV,
- ///
- /// HW acceleration though MMAL, data[3] contains a pointer to the MMAL_BUFFER_HEADER_T
- /// structure.
- ///
- Mmal = AVPixelFormat.AV_PIX_FMT_MMAL,
- ///
- /// HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView
- /// pointer
- ///
- D3D11VaVld = AVPixelFormat.AV_PIX_FMT_D3D11VA_VLD,
- ///
- /// HW acceleration through CUDA. data[i] contain CUdeviceptr pointers exactly as
- /// for system memory frames.
- ///
- Cuda = AVPixelFormat.AV_PIX_FMT_CUDA,
- ///
- /// packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined
- ///
- Xrgb = AVPixelFormat.AV_PIX_FMT_0RGB,
- ///
- /// packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
- ///
- Rgbx = AVPixelFormat.AV_PIX_FMT_RGB0,
- ///
- /// packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined
- ///
- Xbgr = AVPixelFormat.AV_PIX_FMT_0BGR,
- ///
- /// packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
- ///
- Bgrx = AVPixelFormat.AV_PIX_FMT_BGR0,
- ///
- /// planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- ///
- Yuv420P12Be = AVPixelFormat.AV_PIX_FMT_YUV420P12BE,
- ///
- /// planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- ///
- Yuv420P12Le = AVPixelFormat.AV_PIX_FMT_YUV420P12LE,
- ///
- /// planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
- ///
- Yuv420P14Be = AVPixelFormat.AV_PIX_FMT_YUV420P14BE,
- ///
- /// planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
- ///
- Yuv420P14Le = AVPixelFormat.AV_PIX_FMT_YUV420P14LE,
- ///
- /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- ///
- Yuv422P12Be = AVPixelFormat.AV_PIX_FMT_YUV422P12BE,
- ///
- /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- ///
- Yuv422P12Le = AVPixelFormat.AV_PIX_FMT_YUV422P12LE,
- ///
- /// planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
- ///
- Yuv422P14Be = AVPixelFormat.AV_PIX_FMT_YUV422P14BE,
- ///
- /// planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
- ///
- Yuv422P14Le = AVPixelFormat.AV_PIX_FMT_YUV422P14LE,
- ///
- /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- ///
- Yuv444P12Be = AVPixelFormat.AV_PIX_FMT_YUV444P12BE,
- ///
- /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- ///
- Yuv444P12Le = AVPixelFormat.AV_PIX_FMT_YUV444P12LE,
- ///
- /// planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
- ///
- Yuv444P14Be = AVPixelFormat.AV_PIX_FMT_YUV444P14BE,
- ///
- /// planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
- ///
- Yuv444P14Le = AVPixelFormat.AV_PIX_FMT_YUV444P14LE,
- ///
- /// planar GBR 4:4:4 36bpp, big-endian
- ///
- Gbrp12Be = AVPixelFormat.AV_PIX_FMT_GBRP12BE,
- ///
- /// planar GBR 4:4:4 36bpp, little-endian
- ///
- Gbrp12Le = AVPixelFormat.AV_PIX_FMT_GBRP12LE,
- ///
- /// planar GBR 4:4:4 42bpp, big-endian
- ///
- Gbrp14Be = AVPixelFormat.AV_PIX_FMT_GBRP14BE,
- ///
- /// planar GBR 4:4:4 42bpp, little-endian
- ///
- Gbrp14Le = AVPixelFormat.AV_PIX_FMT_GBRP14LE,
- ///
- /// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG),
- /// deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range
- ///
- [Obsolete("Deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range")]
- Yuvj411P = AVPixelFormat.AV_PIX_FMT_YUVJ411P,
- ///
- /// bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples
- ///
- BayerBggr8 = AVPixelFormat.AV_PIX_FMT_BAYER_BGGR8,
- ///
- /// bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples
- ///
- BayerRggb8 = AVPixelFormat.AV_PIX_FMT_BAYER_RGGB8,
- ///
- /// bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples
- ///
- BayerGbrg8 = AVPixelFormat.AV_PIX_FMT_BAYER_GBRG8,
- ///
- /// bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples
- ///
- BayerGrbg8 = AVPixelFormat.AV_PIX_FMT_BAYER_GRBG8,
- ///
- /// bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian
- ///
- BayerBggr16Le = AVPixelFormat.AV_PIX_FMT_BAYER_BGGR16LE,
- ///
- /// bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian
- ///
- BayerBggr16Be = AVPixelFormat.AV_PIX_FMT_BAYER_BGGR16BE,
- ///
- /// bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian
- ///
- BayerRggb16Le = AVPixelFormat.AV_PIX_FMT_BAYER_RGGB16LE,
- ///
- /// bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian
- ///
- BayerRggb16Be = AVPixelFormat.AV_PIX_FMT_BAYER_RGGB16BE,
- ///
- /// bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian
- ///
- BayerGbrg16Le = AVPixelFormat.AV_PIX_FMT_BAYER_GBRG16LE,
- ///
- /// bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian
- ///
- BayerGbrg16Be = AVPixelFormat.AV_PIX_FMT_BAYER_GBRG16BE,
- ///
- /// bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian
- ///
- BayerGrbg16Le = AVPixelFormat.AV_PIX_FMT_BAYER_GRBG16LE,
- ///
- /// bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, big-endian
- ///
- BayerGrbg16Be = AVPixelFormat.AV_PIX_FMT_BAYER_GRBG16BE,
- ///
- /// XVideo Motion Acceleration via common packet passing
- ///
- Xvmc = AVPixelFormat.AV_PIX_FMT_XVMC,
- ///
- /// planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian
- ///
- Yuv440P10Le = AVPixelFormat.AV_PIX_FMT_YUV440P10LE,
- ///
- /// planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian
- ///
- Yuv440P10Be = AVPixelFormat.AV_PIX_FMT_YUV440P10BE,
- ///
- /// planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian
- ///
- Yuv440P12Le = AVPixelFormat.AV_PIX_FMT_YUV440P12LE,
- ///
- /// planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian
- ///
- Yuv440P12Be = AVPixelFormat.AV_PIX_FMT_YUV440P12BE,
- ///
- /// packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), little-endian
- ///
- Ayuv64Le = AVPixelFormat.AV_PIX_FMT_AYUV64LE,
- ///
- /// packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), big-endian
- ///
- Ayuv64Be = AVPixelFormat.AV_PIX_FMT_AYUV64BE,
- ///
- /// hardware decoding through Videotoolbox
- ///
- Videotoolbox = AVPixelFormat.AV_PIX_FMT_VIDEOTOOLBOX,
- ///
- /// like NV12, with 10bpp per component, data in the high bits, zeros in the low
- /// bits, little-endian
- ///
- P010Le = AVPixelFormat.AV_PIX_FMT_P010LE,
- ///
- /// like NV12, with 10bpp per component, data in the high bits, zeros in the low
- /// bits, big-endian
- ///
- P010Be = AVPixelFormat.AV_PIX_FMT_P010BE,
- ///
- /// planar GBR 4:4:4:4 48bpp, big-endian
- ///
- Gbrap12Be = AVPixelFormat.AV_PIX_FMT_GBRAP12BE,
- ///
- /// planar GBR 4:4:4:4 48bpp, little-endian
- ///
- Gbrap12Le = AVPixelFormat.AV_PIX_FMT_GBRAP12LE,
- ///
- /// planar GBR 4:4:4:4 40bpp, big-endian
- ///
- Gbrap10Be = AVPixelFormat.AV_PIX_FMT_GBRAP10BE,
- ///
- /// planar GBR 4:4:4:4 40bpp, little-endian
- ///
- Gbrap10Le = AVPixelFormat.AV_PIX_FMT_GBRAP10LE,
- ///
- /// hardware decoding through MediaCodec
- ///
- Mediacodec = AVPixelFormat.AV_PIX_FMT_MEDIACODEC,
- ///
- /// Y , 12bpp, big-endian
- ///
- Gray12Be = AVPixelFormat.AV_PIX_FMT_GRAY12BE,
- ///
- /// Y , 12bpp, little-endian
- ///
- Gray12Le = AVPixelFormat.AV_PIX_FMT_GRAY12LE,
- ///
- /// Y , 10bpp, big-endian
- ///
- Gray10Be = AVPixelFormat.AV_PIX_FMT_GRAY10BE,
- ///
- /// Y , 10bpp, little-endian
- ///
- Gray10Le = AVPixelFormat.AV_PIX_FMT_GRAY10LE,
- ///
- /// like NV12, with 16bpp per component, little-endian
- ///
- P016Le = AVPixelFormat.AV_PIX_FMT_P016LE,
- ///
- /// like NV12, with 16bpp per component, big-endian
- ///
- P016Be = AVPixelFormat.AV_PIX_FMT_P016BE,
- ///
- /// Hardware surfaces for Direct3D11.
- ///
- D3D11 = AVPixelFormat.AV_PIX_FMT_D3D11,
- ///
- /// Y , 9bpp, big-endian
- ///
- Gray9Be = AVPixelFormat.AV_PIX_FMT_GRAY9BE,
- ///
- /// Y , 9bpp, little-endian
- ///
- Gray9Le = AVPixelFormat.AV_PIX_FMT_GRAY9LE,
- ///
- /// IEEE-754 single precision planar GBR 4:4:4, 96bpp, big-endian
- ///
- Gbrpf32Be = AVPixelFormat.AV_PIX_FMT_GBRPF32BE,
- ///
- /// IEEE-754 single precision planar GBR 4:4:4, 96bpp, little-endian
- ///
- Gbrpf32Le = AVPixelFormat.AV_PIX_FMT_GBRPF32LE,
- ///
- /// IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, big-endian
- ///
- Gbrapf32Be = AVPixelFormat.AV_PIX_FMT_GBRAPF32BE,
- ///
- /// IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, little-endian
- ///
- Gbrapf32Le = AVPixelFormat.AV_PIX_FMT_GBRAPF32LE,
- ///
- /// DRM-managed buffers exposed through PRIME buffer sharing.
- ///
- DrmPrime = AVPixelFormat.AV_PIX_FMT_DRM_PRIME,
- ///
- /// Hardware surfaces for OpenCL.
- ///
- Opencl = AVPixelFormat.AV_PIX_FMT_OPENCL,
- ///
- /// Y , 14bpp, big-endian
- ///
- Gray14Be = AVPixelFormat.AV_PIX_FMT_GRAY14BE,
- ///
- /// Y , 14bpp, little-endian
- ///
- Gray14Le = AVPixelFormat.AV_PIX_FMT_GRAY14LE,
- ///
- /// IEEE-754 single precision Y, 32bpp, big-endian
- ///
- Grayf32Be = AVPixelFormat.AV_PIX_FMT_GRAYF32BE,
- ///
- /// IEEE-754 single precision Y, 32bpp, little-endian
- ///
- Grayf32Le = AVPixelFormat.AV_PIX_FMT_GRAYF32LE,
- ///
- /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, big-endian
- ///
- Yuva422P12Be = AVPixelFormat.AV_PIX_FMT_YUVA422P12BE,
- ///
- /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, little-endian
- ///
- Yuva422P12Le = AVPixelFormat.AV_PIX_FMT_YUVA422P12LE,
- ///
- /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, big-endian
- ///
- Yuva444P12Be = AVPixelFormat.AV_PIX_FMT_YUVA444P12BE,
- ///
- /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, little-endian
- ///
- Yuva444P12Le = AVPixelFormat.AV_PIX_FMT_YUVA444P12LE,
- ///
- /// planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which
- /// are interleaved (first byte U and the following byte V)
- ///
- Nv24 = AVPixelFormat.AV_PIX_FMT_NV24,
- ///
- /// as above, but U and V bytes are swapped
- ///
- Nv42 = AVPixelFormat.AV_PIX_FMT_NV42,
- ///
- /// Vulkan hardware images.
- ///
- Vulkan = AVPixelFormat.AV_PIX_FMT_VULKAN,
- ///
- /// packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, big-endian
- ///
- Y210Be = AVPixelFormat.AV_PIX_FMT_Y210BE,
- ///
- /// packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, little-endian
- ///
- Y210Le = AVPixelFormat.AV_PIX_FMT_Y210LE,
- ///
- /// packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), little-endian, X=unused/undefined
- ///
- X2Rgb10Le = AVPixelFormat.AV_PIX_FMT_X2RGB10LE,
- ///
- /// packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), big-endian, X=unused/undefined
- ///
- X2Rgb10Be = AVPixelFormat.AV_PIX_FMT_X2RGB10BE,
- ///
- /// packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), little-endian, X=unused/undefined
- ///
- X2Bgr10Le = AVPixelFormat.AV_PIX_FMT_X2BGR10LE,
- ///
- /// packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), big-endian, X=unused/undefined
- ///
- X2Bgr10Be = AVPixelFormat.AV_PIX_FMT_X2BGR10BE,
- ///
- /// interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, big-endian
- ///
- P210Be = AVPixelFormat.AV_PIX_FMT_P210BE,
- ///
- /// interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, little-endian
- ///
- P210Le = AVPixelFormat.AV_PIX_FMT_P210LE,
- ///
- /// interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, big-endian
- ///
- P410Be = AVPixelFormat.AV_PIX_FMT_P410BE,
- ///
- /// interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, little-endian
- ///
- P410Le = AVPixelFormat.AV_PIX_FMT_P410LE,
- ///
- /// interleaved chroma YUV 4:2:2, 32bpp, big-endian
- ///
- P216Be = AVPixelFormat.AV_PIX_FMT_P216BE,
- ///
- /// interleaved chroma YUV 4:2:2, 32bpp, little-endian
- ///
- P216Le = AVPixelFormat.AV_PIX_FMT_P216LE,
- ///
- /// interleaved chroma YUV 4:4:4, 48bpp, big-endian
- ///
- P416Be = AVPixelFormat.AV_PIX_FMT_P416BE,
- ///
- /// interleaved chroma YUV 4:4:4, 48bpp, little-endian
- ///
- P416Le = AVPixelFormat.AV_PIX_FMT_P416LE,
- ///
- /// number of pixel formats, DO NOT USE THIS if you want to link with shared libav*
- /// because the number of formats might differ between versions
- ///
- Nb = AVPixelFormat.AV_PIX_FMT_NB
- }
+ None = AVPixelFormat.AV_PIX_FMT_NONE,
+ ///
+ /// planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
+ ///
+ Yuv420P = AVPixelFormat.AV_PIX_FMT_YUV420P,
+ ///
+ /// packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
+ ///
+ Yuyv422 = AVPixelFormat.AV_PIX_FMT_YUYV422,
+ ///
+ /// packed RGB 8:8:8, 24bpp, RGBRGB...
+ ///
+ Rgb24 = AVPixelFormat.AV_PIX_FMT_RGB24,
+ ///
+ /// packed RGB 8:8:8, 24bpp, BGRBGR...
+ ///
+ Bgr24 = AVPixelFormat.AV_PIX_FMT_BGR24,
+ ///
+ /// planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
+ ///
+ Yuv422P = AVPixelFormat.AV_PIX_FMT_YUV422P,
+ ///
+ /// planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
+ ///
+ Yuv444P = AVPixelFormat.AV_PIX_FMT_YUV444P,
+ ///
+ /// planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
+ ///
+ Yuv410P = AVPixelFormat.AV_PIX_FMT_YUV410P,
+ ///
+ /// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
+ ///
+ Yuv411P = AVPixelFormat.AV_PIX_FMT_YUV411P,
+ ///
+ /// Y , 8bpp
+ ///
+ Gray8 = AVPixelFormat.AV_PIX_FMT_GRAY8,
+ ///
+ /// Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb
+ /// to the lsb
+ ///
+ Monowhite = AVPixelFormat.AV_PIX_FMT_MONOWHITE,
+ ///
+ /// Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb
+ /// to the lsb
+ ///
+ Monoblack = AVPixelFormat.AV_PIX_FMT_MONOBLACK,
+ ///
+ /// 8 bits with AV_PIX_FMT_RGB32 palette
+ ///
+ Pal8 = AVPixelFormat.AV_PIX_FMT_PAL8,
+ ///
+ /// planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P
+ /// and setting color_range
+ ///
+ [Obsolete("Deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range")]
+ Yuvj420P = AVPixelFormat.AV_PIX_FMT_YUVJ420P,
+ ///
+ /// planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P
+ /// and setting color_range
+ ///
+ [Obsolete("Deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range")]
+ Yuvj422P = AVPixelFormat.AV_PIX_FMT_YUVJ422P,
+ ///
+ /// planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P
+ /// and setting color_range
+ ///
+ [Obsolete("Deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range")]
+ Yuvj444P = AVPixelFormat.AV_PIX_FMT_YUVJ444P,
+ ///
+ /// packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
+ ///
+ Uyvy422 = AVPixelFormat.AV_PIX_FMT_UYVY422,
+ ///
+ /// packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
+ ///
+ Uyyvyy411 = AVPixelFormat.AV_PIX_FMT_UYYVYY411,
+ ///
+ /// packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
+ ///
+ Bgr8 = AVPixelFormat.AV_PIX_FMT_BGR8,
+ ///
+ /// packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels,
+ /// the first pixel in the byte is the one composed by the 4 msb bits
+ ///
+ Bgr4 = AVPixelFormat.AV_PIX_FMT_BGR4,
+ ///
+ /// packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
+ ///
+ Bgr4Byte = AVPixelFormat.AV_PIX_FMT_BGR4_BYTE,
+ ///
+ /// packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
+ ///
+ Rgb8 = AVPixelFormat.AV_PIX_FMT_RGB8,
+ ///
+ /// packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels,
+ /// the first pixel in the byte is the one composed by the 4 msb bits
+ ///
+ Rgb4 = AVPixelFormat.AV_PIX_FMT_RGB4,
+ ///
+ /// packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
+ ///
+ Rgb4Byte = AVPixelFormat.AV_PIX_FMT_RGB4_BYTE,
+ ///
+ /// planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which
+ /// are interleaved (first byte U and the following byte V)
+ ///
+ Nv12 = AVPixelFormat.AV_PIX_FMT_NV12,
+ ///
+ /// as above, but U and V bytes are swapped
+ ///
+ Nv21 = AVPixelFormat.AV_PIX_FMT_NV21,
+ ///
+ /// packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
+ ///
+ Argb = AVPixelFormat.AV_PIX_FMT_ARGB,
+ ///
+ /// packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
+ ///
+ Rgba = AVPixelFormat.AV_PIX_FMT_RGBA,
+ ///
+ /// packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
+ ///
+ Abgr = AVPixelFormat.AV_PIX_FMT_ABGR,
+ ///
+ /// packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
+ ///
+ Bgra = AVPixelFormat.AV_PIX_FMT_BGRA,
+ ///
+ /// Y , 16bpp, big-endian
+ ///
+ Gray16Be = AVPixelFormat.AV_PIX_FMT_GRAY16BE,
+ ///
+ /// Y , 16bpp, little-endian
+ ///
+ Gray16Le = AVPixelFormat.AV_PIX_FMT_GRAY16LE,
+ ///
+ /// planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
+ ///
+ Yuv440P = AVPixelFormat.AV_PIX_FMT_YUV440P,
+ ///
+ /// planar YUV 4:4:0 full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV440P
+ /// and setting color_range
+ ///
+ [Obsolete("Deprecated in favor of AV_PIX_FMT_YUV440P and setting color range")]
+ Yuvj440P = AVPixelFormat.AV_PIX_FMT_YUVJ440P,
+ ///
+ /// planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
+ ///
+ Yuva420P = AVPixelFormat.AV_PIX_FMT_YUVA420P,
+ ///
+ /// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component
+ /// is stored as big-endian
+ ///
+ Rgb48Be = AVPixelFormat.AV_PIX_FMT_RGB48BE,
+ ///
+ /// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component
+ /// is stored as little-endian
+ ///
+ Rgb48Le = AVPixelFormat.AV_PIX_FMT_RGB48LE,
+ ///
+ /// packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
+ ///
+ Rgb565Be = AVPixelFormat.AV_PIX_FMT_RGB565BE,
+ ///
+ /// packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
+ ///
+ Rgb565Le = AVPixelFormat.AV_PIX_FMT_RGB565LE,
+ ///
+ /// packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian , X=unused/undefined
+ ///
+ Rgb555Be = AVPixelFormat.AV_PIX_FMT_RGB555BE,
+ ///
+ /// packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined
+ ///
+ Rgb555Le = AVPixelFormat.AV_PIX_FMT_RGB555LE,
+ ///
+ /// packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
+ ///
+ Bgr565Be = AVPixelFormat.AV_PIX_FMT_BGR565BE,
+ ///
+ /// packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
+ ///
+ Bgr565Le = AVPixelFormat.AV_PIX_FMT_BGR565LE,
+ ///
+ /// packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), big-endian , X=unused/undefined
+ ///
+ Bgr555Be = AVPixelFormat.AV_PIX_FMT_BGR555BE,
+ ///
+ /// packed BGR 5:5:5, 16bpp, (msb)1X 5B 5G 5R(lsb), little-endian, X=unused/undefined
+ ///
+ Bgr555Le = AVPixelFormat.AV_PIX_FMT_BGR555LE,
+ ///
+ /// Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
+ ///
+ Vaapi = AVPixelFormat.AV_PIX_FMT_VAAPI,
+ ///
+ /// planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ ///
+ Yuv420P16Le = AVPixelFormat.AV_PIX_FMT_YUV420P16LE,
+ ///
+ /// planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ ///
+ Yuv420P16Be = AVPixelFormat.AV_PIX_FMT_YUV420P16BE,
+ ///
+ /// planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ ///
+ Yuv422P16Le = AVPixelFormat.AV_PIX_FMT_YUV422P16LE,
+ ///
+ /// planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ ///
+ Yuv422P16Be = AVPixelFormat.AV_PIX_FMT_YUV422P16BE,
+ ///
+ /// planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ ///
+ Yuv444P16Le = AVPixelFormat.AV_PIX_FMT_YUV444P16LE,
+ ///
+ /// planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ ///
+ Yuv444P16Be = AVPixelFormat.AV_PIX_FMT_YUV444P16BE,
+ ///
+ /// HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
+ ///
+ Dxva2Vld = AVPixelFormat.AV_PIX_FMT_DXVA2_VLD,
+ ///
+ /// packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), little-endian, X=unused/undefined
+ ///
+ Rgb444Le = AVPixelFormat.AV_PIX_FMT_RGB444LE,
+ ///
+ /// packed RGB 4:4:4, 16bpp, (msb)4X 4R 4G 4B(lsb), big-endian, X=unused/undefined
+ ///
+ Rgb444Be = AVPixelFormat.AV_PIX_FMT_RGB444BE,
+ ///
+ /// packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), little-endian, X=unused/undefined
+ ///
+ Bgr444Le = AVPixelFormat.AV_PIX_FMT_BGR444LE,
+ ///
+ /// packed BGR 4:4:4, 16bpp, (msb)4X 4B 4G 4R(lsb), big-endian, X=unused/undefined
+ ///
+ Bgr444Be = AVPixelFormat.AV_PIX_FMT_BGR444BE,
+ ///
+ /// 8 bits gray, 8 bits alpha
+ ///
+ Ya8 = AVPixelFormat.AV_PIX_FMT_YA8,
+ ///
+ /// alias for AV_PIX_FMT_YA8
+ ///
+ Y400A = AVPixelFormat.AV_PIX_FMT_Y400A,
+ ///
+ /// alias for AV_PIX_FMT_YA8
+ ///
+ Gray8A = AVPixelFormat.AV_PIX_FMT_GRAY8A,
+ ///
+ /// packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component
+ /// is stored as big-endian
+ ///
+ Bgr48Be = AVPixelFormat.AV_PIX_FMT_BGR48BE,
+ ///
+ /// packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component
+ /// is stored as little-endian
+ ///
+ Bgr48Le = AVPixelFormat.AV_PIX_FMT_BGR48LE,
+ ///
+ /// planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ ///
+ Yuv420P9Be = AVPixelFormat.AV_PIX_FMT_YUV420P9BE,
+ ///
+ /// planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ ///
+ Yuv420P9Le = AVPixelFormat.AV_PIX_FMT_YUV420P9LE,
+ ///
+ /// planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ ///
+ Yuv420P10Be = AVPixelFormat.AV_PIX_FMT_YUV420P10BE,
+ ///
+ /// planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ ///
+ Yuv420P10Le = AVPixelFormat.AV_PIX_FMT_YUV420P10LE,
+ ///
+ /// planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ ///
+ Yuv422P10Be = AVPixelFormat.AV_PIX_FMT_YUV422P10BE,
+ ///
+ /// planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ ///
+ Yuv422P10Le = AVPixelFormat.AV_PIX_FMT_YUV422P10LE,
+ ///
+ /// planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ ///
+ Yuv444P9Be = AVPixelFormat.AV_PIX_FMT_YUV444P9BE,
+ ///
+ /// planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ ///
+ Yuv444P9Le = AVPixelFormat.AV_PIX_FMT_YUV444P9LE,
+ ///
+ /// planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ ///
+ Yuv444P10Be = AVPixelFormat.AV_PIX_FMT_YUV444P10BE,
+ ///
+ /// planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ ///
+ Yuv444P10Le = AVPixelFormat.AV_PIX_FMT_YUV444P10LE,
+ ///
+ /// planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ ///
+ Yuv422P9Be = AVPixelFormat.AV_PIX_FMT_YUV422P9BE,
+ ///
+ /// planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ ///
+ Yuv422P9Le = AVPixelFormat.AV_PIX_FMT_YUV422P9LE,
+ ///
+ /// planar GBR 4:4:4 24bpp
+ ///
+ Gbrp = AVPixelFormat.AV_PIX_FMT_GBRP,
+ ///
+ /// alias for GBRP
+ ///
+ Gbr24P = AVPixelFormat.AV_PIX_FMT_GBR24P,
+ ///
+ /// planar GBR 4:4:4 27bpp, big-endian
+ ///
+ Gbrp9Be = AVPixelFormat.AV_PIX_FMT_GBRP9BE,
+ ///
+ /// planar GBR 4:4:4 27bpp, little-endian
+ ///
+ Gbrp9Le = AVPixelFormat.AV_PIX_FMT_GBRP9LE,
+ ///
+ /// planar GBR 4:4:4 30bpp, big-endian
+ ///
+ Gbrp10Be = AVPixelFormat.AV_PIX_FMT_GBRP10BE,
+ ///
+ /// planar GBR 4:4:4 30bpp, little-endian
+ ///
+ Gbrp10Le = AVPixelFormat.AV_PIX_FMT_GBRP10LE,
+ ///
+ /// planar GBR 4:4:4 48bpp, big-endian
+ ///
+ Gbrp16Be = AVPixelFormat.AV_PIX_FMT_GBRP16BE,
+ ///
+ /// planar GBR 4:4:4 48bpp, little-endian
+ ///
+ Gbrp16Le = AVPixelFormat.AV_PIX_FMT_GBRP16LE,
+ ///
+ /// planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
+ ///
+ Yuva422P = AVPixelFormat.AV_PIX_FMT_YUVA422P,
+ ///
+ /// planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
+ ///
+ Yuva444P = AVPixelFormat.AV_PIX_FMT_YUVA444P,
+ ///
+ /// planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), big-endian
+ ///
+ Yuva420P9Be = AVPixelFormat.AV_PIX_FMT_YUVA420P9BE,
+ ///
+ /// planar YUV 4:2:0 22.5bpp, (1 Cr & Cb sample per 2x2 Y & A samples), little-endian
+ ///
+ Yuva420P9Le = AVPixelFormat.AV_PIX_FMT_YUVA420P9LE,
+ ///
+ /// planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), big-endian
+ ///
+ Yuva422P9Be = AVPixelFormat.AV_PIX_FMT_YUVA422P9BE,
+ ///
+ /// planar YUV 4:2:2 27bpp, (1 Cr & Cb sample per 2x1 Y & A samples), little-endian
+ ///
+ Yuva422P9Le = AVPixelFormat.AV_PIX_FMT_YUVA422P9LE,
+ ///
+ /// planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), big-endian
+ ///
+ Yuva444P9Be = AVPixelFormat.AV_PIX_FMT_YUVA444P9BE,
+ ///
+ /// planar YUV 4:4:4 36bpp, (1 Cr & Cb sample per 1x1 Y & A samples), little-endian
+ ///
+ Yuva444P9Le = AVPixelFormat.AV_PIX_FMT_YUVA444P9LE,
+ ///
+ /// planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
+ ///
+ Yuva420P10Be = AVPixelFormat.AV_PIX_FMT_YUVA420P10BE,
+ ///
+ /// planar YUV 4:2:0 25bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
+ ///
+ Yuva420P10Le = AVPixelFormat.AV_PIX_FMT_YUVA420P10LE,
+ ///
+ /// planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
+ ///
+ Yuva422P10Be = AVPixelFormat.AV_PIX_FMT_YUVA422P10BE,
+ ///
+ /// planar YUV 4:2:2 30bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
+ ///
+ Yuva422P10Le = AVPixelFormat.AV_PIX_FMT_YUVA422P10LE,
+ ///
+ /// planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
+ ///
+ Yuva444P10Be = AVPixelFormat.AV_PIX_FMT_YUVA444P10BE,
+ ///
+ /// planar YUV 4:4:4 40bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
+ ///
+ Yuva444P10Le = AVPixelFormat.AV_PIX_FMT_YUVA444P10LE,
+ ///
+ /// planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, big-endian)
+ ///
+ Yuva420P16Be = AVPixelFormat.AV_PIX_FMT_YUVA420P16BE,
+ ///
+ /// planar YUV 4:2:0 40bpp, (1 Cr & Cb sample per 2x2 Y & A samples, little-endian)
+ ///
+ Yuva420P16Le = AVPixelFormat.AV_PIX_FMT_YUVA420P16LE,
+ ///
+ /// planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, big-endian)
+ ///
+ Yuva422P16Be = AVPixelFormat.AV_PIX_FMT_YUVA422P16BE,
+ ///
+ /// planar YUV 4:2:2 48bpp, (1 Cr & Cb sample per 2x1 Y & A samples, little-endian)
+ ///
+ Yuva422P16Le = AVPixelFormat.AV_PIX_FMT_YUVA422P16LE,
+ ///
+ /// planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, big-endian)
+ ///
+ Yuva444P16Be = AVPixelFormat.AV_PIX_FMT_YUVA444P16BE,
+ ///
+ /// planar YUV 4:4:4 64bpp, (1 Cr & Cb sample per 1x1 Y & A samples, little-endian)
+ ///
+ Yuva444P16Le = AVPixelFormat.AV_PIX_FMT_YUVA444P16LE,
+ ///
+ /// HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface
+ ///
+ Vdpau = AVPixelFormat.AV_PIX_FMT_VDPAU,
+ ///
+ /// packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each
+ /// X/Y/Z is stored as little-endian, the 4 lower bits are set to 0
+ ///
+ Xyz12Le = AVPixelFormat.AV_PIX_FMT_XYZ12LE,
+ ///
+ /// packed XYZ 4:4:4, 36 bpp, (msb) 12X, 12Y, 12Z (lsb), the 2-byte value for each
+ /// X/Y/Z is stored as big-endian, the 4 lower bits are set to 0
+ ///
+ Xyz12Be = AVPixelFormat.AV_PIX_FMT_XYZ12BE,
+ ///
+ /// interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
+ ///
+ Nv16 = AVPixelFormat.AV_PIX_FMT_NV16,
+ ///
+ /// interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ ///
+ Nv20Le = AVPixelFormat.AV_PIX_FMT_NV20LE,
+ ///
+ /// interleaved chroma YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ ///
+ Nv20Be = AVPixelFormat.AV_PIX_FMT_NV20BE,
+ ///
+ /// packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each
+ /// R/G/B/A component is stored as big-endian
+ ///
+ Rgba64Be = AVPixelFormat.AV_PIX_FMT_RGBA64BE,
+ ///
+ /// packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each
+ /// R/G/B/A component is stored as little-endian
+ ///
+ Rgba64Le = AVPixelFormat.AV_PIX_FMT_RGBA64LE,
+ ///
+ /// packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each
+ /// R/G/B/A component is stored as big-endian
+ ///
+ Bgra64Be = AVPixelFormat.AV_PIX_FMT_BGRA64BE,
+ ///
+ /// packed RGBA 16:16:16:16, 64bpp, 16B, 16G, 16R, 16A, the 2-byte value for each
+ /// R/G/B/A component is stored as little-endian
+ ///
+ Bgra64Le = AVPixelFormat.AV_PIX_FMT_BGRA64LE,
+ ///
+ /// packed YUV 4:2:2, 16bpp, Y0 Cr Y1 Cb
+ ///
+ Yvyu422 = AVPixelFormat.AV_PIX_FMT_YVYU422,
+ ///
+ /// 16 bits gray, 16 bits alpha (big-endian)
+ ///
+ Ya16Be = AVPixelFormat.AV_PIX_FMT_YA16BE,
+ ///
+ /// 16 bits gray, 16 bits alpha (little-endian)
+ ///
+ Ya16Le = AVPixelFormat.AV_PIX_FMT_YA16LE,
+ ///
+ /// planar GBRA 4:4:4:4 32bpp
+ ///
+ Gbrap = AVPixelFormat.AV_PIX_FMT_GBRAP,
+ ///
+ /// planar GBRA 4:4:4:4 64bpp, big-endian
+ ///
+ Gbrap16Be = AVPixelFormat.AV_PIX_FMT_GBRAP16BE,
+ ///
+ /// planar GBRA 4:4:4:4 64bpp, little-endian
+ ///
+ Gbrap16Le = AVPixelFormat.AV_PIX_FMT_GBRAP16LE,
+ ///
+ /// HW acceleration through QSV, data[3] contains a pointer to the mfxFrameSurface1
+ /// structure.
+ ///
+ Qsv = AVPixelFormat.AV_PIX_FMT_QSV,
+ ///
+ /// HW acceleration through MMAL, data[3] contains a pointer to the MMAL_BUFFER_HEADER_T
+ /// structure.
+ ///
+ Mmal = AVPixelFormat.AV_PIX_FMT_MMAL,
+ ///
+ /// HW decoding through Direct3D11 via old API, Picture.data[3] contains an ID3D11VideoDecoderOutputView
+ /// pointer
+ ///
+ D3D11VaVld = AVPixelFormat.AV_PIX_FMT_D3D11VA_VLD,
+ ///
+ /// HW acceleration through CUDA. data[i] contain CUdeviceptr pointers exactly as
+ /// for system memory frames.
+ ///
+ Cuda = AVPixelFormat.AV_PIX_FMT_CUDA,
+ ///
+ /// packed RGB 8:8:8, 32bpp, XRGBXRGB... X=unused/undefined
+ ///
+ Xrgb = AVPixelFormat.AV_PIX_FMT_0RGB,
+ ///
+ /// packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
+ ///
+ Rgbx = AVPixelFormat.AV_PIX_FMT_RGB0,
+ ///
+ /// packed BGR 8:8:8, 32bpp, XBGRXBGR... X=unused/undefined
+ ///
+ Xbgr = AVPixelFormat.AV_PIX_FMT_0BGR,
+ ///
+ /// packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
+ ///
+ Bgrx = AVPixelFormat.AV_PIX_FMT_BGR0,
+ ///
+ /// planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ ///
+ Yuv420P12Be = AVPixelFormat.AV_PIX_FMT_YUV420P12BE,
+ ///
+ /// planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ ///
+ Yuv420P12Le = AVPixelFormat.AV_PIX_FMT_YUV420P12LE,
+ ///
+ /// planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
+ ///
+ Yuv420P14Be = AVPixelFormat.AV_PIX_FMT_YUV420P14BE,
+ ///
+ /// planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
+ ///
+ Yuv420P14Le = AVPixelFormat.AV_PIX_FMT_YUV420P14LE,
+ ///
+ /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ ///
+ Yuv422P12Be = AVPixelFormat.AV_PIX_FMT_YUV422P12BE,
+ ///
+ /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ ///
+ Yuv422P12Le = AVPixelFormat.AV_PIX_FMT_YUV422P12LE,
+ ///
+ /// planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
+ ///
+ Yuv422P14Be = AVPixelFormat.AV_PIX_FMT_YUV422P14BE,
+ ///
+ /// planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
+ ///
+ Yuv422P14Le = AVPixelFormat.AV_PIX_FMT_YUV422P14LE,
+ ///
+ /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ ///
+ Yuv444P12Be = AVPixelFormat.AV_PIX_FMT_YUV444P12BE,
+ ///
+ /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ ///
+ Yuv444P12Le = AVPixelFormat.AV_PIX_FMT_YUV444P12LE,
+ ///
+ /// planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
+ ///
+ Yuv444P14Be = AVPixelFormat.AV_PIX_FMT_YUV444P14BE,
+ ///
+ /// planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
+ ///
+ Yuv444P14Le = AVPixelFormat.AV_PIX_FMT_YUV444P14LE,
+ ///
+ /// planar GBR 4:4:4 36bpp, big-endian
+ ///
+ Gbrp12Be = AVPixelFormat.AV_PIX_FMT_GBRP12BE,
+ ///
+ /// planar GBR 4:4:4 36bpp, little-endian
+ ///
+ Gbrp12Le = AVPixelFormat.AV_PIX_FMT_GBRP12LE,
+ ///
+ /// planar GBR 4:4:4 42bpp, big-endian
+ ///
+ Gbrp14Be = AVPixelFormat.AV_PIX_FMT_GBRP14BE,
+ ///
+ /// planar GBR 4:4:4 42bpp, little-endian
+ ///
+ Gbrp14Le = AVPixelFormat.AV_PIX_FMT_GBRP14LE,
+ ///
+ /// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples) full scale (JPEG),
+ /// deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range
+ ///
+ [Obsolete("Deprecated in favor of AV_PIX_FMT_YUV411P and setting color_range")]
+ Yuvj411P = AVPixelFormat.AV_PIX_FMT_YUVJ411P,
+ ///
+ /// bayer, BGBG..(odd line), GRGR..(even line), 8-bit samples
+ ///
+ BayerBggr8 = AVPixelFormat.AV_PIX_FMT_BAYER_BGGR8,
+ ///
+ /// bayer, RGRG..(odd line), GBGB..(even line), 8-bit samples
+ ///
+ BayerRggb8 = AVPixelFormat.AV_PIX_FMT_BAYER_RGGB8,
+ ///
+ /// bayer, GBGB..(odd line), RGRG..(even line), 8-bit samples
+ ///
+ BayerGbrg8 = AVPixelFormat.AV_PIX_FMT_BAYER_GBRG8,
+ ///
+ /// bayer, GRGR..(odd line), BGBG..(even line), 8-bit samples
+ ///
+ BayerGrbg8 = AVPixelFormat.AV_PIX_FMT_BAYER_GRBG8,
+ ///
+ /// bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, little-endian
+ ///
+ BayerBggr16Le = AVPixelFormat.AV_PIX_FMT_BAYER_BGGR16LE,
+ ///
+ /// bayer, BGBG..(odd line), GRGR..(even line), 16-bit samples, big-endian
+ ///
+ BayerBggr16Be = AVPixelFormat.AV_PIX_FMT_BAYER_BGGR16BE,
+ ///
+ /// bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, little-endian
+ ///
+ BayerRggb16Le = AVPixelFormat.AV_PIX_FMT_BAYER_RGGB16LE,
+ ///
+ /// bayer, RGRG..(odd line), GBGB..(even line), 16-bit samples, big-endian
+ ///
+ BayerRggb16Be = AVPixelFormat.AV_PIX_FMT_BAYER_RGGB16BE,
+ ///
+ /// bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, little-endian
+ ///
+ BayerGbrg16Le = AVPixelFormat.AV_PIX_FMT_BAYER_GBRG16LE,
+ ///
+ /// bayer, GBGB..(odd line), RGRG..(even line), 16-bit samples, big-endian
+ ///
+ BayerGbrg16Be = AVPixelFormat.AV_PIX_FMT_BAYER_GBRG16BE,
+ ///
+ /// bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, little-endian
+ ///
+ BayerGrbg16Le = AVPixelFormat.AV_PIX_FMT_BAYER_GRBG16LE,
+ ///
+ /// bayer, GRGR..(odd line), BGBG..(even line), 16-bit samples, big-endian
+ ///
+ BayerGrbg16Be = AVPixelFormat.AV_PIX_FMT_BAYER_GRBG16BE,
+ ///
+ /// XVideo Motion Acceleration via common packet passing
+ ///
+ Xvmc = AVPixelFormat.AV_PIX_FMT_XVMC,
+ ///
+ /// planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian
+ ///
+ Yuv440P10Le = AVPixelFormat.AV_PIX_FMT_YUV440P10LE,
+ ///
+ /// planar YUV 4:4:0,20bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian
+ ///
+ Yuv440P10Be = AVPixelFormat.AV_PIX_FMT_YUV440P10BE,
+ ///
+ /// planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), little-endian
+ ///
+ Yuv440P12Le = AVPixelFormat.AV_PIX_FMT_YUV440P12LE,
+ ///
+ /// planar YUV 4:4:0,24bpp, (1 Cr & Cb sample per 1x2 Y samples), big-endian
+ ///
+ Yuv440P12Be = AVPixelFormat.AV_PIX_FMT_YUV440P12BE,
+ ///
+ /// packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), little-endian
+ ///
+ Ayuv64Le = AVPixelFormat.AV_PIX_FMT_AYUV64LE,
+ ///
+ /// packed AYUV 4:4:4,64bpp (1 Cr & Cb sample per 1x1 Y & A samples), big-endian
+ ///
+ Ayuv64Be = AVPixelFormat.AV_PIX_FMT_AYUV64BE,
+ ///
+ /// hardware decoding through Videotoolbox
+ ///
+ Videotoolbox = AVPixelFormat.AV_PIX_FMT_VIDEOTOOLBOX,
+ ///
+ /// like NV12, with 10bpp per component, data in the high bits, zeros in the low
+ /// bits, little-endian
+ ///
+ P010Le = AVPixelFormat.AV_PIX_FMT_P010LE,
+ ///
+ /// like NV12, with 10bpp per component, data in the high bits, zeros in the low
+ /// bits, big-endian
+ ///
+ P010Be = AVPixelFormat.AV_PIX_FMT_P010BE,
+ ///
+ /// planar GBR 4:4:4:4 48bpp, big-endian
+ ///
+ Gbrap12Be = AVPixelFormat.AV_PIX_FMT_GBRAP12BE,
+ ///
+ /// planar GBR 4:4:4:4 48bpp, little-endian
+ ///
+ Gbrap12Le = AVPixelFormat.AV_PIX_FMT_GBRAP12LE,
+ ///
+ /// planar GBR 4:4:4:4 40bpp, big-endian
+ ///
+ Gbrap10Be = AVPixelFormat.AV_PIX_FMT_GBRAP10BE,
+ ///
+ /// planar GBR 4:4:4:4 40bpp, little-endian
+ ///
+ Gbrap10Le = AVPixelFormat.AV_PIX_FMT_GBRAP10LE,
+ ///
+ /// hardware decoding through MediaCodec
+ ///
+ Mediacodec = AVPixelFormat.AV_PIX_FMT_MEDIACODEC,
+ ///
+ /// Y , 12bpp, big-endian
+ ///
+ Gray12Be = AVPixelFormat.AV_PIX_FMT_GRAY12BE,
+ ///
+ /// Y , 12bpp, little-endian
+ ///
+ Gray12Le = AVPixelFormat.AV_PIX_FMT_GRAY12LE,
+ ///
+ /// Y , 10bpp, big-endian
+ ///
+ Gray10Be = AVPixelFormat.AV_PIX_FMT_GRAY10BE,
+ ///
+ /// Y , 10bpp, little-endian
+ ///
+ Gray10Le = AVPixelFormat.AV_PIX_FMT_GRAY10LE,
+ ///
+ /// like NV12, with 16bpp per component, little-endian
+ ///
+ P016Le = AVPixelFormat.AV_PIX_FMT_P016LE,
+ ///
+ /// like NV12, with 16bpp per component, big-endian
+ ///
+ P016Be = AVPixelFormat.AV_PIX_FMT_P016BE,
+ ///
+ /// Hardware surfaces for Direct3D11.
+ ///
+ D3D11 = AVPixelFormat.AV_PIX_FMT_D3D11,
+ ///
+ /// Y , 9bpp, big-endian
+ ///
+ Gray9Be = AVPixelFormat.AV_PIX_FMT_GRAY9BE,
+ ///
+ /// Y , 9bpp, little-endian
+ ///
+ Gray9Le = AVPixelFormat.AV_PIX_FMT_GRAY9LE,
+ ///
+ /// IEEE-754 single precision planar GBR 4:4:4, 96bpp, big-endian
+ ///
+ Gbrpf32Be = AVPixelFormat.AV_PIX_FMT_GBRPF32BE,
+ ///
+ /// IEEE-754 single precision planar GBR 4:4:4, 96bpp, little-endian
+ ///
+ Gbrpf32Le = AVPixelFormat.AV_PIX_FMT_GBRPF32LE,
+ ///
+ /// IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, big-endian
+ ///
+ Gbrapf32Be = AVPixelFormat.AV_PIX_FMT_GBRAPF32BE,
+ ///
+ /// IEEE-754 single precision planar GBRA 4:4:4:4, 128bpp, little-endian
+ ///
+ Gbrapf32Le = AVPixelFormat.AV_PIX_FMT_GBRAPF32LE,
+ ///
+ /// DRM-managed buffers exposed through PRIME buffer sharing.
+ ///
+ DrmPrime = AVPixelFormat.AV_PIX_FMT_DRM_PRIME,
+ ///
+ /// Hardware surfaces for OpenCL.
+ ///
+ Opencl = AVPixelFormat.AV_PIX_FMT_OPENCL,
+ ///
+ /// Y , 14bpp, big-endian
+ ///
+ Gray14Be = AVPixelFormat.AV_PIX_FMT_GRAY14BE,
+ ///
+ /// Y , 14bpp, little-endian
+ ///
+ Gray14Le = AVPixelFormat.AV_PIX_FMT_GRAY14LE,
+ ///
+ /// IEEE-754 single precision Y, 32bpp, big-endian
+ ///
+ Grayf32Be = AVPixelFormat.AV_PIX_FMT_GRAYF32BE,
+ ///
+ /// IEEE-754 single precision Y, 32bpp, little-endian
+ ///
+ Grayf32Le = AVPixelFormat.AV_PIX_FMT_GRAYF32LE,
+ ///
+ /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, big-endian
+ ///
+ Yuva422P12Be = AVPixelFormat.AV_PIX_FMT_YUVA422P12BE,
+ ///
+ /// planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), 12b alpha, little-endian
+ ///
+ Yuva422P12Le = AVPixelFormat.AV_PIX_FMT_YUVA422P12LE,
+ ///
+ /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, big-endian
+ ///
+ Yuva444P12Be = AVPixelFormat.AV_PIX_FMT_YUVA444P12BE,
+ ///
+ /// planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), 12b alpha, little-endian
+ ///
+ Yuva444P12Le = AVPixelFormat.AV_PIX_FMT_YUVA444P12LE,
+ ///
+ /// planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which
+ /// are interleaved (first byte U and the following byte V)
+ ///
+ Nv24 = AVPixelFormat.AV_PIX_FMT_NV24,
+ ///
+ /// as above, but U and V bytes are swapped
+ ///
+ Nv42 = AVPixelFormat.AV_PIX_FMT_NV42,
+ ///
+ /// Vulkan hardware images.
+ ///
+ Vulkan = AVPixelFormat.AV_PIX_FMT_VULKAN,
+ ///
+ /// packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, big-endian
+ ///
+ Y210Be = AVPixelFormat.AV_PIX_FMT_Y210BE,
+ ///
+ /// packed YUV 4:2:2 like YUYV422, 20bpp, data in the high bits, little-endian
+ ///
+ Y210Le = AVPixelFormat.AV_PIX_FMT_Y210LE,
+ ///
+ /// packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), little-endian, X=unused/undefined
+ ///
+ X2Rgb10Le = AVPixelFormat.AV_PIX_FMT_X2RGB10LE,
+ ///
+ /// packed RGB 10:10:10, 30bpp, (msb)2X 10R 10G 10B(lsb), big-endian, X=unused/undefined
+ ///
+ X2Rgb10Be = AVPixelFormat.AV_PIX_FMT_X2RGB10BE,
+ ///
+ /// packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), little-endian, X=unused/undefined
+ ///
+ X2Bgr10Le = AVPixelFormat.AV_PIX_FMT_X2BGR10LE,
+ ///
+ /// packed BGR 10:10:10, 30bpp, (msb)2X 10B 10G 10R(lsb), big-endian, X=unused/undefined
+ ///
+ X2Bgr10Be = AVPixelFormat.AV_PIX_FMT_X2BGR10BE,
+ ///
+ /// interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, big-endian
+ ///
+ P210Be = AVPixelFormat.AV_PIX_FMT_P210BE,
+ ///
+ /// interleaved chroma YUV 4:2:2, 20bpp, data in the high bits, little-endian
+ ///
+ P210Le = AVPixelFormat.AV_PIX_FMT_P210LE,
+ ///
+ /// interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, big-endian
+ ///
+ P410Be = AVPixelFormat.AV_PIX_FMT_P410BE,
+ ///
+ /// interleaved chroma YUV 4:4:4, 30bpp, data in the high bits, little-endian
+ ///
+ P410Le = AVPixelFormat.AV_PIX_FMT_P410LE,
+ ///
+ /// interleaved chroma YUV 4:2:2, 32bpp, big-endian
+ ///
+ P216Be = AVPixelFormat.AV_PIX_FMT_P216BE,
+ ///
+ /// interleaved chroma YUV 4:2:2, 32bpp, little-endian
+ ///
+ P216Le = AVPixelFormat.AV_PIX_FMT_P216LE,
+ ///
+ /// interleaved chroma YUV 4:4:4, 48bpp, big-endian
+ ///
+ P416Be = AVPixelFormat.AV_PIX_FMT_P416BE,
+ ///
+ /// interleaved chroma YUV 4:4:4, 48bpp, little-endian
+ ///
+ P416Le = AVPixelFormat.AV_PIX_FMT_P416LE,
+ ///
+ /// number of pixel formats, DO NOT USE THIS if you want to link with shared libav*
+ /// because the number of formats might differ between versions
+ ///
+ Nb = AVPixelFormat.AV_PIX_FMT_NB
+}
- public static class PixelFormatExtensions
- {
+public static class PixelFormatExtensions
+{
#pragma warning disable CS0618
- public static PixelFormat RecycleDeprecated(this PixelFormat pixelFormat)
+ public static PixelFormat RecycleDeprecated(this PixelFormat pixelFormat)
+ {
+ return pixelFormat switch
{
- return pixelFormat switch
- {
- PixelFormat.Yuvj411P => PixelFormat.Yuv411P,
- PixelFormat.Yuvj420P => PixelFormat.Yuv420P,
- PixelFormat.Yuvj422P => PixelFormat.Yuv422P,
- PixelFormat.Yuvj440P => PixelFormat.Yuv440P,
- PixelFormat.Yuvj444P => PixelFormat.Yuv444P,
- _ => pixelFormat
- };
- }
-#pragma warning restore CS0618
+ PixelFormat.Yuvj411P => PixelFormat.Yuv411P,
+ PixelFormat.Yuvj420P => PixelFormat.Yuv420P,
+ PixelFormat.Yuvj422P => PixelFormat.Yuv422P,
+ PixelFormat.Yuvj440P => PixelFormat.Yuv440P,
+ PixelFormat.Yuvj444P => PixelFormat.Yuv444P,
+ _ => pixelFormat
+ };
}
+#pragma warning restore CS0618
}
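A note on `RecycleDeprecated` above: the `Yuvj*` members mirror FFmpeg's deprecated full-range (JPEG) pixel formats, and the extension folds them back onto their plain `Yuv*` equivalents so downstream code only has to handle the non-deprecated set. A minimal usage sketch, assuming only the enum and extension class from the hunk above are in scope:

```csharp
using System;
using SeeShark;

// Yuvj420P is marked [Obsolete], so referencing it directly emits a
// CS0618 warning — which is exactly what RecycleDeprecated lets
// downstream code avoid caring about.
#pragma warning disable CS0618
PixelFormat deprecated = PixelFormat.Yuvj420P;
#pragma warning restore CS0618

PixelFormat usable = deprecated.RecycleDeprecated();
Console.WriteLine(usable); // prints "Yuv420P"
```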
diff --git a/SeeShark/Utils/DShowUtils.cs b/SeeShark/Utils/DShowUtils.cs
index 7943fff..3df9dc1 100644
--- a/SeeShark/Utils/DShowUtils.cs
+++ b/SeeShark/Utils/DShowUtils.cs
@@ -10,189 +10,188 @@
using SeeShark.Device;
using SeeShark.Utils.PrivateFFmpeg;
-namespace SeeShark.Utils
+namespace SeeShark.Utils;
+
+internal static class DShowUtils
{
- internal static class DShowUtils
+ ///
+ /// Type of compression for a bitmap image.
+ /// See https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wmf/4e588f70-bd92-4a6f-b77f-35d0feaf7a57
+ ///
+ private enum BitmapCompression : int
{
- ///
- /// Type of compression for a bitmap image.
- /// See https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wmf/4e588f70-bd92-4a6f-b77f-35d0feaf7a57
- ///
- private enum BitmapCompression : int
- {
- Rgb = 0x00,
- Rle8 = 0x01,
- Rle4 = 0x02,
- Bitfields = 0x03,
- Jpeg = 0x04,
- Png = 0x05,
- Cmyk = 0x0B,
- Cmykrle8 = 0x0C,
- Cmykrle4 = 0x0D,
- }
+ Rgb = 0x00,
+ Rle8 = 0x01,
+ Rle4 = 0x02,
+ Bitfields = 0x03,
+ Jpeg = 0x04,
+ Png = 0x05,
+ Cmyk = 0x0B,
+ Cmykrle8 = 0x0C,
+ Cmykrle4 = 0x0D,
+ }
- public static CameraInfo[] EnumerateDevices()
+ public static CameraInfo[] EnumerateDevices()
+ {
+ DsDevice[] dsDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
+ CameraInfo[] devices = new CameraInfo[dsDevices.Length];
+ for (int i = 0; i < dsDevices.Length; i++)
{
- DsDevice[] dsDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
- CameraInfo[] devices = new CameraInfo[dsDevices.Length];
- for (int i = 0; i < dsDevices.Length; i++)
+ DsDevice dsDevice = dsDevices[i];
+ devices[i] = new CameraInfo
{
- DsDevice dsDevice = dsDevices[i];
- devices[i] = new CameraInfo
- {
- Name = dsDevice.Name,
- Path = $"video={dsDevice.Name}",
- AvailableVideoInputOptions = getAvailableOptions(dsDevice).ToArray(),
- };
- }
- return devices;
+ Name = dsDevice.Name,
+ Path = $"video={dsDevice.Name}",
+ AvailableVideoInputOptions = getAvailableOptions(dsDevice).ToArray(),
+ };
}
+ return devices;
+ }
- ///
- /// Get available video input options of a DirectShow device.
- /// Inspired from https://github.com/eldhosekpaul18/WebCam-Capture-Opencvsharp/blob/master/Camera%20Configuration/Camera.cs
- ///
- private unsafe static List&lt;VideoInputOptions&gt; getAvailableOptions(DsDevice dsDevice)
+ ///
+ /// Get available video input options of a DirectShow device.
+ /// Inspired from https://github.com/eldhosekpaul18/WebCam-Capture-Opencvsharp/blob/master/Camera%20Configuration/Camera.cs
+ ///
+ private unsafe static List&lt;VideoInputOptions&gt; getAvailableOptions(DsDevice dsDevice)
+ {
+ List&lt;VideoInputOptions&gt; options = new List&lt;VideoInputOptions&gt;();
+
+ try
{
- List&lt;VideoInputOptions&gt; options = new List&lt;VideoInputOptions&gt;();
+ uint bitCount = 0;
- try
- {
- uint bitCount = 0;
+ IFilterGraph2 filterGraph = (IFilterGraph2)new FilterGraph();
+ filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, dsDevice.Name, out IBaseFilter sourceFilter);
+ IPin rawPin = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
- IFilterGraph2 filterGraph = (IFilterGraph2)new FilterGraph();
- filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, dsDevice.Name, out IBaseFilter sourceFilter);
- IPin rawPin = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
+ VideoInfoHeader v = new VideoInfoHeader();
+ rawPin.EnumMediaTypes(out IEnumMediaTypes mediaTypeEnum);
- VideoInfoHeader v = new VideoInfoHeader();
- rawPin.EnumMediaTypes(out IEnumMediaTypes mediaTypeEnum);
+ AMMediaType[] mediaTypes = new AMMediaType[1];
+ IntPtr fetched = IntPtr.Zero;
+ mediaTypeEnum.Next(1, mediaTypes, fetched);
- AMMediaType[] mediaTypes = new AMMediaType[1];
- IntPtr fetched = IntPtr.Zero;
- mediaTypeEnum.Next(1, mediaTypes, fetched);
+ while (mediaTypes[0] != null)
+ {
+ Marshal.PtrToStructure(mediaTypes[0].formatPtr, v);
- while (mediaTypes[0] != null)
+ if (v.BmiHeader.Size != 0 && v.BmiHeader.BitCount != 0)
{
- Marshal.PtrToStructure(mediaTypes[0].formatPtr, v);
+ if (v.BmiHeader.BitCount > bitCount)
+ bitCount = (uint)v.BmiHeader.BitCount;
- if (v.BmiHeader.Size != 0 && v.BmiHeader.BitCount != 0)
- {
- if (v.BmiHeader.BitCount > bitCount)
- bitCount = (uint)v.BmiHeader.BitCount;
+ // Part of code inspired from dshow_get_format_info in dshow.c
+ // https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavdevice/dshow.c#L692-L759
+ PixelFormat pixelFormat = dshowPixelFormat(v.BmiHeader.Compression, bitCount);
- // Part of code inspired from dshow_get_format_info in dshow.c
- // https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavdevice/dshow.c#L692-L759
- PixelFormat pixelFormat = dshowPixelFormat(v.BmiHeader.Compression, bitCount);
-
- AVCodecID codecId;
- if (pixelFormat == PixelFormat.None)
+ AVCodecID codecId;
+ if (pixelFormat == PixelFormat.None)
+ {
+ AVCodecTag*[] tags = new[]
{
- AVCodecTag*[] tags = new[]
- {
- ffmpeg.avformat_get_riff_video_tags(),
- null,
- };
-
- fixed (AVCodecTag** tagsPtr = tags)
- {
- codecId = ffmpeg.av_codec_get_id(tagsPtr, bitCount);
- }
- }
- else
+ ffmpeg.avformat_get_riff_video_tags(),
+ null,
+ };
+
+ fixed (AVCodecTag** tagsPtr = tags)
{
- codecId = AVCodecID.AV_CODEC_ID_RAWVIDEO;
+ codecId = ffmpeg.av_codec_get_id(tagsPtr, bitCount);
}
+ }
+ else
+ {
+ codecId = AVCodecID.AV_CODEC_ID_RAWVIDEO;
+ }
- var vio = new VideoInputOptions
- {
- VideoSize = (v.BmiHeader.Width, v.BmiHeader.Height),
- // https://docs.microsoft.com/en-us/windows/win32/directshow/configure-the-video-output-format
- // "frames per second = 10,000,000 / frame duration"
- Framerate = ffmpeg.av_d2q((double)10_000_000L / v.AvgTimePerFrame, (int)10_000_000L),
- };
+ var vio = new VideoInputOptions
+ {
+ VideoSize = (v.BmiHeader.Width, v.BmiHeader.Height),
+ // https://docs.microsoft.com/en-us/windows/win32/directshow/configure-the-video-output-format
+ // "frames per second = 10,000,000 / frame duration"
+ Framerate = ffmpeg.av_d2q((double)10_000_000L / v.AvgTimePerFrame, (int)10_000_000L),
+ };
- if ((codecId != AVCodecID.AV_CODEC_ID_NONE) && (codecId != AVCodecID.AV_CODEC_ID_RAWVIDEO))
+ if ((codecId != AVCodecID.AV_CODEC_ID_NONE) && (codecId != AVCodecID.AV_CODEC_ID_RAWVIDEO))
+ {
+ AVCodec* codec = ffmpeg.avcodec_find_decoder(codecId);
+ vio.InputFormat = new string((sbyte*)codec->name);
+ vio.IsRaw = true;
+ }
+ else if (pixelFormat == PixelFormat.None)
+ {
+ // https://learn.microsoft.com/en-us/windows/win32/directshow/h-264-video-types:
+ if (mediaTypes[0].subType.Equals(MediaSubType.Video.H264)
+ || mediaTypes[0].subType.Equals(MediaSubType.Video.h264)
+ || mediaTypes[0].subType.Equals(MediaSubType.Video.X264)
+ || mediaTypes[0].subType.Equals(MediaSubType.Video.x264)
+ || mediaTypes[0].subType.Equals(MediaSubType.Video.Avc1)
+ || mediaTypes[0].subType.Equals(MediaSubType.Video.avc1))
+ {
+ vio.InputFormat = "h264";
+ vio.IsRaw = true;
+ }
+ else if (Equals(mediaTypes[0].subType, MediaSubType.MJPG))
{
- AVCodec* codec = ffmpeg.avcodec_find_decoder(codecId);
- vio.InputFormat = new string((sbyte*)codec->name);
+ vio.InputFormat = "mjpeg";
vio.IsRaw = true;
}
- else if (pixelFormat == PixelFormat.None)
+ else
{
- // https://learn.microsoft.com/en-us/windows/win32/directshow/h-264-video-types:
- if (mediaTypes[0].subType.Equals(MediaSubType.Video.H264)
- || mediaTypes[0].subType.Equals(MediaSubType.Video.h264)
- || mediaTypes[0].subType.Equals(MediaSubType.Video.X264)
- || mediaTypes[0].subType.Equals(MediaSubType.Video.x264)
- || mediaTypes[0].subType.Equals(MediaSubType.Video.Avc1)
- || mediaTypes[0].subType.Equals(MediaSubType.Video.avc1))
- {
- vio.InputFormat = "h264";
- vio.IsRaw = true;
- }
- else if (Equals(mediaTypes[0].subType, MediaSubType.MJPG))
- {
- vio.InputFormat = "mjpeg";
- vio.IsRaw = true;
- }
- else
- {
- // TODO: remove? maybe? idk
- Console.Error.WriteLine($"Warning: could not handle media type {mediaTypes[0].subType}");
- }
+ // TODO: remove? maybe? idk
+ Console.Error.WriteLine($"Warning: could not handle media type {mediaTypes[0].subType}");
}
+ }
- if (pixelFormat != PixelFormat.None)
- vio.InputFormat = pixelFormat.ToString().ToLower();
+ if (pixelFormat != PixelFormat.None)
+ vio.InputFormat = pixelFormat.ToString().ToLower();
- options.Add(vio);
- }
- mediaTypeEnum.Next(1, mediaTypes, fetched);
+ options.Add(vio);
}
+ mediaTypeEnum.Next(1, mediaTypes, fetched);
}
- catch (Exception)
- {
- }
-
- return options;
+ }
+ catch (Exception)
+ {
}
- ///
- /// Ported from libavdevice/dshow.c - dshow_pixfmt.
- /// See https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavdevice/dshow.c#L59-L80
- ///
- private static PixelFormat dshowPixelFormat(int compression, uint bitCount)
+ return options;
+ }
+
+ ///
+ /// Ported from libavdevice/dshow.c - dshow_pixfmt.
+ /// See https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavdevice/dshow.c#L59-L80
+ ///
+ private static PixelFormat dshowPixelFormat(int compression, uint bitCount)
+ {
+ if (compression == (int)BitmapCompression.Bitfields || compression == (int)BitmapCompression.Rgb)
{
- if (compression == (int)BitmapCompression.Bitfields || compression == (int)BitmapCompression.Rgb)
- {
- // Caution: There's something going on with BE vs LE pixel formats that I don't fully understand.
- // I'm using little endian variants of the missing pixel formats until I find a better solution.
- // https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavutil/pixfmt.h#L373-L377
+ // Caution: There's something going on with BE vs LE pixel formats that I don't fully understand.
+ // I'm using little endian variants of the missing pixel formats until I find a better solution.
+ // https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavutil/pixfmt.h#L373-L377
- // 1-8 are untested
- switch (bitCount)
- {
- case 1:
- return PixelFormat.Monowhite;
- case 4:
- return PixelFormat.Rgb4;
- case 8:
- return PixelFormat.Rgb8;
- case 16:
- // This pixel format was originally RGB555.
- // https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavutil/pixfmt.h#L394
- return PixelFormat.Rgb555Le;
- case 24:
- return PixelFormat.Bgr24;
- case 32:
- // This pixel format was originally 0RGB32.
- // https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavutil/pixfmt.h#L383
- return PixelFormat.Bgrx;
- }
+ // 1-8 are untested
+ switch (bitCount)
+ {
+ case 1:
+ return PixelFormat.Monowhite;
+ case 4:
+ return PixelFormat.Rgb4;
+ case 8:
+ return PixelFormat.Rgb8;
+ case 16:
+ // This pixel format was originally RGB555.
+ // https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavutil/pixfmt.h#L394
+ return PixelFormat.Rgb555Le;
+ case 24:
+ return PixelFormat.Bgr24;
+ case 32:
+ // This pixel format was originally 0RGB32.
+ // https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavutil/pixfmt.h#L383
+ return PixelFormat.Bgrx;
}
-
- // All others
- return PixelFormatTag.FindRawPixelFormat(compression);
}
+
+ // All others
+ return PixelFormatTag.FindRawPixelFormat(compression);
}
}
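On the framerate line in `getAvailableOptions`: per the linked Microsoft page, `AvgTimePerFrame` is a frame duration expressed in 100-nanosecond units, so dividing 10,000,000 by it yields frames per second; `av_d2q` then converts that double into an exact rational. A worked example with an illustrative (not device-derived) duration:

```csharp
using System;

// A frame duration of 333,333 × 100 ns is ~33.3 ms per frame,
// i.e. ~30 fps. Values here are illustrative only.
long avgTimePerFrame = 333_333;
double fps = 10_000_000.0 / avgTimePerFrame;
Console.WriteLine($"{fps:F2} fps"); // 30.00 fps
```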
diff --git a/SeeShark/Utils/PrivateFFmpeg/PixelFormatTag.cs b/SeeShark/Utils/PrivateFFmpeg/PixelFormatTag.cs
index fc5a98c..effda96 100644
--- a/SeeShark/Utils/PrivateFFmpeg/PixelFormatTag.cs
+++ b/SeeShark/Utils/PrivateFFmpeg/PixelFormatTag.cs
@@ -2,314 +2,313 @@
// This file is part of SeeShark.
// SeeShark is licensed under the BSD 3-Clause License. See LICENSE for details.
-namespace SeeShark.Utils.PrivateFFmpeg
+namespace SeeShark.Utils.PrivateFFmpeg;
+
+///
+/// https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavcodec/raw.h#L32-L35
+///
+internal class PixelFormatTag
{
- ///
- /// https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavcodec/raw.h#L32-L35
- ///
- internal class PixelFormatTag
- {
- public PixelFormat PixelFormat { get; set; }
- public int FourCC { get; set; }
+ public PixelFormat PixelFormat { get; set; }
+ public int FourCC { get; set; }
- public PixelFormatTag(PixelFormat pixelFormat, int fourcc)
- {
- PixelFormat = pixelFormat;
- FourCC = fourcc;
- }
+ public PixelFormatTag(PixelFormat pixelFormat, int fourcc)
+ {
+ PixelFormat = pixelFormat;
+ FourCC = fourcc;
+ }
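The tag table below is keyed by FOURCC codes built with a private `mkTag` helper that is not visible in this excerpt. Judging by entries such as `mkTag('Y', 'U', 'Y', '2')`, it presumably mirrors FFmpeg's `MKTAG` macro, which packs four bytes little-endian into an `int`. A sketch under that assumption (the `MkTag` function here is a hypothetical reimplementation, not the helper from this file):

```csharp
using System;

// Assumed equivalent of FFmpeg's MKTAG: byte a is the least
// significant, byte d the most significant.
static int MkTag(int a, int b, int c, int d) =>
    a | (b << 8) | (c << 16) | (d << 24);

// 'Y','U','Y','2' packs to 0x32595559 — the FOURCC for YUY2.
Console.WriteLine($"0x{MkTag('Y', 'U', 'Y', '2'):X8}");
```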
#pragma warning disable CS0618
- ///
- /// https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavcodec/raw.c#L31-L298
- ///
- public static PixelFormatTag[] RawPixelFormatTags = new PixelFormatTag[]
- {
- new PixelFormatTag(PixelFormat.Yuv420P, mkTag('I', '4', '2', '0')), // Planar formats
- new PixelFormatTag(PixelFormat.Yuv420P, mkTag('I', 'Y', 'U', 'V')),
- new PixelFormatTag(PixelFormat.Yuv420P, mkTag('y', 'v', '1', '2')),
- new PixelFormatTag(PixelFormat.Yuv420P, mkTag('Y', 'V', '1', '2')),
- new PixelFormatTag(PixelFormat.Yuv410P, mkTag('Y', 'U', 'V', '9')),
- new PixelFormatTag(PixelFormat.Yuv410P, mkTag('Y', 'V', 'U', '9')),
- new PixelFormatTag(PixelFormat.Yuv411P, mkTag('Y', '4', '1', 'B')),
- new PixelFormatTag(PixelFormat.Yuv422P, mkTag('Y', '4', '2', 'B')),
- new PixelFormatTag(PixelFormat.Yuv422P, mkTag('P', '4', '2', '2')),
- new PixelFormatTag(PixelFormat.Yuv422P, mkTag('Y', 'V', '1', '6')),
+ ///
+ /// https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavcodec/raw.c#L31-L298
+ ///
+ public static PixelFormatTag[] RawPixelFormatTags = new PixelFormatTag[]
+ {
+ new PixelFormatTag(PixelFormat.Yuv420P, mkTag('I', '4', '2', '0')), // Planar formats
+ new PixelFormatTag(PixelFormat.Yuv420P, mkTag('I', 'Y', 'U', 'V')),
+ new PixelFormatTag(PixelFormat.Yuv420P, mkTag('y', 'v', '1', '2')),
+ new PixelFormatTag(PixelFormat.Yuv420P, mkTag('Y', 'V', '1', '2')),
+ new PixelFormatTag(PixelFormat.Yuv410P, mkTag('Y', 'U', 'V', '9')),
+ new PixelFormatTag(PixelFormat.Yuv410P, mkTag('Y', 'V', 'U', '9')),
+ new PixelFormatTag(PixelFormat.Yuv411P, mkTag('Y', '4', '1', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv422P, mkTag('Y', '4', '2', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv422P, mkTag('P', '4', '2', '2')),
+ new PixelFormatTag(PixelFormat.Yuv422P, mkTag('Y', 'V', '1', '6')),
- // yuvjXXX formats are deprecated hacks specific to libav*, they are identical to yuvXXX
- new PixelFormatTag(PixelFormat.Yuvj420P, mkTag('I', '4', '2', '0')), // Planar formats
- new PixelFormatTag(PixelFormat.Yuvj420P, mkTag('I', 'Y', 'U', 'V')),
- new PixelFormatTag(PixelFormat.Yuvj420P, mkTag('Y', 'V', '1', '2')),
- new PixelFormatTag(PixelFormat.Yuvj422P, mkTag('Y', '4', '2', 'B')),
- new PixelFormatTag(PixelFormat.Yuvj422P, mkTag('P', '4', '2', '2')),
- new PixelFormatTag(PixelFormat.Gray8, mkTag('Y', '8', '0', '0')),
- new PixelFormatTag(PixelFormat.Gray8, mkTag('Y', '8', ' ', ' ')),
+ // yuvjXXX formats are deprecated hacks specific to libav*, they are identical to yuvXXX
+ new PixelFormatTag(PixelFormat.Yuvj420P, mkTag('I', '4', '2', '0')), // Planar formats
+ new PixelFormatTag(PixelFormat.Yuvj420P, mkTag('I', 'Y', 'U', 'V')),
+ new PixelFormatTag(PixelFormat.Yuvj420P, mkTag('Y', 'V', '1', '2')),
+ new PixelFormatTag(PixelFormat.Yuvj422P, mkTag('Y', '4', '2', 'B')),
+ new PixelFormatTag(PixelFormat.Yuvj422P, mkTag('P', '4', '2', '2')),
+ new PixelFormatTag(PixelFormat.Gray8, mkTag('Y', '8', '0', '0')),
+ new PixelFormatTag(PixelFormat.Gray8, mkTag('Y', '8', ' ', ' ')),
- new PixelFormatTag(PixelFormat.Yuyv422, mkTag('Y', 'U', 'Y', '2')), // Packed formats
- new PixelFormatTag(PixelFormat.Yuyv422, mkTag('Y', '4', '2', '2')),
- new PixelFormatTag(PixelFormat.Yuyv422, mkTag('V', '4', '2', '2')),
- new PixelFormatTag(PixelFormat.Yuyv422, mkTag('V', 'Y', 'U', 'Y')),
- new PixelFormatTag(PixelFormat.Yuyv422, mkTag('Y', 'U', 'N', 'V')),
- new PixelFormatTag(PixelFormat.Yuyv422, mkTag('Y', 'U', 'Y', 'V')),
- new PixelFormatTag(PixelFormat.Yvyu422, mkTag('Y', 'V', 'Y', 'U')), // Philips
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('U', 'Y', 'V', 'Y')),
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('H', 'D', 'Y', 'C')),
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('U', 'Y', 'N', 'V')),
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('U', 'Y', 'N', 'Y')),
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('u', 'y', 'v', '1')),
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('2', 'V', 'u', '1')),
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('A', 'V', 'R', 'n')), // Avid AVI Codec 1:1
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('A', 'V', '1', 'x')), // Avid 1:1x
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('A', 'V', 'u', 'p')),
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('V', 'D', 'T', 'Z')), // SoftLab-NSK VideoTizer
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('a', 'u', 'v', '2')),
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('c', 'y', 'u', 'v')), // CYUV is also Creative YUV
- new PixelFormatTag(PixelFormat.Uyyvyy411, mkTag('Y', '4', '1', '1')),
- new PixelFormatTag(PixelFormat.Gray8, mkTag('G', 'R', 'E', 'Y')),
- new PixelFormatTag(PixelFormat.Nv12, mkTag('N', 'V', '1', '2')),
- new PixelFormatTag(PixelFormat.Nv21, mkTag('N', 'V', '2', '1')),
+ new PixelFormatTag(PixelFormat.Yuyv422, mkTag('Y', 'U', 'Y', '2')), // Packed formats
+ new PixelFormatTag(PixelFormat.Yuyv422, mkTag('Y', '4', '2', '2')),
+ new PixelFormatTag(PixelFormat.Yuyv422, mkTag('V', '4', '2', '2')),
+ new PixelFormatTag(PixelFormat.Yuyv422, mkTag('V', 'Y', 'U', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuyv422, mkTag('Y', 'U', 'N', 'V')),
+ new PixelFormatTag(PixelFormat.Yuyv422, mkTag('Y', 'U', 'Y', 'V')),
+ new PixelFormatTag(PixelFormat.Yvyu422, mkTag('Y', 'V', 'Y', 'U')), // Philips
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('U', 'Y', 'V', 'Y')),
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('H', 'D', 'Y', 'C')),
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('U', 'Y', 'N', 'V')),
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('U', 'Y', 'N', 'Y')),
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('u', 'y', 'v', '1')),
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('2', 'V', 'u', '1')),
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('A', 'V', 'R', 'n')), // Avid AVI Codec 1:1
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('A', 'V', '1', 'x')), // Avid 1:1x
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('A', 'V', 'u', 'p')),
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('V', 'D', 'T', 'Z')), // SoftLab-NSK VideoTizer
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('a', 'u', 'v', '2')),
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('c', 'y', 'u', 'v')), // CYUV is also Creative YUV
+ new PixelFormatTag(PixelFormat.Uyyvyy411, mkTag('Y', '4', '1', '1')),
+ new PixelFormatTag(PixelFormat.Gray8, mkTag('G', 'R', 'E', 'Y')),
+ new PixelFormatTag(PixelFormat.Nv12, mkTag('N', 'V', '1', '2')),
+ new PixelFormatTag(PixelFormat.Nv21, mkTag('N', 'V', '2', '1')),
- // nut
- new PixelFormatTag(PixelFormat.Rgb555Le, mkTag('R', 'G', 'B', 15)),
- new PixelFormatTag(PixelFormat.Bgr555Le, mkTag('B', 'G', 'R', 15)),
- new PixelFormatTag(PixelFormat.Rgb565Le, mkTag('R', 'G', 'B', 16)),
- new PixelFormatTag(PixelFormat.Bgr565Le, mkTag('B', 'G', 'R', 16)),
- new PixelFormatTag(PixelFormat.Rgb555Be, mkTag(15, 'B', 'G', 'R')),
- new PixelFormatTag(PixelFormat.Bgr555Be, mkTag(15, 'R', 'G', 'B')),
- new PixelFormatTag(PixelFormat.Rgb565Be, mkTag(16, 'B', 'G', 'R')),
- new PixelFormatTag(PixelFormat.Bgr565Be, mkTag(16, 'R', 'G', 'B')),
- new PixelFormatTag(PixelFormat.Rgb444Le, mkTag('R', 'G', 'B', 12)),
- new PixelFormatTag(PixelFormat.Bgr444Le, mkTag('B', 'G', 'R', 12)),
- new PixelFormatTag(PixelFormat.Rgb444Be, mkTag(12, 'B', 'G', 'R')),
- new PixelFormatTag(PixelFormat.Bgr444Be, mkTag(12, 'R', 'G', 'B')),
- new PixelFormatTag(PixelFormat.Rgba64Le, mkTag('R', 'B', 'A', 64)),
- new PixelFormatTag(PixelFormat.Bgra64Le, mkTag('B', 'R', 'A', 64)),
- new PixelFormatTag(PixelFormat.Rgba64Be, mkTag(64, 'R', 'B', 'A')),
- new PixelFormatTag(PixelFormat.Bgra64Be, mkTag(64, 'B', 'R', 'A')),
- new PixelFormatTag(PixelFormat.Rgba, mkTag('R', 'G', 'B', 'A')),
- new PixelFormatTag(PixelFormat.Rgbx, mkTag('R', 'G', 'B', 0)),
- new PixelFormatTag(PixelFormat.Bgra, mkTag('B', 'G', 'R', 'A')),
- new PixelFormatTag(PixelFormat.Bgrx, mkTag('B', 'G', 'R', 0)),
- new PixelFormatTag(PixelFormat.Abgr, mkTag('A', 'B', 'G', 'R')),
- new PixelFormatTag(PixelFormat.Xbgr, mkTag(0, 'B', 'G', 'R')),
- new PixelFormatTag(PixelFormat.Argb, mkTag('A', 'R', 'G', 'B')),
- new PixelFormatTag(PixelFormat.Xrgb, mkTag(0, 'R', 'G', 'B')),
- new PixelFormatTag(PixelFormat.Rgb24, mkTag('R', 'G', 'B', 24)),
- new PixelFormatTag(PixelFormat.Bgr24, mkTag('B', 'G', 'R', 24)),
- new PixelFormatTag(PixelFormat.Yuv411P, mkTag('4', '1', '1', 'P')),
- new PixelFormatTag(PixelFormat.Yuv422P, mkTag('4', '2', '2', 'P')),
- new PixelFormatTag(PixelFormat.Yuvj422P, mkTag('4', '2', '2', 'P')),
- new PixelFormatTag(PixelFormat.Yuv440P, mkTag('4', '4', '0', 'P')),
- new PixelFormatTag(PixelFormat.Yuvj440P, mkTag('4', '4', '0', 'P')),
- new PixelFormatTag(PixelFormat.Yuv444P, mkTag('4', '4', '4', 'P')),
- new PixelFormatTag(PixelFormat.Yuvj444P, mkTag('4', '4', '4', 'P')),
- new PixelFormatTag(PixelFormat.Monowhite, mkTag('B', '1', 'W', '0')),
- new PixelFormatTag(PixelFormat.Monoblack, mkTag('B', '0', 'W', '1')),
- new PixelFormatTag(PixelFormat.Bgr8, mkTag('B', 'G', 'R', 8)),
- new PixelFormatTag(PixelFormat.Rgb8, mkTag('R', 'G', 'B', 8)),
- new PixelFormatTag(PixelFormat.Bgr4, mkTag('B', 'G', 'R', 4)),
- new PixelFormatTag(PixelFormat.Rgb4, mkTag('R', 'G', 'B', 4)),
- new PixelFormatTag(PixelFormat.Rgb4Byte,mkTag('B', '4', 'B', 'Y')),
- new PixelFormatTag(PixelFormat.Bgr4Byte,mkTag('R', '4', 'B', 'Y')),
- new PixelFormatTag(PixelFormat.Rgb48Le, mkTag('R', 'G', 'B', 48)),
- new PixelFormatTag(PixelFormat.Rgb48Be, mkTag(48, 'R', 'G', 'B')),
- new PixelFormatTag(PixelFormat.Bgr48Le, mkTag('B', 'G', 'R', 48)),
- new PixelFormatTag(PixelFormat.Bgr48Be, mkTag(48, 'B', 'G', 'R')),
- new PixelFormatTag(PixelFormat.Gray9Le, mkTag('Y', '1', 0, 9)),
- new PixelFormatTag(PixelFormat.Gray9Be, mkTag(9, 0, '1', 'Y')),
- new PixelFormatTag(PixelFormat.Gray10Le, mkTag('Y', '1', 0, 10)),
- new PixelFormatTag(PixelFormat.Gray10Be, mkTag(10, 0, '1', 'Y')),
- new PixelFormatTag(PixelFormat.Gray12Le, mkTag('Y', '1', 0, 12)),
- new PixelFormatTag(PixelFormat.Gray12Be, mkTag(12, 0, '1', 'Y')),
- new PixelFormatTag(PixelFormat.Gray14Le, mkTag('Y', '1', 0, 14)),
- new PixelFormatTag(PixelFormat.Gray14Be, mkTag(14, 0, '1', 'Y')),
- new PixelFormatTag(PixelFormat.Gray16Le, mkTag('Y', '1', 0, 16)),
- new PixelFormatTag(PixelFormat.Gray16Be, mkTag(16, 0, '1', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv420P9Le, mkTag('Y', '3', 11, 9)),
- new PixelFormatTag(PixelFormat.Yuv420P9Be, mkTag(9, 11, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv422P9Le, mkTag('Y', '3', 10, 9)),
- new PixelFormatTag(PixelFormat.Yuv422P9Be, mkTag(9, 10, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv444P9Le, mkTag('Y', '3', 0, 9)),
- new PixelFormatTag(PixelFormat.Yuv444P9Be, mkTag(9, 0, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv420P10Le, mkTag('Y', '3', 11, 10)),
- new PixelFormatTag(PixelFormat.Yuv420P10Be, mkTag(10, 11, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv422P10Le, mkTag('Y', '3', 10, 10)),
- new PixelFormatTag(PixelFormat.Yuv422P10Be, mkTag(10, 10, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv444P10Le, mkTag('Y', '3', 0, 10)),
- new PixelFormatTag(PixelFormat.Yuv444P10Be, mkTag(10, 0, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv420P12Le, mkTag('Y', '3', 11, 12)),
- new PixelFormatTag(PixelFormat.Yuv420P12Be, mkTag(12, 11, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv422P12Le, mkTag('Y', '3', 10, 12)),
- new PixelFormatTag(PixelFormat.Yuv422P12Be, mkTag(12, 10, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv444P12Le, mkTag('Y', '3', 0, 12)),
- new PixelFormatTag(PixelFormat.Yuv444P12Be, mkTag(12, 0, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv420P14Le, mkTag('Y', '3', 11, 14)),
- new PixelFormatTag(PixelFormat.Yuv420P14Be, mkTag(14, 11, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv422P14Le, mkTag('Y', '3', 10, 14)),
- new PixelFormatTag(PixelFormat.Yuv422P14Be, mkTag(14, 10, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv444P14Le, mkTag('Y', '3', 0, 14)),
- new PixelFormatTag(PixelFormat.Yuv444P14Be, mkTag(14, 0, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv420P16Le, mkTag('Y', '3', 11, 16)),
- new PixelFormatTag(PixelFormat.Yuv420P16Be, mkTag(16, 11, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv422P16Le, mkTag('Y', '3', 10, 16)),
- new PixelFormatTag(PixelFormat.Yuv422P16Be, mkTag(16, 10, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuv444P16Le, mkTag('Y', '3', 0, 16)),
- new PixelFormatTag(PixelFormat.Yuv444P16Be, mkTag(16, 0, '3', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva420P, mkTag('Y', '4', 11, 8)),
- new PixelFormatTag(PixelFormat.Yuva422P, mkTag('Y', '4', 10, 8)),
- new PixelFormatTag(PixelFormat.Yuva444P, mkTag('Y', '4', 0, 8)),
- new PixelFormatTag(PixelFormat.Ya8, mkTag('Y', '2', 0, 8)),
- new PixelFormatTag(PixelFormat.Pal8, mkTag('P', 'A', 'L', 8)),
+ // nut
+ new PixelFormatTag(PixelFormat.Rgb555Le, mkTag('R', 'G', 'B', 15)),
+ new PixelFormatTag(PixelFormat.Bgr555Le, mkTag('B', 'G', 'R', 15)),
+ new PixelFormatTag(PixelFormat.Rgb565Le, mkTag('R', 'G', 'B', 16)),
+ new PixelFormatTag(PixelFormat.Bgr565Le, mkTag('B', 'G', 'R', 16)),
+ new PixelFormatTag(PixelFormat.Rgb555Be, mkTag(15, 'B', 'G', 'R')),
+ new PixelFormatTag(PixelFormat.Bgr555Be, mkTag(15, 'R', 'G', 'B')),
+ new PixelFormatTag(PixelFormat.Rgb565Be, mkTag(16, 'B', 'G', 'R')),
+ new PixelFormatTag(PixelFormat.Bgr565Be, mkTag(16, 'R', 'G', 'B')),
+ new PixelFormatTag(PixelFormat.Rgb444Le, mkTag('R', 'G', 'B', 12)),
+ new PixelFormatTag(PixelFormat.Bgr444Le, mkTag('B', 'G', 'R', 12)),
+ new PixelFormatTag(PixelFormat.Rgb444Be, mkTag(12, 'B', 'G', 'R')),
+ new PixelFormatTag(PixelFormat.Bgr444Be, mkTag(12, 'R', 'G', 'B')),
+ new PixelFormatTag(PixelFormat.Rgba64Le, mkTag('R', 'B', 'A', 64)),
+ new PixelFormatTag(PixelFormat.Bgra64Le, mkTag('B', 'R', 'A', 64)),
+ new PixelFormatTag(PixelFormat.Rgba64Be, mkTag(64, 'R', 'B', 'A')),
+ new PixelFormatTag(PixelFormat.Bgra64Be, mkTag(64, 'B', 'R', 'A')),
+ new PixelFormatTag(PixelFormat.Rgba, mkTag('R', 'G', 'B', 'A')),
+ new PixelFormatTag(PixelFormat.Rgbx, mkTag('R', 'G', 'B', 0)),
+ new PixelFormatTag(PixelFormat.Bgra, mkTag('B', 'G', 'R', 'A')),
+ new PixelFormatTag(PixelFormat.Bgrx, mkTag('B', 'G', 'R', 0)),
+ new PixelFormatTag(PixelFormat.Abgr, mkTag('A', 'B', 'G', 'R')),
+ new PixelFormatTag(PixelFormat.Xbgr, mkTag(0, 'B', 'G', 'R')),
+ new PixelFormatTag(PixelFormat.Argb, mkTag('A', 'R', 'G', 'B')),
+ new PixelFormatTag(PixelFormat.Xrgb, mkTag(0, 'R', 'G', 'B')),
+ new PixelFormatTag(PixelFormat.Rgb24, mkTag('R', 'G', 'B', 24)),
+ new PixelFormatTag(PixelFormat.Bgr24, mkTag('B', 'G', 'R', 24)),
+ new PixelFormatTag(PixelFormat.Yuv411P, mkTag('4', '1', '1', 'P')),
+ new PixelFormatTag(PixelFormat.Yuv422P, mkTag('4', '2', '2', 'P')),
+ new PixelFormatTag(PixelFormat.Yuvj422P, mkTag('4', '2', '2', 'P')),
+ new PixelFormatTag(PixelFormat.Yuv440P, mkTag('4', '4', '0', 'P')),
+ new PixelFormatTag(PixelFormat.Yuvj440P, mkTag('4', '4', '0', 'P')),
+ new PixelFormatTag(PixelFormat.Yuv444P, mkTag('4', '4', '4', 'P')),
+ new PixelFormatTag(PixelFormat.Yuvj444P, mkTag('4', '4', '4', 'P')),
+ new PixelFormatTag(PixelFormat.Monowhite, mkTag('B', '1', 'W', '0')),
+ new PixelFormatTag(PixelFormat.Monoblack, mkTag('B', '0', 'W', '1')),
+ new PixelFormatTag(PixelFormat.Bgr8, mkTag('B', 'G', 'R', 8)),
+ new PixelFormatTag(PixelFormat.Rgb8, mkTag('R', 'G', 'B', 8)),
+ new PixelFormatTag(PixelFormat.Bgr4, mkTag('B', 'G', 'R', 4)),
+ new PixelFormatTag(PixelFormat.Rgb4, mkTag('R', 'G', 'B', 4)),
+ new PixelFormatTag(PixelFormat.Rgb4Byte,mkTag('B', '4', 'B', 'Y')),
+ new PixelFormatTag(PixelFormat.Bgr4Byte,mkTag('R', '4', 'B', 'Y')),
+ new PixelFormatTag(PixelFormat.Rgb48Le, mkTag('R', 'G', 'B', 48)),
+ new PixelFormatTag(PixelFormat.Rgb48Be, mkTag(48, 'R', 'G', 'B')),
+ new PixelFormatTag(PixelFormat.Bgr48Le, mkTag('B', 'G', 'R', 48)),
+ new PixelFormatTag(PixelFormat.Bgr48Be, mkTag(48, 'B', 'G', 'R')),
+ new PixelFormatTag(PixelFormat.Gray9Le, mkTag('Y', '1', 0, 9)),
+ new PixelFormatTag(PixelFormat.Gray9Be, mkTag(9, 0, '1', 'Y')),
+ new PixelFormatTag(PixelFormat.Gray10Le, mkTag('Y', '1', 0, 10)),
+ new PixelFormatTag(PixelFormat.Gray10Be, mkTag(10, 0, '1', 'Y')),
+ new PixelFormatTag(PixelFormat.Gray12Le, mkTag('Y', '1', 0, 12)),
+ new PixelFormatTag(PixelFormat.Gray12Be, mkTag(12, 0, '1', 'Y')),
+ new PixelFormatTag(PixelFormat.Gray14Le, mkTag('Y', '1', 0, 14)),
+ new PixelFormatTag(PixelFormat.Gray14Be, mkTag(14, 0, '1', 'Y')),
+ new PixelFormatTag(PixelFormat.Gray16Le, mkTag('Y', '1', 0, 16)),
+ new PixelFormatTag(PixelFormat.Gray16Be, mkTag(16, 0, '1', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv420P9Le, mkTag('Y', '3', 11, 9)),
+ new PixelFormatTag(PixelFormat.Yuv420P9Be, mkTag(9, 11, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv422P9Le, mkTag('Y', '3', 10, 9)),
+ new PixelFormatTag(PixelFormat.Yuv422P9Be, mkTag(9, 10, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv444P9Le, mkTag('Y', '3', 0, 9)),
+ new PixelFormatTag(PixelFormat.Yuv444P9Be, mkTag(9, 0, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv420P10Le, mkTag('Y', '3', 11, 10)),
+ new PixelFormatTag(PixelFormat.Yuv420P10Be, mkTag(10, 11, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv422P10Le, mkTag('Y', '3', 10, 10)),
+ new PixelFormatTag(PixelFormat.Yuv422P10Be, mkTag(10, 10, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv444P10Le, mkTag('Y', '3', 0, 10)),
+ new PixelFormatTag(PixelFormat.Yuv444P10Be, mkTag(10, 0, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv420P12Le, mkTag('Y', '3', 11, 12)),
+ new PixelFormatTag(PixelFormat.Yuv420P12Be, mkTag(12, 11, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv422P12Le, mkTag('Y', '3', 10, 12)),
+ new PixelFormatTag(PixelFormat.Yuv422P12Be, mkTag(12, 10, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv444P12Le, mkTag('Y', '3', 0, 12)),
+ new PixelFormatTag(PixelFormat.Yuv444P12Be, mkTag(12, 0, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv420P14Le, mkTag('Y', '3', 11, 14)),
+ new PixelFormatTag(PixelFormat.Yuv420P14Be, mkTag(14, 11, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv422P14Le, mkTag('Y', '3', 10, 14)),
+ new PixelFormatTag(PixelFormat.Yuv422P14Be, mkTag(14, 10, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv444P14Le, mkTag('Y', '3', 0, 14)),
+ new PixelFormatTag(PixelFormat.Yuv444P14Be, mkTag(14, 0, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv420P16Le, mkTag('Y', '3', 11, 16)),
+ new PixelFormatTag(PixelFormat.Yuv420P16Be, mkTag(16, 11, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv422P16Le, mkTag('Y', '3', 10, 16)),
+ new PixelFormatTag(PixelFormat.Yuv422P16Be, mkTag(16, 10, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuv444P16Le, mkTag('Y', '3', 0, 16)),
+ new PixelFormatTag(PixelFormat.Yuv444P16Be, mkTag(16, 0, '3', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva420P, mkTag('Y', '4', 11, 8)),
+ new PixelFormatTag(PixelFormat.Yuva422P, mkTag('Y', '4', 10, 8)),
+ new PixelFormatTag(PixelFormat.Yuva444P, mkTag('Y', '4', 0, 8)),
+ new PixelFormatTag(PixelFormat.Ya8, mkTag('Y', '2', 0, 8)),
+ new PixelFormatTag(PixelFormat.Pal8, mkTag('P', 'A', 'L', 8)),
- new PixelFormatTag(PixelFormat.Yuva420P9Le, mkTag('Y', '4', 11, 9)),
- new PixelFormatTag(PixelFormat.Yuva420P9Be, mkTag(9, 11, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva422P9Le, mkTag('Y', '4', 10, 9)),
- new PixelFormatTag(PixelFormat.Yuva422P9Be, mkTag(9, 10, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva444P9Le, mkTag('Y', '4', 0, 9)),
- new PixelFormatTag(PixelFormat.Yuva444P9Be, mkTag(9, 0, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva420P10Le, mkTag('Y', '4', 11, 10)),
- new PixelFormatTag(PixelFormat.Yuva420P10Be, mkTag(10, 11, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva422P10Le, mkTag('Y', '4', 10, 10)),
- new PixelFormatTag(PixelFormat.Yuva422P10Be, mkTag(10, 10, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva444P10Le, mkTag('Y', '4', 0, 10)),
- new PixelFormatTag(PixelFormat.Yuva444P10Be, mkTag(10, 0, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva422P12Le, mkTag('Y', '4', 10, 12)),
- new PixelFormatTag(PixelFormat.Yuva422P12Be, mkTag(12, 10, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva444P12Le, mkTag('Y', '4', 0, 12)),
- new PixelFormatTag(PixelFormat.Yuva444P12Be, mkTag(12, 0, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva420P16Le, mkTag('Y', '4', 11, 16)),
- new PixelFormatTag(PixelFormat.Yuva420P16Be, mkTag(16, 11, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva422P16Le, mkTag('Y', '4', 10, 16)),
- new PixelFormatTag(PixelFormat.Yuva422P16Be, mkTag(16, 10, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Yuva444P16Le, mkTag('Y', '4', 0, 16)),
- new PixelFormatTag(PixelFormat.Yuva444P16Be, mkTag(16, 0, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva420P9Le, mkTag('Y', '4', 11, 9)),
+ new PixelFormatTag(PixelFormat.Yuva420P9Be, mkTag(9, 11, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva422P9Le, mkTag('Y', '4', 10, 9)),
+ new PixelFormatTag(PixelFormat.Yuva422P9Be, mkTag(9, 10, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva444P9Le, mkTag('Y', '4', 0, 9)),
+ new PixelFormatTag(PixelFormat.Yuva444P9Be, mkTag(9, 0, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva420P10Le, mkTag('Y', '4', 11, 10)),
+ new PixelFormatTag(PixelFormat.Yuva420P10Be, mkTag(10, 11, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva422P10Le, mkTag('Y', '4', 10, 10)),
+ new PixelFormatTag(PixelFormat.Yuva422P10Be, mkTag(10, 10, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva444P10Le, mkTag('Y', '4', 0, 10)),
+ new PixelFormatTag(PixelFormat.Yuva444P10Be, mkTag(10, 0, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva422P12Le, mkTag('Y', '4', 10, 12)),
+ new PixelFormatTag(PixelFormat.Yuva422P12Be, mkTag(12, 10, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva444P12Le, mkTag('Y', '4', 0, 12)),
+ new PixelFormatTag(PixelFormat.Yuva444P12Be, mkTag(12, 0, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva420P16Le, mkTag('Y', '4', 11, 16)),
+ new PixelFormatTag(PixelFormat.Yuva420P16Be, mkTag(16, 11, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva422P16Le, mkTag('Y', '4', 10, 16)),
+ new PixelFormatTag(PixelFormat.Yuva422P16Be, mkTag(16, 10, '4', 'Y')),
+ new PixelFormatTag(PixelFormat.Yuva444P16Le, mkTag('Y', '4', 0, 16)),
+ new PixelFormatTag(PixelFormat.Yuva444P16Be, mkTag(16, 0, '4', 'Y')),
- new PixelFormatTag(PixelFormat.Gbrp, mkTag('G', '3', 00, 8)),
- new PixelFormatTag(PixelFormat.Gbrp9Le, mkTag('G', '3', 00, 9)),
- new PixelFormatTag(PixelFormat.Gbrp9Be, mkTag(9, 00, '3', 'G')),
- new PixelFormatTag(PixelFormat.Gbrp10Le, mkTag('G', '3', 00, 10)),
- new PixelFormatTag(PixelFormat.Gbrp10Be, mkTag(10, 00, '3', 'G')),
- new PixelFormatTag(PixelFormat.Gbrp12Le, mkTag('G', '3', 00, 12)),
- new PixelFormatTag(PixelFormat.Gbrp12Be, mkTag(12, 00, '3', 'G')),
- new PixelFormatTag(PixelFormat.Gbrp14Le, mkTag('G', '3', 00, 14)),
- new PixelFormatTag(PixelFormat.Gbrp14Be, mkTag(14, 00, '3', 'G')),
- new PixelFormatTag(PixelFormat.Gbrp16Le, mkTag('G', '3', 00, 16)),
- new PixelFormatTag(PixelFormat.Gbrp16Be, mkTag(16, 00, '3', 'G')),
+ new PixelFormatTag(PixelFormat.Gbrp, mkTag('G', '3', 00, 8)),
+ new PixelFormatTag(PixelFormat.Gbrp9Le, mkTag('G', '3', 00, 9)),
+ new PixelFormatTag(PixelFormat.Gbrp9Be, mkTag(9, 00, '3', 'G')),
+ new PixelFormatTag(PixelFormat.Gbrp10Le, mkTag('G', '3', 00, 10)),
+ new PixelFormatTag(PixelFormat.Gbrp10Be, mkTag(10, 00, '3', 'G')),
+ new PixelFormatTag(PixelFormat.Gbrp12Le, mkTag('G', '3', 00, 12)),
+ new PixelFormatTag(PixelFormat.Gbrp12Be, mkTag(12, 00, '3', 'G')),
+ new PixelFormatTag(PixelFormat.Gbrp14Le, mkTag('G', '3', 00, 14)),
+ new PixelFormatTag(PixelFormat.Gbrp14Be, mkTag(14, 00, '3', 'G')),
+ new PixelFormatTag(PixelFormat.Gbrp16Le, mkTag('G', '3', 00, 16)),
+ new PixelFormatTag(PixelFormat.Gbrp16Be, mkTag(16, 00, '3', 'G')),
- new PixelFormatTag(PixelFormat.Gbrap, mkTag('G', '4', 00, 8)),
- new PixelFormatTag(PixelFormat.Gbrap10Le, mkTag('G', '4', 00, 10)),
- new PixelFormatTag(PixelFormat.Gbrap10Be, mkTag(10, 00, '4', 'G')),
- new PixelFormatTag(PixelFormat.Gbrap12Le, mkTag('G', '4', 00, 12)),
- new PixelFormatTag(PixelFormat.Gbrap12Be, mkTag(12, 00, '4', 'G')),
- new PixelFormatTag(PixelFormat.Gbrap16Le, mkTag('G', '4', 00, 16)),
- new PixelFormatTag(PixelFormat.Gbrap16Be, mkTag(16, 00, '4', 'G')),
+ new PixelFormatTag(PixelFormat.Gbrap, mkTag('G', '4', 00, 8)),
+ new PixelFormatTag(PixelFormat.Gbrap10Le, mkTag('G', '4', 00, 10)),
+ new PixelFormatTag(PixelFormat.Gbrap10Be, mkTag(10, 00, '4', 'G')),
+ new PixelFormatTag(PixelFormat.Gbrap12Le, mkTag('G', '4', 00, 12)),
+ new PixelFormatTag(PixelFormat.Gbrap12Be, mkTag(12, 00, '4', 'G')),
+ new PixelFormatTag(PixelFormat.Gbrap16Le, mkTag('G', '4', 00, 16)),
+ new PixelFormatTag(PixelFormat.Gbrap16Be, mkTag(16, 00, '4', 'G')),
- new PixelFormatTag(PixelFormat.Xyz12Le, mkTag('X', 'Y', 'Z', 36)),
- new PixelFormatTag(PixelFormat.Xyz12Be, mkTag(36, 'Z', 'Y', 'X')),
+ new PixelFormatTag(PixelFormat.Xyz12Le, mkTag('X', 'Y', 'Z', 36)),
+ new PixelFormatTag(PixelFormat.Xyz12Be, mkTag(36, 'Z', 'Y', 'X')),
- new PixelFormatTag(PixelFormat.BayerBggr8, mkTag(0xBA, 'B', 'G', 8)),
- new PixelFormatTag(PixelFormat.BayerBggr16Le, mkTag(0xBA, 'B', 'G', 16)),
- new PixelFormatTag(PixelFormat.BayerBggr16Be, mkTag(16, 'G', 'B', 0xBA)),
- new PixelFormatTag(PixelFormat.BayerRggb8, mkTag(0xBA, 'R', 'G', 8)),
- new PixelFormatTag(PixelFormat.BayerRggb16Le, mkTag(0xBA, 'R', 'G', 16)),
- new PixelFormatTag(PixelFormat.BayerRggb16Be, mkTag(16, 'G', 'R', 0xBA)),
- new PixelFormatTag(PixelFormat.BayerGbrg8, mkTag(0xBA, 'G', 'B', 8)),
- new PixelFormatTag(PixelFormat.BayerGbrg16Le, mkTag(0xBA, 'G', 'B', 16)),
- new PixelFormatTag(PixelFormat.BayerGbrg16Be, mkTag(16, 'B', 'G', 0xBA)),
- new PixelFormatTag(PixelFormat.BayerGrbg8, mkTag(0xBA, 'G', 'R', 8)),
- new PixelFormatTag(PixelFormat.BayerGrbg16Le, mkTag(0xBA, 'G', 'R', 16)),
- new PixelFormatTag(PixelFormat.BayerGrbg16Be, mkTag(16, 'R', 'G', 0xBA)),
+ new PixelFormatTag(PixelFormat.BayerBggr8, mkTag(0xBA, 'B', 'G', 8)),
+ new PixelFormatTag(PixelFormat.BayerBggr16Le, mkTag(0xBA, 'B', 'G', 16)),
+ new PixelFormatTag(PixelFormat.BayerBggr16Be, mkTag(16, 'G', 'B', 0xBA)),
+ new PixelFormatTag(PixelFormat.BayerRggb8, mkTag(0xBA, 'R', 'G', 8)),
+ new PixelFormatTag(PixelFormat.BayerRggb16Le, mkTag(0xBA, 'R', 'G', 16)),
+ new PixelFormatTag(PixelFormat.BayerRggb16Be, mkTag(16, 'G', 'R', 0xBA)),
+ new PixelFormatTag(PixelFormat.BayerGbrg8, mkTag(0xBA, 'G', 'B', 8)),
+ new PixelFormatTag(PixelFormat.BayerGbrg16Le, mkTag(0xBA, 'G', 'B', 16)),
+ new PixelFormatTag(PixelFormat.BayerGbrg16Be, mkTag(16, 'B', 'G', 0xBA)),
+ new PixelFormatTag(PixelFormat.BayerGrbg8, mkTag(0xBA, 'G', 'R', 8)),
+ new PixelFormatTag(PixelFormat.BayerGrbg16Le, mkTag(0xBA, 'G', 'R', 16)),
+ new PixelFormatTag(PixelFormat.BayerGrbg16Be, mkTag(16, 'R', 'G', 0xBA)),
- // quicktime
- new PixelFormatTag(PixelFormat.Yuv420P, mkTag('R', '4', '2', '0')), // Radius DV YUV PAL
- new PixelFormatTag(PixelFormat.Yuv411P, mkTag('R', '4', '1', '1')), // Radius DV YUV NTSC
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('2', 'v', 'u', 'y')),
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('2', 'V', 'u', 'y')),
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('A', 'V', 'U', 'I')), // FIXME merge both fields
- new PixelFormatTag(PixelFormat.Uyvy422, mkTag('b', 'x', 'y', 'v')),
- new PixelFormatTag(PixelFormat.Yuyv422, mkTag('y', 'u', 'v', '2')),
- new PixelFormatTag(PixelFormat.Yuyv422, mkTag('y', 'u', 'v', 's')),
- new PixelFormatTag(PixelFormat.Yuyv422, mkTag('D', 'V', 'O', 'O')), // Digital Voodoo SD 8 Bit
- new PixelFormatTag(PixelFormat.Rgb555Le, mkTag('L', '5', '5', '5')),
- new PixelFormatTag(PixelFormat.Rgb565Le, mkTag('L', '5', '6', '5')),
- new PixelFormatTag(PixelFormat.Rgb565Be, mkTag('B', '5', '6', '5')),
- new PixelFormatTag(PixelFormat.Bgr24, mkTag('2', '4', 'B', 'G')),
- new PixelFormatTag(PixelFormat.Bgr24, mkTag('b', 'x', 'b', 'g')),
- new PixelFormatTag(PixelFormat.Bgra, mkTag('B', 'G', 'R', 'A')),
- new PixelFormatTag(PixelFormat.Rgba, mkTag('R', 'G', 'B', 'A')),
- new PixelFormatTag(PixelFormat.Rgb24, mkTag('b', 'x', 'r', 'g')),
- new PixelFormatTag(PixelFormat.Abgr, mkTag('A', 'B', 'G', 'R')),
- new PixelFormatTag(PixelFormat.Gray16Be, mkTag('b', '1', '6', 'g')),
- new PixelFormatTag(PixelFormat.Rgb48Be, mkTag('b', '4', '8', 'r')),
- new PixelFormatTag(PixelFormat.Rgba64Be, mkTag('b', '6', '4', 'a')),
- new PixelFormatTag(PixelFormat.BayerRggb16Be, mkTag('B', 'G', 'G', 'R')),
+ // quicktime
+ new PixelFormatTag(PixelFormat.Yuv420P, mkTag('R', '4', '2', '0')), // Radius DV YUV PAL
+ new PixelFormatTag(PixelFormat.Yuv411P, mkTag('R', '4', '1', '1')), // Radius DV YUV NTSC
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('2', 'v', 'u', 'y')),
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('2', 'V', 'u', 'y')),
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('A', 'V', 'U', 'I')), // FIXME merge both fields
+ new PixelFormatTag(PixelFormat.Uyvy422, mkTag('b', 'x', 'y', 'v')),
+ new PixelFormatTag(PixelFormat.Yuyv422, mkTag('y', 'u', 'v', '2')),
+ new PixelFormatTag(PixelFormat.Yuyv422, mkTag('y', 'u', 'v', 's')),
+ new PixelFormatTag(PixelFormat.Yuyv422, mkTag('D', 'V', 'O', 'O')), // Digital Voodoo SD 8 Bit
+ new PixelFormatTag(PixelFormat.Rgb555Le, mkTag('L', '5', '5', '5')),
+ new PixelFormatTag(PixelFormat.Rgb565Le, mkTag('L', '5', '6', '5')),
+ new PixelFormatTag(PixelFormat.Rgb565Be, mkTag('B', '5', '6', '5')),
+ new PixelFormatTag(PixelFormat.Bgr24, mkTag('2', '4', 'B', 'G')),
+ new PixelFormatTag(PixelFormat.Bgr24, mkTag('b', 'x', 'b', 'g')),
+ new PixelFormatTag(PixelFormat.Bgra, mkTag('B', 'G', 'R', 'A')),
+ new PixelFormatTag(PixelFormat.Rgba, mkTag('R', 'G', 'B', 'A')),
+ new PixelFormatTag(PixelFormat.Rgb24, mkTag('b', 'x', 'r', 'g')),
+ new PixelFormatTag(PixelFormat.Abgr, mkTag('A', 'B', 'G', 'R')),
+ new PixelFormatTag(PixelFormat.Gray16Be, mkTag('b', '1', '6', 'g')),
+ new PixelFormatTag(PixelFormat.Rgb48Be, mkTag('b', '4', '8', 'r')),
+ new PixelFormatTag(PixelFormat.Rgba64Be, mkTag('b', '6', '4', 'a')),
+ new PixelFormatTag(PixelFormat.BayerRggb16Be, mkTag('B', 'G', 'G', 'R')),
- // vlc
- new PixelFormatTag(PixelFormat.Yuv410P, mkTag('I', '4', '1', '0')),
- new PixelFormatTag(PixelFormat.Yuv411P, mkTag('I', '4', '1', '1')),
- new PixelFormatTag(PixelFormat.Yuv422P, mkTag('I', '4', '2', '2')),
- new PixelFormatTag(PixelFormat.Yuv440P, mkTag('I', '4', '4', '0')),
- new PixelFormatTag(PixelFormat.Yuv444P, mkTag('I', '4', '4', '4')),
- new PixelFormatTag(PixelFormat.Yuvj420P, mkTag('J', '4', '2', '0')),
- new PixelFormatTag(PixelFormat.Yuvj422P, mkTag('J', '4', '2', '2')),
- new PixelFormatTag(PixelFormat.Yuvj440P, mkTag('J', '4', '4', '0')),
- new PixelFormatTag(PixelFormat.Yuvj444P, mkTag('J', '4', '4', '4')),
- new PixelFormatTag(PixelFormat.Yuva444P, mkTag('Y', 'U', 'V', 'A')),
- new PixelFormatTag(PixelFormat.Yuva420P, mkTag('I', '4', '0', 'A')),
- new PixelFormatTag(PixelFormat.Yuva422P, mkTag('I', '4', '2', 'A')),
- new PixelFormatTag(PixelFormat.Rgb8, mkTag('R', 'G', 'B', '2')),
- new PixelFormatTag(PixelFormat.Rgb555Le, mkTag('R', 'V', '1', '5')),
- new PixelFormatTag(PixelFormat.Rgb565Le, mkTag('R', 'V', '1', '6')),
- new PixelFormatTag(PixelFormat.Bgr24, mkTag('R', 'V', '2', '4')),
- new PixelFormatTag(PixelFormat.Bgrx, mkTag('R', 'V', '3', '2')),
- new PixelFormatTag(PixelFormat.Rgba, mkTag('A', 'V', '3', '2')),
- new PixelFormatTag(PixelFormat.Yuv420P9Le, mkTag('I', '0', '9', 'L')),
- new PixelFormatTag(PixelFormat.Yuv420P9Be, mkTag('I', '0', '9', 'B')),
- new PixelFormatTag(PixelFormat.Yuv422P9Le, mkTag('I', '2', '9', 'L')),
- new PixelFormatTag(PixelFormat.Yuv422P9Be, mkTag('I', '2', '9', 'B')),
- new PixelFormatTag(PixelFormat.Yuv444P9Le, mkTag('I', '4', '9', 'L')),
- new PixelFormatTag(PixelFormat.Yuv444P9Be, mkTag('I', '4', '9', 'B')),
- new PixelFormatTag(PixelFormat.Yuv420P10Le, mkTag('I', '0', 'A', 'L')),
- new PixelFormatTag(PixelFormat.Yuv420P10Be, mkTag('I', '0', 'A', 'B')),
- new PixelFormatTag(PixelFormat.Yuv422P10Le, mkTag('I', '2', 'A', 'L')),
- new PixelFormatTag(PixelFormat.Yuv422P10Be, mkTag('I', '2', 'A', 'B')),
- new PixelFormatTag(PixelFormat.Yuv444P10Le, mkTag('I', '4', 'A', 'L')),
- new PixelFormatTag(PixelFormat.Yuv444P10Be, mkTag('I', '4', 'A', 'B')),
- new PixelFormatTag(PixelFormat.Yuv420P12Le, mkTag('I', '0', 'C', 'L')),
- new PixelFormatTag(PixelFormat.Yuv420P12Be, mkTag('I', '0', 'C', 'B')),
- new PixelFormatTag(PixelFormat.Yuv422P12Le, mkTag('I', '2', 'C', 'L')),
- new PixelFormatTag(PixelFormat.Yuv422P12Be, mkTag('I', '2', 'C', 'B')),
- new PixelFormatTag(PixelFormat.Yuv444P12Le, mkTag('I', '4', 'C', 'L')),
- new PixelFormatTag(PixelFormat.Yuv444P12Be, mkTag('I', '4', 'C', 'B')),
- new PixelFormatTag(PixelFormat.Yuv420P16Le, mkTag('I', '0', 'F', 'L')),
- new PixelFormatTag(PixelFormat.Yuv420P16Be, mkTag('I', '0', 'F', 'B')),
- new PixelFormatTag(PixelFormat.Yuv444P16Le, mkTag('I', '4', 'F', 'L')),
- new PixelFormatTag(PixelFormat.Yuv444P16Be, mkTag('I', '4', 'F', 'B')),
+ // vlc
+ new PixelFormatTag(PixelFormat.Yuv410P, mkTag('I', '4', '1', '0')),
+ new PixelFormatTag(PixelFormat.Yuv411P, mkTag('I', '4', '1', '1')),
+ new PixelFormatTag(PixelFormat.Yuv422P, mkTag('I', '4', '2', '2')),
+ new PixelFormatTag(PixelFormat.Yuv440P, mkTag('I', '4', '4', '0')),
+ new PixelFormatTag(PixelFormat.Yuv444P, mkTag('I', '4', '4', '4')),
+ new PixelFormatTag(PixelFormat.Yuvj420P, mkTag('J', '4', '2', '0')),
+ new PixelFormatTag(PixelFormat.Yuvj422P, mkTag('J', '4', '2', '2')),
+ new PixelFormatTag(PixelFormat.Yuvj440P, mkTag('J', '4', '4', '0')),
+ new PixelFormatTag(PixelFormat.Yuvj444P, mkTag('J', '4', '4', '4')),
+ new PixelFormatTag(PixelFormat.Yuva444P, mkTag('Y', 'U', 'V', 'A')),
+ new PixelFormatTag(PixelFormat.Yuva420P, mkTag('I', '4', '0', 'A')),
+ new PixelFormatTag(PixelFormat.Yuva422P, mkTag('I', '4', '2', 'A')),
+ new PixelFormatTag(PixelFormat.Rgb8, mkTag('R', 'G', 'B', '2')),
+ new PixelFormatTag(PixelFormat.Rgb555Le, mkTag('R', 'V', '1', '5')),
+ new PixelFormatTag(PixelFormat.Rgb565Le, mkTag('R', 'V', '1', '6')),
+ new PixelFormatTag(PixelFormat.Bgr24, mkTag('R', 'V', '2', '4')),
+ new PixelFormatTag(PixelFormat.Bgrx, mkTag('R', 'V', '3', '2')),
+ new PixelFormatTag(PixelFormat.Rgba, mkTag('A', 'V', '3', '2')),
+ new PixelFormatTag(PixelFormat.Yuv420P9Le, mkTag('I', '0', '9', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv420P9Be, mkTag('I', '0', '9', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv422P9Le, mkTag('I', '2', '9', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv422P9Be, mkTag('I', '2', '9', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv444P9Le, mkTag('I', '4', '9', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv444P9Be, mkTag('I', '4', '9', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv420P10Le, mkTag('I', '0', 'A', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv420P10Be, mkTag('I', '0', 'A', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv422P10Le, mkTag('I', '2', 'A', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv422P10Be, mkTag('I', '2', 'A', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv444P10Le, mkTag('I', '4', 'A', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv444P10Be, mkTag('I', '4', 'A', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv420P12Le, mkTag('I', '0', 'C', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv420P12Be, mkTag('I', '0', 'C', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv422P12Le, mkTag('I', '2', 'C', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv422P12Be, mkTag('I', '2', 'C', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv444P12Le, mkTag('I', '4', 'C', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv444P12Be, mkTag('I', '4', 'C', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv420P16Le, mkTag('I', '0', 'F', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv420P16Be, mkTag('I', '0', 'F', 'B')),
+ new PixelFormatTag(PixelFormat.Yuv444P16Le, mkTag('I', '4', 'F', 'L')),
+ new PixelFormatTag(PixelFormat.Yuv444P16Be, mkTag('I', '4', 'F', 'B')),
- // special
- new PixelFormatTag(PixelFormat.Rgb565Le, mkTag(3, 0, 0, 0)), // flipped RGB565LE
- new PixelFormatTag(PixelFormat.Yuv444P, mkTag('Y', 'V', '2', '4')), // YUV444P, swapped UV
+ // special
+ new PixelFormatTag(PixelFormat.Rgb565Le, mkTag(3, 0, 0, 0)), // flipped RGB565LE
+ new PixelFormatTag(PixelFormat.Yuv444P, mkTag('Y', 'V', '2', '4')), // YUV444P, swapped UV
- new PixelFormatTag(PixelFormat.None, 0),
- };
+ new PixelFormatTag(PixelFormat.None, 0),
+ };
#pragma warning restore CS0618
- /// <summary>
- /// https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavutil/macros.h#L55
- /// </summary>
- private static int mkTag(int a, int b, int c, int d) => a | (b << 8) | (c << 16) | (d << 24);
+ /// <summary>
+ /// https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavutil/macros.h#L55
+ /// </summary>
+ private static int mkTag(int a, int b, int c, int d) => a | (b << 8) | (c << 16) | (d << 24);
- /// <summary>
- /// https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavcodec/raw.c#L341-L369
- /// </summary>
- public static PixelFormat FindRawPixelFormat(int fourcc)
+ /// <summary>
+ /// https://github.com/FFmpeg/FFmpeg/blob/a64e250680fbc7296eff714b81b54b1c0e2d185f/libavcodec/raw.c#L341-L369
+ /// </summary>
+ public static PixelFormat FindRawPixelFormat(int fourcc)
+ {
+ foreach (PixelFormatTag tag in RawPixelFormatTags)
{
- foreach (PixelFormatTag tag in RawPixelFormatTags)
- {
- if (tag.FourCC == fourcc)
- return tag.PixelFormat;
- }
-
- return PixelFormat.None;
+ if (tag.FourCC == fourcc)
+ return tag.PixelFormat;
}
+
+ return PixelFormat.None;
}
}
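(For orientation, here is a minimal standalone sketch of how the lookup table above is consumed. The `FourCCDemo` class and `MkTag` copy are invented for the example; only the bit layout, which matches FFmpeg's MKTAG macro linked in the doc comment, is taken from the patch.)

```csharp
using System;

// Hypothetical demo; not part of the patch.
static class FourCCDemo
{
    // Same little-endian packing as the private mkTag above and FFmpeg's MKTAG:
    // byte 0 = a, byte 1 = b, byte 2 = c, byte 3 = d.
    private static int MkTag(int a, int b, int c, int d) =>
        a | (b << 8) | (c << 16) | (d << 24);

    private static void Main()
    {
        int fourcc = MkTag('y', 'u', 'v', '2');
        Console.WriteLine($"0x{fourcc:X8}"); // prints 0x32767579
        // Scanning the table above for this value yields PixelFormat.Yuyv422,
        // which is what FindRawPixelFormat returns for it; unknown tags fall
        // through the loop and come back as PixelFormat.None.
    }
}
```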
diff --git a/SeeShark/Utils/V4l2Utils.cs b/SeeShark/Utils/V4l2Utils.cs
index 19a9811..4839734 100644
--- a/SeeShark/Utils/V4l2Utils.cs
+++ b/SeeShark/Utils/V4l2Utils.cs
@@ -10,124 +10,123 @@
using SeeShark.Device;
using SeeShark.Interop.Libc;
-namespace SeeShark.Utils
+namespace SeeShark.Utils;
+
+internal static class V4l2Utils
{
- internal static class V4l2Utils
+ public static void FillDeviceOptions(CameraInfo[] devices)
{
- public static void FillDeviceOptions(CameraInfo[] devices)
- {
- foreach (CameraInfo device in devices)
- device.AvailableVideoInputOptions = getAvailableOptions(device).ToArray();
- }
+ foreach (CameraInfo device in devices)
+ device.AvailableVideoInputOptions = getAvailableOptions(device).ToArray();
+ }
+
+ /// <summary>
+ /// Get available video input options of a V4l2 device.
+ /// Inspired from https://github.com/ZhangGaoxing/v4l2.net
+ /// </summary>
+ private unsafe static List<VideoInputOptions> getAvailableOptions(CameraInfo device)
+ {
+ List<VideoInputOptions> options = new List<VideoInputOptions>();
+
+ int deviceFd = Libc.open(device.Path, FileOpenFlags.O_RDWR);
+ if (deviceFd < 0)
+ throw new IOException($"Error {Marshal.GetLastWin32Error()}: Can not open video device {device}");
- /// <summary>
- /// Get available video input options of a V4l2 device.
- /// Inspired from https://github.com/ZhangGaoxing/v4l2.net
- /// </summary>
- private unsafe static List<VideoInputOptions> getAvailableOptions(CameraInfo device)
+ v4l2_fmtdesc fmtdesc = new v4l2_fmtdesc
{
- List<VideoInputOptions> options = new List<VideoInputOptions>();
+ index = 0,
+ type = v4l2_buf_type.V4L2_BUF_TYPE_VIDEO_CAPTURE
+ };
- int deviceFd = Libc.open(device.Path, FileOpenFlags.O_RDWR);
- if (deviceFd < 0)
- throw new IOException($"Error {Marshal.GetLastWin32Error()}: Can not open video device {device}");
+ List<V4l2InputFormat> supportedInputFormats = new List<V4l2InputFormat>();
+ while (v4l2Struct(deviceFd, VideoSettings.VIDIOC_ENUM_FMT, ref fmtdesc) != -1)
+ {
+ supportedInputFormats.Add(fmtdesc.pixelformat);
+ fmtdesc.index++;
+ }
- v4l2_fmtdesc fmtdesc = new v4l2_fmtdesc
+ foreach (V4l2InputFormat inputFormat in supportedInputFormats)
+ {
+ v4l2_frmsizeenum frmsize = new v4l2_frmsizeenum
{
index = 0,
- type = v4l2_buf_type.V4L2_BUF_TYPE_VIDEO_CAPTURE
+ pixel_format = inputFormat,
};
- List<V4l2InputFormat> supportedInputFormats = new List<V4l2InputFormat>();
- while (v4l2Struct(deviceFd, VideoSettings.VIDIOC_ENUM_FMT, ref fmtdesc) != -1)
- {
- supportedInputFormats.Add(fmtdesc.pixelformat);
- fmtdesc.index++;
- }
-
- foreach (V4l2InputFormat inputFormat in supportedInputFormats)
+ while (v4l2Struct(deviceFd, VideoSettings.VIDIOC_ENUM_FRAMESIZES, ref frmsize) != -1)
{
- v4l2_frmsizeenum frmsize = new v4l2_frmsizeenum
+ if (frmsize.type == v4l2_frmsizetypes.V4L2_FRMSIZE_TYPE_DISCRETE)
{
- index = 0,
- pixel_format = inputFormat,
- };
-
- while (v4l2Struct(deviceFd, VideoSettings.VIDIOC_ENUM_FRAMESIZES, ref frmsize) != -1)
+ fillFrameIntervalOptions(options, deviceFd, inputFormat, frmsize.discrete.width, frmsize.discrete.height);
+ }
+ else
{
- if (frmsize.type == v4l2_frmsizetypes.V4L2_FRMSIZE_TYPE_DISCRETE)
+ for (uint width = frmsize.stepwise.min_width; width < frmsize.stepwise.max_width; width += frmsize.stepwise.step_width)
{
- fillFrameIntervalOptions(options, deviceFd, inputFormat, frmsize.discrete.width, frmsize.discrete.height);
+ for (uint height = frmsize.stepwise.min_height; height < frmsize.stepwise.max_height; height += frmsize.stepwise.step_height)
+ fillFrameIntervalOptions(options, deviceFd, inputFormat, width, height);
}
- else
- {
- for (uint width = frmsize.stepwise.min_width; width < frmsize.stepwise.max_width; width += frmsize.stepwise.step_width)
- {
- for (uint height = frmsize.stepwise.min_height; height < frmsize.stepwise.max_height; height += frmsize.stepwise.step_height)
- fillFrameIntervalOptions(options, deviceFd, inputFormat, width, height);
- }
- }
- frmsize.index++;
}
+ frmsize.index++;
}
-
- Libc.close(deviceFd);
- return options;
}
- private static void fillFrameIntervalOptions(List<VideoInputOptions> options, int deviceFd, V4l2InputFormat pixelFormat, uint width, uint height)
+ Libc.close(deviceFd);
+ return options;
+ }
+
+ private static void fillFrameIntervalOptions(List<VideoInputOptions> options, int deviceFd, V4l2InputFormat pixelFormat, uint width, uint height)
+ {
+ v4l2_frmivalenum frmival = new v4l2_frmivalenum
{
- v4l2_frmivalenum frmival = new v4l2_frmivalenum
- {
- index = 0,
- pixel_format = pixelFormat,
- width = width,
- height = height,
- };
+ index = 0,
+ pixel_format = pixelFormat,
+ width = width,
+ height = height,
+ };
- while (v4l2Struct(deviceFd, VideoSettings.VIDIOC_ENUM_FRAMEINTERVALS, ref frmival) != -1)
+ while (v4l2Struct(deviceFd, VideoSettings.VIDIOC_ENUM_FRAMEINTERVALS, ref frmival) != -1)
+ {
+ if (frmival.type == v4l2_frmivaltypes.V4L2_FRMIVAL_TYPE_DISCRETE)
{
- if (frmival.type == v4l2_frmivaltypes.V4L2_FRMIVAL_TYPE_DISCRETE)
+ options.Add(new VideoInputOptions
{
- options.Add(new VideoInputOptions
+ InputFormat = pixelFormat.ToString(),
+ VideoSize = ((int)width, (int)height),
+ Framerate = new AVRational
{
- InputFormat = pixelFormat.ToString(),
- VideoSize = ((int)width, (int)height),
- Framerate = new AVRational
- {
- num = (int)frmival.discrete.denominator,
- den = (int)frmival.discrete.numerator,
- },
- });
- }
- frmival.index++;
+ num = (int)frmival.discrete.denominator,
+ den = (int)frmival.discrete.numerator,
+ },
+ });
}
+ frmival.index++;
}
+ }
- /// <summary>
- /// Get and set v4l2 struct.
- /// </summary>
- /// <typeparam name="T">V4L2 struct</typeparam>
- /// <param name="request">V4L2 request value</param>
- /// <param name="struct">The struct need to be read or set</param>
- /// <returns>The ioctl result</returns>
- private static unsafe int v4l2Struct<T>(int deviceFd, VideoSettings request, ref T @struct)
- where T : struct
- {
- IntPtr ptr = Marshal.AllocHGlobal(Marshal.SizeOf(@struct));
- Marshal.StructureToPtr(@struct, ptr, true);
+ /// <summary>
+ /// Get and set v4l2 struct.
+ /// </summary>
+ /// <typeparam name="T">V4L2 struct</typeparam>
+ /// <param name="request">V4L2 request value</param>
+ /// <param name="struct">The struct need to be read or set</param>
+ /// <returns>The ioctl result</returns>
+ private static unsafe int v4l2Struct<T>(int deviceFd, VideoSettings request, ref T @struct)
+ where T : struct
+ {
+ IntPtr ptr = Marshal.AllocHGlobal(Marshal.SizeOf(@struct));
+ Marshal.StructureToPtr(@struct, ptr, true);
- int result = Libc.ioctl(deviceFd, (int)request, ptr);
- // if (result < 0)
- // {
- // int errno = Marshal.GetLastPInvokeError();
- // Console.Error.WriteLine($"Error: {errno}");
- // sbyte* explanation = Libc.explain_errno_ioctl(errno, deviceFd, (int)request, ptr);
- // Console.Error.WriteLine($"- {new string(explanation)}");
- // }
+ int result = Libc.ioctl(deviceFd, (int)request, ptr);
+ // if (result < 0)
+ // {
+ // int errno = Marshal.GetLastPInvokeError();
+ // Console.Error.WriteLine($"Error: {errno}");
+ // sbyte* explanation = Libc.explain_errno_ioctl(errno, deviceFd, (int)request, ptr);
+ // Console.Error.WriteLine($"- {new string(explanation)}");
+ // }
- @struct = Marshal.PtrToStructure<T>(ptr);
- return result;
- }
+ @struct = Marshal.PtrToStructure<T>(ptr);
+ return result;
}
}
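(Two details in this file are easy to miss. First, VIDIOC_ENUM_FRAMEINTERVALS enumerates frame *intervals* in seconds per frame, which is why fillFrameIntervalOptions swaps numerator and denominator when building the AVRational framerate: a discrete interval of 1/30 s becomes 30/1 fps. Second, v4l2Struct round-trips a managed struct through unmanaged memory around the ioctl call. Below is a hedged sketch of that pattern; the `IoctlSketch` class and its direct `ioctl` binding are assumptions standing in for SeeShark.Interop.Libc, and the explicit FreeHGlobal is added for illustration, since the patched code does not free the buffer.)

```csharp
using System;
using System.Runtime.InteropServices;

// Sketch only; mirrors the round trip in v4l2Struct with explicit cleanup added.
internal static class IoctlSketch
{
    // Minimal binding, matching how the patch calls Libc.ioctl(fd, (int)request, ptr).
    [DllImport("libc", SetLastError = true)]
    private static extern int ioctl(int fd, int request, IntPtr arg);

    internal static int IoctlStruct<T>(int deviceFd, int request, ref T value) where T : struct
    {
        IntPtr ptr = Marshal.AllocHGlobal(Marshal.SizeOf(value));
        try
        {
            // false: the freshly allocated buffer holds no previous structure to release.
            Marshal.StructureToPtr(value, ptr, fDeleteOld: false);
            int result = ioctl(deviceFd, request, ptr);
            // Read back whatever fields the driver filled in (e.g. frmsize.discrete).
            value = Marshal.PtrToStructure<T>(ptr);
            return result;
        }
        finally
        {
            Marshal.FreeHGlobal(ptr); // added in this sketch; not present in the patch above
        }
    }
}
```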
diff --git a/SeeShark/VideoInputOptions.cs b/SeeShark/VideoInputOptions.cs
index 9b5598f..3ccacf8 100644
--- a/SeeShark/VideoInputOptions.cs
+++ b/SeeShark/VideoInputOptions.cs
@@ -6,117 +6,116 @@
using FFmpeg.AutoGen;
using SeeShark.Device;
-namespace SeeShark
+namespace SeeShark;
+
+/// <summary>
+/// Options to give to a <see cref="VideoDevice"/> for it to feed them to FFmpeg when opening a video input stream.
+/// We can indeed open an input in different ways. For example, you might want to open your camera at a different resolution, or change the input format.
+/// </summary>
+/// <remarks>
+/// Some examples of input options are:
+///
+/// - https://ffmpeg.org/ffmpeg-devices.html#video4linux2_002c-v4l2
+/// - https://ffmpeg.org/ffmpeg-devices.html#dshow
+/// - https://ffmpeg.org/ffmpeg-devices.html#avfoundation
+///
+/// </remarks>
+public class VideoInputOptions
{
 /// <summary>
- /// Options to give to a <see cref="VideoDevice"/> for it to feed them to FFmpeg when opening a video input stream.
- /// We can indeed open an input in different ways. For example, you might want to open your camera at a different resolution, or change the input format.
+ /// To request a specific resolution of the video stream.
 /// </summary>
 /// <remarks>
- /// Some examples of input options are:
- ///
- /// - https://ffmpeg.org/ffmpeg-devices.html#video4linux2_002c-v4l2
- /// - https://ffmpeg.org/ffmpeg-devices.html#dshow
- /// - https://ffmpeg.org/ffmpeg-devices.html#avfoundation
- ///
+ /// The underlying driver will change it back to a compatible resolution.
 /// </remarks>
- public class VideoInputOptions
- {
- /// <summary>
- /// To request a specific resolution of the video stream.
- /// </summary>
- /// <remarks>
- /// The underlying driver will change it back to a compatible resolution.
- /// </remarks>
- /// <value>(width, height)</value>
- public (int, int)? VideoSize { get; set; }
+ /// <value>(width, height)</value>
+ public (int, int)? VideoSize { get; set; }
- /// <summary>
- /// To request the capture to start from a specific point
- /// </summary>
- /// <value>(x, y)</value>
- public (int, int)? VideoPosition { get; set; }
- /// <summary>
- /// To request a specific framerate for the video stream.
- /// </summary>
- /// <remarks>
- /// The underlying driver will change it back to a compatible framerate.
- /// </remarks>
- public AVRational? Framerate { get; set; }
- /// <summary>
- /// To request a specific input format for the video stream.
- /// If the video stream is raw, it is the name of its pixel format, otherwise it is the name of its codec.
- /// </summary>
- public string? InputFormat { get; set; }
- /// <summary>
- /// Used on Windows only - tells us if the video stream is raw or not.
- /// If the video stream is raw, it is a pixel format, otherwise it is a codec.
- /// </summary>
- public bool IsRaw { get; set; }
+ /// <summary>
+ /// To request the capture to start from a specific point
+ /// </summary>
+ /// <value>(x, y)</value>
+ public (int, int)? VideoPosition { get; set; }
+ /// <summary>
+ /// To request a specific framerate for the video stream.
+ /// </summary>
+ /// <remarks>
+ /// The underlying driver will change it back to a compatible framerate.
+ /// </remarks>
+ public AVRational? Framerate { get; set; }
+ /// <summary>
+ /// To request a specific input format for the video stream.
+ /// If the video stream is raw, it is the name of its pixel format, otherwise it is the name of its codec.
+ /// </summary>
+ public string? InputFormat { get; set; }
+ /// <summary>
+ /// Used on Windows only - tells us if the video stream is raw or not.
+ /// If the video stream is raw, it is a pixel format, otherwise it is a codec.
+ /// </summary>
+ public bool IsRaw { get; set; }
+
+ /// <summary>
+ /// Combines all properties into a dictionary of options that FFmpeg can use.
+ /// </summary>
+ public virtual IDictionary<string, string> ToAVDictOptions(DeviceInputFormat deviceFormat)
+ {
+ Dictionary<string, string> dict = new();
- /// <summary>
- /// Combines all properties into a dictionary of options that FFmpeg can use.
- /// </summary>
- public virtual IDictionary<string, string> ToAVDictOptions(DeviceInputFormat deviceFormat)
+ if (VideoSize != null)
{
- Dictionary<string, string> dict = new();
+ (int width, int height) = VideoSize.Value;
+ dict.Add("video_size", $"{width}x{height}");
+ }
- if (VideoSize != null)
- {
- (int width, int height) = VideoSize.Value;
- dict.Add("video_size", $"{width}x{height}");
- }
+ if (Framerate != null)
+ dict.Add("framerate", $"{Framerate.Value.num}/{Framerate.Value.den}");
- if (Framerate != null)
- dict.Add("framerate", $"{Framerate.Value.num}/{Framerate.Value.den}");
+ if (InputFormat != null)
+ {
+ string key = "input_format";
+ if (deviceFormat == DeviceInputFormat.DShow)
+ key = IsRaw ? "pixel_format" : "vcodec";
- if (InputFormat != null)
+ // I have no idea why there is an inconsistency but here we are...
+ string inputFormat = InputFormat switch
{
- string key = "input_format";
- if (deviceFormat == DeviceInputFormat.DShow)
- key = IsRaw ? "pixel_format" : "vcodec";
-
- // I have no idea why there is an inconsistency but here we are...
- string inputFormat = InputFormat switch
- {
- "YUYV" => "yuv422p",
- "YUV420" => "yuv420p",
- _ => InputFormat.ToLower(),
- };
- dict.Add(key, inputFormat);
- }
+ "YUYV" => "yuv422p",
+ "YUV420" => "yuv420p",
+ _ => InputFormat.ToLower(),
+ };
+ dict.Add(key, inputFormat);
+ }
- if (VideoPosition != null)
+ if (VideoPosition != null)
+ {
+ switch (deviceFormat)
{
- switch (deviceFormat)
- {
- case DeviceInputFormat.X11Grab:
- {
- dict.Add("grab_x", VideoPosition.Value.Item1.ToString());
- dict.Add("grab_y", VideoPosition.Value.Item2.ToString());
- break;
- }
- case DeviceInputFormat.GdiGrab:
- {
- dict.Add("offset_x", VideoPosition.Value.Item1.ToString());
- dict.Add("offset_y", VideoPosition.Value.Item2.ToString());
- break;
- }
- }
+ case DeviceInputFormat.X11Grab:
+ {
+ dict.Add("grab_x", VideoPosition.Value.Item1.ToString());
+ dict.Add("grab_y", VideoPosition.Value.Item2.ToString());
+ break;
+ }
+ case DeviceInputFormat.GdiGrab:
+ {
+ dict.Add("offset_x", VideoPosition.Value.Item1.ToString());
+ dict.Add("offset_y", VideoPosition.Value.Item2.ToString());
+ break;
+ }
}
-
- return dict;
}
- public override string ToString()
+ return dict;
+ }
+
+ public override string ToString()
+ {
+ string s = $"{InputFormat} {VideoSize}";
+ if (Framerate != null)
{
- string s = $"{InputFormat} {VideoSize}";
- if (Framerate != null)
- {
- double fps = ffmpeg.av_q2d(Framerate.Value);
- s += $" - {fps:0.000} fps";
- }
- return s;
+ double fps = ffmpeg.av_q2d(Framerate.Value);
+ s += $" - {fps:0.000} fps";
}
+ return s;
}
}
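(For completeness, a hedged usage sketch of the options type after this change. `DeviceInputFormat.V4l2` is assumed to exist alongside the DShow/X11Grab/GdiGrab members referenced above; the expected output follows directly from the ToAVDictOptions body in the patch.)

```csharp
using System;
using FFmpeg.AutoGen;
using SeeShark;
using SeeShark.Device;

VideoInputOptions options = new VideoInputOptions
{
    VideoSize = (1280, 720),
    Framerate = new AVRational { num = 30, den = 1 },
    InputFormat = "YUYV", // V4L2 format name; remapped to "yuv422p" by ToAVDictOptions
};

foreach ((string key, string value) in options.ToAVDictOptions(DeviceInputFormat.V4l2))
    Console.WriteLine($"{key} = {value}");

// Expected output, per the code above:
//   video_size = 1280x720
//   framerate = 30/1
//   input_format = yuv422p
```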