diff --git a/samples/FFmpegView.AvaloniaDemo/App.axaml b/samples/FFmpegView.AvaloniaDemo/App.axaml
index 7cf6bcd..307e392 100644
--- a/samples/FFmpegView.AvaloniaDemo/App.axaml
+++ b/samples/FFmpegView.AvaloniaDemo/App.axaml
@@ -1,7 +1,25 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/samples/FFmpegView.AvaloniaDemo/App.axaml.cs b/samples/FFmpegView.AvaloniaDemo/App.axaml.cs
index 9c91187..890e219 100644
--- a/samples/FFmpegView.AvaloniaDemo/App.axaml.cs
+++ b/samples/FFmpegView.AvaloniaDemo/App.axaml.cs
@@ -1,7 +1,9 @@
using Avalonia;
+using Avalonia.Controls;
using Avalonia.Controls.ApplicationLifetimes;
using Avalonia.Markup.Xaml;
using Avalonia.Media;
+using FFmpegView.AvaloniaDemo.Models;
using FFmpegView.Bass;
using System;
@@ -14,14 +16,42 @@ public override void Initialize()
AvaloniaXamlLoader.Load(this);
BassCore.Initialize();
}
+
public override void OnFrameworkInitializationCompleted()
{
if (ApplicationLifetime is IClassicDesktopStyleApplicationLifetime desktop)
- desktop.MainWindow = new MainWindow();
+ {
+ desktop.MainWindow = new MainWindow()
+ {
+ DataContext = new MainWindowViewModel()
+ };
+ }
+
Console.WriteLine(FontManager.Current.DefaultFontFamilyName);
Console.WriteLine(FontManager.Current.PlatformImpl.GetDefaultFontFamilyName());
Console.WriteLine(string.Join(';', FontManager.Current.PlatformImpl.GetInstalledFontFamilyNames()));
base.OnFrameworkInitializationCompleted();
}
+
+ public static Window? MainWindow
+ {
+ get
+ {
+ if (Application.Current?.ApplicationLifetime is IClassicDesktopStyleApplicationLifetime desktop)
+ {
+ return desktop.MainWindow;
+ }
+
+ return null;
+ }
+ }
+
+ public static MainWindowViewModel? ViewModel
+ {
+ get
+ {
+ return MainWindow?.DataContext as MainWindowViewModel;
+ }
+ }
}
}
\ No newline at end of file
diff --git a/samples/FFmpegView.AvaloniaDemo/FFmpegView.AvaloniaDemo.csproj b/samples/FFmpegView.AvaloniaDemo/FFmpegView.AvaloniaDemo.csproj
index 4f0b769..cd93c11 100644
--- a/samples/FFmpegView.AvaloniaDemo/FFmpegView.AvaloniaDemo.csproj
+++ b/samples/FFmpegView.AvaloniaDemo/FFmpegView.AvaloniaDemo.csproj
@@ -4,10 +4,16 @@
netcoreapp3.1
+
-
+
+
+
+ MSBuild:Compile
+
+
\ No newline at end of file
diff --git a/samples/FFmpegView.AvaloniaDemo/MainWindow.axaml b/samples/FFmpegView.AvaloniaDemo/MainWindow.axaml
index 5c3206a..0ce53a6 100644
--- a/samples/FFmpegView.AvaloniaDemo/MainWindow.axaml
+++ b/samples/FFmpegView.AvaloniaDemo/MainWindow.axaml
@@ -2,8 +2,87 @@
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
+ xmlns:vm="using:FFmpegView.AvaloniaDemo.Models"
mc:Ignorable="d" d:DesignWidth="400" d:DesignHeight="250"
x:Class="FFmpegView.AvaloniaDemo.MainWindow"
- Title="FFmpegView.AvaloniaDemo">
-
+ xmlns:player="clr-namespace:FFmpegView;assembly=FFmpegView.Avalonia"
+ Title="FFmpegView.AvaloniaDemo"
+ ShowInTaskbar="True"
+ WindowState="Normal"
+ ExtendClientAreaToDecorationsHint="True"
+ Background="#0E1621">
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/samples/FFmpegView.AvaloniaDemo/MainWindow.axaml.cs b/samples/FFmpegView.AvaloniaDemo/MainWindow.axaml.cs
index bdfc808..6806a39 100644
--- a/samples/FFmpegView.AvaloniaDemo/MainWindow.axaml.cs
+++ b/samples/FFmpegView.AvaloniaDemo/MainWindow.axaml.cs
@@ -1,27 +1,103 @@
using Avalonia.Controls;
+using Avalonia.Interactivity;
using Avalonia.Markup.Xaml;
+using FFmpegView.Avalonia;
using FFmpegView.Bass;
-using System.Collections.Generic;
+using System;
+using System.IO;
+using System.Threading.Tasks;
namespace FFmpegView.AvaloniaDemo
{
- public class MainWindow : Window
+ public partial class MainWindow : Window
{
+ private FFmpegView Media;
+ private string source;
+
public MainWindow()
{
AvaloniaXamlLoader.Load(this);
InitializeComponent();
+ DataContext = App.ViewModel;
+
+ var media = this.FindControl<FFmpegView>("Media");
+ media.SetAudioHandler(new BassAudioStreamDecoder());
+ media.PositionChanged += OnMediaPositionChanged;
}
+
private void InitializeComponent()
{
Width = 800;
Height = 600;
+ }
+
+ private void OnMediaPositionChanged(object sender, PositionChangedEventArgs e)
+ {
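+ // Mirror the control's current position and playback range into the view model.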
+ var media = sender as FFmpegView;
+
+ if (!media.IsOpen)
+ {
+ return;
+ }
+
+ if (media.State == MediaState.IsSeeking)
+ return;
+
+ App.ViewModel.PositionTime = media.Position;
+ App.ViewModel.Position = media.Position?.TotalSeconds;
+
+ App.ViewModel.PlaybackStartTime = media.PlaybackStartTime?.TotalSeconds ?? 0;
+ App.ViewModel.PlaybackEndTime = media.PlaybackEndTime?.TotalSeconds ?? 0;
+ }
+
+ private void OnPlayClick(object? sender, RoutedEventArgs e)
+ {
+ if (Design.IsDesignMode)
+ return;
+
+ App.ViewModel?.MediaElement?.Play();
+ }
+
+ private void OnPauseClick(object sender, RoutedEventArgs e)
+ {
+ if (Design.IsDesignMode)
+ return;
+
+ App.ViewModel?.MediaElement?.Pause();
+ }
+
+ private void OnStopClick(object sender, RoutedEventArgs e)
+ {
+ if (Design.IsDesignMode)
+ return;
+
+ App.ViewModel.MediaElement.Position = TimeSpan.Zero;
+ }
+
+ private async void OnOpenFileClick(object? sender, RoutedEventArgs e)
+ {
+ if (Design.IsDesignMode)
+ return;
+
+ OpenFileDialog openFileDialog = new OpenFileDialog();
+ openFileDialog.AllowMultiple = false;
+
+ var result = await openFileDialog.ShowAsync(this);
+
+ if (result != null)
+ {
+ foreach (string filePath in result)
+ {
+ if (File.Exists(filePath))
+ {
+ source = filePath;
- var playerView = this.FindControl<FFmpegView>("playerView");
- var audioStreamDecoder = new BassAudioStreamDecoder();
- audioStreamDecoder.Headers = new Dictionary<string, string> { { "User-Agent", "ffmpeg_demo" } };
- playerView.SetAudioHandler(audioStreamDecoder);
- playerView.Play("http://vfx.mtime.cn/Video/2019/02/04/mp4/190204084208765161.mp4");
+ App.ViewModel?.MediaElement?.Play(filePath);
+ await Task.Delay(10);
+ App.ViewModel?.MediaElement?.Pause();
+ }
+ }
+ }
}
}
}
\ No newline at end of file
diff --git a/samples/FFmpegView.AvaloniaDemo/Models/MainWindowViewModel.cs b/samples/FFmpegView.AvaloniaDemo/Models/MainWindowViewModel.cs
new file mode 100644
index 0000000..8ec5211
--- /dev/null
+++ b/samples/FFmpegView.AvaloniaDemo/Models/MainWindowViewModel.cs
@@ -0,0 +1,89 @@
+using Avalonia.Controls;
+using FFmpegView.Bass;
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace FFmpegView.AvaloniaDemo.Models
+{
+ public class MainWindowViewModel: ViewModelBase
+ {
+ private FFmpegView mediaElement;
+ private double? position = 0;
+ private double positionStep = 0;
+ private TimeSpan? positionTime = TimeSpan.Zero;
+ private double playbackStartTime = 0.05;
+ private double playbackEndTime = 0.1;
+ private bool seekBarVisible = false;
+
+ public FFmpegView? MediaElement
+ {
+ get
+ {
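+ // Lazily resolve the FFmpegView control named "Media" from the main window on first access.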
+ if (mediaElement == null)
+ {
+ FFmpegView? media = App.MainWindow?.FindControl<FFmpegView>("Media");
+ mediaElement = media;
+ }
+
+ return mediaElement;
+ }
+ }
+
+ public TimeSpan? PositionTime
+ {
+ get => positionTime;
+ set
+ {
+ positionTime = value;
+ NotifyPropertyChanged(nameof(PositionTime));
+ }
+ }
+
+ public double? Position
+ {
+ get => position;
+ set
+ {
+ position = value;
+ NotifyPropertyChanged(nameof(Position));
+ }
+ }
+
+ public double PositionStep
+ {
+ get => positionStep;
+ set
+ {
+ positionStep = value;
+ NotifyPropertyChanged(nameof(PositionStep));
+ }
+ }
+
+ public double PlaybackStartTime
+ {
+ get => playbackStartTime;
+ set
+ {
+ playbackStartTime = value;
+ NotifyPropertyChanged(nameof(PlaybackStartTime));
+ }
+ }
+
+ public double PlaybackEndTime
+ {
+ get => playbackEndTime;
+ set
+ {
+ playbackEndTime = value;
+ NotifyPropertyChanged(nameof(PlaybackEndTime));
+ }
+ }
+
+ public bool SeekBarVisible
+ {
+ get => seekBarVisible;
+ set => SetProperty(ref seekBarVisible, value);
+ }
+ }
+}
diff --git a/samples/FFmpegView.AvaloniaDemo/Models/ViewModelBase.cs b/samples/FFmpegView.AvaloniaDemo/Models/ViewModelBase.cs
new file mode 100644
index 0000000..8079939
--- /dev/null
+++ b/samples/FFmpegView.AvaloniaDemo/Models/ViewModelBase.cs
@@ -0,0 +1,99 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Runtime.CompilerServices;
+using System.Threading.Tasks;
+using System.Linq;
+using ReactiveUI;
+
+namespace FFmpegView.AvaloniaDemo.Models
+{
+ public class ViewModelBase : ReactiveObject, INotifyPropertyChanged
+ {
+ public event PropertyChangedEventHandler PropertyChanged;
+
+ private readonly ConcurrentDictionary<string, bool> QueuedNotifications = new ConcurrentDictionary<string, bool>();
+ private readonly bool UseDeferredNotifications;
+
+ protected ViewModelBase() : this(false)
+ {
+ }
+
+ protected ViewModelBase(bool useDeferredNotifications)
+ {
+ UseDeferredNotifications = useDeferredNotifications;
+ }
+
+ /// <summary>
+ /// Checks if a property already matches a desired value. Sets the property and
+ /// notifies listeners only when necessary.
+ /// </summary>
+ /// <typeparam name="T">Type of the property.</typeparam>
+ /// <param name="storage">Reference to a property with both getter and setter.</param>
+ /// <param name="value">Desired value for the property.</param>
+ /// <param name="propertyName">Name of the property used to notify listeners. This
+ /// value is optional and can be provided automatically when invoked from compilers that
+ /// support CallerMemberName.</param>
+ /// <param name="notifyAlso">An array of property names to notify in addition to notifying the changes on the current property name.</param>
+ /// <returns>True if the value was changed, false if the existing value matched the
+ /// desired value.</returns>
+ protected bool SetProperty<T>(ref T storage, T value, [CallerMemberName] string propertyName = "", string[] notifyAlso = null)
+ {
+ if (EqualityComparer<T>.Default.Equals(storage, value))
+ return false;
+
+ storage = value;
+ NotifyPropertyChanged(propertyName, notifyAlso);
+ return true;
+ }
+
+ protected void NotifyPropertyChanged(params string[] propertyNames) => NotifyPropertyChanged(null, propertyNames);
+
+ private void NotifyPropertyChanged(string mainProperty, string[] auxiliaryProperties)
+ {
+ // Queue property notification
+ if (string.IsNullOrWhiteSpace(mainProperty) == false)
+ QueuedNotifications[mainProperty] = true;
+
+ // Set the state for notification properties
+ if (auxiliaryProperties != null)
+ {
+ foreach (var property in auxiliaryProperties)
+ {
+ if (string.IsNullOrWhiteSpace(property) == false)
+ QueuedNotifications[property] = true;
+ }
+ }
+
+ // Depending on operation mode, either fire the notifications in the background
+ // or fire them immediately
+ if (UseDeferredNotifications)
+ Task.Run(NotifyQueuedProperties);
+ else
+ NotifyQueuedProperties();
+ }
+
+ private void NotifyQueuedProperties()
+ {
+ // get a snapshot of property names.
+ var propertyNames = QueuedNotifications.Keys.ToArray();
+
+ // Iterate through the properties
+ foreach (var property in propertyNames)
+ {
+ // don't notify if we don't have a change
+ if (!QueuedNotifications[property]) continue;
+
+ // notify and reset queued state to false
+ try { OnPropertyChanged(property); }
+ finally { QueuedNotifications[property] = false; }
+ }
+ }
+
+ /// <summary>
+ /// Called when a property changes its backing value.
+ /// </summary>
+ /// <param name="propertyName">Name of the property.</param>
+ private void OnPropertyChanged(string propertyName) =>
+ PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName ?? string.Empty));
+ }
+}
diff --git a/samples/FFmpegView.AvaloniaDemo/Styles/Icons.axaml b/samples/FFmpegView.AvaloniaDemo/Styles/Icons.axaml
new file mode 100644
index 0000000..9b48e79
--- /dev/null
+++ b/samples/FFmpegView.AvaloniaDemo/Styles/Icons.axaml
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/samples/FFmpegView.AvaloniaDemo/ViewLocator.cs b/samples/FFmpegView.AvaloniaDemo/ViewLocator.cs
new file mode 100644
index 0000000..efd76b0
--- /dev/null
+++ b/samples/FFmpegView.AvaloniaDemo/ViewLocator.cs
@@ -0,0 +1,30 @@
+using System;
+using Avalonia.Controls;
+using Avalonia.Controls.Templates;
+using FFmpegView.AvaloniaDemo.Models;
+
+namespace FFmpegView
+{
+ public class ViewLocator : IDataTemplate
+ {
+ public IControl Build(object data)
+ {
+ var name = data.GetType().FullName!.Replace("ViewModel", "View");
+ var type = Type.GetType(name);
+
+ if (type != null)
+ {
+ return (Control)Activator.CreateInstance(type)!;
+ }
+ else
+ {
+ return new TextBlock { Text = "Not Found: " + name };
+ }
+ }
+
+ public bool Match(object data)
+ {
+ return data is ViewModelBase;
+ }
+ }
+}
diff --git a/src/FFmpegView.Avalonia/FFmpegView.cs b/src/FFmpegView.Avalonia/FFmpegView.cs
index 78f6e0c..e555597 100644
--- a/src/FFmpegView.Avalonia/FFmpegView.cs
+++ b/src/FFmpegView.Avalonia/FFmpegView.cs
@@ -2,15 +2,19 @@
using Avalonia.Controls;
using Avalonia.Controls.Metadata;
using Avalonia.Controls.Primitives;
+using Avalonia.Data;
using Avalonia.Logging;
using Avalonia.LogicalTree;
using Avalonia.Media;
using Avalonia.Media.Imaging;
using Avalonia.Platform;
using Avalonia.Threading;
+using FFmpegView.Avalonia;
using PCLUntils.Objects;
using System;
using System.Collections.Generic;
+using System.ComponentModel;
+using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
@@ -25,6 +29,7 @@ public unsafe class FFmpegView : TemplatedControl, IFFmpegView
private Task audioTask;
private Bitmap bitmap;
private bool _isAttached = false;
+ private bool _isRunning = true;
private readonly bool isInit = false;
private AudioStreamDecoder audio;
private readonly TimeSpan timeout;
@@ -32,6 +37,82 @@ public unsafe class FFmpegView : TemplatedControl, IFFmpegView
private CancellationTokenSource cancellationToken;
public static readonly StyledProperty<Stretch> StretchProperty =
AvaloniaProperty.Register<FFmpegView, Stretch>(nameof(Stretch), Stretch.Uniform);
+
+
+ private bool isOpen;
+ private bool isPlaying;
+ private bool isSeeking;
+ private bool hasMediaEnded;
+ private bool isStopped;
+
+ public bool IsOpen
+ {
+ get => isOpen;
+ private set
+ {
+ isOpen = value;
+ OnPropertyChanged(nameof(IsOpen));
+ }
+ }
+
+ public bool IsPlaying
+ {
+ get => isPlaying;
+ set
+ {
+ isPlaying = value;
+ OnPropertyChanged(nameof(IsPlaying));
+ }
+ }
+
+ public bool IsSeeking
+ {
+ get => isSeeking;
+ set
+ {
+ isSeeking = value;
+ OnPropertyChanged(nameof(IsSeeking));
+ }
+ }
+
+ public bool HasMediaEnded
+ {
+ get => hasMediaEnded;
+ set
+ {
+ hasMediaEnded = value;
+ OnPropertyChanged(nameof(HasMediaEnded));
+ }
+ }
+
+ public bool IsStopped
+ {
+ get => isStopped;
+ set
+ {
+ isStopped = value;
+ OnPropertyChanged(nameof(IsStopped));
+ }
+ }
+
+ public event PropertyChangedEventHandler PropertyChanged;
+ public event EventHandler<PositionChangedEventArgs> PositionChanged;
+
+ protected virtual void OnPropertyChanged(string propertyName)
+ {
+ PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
+ }
+
+ public Uri Source { get; private set; }
+
+ public void UpdateSource(Uri newSource) => Source = newSource;
+
+ public TimeSpan? PlaybackStartTime => video?.StartTime;
+
+ public TimeSpan? PlaybackEndTime => video?.Duration;
+
+ public MediaState State => video.State;
+
/// <summary>
/// Gets or sets a value controlling how the video will be stretched.
/// </summary>
@@ -68,6 +149,7 @@ static FFmpegView()
{
StretchProperty.Changed.AddClassHandler<FFmpegView>(OnStretchChange);
}
+
public void SetAudioHandler(AudioStreamDecoder decoder) => audio = decoder;
public void SetHeader(Dictionary<string, string> headers) => video.Headers = headers;
private static void OnStretchChange(FFmpegView sender, AvaloniaPropertyChangedEventArgs e)
@@ -83,6 +165,10 @@ public FFmpegView()
{
video = new VideoStreamDecoder();
video.Headers = new Dictionary<string, string> { { "User-Agent", "ffmpeg_demo" } };
+
+ //audio = new BassAudioStreamDecoder();
+ //audio.Headers = new Dictionary { { "User-Agent", "ffmpeg_demo" } };
+
timeout = TimeSpan.FromTicks(10000);
video.MediaCompleted += VideoMediaCompleted;
video.MediaMsgRecevice += Video_MediaMsgRecevice;
@@ -102,29 +188,97 @@ protected override void OnApplyTemplate(TemplateAppliedEventArgs e)
}
private void VideoMediaCompleted(TimeSpan duration) =>
Dispatcher.UIThread.InvokeAsync(DisplayVideoInfo);
- public double? Position => video?.Position.TotalSeconds;
+
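+ // Two-way Position styled property; OnPositionPropertyChanging coerces incoming values before they are applied.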
+ public static readonly StyledProperty<TimeSpan> PositionProperty = AvaloniaProperty.Register<FFmpegView, TimeSpan>(
+ nameof(Position), TimeSpan.Zero, false, BindingMode.TwoWay, null, (o, v) => OnPositionPropertyChanging(o, v), null);
+
+ public TimeSpan? Position
+ {
+ get => (TimeSpan?)GetValue(PositionProperty);
+ set
+ {
+ SetValue(PositionProperty, value);
+ }
+ }
+
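+ // Pushes the decoder's current position into the Position property and raises PositionChanged on the UI thread, skipping redundant or end-of-stream updates.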
+ private void ReportPlaybackPosition() => ReportPlaybackPosition(video.Position);
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private void ReportPlaybackPosition(TimeSpan newPosition)
+ {
+ Dispatcher.UIThread.InvokeAsync(() =>
+ {
+ var oldPosition = Position;
+ if (oldPosition?.Ticks == newPosition.Ticks || (newPosition.TotalSeconds > 0 && newPosition.TotalSeconds >= PlaybackEndTime?.TotalSeconds))
+ return;
+
+ Position = newPosition;
+ PositionChanged?.Invoke(this, new PositionChangedEventArgs(oldPosition ?? default, newPosition));
+ });
+ }
+
+ private static TimeSpan OnPositionPropertyChanging(IAvaloniaObject d, TimeSpan value)
+ {
+ if (d == null || d is FFmpegView == false) return value;
+
+ var element = (FFmpegView)d;
+
+ if (!element.IsOpen)
+ return TimeSpan.Zero;
+
+ if (!element.IsSeeking)
+ return value;
+
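+ // While seeking, clamp the requested position to the valid playback range, then pause and seek the decoders.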
+ var targetSeek = (TimeSpan)value;
+ var minTarget = element.PlaybackStartTime ?? TimeSpan.Zero;
+ var maxTarget = element.PlaybackEndTime ?? TimeSpan.Zero;
+ var hasValidTarget = maxTarget > minTarget;
+
+ if (hasValidTarget)
+ {
+ targetSeek = targetSeek.Clamp(minTarget, maxTarget);
+
+ element?.Pause();
+ element?.SeekTo(targetSeek);
+ }
+ else
+ {
+ targetSeek = element.Position.Value;
+ }
+
+ return targetSeek;
+ }
+
public bool Play()
{
- bool state = false;
+ bool isPlaying = false;
+
try
{
- state = video.Play();
+ isPlaying = IsPlaying = video.Play();
+ //IsSeeking = false;
+ IsStopped = false;
+ HasMediaEnded = false;
audio?.Play();
}
catch (Exception ex)
{
Logger.TryGet(LogEventLevel.Error, LogArea.Control)?.Log(this, ex.Message);
}
- return state;
+
+ return isPlaying;
}
+
+ public bool Play(string uri, Dictionary<string, string> headers = null)
{
+ bool isPlaying = false;
+
if (!isInit)
{
Logger.TryGet(LogEventLevel.Error, LogArea.Control)?.Log(this, "FFmpeg : dosnot initialize device");
return false;
}
- bool state = false;
+
try
{
if (video.State == MediaState.None)
@@ -134,21 +288,28 @@ public bool Play(string uri, Dictionary<string, string> headers = null)
audio?.InitDecodecAudio(uri);
audio?.Prepare();
DisplayVideoInfo();
+
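+ // Record the media source and mark the control as open once the video decoder reports it is initialized.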
+ UpdateSource(new Uri(uri));
+ IsOpen = video?.IsInitialized ?? false;
}
- state = video.Play();
+
+ IsPlaying = isPlaying = video.Play();
+ IsStopped = false;
audio?.Play();
}
catch (Exception ex)
{
Logger.TryGet(LogEventLevel.Error, LogArea.Control)?.Log(this, ex.Message);
}
- return state;
+
+ return isPlaying;
}
- public bool SeekTo(int seekTime)
+
+ public bool SeekTo(TimeSpan seekTime)
{
try
{
- audio?.SeekProgress(seekTime);
+ _ = audio?.SeekProgress(seekTime);
return video.SeekProgress(seekTime);
}
catch (Exception ex)
@@ -157,12 +318,18 @@ public bool SeekTo(int seekTime)
return false;
}
}
+
public bool Pause()
{
try
{
audio?.Pause();
- return video.Pause();
+ bool isPaused = video.Pause();
+
+ IsPlaying = false;
+ IsStopped = false;
+
+ return isPaused;
}
catch (Exception ex)
{
@@ -175,7 +342,10 @@ public bool Stop()
try
{
audio?.Stop();
- video.Stop();
+ bool isStopped = video.Stop();
+ IsPlaying = false;
+ IsSeeking = false;
+ ReportPlaybackPosition();
return true;
}
catch (Exception ex)
@@ -189,45 +359,21 @@ bool Init()
try
{
cancellationToken = new CancellationTokenSource();
- playTask = new Task(() =>
- {
- while (true)
- {
- try
- {
- if (video.IsPlaying && _isAttached)
- {
- if (video.TryReadNextFrame(out var frame))
- {
- var convertedFrame = video.FrameConvert(&frame);
- bitmap?.Dispose();
- bitmap = new Bitmap(PixelFormat.Bgra8888, AlphaFormat.Premul, (IntPtr)convertedFrame.data[0], new PixelSize(video.FrameWidth, video.FrameHeight), new Vector(96, 96), convertedFrame.linesize[0]);
- Dispatcher.UIThread.InvokeAsync(() =>
- {
- if (image.IsNotEmpty())
- image.Source = bitmap;
- });
- }
- }
- Thread.Sleep(10);
- }
- catch (Exception ex)
- {
- Logger.TryGet(LogEventLevel.Error, LogArea.Control)?.Log(this, ex.Message);
- }
- }
- }, cancellationToken.Token);
+ playTask = new Task(DrawImage, cancellationToken.Token);
playTask.Start();
audioTask = new Task(() =>
{
- while (true)
+ while (_isRunning)
{
try
{
if (audio?.IsPlaying == true)
{
if (audio?.TryPlayNextFrame() == true)
- Thread.Sleep(audio.frameDuration.Subtract(timeout));
+ {
+ Thread.Sleep(audio.FrameDuration.Subtract(timeout));
+ ReportPlaybackPosition();
+ }
}
else
Thread.Sleep(10);
@@ -238,7 +384,9 @@ bool Init()
}
}
}, cancellationToken.Token);
+
audioTask.Start();
+
return true;
}
catch (Exception ex)
@@ -247,6 +395,49 @@ bool Init()
return false;
}
}
+
+
+#if NET40_OR_GREATER
+ [SecurityCritical]
+ [HandleProcessCorruptedStateExceptions]
+#endif
+
+ private void DrawImage()
+ {
+ while (_isRunning)
+ {
+ try
+ {
+ if (video.IsPlaying && _isAttached)
+ {
+ if (video.TryReadNextFrame(out var frame))
+ {
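+ // Convert the decoded frame for display, wrap it in an Avalonia Bitmap, and hand it to the Image control on the UI thread.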
+ var convertedFrame = video.FrameConvert(&frame);
+ bitmap?.Dispose();
+ bitmap = new Bitmap(PixelFormat.Bgra8888, AlphaFormat.Premul, (IntPtr)convertedFrame.data[0], new PixelSize(video.FrameWidth, video.FrameHeight), new Vector(96, 96), convertedFrame.linesize[0]);
+
+ Dispatcher.UIThread.InvokeAsync(() =>
+ {
+ if (image.IsNotEmpty())
+ image.Source = bitmap;
+ });
+
+ ReportPlaybackPosition();
+ Thread.Sleep(video.FrameDuration.Subtract(timeout));
+ }
+ }
+ else
+ {
+ Thread.Sleep(10);
+ }
+ }
+ catch (Exception ex)
+ {
+ Logger.TryGet(LogEventLevel.Error, LogArea.Control)?.Log(this, ex.Message);
+ }
+ }
+ }
+
#region Video Info
private string codec;
public string Codec => codec;
diff --git a/src/FFmpegView.Avalonia/MediaUtil.cs b/src/FFmpegView.Avalonia/MediaUtil.cs
new file mode 100644
index 0000000..678336e
--- /dev/null
+++ b/src/FFmpegView.Avalonia/MediaUtil.cs
@@ -0,0 +1,33 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace FFmpegView.Avalonia
+{
+ internal static class MediaUtil
+ {
+ internal static T Clamp<T>(this T value, T min, T max)
+ where T : struct, IComparable
+ {
+ switch (value)
+ {
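+ // TimeSpan values are compared by ticks; other IComparable structs fall back to CompareTo.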
+ case TimeSpan v:
+ {
+ var minT = (TimeSpan)(object)min;
+ var maxT = (TimeSpan)(object)max;
+
+ if (v.Ticks > maxT.Ticks) return max;
+ if (v.Ticks < minT.Ticks) return min;
+
+ return value;
+ }
+
+ default:
+ {
+ if (value.CompareTo(min) < 0) return min;
+ return value.CompareTo(max) > 0 ? max : value;
+ }
+ }
+ }
+ }
+}
diff --git a/src/FFmpegView.Avalonia/PositionChangedEventArgs.cs b/src/FFmpegView.Avalonia/PositionChangedEventArgs.cs
new file mode 100644
index 0000000..efc1727
--- /dev/null
+++ b/src/FFmpegView.Avalonia/PositionChangedEventArgs.cs
@@ -0,0 +1,34 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace FFmpegView.Avalonia
+{
+ /// <summary>
+ /// Contains the position changed routed event args.
+ /// </summary>
+ public class PositionChangedEventArgs : EventArgs
+ {
+ /// <summary>
+ /// Initializes a new instance of the <see cref="PositionChangedEventArgs"/> class.
+ /// </summary>
+ /// <param name="oldPosition">The old position.</param>
+ /// <param name="newPosition">The new position.</param>
+ internal PositionChangedEventArgs(TimeSpan oldPosition, TimeSpan newPosition)
+ {
+ Position = newPosition;
+ OldPosition = oldPosition;
+ }
+
+ /// <summary>
+ /// Gets the current position.
+ /// </summary>
+ public TimeSpan Position { get; }
+
+ /// <summary>
+ /// Gets the old position.
+ /// </summary>
+ public TimeSpan OldPosition { get; }
+ }
+}
diff --git a/src/FFmpegView.Bass/BassAudioStreamDecoder.cs b/src/FFmpegView.Bass/BassAudioStreamDecoder.cs
index f91bf1f..515b919 100644
--- a/src/FFmpegView.Bass/BassAudioStreamDecoder.cs
+++ b/src/FFmpegView.Bass/BassAudioStreamDecoder.cs
@@ -6,25 +6,38 @@ public unsafe class BassAudioStreamDecoder : AudioStreamDecoder
{
private Errors error;
private int decodeStream;
+ private bool isPausedOrStopped;
public Errors LastError => error;
+
+
public override void PauseCore()
{
- ManagedBass.Bass.ChannelPause(decodeStream);
+ isPausedOrStopped = ManagedBass.Bass.ChannelPause(decodeStream);
}
+
public override void StopCore()
{
- ManagedBass.Bass.ChannelStop(decodeStream);
+ isPausedOrStopped = ManagedBass.Bass.ChannelStop(decodeStream);
}
+
public override void Prepare()
{
if (decodeStream != 0)
ManagedBass.Bass.StreamFree(decodeStream);
+
decodeStream = ManagedBass.Bass.CreateStream(SampleRate, Channels, BassFlags.Mono, StreamProcedureType.Push);
if (!ManagedBass.Bass.ChannelPlay(decodeStream, true))
error = ManagedBass.Bass.LastError;
}
+
public override void PlayNextFrame(byte[] bytes)
{
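+ // Resume the Bass channel if playback was paused or stopped before pushing new sample data.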
+ if (isPausedOrStopped)
+ {
+ ManagedBass.Bass.ChannelPlay(decodeStream);
+ isPausedOrStopped = false;
+ }
+
if (ManagedBass.Bass.StreamPutData(decodeStream, bytes, bytes.Length) == -1)
error = ManagedBass.Bass.LastError;
}
diff --git a/src/FFmpegView.Bass/BassCore.cs b/src/FFmpegView.Bass/BassCore.cs
index 845169e..93bc2a0 100644
--- a/src/FFmpegView.Bass/BassCore.cs
+++ b/src/FFmpegView.Bass/BassCore.cs
@@ -29,8 +29,7 @@ private static bool InitDll()
dllPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "libbass.so");
if (!File.Exists(dllPath))
{
- var platform = $"linux-{PlantformUntils.ArchitectureString}";
- sourceFileName = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "libBass", platform, "libbass.so");
+ sourceFileName = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Lib", "libBass", "linux", "libbass.so");
}
break;
}
@@ -38,23 +37,16 @@ private static bool InitDll()
{
dllPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "libbass.dylib");
if (!File.Exists(dllPath))
- sourceFileName = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "libBass", "osx", "libbass.dylib");
+ sourceFileName = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Lib", "libBass", "osx", "libbass.dylib");
break;
}
case Platforms.Windows:
{
dllPath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "bass.dll");
+
if (!File.Exists(dllPath))
- {
- var platform = $"win-{PlantformUntils.ArchitectureString}";
- if (platform.Equals("win-arm", StringComparison.CurrentCultureIgnoreCase))
- {
- canInit = false;
- Debug.WriteLine("Bass cannot run in win-arm platform.Stop init.");
- }
- else
- sourceFileName = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "libBass", platform, "bass.dll");
- }
+ sourceFileName = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Lib", "libBass", "win", "bass.dll");
+
break;
}
}
diff --git a/src/FFmpegView.Shared/AudioStreamDecoder.cs b/src/FFmpegView.Shared/AudioStreamDecoder.cs
index 485ca58..4f4d4cc 100644
--- a/src/FFmpegView.Shared/AudioStreamDecoder.cs
+++ b/src/FFmpegView.Shared/AudioStreamDecoder.cs
@@ -9,6 +9,7 @@ namespace FFmpegView
{
public unsafe abstract class AudioStreamDecoder : IMedia
{
+ int error;
byte* bufferPtr;
IntPtr audioBuffer;
AVFormatContext* format;
@@ -24,7 +25,7 @@ public unsafe abstract class AudioStreamDecoder : IMedia
readonly object syncLock = new object();
readonly Stopwatch clock = new Stopwatch();
public MediaState State { get; private set; }
- public TimeSpan frameDuration { get; private set; }
+ public TimeSpan FrameDuration { get; private set; }
public TimeSpan Duration { get; private set; }
public TimeSpan Position => OffsetClock + clock.Elapsed;
public string CodecName { get; private set; }
@@ -38,50 +39,59 @@ public unsafe abstract class AudioStreamDecoder : IMedia
public Dictionary<string, string> Headers { get; set; }
public event MediaHandler MediaCompleted;
public event MediaMsgHandler MediaMsgRecevice;
+
public AudioStreamDecoder()
{
Headers = new Dictionary<string, string>();
}
+
public bool InitDecodecAudio(string path)
{
try
{
- int error = 0;
format = ffmpeg.avformat_alloc_context();
var tempFormat = format;
AVDictionary* options = Headers.ToHeader();
error = ffmpeg.avformat_open_input(&tempFormat, path, null, &options);
+
if (error < 0)
{
SendMsg(MsgType.Information, "Failed to open media file");
return false;
}
+
ffmpeg.avformat_find_stream_info(format, null);
AVCodec* codec;
audioStreamIndex = ffmpeg.av_find_best_stream(format, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &codec, 0);
+
if (audioStreamIndex < 0)
{
SendMsg(MsgType.Information, "No audio stream found");
return false;
}
+
audioStream = format->streams[audioStreamIndex];
codecContext = ffmpeg.avcodec_alloc_context3(codec);
error = ffmpeg.avcodec_parameters_to_context(codecContext, audioStream->codecpar);
+
if (error < 0)
SendMsg(MsgType.Information, "Setting decoder parameters failed");
+
error = ffmpeg.avcodec_open2(codecContext, codec, null);
Duration = TimeSpan.FromMilliseconds(format->duration / 1000);
CodecId = codec->id.ToString();
CodecName = ffmpeg.avcodec_get_name(codec->id);
Bitrate = codecContext->bit_rate;
- var channelLayout = codecContext->ch_layout;
- Channels = (&channelLayout)->nb_channels;
+ long channelLayout = unchecked((long)codecContext->channel_layout);
+ Channels = codecContext->channels;
SampleRate = codecContext->sample_rate;
SampleFormat = codecContext->sample_fmt;
BitsPerSample = ffmpeg.av_samples_get_buffer_size(null, 2, codecContext->frame_size, AVSampleFormat.AV_SAMPLE_FMT_S16, 1);
audioBuffer = Marshal.AllocHGlobal((int)BitsPerSample);
bufferPtr = (byte*)audioBuffer;
+
InitConvert(channelLayout, AVSampleFormat.AV_SAMPLE_FMT_S16, SampleRate, channelLayout, SampleFormat, SampleRate);
+
packet = ffmpeg.av_packet_alloc();
frame = ffmpeg.av_frame_alloc();
State = MediaState.Read;
@@ -93,15 +103,18 @@ public bool InitDecodecAudio(string path)
}
return false;
}
- bool InitConvert(AVChannelLayout occ, AVSampleFormat osf, int osr, AVChannelLayout icc, AVSampleFormat isf, int isr)
+
+ bool InitConvert(long occ, AVSampleFormat osf, int osr, long icc, AVSampleFormat isf, int isr)
{
try
{
convert = ffmpeg.swr_alloc();
var tempConvert = convert;
- ffmpeg.swr_alloc_set_opts2(&tempConvert, &occ, osf, osr, &icc, isf, isr, 0, null);
+ ffmpeg.swr_alloc_set_opts(tempConvert, occ, osf, osr, icc, isf, isr, 0, null);
+
if (convert == null)
return false;
+
ffmpeg.swr_init(convert);
return true;
}
@@ -111,6 +124,7 @@ bool InitConvert(AVChannelLayout occ, AVSampleFormat osf, int osr, AVChannelLayo
}
return false;
}
+
bool TryReadNextFrame(out AVFrame outFrame)
{
try
@@ -122,7 +136,7 @@ bool TryReadNextFrame(out AVFrame outFrame)
}
else
{
- if (Position - lastTime >= frameDuration)
+ if (Position - lastTime >= FrameDuration)
{
lastTime = Position;
isNextFrame = true;
@@ -133,15 +147,15 @@ bool TryReadNextFrame(out AVFrame outFrame)
return false;
}
}
- if (isNextFrame)
+ if (isNextFrame && error >= 0)
{
lock (syncLock)
{
int result = -1;
ffmpeg.av_frame_unref(frame);
+
while (true)
{
- ffmpeg.av_packet_unref(packet);
result = ffmpeg.av_read_frame(format, packet);
if (result == ffmpeg.AVERROR_EOF || result < 0)
{
@@ -149,13 +163,19 @@ bool TryReadNextFrame(out AVFrame outFrame)
StopPlay();
return false;
}
+
if (packet->stream_index != audioStreamIndex)
continue;
+
ffmpeg.avcodec_send_packet(codecContext, packet);
result = ffmpeg.avcodec_receive_frame(codecContext, frame);
+
if (result < 0) continue;
- frameDuration = TimeSpan.FromTicks((long)Math.Round(TimeSpan.TicksPerMillisecond * 1000d * frame->nb_samples / frame->sample_rate, 0));
+
+ FrameDuration = TimeSpan.FromTicks((long)Math.Round(TimeSpan.TicksPerMillisecond * 1000d * frame->nb_samples / frame->sample_rate, 0));
outFrame = *frame;
+ ffmpeg.av_packet_unref(packet);
+
return true;
}
}
@@ -181,10 +201,13 @@ bool StopPlay()
{
if (State == MediaState.None)
return false;
+
IsPlaying = false;
OffsetClock = TimeSpan.FromSeconds(0);
+
clock.Reset();
clock.Stop();
+
var tempFormat = format;
ffmpeg.avformat_free_context(tempFormat);
format = null;
@@ -199,8 +222,9 @@ bool StopPlay()
Marshal.FreeHGlobal(audioBuffer);
bufferPtr = null;
audioStream = null;
+ error = -1;
audioStreamIndex = -1;
- Duration = TimeSpan.FromMilliseconds(0);
+ //Duration = TimeSpan.FromMilliseconds(0);
CodecName = string.Empty;
CodecId = string.Empty;
Channels = 0;
@@ -211,29 +235,40 @@ bool StopPlay()
lastTime = TimeSpan.Zero;
Invoke(Duration);
}
+
return true;
}
catch (Exception ex)
{
SendMsg(MsgType.Error, ex.Message);
}
+
return false;
}
- public bool SeekProgress(int seekTime)
+
+ public bool SeekProgress(TimeSpan seekTime)
{
try
{
- if (format == null || audioStreamIndex == -1)
+ if (format == null || error < 0)
return false;
+
lock (syncLock)
{
- IsPlaying = false;
clock.Stop();
- var timestamp = seekTime / ffmpeg.av_q2d(audioStream->time_base);
- ffmpeg.av_seek_frame(format, audioStreamIndex, (long)timestamp, ffmpeg.AVSEEK_FLAG_BACKWARD | ffmpeg.AVSEEK_FLAG_FRAME);
+ clock.Reset();
+ State = MediaState.IsSeeking;
+ IsPlaying = false;
+
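+ // Convert the requested TimeSpan into the audio stream's time_base units before calling av_seek_frame.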
+ var timeBase = audioStream->time_base;
+ var timestamp = seekTime.ToLong(timeBase);
+
+ //seekTime / ffmpeg.av_q2d(audioStream->time_base);
+ ffmpeg.av_seek_frame(format, audioStreamIndex, timestamp, ffmpeg.AVSEEK_FLAG_BACKWARD | ffmpeg.AVSEEK_FLAG_FRAME);
ffmpeg.av_frame_unref(frame);
ffmpeg.av_packet_unref(packet);
int error = 0;
+
while (packet->pts < timestamp)
{
do
@@ -244,16 +279,20 @@ public bool SeekProgress(int seekTime)
error = ffmpeg.av_read_frame(format, packet);
if (error == ffmpeg.AVERROR_EOF)
return false;
- } while (packet->stream_index != audioStreamIndex);
+ }
+ while (packet->stream_index != audioStreamIndex);
+
ffmpeg.avcodec_send_packet(codecContext, packet);
error = ffmpeg.avcodec_receive_frame(codecContext, frame);
- } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
+
+ }
+ while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
}
- OffsetClock = TimeSpan.FromSeconds(seekTime);
- clock.Restart();
- IsPlaying = true;
+
+ OffsetClock = seekTime;
lastTime = TimeSpan.Zero;
}
+
return true;
}
catch (Exception ex)
@@ -271,6 +310,7 @@ byte[] FrameConvertBytes(AVFrame* sourceFrame)
var outPutBufferLength = ffmpeg.av_samples_get_buffer_size(null, 2, outputSamplesPerChannel, AVSampleFormat.AV_SAMPLE_FMT_S16, 1);
if (outputSamplesPerChannel < 0)
return null;
+
byte[] bytes = new byte[outPutBufferLength];
Marshal.Copy(audioBuffer, bytes, 0, bytes.Length);
return bytes;
@@ -281,12 +321,14 @@ byte[] FrameConvertBytes(AVFrame* sourceFrame)
}
return null;
}
+
public bool Play()
{
try
{
if (State == MediaState.Play)
return false;
+
clock.Start();
IsPlaying = true;
State = MediaState.Play;
@@ -298,18 +340,22 @@ public bool Play()
}
return false;
}
+
public bool Pause()
{
try
{
PauseCore();
+
if (State != MediaState.Play)
return false;
- IsPlaying = false;
- OffsetClock = clock.Elapsed;
+
clock.Stop();
- clock.Reset();
+ IsPlaying = false;
+ //OffsetClock = clock.Elapsed;
+ //clock.Reset();
State = MediaState.Pause;
+
return true;
}
catch (Exception ex)
@@ -332,8 +378,8 @@ public bool TryPlayNextFrame()
if (bytes != null)
{
PlayNextFrame(bytes);
- return true;
- }
+ return true;
+ }
}
return false;
}
diff --git a/src/FFmpegView.Shared/Ext.cs b/src/FFmpegView.Shared/Ext.cs
index 0722adf..c0c6b1c 100644
--- a/src/FFmpegView.Shared/Ext.cs
+++ b/src/FFmpegView.Shared/Ext.cs
@@ -1,7 +1,6 @@
using FFmpeg.AutoGen;
using System;
using System.Collections.Generic;
-using System.Runtime.InteropServices;
using System.Text;
namespace FFmpegView
@@ -20,18 +19,25 @@ public static class Ext
ffmpeg.av_dict_set(&options, "headers", builder.ToString(), 0);
return options;
}
- public static unsafe string av_strerror(int error)
+
+ public static long ToLong(this TimeSpan ts, AVRational timeBase)
+ {
+ return Convert.ToInt64(ts.TotalSeconds * timeBase.den / timeBase.num); // (secs) * (units) / (secs) = (units)
+ }
+
+ public static TimeSpan ToTimeSpan(this long pts, AVRational timeBase)
{
- var bufferSize = 1024;
- var buffer = stackalloc byte[bufferSize];
- ffmpeg.av_strerror(error, buffer, (ulong)bufferSize);
- return Marshal.PtrToStringAnsi((IntPtr)buffer);
+ return Convert.ToDouble(pts).ToTimeSpan(timeBase);
}
- public static int ThrowExceptionIfError(this int error)
+
+ public static TimeSpan ToTimeSpan(this double pts, AVRational timeBase)
{
- if (error < 0)
- throw new ApplicationException(av_strerror(error));
- return error;
+ if (double.IsNaN(pts) || Math.Abs(pts - ffmpeg.AV_NOPTS_VALUE) <= double.Epsilon)
+ return TimeSpan.MinValue;
+
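+ // Fall back to AV_TIME_BASE units when the stream time base is unknown (den == 0).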
+ return TimeSpan.FromTicks(timeBase.den == 0 ?
+ Convert.ToInt64(TimeSpan.TicksPerMillisecond * 1000 * pts / ffmpeg.AV_TIME_BASE) :
+ Convert.ToInt64(TimeSpan.TicksPerMillisecond * 1000 * pts * timeBase.num / timeBase.den));
}
}
}
\ No newline at end of file
diff --git a/src/FFmpegView.Shared/IFFmpegView.cs b/src/FFmpegView.Shared/IFFmpegView.cs
index 1445f9d..64d2be2 100644
--- a/src/FFmpegView.Shared/IFFmpegView.cs
+++ b/src/FFmpegView.Shared/IFFmpegView.cs
@@ -1,4 +1,5 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
namespace FFmpegView
{
@@ -7,7 +8,7 @@ public interface IFFmpegView
bool Play();
bool Stop();
bool Pause();
- bool SeekTo(int seekTime);
+ bool SeekTo(TimeSpan seekTime);
void SetHeader(Dictionary<string, string> headers);
bool Play(string uri, Dictionary<string, string> headers = null);
}
diff --git a/src/FFmpegView.Shared/MediaState.cs b/src/FFmpegView.Shared/MediaState.cs
index 9e73c65..0853dd9 100644
--- a/src/FFmpegView.Shared/MediaState.cs
+++ b/src/FFmpegView.Shared/MediaState.cs
@@ -6,5 +6,7 @@ public enum MediaState
Read,
Play,
Pause,
+ IsSeeking,
+ IsSeekable
}
}
\ No newline at end of file
diff --git a/src/FFmpegView.Shared/VideoStreamDecoder.cs b/src/FFmpegView.Shared/VideoStreamDecoder.cs
index 8c233f2..7e349b5 100644
--- a/src/FFmpegView.Shared/VideoStreamDecoder.cs
+++ b/src/FFmpegView.Shared/VideoStreamDecoder.cs
@@ -38,14 +38,20 @@ public sealed unsafe class VideoStreamDecoder : IMedia
public int FrameWidth { get; private set; }
public int FrameHeight { get; private set; }
public bool IsPlaying { get; private set; }
+ public bool IsInitialized { get; private set; }
public MediaState State { get; private set; }
public TimeSpan Position => clock.Elapsed + OffsetClock;
- public TimeSpan frameDuration { get; private set; }
+
+ public TimeSpan FrameDuration { get; private set; }
+
+ public TimeSpan StartTime { get; private set; }
#endregion
+
public VideoStreamDecoder()
{
Headers = new Dictionary<string, string>();
}
+
public void InitDecodecVideo(string uri)
{
try
@@ -57,6 +63,7 @@ public void InitDecodecVideo(string uri)
SendMsg(MsgType.Information, "Failed to create media format (container)");
return;
}
+
var tempFormat = format;
AVDictionary* options = Headers.ToHeader();
error = ffmpeg.avformat_open_input(&tempFormat, uri, null, &options);
@@ -65,14 +72,17 @@ public void InitDecodecVideo(string uri)
SendMsg(MsgType.Information, "Failed to open video");
return;
}
+
ffmpeg.avformat_find_stream_info(format, null);
AVCodec* codec = null;
videoStreamIndex = ffmpeg.av_find_best_stream(format, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0);
+
if (videoStreamIndex < 0)
{
SendMsg(MsgType.Information, "No video stream found");
return;
}
+
videoStream = format->streams[videoStreamIndex];
codecContext = ffmpeg.avcodec_alloc_context3(codec);
error = ffmpeg.avcodec_parameters_to_context(codecContext, videoStream->codecpar);
@@ -81,25 +91,32 @@ public void InitDecodecVideo(string uri)
SendMsg(MsgType.Information, "Failed to set decoder parameters");
return;
}
+
error = ffmpeg.avcodec_open2(codecContext, codec, null);
if (error < 0)
{
SendMsg(MsgType.Information, "Failed to open decoder");
return;
}
- Duration = TimeSpan.FromMilliseconds(format->duration / 1000);
+
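+ // Derive start time and duration from the video stream's own time base rather than the container duration.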
+ StartTime = videoStream->start_time.ToTimeSpan(videoStream->time_base);
+ Duration = videoStream->duration.ToTimeSpan(videoStream->time_base);
CodecId = videoStream->codecpar->codec_id.ToString();
CodecName = ffmpeg.avcodec_get_name(videoStream->codecpar->codec_id);
Bitrate = (int)videoStream->codecpar->bit_rate;
FrameRate = ffmpeg.av_q2d(videoStream->r_frame_rate);
- FrameWidth = videoStream->codecpar->width;
- FrameHeight = videoStream->codecpar->height;
- frameDuration = TimeSpan.FromMilliseconds(1000 / FrameRate);
+ FrameWidth = codecContext->width;
+ FrameHeight = codecContext->height;
+ FrameDuration = TimeSpan.FromMilliseconds(1000 / FrameRate);
+
var result = InitConvert(FrameWidth, FrameHeight, codecContext->pix_fmt, FrameWidth, FrameHeight, AVPixelFormat.AV_PIX_FMT_BGR0);
if (result)
{
packet = ffmpeg.av_packet_alloc();
frame = ffmpeg.av_frame_alloc();
+
+ IsInitialized = true;
+ State = MediaState.Read;
}
}
catch (Exception ex)
@@ -107,6 +124,7 @@ public void InitDecodecVideo(string uri)
SendMsg(MsgType.Error, $"FFmpeg InitDecodecVideo Failed: {ex.Message}");
}
}
+
private void SendMsg(MsgType type, string msg) => MediaMsgRecevice?.Invoke(type, msg);
private bool InitConvert(int sourceWidth, int sourceHeight, AVPixelFormat sourceFormat, int targetWidth, int targetHeight, AVPixelFormat targetFormat)
{
@@ -118,11 +136,13 @@ private bool InitConvert(int sourceWidth, int sourceHeight, AVPixelFormat source
SendMsg(MsgType.Information, "Failed to create converter");
return false;
}
+
var bufferSize = ffmpeg.av_image_get_buffer_size(targetFormat, targetWidth, targetHeight, 1);
FrameBufferPtr = Marshal.AllocHGlobal(bufferSize);
TargetData = new byte_ptrArray4();
TargetLinesize = new int_array4();
ffmpeg.av_image_fill_arrays(ref TargetData, ref TargetLinesize, (byte*)FrameBufferPtr, targetFormat, targetWidth, targetHeight, 1);
+
return true;
}
catch (Exception ex)
@@ -131,6 +151,7 @@ private bool InitConvert(int sourceWidth, int sourceHeight, AVPixelFormat source
return false;
}
}
+
public AVFrame FrameConvert(AVFrame* sourceFrame)
{
ffmpeg.sws_scale(convert, sourceFrame->data, sourceFrame->linesize, 0, sourceFrame->height, TargetData, TargetLinesize);
@@ -138,6 +159,7 @@ public AVFrame FrameConvert(AVFrame* sourceFrame)
data.UpdateFrom(TargetData);
var linesize = new int_array8();
linesize.UpdateFrom(TargetLinesize);
+
return new AVFrame
{
data = data,
@@ -146,6 +168,7 @@ public AVFrame FrameConvert(AVFrame* sourceFrame)
height = FrameHeight
};
}
+
public bool TryReadNextFrame(out AVFrame outFrame)
{
try
@@ -157,7 +180,7 @@ public bool TryReadNextFrame(out AVFrame outFrame)
}
else
{
- if (Position - lastTime >= frameDuration)
+ if (Position - lastTime >= FrameDuration)
{
lastTime = Position;
isNextFrame = true;
@@ -207,13 +230,15 @@ public bool TryReadNextFrame(out AVFrame outFrame)
return false;
}
}
- private void StopPlay()
+
+ private bool StopPlay()
{
try
{
lock (SyncLock)
{
- if (State == MediaState.None) return;
+ if (State == MediaState.None) return false;
+
IsPlaying = false;
OffsetClock = TimeSpan.FromSeconds(0);
clock.Reset();
@@ -231,7 +256,7 @@ private void StopPlay()
ffmpeg.sws_freeContext(convert);
videoStream = null;
videoStreamIndex = -1;
- Duration = TimeSpan.FromMilliseconds(0);
+ //Duration = TimeSpan.FromMilliseconds(0);
CodecName = string.Empty;
CodecId = string.Empty;
Bitrate = 0;
@@ -243,28 +268,37 @@ private void StopPlay()
lastTime = TimeSpan.Zero;
MediaCompleted?.Invoke(Duration);
}
+
+ return true;
}
catch (Exception ex)
{
SendMsg(MsgType.Information, $"FFmpeg : Failed to stop ({ex.Message})");
}
+
+ return false;
}
- public bool SeekProgress(int seekTime)
+ public bool SeekProgress(TimeSpan seekTime)
{
try
{
if (format == null || videoStream == null)
return false;
+
lock (SyncLock)
{
- IsPlaying = false;
clock.Stop();
- var timestamp = seekTime / ffmpeg.av_q2d(videoStream->time_base);
+ clock.Reset();
+
+ var timeBase = videoStream->time_base;
+ var timestamp = seekTime.ToLong(timeBase);
+
ffmpeg.av_seek_frame(format, videoStreamIndex, (long)timestamp, ffmpeg.AVSEEK_FLAG_BACKWARD | ffmpeg.AVSEEK_FLAG_FRAME);
ffmpeg.av_frame_unref(frame);
ffmpeg.av_packet_unref(packet);
int error = 0;
receiveFrame();
+
void receiveFrame()
{
while (packet->pts < timestamp)
@@ -275,17 +309,20 @@ void receiveFrame()
{
ffmpeg.av_packet_unref(packet);
error = ffmpeg.av_read_frame(format, packet);
+
if (error == ffmpeg.AVERROR_EOF)
return;
+
} while (packet->stream_index != videoStreamIndex);
+
ffmpeg.avcodec_send_packet(codecContext, packet);
error = ffmpeg.avcodec_receive_frame(codecContext, frame);
+
} while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
}
}
- OffsetClock = TimeSpan.FromSeconds(seekTime);
- clock.Restart();
- IsPlaying = true;
+
+ OffsetClock = seekTime;
lastTime = TimeSpan.Zero;
}
return true;
@@ -296,6 +333,7 @@ void receiveFrame()
return false;
}
}
+
public bool Play()
{
try
@@ -305,6 +343,7 @@ public bool Play()
SendMsg(MsgType.Information, "FFmpeg : dosnot initialize device");
return false;
}
+
if (State != MediaState.Play)
{
clock.Start();
@@ -320,6 +359,7 @@ public bool Play()
return false;
}
}
+
public bool Pause()
{
try
@@ -341,11 +381,12 @@ public bool Pause()
return false;
}
}
- public void Stop()
+ public bool Stop()
{
if (State == MediaState.None)
- return;
- StopPlay();
+ return false;
+
+ return StopPlay();
}
}
}
\ No newline at end of file
diff --git a/src/FFmpegView.Wpf/FFmpegView.cs b/src/FFmpegView.Wpf/FFmpegView.cs
index 5d3e224..cf5e093 100644
--- a/src/FFmpegView.Wpf/FFmpegView.cs
+++ b/src/FFmpegView.Wpf/FFmpegView.cs
@@ -24,6 +24,7 @@ public unsafe class FFmpegView : ContentControl, IFFmpegView
private WriteableBitmap writeableBitmap;
private readonly VideoStreamDecoder video;
private CancellationTokenSource cancellationToken;
+
public FFmpegView()
{
image = new Image();
@@ -34,6 +35,7 @@ public FFmpegView()
video.MediaMsgRecevice += Video_MediaMsgRecevice;
isInit = Init();
}
+
private void Video_MediaMsgRecevice(MsgType type, string msg) =>
Debug.WriteLine($"{(type == MsgType.Error ? "Error: " : "Info: ")}{msg}");
public void SetAudioHandler(AudioStreamDecoder decoder) => audio = decoder;
@@ -89,6 +91,7 @@ public bool Play()
}
return state;
}
+
public bool Play(string uri, Dictionary<string, string> headers = null)
{
if (!isInit)
@@ -118,7 +121,7 @@ public bool Play(string uri, Dictionary<string, string> headers = null)
return state;
}
public void SetHeader(Dictionary<string, string> headers) => video.Headers = headers;
- public bool SeekTo(int seekTime)
+ public bool SeekTo(TimeSpan seekTime)
{
try
{
@@ -182,7 +185,7 @@ bool Init()
if (audio?.IsPlaying == true)
{
if (audio?.TryPlayNextFrame() == true)
- Thread.Sleep(audio.frameDuration.Subtract(timeout));
+ Thread.Sleep(audio.FrameDuration.Subtract(timeout));
}
else
Thread.Sleep(10);
diff --git a/src/FFmpegView.Wpf/FFmpegVisualView.cs b/src/FFmpegView.Wpf/FFmpegVisualView.cs
index 51bf40e..8581555 100644
--- a/src/FFmpegView.Wpf/FFmpegVisualView.cs
+++ b/src/FFmpegView.Wpf/FFmpegVisualView.cs
@@ -91,7 +91,7 @@ public bool Play(string uri, Dictionary<string, string> headers = null)
}
return state;
}
- public bool SeekTo(int seekTime)
+ public bool SeekTo(TimeSpan seekTime)
{
try
{
@@ -156,7 +156,7 @@ bool Init()
if (audio?.IsPlaying == true)
{
if (audio?.TryPlayNextFrame() == true)
- Thread.Sleep(audio.frameDuration.Subtract(timeout));
+ Thread.Sleep(audio.FrameDuration.Subtract(timeout));
}
else
Thread.Sleep(10);