问题描述

**通过 SharpAvi 录制屏幕**
// Recorder: captures the primary screen (and, optionally, microphone audio)
// and writes the result to an AVI file via SharpAvi. Recording starts in the
// constructor and stops when Dispose() is called.
internal class Recorder : IDisposable
{
    private readonly int screenWidth;
    private readonly int screenHeight;
    private readonly AviWriter writer;
    private readonly IAviVideoStream videoStream;
    private readonly IAviAudioStream audioStream;
    private readonly WaveInEvent audioSource;
    private readonly Thread screenThread;
    // Signals the capture thread to stop.
    private readonly ManualResetEvent stopThread = new ManualResetEvent(false);
    // Handshake events keeping audio blocks interleaved with video frames.
    private readonly AutoResetEvent videoFrameWritten = new AutoResetEvent(false);
    private readonly AutoResetEvent audioBlockWritten = new AutoResetEvent(false);

    /// <summary>
    /// Creates the writer and streams and immediately starts recording.
    /// </summary>
    /// <param name="fileName">Target AVI file path.</param>
    /// <param name="codec">FOURCC of the video codec to use.</param>
    /// <param name="quality">Video encoding quality (codec-dependent).</param>
    /// <param name="audioSourceIndex">Wave-in device index; negative disables audio.</param>
    /// <param name="audioWaveFormat">Requested capture wave format.</param>
    /// <param name="encodeAudio">Whether to encode audio rather than store PCM.</param>
    /// <param name="audioBitRate">Audio bit rate used when encoding.</param>
    public Recorder(string fileName, FourCC codec, int quality,
        int audioSourceIndex, SupportedWaveFormat audioWaveFormat, bool encodeAudio, int audioBitRate)
    {
        // Query the DPI transform so the capture covers the physical pixel size
        // of the primary screen, not the DPI-scaled logical size.
        System.Windows.Media.Matrix toDevice;
        using (var source = new HwndSource(new HwndSourceParameters()))
        {
            toDevice = source.CompositionTarget.TransformToDevice;
        }

        screenWidth = (int)Math.Round(SystemParameters.PrimaryScreenWidth * toDevice.M11);
        screenHeight = (int)Math.Round(SystemParameters.PrimaryScreenHeight * toDevice.M22);

        // Create AVI writer and specify FPS
        writer = new AviWriter(fileName)
        {
            FramesPerSecond = 10,
            EmitIndex1 = true,
        };

        // Create video stream
        videoStream = CreateVideoStream(codec, quality);
        // Set only name. Other properties were set when creating the stream,
        // either explicitly by arguments or implicitly by the encoder used
        videoStream.Name = "Screencast";

        if (audioSourceIndex >= 0)
        {
            var waveFormat = ToWaveFormat(audioWaveFormat);

            audioStream = CreateAudioStream(waveFormat, encodeAudio, audioBitRate);
            // Set only name. Other properties were set when creating the stream,
            // either explicitly by arguments or implicitly by the encoder used
            audioStream.Name = "Voice";

            audioSource = new WaveInEvent
            {
                DeviceNumber = audioSourceIndex,
                WaveFormat = waveFormat,
                // Buffer size to store duration of 1 frame
                BufferMilliseconds = (int)Math.Ceiling(1000 / writer.FramesPerSecond),
                NumberOfBuffers = 3,
            };
            audioSource.DataAvailable += audioSource_DataAvailable;
        }

        screenThread = new Thread(RecordScreen)
        {
            Name = typeof(Recorder).Name + ".RecordScreen",
            IsBackground = true
        };

        if (audioSource != null)
        {
            // Prime the handshake: allow the first audio block to be written
            // as soon as the first video frame is.
            videoFrameWritten.Set();
            audioBlockWritten.Reset();
            audioSource.StartRecording();
        }
        screenThread.Start();
    }
private IAviVideoStream CreateVideoStream(FourCC codec,int quality)
{
// Select encoder type based on FOURCC of codec
if (codec == KNownFourCCs.Codecs.Uncompressed)
{
return writer.AddUncompressedVideoStream(screenWidth,screenHeight);
}
else if (codec == KNownFourCCs.Codecs.MotionJpeg)
{
return writer.AddMotionJpegVideoStream(screenWidth,screenHeight,quality
#if !FX45
// Implementation of this encoder for .NET 3.5 requires single-threaded access,forceSingleThreadedAccess: true
#endif
);
}
else
{
return writer.AddMpeg4VideoStream(screenWidth,(double)writer.FramesPerSecond,// It seems that all tested MPEG-4 VfW codecs ignore the quality affecting parameters passed through VfW API
// They only respect the settings from their own configuration dialogs,and Mpeg4VideoEncoder currently has no support for this
quality: quality,codec: codec,// Most of VfW codecs expect single-threaded use,so we wrap this encoder to special wrapper
// Thus all calls to the encoder (including its instantiation) will be invoked on a single thread although encoding (and writing) is performed asynchronously
forceSingleThreadedAccess: true);
}
}
private IAviAudioStream CreateAudioStream(WaveFormat waveFormat,bool encode,int bitRate)
{
// Create encoding or simple stream based on settings
return writer.AddAudioStream(
channelCount: waveFormat.Channels,samplesPerSecond: waveFormat.SampleRate,bitsPerSample: waveFormat.BitsPerSample);
}
private static WaveFormat ToWaveFormat(SupportedWaveFormat waveFormat)
{
switch (waveFormat)
{
case SupportedWaveFormat.WAVE_FORMAT_44M16:
return new WaveFormat(44100,16,1);
case SupportedWaveFormat.WAVE_FORMAT_44S16:
return new WaveFormat(44100,2);
default:
throw new NotSupportedException("Wave formats other than '16-bit 44.1kHz' are not currently supported.");
}
}
public void dispose()
{
stopThread.Set();
screenThread.Join();
if (audioSource != null)
{
audioSource.StopRecording();
audioSource.DataAvailable -= audioSource_DataAvailable;
}
// Close writer: the remaining data is written to a file and file is closed
writer.Close();
stopThread.Close();
}
private void RecordScreen()
{
var frameInterval = TimeSpan.FromSeconds(1 / (double)writer.FramesPerSecond);
var buffer = new byte[screenWidth * screenHeight * 4];
#if FX45
Task videoWriteTask = null;
#else
IAsyncResult videoWriteResult = null;
#endif
var isFirstFrame = true;
var timeTillNextFrame = TimeSpan.Zero;
while (!stopThread.WaitOne(timeTillNextFrame))
{
var timestamp = DateTime.Now;
GetScreenshot(buffer);
// Wait for the prevIoUs frame is written
if (!isFirstFrame)
{
#if FX45
videoWriteTask.Wait();
#else
videoStream.EndWriteFrame(videoWriteResult);
#endif
videoFrameWritten.Set();
}
if (audioStream != null)
{
var signalled = WaitHandle.WaitAny(new WaitHandle[] { audioBlockWritten,stopThread });
if (signalled == 1)
break;
}
// Start asynchronous (encoding and) writing of the new frame
#if FX45
videoWriteTask = videoStream.WriteFrameAsync(true,buffer,buffer.Length);
#else
videoWriteResult = videoStream.BeginWriteFrame(true,buffer.Length,null,null);
#endif
timeTillNextFrame = timestamp + frameInterval - DateTime.Now;
if (timeTillNextFrame < TimeSpan.Zero)
timeTillNextFrame = TimeSpan.Zero;
isFirstFrame = false;
}
// Wait for the last frame is written
if (!isFirstFrame)
{
#if FX45
videoWriteTask.Wait();
#else
videoStream.EndWriteFrame(videoWriteResult);
#endif
}
}
private void GetScreenshot(byte[] buffer)
{
using (var bitmap = new Bitmap(screenWidth,screenHeight))
using (var graphics = Graphics.FromImage(bitmap))
{
graphics.copyFromScreen(0,new System.Drawing.Size(screenWidth,screenHeight));
var bits = bitmap.LockBits(new Rectangle(0,screenWidth,screenHeight),ImageLockMode.ReadOnly,PixelFormat.Format32bppRgb);
Marshal.copy(bits.Scan0,buffer.Length);
bitmap.UnlockBits(bits);
// Should also capture the mouse cursor here,but skipping for simplicity
// For those who are interested,look at http://www.codeproject.com/Articles/12850/Capturing-the-Desktop-Screen-with-the-Mouse-Cursor
}
}
private void audioSource_DataAvailable(object sender,WaveInEventArgs e)
{
var signalled = WaitHandle.WaitAny(new WaitHandle[] { videoFrameWritten,stopThread });
if (signalled == 0)
{
audioStream.WriteBlock(e.Buffer,e.BytesRecorded);
audioBlockWritten.Set();
}
}
}
解决方法
暂未找到可以解决该程序问题的有效方法,小编正在努力寻找并整理中!
如果你已经找到好的解决方法,欢迎将解决方案连同本链接一起发送给小编。
小编邮箱:dio#foxmail.com (将#修改为@)