Here is my C# code:
using System;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using Foundation;
using AVFoundation;
using CoreGraphics;
using CoreVideo;
using CoreMedia;
using AppKit;
using ScreenCaptureKit;
using AudioToolbox;
using CoreFoundation;

namespace ScreenRecorder
{
    class Program
    {
        static void Main()
        {
            if (File.Exists("../recording113.mp4"))
                File.Delete("../recording113.mp4");
            TestRecorder recorder = new();
            recorder.StartRecord();
            Console.WriteLine("Started, press Enter to end...");
            Console.ReadLine();
            recorder.StopRecording();
        }
    }

    internal class TestRecorder : NSObject, ISCStreamDelegate, ISCStreamOutput
    {
        private static string videoFormat = "mp4";
        SCShareableContent? availableContent;
        SCContentFilter? filter;
        SCDisplay? screen;
        AudioSettings audioSettings;
        SCStream stream;
        SCStreamType? streamType;
        AVAssetWriter vW;
        AVAssetWriterInput vwInput, awInput;
        AVAudioEngine audioEngine = new AVAudioEngine();

        public void StartRecord()
        {
            availableContent = SCShareableContent.GetShareableContentAsync(true, true).Result;
            PrepareRecord();
        }

        public void PrepareRecord()
        {
            streamType = SCStreamType.Display;
            UpdateAudioSettings();
            screen = availableContent?.Displays.First();
            filter = new SCContentFilter(screen, [], [], SCContentFilterOption.Exclude);
            Record(filter);
        }

        public void Record(SCContentFilter filter)
        {
            var conf = new SCStreamConfiguration();
            conf.Width = (nuint)(filter.ContentRect.Width * filter.PointPixelScale).Value;
            conf.Height = (nuint)(filter.ContentRect.Height * filter.PointPixelScale).Value;
            conf.MinimumFrameInterval = new CMTime(1, 30);
            conf.ShowsCursor = true;
            conf.CapturesAudio = true;
            conf.SampleRate = (nint)audioSettings.SampleRate;
            conf.ChannelCount = (int)audioSettings.NumberChannels;

            stream = new SCStream(filter, conf, this);

            stream.AddStreamOutput(this, SCStreamOutputType.Screen, DispatchQueue.DefaultGlobalQueue, out NSError? errScreen);
            if (errScreen != null)
            {
                Console.WriteLine("Can't add screen output: " + errScreen.LocalizedDescription);
            }
            stream.AddStreamOutput(this, SCStreamOutputType.Audio, DispatchQueue.DefaultGlobalQueue, out NSError? errAudio);
            if (errAudio != null)
            {
                Console.WriteLine("Can't add audio output: " + errAudio.LocalizedDescription);
            }

            InitVideo(conf);

            bool started = false;
            stream.StartCapture((err) => { Console.WriteLine("Capture started, error: " + err?.Code); started = true; });
            while (!started)
            {
                Thread.Sleep(100);
            }
        }

        public void StopRecording()
        {
            if (stream != null)
            {
                bool stopped = false;
                stream.StopCapture((err) => { Console.WriteLine("Capture stopped, error: " + err?.LocalizedDescription); stopped = true; });
                while (!stopped)
                {
                    Thread.Sleep(100);
                }
            }
            stream = null;
            CloseVideo();
            streamType = null;
        }

        public void UpdateAudioSettings()
        {
            audioSettings = new AudioSettings()
            {
                SampleRate = 48000,
                NumberChannels = 2,
                Format = AudioToolbox.AudioFormatType.MPEG4AAC,
                EncoderBitRate = 128000
            };
        }

        public void InitVideo(SCStreamConfiguration conf)
        {
            var fileEnding = videoFormat;
            var fileType = AVFileTypes.Mpeg4;
            vW = new AVAssetWriter(NSUrl.FromFilename($"../recording113.{fileEnding}"), AVFileTypesExtensions.GetConstant(fileType), out NSError error);
            if (error != null)
            {
                Console.WriteLine("Can't create asset writer: " + error.LocalizedDescription);
            }

            var fpsMultiplier = 30.0 / 8.0;
            var encoderMultiplier = 0.9;
            var targetBitrate = conf.Width * conf.Height * fpsMultiplier * encoderMultiplier;

            var videoSettings = new NSMutableDictionary();
            videoSettings[AVVideo.CodecKey] = AVVideoCodecTypeExtensions.GetConstant(AVVideoCodecType.Hevc);
            videoSettings[AVVideo.WidthKey] = new NSNumber(conf.Width);
            videoSettings[AVVideo.HeightKey] = new NSNumber(conf.Height);
            var compressionProps = new NSMutableDictionary();
            compressionProps[AVVideo.AverageBitRateKey] = new NSNumber(targetBitrate);
            compressionProps[AVVideo.ExpectedSourceFrameRateKey] = new NSNumber(30);
            videoSettings[AVVideo.CompressionPropertiesKey] = compressionProps;

            vwInput = new AVAssetWriterInput(AVMediaTypesExtensions.GetConstant(AVMediaTypes.Video), new AVVideoSettingsCompressed(videoSettings));
            awInput = new AVAssetWriterInput(AVMediaTypesExtensions.GetConstant(AVMediaTypes.Audio), audioSettings);
            vwInput.ExpectsMediaDataInRealTime = true;
            awInput.ExpectsMediaDataInRealTime = true;

            if (vW.CanAddInput(vwInput))
            {
                vW.AddInput(vwInput);
            }
            if (vW.CanAddInput(awInput))
            {
                vW.AddInput(awInput);
            }
            if (!vW.StartWriting())
            {
                Console.WriteLine("Can't start writing.");
                Console.WriteLine(vW?.Error?.LocalizedDescription);
            }
        }

        public void CloseVideo()
        {
            var dispatchGroup = new DispatchGroup();
            dispatchGroup.Enter();
            vwInput.MarkAsFinished();
            awInput.MarkAsFinished();
            vW.FinishWriting(() => dispatchGroup.Leave());
            dispatchGroup.Wait(TimeSpan.MaxValue);
        }

        bool isStarted = false;

        public void DidOutputSampleBuffer(SCStream stream, CMSampleBuffer sampleBuffer, SCStreamOutputType type)
        {
            if (!sampleBuffer.IsValid)
                return;

            if (type == SCStreamOutputType.Screen)
            {
                // Read the frame status from the sample buffer attachments and
                // only write frames that are marked as complete.
                var attachmentsArrayPtr = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer.Handle, false);
                if (attachmentsArrayPtr == IntPtr.Zero)
                    return;
                nint count = CFArrayGetCount(attachmentsArrayPtr);
                if (count == 0)
                    return;
                IntPtr dictPtr = CFArrayGetValueAtIndex(attachmentsArrayPtr, 0);
                if (dictPtr == IntPtr.Zero)
                    return;
                using var statusKey = new CFString(SCStreamFrameInfoKeys.Status);
                IntPtr valuePtr = CFDictionaryGetValue(dictPtr, statusKey.Handle);
                if (valuePtr == IntPtr.Zero)
                    return;
                var status = GetCFInt(valuePtr);
                if (status != (int)SCFrameStatus.Complete)
                {
                    return;
                }

                // Start the writer session at the timestamp of the first complete frame.
                if (vW != null && vW.Status == AVAssetWriterStatus.Writing && !isStarted)
                {
                    isStarted = true;
                    vW.StartSessionAtSourceTime(CMSampleBufferGetPresentationTimeStamp(sampleBuffer.Handle));
                }
                if (vwInput.ReadyForMoreMediaData)
                {
                    vwInput.AppendSampleBuffer(sampleBuffer);
                }
            }
            else if (type == SCStreamOutputType.Audio)
            {
                if (awInput.ReadyForMoreMediaData)
                {
                    awInput.AppendSampleBuffer(sampleBuffer);
                }
            }
        }

        public static int? GetCFInt(IntPtr cfNumber)
        {
            if (cfNumber == IntPtr.Zero)
                return null;
            if (CFNumberGetValue(cfNumber, CFNumberType.kCFNumberSInt32Type, out int result))
                return result;
            return null;
        }

        [DllImport("/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation")]
        private static extern bool CFNumberGetValue(IntPtr number, CFNumberType theType, out int value);

        private enum CFNumberType : int
        {
            kCFNumberSInt32Type = 3
        }

        [Export("stream:didStopWithError:")]
        public void DidStopWithError(SCStream stream, NSError error)
        {
            Console.WriteLine($"Stream stopped with error: {error.LocalizedDescription}");
        }

        [DllImport("/System/Library/Frameworks/CoreMedia.framework/CoreMedia")]
        public static extern CMTime CMSampleBufferGetPresentationTimeStamp(IntPtr sampleBuffer);

        [DllImport("/System/Library/Frameworks/CoreMedia.framework/CoreMedia")]
        public static extern IntPtr CMSampleBufferGetSampleAttachmentsArray(
            IntPtr sampleBuffer,
            [MarshalAs(UnmanagedType.U1)] bool createIfNecessary);

        [DllImport("/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation")]
        private static extern IntPtr CFArrayGetValueAtIndex(IntPtr theArray, nint index);

        [DllImport("/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation")]
        private static extern nint CFArrayGetCount(IntPtr theArray);

        [DllImport("/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation")]
        private static extern IntPtr CFDictionaryGetValue(IntPtr theDict, IntPtr key);
    }
}
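Side note on the start plumbing: the Swift version awaits startCapture, while the C# version polls a started flag. A minimal async sketch, assuming the .NET binding generates the usual Task-based StartCaptureAsync() wrapper from the completion-handler overload (an assumption; verify against your binding version before relying on it):

using System;
using System.Threading.Tasks;
using Foundation;
using ScreenCaptureKit;

internal static class CaptureStart
{
    // Awaits capture start instead of polling a flag, mirroring Swift's
    // `try await stream.startCapture()`. StartCaptureAsync() is assumed to
    // be the generated async wrapper; adjust if your binding only exposes
    // the callback overload.
    public static async Task StartAsync(SCStream stream)
    {
        try
        {
            await stream.StartCaptureAsync();
            Console.WriteLine("Capture started");
        }
        catch (NSErrorException ex)
        {
            // The generated wrapper surfaces the NSError from the completion handler.
            Console.WriteLine("Capture failed: " + ex.Error.LocalizedDescription);
        }
    }
}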
Here is the Swift code:
import AVFAudio
import AVFoundation
import Cocoa
import ScreenCaptureKit

@available(macOS 14, *)
class ViewController: NSObject, SCStreamDelegate, SCStreamOutput {
    enum StreamType: Int {
        case screen, window, systemaudio
    }

    var audioQuality = 128
    var videoFormat = "mp4"
    var availableContent: SCShareableContent?
    var filter: SCContentFilter?
    var screen: SCDisplay?
    var audioSettings: [String: Any]!
    var stream: SCStream!
    var streamType: StreamType?
    var vW: AVAssetWriter!
    var recordMic = false
    var vwInput, awInput, micInput: AVAssetWriterInput!
    let audioEngine = AVAudioEngine()

    func startRecording() {
        print("Starting recording...")
        SCShareableContent.getExcludingDesktopWindows(true, onScreenWindowsOnly: true) { content, error in
            if let error = error {
                switch error {
                case SCStreamError.userDeclined:
                    print("User declined")
                default:
                    print("Error: \(error)")
                }
                return
            }
            self.availableContent = content
            self.prepRecord()
        }
        print("Started")
    }

    func prepRecord() {
        print("Preparing recording")
        streamType = .screen
        updateAudioSettings()
        guard let firstDisplay = availableContent?.displays.first else {
            print("No display found")
            return
        }
        screen = firstDisplay
        let excluded = availableContent?.applications.filter { app in
            Bundle.main.bundleIdentifier == app.bundleIdentifier
        }
        filter = SCContentFilter(display: screen!, excludingApplications: excluded ?? [], exceptingWindows: [])
        Task { await record(filter: filter!) }
    }

    func record(filter: SCContentFilter) async {
        print("Recording process started")
        let conf = SCStreamConfiguration()
        conf.width = Int(filter.contentRect.width) * Int(filter.pointPixelScale)
        conf.height = Int(filter.contentRect.height) * Int(filter.pointPixelScale)
        conf.minimumFrameInterval = CMTime(value: 1, timescale: 30)
        conf.showsCursor = true
        conf.capturesAudio = true
        conf.sampleRate = audioSettings["AVSampleRateKey"] as! Int
        conf.channelCount = audioSettings["AVNumberOfChannelsKey"] as! Int

        stream = SCStream(filter: filter, configuration: conf, delegate: self)
        do {
            try stream.addStreamOutput(self, type: .screen, sampleHandlerQueue: .global())
            try stream.addStreamOutput(self, type: .audio, sampleHandlerQueue: .global())
            initVideo(conf: conf)
            try await stream.startCapture()
            print("Capture started")
        } catch {
            print("Capture failed with error: \(error)")
            return
        }
    }

    func stopRecording() {
        if stream != nil {
            stream.stopCapture()
        }
        stream = nil
        closeVideo()
        streamType = nil
        print("Stopped recording")
    }

    func updateAudioSettings() {
        audioSettings = [AVSampleRateKey: 48000, AVNumberOfChannelsKey: 2]
        audioSettings[AVFormatIDKey] = kAudioFormatMPEG4AAC
        audioSettings[AVEncoderBitRateKey] = audioQuality * 1000
    }

    func initVideo(conf: SCStreamConfiguration) {
        let fileEnding = videoFormat
        let fileType = AVFileType.mp4
        if let downloadsDirectory = FileManager.default.urls(for: .downloadsDirectory, in: .userDomainMask).first {
            let url = downloadsDirectory.appendingPathComponent("recording \(Date()).\(fileEnding)")
            vW = try? AVAssetWriter(outputURL: url, fileType: fileType)
            let fpsM = Double(30) / 8.0
            let eM = 0.9
            let tB = Double(conf.width * conf.height) * fpsM * eM
            let videoSettings: [String: Any] = [
                AVVideoCodecKey: AVVideoCodecType.hevc,
                AVVideoWidthKey: conf.width,
                AVVideoHeightKey: conf.height,
                AVVideoCompressionPropertiesKey: [
                    AVVideoAverageBitRateKey: tB,
                    AVVideoExpectedSourceFrameRateKey: 30
                ]
            ]
            vwInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
            awInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
            vwInput.expectsMediaDataInRealTime = true
            awInput.expectsMediaDataInRealTime = true
            vW.add(vwInput)
            vW.add(awInput)
            vW.startWriting()
        }
    }

    func closeVideo() {
        let dispatchGroup = DispatchGroup()
        dispatchGroup.enter()
        vwInput.markAsFinished()
        awInput.markAsFinished()
        vW.finishWriting {
            dispatchGroup.leave()
        }
        dispatchGroup.wait()
    }

    var isStarted = false

    func stream(_ stream: SCStream, didOutputSampleBuffer sampleBuffer: CMSampleBuffer, of type: SCStreamOutputType) {
        guard sampleBuffer.isValid else { return }
        switch type {
        case .screen:
            guard let attachmentsArray = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: false) as? [[SCStreamFrameInfo: Any]],
                  let attachments = attachmentsArray.first else { return }
            guard let statusRawValue = attachments[SCStreamFrameInfo.status] as? Int,
                  let status = SCFrameStatus(rawValue: statusRawValue),
                  status == .complete else { return }
            if vW != nil && vW.status == .writing && !isStarted {
                isStarted = true
                vW.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
            }
            if vwInput.isReadyForMoreMediaData {
                vwInput.append(sampleBuffer)
            }
        case .audio:
            if awInput.isReadyForMoreMediaData {
                awInput.append(sampleBuffer)
            }
        @unknown default:
            print("Unknown sample buffer type")
        }
    }

    func stream(_ stream: SCStream, didStopWithError error: Error) {
        print("Stream stopped with error: \(error)")
        DispatchQueue.main.async {
            self.stream = nil
            self.stopRecording()
        }
    }
}
Why does the Swift code record the screen fine at 30+ FPS and produce a proper MP4, while the nearly identical C# code creates a corrupted file that won't even play? I also noticed that if I switch the codec from HEVC to H264 in the C# code and set conf.MinimumFrameInterval = new CMTime(1, 1) (i.e. 1 FPS), the output is a normal MP4 screen recording at 1 FPS, but it has no audio and only about 8 seconds get written; after that the capture keeps running, but nothing more reaches the file.
More details here: https://stackoverflow.com/questions/796 ... capturekit
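One way to narrow down where the C# version breaks (a debugging sketch, not a fix): in the .NET binding, AVAssetWriterInput.AppendSampleBuffer returns false once the writer has failed, and AVAssetWriter.Error then holds the reason, so routing both append sites in DidOutputSampleBuffer through a checked helper would show exactly when and why frames stop reaching the file. AppendChecked is a hypothetical helper for TestRecorder, not part of the original code:

// Hypothetical helper for TestRecorder: logs the writer status and error
// as soon as an append is rejected, e.g. to see what happens around the
// 8-second mark described above.
private void AppendChecked(AVAssetWriterInput input, CMSampleBuffer buffer, string label)
{
    if (!input.ReadyForMoreMediaData)
        return;
    if (!input.AppendSampleBuffer(buffer))
    {
        Console.WriteLine($"{label} append failed: writer status = {vW.Status}, " +
                          $"error = {vW.Error?.LocalizedDescription}");
    }
}

Calling AppendChecked(vwInput, sampleBuffer, "video") and AppendChecked(awInput, sampleBuffer, "audio") from DidOutputSampleBuffer would reveal whether the writer transitions to AVAssetWriterStatus.Failed and with which NSError.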