GitHub code
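The snippets below rely on a few instance variables and outlets that the excerpt never declares: stillImageOutput, movieOutput, playerLayer, cameraimageView, and testimageView. A minimal sketch of the declarations they appear to assume (types inferred from usage; the class name CameraViewController is made up):

// Assumed declarations for the snippets below (inferred from usage, not part of the original post)
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

@interface CameraViewController : UIViewController <AVCaptureFileOutputRecordingDelegate>
@property (nonatomic, strong) IBOutlet UIImageView *cameraimageView; // hosts the live preview layer
@property (nonatomic, strong) IBOutlet UIImageView *testimageView;   // displays the captured still
@end

@implementation CameraViewController {
    AVCaptureStillImageOutput *stillImageOutput; // still-photo output, set up in openCamera:
    AVCaptureMovieFileOutput *movieOutput;       // movie-file output for recording
    AVPlayerLayer *playerLayer;                  // layer used to play back the recording
}
// ... methods from the sections below ...
@end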
1. Opening the camera
- (void)openCamera:(AVCaptureDevicePosition)cameraPostion {
    BOOL hasCamera = ([[AVCaptureDevice devices] count] > 0);
    if (hasCamera) {
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        session.sessionPreset = AVCaptureSessionPresetHigh;

        // Preview layer rendered inside cameraimageView
        AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
        [captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
        [captureVideoPreviewLayer setFrame:self.cameraimageView.bounds];
        [self.cameraimageView.layer addSublayer:captureVideoPreviewLayer];

        // Camera input for the requested position (front or back)
        AVCaptureDevice *device = [self getCamera:cameraPostion];
        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
        [session addInput:input];

        // Still-image output (JPEG)
        stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        NSDictionary *outputSettings = @{ AVVideoCodecKey : AVVideoCodecJPEG };
        [stillImageOutput setOutputSettings:outputSettings];
        [session addOutput:stillImageOutput];

        // Movie-file output for video recording
        movieOutput = [[AVCaptureMovieFileOutput alloc] init];
        [session addOutput:movieOutput];

        [session startRunning];
    }
}
- (AVCaptureDevice *)getCamera:(AVCaptureDevicePosition)cameraPostion {
    // Return the camera at the requested position, or fall back to the default video device
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in cameras) {
        if (device.position == cameraPostion) {
            return device;
        }
    }
    return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
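Before openCamera: will show anything on a device, the app needs camera permission. A minimal usage sketch with an authorization check (the call site and the choice of the back camera are my assumptions, not part of the original snippet):

// Hypothetical call site: ask for camera permission, then open the back camera
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (granted) {
            [self openCamera:AVCaptureDevicePositionBack];
        }
    });
}];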
2. Capturing a still image
- (void)captureNow {
    // Find the video connection on the still-image output
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }

    // Capture a still image
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                                   completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
        if (error || imageSampleBuffer == NULL) { return; }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];

        // gzip round-trip (compress, then immediately decompress) via the NSData+GZIP category
        NSData *compressedData = [imageData gzippedDataWithCompressionLevel:1.0];
        NSData *outputData = [compressedData gunzippedData];
        UIImage *imageT = [[UIImage alloc] initWithData:outputData];
        _testimageView.image = imageT;
        _testimageView.hidden = NO;

        // Write the image to the temporary directory and add it to a zip archive (third-party ZipArchive class)
        NSFileManager *fileManager = [NSFileManager defaultManager];
        NSString *filePath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"main.png"];
        [fileManager createFileAtPath:filePath contents:imageData attributes:nil];
        NSString *zipFile = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"main.zip"];
        ZipArchive *za = [[ZipArchive alloc] init];
        [za CreateZipFile2:zipFile];
        [za addFileToZip:filePath newname:@"main.png"];
        [za CloseZipFile2]; // finalize the archive
    }];
}
3. Recording and playing back video
- (IBAction)beginRecord:(id)sender {
    // Start recording; the delegate callback fires when recording stops or fails
    [movieOutput startRecordingToOutputFileURL:[self fileURLWithName:@"main.mp4"] recordingDelegate:self];
}
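The fileURLWithName: helper used above is not part of the excerpt. A plausible minimal implementation, assuming it simply builds a URL in the temporary directory (note that AVCaptureMovieFileOutput will not record over an existing file):

// Hypothetical helper: builds a file URL in the temporary directory
- (NSURL *)fileURLWithName:(NSString *)name {
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:name];
    // Remove any previous recording so the movie output can write to this path
    [[NSFileManager defaultManager] removeItemAtPath:path error:nil];
    return [NSURL fileURLWithPath:path];
}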
- (void)stopRecord {
    [movieOutput stopRecording];
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error {
    // An error does not always mean the recording failed; check the userInfo flag
    BOOL recordedSuccessfully = YES;
    if ([error code] != noErr) {
        id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
        if (value) {
            recordedSuccessfully = [value boolValue];
        }
    }
    if (recordedSuccessfully) {
        [self playVideo:outputFileURL];
    }
}
- (void)playVideo:(NSURL *)url {
    AVPlayer *player = [AVPlayer playerWithURL:url];
    playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
    playerLayer.frame = CGRectMake(0, 0, 160, 284); // only the size matters; position is set below
    playerLayer.position = self.view.center;
    [self.view.layer addSublayer:playerLayer];

    // Rounded, bordered playback layer
    playerLayer.masksToBounds = YES;
    playerLayer.cornerRadius = 20;
    playerLayer.borderWidth = 1;
    playerLayer.borderColor = [UIColor grayColor].CGColor;

    [player play];

    // Remove the player layer when playback reaches the end
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(removePlayer)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:nil];
}
- (void)removePlayer {
    [playerLayer removeFromSuperlayer];
}
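One gap in the snippet: every call to playVideo: registers the controller for AVPlayerItemDidPlayToEndTimeNotification, but the observer is never removed, so repeated playbacks fire removePlayer multiple times. A small suggested adjustment (my addition, not from the original):

// Suggested change: unregister the end-of-playback observer when tearing down the player layer
- (void)removePlayer {
    [playerLayer removeFromSuperlayer];
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVPlayerItemDidPlayToEndTimeNotification
                                                  object:nil];
}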
When I have some spare time I will write up the server side as well; anyone who is interested is welcome to implement the server themselves.