我正在开发一个具有麦克风的按钮的iOS应用程序(以及其他功能)。当用户按下麦克风时,它会突出显示,应用程序现在应该开始从设备的麦克风录制声音,并发送到服务器(专用于应用程序的服务器,由我知道的人开发,因此我可以影响其设计)。
我正在寻找最简单但最稳健的方法来做到这一点,也就是说,除非流媒体或 VoIP 方案和其他做法一样简单,否则我不想开发复杂的流媒体解决方案或 VoIP 功能。
主要的问题是,我们不知道用户将录制多长时间,但是我们想要确保将声音连续发送到服务器,我们不希望等到用户完成录音。如果数据以块的形式到达服务器,那么我们不希望遗漏用户可能正在录制的任何信息,所以一个块必须继续前一个结束等等。
我们的第一个想法是创建例如10秒的声音片段的“块”,并将它们连续发送到服务器。有没有任何流媒体解决方案更好/更简单,我错过了?
解决方法
看看
this
在本教程中,录制的声音会保存到 soundFileURL 中,之后你只需要用该文件的内容创建一个 NSData,然后将其发送到你的服务器。
希望这有帮助。
在本教程中,录制的声音会保存到 soundFileURL 中,之后你只需要用该文件的内容创建一个 NSData,然后将其发送到你的服务器。
希望这有帮助。
编辑:
我刚刚创建了一个包含3个按钮,REC,SEND和Stop的版本:
REC:将开始录制到文件中。
SEND:将该文件中已录制的内容保存为 NSData 并发送到服务器,然后重新开始录音。
和STOP:将停止录制。
这里是代码:
在.h文件中:
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

/// Records audio from the microphone and sends completed chunks to a server.
/// Wire the three buttons (REC / SEND / STOP) to the matching IBActions.
@interface ViewController : UIViewController <AVAudioRecorderDelegate>

/// The recorder writing into a temp file in the Documents directory.
@property (nonatomic,retain) AVAudioRecorder *audioRecorder;

/// Starts recording when tapped.
@property (nonatomic,retain) IBOutlet UIButton *recordButton;
/// Fully stops recording.
@property (nonatomic,retain) IBOutlet UIButton *stopButton;
/// Ships the recorded chunk to the server and restarts recording.
@property (nonatomic,retain) IBOutlet UIButton *sendButton;

/// YES when the user stopped for good; NO while a stop is only a
/// send-and-restart cycle. (Spelling kept as-is — the .m relies on it.)
@property BOOL stoped;

/// Begins recording into the temp file.
- (IBAction)startRec:(id)sender;
/// Stops the recorder so the finished chunk can be sent, then restarts.
- (IBAction)sendToServer:(id)sender;
/// Stops recording entirely.
- (IBAction)stop:(id)sender;

@end
并在.m文件中:
#import "ViewController.h"

@implementation ViewController

@synthesize audioRecorder;
@synthesize recordButton,sendButton,stopButton;
@synthesize stoped;

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Release any cached data, images, etc. that aren't in use.
}

#pragma mark - View lifecycle

/// Sets up the recorder to write a CAF file in the Documents directory.
- (void)viewDidLoad {
    [super viewDidLoad];
    sendButton.enabled = NO;
    stopButton.enabled = NO;
    stoped = YES;

    // FIX: was "Nsstring" (undeclared type) — correct class is NSString.
    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(
        NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *soundFilePath =
        [docsDir stringByAppendingPathComponent:@"tempsound.caf"];
    NSURL *soundFileURL = [NSURL fileURLWithPath:soundFilePath];

    // FIX: AVEncoderBitRateKey was 16 — 16 bits/second is not a valid
    // encoder bit rate and makes recorder init fail on some devices.
    // 128000 bps is a sane default for voice-quality audio.
    NSDictionary *recordSettings = [NSDictionary dictionaryWithObjectsAndKeys:
        [NSNumber numberWithInt:AVAudioQualityMin], AVEncoderAudioQualityKey,
        [NSNumber numberWithInt:128000], AVEncoderBitRateKey,
        [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
        [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
        nil];

    NSError *error = nil;
    audioRecorder = [[AVAudioRecorder alloc] initWithURL:soundFileURL
                                                settings:recordSettings
                                                   error:&error];
    audioRecorder.delegate = self;
    // FIX: check the returned instance, not the error pointer — the error
    // out-parameter is only meaningful when initialization fails.
    if (audioRecorder == nil) {
        NSLog(@"error: %@",[error localizedDescription]);
    } else {
        // FIX: was "preparetoRecord" — no such selector; correct name is
        // prepareToRecord (pre-creates the file and primes the hardware).
        [audioRecorder prepareToRecord];
    }
}

- (void)viewDidUnload {
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    // FIX: "self.myOutlet = nil;" referenced a property that does not
    // exist on this class (compile error) — it was template boilerplate.
}

- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
}

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
}

// FIX: was "viewWilldisappear:" — wrong capitalization, so UIKit never
// called it, and the super send targeted a non-existent selector.
- (void)viewWillDisappear:(BOOL)animated {
    [super viewWillDisappear:animated];
}

// FIX: was "viewDiddisappear:" — same capitalization problem as above.
- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
}

// FIX: was "shouldAutorotatetoInterfaceOrientation:" — wrong capitalization
// meant UIKit never consulted it and the default rotation behavior applied.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
    // Return YES for supported orientations.
    return (interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown);
}

#pragma mark - Upload

/// Stub: copy the chunk and hand it to your networking layer.
/// @param data The raw contents of the recorded audio file.
/// @return YES once the data has been queued for upload.
- (BOOL)sendAudioToServer:(NSData *)data {
    NSData *d = [NSData dataWithData:data];
    // Now you'll just have to send that NSData to your server.
    return YES;
}

#pragma mark - AVAudioRecorderDelegate

/// Fired whenever the recorder stops. If the stop was triggered by SEND
/// (stoped == NO), ship the finished chunk and immediately resume recording
/// so no audio between chunks is lost; a STOP (stoped == YES) ends the cycle.
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder
                           successfully:(BOOL)flag {
    NSLog(@"stoped");
    if (!stoped) {
        NSData *data = [NSData dataWithContentsOfURL:recorder.url];
        [self sendAudioToServer:data];
        // Restarting overwrites the temp file, so the next chunk begins
        // exactly where this one ended.
        [recorder record];
        NSLog(@"stoped sent and restarted");
    }
}

#pragma mark - Actions

- (IBAction)startRec:(id)sender {
    if (!audioRecorder.recording) {
        sendButton.enabled = YES;
        stopButton.enabled = YES;
        [audioRecorder record];
    }
}

/// Stopping with stoped == NO makes the delegate callback send the chunk
/// and restart recording (see audioRecorderDidFinishRecording:successfully:).
- (IBAction)sendToServer:(id)sender {
    stoped = NO;
    [audioRecorder stop];
}

- (IBAction)stop:(id)sender {
    stopButton.enabled = NO;
    sendButton.enabled = NO;
    recordButton.enabled = YES;
    stoped = YES;
    if (audioRecorder.recording) {
        [audioRecorder stop];
    }
}

@end
祝你好运。