iOS - Capture movie with AVAssetWriter skipping frames on iPhone 4
I'm trying to capture a movie using AVAssetWriter. On the iPhone 5 everything works: it captures and saves the movie like a charm.
But when I try to capture a movie on an iPhone 4, the sample buffer skips frames and the resulting movie is not good.
So, here is my code:
- (void)initCaptureSession {
    // Open the session and set the quality to 1280x720
    session = [[AVCaptureSession alloc] init];
    if ([session canSetSessionPreset:AVCaptureSessionPreset640x480])
        session.sessionPreset = AVCaptureSessionPresetHigh;

    // Devices for audio and video
    deviceVideo = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    deviceAudio = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

    NSError *error = nil;

    // Create the audio and video inputs
    inputVideo = [AVCaptureDeviceInput deviceInputWithDevice:deviceVideo error:&error];
    if (!inputVideo) NSLog(@"ERROR: trying to open camera: %@", error);

    inputAudio = [AVCaptureDeviceInput deviceInputWithDevice:deviceAudio error:&error];
    if (!inputAudio) NSLog(@"ERROR: trying to open audio: %@", error);

    // CMTime maxDuration = CMTimeMake(60, 1);

    // Create the audio and video outputs
    outputVideo = [[AVCaptureVideoDataOutput alloc] init];
    outputVideo.alwaysDiscardsLateVideoFrames = NO;
    outputVideo.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                             forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    outputAudio = [[AVCaptureAudioDataOutput alloc] init];

    // Add the inputs and outputs to the current session
    [session beginConfiguration];
    if ([session canAddInput:inputVideo])   [session addInput:inputVideo];
    if ([session canAddInput:inputAudio])   [session addInput:inputAudio];
    if ([session canAddOutput:outputVideo]) [session addOutput:outputVideo];
    if ([session canAddOutput:outputAudio]) [session addOutput:outputAudio];
    [session commitConfiguration];

    // Turn off the torch
    [deviceVideo lockForConfiguration:&error];
    if ([deviceVideo hasTorch] && [deviceVideo isTorchModeSupported:AVCaptureTorchModeOff])
        [deviceVideo setTorchMode:AVCaptureTorchModeOff];
    [deviceVideo unlockForConfiguration];

    [self configDevice];

    // Create the preview layer to show the video
    captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [captureVideoPreviewLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
    [captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    captureVideoPreviewLayer.frame = viewPreview.bounds;
    [viewPreview.layer addSublayer:captureVideoPreviewLayer];

    CALayer *viewLayer = viewPreview.layer;
    [viewLayer setMasksToBounds:YES];
    [captureVideoPreviewLayer setFrame:[viewLayer bounds]];
    [viewLayer addSublayer:captureVideoPreviewLayer];

    // Dispatch the output delegates on a queue
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [outputVideo setSampleBufferDelegate:self queue:queue];
    [outputAudio setSampleBufferDelegate:self queue:queue];
    // dispatch_release(queue);

    [session startRunning];
}

- (BOOL)setupWriter {
    urlOutput = [self tempFileURL];

    NSError *error = nil;
    videoWriter = [[AVAssetWriter alloc] initWithURL:urlOutput fileType:AVFileTypeMPEG4 error:&error];
    NSParameterAssert(videoWriter);

    // Add metadata
    NSArray *existingMetadataArray = videoWriter.metadata;
    NSMutableArray *newMetadataArray = nil;
    if (existingMetadataArray) {
        newMetadataArray = [existingMetadataArray mutableCopy];
    } else {
        newMetadataArray = [[NSMutableArray alloc] init];
    }

    AVMutableMetadataItem *mutableItemLocation = [[AVMutableMetadataItem alloc] init];
    mutableItemLocation.keySpace = AVMetadataKeySpaceCommon;
    mutableItemLocation.key = AVMetadataCommonKeyLocation;
    mutableItemLocation.value = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", location.latitude, location.longitude];

    AVMutableMetadataItem *mutableItemModel = [[AVMutableMetadataItem alloc] init];
    mutableItemModel.keySpace = AVMetadataKeySpaceCommon;
    mutableItemModel.key = AVMetadataCommonKeyModel;
    mutableItemModel.value = [[UIDevice currentDevice] model];

    [newMetadataArray addObject:mutableItemLocation];
    [newMetadataArray addObject:mutableItemModel];

    videoWriter.metadata = newMetadataArray;

    // Video configuration
    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];

    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:1], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:1], AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
                                   [NSNumber numberWithInt:90], AVVideoMaxKeyFrameIntervalKey,
                                   videoCleanApertureSettings, AVVideoCleanApertureKey,
                                   videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
                                   AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
                                   nil];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   codecSettings, AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:640], AVVideoWidthKey,
                                   [NSNumber numberWithInt:360], AVVideoHeightKey,
                                   nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = YES;

    // Add the audio input
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

    NSDictionary *audioOutputSettings = nil;
    // Both types of audio settings caused the output video file to be corrupted.
    // if (NO) {
        // This should work on iPhone 3GS and later, and iPod touch 3rd generation
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                               [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                               [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                               nil];
    // } else {
    //     // This should work on any device, but requires more space
    //     audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
    //                            [NSNumber numberWithInt:kAudioFormatAppleLossless], AVFormatIDKey,
    //                            [NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
    //                            [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
    //                            [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
    //                            [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
    //                            nil];
    // }

    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    audioWriterInput.expectsMediaDataInRealTime = YES;

    // Add the inputs
    [videoWriter addInput:videoWriterInput];
    [videoWriter addInput:audioWriterInput];

    return YES;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"sample buffer is not ready. Skipping sample");
        return;
    }

    if (isRecording == YES) {
        lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

        if (videoWriter.status != AVAssetWriterStatusWriting) {
            [videoWriter startWriting];
            [videoWriter startSessionAtSourceTime:lastSampleTime];
        }

        if (captureOutput == outputVideo) {
            [self newVideoSample:sampleBuffer];
        } else if (captureOutput == outputAudio) {
            [self newAudioSample:sampleBuffer];
        }
    }
}

- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer {
    if (isRecording) {
        if (videoWriter.status > AVAssetWriterStatusWriting) {
            NSLog(@"Warning: writer status is %d", videoWriter.status);
            if (videoWriter.status == AVAssetWriterStatusFailed)
                NSLog(@"Error: %@", videoWriter.error);
            return;
        }

        while (!videoWriterInput.readyForMoreMediaData) {
            NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
            [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
        }

        if (![videoWriterInput appendSampleBuffer:sampleBuffer])
            NSLog(@"Unable to write to video input");
    }
}

- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer {
    if (isRecording) {
        if (videoWriter.status > AVAssetWriterStatusWriting) {
            NSLog(@"Warning: writer status is %d", videoWriter.status);
            if (videoWriter.status == AVAssetWriterStatusFailed)
                NSLog(@"Error: %@", videoWriter.error);
            return;
        }

        while (!audioWriterInput.readyForMoreMediaData) {
            NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
            [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
        }

        if (![audioWriterInput appendSampleBuffer:sampleBuffer])
            NSLog(@"Unable to write to audio input");
    }
}

- (void)startVideoRecording {
    if (!isRecording) {
        NSLog(@"start video recording...");
        if (![self setupWriter]) {
            NSLog(@"Setup writer failed");
            return;
        }
        isRecording = YES;
        recorded = NO;
    }
}

- (void)stopVideoRecording {
    if (isRecording) {
        isRecording = NO;
        btRecord.hidden = NO;
        btRecording.hidden = YES;
        [timerToRecord invalidate];
        timerToRecord = nil;
        // [session stopRunning];

        [videoWriter finishWritingWithCompletionHandler:^{
            if (videoWriter.status != AVAssetWriterStatusFailed && videoWriter.status == AVAssetWriterStatusCompleted) {
                videoWriterInput = nil;
                audioWriterInput = nil;
                videoWriter = nil;
                NSLog(@"finishWriting returned successful");
                recorded = YES;
            } else {
                NSLog(@"finishWriting returned unsuccessful");
            }
        }];

        NSLog(@"video recording stopped");
        [self performSelector:@selector(openPlayer) withObject:nil afterDelay:0.5];
    }
}
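(The code above also calls a tempFileURL helper that I did not paste. A minimal sketch of what it could look like is below; the output.mp4 file name is just an assumption.)

- (NSURL *)tempFileURL {
    // Hypothetical helper: build a URL in the temp directory and delete any stale file first.
    NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mp4"]; // assumed file name
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath])
        [fileManager removeItemAtPath:outputPath error:nil];
    return [NSURL fileURLWithPath:outputPath];
}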
When I remove these lines:
while (!audioWriterInput.readyForMoreMediaData) {
    NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
    [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
I get this error:
*** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:] A sample buffer cannot be appended when readyForMoreMediaData is NO.'
On the iPhone 5 I never need this loop.
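(For reference, a pattern that is often suggested for real-time capture is to drop the frame when the writer input is not ready instead of blocking the capture queue. The sketch below uses the same variable names as my code, but it is not what the code above does:)

- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer {
    if (!isRecording) return;

    if (videoWriter.status == AVAssetWriterStatusFailed) {
        NSLog(@"Error: %@", videoWriter.error);
        return;
    }

    // Drop the frame instead of spinning the run loop on the capture queue.
    if (!videoWriterInput.readyForMoreMediaData) {
        NSLog(@"Video writer input not ready, dropping frame");
        return;
    }

    if (![videoWriterInput appendSampleBuffer:sampleBuffer])
        NSLog(@"Unable to write to video input");
}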
I have read the examples here, but I still could not figure out how to make the movie smoother on the iPhone 4.
If you have any suggestion, or a full example of how to make movies with AVAssetWriter on the iPhone 3GS, iPhone 4, iPhone 4S and iPhone 5, I would appreciate it a lot.
Thanks!
After a week fighting with AVFoundation, I found a solution.
After watching WWDC 2012 - Session 520, I put this solution together.
First, I record the movie using AVCaptureMovieFileOutput with the session preset AVCaptureSessionPreset640x480 (a sketch of this recording step is shown below).
After recording, the user chooses whether to save and share, just save, or delete the movie.
If the user wants to save (or save and share), the recorded movie is compressed, handling the audio and video separately.
First I compress the video, then the audio, and finally I merge the two tracks.
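A minimal sketch of that recording step, assuming the session is already configured and that the class adopts AVCaptureFileOutputRecordingDelegate (the file name and the surrounding method names are my own):

- (void)addMovieOutput {
    // movieFileOutput is an assumed AVCaptureMovieFileOutput ivar; session is the configured AVCaptureSession.
    movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([session canAddOutput:movieFileOutput])
        [session addOutput:movieFileOutput];
}

- (void)startRecording {
    // Record straight to a temporary file; the 640x480 preset is already set on the session.
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.mov"]; // assumed file name
    [movieFileOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:path] recordingDelegate:self];
}

- (void)stopRecording {
    [movieFileOutput stopRecording];
}

// AVCaptureFileOutputRecordingDelegate callback, fired once the movie file is fully written.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error {
    // Hand outputFileURL to the exporter below (exportMediaWithURL:location:mirror:).
}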
Then comes the export and merge code:
- (void)exportMediaWithURL:(NSURL *)url location:(CLLocationCoordinate2D)location mirror:(BOOL)mirror {
    urlMedia = url;
    locationMedia = location;
    videoRecorded = NO;
    audioRecorded = NO;

    asset = [AVAsset assetWithURL:urlMedia];

    progressVideo = 0.0;
    progressAudio = 0.0;
    progressMarge = 0.0;
    progressFactor = 3.0;
    mirrored = mirror;

    limitTime = CMTimeMake(1000 * 60, 1000);

    [asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^() {
        NSError *error;
        AVKeyValueStatus stats = [asset statusOfValueForKey:@"tracks" error:&error];
        if (stats == AVKeyValueStatusLoaded) {
            if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] > 0)
                video_track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
            if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0)
                audio_track = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

            if (!audio_track) progressFactor = 1.0;

            if (video_track) {
                if (CMTimeCompare(asset.duration, limitTime) > 0) {
                    totalTime = limitTime;
                } else {
                    totalTime = asset.duration;
                }
                [self exportVideo];
            }
        }
    }];
}

- (void)exportVideo {
    NSError *error;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
                                                               forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    AVAssetReaderOutput *videoOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:video_track outputSettings:videoSettings];
    [assetReader addOutput:videoOutput];
    assetReader.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);

    // Start the writer session for the video file
    if (assetVideoWriter.status == AVAssetWriterStatusUnknown) {
        if ([self setupWriterVideo]) {
            if ([assetVideoWriter startWriting]) {
                [assetVideoWriter startSessionAtSourceTime:kCMTimeZero];
            }
        }
    }

    if ([assetReader startReading]) {
        BOOL videoDone = NO;
        CMSampleBufferRef bufferVideo;

        while (!videoDone) {
            if ([assetReader status] == AVAssetReaderStatusReading)
                bufferVideo = [videoOutput copyNextSampleBuffer];

            if (bufferVideo) {
                [self newVideoSample:bufferVideo];
                CFRelease(bufferVideo);
            } else {
                videoDone = YES;
            }
        }

        // Finish
        [videoWriterInput markAsFinished];
        [assetVideoWriter finishWritingWithCompletionHandler:^{}];

        // Workaround ("gambiarra") for a dealloc problem when using the block completion handler
        while (!videoRecorded) {
            if (assetVideoWriter.status == AVAssetWriterStatusCompleted) {
                videoWriterInput = nil;
                assetVideoWriter = nil;
                videoRecorded = YES;

                if (audio_track) {
                    [self exportAudio];
                } else {
                    NSMutableDictionary *infoToSend = [NSMutableDictionary new];
                    [infoToSend setValue:urlOutputVideo forKey:@"url_media"];
                    [[NSNotificationCenter defaultCenter] postNotificationName:export_status_done object:self userInfo:infoToSend];
                }
            }
        }
    }
}

- (void)exportAudio {
    NSError *error;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&error];
    NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                                   nil];
    AVAssetReaderOutput *audioOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audio_track outputSettings:audioSettings];
    [assetReader addOutput:audioOutput];
    assetReader.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);

    // Start the writer session for the audio file
    if (assetAudioWriter.status == AVAssetWriterStatusUnknown) {
        if ([self setupWriterAudio]) {
            if ([assetAudioWriter startWriting]) {
                [assetAudioWriter startSessionAtSourceTime:kCMTimeZero];
            }
        }
    }

    if ([assetReader startReading]) {
        BOOL audioDone = NO;
        CMSampleBufferRef bufferAudio;

        while (!audioDone) {
            if ([assetReader status] == AVAssetReaderStatusReading)
                bufferAudio = [audioOutput copyNextSampleBuffer];

            if (bufferAudio) {
                [self newAudioSample:bufferAudio];
                CFRelease(bufferAudio);
            } else {
                audioDone = YES;
            }
        }

        // Finish
        [audioWriterInput markAsFinished];
        [assetAudioWriter finishWritingWithCompletionHandler:^{}];

        // Workaround ("gambiarra") for a dealloc problem when using the block completion handler
        while (!audioRecorded) {
            if (assetAudioWriter.status == AVAssetWriterStatusCompleted) {
                audioWriterInput = nil;
                assetAudioWriter = nil;
                audioRecorded = YES;
                [self margeFile];
            }
        }
    }
}

- (void)margeFile {
    AVURLAsset *assetVideo = [AVURLAsset assetWithURL:urlOutputVideo];
    AVAssetTrack *video_track_marge = [[assetVideo tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    AVURLAsset *assetAudio = [AVURLAsset assetWithURL:urlOutputAudio];
    AVAssetTrack *audio_track_marge = [[assetAudio tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    CMTime startTime = CMTimeMake(1, 1);
    CMTimeRange timeRangeVideo = CMTimeRangeMake(kCMTimeZero, assetVideo.duration);
    CMTimeRange timeRangeAudio = CMTimeRangeMake(kCMTimeZero, assetAudio.duration);

    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    if (mirrored) compositionVideoTrack.preferredTransform = CGAffineTransformMakeRotation(M_PI);
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    NSError *error;
    [compositionVideoTrack insertTimeRange:timeRangeVideo ofTrack:video_track_marge atTime:startTime error:&error];
    [compositionAudioTrack insertTimeRange:timeRangeAudio ofTrack:audio_track_marge atTime:startTime error:&error];

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                            presetName:AVAssetExportPresetPassthrough];
    exportSession.outputFileType = AVFileTypeAppleM4V;
    exportSession.outputURL = [self tempFileURL:media_mixed];
    exportSession.shouldOptimizeForNetworkUse = YES;
    exportSession.metadata = newMetadataArray;
    exportSession.timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(1.0, 600), totalTime);

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        NSMutableDictionary *infoToSend = [NSMutableDictionary new];
        switch (exportSession.status) {
            case AVAssetExportSessionStatusCompleted:
                [infoToSend setValue:exportSession.outputURL forKey:@"url_media"];
                [[NSNotificationCenter defaultCenter] postNotificationName:export_status_done object:self userInfo:infoToSend];
                break;
            case AVAssetExportSessionStatusExporting:
                [[NSNotificationCenter defaultCenter] postNotificationName:export_status_exporting object:self];
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"failed");
                break;
        }
    }];

    while (exportSession.status == AVAssetExportSessionStatusExporting) {
        progressMarge = exportSession.progress;
        [self postProgress];
    }
}

- (BOOL)setupWriterVideo {
    urlOutputVideo = [self tempFileURL:media_video];

    NSError *error = nil;
    assetVideoWriter = [[AVAssetWriter alloc] initWithURL:urlOutputVideo fileType:AVFileTypeMPEG4 error:&error];
    NSParameterAssert(assetVideoWriter);

    // Add metadata
    NSArray *existingMetadataArray = assetVideoWriter.metadata;
    if (existingMetadataArray) {
        newMetadataArray = [existingMetadataArray mutableCopy];
    } else {
        newMetadataArray = [[NSMutableArray alloc] init];
    }

    AVMutableMetadataItem *mutableItemLocation = [[AVMutableMetadataItem alloc] init];
    mutableItemLocation.keySpace = AVMetadataKeySpaceCommon;
    mutableItemLocation.key = AVMetadataCommonKeyLocation;
    mutableItemLocation.value = [NSString stringWithFormat:@"%+08.4lf%+09.4lf/", locationMedia.latitude, locationMedia.longitude];

    AVMutableMetadataItem *mutableItemModel = [[AVMutableMetadataItem alloc] init];
    mutableItemModel.keySpace = AVMetadataKeySpaceCommon;
    mutableItemModel.key = AVMetadataCommonKeyModel;
    mutableItemModel.value = [[UIDevice currentDevice] model];

    [newMetadataArray addObject:mutableItemLocation];
    [newMetadataArray addObject:mutableItemModel];

    assetVideoWriter.metadata = newMetadataArray;
    assetVideoWriter.shouldOptimizeForNetworkUse = YES;

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:[self videoConfiguration]];
    NSParameterAssert(videoWriterInput);
    videoWriterInput.expectsMediaDataInRealTime = NO;

    // Add the input
    [assetVideoWriter addInput:videoWriterInput];

    return YES;
}

- (BOOL)setupWriterAudio {
    urlOutputAudio = [self tempFileURL:media_audio];

    NSError *error = nil;
    assetAudioWriter = [[AVAssetWriter alloc] initWithURL:urlOutputAudio fileType:AVFileTypeAppleM4A error:&error];
    NSParameterAssert(assetAudioWriter);

    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:[self audioConfiguration]];
    audioWriterInput.expectsMediaDataInRealTime = NO;

    // Add the input
    [assetAudioWriter addInput:audioWriterInput];

    return YES;
}

- (NSDictionary *)videoConfiguration {
    // Video configuration
    // float bitsPerPixel;
    // int numPixels = 640.0 * 360.0;
    // int bitsPerSecond;
    //
    // // Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
    // if (numPixels < (640 * 360.0))
    //     bitsPerPixel = 4.05; // This bitrate matches the quality produced by AVCaptureSessionPresetMedium or Low.
    // else
    //     bitsPerPixel = 11.4; // This bitrate matches the quality produced by AVCaptureSessionPresetHigh.
    //
    // bitsPerSecond = numPixels * bitsPerPixel;

    NSDictionary *videoCleanApertureSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                [NSNumber numberWithInt:640], AVVideoCleanApertureWidthKey,
                                                [NSNumber numberWithInt:360], AVVideoCleanApertureHeightKey,
                                                [NSNumber numberWithInt:2], AVVideoCleanApertureHorizontalOffsetKey,
                                                [NSNumber numberWithInt:2], AVVideoCleanApertureVerticalOffsetKey,
                                                nil];

    NSDictionary *videoAspectRatioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              [NSNumber numberWithInt:1], AVVideoPixelAspectRatioHorizontalSpacingKey,
                                              [NSNumber numberWithInt:1], AVVideoPixelAspectRatioVerticalSpacingKey,
                                              nil];

    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:1024000], AVVideoAverageBitRateKey,
                                   [NSNumber numberWithInt:90], AVVideoMaxKeyFrameIntervalKey,
                                   videoCleanApertureSettings, AVVideoCleanApertureKey,
                                   videoAspectRatioSettings, AVVideoPixelAspectRatioKey,
                                   AVVideoProfileLevelH264Main30, AVVideoProfileLevelKey,
                                   nil];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
                                   codecSettings, AVVideoCompressionPropertiesKey,
                                   [NSNumber numberWithInt:640], AVVideoWidthKey,
                                   [NSNumber numberWithInt:360], AVVideoHeightKey,
                                   nil];

    return videoSettings;
}

- (NSDictionary *)audioConfiguration {
    // Audio configuration
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

    NSDictionary *audioOutputSettings = nil;
    // Both types of audio settings caused the output video file to be corrupted.
    // if (NO) {
        // This should work on iPhone 3GS and later, and iPod touch 3rd generation
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                               [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:128000], AVEncoderBitRateKey,
                               [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                               nil];
    // } else {
    //     // This should work on any device, but requires more space
    //     audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
    //                            [NSNumber numberWithInt:kAudioFormatAppleLossless], AVFormatIDKey,
    //                            [NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
    //                            [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
    //                            [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
    //                            [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
    //                            nil];
    // }

    return audioOutputSettings;
}

- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer {
    if (assetVideoWriter.status > AVAssetWriterStatusWriting) {
        if (assetVideoWriter.status == AVAssetWriterStatusFailed)
            NSLog(@"Error: %@", assetVideoWriter.error);
        return;
    }

    if (assetVideoWriter.status == AVAssetWriterStatusWriting) {
        while (!videoWriterInput.readyForMoreMediaData)
            NSLog(@"waiting for video");

        if (videoWriterInput.readyForMoreMediaData) {
            CMTime presTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            float valueLoading = (presTime.value / presTime.timescale);
            float valueTotal = (totalTime.value / totalTime.timescale);
            progressVideo = valueLoading / valueTotal;
            [self postProgress];

            if (![videoWriterInput appendSampleBuffer:sampleBuffer])
                NSLog(@"Unable to write to video input");
        }
    }
}

- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer {
    if (assetAudioWriter.status > AVAssetWriterStatusWriting) {
        if (assetAudioWriter.status == AVAssetWriterStatusFailed)
            NSLog(@"Error: %@", assetAudioWriter.error);
        return;
    }

    if (assetAudioWriter.status == AVAssetWriterStatusWriting) {
        while (!audioWriterInput.readyForMoreMediaData)
            NSLog(@"waiting for audio");

        if (audioWriterInput.readyForMoreMediaData) {
            CMTime presTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            float valueLoading = (presTime.value / presTime.timescale);
            float valueTotal = (totalTime.value / totalTime.timescale);
            progressAudio = valueLoading / valueTotal;
            [self postProgress];

            if (![audioWriterInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"Unable to write to audio input");
            }
        }
    }
}

- (void)postProgress {
    float totalProgress = (progressVideo + progressAudio + progressMarge) / progressFactor;

    NSMutableDictionary *infoToSend = [NSMutableDictionary new];
    [infoToSend setValue:[NSNumber numberWithFloat:totalProgress] forKey:@"progress"];
    [[NSNotificationCenter defaultCenter] postNotificationName:export_status_exporting object:self userInfo:infoToSend];
}

- (NSURL *)tempFileURL:(int)typeMedia {
    NSString *outputPath;
    switch (typeMedia) {
        case media_video:
            outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output_export.mp4"];
            break;
        case media_audio:
            outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output_export.m4a"];
            break;
        case media_mixed:
            outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"mixed.mp4"];
            break;
    }

    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];

    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath])
        [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];

    return outputURL;
}

- (void)dealloc {
    NSLog(@"dealloc video exporter");
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    assetVideoWriter = nil;
    assetAudioWriter = nil;
    videoWriterInput = nil;
    audioWriterInput = nil;
    urlMedia = nil;
    urlOutputVideo = nil;
    urlOutputAudio = nil;
    urlOutputFinal = nil;
}

@end
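For completeness, this is roughly how the exporter above gets called and how its notifications are observed. VideoExporter is just a placeholder class name, and recordedFileURL / currentLocation are assumed ivars of the calling controller:

- (void)startExport {
    // Listen for the progress and completion notifications posted by the exporter.
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(exportProgress:)
                                                 name:export_status_exporting
                                               object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(exportDone:)
                                                 name:export_status_done
                                               object:nil];

    exporter = [[VideoExporter alloc] init]; // placeholder class name
    [exporter exportMediaWithURL:recordedFileURL location:currentLocation mirror:NO];
}

- (void)exportProgress:(NSNotification *)note {
    float progress = [[note.userInfo objectForKey:@"progress"] floatValue];
    NSLog(@"export progress: %f", progress);
}

- (void)exportDone:(NSNotification *)note {
    NSURL *finalURL = [note.userInfo objectForKey:@"url_media"];
    NSLog(@"export finished: %@", finalURL);
}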
If you have anything to add, please post it here!