Objective-C — How to create a song visualization waveform with MPMusicPlayerController


Hi folks — for the last couple of days I have been working on code to create a song visualization as a waveform. This code draws the waveform using AVAssetReader. Can anyone tell me which method to call when a song is playing, so I can show the visualization? Here is the code I have.

First, a generic rendering method that takes a pointer to averaged sample data and returns a UIImage. Note that these samples are not playable audio samples.

-(uiimage *) audioimagegraph:(sint16 *) samples             normalizemax:(sint16) normalizemax              samplecount:(nsinteger) samplecount              channelcount:(nsinteger) channelcount              imageheight:(float) imageheight {  cgsize imagesize = cgsizemake(samplecount, imageheight); uigraphicsbeginimagecontext(imagesize); cgcontextref context = uigraphicsgetcurrentcontext();  cgcontextsetfillcolorwithcolor(context, [uicolor blackcolor].cgcolor); cgcontextsetalpha(context,1.0); cgrect rect; rect.size = imagesize; rect.origin.x = 0; rect.origin.y = 0;  cgcolorref leftcolor = [[uicolor whitecolor] cgcolor]; cgcolorref rightcolor = [[uicolor redcolor] cgcolor];  cgcontextfillrect(context, rect);  cgcontextsetlinewidth(context, 1.0);  float halfgraphheight = (imageheight / 2) / (float) channelcount ; float centerleft = halfgraphheight; float centerright = (halfgraphheight*3) ;  float sampleadjustmentfactor = (imageheight/ (float) channelcount) / (float) normalizemax;  (nsinteger intsample = 0 ; intsample < samplecount ; intsample ++ ) {     sint16 left = *samples++;     float pixels = (float) left;     pixels *= sampleadjustmentfactor;     cgcontextmovetopoint(context, intsample, centerleft-pixels);     cgcontextaddlinetopoint(context, intsample, centerleft+pixels);     cgcontextsetstrokecolorwithcolor(context, leftcolor);     cgcontextstrokepath(context);      if (channelcount==2) {         sint16 right = *samples++;         float pixels = (float) right;         pixels *= sampleadjustmentfactor;         cgcontextmovetopoint(context, intsample, centerright - pixels);         cgcontextaddlinetopoint(context, intsample, centerright + pixels);         cgcontextsetstrokecolorwithcolor(context, rightcolor);         cgcontextstrokepath(context);      } }  // create new image uiimage *newimage = uigraphicsgetimagefromcurrentimagecontext();  // tidy uigraphicsendimagecontext();     return newimage; 

}

Next, a method that takes an AVURLAsset and returns PNG image data.

- (nsdata *) renderpngaudiopictogramforassett:(avurlasset *)songasset  {  nserror * error = nil;   avassetreader * reader = [[avassetreader alloc] initwithasset:songasset error:&error];  avassettrack * songtrack = [songasset.tracks objectatindex:0];  nsdictionary* outputsettingsdict = [[nsdictionary alloc] initwithobjectsandkeys:                                      [nsnumber     numberwithint:kaudioformatlinearpcm],avformatidkey,                                     //     [nsnumber     numberwithint:44100.0],avsampleratekey, /*not supported*/                                     //     [nsnumber numberwithint: 2],avnumberofchannelskey,    /*not supported*/                                      [nsnumber numberwithint:16],avlinearpcmbitdepthkey,                                     [nsnumber numberwithbool:no],avlinearpcmisbigendiankey,                                     [nsnumber numberwithbool:no],avlinearpcmisfloatkey,                                     [nsnumber numberwithbool:no],avlinearpcmisnoninterleaved,                                      nil];   avassetreadertrackoutput* output = [[avassetreadertrackoutput alloc] initwithtrack:songtrack outputsettings:outputsettingsdict];  [reader addoutput:output]; [output release];  uint32 samplerate,channelcount;  nsarray* formatdesc = songtrack.formatdescriptions; for(unsigned int = 0; < [formatdesc count]; ++i) {     cmaudioformatdescriptionref item = (cmaudioformatdescriptionref)[formatdesc objectatindex:i];     const audiostreambasicdescription* fmtdesc = cmaudioformatdescriptiongetstreambasicdescription (item);     if(fmtdesc ) {          samplerate = fmtdesc->msamplerate;         channelcount = fmtdesc->mchannelsperframe;          //    nslog(@"channels:%u, bytes/packet: %u, samplerate %f",fmtdesc->mchannelsperframe, fmtdesc->mbytesperpacket,fmtdesc->msamplerate);     } }   uint32 bytespersample = 2 * channelcount; sint16 normalizemax = 0;  nsmutabledata * fullsongdata = [[nsmutabledata alloc] init]; [reader 
startreading];   uint64 totalbytes = 0;    sint64 totalleft = 0; sint64 totalright = 0; nsinteger sampletally = 0;  nsinteger samplesperpixel = samplerate / 50;   while (reader.status == avassetreaderstatusreading){      avassetreadertrackoutput * trackoutput = (avassetreadertrackoutput *)[reader.outputs objectatindex:0];     cmsamplebufferref samplebufferref = [trackoutput copynextsamplebuffer];      if (samplebufferref){         cmblockbufferref blockbufferref = cmsamplebuffergetdatabuffer(samplebufferref);          size_t length = cmblockbuffergetdatalength(blockbufferref);         totalbytes += length;           nsautoreleasepool *wader = [[nsautoreleasepool alloc] init];          nsmutabledata * data = [nsmutabledata datawithlength:length];         cmblockbuffercopydatabytes(blockbufferref, 0, length, data.mutablebytes);           sint16 * samples = (sint16 *) data.mutablebytes;         int samplecount = length / bytespersample;         (int = 0; < samplecount ; ++) {              sint16 left = *samples++;              totalleft  += left;                sint16 right;             if (channelcount==2) {                 right = *samples++;                  totalright += right;             }              sampletally++;              if (sampletally > samplesperpixel) {                  left  = totalleft / sampletally;                   sint16 fix = abs(left);                 if (fix > normalizemax) {                     normalizemax = fix;                 }                   [fullsongdata appendbytes:&left length:sizeof(left)];                  if (channelcount==2) {                     right = totalright / sampletally;                        sint16 fix = abs(right);                     if (fix > normalizemax) {                         normalizemax = fix;                     }                       [fullsongdata appendbytes:&right length:sizeof(right)];                 }                  totalleft   = 0;                 totalright  = 0;                 sampletally 
= 0;              }         }            [wader drain];           cmsamplebufferinvalidate(samplebufferref);          cfrelease(samplebufferref);     } }   nsdata * finaldata = nil;  if (reader.status == avassetreaderstatusfailed || reader.status == avassetreaderstatusunknown){     // went wrong. return nil      return nil; }  if (reader.status == avassetreaderstatuscompleted){      nslog(@"rendering output graphics using normalizemax %d",normalizemax);      uiimage *test = [self audioimagegraph:(sint16 *)                       fullsongdata.bytes                               normalizemax:normalizemax                                samplecount:fullsongdata.length / 4                               channelcount:2                               imageheight:100];      finaldata = imagetodata(test); }     [fullsongdata release]; [reader release];  return finaldata; 

}

Logarithmic (decibel-scaled) versions of the averaging and rendering methods:

 -(uiimage *) audioimageloggraph:(float32 *) samples                normalizemax:(float32) normalizemax                 samplecount:(nsinteger) samplecount                 channelcount:(nsinteger) channelcount                 imageheight:(float) imageheight {  cgsize imagesize = cgsizemake(samplecount, imageheight); uigraphicsbeginimagecontext(imagesize); cgcontextref context = uigraphicsgetcurrentcontext();  cgcontextsetfillcolorwithcolor(context, [uicolor blackcolor].cgcolor); cgcontextsetalpha(context,1.0); cgrect rect; rect.size = imagesize; rect.origin.x = 0; rect.origin.y = 0;  cgcolorref leftcolor = [[uicolor whitecolor] cgcolor]; cgcolorref rightcolor = [[uicolor redcolor] cgcolor];  cgcontextfillrect(context, rect);  cgcontextsetlinewidth(context, 1.0);  float halfgraphheight = (imageheight / 2) / (float) channelcount ; float centerleft = halfgraphheight; float centerright = (halfgraphheight*3) ;  float sampleadjustmentfactor = (imageheight/ (float) channelcount) / (normalizemax -      noisefloor) / 2;  (nsinteger intsample = 0 ; intsample < samplecount ; intsample ++ ) {     float32 left = *samples++;     float pixels = (left - noisefloor) * sampleadjustmentfactor;     cgcontextmovetopoint(context, intsample, centerleft-pixels);     cgcontextaddlinetopoint(context, intsample, centerleft+pixels);     cgcontextsetstrokecolorwithcolor(context, leftcolor);     cgcontextstrokepath(context);      if (channelcount==2) {         float32 right = *samples++;         float pixels = (right - noisefloor) * sampleadjustmentfactor;         cgcontextmovetopoint(context, intsample, centerright - pixels);         cgcontextaddlinetopoint(context, intsample, centerright + pixels);         cgcontextsetstrokecolorwithcolor(context, rightcolor);         cgcontextstrokepath(context);      } }  // create new image uiimage *newimage = uigraphicsgetimagefromcurrentimagecontext();  // tidy uigraphicsendimagecontext();     return newimage; 

}

 - (nsdata *) renderpngaudiopictogramlogforassett:(avurlasset *)songasset {  nserror * error = nil;   avassetreader * reader = [[avassetreader alloc] initwithasset:songasset error:&error];  avassettrack * songtrack = [songasset.tracks objectatindex:0];  nsdictionary* outputsettingsdict = [[nsdictionary alloc] initwithobjectsandkeys:                                      [nsnumber numberwithint:kaudioformatlinearpcm],avformatidkey,                                     //     [nsnumber numberwithint:44100.0],avsampleratekey, /*not supported*/                                     //     [nsnumber numberwithint: 2],avnumberofchannelskey,    /*not supported*/                                      [nsnumber numberwithint:16],avlinearpcmbitdepthkey,                                     [nsnumber numberwithbool:no],avlinearpcmisbigendiankey,                                     [nsnumber numberwithbool:no],avlinearpcmisfloatkey,                                     [nsnumber numberwithbool:no],avlinearpcmisnoninterleaved,                                      nil];   avassetreadertrackoutput* output = [[avassetreadertrackoutput alloc] initwithtrack:songtrack outputsettings:outputsettingsdict];  [reader addoutput:output]; [output release];  uint32 samplerate,channelcount;  nsarray* formatdesc = songtrack.formatdescriptions; for(unsigned int = 0; < [formatdesc count]; ++i) {     cmaudioformatdescriptionref item = (cmaudioformatdescriptionref)[formatdesc objectatindex:i];     const audiostreambasicdescription* fmtdesc = cmaudioformatdescriptiongetstreambasicdescription (item);     if(fmtdesc ) {          samplerate = fmtdesc->msamplerate;         channelcount = fmtdesc->mchannelsperframe;          //    nslog(@"channels:%u, bytes/packet: %u, samplerate %f",fmtdesc->mchannelsperframe, fmtdesc->mbytesperpacket,fmtdesc->msamplerate);     } }   uint32 bytespersample = 2 * channelcount; float32 normalizemax = noisefloor; nslog(@"normalizemax = %f",normalizemax); nsmutabledata * 
fullsongdata = [[nsmutabledata alloc] init]; [reader startreading];   uint64 totalbytes = 0;    float64 totalleft = 0; float64 totalright = 0; float32 sampletally = 0;  nsinteger samplesperpixel = samplerate / 50;   while (reader.status == avassetreaderstatusreading){      avassetreadertrackoutput * trackoutput = (avassetreadertrackoutput *)[reader.outputs objectatindex:0];     cmsamplebufferref samplebufferref = [trackoutput copynextsamplebuffer];      if (samplebufferref){         cmblockbufferref blockbufferref = cmsamplebuffergetdatabuffer(samplebufferref);          size_t length = cmblockbuffergetdatalength(blockbufferref);         totalbytes += length;           nsautoreleasepool *wader = [[nsautoreleasepool alloc] init];          nsmutabledata * data = [nsmutabledata datawithlength:length];         cmblockbuffercopydatabytes(blockbufferref, 0, length, data.mutablebytes);           sint16 * samples = (sint16 *) data.mutablebytes;         int samplecount = length / bytespersample;         (int = 0; < samplecount ; ++) {              float32 left = (float32) *samples++;             left = decibel(left);             left = minmaxx(left,noisefloor,0);              totalleft  += left;                float32 right;             if (channelcount==2) {                 right = (float32) *samples++;                 right = decibel(right);                 right = minmaxx(right,noisefloor,0);                  totalright += right;             }              sampletally++;              if (sampletally > samplesperpixel) {                  left  = totalleft / sampletally;                  if (left > normalizemax) {                     normalizemax = left;                 }                  // nslog(@"left average = %f, normalizemax = %f",left,normalizemax);                   [fullsongdata appendbytes:&left length:sizeof(left)];                  if (channelcount==2) {                     right = totalright / sampletally;                        if (right > normalizemax) {      
                   normalizemax = right;                     }                       [fullsongdata appendbytes:&right length:sizeof(right)];                 }                  totalleft   = 0;                 totalright  = 0;                 sampletally = 0;              }         }            [wader drain];           cmsamplebufferinvalidate(samplebufferref);          cfrelease(samplebufferref);     } }   nsdata * finaldata = nil;  if (reader.status == avassetreaderstatusfailed || reader.status == avassetreaderstatusunknown){     // went wrong. handle it. }  if (reader.status == avassetreaderstatuscompleted){     // you're done. worked.      nslog(@"rendering output graphics using normalizemax %f",normalizemax);      uiimage *test = [self audioimageloggraph:(float32 *) fullsongdata.bytes                                  normalizemax:normalizemax                                   samplecount:fullsongdata.length / (sizeof(float32) * 2)                                  channelcount:2                                  imageheight:100];      finaldata = imagetodata(test); }     [fullsongdata release]; [reader release];   return finaldata; 

}

  **now init method "the business"**    - (id) initwithmpmediaitem:(mpmediaitem*) item         completionblock:(void (^)(uiimage* delayedimagepreparation))completionblock  {   nsfilemanager *fman = [nsfilemanager defaultmanager];  nsstring *assetpictogramfilepath = [[self class]    cachedaudiopictogrampathformpmediaitem:item];  if ([fman fileexistsatpath:assetpictogramfilepath]) {      nslog(@"returning cached waveform pictogram: %@",[assetpictogramfilepath lastpathcomponent]);      self = [self initwithcontentsoffile:assetpictogramfilepath];     return self;  }   nsstring *assetfilepath = [[self class] cachedaudiofilepathformpmediaitem:item];  nsurl *assetfileurl = [nsurl fileurlwithpath:assetfilepath];  if ([fman fileexistsatpath:assetfilepath]) {      nslog(@"scanning cached audio data create uiimage file: %@",[assetfilepath lastpathcomponent]);      [assetfileurl retain];     [assetpictogramfilepath retain];      [nsthread mcsm_performblockinbackground: ^{           avurlasset *asset = [[avurlasset alloc] initwithurl:assetfileurl options:nil];         nsdata *waveformdata = [self renderpngaudiopictogramforassett:asset];           [waveformdata writetofile:assetpictogramfilepath atomically:yes];          [assetfileurl release];          [assetpictogramfilepath release];          if (completionblock) {              [waveformdata retain];             [nsthread mcsm_performblockonmainthread:^{                    uiimage *result = [uiimage imagewithdata:waveformdata];                    nslog(@"returning rendered pictogram on main thread (%d bytes %@ data           in uiimage %0.0f x %0.0f pixels)",waveformdata.length, [imgextuppercasestring],result.size.width,result.size.height);                  completionblock(result);                  [waveformdata release];              }];          }       }];      return nil;  } else {       nsstring *assetfolder = [[self class] assetcachefolder];      [fman createdirectoryatpath:assetfolder withintermediatedirectories:yes    
 attributes:nil error:nil];      nslog(@"preparing import audio asset data %@",[assetfilepath lastpathcomponent]);      [assetpictogramfilepath retain];     [assetfileurl retain];      tslibraryimport* import = [[tslibraryimport alloc] init];     nsurl    * asseturl = [item valueforproperty:mpmediaitempropertyasseturl];      [import importasset:asseturl tourl:assetfileurl completionblock:^(tslibraryimport* import) {         //check status , error properties of         //tslibraryimport           if (import.error) {              nslog (@"audio data import failed:%@",import.error);           } else{             nslog (@"creating waveform pictogram file: %@", [assetpictogramfilepath lastpathcomponent]);             avurlasset *asset = [[avurlasset alloc] initwithurl:assetfileurl options:nil];             nsdata *waveformdata = [self renderpngaudiopictogramforassett:asset];               [waveformdata writetofile:assetpictogramfilepath atomically:yes];               if (completionblock) {                 [waveformdata retain];                 [nsthread mcsm_performblockonmainthread:^                 {                      uiimage *result = [uiimage imagewithdata:waveformdata];                     nslog(@"returning rendered pictogram on main thread (%d bytes %@ data in uiimage %0.0f x %0.0f pixels)",waveformdata.length,[imgext uppercasestring],result.size.width,result.size.height);                      completionblock(result);                      [waveformdata release];                  }];              }         }          [assetpictogramfilepath release];         [assetfileurl release];       }  ];      return nil; } 

}

An example of invoking it:

  -(void) importmediaitem  

{

mpmediaitem* item = [self mediaitem];  // since needing playback, save url cached audio. [url release]; url = [[uiimage cachedaudiourlformpmediaitem:item] retain];   [waveformimage release];  waveformimage = [[uiimage alloc ] initwithmpmediaitem:item completionblock:^(uiimage* delayedimagepreparation){      waveformimage = [delayedimagepreparation retain];     [self displaywaveformimage];  }];  if (waveformimage) {     [waveformimage retain];     [self displaywaveformimage]; } 

}

I have added the class methods, libraries, and NSThread categories to my project and everything compiles, but the method is never getting called — it's a mess.

Create a view controller and add the importMediaItem method.

Ensure the media item is not nil. You can get the current track from the iPod player as follows:

mpmusicplayercontroller* player = [mpmusicplayercontroller ipodmusicplayer]; mpmediaitem *item = [player nowplayingitem]; 

In the viewDidAppear method, call importMediaItem.

Add a displayWaveformImage method for the callback, and create a UIImageView from the image returned.

Be sure to start the iPod player before you run the app.

Then it works.


Comments

Popular posts from this blog

c# - SVN Error : "svnadmin: E205000: Too many arguments" -

c# - Copy ObservableCollection to another ObservableCollection -

All overlapping substrings matching a java regex -