Dallas Brown avatar Dallas Brown committed effe7f1

Initial Import

Comments (0)

Files changed (2)

HBIScreenCaptureView.h

+//
+//HBIScreenCaptureView.h
+//
+#import <UIKit/UIKit.h>
+#import <AVFoundation/AVFoundation.h>
+ 
+/**
+ * Delegate protocol.  Implement this if you want to receive a notification when the
+ * view completes a recording.
+ *
+ * When a recording is completed, the HBIScreenCaptureView will notify the delegate, passing
+ * it the path to the created recording file if the recording was successful, or a value
+ * of nil if the recording failed/could not be saved.
+ */
@protocol HBIScreenCaptureViewDelegate <NSObject>
// Called when a recording session finishes. outputPathOrNil is the
// filesystem path of the recorded movie on success, or nil if the
// recording failed or could not be saved.
- (void) recordingFinished:(NSString*)outputPathOrNil;
@end
+ 
+/**
+ * HBIScreenCaptureView, a UIView subclass that periodically samples its current display
+ * and stores it as a UIImage available through the 'currentScreen' property.  The
+ * sample/update rate can be configured (within reason) by setting the 'frameRate'
+ * property.
+ *
+ * This class can also be used to record real-time video of its subviews, using the
+ * 'startRecording' and 'stopRecording' methods.  A new recording will overwrite any
+ * previously made recording file, so if you want to create multiple recordings per
+ * session (or across multiple sessions) then it is your responsibility to copy/back-up
+ * the recording output file after each session.
+ *
+ * To use this class, you must link against the following frameworks:
+ *
+ *  - AssetsLibrary
+ *  - AVFoundation
+ *  - CoreGraphics
+ *  - CoreMedia
+ *  - CoreVideo
+ *  - QuartzCore
+ *
+ */
+ 
@interface HBIScreenCaptureView : UIView {
   //video writing
   AVAssetWriter *videoWriter;                        // writes encoded frames to the output movie file
   AVAssetWriterInput *videoWriterInput;              // video track input feeding the writer
   AVAssetWriterInputPixelBufferAdaptor *avAdaptor;   // adapts CVPixelBuffers onto the writer input

   //recording state
   BOOL _recording;           // YES while a recording session is in progress
   NSDate* startedAt;         // wall-clock start of the current recording; frame timestamps are relative to this
   void* bitmapData;          // malloc'd backing store for the capture bitmap context (see the .m)
}

//for recording video
// Starts a new recording session; returns true on success.
- (bool) startRecording;
// Stops the active session and finalizes the output file.
- (void) stopRecording;

//for accessing the current screen and adjusting the capture rate, etc.
// Most recent rendered snapshot of this view's layer hierarchy.
@property(retain) UIImage* currentScreen;
// Target capture rate in frames per second (default set in the .m).
@property(assign) float frameRate;
// Delegate is 'assign' (not retained) per pre-ARC convention to avoid a retain cycle.
@property(nonatomic, assign) id<HBIScreenCaptureViewDelegate> delegate;

@end

HBIScreenCaptureView.m

+//
+//HBIScreenCaptureView.m
+//
+#import "HBIScreenCaptureView.h"
+#import <QuartzCore/QuartzCore.h>
+#import <MobileCoreServices/UTCoreTypes.h>
+#import <AssetsLibrary/AssetsLibrary.h>
+ 
// Private methods (pre-class-extension style category).
@interface HBIScreenCaptureView(Private)
// Encodes the current 'currentScreen' image as a video frame at the given
// presentation time.
- (void) writeVideoFrameAtTime:(CMTime)time;
@end
+ 
+@implementation HBIScreenCaptureView
+ 
+@synthesize currentScreen, frameRate, delegate;
+ 
// Common setup shared by every initializer path (code, coder, frame).
- (void) initialize {
    // Reset all recording machinery to an idle state.
    videoWriter = nil;
    videoWriterInput = nil;
    avAdaptor = nil;
    startedAt = nil;
    bitmapData = NULL;
    _recording = false;

    // View/capture configuration.
    self.clearsContextBeforeDrawing = YES;
    self.currentScreen = nil;
    self.frameRate = 10.0f;     //10 frames per seconds
}
+ 
// Designated path for nib/storyboard instantiation.
- (id) initWithCoder:(NSCoder *)aDecoder {
    if ((self = [super initWithCoder:aDecoder])) {
        [self initialize];
    }
    return self;
}
+ 
// Plain programmatic initializer.
- (id) init {
    if ((self = [super init])) {
        [self initialize];
    }
    return self;
}
+ 
// Programmatic initializer with an explicit frame.
- (id)initWithFrame:(CGRect)frame {
    if ((self = [super initWithFrame:frame])) {
        [self initialize];
    }
    return self;
}
+ 
// Creates a 32-bit ARGB bitmap context of the given size, backed by the
// 'bitmapData' ivar (reallocated on every call). The caller owns the
// returned context and must CGContextRelease it. Returns NULL on failure.
- (CGContextRef) createBitmapContextOfSize:(CGSize) size {
    CGContextRef    context = NULL;
    CGColorSpaceRef colorSpace;
    int             bitmapByteCount;
    int             bitmapBytesPerRow;

    bitmapBytesPerRow   = (size.width * 4);
    bitmapByteCount     = (bitmapBytesPerRow * size.height);
    colorSpace = CGColorSpaceCreateDeviceRGB();

    // Reuse the ivar as scratch space: drop any previous allocation first.
    if (bitmapData != NULL) {
        free(bitmapData);
    }
    bitmapData = malloc( bitmapByteCount );
    if (bitmapData == NULL) {
        // BUGFIX: the color space was leaked on this path.
        CGColorSpaceRelease( colorSpace );
        fprintf (stderr, "Memory not allocated!");
        return NULL;
    }

    context = CGBitmapContextCreate (bitmapData,
                                     size.width,
                                     size.height,
                                     8,      // bits per component
                                     bitmapBytesPerRow,
                                     colorSpace,
                                     kCGImageAlphaNoneSkipFirst);
    CGColorSpaceRelease( colorSpace );

    // BUGFIX: check for failure BEFORE touching the context. The original
    // called CGContextSetAllowsAntialiasing on a possibly-NULL context, and
    // freed bitmapData without NULLing it (dangling pointer that
    // cleanupWriter would later double-free).
    if (context == NULL) {
        free (bitmapData);
        bitmapData = NULL;
        fprintf (stderr, "Context not created!");
        return NULL;
    }
    CGContextSetAllowsAntialiasing(context, NO);

    return context;
}
+ 
+//static int frameCount = 0;            //debugging
//static int frameCount = 0;            //debugging
// Renders the layer hierarchy into an offscreen bitmap, publishes it as
// 'currentScreen', feeds it to the video writer while recording, and
// re-schedules itself to approximate the configured frame rate.
- (void) drawRect:(CGRect)rect {
    NSDate* start = [NSDate date];
    CGContextRef context = [self createBitmapContextOfSize:self.frame.size];

    // BUGFIX: the original used the context unconditionally; a failed
    // allocation would crash. Skip this frame and retry on the next cycle.
    if (context == NULL) {
        [self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:(1.0 / self.frameRate)];
        return;
    }

    //not sure why this is necessary...image renders upside-down and mirrored
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
    CGContextConcatCTM(context, flipVertical);

    [self.layer renderInContext:context];

    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    UIImage* background = [UIImage imageWithCGImage: cgImage];
    CGImageRelease(cgImage);

    self.currentScreen = background;

    //debugging
    //if (frameCount < 40) {
    //      NSString* filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
    //      NSString* pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
    //      [UIImagePNGRepresentation(self.currentScreen) writeToFile: pngPath atomically: YES];
    //      frameCount++;
    //}

    //NOTE:  to record a scrollview while it is scrolling you need to implement your UIScrollViewDelegate such that it calls
    //       'setNeedsDisplay' on the ScreenCaptureView.
    if (_recording) {
        float millisElapsed = [[NSDate date] timeIntervalSinceDate:startedAt] * 1000.0;
        [self writeVideoFrameAtTime:CMTimeMake((int)millisElapsed, 1000)];
    }

    float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
    // Subtract the time spent capturing from the per-frame budget so the
    // effective rate stays close to 'frameRate'.
    float delayRemaining = (1.0 / self.frameRate) - processingSeconds;

    CGContextRelease(context);

    //redraw at the specified framerate
    [self performSelector:@selector(setNeedsDisplay) withObject:nil afterDelay:delayRemaining > 0.0 ? delayRemaining : 0.01];
}
+ 
// Releases the AVFoundation writer chain, the recording timestamp, and the
// bitmap backing store, NULLing every ivar so a new session can start clean.
// Safe to call repeatedly (messaging nil is a no-op; bitmapData is guarded).
- (void) cleanupWriter {
   [avAdaptor release];
   avAdaptor = nil;

   [videoWriterInput release];
   videoWriterInput = nil;

   [videoWriter release];
   videoWriter = nil;

   [startedAt release];
   startedAt = nil;

   if (bitmapData != NULL) {
       free(bitmapData);
       bitmapData = NULL;
   }
}
+ 
// Pre-ARC dealloc: release writer objects / free the bitmap buffer, then
// chain to super (required under manual retain/release).
- (void)dealloc {
   [self cleanupWriter];
   [super dealloc];
}
+ 
// Returns the fixed recording destination (<Documents>/output.mp4) as a
// file URL, deleting any previous recording found there. The returned URL
// is autoreleased, matching the original ownership contract.
- (NSURL*) tempFileURL {
    NSString* documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSString* outputPath = [documentsDirectory stringByAppendingPathComponent:@"output.mp4"];

    NSFileManager* fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath]) {
        NSError* error;
        if (![fileManager removeItemAtPath:outputPath error:&error]) {
            NSLog(@"Could not delete old recording file at path:  %@", outputPath);
        }
    }

    return [NSURL fileURLWithPath:outputPath];
}
+ 
// Builds the AVAssetWriter pipeline (writer -> H.264 video input -> pixel
// buffer adaptor) sized to the view's current frame, and starts the
// writing session at time zero. Returns YES on success, NO on any failure.
-(BOOL) setUpWriter {
    NSError* error = nil;
    videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL] fileType:AVFileTypeQuickTimeMovie error:&error];
    // BUGFIX: the original only NSParameterAssert'd here (compiled out in
    // release) and returned YES unconditionally, so callers could never
    // detect a failed setup.
    if (videoWriter == nil) {
        NSLog(@"setUpWriter: could not create AVAssetWriter: %@", error);
        return NO;
    }

    //Configure video
    NSDictionary* videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
                                           [NSNumber numberWithDouble:1024.0*1024.0], AVVideoAverageBitRateKey,
                                           nil ];

    NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:self.frame.size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:self.frame.size.height], AVVideoHeightKey,
                                   videoCompressionProps, AVVideoCompressionPropertiesKey,
                                   nil];

    videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
    if (videoWriterInput == nil) {
        NSLog(@"setUpWriter: could not create AVAssetWriterInput");
        [self cleanupWriter];
        return NO;
    }

    // Samples arrive at display cadence, so the input must not buffer.
    videoWriterInput.expectsMediaDataInRealTime = YES;
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                      [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    avAdaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes] retain];

    //add input
    [videoWriter addInput:videoWriterInput];
    // BUGFIX: startWriting can fail (e.g. unwritable destination); the
    // original ignored its result.
    if (![videoWriter startWriting]) {
        NSLog(@"setUpWriter: startWriting failed: %@", videoWriter.error);
        [self cleanupWriter];
        return NO;
    }
    [videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];

    return YES;
}
+ 
// Finalizes the current recording: marks the input finished, waits for the
// writer to leave the Unknown state, finishes writing, tears down the
// writer chain, and notifies the delegate on the main thread.
- (void) completeRecordingSession {
    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];

    [videoWriterInput markAsFinished];

    // Wait for the video
    int status = videoWriter.status;
    while (status == AVAssetWriterStatusUnknown) {
        NSLog(@"Waiting...");
        [NSThread sleepForTimeInterval:0.5f];
        status = videoWriter.status;
    }

    @synchronized(self) {
        BOOL success = [videoWriter finishWriting];
        if (!success) {
            NSLog(@"finishWriting returned NO");
        }

        [self cleanupWriter];

        id delegateObj = self.delegate;
        NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@", [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0], @"output.mp4"];

        NSLog(@"Completed recording, file is stored at:  %@", outputPath);
        if ([delegateObj respondsToSelector:@selector(recordingFinished:)]) {
            // BUGFIX: the delegate protocol declares an NSString* path
            // ('recordingFinished:(NSString*)outputPathOrNil'); the original
            // passed an NSURL here, violating the documented contract.
            [delegateObj performSelectorOnMainThread:@selector(recordingFinished:) withObject:(success ? outputPath : nil) waitUntilDone:YES];
        }

        [outputPath release];
    }

    [pool drain];
}
+ 
// Starts a new recording session. Returns true on success, false if setup
// failed or a recording is already in progress. Thread-safe via
// @synchronized(self).
- (bool) startRecording {
    bool result = NO;
    @synchronized(self) {
        if (! _recording) {
            result = [self setUpWriter];
            // BUGFIX: the original flipped _recording and retained startedAt
            // even when setUpWriter failed, leaving the view "recording"
            // with no writer.
            if (result) {
                startedAt = [[NSDate date] retain];
                _recording = true;
            }
        }
    }

    return result;
}
+ 
// Stops the active recording session (no-op when not recording) and
// finalizes the output file. Thread-safe via @synchronized(self).
- (void) stopRecording {
    @synchronized(self) {
        if (!_recording) {
            return;
        }
        _recording = false;
        [self completeRecordingSession];
    }
}
+ 
// Copies the pixels of 'currentScreen' into a pooled CVPixelBuffer and
// appends it to the writer at the given presentation time. Frames are
// silently dropped when the input is not ready or a buffer cannot be
// obtained.
-(void) writeVideoFrameAtTime:(CMTime)time {
    if (![videoWriterInput isReadyForMoreMediaData]) {
        NSLog(@"Not ready for video data");
        return;
    }

    @synchronized (self) {
        UIImage* newFrame = [self.currentScreen retain];
        // BUGFIX: a nil snapshot would pass NULL into the CF calls below,
        // which (unlike ObjC messaging) crash on NULL.
        if (newFrame == nil) {
            [newFrame release];
            return;
        }

        CGImageRef cgImage = CGImageCreateCopy([newFrame CGImage]);
        CFDataRef image = CGDataProviderCopyData(CGImageGetDataProvider(cgImage));

        CVPixelBufferRef pixelBuffer = NULL;
        int status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, avAdaptor.pixelBufferPool, &pixelBuffer);
        if (status != 0 || pixelBuffer == NULL) {
            // BUGFIX: the original logged the failure but then locked and
            // wrote into the NULL buffer anyway, crashing.
            NSLog(@"Error creating pixel buffer:  status=%d", status);
        }
        else {
            // set image data into pixel buffer
            CVPixelBufferLockBaseAddress( pixelBuffer, 0 );
            uint8_t* destPixels = CVPixelBufferGetBaseAddress(pixelBuffer);
            CFDataGetBytes(image, CFRangeMake(0, CFDataGetLength(image)), destPixels);  //XXX:  will work if the pixel buffer is contiguous and has the same bytesPerRow as the input data

            BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
            if (!success)
                NSLog(@"Warning:  Unable to write buffer to video");

            CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 );
            CVPixelBufferRelease( pixelBuffer );
        }

        //clean up
        [newFrame release];
        CFRelease(image);
        CGImageRelease(cgImage);
    }
}
+ 
+@end
Tip: Filter by directory path e.g. /media app.js to search for public/media/app.js.
Tip: Use camelCasing e.g. ProjME to search for ProjectModifiedEvent.java.
Tip: Filter by extension type e.g. /repo .js to search for all .js files in the /repo directory.
Tip: Separate your search with spaces e.g. /ssh pom.xml to search for src/ssh/pom.xml.
Tip: Use ↑ and ↓ arrow keys to navigate and return to view the file.
Tip: You can also navigate files with Ctrl+j (next) and Ctrl+k (previous) and view the file with Ctrl+o.
Tip: You can also navigate files with Alt+j (next) and Alt+k (previous) and view the file with Alt+o.