Swap the tarball for the extracted source tree

git-svn-id: file:///home/svn/framework3/trunk@6354 4d416f70-5f16-0410-b530-b9f4589650da

Branch: unstable
Author: HD Moore
Date:   2009-03-19 20:10:27 +00:00
Parent: 13706d1bde
Commit: 24b324ccce

9 changed files with 633 additions and 0 deletions

Binary file not shown.

BIN
external/source/osx/isight/._main.m vendored Normal file

Binary file not shown.

45
external/source/osx/isight/CSGCamera.h vendored Normal file

@@ -0,0 +1,45 @@
//
// CSGCamera.h
// MotionTracker
//
// Created by Tim Omernick on 3/7/05.
// Copyright 2005 Tim Omernick. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import <QuickTime/QuickTime.h>
@class CSGImage;
/*
CSGCamera provides a simple way to access the default sequence grabber component (say, an iSight or other DV camera). To use:
- Instantiate a CSGCamera instance (using the plain old -init method)
- Set the CSGCamera's delegate using -setDelegate:. The delegate is the object which will receive -camera:didReceiveFrame: messages.
- Call -startWithSize: on the CSGCamera instance with a decent size (like 512x384).
- Call -stop to stop recording.
*/
@interface CSGCamera : NSObject
{
id delegate;
SeqGrabComponent component;
SGChannel channel;
GWorldPtr gWorld;
Rect boundsRect;
ImageSequence decompressionSequence;
TimeScale timeScale;
TimeValue lastTime;
NSTimeInterval startTime;
NSTimer *frameTimer;
}
- (void)setDelegate:(id)newDelegate;
- (BOOL)startWithSize:(NSSize)frameSize;
- (BOOL)stop;
@end
@interface NSObject (Private)
- (void)camera:(CSGCamera *)aCamera didReceiveFrame:(CSGImage *)aFrame;
@end
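
A minimal usage sketch following the steps listed in the header comment above. The FrameDumper delegate class is hypothetical (not part of this commit) and simply logs the first frame before stopping the camera; frames are delivered through the current run loop, so the caller needs a running NSRunLoop.

#import "CSGCamera.h"
#import "CSGImage.h"

// Hypothetical delegate for illustration only: it receives frames from
// CSGCamera via the informal -camera:didReceiveFrame: protocol above.
@interface FrameDumper : NSObject
@end

@implementation FrameDumper
- (void)camera:(CSGCamera *)aCamera didReceiveFrame:(CSGImage *)aFrame
{
    // aFrame is a CSGImage (an NSImage subclass) stamped with a sample time.
    NSLog(@"frame %@ at t=%.3f", NSStringFromSize([aFrame size]), [aFrame sampleTime]);
    [aCamera stop];   // one frame is enough for this sketch
}
@end

/* Typical call sequence, assuming a running NSRunLoop:
 *
 *   FrameDumper *dumper = [[FrameDumper alloc] init];
 *   CSGCamera *camera = [[CSGCamera alloc] init];
 *   [camera setDelegate:dumper];
 *   [camera startWithSize:NSMakeSize(512, 384)];   // frames arrive via a run-loop timer
 */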

394
external/source/osx/isight/CSGCamera.m vendored Normal file

@@ -0,0 +1,394 @@
//
// CSGCamera.m
// MotionTracker
//
// Created by Tim Omernick on 3/7/05.
// Copyright 2005 Tim Omernick. All rights reserved.
//
// Portions of this file were inspired by Apple Computer, Inc.'s Cocoa SGDataProc example, which can be found here:
// <http://developer.apple.com/samplecode/Cocoa_-_SGDataProc/Cocoa_-_SGDataProc.html>
// Also, I'd like to thank Chris Meyer for his excellent -imageFromGWorld: method, which he gave me permission to use for this framework.
#import "CSGCamera.h"
#import "CSGImage.h"
@interface CSGCamera (Private)
- (void)_sequenceGrabberIdle;
- (BOOL)_setupDecompression;
- (void)_didUpdate;
- (CSGImage *)_imageFromGWorld:(GWorldPtr)gworld;
@end
@interface CSGCamera (SequenceGrabber)
pascal OSErr CSGCameraSGDataProc(SGChannel channel, Ptr data, long dataLength, long *offset, long channelRefCon, TimeValue time, short writeType, long refCon);
@end
@implementation CSGCamera
// Init and dealloc
- (void)dealloc;
{
[self stop];
[delegate release];
[super dealloc];
}
// API
- (void)setDelegate:(id)newDelegate;
{
if (delegate == newDelegate)
return;
[delegate release];
delegate = [newDelegate retain];
}
- (BOOL)startWithSize:(NSSize)frameSize;
{
OSErr theErr;
timeScale = 0;
lastTime = 0;
// Initialize movie toolbox
theErr = EnterMovies();
if (theErr != noErr) {
NSLog(@"EnterMovies() returned %ld", theErr);
return NO;
}
// Open default sequence grabber component
component = OpenDefaultComponent(SeqGrabComponentType, 0);
if (!component) {
NSLog(@"Could not open sequence grabber component.");
return NO;
}
// Initialize sequence grabber component
theErr = SGInitialize(component);
if (theErr != noErr) {
NSLog(@"SGInitialize() returned %ld", theErr);
return NO;
}
// Don't make movie
theErr = SGSetDataRef(component, 0, 0, seqGrabDontMakeMovie);
if (theErr != noErr) {
NSLog(@"SGSetDataRef() returned %ld", theErr);
return NO;
}
// Create sequence grabber video channel
theErr = SGNewChannel(component, VideoMediaType, &channel);
if (theErr != noErr) {
NSLog(@"SGNewChannel() returned %ld", theErr);
return NO;
}
// Set the grabber's bounds
boundsRect.top = 0;
boundsRect.left = 0;
boundsRect.bottom = frameSize.height;
boundsRect.right = frameSize.width;
// NSLog(@"boundsRect=(%d, %d, %d, %d)", boundsRect.top, boundsRect.left, boundsRect.bottom, boundsRect.right);
theErr = SGSetChannelBounds(component, &boundsRect);
// Create the GWorld
theErr = QTNewGWorld(&gWorld, k32ARGBPixelFormat, &boundsRect, 0, NULL, 0);
if (theErr != noErr) {
NSLog(@"QTNewGWorld() returned %ld", theErr);
return NO;
}
// Lock the pixmap
if (!LockPixels(GetPortPixMap(gWorld))) {
NSLog(@"Could not lock pixels.");
return NO;
}
// Set GWorld
theErr = SGSetGWorld(component, gWorld, GetMainDevice());
if (theErr != noErr) {
NSLog(@"SGSetGWorld() returned %ld", theErr);
return NO;
}
// Set the channel's bounds
theErr = SGSetChannelBounds(channel, &boundsRect);
if (theErr != noErr) {
NSLog(@"SGSetChannelBounds(2) returned %ld", theErr);
return NO;
}
// Set the channel usage to record
theErr = SGSetChannelUsage(channel, seqGrabRecord);
if (theErr != noErr) {
NSLog(@"SGSetChannelUsage() returned %ld", theErr);
return NO;
}
// Set data proc
theErr = SGSetDataProc(component, NewSGDataUPP(&CSGCameraSGDataProc), (long)self);
if (theErr != noErr) {
NSLog(@"SGSetDataProc() returned %ld", theErr);
return NO;
}
// Prepare
theErr = SGPrepare(component, false, true);
if (theErr != noErr) {
NSLog(@"SGPrepare() returned %ld", theErr);
return NO;
}
// Start recording
theErr = SGStartRecord(component);
if (theErr != noErr) {
NSLog(@"SGStartRecord() returned %ld", theErr);
return NO;
}
startTime = [NSDate timeIntervalSinceReferenceDate];
// Set up decompression sequence (camera -> GWorld)
[self _setupDecompression];
// Start frame timer
frameTimer = [[NSTimer scheduledTimerWithTimeInterval:0.0 target:self selector:@selector(_sequenceGrabberIdle) userInfo:nil repeats:YES] retain];
[self retain]; // Matches autorelease in -stop
return YES;
}
- (BOOL)stop;
{
// Stop frame timer
if (frameTimer) {
[frameTimer invalidate];
[frameTimer release];
frameTimer = nil;
}
// Stop recording
if (component)
SGStop(component);
ComponentResult theErr;
// End decompression sequence
if (decompressionSequence) {
theErr = CDSequenceEnd(decompressionSequence);
if (theErr != noErr) {
NSLog(@"CDSequenceEnd() returned %ld", theErr);
}
decompressionSequence = 0;
}
// Close sequence grabber component
if (component) {
theErr = CloseComponent(component);
if (theErr != noErr) {
NSLog(@"CloseComponent() returned %ld", theErr);
}
component = NULL;
}
// Dispose of GWorld
if (gWorld) {
DisposeGWorld(gWorld);
gWorld = NULL;
}
[self autorelease]; // Matches retain in -startWithSize:
return YES;
}
@end
@implementation CSGCamera (Private)
- (void)_sequenceGrabberIdle;
{
OSErr theErr;
theErr = SGIdle(component);
if (theErr != noErr) {
NSLog(@"SGIdle returned %ld", theErr);
return;
}
}
- (BOOL)_setupDecompression;
{
ComponentResult theErr;
ImageDescriptionHandle imageDesc = (ImageDescriptionHandle)NewHandle(0);
theErr = SGGetChannelSampleDescription(channel, (Handle)imageDesc);
if (theErr != noErr) {
NSLog(@"SGGetChannelSampleDescription() returned %ld", theErr);
return NO;
}
Rect sourceRect;
sourceRect.top = 0;
sourceRect.left = 0;
sourceRect.right = (**imageDesc).width;
sourceRect.bottom = (**imageDesc).height;
MatrixRecord scaleMatrix;
RectMatrix(&scaleMatrix, &sourceRect, &boundsRect);
theErr = DecompressSequenceBegin(&decompressionSequence, imageDesc, gWorld, NULL, NULL, &scaleMatrix, srcCopy, NULL, 0, codecNormalQuality, bestSpeedCodec);
if (theErr != noErr) {
NSLog(@"DecompressionSequenceBegin() returned %ld", theErr);
return NO;
}
DisposeHandle((Handle)imageDesc);
return YES;
}
- (void)_didUpdate;
{
if ([delegate respondsToSelector:@selector(camera:didReceiveFrame:)]) {
CSGImage *frameImage = [self _imageFromGWorld:gWorld];
if (frameImage) {
[frameImage setSampleTime:startTime + ((double)lastTime / (double)timeScale)];
[delegate camera:self didReceiveFrame:frameImage];
}
}
}
// Thanks to Chris Meyer from http://www.cocoadev.com/
- (CSGImage *)_imageFromGWorld:(GWorldPtr)gworld;
{
NSParameterAssert( gworld != NULL );
PixMapHandle pixMapHandle = GetGWorldPixMap( gworld );
if ( LockPixels( pixMapHandle ) )
{
Rect portRect;
GetPortBounds( gworld, &portRect );
int pixels_wide = (portRect.right - portRect.left);
int pixels_high = (portRect.bottom - portRect.top);
int bps = 8;
int spp = 4;
BOOL has_alpha = YES;
NSBitmapImageRep *frameBitmap = [[[NSBitmapImageRep alloc]
initWithBitmapDataPlanes:NULL
pixelsWide:pixels_wide
pixelsHigh:pixels_high
bitsPerSample:bps
samplesPerPixel:spp
hasAlpha:has_alpha
isPlanar:NO
colorSpaceName:NSDeviceRGBColorSpace
bytesPerRow:0
bitsPerPixel:0] autorelease];
CGColorSpaceRef dst_colorspaceref = CGColorSpaceCreateDeviceRGB();
CGImageAlphaInfo dst_alphainfo = has_alpha ? kCGImageAlphaPremultipliedLast : kCGImageAlphaNone;
CGContextRef dst_contextref = CGBitmapContextCreate( [frameBitmap bitmapData],
pixels_wide,
pixels_high,
bps,
[frameBitmap bytesPerRow],
dst_colorspaceref,
dst_alphainfo );
void *pixBaseAddr = GetPixBaseAddr(pixMapHandle);
long pixmapRowBytes = GetPixRowBytes(pixMapHandle);
CGDataProviderRef dataproviderref = CGDataProviderCreateWithData( NULL, pixBaseAddr, pixmapRowBytes * pixels_high, NULL );
int src_bps = 8;
int src_spp = 4;
BOOL src_has_alpha = YES;
CGColorSpaceRef src_colorspaceref = CGColorSpaceCreateDeviceRGB();
CGImageAlphaInfo src_alphainfo = src_has_alpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNone;
CGImageRef src_imageref = CGImageCreate( pixels_wide,
pixels_high,
src_bps,
src_bps * src_spp,
pixmapRowBytes,
src_colorspaceref,
src_alphainfo,
dataproviderref,
NULL,
NO, // shouldInterpolate
kCGRenderingIntentDefault );
CGRect rect = CGRectMake( 0, 0, pixels_wide, pixels_high );
CGContextDrawImage( dst_contextref, rect, src_imageref );
CGImageRelease( src_imageref );
CGColorSpaceRelease( src_colorspaceref );
CGDataProviderRelease( dataproviderref );
CGContextRelease( dst_contextref );
CGColorSpaceRelease( dst_colorspaceref );
UnlockPixels( pixMapHandle );
CSGImage *image = [[CSGImage alloc] initWithSize:NSMakeSize(pixels_wide, pixels_high)];
[image addRepresentation:frameBitmap];
return [image autorelease];
}
return NULL;
}
@end
@implementation CSGCamera (SequenceGrabber)
pascal OSErr CSGCameraSGDataProc(SGChannel channel, Ptr data, long dataLength, long *offset, long channelRefCon, TimeValue time, short writeType, long refCon)
{
CSGCamera *camera = (CSGCamera *)refCon;
ComponentResult theErr;
if (camera->timeScale == 0) {
theErr = SGGetChannelTimeScale(camera->channel, &camera->timeScale);
if (theErr != noErr) {
NSLog(@"SGGetChannelTimeScale() returned %ld", theErr);
return theErr;
}
}
if (camera->gWorld) {
CodecFlags ignore;
theErr = DecompressSequenceFrameS(camera->decompressionSequence, data, dataLength, 0, &ignore, NULL);
if (theErr != noErr) {
NSLog(@"DecompressSequenceFrameS() returned %ld", theErr);
return theErr;
}
}
camera->lastTime = time;
[camera _didUpdate];
return noErr;
}
@end

19
external/source/osx/isight/CSGImage.h vendored Normal file

@@ -0,0 +1,19 @@
//
// CSGImage.h
// MotionTracker
//
// Created by Tim Omernick on 3/6/05.
// Copyright 2005 Tim Omernick. All rights reserved.
//
#import <Cocoa/Cocoa.h>
@interface CSGImage : NSImage
{
NSTimeInterval sampleTime;
}
- (NSTimeInterval)sampleTime;
- (void)setSampleTime:(NSTimeInterval)newSampleTime;
@end

32
external/source/osx/isight/CSGImage.m vendored Normal file

@@ -0,0 +1,32 @@
//
// CSGImage.m
// MotionTracker
//
// Created by Tim Omernick on 3/6/05.
// Copyright 2005 Tim Omernick. All rights reserved.
//
#import "CSGImage.h"
@implementation CSGImage
// NSObject subclass
- (NSString *)description;
{
return [NSString stringWithFormat:@"<%@: %p> (sampleTime=%.4f)", NSStringFromClass([self class]), self, sampleTime];
}
// API
- (NSTimeInterval)sampleTime;
{
return sampleTime;
}
- (void)setSampleTime:(NSTimeInterval)newSampleTime;
{
sampleTime = newSampleTime;
}
@end

12
external/source/osx/isight/CocoaSequenceGrabber.h vendored Normal file

@@ -0,0 +1,12 @@
/*
* CocoaSequenceGrabber.h
* CocoaSequenceGrabber
*
* Created by Tim Omernick on 3/18/05.
* Copyright 2005 Tim Omernick. All rights reserved.
*
*/
#import <Cocoa/Cocoa.h>
#import "CSGCamera.h"
#import "CSGImage.h"

11
external/source/osx/isight/Makefile vendored Normal file

@@ -0,0 +1,11 @@
CC=gcc
LDFLAGS=-bundle -framework Cocoa -framework CoreAudioKit -framework Foundation -framework QuartzCore -framework QuickTime
OBJECTS=CSGCamera.o CSGImage.o main.o
BUNDLE=isight.bundle

$(BUNDLE): $(OBJECTS)
	$(CC) $(CFLAGS) -o $(BUNDLE) $(OBJECTS) $(LDFLAGS)

clean:
	rm -f *.o isight.bundle

120
external/source/osx/isight/main.m vendored Normal file

@@ -0,0 +1,120 @@
/**********************************************************************
* NAME
*
* isight -- An injectable bundle to capture an image from the
* attached iSight video camera.
*
* SYNOPSIS
* inject-bundle isight <pid>
* inject-bundle isight <cmd> [ <args> ... ]
* run-bundle isight
*
* DESCRIPTION
* This bundle is meant to be injected into a running or newly
* launched process by inject-bundle. It will capture a single
* image from the iSight video camera and write it back over the
* socket supplied by the injector.
*
* This bundle uses Tim Omernick's CocoaSequenceGrabber code from
* MacFUSE procfs.
*
* LICENSE
* Due to inclusion of GPL-licensed code, this bundle is also
* licensed under the GNU General Public License.
*
**********************************************************************/
#import "CocoaSequenceGrabber.h"
BOOL shouldKeepRunning = YES;
/*
* This delegate handles the didReceiveFrame callback from CSGCamera,
* which we use to convert the image to a JPEG.
*/
@interface CSGCameraDelegate : CSGCamera
{
CFMutableDataRef data;
}
/*
* Assign a CFMutableDataRef to receive JPEG image data
*/
- (void)setDataRef:(CFMutableDataRef)dataRef;
/*
* Convert captured frame into a JPEG datastream, stored in a CFDataRef
*/
- (void)camera:(CSGCamera *)aCamera didReceiveFrame:(CSGImage *)aFrame;
@end
@implementation CSGCameraDelegate
- (void)setDataRef:(CFMutableDataRef)dataRef
{
data = dataRef;
}
- (void)camera:(CSGCamera *)aCamera didReceiveFrame:(CSGImage *)aFrame;
{
// First, we must convert to a TIFF bitmap
NSBitmapImageRep *imageRep =
[NSBitmapImageRep imageRepWithData: [aFrame TIFFRepresentation]];
NSNumber *quality = [NSNumber numberWithFloat: 0.1];
NSDictionary *props =
[NSDictionary dictionaryWithObject:quality
forKey:NSImageCompressionFactor];
// Now convert TIFF bitmap to JPEG compressed image
NSData *jpeg =
[imageRep representationUsingType: NSJPEGFileType properties:props];
// Store JPEG image in a CFDataRef
CFIndex jpegLen = CFDataGetLength((CFDataRef)jpeg);
CFDataSetLength(data, jpegLen);
CFDataReplaceBytes(data, CFRangeMake((CFIndex)0, jpegLen),
CFDataGetBytePtr((CFDataRef)jpeg), jpegLen);
// Stop the camera and signal that we should exit the run loop
[aCamera stop];
shouldKeepRunning = NO;
}
@end
void run(int socket)
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
/*
* Use CocoaSequenceGrabber to capture a single image from the
* iSight camera and store it as a JPEG data stream in picture.
*/
CFMutableDataRef picture = CFDataCreateMutable(NULL, 0);
CSGCameraDelegate *delegate = [[CSGCameraDelegate alloc] init];
[delegate setDataRef:picture];
CSGCamera *camera = [[CSGCamera alloc] init];
[camera setDelegate:delegate];
[camera startWithSize:NSMakeSize(640, 480)];
/*
* Execute RunLoop until global flag is cleared
*/
NSRunLoop *theRL = [NSRunLoop currentRunLoop];
while (shouldKeepRunning && [theRL runMode:NSDefaultRunLoopMode
beforeDate:[NSDate distantFuture]]);
/*
* Write out the picture to the socket
*/
if (socket > 0) {
size_t len = CFDataGetLength(picture);
write(socket, &len, sizeof(len));
write(socket, CFDataGetBytePtr(picture), len);
}
[pool release];
}
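
For reference, a sketch of the receiving end of the wire format used by run() above: the bundle writes a native size_t byte count followed by the raw JPEG bytes. This reader is illustrative only (it is not part of the commit) and assumes both ends share the same architecture, so sizeof(size_t) and byte order match.

#include <stdlib.h>
#include <unistd.h>

/* Read one length-prefixed JPEG from fd, as written by run():
 * a size_t byte count followed by that many bytes of image data.
 * Returns a malloc'd buffer (caller frees) and sets *out_len,
 * or returns NULL on error. */
static unsigned char *read_isight_jpeg(int fd, size_t *out_len)
{
    size_t len = 0;

    if (read(fd, &len, sizeof(len)) != (ssize_t)sizeof(len))
        return NULL;

    unsigned char *buf = malloc(len);
    if (buf == NULL)
        return NULL;

    size_t off = 0;
    while (off < len) {
        ssize_t n = read(fd, buf + off, len - off);
        if (n <= 0) {
            free(buf);
            return NULL;
        }
        off += (size_t)n;
    }

    *out_len = len;
    return buf;
}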