
Save an OpenGL-drawn item as an image

I downloaded the GLPaint sample code from Apple's developer website; it draws onto a canvas using OpenGL ES.

I have made a number of changes to the GLPaint application to meet my requirements. Now I would like to save the drawn items to the photo library as an image.

I already know how to save an image to the photo library, so I tried to create a corresponding image file once the drawing was done. Do you know a good way to do this? Any advice would be much appreciated.

The code details are below.

PaintingView.h

EAGLContext *context; 

// OpenGL names for the renderbuffer and framebuffers used to render to this view 
GLuint viewRenderbuffer, viewFramebuffer; 

// OpenGL name for the depth buffer that is attached to viewFramebuffer, if it exists (0 if it does not exist) 
GLuint depthRenderbuffer; 

GLuint brushTexture; 
CGPoint location; 
CGPoint previousLocation; 

PaintingView.m

// Handles the start of a touch 
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event 
{ 
    CGRect    bounds = [self bounds]; 
    UITouch* touch = [[event touchesForView:self] anyObject]; 
    firstTouch = YES; 
    // Convert touch point from UIView referential to OpenGL one (upside-down flip) 
    location = [touch locationInView:self]; 
    location.y = bounds.size.height - location.y; 
} 

// Handles the continuation of a touch. 
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event 
{ 

    CGRect    bounds = [self bounds]; 
    UITouch*   touch = [[event touchesForView:self] anyObject]; 

    // Convert touch point from UIView referential to OpenGL one (upside-down flip) 
    if (firstTouch) { 
     firstTouch = NO; 
     previousLocation = [touch previousLocationInView:self]; 
     previousLocation.y = bounds.size.height - previousLocation.y; 
    } else { 
     location = [touch locationInView:self]; 
     location.y = bounds.size.height - location.y; 
     previousLocation = [touch previousLocationInView:self]; 
     previousLocation.y = bounds.size.height - previousLocation.y; 
    } 

    // Render the stroke 
    [self renderLineFromPoint:previousLocation toPoint:location]; 
} 

// Handles the end of a touch event when the touch is a tap. 
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event 
{ 
    CGRect    bounds = [self bounds]; 
    UITouch* touch = [[event touchesForView:self] anyObject]; 
    if (firstTouch) { 
     firstTouch = NO; 
     previousLocation = [touch previousLocationInView:self]; 
     previousLocation.y = bounds.size.height - previousLocation.y; 
     [self renderLineFromPoint:previousLocation toPoint:location]; 
    } 
} 



// Draws a line onscreen based on where the user touches 
- (void) renderLineFromPoint:(CGPoint)start toPoint:(CGPoint)end 
{ 
    static GLfloat*  vertexBuffer = NULL; 
    static NSUInteger vertexMax = 64; 
    NSUInteger   vertexCount = 0, 
         count, 
         i; 

    [EAGLContext setCurrentContext:context]; 
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer); 

    // Convert locations from Points to Pixels 
    CGFloat scale = self.contentScaleFactor; 
    start.x *= scale; 
    start.y *= scale; 
    end.x *= scale; 
    end.y *= scale; 

    // Allocate vertex array buffer 
    if(vertexBuffer == NULL) 
     vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat)); 

    // Add points to the buffer so there are drawing points every X pixels 
    count = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y))/kBrushPixelStep), 1); 
    for(i = 0; i < count; ++i) { 
     if(vertexCount == vertexMax) { 
      vertexMax = 2 * vertexMax; 
      vertexBuffer = realloc(vertexBuffer, vertexMax * 2 * sizeof(GLfloat)); 
     } 

     vertexBuffer[2 * vertexCount + 0] = start.x + (end.x - start.x) * ((GLfloat)i/(GLfloat)count); 
     vertexBuffer[2 * vertexCount + 1] = start.y + (end.y - start.y) * ((GLfloat)i/(GLfloat)count); 
     vertexCount += 1; 
    } 

    // Render the vertex array 
    glVertexPointer(2, GL_FLOAT, 0, vertexBuffer); 
    glDrawArrays(GL_POINTS, 0, vertexCount); 

    // Display the buffer 
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer); 
    [context presentRenderbuffer:GL_RENDERBUFFER_OES]; 
} 


// Erases the screen 
- (void) erase 
{ 
    [EAGLContext setCurrentContext:context]; 

    // Clear the buffer 
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer); 
    glClearColor(0.0, 0.0, 0.0, 0.0); 
    glClear(GL_COLOR_BUFFER_BIT); 

    // Display the buffer 
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer); 
    [context presentRenderbuffer:GL_RENDERBUFFER_OES]; 
} 



// The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder: 
- (id)initWithCoder:(NSCoder*)coder { 


    CGImageRef  brushImage; 
    CGContextRef brushContext; 
    GLubyte   *brushData; 
    size_t   width, height; 

    if ((self = [super initWithCoder:coder])) { 
     CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer; 

     eaglLayer.opaque = YES; 
     // In this application, we want to retain the EAGLDrawable contents after a call to presentRenderbuffer. 
     eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys: 
             [NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil]; 

     context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1]; 

     if (!context || ![EAGLContext setCurrentContext:context]) { 
      [self release]; 
      return nil; 
     } 

     // Create a texture from an image 
     // First create a UIImage object from the data in an image file, and then extract the Core Graphics image 
     brushImage = [UIImage imageNamed:@"Particle.png"].CGImage; 

     // Get the width and height of the image 
     width = CGImageGetWidth(brushImage); 
     height = CGImageGetHeight(brushImage); 

     // Texture dimensions must be a power of 2. If you write an application that allows users to supply an image, 
     // you'll want to add code that checks the dimensions and takes appropriate action if they are not a power of 2. 

     // Make sure the image exists 
     if(brushImage) { 
      // Allocate memory needed for the bitmap context 
      brushData = (GLubyte *) calloc(width * height * 4, sizeof(GLubyte)); 
      // Use the bitmap creation function provided by the Core Graphics framework. 
      brushContext = CGBitmapContextCreate(brushData, width, height, 8, width * 4, CGImageGetColorSpace(brushImage), kCGImageAlphaPremultipliedLast); 
      // After you create the context, you can draw the image to the context. 
      CGContextDrawImage(brushContext, CGRectMake(0.0, 0.0, (CGFloat)width, (CGFloat)height), brushImage); 
      // You don't need the context at this point, so you need to release it to avoid memory leaks. 
      CGContextRelease(brushContext); 
      // Use OpenGL ES to generate a name for the texture. 
      glGenTextures(1, &brushTexture); 
      // Bind the texture name. 
      glBindTexture(GL_TEXTURE_2D, brushTexture); 
      // Set the texture parameters to use a minifying filter and a linear filter (weighted average) 
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
      // Specify a 2D texture image, providing a pointer to the image data in memory 
      glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, brushData); 
      // Release the image data; it's no longer needed 
      free(brushData); 
     } 

     // Set the view's scale factor 
     self.contentScaleFactor = 1.0; 

     // Setup OpenGL states 
     glMatrixMode(GL_PROJECTION); 
     CGRect frame = self.bounds; 
     CGFloat scale = self.contentScaleFactor; 
     // Setup the view port in Pixels 
     glOrthof(0, frame.size.width * scale, 0, frame.size.height * scale, -1, 1); 
     glViewport(0, 0, frame.size.width * scale, frame.size.height * scale); 
     glMatrixMode(GL_MODELVIEW); 

     glDisable(GL_DITHER); 
     glEnable(GL_TEXTURE_2D); 
     glEnableClientState(GL_VERTEX_ARRAY); 

     glEnable(GL_BLEND); 
     // Set a blending function appropriate for premultiplied alpha pixel data 
     glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA); 

     glEnable(GL_POINT_SPRITE_OES); 
     glTexEnvf(GL_POINT_SPRITE_OES, GL_COORD_REPLACE_OES, GL_TRUE); 
     glPointSize(width/kBrushScale); 

     // Make sure to start with a cleared buffer 
     needsErase = YES; 



    } 

    return self; 
} 

AppDelegate.h


PaintingWindow  *window; // a UIWindow subclass. 
PaintingView  *drawingView; 

@property (nonatomic, retain) IBOutlet PaintingWindow *window; 
@property (nonatomic, retain) IBOutlet PaintingView *drawingView; 

@synthesize window; 
@synthesize drawingView; 

AppDelegate.m

- (void) applicationDidFinishLaunching:(UIApplication*)application 
{ 
    CGRect     rect = [[UIScreen mainScreen] applicationFrame]; 
    CGFloat     components[3]; 

    // Create a segmented control so that the user can choose the brush color. 
    UISegmentedControl *segmentedControl = [[UISegmentedControl alloc] initWithItems: 
              [NSArray arrayWithObjects: 
               [UIImage imageNamed:@"Red.png"], 
               [UIImage imageNamed:@"Yellow.png"], 
               [UIImage imageNamed:@"Green.png"], 
               [UIImage imageNamed:@"Blue.png"], 
               [UIImage imageNamed:@"Purple.png"], 
               nil]]; 

    // Compute a rectangle that is positioned correctly for the segmented control you'll use as a brush color palette 
    //CGRect frame = CGRectMake(rect.origin.x + kLeftMargin, rect.size.height - kPaletteHeight - kTopMargin, rect.size.width - (kLeftMargin + kRightMargin), kPaletteHeight); 
    CGRect frame = CGRectMake(50, 22, (rect.size.width - (kLeftMargin + kRightMargin)) - 20, kPaletteHeight); 
    segmentedControl.frame = frame; 
    // When the user chooses a color, the method changeBrushColor: is called. 
    [segmentedControl addTarget:self action:@selector(changeBrushColor:) forControlEvents:UIControlEventValueChanged]; 
    segmentedControl.segmentedControlStyle = UISegmentedControlStyleBar; 
    // Make sure the color of the control complements the black background 
    segmentedControl.tintColor = [UIColor darkGrayColor]; 
    // Set the third color (index values start at 0) 
    segmentedControl.selectedSegmentIndex = 2; 

    // Add the control to the window 
    [window addSubview:segmentedControl]; 
    // Now that the control is added, you can release it 
    [segmentedControl release]; 


    [self addBackgroundSegmentControll]; 

    // Define a starting color 
    HSL2RGB((CGFloat) 2.0/(CGFloat)kPaletteSize, kSaturation, kLuminosity, &components[0], &components[1], &components[2]); 
    // Defer to the OpenGL view to set the brush color 
    [drawingView setBrushColorWithRed:components[0] green:components[1] blue:components[2]]; 

    // Look in the Info.plist file and you'll see the status bar is hidden 
    // Set the style to black so it matches the background of the application 
    [application setStatusBarStyle:UIStatusBarStyleBlackTranslucent animated:NO]; 
    // Now show the status bar, but animate to the style. 
    [application setStatusBarHidden:NO withAnimation:YES]; 

    // Load the sounds 
    NSBundle *mainBundle = [NSBundle mainBundle]; 
    erasingSound = [[SoundEffect alloc] initWithContentsOfFile:[mainBundle pathForResource:@"Erase" ofType:@"caf"]]; 
    selectSound = [[SoundEffect alloc] initWithContentsOfFile:[mainBundle pathForResource:@"Select" ofType:@"caf"]]; 

    [window setFrame:CGRectMake(0, 0, 768, 1024)]; 
    drawingView.frame = CGRectMake(0, 0, 768, 1024); 

    // Erase the view when receiving a notification named "shake" from the NSNotificationCenter object 
    // The "shake" notification is posted by the PaintingWindow object when the user shakes the device 
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(eraseView) name:@"shake" object:nil]; 
} 

Answers

Answer (score 4):

To save an OpenGL-drawn item as an image in the photo library, see the link below.

Save an OpenGL drawn item as an image

Code details:

Write the code below, and call [self captureToPhotoAlbum]; once the drawing is finished.

-(void)captureToPhotoAlbum { 

    UIImage *image = [self glToUIImage]; 
    UIImageWriteToSavedPhotosAlbum(image, self, nil, nil); 
} 

// Release callback for the data provider below: frees the pixel buffer once
// Core Graphics is done with it, so the image data does not leak.
static void releasePixelData(void *info, const void *data, size_t size)
{
    free((void *)data);
}

- (UIImage *)glToUIImage { 

    NSInteger myDataLength = 320 * 480 * 4; 

    // allocate array and read pixels into it. 
    GLubyte *buffer = (GLubyte *) malloc(myDataLength); 
    glReadPixels(0, 0, 320, 480, GL_RGBA, GL_UNSIGNED_BYTE, buffer); 

    // gl renders "upside down" so swap top to bottom into new array. 
    // there's gotta be a better way, but this works. 
    GLubyte *buffer2 = (GLubyte *) malloc(myDataLength); 
    for(int y = 0; y < 480; y++) 
    { 
        for(int x = 0; x < 320 * 4; x++) 
        { 
            buffer2[(479 - y) * 320 * 4 + x] = buffer[y * 4 * 320 + x]; 
        } 
    } 
    // the first buffer has been copied and is no longer needed. 
    free(buffer); 

    // make data provider with data; releasePixelData frees buffer2 later. 
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, releasePixelData); 

    // prep the ingredients 
    int bitsPerComponent = 8; 
    int bitsPerPixel = 32; 
    int bytesPerRow = 4 * 320; 
    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB(); 
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault; 
    CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault; 

    // make the cgimage 
    CGImageRef imageRef = CGImageCreate(320, 480, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent); 

    // then make the uiimage from that, releasing the intermediate objects 
    // (the UIImage retains what it still needs). 
    UIImage *myImage = [UIImage imageWithCGImage:imageRef]; 
    CGImageRelease(imageRef); 
    CGColorSpaceRelease(colorSpaceRef); 
    CGDataProviderRelease(provider); 
    return myImage; 
} 
  • To fix scaling problems on the iPad, change every width from 320 to 640, every height from 480 to 960, and so on. 
  • Scale the width and height up to the maximum values that match your display's scaling. 
  • Manage the memory (free the buffers when you are done). 

Thanks.
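
As a side note, UIImageWriteToSavedPhotosAlbum can also report whether the save actually succeeded through a completion selector. A small sketch of that variant (the wiring below is an illustration, not part of the answer above; the selector signature is the one UIKit requires):

-(void)captureToPhotoAlbumWithFeedback { 
    UIImage *image = [self glToUIImage]; 
    UIImageWriteToSavedPhotosAlbum(image, self, @selector(image:didFinishSavingWithError:contextInfo:), NULL); 
} 

// Called by UIKit once the write has finished; a non-nil error means it failed. 
- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo { 
    if (error != nil) { 
        NSLog(@"Saving to the photo album failed: %@", error); 
    } 
} 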

Comment: I have tested your code, but it gives me a black image, whereas it works fine in the simulator. – mandeep

Comment: This works, but it is more efficient to convert the image at the end rather than playing with a second pixel buffer (for me it went from 3 fps to 60 fps). – bprzemyslaw
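
One way to realize that suggestion (a sketch, assuming imageRef was created directly from the glReadPixels buffer, without the row-swap loop): UIImage can flip the pixels at draw time via its orientation flag, so no second pixel buffer is needed.

// UIImageOrientationDownMirrored is a 180-degree rotation plus a horizontal 
// mirror, which amounts to the vertical flip the GL read-back needs. 
UIImage *flipped = [UIImage imageWithCGImage:imageRef 
                                       scale:1.0 
                                 orientation:UIImageOrientationDownMirrored]; 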

Answer (score 2):

If the iPhone supports it, you can read back from the OpenGL context using glReadPixels. After that, you can create something like a UIImage from the pixel data you read, and then save it to the photo library just like any other image created by your application.
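
For reference, a minimal sketch of that read-back step (ReadFramebufferPixels is a hypothetical helper name; the detailed answers in this thread show the full conversion to a UIImage):

#import <OpenGLES/ES1/gl.h> 
#include <stdlib.h> 

// Reads back the currently bound framebuffer, assumed to be width x height 
// pixels. The caller owns the returned RGBA buffer and must free() it. 
static GLubyte *ReadFramebufferPixels(GLint width, GLint height) 
{ 
    GLubyte *pixels = (GLubyte *)malloc((size_t)width * (size_t)height * 4); 
    if (pixels != NULL) { 
        glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, pixels); 
    } 
    return pixels; 
} 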

Comment: You are absolutely right. I have added a detailed answer below. Sorry, I don't have the privilege to upvote this good information. Thanks again for your expert and effective help. –

Answer (score 0):

This code worked perfectly on iOS 5.0, built with Xcode 4.0. With Xcode 4.5 and the iOS 6.0 developer preview, the code no longer works correctly: an image is still saved to "My Photos" at my chosen resolution, but it is a black image!

I guess that something changed for Xcode programmers in

glReadPixels(0, 0, 320, 480, GL_RGBA, GL_UNSIGNED_BYTE, buffer); 

or in

CGImageRef imageRef = CGImageCreate(320, 480, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent); 

UIImage *myImage = [UIImage imageWithCGImage:imageRef]; 

I suspect the problem is not with 320, 480, or any of the other resolution variables; with Xcode 4.0 it worked perfectly and fast, even at Retina resolutions.
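
One commonly reported cause (an assumption here, not something confirmed in this thread) is the order of operations: on iOS 6 the drawable's contents are discarded more aggressively, so glReadPixels has to run while the framebuffer is bound and before -presentRenderbuffer: is called, with kEAGLDrawablePropertyRetainedBacking set to YES. A sketch of that ordering:

// Hypothetical fix: make the context current, bind the framebuffer, and read 
// the pixels *before* the renderbuffer is presented. 
[EAGLContext setCurrentContext:context]; 
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer); 
glReadPixels(0, 0, 320, 480, GL_RGBA, GL_UNSIGNED_BYTE, buffer); 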

Answer (score 4):

An improved version of Ramshad's answer: this one has no memory leaks and works on newer versions of iOS, with different view sizes, and on both Retina and non-Retina displays.

// A possible wrapper for the snippet (the method name and the 'view' parameter 
// are assumptions; pass the OpenGL-backed view to capture). Assumes the view's 
// GL context is current and its framebuffer is bound when this is called. 
- (UIImage *)imageFromGLView:(UIView *)view 
{ 
    CGFloat scale = [[UIScreen mainScreen] scale]; // use nativeScale on iOS 8.0+ 

    CGSize imageSize = CGSizeMake((scale * view.frame.size.width), (scale * view.frame.size.height)); 
    NSUInteger length = imageSize.width * imageSize.height * 4; 

    GLubyte *buffer = (GLubyte *)malloc(length * sizeof(GLubyte)); 

    if (buffer == NULL) 
        return nil; 

    glReadPixels(0, 0, imageSize.width, imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, buffer); 

    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer, length, NULL); 

    int bitsPerComponent = 8; 
    int bitsPerPixel = 32; 
    int bytesPerRow = 4 * imageSize.width; 

    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB(); 
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault; 
    CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault; 

    CGImageRef imageRef = CGImageCreate(imageSize.width, imageSize.height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent); 

    // Drawing the (bottom-up) GL pixels through a UIKit image context flips 
    // them the right way up, so no manual row swap is needed. 
    UIGraphicsBeginImageContext(imageSize); 
    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0.0, 0.0, imageSize.width, imageSize.height), imageRef); 
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); 
    UIGraphicsEndImageContext(); 

    CGImageRelease(imageRef); 
    CGColorSpaceRelease(colorSpaceRef); 
    CGDataProviderRelease(provider); 
    free(buffer); 

    return image; 
} 
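
A possible call site for the wrapper above, with drawingView being the painting view from the question (passing nil/NULL skips the completion callback):

UIImage *snapshot = [self imageFromGLView:drawingView]; 
if (snapshot != nil) { 
    UIImageWriteToSavedPhotosAlbum(snapshot, nil, NULL, NULL); 
} 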
Comment: These CGBitmapInfo settings give a black color instead of transparency. I changed 'CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;' to 'CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Big | kCGImageAlphaLast;' and got transparency where it should be. – alc77