
Commits

zeroth committed bbc6278

Renaming work; added support for misc. surfaces. Unfortunately, due to
threading issues, it's not very effective yet: no image gets displayed,
for reasons still unknown.

  • Parent commits 9d2d207
  • Branches tylerthemovie
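
For context, a rough sketch of how the renamed module is meant to be driven from Python once it builds. The filename and window size below are placeholders, and per the message above the surface-backed path does not actually display anything yet:

    import pygame
    import pygame._movie as _movie   # formerly pygame.gmovie

    pygame.init()
    screen = pygame.display.set_mode((640, 480))

    # Movie(filename[, surface]): the optional second argument is the new
    # target-surface support added in this commit; leaving it out keeps
    # the old SDL overlay path.
    movie = _movie.Movie("some_video.mpg", screen)

    print movie.width, movie.height   # getsets exposed by the Movie type
    movie.play()                      # plays once; a negative loop count
                                      # would loop forever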


Files changed (7)

File Setup.in

 _numericsurfarray src/_numericsurfarray.c $(SDL) $(DEBUG)
 _numericsndarray src/_numericsndarray.c $(SDL) $(MIXER) $(DEBUG)
 movie src/movie.c $(SDL) $(SMPEG) $(DEBUG)
-gmovie src/_gsound.c src/_gmovie.c src/gmovie.c $(SDL) $(FFMPEG) $(DEBUG)
+_movie src/_gsound.c src/_gmovie.c src/gmovie.c $(SDL) $(FFMPEG) $(DEBUG)
 scrap src/scrap.c $(SDL) $(SCRAP) $(DEBUG)
 _camera src/_camera.c src/camera_v4l2.c src/camera_v4l.c $(SDL) $(DEBUG)
 pypm src/pypm.c $(SDL) $(PORTMIDI) $(PORTTIME) $(DEBUG)

File config_unix.py

 class DebugDependency(Dependency):
     def __init__(self, name, checkhead, checklib, libs):
         Dependency.__init__(self, name, checkhead, checklib, libs)
-        self.cflags="-Xcompiler -ggdb -Xlinker -fPIC"
+        self.cflags=""
 
 class DependencyPython:
     def __init__(self, name, module, header):
         Dependency('PORTMIDI', 'portmidi.h', 'libportmidi.so', ['portmidi']),
         Dependency('PORTTIME', 'porttime.h', 'libporttime.so', ['porttime']),
         DebugDependency('FFMPEG', 'libavformat/avformat.h', 'libavformat.a', ['avformat', 'avdevice', 'avcodec',  'avutil', 'swscale', 'SDL_mixer']),        
-        #Dependency('FFMPEG', 'ffmpeg/avformat.h',  'libavformat.so', ['avformat']) ,
-        #Dependency('FFMPEG', 'ffmpeg/avstring.h',  'libavstring.so', ['avstring']) ,
-        #Dependency('FFMPEG', 'ffmpeg/swscale.h',  'libswscale.so', ['swscale']) ,
-        #Dependency('FFMPEG', 'ffmpeg/avcodec.h',  'libavcodec.so', ['avcodec']) ,
-        #Dependency('FFMPEG', 'libavdevice/avdevice.h',  'libavdevice.so', ['avdevice']) ,
         #Dependency('GFX', 'SDL_gfxPrimitives.h', 'libSDL_gfx.so', ['SDL_gfx']),
     ]
     if not DEPS[0].found:

File src/_gmovie.c

 
         if (dstx & 1) {
             YUVA_IN(y, u, v, a, p, pal);
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
-            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
-            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a >> 2, cb[0], u, 0);
+            cr[0] = _ALPHA_BLEND(a >> 2, cr[0], v, 0);
             cb++;
             cr++;
             lum++;
             u1 = u;
             v1 = v;
             a1 = a;
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
 
             YUVA_IN(y, u, v, a, p + BPP, pal);
             u1 += u;
             v1 += v;
             a1 += a;
-            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
-            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
-            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
+            lum[1] = _ALPHA_BLEND(a, lum[1], y, 0);
+            cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
+            cr[0] = _ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
             cb++;
             cr++;
             p += 2 * BPP;
         }
         if (w) {
             YUVA_IN(y, u, v, a, p, pal);
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
-            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
-            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a >> 2, cb[0], u, 0);
+            cr[0] = _ALPHA_BLEND(a >> 2, cr[0], v, 0);
             p++;
             lum++;
         }
             u1 = u;
             v1 = v;
             a1 = a;
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
             p += wrap3;
             lum += wrap;
             YUVA_IN(y, u, v, a, p, pal);
             u1 += u;
             v1 += v;
             a1 += a;
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
-            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
-            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
+            cr[0] = _ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
             cb++;
             cr++;
             p += -wrap3 + BPP;
             u1 = u;
             v1 = v;
             a1 = a;
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
 
             YUVA_IN(y, u, v, a, p + BPP, pal);
             u1 += u;
             v1 += v;
             a1 += a;
-            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
+            lum[1] = _ALPHA_BLEND(a, lum[1], y, 0);
             p += wrap3;
             lum += wrap;
 
             u1 += u;
             v1 += v;
             a1 += a;
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
 
             YUVA_IN(y, u, v, a, p + BPP, pal);
             u1 += u;
             v1 += v;
             a1 += a;
-            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
+            lum[1] = _ALPHA_BLEND(a, lum[1], y, 0);
 
-            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
-            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
+            cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
+            cr[0] = _ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
 
             cb++;
             cr++;
             u1 = u;
             v1 = v;
             a1 = a;
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
             p += wrap3;
             lum += wrap;
             YUVA_IN(y, u, v, a, p, pal);
             u1 += u;
             v1 += v;
             a1 += a;
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
-            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
-            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
+            cr[0] = _ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
             cb++;
             cr++;
             p += -wrap3 + BPP;
 
         if (dstx & 1) {
             YUVA_IN(y, u, v, a, p, pal);
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
-            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
-            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a >> 2, cb[0], u, 0);
+            cr[0] = _ALPHA_BLEND(a >> 2, cr[0], v, 0);
             cb++;
             cr++;
             lum++;
             u1 = u;
             v1 = v;
             a1 = a;
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
 
             YUVA_IN(y, u, v, a, p + BPP, pal);
             u1 += u;
             v1 += v;
             a1 += a;
-            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
-            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
-            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
+            lum[1] = _ALPHA_BLEND(a, lum[1], y, 0);
+            cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
+            cr[0] = _ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
             cb++;
             cr++;
             p += 2 * BPP;
         }
         if (w) {
             YUVA_IN(y, u, v, a, p, pal);
-            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
-            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
-            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a >> 2, cb[0], u, 0);
+            cr[0] = _ALPHA_BLEND(a >> 2, cr[0], v, 0);
         }
     }
 }
 }
 
 
-void ConvertYUV420PtoRGBA( AVFrame *YUV420P, SDL_Surface *OUTPUT, int interlaced ) {
+void ConvertYUV420PtoRGBA( AVPicture *YUV420P, SDL_Surface *OUTPUT, int interlaced ) {
 
     uint8_t *Y, *U, *V;
 	uint32_t *RGBA = OUTPUT->pixels;
     int i;
     vp = &is->pictq[is->pictq_rindex];
     vp->ready =0;
-    if (vp->dest_overlay) {
+    if (vp->dest_overlay && vp->overlay>0) {
         /* XXX: use variable in the frame */
         if (is->video_st->sample_aspect_ratio.num)
             aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
         }
         
     } 
+    else if(vp->dest_surface && vp->overlay<=0)
+    {
+    	/* XXX: use variable in the frame */
+        if (is->video_st->sample_aspect_ratio.num)
+            aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
+        else if (is->video_st->codec->sample_aspect_ratio.num)
+            aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio);
+        else
+            aspect_ratio = 0;
+        if (aspect_ratio <= 0.0)
+            aspect_ratio = 1.0;
+        aspect_ratio *= (float)is->video_st->codec->width / is->video_st->codec->height;
+        /* if an active format is indicated, then it overrides the
+           mpeg format */
+    	
+    	height = vp->height;
+        width = ((int)rint(height * aspect_ratio)) & ~1;
+        if (width > vp->width) {
+            width = vp->width;
+            height = ((int)rint(width / aspect_ratio)) & ~1;
+        }
+        x = (vp->width - width) / 2;
+        y = (vp->height - height) / 2;
+       
+        vp->dest_rect.x = vp->xleft + x;
+        vp->dest_rect.y = vp->ytop  + y;
+        vp->dest_rect.w = width;
+        vp->dest_rect.h = height;
+        //GRABGIL
+        //PySys_WriteStdout("Just before blitting...\n");
+        //pygame_Blit (vp->dest_surface, &vp->dest_rect,
+        //     is->canon_surf, &vp->dest_rect, 0);
+    	SDL_BlitSurface(vp->dest_surface, &vp->dest_rect, is->canon_surf, &vp->dest_rect);
+    	//PySys_WriteStdout("After blitting...\n");
+    	//RELEASEGIL
+    }
     is->pictq_rindex= (is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE;
     is->pictq_size--;
     video_refresh_timer(is);
         }
         vp->overlay = is->overlay;
     } 
+    else if (!vp->dest_surface && is->overlay<=0)
+    {
+    	//now we have to open an overlay up
+        SDL_Surface *screen = is->canon_surf;
+        if (!SDL_WasInit (SDL_INIT_VIDEO))
+        {
+        	GRABGIL
+        	RAISE(PyExc_SDLError,"cannot create surfaces without pygame.display initialized");
+        	Py_DECREF(is);
+        	RELEASEGIL
+        	return -1;
+        }
+        if (!screen)
+		{
+			GRABGIL
+        	RAISE(PyExc_SDLError, "No video surface given."); //ideally this should have 
+        	Py_DECREF(is);									  //been caught at init, but this could feasibly 
+        	RELEASEGIL										  // happen if there's some cleaning up.
+        	return -1;	
+		}
+        vp->dest_surface = SDL_CreateRGBSurface(screen->flags, 
+        										screen->w, 
+        										screen->h, 
+        										screen->format->BitsPerPixel, 
+        										screen->format->Rmask, 
+        										screen->format->Gmask, 
+        										screen->format->Bmask, 
+        										screen->format->Amask);
+        if (!vp->dest_surface)
+        {
+        	GRABGIL
+            RAISE (PyExc_SDLError, "Cannot create new surface.");
+			Py_DECREF(is);
+			RELEASEGIL
+			return -1;
+        }
+        vp->overlay = is->overlay;
+    }
 
     is->width = w;
     vp->width = w;
     	   /* get a pointer on the bitmap */
         
         dst_pix_fmt = PIX_FMT_YUV420P;
-           
+
+		avpicture_alloc(&pict, dst_pix_fmt, vp->width, vp->height);
         SDL_LockSurface(vp->dest_surface);
-		ConvertYUV420PtoRGBA(src_frame, vp->dest_surface, 0 );
+		int sws_flags = SWS_BICUBIC;
+        img_convert_ctx = sws_getCachedContext(img_convert_ctx,
+            								   movie->video_st->codec->width, 
+            								   movie->video_st->codec->height,
+            								   movie->video_st->codec->pix_fmt,
+            								   vp->width,
+            								   vp->height,
+            								   dst_pix_fmt, 
+            								   sws_flags, 
+            								   NULL, NULL, NULL);
+        if (img_convert_ctx == NULL) {
+            fprintf(stderr, "Cannot initialize the conversion context\n");
+            exit(1);
+        }
+        movie->img_convert_ctx = img_convert_ctx;
+        sws_scale(img_convert_ctx, src_frame->data, src_frame->linesize,
+                  0, movie->video_st->codec->height, pict.data, pict.linesize);
 
-        SDL_UnlockSurface(vp->dest_surface);
-
+		ConvertYUV420PtoRGBA(&pict,vp->dest_surface, src_frame->interlaced_frame );
+		SDL_UnlockSurface(vp->dest_surface);
         vp->pts = movie->pts;  
     	movie->pictq_windex = (movie->pictq_windex+1)%VIDEO_PICTURE_QUEUE_SIZE;
 		movie->pictq_size++;
 		vp->ready=1;
+		avpicture_free(&pict);
     }	
 	GRABGIL
 	Py_DECREF(movie);
     int err, i, ret, video_index, audio_index, subtitle_index;
     AVFormatParameters params, *ap = &params;
     
-	is->overlay=1;
+	//is->overlay=1;
     av_strlcpy(is->filename, filename, strlen(filename)+1);
     is->iformat = iformat;
 
 	}
 	while((movie->loops>-1||eternity) )
 	{
-		/*GRABGIL
-		PySys_WriteStdout("Loops: %i\n", movie->loops);
-		RELEASEGIL*/
 		movie->loops--;
 		movie=stream_open(movie, movie->filename, NULL);
-		/*if(movie->audio_st)
-			movie->audio_tid = SDL_CreateThread(audio_thread, movie);*/
 		movie->paused=0;
 		state =decoder(movie);
 		if(movie->video_st)
             (is->videoq.size > MAX_VIDEOQ_SIZE )||
             (is->subtitleq.size > MAX_SUBTITLEQ_SIZE)) {
             /* wait 10 ms */
-            if(is->video_st)
+            if(is->videoq.size > MAX_VIDEOQ_SIZE && is->video_st)
             	video_render(is);
-            if(is->audio_st)
+            if(is->audioq.size > MAX_AUDIOQ_SIZE && is->audio_st)
 	            audio_thread(is);
             SDL_Delay(10);
             continue;
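
The new dest_surface branch above computes a letterboxed destination rectangle from the display aspect ratio before blitting onto canon_surf. A small Python transcription of that math, with an illustrative function name and sample values that are not part of the commit:

    # Fit a frame into a vp_w x vp_h viewport, preserving the display
    # aspect ratio and forcing both dimensions to even values (the & ~1).
    def dest_rect(vp_w, vp_h, xleft, ytop, display_aspect):
        height = vp_h
        width = int(round(height * display_aspect)) & ~1
        if width > vp_w:
            width = vp_w
            height = int(round(width / display_aspect)) & ~1
        x = (vp_w - width) // 2      # centre horizontally
        y = (vp_h - height) // 2     # centre vertically
        return (xleft + x, ytop + y, width, height)

    # A 16:9 frame in a 640x480 viewport is letterboxed to 640x360:
    assert dest_rect(640, 480, 0, 0, 16.0 / 9.0) == (0, 60, 640, 360)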

File src/_gmovie.h

 (((FIX(0.50000*224.0/255.0) * r1 - FIX(0.41869*224.0/255.0) * g1 -           \
    FIX(0.08131*224.0/255.0) * b1 + (ONE_HALF << shift) - 1) >> (SCALEBITS + shift)) + 128)
 
-#define ALPHA_BLEND(a, oldp, newp, s)\
+#define _ALPHA_BLEND(a, oldp, newp, s)\
 ((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
 
 #define RGBA_IN(r, g, b, a, s)\
 	int av_sync_type;
 	AVFormatContext *ic;    /* context information about the format of the video file */
 	int stop;
-	
+	SDL_Surface *canon_surf;	
 	
 	/* Seek-info */
     int seek_req;
 /* 		Misc*/
 void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh);
 void free_subpicture(SubPicture *sp);
-void ConvertYUV420PtoRGBA( AVFrame *YUV420P, SDL_Surface *OUTPUT, int interlaced );
+void ConvertYUV420PtoRGBA( AVPicture *YUV420P, SDL_Surface *OUTPUT, int interlaced );
 void initializeLookupTables(void);
 /* 		Video Management */
 int video_open(PyMovie *is, int index);
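
The rename above only adds a leading underscore; the blend arithmetic is unchanged. As a worked illustration of what the macro computes (a hypothetical Python helper, not part of the tree):

    # _ALPHA_BLEND(a, oldp, newp, s) from src/_gmovie.h, transcribed:
    def _alpha_blend(a, oldp, newp, s):
        return ((oldp << s) * (255 - a) + newp * a) // (255 << s)

    assert _alpha_blend(255, 10, 200, 0) == 200   # fully opaque: take newp
    assert _alpha_blend(0, 10, 200, 0) == 10      # transparent: keep oldp
    # s > 0 is used when newp is a sum over 2**s subsampled chroma values,
    # e.g. cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u1, 1) with u1 a sum of
    # two chroma samples:
    assert _alpha_blend(255, 10, 100 + 100, 1) == 100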

File src/gmovie.c

 	}
 	else
 	{
+		PySys_WriteStdout("Found a surface...\n");
 		self->overlay = 0;
-		self->dest_surface=surf;
+		self->canon_surf=surf;
 	}
 	self->start_time = AV_NOPTS_VALUE;
 	self=stream_open(self, filename, NULL);
 {
 	Py_INCREF(self);
 	const char *c;
-	if (!PyArg_ParseTuple (args, "s", &c))
+	PyObject *surf;
+	if (!PyArg_ParseTuple (args, "s|O", &c, &surf))
     {
         PyErr_SetString(PyExc_TypeError, "No valid arguments");
     	return -1;
-    }	
-	self = _movie_init_internal(self, c, NULL);
+    }
+    PySys_WriteStdout("Value of surf: %i\n", surf ? 1 : 0);
+    PySys_WriteStdout("Value of PySurface_Check(surf): %i\n", PySurface_Check(surf));
+    
+    if(surf)
+    {
+    	PySys_WriteStdout("Found a valid surface...\n");
+    	SDL_Surface *target = PySurface_AsSurface(surf);
+    	self= _movie_init_internal((PyMovie *)self, c, target);	
+    }
+    else
+    {
+    	PySys_WriteStdout("Did not find a surface... wonder why?\n");
+		self = _movie_init_internal((PyMovie *)self, c, NULL);
+    }
 	PyObject *er;
     er = PyErr_Occurred();
     Py_XINCREF(er);
     return pyo;
 }
 
+PyObject *_movie_get_surface(PyMovie *movie, void *closure)
+{
+	if(movie->canon_surf)
+	{
+		return (PyObject *)PySurface_New(movie->canon_surf);
+	}
+	Py_RETURN_NONE;
+}
+
+int _movie_set_surface(PyObject *mov, PyObject *surface, void *closure)
+{
+	PyMovie *movie = (PyMovie *)mov;
+	if(movie->canon_surf)
+	{
+		SDL_FreeSurface(movie->canon_surf);	
+	}
+	//PySurface_Check doesn't really work right for some reason... so we skip it for now.
+	movie->canon_surf=PySurface_AsSurface(surface);
+	movie->overlay=0;
+	return 1;
+}
+
  static PyMethodDef _movie_methods[] = {
    { "play",    (PyCFunction) _movie_play, METH_VARARGS,
                "Play the movie file from current time-mark. If loop<0, then it will loop infinitely. If there is no loop value, then it will play once." },
 
  static PyGetSetDef _movie_getsets[] =
 {
-    { "paused", (getter) _movie_get_paused, NULL, NULL, NULL },
-    { "playing", (getter) _movie_get_playing, NULL, NULL, NULL },
-    { "height", (getter) _movie_get_height, NULL, NULL, NULL },
-    { "width", (getter) _movie_get_width, NULL, NULL, NULL },
-    { NULL, NULL, NULL, NULL, NULL }
+    { "paused",  (getter) _movie_get_paused,  NULL,                        NULL, NULL },
+    { "playing", (getter) _movie_get_playing, NULL,                        NULL, NULL },
+    { "height",  (getter) _movie_get_height,  NULL,                        NULL, NULL },
+    { "width",   (getter) _movie_get_width,   NULL,                        NULL, NULL },
+    { "surface", (getter) _movie_get_surface, (setter) _movie_set_surface, NULL, NULL },
+    { NULL,      NULL,                        NULL,                        NULL, NULL }
 };
 
  static PyTypeObject PyMovie_Type =
 
 
 PyMODINIT_FUNC
-initgmovie(void)
+init_movie(void)
 {
     PyObject* module;
 
    }*/
    // Create the module
    
-   module = Py_InitModule3 ("gmovie", NULL, "pygame.gmovie plays movies and streams."); //movie doc needed
+   module = Py_InitModule3 ("_movie", NULL, "pygame._movie plays movies and streams."); //movie doc needed
 
    if (module == NULL) {
       return;
    avcodec_register_all();
    avdevice_register_all();
    av_register_all();
+   //initialize lookup tables for YUV-to-RGB conversion
    initializeLookupTables();
+   //import stuff we need
+   import_pygame_surface();
+   //initialize our flush marker for the queues.
    av_init_packet(&flush_pkt);
    uint8_t *s = (uint8_t *)"FLUSH";
    flush_pkt.data= s;
 
    // Add the type to the module.
    Py_INCREF(&PyMovie_Type);
-   //Py_INCREF(&PyAudioStream_Type);
-   //Py_INCREF(&PyVideoStream_Type);
    PyModule_AddObject(module, "Movie", (PyObject*)&PyMovie_Type);
-   //PyModule_AddObject(module, "AudioStream", (PyObject *)&PyAudioStream_Type);
-   //PyModule_AddObject(module, "VideoStream", (PyObject *)&PyVideoStream_Type);
 }
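
The new surface getset registered above can also be exercised after construction; a hedged sketch of the intended round-trip (the filename is a placeholder):

    import pygame
    import pygame._movie as _movie

    pygame.init()
    screen = pygame.display.set_mode((320, 240))

    movie = _movie.Movie("some_video.mpg")   # no surface: overlay mode
    movie.surface = screen                   # setter stores the SDL surface
                                             # and switches the overlay off
    target = movie.surface                   # getter wraps canon_surf in a
                                             # fresh pygame.Surface, or None

Note that CPython expects a getset setter to return 0 on success and -1 on error, so the return 1 in _movie_set_surface is likely to be reported as a failed assignment even when the surface is stored.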
 

File src/gmovie.h

 PyObject* _movie_get_playing (PyMovie *movie, void *closure);
 PyObject* _movie_get_width   (PyMovie *movie, void *closure);
 PyObject* _movie_get_height  (PyMovie *movie, void *closure);
+int _movie_set_surface       (PyObject *movie, PyObject *surface, void *closure);
 
 #endif /*GMOVIE_H_*/

File test/ff_movie_test.py

 
 
 import pygame
-import pygame.gmovie
+import pygame._movie as gmovie
 from pygame.locals import *
 
 import os