Commits

Anonymous committed ecec7d7

Merged tylerthemovie branch changes r2168:2614 into the trunk

Comments (0)

Files changed (29)

+set breakpoint pending on
 SCRAP = -lX11
 PORTMIDI = -lportmidi
 PORTTIME = -lporttime
+#AVCODEC   = -lavcodec 
+AVFORMAT  = -lavformat 
+#AVDEVICE  = -lavdevice 
+#AVUTIL    = -lavutil 
+SWSCALE   = -lswscale 
+#ZLIB = -lz
 #--EndConfig
 
 #DEBUG = -C-W -C-Wall
 _numericsurfarray src/_numericsurfarray.c $(SDL) $(DEBUG)
 _numericsndarray src/_numericsndarray.c $(SDL) $(MIXER) $(DEBUG)
 movie src/movie.c $(SDL) $(SMPEG) $(DEBUG)
+#_movie src/_gsound.c src/_gmovie.c src/_gcommand.c src/gmovie.c $(SDL) $(AVCODEC) $(AVFORMAT) $(AVUTIL)  $(AVDEVICE)  $(SWSCALE) $(MIXER) -lz -lwsock32  $(DEBUG)
+_movie src/_gsound.c src/_gmovie.c src/_gcommand.c src/gmovie.c $(SDL)  $(AVFORMAT) $(SWSCALE) $(MIXER) $(DEBUG)
 scrap src/scrap.c $(SDL) $(SCRAP) $(DEBUG)
 _camera src/_camera.c src/camera_v4l2.c src/camera_v4l.c $(SDL) $(DEBUG)
 pypm src/pypm.c $(SDL) $(PORTMIDI) $(PORTTIME) $(DEBUG)
         Dependency('JPEG', 'jpeglib.h', 'libjpeg.dll.a'),
         Dependency('PORTMIDI', 'portmidi.h', 'libportmidi.dll.a'),
         Dependency('PORTTIME', 'portmidi.h', 'libportmidi.dll.a'),
-        DependencyDLL('TIFF'),
+		Dependency('AVFORMAT', 'libavformat/avformat.h', 'libavformat.a'), 
+		Dependency('AVCODEC', 'libavcodec/avcodec.h', 'libavcodec.a'),
+		Dependency('AVUTIL', 'libavutil/avutil.h', 'libavutil.a'),
+        Dependency('AVDEVICE', 'libavdevice/avdevice.h', 'libavdevice.a'),
+		Dependency('SWSCALE', 'libswscale/swscale.h', 'libswscale.a'),		
+		DependencyDLL('TIFF'),
         DependencyDLL('VORBISFILE'),
         DependencyDLL('VORBIS'),
         DependencyDLL('OGG'),
         else:
             print (self.name + '        '[len(self.name):] + ': not found')
 
+class DebugDependency(Dependency):
+    def __init__(self, name, checkhead, checklib, libs):
+        Dependency.__init__(self, name, checkhead, checklib, libs)
+        self.cflags="-Xcompiler -O0"
+
 class DependencyPython:
     def __init__(self, name, module, header):
         self.name = name
         Dependency('SCRAP', '', 'libX11', ['X11']),
         Dependency('PORTMIDI', 'portmidi.h', 'libportmidi.so', ['portmidi']),
         Dependency('PORTTIME', 'porttime.h', 'libporttime.so', ['porttime']),
+        DebugDependency('AVFORMAT', 'libavformat/avformat.h', 'libavformat.a', ['avformat']),
+        DebugDependency('SWSCALE', 'libswscale/swscale.h', 'libswscale.a', ['swscale']),        
         #Dependency('GFX', 'SDL_gfxPrimitives.h', 'libSDL_gfx.so', ['SDL_gfx']),
     ]
     if not DEPS[0].found:
                 print ('Too bad that is a requirement! Hand-fix the "Setup"')
         elif len(self.paths) == 1:
             self.path = self.paths[0]
-            print ("Path for %s:' % self.name")
+            print ("Path for %s:%s" % (self.name, self.path))
         else:
             print ("Select path for %s:" % self.name)
             for i in range(len(self.paths)):
 DEPS.add('PNG', 'png', ['libpng-[1-9].*'], r'(png|libpng13)\.dll$', ['z'])
 DEPS.add('JPEG', 'jpeg', ['jpeg-[6-9]*'], r'(lib){0,1}jpeg\.dll$')
 DEPS.add('PORTMIDI', 'portmidi', ['portmidi'], r'portmidi\.dll$')
+#DEPS.add('FFMPEG', 'libavformat/avformat.h', 'libavformat.a', ['avformat', 'swscale', 'SDL_mixer'], r'avformat-52\.dll')   
+dep = Dependency('FFMPEG', [r'avformat\.dll', r'swscale\.dll', r'SDL_mixer-[1-9].*'], ['avformat', 'swscale', 'SDL_mixer'], required=0)
+DEPS.dependencies.append(dep)
+DEPS.dlls.append(DependencyDLL(r'avformat\.dll', link=dep, libs=['avformat']))
+DEPS.dlls.append(DependencyDLL(r'swscale\.dll', link=dep, libs=['swscale']))
+DEPS.dlls.append(DependencyDLL(r'(lib){0,1}SDL_mixer\.dll$', link=dep, libs=['SDL', 'vorbisfile', 'smpeg']))
 #DEPS.add('PORTTIME', 'porttime', ['porttime'], r'porttime\.dll$')
 DEPS.add_dll(r'(lib){0,1}tiff\.dll$', 'tiff', ['tiff-[3-9].*'], ['jpeg', 'z'])
 DEPS.add_dll(r'(z|zlib1)\.dll$', 'z', ['zlib-[1-9].*'])
     ('SDL', 'SDL', r'SDL\.dll$', []),
     ('PORTMIDI', 'portmidi', r'portmidi\.dll', []),
     ('PORTTIME', 'portmidi', r'portmidi\.dll', []),
+    ('AVCODEC', 'avcodec', r'avcodec\.dll', []),
+    ('AVFORMAT', 'avformat', r'avformat-52\.dll', []),
+    ('AVDEVICE', 'avdevice', r'avdevice-52\.dll', []),
+    ('AVUTIL', 'avutil', r'avutil\.dll', []),
+    ('SWSCALE', 'swscale', r'swscale\.dll', []),
 ]
 
 # regexs: Maps name to DLL file name regex.
#ifndef _MOVIE_DOC_H_
#define _MOVIE_DOC_H_

/* Docstrings for the pygame._movie (ffmpeg-backed) module.
 * These macros are attached to the module, methods and attributes at
 * type-registration time. Fix: stop() docstring said "timestamp o."
 * (letter o) instead of "timestamp 0.".
 */

#define DOC_GMOVIE "pygame backend module that wraps the ffmpeg library to play video files"

#define DOC_GMOVIEMOVIE "pygame._movie.Movie(filename, surface=None): returns Movie or None\nIf the optional surface argument is a surface, then the movie will output to that surface instead of using overlays."

#define DOC_GMOVIEMOVIEPLAY "pygame._movie.Movie.play(loops=0): return None\nplays the video file loops+1 times."

#define DOC_GMOVIEMOVIESTOP "pygame._movie.Movie.stop(): return None\nstops the video file and returns it to timestamp 0."

#define DOC_GMOVIEMOVIEPAUSE "pygame._movie.Movie.pause(): return None\npauses video file at that very moment or unpauses the video file."

#define DOC_GMOVIEMOVIEREWIND "pygame._movie.Movie.rewind(): return None\nsame as stop()"

#define DOC_GMOVIEMOVIERESIZE "pygame._movie.Movie.resize(width, height): return None\nresizes the video screen. If a surface has been provided, then resize will not work, to prevent image corruption issues.\nYou would need to provide a new surface to change the size."

#define DOC_GMOVIEMOVIEPAUSED "pygame._movie.Movie.paused: return bool\nchecks if the movie file has been paused"

#define DOC_GMOVIEMOVIEPLAYING "pygame._movie.Movie.playing: return bool\nchecks if the movie file is playing. True even when paused, but false when stop has been called."

#define DOC_GMOVIEMOVIEWIDTH   "pygame._movie.Movie.width: Gets or sets the width\nGet or set the width of the screen for the video playback"

#define DOC_GMOVIEMOVIEHEIGHT  "pygame._movie.Movie.height: Gets or sets the height\nGet or set the height of the screen for the video playback"

#define DOC_GMOVIEMOVIESURFACE "pygame._movie.Movie.surface: Gets or sets the surface to which the video is displayed on."

#define DOC_GMOVIEMOVIEFINISHED "pygame._movie.Movie.finished: Indicates when the video is played.\n If using multiple plays, this is not a reliable member to use, as when a video ends, regardless of if there are further plays, the finished member is triggered."

#define DOC_GMOVIEMOVIEYTOP  "pygame._movie.Movie.ytop: Gets or sets the ytop of the display rect\nThis sets the distance between the image and the top of the window. Increase it to move the image down, or decrease it to move the image up."

#define DOC_GMOVIEMOVIEXLEFT "pygame._movie.Movie.xleft: Gets or sets the xleft of the display rect\nThis sets the distance between the image and the left of the window. Increase it to move the image right, or decrease it to move the image left."

#define DOC_GMOVIEMOVIEEASY_SEEK "pygame._movie.Movie.easy_seek(second, minute, hour, reverse): return None\nThis is a non-relative seek, instead seeking to the h:mm:ss timestamp on the video as given. All arguments are needed."

#define DOC_GMOVIEMOVIESHIFT "pygame._movie.Movie.shift(ytop, xleft): return None\nShift the video image up, left, right, or down. Default values are 0,0."


#endif /*_MOVIE_DOC_H_*/
+"""dummy Movie class if all else fails """
+class Movie:
+    def __init__(self, filename, surface=None):
+        self.filename=filename
+        self.surface = surface
+        self.process = None
+        self.loops=0
+        self.playing = False
+        self.paused  = False
+        self._backend = "DUMMY"
+        self.width=0
+        self.height=0
+        self.finished = 1
+    def play(self, loops=0):
+        self.playing= not self.playing
+        
+    def stop(self):
+        self.playing=not self.playing
+        self.paused =not self.paused
+        
+    def pause(self):
+        self.paused=not self.paused
+        
+    def resize(self, w, h):
+        self.width=w
+        self.height=h
+        
+    def __repr__(self):
+        return "(%s 0.0s)"%self.filename
+    

lib/_ffplaybackend.py

+import subprocess, os, time
+import threading
+
+player="ffplay"
+
+
class Movie(object):
    """pygame._ffmovbackend.Movie:
        plays a video file via subprocess and a pre-packaged ffplay executable.
    """

    def __init__(self, filename, surface):
        # Mirror the attribute layout of the other movie backends.
        self.filename = filename
        self._surface = surface
        self.process = None          # Popen handle once playback starts
        self.loops = 0
        self.playing = False
        self.paused = False
        self._backend = "FFPLAY"

    def getSurface(self):
        # special stuff here
        return self._surface

    def setSurface(self, value):
        # special stuff here, like redirecting movie output here
        self._surface = value

    def delSurface(self):
        del self._surface

    surface = property(fget=getSurface, fset=setSurface, fdel=delSurface)

    def play(self, loops=0):
        self.loops = loops
        # loops <= -1 means "repeat forever"
        self.eternity = 1 if loops <= -1 else 0
        self.loops -= 1
        if self.process:
            # already running: restart with the new loop count
            self.stop()
        self._play()

    def _play(self):
        pipe = subprocess.PIPE
        self.process = subprocess.Popen([player, self.filename],
                                        stdin=pipe, stdout=pipe, stderr=pipe)
        self.t = MovieWatcher(self, self.process.pid)
        self.t.start()
        self.playing = not self.playing

    def pause(self):
        if self.process:
            # send value to pause playback
            self.paused = not self.paused

    def stop(self):
        if self.process:
            pass

    def __repr__(self):
        if self.process:
            return "(%s: )" % self.filename  # add on timestamp
        return "(%s)" % self.filename
+        
class MovieWatcher(threading.Thread):
    """Background thread that waits for the ffplay child process to exit,
    then restarts playback while loops remain (or forever when the movie's
    eternity flag is set).

    Fixes: run() was declared without `self`, referenced a bare `movie`
    name, and looked for `eternity` on the thread instead of the movie —
    the thread would crash as soon as start() scheduled it.
    """
    def __init__(self, movie, pid):
        threading.Thread.__init__(self)
        self.movie = movie   # owning Movie instance (provides _play/loops/eternity)
        self.pid = pid       # pid of the ffplay child to wait on

    def run(self):
        # Block until the ffplay process terminates.
        sts = os.waitpid(self.pid, 0)
        # video finished playing, so we run it again if plays remain
        if self.movie.loops > -1 or self.movie.eternity:
            self.movie._play()
            self.movie.loops -= 1
+
+            
import subprocess, os, time, socket, select
import threading
import re

import sys
# Pick the vlc executable name per platform.
# NOTE(review): 'win' in sys.platform also matches 'darwin' (Mac OS) —
# confirm that is intended.
if('win' in sys.platform):
    player="vlc.exe"
else:
    player= "vlc"
remote= "-I rc"
# Starting port for vlc's remote-control socket; Communicator bumps it
# until a connect succeeds.
port = 10000
# Resolve 'localhost' once at import time.
hostname = socket.getaddrinfo('localhost', 10000)[0][4][0]
# Template filled in with (hostname, port) when the command line is built.
extra = "--rc-host %s:%d"
+
class Communicator:
    """Drives a vlc process through its remote-control (rc) TCP interface.

    The constructor connects a socket to the first free port at or above
    `port`, then records the shell command line a caller should use to
    launch vlc listening on that same host:port.
    """
    def __init__(self, player, remote, extra, port, hostname):
        self.socket = socket.socket()
        #print (hostname, port)
        # Probe ports upward until a connection succeeds.
        # NOTE(review): this loops forever if nothing is listening on any
        # port — there is no upper bound or timeout; confirm a vlc rc
        # listener is expected to already be running here.
        while 1:
            try:
                self.socket.connect((hostname, port))
                break
            except socket.error:
                port+=1
        # Full command line for launching vlc against this rc endpoint.
        self.commands =  ' '.join([player, remote, extra%(hostname, port)])
        print self.commands
        # Regexes for scraping width/height out of vlc's "info" output.
        # NOTE(review): the 'width' lookahead matches digits followed by
        # more digits, not digits before the 'x' separator — looks wrong;
        # verify against actual vlc "Resolution: WxH" output.
        self.patterns = {
            'size'  : re.compile("Resolution: \d{1,4}x\d{1,4}"), 
            'width' : re.compile("Resolution: \d{1,4}(?=\d{1,4})"), 
            'height': re.compile("Resolution: (?<=\dx)\d{1,4}|(?<=\d\dx)\d{1,4}|(?<=\d\d\dx)\d{1,4}|(?<=\d\d\d\dx)\d{1,4}"),
                        }
    def send(self, message):
        # Raw write to the rc socket; all commands funnel through here.
        self.socket.send(message)

    def add(self, filename):
        # Start playing `filename` immediately.
        self.send("add %s\n" % filename)
        
    def enqueue(self, filename):
        # Append `filename` to the playlist without interrupting playback.
        self.send("enqueue %s\n" % filename)
        
    def pause(self):
        self.send("pause\n")
        
    def stop(self):
        self.send("stop\n")
        
    def logout(self):
        # Close the rc session on the vlc side.
        self.send("logout\n")

    def info(self):
        # Request stream info and read the reply one byte at a time until
        # the socket goes quiet for 0.10s.
        self.send("info\n")
        d=[]
        read =[0]
        while(len(read)>0):
            read, write, exce = select.select([self.socket], [], [], 0.10)
            if(len(read)>0):
                d.append(self.socket.recv(1))
        d=''.join(d)
        return d
        
    def _get_time(self):
        # Ask vlc for the current playback time (whole seconds).
        # NOTE(review): int(d) raises ValueError if vlc replies with
        # anything non-numeric (e.g. when stopped) — confirm callers cope.
        self.send("get_time\n")
        d=[]
        read =[0]
        while(len(read)>0):
            read, write, exce = select.select([self.socket], [], [], 0.10)
            if(len(read)>0):
                d.append(self.socket.recv(1))
        d=''.join(d)
        d=int(d)#transforms into an int
        return d
    
    def _get_height(self):
        # Scrape the video height from "info" output; 0 if not found.
        d=self.info()
        p = self.patterns['height']
        m =p.search(d)
        if not m:
            return 0
        return int(m.group())

    def _get_width(self):
        # Scrape the video width from "info" output; 0 if not found.
        d=self.info()
        p= self.patterns['width']
        m=p.search(d)
        if not m:
            return 0
        return int(m.group())
+
class Movie(object):
    """pygame._vlcbackend.Movie:
        plays a video file via subprocess and the available vlc executable

    Fix: stop() called self.commd.logout() — a typo for self.comm — which
    raised AttributeError the first time a playing movie was stopped.
    """
    def __init__(self, filename, surface=None):
        self.filename=filename
        self._surface = surface
        self.process = None          # Popen handle for the vlc process
        self.loops=0
        self.playing = False
        self.paused  = False
        self._backend = "VLC"
        # rc-interface connection used to drive the vlc process
        self.comm = Communicator(player, remote, extra, port, hostname)
        self.width = 0
        self.height =0
        self.finished =0

    def getSurface(self):
        #special stuff here
        return self._surface

    def setSurface(self, value):
        #special stuff here, like redirecting movie output here
        self._surface = value

    def delSurface(self):
        del self._surface

    surface=property(fget=getSurface, fset=setSurface, fdel=delSurface)

    def play(self, loops=0):
        """Start playback; loops<=-1 means repeat forever."""
        self.loops=loops
        if loops<=-1: self.eternity=1
        else:         self.eternity=0
        self.loops -= 1
        if not self.process:
            self._play()
        #otherwise stop playback, and start again with the new loops value.
        else:
            self.stop()
            self._play()

    def _play(self):
        # Launch vlc with the rc interface pointed back at our socket.
        comm = self.comm.commands
        self.process=subprocess.Popen(comm, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        self.playing = not self.playing
        self.width = self.comm._get_width()
        self.height = self.comm._get_height()
        if(self.eternity):
            #well, we need to watch vlc, see when it finishes playing, then play the video again, over and over
            self.comm.add(self.filename)
            # NOTE(review): this watcher thread is created but never
            # started (no self.t.start()), so eternal looping likely never
            # runs — confirm before enabling, since MovieWatcher must be
            # fixed first.
            self.t=MovieWatcher(self.comm, 0.1 , self.eternity, self.filename)
            return
        #otherwise, we add loops+1 copies of the video to the playlist
        self.comm.add(self.filename)
        for i in range(1, self.loops+1):
            self.comm.enqueue(self.filename)

    def pause(self):
        if self.process:
            #send value to pause playback
            self.paused = not self.paused
            self.comm.pause()

    def stop(self):
        """Stop playback and terminate the vlc process. No-op when idle."""
        if self.process:
            #we kill the process...
            # NOTE(review): flags are toggled, not cleared — stopping while
            # unpaused leaves paused=True. Preserved as-is.
            self.paused = not self.paused
            self.playing= not self.playing
            self.comm.stop()
            self.comm.logout()   # fixed: was self.commd (AttributeError)
            self.process.terminate()
            self.process=None
            self.finished  = 1

    def __repr__(self):
        if self.process:
            return "(%s: %ds)" % (self.filename, self._get_time()) #add on timestamp
        else:
            return "(%s: 0.0s)" % self.filename

    def _get_time(self):
        # Current playback time in seconds, or None when not playing.
        if self.process:
            return self.comm._get_time()
+    
+
class MovieWatcher(threading.Thread):
    """Polls the vlc rc socket and re-adds the file to the playlist when
    vlc reports it has stopped with nothing left to play.

    Fix: run() was declared without `self`, so the thread crashed with a
    TypeError as soon as start() scheduled it.
    """
    def __init__(self, comm, time, eternity, filename):
        threading.Thread.__init__(self)
        self.comm=comm           # Communicator driving the vlc process
        self.time = time         # polling interval in seconds
        self.eternity = eternity # truthy => loop forever
        self.filename = filename

    def run(self):
        while(1):
            time.sleep(self.time)
            # Drain whatever status output vlc has produced since last poll.
            read, write, exce = select.select([self.comm.socket], [], [], 0.1)
            d=[]
            while(len(read)>0):
                d.append(self.comm.socket.recv(1))
                read, write, exce = select.select([self.comm.socket], [], [], 0.1)
            s = ''.join(d)
            if("status change: ( stop state: 0 )" in s):
                if("nothing to play" in s):
                    self.comm.add(self.filename)
+                    
+"""Main newmovie module, imports first from _movie.so, then _vlcbackend if it finds the vlc executable."""
+
+try:
+    from pygame._movie import Movie
+except ImportError:
+    #try to transparently load the _vlcbackend.py Movie object.
+    import os, os.path, sys
+    path=os.path
+    if('win' in sys.platform):
+        if(os.path.exists(path.join(path.join(path.join('C:', 'Program Files'), 'VideoLan'), 'VLC'))):
+            try:
+                from pygame._vlcbackend import Movie, MovieWatcher
+            except ImportError:
+                #you're really hooped now...
+                print "Unable to find a working movie backend. Loading the dummy movie class..."
+                from pygame._dummybackend import Movie
+        else:
+            print "Unable to find a working movie backend. Loading the dummy movie class..."
+            from pygame._dummybackend import Movie
+    else:
+        #POSIX
+        if(os.path.exists(path.join(path.join(path.join(os.sep, 'usr'), 'bin'), 'vlc'))):
+            try:
+                from pygame._vlcbackend import Movie, MovieWatcher
+            except ImportError:
+                #oh man, I didn't mean for this to happen so badly...
+                print "Unable to find a working movie backend. Loading the dummy movie class..."
+                from pygame._dummybackend import Movie
+        else:
+            print "Unable to find a working movie backend. Loading the dummy movie class..."
+            from pygame._dummybackend import Movie
+            
 mingwrt-3.15.1
 win32api-3.12
 mingw32-make-3.81-20080326
-MSYS-1.0.10
+MSYS-1.0.11
 msysDTK-1.0.1
 msys-automake-1.8.2
 msys-autoconf-2.59
 IHDR := -I$(pmcom) -I$(pmwin) -I$(pt)
 LIBS := $(LOADLIBES) $(LDLIBS) -lwinmm
 
+
+
 all : $(pmdll)
 .PHONY : all
 
 \tsed 's_#define DEBUG.*$$_/*&*/_' < "$<" > "$@"
 
 $(pmlib) : $(src) $(hdr)
-\t$(CC) $(CPPFLAGS) $(IHDR) -c $(CFLAGS) $(src)
+\tc++ $(CPPFLAGS) $(IHDR) -c $(CFLAGS) $(src)
 \tar rc $(pmlib) $(obj)
 \tranlib $(pmlib)
 
 $(pmdll) : $(pmlib) $(def)
-\t$(CC) -shared $(LDFLAGS) -def $(def) $(pmlib) $(LIBS) -o $@
+\tc++ -shared $(LDFLAGS) -def $(def) $(pmlib) $(LIBS) -o $@
 \tdlltool -D $(pmdll) -d $(def) -l $(pmimplib)
 \tranlib $(pmimplib)
 
   rm -f GNUmakefile portmidi.def
 fi
 """),
-    ]  # End dependencies = [.
+    Dependency('FFMPEG', ['ffmpeg'],
+    ['avformat-52.dll', 'swscale-0.dll', 'SDL_mixer.dll'], """
+
+set -e
+cd $BDWD
+
+if [ x$BDCONF == x1 ]; then
+  ./configure --enable-shared --enable-memalign-hack
+fi
+
+if [ x$BDCOMP == x1 ]; then
+  make
+fi
+
+if [ x$BDINST == x1 ]; then
+  make install
+fi
+
+if [ x$BDSTRIP == x1 ]; then
+  strip --strip-all /usr/local/bin/SDL_ttf.dll
+fi
+
+if [ x$BDINST == x1 ]; then
+  cd libswscale/
+  make
+  make install
+  cd ..
+fi
+
+if [ x$BDCLEAN == x1 ]; then
+  set +e
+  make clean
+fi
+"""),
+
+	
+	]  # End dependencies = [.
 
 
 msys_prep = Preparation('/usr/local', """

prebuilt-template/Setup_Win.in

 OGG = -Iprebuilt/include/ogg -logg
 PORTMIDI = -lportmidi
 PORTTIME =
+FFMPEG   = -lavcodec -lavformat -lavdevice -lavutil -lswscale -lSDL_mixer 
 COPYLIB_SDL -Lprebuilt/lib/SDL.dll
 COPYLIB_SDL_ttf -lSDL -lfreetype -Lprebuilt/lib/SDL_ttf.dll
 COPYLIB_SDL_image -lSDL -ljpeg -lpng -ltiff -Lprebuilt/lib/SDL_image.dll
+/*
+  pygame - Python Game Library
+
+  This library is free software; you can redistribute it and/or
+  modify it under the terms of the GNU Library General Public
+  License as published by the Free Software Foundation; either
+  version 2 of the License, or (at your option) any later version.
+
+  This library is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+  Library General Public License for more details.
+
+  You should have received a copy of the GNU Library General Public
+  License along with this library; if not, write to the Free
+  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+  
+*/
+
+/*
+ * _movie - movie support for pygame with ffmpeg
+ * Author: Tyler Laing
+ *
+ * This module allows for the loading of, playing, pausing, stopping, and so on
+ *  of a video file. Any format supported by ffmpeg is supported by this
+ *  video player. Any bugs, please email trinioler@gmail.com :)
+ */
+
+#include "_gcommand.h"
+
+/* command queue management routines... I make a lot of queues, don't I? */
/* Append a command to the tail of the queue (thread-safe via q_mutex).
 * Also bumps the per-type counter in q->registry.
 * NOTE(review): assumes comm->type < 1024 (the registry array size) —
 * confirm callers only pass types obtained from registerCommand().
 * NOTE(review): q->last is only assigned once the queue grows past one
 * element; getCommand() relies on this "q->last unset while size<=1"
 * convention. */
void addCommand(CommandQueue *q, Command *comm)
{
	SDL_LockMutex(q->q_mutex);
	/* new node always becomes the list terminator */
	comm->next=NULL;
	q->registry[comm->type]++;
	if(!q->size)
	{
		/* empty queue: new node is the head */
		q->first=comm;
		q->size++;
		SDL_UnlockMutex(q->q_mutex);
		return;
	}
	if(q->size==1)
	{
		/* second element: tail pointer starts being tracked here */
		q->last=comm;
		q->first->next=comm;
		q->size++;
		SDL_UnlockMutex(q->q_mutex);
		return;
	}
	q->last->next=comm;
	q->last=comm;
	q->size++;
	SDL_UnlockMutex(q->q_mutex);
	return;
}
+
+Command *getCommand(CommandQueue *q)
+{
+	SDL_LockMutex(q->q_mutex);
+	Command *comm;
+	if(!q->last && q->first)
+	{
+		comm=q->first;
+		q->size--;
+		SDL_UnlockMutex(q->q_mutex);
+		return comm;
+	}
+	else if (!q->last && !q->first)
+	{
+		SDL_UnlockMutex(q->q_mutex);
+		return NULL;
+	}
+	comm=q->first;
+	q->first=q->first->next;
+	q->size--;
+	SDL_UnlockMutex(q->q_mutex);
+	return comm;
+}
+
+inline int hasCommand(CommandQueue *q)
+{
+	if(q->size>0)
+		return 1;
+	return 0;
+}
+
+void flushCommands(CommandQueue *q)
+{
+	SDL_LockMutex(q->q_mutex);
+	Command *prev;
+	Command *cur = q->first;
+	while(cur!=NULL)
+	{
+		prev=cur;
+		cur=cur->next;
+		PyMem_Free(prev);
+		q->size--;
+	}
+	SDL_UnlockMutex(q->q_mutex);
+}
+
+/* registers a command with a particular movie object's command queue. 
+ *  Basically, this means, theoretically, different movie objects could have different commands... 
+ */
+int registerCommand(CommandQueue *q)
+{
+	//int cur = q->reg_ix;
+	if(q->reg_ix>=1024)
+		q->reg_ix=0;
+	q->registry[q->reg_ix]=0;
+	q->reg_ix++;
+	return q->reg_ix-1;
+}
+/*
+  pygame - Python Game Library
+
+  This library is free software; you can redistribute it and/or
+  modify it under the terms of the GNU Library General Public
+  License as published by the Free Software Foundation; either
+  version 2 of the License, or (at your option) any later version.
+
+  This library is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+  Library General Public License for more details.
+
+  You should have received a copy of the GNU Library General Public
+  License along with this library; if not, write to the Free
+  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+  
+*/
+
+/*
+ * _movie - movie support for pygame with ffmpeg
+ * Author: Tyler Laing
+ *
+ * This module allows for the loading of, playing, pausing, stopping, and so on
+ *  of a video file. Any format supported by ffmpeg is supported by this
+ *  video player. Any bugs, please email trinioler@gmail.com :)
+ */
+ 
+
#ifndef _GCOMMAND_H_
#define _GCOMMAND_H_
#include <SDL.h>
#include <SDL_thread.h>
#include <Python.h>


/* Documentation: Command Queue Infrastructure
 *  This lower-level infrastructure code is meant to provide greater stability and 
 *  thread safety to the _movie module. Since we cannot manipulate the SDL event queue
 *  we have to use our own hand-rolled solution. It is just a singly linked list, 
 *  with references to the first and last item, allowing us to do a
 *  simple push/pop implementation. The items in the list are structs that have
 *  all the first members of the default Command struct, making it safe to 
 *  cast the pointers from the pseudo-Command structs to a pointer to 
 *  Command struct. Realistically, you can cast any pointer to any other kind of
 *  pointer(as long as they are the same size!), and C will let you. This is dangerous,
 *  and should only be done very, very carefully. This facility is only useful 
 *  when you need a OO approach, like we did here.
 *  
 *  When making new commands, use the FULL_COMMAND macro, and add a line to registerCommands 
 *  in _gmovie.c to add a new type value. This also enables future proofing as any changes to 
 *  the Command struct will be opaque to the user... mostly.
 * 
 *  -Tyler Laing, August 4th, 2009
 */

/* Base "class" for queue entries; every concrete command embeds these
 * same leading members (via FULL_COMMAND) so it can be cast to Command*. */
typedef struct Command
{
	int type;             /* id obtained from registerCommand() */
	struct Command *next; /* singly linked list link */
} Command;

/* Paste these as the FIRST members of any concrete command struct. */
#define FULL_COMMAND \
	int type;\
	struct Command *next;

typedef struct CommandQueue
{
	int size;             /* number of commands currently queued */
	SDL_mutex *q_mutex;   /* guards all queue mutation */
	Command *first;       /* head of the list (pop end) */
	Command *last;        /* tail of the list (push end) */
	int registry[1024];   /* per-type counters, indexed by Command.type */
	int reg_ix;           /* next free registry slot to hand out */
} CommandQueue;


/* See _gcommand.c for the implementations. */
void addCommand(CommandQueue *q, Command *comm);
Command *getCommand(CommandQueue *q);
int hasCommand(CommandQueue *q);
void flushCommands(CommandQueue *q);
int registerCommand(CommandQueue *q);

//convience function for allocating a new command, and ensuring its type is set properly. 
#define ALLOC_COMMAND(command, name) command* name = (command *)PyMem_Malloc(sizeof(command)); name->type=movie->command##Type;

#endif /*_GCOMMAND_H_*/
+/*
+  pygame - Python Game Library
+
+  This library is free software; you can redistribute it and/or
+  modify it under the terms of the GNU Library General Public
+  License as published by the Free Software Foundation; either
+  version 2 of the License, or (at your option) any later version.
+
+  This library is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+  Library General Public License for more details.
+
+  You should have received a copy of the GNU Library General Public
+  License along with this library; if not, write to the Free
+  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+  
+*/
+
+/*
+ * _movie - movie support for pygame with ffmpeg
+ * Author: Tyler Laing
+ *
+ * This module allows for the loading of, playing, pausing, stopping, and so on
+ *  of a video file. Any format supported by ffmpeg is supported by this
+ *  video player. Any bugs, please email trinioler@gmail.com :)
+ */
+ 
+
+#ifndef _GMOVIE_H_
+#include "_gmovie.h"
+#endif
+
+#ifdef __MINGW32__
+#undef main /* We don't want SDL to override our main() */
+#endif
+
+/* packet queue handling */
+void packet_queue_init(PacketQueue *q)
+{
+    if(!q)
+    {
+        q=(PacketQueue *)PyMem_Malloc(sizeof(PacketQueue));
+    }
+    if(!q->mutex)
+        q->mutex = SDL_CreateMutex();
+    q->abort_request=0;
+}
+
+void packet_queue_flush(PacketQueue *q)
+{
+    AVPacketList *pkt, *pkt1;
+
+    if(q->mutex)
+        SDL_LockMutex(q->mutex);
+
+    for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1)
+    {
+        pkt1 = pkt->next;
+        av_free_packet(&pkt->pkt);
+        PyMem_Free(pkt);
+    }
+    q->last_pkt = NULL;
+    q->first_pkt = NULL;
+    q->nb_packets = 0;
+    q->size = 0;
+
+    if(q->mutex)
+        SDL_UnlockMutex(q->mutex);
+}
+
+void packet_queue_end(PacketQueue *q, int end)
+{
+    AVPacketList *pkt, *pkt1;
+
+    for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1)
+    {
+        pkt1 = pkt->next;
+        av_free_packet(&pkt->pkt);
+    }
+    if(end==0)
+    {
+        //we only destroy the mutex if its the last loop. This way we just reuse and don't fragment memory.
+        if(q->mutex)
+        {
+            SDL_DestroyMutex(q->mutex);
+        }
+    }
+}
+
/* Append a packet to the queue. Returns 0 on success, -1 on failure
 * (dup or allocation error). The special flush_pkt sentinel (defined
 * elsewhere in _gmovie.c) is enqueued without duplication. */
int packet_queue_put(PacketQueue *q, AVPacket *pkt)
{
    AVPacketList *pkt1;

    /* duplicate the packet */
    if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
        return -1;

    pkt1 = PyMem_Malloc(sizeof(AVPacketList));

    if (!pkt1)
        return -1;
    pkt1->pkt = *pkt;
    pkt1->next = NULL;

    if(q->mutex)
        SDL_LockMutex(q->mutex);

    /* link onto the tail (or become the head when empty) */
    if (!q->last_pkt)
        q->first_pkt = pkt1;
    else
        q->last_pkt->next = pkt1;
    q->last_pkt = pkt1;
    q->nb_packets++;
    q->size += pkt1->pkt.size;
    /* XXX: should duplicate packet data in DV case */


    if(q->mutex)
        SDL_UnlockMutex(q->mutex);

    return 0;
}
+
+void packet_queue_abort(PacketQueue *q)
+{
+    if(q->mutex)
+        SDL_LockMutex(q->mutex);
+
+    q->abort_request = 1;
+
+    if(q->mutex)
+        SDL_UnlockMutex(q->mutex);
+}
+
/* return < 0 if aborted, 0 if no packet and > 0 if packet.  */
/* Pop the oldest packet into *pkt.
 * NOTE(review): despite the `block` parameter, this never actually
 * blocks — both the block and non-block branches return 0 when the queue
 * is empty (the condition-variable wait appears to have been removed).
 * Callers must treat this as a polling call. */
int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
{
    AVPacketList *pkt1;
    int ret;

    if(q->mutex)
        SDL_LockMutex(q->mutex);

    for(;;)
    {
        if (q->abort_request)
        {
            ret = -1;
            break;
        }

        pkt1 = q->first_pkt;
        if (pkt1)
        {
            /* unlink the head node and hand its packet to the caller */
            q->first_pkt = pkt1->next;
            if (!q->first_pkt)
                q->last_pkt = NULL;
            q->nb_packets--;
            q->size -= pkt1->pkt.size;
            *pkt = pkt1->pkt;
            PyMem_Free(pkt1);
            ret = 1;
            break;
        }
        else if (!block)
        {
            ret = 0;
            break;
        }
        else
        {
            /* "blocking" request on an empty queue: also returns 0 */
            ret=0;
            break;
        }
    }
    if(q->mutex)
        SDL_UnlockMutex(q->mutex);

    return ret;
}
+
+/* subtitles don't work yet. Code remains until it is needed */
+#if 0
+void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
+{
+    int wrap, wrap3, width2, skip2;
+    int y, u, v, a, u1, v1, a1, w, h;
+    uint8_t *lum, *cb, *cr;
+    const uint8_t *p;
+    const uint32_t *pal;
+    int dstx, dsty, dstw, dsth;
+
+    dstw = av_clip(rect->w, 0, imgw);
+    dsth = av_clip(rect->h, 0, imgh);
+    dstx = av_clip(rect->x, 0, imgw - dstw);
+    dsty = av_clip(rect->y, 0, imgh - dsth);
+    lum = dst->data[0] + dsty * dst->linesize[0];
+    cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
+    cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];
+
+    width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
+    skip2 = dstx >> 1;
+    wrap = dst->linesize[0];
+    wrap3 = rect->pict.linesize[0];
+    p = rect->pict.data[0];
+    pal = (const uint32_t *)rect->pict.data[1];  /* Now in YCrCb! */
+
+    if (dsty & 1)
+    {
+        lum += dstx;
+        cb += skip2;
+        cr += skip2;
+
+        if (dstx & 1)
+        {
+            YUVA_IN(y, u, v, a, p, pal);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a >> 2, cb[0], u, 0);
+            cr[0] = _ALPHA_BLEND(a >> 2, cr[0], v, 0);
+            cb++;
+            cr++;
+            lum++;
+            p += BPP;
+        }
+        for(w = dstw - (dstx & 1); w >= 2; w -= 2)
+        {
+            YUVA_IN(y, u, v, a, p, pal);
+            u1 = u;
+            v1 = v;
+            a1 = a;
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+
+            YUVA_IN(y, u, v, a, p + BPP, pal);
+            u1 += u;
+            v1 += v;
+            a1 += a;
+            lum[1] = _ALPHA_BLEND(a, lum[1], y, 0);
+            cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
+            cr[0] = _ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
+            cb++;
+            cr++;
+            p += 2 * BPP;
+            lum += 2;
+        }
+        if (w)
+        {
+            YUVA_IN(y, u, v, a, p, pal);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a >> 2, cb[0], u, 0);
+            cr[0] = _ALPHA_BLEND(a >> 2, cr[0], v, 0);
+            p++;
+            lum++;
+        }
+        p += wrap3 - dstw * BPP;
+        lum += wrap - dstw - dstx;
+        cb += dst->linesize[1] - width2 - skip2;
+        cr += dst->linesize[2] - width2 - skip2;
+    }
+    for(h = dsth - (dsty & 1); h >= 2; h -= 2)
+    {
+        lum += dstx;
+        cb += skip2;
+        cr += skip2;
+
+        if (dstx & 1)
+        {
+            YUVA_IN(y, u, v, a, p, pal);
+            u1 = u;
+            v1 = v;
+            a1 = a;
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            p += wrap3;
+            lum += wrap;
+            YUVA_IN(y, u, v, a, p, pal);
+            u1 += u;
+            v1 += v;
+            a1 += a;
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
+            cr[0] = _ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
+            cb++;
+            cr++;
+            p += -wrap3 + BPP;
+            lum += -wrap + 1;
+        }
+        for(w = dstw - (dstx & 1); w >= 2; w -= 2)
+        {
+            YUVA_IN(y, u, v, a, p, pal);
+            u1 = u;
+            v1 = v;
+            a1 = a;
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+
+            YUVA_IN(y, u, v, a, p + BPP, pal);
+            u1 += u;
+            v1 += v;
+            a1 += a;
+            lum[1] = _ALPHA_BLEND(a, lum[1], y, 0);
+            p += wrap3;
+            lum += wrap;
+
+            YUVA_IN(y, u, v, a, p, pal);
+            u1 += u;
+            v1 += v;
+            a1 += a;
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+
+            YUVA_IN(y, u, v, a, p + BPP, pal);
+            u1 += u;
+            v1 += v;
+            a1 += a;
+            lum[1] = _ALPHA_BLEND(a, lum[1], y, 0);
+
+            cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
+            cr[0] = _ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
+
+            cb++;
+            cr++;
+            p += -wrap3 + 2 * BPP;
+            lum += -wrap + 2;
+        }
+        if (w)
+        {
+            YUVA_IN(y, u, v, a, p, pal);
+            u1 = u;
+            v1 = v;
+            a1 = a;
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            p += wrap3;
+            lum += wrap;
+            YUVA_IN(y, u, v, a, p, pal);
+            u1 += u;
+            v1 += v;
+            a1 += a;
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
+            cr[0] = _ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
+            cb++;
+            cr++;
+            p += -wrap3 + BPP;
+            lum += -wrap + 1;
+        }
+        p += wrap3 + (wrap3 - dstw * BPP);
+        lum += wrap + (wrap - dstw - dstx);
+        cb += dst->linesize[1] - width2 - skip2;
+        cr += dst->linesize[2] - width2 - skip2;
+    }
+    /* handle odd height */
+    if (h)
+    {
+        lum += dstx;
+        cb += skip2;
+        cr += skip2;
+
+        if (dstx & 1)
+        {
+            YUVA_IN(y, u, v, a, p, pal);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a >> 2, cb[0], u, 0);
+            cr[0] = _ALPHA_BLEND(a >> 2, cr[0], v, 0);
+            cb++;
+            cr++;
+            lum++;
+            p += BPP;
+        }
+        for(w = dstw - (dstx & 1); w >= 2; w -= 2)
+        {
+            YUVA_IN(y, u, v, a, p, pal);
+            u1 = u;
+            v1 = v;
+            a1 = a;
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+
+            YUVA_IN(y, u, v, a, p + BPP, pal);
+            u1 += u;
+            v1 += v;
+            a1 += a;
+            lum[1] = _ALPHA_BLEND(a, lum[1], y, 0);
+            cb[0] = _ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
+            cr[0] = _ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
+            cb++;
+            cr++;
+            p += 2 * BPP;
+            lum += 2;
+        }
+        if (w)
+        {
+            YUVA_IN(y, u, v, a, p, pal);
+            lum[0] = _ALPHA_BLEND(a, lum[0], y, 0);
+            cb[0] = _ALPHA_BLEND(a >> 2, cb[0], u, 0);
+            cr[0] = _ALPHA_BLEND(a >> 2, cr[0], v, 0);
+        }
+    }
+}
+
/* Release every rect of a decoded subtitle and zero the AVSubtitle struct
 * so the SubPicture slot can be reused. (Dead code, kept under #if 0 until
 * subtitle support is needed.) */
void free_subpicture(SubPicture *sp)
{
    int i;

    /* each rect owns its bitmap plane (data[0]), its palette (data[1]),
     * and the rect struct itself */
    for (i = 0; i < sp->sub.num_rects; i++)
    {
        av_freep(&sp->sub.rects[i]->pict.data[0]);
        av_freep(&sp->sub.rects[i]->pict.data[1]);
        av_freep(&sp->sub.rects[i]);
    }

    /* free the rect pointer array, then wipe the struct for reuse */
    av_free(sp->sub.rects);

    memset(&sp->sub, 0, sizeof(AVSubtitle));
}
+#endif
+
+/* Sets the value of the variable width. Acts like a macro */
+void inline get_width(PyMovie *movie, int *width)
+{
+    if(movie->resize_w)
+    {
+        *width=movie->width;
+    }
+    else
+    {
+        if(movie->video_st)
+            *width=movie->video_st->codec->width;
+    }
+}
+/* Sets the value of the variable height. Acts like a macro */
+void inline get_height(PyMovie *movie, int *height)
+{
+    if(movie->resize_h)
+    {
+        *height=movie->height;
+    }
+    else
+    {
+        if(movie->video_st)
+            *height=movie->video_st->codec->height;
+    }
+}
+
+void get_height_width(PyMovie *movie, int *height, int*width)
+{
+    get_height(movie, height);
+    get_width(movie, width);
+}
+
/* Pack one RGBA pixel read from picture->data[0] at byte offset ix into the
 * surface's native pixel layout, writing the packed value to *rgb.
 * The alpha component is deliberately dropped (forced to 0). */
inline void jamPixels(int ix, AVPicture *picture, uint32_t *rgb, SDL_Surface *surface)
{
    //uint32_t *rgb           = surface->pixels;
    uint8_t red   = picture->data[0][ix];
    uint8_t green = picture->data[0][ix+1];
    uint8_t blue  = picture->data[0][ix+2];
    //skip the alpha... we don't care
    /* shift components to the correct place in pixel */

    *rgb = ( red   << (long) surface->format->Rshift) | /* red */
           ( blue  << (long) surface->format->Bshift ) | /* blue */
           ( green << (long) surface->format->Gshift ) | /* green */
           ( 0   << (long) surface->format->Ashift); /* alpha forced to 0 */
}
+
//transfers data from the AVPicture written to by swscale to a surface
void WritePicture2Surface(AVPicture *picture, SDL_Surface *surface, int w, int h)
{
    /* AVPicture initialized with PIX_FMT_RGBA only fills pict->data[0]
     *  This however is only in {R,G,B, A} format. So we just copy the data over. 
     */
    /* Loop unrolling:
     * 	We define a blocksize, and so we increment the index counter by blocksize*rgbstep
     * 	All common resolutions are nicely divisible by 8(because 8 is a power of 2...)
     *  An uncommon resolution  could have between 1 and 7 bytes left to convert... 
     *   which I guess we'll leave alone. Its just 1-2 pixels in the lower right corner.
     *  So we repeat the same actions blocksize times.  
     */
    /* NOTE(review): `size` and `ix` are measured in bytes (RGBSTEP per pixel)
     * while `rgb` advances one 32-bit pixel per iteration — presumably
     * RGBSTEP == surface BytesPerPixel == 4, which keeps the two in step;
     * TODO confirm RGBSTEP's definition. Pixels past `blocklimit`
     * (size % blocksize bytes) are intentionally left unwritten, and the
     * inner 8-step block runs to completion even if ix crosses blocklimit
     * mid-block — assumes the surface is at least w*h pixels. */
    int64_t   blocksize     = 8;
    uint32_t *rgb           = surface->pixels;
    int       BytesPerPixel = RGBSTEP;
    int64_t   size          = w*h*BytesPerPixel;
    int64_t   ix            = 0;
    int64_t   blocklimit    = (size/blocksize)*blocksize;
    while(ix<blocklimit)
    {
        //this will be unrolled by the compiler, meaning that we do less comparisons by a factor of blocksize
        int i =0;
        for(;i<blocksize;i++)
        {
            jamPixels(ix, picture, rgb, surface);
            rgb++;
            ix+=RGBSTEP;
        }
    }
}
+
+
+int video_display(PyMovie *movie)
+{
+    double ret=1;
+
+    VidPicture *vp = &movie->pictq[movie->pictq_rindex];
+    if (movie->video_stream>=0 && vp->ready)
+    {
+        video_image_display(movie);
+    }
+    else if(!vp->ready)
+    {
+        ret=0;
+    }
+
+    /* If we didn't actually display the image, we need to not clear our timer out in decoder */
+    return ret;
+}
+
/* Blit/overlay the read-index picture onto the screen, honoring the stream's
 * pixel aspect ratio, then advance the picture queue and re-arm the refresh
 * timer. Wrapped by video_display, which has a lock on the movie object.
 * Always returns 1. */
int video_image_display(PyMovie *movie)
{
    /* Wrapped by video_display, which has a lock on the movie object */
    DECLAREGIL

    VidPicture *vp;
    //SubPicture *sp;
    float aspect_ratio;
    int width, height, x, y;
    vp = &movie->pictq[movie->pictq_rindex];
    vp->ready =0;   /* mark consumed before we draw */
    //GRABGIL
    //PySys_WriteStdout("video_current_pts: %f\tvp->pts: %f\ttime: %f\n", movie->video_current_pts, vp->pts, (av_gettime()/1000.0)-(movie->timing+movie->last_showtime));
    //RELEASEGIL
    //set up the aspect ratio values..
    /* 3415808 == (52<<16)|(31<<8): AVStream.sample_aspect_ratio only exists
     * from roughly libavformat 52.31 on — TODO confirm exact cutoff */
    if(LIBAVFORMAT_VERSION_INT>= 3415808)
    {
        if (movie->video_st->sample_aspect_ratio.num)
            aspect_ratio = av_q2d(movie->video_st->sample_aspect_ratio);
        else if (movie->video_st->codec->sample_aspect_ratio.num)
            aspect_ratio = av_q2d(movie->video_st->codec->sample_aspect_ratio);
        else
            aspect_ratio = 0;
        if (aspect_ratio <= 0.0)
            aspect_ratio = 1.0;   /* unknown/invalid SAR -> square pixels */
    }
    else
    {
        aspect_ratio = 1.0;
    }
    //then we load in width and height values based on the aspect ration and w/h.
    int w=0;
    int h=0;
    get_height_width(movie, &h, &w);
    aspect_ratio *= (float)w / h;   /* display aspect = SAR * frame w/h */
    /* XXX: we suppose the screen has a 1.0 pixel ratio */
    /* fit the aspect-corrected image inside vp->width x vp->height,
     * rounding down to even dimensions (& ~1) for YUV alignment */
    height = vp->height;
    width = ((int)rint(height * aspect_ratio)) & ~1;
    if (width > vp->width)
    {
        width = vp->width;
        height = ((int)rint(width / aspect_ratio)) & ~1;
    }
    /* center within the destination area */
    x = (vp->width - width) / 2;
    y = (vp->height - height) / 2;

    //we set the rect to have the values we need for blitting/overlay display
    	
	    vp->dest_rect.x = vp->xleft + x;
    	vp->dest_rect.y = vp->ytop  + y;

    	vp->dest_rect.w=width;
 	   	vp->dest_rect.h=height;
	
    if (vp->dest_overlay && vp->overlay>0 && !movie->skip_frame)
    {
        //SDL_Delay(10);
#if 0
        /* subtitle blending — disabled until subtitles work */
        if (movie->sub_st)
        {
            if (movie->subpq_size > 0)
            {
                sp = &movie->subpq[movie->subpq_rindex];
                AVPicture pict;
                int i;

                if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
                {
                    SDL_LockYUVOverlay (vp->dest_overlay);

                    /* YV12: V plane precedes U, hence the 1<->2 swap */
                    pict.data[0] = vp->dest_overlay->pixels[0];
                    pict.data[1] = vp->dest_overlay->pixels[2];
                    pict.data[2] = vp->dest_overlay->pixels[1];

                    pict.linesize[0] = vp->dest_overlay->pitches[0];
                    pict.linesize[1] = vp->dest_overlay->pitches[2];
                    pict.linesize[2] = vp->dest_overlay->pitches[1];

                    for (i = 0; i < sp->sub.num_rects; i++)
                        blend_subrect(&pict, sp->sub.rects[i],
                                      vp->dest_overlay->w, vp->dest_overlay->h);

                    SDL_UnlockYUVOverlay (vp->dest_overlay);
                }
            }
        }
#endif
        if(vp->overlay>0)
        {
            /* hardware/YUV overlay path */
            SDL_LockYUVOverlay(vp->dest_overlay);
            SDL_DisplayYUVOverlay(vp->dest_overlay, &vp->dest_rect);
            SDL_UnlockYUVOverlay(vp->dest_overlay);
        }
    }
    else if(vp->dest_surface && vp->overlay<=0)
    {
        /* plain RGB surface path: blit onto the user-supplied surface */
        SDL_BlitSurface(vp->dest_surface, &vp->dest_rect, movie->canon_surf, &vp->dest_rect);
    }

    /* consume this slot and schedule the next frame */
    movie->pictq_rindex= (movie->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE;
    movie->pictq_size--;
    if(movie->skip_frame)
        movie->skip_frame=0;
    video_refresh_timer(movie);
    return 1;
}
+
/* Ensure every slot of the picture queue has a destination the right size:
 * a YUV overlay when movie->overlay > 0, otherwise an RGB surface matching
 * movie->canon_surf. Returns 0 on success, -1 after raising an SDLError.
 * NOTE(review): the `index` parameter is immediately clobbered by the loop
 * below — it is effectively ignored. */
int video_open(PyMovie *movie, int index)
{
    int w=0;
    int h=0;

    DECLAREGIL
    get_height_width(movie, &h, &w);
    /* tw/th track the actual destination dimensions; they may be overridden
     * by the canonical surface's size in the non-overlay path below */
    int tw=w;
    int th=h;
    VidPicture *vp;
    for(index=0;index<VIDEO_PICTURE_QUEUE_SIZE; index++)
    {
        vp = &movie->pictq[index];
        if(
            //If we have no overlay, and we are supposed to, we jump right in
            (!vp->dest_overlay && movie->overlay>0) ||
            (
                /* otherwise, we need to enter this block if
                 * we need to resize AND there is an overlay AND
                 *  it is not the right size
                 */
                (movie->resize_w||movie->resize_h) &&
                vp->dest_overlay &&
                (vp->height!=h || vp->width!=w)
            )
        )
        {
            if(movie->resize_w || movie->resize_h)
            {
                //we free this overlay, because we KNOW its not the right size.
                SDL_FreeYUVOverlay(vp->dest_overlay);
            }
            if(movie->overlay>0)
            {
                //now we have to open an overlay up
                SDL_Surface *screen;
                if (!SDL_WasInit (SDL_INIT_VIDEO))
                {
                    /* raising into Python requires the GIL */
                    GRABGIL
                    RAISE(PyExc_SDLError,"cannot create overlay without pygame.display initialized");
                    RELEASEGIL
                    return -1;
                }
                screen = SDL_GetVideoSurface ();
                if (!screen || (screen && (screen->w!=w || screen->h !=h)))
                {
                    //resize the main screen
                    screen = SDL_SetVideoMode(w, h, 0, SDL_SWSURFACE);
                    if(!screen)
                    {
                        GRABGIL
                        RAISE(PyExc_SDLError, "Could not initialize a new video surface.");
                        RELEASEGIL
                        return -1;
                    }
                }
                //create a new overlay
                vp->dest_overlay = SDL_CreateYUVOverlay (w, h, SDL_YV12_OVERLAY, screen);
                if (!vp->dest_overlay)
                {
                    GRABGIL
                    RAISE (PyExc_SDLError, "Cannot create overlay");
                    RELEASEGIL
                    return -1;
                }
                vp->overlay = movie->overlay;
            }
        }
        if (!vp->dest_surface && movie->overlay<=0)
        {
            //now we have to open an overlay up
            if(movie->overlay<=0)
            {
                /* surface path: clone the format of the caller's surface */
                SDL_Surface *screen = movie->canon_surf;
                if (!SDL_WasInit (SDL_INIT_VIDEO))
                {
                    GRABGIL
                    RAISE(PyExc_SDLError,"cannot create surfaces without pygame.display initialized");
                    RELEASEGIL
                    return -1;
                }
                if (!screen)
                {
                    GRABGIL
                    RAISE(PyExc_SDLError, "No video surface given."); //ideally this should have
                    RELEASEGIL										  // happen if there's some cleaning up.
                    return -1;
                }
                /* clamp the new surface to the canonical surface's size */
                if(screen->h!=h)
                {
                    th=screen->h;
                }
                if(screen->w!=w)
                {
                    tw=screen->w;
                }

                vp->dest_surface = SDL_CreateRGBSurface(screen->flags,
                                                        tw,
                                                        th,
                                                        screen->format->BitsPerPixel,
                                                        screen->format->Rmask,
                                                        screen->format->Gmask,
                                                        screen->format->Bmask,
                                                        screen->format->Amask);

                if (!vp->dest_surface)
                {
                    GRABGIL
                    RAISE (PyExc_SDLError, "Cannot create new surface.");
                    RELEASEGIL
                    return -1;
                }
                vp->overlay = movie->overlay;
            }
        }
        /* record the slot's geometry for video_image_display */
        vp->width = tw;
        vp->height = th;
        vp->ytop=movie->ytop;
        vp->xleft=movie->xleft;

    }
    return 0;
}
+
+/* called to determine a time to show each frame */
/* called to determine a time to show each frame */
/* Computes the delay until the next frame should be shown, correcting for
 * audio/external-clock drift, and stores it (in ms, with a 10 ms floor/offset)
 * into movie->timing for the decoder loop to consume. Also updates the video
 * clock stamps and may set movie->skip_frame when video lags badly. */
void video_refresh_timer(PyMovie* movie)
{
    DECLAREGIL
    /* NOTE(review): sync_threshold/diff are only assigned on the
     * audio/external-master path below; they are only read there (and in
     * commented-out logging), so this is currently harmless. */
    double actual_delay, delay, sync_threshold, ref_clock, diff;
    VidPicture *vp;

    double cur_time=av_gettime();   /* microseconds */

    if (movie->video_st)
    { /*shouldn't ever even get this far if no video_st*/
        movie->diff_co ++;

        /* dequeue the picture */
        vp = &movie->pictq[movie->pictq_rindex];

        /* update current video pts */
        movie->video_current_pts = vp->pts;
        movie->video_current_pts_time = cur_time;

        /* compute nominal delay */
        delay = movie->video_current_pts - movie->frame_last_pts;
        if (delay <= 0 || delay >= 10.0)
        {
            /* if incorrect delay, use previous one */
            delay = movie->frame_last_delay;
        }
        else
        {
            movie->frame_last_delay = delay;
        }
        movie->frame_last_pts = movie->video_current_pts;

        /* update delay to follow master synchronisation source */
        if (((movie->av_sync_type == AV_SYNC_AUDIO_MASTER && movie->audio_st) ||
                movie->av_sync_type == AV_SYNC_EXTERNAL_CLOCK))
        {
            /* if video is slave, we try to correct big delays by
               duplicating or deleting a frame */
            ref_clock = getAudioClock();
            diff = movie->video_current_pts - ref_clock;
            /* skip or repeat frame. We take into account the
               delay to compute the threshold. I still don't know
               if it is the best guess */

            sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
            if (fabs(diff) < AV_NOSYNC_THRESHOLD)
            {
                if (diff <= -sync_threshold)
                {
                    /* video is behind: drop the next frame and show now */
                    movie->skip_frame=1;
                    delay = 0;
                }
                else if (diff >= sync_threshold)
                    delay = 2 * delay;   /* video is ahead: hold the frame longer */
            }
        }

        movie->frame_timer += delay;
        /* compute the REAL delay (we need to do that to avoid
           long term errors */
        actual_delay = movie->frame_timer - (cur_time / 1000000.0);
        if (actual_delay < 0.010)
        {
            /* XXX: should skip picture */
            actual_delay = 0.010;
        }

        GRABGIL
        //PySys_WriteStdout("Actual Delay: %f\tdelay: %f\tdiff: %f\tpts: %f\tFrame-timer: %f\tCurrent_time: %f\tsync_thres: %f\n", (actual_delay*1000.0)+10, delay, diff, movie->video_current_pts, movie->frame_timer, (cur_time / 1000000.0), sync_threshold);
        //double audio = getAudioClock();
        //PySys_WriteStdout("Audio_Clock: %f\tVideo_Clock: %f\tDiff: %f\n", ref_clock, movie->video_current_pts, ref_clock-movie->video_current_pts);
        /* seconds -> milliseconds, plus a constant 10 ms offset */
        movie->timing = (actual_delay*1000.0)+10;
        RELEASEGIL
    }

}
+
+int queue_picture(PyMovie *movie, AVFrame *src_frame)
+{
+    DECLAREGIL
+    int dst_pix_fmt;
+    AVPicture pict;
+    VidPicture *vp;
+    struct SwsContext *img_convert_ctx=movie->img_convert_ctx;
+
+    vp = &movie->pictq[movie->pictq_windex];
+
+    int w=0;
+    int h=0;
+    get_height_width(movie, &h, &w);
+    int pw, ph;
+
+    if(vp->dest_surface)
+    {
+        pw=vp->dest_surface->w;
+        ph=vp->dest_surface->h;
+    }
+
+    dst_pix_fmt = PIX_FMT_YUV420P;
+#ifdef PROFILE
+
+    int64_t before = av_gettime();
+#endif
+
+    if (vp->dest_overlay && vp->overlay>0)
+    {
+        /* get a pointer on the bitmap */
+        SDL_LockYUVOverlay(vp->dest_overlay);
+
+        pict.data[0] = vp->dest_overlay->pixels[0];
+        pict.data[1] = vp->dest_overlay->pixels[2];
+        pict.data[2] = vp->dest_overlay->pixels[1];
+        pict.linesize[0] = vp->dest_overlay->pitches[0];
+        pict.linesize[1] = vp->dest_overlay->pitches[2];
+        pict.linesize[2] = vp->dest_overlay->pitches[1];
+    }
+    else if(vp->dest_surface)
+    {
+        /* get a pointer on the bitmap */
+        if(RGB24)
+        {
+            dst_pix_fmt = PIX_FMT_RGB24;
+        }
+        else if(RGBA)
+        {
+            dst_pix_fmt = PIX_FMT_RGBA;
+        }
+        avpicture_alloc(&pict, dst_pix_fmt, pw, ph);
+        SDL_LockSurface(vp->dest_surface);
+    }
+    int sws_flags = SWS_BICUBIC;
+    img_convert_ctx = sws_getCachedContext(img_convert_ctx,
+                                           movie->video_st->codec->width,
+                                           movie->video_st->codec->height,
+                                           movie->video_st->codec->pix_fmt,
+                                           w,
+                                           h,
+                                           dst_pix_fmt,
+                                           sws_flags,
+                                           NULL, NULL, NULL);
+    if (img_convert_ctx == NULL)
+    {
+        fprintf(stderr, "Cannot initialize the conversion context\n");
+        exit(1);
+    }
+    movie->img_convert_ctx = img_convert_ctx;
+
+    if((movie->resize_w||movie->resize_h) || vp->dest_overlay)
+    {
+        sws_scale(img_convert_ctx,
+                  src_frame->data,
+                  src_frame->linesize,
+                  0,
+                  h,
+                  pict.data,
+                  pict.linesize);
+    }
+    else if(vp->dest_surface)
+    {
+        sws_scale(img_convert_ctx,
+                  src_frame->data,
+                  src_frame->linesize,
+                  0,
+                  ph,
+                  pict.data,
+                  pict.linesize);
+    }
+
+    if (vp->dest_overlay && vp->overlay>0)
+    {
+        SDL_UnlockYUVOverlay(vp->dest_overlay);
+    }
+    else if(vp->dest_surface)
+    {
+        WritePicture2Surface(&pict, vp->dest_surface, pw, ph);
+        SDL_UnlockSurface(vp->dest_surface);
+        //avpicture_free(&pict);
+    }
+#ifdef PROFILE
+    TimeSampleNode *sample = (TimeSampleNode *)PyMem_Malloc(sizeof(TimeSampleNode));
+    sample->next=NULL;
+    sample->sample = av_gettime()-before;
+    if (!movie->istats->last || movie->istats->first->sample==0)
+        movie->istats->first = sample;
+    else
+        movie->istats->last->next = sample;
+    movie->istats->last = sample;
+    movie->istats->n_samples++;
+#endif
+
+    vp->pts = movie->pts;
+    movie->pictq_windex = (movie->pictq_windex+1)%VIDEO_PICTURE_QUEUE_SIZE;
+    movie->pictq_size++;
+    vp->ready=1;
+    return 0;
+}
+
+
+void update_video_clock(PyMovie *movie, AVFrame* frame, double pts1)
+{
+    double frame_delay, pts;
+
+    pts = pts1;
+
+    if (pts != 0)
+    {
+        /* update video clock with pts, if present */
+        movie->video_current_pts = pts;
+    }
+    else
+    {
+        pts = movie->video_current_pts;
+    }
+    /* update video clock for next frame */
+    frame_delay = av_q2d(movie->video_st->codec->time_base);
+    /* for MPEG2, the frame can be repeated, so we update the
+       clock accordingly */
+    frame_delay += frame->repeat_pict * (frame_delay * 0.5);
+    movie->video_current_pts += frame_delay;
+
+    movie->pts = pts;
+}
+
+/* get the current audio clock value */
+double get_audio_clock(PyMovie *movie)
+{
+    return getAudioClock();
+}
+
+/* get the current video clock value */
+double get_video_clock(PyMovie *movie)
+{
+    double delta;
+
+    if (movie->paused)
+    {
+        delta = 0;
+    }
+    else
+    {
+        delta = (av_gettime() - movie->video_current_pts_time) / 1000000.0;
+    }
+    double temp = movie->video_current_pts+delta;
+    return temp;
+}
+
+/* get the current external clock value */
+double get_external_clock(PyMovie *movie)
+{
+    int64_t ti;
+    ti = av_gettime();
+    double res = movie->external_clock + ((ti - movie->external_clock_time) * 1e-6);
+    return res;
+}
+
+/* get the current master clock value */
+double get_master_clock(PyMovie *movie)
+{
+    double val;
+
+    if (movie->av_sync_type == AV_SYNC_VIDEO_MASTER)
+    {
+        if (movie->video_st)
+            val = get_video_clock(movie);
+        else
+            val = get_audio_clock(movie);
+    }
+    else if (movie->av_sync_type == AV_SYNC_AUDIO_MASTER)
+    {
+        if (movie->audio_st)
+            val = get_audio_clock(movie);
+        else
+            val = get_video_clock(movie);
+    }
+    else
+    {
+        val = get_external_clock(movie);
+    }
+    return val;
+}
+
/* Allocate a command-type id for every command this movie understands and
 * record each id on the movie object. NOTE(review): registerCommand
 * presumably hands out ids in call order — do not reorder these calls
 * without confirming nothing depends on the numbering. */
void registerCommands(PyMovie *self)
{
    self->seekCommandType=registerCommand(self->commands);
    self->pauseCommandType=registerCommand(self->commands);
    self->stopCommandType=registerCommand(self->commands);
    self->resizeCommandType=registerCommand(self->commands);
    self->shiftCommandType = registerCommand(self->commands);
    self->surfaceCommandType = registerCommand(self->commands);
}
+
+/* seek in the stream */
/* seek in the stream */
/* Queue an asynchronous seek request; the decoder thread executes it later.
 * pos is the target position, rel the relative/direction flag.
 * ALLOC_COMMAND allocates `seek` as a seekCommand for us. */
void stream_seek(PyMovie *movie, int64_t pos, int rel)
{
    ALLOC_COMMAND(seekCommand, seek)
    seek->pos = pos;
    seek->rel = rel;
    addCommand(movie->commands, (Command *)seek);
}
+
+/* pause or resume the video */
/* pause or resume the video */
/* Queues a pause-toggle command for the decoder thread; ALLOC_COMMAND
 * allocates `pause` as a pauseCommand. */
void stream_pause(PyMovie *movie)
{
    ALLOC_COMMAND(pauseCommand, pause);
    addCommand(movie->commands, (Command *)pause);
}
+
+int audio_thread(void *arg)
+{
+    PyMovie *movie = arg;
+    DECLAREGIL
+    GRABGIL
+    Py_INCREF(movie);
+    RELEASEGIL
+    AVPacket *pkt = &movie->audio_pkt;
+    AVCodecContext *dec= movie->audio_st->codec;
+    int len1, data_size;
+    int filled =0;
+    len1=0;
+    for(;;)
+    {
+        if(movie->stop || movie->audioq.abort_request)
+        {
+            pauseBuffer(movie->channel);
+            stopBuffer(movie->channel);
+            goto closing;
+        }
+        if(movie->paused!=movie->audio_paused)
+        {
+            pauseBuffer(movie->channel);
+            movie->audio_paused=movie->paused;
+            if(movie->audio_paused)
+            {
+                movie->working=0;
+                continue;
+            }
+        }
+
+        if(movie->paused)
+        {
+            SDL_Delay(10);
+            continue;
+        }
+        //check if the movie has ended
+
+        if(getBufferQueueSize()>20)
+        {
+            SDL_Delay(100);
+            continue;
+        }
+
+        //fill up the buffer
+        while(movie->audio_pkt_size > 0)
+        {
+            data_size = sizeof(movie->audio_buf1);
+            len1 += avcodec_decode_audio2(dec, (int16_t *)movie->audio_buf1, &data_size, movie->audio_pkt_data, movie->audio_pkt_size);
+            if (len1 < 0)
+            {
+                /* if error, we skip the frame */
+                movie->audio_pkt_size = 0;
+                break;
+            }
+            movie->audio_pkt_data += len1;
+            movie->audio_pkt_size -= len1;
+            if (data_size <= 0)
+                continue;
+            //reformat_ctx here, but deleted
+            filled=1;
+
+        }
+        if(filled)
+        {
+            /* Buffer is filled up with a new frame, we spin lock/wait for a signal, where we then call playBuffer */
+            int chan = playBuffer(movie->audio_buf1, data_size, movie->channel, movie->audio_pts);
+            if(chan==-1)
+            {
+                GRABGIL
+                char *s = Mix_GetError();
+                PySys_WriteStdout("%s\n", s);
+                RELEASEGIL
+            }
+            movie->channel = chan;
+            filled=0;
+            len1=0;
+        }
+
+        //either buffer filled or no packets yet
+        /* free the current packet */
+        if (pkt->data)
+            av_free_packet(pkt);
+
+        /* read next packet */
+        if (packet_queue_get(&movie->audioq, pkt, 1) <= 0)
+        {
+            SDL_Delay(10);
+            continue;
+        }
+
+        if(pkt->data == flush_pkt.data)
+        {
+            avcodec_flush_buffers(dec);
+            SDL_Delay(10);
+            continue;
+        }
+
+        movie->audio_pts      = pkt->pts;
+        movie->audio_pkt_data = pkt->data;
+        movie->audio_pkt_size = pkt->size;
+
+    }
+closing:
+    GRABGIL
+    Py_DECREF(movie);
+    RELEASEGIL
+    return 0;
+}
+
+/* open a given stream. Return 0 if OK */
/* open a given stream. Return 0 if OK */
/* Binds the codec for stream_index via initialize_codec and records the
 * stream on the movie object (audio or video). Returns -1 for an invalid
 * index. `threaded` tells us whether refcount changes must be guarded by
 * taking the GIL. */
int stream_component_open(PyMovie *movie, int stream_index, int threaded)
{
    DECLAREGIL
    if(threaded)
    {
        GRABGIL
    }
    Py_INCREF( movie);
    if(threaded)
    {
        RELEASEGIL
    }

    AVFormatContext *ic = movie->ic;
    AVCodecContext *enc;
    if (stream_index < 0 || stream_index >= ic->nb_streams)
    {
        if(threaded)
            {GRABGIL}
        Py_DECREF(movie);
        if(threaded)
            {RELEASEGIL}
        return -1;
    }

    initialize_codec(movie, stream_index, threaded);

    enc = ic->streams[stream_index]->codec;
    switch(enc->codec_type)
    {
    case CODEC_TYPE_AUDIO:
        movie->audio_stream = stream_index;
        movie->audio_st = ic->streams[stream_index];
        break;
    case CODEC_TYPE_VIDEO:
        movie->video_stream = stream_index;
        movie->video_st = ic->streams[stream_index];
        break;
#if 0

    /* disabled subtitle case has no break: when enabled it would fall
     * through into default (which only breaks, so currently harmless) */
    case CODEC_TYPE_SUBTITLE:
        movie->sub_stream = stream_index;
        movie->sub_st     = ic->streams[stream_index];
#endif

    default:
        break;
    }
    if(threaded)
    {
        GRABGIL
    }
    Py_DECREF( movie);
    if(threaded)
    {
        RELEASEGIL
    }
    return 0;
}
+/* open a given stream. Return 0 if OK */
+int stream_component_start(PyMovie *movie, int stream_index, int threaded)
+{
+    DECLAREGIL
+    if(threaded)
+    {
+        GRABGIL
+    }
+    Py_INCREF( movie);
+    if(threaded)
+    {
+        RELEASEGIL
+    }
+    AVFormatContext *ic = movie->ic;
+    AVCodecContext *enc;
+
+    if (stream_index < 0 || stream_index >= ic->nb_streams)
+    {
+        if(threaded)
+            GRABGIL
+            Py_DECREF(movie);
+        if(threaded)
+            RELEASEGIL
+            return -1;
+    }
+    initialize_codec(movie, stream_index, threaded);
+    enc = ic->streams[stream_index]->codec;
+    switch(enc->codec_type)
+    {
+    case CODEC_TYPE_AUDIO:
+        if(movie->replay)
+        {
+            movie->audio_st = ic->streams[stream_index];
+            movie->audio_stream = stream_index;
+        }
+        memset(&movie->audio_pkt, 0, sizeof(movie->audio_pkt));
+        packet_queue_init(&movie->audioq);
+        movie->audio_mutex = SDL_CreateMutex();
+        soundStart();
+        movie->audio_tid = SDL_CreateThread(audio_thread, movie);
+        break;
+    case CODEC_TYPE_VIDEO:
+        if(movie->replay)
+        {
+            movie->video_stream = stream_index;
+            movie->video_st = ic->streams[stream_index];
+        }
+        movie->frame_last_delay = 40e-3;
+        movie->frame_timer = (double)av_gettime() / 1000000.0;
+        movie->video_current_pts_time = av_gettime();
+        packet_queue_init(&movie->videoq);
+        break;
+#if 0
+
+    case CODEC_TYPE_SUBTITLE:
+        if(movie->replay)
+        {
+            movie->sub_stream = stream_index;
+            movie->sub_st     = ic->streams[stream_index];
+        }
+        packet_queue_init(&movie->subq);
+#endif
+
+    default:
+        break;
+    }
+    if(threaded)
+    {
+        GRABGIL
+    }
+    Py_DECREF( movie);
+    if(threaded)
+    {
+        RELEASEGIL
+    }
+    return 0;
+}
+
+void stream_component_end(PyMovie *movie, int stream_index, int threaded)
+{
+    DECLAREGIL
+    if(threaded)
+        GRABGIL
+        if(movie->ob_refcnt!=0)
+            Py_INCREF( movie);
+    if(threaded)
+        RELEASEGIL
+        AVFormatContext *ic = movie->ic;
+    AVCodecContext *enc;
+
+    if (stream_index < 0 || stream_index >= ic->nb_streams)
+    {
+        if(threaded)
+            GRABGIL
+            if(movie->ob_refcnt!=0)
+            {
+                Py_DECREF(movie);
+            }
+        if(threaded)
+            RELEASEGIL
+            return;
+    }
+    movie->replay=1;
+    enc = ic->streams[stream_index]->codec;
+    int i;
+    VidPicture *vp;
+    //SubPicture *sp;
+    switch(enc->codec_type)
+    {
+    case CODEC_TYPE_AUDIO:
+        packet_queue_abort(&movie->audioq);
+        SDL_WaitThread(movie->audio_tid, NULL);
+        SDL_DestroyMutex(movie->audio_mutex);
+        soundEnd();
+        memset(&movie->audio_buf1, 0, sizeof(movie->audio_buf1));
+        packet_queue_flush(&movie->audioq);
+        break;
+    case CODEC_TYPE_VIDEO:
+        for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE;i++)
+        {
+            vp = &movie->pictq[i];
+            vp->ready=0;
+        }
+        movie->video_current_pts=0;
+        packet_queue_abort(&movie->videoq);
+        packet_queue_flush(&movie->videoq);
+        break;
+#if 0
+
+    case CODEC_TYPE_SUBTITLE:
+        packet_queue_abort(&movie->subq);
+        packet_queue_flush(&movie->subq);
+        break;
+#endif
+
+    default:
+        break;
+    }
+    ic->streams[stream_index]->discard = AVDISCARD_ALL;
+
+    if(threaded)
+        GRABGIL
+        if(movie->ob_refcnt!=0)
+        {
+            Py_DECREF( movie);
+        }
+    if(threaded)
+        RELEASEGIL
+    }
+void stream_component_close(PyMovie *movie, int stream_index, int threaded)
+{
+    DECLAREGIL
+    if(threaded)
+        GRABGIL
+        if(movie->ob_refcnt!=0)
+            Py_INCREF( movie);
+    if(threaded)
+        RELEASEGIL
+        AVFormatContext *ic = movie->ic;
+    AVCodecContext *enc;
+
+    if (stream_index < 0 || stream_index >= ic->nb_streams)
+    {
+        if(threaded)
+            GRABGIL