Commits

Anonymous committed 76c23a6

Cleaned up code, fixed loop issues.
Currently a small memory leak of ~1.124 MB per loop is observed.
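The recurring change in this diff wraps every lock/unlock of a PacketQueue in a THREADFREE guard, so the SDL mutex is only touched when the module actually runs its decode threads. A minimal sketch of the pattern (THREADFREE and the queue come from _gmovie.h; the queue manipulation itself is elided):

    #if THREADFREE!=1
        if (q->mutex)
            SDL_LockMutex(q->mutex);      /* lock only in threaded mode */
    #endif
        /* ... manipulate the PacketQueue ... */
    #if THREADFREE!=1
        if (q->mutex)
            SDL_UnlockMutex(q->mutex);    /* matching unlock */
    #endif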

Comments (0)

Files changed (4)

 
 }
 
-
  void packet_queue_flush(PacketQueue *q)
 {
     AVPacketList *pkt, *pkt1;
-
+#if THREADFREE!=1
+    if(q->mutex)
+        SDL_LockMutex(q->mutex);
+#endif
     for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
         pkt1 = pkt->next;
         av_free_packet(&pkt->pkt);
     q->first_pkt = NULL;
     q->nb_packets = 0;
     q->size = 0;
+#if THREADFREE!=1
+    if(q->mutex)
+        SDL_UnlockMutex(q->mutex);
+#endif
 }
 
- void packet_queue_end(PacketQueue *q)
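+/* end: remaining loop count (taken from movie->loops). The queue's mutex and cond are only
+ * destroyed when end == 0, i.e. no loops are left, so a looping movie can keep reusing the queue. */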
+ void packet_queue_end(PacketQueue *q, int end)
 {
     AVPacketList *pkt, *pkt1;
 
         pkt1 = pkt->next;
         av_free_packet(&pkt->pkt);
     }
-    SDL_DestroyMutex(q->mutex);
-    SDL_DestroyCond(q->cond);
+    if(end==0)
+    {
+        if(q->mutex)
+        {
+            SDL_DestroyMutex(q->mutex);
+        }
+        if(q->cond)
+        {
+            SDL_DestroyCond(q->cond);
+        }
+    }
 }
 
  int packet_queue_put(PacketQueue *q, AVPacket *pkt)
 {
     AVPacketList *pkt1;
 
+	
     pkt1 = av_malloc(sizeof(AVPacketList));
     if (!pkt1)
         return -1;
     pkt1->pkt = *pkt;
     pkt1->next = NULL;
 
-
+#if THREADFREE!=1
+    if(q->mutex)
+        SDL_LockMutex(q->mutex);
+#endif
     if (!q->last_pkt)
 
         q->first_pkt = pkt1;
     q->size += pkt1->pkt.size;
     /* XXX: should duplicate packet data in DV case */
 
+#if THREADFREE!=1
+    if(q->mutex)
+        SDL_UnlockMutex(q->mutex);
+#endif
     return 0;
 }
 
- void packet_queue_abort(PacketQueue *q)
+void packet_queue_abort(PacketQueue *q)
 {
+#if THREADFREE!=1
+    if(q->mutex)
+        SDL_LockMutex(q->mutex);
+#endif
     q->abort_request = 1;
+#if THREADFREE!=1
+    if(q->mutex)
+        SDL_UnlockMutex(q->mutex);
+#endif
 }
 
 /* return < 0 if aborted, 0 if no packet and > 0 if packet.  */
- int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
+int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
 {
     AVPacketList *pkt1;
     int ret;
-
-
+    
+#if THREADFREE!=1
+    if(q->mutex)
+        SDL_LockMutex(q->mutex);
+#endif
     for(;;) {
         if (q->abort_request) {
             ret = -1;
             break;
         }
     }
+#if THREADFREE!=1
+    if(q->mutex)
+        SDL_UnlockMutex(q->mutex);
+#endif
     return ret;
 }
 
 
- void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
+void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
 {
     int wrap, wrap3, width2, skip2;
     int y, u, v, a, u1, v1, a1, w, h;
     memset(&sp->sub, 0, sizeof(AVSubtitle));
 }
 
-double calc_ca(int64_t diff, double ca, double i)
-{
-	/* CA_(i+1) = CA_i + ((x_(i+1) -CA_i)/i+1) */
-	double res = ((double)diff-ca)/(i+1);
-	res       += ca;
-	return res;
-}
-
- int video_display(PyMovie *movie)
+int video_display(PyMovie *movie)
 {
 /*DECODE THREAD - from video_refresh_timer*/
 	Py_INCREF(movie);
 	double ret=1;
 	VidPicture *vp = &movie->pictq[movie->pictq_rindex];
-	if (!vp->dest_overlay)
-    {    
-        video_open(movie, movie->pictq_rindex);
+    if(!vp->dest_overlay)
+    {
+    	video_open(movie, movie->pictq_rindex);
     	ret=0;
     }
     else if (movie->video_stream>=0 && vp->ready)
         video_image_display(movie);
     }
     else if(!vp->ready)
-    	ret= 0;
+    {
+    	ret=0;
+    }
 	Py_DECREF(movie);
 	return ret;
 }
 
- void video_image_display(PyMovie *is)
+void video_image_display(PyMovie *is)
 {
     Py_INCREF( is);
     SubPicture *sp;
     vp->ready =0;
     if (vp->dest_overlay) {
         /* XXX: use variable in the frame */
-        int64_t t_before = av_gettime();
         if (is->video_st->sample_aspect_ratio.num)
             aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
         else if (is->video_st->codec->sample_aspect_ratio.num)
         {       
             SDL_DisplayYUVOverlay(vp->dest_overlay, &vp->dest_rect);
         }
-        int64_t t_after = av_gettime();
-        double ca = calc_ca((t_after-t_before), is->ca_render, is->ca_render_i);
-        is->ca_render=ca;
-        is->ca_render_i++;
         
     } 
     is->pictq_rindex= (is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE;
         }
         vp->overlay = is->overlay;
     } 
-#if 0    
-    else if (!pvs->out_surf && is->overlay<=0)
-    {
-        int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
-        //we create a pygame surface
-        SDL_Surface *screen;
-        #ifndef __APPLE__
-        screen = SDL_SetVideoMode(w, h, 0, flags);
-        #else
-        /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
-        screen = SDL_SetVideoMode(w, h, 24, flags);
-        #endif
-        pvs->out_surf=(SDL_Surface *)PyMem_Malloc(sizeof(SDL_Surface));
-        if (!pvs->out_surf)
-        {
-            RAISE (PyExc_SDLError, "Could not create Surface object");
-        	return -1;
-        }
-    }
-#endif
 
     is->width = w;
     vp->width = w;
     VidPicture *vp;
     struct SwsContext *img_convert_ctx=NULL;
 
-    SDL_LockMutex(movie->dest_mutex);
 	vp = &movie->pictq[movie->pictq_windex];
-	int c=1;
 	
-	/*if(movie->timing)
-	{
-		video_display(movie);
-	}*/
 	if(!vp->dest_overlay)
 	{
 		video_open(movie, movie->pictq_windex);
         SDL_UnlockYUVOverlay(vp->dest_overlay);
 
         vp->pts = movie->pts;  
-    	movie->pictq_windex = (movie->pictq_windex+c)%VIDEO_PICTURE_QUEUE_SIZE;
+    	movie->pictq_windex = (movie->pictq_windex+1)%VIDEO_PICTURE_QUEUE_SIZE;
 		movie->pictq_size++;
 		vp->ready=1;
     }
-    SDL_UnlockMutex(movie->dest_mutex);
 	
 	Py_DECREF(movie);
     return 0;
 	movie->pts = pts;
 }
 
- int video_thread(void *arg)
-{
-    PyMovie *movie = arg;
-    PyGILState_STATE gstate;
-	gstate = PyGILState_Ensure();
-    
-    Py_INCREF( movie);
-    AVPacket pkt1, *pkt = &pkt1;
-    int len1, got_picture;
-    AVFrame *frame= avcodec_alloc_frame();
-    double pts;
-
-	if(gstate!=PyGILState_UNLOCKED)
-	{
-		PyGILState_Release(gstate);
-	}
-	
-    for(;;) {
-		gstate = PyGILState_Ensure();
-        while (movie->paused && !movie->videoq.abort_request) {
-            SDL_Delay(10);
-        }
-        int64_t t_before = av_gettime();
-        if (packet_queue_get(&movie->videoq, pkt, 0) < 0)
-            continue;
-
-        if(pkt->data == flush_pkt.data){
-            avcodec_flush_buffers(movie->video_st->codec);
-            continue;
-        }
-
-        /* NOTE: ipts is the PTS of the _first_ picture beginning in
-           this packet, if any */
-        movie->video_st->codec->reordered_opaque= pkt->pts;
-        
-        len1 = avcodec_decode_video(movie->video_st->codec,
-                                    frame, &got_picture,
-                                    pkt->data, pkt->size);
-		
-		int64_t t_after = av_gettime();
-		double ca = calc_ca((t_after-t_before), movie->ca_decode, movie->ca_decode_i);
-		movie->ca_decode=ca;
-		movie->ca_decode_i++;       
-        if(   ( pkt->dts == AV_NOPTS_VALUE)
-           && frame->reordered_opaque != AV_NOPTS_VALUE)
-            pts= frame->reordered_opaque;
-        else if(pkt->dts != AV_NOPTS_VALUE)
-            pts= pkt->dts;
-        else
-            pts= 0;
-        pts *= av_q2d(movie->video_st->time_base);
-
-//            if (len1 < 0)
-//                break;
-        if (got_picture) {
-        	update_video_clock(movie, frame, pts);
-            if (queue_picture(movie, frame) < 0)
-                goto the_end;
-        }
-        av_free_packet(pkt);
-		PyGILState_Release(gstate);
-        
-    }
- the_end:
-	gstate = PyGILState_Ensure();
-    Py_DECREF(movie);
-    av_free(frame);
-	PyGILState_Release(gstate);
-    return 0;
-}
-
  int audio_write_get_buf_size(PyMovie *movie)
 {
     Py_INCREF(movie);
         delta = (av_gettime() - is->video_current_pts_time) / 1000000.0;
     }
     double temp = is->video_current_pts+delta;
-    //PySys_WriteStdout("Video Clock: %f\n", temp);
     Py_DECREF( is);
     return temp;
 }
 }
 
 /* pause or resume the video */
- void stream_pause(PyMovie *is)
+void stream_pause(PyMovie *is)
 {
     Py_INCREF( is);
     is->paused = !is->paused;
 }
 
 
- int subtitle_thread(void *arg)
+int subtitle_thread(void *arg)
 {
     PyMovie *movie = arg;
     PyGILState_STATE gstate;
                 int len= data_size/istride[0];
                 if (av_audio_convert(is->reformat_ctx, obuf, ostride, ibuf, istride, len)<0) {
                     PyErr_WarnEx(NULL, "av_audio_convert() failed", 1);
-                    //printf("av_audio_convert() failed\n");
                     break;
                 }
                 is->audio_buf= is->audio_buf2;
             n = 2 * dec->channels;
             is->audio_clock += (double)data_size /
                 (double)(n * dec->sample_rate);
-#if defined(DEBUG_SYNC)
-            {
-                 double last_clock;
-                printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
-                       is->audio_clock - last_clock,
-                       is->audio_clock, pts);
-                last_clock = is->audio_clock;
-            }
-#endif
             Py_DECREF(is);
             return data_size;
         }
 /* prepare a new audio buffer */
  void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
 {
-	PyGILState_STATE gstate;
-	gstate = PyGILState_Ensure();
-	
     PyMovie *movie = opaque;
     Py_INCREF( movie);
     int audio_size, len1;
     double pts;
 
-
-    //audio_callback_time = av_gettime();
-
     while (len > 0) {
         if (movie->audio_buf_index >= movie->audio_buf_size) {
            audio_size = audio_decode_frame(movie, &pts);
         movie->audio_buf_index += len1;
     }
     Py_DECREF( movie);
-	if(gstate!=PyGILState_UNLOCKED)PyGILState_Release(gstate);
-	
 }
 
 /* open a given stream. Return 0 if OK */
         SDL_PauseAudio(0);
         break;
     case CODEC_TYPE_VIDEO:
-		
-		//PySys_WriteStdout("stream_component_open: Video Stream\n");
         movie->video_stream = stream_index;
         movie->video_st = ic->streams[stream_index];
 
         packet_queue_init(&movie->videoq);
       	if(!THREADFREE)
 	        movie->video_tid = SDL_CreateThread(video_thread, movie);
-		
         break;
     case CODEC_TYPE_SUBTITLE:
-    	//PySys_WriteStdout("stream_component_open: subtitle stream\n");
         movie->subtitle_stream = stream_index;
         
         movie->subtitle_st = ic->streams[stream_index];
 
  void stream_component_close(PyMovie *is, int stream_index)
 {
-	PyGILState_STATE gstate;
-	gstate = PyGILState_Ensure();
-	
     Py_INCREF( is);
     AVFormatContext *ic = is->ic;
     AVCodecContext *enc;
     if (stream_index < 0 || stream_index >= ic->nb_streams)
         return;
     enc = ic->streams[stream_index]->codec;
-
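+    /* remaining loop count; passed to packet_queue_end() so the queue's mutex/cond survive while loops remain */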
+    int end = is->loops;
     switch(enc->codec_type) {
     case CODEC_TYPE_AUDIO:
-        
         packet_queue_abort(&is->audioq);
-
         SDL_CloseAudio();
-
-        packet_queue_end(&is->audioq);
+        packet_queue_end(&is->audioq, end);
         if (is->reformat_ctx)
             av_audio_convert_free(is->reformat_ctx);
         break;
     case CODEC_TYPE_VIDEO:
-        
         packet_queue_abort(&is->videoq);
-
         SDL_WaitThread(is->video_tid, NULL);
-
-        packet_queue_end(&is->videoq);
+        packet_queue_end(&is->videoq, end);
         break;
     case CODEC_TYPE_SUBTITLE:
-        
         packet_queue_abort(&is->subtitleq);
-
         /* note: we also signal this mutex to make sure we deblock the
            video thread in all cases */
         SDL_LockMutex(is->subpq_mutex);
         is->subtitle_stream_changed = 1;
-
         SDL_CondSignal(is->subpq_cond);
         SDL_UnlockMutex(is->subpq_mutex);
-
         SDL_WaitThread(is->subtitle_tid, NULL);
-
-        packet_queue_end(&is->subtitleq);
+        packet_queue_end(&is->subtitleq, end);
         break;
     default:
         break;
     }
 
     Py_DECREF( is);
-	PyGILState_Release(gstate);
 }
 
 
     	return -1;
     }
     PyMovie *is = arg;
-    PyGILState_STATE gstate;
     Py_INCREF( is);
     AVFormatContext *ic;
     int err, i, ret, video_index, audio_index, subtitle_index;
     AVPacket pkt1, *pkt = &pkt1;
     AVFormatParameters params, *ap = &params;
-video_index = -1;
+    video_index = -1;
     audio_index = -1;
     subtitle_index = -1;
     is->video_stream = -1;
     err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
     if (err < 0) {
         PyErr_Format(PyExc_IOError, "There was a problem opening up %s", is->filename);
-        //print_error(is->filename, err);
         ret = -1;
         goto fail;
     }
     err = av_find_stream_info(ic);
    if (err < 0) {
         PyErr_Format(PyExc_IOError, "%s: could not find codec parameters", is->filename);
-//        fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
         ret = -1;
         goto fail;
    }
     }
 
 	/* open the streams */
-    gstate = PyGILState_Ensure();
     if (audio_index >= 0) {
 		stream_component_open(is, audio_index);
    	}
         goto fail;
     }
     is->frame_delay = av_q2d(is->video_st->codec->time_base);
-	PyGILState_Release(gstate);
-	gstate=PyGILState_Ensure();
-	int co=0;
 	is->last_showtime = av_gettime()/1000.0;
     video_open(is, is->pictq_windex);
-    PyGILState_Release(gstate);
     for(;;) {
-        	gstate=PyGILState_Ensure();
-        //SDL_LockMutex(is->_mutex);        
         if (is->abort_request)
-        {  //  SDL_UnlockMutex(is->general_mutex);
-			PyGILState_Release(gstate);
+        {  
             break;
         }
         if (is->paused != is->last_paused) {
     
 
             ret = av_seek_frame(is->ic, stream_index, seek_target, is->seek_flags);
-            if (ret < 0) {
+            if (ret < 0) 
+            {
                 PyErr_Format(PyExc_IOError, "%s: error while seeking", is->ic->filename);
-                //fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
-            }else{
-                
-                
-                if (is->audio_stream >= 0) {
+            }
+            else
+            {
+                if (is->audio_stream >= 0) 
+                {
                     packet_queue_flush(&is->audioq);
                     packet_queue_put(&is->audioq, &flush_pkt);
                 }
-                if (is->subtitle_stream >= 0) {
+                if (is->subtitle_stream >= 0) 
+                {
 		            packet_queue_flush(&is->subtitleq);
                     packet_queue_put(&is->subtitleq, &flush_pkt);
                 }
-                if (is->video_stream >= 0) {
+                if (is->video_stream >= 0) 
+                {
                     packet_queue_flush(&is->videoq);
                     packet_queue_put(&is->videoq, &flush_pkt);
                 }
             (is->subtitleq.size > MAX_SUBTITLEQ_SIZE)) {
             /* wait 10 ms */
             SDL_Delay(10);
-			PyGILState_Release(gstate);
             continue;
         }
-        if(url_feof(ic->pb)) {
+        if(url_feof(ic->pb)) 
+        {
             av_init_packet(pkt);
             pkt->data=NULL;
             pkt->size=0;
             pkt->stream_index= is->video_stream;
             packet_queue_put(&is->videoq, pkt);
-			PyGILState_Release(gstate);
             continue;
         }
         ret = av_read_frame(ic, pkt);
-        if (ret < 0) {
-            if (ret != AVERROR_EOF && url_ferror(ic->pb) == 0) {
-				PyGILState_Release(gstate);
+        if (ret < 0) 
+        {
+            if (ret != AVERROR_EOF && url_ferror(ic->pb) == 0) 
+            {
                 SDL_Delay(100); /* wait for user event */
                 continue;
             } else
             {
-				PyGILState_Release(gstate);
                 break;
             }
         }
                 is->last_showtime = now;
                 is->timing =0;
             } else {
-//                printf("showtime not ready, waiting... (%.2f,%.2f)\n",
-//                            (float)now, (float)movie->dest_showtime);
                 SDL_Delay(10);
             }
         }
  fail:
     /* disable interrupting */
 
-
     /* close each stream */
     if (is->audio_stream >= 0)
     {
 	{
 		//throw python error
 	}
-/*    if(is->loops<0)
-    {
-        is->parse_tid = SDL_CreateThread(decode_thread, is);
-    }
-    else if (is->loops>0)
-    {   
-        is->loops--;
-        is->parse_tid = SDL_CreateThread(decode_thread, is);
-    }*/
-    Py_DECREF( is);
     return 0;
 }
 
- PyMovie *stream_open(PyMovie *is, const char *filename, AVInputFormat *iformat)
+PyMovie *stream_open(PyMovie *is, const char *filename, AVInputFormat *iformat)
 {
     if (!is)
         return NULL;
     
     is->paused = 1;
     is->av_sync_type = AV_SYNC_VIDEO_MASTER;
-    /*if(!THREADFREE)
-	{
-	    is->parse_tid = SDL_CreateThread(decode_thread, is);
-	}
-    if (!is->parse_tid && !THREADFREE) {
-        PyErr_SetString(PyExc_MemoryError, "Could not spawn a new thread.");
-        Py_DECREF( is);
-        return NULL;
-    }*/
     
 	Py_DECREF(is);
-    /*if(THREADFREE)
-    {
-    	is->paused=0;
-    	decoder(is);
-    }*/
     return is;
 }
 
 
  void stream_close(PyMovie *is)
 {
-	PyGILState_STATE gstate;
-	gstate = PyGILState_Ensure();
 	if(is->ob_refcnt!=0) Py_INCREF(is);
-    /* XXX: use a special url_shutdown call to abort parse cleanly */
     is->abort_request = 1;
     SDL_WaitThread(is->parse_tid, NULL);
 	VidPicture *vp;
     { 
     	Py_DECREF(is);
     }
-	PyGILState_Release(gstate);
 }
 
-
 void stream_cycle_channel(PyMovie *is, int codec_type)
 {
-	PyGILState_STATE gstate;
-	gstate = PyGILState_Ensure();
     AVFormatContext *ic = is->ic;
     int start_index, stream_index;
     AVStream *st;
     stream_component_open(is, stream_index);
     
     Py_DECREF(is);
-	PyGILState_Release(gstate);
 }
 
 
 /* this thread gets the stream from the disk or the network */
  int decoder(PyMovie *is)
 {
-	PyGILState_STATE gstate;
     Py_INCREF( is);
     AVFormatContext *ic;
     int err, i, ret, video_index, audio_index, subtitle_index;
     err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
     if (err < 0) {
         PyErr_Format(PyExc_IOError, "There was a problem opening up %s", is->filename);
-        //print_error(is->filename, err);
         ret = -1;
         goto fail;
     }
     err = av_find_stream_info(ic);
    if (err < 0) {
         PyErr_Format(PyExc_IOError, "%s: could not find codec parameters", is->filename);
-//        fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
         ret = -1;
         goto fail;
    }
     }
 
     /* open the streams */
-    gstate = PyGILState_Ensure();
     /*if (audio_index >= 0) {
 		stream_component_open(is, audio_index);
    	}*/
     } 
 
 /*    if (subtitle_index >= 0) {
-		//###PyGILBlock
-		gstate = PyGILState_Ensure();
         stream_component_open(is, subtitle_index);
-    	PyGILState_Release(gstate);
-    	//###End PyGILBlock
     }*/
     if (is->video_stream < 0 && is->audio_stream < 0) {
         PyErr_Format(PyExc_IOError, "%s: could not open codecs", is->filename);
     }
     
 	is->frame_delay = av_q2d(is->video_st->codec->time_base);
-	PyGILState_Release(gstate);
-	gstate=PyGILState_Ensure();
 	int co=0;
 	is->last_showtime = av_gettime()/1000.0;
     video_open(is, is->pictq_windex);
-    PyGILState_Release(gstate);
     for(;;) {
-    	gstate=PyGILState_Ensure();
 		//PySys_WriteStdout("decoder: loop %i.\n", co);
 		co++;
 		
-        //SDL_LockMutex(is->_mutex);        
         if (is->abort_request)
-        {  //  SDL_UnlockMutex(is->general_mutex);
-        	PyGILState_Release(gstate);
+        { 
             break;
         }
         if (is->paused != is->last_paused) {
             pkt->size=0;
             pkt->stream_index= is->video_stream;
             packet_queue_put(&is->videoq, pkt);
-            PyGILState_Release(gstate);
             continue;
         }
 		if(is->pictq_size<VIDEO_PICTURE_QUEUE_SIZE)
 	        if (ret < 0) {
 	            if (ret != AVERROR_EOF && url_ferror(ic->pb) == 0) {
 	                SDL_Delay(100); /* wait for user event */
-	                PyGILState_Release(gstate);
 	                continue;
 	            } else
 	            {
-	            	PyGILState_Release(gstate);
 	                break;
 	            }
 	        }
         if(is->timing>0) {
         	double showtime = is->timing+is->last_showtime;
             double now = av_gettime()/1000.0;
-            //PySys_WriteStdout("Now:           %f\nShowtime:      %f\nLast Showtime: %f\n", now, showtime, is->last_showtime);
             if(now >= showtime) {
             	double temp = is->timing;
                 is->timing =0;
                 is->last_showtime = av_gettime()/1000.0;
                 
             } else {
-//                printf("showtime not ready, waiting... (%.2f,%.2f)\n",
-//                            (float)now, (float)movie->dest_showtime);
                 SDL_Delay(10);
             }
         }
-    //if(gstate!=PyGILState_LOCKED)
-    //	PyGILState_Release(gstate);
     }
 
     ret = 0;
  fail:
     /* disable interrupting */
 
-
     /* close each stream */
     if (is->audio_stream >= 0)
     {
 
     do {
     	
-        while (movie->paused && !movie->videoq.abort_request) {
-            SDL_Delay(10);
+        if (movie->paused && !movie->videoq.abort_request) {
+            return 0;
         }
-        //if(movie->timing>0)
-        //	break;
-        int64_t t_before = av_gettime();
         if (packet_queue_get(&movie->videoq, pkt, 0) <=0)
             break;
 		
                                     frame, &got_picture,
                                     pkt->data, pkt->size);
 		
-		int64_t t_after = av_gettime();
-		double ca = calc_ca((t_after-t_before), movie->ca_decode, movie->ca_decode_i);
-		movie->ca_decode=ca;
-		movie->ca_decode_i++;
         if(   ( pkt->dts == AV_NOPTS_VALUE)
            && frame->reordered_opaque != AV_NOPTS_VALUE)
             pts= frame->reordered_opaque;
             pts= 0;
         pts *= av_q2d(movie->video_st->time_base);
 
-//            if (len1 < 0)
-//                break;
         if (got_picture) {
         	update_video_clock(movie, frame, pts);
         	if (queue_picture(movie, frame) < 0)
 #ifndef _GMOVIE_H_
 #define _GMOVIE_H_
 
-/* includes */
+/* local includes */
 #include "pygamedocs.h"
 #include "pygame.h"
 #include "pgcompat.h"
 #include "audioconvert.h"
 #include "surface.h"
-//#include "_ffmovie_vid.h"
-//#include "_ffmovie_aud.h"
 
+/* Library includes */
 #include <Python.h>
 #include <SDL.h>
 #include <SDL_thread.h>
 {\
     ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
 }
-
+// When set to 1, the module runs in single-threaded mode.
 #define THREADFREE 1
 
+// Kept for backwards compatibility with blend_subrect
 #define BPP 1
 
-#define FF_ALLOC_EVENT   (SDL_USEREVENT)
-#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
-#define FF_QUIT_EVENT    (SDL_USEREVENT + 2)
+AVPacket flush_pkt;
 
- AVPacket flush_pkt;
-
- 
-
+/* Queues for already-loaded pictures, for rapid display */
 #define VIDEO_PICTURE_QUEUE_SIZE 16
 #define SUBPICTURE_QUEUE_SIZE 4
 
 
 
 /* structure definitions */
-
+/* PacketQueue to hold incoming ffmpeg packets from the stream */
 typedef struct PacketQueue {
     AVPacketList *first_pkt, *last_pkt;
     int nb_packets;
     SDL_cond *cond;
 } PacketQueue;
 
-
+/* Holds the subtitles for a specific timestamp */
 typedef struct SubPicture {
     double pts;         /* presentation time stamp for this picture */
     AVSubtitle sub;     //contains relevant info about subtitles    
 } SubPicture;
 
+/* Holds already-loaded pictures, so that decoding and writing to an overlay/surface can happen
+ * while waiting out the (in computer terms) very long time until the next frame is shown.
+ */
 typedef struct VidPicture{
-	SDL_Overlay *dest_overlay;
-	SDL_Surface *dest_surface;
-	SDL_Rect    dest_rect;
-	int         width;
+	SDL_Overlay *dest_overlay; /* Overlay for fast speedy yuv-rendering of the video */
+	SDL_Surface *dest_surface; /* Surface for other uses, e.g. rendering the video into a small portion of the screen */
+	SDL_Rect    dest_rect;	   /* Dest-rect, which tells where to locate the video */
+	int         width;         /* Width and height */
 	int         height;
-	int         xleft;
-	int         ytop;
-	int         overlay;
-	int         ready;
-	double      pts;
+	int         xleft;		   /* Where left border of video is located */
+	int         ytop;		   /* Where top border of video is located */
+	int         overlay;	   /* Whether or not to use the overlay */
+	int         ready; 		   /* Flag indicating the picture is ready to be displayed; cleared after its contents are shown */
+	double      pts;		   /* presentation time-stamp of the picture */
 } VidPicture;
 
 
 
 typedef struct PyMovie {
 	PyObject_HEAD
-    SDL_Thread *parse_tid;
-    int abort_request;
-    int paused;
-	int last_paused;
+    /* General purpose members */
+    SDL_Thread *parse_tid; /* Thread id for the decode_thread call */
+    int abort_request;     /* Tells whether or not to stop playing and return */
+    int paused; 		   /* Boolean for communicating to the threads to pause playback */
+	int last_paused;       /* For comparing the state of paused to what it was last time around. */
+    char filename[1024];
+    int overlay; //>0 if we are to use the overlay, otherwise <=0
+    int playing;
+    int height;
+    int width;
+    int ytop;
+    int xleft;
+    int loops;
+	int64_t start_time;
+	AVInputFormat *iformat;
+	SDL_mutex *dest_mutex;
+	int av_sync_type;
+	AVFormatContext *ic;    /* context information about the format of the video file */
+	
+	/* Seek-info */
+    int seek_req;
+    int seek_flags;
+    int64_t seek_pos;
 
-	/* We create a cumulative average of the time to render and 
-	 * the time to decode a video frame, and add those to the 
-	 * timing value */
-	double ca_render;   //actual cumulative average value
-	double ca_render_i; //need to keep track of how many values we've accumulated
-
-	double ca_decode;  //actual cumulative average value
-	double ca_decode_i;//need to keep track of how many values we've accumulated
-	
-	VidPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
-	int pictq_size, pictq_windex, pictq_rindex;
-	
-	AVFormatContext *ic;    /* context information about the format of the video file */
-    double external_clock; /* external clock base */
+	/* external clock members */
+	double external_clock; /* external clock base */
     int64_t external_clock_time;
 
-	SDL_Thread *video_tid;
-
+	/* Audio stream members */
     double audio_clock;
     double audio_diff_cum; /* used for AV difference average computation */
     double audio_diff_avg_coef;
     int audio_volume; /*must self implement*/
 	enum SampleFormat audio_src_fmt;
     AVAudioConvert *reformat_ctx;
-    
-	AVInputFormat *iformat;
-
-	int audio_stream;
-	int video_stream;
-
-    int16_t sample_array[SAMPLE_ARRAY_SIZE];
-    int sample_array_index;
-
+    int audio_stream;
+	int audio_disable;
+	
+	/* Frame/Video Management members */
     int frame_count;
     double frame_timer;
     double frame_last_pts;
     double frame_last_delay;
+    double last_frame_delay;
     double frame_delay; /*display time of each frame, based on fps*/
     double video_clock; /*seconds of video frame decoded*/
     AVStream *video_st;
 	double timing;
 	double last_showtime;
 	double pts;	
+	int video_stream;
+    SDL_Overlay *dest_overlay;
+    SDL_Surface *dest_surface;
+    SDL_Rect dest_rect;
+	
+	/* simple ring_buffer queue for holding VidPicture structs */
+	VidPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
+	int pictq_size, pictq_windex, pictq_rindex;
+
+	/* Thread id for the video_thread, when used in threaded mode */
+	SDL_Thread *video_tid;
+
 	PacketQueue videoq;
 	SDL_mutex *videoq_mutex;
 	SDL_cond *videoq_cond;
 
-	int av_sync_type;
-	
+	/* subtitle members */	
     SDL_Thread *subtitle_tid;                    //thread id for subtitle decode thread
     int subtitle_stream;                         //which subtitle thread we want
     int subtitle_stream_changed;                 //if the subtitle-stream has changed
     SDL_mutex *subpq_mutex;
     SDL_cond *subpq_cond;
 
-    SDL_mutex *dest_mutex;
-    double dest_showtime; /*when to next show the dest_overlay*/
-    SDL_Overlay *dest_overlay;
-    SDL_Surface *dest_surface;
-    SDL_Rect dest_rect;
-
-	double last_frame_delay;
-	
-    double time_offset; /*track paused time*/
-    
-    int audio_disable;
-    
-    char filename[1024];
-    
-    int overlay; //>0 if we are to use the overlay, otherwise <=0
- 
-    int playing;
-    int height;
-    int width;
-    
-    int ytop;
-    int xleft;
-    
-    int loops;
-
-	int64_t start_time;
-	
-    int seek_req;
-    int seek_flags;
-    int64_t seek_pos;
-
 } PyMovie;
 /* end of struct definitions */
 /* function definitions */
 
 /* 		PacketQueue Management */
 void packet_queue_init(PacketQueue *q);
- void packet_queue_flush(PacketQueue *q);
- void packet_queue_end(PacketQueue *q);
- int packet_queue_put(PacketQueue *q, AVPacket *pkt);
- void packet_queue_abort(PacketQueue *q);
- int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block);
+void packet_queue_flush(PacketQueue *q);
+void packet_queue_end(PacketQueue *q, int end);
+int packet_queue_put(PacketQueue *q, AVPacket *pkt);
+void packet_queue_abort(PacketQueue *q);
+int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block);
 
 /* 		Misc*/
- void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh);
- void free_subpicture(SubPicture *sp);
- double calc_ca(int64_t diff, double ca, double i);
- int ff_get_buffer(struct AVCodecContext *c, AVFrame *pic);
- void ff_release_buffer(struct AVCodecContext *c, AVFrame *pic);
-
+void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh);
+void free_subpicture(SubPicture *sp);
+double calc_ca(int64_t diff, double ca, double i);
+int ff_get_buffer(struct AVCodecContext *c, AVFrame *pic);
+void ff_release_buffer(struct AVCodecContext *c, AVFrame *pic);
 
 /* 		Video Management */
- int video_open(PyMovie *is, int index);
- void video_image_display(PyMovie *is);
- int video_display(PyMovie *is);
- int video_thread(void *arg);
- int video_render(PyMovie *movie);
- int queue_picture(PyMovie *is, AVFrame *src_frame);
- void update_video_clock(PyMovie *movie, AVFrame* frame, double pts);
- void video_refresh_timer(PyMovie *movie); //unlike in ffplay, this does the job of compute_frame_delay
-
+int video_open(PyMovie *is, int index);
+void video_image_display(PyMovie *is);
+int video_display(PyMovie *is);
+int video_thread(void *arg);
+int video_render(PyMovie *movie);
+int queue_picture(PyMovie *is, AVFrame *src_frame);
+void update_video_clock(PyMovie *movie, AVFrame* frame, double pts);
+void video_refresh_timer(PyMovie *movie); //unlike in ffplay, this does the job of compute_frame_delay
 
 /* 		Audio management */
- int audio_write_get_buf_size(PyMovie *is);
- int synchronize_audio(PyMovie *is, short *samples, int samples_size1, double pts);
- int audio_decode_frame(PyMovie *is, double *pts_ptr);
- void sdl_audio_callback(void *opaque, Uint8 *stream, int len);
-
+int audio_write_get_buf_size(PyMovie *is);
+int synchronize_audio(PyMovie *is, short *samples, int samples_size1, double pts);
+int audio_decode_frame(PyMovie *is, double *pts_ptr);
+void sdl_audio_callback(void *opaque, Uint8 *stream, int len);
 
 /* 		Subtitle management */
- int subtitle_thread(void *arg);
-
+int subtitle_thread(void *arg);
 
 /* 		General Movie Management */
- void stream_seek(PyMovie *is, int64_t pos, int rel);
- void stream_pause(PyMovie *is);
- int stream_component_open(PyMovie *is, int stream_index); //TODO: break down into separate functions
- void stream_component_close(PyMovie *is, int stream_index);
- int decode_thread(void *arg);
- int decoder(PyMovie *is);
- PyMovie *stream_open(PyMovie *is, const char *filename, AVInputFormat *iformat);
- void stream_close(PyMovie *is);
- void stream_cycle_channel(PyMovie *is, int codec_type);
+void stream_seek(PyMovie *is, int64_t pos, int rel);
+void stream_pause(PyMovie *is);
+int stream_component_open(PyMovie *is, int stream_index); //TODO: break down into separate functions
+void stream_component_close(PyMovie *is, int stream_index);
+int decode_thread(void *arg);
+int decoder(PyMovie *is);
+PyMovie *stream_open(PyMovie *is, const char *filename, AVInputFormat *iformat);
+void stream_close(PyMovie *is);
+void stream_cycle_channel(PyMovie *is, int codec_type);
 
 /* 		Clock Management */
- double get_audio_clock(PyMovie *is);
- double get_video_clock(PyMovie *is);
- double get_external_clock(PyMovie *is);
- double get_master_clock(PyMovie *is);
+double get_audio_clock(PyMovie *is);
+double get_video_clock(PyMovie *is);
+double get_external_clock(PyMovie *is);
+double get_master_clock(PyMovie *is);
 
 /*		Frame Management */
 // double compute_frame_delay(double frame_current_pts, PyMovie *is);
 	if(!self)
 	{
 		PyErr_SetString(PyExc_IOError, "stream_open failed");
-        //printf(stdout, "stream_open failed.\n");
         Py_DECREF(self);
         Py_RETURN_NONE;
     }	
 	if (!PyArg_ParseTuple (args, "s", &c))
     {
         PyErr_SetString(PyExc_TypeError, "No valid arguments");
-        //Py_RETURN_NONE;
     	return -1;
     }	
 	self = _movie_init_internal(self, c, NULL);
-	PyGILState_STATE gstate;
-	gstate = PyGILState_Ensure();
 	PyObject *er;
     er = PyErr_Occurred();
     if(er)
         PyErr_SetString(PyExc_IOError, "No movie object created.");
         PyErr_Print();
         Py_DECREF(self);
-		PyGILState_Release(gstate);
         return -1;
     }
     Py_DECREF(self);
     PySys_WriteStdout("Returning from _movie_init\n");
-	if(gstate!=PyGILState_UNLOCKED)PyGILState_Release(gstate);
     return 0;
 }   
 
  void _movie_dealloc(PyMovie *movie)
 {
- 	PyGILState_STATE gstate;
- 	gstate=PyGILState_Ensure();
     stream_close(movie);
     movie->ob_type->tp_free((PyObject *) movie);
-	PyGILState_Release(gstate);
 }
 
  PyObject* _movie_repr (PyMovie *movie)
 {
     return PyInt_FromLong((long)movie->paused);
 }
- PyObject* _movie_get_playing (PyMovie *movie, void *closure)
+PyObject* _movie_get_playing (PyMovie *movie, void *closure)
 {
     PyObject *pyo;
     pyo= PyInt_FromLong((long)movie->playing);
     return pyo;
 }
 
+PyObject* _movie_get_width (PyMovie *movie, void *closure)
+{
+    PyObject *pyo;
+    pyo= PyInt_FromLong((long)movie->width);
+    return pyo;
+}
+
+PyObject* _movie_get_height (PyMovie *movie, void *closure)
+{
+    PyObject *pyo;
+    pyo= PyInt_FromLong((long)movie->height);
+    return pyo;
+}
+
  static PyMethodDef _movie_methods[] = {
    { "play",    (PyCFunction) _movie_play, METH_VARARGS,
                "Play the movie file from current time-mark. If loop<0, then it will loop infinitely. If there is no loop value, then it will play once." },
 {
     { "paused", (getter) _movie_get_paused, NULL, NULL, NULL },
     { "playing", (getter) _movie_get_playing, NULL, NULL, NULL },
+    { "height", (getter) _movie_get_height, NULL, NULL, NULL },
+    { "width", (getter) _movie_get_width, NULL, NULL, NULL },
     { NULL, NULL, NULL, NULL, NULL }
 };
 
 #include "_gmovie.h"
 
 /*class methods and internals */
- PyMovie*  _movie_init_internal(PyMovie *self, const char *filename, SDL_Surface *surf);
- int       _movie_init         (PyObject *self, PyObject *args, PyObject *kwds);
- void      _movie_dealloc      (PyMovie *movie);
- PyObject* _movie_repr         (PyMovie *movie);
- PyObject* _movie_play         (PyMovie *movie, PyObject* args);
- PyObject* _movie_stop         (PyMovie *movie);
- PyObject* _movie_pause        (PyMovie *movie);
- PyObject* _movie_rewind       (PyMovie *movie, PyObject* args);
+PyMovie*  _movie_init_internal(PyMovie *self, const char *filename, SDL_Surface *surf);
+int       _movie_init         (PyObject *self, PyObject *args, PyObject *kwds);
+void      _movie_dealloc      (PyMovie *movie);
+PyObject* _movie_repr         (PyMovie *movie);
+PyObject* _movie_play         (PyMovie *movie, PyObject* args);
+PyObject* _movie_stop         (PyMovie *movie);
+PyObject* _movie_pause        (PyMovie *movie);
+PyObject* _movie_rewind       (PyMovie *movie, PyObject* args);
 
 /* Getters/setters */
- PyObject* _movie_get_paused  (PyMovie *movie, void *closure);
- PyObject* _movie_get_playing (PyMovie *movie, void *closure);
+PyObject* _movie_get_paused  (PyMovie *movie, void *closure);
+PyObject* _movie_get_playing (PyMovie *movie, void *closure);
+PyObject* _movie_get_width   (PyMovie *movie, void *closure);
+PyObject* _movie_get_height  (PyMovie *movie, void *closure);
 
 #endif /*GMOVIE_H_*/