commit 8ef87b6b0534a2656ec4307819e0749f6c49d4e5
Author: Mark Spieth <mspieth@digivation.com.au>
Date:   Tue Apr 27 07:51:51 2010 +1000

    smoother vsync with predictive frame skipping

diff --git a/mythtv/libs/libmyth/audiooutput.h b/mythtv/libs/libmyth/audiooutput.h
index 8947981..4fe5b8f 100644
--- a/mythtv/libs/libmyth/audiooutput.h
+++ b/mythtv/libs/libmyth/audiooutput.h
@@ -3,6 +3,7 @@
 
 #include <QString>
 
+#include "compat.h"
 #include "audiosettings.h"
 #include "mythcorecontext.h"
 #include "volumebase.h"
@@ -41,9 +42,9 @@ class MPUBLIC AudioOutput : public VolumeBase, public OutputListeners
 
     virtual void Reset(void) = 0;
 
-    virtual bool AddSamples(void *buffer, int samples, long long timecode) = 0;
+    virtual bool AddSamples(void *buffer, int samples, int64_t timecode) = 0;
 
-    virtual void SetTimecode(long long timecode) = 0;
+    virtual void SetTimecode(int64_t timecode) = 0;
     virtual bool IsPaused(void) const = 0;
     virtual void Pause(bool paused) = 0;
     virtual void PauseUntilBuffered(void) = 0;
@@ -51,10 +52,10 @@ class MPUBLIC AudioOutput : public VolumeBase, public OutputListeners
     // Wait for all data to finish playing
     virtual void Drain(void) = 0;
 
-    virtual int GetAudiotime(void) = 0;
+    virtual int64_t GetAudiotime(void) = 0;
 
     /// report amount of audio buffered in milliseconds.
-    virtual int GetAudioBufferedTime(void) { return 0; }
+    virtual int64_t GetAudioBufferedTime(void) { return 0; }
 
     virtual void SetSourceBitrate(int ) { }
 
diff --git a/mythtv/libs/libmyth/audiooutputbase.cpp b/mythtv/libs/libmyth/audiooutputbase.cpp
index 9213adf..dbb3815 100644
--- a/mythtv/libs/libmyth/audiooutputbase.cpp
+++ b/mythtv/libs/libmyth/audiooutputbase.cpp
@@ -56,6 +56,7 @@ AudioOutputBase::AudioOutputBase(const AudioSettings &settings) :
     passthru(false),            enc(false),
     reenc(false),
     stretchfactor(1.0f),
+    eff_stretchfactor(100000),
 
     source(settings.source),    killaudio(false),
 
@@ -179,6 +180,7 @@ void AudioOutputBase::SetStretchFactorLocked(float lstretchfactor)
         return;
 
     stretchfactor = lstretchfactor;
+    eff_stretchfactor = (int)(100000.0f * lstretchfactor + 0.5);
     if (pSoundStretch)
     {
         VBGENERAL(QString("Changing time stretch to %1").arg(stretchfactor));
@@ -202,6 +204,7 @@ void AudioOutputBase::SetStretchFactorLocked(float lstretchfactor)
             bytes_per_frame = source_channels *
                               AudioOutputSettings::SampleSize(FORMAT_FLT);
             waud = raud = 0;
+            reset_active.Ref();
         }
     }
 }
@@ -236,6 +239,7 @@ bool AudioOutputBase::ToggleUpmix(void)
     audio_buflock.lock();
     avsync_lock.lock();
     waud = raud = 0;
+    reset_active.Ref();
 
     configured_channels =
         configured_channels == max_channels ? 2 : max_channels;
@@ -311,6 +315,7 @@ void AudioOutputBase::Reconfigure(const AudioSettings &orig_settings)
     QMutexLocker lockav(&avsync_lock);
 
     waud = raud = 0;
+    reset_active.Clear();
     actually_paused = processing = false;
 
     channels               = settings.channels;
@@ -582,7 +587,9 @@ void AudioOutputBase::Reset()
     QMutexLocker lock(&audio_buflock);
     QMutexLocker lockav(&avsync_lock);
 
-    raud = waud = audbuf_timecode = audiotime = frames_buffered = 0;
+    audbuf_timecode = audiotime = frames_buffered = 0;
+    waud = raud;    // empty ring buffer
+    reset_active.Ref();
     current_seconds = -1;
     was_paused = !pauseaudio;
 
@@ -596,10 +603,10 @@ void AudioOutputBase::Reset()
  * Used by mythmusic for seeking since it doesn't provide timecodes to
  * AddSamples()
  */
-void AudioOutputBase::SetTimecode(long long timecode)
+void AudioOutputBase::SetTimecode(int64_t timecode)
 {
     audbuf_timecode = audiotime = timecode;
-    frames_buffered = (long long)((timecode * source_samplerate) / 1000);
+    frames_buffered = (int64_t)((timecode * source_samplerate) / 1000);
 }
 
 /**
@@ -654,18 +661,16 @@ int AudioOutputBase::audioready()
 /**
  * Calculate the timecode of the samples that are about to become audible
  */
-int AudioOutputBase::GetAudiotime(void)
+int64_t AudioOutputBase::GetAudiotime(void)
 {
     if (audbuf_timecode == 0)
         return 0;
 
-    int soundcard_buffer = 0;
     int obpf = output_bytes_per_frame;
-    int totalbuffer;
-    long long oldaudiotime;
+    int64_t oldaudiotime;
 
     /* We want to calculate 'audiotime', which is the timestamp of the audio
-       which is leaving the sound card at this instant.
+       which is leaving the sound card at this instant.
 
        We use these variables:
 
@@ -677,31 +682,22 @@ int AudioOutputBase::GetAudiotime(void)
        'totalbuffer' is the total # of bytes in our audio buffer, and the
        sound card's buffer. */
 
-    soundcard_buffer = GetBufferedOnSoundcard(); // bytes
 
     QMutexLocker lockav(&avsync_lock);
 
+    int64_t soundcard_buffer = GetBufferedOnSoundcard(); // bytes
+    int64_t main_buffer = audioready();
+
     /* audioready tells us how many bytes are in audiobuffer
        scaled appropriately if output format != internal format */
-    totalbuffer = audioready() + soundcard_buffer;
-
-    if (needs_upmix && upmixer)
-        totalbuffer += upmixer->frameLatency() * obpf;
-
-    if (pSoundStretch)
-    {
-        totalbuffer += pSoundStretch->numUnprocessedSamples() * obpf /
-                       stretchfactor;
-        totalbuffer += pSoundStretch->numSamples() * obpf;
-    }
-
-    if (encoder)
-        totalbuffer += encoder->Buffered();
 
     oldaudiotime = audiotime;
 
-    audiotime = audbuf_timecode - (long long)(totalbuffer) * 100000 *
-                                        stretchfactor / (obpf * effdsp);
+    // timecode is the stretch-adjusted version of the major
+    // post-stretch buffer contents; processing latencies are
+    // accounted for in AddSamples/SetAudiotime, which eliminates
+    // the race
+    audiotime = audbuf_timecode - (( (main_buffer + soundcard_buffer) * eff_stretchfactor ) / (effdsp * obpf));
 
     /* audiotime should never go backwards, but we might get a negative
        value if GetBufferedOnSoundcard() isn't updated by the driver very
@@ -709,13 +705,56 @@ int AudioOutputBase::GetAudiotime(void)
     if (audiotime < oldaudiotime)
         audiotime = oldaudiotime;
 
-    VBAUDIOTS(QString("GetAudiotime audt=%3 atc=%4 tb=%5 sb=%6 "
-                      "sr=%7 obpf=%8 sf=%9")
+    VBAUDIOTS(QString("GetAudiotime audt=%1 atc=%2 mb=%3 sb=%4 tb=%5 "
+                      "sr=%6 obpf=%7 bpf=%8 sf=%9 %10 %11")
               .arg(audiotime).arg(audbuf_timecode)
-              .arg(totalbuffer).arg(soundcard_buffer)
-              .arg(samplerate).arg(obpf).arg(stretchfactor));
+              .arg(main_buffer)
+              .arg(soundcard_buffer)
+              .arg(main_buffer+soundcard_buffer)
+              .arg(samplerate).arg(obpf).arg(bytes_per_frame).arg(stretchfactor)
+              .arg((main_buffer + soundcard_buffer) * eff_stretchfactor)
+              .arg(( (main_buffer + soundcard_buffer) * eff_stretchfactor ) / (effdsp * obpf))
+              );
+
+    return audiotime;
+}
+
+/**
+ * Set the timecode of the top of the ringbuffer
+ * Exclude all other processing elements as they don't vary
+ * between AddSamples calls
+ */
+void AudioOutputBase::SetAudiotime(int frames, int64_t timecode)
+{
+    int64_t processframes_stretched = 0;
+    int64_t processframes_unstretched = 0;
+
+    if (needs_upmix && upmixer)
+        processframes_unstretched -= upmixer->frameLatency();
 
-    return (int)audiotime;
+    if (pSoundStretch)
+    {
+        processframes_unstretched -= pSoundStretch->numUnprocessedSamples();
+        processframes_stretched -= pSoundStretch->numSamples();
+    }
+
+    if (encoder)
+        // the input buffered data is still in audio_bytes_per_sample format
+        processframes_stretched -= encoder->Buffered() / output_bytes_per_frame;
+
+    audbuf_timecode = timecode + 
+                (((frames + processframes_unstretched) * 100000) +
+                  (processframes_stretched * eff_stretchfactor )) / effdsp;
+
+    VBAUDIOTS(QString("SetAudiotime atc=%1 tc=%2 f=%3 pfu=%4 pfs=%5")
+              .arg(audbuf_timecode)
+              .arg(timecode)
+              .arg(frames)
+              .arg(processframes_unstretched)
+              .arg(processframes_stretched));
+#ifdef AUDIOTSTESTING
+    GetAudiotime();
+#endif
 }
 
 /**
@@ -723,7 +762,7 @@ int AudioOutputBase::GetAudiotime(void)
  * audible and the samples most recently added to the audiobuffer, i.e. the
  * time in ms representing the sum total of buffered samples
  */
-int AudioOutputBase::GetAudioBufferedTime(void)
+int64_t AudioOutputBase::GetAudioBufferedTime(void)
 {
     int ret = audbuf_timecode - GetAudiotime();
     // Pulse can give us values that make this -ve
@@ -869,12 +908,14 @@ int AudioOutputBase::CopyWithUpmix(char *buffer, int frames, int &org_waud)
  *
  * Returns false if there's not enough space right now
  */
-bool AudioOutputBase::AddSamples(void *buffer, int frames, long long timecode)
+bool AudioOutputBase::AddSamples(void *buffer, int in_frames, int64_t timecode)
 {
     int org_waud = waud,               afree = audiofree();
-    int bpf      = bytes_per_frame,    len   = frames * source_bytes_per_frame;
+    int frames = in_frames;
+    int bpf      = bytes_per_frame,    len   = in_frames * source_bytes_per_frame;
     int used     = kAudioRingBufferSize - afree;
     bool music   = false;
+    int bdiff;
 
     VBAUDIOTS(QString("AddSamples frames=%1, bytes=%2, used=%3, free=%4, "
                       "timecode=%5 needsupmix=%6")
@@ -896,7 +937,7 @@ bool AudioOutputBase::AddSamples(void *buffer, int frames, long long timecode)
     if (timecode < 0)
     {
         // Send original samples to mythmusic visualisation
-        timecode = (long long)(frames_buffered) * 1000 / source_samplerate;
+        timecode = (int64_t)(frames_buffered) * 1000 / source_samplerate;
         frames_buffered += frames;
         dispatchVisual((uchar *)buffer, len, timecode, source_channels,
                        output_settings->FormatToBits(format));
@@ -949,7 +990,7 @@ bool AudioOutputBase::AddSamples(void *buffer, int frames, long long timecode)
                     .arg(src_strerror(error)));
 
         buffer = src_out;
-        frames = src_data.output_frames_gen;
+        in_frames = frames = src_data.output_frames_gen;
     }
     else if (processing)
         buffer = src_in;
@@ -957,15 +998,18 @@ bool AudioOutputBase::AddSamples(void *buffer, int frames, long long timecode)
     /* we want the timecode of the last sample added but we are given the
        timecode of the first - add the time in ms that the frames added
        represent */
-    audbuf_timecode = timecode + ((long long)(frames) * 100000 / effdsp);
+    //audbuf_timecode = timecode + ((int64_t)((frames) * 100000) / effdsp);
 
     // Copy samples into audiobuffer, with upmix if necessary
     if ((len = CopyWithUpmix((char *)buffer, frames, org_waud)) <= 0)
-        return true;
+    {
+        //return true;
+        goto done;
+    }
 
     frames = len / bpf;
 
-    int bdiff = kAudioRingBufferSize - waud;
+    bdiff = kAudioRingBufferSize - waud;
 
     if (pSoundStretch)
     {
@@ -1043,6 +1087,9 @@ bool AudioOutputBase::AddSamples(void *buffer, int frames, long long timecode)
 
     waud = org_waud;
 
+done:
+    SetAudiotime(in_frames, timecode);
+
     return true;
 }
 
@@ -1090,6 +1137,12 @@ void AudioOutputBase::OutputAudioLoop(void)
     uchar *fragment_buf = new uchar[fragment_size + 16];
     uchar *fragment     = (uchar *)AOALIGN(fragment_buf[0]);
 
+    // to reduce startup latency, write silence in 8ms chunks
+    int zero_fragment_size = (int)(0.008*samplerate/channels);
+    zero_fragment_size *= bytes_per_frame;   // make sure it's a multiple of bytes_per_frame
+    if (zero_fragment_size > fragment_size)
+        zero_fragment_size = fragment_size;
+
     bzero(zeros, fragment_size);
 
     while (!killaudio)
@@ -1138,10 +1191,29 @@ void AudioOutputBase::OutputAudioLoop(void)
             continue;
         }
 
+#ifdef AUDIOTSTESTING
+        VBAUDIOTS("WriteAudio Start");
+#endif
         Status();
 
-        if (GetAudioData(fragment, fragment_size, true))
-            WriteAudio(fragment, fragment_size);
+        // delay setting raud until after phys buffer is filled
+        // so GetAudiotime will be accurate without locking
+        reset_active.TestAndDeref();
+        int next_raud = raud;
+        if (GetAudioData(fragment, fragment_size, true, &next_raud))
+        {
+            if (!reset_active.TestAndDeref())
+            {
+                WriteAudio(fragment, fragment_size);
+                if (!reset_active.TestAndDeref())
+                    raud = next_raud;
+            }
+        }
+#ifdef AUDIOTSTESTING
+        GetAudiotime();
+        VBAUDIOTS("WriteAudio Done");
+#endif
+
     }
 
     delete[] zeros;
@@ -1158,15 +1230,19 @@ void AudioOutputBase::OutputAudioLoop(void)
  * nothing. Otherwise, we'll copy less than 'size' bytes if that's all that's
  * available. Returns the number of bytes copied.
  */
-int AudioOutputBase::GetAudioData(uchar *buffer, int size, bool full_buffer)
+int AudioOutputBase::GetAudioData(uchar *buffer, int size, bool full_buffer, int *local_raud)
 {
 
+#define LRPOS audiobuffer + *local_raud
     // re-check audioready() in case things changed.
     // for example, ClearAfterSeek() might have run
     int avail_size   = audioready();
     int frag_size    = size;
     int written_size = size;
 
+    if (local_raud == NULL)
+        local_raud = &raud;
+
     if (!full_buffer && (size > avail_size))
     {
         // when full_buffer is false, return any available data
@@ -1192,26 +1268,26 @@ int AudioOutputBase::GetAudioData(uchar *buffer, int size, bool full_buffer)
     {
         if (fromFloats)
             off = AudioOutputUtil::fromFloat(output_format, buffer,
-                                             RPOS, bdiff);
+                                             LRPOS, bdiff);
         else
         {
-            memcpy(buffer, RPOS, bdiff);
+            memcpy(buffer, LRPOS, bdiff);
             off = bdiff;
         }
 
         frag_size -= bdiff;
-        raud = 0;
+        *local_raud = 0;
     }
     if (frag_size > 0)
     {
         if (fromFloats)
             AudioOutputUtil::fromFloat(output_format, buffer + off,
-                                       RPOS, frag_size);
+                                       LRPOS, frag_size);
         else
-            memcpy(buffer + off, RPOS, frag_size);
+            memcpy(buffer + off, LRPOS, frag_size);
     }
 
-    raud += frag_size;
+    *local_raud += frag_size;
 
     // Mute individual channels through mono->stereo duplication
     MuteState mute_state = GetMuteState();
diff --git a/mythtv/libs/libmyth/audiooutputbase.h b/mythtv/libs/libmyth/audiooutputbase.h
index 51e9be6..84e709d 100644
--- a/mythtv/libs/libmyth/audiooutputbase.h
+++ b/mythtv/libs/libmyth/audiooutputbase.h
@@ -32,6 +32,18 @@ class FreeSurround;
 class AudioOutputDigitalEncoder;
 struct AVCodecContext;
 
+class AsyncLooseLock
+{
+public:
+    AsyncLooseLock() { head = tail = 0; }
+    void Clear() { head = tail = 0; }
+    void Ref() { head++; }
+    bool TestAndDeref() { bool r; if ((r=(head != tail))) tail++; return r; }
+private:
+    int head;
+    int tail;
+};
+
 class AudioOutputBase : public AudioOutput, public QThread
 {
  public:
@@ -57,9 +69,9 @@ class AudioOutputBase : public AudioOutput, public QThread
     int GetSWVolume(void);
 
     // timecode is in milliseconds.
-    virtual bool AddSamples(void *buffer, int frames, long long timecode);
+    virtual bool AddSamples(void *buffer, int frames, int64_t timecode);
 
-    virtual void SetTimecode(long long timecode);
+    virtual void SetTimecode(int64_t timecode);
     virtual bool IsPaused(void) const { return actually_paused; }
     virtual void Pause(bool paused);
     void PauseUntilBuffered(void);
@@ -67,8 +79,8 @@ class AudioOutputBase : public AudioOutput, public QThread
     // Wait for all data to finish playing
     virtual void Drain(void);
 
-    virtual int GetAudiotime(void);
-    virtual int GetAudioBufferedTime(void);
+    virtual int64_t GetAudiotime(void);
+    virtual int64_t GetAudioBufferedTime(void);
 
     // Send output events showing current progress
     virtual void Status(void);
@@ -83,8 +95,8 @@ class AudioOutputBase : public AudioOutput, public QThread
 
     static const uint kAudioSRCInputSize  = 16384<<1;
     static const uint kAudioSRCOutputSize = 16384<<3;
-    /// Audio Buffer Size -- should be divisible by 12,10,8,6,4,2..
-    static const uint kAudioRingBufferSize   = 1536000;
+    /// Audio Buffer Size -- should be divisible by 32,24,16,12,10,8,6,4,2..
+    static const uint kAudioRingBufferSize   = 3072000;
 
  protected:
     // You need to implement the following functions
@@ -102,7 +114,7 @@ class AudioOutputBase : public AudioOutput, public QThread
     virtual bool StartOutputThread(void);
     virtual void StopOutputThread(void);
 
-    int GetAudioData(uchar *buffer, int buf_size, bool fill_buffer);
+    int GetAudioData(uchar *buffer, int buf_size, bool fill_buffer, int *local_raud = NULL);
 
     void OutputAudioLoop(void);
 
@@ -138,6 +150,7 @@ class AudioOutputBase : public AudioOutput, public QThread
     bool passthru, enc, reenc;
 
     float stretchfactor;
+    int  eff_stretchfactor;     // scaled to 100000 as effdsp is
     AudioOutputSource source;
 
     bool killaudio;
@@ -153,6 +166,7 @@ class AudioOutputBase : public AudioOutput, public QThread
 
  private:
     int CopyWithUpmix(char *buffer, int frames, int &org_waud);
+    void SetAudiotime(int frames, int64_t timecode);
     AudioOutputSettings *output_settings;
     bool need_resampler;
     SRC_STATE *src_ctx;
@@ -173,7 +187,7 @@ class AudioOutputBase : public AudioOutput, public QThread
 
     bool processing;
 
-    long long frames_buffered;
+    int64_t frames_buffered;
 
     bool audio_thread_exists;
 
@@ -186,12 +200,13 @@ class AudioOutputBase : public AudioOutput, public QThread
     QMutex avsync_lock;
 
     // timecode of audio leaving the soundcard (same units as timecodes)
-    long long audiotime;
+    int64_t audiotime;
 
     /* Audio circular buffer */
     int raud, waud;     /* read and write positions */
     // timecode of audio most recently placed into buffer
-    long long audbuf_timecode;
+    int64_t audbuf_timecode;
+    AsyncLooseLock reset_active;
 
     QMutex killAudioLock;
 
diff --git a/mythtv/libs/libmythfreesurround/freesurround.cpp b/mythtv/libs/libmythfreesurround/freesurround.cpp
index 5e8b1f5..aef65a3 100644
--- a/mythtv/libs/libmythfreesurround/freesurround.cpp
+++ b/mythtv/libs/libmythfreesurround/freesurround.cpp
@@ -29,35 +29,13 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 using namespace std;
 
 #include "compat.h"
+#include "mythverbose.h"
 #include "freesurround.h"
 #include "el_processor.h"
 
 #include <QString>
 #include <QDateTime>
 
-#if 0
-#define VERBOSE(args...) \
-    do { \
-        QDateTime dtmp = QDateTime::currentDateTime(); \
-        QString dtime = dtmp.toString("yyyy-MM-dd hh:mm:ss.zzz"); \
-        std::cout << dtime.toLocal8Bit().constData() << " " \
-            << QString(args).toLocal8Bit().constData() << std::endl; \
-    } while (0)
-#else
-#define VERBOSE(args...)
-#endif
-#if 0
-#define VERBOSE1(args...) \
-    do { \
-        QDateTime dtmp = QDateTime::currentDateTime(); \
-        QString dtime = dtmp.toString("yyyy-MM-dd hh:mm:ss.zzz"); \
-        std::cout << dtime.toLocal8Bit().constData() << " " \
-            << QString(args).toLocal8Bit().constData() << std::endl; \
-    } while (0)
-#else
-#define VERBOSE1(args...)
-#endif
-
 // our default internal block size, in floats
 static const unsigned default_block_size = 8192;
 // Gain of center and lfe channels in passive mode (sqrt 0.5)
@@ -161,7 +139,7 @@ FreeSurround::FreeSurround(uint srate, bool moviemode, SurroundMode smode) :
     processed_size(0),
     surround_mode(smode)
 {
-    VERBOSE(QString("FreeSurround::FreeSurround rate %1 moviemode %2")
+    VERBOSE(VB_AUDIO+VB_EXTRA, QString("FreeSurround::FreeSurround rate %1 moviemode %2")
             .arg(srate).arg(moviemode));
 
     if (moviemode)
@@ -193,10 +171,10 @@ FreeSurround::FreeSurround(uint srate, bool moviemode, SurroundMode smode) :
     channel_select++;
     if (channel_select>=6)
         channel_select = 0;
-    VERBOSE(QString("FreeSurround::FreeSurround channel_select %1")
+    VERBOSE(VB_AUDIO+VB_EXTRA, QString("FreeSurround::FreeSurround channel_select %1")
             .arg(channel_select));
 #endif
-    VERBOSE(QString("FreeSurround::FreeSurround done"));
+    VERBOSE(VB_AUDIO+VB_EXTRA, QString("FreeSurround::FreeSurround done"));
 }
 
 void FreeSurround::SetParams()
@@ -224,14 +202,14 @@ FreeSurround::fsurround_params::fsurround_params(int32_t center_width,
 
 FreeSurround::~FreeSurround()
 {
-    VERBOSE(QString("FreeSurround::~FreeSurround"));
+    VERBOSE(VB_AUDIO+VB_EXTRA, QString("FreeSurround::~FreeSurround"));
     close();
     if (bufs)
     {
         bp.release((void*)1);
         bufs = NULL;
     }
-    VERBOSE(QString("FreeSurround::~FreeSurround done"));
+    VERBOSE(VB_AUDIO+VB_EXTRA, QString("FreeSurround::~FreeSurround done"));
 }
 
 uint FreeSurround::putFrames(void* buffer, uint numFrames, uint numChannels)
@@ -289,19 +267,25 @@ uint FreeSurround::putFrames(void* buffer, uint numFrames, uint numChannels)
                     break;
             }
             ic += numFrames;
-            in_count = ic;
             processed = process;
             if (ic != bs)
+            {
+                // don't modify in_count unless no processing is to be done,
+                // for audiotime consistency
+                in_count = ic;
                 break;
-            in_count = 0;
+            }
+            // process_block takes some time, so don't update the in and out
+            // counts until it has finished, so Audiotime is calculated correctly
             if (process)
                 process_block();
+            in_count = 0;
             out_count = bs;
             processed_size = bs;
             break;
     }
 
-    VERBOSE1(QString("FreeSurround::putFrames %1 %2 used %4 generated %5")
+    VERBOSE(VB_AUDIO+VB_TIMESTAMP+VB_EXTRA, QString("FreeSurround::putFrames %1 #ch %2 used %4 generated %5")
             .arg(numFrames).arg(numChannels).arg(i).arg(out_count));
 
     return i;
@@ -318,7 +302,7 @@ uint FreeSurround::receiveFrames(void *buffer, uint maxFrames)
     switch (surround_mode)
     {
         case SurroundModePassive:
-            for (uint i = 0; i < maxFrames; i++) 
+            for (i = 0; i < maxFrames; i++) 
             {
                 *output++ = bufs->l[outindex];
                 *output++ = bufs->r[outindex];
@@ -341,7 +325,7 @@ uint FreeSurround::receiveFrames(void *buffer, uint maxFrames)
                 float *ls  = &outputs[3][outindex];
                 float *rs  = &outputs[4][outindex];
                 float *lfe = &outputs[5][outindex];
-                for (uint i = 0; i < maxFrames; i++) 
+                for (i = 0; i < maxFrames; i++) 
                 {
                     *output++ = *l++;
                     *output++ = *r++;
@@ -361,7 +345,7 @@ uint FreeSurround::receiveFrames(void *buffer, uint maxFrames)
                 float *ls  = &bufs->ls[outindex];
                 float *rs  = &bufs->rs[outindex];
                 float *lfe = &bufs->lfe[outindex];
-                for (uint i = 0; i < maxFrames; i++) 
+                for (i = 0; i < maxFrames; i++) 
                 {
                     *output++ = *l++;
                     *output++ = *r++;
@@ -376,7 +360,7 @@ uint FreeSurround::receiveFrames(void *buffer, uint maxFrames)
             break;
     }
     out_count = oc;
-    VERBOSE1(QString("FreeSurround::receiveFrames %1").arg(maxFrames));
+    VERBOSE(VB_AUDIO+VB_TIMESTAMP+VB_EXTRA, QString("FreeSurround::receiveFrames %1").arg(maxFrames));
     return maxFrames;
 }
 
diff --git a/mythtv/libs/libmythtv/NuppelVideoPlayer.cpp b/mythtv/libs/libmythtv/NuppelVideoPlayer.cpp
index 78b048e..b8ff202 100644
--- a/mythtv/libs/libmythtv/NuppelVideoPlayer.cpp
+++ b/mythtv/libs/libmythtv/NuppelVideoPlayer.cpp
@@ -208,7 +208,9 @@ NuppelVideoPlayer::NuppelVideoPlayer(bool muted)
       videosync(NULL),              delay(0),
       vsynctol(30/4),               avsync_delay(0),
       avsync_adjustment(0),         avsync_avg(0),
-      avsync_oldavg(0),             refreshrate(0),
+      avsync_oldavg(0),             
+      avsync_predictor(0),          avsync_predictor_enabled(false),
+      refreshrate(0),
       lastsync(false),              m_playing_slower(false),
       m_stored_audio_stretchfactor(1.0),
       audio_paused(false),
@@ -1141,7 +1143,7 @@ void NuppelVideoPlayer::SetVideoParams(int width, int height, double fps,
         video_frame_rate = fps;
         float temp_speed = (play_speed == 0.0f) ?
             audio_stretchfactor : play_speed;
-        frame_interval = (int)(1000000.0f / video_frame_rate / temp_speed);
+        SetFrameInterval(kScan_Progressive, 1.0 / (video_frame_rate * temp_speed));
     }
 
     if (videoOutput)
@@ -2358,6 +2360,34 @@ float NuppelVideoPlayer::WarpFactor(void)
     return divergence;
 }
 
+void NuppelVideoPlayer::SetFrameInterval(FrameScanType scan, double frame_period)
+{
+    frame_interval = (int)(1000000.0f * frame_period + 0.5f);
+    if (!avsync_predictor_enabled)
+        avsync_predictor = 0;
+    avsync_predictor_enabled = false;
+
+    VERBOSE(VB_PLAYBACK, LOC + QString("SetFrameInterval ps:%1 scan:%2")
+            .arg(play_speed).arg(scan)
+           );
+    if (play_speed < 1 || play_speed > 2 || refreshrate <= 0)
+        return;
+
+    avsync_predictor_enabled = ((frame_interval-(frame_interval/200)) < refreshrate);
+}
+
+void NuppelVideoPlayer::ResetAVSync(void)
+{
+    avsync_avg = 0;
+    avsync_oldavg = 0;
+    if (!avsync_predictor_enabled || avsync_predictor >= refreshrate)
+        avsync_predictor = 0;
+    prevtc = 0;
+    warpfactor = 1.0f;
+    warpfactor_avg = 1.0f;
+    VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, LOC + "A/V sync reset");
+}
+
 void NuppelVideoPlayer::InitAVSync(void)
 {
     videosync->Start();
@@ -2379,16 +2409,33 @@ void NuppelVideoPlayer::InitAVSync(void)
         VERBOSE(VB_GENERAL, msg);
         msg = QString("Refresh rate: %1, frame interval: %2")
                        .arg(refreshrate).arg(frame_interval);
-        VERBOSE(VB_PLAYBACK, msg);
+        VERBOSE(VB_PLAYBACK, LOC + msg);
+
+        SetFrameInterval(m_scan, 1.0 / (video_frame_rate * play_speed));
 
         // try to get preferential scheduling, but ignore if we fail to.
         myth_nice(-19);
     }
 }
 
+int64_t NuppelVideoPlayer::AVSyncGetAudiotime(void)
+{
+    int64_t currentaudiotime = 0;
+    audio_lock.lock();
+    if (audioOutput && normal_speed)
+    {
+        currentaudiotime = audioOutput->GetAudiotime();
+    }
+    audio_lock.unlock();
+    return currentaudiotime;
+}
+
 void NuppelVideoPlayer::AVSync(void)
 {
     float diverge = 0.0f;
+    int vsync_delay_clock = 0;
+    int64_t currentaudiotime = 0;
+
     // attempt to reduce fps for standalone PIP
     if (player_ctx->IsPIP() && framesPlayed % 2)
     {
@@ -2428,16 +2475,38 @@ void NuppelVideoPlayer::AVSync(void)
         ps = kScan_Progressive;
 
     bool dropframe = false;
+    QString dbg;
+
+    if (avsync_predictor_enabled && !prebuffering)
+    {
+        avsync_predictor += frame_interval;
+        if (avsync_predictor >= refreshrate)
+        {
+            int refreshperiodsinframe = avsync_predictor/refreshrate;
+            avsync_predictor -= refreshrate * refreshperiodsinframe;
+        }
+        else
+        {
+            dropframe = true;
+            dbg = "A/V predict drop frame, ";
+        }
+    }
+
     if (diverge < -MAXDIVERGE)
     {
         dropframe = true;
         // If video is way behind of audio, adjust for it...
-        QString dbg = QString("Video is %1 frames behind audio (too slow), ")
+        dbg = QString("Video is %1 frames behind audio (too slow), ")
             .arg(-diverge);
+    }
 
+    if (dropframe)
+    {
         // Reset A/V Sync
         lastsync = true;
 
+        currentaudiotime = AVSyncGetAudiotime();
+
         if (buffer && !using_null_videoout &&
             videoOutput->hasHWAcceleration() &&
            !videoOutput->IsSyncLocked())
@@ -2462,16 +2531,17 @@ void NuppelVideoPlayer::AVSync(void)
         if (buffer)
             videoOutput->PrepareFrame(buffer, ps);
 
-        VERBOSE(VB_PLAYBACK|VB_TIMESTAMP, QString("AVSync waitforframe %1 %2")
+        VERBOSE(VB_PLAYBACK|VB_TIMESTAMP, LOC + QString("AVSync waitforframe %1 %2")
                 .arg(avsync_adjustment).arg(m_double_framerate));
-        videosync->WaitForFrame(avsync_adjustment + repeat_delay);
-        VERBOSE(VB_PLAYBACK|VB_TIMESTAMP, "AVSync show");
+        vsync_delay_clock = videosync->WaitForFrame(avsync_adjustment + repeat_delay);
+        currentaudiotime = AVSyncGetAudiotime();
+        VERBOSE(VB_PLAYBACK|VB_TIMESTAMP, LOC + "AVSync show");
         if (!resetvideo)
             videoOutput->Show(ps);
 
         if (videoOutput->IsErrored())
         {
-            VERBOSE(VB_IMPORTANT, "NVP: Error condition detected "
+            VERBOSE(VB_IMPORTANT, LOC + "Error condition detected "
                     "in videoOutput after Show(), aborting playback.");
             SetErrored(QObject::tr("Serious error detected in Video Output"));
             return;
@@ -2507,9 +2577,9 @@ void NuppelVideoPlayer::AVSync(void)
             // Display the second field
             videosync->AdvanceTrigger();
 #ifdef NEW_AVSYNC
-            videosync->WaitForFrame(avsync_adjustment);
+            vsync_delay_clock = videosync->WaitForFrame(avsync_adjustment);
 #else
-            videosync->WaitForFrame(0);
+            vsync_delay_clock = videosync->WaitForFrame(0);
 #endif
             if (!resetvideo)
             {
@@ -2520,17 +2590,18 @@ void NuppelVideoPlayer::AVSync(void)
         repeat_delay = frame_interval * buffer->repeat_pict * 0.5;
 
         if (repeat_delay)
-            VERBOSE(VB_TIMESTAMP, QString("A/V repeat_pict, adding %1 repeat "
+            VERBOSE(VB_TIMESTAMP, LOC + QString("A/V repeat_pict, adding %1 repeat "
                     "delay").arg(repeat_delay));
     }
     else
     {
-        videosync->WaitForFrame(0);
+        vsync_delay_clock = videosync->WaitForFrame(0);
+        currentaudiotime = AVSyncGetAudiotime();
     }
 
     if (output_jmeter && output_jmeter->RecordCycleTime())
     {
-        VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, QString("A/V avsync_delay: %1, "
+        VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, LOC + QString("A/V avsync_delay: %1, "
                 "avsync_avg: %2, warpfactor: %3, warpfactor_avg: %4")
                 .arg(avsync_delay / 1000).arg(avsync_avg / 1000)
                 .arg(warpfactor).arg(warpfactor_avg));
@@ -2546,7 +2617,9 @@ void NuppelVideoPlayer::AVSync(void)
         // by cutting the frame rate in half for the length of this frame
 
 #ifdef NEW_AVSYNC
-        avsync_adjustment = refreshrate;
+        //avsync_adjustment = refreshrate;
+        avsync_adjustment = frame_interval;
+        //avsync_adjustment = frame_interval*(((int)MAXDIVERGE)-1);
 #else
         avsync_adjustment = frame_interval;
 #endif
@@ -2556,62 +2629,73 @@ void NuppelVideoPlayer::AVSync(void)
                         "\t\t\tdoubling video frame interval to slow down.").arg(diverge));
     }
 
-    audio_lock.lock();
     if (audioOutput && normal_speed)
     {
-        long long currentaudiotime = audioOutput->GetAudiotime();
-        audio_lock.unlock();
 #if 0
-        VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, QString(
+        VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, LOC + QString(
                     "A/V timecodes audio %1 video %2 frameinterval %3 "
-                    "avdel %4 avg %5 tcoffset %6")
+                    "avdel %4 avg %5 tcoffset %6"
+                    " avp %7 avpen %8"
+                    " avdc %9"
+                    )
                 .arg(currentaudiotime)
                 .arg(buffer->timecode)
                 .arg(frame_interval)
-                .arg(buffer->timecode - currentaudiotime)
+                .arg(buffer->timecode - currentaudiotime - (int)(vsync_delay_clock*audio_stretchfactor+500)/1000)
                 .arg(avsync_avg)
                 .arg(tc_wrap[TC_AUDIO])
+                .arg(avsync_predictor)
+                .arg(avsync_predictor_enabled)
+                .arg(vsync_delay_clock)
                  );
 #endif
         if (currentaudiotime != 0 && buffer->timecode != 0)
         { // currentaudiotime == 0 after a seek
             // The time at the start of this frame (ie, now) is given by
             // last->timecode
-            int delta = (int)((buffer->timecode - prevtc)/play_speed) - (frame_interval / 1000);
-            prevtc = buffer->timecode;
-            //cerr << delta << " ";
-
-            // If the timecode is off by a frame (dropped frame) wait to sync
-            if (delta > (int) frame_interval / 1200 &&
-                delta < (int) frame_interval / 1000 * 3 &&
-                prevrp == 0)
+            if (prevtc != 0)
             {
-                //cerr << "+ ";
-                videosync->AdvanceTrigger();
-                if (m_double_framerate)
+                int delta = (int)((buffer->timecode - prevtc)/play_speed) - (frame_interval / 1000);
+                // If the timecode is off by a frame (dropped frame) wait to sync
+                if (delta > (int) frame_interval / 1200 &&
+                    delta < (int) frame_interval / 1000 * 3 &&
+                    prevrp == 0)
+                {
+                    //cerr << "+ ";
+                    VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, LOC + QString("A/V delay %1").arg(delta));
                     videosync->AdvanceTrigger();
+                    if (m_double_framerate)
+                        videosync->AdvanceTrigger();
+                }
             }
+            prevtc = buffer->timecode;
             prevrp = buffer->repeat_pict;
 
-            avsync_delay = (buffer->timecode - currentaudiotime) * 1000;//usec
+            avsync_delay = (buffer->timecode - currentaudiotime) * 1000 - (int)(vsync_delay_clock*audio_stretchfactor);  //usec
             // prevents major jitter when pts resets during dvd title
             if (avsync_delay > 2000000 && player_ctx->buffer->isDVD())
                 avsync_delay = 90000;
             avsync_avg = (avsync_delay + (avsync_avg * 3)) / 4;
 
+            int avsync_used = avsync_avg;
+            if (labs(avsync_used) > labs(avsync_delay))
+                avsync_used = avsync_delay;
+
             /* If the audio time codes and video diverge, shift
                the video by one interlaced field (1/2 frame) */
             if (!lastsync)
             {
-                if (avsync_avg > frame_interval * 3 / 2)
+                if (avsync_used > refreshrate)
                 {
-                    avsync_adjustment = refreshrate;
-                    lastsync = true;
+                    avsync_adjustment += refreshrate;
+                    //lastsync = true;
+                    VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, LOC + "A/V avg high extend");
                 }
-                else if (avsync_avg < 0 - frame_interval * 3 / 2)
+                else if (avsync_used < 0 - refreshrate)
                 {
-                    avsync_adjustment = -refreshrate;
-                    lastsync = true;
+                    avsync_adjustment -= refreshrate;
+                    //lastsync = true;
+                    VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, LOC + "A/V avg high skip");
                 }
             }
             else
@@ -2619,12 +2703,13 @@ void NuppelVideoPlayer::AVSync(void)
         }
         else
         {
-            avsync_avg = 0;
-            avsync_oldavg = 0;
+            ResetAVSync();
         }
     }
     else
-        audio_lock.unlock();
+    {
+        VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, LOC + QString("A/V no sync proc ns:%1 ao:%2").arg(normal_speed).arg(audioOutput != NULL));
+    }
 }
 
 void NuppelVideoPlayer::DisplayPauseFrame(void)
@@ -4237,7 +4322,7 @@ void NuppelVideoPlayer::DoPause(void)
     }
 
     float temp_speed = audio_stretchfactor;
-    frame_interval = (int)(1000000.0 * ffrew_skip / video_frame_rate / temp_speed);
+    SetFrameInterval(m_scan, ffrew_skip / (video_frame_rate * temp_speed));
     VERBOSE(VB_PLAYBACK, QString("rate: %1 speed: %2 skip: %3 = interval %4")
                                  .arg(video_frame_rate).arg(temp_speed)
                                  .arg(ffrew_skip).arg(frame_interval));
@@ -4299,8 +4384,7 @@ void NuppelVideoPlayer::DoPlay(void)
         ClearAfterSeek();
     }
 
-    frame_interval = (int) (1000000.0f * ffrew_skip / video_frame_rate /
-                            play_speed);
+    SetFrameInterval(m_scan, ffrew_skip / (video_frame_rate * play_speed));
 
     VERBOSE(VB_PLAYBACK, LOC + "DoPlay: " +
             QString("rate: %1 speed: %2 skip: %3 => new interval %4")
@@ -4698,6 +4782,7 @@ void NuppelVideoPlayer::ClearAfterSeek(bool clearvideobuffers)
         savedAudioTimecodeOffset = 0;
     }
 
+    ResetAVSync();
     SetPrebuffering(true);
     ResetAudio();
 
diff --git a/mythtv/libs/libmythtv/NuppelVideoPlayer.h b/mythtv/libs/libmythtv/NuppelVideoPlayer.h
index af02d21..8d4017c 100644
--- a/mythtv/libs/libmythtv/NuppelVideoPlayer.h
+++ b/mythtv/libs/libmythtv/NuppelVideoPlayer.h
@@ -514,6 +514,9 @@ class MPUBLIC NuppelVideoPlayer : public CC608Reader, public CC708Reader
     float WarpFactor(void);
     void  WrapTimecode(long long &timecode, TCTypes tc_type);
     void  InitAVSync(void);
+    void  ResetAVSync(void);
+    int64_t AVSyncGetAudiotime(void);
+    void  SetFrameInterval(FrameScanType scan, double speed);
     void  AVSync(void);
     void  FallbackDeint(void);
     void  CheckExtraAudioDecode(void);
@@ -800,6 +803,8 @@ class MPUBLIC NuppelVideoPlayer : public CC608Reader, public CC708Reader
     int        avsync_adjustment;
     int        avsync_avg;
     int        avsync_oldavg;
+    int        avsync_predictor;
+    bool       avsync_predictor_enabled;
     int        refreshrate;
     bool       lastsync;
     bool       m_playing_slower;
diff --git a/mythtv/libs/libmythtv/avformatdecoder.cpp b/mythtv/libs/libmythtv/avformatdecoder.cpp
index d6547c4..a5a1899 100644
--- a/mythtv/libs/libmythtv/avformatdecoder.cpp
+++ b/mythtv/libs/libmythtv/avformatdecoder.cpp
@@ -482,6 +482,7 @@ AvFormatDecoder::AvFormatDecoder(NuppelVideoPlayer *parent,
       start_code_state(0xffffffff),
       lastvpts(0),                  lastapts(0),
       lastccptsu(0),
+      firstvpts(0),                 firstvptsinuse(false),
       using_null_videoout(use_null_videoout),
       video_codec_id(kCodec_NONE),
       no_hardware_decoders(no_hardware_decode),
@@ -933,6 +934,12 @@ void AvFormatDecoder::SeekReset(long long newKey, uint skipFrames,
         if (decoded_video_frame)
             GetNVP()->DiscardVideoFrame(decoded_video_frame);
     }
+
+    if (doflush)
+    {
+        firstvpts = 0;
+        firstvptsinuse = true;
+    }
 }
 
 void AvFormatDecoder::Reset(bool reset_video_data, bool seek_reset)
@@ -2931,7 +2938,9 @@ void AvFormatDecoder::MpegPreProcessPkt(AVStream *stream, AVPacket *pkt)
 
                 gopset = false;
                 prevgoppos = 0;
+                firstvpts =
                 lastapts = lastvpts = lastccptsu = 0;
+                firstvptsinuse = true;
 
                 // fps debugging info
                 float avFPS = normalized_fps(stream, context);
@@ -3041,7 +3050,9 @@ bool AvFormatDecoder::H264PreProcessPkt(AVStream *stream, AVPacket *pkt)
 
             gopset = false;
             prevgoppos = 0;
+            firstvpts =
             lastapts = lastvpts = lastccptsu = 0;
+            firstvptsinuse = true;
 
             // fps debugging info
             float avFPS = normalized_fps(stream, context);
@@ -3270,6 +3281,8 @@ bool AvFormatDecoder::ProcessVideoPacket(AVStream *curstream, AVPacket *pkt)
     framesPlayed++;
 
     lastvpts = temppts;
+    if (!firstvpts && firstvptsinuse)
+        firstvpts = temppts;
 
     return true;
 }
@@ -4044,6 +4057,16 @@ bool AvFormatDecoder::ProcessAudioPacket(AVStream *curstream, AVPacket *pkt,
                 skipaudio = false;
         }
 
+        // skip any audio frames preceding first video frame
+        if (firstvptsinuse && firstvpts && (lastapts < firstvpts))
+        {
+            VERBOSE(VB_PLAYBACK+VB_TIMESTAMP,
+                LOC + QString("discarding early audio timecode %1 %2 %3")
+                .arg(pkt->pts).arg(pkt->dts).arg(lastapts));
+            break;
+        }
+        firstvptsinuse = false;
+
         avcodeclock->lock();
         data_size = 0;
 
diff --git a/mythtv/libs/libmythtv/avformatdecoder.h b/mythtv/libs/libmythtv/avformatdecoder.h
index 3ad2c70..1192546 100644
--- a/mythtv/libs/libmythtv/avformatdecoder.h
+++ b/mythtv/libs/libmythtv/avformatdecoder.h
@@ -261,6 +261,8 @@ class AvFormatDecoder : public DecoderBase
     long long lastvpts;
     long long lastapts;
     long long lastccptsu;
+    long long firstvpts;
+    bool      firstvptsinuse;
 
     bool using_null_videoout;
     MythCodecID video_codec_id;
diff --git a/mythtv/libs/libmythtv/vsync.cpp b/mythtv/libs/libmythtv/vsync.cpp
index 59b5db7..728a759 100644
--- a/mythtv/libs/libmythtv/vsync.cpp
+++ b/mythtv/libs/libmythtv/vsync.cpp
@@ -123,9 +123,10 @@ VideoSync::VideoSync(VideoOutput *video_output,
                      bool halve_frame_interval) :
     m_video_output(video_output),   m_frame_interval(frameint),
     m_refresh_interval(refreshint), m_interlaced(halve_frame_interval),
-    m_delay(-1)
+    m_nexttrigger(0),
+    m_delay(-1),
+    m_synchronous(false)
 {
-    bzero(&m_nexttrigger, sizeof(m_nexttrigger));
 
     int tolerance = m_refresh_interval / 200;
     if (m_interlaced && m_refresh_interval > ((m_frame_interval/2) + tolerance))
@@ -136,7 +137,9 @@ VideoSync::VideoSync(VideoOutput *video_output,
 
 void VideoSync::Start(void)
 {
-    gettimeofday(&m_nexttrigger, NULL); // now
+    struct timeval now_tv;
+    gettimeofday(&now_tv, NULL); // now
+    m_nexttrigger = now_tv.tv_sec * 1000000LL + now_tv.tv_usec;
 }
 
 /** \fn VideoSync::SetFrameInterval(int fr, bool intr)
@@ -147,26 +150,14 @@ void VideoSync::SetFrameInterval(int fr, bool intr)
     m_frame_interval = fr;
     m_interlaced = intr;
     int tolerance = m_refresh_interval / 200;
+    double sync_factor = fr * 2.0f / intr;
+    sync_factor = sync_factor - round(sync_factor);
+    m_synchronous = (sync_factor >= -0.005) && (sync_factor <= 0.005);
     if (m_interlaced && m_refresh_interval > ((m_frame_interval/2) + tolerance))
         m_interlaced = false; // can't display both fields at 2x rate
 
-    VERBOSE(VB_PLAYBACK, QString("Set video sync frame interval to %1")
-                                 .arg(m_frame_interval));
-}
-
-void VideoSync::OffsetTimeval(struct timeval& tv, int offset)
-{
-    tv.tv_usec += offset;
-    while (tv.tv_usec > 999999)
-    {
-        tv.tv_sec++;
-        tv.tv_usec -= 1000000;
-    }
-    while (tv.tv_usec < 0)
-    {
-        tv.tv_sec--;
-        tv.tv_usec += 1000000;
-    }
+    VERBOSE(VB_PLAYBACK, QString("Set video sync frame interval to %1 (synced:%2)")
+                                 .arg(m_frame_interval).arg(m_synchronous));
 }
 
 /** \fn VideoSync::UpdateNexttrigger()
@@ -179,9 +170,9 @@ void VideoSync::UpdateNexttrigger()
     // Offset by frame interval -- if interlaced, only delay by half
     // frame interval
     if (m_interlaced)
-        OffsetTimeval(m_nexttrigger, m_frame_interval/2);
+        m_nexttrigger += m_frame_interval/2;
     else
-        OffsetTimeval(m_nexttrigger, m_frame_interval);
+        m_nexttrigger += m_frame_interval;
 }
 
 /** \fn VideoSync::CalcDelay()
@@ -197,13 +188,13 @@ void VideoSync::UpdateNexttrigger()
  */
 int VideoSync::CalcDelay()
 {
-    struct timeval now;
-    gettimeofday(&now, NULL);
+    struct timeval now_tv;
+    gettimeofday(&now_tv, NULL);
     //cout << "CalcDelay: next: " << timeval_str(m_nexttrigger) << " now "
     // << timeval_str(now) << endl;
+    int64_t now = now_tv.tv_sec * 1000000LL + now_tv.tv_usec;
 
-    int ret_val = (m_nexttrigger.tv_sec - now.tv_sec) * 1000000 +
-                  (m_nexttrigger.tv_usec - now.tv_usec);
+    int ret_val = m_nexttrigger - now;
 
     //cout << "delay " << ret_val << endl;
 
@@ -215,19 +206,17 @@ int VideoSync::CalcDelay()
             ret_val = m_frame_interval * 4;
 
         // set nexttrigger to our new target time
-        m_nexttrigger.tv_sec = now.tv_sec;
-        m_nexttrigger.tv_usec = now.tv_usec;
-        OffsetTimeval(m_nexttrigger, ret_val);
+        m_nexttrigger = now;
+        m_nexttrigger += ret_val;
     }
 
-    if (ret_val < -m_frame_interval)
+    if ((ret_val < -m_frame_interval) && (m_frame_interval >= m_refresh_interval))
     {
         ret_val = -m_frame_interval;
 
         // set nexttrigger to our new target time
-        m_nexttrigger.tv_sec = now.tv_sec;
-        m_nexttrigger.tv_usec = now.tv_usec;
-        OffsetTimeval(m_nexttrigger, ret_val);
+        m_nexttrigger = now;
+        m_nexttrigger += ret_val;
     }
 
     return ret_val;
@@ -244,10 +233,20 @@ int VideoSync::CalcDelay()
 void VideoSync::KeepPhase()
 {
     // cerr << m_delay << endl;
-    if (m_delay < -(m_refresh_interval/2))
-        OffsetTimeval(m_nexttrigger, 200);
-    else if (m_delay > -500)
-        OffsetTimeval(m_nexttrigger, -2000);
+    if (m_synchronous)
+    {
+        if (m_delay < -(m_refresh_interval - 500))
+            m_nexttrigger += 200;
+        else if (m_delay > -500)
+            m_nexttrigger += -2000;
+    }
+    else
+    {
+        if (m_delay < -(m_refresh_interval + 500))
+            m_nexttrigger += 200;
+        else if (m_delay >= 0)
+            m_nexttrigger += -2000;
+    }
 }
 
 #ifndef _WIN32
@@ -337,10 +336,10 @@ void DRMVideoSync::Start(void)
     VideoSync::Start();
 }
 
-void DRMVideoSync::WaitForFrame(int sync_delay)
+int DRMVideoSync::WaitForFrame(int sync_delay)
 {
     // Offset for externally-provided A/V sync delay
-    OffsetTimeval(m_nexttrigger, sync_delay);
+    m_nexttrigger += sync_delay;
 
     m_delay = CalcDelay();
     //cerr << "WaitForFrame at : " << m_delay;
@@ -360,7 +359,7 @@ void DRMVideoSync::WaitForFrame(int sync_delay)
     if (m_delay > 0)
     {
         // Wait for any remaining retrace intervals in one pass.
-        int n = m_delay / m_refresh_interval + 1;
+        int n = (m_delay + m_refresh_interval - 1) / m_refresh_interval;
 
         drm_wait_vblank_t blank;
         blank.request.type = DRM_VBLANK_RELATIVE;
@@ -370,6 +369,7 @@ void DRMVideoSync::WaitForFrame(int sync_delay)
         //cerr << "Wait " << n << " intervals. Count " << blank.request.sequence;
         //cerr  << " Delay " << m_delay << endl;
     }
+    return m_delay;
 }
 
 void DRMVideoSync::AdvanceTrigger(void)
@@ -497,12 +497,16 @@ void OpenGLVideoSync::Start(void)
 #endif /* USING_OPENGL_VSYNC */
 }
 
-void OpenGLVideoSync::WaitForFrame(int sync_delay)
+int OpenGLVideoSync::WaitForFrame(int sync_delay)
 {
     (void) sync_delay;
 #ifdef USING_OPENGL_VSYNC
+//#define GLVSYNCDEBUG
+#ifdef GLVSYNCDEBUG
+    int refreshcount = 0;
+#endif
     const QString msg1("First A/V Sync"), msg2("Second A/V Sync");
-    OffsetTimeval(m_nexttrigger, sync_delay);
+    m_nexttrigger += sync_delay;
 
     VideoOutput *vo = dynamic_cast<VideoOutput*>(m_video_output);
     if (vo && vo->IsEmbedding())
@@ -510,36 +514,68 @@ void OpenGLVideoSync::WaitForFrame(int sync_delay)
         m_delay = CalcDelay();
         if (m_delay > 0)
             usleep(m_delay);
-        return;
+        return 0;
     }
 
     int err;
     if (!m_context)
-        return;
+        return 0;
     unsigned int frameNum = 0;
 
     OpenGLContextLocker ctx_lock(m_context);
     err = gMythGLXGetVideoSyncSGI(&frameNum);
     checkGLSyncError("Frame Number Query", err);
 
+#ifdef GLVSYNCDEBUG
+    int delay1 = m_delay;
+    int delay2;
+#endif
     // Always sync to the next retrace execpt when we are very late.
     if ((m_delay = CalcDelay()) > -(m_refresh_interval/2))
     {
+#ifdef GLVSYNCDEBUG
+        delay2 = m_delay;
+#endif
         err = gMythGLXWaitVideoSyncSGI(2, (frameNum+1)%2 ,&frameNum);
         checkGLSyncError(msg1, err);
         m_delay = CalcDelay();
+#ifdef GLVSYNCDEBUG
+        refreshcount++;
+#endif
     }
+#ifdef GLVSYNCDEBUG
+    else
+        delay2 = m_delay;
+#endif
 
+#ifdef GLVSYNCDEBUG
+    int delay3 = m_delay;
+#endif
     // Wait for any remaining retrace intervals in one pass.
     if (m_delay > 0)
     {
-        uint n = m_delay / m_refresh_interval + 1;
+        uint n = (m_delay + m_refresh_interval - 1) / m_refresh_interval;
+#ifdef GLVSYNCDEBUG
+        refreshcount += (int)n;
+#endif
         err = gMythGLXWaitVideoSyncSGI((n+1), (frameNum+n)%(n+1), &frameNum);
         checkGLSyncError(msg2, err);
         m_delay = CalcDelay();
     }
+#ifdef GLVSYNCDEBUG
+    VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, QString("VS: WFF: ri:%1 fi:%2 delay1:%3 delay2:%4 delay3:%5 skip:%6 finaldelay:%7")
+            .arg(m_refresh_interval)
+            .arg(m_frame_interval)
+            .arg(delay1)
+            .arg(delay2)
+            .arg(delay3)
+            .arg(refreshcount)
+            .arg(m_delay)
+           );
+#endif
 
 #endif /* USING_OPENGL_VSYNC */
+    return m_delay;
 }
 
 void OpenGLVideoSync::AdvanceTrigger(void)
@@ -548,6 +584,9 @@ void OpenGLVideoSync::AdvanceTrigger(void)
 
     KeepPhase();
     UpdateNexttrigger();
+#ifdef GLVSYNCDEBUG
+    VERBOSE(VB_PLAYBACK+VB_TIMESTAMP, "VS: AdvanceTrigger");
+#endif
 #endif /* USING_OPENGL_VSYNC */
 }
 #endif /* !_WIN32 */
@@ -594,9 +633,9 @@ bool RTCVideoSync::TryInit(void)
     return true;
 }
 
-void RTCVideoSync::WaitForFrame(int sync_delay)
+int RTCVideoSync::WaitForFrame(int sync_delay)
 {
-    OffsetTimeval(m_nexttrigger, sync_delay);
+    m_nexttrigger += sync_delay;
 
     m_delay = CalcDelay();
 
@@ -609,6 +648,7 @@ void RTCVideoSync::WaitForFrame(int sync_delay)
         if ((val < 0) && (m_delay > 0))
             usleep(m_delay);
     }
+    return 0;
 }
 
 void RTCVideoSync::AdvanceTrigger(void)
@@ -637,10 +677,10 @@ bool VDPAUVideoSync::TryInit(void)
     return true;
 }
 
-void VDPAUVideoSync::WaitForFrame(int sync_delay)
+int VDPAUVideoSync::WaitForFrame(int sync_delay)
 {
     // Offset for externally-provided A/V sync delay
-    OffsetTimeval(m_nexttrigger, sync_delay);
+    m_nexttrigger += sync_delay;
     m_delay = CalcDelay();
 
     if (m_delay < 0)
@@ -648,6 +688,7 @@ void VDPAUVideoSync::WaitForFrame(int sync_delay)
 
     VideoOutputVDPAU *vo = (VideoOutputVDPAU *)(m_video_output);
     vo->SetNextFrameDisplayTimeOffset(m_delay);
+    return 0;
 }
 
 void VDPAUVideoSync::AdvanceTrigger(void)
@@ -674,10 +715,10 @@ bool BusyWaitVideoSync::TryInit(void)
     return true;
 }
 
-void BusyWaitVideoSync::WaitForFrame(int sync_delay)
+int BusyWaitVideoSync::WaitForFrame(int sync_delay)
 {
     // Offset for externally-provided A/V sync delay
-    OffsetTimeval(m_nexttrigger, sync_delay);
+    m_nexttrigger += sync_delay;
 
     m_delay = CalcDelay();
 
@@ -703,6 +744,7 @@ void BusyWaitVideoSync::WaitForFrame(int sync_delay)
         if (cnt > 1)
             m_cheat -= 200;
     }
+    return 0;
 }
 
 void BusyWaitVideoSync::AdvanceTrigger(void)
@@ -725,14 +767,15 @@ bool USleepVideoSync::TryInit(void)
     return true;
 }
 
-void USleepVideoSync::WaitForFrame(int sync_delay)
+int USleepVideoSync::WaitForFrame(int sync_delay)
 {
     // Offset for externally-provided A/V sync delay
-    OffsetTimeval(m_nexttrigger, sync_delay);
+    m_nexttrigger += sync_delay;
 
     m_delay = CalcDelay();
     if (m_delay > 0)
         usleep(m_delay);
+    return 0;
 }
 
 void USleepVideoSync::AdvanceTrigger(void)
diff --git a/mythtv/libs/libmythtv/vsync.h b/mythtv/libs/libmythtv/vsync.h
index f077949..f8b1c4b 100644
--- a/mythtv/libs/libmythtv/vsync.h
+++ b/mythtv/libs/libmythtv/vsync.h
@@ -70,6 +70,7 @@ class VideoSync
     virtual void Start(void);
 
     /** \brief Waits for next a frame or field.
+     *   Returns delay to real frame timing in usec
      *
      *   Start(void), WaitForFrame(void), and Stop(void) should
      *   always be called from same thread, to prevent bad
@@ -78,7 +79,7 @@ class VideoSync
      *  \param sync_delay time until the desired frame or field
      *  \sa CalcDelay(void), KeepPhase(void)
      */
-    virtual void WaitForFrame(int sync_delay) = 0;
+    virtual int WaitForFrame(int sync_delay) = 0;
 
     /// \brief Use the next frame or field for CalcDelay(void)
     ///        and WaitForFrame(int).
@@ -104,7 +105,6 @@ class VideoSync
                                  uint frame_interval, uint refresh_interval,
                                  bool interlaced);
   protected:
-    static void OffsetTimeval(struct timeval& tv, int offset);
     void UpdateNexttrigger(void);
     int CalcDelay(void);
     void KeepPhase(void);
@@ -113,8 +113,9 @@ class VideoSync
     int m_frame_interval; // of video
     int m_refresh_interval; // of display
     bool m_interlaced;
-    struct timeval m_nexttrigger;
+    int64_t m_nexttrigger;
     int m_delay;
+    bool m_synchronous;
     
     static int m_forceskip;
 };
@@ -136,7 +137,7 @@ class DRMVideoSync : public VideoSync
     QString getName(void) const { return QString("DRM"); }
     bool TryInit(void);
     void Start(void);
-    void WaitForFrame(int sync_delay);
+    int WaitForFrame(int sync_delay);
     void AdvanceTrigger(void);
 
   private:
@@ -178,7 +179,7 @@ class OpenGLVideoSync : public VideoSync
     QString getName(void) const { return QString("SGI OpenGL"); }
     bool TryInit(void);
     void Start(void);
-    void WaitForFrame(int sync_delay);
+    int WaitForFrame(int sync_delay);
     void AdvanceTrigger(void);
 
   private:
@@ -207,7 +208,7 @@ class RTCVideoSync : public VideoSync
 
     QString getName(void) const { return QString("RTC"); }
     bool TryInit(void);
-    void WaitForFrame(int sync_delay);
+    int WaitForFrame(int sync_delay);
     void AdvanceTrigger(void);
 
   private:
@@ -228,7 +229,7 @@ class VDPAUVideoSync : public VideoSync
 
     QString getName(void) const { return QString("VDPAU"); }
     bool TryInit(void);
-    void WaitForFrame(int sync_delay);
+    int WaitForFrame(int sync_delay);
     void AdvanceTrigger(void);
 
   private:
@@ -256,7 +257,7 @@ class BusyWaitVideoSync : public VideoSync
 
     QString getName(void) const { return QString("USleep with busy wait"); }
     bool TryInit(void);
-    void WaitForFrame(int sync_delay);
+    int WaitForFrame(int sync_delay);
     void AdvanceTrigger(void);
 
   private:
@@ -284,7 +285,7 @@ class USleepVideoSync : public VideoSync
 
     QString getName(void) const { return QString("USleep"); }
     bool TryInit(void);
-    void WaitForFrame(int sync_delay);
+    int WaitForFrame(int sync_delay);
     void AdvanceTrigger(void);
 };
 #endif /* VSYNC_H_INCLUDED */
