diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java
index 1bbd889df08569d4be8916c41440d1ff169a9050..fed0cdcd7c3032661fba1e185812ce6debea5eb4 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java
@@ -158,7 +158,7 @@ private Format[] getIAudioClientSupportedFormats(int streamIndex)
         Format[] superSupportedFormats = super.getSupportedFormats(streamIndex);
 
         /*
-         * If the capture endpoint device is report to support no Format, then
+         * If the capture endpoint device reports no supported Format, then
          * acoustic echo cancellation (AEC) will surely not work.
          */
         if ((superSupportedFormats == null)
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java
index d95837ffa57a5c1ff4d256d8d3ce16fdc9048174..94353ecc2ef684c932b6b480c815bf722ab87011 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java
@@ -9,6 +9,7 @@
 import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.VoiceCaptureDSP.*;
 import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*;
 
+import java.beans.*;
 import java.io.*;
 import java.util.*;
 
@@ -131,6 +132,33 @@ private static AudioFormat findClosestMatch(
         return match;
     }
 
+    /**
+     * Finds an <tt>AudioFormat</tt> in a specific list of <tt>AudioFormat</tt>s
+     * which is as similar to a specific <tt>AudioFormat</tt> as possible.
+     *
+     * @param formats the list of <tt>AudioFormat</tt>s in which an
+     * <tt>AudioFormat</tt> as similar to the specified <tt>format</tt> as
+     * possible is to be found
+     * @param format the <tt>AudioFormat</tt> for which a similar
+     * <tt>AudioFormat</tt> is to be found in <tt>formats</tt>
+     * @param clazz the runtime type of the matches to be considered or
+     * <tt>null</tt> if the runtime type is not to be limited
+     * @return an <tt>AudioFormat</tt> which is an element of <tt>formats</tt>
+     * and is as similar to the specified <tt>format</tt> as possible or
+     * <tt>null</tt> if no similarity could be established
+     */
+    private static AudioFormat findClosestMatch(
+            List<AudioFormat> formats,
+            AudioFormat format,
+            Class<? extends AudioFormat> clazz)
+    {
+        return
+            findClosestMatch(
+                    formats.toArray(new Format[formats.size()]),
+                    format,
+                    clazz);
+    }
+
     /**
      * Finds the first element of a specific array of <tt>Format</tt>s which
      * matches in the sense of {@link Format#matches(Format)} a specific
@@ -466,6 +494,14 @@ static void throwNewIOException(String message, HResultException hre)
      */
     private long dmoOutputDataBuffer;
 
+    /**
+     * The <tt>AudioFormat</tt> of the {@link #capture} or
+     * {@link #iMediaObject}. If it does not match (in the FMJ sense of the
+     * <tt>Format</tt> class), {@link #resampler} will be used to resample from
+     * it into {@link #format}.
+     */
+    private AudioFormat effectiveFormat;
+
     /**
      * The <tt>AudioFormat</tt> of this <tt>SourceStream</tt>.
      */
@@ -519,6 +555,41 @@ static void throwNewIOException(String message, HResultException hre)
      */
     private Thread processThread;
 
+    /**
+     * The <tt>PropertyChangeListener</tt> which listens to changes in the
+     * values of the properties of the associated <tt>AudioSystem</tt>.
+     */
+    private final PropertyChangeListener propertyChangeListener
+        = new PropertyChangeListener()
+        {
+            public void propertyChange(PropertyChangeEvent ev)
+            {
+                try
+                {
+                    WASAPIStream.this.propertyChange(ev);
+                }
+                catch (Exception e)
+                {
+                    StringBuilder msg = new StringBuilder();
+
+                    msg.append(
+                            "Failed to handle a change to the value of the"
+                                + " property ");
+                    msg.append(ev.getPropertyName());
+
+                    Object source = ev.getSource();
+
+                    if (source != null)
+                    {
+                        msg.append(" of a ");
+                        msg.append(source.getClass());
+                    }
+                    msg.append('.');
+                    logger.error(msg, e);
+                }
+            }
+        };
+
     /**
      * The abstraction which delivers audio samples from the render endpoint
      * device into this instance (for the purposes of acoustic echo
@@ -553,6 +624,18 @@ static void throwNewIOException(String message, HResultException hre)
      */
     private boolean renderIsBusy;
 
+    /**
+     * The <tt>MediaLocator</tt> of the rendering endpoint device used by this
+     * instance, if any.
+     */
+    private MediaLocator renderDevice;
+
+    /**
+     * The zero-based index within the <tt>IMMDeviceCollection</tt> interface
+     * of {@link #renderDevice}, if any.
+     */
+    private int renderDeviceIndex;
+
     /**
      * The <tt>WASAPIRenderer</tt> which maintains an active stream on the
      * rendering device selected in the Voice Capture DSP implementing acoustic
@@ -567,6 +650,19 @@ static void throwNewIOException(String message, HResultException hre)
      */
     private boolean replenishRender;
 
+    /**
+     * The <tt>Codec</tt> which is to resample the <tt>Format</tt> of
+     * {@link #capture} or {@link #processed} into {@link #format} if necessary.
+     */
+    private Codec resampler;
+
+    /**
+     * The internal/intermediate <tt>Buffer</tt> instance utilized by
+     * {@link #resampler}. Cached in order to reduce the effects of the garbage
+     * collector.
+     */
+    private Buffer resamplerBuffer;
+
     /**
      * The indicator which determined whether {@link #iMediaObject} has been
      * initialized to operate in source (as opposed to filter) mode.
@@ -707,7 +803,7 @@ private void configureAEC(long iPropertyStore)
      * fails to connect to the audio endpoint device identified by
      * <tt>locator</tt>
      */
-    private void connect()
+    private synchronized void connect()
         throws IOException
     {
         if (capture != null)
@@ -748,7 +844,7 @@ private void connect()
      * <tt>SourceStream</tt> disconnects from the audio endpoint device it has
      * previously connected to during the execution of <tt>connect()</tt>
      */
-    private void disconnect()
+    private synchronized void disconnect()
         throws IOException
     {
         try
@@ -760,6 +856,7 @@ private void disconnect()
             uninitializeAEC();
             uninitializeRender();
             uninitializeCapture();
+            maybeCloseResampler();
 
             /*
              * Make sure this AbstractPullBufferStream asks its DataSource for
@@ -767,8 +864,12 @@ private void disconnect()
              * time it is connected instead of using its Format from a previous
              * connect.
              */
+            effectiveFormat = null;
             format = null;
             sourceMode = false;
+
+            dataSource.audioSystem.removePropertyChangeListener(
+                    propertyChangeListener);
         }
     }
 
@@ -790,17 +891,21 @@ private void doConnect()
         if (locator == null)
             throw new NullPointerException("No locator set.");
 
-        AudioFormat thisFormat = (AudioFormat) getFormat();
+        AudioFormat format = (AudioFormat) getFormat();
 
-        if (thisFormat == null)
+        if (format == null)
             throw new NullPointerException("No format set.");
+
+        WASAPISystem audioSystem = dataSource.audioSystem;
+        AudioFormat effectiveFormat = null;
+
         if (dataSource.aec)
         {
             aec = true;
             try
             {
                 CaptureDeviceInfo2 captureDevice
-                    = dataSource.audioSystem.getDevice(
+                    = audioSystem.getDevice(
                             AudioSystem.DataFlow.CAPTURE,
                             locator);
 
@@ -818,7 +923,7 @@ private void doConnect()
                      * cannot be found, AEC cannot be enabled. Period.
                      */
                     CaptureDeviceInfo2 renderDevice
-                        = dataSource.audioSystem.getSelectedDevice(
+                        = audioSystem.getSelectedDevice(
                                 AudioSystem.DataFlow.PLAYBACK);
 
                     if (renderDevice != null)
@@ -827,17 +932,19 @@ private void doConnect()
 
                         if (sourceMode)
                         {
-                            doConnectInSourceMode(
-                                    captureDevice,
-                                    renderDevice,
-                                    thisFormat);
+                            effectiveFormat
+                                = doConnectInSourceMode(
+                                        captureDevice,
+                                        renderDevice,
+                                        format);
                         }
                         else
                         {
-                            doConnectInFilterMode(
+                            effectiveFormat
+                                = doConnectInFilterMode(
                                     captureDevice,
                                     renderDevice,
-                                    thisFormat);
+                                    format);
                         }
 
                         this.sourceMode = sourceMode;
@@ -860,10 +967,52 @@ private void doConnect()
         if (iMediaObject == 0)
         {
             aec = false;
-            initializeCapture(locator, thisFormat);
+            renderDevice = null;
+            renderDeviceIndex = -1;
+            initializeCapture(locator, format);
+            effectiveFormat = capture.outFormat;
         }
 
-        this.format = thisFormat;
+        this.effectiveFormat = effectiveFormat;
+        this.format = format;
+
+        /*
+         * If we have to open a resampler and the opening fails, make sure that
+         * the state of this instance is reverted to disconnected because the
+         * connecting has just failed.
+         */
+        boolean disconnect = true;
+
+        try
+        {
+            maybeOpenResampler();
+            if (resampler != null)
+                resamplerBuffer = new Buffer();
+
+            /*
+             * If a rendering endpoint device is used by this instance (for the
+             * purposes of acoustic echo cancellation), make sure that this
+             * instance will be notified when its state changes in order to be
+             * able to switch the state of this instance to the up-to-date state
+             * of the currently-selected rendering endpoint device.
+             */
+            if (dataSource.aec)
+            {
+                audioSystem.addPropertyChangeListener(propertyChangeListener);
+            }
+            else
+            {
+                audioSystem.removePropertyChangeListener(
+                        propertyChangeListener);
+            }
+
+            disconnect = false;
+        }
+        finally
+        {
+            if (disconnect)
+                disconnect();
+        }
     }
 
     /**
@@ -877,11 +1026,13 @@ private void doConnect()
      * render endpoint device to be used
      * @param outFormat the <tt>Format</tt> of the media data in which the Voice
      * Capture DSP is to output
+     * @return the <tt>AudioFormat</tt> in which the Voice Capture DSP will
+     * actually output media data
      * @throws Exception if this <tt>SourceStream</tt> fails to connect to the
      * associated audio endpoint device. The <tt>Exception</tt> is logged by the
      * <tt>connect()</tt> method.
      */
-    private void doConnectInFilterMode(
+    private AudioFormat doConnectInFilterMode(
             CaptureDeviceInfo2 captureDevice,
             CaptureDeviceInfo2 renderDevice,
             AudioFormat outFormat)
@@ -946,6 +1097,7 @@ private void doConnectInFilterMode(
         }
 
         boolean uninitialize = true;
+        AudioFormat aecOutFormat;
 
         initializeCapture(locator, captureFormat);
         try
@@ -954,13 +1106,14 @@ private void doConnectInFilterMode(
                 initializeRender(renderLocator, renderFormat);
             try
             {
-                initializeAEC(
-                        /* sourceMode */ false,
-                        captureDevice,
-                        captureFormat,
-                        renderDevice,
-                        renderFormat,
-                        outFormat);
+                aecOutFormat
+                    = initializeAEC(
+                            /* sourceMode */ false,
+                            captureDevice,
+                            captureFormat,
+                            renderDevice,
+                            renderFormat,
+                            outFormat);
                 uninitialize = false;
             }
             finally
@@ -974,6 +1127,7 @@ private void doConnectInFilterMode(
             if (uninitialize)
                 uninitializeCapture();
         }
+        return aecOutFormat;
     }
 
     /**
@@ -987,23 +1141,26 @@ private void doConnectInFilterMode(
      * render endpoint device to be used
      * @param outFormat the <tt>Format</tt> of the media data in which the Voice
      * Capture DSP is to output
+     * @return the <tt>AudioFormat</tt> in which the Voice Capture DSP will
+     * actually output media data
      * @throws Exception if this <tt>SourceStream</tt> fails to connect to the
      * associated audio endpoint device. The <tt>Exception</tt> is logged by the
      * <tt>connect()</tt> method.
      */
-    private void doConnectInSourceMode(
+    private AudioFormat doConnectInSourceMode(
             CaptureDeviceInfo2 captureDevice,
             CaptureDeviceInfo2 renderDevice,
             AudioFormat outFormat)
         throws Exception
     {
-        initializeAEC(
-                /* sourceMode */ true,
-                captureDevice,
-                /* captureFormat */ null,
-                renderDevice,
-                /* renderFormat */ null,
-                outFormat);
+        return
+            initializeAEC(
+                    /* sourceMode */ true,
+                    captureDevice,
+                    /* captureFormat */ null,
+                    renderDevice,
+                    /* renderFormat */ null,
+                    outFormat);
     }
 
     /**
@@ -1015,6 +1172,153 @@ protected Format doGetFormat()
         return (format == null) ? super.doGetFormat() : format;
     }
 
+    /**
+     * Reads media data from {@link #capture} or {@link #processed} into a
+     * specific <tt>Buffer</tt>.
+     *
+     * @param buffer the <tt>Buffer</tt> into which the media data read from
+     * <tt>capture</tt> or <tt>processed</tt> is to be written
+     * @throws IOException if an error occurs during the reading or writing
+     */
+    private void doRead(Buffer buffer)
+        throws IOException
+    {
+        // Reduce relocations as much as possible.
+        int capacity = aec ? bufferMaxLength : bufferSize;
+        byte[] data
+            = AbstractCodec2.validateByteArraySize(buffer, capacity, false);
+        int length = 0;
+
+        buffer.setLength(0);
+        buffer.setOffset(0);
+
+        do
+        {
+            String message;
+
+            synchronized (this)
+            {
+                /*
+                 * We explicitly want to support the case in which the user has
+                 * selected "none" for the playback/render endpoint device.
+                 * Otherwise, we could have added a check
+                 * (dataSource.aec && (render == null)). 
+                 */
+                boolean connected = (capture != null) || sourceMode;
+
+                if (connected)
+                {
+                    message = null;
+                    captureIsBusy = true;
+                    renderIsBusy = true;
+                }
+                else
+                    message = getClass().getName() + " is disconnected.";
+            }
+            /*
+             * The caller shouldn't call #read(Buffer) if this instance is
+             * disconnected or stopped. Additionally, if she does, she may be
+             * persistent. If we do not slow her down, she may hog the CPU.
+             */
+            if (message != null)
+            {
+                yield();
+                throw new IOException(message);
+            }
+
+            int read;
+            Throwable cause;
+
+            try
+            {
+                int toRead = capacity - length;
+
+                /*
+                 * We explicitly want to support the case in which the user has
+                 * selected "none" for the playback/render endpoint device.
+                 * Otherwise, we could have used a check (render == null).
+                 */
+                boolean aec = (iMediaObject != 0);
+
+                if (aec)
+                {
+                    toRead = Math.min(toRead, processedLength);
+                    if (toRead == 0)
+                        read = 0;
+                    else
+                    {
+                        System.arraycopy(processed, 0, data, length, toRead);
+                        popFromProcessed(toRead);
+                        read = toRead;
+                    }
+                }
+                else
+                    read = capture.read(data, length, toRead);
+                cause = null;
+            }
+            catch (Throwable t)
+            {
+                /*
+                 * The exception will be rethrown after we exit the busy block
+                 * of this SourceStream.
+                 */
+                read = 0;
+                cause = t;
+            }
+            finally
+            {
+                synchronized (this)
+                {
+                    captureIsBusy = false;
+                    renderIsBusy = false;
+                    notifyAll();
+                }
+            }
+            if (cause == null)
+            {
+                if (length == 0)
+                {
+                    long timeStamp = System.nanoTime();
+
+                    buffer.setFlags(Buffer.FLAG_SYSTEM_TIME);
+                    buffer.setTimeStamp(timeStamp);
+                }
+                length += read;
+                if ((length >= capacity) || (read == 0))
+                {
+                    if (effectiveFormat != null)
+                        buffer.setFormat(effectiveFormat);
+                    buffer.setLength(length);
+                    break;
+                }
+                else
+                {
+                    /*
+                     * TODO The implementation of PushBufferStream.read(Buffer)
+                     * should not block, it should return with whatever is
+                     * available.
+                     */
+                    yield();
+                }
+            }
+            else
+            {
+                if (cause instanceof ThreadDeath)
+                    throw (ThreadDeath) cause;
+                else if (cause instanceof IOException)
+                    throw (IOException) cause;
+                else
+                {
+                    IOException ioe = new IOException();
+
+                    ioe.initCause(cause);
+                    throw ioe;
+                }
+            }
+        }
+        while (true);
+    }
+
     /**
      * Finds an <tt>AudioFormat</tt> in the list of supported <tt>Format</tt>s
      * of the associated capture endpoint device which is as similar to a
@@ -1067,11 +1371,13 @@ private MediaLocator getLocator()
      * delivered to the input stream representing the audio from the speaker
      * (line)
      * @param outFormat the <tt>AudioFormat</tt> of the media which is to be
-     * output by the <tt>IMediaObject</tt>/acoustic echo cancellation
+     * output by the <tt>IMediaObject</tt>/acoustic echo cancellation (AEC)
+     * @return the <tt>AudioFormat</tt> in which the Voice Capture DSP will
+     * actually output media data
      * @throws Exception if the initialization of the <tt>IMediaObject</tt>
      * implementing acoustic echo cancellation fails
      */
-    private void initializeAEC(
+    private AudioFormat initializeAEC(
             boolean sourceMode,
             CaptureDeviceInfo2 captureDevice,
             AudioFormat captureFormat,
@@ -1080,7 +1386,23 @@ private void initializeAEC(
             AudioFormat outFormat)
         throws Exception
     {
-        long iMediaObject = dataSource.audioSystem.initializeAEC();
+        WASAPISystem audioSystem = dataSource.audioSystem;
+        AudioFormat aecOutFormat
+            = findClosestMatch(
+                    audioSystem.getAECSupportedFormats(),
+                    outFormat,
+                    /* clazz */ null);
+
+        if (aecOutFormat == null)
+        {
+            throw new IllegalStateException(
+                    "Failed to determine an AudioFormat with which to"
+                        + " initialize Voice Capture DSP/acoustic echo"
+                        + " cancellation (AEC) based on AudioFormat "
+                        + outFormat);
+        }
+
+        long iMediaObject = audioSystem.initializeAEC();
 
         if (iMediaObject == 0)
         {
@@ -1120,22 +1442,22 @@ private void initializeAEC(
                             iMediaObject,
                             /* IMediaObject_SetOutputType */ false,
                             /* dwOutputStreamIndex */ 0,
-                            outFormat,
+                            aecOutFormat,
                             /* dwFlags */ 0);
                 if (FAILED(hresult))
                 {
                     throw new HResultException(
                             hresult,
-                            "IMediaObject_SetOutputType, " + outFormat);
+                            "IMediaObject_SetOutputType, " + aecOutFormat);
                 }
 
                 int outFrameSize
-                    = WASAPISystem.getSampleSizeInBytes(outFormat)
-                        * outFormat.getChannels();
+                    = WASAPISystem.getSampleSizeInBytes(aecOutFormat)
+                        * aecOutFormat.getChannels();
                 int outFrames
                     = (int)
                         (WASAPISystem.DEFAULT_BUFFER_DURATION
-                            * ((int) outFormat.getSampleRate()) / 1000);
+                            * ((int) aecOutFormat.getSampleRate()) / 1000);
                 long iMediaBuffer = MediaBuffer_alloc(outFrameSize * outFrames);
 
                 if (iMediaBuffer == 0)
@@ -1162,21 +1484,24 @@ private void initializeAEC(
                         processed = new byte[bufferMaxLength * 3];
                         processedLength = 0;
 
+                        this.renderDevice
+                            = (renderDevice == null)
+                                ? null
+                                : renderDevice.getLocator();
+                        this.renderDeviceIndex = -1;
                         if (sourceMode)
                         {
                             initializeAECInSourceMode(
                                     iPropertyStore,
                                     captureDevice,
-                                    renderDevice,
-                                    outFormat);
+                                    renderDevice);
                         }
                         else
                         {
                             initializeAECInFilterMode(
                                     iMediaObject,
                                     captureFormat,
-                                    renderFormat,
-                                    outFormat);
+                                    renderFormat);
                         }
 
                         this.dmoOutputDataBuffer = dmoOutputDataBuffer;
@@ -1209,6 +1534,7 @@ private void initializeAEC(
             if (iMediaObject != 0)
                 IMediaObject_Release(iMediaObject);
         }
+        return aecOutFormat;
     }
 
     /**
@@ -1223,15 +1549,12 @@ private void initializeAEC(
      * @param inFormat1 the <tt>AudioFormat</tt> of the media which will be
      * delivered to the input stream representing the audio from the speaker
      * (line)
-     * @param outFormat the <tt>AudioFormat</tt> of the media which is to be
-     * output by the <tt>IMediaObject</tt>/acoustic echo cancellation
      * @throws Exception if the initialization of the <tt>IMediaObject</tt>
      * implementing acoustic echo cancellation fails
      */
     private void initializeAECInFilterMode(
             long iMediaObject,
-            AudioFormat inFormat0, AudioFormat inFormat1,
-            AudioFormat outFormat)
+            AudioFormat inFormat0, AudioFormat inFormat1)
         throws Exception
     {
         int dwInputStreamIndex = CAPTURE_INPUT_STREAM_INDEX;
@@ -1358,16 +1681,13 @@ private void initializeAECInFilterMode(
      * capture endpoint device to be used
      * @param renderDevice <tt>CaptureDeviceInfo2</tt> which identifies the
      * render endpoint device to be used
-     * @param outFormat the <tt>AudioFormat</tt> of the media which is to be
-     * output by the <tt>IMediaObject</tt>/acoustic echo cancellation
      * @throws Exception if the initialization of the <tt>IMediaObject</tt>
      * implementing acoustic echo cancellation fails
      */
     private void initializeAECInSourceMode(
             long iPropertyStore,
             CaptureDeviceInfo2 captureDevice,
-            CaptureDeviceInfo2 renderDevice,
-            AudioFormat outFormat)
+            CaptureDeviceInfo2 renderDevice)
         throws Exception
     {
         WASAPISystem audioSystem = dataSource.audioSystem;
@@ -1429,6 +1749,7 @@ private void initializeAECInSourceMode(
         renderer.open();
 
         devicePeriod = WASAPISystem.DEFAULT_DEVICE_PERIOD / 2;
+        this.renderDeviceIndex = renderDeviceIndex;
         this.renderer = renderer;
     }
 
@@ -1452,6 +1773,22 @@ private void initializeCapture(
             = aec
                 ? Format.NOT_SPECIFIED
                 : WASAPISystem.DEFAULT_BUFFER_DURATION;
+        /*
+         * The capture endpoint device specified locator/MediaLocator may not
+         * support the specified format at all. In such a case, this
+         * SourceStream will have to resample.
+         */
+        AudioFormat captureFormat
+            = findClosestMatchCaptureSupportedFormat(format);
+
+        if (captureFormat == null)
+        {
+            throw new IllegalStateException(
+                    "Failed to determine an AudioFormat with which to"
+                        + " initialize IAudioClient for MediaLocator " + locator
+                        + " based on AudioFormat " + format);
+        }
+
         BufferTransferHandler transferHandler
             = new BufferTransferHandler()
                     {
@@ -1468,7 +1805,7 @@ public void transferData(PushBufferStream stream)
                     AudioSystem.DataFlow.CAPTURE,
                     /* streamFlags */ 0,
                     hnsBufferDuration,
-                    format,
+                    captureFormat,
                     transferHandler);
         bufferSize = capture.bufferSize;
         devicePeriod = capture.devicePeriod;
@@ -1477,7 +1814,7 @@ public void transferData(PushBufferStream stream)
     /**
      * Initializes the delivery of audio data/samples from a render endpoint
      * device identified by a specific <tt>MediaLocator</tt> into this instance
-     * for the purposes of acoust echo cancellation (AEC).
+     * for the purposes of acoustic echo cancellation (AEC).
      *
      * @param locator the <tt>MediaLocator</tt> identifying the render endpoint
      * device from which this instance is to read
@@ -1516,6 +1853,63 @@ public void transferData(PushBufferStream stream)
         replenishRender = true;
     }
 
+    /**
+     * Closes {@link #resampler} if it is non-<tt>null</tt>.
+     */
+    private void maybeCloseResampler()
+    {
+        Codec resampler = this.resampler;
+
+        if (resampler != null)
+        {
+            this.resampler = null;
+            resamplerBuffer = null;
+
+            try
+            {
+                resampler.close();
+            }
+            catch (Throwable t)
+            {
+                if (t instanceof ThreadDeath)
+                    throw (ThreadDeath) t;
+                else
+                    logger.error("Failed to close resampler.", t);
+            }
+        }
+    }
+
+    /**
+     * Initializes and opens a new instance of {@link #resampler} if the
+     * <tt>Format</tt>-related state of this instance deems its existence
+     * necessary.
+     */
+    private void maybeOpenResampler()
+    {
+        AudioFormat inFormat = this.effectiveFormat;
+        AudioFormat outFormat = this.format;
+
+        // We are able to translate between mono and stereo.
+        if ((inFormat.getSampleRate() == outFormat.getSampleRate())
+                && (inFormat.getSampleSizeInBits()
+                        == outFormat.getSampleSizeInBits()))
+        {
+            return;
+        }
+
+        Codec resampler
+            = WASAPIRenderer.maybeOpenResampler(inFormat, outFormat);
+
+        if (resampler == null)
+        {
+            throw new IllegalStateException(
+                    "Failed to open a codec to resample [" + inFormat
+                        + "] into [" + outFormat + "].");
+        }
+        else
+            this.resampler = resampler;
+    }
+
     /**
      * Pops a specific number of bytes from {@link #processed}. For example,
      * because such a number of bytes have been read from <tt>processed</tt> and
@@ -1790,145 +2184,103 @@ private void processOutput()
     }
 
     /**
-     * {@inheritDoc}
+     * Notifies this instance about a specific <tt>PropertyChangeEvent</tt>.
+     * <tt>WASAPIStream</tt> listens to changes in the values of the properties
+     * of the associated <tt>AudioSystem</tt>.
+     *
+     * @param ev the <tt>PropertyChangeEvent</tt> to notify this instance about
      */
-    public void read(Buffer buffer)
-        throws IOException
+    private synchronized void propertyChange(PropertyChangeEvent ev)
+        throws Exception
     {
-        // Reduce relocations as much as possible.
-        int capacity = aec ? bufferMaxLength : bufferSize;
-        byte[] data
-            = AbstractCodec2.validateByteArraySize(buffer, capacity, false);
-        int length = 0;
-
-        buffer.setLength(0);
-        buffer.setOffset(0);
+        /*
+         * The propertyChangeListener which invokes this method will be added
+         * only when acoustic echo cancellation (AEC) is enabled.
+         */
 
-        do
+        if (DeviceSystem.PROP_DEVICES.equals(ev.getPropertyName()))
         {
-            String message;
+            MediaLocator oldRenderDevice = this.renderDevice;
+            WASAPISystem audioSystem = dataSource.audioSystem;
+            CaptureDeviceInfo2 newRenderDeviceInfo
+                = audioSystem.getSelectedDevice(
+                        AudioSystem.DataFlow.PLAYBACK);
+            MediaLocator newRenderDevice
+                = (newRenderDeviceInfo == null)
+                    ? null
+                    : newRenderDeviceInfo.getLocator();
 
-            synchronized (this)
+            /*
+             * If the MediaLocators are equal, make sure that the indexes within
+             * the IMMDeviceCollection interface are equal.
+             */
+            if ((oldRenderDevice == null)
+                    ? (newRenderDevice == null)
+                    : oldRenderDevice.equals(newRenderDevice))
             {
-                /*
-                 * We explicitly want to support the case in which the user has
-                 * selected "none" for the playback/render endpoint device.
-                 * Otherwise, we could have added a check
-                 * (dataSource.aec && (render == null)). 
-                 */
-                boolean connected = (capture != null) || sourceMode;
-
-                if (connected)
-                {
-                    message = null;
-                    captureIsBusy = true;
-                    renderIsBusy = true;
-                }
-                else
-                    message = getClass().getName() + " is disconnected.";
+                int oldRenderDeviceIndex = this.renderDeviceIndex;
+                int newRenderDeviceIndex
+                    = (newRenderDevice == null)
+                        ? -1
+                        : audioSystem.getIMMDeviceIndex(
+                                newRenderDevice.getRemainder(),
+                                eRender);
+
+                if (oldRenderDeviceIndex == newRenderDeviceIndex)
+                    return;
             }
+
             /*
-             * The caller shouldn't call #read(Buffer) if this instance is
-             * disconnected or stopped. Additionally, if she does, she may be
-             * persistent. If we do not slow her down, she may hog the CPU.
+             * If there are changes either to the MediaLocators or to the
+             * indexes within the IMMDeviceCollection interface, re-connect this
+             * instance.
              */
-            if (message != null)
+            waitWhileCaptureIsBusy();
+            waitWhileRenderIsBusy();
+
+            boolean connected = (capture != null) || (iMediaObject != 0);
+
+            if (connected)
             {
-                yield();
-                throw new IOException(message);
+                boolean started = this.started;
+
+                disconnect();
+                connect();
+                if (started)
+                    start();
             }
+        }
+    }
 
-            int read;
-            Throwable cause;
+    /**
+     * {@inheritDoc}
+     *
+     * If {@link #resampler} is non-<tt>null</tt>, uses it to resample the media
+     * data read from {@link #capture} or {@link #processed} into
+     * {@link #format}.
+     */
+    public void read(Buffer buffer)
+        throws IOException
+    {
+        Codec resampler = this.resampler;
 
-            try
-            {
-                int toRead = capacity - length;
+        if (resampler == null)
+            doRead(buffer);
+        else
+        {
+            Buffer resamplerBuffer = this.resamplerBuffer;
 
-                /*
-                 * We explicitly want to support the case in which the user has
-                 * selected "none" for the playback/render endpoint device.
-                 * Otherwise, we could have used a check (render == null).
-                 */
-                boolean aec = (iMediaObject != 0);
+            doRead(resamplerBuffer);
 
-                if (aec)
-                {
-                    toRead = Math.min(toRead, processedLength);
-                    if (toRead == 0)
-                        read = 0;
-                    else
-                    {
-                        System.arraycopy(processed, 0, data, length, toRead);
-                        popFromProcessed(toRead);
-                        read = toRead;
-                    }
-                }
-                else
-                    read = capture.read(data, length, toRead);
-                cause = null;
-            }
-            catch (Throwable t)
-            {
-                /*
-                 * The exception will be rethrown after we exit the busy block
-                 * of this Renderer.
-                 */
-                read = 0;
-                cause = t;
-            }
-            finally
-            {
-                synchronized (this)
-                {
-                    captureIsBusy = false;
-                    renderIsBusy = false;
-                    notifyAll();
-                }
-            }
-            if (cause == null)
-            {
-                if (length == 0)
-                {
-                    long timeStamp = System.nanoTime();
+            int process = resampler.process(resamplerBuffer, buffer);
 
-                    buffer.setFlags(Buffer.FLAG_SYSTEM_TIME);
-                    buffer.setTimeStamp(timeStamp);
-                }
-                length += read;
-                if ((length >= capacity) || (read == 0))
-                {
-                    if (format != null)
-                        buffer.setFormat(format);
-                    buffer.setLength(length);
-                    break;
-                }
-                else
-                {
-                    /*
-                     * TODO The implementation of PushBufferStream.read(Buffer)
-                     * should not block, it should return with whatever is
-                     * available.
-                     */
-                    yield();
-                }
-            }
-            else
+            if (process == PlugIn.BUFFER_PROCESSED_FAILED)
             {
-                if (cause instanceof ThreadDeath)
-                    throw (ThreadDeath) cause;
-                else if (cause instanceof IOException)
-                    throw (IOException) cause;
-                else
-                {
-                    IOException ioe = new IOException();
-
-                    ioe.initCause(cause);
-                    throw ioe;
-                }
+                throw new IOException(
+                        "Failed to resample from [" + effectiveFormat
+                            + "] into [" + format + "].");
             }
         }
-        while (true);
     }
 
     /**
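
The new WASAPIStream.read(Buffer) above delegates the actual capture to doRead(Buffer) and, when a resampler has been opened, pushes the intermediate Buffer through Codec.process, treating PlugIn.BUFFER_PROCESSED_FAILED as an I/O error. For reference, the sketch below illustrates the wider javax.media.PlugIn return-code contract that such a loop generally honors. It is an illustration only, not part of the patch, and the helper names (CodecProcessSketch, deliverDownstream) are hypothetical.

    import java.io.IOException;

    import javax.media.Buffer;
    import javax.media.Codec;
    import javax.media.PlugIn;

    /**
     * A minimal sketch of driving a JMF/FMJ Codec until one input Buffer has
     * been fully consumed, honoring the PlugIn return codes.
     */
    final class CodecProcessSketch
    {
        static void process(Codec codec, Buffer in, Buffer out)
            throws IOException
        {
            do
            {
                int rc = codec.process(in, out);

                if ((rc & PlugIn.BUFFER_PROCESSED_FAILED) != 0)
                    throw new IOException("Codec.process failed.");
                if ((rc & PlugIn.INPUT_BUFFER_NOT_CONSUMED) == 0)
                    break; // the whole input Buffer has been consumed

                // The Codec wants to be called again with the same input. Hand
                // the output downstream first so that the next call starts with
                // an empty output Buffer.
                deliverDownstream(out);
                out.setLength(0);
                out.setOffset(0);
            }
            while (true);
        }

        /** Hypothetical downstream consumer of a filled Buffer. */
        private static void deliverDownstream(Buffer buffer)
        {
            // Illustrative placeholder only.
        }
    }
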
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/AbstractAudioRenderer.java b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/AbstractAudioRenderer.java
index 355b3e9c0cf7e0100bd8c52bae6418874739501d..d9fc9ddc75dcc0b4260843f3020c09e8a725a795 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/AbstractAudioRenderer.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/AbstractAudioRenderer.java
@@ -300,7 +300,7 @@ protected void playbackDevicePropertyChange(PropertyChangeEvent ev)
     /**
      * Notifies this instance about a specific <tt>PropertyChangeEvent</tt>.
      * <tt>AbstractAudioRenderer</tt> listens to changes in the values of the
-     * properties of {@link #audioSystem}
+     * properties of {@link #audioSystem}.
      *
      * @param ev the <tt>PropertyChangeEvent</tt> to notify this instance about
      */
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java
index 909b6ba81182614b803b4e0ea69ac3306087a837..a708e57e71e912d262480b2c64e9ea2d63b80c9a 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java
@@ -529,7 +529,7 @@ private int maybeIAudioRenderClientWrite(
     private void maybeOpenResampler()
     {
         AudioFormat inFormat = this.inputFormat;
-        AudioFormat outFormat = dstFormat;
+        AudioFormat outFormat = this.dstFormat;
 
         // We are able to translate between mono and stereo.
         if ((inFormat.getSampleRate() == outFormat.getSampleRate())
@@ -555,6 +555,44 @@ private void maybeOpenResampler()
                         outFormat.getDataType());
         }
 
+        Codec resampler = maybeOpenResampler(inFormat, outFormat);
+
+        if (resampler == null)
+        {
+            throw new IllegalStateException(
+                    "Failed to open a codec to resample [" + inFormat
+                        + "] into [" + outFormat + "].");
+        }
+        else
+        {
+            this.resampler = resampler;
+            resamplerChannels = outFormat.getChannels();
+            resamplerSampleSize = WASAPISystem.getSampleSizeInBytes(outFormat);
+            resamplerFrameSize = resamplerChannels * resamplerSampleSize;
+        }
+    }
+
+    /**
+     * Attempts to initialize and open a new <tt>Codec</tt> to resample media
+     * data from a specific input <tt>AudioFormat</tt> into a specific output
+     * <tt>AudioFormat</tt>. If no suitable resampler is found, returns
+     * <tt>null</tt>. If a suitable resampler is found but its initialization or
+     * opening fails, logs and swallows any <tt>Throwable</tt> and returns
+     * <tt>null</tt>.
+     *
+     * @param inFormat the <tt>AudioFormat</tt> in which the new instance is to
+     * input media data
+     * @param outFormat the <tt>AudioFormat</tt> in which the new instance is to
+     * output media data
+     * @return a new <tt>Codec</tt> which is able to resample media data from
+     * the specified <tt>inFormat</tt> into the specified <tt>outFormat</tt> if
+     * such a resampler could be found, initialized and opened; otherwise,
+     * <tt>null</tt>
+     */
+    public static Codec maybeOpenResampler(
+            AudioFormat inFormat,
+            AudioFormat outFormat)
+    {
         @SuppressWarnings("unchecked")
         List<String> classNames
             = PlugInManager.getPlugInList(
@@ -598,19 +636,7 @@ private void maybeOpenResampler()
                 }
             }
         }
-        if (resampler == null)
-        {
-            throw new IllegalStateException(
-                    "Failed to open a codec to resample [" + inFormat
-                        + "] into [" + outFormat + "].");
-        }
-        else
-        {
-            this.resampler = resampler;
-            resamplerChannels = outFormat.getChannels();
-            resamplerSampleSize = WASAPISystem.getSampleSizeInBytes(outFormat);
-            resamplerFrameSize = resamplerChannels * resamplerSampleSize;
-        }
+        return resampler;
     }
 
     /**