diff --git a/src/native/build.xml b/src/native/build.xml
index 29e490403de277b90c1aa74f2faf0826e15446da..e8e270f623c7346288875e29297853d41d977cae 100644
--- a/src/native/build.xml
+++ b/src/native/build.xml
@@ -384,6 +384,7 @@
       <compilerarg value="-std=c99" />
       <compilerarg value="-Wall" />
 
+      <linkerarg value="-ldmoguids" location="end" />
       <linkerarg value="-lmsdmo" location="end" />
       <linkerarg value="-lole32" location="end" />
       <linkerarg value="-m32" if="cross_32" />
diff --git a/src/native/windows/wasapi/HResultException.c b/src/native/windows/wasapi/HResultException.c
new file mode 100644
index 0000000000000000000000000000000000000000..08523c3c56c8c374471875f4c912ff3674db41dc
--- /dev/null
+++ b/src/native/windows/wasapi/HResultException.c
@@ -0,0 +1,77 @@
+/*
+ * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
+ *
+ * Distributable under LGPL license.
+ * See terms of license at gnu.org.
+ */
+
+#include "HResultException.h"
+
+#include <stdio.h> /* fflush */
+#include <tchar.h> /* _ftprintf */
+
+jclass WASAPI_hResultExceptionClass = 0;
+jmethodID WASAPI_hResultExceptionMethodID = 0;
+
+void
+WASAPI_throwNewHResultException
+    (JNIEnv *env, HRESULT hresult, const char *func, unsigned int line)
+{
+    /*
+     * Print the system message (if any) which represents a human-readable
+     * format of the specified HRESULT value on the standard error to facilitate
+     * debugging.
+     */
+    {
+        LPTSTR message = NULL;
+        DWORD length
+            = FormatMessage(
+                    FORMAT_MESSAGE_ALLOCATE_BUFFER
+                        | FORMAT_MESSAGE_FROM_SYSTEM
+                        | FORMAT_MESSAGE_IGNORE_INSERTS,
+                    /* lpSource */ NULL,
+                    hresult,
+                    /* dwLanguageId */ 0,
+                    (LPTSTR) &message,
+                    /* nSize */ 0,
+                    /* Arguments */ NULL);
+        BOOL printed = FALSE;
+
+        if (message)
+        {
+            if (length)
+            {
+                _ftprintf(stderr, TEXT("%s:%u: %s\r\n"), func, line, message);
+                printed = TRUE;
+            }
+            LocalFree(message);
+        }
+        if (!printed)
+        {
+            _ftprintf(
+                    stderr,
+                    TEXT("%s:%u: HRESULT 0x%x\r\n"),
+                    func, line,
+                    (unsigned int) hresult);
+        }
+        fflush(stderr);
+    }
+
+    {
+        jclass clazz = WASAPI_hResultExceptionClass;
+
+        if (clazz)
+        {
+            jmethodID methodID = WASAPI_hResultExceptionMethodID;
+
+            if (methodID)
+            {
+                jobject t
+                    = (*env)->NewObject(env, clazz, methodID, (jint) hresult);
+
+                if (t)
+                    (*env)->Throw(env, (jthrowable) t);
+            }
+        }
+    }
+}
diff --git a/src/native/windows/wasapi/HResultException.h b/src/native/windows/wasapi/HResultException.h
new file mode 100644
index 0000000000000000000000000000000000000000..6e1fc1affc873972c592f3920767d6d994549aa3
--- /dev/null
+++ b/src/native/windows/wasapi/HResultException.h
@@ -0,0 +1,20 @@
+/*
+ * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
+ *
+ * Distributable under LGPL license.
+ * See terms of license at gnu.org.
+ */
+
+#ifndef _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_HRESULTEXCEPTION_H_
+#define _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_HRESULTEXCEPTION_H_
+
+#include <jni.h> /* jclass, jmethodID, JNIEnv */
+#include <windows.h> /* HRESULT */
+
+extern jclass WASAPI_hResultExceptionClass;
+extern jmethodID WASAPI_hResultExceptionMethodID;
+
+void WASAPI_throwNewHResultException
+    (JNIEnv *env, HRESULT hresult, const char *func, unsigned int line);
+
+#endif /* #ifndef _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_HRESULTEXCEPTION_H_ */
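Note: WASAPI_hResultExceptionClass and WASAPI_hResultExceptionMethodID are only read by WASAPI_throwNewHResultException; they are presumably still populated by the existing JNI_OnLoad in org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI.c (not part of this hunk). A minimal, illustrative sketch of that caching, assuming the Java class org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.HResultException has an (I)V constructor as implied by the NewObject call above (the helper name WASAPI_cacheHResultException is hypothetical):

    #include "HResultException.h"

    /* Illustrative only; the real caching lives in the existing JNI_OnLoad. */
    static jint
    WASAPI_cacheHResultException(JNIEnv *env)
    {
        jclass clazz
            = (*env)->FindClass(
                    env,
                    "org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException");

        if (clazz)
        {
            /* Keep the class reference alive beyond the current native frame. */
            clazz = (*env)->NewGlobalRef(env, clazz);
            if (clazz)
            {
                jmethodID methodID
                    = (*env)->GetMethodID(env, clazz, "<init>", "(I)V");

                if (methodID)
                {
                    WASAPI_hResultExceptionClass = clazz;
                    WASAPI_hResultExceptionMethodID = methodID;
                    return 0;
                }
            }
        }
        return -1;
    }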
diff --git a/src/native/windows/wasapi/MediaBuffer.c b/src/native/windows/wasapi/MediaBuffer.c
new file mode 100644
index 0000000000000000000000000000000000000000..c2d42a778d7058e08595f2e3f9bc888c3a6fd963
--- /dev/null
+++ b/src/native/windows/wasapi/MediaBuffer.c
@@ -0,0 +1,150 @@
+/*
+ * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
+ *
+ * Distributable under LGPL license.
+ * See terms of license at gnu.org.
+ */
+
+#include "MediaBuffer.h"
+
+#include <objbase.h> /* CoTaskMemAlloc */
+#include <string.h> /* memcpy, memmove */
+#include <windows.h> /* InterlockedDecrement, InterlockedIncrement */
+
+struct MediaBuffer
+{
+    CONST_VTBL IMediaBufferVtbl *lpVtbl;
+
+    BYTE *_buffer;
+    DWORD _length;
+    DWORD _maxLength;
+    LONG _refCount;
+    IMediaBufferVtbl _vtbl;
+};
+
+static STDMETHODIMP_(ULONG)
+MediaBuffer_AddRef(IMediaBuffer *thiz)
+{
+    return InterlockedIncrement(&(((MediaBuffer *) thiz)->_refCount));
+}
+
+static STDMETHODIMP
+MediaBuffer_GetBufferAndLength
+    (IMediaBuffer *thiz, BYTE **ppBuffer, DWORD *pcbLength)
+{
+    if (!ppBuffer && !pcbLength)
+        return E_POINTER;
+    else
+    {
+        MediaBuffer *thiz_ = (MediaBuffer *) thiz;
+
+        if (ppBuffer)
+            *ppBuffer = thiz_->_buffer;
+        if (pcbLength)
+            *pcbLength = thiz_->_length;
+        return S_OK;
+    }
+}
+
+static STDMETHODIMP
+MediaBuffer_GetMaxLength(IMediaBuffer *thiz, DWORD *pcbMaxLength)
+{
+    if (pcbMaxLength)
+    {
+        *pcbMaxLength = ((MediaBuffer *) thiz)->_maxLength;
+        return S_OK;
+    }
+    else
+        return E_POINTER;
+}
+
+static STDMETHODIMP
+MediaBuffer_QueryInterface(IMediaBuffer *thiz, REFIID riid, void **ppvObject)
+{
+    if (ppvObject)
+    {
+        if (IsEqualIID(&IID_IUnknown, riid)
+                || IsEqualIID(&IID_IMediaBuffer, riid))
+        {
+            *ppvObject = thiz;
+            IMediaBuffer_AddRef(thiz);
+            return S_OK;
+        }
+        else
+        {
+            *ppvObject = NULL;
+            return E_NOINTERFACE;
+        }
+    }
+    else
+        return E_POINTER;
+}
+
+static STDMETHODIMP_(ULONG)
+MediaBuffer_Release(IMediaBuffer *thiz)
+{
+    LONG refCount = InterlockedDecrement(&(((MediaBuffer *) thiz)->_refCount));
+
+    if (refCount == 0)
+        CoTaskMemFree(thiz);
+    return refCount;
+}
+
+static STDMETHODIMP
+MediaBuffer_SetLength(IMediaBuffer *thiz, DWORD cbLength)
+{
+    MediaBuffer *thiz_ = (MediaBuffer *) thiz;
+
+    if (cbLength > thiz_->_maxLength)
+        return E_INVALIDARG;
+    else
+    {
+        thiz_->_length = cbLength;
+        return S_OK;
+    }
+}
+
+MediaBuffer *
+MediaBuffer_alloc(DWORD maxLength)
+{
+    size_t sizeofMediaBuffer = sizeof(MediaBuffer);
+    MediaBuffer *thiz = CoTaskMemAlloc(sizeofMediaBuffer + maxLength);
+
+    if (thiz)
+    {
+        IMediaBufferVtbl *lpVtbl = &(thiz->_vtbl);
+
+        lpVtbl->AddRef = MediaBuffer_AddRef;
+        lpVtbl->GetBufferAndLength = MediaBuffer_GetBufferAndLength;
+        lpVtbl->GetMaxLength = MediaBuffer_GetMaxLength;
+        lpVtbl->QueryInterface = MediaBuffer_QueryInterface;
+        lpVtbl->Release = MediaBuffer_Release;
+        lpVtbl->SetLength = MediaBuffer_SetLength;
+        thiz->lpVtbl = lpVtbl;
+
+        thiz->_buffer = ((BYTE *) thiz) + sizeofMediaBuffer;
+        thiz->_length = 0;
+        thiz->_maxLength = maxLength;
+        thiz->_refCount = 1;
+    }
+    return thiz;
+}
+
+DWORD
+MediaBuffer_pop(MediaBuffer *thiz, BYTE *buffer, DWORD length)
+{
+    if (buffer)
+        memcpy(buffer, thiz->_buffer, length);
+    thiz->_length -= length;
+    /* Shift the remaining bytes to the front of the buffer. */
+    memmove(thiz->_buffer, thiz->_buffer + length, thiz->_length);
+    return length;
+}
+
+DWORD
+MediaBuffer_push(MediaBuffer *thiz, BYTE *buffer, DWORD length)
+{
+    memcpy(thiz->_buffer + thiz->_length, buffer, length);
+    thiz->_length += length;
+    return length;
+}
diff --git a/src/native/windows/wasapi/MediaBuffer.h b/src/native/windows/wasapi/MediaBuffer.h
new file mode 100644
index 0000000000000000000000000000000000000000..99ed0d77d3283f6683deb59a9cc9f33634210b23
--- /dev/null
+++ b/src/native/windows/wasapi/MediaBuffer.h
@@ -0,0 +1,19 @@
+/*
+ * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
+ *
+ * Distributable under LGPL license.
+ * See terms of license at gnu.org.
+ */
+
+#ifndef _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_MEDIABUFFER_
+#define _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_MEDIABUFFER_
+
+#include "MinGW_dmo.h" /* IMediaBuffer */
+
+typedef struct MediaBuffer MediaBuffer;
+
+MediaBuffer *MediaBuffer_alloc(DWORD maxLength);
+DWORD MediaBuffer_pop(MediaBuffer *thiz, BYTE *buffer, DWORD length);
+DWORD MediaBuffer_push(MediaBuffer *thiz, BYTE *buffer, DWORD length);
+
+#endif /* #ifndef _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_MEDIABUFFER_ */
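For context, MediaBuffer is a CoTaskMemAlloc-backed IMediaBuffer implementation whose payload is stored directly after the struct; MediaBuffer_push appends bytes at the end and MediaBuffer_pop copies bytes out of the front and compacts the remainder. Neither function bounds-checks, so callers must stay within maxLength and within the currently buffered length. A minimal, illustrative usage sketch (not part of the patch; assumes the same COM macros used elsewhere in this patch are available):

    #include "MediaBuffer.h"

    static void
    MediaBuffer_example(void)
    {
        /* Capacity is fixed at allocation time; the initial length is 0. */
        MediaBuffer *mb = MediaBuffer_alloc(/* maxLength */ 320);

        if (mb)
        {
            BYTE in[160] = { 0 };
            BYTE out[160];

            /* Append 160 bytes (the caller must not exceed maxLength). */
            MediaBuffer_push(mb, in, sizeof(in));
            /* Copy 160 bytes out and drop them from the front. */
            MediaBuffer_pop(mb, out, sizeof(out));
            /* MediaBuffer implements IMediaBuffer, so release it via COM. */
            IMediaBuffer_Release((IMediaBuffer *) mb);
        }
    }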
diff --git a/src/native/windows/wasapi/MinGW_dmo.h b/src/native/windows/wasapi/MinGW_dmo.h
new file mode 100644
index 0000000000000000000000000000000000000000..d743c90179dadf400466757a7f3629f04900641b
--- /dev/null
+++ b/src/native/windows/wasapi/MinGW_dmo.h
@@ -0,0 +1,31 @@
+/*
+ * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
+ *
+ * Distributable under LGPL license.
+ * See terms of license at gnu.org.
+ */
+
+#ifndef _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_MINGWDMO_H_
+#define _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_MINGWDMO_H_
+
+#ifndef _COM_Outptr_
+#define _COM_Outptr_
+#endif /* #ifndef _COM_Outptr_ */
+#ifndef _Out_opt_
+#define _Out_opt_
+#endif /* #ifndef _Out_opt_ */
+#ifndef _Out_writes_
+#define _Out_writes_(x)
+#endif /* #ifndef _Out_writes_ */
+#ifndef _Out_writes_bytes_
+#define _Out_writes_bytes_(x)
+#endif /* #ifndef _Out_writes_bytes_ */
+#ifndef _Out_writes_to_
+#define _Out_writes_to_(x,y)
+#endif /* #ifndef _Out_writes_to_ */
+#ifndef _Outptr_opt_result_bytebuffer_
+#define _Outptr_opt_result_bytebuffer_(x)
+#endif /* #ifndef _Outptr_opt_result_bytebuffer_ */
+#include <dmo.h>
+
+#endif /* #ifndef _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_MINGWDMO_H_ */
diff --git a/src/native/windows/wasapi/Typecasting.c b/src/native/windows/wasapi/Typecasting.c
new file mode 100644
index 0000000000000000000000000000000000000000..d55696c1e675334f2e00f8b2a1046cb488fcda58
--- /dev/null
+++ b/src/native/windows/wasapi/Typecasting.c
@@ -0,0 +1,36 @@
+/*
+ * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
+ *
+ * Distributable under LGPL license.
+ * See terms of license at gnu.org.
+ */
+
+#include "Typecasting.h"
+
+#include <objbase.h>
+
+#include "HResultException.h"
+
+HRESULT
+WASAPI_iidFromString(JNIEnv *env, jstring str, LPIID iid)
+{
+    HRESULT hr;
+
+    if (str)
+    {
+        const jchar *sz = (*env)->GetStringChars(env, str, NULL);
+
+        if (sz)
+        {
+            hr = IIDFromString((LPOLESTR) sz, iid);
+            (*env)->ReleaseStringChars(env, str, sz);
+        }
+        else
+            hr = E_OUTOFMEMORY;
+        if (FAILED(hr))
+            WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    }
+    else
+        hr = S_OK;
+    return hr;
+}
diff --git a/src/native/windows/wasapi/Typecasting.h b/src/native/windows/wasapi/Typecasting.h
new file mode 100644
index 0000000000000000000000000000000000000000..952f0e76bddf1de79c943619dbe3af9532aff938
--- /dev/null
+++ b/src/native/windows/wasapi/Typecasting.h
@@ -0,0 +1,16 @@
+/*
+ * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
+ *
+ * Distributable under LGPL license.
+ * See terms of license at gnu.org.
+ */
+
+#ifndef _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_TYPECASTING_H_
+#define _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_TYPECASTING_H_
+
+#include <jni.h> /* JNIEnv, jstring */
+#include <windows.h> /* HRESULT */
+
+HRESULT WASAPI_iidFromString(JNIEnv *env, jstring str, LPIID iid);
+
+#endif /* #ifndef _ORG_JITSI_IMPL_NEOMEDIA_JMFEXT_MEDIA_PROTOCOL_WASAPI_TYPECASTING_H_ */
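WASAPI_iidFromString centralizes the jstring-to-IID conversion that was previously inlined several times in org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI.c. Note its contract: a null jstring yields S_OK without writing to *iid, and on failure the helper has already thrown HResultException into the JNIEnv. A hedged sketch of the intended calling pattern (Example_queryInterface is hypothetical):

    #include "Typecasting.h"

    static void *
    Example_queryInterface(JNIEnv *env, IUnknown *unknown, jstring iid)
    {
        HRESULT hr;
        IID iid_;
        void *pvObject = NULL;

        /* On a null iid this returns S_OK and leaves iid_ unset, so iid_
         * must only be read when a non-null string was actually passed. */
        hr = WASAPI_iidFromString(env, iid, &iid_);
        if (SUCCEEDED(hr) && iid)
            hr = IUnknown_QueryInterface(unknown, &iid_, &pvObject);
        return FAILED(hr) ? NULL : pvObject;
    }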
diff --git a/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP.c b/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP.c
new file mode 100644
index 0000000000000000000000000000000000000000..e0e2b63e8e5361e3cb5243dc113f73a74b7e91d8
--- /dev/null
+++ b/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP.c
@@ -0,0 +1,472 @@
+/*
+ * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
+ *
+ * Distributable under LGPL license.
+ * See terms of license at gnu.org.
+ */
+
+#include "org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP.h"
+
+#include <objbase.h> /* CoTaskMemAlloc */
+#include <propsys.h> /* IPropertyStore */
+#include <stdint.h> /* intptr_t */
+
+#include "HResultException.h"
+#include "MediaBuffer.h"
+#include "MinGW_dmo.h" /* DMO_MEDIA_TYPE, IMediaObject */
+#include "Typecasting.h"
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1MEDIA_1TYPE_1fill
+    (JNIEnv *env, jclass clazz, jlong thiz, jstring majortype, jstring subtype,
+        jboolean bFixedSizeSamples, jboolean bTemporalCompression,
+        jint lSampleSize, jstring formattype, jlong pUnk, jint cbFormat,
+        jlong pbFormat)
+{
+    HRESULT hr;
+    DMO_MEDIA_TYPE *thiz_ = (DMO_MEDIA_TYPE *) (intptr_t) thiz;
+
+    hr = WASAPI_iidFromString(env, majortype, &(thiz_->majortype));
+    if (SUCCEEDED(hr))
+    {
+        hr = WASAPI_iidFromString(env, subtype, &(thiz_->subtype));
+        if (SUCCEEDED(hr))
+        {
+            hr = WASAPI_iidFromString(env, formattype, &(thiz_->formattype));
+            if (SUCCEEDED(hr))
+            {
+                thiz_->bFixedSizeSamples
+                    = (JNI_TRUE == bFixedSizeSamples) ? TRUE : FALSE;
+                thiz_->bTemporalCompression
+                    = (JNI_TRUE == bTemporalCompression) ? TRUE : FALSE;
+                thiz_->lSampleSize = (ULONG) lSampleSize;
+                thiz_->pUnk = (IUnknown *) (intptr_t) pUnk;
+                thiz_->cbFormat = (ULONG) cbFormat;
+                thiz_->pbFormat = (BYTE *) (intptr_t) pbFormat;
+            }
+        }
+    }
+    if (FAILED(hr))
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    return (jint) hr;
+}
+
+JNIEXPORT void JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1MEDIA_1TYPE_1setCbFormat
+    (JNIEnv *env, jclass clazz, jlong thiz, jint cbFormat)
+{
+    ((DMO_MEDIA_TYPE *) (intptr_t) thiz)->cbFormat = (ULONG) cbFormat;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1MEDIA_1TYPE_1setFormattype
+    (JNIEnv *env, jclass clazz, jlong thiz, jstring formattype)
+{
+    return
+        WASAPI_iidFromString(
+                env,
+                formattype,
+                &(((DMO_MEDIA_TYPE *) (intptr_t) thiz)->formattype));
+}
+
+JNIEXPORT void JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1MEDIA_1TYPE_1setLSampleSize
+    (JNIEnv *env, jclass clazz, jlong thiz, jint lSampleSize)
+{
+    ((DMO_MEDIA_TYPE *) (intptr_t) thiz)->lSampleSize = (ULONG) lSampleSize;
+}
+
+JNIEXPORT void JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1MEDIA_1TYPE_1setPbFormat
+    (JNIEnv *env, jclass clazz, jlong thiz, jlong pbFormat)
+{
+    ((DMO_MEDIA_TYPE *) (intptr_t) thiz)->pbFormat = (BYTE *) (intptr_t) pbFormat;
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1OUTPUT_1DATA_1BUFFER_1alloc
+    (JNIEnv *env, jclass clazz, jlong pBuffer, jint dwStatus, jlong rtTimestamp,
+        jlong rtTimelength)
+{
+    DMO_OUTPUT_DATA_BUFFER *thiz = CoTaskMemAlloc(sizeof(DMO_OUTPUT_DATA_BUFFER));
+
+    if (thiz)
+    {
+        thiz->pBuffer = (IMediaBuffer *) (intptr_t) pBuffer;
+        thiz->dwStatus = (DWORD) dwStatus;
+        thiz->rtTimestamp = (REFERENCE_TIME) rtTimestamp;
+        thiz->rtTimelength = (REFERENCE_TIME) rtTimelength;
+    }
+    return (jlong) (intptr_t) thiz;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1OUTPUT_1DATA_1BUFFER_1getDwStatus
+    (JNIEnv *env, jclass clazz, jlong thiz)
+{
+    return (jint) (((DMO_OUTPUT_DATA_BUFFER *) (intptr_t) thiz)->dwStatus);
+}
+
+JNIEXPORT void JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1OUTPUT_1DATA_1BUFFER_1setDwStatus
+    (JNIEnv *env, jclass clazz, jlong thiz, jint dwStatus)
+{
+    ((DMO_OUTPUT_DATA_BUFFER *) (intptr_t) thiz)->dwStatus = (DWORD) dwStatus;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1AddRef
+    (JNIEnv *env, jclass clazz, jlong thiz)
+{
+    return (jint) IMediaBuffer_AddRef((IMediaBuffer *) (intptr_t) thiz);
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1GetBuffer
+    (JNIEnv *env, jclass clazz, jlong thiz)
+{
+    BYTE *pBuffer;
+    HRESULT hr
+        = IMediaBuffer_GetBufferAndLength(
+                (IMediaBuffer *) (intptr_t) thiz,
+                &pBuffer, NULL);
+
+    if (FAILED(hr))
+    {
+        pBuffer = NULL;
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    }
+    return (jlong) (intptr_t) pBuffer;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1GetLength
+    (JNIEnv *env, jclass clazz, jlong thiz)
+{
+    DWORD cbLength;
+    HRESULT hr
+        = IMediaBuffer_GetBufferAndLength(
+                (IMediaBuffer *) (intptr_t) thiz,
+                NULL, &cbLength);
+
+    if (FAILED(hr))
+    {
+        cbLength = 0;
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    }
+    return (jint) cbLength;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1GetMaxLength
+    (JNIEnv *env, jclass clazz, jlong thiz)
+{
+    DWORD cbMaxLength;
+    HRESULT hr
+        = IMediaBuffer_GetMaxLength(
+                (IMediaBuffer *) (intptr_t) thiz,
+                &cbMaxLength);
+
+    if (FAILED(hr))
+    {
+        cbMaxLength = 0;
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    }
+    return (jint) cbMaxLength;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1Release
+    (JNIEnv *env, jclass clazz, jlong thiz)
+{
+    return (jint) IMediaBuffer_Release((IMediaBuffer *) (intptr_t) thiz);
+}
+
+JNIEXPORT void JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1SetLength
+    (JNIEnv *env, jclass clazz, jlong thiz, jint cbLength)
+{
+    HRESULT hr
+        = IMediaBuffer_SetLength(
+                (IMediaBuffer *) (intptr_t) thiz,
+                (DWORD) cbLength);
+
+    if (FAILED(hr))
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1GetInputStatus
+    (JNIEnv *env, jclass clazz, jlong thiz, jint dwInputStreamIndex)
+{
+    DWORD dwFlags;
+    HRESULT hr
+        = IMediaObject_GetInputStatus(
+                (IMediaObject *) (intptr_t) thiz,
+                (DWORD) dwInputStreamIndex,
+                &dwFlags);
+
+    if (FAILED(hr))
+    {
+        dwFlags = 0;
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    }
+    return (jint) dwFlags;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1ProcessInput
+    (JNIEnv *env, jclass clazz, jlong thiz, jint dwInputStreamIndex,
+        jlong pBuffer, jint dwFlags, jlong rtTimestamp, jlong rtTimelength)
+{
+    HRESULT hr
+        = IMediaObject_ProcessInput(
+                (IMediaObject *) (intptr_t) thiz,
+                (DWORD) dwInputStreamIndex,
+                (IMediaBuffer *) (intptr_t) pBuffer,
+                (DWORD) dwFlags,
+                (REFERENCE_TIME) rtTimestamp,
+                (REFERENCE_TIME) rtTimelength);
+
+    if (FAILED(hr))
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    return (jint) hr;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1ProcessOutput
+    (JNIEnv *env, jclass clazz, jlong thiz, jint dwFlags,
+        jint cOutputBufferCount, jlong pOutputBuffers)
+{
+    DWORD dwStatus;
+    HRESULT hr
+        = IMediaObject_ProcessOutput(
+                (IMediaObject *) (intptr_t) thiz,
+                (DWORD) dwFlags,
+                (DWORD) cOutputBufferCount,
+                (DMO_OUTPUT_DATA_BUFFER *) (intptr_t) pOutputBuffers,
+                &dwStatus);
+
+    if (FAILED(hr))
+    {
+        dwStatus = 0;
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    }
+    return (jint) dwStatus;
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1QueryInterface
+    (JNIEnv *env, jclass clazz, jlong thiz, jstring iid)
+{
+    HRESULT hr;
+    IID iid_;
+    void *pvObject;
+
+    hr = WASAPI_iidFromString(env, iid, &iid_);
+    if (SUCCEEDED(hr))
+    {
+        hr = IMediaObject_QueryInterface((IMediaObject *) (intptr_t) thiz, &iid_, &pvObject);
+        if (FAILED(hr))
+        {
+            pvObject = NULL;
+            WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+        }
+    }
+    else
+        pvObject = NULL;
+    return (jlong) (intptr_t) pvObject;
+}
+
+JNIEXPORT void JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1Release
+    (JNIEnv *env, jclass clazz, jlong thiz)
+{
+    IMediaObject_Release((IMediaObject *) (intptr_t) thiz);
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1SetInputType
+    (JNIEnv *env, jclass clazz, jlong thiz, jint dwInputStreamIndex, jlong pmt,
+        jint dwFlags)
+{
+    HRESULT hr
+        = IMediaObject_SetInputType(
+                (IMediaObject *) (intptr_t) thiz,
+                (DWORD) dwInputStreamIndex,
+                (const DMO_MEDIA_TYPE *) (intptr_t) pmt,
+                (DWORD) dwFlags);
+
+    if (FAILED(hr))
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    return (jint) hr;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1SetOutputType
+    (JNIEnv *env, jclass clazz, jlong thiz, jint dwOutputStreamIndex, jlong pmt,
+        jint dwFlags)
+{
+    HRESULT hr
+        = IMediaObject_SetOutputType(
+                (IMediaObject *) (intptr_t) thiz,
+                (DWORD) dwOutputStreamIndex,
+                (const DMO_MEDIA_TYPE *) (intptr_t) pmt,
+                (DWORD) dwFlags);
+
+    if (FAILED(hr))
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    return (jint) hr;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IPropertyStore_1SetValue__JJZ
+    (JNIEnv *env, jclass clazz, jlong thiz, jlong key, jboolean value)
+{
+    PROPVARIANT propvar;
+    HRESULT hr;
+
+    PropVariantInit(&propvar);
+    propvar.boolVal = (JNI_TRUE == value) ? VARIANT_TRUE : VARIANT_FALSE;
+    propvar.vt = VT_BOOL;
+    hr
+        = IPropertyStore_SetValue(
+                (IPropertyStore *) (intptr_t) thiz,
+                (REFPROPERTYKEY) (intptr_t) key,
+                &propvar);
+    if (FAILED(hr))
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    return (jint) hr;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IPropertyStore_1SetValue__JJI
+    (JNIEnv *env, jclass clazz, jlong thiz, jlong key, jint value)
+{
+    PROPVARIANT propvar;
+    HRESULT hr;
+
+    PropVariantInit(&propvar);
+    propvar.lVal = value;
+    propvar.vt = VT_I4;
+    hr
+        = IPropertyStore_SetValue(
+                (IPropertyStore *) (intptr_t) thiz,
+                (REFPROPERTYKEY) (intptr_t) key,
+                &propvar);
+    if (FAILED(hr))
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    return (jint) hr;
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MediaBuffer_1alloc
+    (JNIEnv *env, jclass clazz, jint maxLength)
+{
+    return (jlong) (intptr_t) MediaBuffer_alloc((DWORD) maxLength);
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MediaBuffer_1pop
+    (JNIEnv *env, jclass clazz, jlong thiz, jbyteArray buffer, jint offset,
+        jint length)
+{
+    jint read;
+
+    if (buffer)
+    {
+        jbyte *buffer_ = (*env)->GetPrimitiveArrayCritical(env, buffer, NULL);
+
+        if (buffer_)
+        {
+            read
+                = MediaBuffer_pop(
+                        (MediaBuffer *) (intptr_t) thiz,
+                        ((BYTE *) buffer_) + offset,
+                        (DWORD) length);
+            (*env)->ReleasePrimitiveArrayCritical(env, buffer, buffer_, 0);
+        }
+        else
+            read = 0;
+    }
+    else if (length)
+    {
+        read
+            = (jint) MediaBuffer_pop(
+                    (MediaBuffer *) (intptr_t) thiz,
+                    NULL,
+                    length);
+    }
+    else
+        read = 0;
+    return read;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MediaBuffer_1push
+    (JNIEnv *env, jclass clazz, jlong thiz, jbyteArray buffer, jint offset,
+        jint length)
+{
+    jbyte *buffer_ = (*env)->GetPrimitiveArrayCritical(env, buffer, NULL);
+    jint written;
+
+    if (buffer_)
+    {
+        written
+            = (jint)
+                MediaBuffer_push(
+                        (MediaBuffer *) (intptr_t) thiz,
+                        ((BYTE *) buffer_) + offset,
+                        (DWORD) length);
+        (*env)->ReleasePrimitiveArrayCritical(env, buffer, buffer_, JNI_ABORT);
+    }
+    else
+        written = 0;
+    return written;
+}
+
+JNIEXPORT jlong JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MoCreateMediaType
+    (JNIEnv *env, jclass clazz, jint cbFormat)
+{
+    DMO_MEDIA_TYPE *pmt;
+    HRESULT hr = MoCreateMediaType(&pmt, (DWORD) cbFormat);
+
+    if (FAILED(hr))
+    {
+        pmt = NULL;
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+    }
+    return (jlong) (intptr_t) pmt;
+}
+
+JNIEXPORT void JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MoDeleteMediaType
+    (JNIEnv *env, jclass clazz, jlong pmt)
+{
+    HRESULT hr = MoDeleteMediaType((DMO_MEDIA_TYPE *) (intptr_t) pmt);
+
+    if (FAILED(hr))
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+}
+
+JNIEXPORT void JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MoFreeMediaType
+    (JNIEnv *env, jclass clazz, jlong pmt)
+{
+    HRESULT hr = MoFreeMediaType((DMO_MEDIA_TYPE *) (intptr_t) pmt);
+
+    if (FAILED(hr))
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+}
+
+JNIEXPORT void JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MoInitMediaType
+    (JNIEnv *env, jclass clazz, jlong pmt, jint cbFormat)
+{
+    HRESULT hr
+        = MoInitMediaType((DMO_MEDIA_TYPE *) (intptr_t) pmt, (DWORD) cbFormat);
+
+    if (FAILED(hr))
+        WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
+}
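The wrappers above are intentionally thin: each forwards to the corresponding IMediaObject/IMediaBuffer COM macro and converts failed HRESULTs into HResultException. For orientation, a hypothetical native-side sketch of the processing cycle they expose to Java, assuming a DMO whose input and output types have already been set:

    #include "MediaBuffer.h"
    #include "MinGW_dmo.h"

    /* Illustrative only: one input/output pass over a configured DMO. */
    static HRESULT
    Example_processOnce(IMediaObject *dmo, IMediaBuffer *in, MediaBuffer *out)
    {
        DMO_OUTPUT_DATA_BUFFER odb;
        DWORD dwStatus;
        HRESULT hr;

        /* Feed captured audio to input stream 0. */
        hr = IMediaObject_ProcessInput(dmo, 0, in, /* dwFlags */ 0, 0, 0);
        if (FAILED(hr))
            return hr;

        /* Let the DMO write into the output IMediaBuffer. */
        odb.pBuffer = (IMediaBuffer *) out;
        odb.dwStatus = 0;
        odb.rtTimestamp = 0;
        odb.rtTimelength = 0;
        hr = IMediaObject_ProcessOutput(dmo, /* dwFlags */ 0, 1, &odb, &dwStatus);
        /* DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE in odb.dwStatus means the DMO
         * has more output and ProcessOutput should be called again. */
        return hr;
    }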
diff --git a/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP.h b/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP.h
new file mode 100644
index 0000000000000000000000000000000000000000..3aabf9caf86e1899f4d67ee813af90e34e7b2637
--- /dev/null
+++ b/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP.h
@@ -0,0 +1,263 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP */
+
+#ifndef _Included_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+#define _Included_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+#ifdef __cplusplus
+extern "C" {
+#endif
+#undef org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_E_NOTACCEPTING
+#define org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_E_NOTACCEPTING -2147220988L
+#undef org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_INPUT_STATUSF_ACCEPT_DATA
+#define org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_INPUT_STATUSF_ACCEPT_DATA 1L
+#undef org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE
+#define org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE 16777216L
+#undef org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_SET_TYPEF_TEST_ONLY
+#define org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_SET_TYPEF_TEST_ONLY 1L
+#undef org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_SINGLE_CHANNEL_AEC
+#define org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_SINGLE_CHANNEL_AEC 0L
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    DMO_MEDIA_TYPE_fill
+ * Signature: (JLjava/lang/String;Ljava/lang/String;ZZILjava/lang/String;JIJ)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1MEDIA_1TYPE_1fill
+  (JNIEnv *, jclass, jlong, jstring, jstring, jboolean, jboolean, jint, jstring, jlong, jint, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    DMO_MEDIA_TYPE_setCbFormat
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1MEDIA_1TYPE_1setCbFormat
+  (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    DMO_MEDIA_TYPE_setFormattype
+ * Signature: (JLjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1MEDIA_1TYPE_1setFormattype
+  (JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    DMO_MEDIA_TYPE_setLSampleSize
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1MEDIA_1TYPE_1setLSampleSize
+  (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    DMO_MEDIA_TYPE_setPbFormat
+ * Signature: (JJ)V
+ */
+JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1MEDIA_1TYPE_1setPbFormat
+  (JNIEnv *, jclass, jlong, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    DMO_OUTPUT_DATA_BUFFER_alloc
+ * Signature: (JIJJ)J
+ */
+JNIEXPORT jlong JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1OUTPUT_1DATA_1BUFFER_1alloc
+  (JNIEnv *, jclass, jlong, jint, jlong, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    DMO_OUTPUT_DATA_BUFFER_getDwStatus
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1OUTPUT_1DATA_1BUFFER_1getDwStatus
+  (JNIEnv *, jclass, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    DMO_OUTPUT_DATA_BUFFER_setDwStatus
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_DMO_1OUTPUT_1DATA_1BUFFER_1setDwStatus
+  (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaBuffer_AddRef
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1AddRef
+  (JNIEnv *, jclass, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaBuffer_GetBuffer
+ * Signature: (J)J
+ */
+JNIEXPORT jlong JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1GetBuffer
+  (JNIEnv *, jclass, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaBuffer_GetLength
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1GetLength
+  (JNIEnv *, jclass, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaBuffer_GetMaxLength
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1GetMaxLength
+  (JNIEnv *, jclass, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaBuffer_Release
+ * Signature: (J)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1Release
+  (JNIEnv *, jclass, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaBuffer_SetLength
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaBuffer_1SetLength
+  (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaObject_GetInputStatus
+ * Signature: (JI)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1GetInputStatus
+  (JNIEnv *, jclass, jlong, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaObject_ProcessInput
+ * Signature: (JIJIJJ)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1ProcessInput
+  (JNIEnv *, jclass, jlong, jint, jlong, jint, jlong, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaObject_ProcessOutput
+ * Signature: (JIIJ)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1ProcessOutput
+  (JNIEnv *, jclass, jlong, jint, jint, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaObject_QueryInterface
+ * Signature: (JLjava/lang/String;)J
+ */
+JNIEXPORT jlong JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1QueryInterface
+  (JNIEnv *, jclass, jlong, jstring);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaObject_Release
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1Release
+  (JNIEnv *, jclass, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaObject_SetInputType
+ * Signature: (JIJI)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1SetInputType
+  (JNIEnv *, jclass, jlong, jint, jlong, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IMediaObject_SetOutputType
+ * Signature: (JIJI)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IMediaObject_1SetOutputType
+  (JNIEnv *, jclass, jlong, jint, jlong, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IPropertyStore_SetValue
+ * Signature: (JJZ)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IPropertyStore_1SetValue__JJZ
+  (JNIEnv *, jclass, jlong, jlong, jboolean);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    IPropertyStore_SetValue
+ * Signature: (JJI)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_IPropertyStore_1SetValue__JJI
+  (JNIEnv *, jclass, jlong, jlong, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    MediaBuffer_alloc
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MediaBuffer_1alloc
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    MediaBuffer_pop
+ * Signature: (J[BII)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MediaBuffer_1pop
+  (JNIEnv *, jclass, jlong, jbyteArray, jint, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    MediaBuffer_push
+ * Signature: (J[BII)I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MediaBuffer_1push
+  (JNIEnv *, jclass, jlong, jbyteArray, jint, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    MoCreateMediaType
+ * Signature: (I)J
+ */
+JNIEXPORT jlong JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MoCreateMediaType
+  (JNIEnv *, jclass, jint);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    MoDeleteMediaType
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MoDeleteMediaType
+  (JNIEnv *, jclass, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    MoFreeMediaType
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MoFreeMediaType
+  (JNIEnv *, jclass, jlong);
+
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP
+ * Method:    MoInitMediaType
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_VoiceCaptureDSP_MoInitMediaType
+  (JNIEnv *, jclass, jlong, jint);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI.c b/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI.c
index d5029b61d807098747ad86981e59379ddfd58d02..536d4a3b383a13ee1db76d3cf2f55048da8accc2 100644
--- a/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI.c
+++ b/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI.c
@@ -19,11 +19,12 @@
 #include <mmreg.h> /* WAVEFORMATEX */
 #include <objbase.h>
 #include <stdint.h> /* intptr_t */
-#include <stdio.h> /* fflush */
 #include <string.h>
-#include <tchar.h> /* _ftprintf */
 #include <windows.h> /* LoadLibrary, GetProcAddress */
 
+#include "HResultException.h"
+#include "Typecasting.h"
+
 #ifndef __uuidof
 #define __uuidof(i) &i
 #endif /* #ifndef __uuidof */
@@ -54,8 +55,6 @@ ULONG STDMETHODCALLTYPE MMNotificationClient_Release
 static UINT32 WASAPI_audiocopy
     (void *src, jint srcSampleSize, jint srcChannels, void *dst,
         jint dstSampleSize, jint dstChannels, UINT32 numFramesRequested);
-static void WASAPI_throwNewHResultException
-    (JNIEnv *, HRESULT, const char *func, unsigned int line);
 
 static jclass MMNotificationClient_class = 0;
 static jmethodID MMNotificationClient_onDefaultDeviceChangedMethodID = 0;
@@ -63,8 +62,6 @@ static jmethodID MMNotificationClient_onDeviceAddedMethodID = 0;
 static jmethodID MMNotificationClient_onDeviceRemovedMethodID = 0;
 static jmethodID MMNotificationClient_onDeviceStateChangedMethodID = 0;
 static jmethodID MMNotificationClient_onPropertyValueChangedMethodID = 0;
-static jclass WASAPI_hResultExceptionClass = 0;
-static jmethodID WASAPI_hResultExceptionMethodID = 0;
 /**
  * The single IMMNotificationClient instance/implementation which is to be
  * registered with every IMMDeviceEnumerator instance.
@@ -184,25 +181,7 @@ Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI_CoCreateInstanc
     {
         IID iid_;
 
-        if (iid)
-        {
-            const jchar *szIid = (*env)->GetStringChars(env, iid, NULL);
-
-            if (szIid)
-            {
-                hr = IIDFromString((LPOLESTR) szIid, &iid_);
-                (*env)->ReleaseStringChars(env, iid, szIid);
-                if (FAILED(hr))
-                {
-                    WASAPI_throwNewHResultException(
-                            env,
-                            hr,
-                            __func__, __LINE__);
-                }
-            }
-            else
-                hr = E_OUTOFMEMORY;
-        }
+        hr = WASAPI_iidFromString(env, iid, &iid_);
         if (SUCCEEDED(hr))
         {
             hr
@@ -499,22 +478,7 @@ Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI_IAudioClient_1G
     IID iid_;
     void *pv;
 
-    if (iid)
-    {
-        const jchar *szIid = (*env)->GetStringChars(env, iid, NULL);
-
-        if (szIid)
-        {
-            hr = IIDFromString((LPOLESTR) szIid, &iid_);
-            (*env)->ReleaseStringChars(env, iid, szIid);
-            if (FAILED(hr))
-                WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
-        }
-        else
-            hr = E_OUTOFMEMORY;
-    }
-    else
-        hr = S_OK;
+    hr = WASAPI_iidFromString(env, iid, &iid_);
     if (SUCCEEDED(hr))
     {
         hr
@@ -542,29 +506,7 @@ Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI_IAudioClient_1I
     HRESULT hr;
     IID audioSessionGuid_;
 
-    if (audioSessionGuid)
-    {
-        const jchar *szAudioSessionGuid
-            = (*env)->GetStringChars(env, audioSessionGuid, NULL);
-
-        if (szAudioSessionGuid)
-        {
-            hr
-                = IIDFromString(
-                        (LPOLESTR) szAudioSessionGuid,
-                        &audioSessionGuid_);
-            (*env)->ReleaseStringChars(
-                    env,
-                    audioSessionGuid,
-                    szAudioSessionGuid);
-            if (FAILED(hr))
-                WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
-        }
-        else
-            hr = E_OUTOFMEMORY;
-    }
-    else
-        hr = S_OK;
+    hr = WASAPI_iidFromString(env, audioSessionGuid, &audioSessionGuid_);
     if (SUCCEEDED(hr))
     {
         hr
@@ -729,22 +671,7 @@ Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI_IMMDevice_1Acti
     IID iid_;
     void *pInterface;
 
-    if (iid)
-    {
-        const jchar *szIid = (*env)->GetStringChars(env, iid, NULL);
-
-        if (szIid)
-        {
-            hr = IIDFromString((LPOLESTR) szIid, &iid_);
-            (*env)->ReleaseStringChars(env, iid, szIid);
-            if (FAILED(hr))
-                WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
-        }
-        else
-            hr = E_OUTOFMEMORY;
-    }
-    else
-        hr = S_OK;
+    hr = WASAPI_iidFromString(env, iid, &iid_);
     if (SUCCEEDED(hr))
     {
         hr
@@ -834,22 +761,7 @@ Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI_IMMDevice_1Quer
     IID iid_;
     void *pvObject;
 
-    if (iid)
-    {
-        const jchar *szIid = (*env)->GetStringChars(env, iid, NULL);
-
-        if (szIid)
-        {
-            hr = IIDFromString((LPOLESTR) szIid, &iid_);
-            (*env)->ReleaseStringChars(env, iid, szIid);
-            if (FAILED(hr))
-                WASAPI_throwNewHResultException(env, hr, __func__, __LINE__);
-        }
-        else
-            hr = E_OUTOFMEMORY;
-    }
-    else
-        hr = S_OK;
+    hr = WASAPI_iidFromString(env, iid, &iid_);
     if (SUCCEEDED(hr))
     {
         hr
@@ -1254,6 +1166,13 @@ Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI_WAVEFORMATEX_1s
     ((WAVEFORMATEX *) (intptr_t) thiz)->wFormatTag = (WORD) wFormatTag;
 }
 
+JNIEXPORT jint JNICALL
+Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI_WAVEFORMATEX_1sizeof
+    (JNIEnv *env, jclass clazz)
+{
+    return (jint) sizeof(WAVEFORMATEX);
+}
+
 JNIEXPORT jint JNICALL
 JNI_OnLoad(JavaVM *vm, void *reserved)
 {
@@ -1668,66 +1587,3 @@ WASAPI_audiocopy
     }
     return numFramesWritten;
 }
-
-static void
-WASAPI_throwNewHResultException
-    (JNIEnv *env, HRESULT hresult, const char *func, unsigned int line)
-{
-    /*
-     * Print the system message (if any) which represents a human-readable
-     * format of the specified HRESULT value on the standard error to facilitate
-     * debugging.
-     */
-    {
-        LPTSTR message = NULL;
-        DWORD length
-            = FormatMessage(
-                    FORMAT_MESSAGE_ALLOCATE_BUFFER
-                        | FORMAT_MESSAGE_FROM_SYSTEM
-                        | FORMAT_MESSAGE_IGNORE_INSERTS,
-                    /* lpSource */ NULL,
-                    hresult,
-                    /* dwLanguageId */ 0,
-                    (LPTSTR) &message,
-                    /* nSize */ 0,
-                    /* Arguments */ NULL);
-        BOOL printed = FALSE;
-
-        if (message)
-        {
-            if (length)
-            {
-                _ftprintf(stderr, TEXT("%s:%u: %s\r\n"), func, line, message);
-                printed = TRUE;
-            }
-            LocalFree(message);
-        }
-        if (!printed)
-        {
-            _ftprintf(
-                    stderr,
-                    TEXT("%s:%u: HRESULT 0x%x\r\n"),
-                    func, line,
-                    (unsigned int) hresult);
-        }
-        fflush(stderr);
-    }
-
-    {
-        jclass clazz = WASAPI_hResultExceptionClass;
-
-        if (clazz)
-        {
-            jmethodID methodID = WASAPI_hResultExceptionMethodID;
-
-            if (methodID)
-            {
-                jobject t
-                    = (*env)->NewObject(env, clazz, methodID, (jint) hresult);
-
-                if (t)
-                    (*env)->Throw(env, (jthrowable) t);
-            }
-        }
-    }
-}
diff --git a/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI.h b/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI.h
index 27201c25c96e8846e46fb5c26f1c912eccbf0197..9f189d55603d188e7adf62a59bef863b8eb7b446 100644
--- a/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI.h
+++ b/src/native/windows/wasapi/org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI.h
@@ -471,6 +471,14 @@ JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi
 JNIEXPORT void JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI_WAVEFORMATEX_1setWFormatTag
   (JNIEnv *, jclass, jlong, jchar);
 
+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI
+ * Method:    WAVEFORMATEX_sizeof
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_wasapi_WASAPI_WAVEFORMATEX_1sizeof
+  (JNIEnv *, jclass);
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/src/org/jitsi/impl/neomedia/device/WASAPISystem.java b/src/org/jitsi/impl/neomedia/device/WASAPISystem.java
index 6e3ae9a04c7079d5453932c6e2df9f3a10ec5223..a3c35604f4eae6783a4db4b2c15565f70d2b4fc5 100644
--- a/src/org/jitsi/impl/neomedia/device/WASAPISystem.java
+++ b/src/org/jitsi/impl/neomedia/device/WASAPISystem.java
@@ -6,6 +6,7 @@
  */
 package org.jitsi.impl.neomedia.device;
 
+import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.VoiceCaptureDSP.*;
 import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*;
 
 import java.util.*;
@@ -214,6 +215,20 @@ public static void WAVEFORMATEX_fill(
                 /* cbSize */ (char) 0);
     }
 
+    /**
+     * The pointer to the native <tt>IMediaObject</tt> interface instance of the
+     * voice capture DMO that supports/implements the acoustic echo cancellation
+     * (AEC) feature.
+     */
+    private long aecIMediaObject;
+
+    /**
+     * The <tt>List</tt> of <tt>AudioFormat</tt>s supported by the voice capture
+     * DMO that supports/implements the acoustic echo cancellation (AEC)
+     * feature, i.e. {@link #aecIMediaObject}.
+     */
+    private List<AudioFormat> aecSupportedFormats;
+
     /**
      * The pointer to the native <tt>IMMDeviceEnumerator</tt> interface instance
      * which this <tt>WASAPISystem</tt> uses to enumerate the audio endpoint
@@ -304,35 +319,52 @@ protected void doInitialize()
 
             captureDevices = new ArrayList<CaptureDeviceInfo2>(count);
             playbackDevices = new ArrayList<CaptureDeviceInfo2>(count);
-            for (int i = 0; i < count; i++)
-            {
-                long iMMDevice
-                    = IMMDeviceCollection_Item(iMMDeviceCollection, i);
 
-                if (iMMDevice == 0)
-                    throw new RuntimeException("IMMDeviceCollection_Item");
+            if (count > 0)
+            {
+                // The acoustic echo cancellation (AEC) feature is optional.
+                maybeInitializeAEC();
                 try
                 {
-                    doInitializeIMMDevice(
-                            iMMDevice,
-                            captureDevices, playbackDevices);
-                }
-                catch (Throwable t)
-                {
-                    if (t instanceof ThreadDeath)
-                        throw (ThreadDeath) t;
-                    /*
-                     * We do not want the initialization of one IMMDevice to
-                     * prevent the initialization of other IMMDevices.
-                     */
-                    logger.error(
-                            "Failed to doInitialize for IMMDevice at index "
-                                + i,
-                            t);
+                    for (int i = 0; i < count; i++)
+                    {
+                        long iMMDevice
+                            = IMMDeviceCollection_Item(iMMDeviceCollection, i);
+
+                        if (iMMDevice == 0)
+                        {
+                            throw new RuntimeException(
+                                    "IMMDeviceCollection_Item");
+                        }
+                        try
+                        {
+                            doInitializeIMMDevice(
+                                    iMMDevice,
+                                    captureDevices, playbackDevices);
+                        }
+                        catch (Throwable t)
+                        {
+                            if (t instanceof ThreadDeath)
+                                throw (ThreadDeath) t;
+                            /*
+                             * We do not want the initialization of one
+                             * IMMDevice to prevent the initialization of other
+                             * IMMDevices.
+                             */
+                            logger.error(
+                                    "Failed to doInitialize for IMMDevice"
+                                        + " at index " + i,
+                                    t);
+                        }
+                        finally
+                        {
+                            IMMDevice_Release(iMMDevice);
+                        }
+                    }
                 }
                 finally
                 {
-                    IMMDevice_Release(iMMDevice);
+                    maybeUninitializeAEC();
                 }
             }
         }
@@ -411,28 +443,50 @@ private void doInitializeIMMDevice(
                 name = id;
 
             int dataFlow = getIMMDeviceDataFlow(iMMDevice);
-            CaptureDeviceInfo2 cdi2
-                = new CaptureDeviceInfo2(
-                        name,
-                        new MediaLocator(LOCATOR_PROTOCOL + ":" + id),
-                        formats.toArray(new Format[formats.size()]),
-                        id,
-                        /* transportType */ null,
-                        /* modelIdentifier */ null);
+            List<CaptureDeviceInfo2> devices;
 
             switch (dataFlow)
             {
             case eCapture:
-                captureDevices.add(cdi2);
+                /*
+                 * If acoustic echo cancellation (AEC) is used later on, the
+                 * CaptureDevice/DataSource implementation will support its
+                 * formats.
+                 */
+                List<AudioFormat> aecSupportedFormats
+                    = getAECSupportedFormats();
+
+                if (!aecSupportedFormats.isEmpty())
+                {
+                    for (AudioFormat format : aecSupportedFormats)
+                        if (!formats.contains(format))
+                            formats.add(format);
+                }
+
+                devices = captureDevices;
                 break;
             case eRender:
-                playbackDevices.add(cdi2);
+                devices = playbackDevices;
                 break;
             default:
+                devices = null;
                 logger.error(
                         "Failed to retrieve dataFlow from IMMEndpoint " + id);
                 break;
             }
+            if (devices != null)
+            {
+                CaptureDeviceInfo2 cdi2
+                    = new CaptureDeviceInfo2(
+                            name,
+                            new MediaLocator(LOCATOR_PROTOCOL + ":" + id),
+                            formats.toArray(new Format[formats.size()]),
+                            id,
+                            /* transportType */ null,
+                            /* modelIdentifier */ null);
+
+                devices.add(cdi2);
+            }
         }
     }
 
@@ -457,6 +511,35 @@ protected void finalize()
         }
     }
 
+    /**
+     * Gets the <tt>List</tt> of <tt>AudioFormat</tt>s supported by the voice
+     * capture DMO that supports/implements the acoustic echo cancellation (AEC)
+     * feature.
+     * <p>
+     * If an <tt>AudioFormat</tt> instance contained in the returned
+     * <tt>List</tt> is one of the <tt>formats</tt> of a
+     * <tt>CaptureDeviceInfo2</tt> or the <tt>supportedFormats</tt> of a
+     * <tt>FormatControl</tt> associated with a WASAPI
+     * <tt>CaptureDevice</tt>/<tt>DataSource</tt> or <tt>SourceStream</tt>, it
+     * signals that the <tt>AudioFormat</tt> in question has been included in
+     * that <tt>formats</tt> or <tt>supportedFormats</tt> only because it is
+     * supported by the voice capture DMO supporting/implementing the acoustic
+     * echo cancellation (AEC) feature.
+     * </p>
+     *
+     * @return the <tt>List</tt> of <tt>AudioFormat</tt>s supported by the voice
+     * capture DMO that supports/implements the acoustic echo cancellation (AEC)
+     * feature
+     */
+    public List<AudioFormat> getAECSupportedFormats()
+    {
+        List<AudioFormat> aecSupportedFormats = this.aecSupportedFormats;
+
+        if (aecSupportedFormats == null)
+            aecSupportedFormats = Collections.emptyList();
+        return aecSupportedFormats;
+    }
+
     /**
      * Gets a <tt>List</tt> of the <tt>AudioFormat</tt>s supported by a specific
      * <tt>IAudioClient</tt>.
@@ -548,7 +631,8 @@ private List<AudioFormat> getIAudioClientSupportedFormats(long iAudioClient)
                                         /* channels */ 1,
                                         AudioFormat.LITTLE_ENDIAN,
                                         AudioFormat.SIGNED,
-                                        /* frameSizeInBits */ Format.NOT_SPECIFIED,
+                                        /* frameSizeInBits */
+                                            Format.NOT_SPECIFIED,
                                         /* frameRate */ Format.NOT_SPECIFIED,
                                         Format.byteArray);
                             if (!supportedFormats.contains(supportedFormat))
@@ -580,6 +664,128 @@ private List<AudioFormat> getIAudioClientSupportedFormats(long iAudioClient)
         return supportedFormats;
     }
 
+    /**
+     * Gets a <tt>List</tt> of the <tt>AudioFormat</tt>s supported by a specific
+     * <tt>IMediaObject</tt>.
+     *
+     * @param iMediaObject the <tt>IMediaObject</tt> to get the <tt>List</tt> of
+     * supported <tt>AudioFormat</tt>s of
+     * @return a <tt>List</tt> of the <tt>AudioFormat</tt>s supported by the
+     * specified <tt>iMediaObject</tt>
+     * @throws HResultException if an error occurs while retrieving the
+     * <tt>List</tt> of <tt>AudioFormat</tt>s supported by the specified
+     * <tt>iMediaObject</tt> in a native WASAPI function which returns an
+     * <tt>HRESULT</tt> value
+     */
+    private List<AudioFormat> getIMediaObjectSupportedFormats(long iMediaObject)
+        throws HResultException
+    {
+        List<AudioFormat> supportedFormats = new ArrayList<AudioFormat>();
+        long pmt = MoCreateMediaType(/* cbFormat */ 0);
+
+        if (pmt == 0)
+            throw new OutOfMemoryError("MoCreateMediaType");
+        try
+        {
+            char cbSize = 0;
+            int cbFormat = WAVEFORMATEX_sizeof() + cbSize;
+            int hresult
+                = DMO_MEDIA_TYPE_fill(
+                        pmt,
+                        /* majortype */ MEDIATYPE_Audio,
+                        /* subtype */ MEDIASUBTYPE_PCM,
+                        /* bFixedSizeSamples */ true,
+                        /* bTemporalCompression */ false,
+                        /* lSampleSize */ 0,
+                        /* formattype */ FORMAT_WaveFormatEx,
+                        /* pUnk */ 0,
+                        cbFormat,
+                        waveformatex);
+
+            if (FAILED(hresult))
+                throw new HResultException(hresult, "DMO_MEDIA_TYPE_fill");
+
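+            /*
+             * Probe the media types which the voice capture DMO accepts on
+             * its output stream: mono and stereo, each of the supported
+             * sample rates, 16-bit and 8-bit samples. The flag
+             * DMO_SET_TYPEF_TEST_ONLY causes IMediaObject::SetOutputType to
+             * test the media type without actually setting it.
+             */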
+            for (char nChannels = 1; nChannels <= 2; nChannels++)
+            {
+                for (int i = 0; i < Constants.AUDIO_SAMPLE_RATES.length; i++)
+                {
+                    int nSamplesPerSec = (int) Constants.AUDIO_SAMPLE_RATES[i];
+
+                    for (char wBitsPerSample = 16;
+                            wBitsPerSample > 0;
+                            wBitsPerSample -= 8)
+                    {
+                        char nBlockAlign
+                            = (char) ((nChannels * wBitsPerSample) / 8);
+
+                        WASAPI.WAVEFORMATEX_fill(
+                                waveformatex,
+                                WAVE_FORMAT_PCM,
+                                nChannels,
+                                nSamplesPerSec,
+                                nSamplesPerSec * nBlockAlign,
+                                nBlockAlign,
+                                wBitsPerSample,
+                                cbSize);
+                        DMO_MEDIA_TYPE_setLSampleSize(pmt, wBitsPerSample / 8);
+
+                        try
+                        {
+                            hresult
+                                = IMediaObject_SetOutputType(
+                                        iMediaObject,
+                                        /* dwOutputStreamIndex */ 0,
+                                        pmt,
+                                        /* dwFlags */ DMO_SET_TYPEF_TEST_ONLY);
+                        }
+                        catch (HResultException hre)
+                        {
+                            /*
+                             * If the specified media type is not acceptable,
+                             * IMediaObject::SetOutputType should return
+                             * S_FALSE. Anyway, continue testing the other media
+                             * types.
+                             */
+                            hresult = hre.getHResult();
+                        }
+                        if (S_OK == hresult)
+                        {
+                            AudioFormat supportedFormat
+                                = new AudioFormat(
+                                        AudioFormat.LINEAR,
+                                        nSamplesPerSec,
+                                        wBitsPerSample,
+                                        nChannels,
+                                        AudioFormat.LITTLE_ENDIAN,
+                                        AudioFormat.SIGNED,
+                                        /* frameSizeInBits */
+                                            Format.NOT_SPECIFIED,
+                                        /* frameRate */ Format.NOT_SPECIFIED,
+                                        Format.byteArray);
+
+                            if (!supportedFormats.contains(supportedFormat))
+                                supportedFormats.add(supportedFormat);
+                        }
+                    }
+                }
+            }
+        }
+        finally
+        {
+            /*
+             * XXX MoDeleteMediaType is documented to internally call
+             * MoFreeMediaType to free the format block but the format block has
+             * not been internally allocated by MoInitMediaType.
+             */
+            DMO_MEDIA_TYPE_setCbFormat(pmt, 0);
+            DMO_MEDIA_TYPE_setFormattype(pmt, FORMAT_None);
+            DMO_MEDIA_TYPE_setPbFormat(pmt, 0);
+            MoDeleteMediaType(pmt);
+        }
+
+        return supportedFormats;
+    }
+
     /**
      * Gets an audio endpoint device that is identified by a specific endpoint
      * ID string.
@@ -687,6 +893,64 @@ protected String getRendererClassName()
         return WASAPIRenderer.class.getName();
     }
 
+    /**
+     * Initializes the voice capture DMO which supports/implements the
+     * acoustic echo cancellation (AEC) feature i.e. creates an
+     * <tt>IMediaObject</tt> instance with the CLSID
+     * <tt>CLSID_CWMAudioAEC</tt> and sets its
+     * <tt>MFPKEY_WMAAECMA_SYSTEM_MODE</tt> property to
+     * <tt>SINGLE_CHANNEL_AEC</tt>.
+     *
+     * @return a pointer to the <tt>IMediaObject</tt> interface of the
+     * initialized voice capture DMO
+     * @throws Exception if the initialization of the voice capture DMO fails
+     */
+    public long initializeAEC()
+        throws Exception
+    {
+        long iMediaObject = 0;
+        long iPropertyStore = 0;
+        long aecIMediaObject = 0;
+
+        try
+        {
+            iMediaObject
+                = CoCreateInstance(
+                        CLSID_CWMAudioAEC,
+                        /* pUnkOuter */ 0,
+                        CLSCTX_ALL,
+                        IID_IMediaObject);
+            if (iMediaObject == 0)
+                throw new RuntimeException("CoCreateInstance");
+            else
+            {
+                iPropertyStore
+                    = IMediaObject_QueryInterface(
+                            iMediaObject,
+                            IID_IPropertyStore);
+                if (iPropertyStore == 0)
+                    throw new RuntimeException("IMediaObject_QueryInterface");
+                else
+                {
+                    int hresult
+                        = IPropertyStore_SetValue(
+                                iPropertyStore,
+                                MFPKEY_WMAAECMA_SYSTEM_MODE,
+                                SINGLE_CHANNEL_AEC);
+
+                    if (FAILED(hresult))
+                    {
+                        throw new HResultException(
+                                hresult,
+                                "IPropertyStore_SetValue"
+                                    + " MFPKEY_WMAAECMA_SYSTEM_MODE");
+                    }
+                    else
+                    {
+                        aecIMediaObject = iMediaObject;
+                        iMediaObject = 0;
+                    }
+                }
+            }
+        }
+        finally
+        {
+            if (iPropertyStore != 0)
+                IPropertyStore_Release(iPropertyStore);
+            if (iMediaObject != 0)
+                IMediaObject_Release(iMediaObject);
+        }
+        return aecIMediaObject;
+    }
+
     /**
      * Initializes a new <tt>IAudioClient</tt> instance for an audio endpoint
      * device identified by a specific <tt>MediaLocator</tt>. The initialization
@@ -697,6 +961,7 @@ protected String getRendererClassName()
      * endpoint device to initialize a new <tt>IAudioClient</tt> instance for
      * @param dataFlow the flow of media data to be supported by the audio
      * endpoint device identified by the specified <tt>locator</tt>
+     * @param streamFlags zero or more <tt>AUDCLNT_STREAMFLAGS_XXX</tt> flags
+     * to be used in addition to the ones which the method itself adds (e.g.
+     * <tt>AUDCLNT_STREAMFLAGS_NOPERSIST</tt>) when initializing the new
+     * <tt>IAudioClient</tt> instance
      * @param eventHandle
      * @param hnsBufferDuration
      * @param formats an array of alternative <tt>AudioFormat</tt>s with which
@@ -717,6 +982,7 @@ protected String getRendererClassName()
     public long initializeIAudioClient(
             MediaLocator locator,
             DataFlow dataFlow,
+            int streamFlags,
             long eventHandle,
             long hnsBufferDuration,
             AudioFormat[] formats)
@@ -829,10 +1095,9 @@ public long initializeIAudioClient(
                     if (!waveformatexIsInitialized)
                         throw new IllegalArgumentException("formats");
 
-                    int streamFlags = AUDCLNT_STREAMFLAGS_NOPERSIST;
-
+                    streamFlags |= AUDCLNT_STREAMFLAGS_NOPERSIST;
                     if (eventHandle != 0)
-                        eventHandle |= AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
+                        streamFlags |= AUDCLNT_STREAMFLAGS_EVENTCALLBACK;
 
                     int hresult
                         = IAudioClient_Initialize(
@@ -882,6 +1147,81 @@ public long initializeIAudioClient(
         return ret;
     }
 
+    /**
+     * Initializes the acoustic echo cancellation (AEC) feature if possible and
+     * if it has not been initialized yet. The method swallows any exceptions
+     * because the feature in question is optional.
+     */
+    private void maybeInitializeAEC()
+    {
+        if ((aecIMediaObject != 0) || (aecSupportedFormats != null))
+            return;
+
+        try
+        {
+            long iMediaObject = initializeAEC();
+
+            try
+            {
+                List<AudioFormat> supportedFormats
+                    = getIMediaObjectSupportedFormats(iMediaObject);
+
+                if (!supportedFormats.isEmpty())
+                {
+                    aecIMediaObject = iMediaObject;
+                    iMediaObject = 0;
+                    aecSupportedFormats
+                        = Collections.unmodifiableList(
+                                supportedFormats);
+                }
+            }
+            finally
+            {
+                if (iMediaObject != 0)
+                    IMediaObject_Release(iMediaObject);
+            }
+        }
+        catch (Throwable t)
+        {
+            if (t instanceof ThreadDeath)
+                throw (ThreadDeath) t;
+            else
+            {
+                logger.error(
+                        "Failed to initialize acoustic echo cancellation (AEC)",
+                        t);
+            }
+        }
+    }
+
+    /**
+     * Uninitializes the acoustic echo cancellation (AEC) feature if it has been
+     * initialized. The method swallows any exceptions because the feature in
+     * question is optional.
+     */
+    private void maybeUninitializeAEC()
+    {
+        try
+        {
+            if (aecIMediaObject != 0)
+            {
+                IMediaObject_Release(aecIMediaObject);
+                aecIMediaObject = 0;
+            }
+        }
+        catch (Throwable t)
+        {
+            if (t instanceof ThreadDeath)
+                throw (ThreadDeath) t;
+            else
+            {
+                logger.error(
+                        "Failed to uninitialize acoustic echo cancellation (AEC)",
+                        t);
+            }
+        }
+    }
+
     /**
      * {@inheritDoc}
      */
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/AudioCaptureClient.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/AudioCaptureClient.java
new file mode 100644
index 0000000000000000000000000000000000000000..07f7b48e2938ba3526dabc25b1887c35cb4759af
--- /dev/null
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/AudioCaptureClient.java
@@ -0,0 +1,723 @@
+/*
+ * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
+ *
+ * Distributable under LGPL license.
+ * See terms of license at gnu.org.
+ */
+package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
+
+import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*;
+
+import java.io.*;
+import java.util.concurrent.*;
+
+import javax.media.*;
+import javax.media.format.*;
+import javax.media.protocol.*;
+
+import org.jitsi.impl.neomedia.device.*;
+import org.jitsi.impl.neomedia.jmfext.media.renderer.audio.*;
+import org.jitsi.util.*;
+
+public class AudioCaptureClient
+{
+    /**
+     * The default duration in milliseconds of the audio data to be read from
+     * an <tt>AudioCaptureClient</tt> in an invocation of
+     * {@link #read(byte[], int, int)}.
+     */
+    static final long DEFAULT_BUFFER_DURATION = 20;
+
+    /**
+     * The <tt>Logger</tt> used by the <tt>AudioCaptureClient</tt> class and its
+     * instances to log debug information.
+     */
+    private static final Logger logger
+        = Logger.getLogger(AudioCaptureClient.class);
+
+    /**
+     * The number of audio frames to be filled in a buffer in an invocation of
+     * {@link #read(byte[], int, int)}. If a {@link #transferHandler} has been
+     * specified, {@link #runInEventHandleCmd(Runnable)} will push via
+     * {@link BufferTransferHandler#transferData(PushBufferStream)} when
+     * {@link #iAudioClient} has made at least that many frames available.
+     */
+    private int bufferFrames;
+
+    /**
+     * The size/length in bytes of the buffer to be filled in an invocation of
+     * {@link #read(byte[], int, int)}.
+     */
+    final int bufferSize;
+
+    /**
+     * The indicator which determines whether the audio stream represented by
+     * this instance, {@link #iAudioClient} and {@link #iAudioCaptureClient} is
+     * busy and, consequently, its state should not be modified. For example,
+     * the audio stream is busy during the execution of
+     * {@link #read(byte[], int, int)}.
+     */
+    private boolean busy;
+
+    /**
+     * The length in milliseconds of the interval between successive, periodic
+     * processing passes by the audio engine on the data in the endpoint buffer.
+     */
+    final long devicePeriod;
+
+    /**
+     * The number of channels with which this <tt>AudioCaptureClient</tt> has
+     * been connected.
+     */
+    private int dstChannels;
+
+    /**
+     * The frame size in bytes with which this <tt>AudioCaptureClient</tt> has
+     * been connected. It is the product of {@link #dstSampleSize} and
+     * {@link #dstChannels}.
+     */
+    private int dstFrameSize;
+
+    /**
+     * The sample size in bytes with which this <tt>AudioCaptureClient</tt>
+     * has been connected.
+     */
+    private int dstSampleSize;
+
+    /**
+     * The event handle that the system signals when an audio buffer is ready to
+     * be processed by the client.
+     */
+    private long eventHandle;
+
+    /**
+     * The <tt>Runnable</tt> which is scheduled by this
+     * <tt>AudioCaptureClient</tt>, executed by {@link #eventHandleExecutor}
+     * and which waits for {@link #eventHandle} to be signaled.
+     */
+    private Runnable eventHandleCmd;
+
+    /**
+     * The <tt>Executor</tt> implementation which is to execute
+     * {@link #eventHandleCmd}.
+     */
+    private Executor eventHandleExecutor;
+
+    /**
+     * The WASAPI <tt>IAudioCaptureClient</tt> obtained from
+     * {@link #iAudioClient} which enables this <tt>AudioCaptureClient</tt> to
+     * read input data from the capture endpoint buffer.
+     */
+    private long iAudioCaptureClient;
+
+    /**
+     * The WASAPI <tt>IAudioClient</tt> instance which enables this
+     * <tt>AudioCaptureClient</tt> to create and initialize an audio stream
+     * between this <tt>AudioCaptureClient</tt> and the audio engine of the
+     * associated audio endpoint device.
+     */
+    private long iAudioClient;
+
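+    /**
+     * The internal buffer in which the audio data read from the capture
+     * endpoint buffer via {@link #iAudioCaptureClient} is accumulated until
+     * it is read out of this instance.
+     */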
+    private byte[] remainder;
+
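+    /**
+     * The number of bytes in {@link #remainder} which represent valid audio
+     * data waiting to be read out of this instance.
+     */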
+    private int remainderLength;
+
+    /**
+     * The number of channels with which {@link #iAudioClient} has been
+     * initialized.
+     */
+    private int srcChannels;
+
+    /**
+     * The sample size in bytes with which {@link #iAudioClient} has been
+     * initialized.
+     */
+    private int srcSampleSize;
+
+    /**
+     * The indicator which determines whether this <tt>AudioCaptureClient</tt>
+     * is started i.e. there has been a successful invocation of
+     * {@link #start()}
+     * without an intervening invocation of {@link #stop()}.
+     */
+    private boolean started;
+
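+    /**
+     * The <tt>BufferTransferHandler</tt> to be invoked by
+     * {@link #runInEventHandleCmd(Runnable)} when this instance has
+     * accumulated enough audio data to be read via
+     * {@link #read(byte[], int, int)}.
+     */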
+    private final BufferTransferHandler transferHandler;
+
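+    /**
+     * Initializes a new <tt>AudioCaptureClient</tt> instance.
+     *
+     * @param audioSystem the <tt>WASAPISystem</tt> which is to initialize the
+     * <tt>IAudioClient</tt> of the new instance
+     * @param locator the <tt>MediaLocator</tt> which identifies the audio
+     * endpoint device to capture audio data from
+     * @param dataFlow the <tt>AudioSystem.DataFlow</tt> of the audio endpoint
+     * device identified by <tt>locator</tt>
+     * @param streamFlags zero or more <tt>AUDCLNT_STREAMFLAGS_XXX</tt> flags
+     * with which the <tt>IAudioClient</tt> of the new instance is to be
+     * initialized
+     * @param outFormat the <tt>AudioFormat</tt> of the audio data to be made
+     * available via {@link #read(byte[], int, int)}
+     * @param transferHandler the <tt>BufferTransferHandler</tt> to be invoked
+     * when the new instance has audio data available to be read
+     * @throws Exception if the initialization of the new instance fails
+     */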
+    public AudioCaptureClient(
+            WASAPISystem audioSystem,
+            MediaLocator locator,
+            AudioSystem.DataFlow dataFlow,
+            int streamFlags,
+            AudioFormat outFormat,
+            BufferTransferHandler transferHandler)
+        throws Exception
+    {
+        AudioFormat[] formats
+            = WASAPISystem.getFormatsToInitializeIAudioClient(outFormat);
+        long eventHandle = CreateEvent(0, false, false, null);
+
+        if (eventHandle == 0)
+            throw new IOException("CreateEvent");
+        try
+        {
+            /*
+             * Presently, we attempt to have the same buffer length in
+             * WASAPIRenderer and WASAPIStream. There is no particular
+             * reason/requirement to do so.
+             */
+            long hnsBufferDuration = 3 * DEFAULT_BUFFER_DURATION * 10000;
+            long iAudioClient
+                = audioSystem.initializeIAudioClient(
+                        locator,
+                        dataFlow,
+                        streamFlags,
+                        eventHandle,
+                        hnsBufferDuration,
+                        formats);
+
+            if (iAudioClient == 0)
+            {
+                throw new ResourceUnavailableException(
+                        "Failed to initialize IAudioClient"
+                            + " for MediaLocator " + locator
+                            + " and AudioSystem.DataFlow " + dataFlow);
+            }
+            try
+            {
+                /*
+                 * Determine the AudioFormat with which the iAudioClient has
+                 * been initialized.
+                 */
+                AudioFormat inFormat = null;
+
+                for (AudioFormat aFormat : formats)
+                {
+                    if (aFormat != null)
+                    {
+                        inFormat = aFormat;
+                        break;
+                    }
+                }
+
+                long iAudioCaptureClient
+                    = IAudioClient_GetService(
+                            iAudioClient,
+                            IID_IAudioCaptureClient);
+
+                if (iAudioCaptureClient == 0)
+                {
+                    throw new ResourceUnavailableException(
+                            "IAudioClient_GetService"
+                                + "(IID_IAudioCaptureClient)");
+                }
+                try
+                {
+                    /*
+                     * The value hnsDefaultDevicePeriod is documented to
+                     * specify the default scheduling period for a
+                     * shared-mode stream.
+                     */
+                    long devicePeriod
+                        = IAudioClient_GetDefaultDevicePeriod(iAudioClient)
+                            / 10000L;
+
+                    int numBufferFrames
+                        = IAudioClient_GetBufferSize(iAudioClient);
+                    int sampleRate = (int) inFormat.getSampleRate();
+                    long bufferDuration
+                        = numBufferFrames * 1000 / sampleRate;
+
+                    /*
+                     * We will very likely be inefficient if we fail to
+                     * synchronize with the scheduling period of the audio
+                     * engine but we have to make do with what we have.
+                     */
+                    if (devicePeriod <= 1)
+                    {
+                        devicePeriod = bufferDuration / 2;
+                        if ((devicePeriod
+                                    > WASAPISystem.DEFAULT_DEVICE_PERIOD)
+                                || (devicePeriod <= 1))
+                            devicePeriod
+                                = WASAPISystem.DEFAULT_DEVICE_PERIOD;
+                    }
+                    this.devicePeriod = devicePeriod;
+
+                    srcChannels = inFormat.getChannels();
+                    srcSampleSize
+                        = WASAPISystem.getSampleSizeInBytes(inFormat);
+
+                    dstChannels = outFormat.getChannels();
+                    dstSampleSize
+                        = WASAPISystem.getSampleSizeInBytes(outFormat);
+
+                    dstFrameSize = dstSampleSize * dstChannels;
+                    bufferFrames
+                        = (int)
+                            (DEFAULT_BUFFER_DURATION * sampleRate / 1000);
+                    bufferSize = dstFrameSize * bufferFrames;
+
+                    remainder = new byte[numBufferFrames * dstFrameSize];
+                    remainderLength = 0;
+
+                    this.eventHandle = eventHandle;
+                    eventHandle = 0;
+                    this.iAudioClient = iAudioClient;
+                    iAudioClient = 0;
+                    this.iAudioCaptureClient = iAudioCaptureClient;
+                    iAudioCaptureClient = 0;
+
+                    this.transferHandler = transferHandler;
+                }
+                finally
+                {
+                    if (iAudioCaptureClient != 0)
+                        IAudioCaptureClient_Release(iAudioCaptureClient);
+                }
+            }
+            finally
+            {
+                if (iAudioClient != 0)
+                    IAudioClient_Release(iAudioClient);
+            }
+        }
+        finally
+        {
+            if (eventHandle != 0)
+                CloseHandle(eventHandle);
+        }
+    }
+
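+    /**
+     * Releases the resources acquired by this instance (e.g. the event
+     * <tt>HANDLE</tt>, the <tt>IAudioClient</tt> and the
+     * <tt>IAudioCaptureClient</tt>) and prepares it to be garbage collected.
+     */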
+    public void close()
+    {
+        if (iAudioCaptureClient != 0)
+        {
+            IAudioCaptureClient_Release(iAudioCaptureClient);
+            iAudioCaptureClient = 0;
+        }
+        if (iAudioClient != 0)
+        {
+            IAudioClient_Release(iAudioClient);
+            iAudioClient = 0;
+        }
+        if (eventHandle != 0)
+        {
+            try
+            {
+                CloseHandle(eventHandle);
+            }
+            catch (HResultException hre)
+            {
+                // The event HANDLE will be leaked.
+                logger.warn("Failed to close event HANDLE.", hre);
+            }
+            eventHandle = 0;
+        }
+
+        remainder = null;
+        remainderLength = 0;
+        started = false;
+    }
+
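+    /**
+     * Reads as much audio data as possible from {@link #remainder} into a
+     * specific <tt>byte</tt> array.
+     *
+     * @param buffer the <tt>byte</tt> array to read the audio data into
+     * @param offset the offset in <tt>buffer</tt> at which to begin writing
+     * @param length the maximum number of bytes to write into <tt>buffer</tt>
+     * @return the number of bytes read into <tt>buffer</tt>
+     * @throws IOException if the reading of the audio data fails
+     */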
+    private int doRead(byte[] buffer, int offset, int length)
+        throws IOException
+    {
+        int toRead = Math.min(length, remainderLength);
+        int read;
+
+        if (toRead == 0)
+            read = 0;
+        else
+        {
+            System.arraycopy(remainder, 0, buffer, offset, toRead);
+            popFromRemainder(toRead);
+            read = toRead;
+        }
+        return read;
+    }
+
+    /**
+     * Pops a specific number of bytes from {@link #remainder}. For example,
+     * because such a number of bytes have been read from <tt>remainder</tt> and
+     * written into a <tt>Buffer</tt>.
+     *
+     * @param length the number of bytes to pop from <tt>remainder</tt>
+     */
+    private void popFromRemainder(int length)
+    {
+        remainderLength
+            = WASAPIRenderer.pop(remainder, remainderLength, length);
+    }
+
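+    /**
+     * Reads audio data from this instance into a specific <tt>byte</tt>
+     * array.
+     *
+     * @param buffer the <tt>byte</tt> array to read the audio data into
+     * @param offset the offset in <tt>buffer</tt> at which to begin writing
+     * @param length the maximum number of bytes to write into <tt>buffer</tt>
+     * @return the number of bytes read into <tt>buffer</tt>
+     * @throws IOException if this instance is not connected or started or if
+     * the reading of the audio data fails
+     */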
+    public int read(byte[] buffer, int offset, int length)
+        throws IOException
+    {
+        String message;
+
+        synchronized (this)
+        {
+            if ((iAudioClient == 0) || (iAudioCaptureClient == 0))
+                message = getClass().getName() + " is disconnected.";
+            else if (!started)
+                message = getClass().getName() + " is stopped.";
+            else
+            {
+                message = null;
+                busy = true;
+            }
+        }
+        if (message != null)
+            throw new IOException(message);
+
+        int read;
+        Throwable cause;
+
+        try
+        {
+            read = doRead(buffer, offset, length);
+            cause = null;
+        }
+        catch (Throwable t)
+        {
+            read = 0;
+            cause = t;
+        }
+        finally
+        {
+            synchronized (this)
+            {
+                busy = false;
+                notifyAll();
+            }
+        }
+        if (cause != null)
+        {
+            if (cause instanceof ThreadDeath)
+                throw (ThreadDeath) cause;
+            else if (cause instanceof IOException)
+                throw (IOException) cause;
+            else
+            {
+                IOException ioe = new IOException();
+
+                ioe.initCause(cause);
+                throw ioe;
+            }
+        }
+        return read;
+    }
+
+    /**
+     * Reads from {@link #iAudioCaptureClient} into {@link #remainder} and
+     * returns a non-<tt>null</tt> <tt>BufferTransferHandler</tt> if this
+     * instance is to push audio data.
+     *
+     * @return a <tt>BufferTransferHandler</tt> if this instance is to push
+     * audio data; otherwise, <tt>null</tt>
+     */
+    private BufferTransferHandler readInEventHandleCmd()
+    {
+        /*
+         * Determine the size in bytes of the next data packet in the capture
+         * endpoint buffer.
+         */
+        int numFramesInNextPacket;
+
+        try
+        {
+            numFramesInNextPacket
+                = IAudioCaptureClient_GetNextPacketSize(iAudioCaptureClient);
+        }
+        catch (HResultException hre)
+        {
+            numFramesInNextPacket = 0; // Silence the compiler.
+            logger.error("IAudioCaptureClient_GetNextPacketSize", hre);
+        }
+
+        if (numFramesInNextPacket != 0)
+        {
+            int toRead = numFramesInNextPacket * dstFrameSize;
+
+            /*
+             * Make sure there is enough room in remainder to accommodate
+             * toRead.
+             */
+            int toPop = toRead - (remainder.length - remainderLength);
+
+            if (toPop > 0)
+                popFromRemainder(toPop);
+
+            try
+            {
+                int read
+                    = IAudioCaptureClient_Read(
+                            iAudioCaptureClient,
+                            remainder, remainderLength, toRead,
+                            srcSampleSize, srcChannels,
+                            dstSampleSize, dstChannels);
+
+                remainderLength += read;
+            }
+            catch (HResultException hre)
+            {
+                logger.error("IAudioCaptureClient_Read", hre);
+            }
+        }
+
+        return (remainderLength >= bufferSize) ? transferHandler : null;
+    }
+
+    /**
+     * Runs/executes in the thread associated with a specific <tt>Runnable</tt>
+     * initialized to wait for {@link #eventHandle} to be signaled.
+     *
+     * @param eventHandleCmd the <tt>Runnable</tt> which has been initialized to
+     * wait for <tt>eventHandle</tt> to be signaled and in whose associated
+     * thread the method is invoked
+     */
+    private void runInEventHandleCmd(Runnable eventHandleCmd)
+    {
+        try
+        {
+            AbstractAudioRenderer.useAudioThreadPriority();
+
+            do
+            {
+                long eventHandle;
+                BufferTransferHandler transferHandler;
+
+                synchronized (this)
+                {
+                    /*
+                     * Does this AudioCaptureClient still want eventHandleCmd
+                     * to execute?
+                     */
+                    if (!eventHandleCmd.equals(this.eventHandleCmd))
+                        break;
+                    // Is this AudioCaptureClient still connected and started?
+                    if ((iAudioClient == 0)
+                            || (iAudioCaptureClient == 0)
+                            || !started)
+                        break;
+
+                    /*
+                     * The value of eventHandle will remain valid while this
+                     * AudioCaptureClient wants eventHandleCmd to execute.
+                     */
+                    eventHandle = this.eventHandle;
+                    if (eventHandle == 0)
+                        throw new IllegalStateException("eventHandle");
+
+                    waitWhileBusy();
+                    busy = true;
+                }
+                try
+                {
+                    transferHandler = readInEventHandleCmd();
+                }
+                finally
+                {
+                    synchronized (this)
+                    {
+                        busy = false;
+                        notifyAll();
+                    }
+                }
+
+                if (transferHandler != null)
+                {
+                    try
+                    {
+                        transferHandler.transferData(null);
+                        /*
+                         * If the transferData implementation throws an
+                         * exception, we will WaitForSingleObject in order to
+                         * give the application time to recover.
+                         */
+                        continue;
+                    }
+                    catch (Throwable t)
+                    {
+                        if (t instanceof ThreadDeath)
+                            throw (ThreadDeath) t;
+                        else
+                        {
+                            logger.error(
+                                    "BufferTransferHandler.transferData",
+                                    t);
+                        }
+                    }
+                }
+
+                int wfso;
+
+                try
+                {
+                    wfso = WaitForSingleObject(eventHandle, devicePeriod);
+                }
+                catch (HResultException hre)
+                {
+                    /*
+                     * WaitForSingleObject will throw HResultException only in
+                     * the case of WAIT_FAILED. Even if it didn't, it would
+                     * still be a failure from our point of view.
+                     */
+                    wfso = WAIT_FAILED;
+                    logger.error("WaitForSingleObject", hre);
+                }
+                /*
+                 * If the function WaitForSingleObject fails once, it will very
+                 * likely fail forever. Bail out of a possible busy wait.
+                 */
+                if ((wfso == WAIT_FAILED) || (wfso == WAIT_ABANDONED))
+                    break;
+            }
+            while (true);
+        }
+        finally
+        {
+            synchronized (this)
+            {
+                if (eventHandleCmd.equals(this.eventHandleCmd))
+                {
+                    this.eventHandleCmd = null;
+                    notifyAll();
+                }
+            }
+        }
+    }
+
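+    /**
+     * Starts the capture of audio data by this instance i.e. starts
+     * {@link #iAudioClient} and, if an event <tt>HANDLE</tt> is in use,
+     * schedules {@link #eventHandleCmd} to wait for it to be signaled.
+     *
+     * @throws IOException if the starting of the capture of audio data fails
+     */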
+    public synchronized void start()
+        throws IOException
+    {
+        if (iAudioClient != 0)
+        {
+            waitWhileBusy();
+            waitWhileEventHandleCmd();
+
+            try
+            {
+                IAudioClient_Start(iAudioClient);
+                started = true;
+
+                remainderLength = 0;
+                if ((eventHandle != 0) && (this.eventHandleCmd == null))
+                {
+                    Runnable eventHandleCmd
+                        = new Runnable()
+                        {
+                            public void run()
+                            {
+                                runInEventHandleCmd(this);
+                            }
+                        };
+                    boolean submitted = false;
+
+                    try
+                    {
+                        if (eventHandleExecutor == null)
+                        {
+                            eventHandleExecutor
+                                = Executors.newSingleThreadExecutor();
+                        }
+
+                        this.eventHandleCmd = eventHandleCmd;
+                        eventHandleExecutor.execute(eventHandleCmd);
+                        submitted = true;
+                    }
+                    finally
+                    {
+                        if (!submitted
+                                && eventHandleCmd.equals(this.eventHandleCmd))
+                            this.eventHandleCmd = null;
+                    }
+                }
+            }
+            catch (HResultException hre)
+            {
+                /*
+                 * If IAudioClient_Start is invoked multiple times without
+                 * intervening IAudioClient_Stop, it will likely return/throw
+                 * AUDCLNT_E_NOT_STOPPED.
+                 */
+                if (hre.getHResult() != AUDCLNT_E_NOT_STOPPED)
+                    WASAPIStream.throwNewIOException("IAudioClient_Start", hre);
+            }
+        }
+    }
+
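+    /**
+     * Stops the capture of audio data by this instance i.e. stops
+     * {@link #iAudioClient} and waits for {@link #eventHandleCmd} (if any) to
+     * complete.
+     *
+     * @throws IOException if the stopping of the capture of audio data fails
+     */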
+    public synchronized void stop()
+        throws IOException
+    {
+        if (iAudioClient != 0)
+        {
+            waitWhileBusy();
+
+            try
+            {
+                /*
+                 * If IAudioClient_Stop is invoked multiple times without
+                 * intervening IAudioClient_Start, it is documented to return
+                 * S_FALSE.
+                 */
+                IAudioClient_Stop(iAudioClient);
+                started = false;
+
+                waitWhileEventHandleCmd();
+                remainderLength = 0;
+            }
+            catch (HResultException hre)
+            {
+                WASAPIStream.throwNewIOException("IAudioClient_Stop", hre);
+            }
+        }
+    }
+
+    /**
+     * Waits on this instance while the value of {@link #busy} is equal to
+     * <tt>true</tt>.
+     */
+    private synchronized void waitWhileBusy()
+    {
+        boolean interrupted = false;
+
+        while (busy)
+        {
+            try
+            {
+                wait(devicePeriod);
+            }
+            catch (InterruptedException ie)
+            {
+                interrupted = true;
+            }
+        }
+        if (interrupted)
+            Thread.currentThread().interrupt();
+    }
+
+    /**
+     * Waits on this instance while the value of {@link #eventHandleCmd} is
+     * non-<tt>null</tt>.
+     */
+    private synchronized void waitWhileEventHandleCmd()
+    {
+        if (eventHandle == 0)
+            throw new IllegalStateException("eventHandle");
+
+        boolean interrupted = false;
+
+        while (eventHandleCmd != null)
+        {
+            try
+            {
+                wait(devicePeriod);
+            }
+            catch (InterruptedException ie)
+            {
+                interrupted = true;
+            }
+        }
+        if (interrupted)
+            Thread.currentThread().interrupt();
+    }
+}
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java
index d07cd6946d0afe2df7c099d18428bc3d5aeb3afd..e1ec07feffd3d2dd02e4e59824432d48c3a6cb1b 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/DataSource.java
@@ -7,10 +7,13 @@
 package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
 
 import java.io.*;
+import java.util.*;
 
 import javax.media.*;
 import javax.media.control.*;
+import javax.media.format.*;
 
+import org.jitsi.impl.neomedia.device.*;
 import org.jitsi.impl.neomedia.jmfext.media.protocol.*;
 import org.jitsi.util.*;
 
@@ -30,6 +33,18 @@ public class DataSource
      */
     private static final Logger logger = Logger.getLogger(DataSource.class);
 
+    /**
+     * The indicator which determines whether the voice capture DMO is to be
+     * used to perform echo cancellation and/or noise reduction.
+     */
+    final boolean aec;
+
+    /**
+     * The <tt>WASAPISystem</tt> which has contributed this
+     * <tt>CaptureDevice</tt>/<tt>DataSource</tt>.
+     */
+    final WASAPISystem audioSystem;
+
     /**
      * Initializes a new <tt>DataSource</tt> instance.
      */
@@ -48,6 +63,11 @@ public DataSource()
     public DataSource(MediaLocator locator)
     {
         super(locator);
+
+        audioSystem
+            = (WASAPISystem)
+                AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_WASAPI);
+        aec = audioSystem.isDenoise() || audioSystem.isEchoCancel();
     }
 
     /**
@@ -110,4 +130,94 @@ protected void doDisconnect()
             super.doDisconnect();
         }
     }
+
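+    /**
+     * Gets the <tt>Format</tt>s of audio data supported by the capture
+     * endpoint device associated with this instance i.e. independently of the
+     * voice capture DMO which implements the acoustic echo cancellation (AEC)
+     * feature.
+     *
+     * @return the <tt>Format</tt>s of audio data supported by the capture
+     * endpoint device associated with this instance
+     */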
+    Format[] getIAudioClientSupportedFormats()
+    {
+        return
+            getIAudioClientSupportedFormats(
+                    /* streamIndex */ 0,
+                    audioSystem.getAECSupportedFormats());
+    }
+
+    private Format[] getIAudioClientSupportedFormats(
+            int streamIndex,
+            List<AudioFormat> aecSupportedFormats)
+    {
+        Format[] superSupportedFormats = super.getSupportedFormats(streamIndex);
+
+        /*
+         * If the capture endpoint device is reported to support no Format, then
+         * acoustic echo cancellation (AEC) will surely not work.
+         */
+        if ((superSupportedFormats == null)
+                || (superSupportedFormats.length == 0))
+            return superSupportedFormats;
+
+        Format[] array;
+
+        if (aecSupportedFormats.isEmpty())
+            array = superSupportedFormats;
+        else
+        {
+            /*
+             * Filter out the Formats which have been added to the list of
+             * supported Formats because of the voice capture DMO alone.
+             */
+            List<Format> list
+                = new ArrayList<Format>(superSupportedFormats.length);
+
+            for (Format superSupportedFormat : superSupportedFormats)
+            {
+                /*
+                 * Reference equality to an aecSupportedFormat signals that the
+                 * superSupportedFormat is not supported by the capture endpoint
+                 * device and is supported by the voice capture DMO.
+                 */
+                boolean equals = false;
+
+                for (Format aecSupportedFormat : aecSupportedFormats)
+                {
+                    if (superSupportedFormat == aecSupportedFormat)
+                    {
+                        equals = true;
+                        break;
+                    }
+                }
+                if (!equals)
+                    list.add(superSupportedFormat);
+            }
+            array = list.toArray(new Format[list.size()]);
+        }
+        return array;
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * The <tt>Format</tt>s supported by this
+     * <tt>CaptureDevice</tt>/<tt>DataSource</tt> are either the ones supported
+     * by the capture endpoint device or the ones supported by the voice capture
+     * DMO that implements the acoustic echo cancellation (AEC) feature
+     * depending on whether the feature in question is disabled or enabled.
+     */
+    @Override
+    protected Format[] getSupportedFormats(int streamIndex)
+    {
+        List<AudioFormat> aecSupportedFormats
+            = audioSystem.getAECSupportedFormats();
+
+        if (aec)
+        {
+            return
+                aecSupportedFormats.toArray(
+                        new Format[aecSupportedFormats.size()]);
+        }
+        else
+        {
+            return
+                getIAudioClientSupportedFormats(
+                        streamIndex,
+                        aecSupportedFormats);
+        }
+    }
 }
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java
index 6ae926827bf02dc15f8ba6cdd68204f78ef8cc30..52e4b4fa7dae679e5e9c0c7e8728b2565ba72784 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/HResultException.java
@@ -29,7 +29,7 @@ public class HResultException
      */
     public HResultException(int hresult)
     {
-        this(hresult, "0x" + Long.toHexString(hresult & 0xffffffffL));
+        this(hresult, toString(hresult));
     }
 
     /**
@@ -56,4 +56,18 @@ public int getHResult()
     {
         return hresult;
     }
+
+    /**
+     * Returns a <tt>String</tt> representation of a specific
+     * <tt>HRESULT</tt> value.
+     *
+     * @param hresult the <tt>HRESULT</tt> value of which a <tt>String</tt>
+     * representation is to be returned
+     * @return a <tt>String</tt> representation of the specified
+     * <tt>hresult</tt>
+     */
+    public static String toString(int hresult)
+    {
+        return "0x" + Long.toHexString(hresult & 0xffffffffL);
+    }
 }
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/VoiceCaptureDSP.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/VoiceCaptureDSP.java
new file mode 100644
index 0000000000000000000000000000000000000000..fac7536f7160a5f7afaf0ce7553337d378e3448e
--- /dev/null
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/VoiceCaptureDSP.java
@@ -0,0 +1,258 @@
+/*
+ * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
+ *
+ * Distributable under LGPL license.
+ * See terms of license at gnu.org.
+ */
+package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
+
+import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*;
+
+import org.jitsi.util.*;
+
+/**
+ * Defines the native interface of Voice Capture DSP as used by
+ * <tt>WASAPISystem</tt> and its associated <tt>CaptureDevice</tt>,
+ * <tt>DataSource</tt> and <tt>Renderer</tt> implementations.
+ *
+ * @author Lyubomir Marinov
+ */
+public class VoiceCaptureDSP
+{
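+    /**
+     * The CLSID of the voice capture DSP/DMO which implements, among other
+     * features, the acoustic echo cancellation (AEC) feature.
+     */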
+    public static final String CLSID_CWMAudioAEC
+        = "{745057c7-f353-4f2d-a7ee-58434477730e}";
+
+    public static final int DMO_E_NOTACCEPTING = 0x80040204;
+
+    public static final int DMO_INPUT_STATUSF_ACCEPT_DATA = 0x1;
+
+    public static final int DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE = 0x01000000;
+
+    public static final int DMO_SET_TYPEF_TEST_ONLY = 0x1;
+
+    public static final String FORMAT_None
+        = "{0f6417d6-c318-11d0-a43f-00a0c9223196}";
+
+    public static final String FORMAT_WaveFormatEx
+        = "{05589f81-c356-11ce-bf01-00aa0055595a}";
+
+    public static final String IID_IMediaObject
+        = "{d8ad0f58-5494-4102-97c5-ec798e59bcf4}";
+
+    public static final String IID_IPropertyStore
+        = "{886d8eeb-8cf2-4446-8d02-cdba1dbdcf99}";
+
+    public static final String MEDIASUBTYPE_PCM
+        = "{00000001-0000-0010-8000-00AA00389B71}";
+
+    public static final String MEDIATYPE_Audio
+        = "{73647561-0000-0010-8000-00aa00389b71}";
+
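+    /**
+     * The <tt>PROPERTYKEY</tt> of the property which specifies whether the
+     * voice capture DSP operates in source mode or filter mode.
+     */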
+    public static final long MFPKEY_WMAAECMA_DMO_SOURCE_MODE;
+
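+    /**
+     * The <tt>PROPERTYKEY</tt> of the property which sets the processing mode
+     * of the voice capture DSP (e.g. {@link #SINGLE_CHANNEL_AEC}).
+     */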
+    public static final long MFPKEY_WMAAECMA_SYSTEM_MODE;
+
+    /**
+     * The value of the <tt>AEC_SYSTEM_MODE</tt> enumeration which is used with
+     * the <tt>MFPKEY_WMAAECMA_SYSTEM_MODE</tt> property to indicate that the
+     * Voice Capture DSP is to operate in acoustic echo cancellation (AEC) only
+     * mode.
+     */
+    public static final int SINGLE_CHANNEL_AEC = 0;
+
+    static
+    {
+        String pszString = null;
+        long _MFPKEY_WMAAECMA_DMO_SOURCE_MODE = 0;
+        long _MFPKEY_WMAAECMA_SYSTEM_MODE = 0;
+        /*
+         * XXX The pointer to native memory returned by PSPropertyKeyFromString
+         * is to be freed via CoTaskMemFree.
+         */
+        boolean coTaskMemFree = true;
+
+        try
+        {
+            pszString = "{6f52c567-0360-4bd2-9617-ccbf1421c939} 3";
+            _MFPKEY_WMAAECMA_DMO_SOURCE_MODE
+                = PSPropertyKeyFromString(pszString);
+            if (_MFPKEY_WMAAECMA_DMO_SOURCE_MODE == 0)
+            {
+                throw new IllegalStateException(
+                        "MFPKEY_WMAAECMA_DMO_SOURCE_MODE");
+            }
+
+            pszString = "{6f52c567-0360-4bd2-9617-ccbf1421c939} 2";
+            _MFPKEY_WMAAECMA_SYSTEM_MODE = PSPropertyKeyFromString(pszString);
+            if (_MFPKEY_WMAAECMA_SYSTEM_MODE == 0)
+                throw new IllegalStateException("MFPKEY_WMAAECMA_SYSTEM_MODE");
+
+            coTaskMemFree = false;
+        }
+        catch (HResultException hre)
+        {
+            Logger logger = Logger.getLogger(VoiceCaptureDSP.class);
+
+            logger.error("PSPropertyKeyFromString(" + pszString + ")", hre);
+            throw new RuntimeException(hre);
+        }
+        finally
+        {
+            /*
+             * XXX The pointer to native memory returned by
+             * PSPropertyKeyFromString is to be freed via CoTaskMemFree.
+             */
+            if (coTaskMemFree)
+            {
+                if (_MFPKEY_WMAAECMA_DMO_SOURCE_MODE != 0)
+                {
+                    CoTaskMemFree(_MFPKEY_WMAAECMA_DMO_SOURCE_MODE);
+                    _MFPKEY_WMAAECMA_DMO_SOURCE_MODE = 0;
+                }
+                if (_MFPKEY_WMAAECMA_SYSTEM_MODE != 0)
+                {
+                    CoTaskMemFree(_MFPKEY_WMAAECMA_SYSTEM_MODE);
+                    _MFPKEY_WMAAECMA_SYSTEM_MODE = 0;
+                }
+            }
+        }
+
+        MFPKEY_WMAAECMA_DMO_SOURCE_MODE = _MFPKEY_WMAAECMA_DMO_SOURCE_MODE;
+        MFPKEY_WMAAECMA_SYSTEM_MODE = _MFPKEY_WMAAECMA_SYSTEM_MODE;
+    }
+
+    public static native int DMO_MEDIA_TYPE_fill(
+            long thiz,
+            String majortype,
+            String subtype,
+            boolean bFixedSizeSamples,
+            boolean bTemporalCompression,
+            int lSampleSize,
+            String formattype,
+            long pUnk,
+            int cbFormat,
+            long pbFormat)
+        throws HResultException;
+
+    public static native void DMO_MEDIA_TYPE_setCbFormat(
+            long thiz,
+            int cbFormat);
+
+    public static native int DMO_MEDIA_TYPE_setFormattype(
+            long thiz,
+            String formattype)
+        throws HResultException;
+
+    public static native void DMO_MEDIA_TYPE_setLSampleSize(
+            long thiz,
+            int lSampleSize);
+
+    public static native void DMO_MEDIA_TYPE_setPbFormat(
+            long thiz,
+            long pbFormat);
+
+    public static native long DMO_OUTPUT_DATA_BUFFER_alloc(
+            long pBuffer,
+            int dwStatus,
+            long rtTimestamp,
+            long rtTimelength);
+
+    public static native int DMO_OUTPUT_DATA_BUFFER_getDwStatus(long thiz);
+
+    public static native void DMO_OUTPUT_DATA_BUFFER_setDwStatus(
+            long thiz,
+            int dwStatus);
+
+    public static native int IMediaBuffer_AddRef(long thiz);
+
+    public static native long IMediaBuffer_GetBuffer(long thiz)
+        throws HResultException;
+
+    public static native int IMediaBuffer_GetLength(long thiz)
+        throws HResultException;
+
+    public static native int IMediaBuffer_GetMaxLength(long thiz)
+        throws HResultException;
+
+    public static native int IMediaBuffer_Release(long thiz);
+
+    public static native void IMediaBuffer_SetLength(long thiz, int cbLength)
+        throws HResultException;
+
+    public static native int IMediaObject_GetInputStatus(
+            long thiz,
+            int dwInputStreamIndex)
+        throws HResultException;
+
+    public static native int IMediaObject_ProcessInput(
+            long thiz,
+            int dwInputStreamIndex,
+            long pBuffer,
+            int dwFlags,
+            long rtTimestamp,
+            long rtTimelength)
+        throws HResultException;
+
+    public static native int IMediaObject_ProcessOutput(
+            long thiz,
+            int dwFlags,
+            int cOutputBufferCount,
+            long pOutputBuffers)
+        throws HResultException;
+
+    public static native long IMediaObject_QueryInterface(long thiz, String iid)
+        throws HResultException;
+
+    public static native void IMediaObject_Release(long thiz);
+
+    public static native int IMediaObject_SetInputType(
+            long thiz,
+            int dwInputStreamIndex,
+            long pmt,
+            int dwFlags)
+        throws HResultException;
+
+    public static native int IMediaObject_SetOutputType(
+            long thiz,
+            int dwOutputStreamIndex,
+            long pmt,
+            int dwFlags)
+        throws HResultException;
+
+    public static native int IPropertyStore_SetValue(
+            long thiz,
+            long key, boolean value)
+        throws HResultException;
+
+    public static native int IPropertyStore_SetValue(
+            long thiz,
+            long key, int value)
+        throws HResultException;
+
+    public static native long MediaBuffer_alloc(int maxLength);
+
+    public static native int MediaBuffer_pop(
+            long thiz,
+            byte[] buffer, int offset, int length)
+        throws HResultException;
+
+    public static native int MediaBuffer_push(
+            long thiz,
+            byte[] buffer, int offset, int length)
+        throws HResultException;
+
+    public static native long MoCreateMediaType(int cbFormat)
+        throws HResultException;
+
+    public static native void MoDeleteMediaType(long pmt)
+        throws HResultException;
+
+    public static native void MoFreeMediaType(long pmt)
+        throws HResultException;
+
+    public static native void MoInitMediaType(long pmt, int cbFormat)
+        throws HResultException;
+
+    /** Prevents the initialization of <tt>VoiceCaptureDSP</tt> instances. */
+    private VoiceCaptureDSP() {}
+}
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java
index a947a9c071b763ee715e00baa32a4c3b8a3b2ed6..c55c879cfe7feb26f84b68e2d87eab507a4860bc 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPI.java
@@ -24,6 +24,8 @@ public class WASAPI
 
     public static final int AUDCLNT_STREAMFLAGS_EVENTCALLBACK = 0x00040000;
 
+    public static final int AUDCLNT_STREAMFLAGS_LOOPBACK = 0x00020000;
+
     public static final int AUDCLNT_STREAMFLAGS_NOPERSIST = 0x00080000;
 
     public static final int CLSCTX_ALL
@@ -119,11 +121,12 @@ public class WASAPI
          * XXX The pointer to native memory returned by PSPropertyKeyFromString
          * is to be freed via CoTaskMemFree.
          */
+        String pszString = null;
+
         try
         {
-            PKEY_Device_FriendlyName
-                = PSPropertyKeyFromString(
-                        "{a45c254e-df1c-4efd-8020-67d146a850e0} 14");
+            pszString = "{a45c254e-df1c-4efd-8020-67d146a850e0} 14";
+            PKEY_Device_FriendlyName = PSPropertyKeyFromString(pszString);
             if (PKEY_Device_FriendlyName == 0)
                 throw new IllegalStateException("PKEY_Device_FriendlyName");
         }
@@ -131,8 +134,7 @@ public class WASAPI
         {
             Logger logger = Logger.getLogger(WASAPI.class);
 
-            logger.error("PSPropertyKeyFromString", hre);
-
+            logger.error("PSPropertyKeyFromString(" + pszString + ")", hre);
             throw new RuntimeException(hre);
         }
     }
@@ -164,6 +166,19 @@ public static native long CreateEvent(
             String lpName)
         throws HResultException;
 
+    /**
+     * Determines whether a specific <tt>HRESULT</tt> value indicates failure.
+     *
+     * @param hresult the <tt>HRESULT</tt> value to be checked whether it
+     * indicates failure
+     * @return <tt>true</tt> if the specified <tt>hresult</tt> indicates
+     * failure; otherwise, <tt>false</tt>
+     */
+    public static boolean FAILED(int hresult)
+    {
+        return (hresult < 0);
+    }
+
     public static native int IAudioCaptureClient_GetNextPacketSize(long thiz)
         throws HResultException;
 
@@ -324,6 +339,19 @@ public static native long PSPropertyKeyFromString(String pszString)
     public static native void ResetEvent(long hEvent)
         throws HResultException;
 
+    /**
+     * Determines whether a specific <tt>HRESULT</tt> value indicates success.
+     *
+     * @param hresult the <tt>HRESULT</tt> value to be checked whether it
+     * indicates success
+     * @return <tt>true</tt> if the specified <tt>hresult</tt> indicates
+     * success; otherwise, <tt>false</tt>
+     */
+    public static boolean SUCCEEDED(int hresult)
+    {
+        return (hresult >= 0);
+    }
+
     /**
      * Waits until the specified object is in the signaled state or the
      * specified time-out interval elapses.
@@ -398,6 +426,8 @@ public static native void WAVEFORMATEX_setWFormatTag(
             long thiz,
             char wFormatTag);
 
+    public static native int WAVEFORMATEX_sizeof();
+
     /** Prevents the initialization of <tt>WASAPI</tt> instances. */
     private WASAPI() {}
 }
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java
index c60e837d28601cd7b6b7d961e9535d857c1c6168..db411f5bc9af5543320d64e696b409a37e1cac3d 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java
@@ -6,10 +6,10 @@
  */
 package org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi;
 
+import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.VoiceCaptureDSP.*;
 import static org.jitsi.impl.neomedia.jmfext.media.protocol.wasapi.WASAPI.*;
 
 import java.io.*;
-import java.util.concurrent.*;
 
 import javax.media.*;
 import javax.media.control.*;
@@ -32,18 +32,179 @@
 public class WASAPIStream
     extends AbstractPushBufferStream<DataSource>
 {
-    /**
-     * The default duration of the audio data in milliseconds to be read from
-     * <tt>WASAPIStream</tt> in an invocation of {@link #read(Buffer)}.
-     */
-    private static final long DEFAULT_BUFFER_DURATION = 20;
-
     /**
      * The <tt>Logger</tt> used by the <tt>WASAPIStream</tt> class and its
      * instances to log debug information.
      */
     private static Logger logger = Logger.getLogger(WASAPIStream.class);
 
+    private static AudioFormat findClosestMatch(
+            Format[] formats,
+            AudioFormat format)
+    {
+
+        // Try to find an exact match for the specified format.
+        AudioFormat match = findFirstMatch(formats, format);
+
+        if (match == null)
+        {
+            /*
+             * Relax the channels of the specified format because we are able to
+             * translate between mono and stereo.
+             */
+            match
+                = findFirstMatch(
+                        formats,
+                        new AudioFormat(
+                                format.getEncoding(),
+                                format.getSampleRate(),
+                                format.getSampleSizeInBits(),
+                                /* channels */ Format.NOT_SPECIFIED,
+                                format.getEndian(),
+                                format.getSigned(),
+                                /* frameSizeInBits */ Format.NOT_SPECIFIED,
+                                /* frameRate */ Format.NOT_SPECIFIED,
+                                format.getDataType()));
+            if (match == null)
+            {
+                /*
+                 * Relax the sampleRate of the specified format as well because
+                 * the voice capture DMO which implements the acoustic echo
+                 * cancellation (AEC) feature is able to automatically resample.
+                 */
+                match
+                    = findFirstMatch(
+                            formats,
+                            new AudioFormat(
+                                    format.getEncoding(),
+                                    /* sampleRate */ Format.NOT_SPECIFIED,
+                                    format.getSampleSizeInBits(),
+                                    /* channels */ Format.NOT_SPECIFIED,
+                                    format.getEndian(),
+                                    format.getSigned(),
+                                    /* frameSizeInBits */ Format.NOT_SPECIFIED,
+                                    /* frameRate */ Format.NOT_SPECIFIED,
+                                    format.getDataType()));
+            }
+        }
+        return match;
+    }
+
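+    /**
+     * Finds the first <tt>Format</tt> in a specific list of <tt>Format</tt>s
+     * which matches a specific <tt>AudioFormat</tt>.
+     *
+     * @param formats the list of <tt>Format</tt>s to search through
+     * @param format the <tt>AudioFormat</tt> to match
+     * @return the intersection of the first element of <tt>formats</tt> which
+     * matches <tt>format</tt> with <tt>format</tt>; otherwise, <tt>null</tt>
+     */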
+    private static AudioFormat findFirstMatch(
+            Format[] formats,
+            AudioFormat format)
+    {
+        for (Format aFormat : formats)
+            if (aFormat.matches(format))
+                return (AudioFormat) aFormat.intersects(format);
+        return null;
+    }
+
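+    /**
+     * Sets the media type of an input or output stream of a specific voice
+     * capture DMO based on a specific <tt>AudioFormat</tt> by filling a
+     * <tt>WAVEFORMATEX</tt> and a <tt>DMO_MEDIA_TYPE</tt> and invoking either
+     * <tt>IMediaObject_SetInputType</tt> or
+     * <tt>IMediaObject_SetOutputType</tt>.
+     *
+     * @param iMediaObject the voice capture DMO to set the media type on
+     * @param inOrOut <tt>true</tt> to set the media type of an input stream;
+     * <tt>false</tt> to set the media type of an output stream
+     * @param dwXXXputStreamIndex the zero-based index of the stream
+     * @param audioFormat the <tt>AudioFormat</tt> describing the media type
+     * @param dwFlags the flags to pass on to the respective native function
+     * @return an <tt>HRESULT</tt> value indicating the success of the
+     * operation
+     * @throws HResultException if filling the media type or setting it on the
+     * stream fails
+     */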
+    private static int IMediaObject_SetXXXputType(
+            long iMediaObject,
+            boolean inOrOut,
+            int dwXXXputStreamIndex,
+            AudioFormat audioFormat,
+            int dwFlags)
+        throws HResultException
+    {
+        int channels = audioFormat.getChannels();
+        double sampleRate = audioFormat.getSampleRate();
+        int sampleSizeInBits = audioFormat.getSampleSizeInBits();
+
+        if (Format.NOT_SPECIFIED == channels)
+            throw new IllegalArgumentException("audioFormat.channels");
+        if (Format.NOT_SPECIFIED == sampleRate)
+            throw new IllegalArgumentException("audioFormat.sampleRate");
+        if (Format.NOT_SPECIFIED == sampleSizeInBits)
+            throw new IllegalArgumentException("audioFormat.sampleSizeInBits");
+
+        char nChannels = (char) channels;
+        int nSamplesPerSec = (int) sampleRate;
+        char wBitsPerSample = (char) sampleSizeInBits;
+        char nBlockAlign = (char) ((nChannels * wBitsPerSample) / 8);
+        char cbSize = 0;
+        int hresult;
+
+        long waveformatex = WAVEFORMATEX_alloc();
+
+        if (waveformatex == 0)
+            throw new OutOfMemoryError("WAVEFORMATEX_alloc");
+        try
+        {
+            WAVEFORMATEX_fill(
+                    waveformatex,
+                    WAVE_FORMAT_PCM,
+                    nChannels,
+                    nSamplesPerSec,
+                    nSamplesPerSec * nBlockAlign,
+                    nBlockAlign,
+                    wBitsPerSample,
+                    cbSize);
+
+            long pmt = MoCreateMediaType(/* cbFormat */ 0);
+
+            if (pmt == 0)
+                throw new OutOfMemoryError("MoCreateMediaType");
+            try
+            {
+                int cbFormat = WAVEFORMATEX_sizeof() + cbSize;
+
+                hresult
+                    = DMO_MEDIA_TYPE_fill(
+                            pmt,
+                            /* majortype */ MEDIATYPE_Audio,
+                            /* subtype */ MEDIASUBTYPE_PCM,
+                            /* bFixedSizeSamples */ true,
+                            /* bTemporalCompression */ false,
+                            wBitsPerSample / 8,
+                            /* formattype */ FORMAT_WaveFormatEx,
+                            /* pUnk */ 0,
+                            cbFormat,
+                            waveformatex);
+                if (FAILED(hresult))
+                    throw new HResultException(hresult, "DMO_MEDIA_TYPE_fill");
+                hresult
+                    = inOrOut
+                        ? VoiceCaptureDSP.IMediaObject_SetInputType(
+                                iMediaObject,
+                                dwXXXputStreamIndex,
+                                pmt,
+                                dwFlags)
+                        : VoiceCaptureDSP.IMediaObject_SetOutputType(
+                                iMediaObject,
+                                dwXXXputStreamIndex,
+                                pmt,
+                                dwFlags);
+                if (FAILED(hresult))
+                {
+                    throw new HResultException(
+                            hresult,
+                            inOrOut
+                                ? "IMediaObject_SetInputType"
+                                : "IMediaObject_SetOutputType");
+                }
+            }
+            finally
+            {
+                /*
+                 * XXX MoDeleteMediaType is documented to internally call
+                 * MoFreeMediaType to free the format block but the format block
+                 * has not been internally allocated by MoInitMediaType.
+                 */
+                DMO_MEDIA_TYPE_setCbFormat(pmt, 0);
+                DMO_MEDIA_TYPE_setFormattype(pmt, FORMAT_None);
+                DMO_MEDIA_TYPE_setPbFormat(pmt, 0);
+                MoDeleteMediaType(pmt);
+            }
+        }
+        finally
+        {
+            CoTaskMemFree(waveformatex);
+        }
+        return hresult;
+    }
+
     /**
      * Throws a new <tt>IOException</tt> instance initialized with a specific
      * <tt>String</tt> message and a specific <tt>HResultException</tt> cause.
@@ -53,9 +214,7 @@ public class WASAPIStream
      * @param hre an <tt>HResultException</tt> which is to be set as the
      * <tt>cause</tt> of the new <tt>IOException</tt> instance
      */
-    private static void throwNewIOException(
-            String message,
-            HResultException hre)
+    static void throwNewIOException(String message, HResultException hre)
         throws IOException
     {
         logger.error(message, hre);
@@ -66,21 +225,7 @@ private static void throwNewIOException(
         throw ioe;
     }
 
-    /**
-     * The <tt>WASAPISystem</tt> instance which has contributed the capture
-     * endpoint device identified by {@link #locator}.
-     */
-    private final WASAPISystem audioSystem;
-
-    /**
-     * The number of frames to be filled in a <tt>Buffer</tt> in an invocation
-     * of {@link #read(Buffer)}. If this instance implements the
-     * <tt>PushBufferStream</tt> interface,
-     * {@link #runInEventHandleCmd(Runnable)} will push via
-     * {@link BufferTransferHandler#transferData(PushBufferStream)} when
-     * {@link #iAudioClient} has made at least that many frames available.
-     */
-    private int bufferFrames;
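+    /**
+     * The maximum capacity in bytes of {@link #iMediaBuffer} i.e. of the
+     * <tt>IMediaBuffer</tt> into which the output of the voice capture DMO is
+     * processed.
+     */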
+    private int bufferMaxLength;
 
     /**
      * The size/length in bytes of the <tt>Buffer</tt> to be filled in an
@@ -88,77 +233,30 @@ private static void throwNewIOException(
      */
     private int bufferSize;
 
-    /**
-     * The indicator which determines whether the audio stream represented by
-     * this instance, {@link #iAudioClient} and {@link #iAudioCaptureClient} is
-     * busy and, consequently, its state should not be modified. For example,
-     * the audio stream is busy during the execution of {@link #read(Buffer)}.
-     */
-    private boolean busy;
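+    /**
+     * The <tt>AudioCaptureClient</tt> which reads audio data from the capture
+     * endpoint device identified by {@link #locator}.
+     */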
+    private AudioCaptureClient capture;
 
-    /**
-     * The length in milliseconds of the interval between successive, periodic
-     * processing passes by the audio engine on the data in the endpoint buffer.
-     */
-    private long devicePeriod = WASAPISystem.DEFAULT_DEVICE_PERIOD;
-
-    /**
-     * The number of channels which which this <tt>SourceStream</tt> has been
-     * connected.
-     */
-    private int dstChannels;
+    private int captureBufferMaxLength;
 
-    /**
-     * The frame size in bytes with which this <tt>SourceStream</tt> has been
-     * connected. It is the product of {@link #dstSampleSize} and
-     * {@link #dstChannels}.
-     */
-    private int dstFrameSize;
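+    /**
+     * The <tt>IMediaBuffer</tt> through which the audio data read from
+     * {@link #capture} is queued into input stream 0 of the voice capture DMO
+     * i.e. {@link #iMediaObject}.
+     */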
+    private long captureIMediaBuffer;
 
-    /**
-     * The sample size in bytes with which this <tt>SourceStream</tt> has been
-     * connected.
-     */
-    private int dstSampleSize;
-
-    /**
-     * The event handle that the system signals when an audio buffer is ready to
-     * be processed by the client.
-     */
-    private long eventHandle;
+    private boolean captureIsBusy;
 
     /**
-     * The <tt>Runnable</tt> which is scheduled by this <tt>WASAPIStream</tt>
-     * and executed by {@link #eventHandleExecutor} and waits for
-     * {@link #eventHandle} to be signaled.
+     * The length in milliseconds of the interval between successive, periodic
+     * processing passes by the audio engine on the data in the endpoint buffer.
      */
-    private Runnable eventHandleCmd;
+    private long devicePeriod;
 
-    /**
-     * The <tt>Executor</tt> implementation which is to execute
-     * {@link #eventHandleCmd}.
-     */
-    private Executor eventHandleExecutor;
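+    /**
+     * The pointer to the native <tt>DMO_OUTPUT_DATA_BUFFER</tt> which wraps
+     * {@link #iMediaBuffer} and is passed to
+     * <tt>IMediaObject_ProcessOutput</tt>.
+     */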
+    private long dmoOutputDataBuffer;
 
     /**
      * The <tt>AudioFormat</tt> of this <tt>SourceStream</tt>.
      */
     private AudioFormat format;
 
-    /**
-     * The WASAPI <tt>IAudioCaptureClient</tt> obtained from
-     * {@link #iAudioClient} which enables this <tt>SourceStream</tt> to read
-     * input data from the capture endpoint buffer.
-     */
-    private long iAudioCaptureClient;
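+    /**
+     * The <tt>IMediaBuffer</tt> into which the voice capture DMO writes its
+     * output i.e. the audio data which has undergone acoustic echo
+     * cancellation (AEC).
+     */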
+    private long iMediaBuffer;
 
-    /**
-     * The WASAPI <tt>IAudioClient</tt> instance which enables this
-     * <tt>SourceStream</tt> to create and initialize an audio stream between
-     * this <tt>SourceStream</tt> and the audio engine of the associated audio
-     * endpoint device.
-     */
-    private long iAudioClient;
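+    /**
+     * The pointer to the <tt>IMediaObject</tt> interface of the voice capture
+     * DMO which implements the acoustic echo cancellation (AEC) feature.
+     */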
+    private long iMediaObject;
 
     /**
      * The <tt>MediaLocator</tt> which identifies the audio endpoint device this
@@ -166,28 +264,21 @@ private static void throwNewIOException(
      */
     private MediaLocator locator;
 
-    /**
-     * The indicator which determines whether this instance should act as a
-     * <tt>PushBufferStream</tt> rather than as a <tt>PullBufferStream</tt>
-     * implementation.
-     */
-    private final boolean push;
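+    /**
+     * The buffer which accumulates the output of the voice capture DMO until
+     * it is read out of this <tt>SourceStream</tt> by {@link #read(Buffer)}.
+     */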
+    private byte[] processed;
 
-    private byte[] remainder;
+    private int processedLength;
 
-    private int remainderLength;
+    private byte[] processInputBuffer;
 
-    /**
-     * The number of channels with which {@link #iAudioClient} has been
-     * initialized.
-     */
-    private int srcChannels;
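+    /**
+     * The <tt>Thread</tt> which feeds the capture and render audio into the
+     * voice capture DMO and retrieves its output i.e. which executes
+     * {@link #runInProcessThread(Thread)}.
+     */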
+    private Thread processThread;
 
-    /**
-     * The sample size in bytes with which {@link #iAudioClient} has been
-     * initialized.
-     */
-    private int srcSampleSize;
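+    /**
+     * The <tt>AudioCaptureClient</tt> which loopback-captures the audio being
+     * played back on the render endpoint device in order to feed it into
+     * input stream 1 of the voice capture DMO.
+     */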
+    private AudioCaptureClient render;
+
+    private int renderBufferMaxLength;
+
+    private long renderIMediaBuffer;
+
+    private boolean renderIsBusy;
 
     /**
      * The indicator which determines whether this <tt>SourceStream</tt> is
@@ -209,194 +300,45 @@ private static void throwNewIOException(
     public WASAPIStream(DataSource dataSource, FormatControl formatControl)
     {
         super(dataSource, formatControl);
-
-        audioSystem
-            = (WASAPISystem)
-                AudioSystem.getAudioSystem(AudioSystem.LOCATOR_PROTOCOL_WASAPI);
-        if (audioSystem == null)
-            throw new IllegalStateException("audioSystem");
-
-        push = PushBufferStream.class.isInstance(this);
     }
 
     /**
      * Connects this <tt>SourceStream</tt> to the audio endpoint device
-     * identified by {@link #locator}.
+     * identified by {@link #locator} if disconnected.
      *
-     * @throws IOException if anything goes wrong while this
-     * <tt>SourceStream</tt> connects to the audio endpoint device identified by
+     * @throws IOException if this <tt>SourceStream</tt> is disconnected and
+     * fails to connect to the audio endpoint device identified by
      * <tt>locator</tt>
      */
     private void connect()
         throws IOException
     {
-        if (this.iAudioClient != 0)
+        if (capture != null)
             return;
 
         try
         {
-            MediaLocator locator = getLocator();
-
-            if (locator == null)
-                throw new NullPointerException("No locator/MediaLocator set.");
-
-            AudioFormat thisFormat = (AudioFormat) getFormat();
-            AudioFormat[] formats
-                = WASAPISystem.getFormatsToInitializeIAudioClient(thisFormat);
-            long eventHandle = CreateEvent(0, false, false, null);
-
-            /*
-             * If WASAPIStream is deployed as a PushBufferStream implementation,
-             * it relies on eventHandle to tick.
-             */
-            if (push && (eventHandle == 0))
-                throw new IOException("CreateEvent");
-
-            try
-            {
-                AudioSystem.DataFlow dataFlow = AudioSystem.DataFlow.CAPTURE;
-                /*
-                 * Presently, we attempt to have the same buffer length in
-                 * WASAPIRenderer and WASAPIStream. There is no particular
-                 * reason/requirement to do so.
-                 */
-                long hnsBufferDuration = 3 * DEFAULT_BUFFER_DURATION * 10000;
-                long iAudioClient
-                    = audioSystem.initializeIAudioClient(
-                            locator,
-                            dataFlow,
-                            eventHandle,
-                            hnsBufferDuration,
-                            formats);
-
-                if (iAudioClient == 0)
-                {
-                    throw new ResourceUnavailableException(
-                            "Failed to initialize IAudioClient"
-                                + " for MediaLocator " + locator
-                                + " and AudioSystem.DataFlow " + dataFlow);
-                }
-                try
-                {
-                    /*
-                     * Determine the AudioFormat with which the iAudioClient has
-                     * been initialized.
-                     */
-                    AudioFormat format = null;
-
-                    for (AudioFormat aFormat : formats)
-                    {
-                        if (aFormat != null)
-                        {
-                            format = aFormat;
-                            break;
-                        }
-                    }
-
-                    long iAudioCaptureClient
-                        = IAudioClient_GetService(
-                                iAudioClient,
-                                IID_IAudioCaptureClient);
-
-                    if (iAudioCaptureClient == 0)
-                    {
-                        throw new ResourceUnavailableException(
-                                "IAudioClient_GetService"
-                                    + "(IID_IAudioCaptureClient)");
-                    }
-                    try
-                    {
-                        /*
-                         * The value hnsDefaultDevicePeriod is documented to
-                         * specify the default scheduling period for a
-                         * shared-mode stream.
-                         */
-                        devicePeriod
-                            = IAudioClient_GetDefaultDevicePeriod(iAudioClient)
-                                / 10000L;
-
-                        int numBufferFrames
-                            = IAudioClient_GetBufferSize(iAudioClient);
-                        int sampleRate = (int) format.getSampleRate();
-                        long bufferDuration
-                            = numBufferFrames * 1000 / sampleRate;
-
-                        /*
-                         * We will very likely be inefficient if we fail to
-                         * synchronize with the scheduling period of the audio
-                         * engine but we have to make do with what we have.
-                         */
-                        if (devicePeriod <= 1)
-                        {
-                            devicePeriod = bufferDuration / 2;
-                            if ((devicePeriod
-                                        > WASAPISystem.DEFAULT_DEVICE_PERIOD)
-                                    || (devicePeriod <= 1))
-                                devicePeriod
-                                    = WASAPISystem.DEFAULT_DEVICE_PERIOD;
-                        }
-
-                        srcChannels = format.getChannels();
-                        srcSampleSize
-                            = WASAPISystem.getSampleSizeInBytes(format);
-
-                        dstChannels = thisFormat.getChannels();
-                        dstSampleSize
-                            = WASAPISystem.getSampleSizeInBytes(thisFormat);
-
-                        dstFrameSize = dstSampleSize * dstChannels;
-                        bufferFrames
-                            = (int)
-                                (DEFAULT_BUFFER_DURATION * sampleRate / 1000);
-                        bufferSize = dstFrameSize * bufferFrames;
-
-                        remainder = new byte[numBufferFrames * dstFrameSize];
-                        remainderLength = 0;
-
-                        this.format = thisFormat;
-
-                        this.eventHandle = eventHandle;
-                        eventHandle = 0;
-                        this.iAudioClient = iAudioClient;
-                        iAudioClient = 0;
-                        this.iAudioCaptureClient = iAudioCaptureClient;
-                        iAudioCaptureClient = 0;
-                    }
-                    finally
-                    {
-                        if (iAudioCaptureClient != 0)
-                            IAudioCaptureClient_Release(iAudioCaptureClient);
-                    }
-                }
-                finally
-                {
-                    if (iAudioClient != 0)
-                        IAudioClient_Release(iAudioClient);
-                }
-            }
-            finally
-            {
-                if (eventHandle != 0)
-                    CloseHandle(eventHandle);
-            }
+            doConnect();
         }
         catch (Throwable t)
         {
             if (t instanceof ThreadDeath)
                 throw (ThreadDeath) t;
-            else if (t instanceof IOException)
-                throw (IOException) t;
             else
             {
                 logger.error(
                         "Failed to connect a WASAPIStream"
                             + " to an audio endpoint device.",
                         t);
+                if (t instanceof IOException)
+                    throw (IOException) t;
+                else
+                {
+                    IOException ioe = new IOException();
 
-                IOException ioe = new IOException();
-
-                ioe.initCause(t);
-                throw ioe;
+                    ioe.initCause(t);
+                    throw ioe;
+                }
             }
         }
     }
@@ -418,41 +360,109 @@ private void disconnect()
         }
         finally
         {
-            if (iAudioCaptureClient != 0)
+            uninitializeAEC();
+            uninitializeRender();
+            uninitializeCapture();
+
+            /*
+             * Make sure this AbstractPushBufferStream asks its DataSource for
+             * the Format in which it is supposed to output audio data the next
+             * time it is connected instead of using its Format from a previous
+             * connect.
+             */
+            format = null;
+        }
+    }
+
+    /**
+     * Invoked by {@link #connect()} after a check that this
+     * <tt>SourceStream</tt> really needs to connect to the associated audio
+     * endpoint device has been passed i.e. it is certain that this instance is
+     * disconnected.
+     *
+     * @throws Exception if the <tt>SourceStream</tt> fails to connect to the
+     * associated audio endpoint device. The <tt>Exception</tt> is logged by the
+     * <tt>connect()</tt> method.
+     */
+    private void doConnect()
+        throws Exception
+    {
+        MediaLocator locator = getLocator();
+
+        if (locator == null)
+            throw new NullPointerException("No locator set.");
+
+        AudioFormat thisFormat = (AudioFormat) getFormat();
+
+        if (thisFormat == null)
+            throw new NullPointerException("No format set.");
+        if (dataSource.aec)
+        {
+            CaptureDeviceInfo2 renderDeviceInfo
+                = dataSource.audioSystem.getSelectedDevice(
+                        AudioSystem.DataFlow.PLAYBACK);
+
+            if (renderDeviceInfo == null)
+                throw new NullPointerException("No playback device set.");
+
+            MediaLocator renderLocator = renderDeviceInfo.getLocator();
+
+            /*
+             * This SourceStream will output in an AudioFormat supported by the
+             * voice capture DMO which implements the acoustic echo cancellation
+             * (AEC) feature. The IAudioClients will be initialized with
+             * AudioFormats based on thisFormat.
+             */
+            AudioFormat captureFormat
+                = findClosestMatchCaptureSupportedFormat(thisFormat);
+
+            if (captureFormat == null)
             {
-                IAudioCaptureClient_Release(iAudioCaptureClient);
-                iAudioCaptureClient = 0;
+                throw new IllegalStateException(
+                        "Failed to determine an AudioFormat with which to"
+                            + " initialize IAudioClient for MediaLocator "
+                            + locator + " based on AudioFormat " + thisFormat);
             }
-            if (iAudioClient != 0)
+
+            AudioFormat renderFormat
+                = findClosestMatch(renderDeviceInfo.getFormats(), thisFormat);
+
+            if (renderFormat == null)
             {
-                IAudioClient_Release(iAudioClient);
-                iAudioClient = 0;
+                throw new IllegalStateException(
+                        "Failed to determine an AudioFormat with which to"
+                            + " initialize IAudioClient for MediaLocator "
+                            + renderLocator + " based on AudioFormat "
+                            + thisFormat);
             }
-            if (eventHandle != 0)
+
+            boolean uninitialize = true;
+
+            initializeCapture(locator, captureFormat);
+            try
             {
+                initializeRender(renderLocator, renderFormat);
                 try
                 {
-                    CloseHandle(eventHandle);
+                    initializeAEC(captureFormat, renderFormat, thisFormat);
+                    uninitialize = false;
                 }
-                catch (HResultException hre)
+                finally
                 {
-                    // The event HANDLE will be leaked.
-                    logger.warn("Failed to close event HANDLE.", hre);
+                    if (uninitialize)
+                        uninitializeRender();
                 }
-                eventHandle = 0;
             }
-
-            /*
-             * Make sure this AbstractPullBufferStream asks its DataSource for
-             * the Format in which it is supposed to output audio data the next
-             * time it is connected instead of using its Format from a previous
-             * connect.
-             */
-            format = null;
-            remainder = null;
-            remainderLength = 0;
-            started = false;
+            finally
+            {
+                if (uninitialize)
+                    uninitializeCapture();
+            }
         }
+        else
+            initializeCapture(locator, thisFormat);
+
+        this.format = thisFormat;
     }
 
     /**
@@ -464,46 +474,13 @@ protected Format doGetFormat()
         return (format == null) ? super.doGetFormat() : format;
     }
 
-    /**
-     * Reads the next data packet from the capture endpoint buffer into a
-     * specific <tt>Buffer</tt>.
-     *
-     * @param buffer the <tt>Buffer</tt> to read the next data packet from the
-     * capture endpoint buffer into
-     * @return the number of bytes read from the capture endpoint buffer into
-     * the value of the <tt>data</tt> property of <tt>buffer</tt>
-     * @throws IOException if an I/O error occurs
-     */
-    private int doRead(Buffer buffer)
-        throws IOException
+    private AudioFormat findClosestMatchCaptureSupportedFormat(
+            AudioFormat format)
     {
-        int toRead = Math.min(bufferSize, remainderLength);
-        int read;
-
-        if (toRead == 0)
-            read = 0;
-        else
-        {
-            int offset = buffer.getOffset() + buffer.getLength();
-            byte[] data
-                = AbstractCodec2.validateByteArraySize(
-                        buffer,
-                        offset + toRead,
-                        true);
-
-            System.arraycopy(remainder, 0, data, offset, toRead);
-            popFromRemainder(toRead);
-            read = toRead;
-
-            if (offset == 0)
-            {
-                long timeStamp = System.nanoTime();
-
-                buffer.setFlags(Buffer.FLAG_SYSTEM_TIME);
-                buffer.setTimeStamp(timeStamp);
-            }
-        }
-        return read;
+        return
+            findClosestMatch(
+                    dataSource.getIAudioClientSupportedFormats(),
+                    format);
     }
 
     /**
@@ -518,17 +495,380 @@ private MediaLocator getLocator()
         return locator;
     }
 
+    private void initializeAEC(
+            AudioFormat inFormat0, AudioFormat inFormat1,
+            AudioFormat outFormat)
+        throws Exception
+    {
+        long iMediaObject = dataSource.audioSystem.initializeAEC();
+
+        if (iMediaObject == 0)
+        {
+            throw new ResourceUnavailableException(
+                    "Failed to initialize a Voice Capture DSP for the purposes"
+                        + " of acoustic echo cancellation (AEC).");
+        }
+        try
+        {
+            int hresult
+                = IMediaObject_SetXXXputType(
+                        iMediaObject,
+                        /* IMediaObject_SetInputType */ true,
+                        /* dwInputStreamIndex */ 0,
+                        inFormat0,
+                        /* dwFlags */ 0);
+
+            if (FAILED(hresult))
+            {
+                throw new HResultException(
+                        hresult,
+                        "IMediaObject_SetInputType, dwInputStreamIndex 0, "
+                                + inFormat0);
+            }
+            hresult
+                = IMediaObject_SetXXXputType(
+                        iMediaObject,
+                        /* IMediaObject_SetInputType */ true,
+                        /* dwInputStreamIndex */ 1,
+                        inFormat1,
+                        /* dwFlags */ 0);
+            if (FAILED(hresult))
+            {
+                throw new HResultException(
+                        hresult,
+                        "IMediaObject_SetInputType, dwInputStreamIndex 1, "
+                                + inFormat1);
+            }
+            hresult
+                = IMediaObject_SetXXXputType(
+                        iMediaObject,
+                        /* IMediaObject_SetOutputType */ false,
+                        /* dwOutputStreamIndex */ 0,
+                        outFormat,
+                        /* dwFlags */ 0);
+            if (FAILED(hresult))
+            {
+                throw new HResultException(
+                        hresult,
+                        "IMediaObject_SetOutputType, " + outFormat);
+            }
+
+            long iPropertyStore
+                = IMediaObject_QueryInterface(
+                        iMediaObject,
+                        IID_IPropertyStore);
+
+            if (iPropertyStore == 0)
+            {
+                throw new RuntimeException(
+                        "IMediaObject_QueryInterface IID_IPropertyStore");
+            }
+            try
+            {
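+                /*
+                 * Set the voice capture DMO to filter mode (as opposed to
+                 * source mode) i.e. this stream is expected to queue the
+                 * capture and render audio into the DMO's input streams
+                 * itself.
+                 */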
+                hresult
+                    = IPropertyStore_SetValue(
+                            iPropertyStore,
+                            MFPKEY_WMAAECMA_DMO_SOURCE_MODE,
+                            false);
+                if (FAILED(hresult))
+                {
+                    throw new HResultException(
+                            hresult,
+                            "IPropertyStore_SetValue"
+                                + " MFPKEY_WMAAECMA_DMO_SOURCE_MODE");
+                }
+
+                long captureIMediaBuffer
+                    = MediaBuffer_alloc(capture.bufferSize);
+
+                if (captureIMediaBuffer == 0)
+                    throw new OutOfMemoryError("MediaBuffer_alloc");
+                try
+                {
+                    long renderIMediaBuffer
+                        = MediaBuffer_alloc(render.bufferSize);
+
+                    if (renderIMediaBuffer == 0)
+                        throw new OutOfMemoryError("MediaBuffer_alloc");
+                    try
+                    {
+                        int outFrameSize
+                            = WASAPISystem.getSampleSizeInBytes(outFormat)
+                                * outFormat.getChannels();
+                        int outFrames
+                            = (int)
+                                (AudioCaptureClient.DEFAULT_BUFFER_DURATION
+                                    * ((int) outFormat.getSampleRate())
+                                    / 1000);
+                        long iMediaBuffer
+                            = MediaBuffer_alloc(outFrameSize * outFrames);
+
+                        if (iMediaBuffer == 0)
+                            throw new OutOfMemoryError("MediaBuffer_alloc");
+                        try
+                        {
+                            long dmoOutputDataBuffer
+                                = DMO_OUTPUT_DATA_BUFFER_alloc(
+                                        iMediaBuffer,
+                                        /* dwStatus */ 0,
+                                        /* rtTimestamp */ 0,
+                                        /* rtTimelength */ 0);
+
+                            if (dmoOutputDataBuffer == 0)
+                            {
+                                throw new OutOfMemoryError(
+                                        "DMO_OUTPUT_DATA_BUFFER_alloc");
+                            }
+                            try
+                            {
+                                bufferMaxLength
+                                    = IMediaBuffer_GetMaxLength(iMediaBuffer);
+                                captureBufferMaxLength
+                                    = IMediaBuffer_GetMaxLength(
+                                            captureIMediaBuffer);
+                                renderBufferMaxLength
+                                    = IMediaBuffer_GetMaxLength(
+                                            renderIMediaBuffer);
+
+                                processed = new byte[bufferMaxLength * 3];
+                                processedLength = 0;
+
+                                this.captureIMediaBuffer = captureIMediaBuffer;
+                                captureIMediaBuffer = 0;
+                                this.dmoOutputDataBuffer = dmoOutputDataBuffer;
+                                dmoOutputDataBuffer = 0;
+                                this.iMediaBuffer = iMediaBuffer;
+                                iMediaBuffer = 0;
+                                this.iMediaObject = iMediaObject;
+                                iMediaObject = 0;
+                                this.renderIMediaBuffer = renderIMediaBuffer;
+                                renderIMediaBuffer = 0;
+                            }
+                            finally
+                            {
+                                if (dmoOutputDataBuffer != 0)
+                                    CoTaskMemFree(dmoOutputDataBuffer);
+                            }
+                        }
+                        finally
+                        {
+                            if (iMediaBuffer != 0)
+                                IMediaBuffer_Release(iMediaBuffer);
+                        }
+                    }
+                    finally
+                    {
+                        if (renderIMediaBuffer != 0)
+                            IMediaBuffer_Release(renderIMediaBuffer);
+                    }
+                }
+                finally
+                {
+                    if (captureIMediaBuffer != 0)
+                        IMediaBuffer_Release(captureIMediaBuffer);
+                }
+            }
+            finally
+            {
+                if (iPropertyStore != 0)
+                    IPropertyStore_Release(iPropertyStore);
+            }
+        }
+        finally
+        {
+            if (iMediaObject != 0)
+                IMediaObject_Release(iMediaObject);
+        }
+    }
+
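+    /**
+     * Initializes {@link #capture} i.e. the <tt>AudioCaptureClient</tt> which
+     * reads audio data from the capture endpoint device identified by a
+     * specific <tt>MediaLocator</tt> in a specific <tt>AudioFormat</tt>.
+     *
+     * @param locator the <tt>MediaLocator</tt> identifying the capture
+     * endpoint device
+     * @param format the <tt>AudioFormat</tt> in which the audio data is to be
+     * read
+     * @throws Exception if the initialization fails
+     */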
+    private void initializeCapture(MediaLocator locator, AudioFormat format)
+        throws Exception
+    {
+        capture
+            = new AudioCaptureClient(
+                    dataSource.audioSystem,
+                    locator,
+                    AudioSystem.DataFlow.CAPTURE,
+                    /* streamFlags */ 0,
+                    format,
+                    new BufferTransferHandler()
+                            {
+                                public void transferData(
+                                        PushBufferStream stream)
+                                {
+                                    transferCaptureData();
+                                }
+                            });
+        bufferSize = capture.bufferSize;
+        devicePeriod = capture.devicePeriod;
+    }
+
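+    /**
+     * Initializes {@link #render} i.e. the <tt>AudioCaptureClient</tt> which
+     * loopback-captures the audio being played back on the render endpoint
+     * device identified by a specific <tt>MediaLocator</tt> so that it can be
+     * fed into the voice capture DMO for the purposes of acoustic echo
+     * cancellation (AEC).
+     *
+     * @param locator the <tt>MediaLocator</tt> identifying the render
+     * endpoint device
+     * @param format the <tt>AudioFormat</tt> in which the audio data is to be
+     * read
+     * @throws Exception if the initialization fails
+     */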
+    private void initializeRender(final MediaLocator locator, AudioFormat format)
+        throws Exception
+    {
+        render
+            = new AudioCaptureClient(
+                    dataSource.audioSystem,
+                    locator,
+                    AudioSystem.DataFlow.PLAYBACK,
+                    WASAPI.AUDCLNT_STREAMFLAGS_LOOPBACK,
+                    format,
+                    new BufferTransferHandler()
+                            {
+                                public void transferData(
+                                        PushBufferStream stream)
+                                {
+                                    transferRenderData();
+                                }
+                            });
+    }
+
     /**
-     * Pops a specific number of bytes from {@link #remainder}. For example,
-     * because such a number of bytes have been read from <tt>remainder</tt> and
+     * Pops a specific number of bytes from {@link #processed}. For example,
+     * because such a number of bytes have been read from <tt>processed</tt> and
      * written into a <tt>Buffer</tt>.
      *
-     * @param length the number of bytes to pop from <tt>remainder</tt>
+     * @param length the number of bytes to pop from <tt>processed</tt>
      */
-    private void popFromRemainder(int length)
+    private void popFromProcessed(int length)
     {
-        remainderLength
-            = WASAPIRenderer.pop(remainder, remainderLength, length);
+        processedLength
+            = WASAPIRenderer.pop(processed, processedLength, length);
+    }
+
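+    /**
+     * Reads audio data from either {@link #capture} or {@link #render}
+     * (depending on <tt>dwInputStreamIndex</tt>) into the associated
+     * <tt>IMediaBuffer</tt> and queues it into the respective input stream of
+     * the voice capture DMO for as long as the stream accepts input data.
+     *
+     * @param dwInputStreamIndex the zero-based index of the input stream of
+     * the voice capture DMO: 0 for capture, 1 for render
+     * @return the number of bytes queued into the specified input stream
+     */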
+    private int processInput(int dwInputStreamIndex)
+    {
+        long pBuffer;
+        int maxLength;
+        AudioCaptureClient audioCaptureClient;
+
+        switch (dwInputStreamIndex)
+        {
+        case 0:
+            pBuffer = captureIMediaBuffer;
+            maxLength = captureBufferMaxLength;
+            audioCaptureClient = capture;
+            break;
+        case 1:
+            pBuffer = renderIMediaBuffer;
+            maxLength = renderBufferMaxLength;
+            audioCaptureClient = render;
+            break;
+        default:
+            throw new IllegalArgumentException("dwInputStreamIndex");
+        }
+
+        int hresult = S_OK;
+        int numProcessed = 0;
+
+        do
+        {
+            int dwFlags;
+
+            try
+            {
+                dwFlags
+                    = IMediaObject_GetInputStatus(
+                            iMediaObject,
+                            dwInputStreamIndex);
+            }
+            catch (HResultException hre)
+            {
+                dwFlags = 0;
+                hresult = hre.getHResult();
+                logger.error("IMediaObject_GetInputStatus", hre);
+            }
+            if ((dwFlags & DMO_INPUT_STATUSF_ACCEPT_DATA)
+                    == DMO_INPUT_STATUSF_ACCEPT_DATA)
+            {
+                int toRead;
+
+                try
+                {
+                    toRead = maxLength - IMediaBuffer_GetLength(pBuffer);
+                }
+                catch (HResultException hre)
+                {
+                    hresult = hre.getHResult();
+                    toRead = 0;
+                    logger.error("IMediaBuffer_GetLength", hre);
+                }
+                if (toRead > 0)
+                {
+                    if ((processInputBuffer == null)
+                            || (processInputBuffer.length < toRead))
+                        processInputBuffer = new byte[toRead];
+
+                    int read;
+
+                    try
+                    {
+                        read
+                            = audioCaptureClient.read(
+                                    processInputBuffer,
+                                    0,
+                                    toRead);
+                    }
+                    catch (IOException ioe)
+                    {
+                        read = 0;
+                        logger.error(
+                                "Failed to read from IAudioCaptureClient.",
+                                ioe);
+                    }
+                    if (read > 0)
+                    {
+                        int written;
+
+                        try
+                        {
+                            written
+                                = MediaBuffer_push(
+                                        pBuffer,
+                                        processInputBuffer, 0, read);
+                        }
+                        catch (HResultException hre)
+                        {
+                            written = 0;
+                            logger.error("MediaBuffer_push", hre);
+                        }
+                        if (written < read)
+                        {
+                            logger.error(
+                                    "Failed to push/write "
+                                        + ((written <= 0)
+                                                ? read
+                                                : (read - written))
+                                        + " bytes into an IMediaBuffer.");
+                        }
+                        if (written > 0)
+                            numProcessed += written;
+                    }
+                }
+                try
+                {
+                    hresult
+                        = IMediaObject_ProcessInput(
+                                iMediaObject,
+                                dwInputStreamIndex,
+                                pBuffer,
+                                /* dwFlags */ 0,
+                                /* rtTimestamp */ 0,
+                                /* rtTimelength */ 0);
+                }
+                catch (HResultException hre)
+                {
+                    hresult = hre.getHResult();
+                    if (hresult != DMO_E_NOTACCEPTING)
+                        logger.error("IMediaObject_ProcessInput", hre);
+                }
+            }
+            else
+                break; // The input stream cannot accept more input data.
+        }
+        while (SUCCEEDED(hresult));
+
+        return numProcessed;
     }
 
     /**
@@ -537,8 +877,12 @@ private void popFromRemainder(int length)
     public void read(Buffer buffer)
         throws IOException
     {
-        if (bufferSize != 0) // Reduce relocation as much as possible.
-            AbstractCodec2.validateByteArraySize(buffer, bufferSize, false);
+        // Reduce relocations as much as possible.
+        int capacity = dataSource.aec ? bufferMaxLength : bufferSize;
+        byte[] data
+            = AbstractCodec2.validateByteArraySize(buffer, capacity, false);
+        int length = 0;
+
         buffer.setLength(0);
         buffer.setOffset(0);
 
@@ -548,17 +892,15 @@ public void read(Buffer buffer)
 
             synchronized (this)
             {
-                if ((iAudioClient == 0) || (iAudioCaptureClient == 0))
+                if ((capture == null) || (dataSource.aec && (render == null)))
                     message = getClass().getName() + " is disconnected.";
-                else if (!started)
-                    message = getClass().getName() + " is stopped.";
                 else
                 {
                     message = null;
-                    busy = true;
+                    captureIsBusy = true;
+                    renderIsBusy = true;
                 }
             }
-
             /*
              * The caller shouldn't call #read(Buffer) if this instance is
              * disconnected or stopped. Additionally, if she does, she may be
@@ -575,7 +917,22 @@ else if (!started)
 
             try
             {
-                read = doRead(buffer);
+                int toRead = capacity - length;
+
+                if (render == null)
+                    read = capture.read(data, length, toRead);
+                else
+                {
+                    toRead = Math.min(toRead, processedLength);
+                    if (toRead == 0)
+                        read = 0;
+                    else
+                    {
+                        System.arraycopy(processed, 0, data, length, toRead);
+                        popFromProcessed(toRead);
+                        read = toRead;
+                    }
+                }
                 cause = null;
             }
             catch (Throwable t)
@@ -591,56 +948,27 @@ else if (!started)
             {
                 synchronized (this)
                 {
-                    busy = false;
+                    captureIsBusy = false;
+                    renderIsBusy = false;
                     notifyAll();
                 }
             }
-
             if (cause == null)
             {
-                if (!push && (read == 0))
+                if (length == 0)
                 {
-                    /*
-                     * The next data packet in the capture endpoint buffer is
-                     * (very likely) not available yet, we will want to wait a
-                     * bit for it to be made available.
-                     */
-                    boolean interrupted = false;
+                    long timeStamp = System.nanoTime();
 
-                    synchronized (this)
-                    {
-                        /*
-                         * Spurious wake-ups should not be a big issue here.
-                         * While this SourceStream may query the availability of
-                         * the next data packet in the capture endpoint buffer
-                         * more often than practically necessary (which may very
-                         * well classify as a case of performance loss), the
-                         * ability to unblock this SourceStream is considered
-                         * more important.
-                         */
-                        try
-                        {
-                            wait(devicePeriod);
-                        }
-                        catch (InterruptedException ie)
-                        {
-                            interrupted = true;
-                        }
-                    }
-                    if (interrupted)
-                        Thread.currentThread().interrupt();
+                    buffer.setFlags(Buffer.FLAG_SYSTEM_TIME);
+                    buffer.setTimeStamp(timeStamp);
                 }
-                else
+                length += read;
+                if ((length >= capacity) || (read == 0))
                 {
-                    int length = buffer.getLength() + read;
-
+                    if (format != null)
+                        buffer.setFormat(format);
                     buffer.setLength(length);
-                    if ((length >= bufferSize) || (read == 0))
-                    {
-                        if (format != null)
-                            buffer.setFormat(format);
-                        break;
-                    }
+                    break;
                 }
             }
             else
@@ -661,76 +989,74 @@ else if (cause instanceof IOException)
         while (true);
     }
 
-    /**
-     * Reads from {@link #iAudioCaptureClient} into {@link #remainder} and
-     * returns a non-<tt>null</tt> <tt>BufferTransferHandler</tt> if this
-     * instance is to push audio data.
-     *
-     * @return a <tt>BufferTransferHandler</tt> if this instance is to push
-     * audio data; otherwise, <tt>null</tt>
-     */
-    private BufferTransferHandler readInEventHandleCmd()
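+    /**
+     * Performs a single pass of feeding input into and retrieving output from
+     * the voice capture DMO and returns a non-<tt>null</tt>
+     * <tt>BufferTransferHandler</tt> if this instance is to push the
+     * processed audio data.
+     *
+     * @return a <tt>BufferTransferHandler</tt> if this instance is to push
+     * audio data; otherwise, <tt>null</tt>
+     */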
+    private BufferTransferHandler runInProcessThread()
     {
-        /*
-         * Determine the size in bytes of the next data packet in the capture
-         * endpoint buffer.
-         */
-        int numFramesInNextPacket;
+        // ProcessInput
+        int numProcessedCapture = processInput(/* capture */ 0);
+        int numProcessedRender = processInput(/* render */ 1);
+        if (logger.isTraceEnabled()
+                && ((numProcessedCapture > 0) || (numProcessedRender > 0)))
+        {
+            logger.trace(
+                    "runInProcessThread: processed capture "
+                        + numProcessedCapture + ", render "
+                        + numProcessedRender);
+        }
 
-        try
-        {
-            numFramesInNextPacket
-                = IAudioCaptureClient_GetNextPacketSize(iAudioCaptureClient);
-        }
-        catch (HResultException hre)
-        {
-            numFramesInNextPacket = 0; // Silence the compiler.
-            logger.error("IAudioCaptureClient_GetNextPacketSize", hre);
-        }
+        // ProcessOutput
+        int dwStatus = 0;
 
-        if (numFramesInNextPacket != 0)
+        do
         {
-            int toRead = numFramesInNextPacket * dstFrameSize;
+            try
+            {
+                IMediaObject_ProcessOutput(
+                        iMediaObject,
+                        /* dwFlags */ 0,
+                        1,
+                        dmoOutputDataBuffer);
+                dwStatus
+                    = DMO_OUTPUT_DATA_BUFFER_getDwStatus(dmoOutputDataBuffer);
+            }
+            catch (HResultException hre)
+            {
+                dwStatus = 0;
+                logger.error("IMediaObject_ProcessOutput", hre);
+            }
+            try
+            {
+                int toRead = IMediaBuffer_GetLength(iMediaBuffer);
 
-            /*
-             * Make sure there is enough room in remainder to accommodate
-             * toRead.
-             */
-            int toPop = toRead - (remainder.length - remainderLength);
+                if (toRead > 0)
+                {
+                    /*
+                     * Make sure there is enough room in processed to
+                     * accommodate toRead.
+                     */
+                    int toPop = toRead - (processed.length - processedLength);
 
-            if (toPop > 0)
-                popFromRemainder(toPop);
+                    if (toPop > 0)
+                        popFromProcessed(toPop);
 
-            try
-            {
-                int read
-                    = IAudioCaptureClient_Read(
-                            iAudioCaptureClient,
-                            remainder, remainderLength, toRead,
-                            srcSampleSize, srcChannels,
-                            dstSampleSize, dstChannels);
-
-                remainderLength += read;
+                    int read
+                        = MediaBuffer_pop(
+                                iMediaBuffer,
+                                processed, processedLength, toRead);
+
+                    if (read > 0)
+                        processedLength += read;
+                }
             }
             catch (HResultException hre)
             {
-                logger.error("IAudioCaptureClient_Read", hre);
+                logger.error(
+                        "Failed to read from acoustic echo cancellation (AEC)"
+                            + " output IMediaBuffer.",
+                        hre);
+                break;
             }
         }
+        while ((dwStatus & DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE)
+                == DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE);
 
-        return
-            (push && (remainderLength >= bufferSize)) ? transferHandler : null;
+        return (processedLength >= bufferMaxLength) ? transferHandler : null;
     }
 
-    /**
-     * Runs/executes in the thread associated with a specific <tt>Runnable</tt>
-     * initialized to wait for {@link #eventHandle} to be signaled.
-     *
-     * @param eventHandleCmd the <tt>Runnable</tt> which has been initialized to
-     * wait for <tt>eventHandle</tt> to be signaled and in whose associated
-     * thread the method is invoked
-     */
-    private void runInEventHandleCmd(Runnable eventHandleCmd)
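+    /**
+     * Runs/executes in {@link #processThread} and repeatedly feeds the
+     * capture and render audio into the voice capture DMO and retrieves its
+     * output while this <tt>SourceStream</tt> is connected and started.
+     *
+     * @param processThread the <tt>Thread</tt> which is executing the method
+     * and which is expected to be equal to {@link #processThread}
+     */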
+    private void runInProcessThread(Thread processThread)
     {
         try
         {
@@ -738,43 +1064,30 @@ private void runInEventHandleCmd(Runnable eventHandleCmd)
 
             do
             {
-                long eventHandle;
                 BufferTransferHandler transferHandler;
 
                 synchronized (this)
                 {
-                    /*
-                     * Does this WASAPIStream still want eventHandleCmd to
-                     * execute?
-                     */
-                    if (!eventHandleCmd.equals(this.eventHandleCmd))
+                    if (!processThread.equals(this.processThread))
                         break;
-                    // Is this WASAPIStream still connected and started?
-                    if ((iAudioClient == 0)
-                            || (iAudioCaptureClient == 0)
-                            || !started)
+                    if ((capture == null) || (render == null) || !started)
                         break;
 
-                    /*
-                     * The value of eventHandle will remain valid while this
-                     * WASAPIStream wants eventHandleCmd to execute.
-                     */
-                    eventHandle = this.eventHandle;
-                    if (eventHandle == 0)
-                        throw new IllegalStateException("eventHandle");
-
-                    waitWhileBusy();
-                    busy = true;
+                    waitWhileCaptureIsBusy();
+                    waitWhileRenderIsBusy();
+                    captureIsBusy = true;
+                    renderIsBusy = true;
                 }
                 try
                 {
-                    transferHandler = readInEventHandleCmd();
+                    transferHandler = runInProcessThread();
                 }
                 finally
                 {
                     synchronized (this)
                     {
-                        busy = false;
+                        captureIsBusy = false;
+                        renderIsBusy = false;
                         notifyAll();
                     }
                 }
@@ -783,15 +1096,11 @@ private void runInEventHandleCmd(Runnable eventHandleCmd)
                 {
                     try
                     {
-                        Object o = this;
-                        PushBufferStream pushBufferStream
-                            = (PushBufferStream) o;
-
-                        transferHandler.transferData(pushBufferStream);
+                        transferHandler.transferData(this);
                         /*
                          * If the transferData implementation throws an
-                         * exception, we will WaitForSingleObject in order to
-                         * give the application time to recover.
+                         * exception, we will wait on a synchronization root in
+                         * order to give the application time to recover.
                          */
                         continue;
                     }
@@ -808,28 +1117,7 @@ private void runInEventHandleCmd(Runnable eventHandleCmd)
                     }
                 }
 
-                int wfso;
-
-                try
-                {
-                    wfso = WaitForSingleObject(eventHandle, devicePeriod);
-                }
-                catch (HResultException hre)
-                {
-                    /*
-                     * WaitForSingleObject will throw HResultException only in
-                     * the case of WAIT_FAILED. Event if it didn't, it would
-                     * still be a failure from our point of view.
-                     */
-                    wfso = WAIT_FAILED;
-                    logger.error("WaitForSingleObject", hre);
-                }
-                /*
-                 * If the function WaitForSingleObject fails once, it will very
-                 * likely fail forever. Bail out of a possible busy wait.
-                 */
-                if ((wfso == WAIT_FAILED) || (wfso == WAIT_ABANDONED))
-                    break;
+                yield();
             }
             while (true);
         }
@@ -837,9 +1125,9 @@ private void runInEventHandleCmd(Runnable eventHandleCmd)
         {
             synchronized (this)
             {
-                if (eventHandleCmd.equals(this.eventHandleCmd))
+                if (processThread.equals(this.processThread))
                 {
-                    this.eventHandleCmd = null;
+                    this.processThread = null;
                     notifyAll();
                 }
             }
@@ -877,59 +1165,30 @@ void setLocator(MediaLocator locator)
     public synchronized void start()
         throws IOException
     {
-        if (iAudioClient != 0)
+        if (capture != null)
         {
-            waitWhileBusy();
-            waitWhileEventHandleCmd();
-
-            try
-            {
-                IAudioClient_Start(iAudioClient);
-                started = true;
-
-                remainderLength = 0;
-                if ((eventHandle != 0) && (this.eventHandleCmd == null))
-                {
-                    Runnable eventHandleCmd
-                        = new Runnable()
-                        {
-                            public void run()
-                            {
-                                runInEventHandleCmd(this);
-                            }
-                        };
-                    boolean submitted = false;
-
-                    try
+            waitWhileCaptureIsBusy();
+            capture.start();
+        }
+        if (render != null)
+        {
+            waitWhileRenderIsBusy();
+            render.start();
+        }
+        started = true;
+        if ((capture != null) && (render != null) && (processThread == null))
+        {
+            processThread
+                = new Thread(WASAPIStream.class.getName() + ".processThread")
                     {
-                        if (eventHandleExecutor == null)
+                        @Override
+                        public void run()
                         {
-                            eventHandleExecutor
-                                = Executors.newSingleThreadExecutor();
+                            runInProcessThread(this);
                         }
-
-                        this.eventHandleCmd = eventHandleCmd;
-                        eventHandleExecutor.execute(eventHandleCmd);
-                        submitted = true;
-                    }
-                    finally
-                    {
-                        if (!submitted
-                                && eventHandleCmd.equals(this.eventHandleCmd))
-                            this.eventHandleCmd = null;
-                    }
-                }
-            }
-            catch (HResultException hre)
-            {
-                /*
-                 * If IAudioClient_Start is invoked multiple times without
-                 * intervening IAudioClient_Stop, it will likely return/throw
-                 * AUDCLNT_E_NOT_STOPPED.
-                 */
-                if (hre.getHResult() != AUDCLNT_E_NOT_STOPPED)
-                    throwNewIOException("IAudioClient_Start", hre);
-            }
+                    };
+            processThread.setDaemon(true);
+            processThread.start();
         }
     }
 
@@ -940,39 +1199,104 @@ public void run()
     public synchronized void stop()
         throws IOException
     {
-        if (iAudioClient != 0)
+        if (capture != null)
+        {
+            waitWhileCaptureIsBusy();
+            capture.stop();
+        }
+        if (render != null)
         {
-            waitWhileBusy();
+            waitWhileRenderIsBusy();
+            render.stop();
+        }
+        started = false;
 
-            try
-            {
-                /*
-                 * If IAudioClient_Stop is invoked multiple times without
-                 * intervening IAudioClient_Start, it is documented to return
-                 * S_FALSE.
-                 */
-                IAudioClient_Stop(iAudioClient);
-                started = false;
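+        // Wait for the processing thread to quit before resetting its state.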
+        waitWhileProcessThread();
+        processedLength = 0;
+    }
 
-                waitWhileEventHandleCmd();
-                remainderLength = 0;
-            }
-            catch (HResultException hre)
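+    /**
+     * Notifies this instance that audio data has been captured: if acoustic
+     * echo cancellation is in use, wakes up the processing thread; otherwise,
+     * hands the data over to the registered <tt>BufferTransferHandler</tt>.
+     */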
+    private void transferCaptureData()
+    {
+        if (dataSource.aec)
+        {
+            synchronized (this)
             {
-                throwNewIOException("IAudioClient_Stop", hre);
+                notifyAll();
             }
         }
+        else
+        {
+            BufferTransferHandler transferHandler = this.transferHandler;
+
+            if (transferHandler != null)
+                transferHandler.transferData(this);
+        }
+    }
+
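+    /**
+     * Notifies this instance that audio data related to the render endpoint is
+     * available by waking up any thread waiting on this instance (e.g. the
+     * processing thread).
+     */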
+    private void transferRenderData()
+    {
+        synchronized (this)
+        {
+            notifyAll();
+        }
+    }
+
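+    /**
+     * Releases the resources which have been acquired for the purposes of
+     * acoustic echo cancellation: the <tt>IMediaObject</tt> (DMO), its output
+     * data buffer and the associated <tt>IMediaBuffer</tt> instances.
+     */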
+    private void uninitializeAEC()
+    {
+        if (iMediaObject != 0)
+        {
+            IMediaObject_Release(iMediaObject);
+            iMediaObject = 0;
+        }
+        if (dmoOutputDataBuffer != 0)
+        {
+            CoTaskMemFree(dmoOutputDataBuffer);
+            dmoOutputDataBuffer = 0;
+        }
+        if (iMediaBuffer != 0)
+        {
+            IMediaBuffer_Release(iMediaBuffer);
+            iMediaBuffer = 0;
+        }
+        if (renderIMediaBuffer != 0)
+        {
+            IMediaBuffer_Release(renderIMediaBuffer);
+            renderIMediaBuffer = 0;
+        }
+        if (captureIMediaBuffer != 0)
+        {
+            IMediaBuffer_Release(captureIMediaBuffer);
+            captureIMediaBuffer = 0;
+        }
+    }
+
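+    /**
+     * Closes {@link #capture} if it is non-<tt>null</tt> and sets it to
+     * <tt>null</tt>.
+     */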
+    private void uninitializeCapture()
+    {
+        if (capture != null)
+        {
+            capture.close();
+            capture = null;
+        }
+    }
+
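+    /**
+     * Closes {@link #render} if it is non-<tt>null</tt> and sets it to
+     * <tt>null</tt>.
+     */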
+    private void uninitializeRender()
+    {
+        if (render != null)
+        {
+            render.close();
+            render = null;
+        }
     }
 
     /**
-     * Waits on this instance while the value of {@link #busy} is equal to
-     * <tt>true</tt>.
+     * Waits on this instance while the value of {@link #captureIsBusy} is equal
+     * to <tt>true</tt>.
      */
-    private synchronized void waitWhileBusy()
+    private synchronized void waitWhileCaptureIsBusy()
     {
         boolean interrupted = false;
 
-        while (busy)
+        while (captureIsBusy)
         {
             try
             {
@@ -987,18 +1311,21 @@ private synchronized void waitWhileBusy()
             Thread.currentThread().interrupt();
     }
 
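+    /**
+     * Waits on this instance while the value of {@link #processThread} is
+     * non-<tt>null</tt>.
+     */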
+    private synchronized void waitWhileProcessThread()
+    {
+        while (processThread != null)
+            yield();
+    }
+
     /**
-     * Waits on this instance while the value of {@link #eventHandleCmd} is
-     * non-<tt>null</tt>.
+     * Waits on this instance while the value of {@link #renderIsBusy} is equal
+     * to <tt>true</tt>.
      */
-    private synchronized void waitWhileEventHandleCmd()
+    private synchronized void waitWhileRenderIsBusy()
     {
-        if (eventHandle == 0)
-            throw new IllegalStateException("eventHandle");
-
         boolean interrupted = false;
 
-        while (eventHandleCmd != null)
+        while (renderIsBusy)
         {
             try
             {
@@ -1017,13 +1344,13 @@ private synchronized void waitWhileEventHandleCmd()
      * Causes the currently executing thread to temporarily pause and allow
      * other threads to execute.
      */
-    private void yield()
+    private synchronized void yield()
     {
         boolean interrupted = false;
 
         try
         {
-            Thread.sleep(devicePeriod);
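+            /*
+             * Wait on this instance (rather than sleep) so that
+             * transferCaptureData() and transferRenderData() can wake this
+             * thread up early via notifyAll().
+             */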
+            wait(devicePeriod);
         }
         catch (InterruptedException ie)
         {
diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java
index de3dd9efe71ef53f2b4715a7703163bdfb54d713..b795421fa082a62bc79343e51943f72bcc48bd16 100644
--- a/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java
+++ b/src/org/jitsi/impl/neomedia/jmfext/media/renderer/audio/WASAPIRenderer.java
@@ -321,6 +321,7 @@ public synchronized void open()
                     = audioSystem.initializeIAudioClient(
                             locator,
                             dataFlow,
+                            /* streamFlags */ 0,
                             eventHandle,
                             hnsBufferDuration,
                             formats);