From 37eaade2701a304e31e2a6fd6acebe89efd29ef3 Mon Sep 17 00:00:00 2001 From: Lyubomir Marinov <lyubomir.marinov@jitsi.org> Date: Wed, 10 Jul 2013 00:31:18 +0300 Subject: [PATCH] Adds javadocs. --- .../protocol/wasapi/AudioCaptureClient.java | 48 +++++++ .../media/protocol/wasapi/WASAPIStream.java | 117 ++++++++++++++++++ 2 files changed, 165 insertions(+) diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/AudioCaptureClient.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/AudioCaptureClient.java index 1778a82d..2c70ff59 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/AudioCaptureClient.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/AudioCaptureClient.java @@ -40,6 +40,19 @@ public class AudioCaptureClient private static final Logger logger = Logger.getLogger(AudioCaptureClient.class); + /** + * Invokes {@link WASAPI#IAudioCaptureClient_GetNextPacketSize(long)} on a + * specific <tt>IAudioCaptureClient</tt> and logs and swallows any + * <tt>HResultException</tt>. + * + * @param iAudioCaptureClient the <tt>IAudioCaptureClient</tt> of which to + * retrieve the number of frames in the next data packet + * @return the number of frames in the next data packet in the capture + * endpoint buffer associated with the specified + * <tt>iAudioCaptureClient</tt>. If the function/method + * <tt>IAudioCaptureClient_GetNextPacketSize</tt> throws an + * <tt>HResultException</tt>, return <tt>0</tt>. + */ private static int maybeIAudioCaptureClientGetNextPacketSize( long iAudioCaptureClient) { @@ -504,6 +517,28 @@ public int read(byte[] buffer, int offset, int length) return read(/* iMediaBuffer */ null, buffer, offset, length); } + /** + * Reads audio data from this instance into a specific <tt>IMediaBuffer</tt> + * or a specific <tt>byte</tt> array. + * + * @param iMediaBuffer the <tt>IMediaBuffer</tt> into which the audio data + * read from this instance is to be written. If <tt>null</tt>, the writing + * occurs on <tt>buffer</tt> starting at <tt>offset</tt>. If + * non-<tt>null</tt>, <tt>buffer</tt> and <tt>offset</tt> are ignored. + * @param buffer the <tt>byte</tt> array into which the audio data read from + * this instance is to be written if <tt>iMediaBuffer</tt> is <tt>null</tt>; + * otherwise, <tt>buffer</tt> and <tt>offset</tt> are ignored + * @param offset the offset in <tt>buffer</tt> at which the writing of the + * audio data is to start if <tt>iMediaBuffer</tt> is <tt>null</tt>; + * otherwise, <tt>buffer</tt> and <tt>offset</tt> are ignored + * @param length the maximum number of bytes to be read from this instance + * into the specified <tt>iMediaBuffer</tt> or the specified <tt>buffer</tt> + * starting at <tt>offset</tt> + * @return the number of bytes read from this instance into the specified + * <tt>iMediaBuffer</tt> or the specified <tt>buffer</tt> starting at + * <tt>offset</tt> + * @throws IOException if the reading of audio data from this instance fails + */ private int read( IMediaBuffer iMediaBuffer, byte[] buffer, int offset, int length) @@ -567,6 +602,19 @@ else if (cause instanceof IOException) return read; } + /** + * Reads audio data from this instance into a specific + * <tt>IMediaBuffer</tt>.
+ * + * @param iMediaBuffer the <tt>IMediaBuffer</tt> into which the audio data + * read from this instance is to be written + * @param length the maximum number of bytes to read from this instance and + * write into the specified <tt>iMediaBuffer</tt> + * @return the number of bytes read from this instance and written into the + * specified <tt>iMediaBuffer</tt> + * @throws IOException if the reading of audio data from this instance or + * the writing into the specified <tt>iMediaBuffer</tt> fails + */ public int read(IMediaBuffer iMediaBuffer, int length) throws IOException { diff --git a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java index af743d8f..58edcaad 100644 --- a/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java +++ b/src/org/jitsi/impl/neomedia/jmfext/media/protocol/wasapi/WASAPIStream.java @@ -383,6 +383,9 @@ static void throwNewIOException(String message, HResultException hre) throw ioe; } + /** + * The maximum capacity/number of bytes of {@link #iMediaBuffer}. + */ private int bufferMaxLength; /** @@ -391,10 +394,23 @@ static void throwNewIOException(String message, HResultException hre) */ private int bufferSize; + /** + * The abstraction which delivers audio samples from the capture endpoint + * device into this instance. + */ private AudioCaptureClient capture; + /** + * The maximum capacity/number of bytes of {@link #captureIMediaBuffer}. + */ private int captureBufferMaxLength; + /** + * The <tt>IMediaBuffer</tt> instance which delivers audio samples from the + * capture endpoint device i.e. {@link #capture} into the voice capture DMO + * that implements the acoustic echo cancellation (AEC) feature i.e. + * {@link #iMediaBuffer}. + */ private PtrMediaBuffer captureIMediaBuffer; /** @@ -405,6 +421,10 @@ static void throwNewIOException(String message, HResultException hre) */ private boolean captureIsBusy; + /** + * The number of nanoseconds of audio encoded in the <tt>outFormat</tt> of + * {@link #capture} represented by a <tt>byte</tt>. + */ private double captureNanosPerByte; /** @@ -413,6 +433,11 @@ static void throwNewIOException(String message, HResultException hre) */ private long devicePeriod; + /** + * The <tt>DMO_OUTPUT_DATA_BUFFER</tt> which provides {@link #iMediaBuffer} + * to + * {@link VoiceCaptureDSP#IMediaObject_ProcessOutput(long, int, int, long)}. + */ private long dmoOutputDataBuffer; /** @@ -420,6 +445,10 @@ static void throwNewIOException(String message, HResultException hre) */ private AudioFormat format; + /** + * The <tt>IMediaBuffer</tt> which receives the output of + * {@link #iMediaObject} i.e. the acoustic echo cancellation. + */ private long iMediaBuffer; /** @@ -434,20 +463,60 @@ static void throwNewIOException(String message, HResultException hre) */ private MediaLocator locator; + /** + * The buffer which stores the result/output of the processing performed by + * {@link #iMediaObject} i.e. the acoustic echo cancellation. + */ private byte[] processed; + /** + * The number of bytes in {@link #processed} which represent actual audio + * data/samples. + */ private int processedLength; + /** + * An array of <tt>byte</tt>s utilized by {@link #processInput(int, int)} + * and cached in order to reduce the effects of the garbage collector.
*/ private byte[] processInputBuffer; + /** + * The background thread which invokes + * {@link VoiceCaptureDSP#IMediaObject_ProcessInput(long, int, long, int, long, long)} + * and + * {@link VoiceCaptureDSP#IMediaObject_ProcessOutput(long, int, int, long)} + * i.e. delivers audio samples from the capture and render endpoint devices + * into the voice capture DMO, invokes the acoustic echo cancellation and + * stores the result/output in {@link #processed} so that it may later be + * read out of this instance via {@link #read(Buffer)}. + */ private Thread processThread; + /** + * The abstraction which delivers audio samples from the render endpoint + * device into this instance (for the purposes of acoustic echo + * cancellation). + */ private AudioCaptureClient render; + /** + * The maximum capacity/number of bytes of {@link #renderIMediaBuffer}. + */ private int renderBufferMaxLength; + /** + * The number of bytes of audio encoded in the <tt>outFormat</tt> of + * {@link #render} which represent a duration of one nanosecond. + */ private double renderBytesPerNano; + /** + * The <tt>IMediaBuffer</tt> instance which delivers audio samples from the + * render endpoint device i.e. {@link #render} into the voice capture DMO + * that implements the acoustic echo cancellation (AEC) feature i.e. + * {@link #iMediaBuffer}. + */ private PtrMediaBuffer renderIMediaBuffer; /** @@ -502,6 +571,16 @@ private long computeCaptureDuration(int length) return (long) (length * captureNanosPerByte); } + /** + * Computes/determines the number of bytes of a specific duration in + * nanoseconds of audio samples encoded in the <tt>outFormat</tt> of + * {@link #render}. + * + * @param duration the duration in nanoseconds of the audio samples of which + * the number of bytes is to be computed/determined + * @return the number of bytes of the specified duration in nanoseconds of + * audio samples encoded in the <tt>outFormat</tt> of <tt>render</tt> + */ private int computeRenderLength(long duration) { return (int) (duration * renderBytesPerNano); } @@ -748,6 +827,20 @@ private MediaLocator getLocator() return locator; } + /** + * Initializes the <tt>IMediaObject</tt> which is to perform acoustic echo + * cancellation. + * + * @param inFormat0 the <tt>AudioFormat</tt> of the media which will be + * delivered to the input stream representing the audio from the microphone + * @param inFormat1 the <tt>AudioFormat</tt> of the media which will be + * delivered to the input stream representing the audio from the speaker + * (line) + * @param outFormat the <tt>AudioFormat</tt> of the media which is to be + * output by the <tt>IMediaObject</tt>/acoustic echo cancellation + * @throws Exception if the initialization of the <tt>IMediaObject</tt> + * implementing acoustic echo cancellation fails + */ private void initializeAEC( AudioFormat inFormat0, AudioFormat inFormat1, AudioFormat outFormat) throws Exception { @@ -977,6 +1070,17 @@ private void initializeAEC( } } + /** + * Initializes the delivery of audio data/samples from a capture endpoint + * device identified by a specific <tt>MediaLocator</tt> into this instance.
+ * + * @param locator the <tt>MediaLocator</tt> identifying the capture endpoint + * device from which this instance is to read + * @param format the <tt>AudioFormat</tt> of the media to be read from the + * specified capture endpoint device + * @throws Exception if the initialization of the delivery of audio samples + * from the specified capture endpoint into this instance fails + */ private void initializeCapture(MediaLocator locator, AudioFormat format) throws Exception { @@ -1006,6 +1110,19 @@ public void transferData(PushBufferStream stream) devicePeriod = capture.devicePeriod; } + /** + * Initializes the delivery of audio data/samples from a render endpoint + * device identified by a specific <tt>MediaLocator</tt> into this instance + * for the purposes of acoustic echo cancellation (AEC). + * + * @param locator the <tt>MediaLocator</tt> identifying the render endpoint + * device from which this instance is to read + * @param format the <tt>AudioFormat</tt> of the media to be read from the + * specified render endpoint device + * @throws Exception if the initialization of the delivery of audio samples + * from the specified render endpoint into this instance for the purposes of + * acoustic echo cancellation (AEC) fails + */ private void initializeRender(final MediaLocator locator, AudioFormat format) throws Exception { -- GitLab
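Note on the conversion factors documented in the patch: captureNanosPerByte, renderBytesPerNano, computeCaptureDuration(int) and computeRenderLength(long) all convert between a byte count and a duration in nanoseconds of PCM audio. The patch documents the fields but does not show how the factors are derived, so the sketch below is illustrative only: it assumes the usual PCM relationship bytesPerSecond = sampleRate * (sampleSizeInBits / 8) * channels, and the class and method names (AudioDurationMath, nanosPerByte, bytesPerNano) are hypothetical rather than part of the patched files.

import javax.media.format.AudioFormat;

/**
 * Illustrative sketch (not part of the patch): derives per-byte/per-nanosecond
 * conversion factors, analogous to captureNanosPerByte and renderBytesPerNano,
 * from a PCM javax.media.format.AudioFormat under the assumption
 * bytesPerSecond = sampleRate * (sampleSizeInBits / 8) * channels.
 */
public class AudioDurationMath
{
    /** Nanoseconds of audio represented by a single byte of format. */
    public static double nanosPerByte(AudioFormat format)
    {
        double bytesPerSecond
            = format.getSampleRate()
                * (format.getSampleSizeInBits() / 8)
                * format.getChannels();

        return 1000000000d / bytesPerSecond;
    }

    /** Bytes of audio which represent one nanosecond of format. */
    public static double bytesPerNano(AudioFormat format)
    {
        return 1d / nanosPerByte(format);
    }

    public static void main(String[] args)
    {
        // 16 kHz, 16-bit, mono, little-endian, signed linear PCM,
        // i.e. 32000 bytes per second.
        AudioFormat format
            = new AudioFormat(
                    AudioFormat.LINEAR,
                    16000,
                    16,
                    1,
                    AudioFormat.LITTLE_ENDIAN,
                    AudioFormat.SIGNED);

        // Analogous to computeCaptureDuration(int): 640 bytes (a 20 ms
        // packet at 32000 bytes/s) correspond to 20,000,000 ns.
        long durationNanos = (long) (640 * nanosPerByte(format));

        // Analogous to computeRenderLength(long): the same duration maps
        // back to 640 bytes.
        int renderLength = (int) (durationNanos * bytesPerNano(format));

        System.out.println(durationNanos + " ns <-> " + renderLength + " bytes");
    }
}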