Commit 34d9464e authored by Damian Minkov

Recompiled FFmpeg to include MJPEG and added support for MJPEG cameras on Windows.

parent ae76b616
Notes 2013-07-05:
- Latest versions used: vanilla FFmpeg 1.0,
  x264 snapshot-20120928-2245-stable, lame 3.99.5
- Currently compiled with tdm-gcc-4.7.1-2 and tdm64-gcc-4.7.1-3 (msys)
- When configuring ffmpeg for x64, got the error
  undefined reference to `init_xrpow_core_sse` for the liblame libs.
  Fixed by editing config.h and removing "#define HAVE_XMMINTRIN_H 1"
  (removing nasm may have the same effect).
- When configuring ffmpeg for x86, got:
  undefined reference to `has_MMX_nasm'
  undefined reference to `has_3DNow_nasm'
  ....
  Removing "--enable-nasm" from the lame configure fixed the error.
- Compiling with MinGW-w64 (4.8.1) for 64-bit hits the same problems as
  described above; after compiling, making a video call crashes the app with
  EXCEPTION_ACCESS_VIOLATION in msvcrt.dll memcmp.
1. lame
./configure \

@@ -63,6 +80,7 @@ Apply the following to libavcodec/Makefile
--disable-everything --disable-network \
--disable-ffmpeg --disable-ffplay --disable-ffprobe --disable-ffserver \
--enable-libmp3lame --enable-encoder=libmp3lame \
+--enable-decoder=mjpeg --enable-parser=mjpeg \
--enable-decoder=h263 --enable-encoder=h263p --enable-parser=h263 \
--enable-libx264 --enable-gpl \
--enable-decoder=h264 --enable-encoder=libx264 --enable-parser=h264 \
...
@@ -39,3 +39,4 @@ DEFINE_DSFORMAT_PIXELFORMAT(YUY2)
DEFINE_DSFORMAT_PIXELFORMAT(YV12)
DEFINE_DSFORMAT_PIXELFORMAT(YVU9)
DEFINE_DSFORMAT_PIXELFORMAT(YVYU)
+DEFINE_DSFORMAT_PIXELFORMAT(MJPG)
\ No newline at end of file
@@ -167,6 +167,14 @@ JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_direct
JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_directshow_DSFormat_YVYU
  (JNIEnv *, jclass);

+/*
+ * Class:     org_jitsi_impl_neomedia_jmfext_media_protocol_directshow_DSFormat
+ * Method:    MJPG
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_jitsi_impl_neomedia_jmfext_media_protocol_directshow_DSFormat_MJPG
+  (JNIEnv *, jclass);

#ifdef __cplusplus
}
#endif
...
@@ -62,6 +62,11 @@ public class DSFormat
     */
    public static final int YUY2;

+    /**
+     * The MJPEG constant.
+     */
+    public static final int MJPG;

    static
    {
        System.loadLibrary("jndirectshow");
@@ -75,6 +80,7 @@ public class DSFormat
        Y411 = Y411();
        Y41P = Y41P();
        I420 = I420();
+        MJPG = MJPG();
    }

    private static native int ARGB32();
@@ -117,6 +123,8 @@ public class DSFormat
    public static native int YVYU();

+    public static native int MJPG();

    /**
     * Video height.
     */
@@ -195,6 +203,7 @@ public String toString()
        s.append(", width ").append(width);
        if (height != Format.NOT_SPECIFIED)
            s.append(", height ").append(height);

        return s.toString();
    }
}
@@ -46,6 +46,8 @@ public class DataSource
        FFmpeg.PIX_FMT_ARGB,
        DSFormat.YUY2,
        FFmpeg.PIX_FMT_YUYV422,
+        DSFormat.MJPG,
+        FFmpeg.PIX_FMT_YUVJ422P,
        DSFormat.UYVY,
        FFmpeg.PIX_FMT_UYVY422,
        DSFormat.Y411,
...
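The entry added above pairs the DirectShow MJPG format with FFmpeg's PIX_FMT_YUVJ422P pixel format. A minimal sketch of how an interleaved (DirectShow format, FFmpeg pixel format) list like this one can be looked up; the helper class and the hard-coded values are illustrative only, since the real DSFormat/FFmpeg constants are resolved from native code at class-load time:

    public final class PixelFormatPairs
    {
        // Illustrative values; the real DSFormat.MJPG and FFmpeg.PIX_FMT_YUVJ422P
        // constants are initialized from the native libraries at runtime.
        static final int DS_MJPG = 0x47504A4D;       // 'MJPG' FOURCC (assumed)
        static final int PIX_FMT_YUVJ422P_STUB = 13; // placeholder value

        // Even indices hold DirectShow formats, odd indices the paired FFmpeg formats.
        static final int[] PAIRS = { DS_MJPG, PIX_FMT_YUVJ422P_STUB };

        // Returns the FFmpeg pixel format paired with dsFormat, or -1 if unknown.
        static int ffmpegPixFmt(int dsFormat)
        {
            for (int i = 0; i < PAIRS.length; i += 2)
                if (PAIRS[i] == dsFormat)
                    return PAIRS[i + 1];
            return -1;
        }

        public static void main(String[] args)
        {
            System.out.println(ffmpegPixFmt(DS_MJPG)); // prints 13
        }
    }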
@@ -13,6 +13,7 @@
import javax.media.control.*;
import javax.media.protocol.*;

+import org.jitsi.impl.neomedia.codec.*;
import org.jitsi.impl.neomedia.codec.video.*;
import org.jitsi.impl.neomedia.jmfext.media.protocol.*;
import org.jitsi.util.*;
@@ -77,7 +78,7 @@ static boolean isSupportedFormat(Format format)
    }

    /**
-     * The indicator which determines whether {@link #grabber}
+     * The indicator which determines whether {@link #delegate}
     * automatically drops late frames. If <tt>false</tt>, we have to drop them
     * ourselves because DirectShow will buffer them all and the video will
     * be late.
@@ -86,7 +87,7 @@ static boolean isSupportedFormat(Format format)
    /**
     * The pool of <tt>ByteBuffer</tt>s this instances is using to transfer the
-     * media data captured by {@link #grabber} out of this instance
+     * media data captured by {@link #delegate} out of this instance
     * through the <tt>Buffer</tt>s specified in its {@link #read(Buffer)}.
     */
    private final ByteBufferPool byteBufferPool = new ByteBufferPool();
@@ -154,6 +155,22 @@ public void SampleCB(long source, long ptr, int length)
     */
    private Thread transferDataThread;

+    /**
+     * Native Video pixel format.
+     */
+    private int nativePixelFormat = 0;
+
+    /**
+     * The <tt>AVCodecContext</tt> of the MJPEG decoder.
+     */
+    private long avctx = 0;
+
+    /**
+     * The <tt>AVFrame</tt> which represents the media data decoded by the MJPEG
+     * decoder/{@link #avctx}.
+     */
+    private long avframe = 0;

    /**
     * Initializes a new <tt>DirectShowStream</tt> instance which is to have its
     * <tt>Format</tt>-related information abstracted by a specific
@@ -294,14 +311,53 @@ public void read(Buffer buffer) throws IOException
        }

        if(bufferFormat instanceof AVFrameFormat)
        {
-            if (AVFrame.read(buffer, bufferFormat, data) < 0)
-                data.free();
-            /*
-             * XXX For the sake of safety, make sure that this instance does
-             * not reference the data instance as soon as it is set on the
-             * AVFrame.
-             */
-            data = null;
+            if(nativePixelFormat == DSFormat.MJPG)
+            {
+                /* Initialize the FFmpeg MJPEG decoder if necessary. */
+                if(avctx == 0)
+                {
+                    long avcodec
+                        = FFmpeg.avcodec_find_decoder(FFmpeg.CODEC_ID_MJPEG);

+                    avctx = FFmpeg.avcodec_alloc_context3(avcodec);
+                    FFmpeg.avcodeccontext_set_workaround_bugs(avctx,
+                            FFmpeg.FF_BUG_AUTODETECT);

+                    if (FFmpeg.avcodec_open2(avctx, avcodec) < 0)
+                    {
+                        throw new RuntimeException("" +
+                            "Could not open codec CODEC_ID_MJPEG");
+                    }

+                    avframe = FFmpeg.avcodec_alloc_frame();
+                }

+                if(FFmpeg.avcodec_decode_video(
+                        avctx, avframe, data.getPtr(), data.getLength()) != -1)
+                {
+                    Object out = buffer.getData();

+                    if (!(out instanceof AVFrame)
+                            || (((AVFrame) out).getPtr() != avframe))
+                    {
+                        buffer.setData(new AVFrame(avframe));
+                    }
+                }

+                data.free();
+                data = null;
+            }
+            else
+            {
+                if (AVFrame.read(buffer, bufferFormat, data) < 0)
+                    data.free();
+                /*
+                 * XXX For the sake of safety, make sure that this instance does
+                 * not reference the data instance as soon as it is set on the
+                 * AVFrame.
+                 */
+                data = null;
+            }
        }
        else
        {
@@ -457,7 +513,7 @@ private void runInTransferDataThread()
    /**
     * Process received frames from DirectShow capture device
     *
-     * @param a pointer to the native <tt>DSCaptureDevice</tt> which is the
+     * @param source pointer to the native <tt>DSCaptureDevice</tt> which is the
     * source of the notification
     * @param ptr native pointer to data
     * @param length length of data
@@ -569,6 +625,7 @@ private void setDeviceFormat(Format format)
        else if (format instanceof AVFrameFormat)
        {
            AVFrameFormat avFrameFormat = (AVFrameFormat) format;
+            nativePixelFormat = avFrameFormat.getDeviceSystemPixFmt();
            Dimension size = avFrameFormat.getSize();

            if (size == null)
@@ -684,6 +741,19 @@ public void stop()
    {
        super.stop();

+        if(avctx != 0)
+        {
+            FFmpeg.avcodec_close(avctx);
+            FFmpeg.av_free(avctx);
+            avctx = 0;
+        }

+        if(avframe != 0)
+        {
+            FFmpeg.avcodec_free_frame(avframe);
+            avframe = 0;
+        }

        byteBufferPool.drain();
    }
}
...
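Taken together, the hunks above give the MJPEG path a lazily created FFmpeg decoder that lives for the duration of the stream: read() opens the decoder on the first MJPEG sample and decodes each subsequent sample into avframe, setDeviceFormat() records the device pixel format so read() can recognize MJPEG, and stop() releases the codec context and frame. A minimal sketch of that lifecycle using the same FFmpeg JNI wrapper calls as the commit; the MjpegDecoder class and its method boundaries are illustrative, since the commit inlines this logic directly in DirectShowStream:

    import org.jitsi.impl.neomedia.codec.FFmpeg;

    // Illustrative wrapper around the MJPEG decoder lifecycle shown above.
    final class MjpegDecoder
    {
        private long avctx = 0;   // AVCodecContext, created lazily
        private long avframe = 0; // AVFrame that receives the decoded picture

        // Decodes one MJPEG sample; returns the AVFrame pointer or 0 on failure.
        long decode(long dataPtr, int dataLength)
        {
            if (avctx == 0)
            {
                // Open the decoder on the first sample, as read() does above.
                long avcodec = FFmpeg.avcodec_find_decoder(FFmpeg.CODEC_ID_MJPEG);

                avctx = FFmpeg.avcodec_alloc_context3(avcodec);
                FFmpeg.avcodeccontext_set_workaround_bugs(avctx,
                        FFmpeg.FF_BUG_AUTODETECT);
                if (FFmpeg.avcodec_open2(avctx, avcodec) < 0)
                    throw new RuntimeException("Could not open codec CODEC_ID_MJPEG");

                avframe = FFmpeg.avcodec_alloc_frame();
            }

            return
                (FFmpeg.avcodec_decode_video(avctx, avframe, dataPtr, dataLength)
                        != -1)
                    ? avframe
                    : 0;
        }

        // Releases the decoder; mirrors the cleanup added to stop() above.
        void close()
        {
            if (avctx != 0)
            {
                FFmpeg.avcodec_close(avctx);
                FFmpeg.av_free(avctx);
                avctx = 0;
            }
            if (avframe != 0)
            {
                FFmpeg.avcodec_free_frame(avframe);
                avframe = 0;
            }
        }
    }

In the commit itself the decoded frame is wrapped in an AVFrame and set on the outgoing Buffer, and the captured data buffer is freed on both the MJPEG and the non-MJPEG paths.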