style: run gst-indent

Author: Joshua M. Doe
Date: 2012-06-14 02:34:06 -04:00
parent 0c4ab07572
commit 2296c487ae
13 changed files with 915 additions and 807 deletions
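
gst-indent is GStreamer's wrapper around GNU indent, and the hunks below are the mechanical result of running it over these plugins: statements past the line-length limit are re-wrapped with the continuation indented, "} else {" is joined onto one line, and pointer declarations are normalized to "Type *name". A minimal sketch of the resulting style follows; example_pull and its parameters are hypothetical and only illustrate the formatting, they are not part of this commit.

#include <gst/gst.h>

/* Hypothetical helper, for illustration only: shows how gst-indent breaks a
 * call that would run too long (split after the '=', continuations indented
 * a further level) and writes local pointer declarations as "Type *name". */
static GstFlowReturn
example_pull (GstPad * sinkpad, guint64 offset, guint length)
{
  GstBuffer *buffer = NULL;
  GstFlowReturn ret;

  ret =
      gst_pad_pull_range (sinkpad, offset, length,
      &buffer);
  if (ret != GST_FLOW_OK)
    return ret;

  gst_buffer_unref (buffer);
  return GST_FLOW_OK;
}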

View File

@@ -33,8 +33,10 @@ GST_DEBUG_CATEGORY (freeimageenc_debug);
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (freeimagedec_debug, "freeimagedec", 0, "FreeImage image decoder");
GST_DEBUG_CATEGORY_INIT (freeimageenc_debug, "freeimageenc", 0, "FreeImage image encoder");
GST_DEBUG_CATEGORY_INIT (freeimagedec_debug, "freeimagedec", 0,
"FreeImage image decoder");
GST_DEBUG_CATEGORY_INIT (freeimageenc_debug, "freeimageenc", 0,
"FreeImage image encoder");
if (!gst_freeimagedec_register_plugins (plugin))
return FALSE;

View File

@@ -59,9 +59,12 @@ static gboolean gst_freeimagedec_sink_setcaps (GstPad * pad, GstCaps * caps);
static void gst_freeimagedec_task (GstPad * pad);
static gboolean gst_freeimagedec_freeimage_init (GstFreeImageDec * freeimagedec);
static gboolean gst_freeimagedec_freeimage_clear (GstFreeImageDec * freeimagedec);
static GstFlowReturn gst_freeimagedec_caps_create_and_set (GstFreeImageDec * freeimagedec);
static gboolean gst_freeimagedec_freeimage_init (GstFreeImageDec *
freeimagedec);
static gboolean gst_freeimagedec_freeimage_clear (GstFreeImageDec *
freeimagedec);
static GstFlowReturn gst_freeimagedec_caps_create_and_set (GstFreeImageDec *
freeimagedec);
static GstFlowReturn gst_freeimagedec_push_dib (GstFreeImageDec * freeimagedec);
static GstElementClass *parent_class = NULL;
@@ -100,7 +103,8 @@ gst_freeimagedec_user_tell (fi_handle handle)
}
unsigned DLL_CALLCONV
gst_freeimagedec_user_read (void *data, unsigned elsize, unsigned elcount, fi_handle handle)
gst_freeimagedec_user_read (void *data, unsigned elsize, unsigned elcount,
fi_handle handle)
{
GstFreeImageDec *freeimagedec;
GstBuffer *buffer;
@@ -113,7 +117,9 @@ gst_freeimagedec_user_read (void *data, unsigned elsize, unsigned elcount, fi_ha
GST_LOG ("reading %" G_GSIZE_FORMAT " bytes of data at offset %d", length,
freeimagedec->offset);
ret = gst_pad_pull_range (freeimagedec->sinkpad, freeimagedec->offset, length, &buffer);
ret =
gst_pad_pull_range (freeimagedec->sinkpad, freeimagedec->offset, length,
&buffer);
if (ret != GST_FLOW_OK)
goto pause;
@@ -160,14 +166,14 @@ gst_freeimagedec_class_init (GstFreeImageDecClass * klass,
GstFreeImageDecClassData * class_data)
{
GstElementClass *gstelement_class;
GstCaps * caps;
GstCaps *caps;
GstPadTemplate *templ;
const gchar * mimetype;
const gchar * format;
const gchar * format_description;
const gchar * extensions;
gchar * description;
gchar * longname;
const gchar *mimetype;
const gchar *format;
const gchar *format_description;
const gchar *extensions;
gchar *description;
gchar *longname;
klass->fif = class_data->fif;
@@ -199,8 +205,7 @@ gst_freeimagedec_class_init (GstFreeImageDecClass * klass,
format_description, extensions);
gst_element_class_set_details_simple (gstelement_class, longname,
"Codec/Decoder/Image",
description,
"Joshua M. Doe <oss@nvl.army.mil>");
description, "Joshua M. Doe <oss@nvl.army.mil>");
g_free (longname);
g_free (description);
@@ -210,23 +215,28 @@ gst_freeimagedec_class_init (GstFreeImageDecClass * klass,
static void
gst_freeimagedec_init (GstFreeImageDec * freeimagedec)
{
GstElementClass * klass = GST_ELEMENT_GET_CLASS (freeimagedec);
GstElementClass *klass = GST_ELEMENT_GET_CLASS (freeimagedec);
freeimagedec->sinkpad = gst_pad_new_from_template (
gst_element_class_get_pad_template (klass, "sink"), "sink");
freeimagedec->sinkpad =
gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
"sink"), "sink");
gst_pad_set_activate_function (freeimagedec->sinkpad, gst_freeimagedec_sink_activate);
gst_pad_set_activate_function (freeimagedec->sinkpad,
gst_freeimagedec_sink_activate);
gst_pad_set_activatepull_function (freeimagedec->sinkpad,
gst_freeimagedec_sink_activate_pull);
gst_pad_set_activatepush_function (freeimagedec->sinkpad,
gst_freeimagedec_sink_activate_push);
gst_pad_set_chain_function (freeimagedec->sinkpad, gst_freeimagedec_chain);
gst_pad_set_event_function (freeimagedec->sinkpad, gst_freeimagedec_sink_event);
gst_pad_set_setcaps_function (freeimagedec->sinkpad, gst_freeimagedec_sink_setcaps);
gst_pad_set_event_function (freeimagedec->sinkpad,
gst_freeimagedec_sink_event);
gst_pad_set_setcaps_function (freeimagedec->sinkpad,
gst_freeimagedec_sink_setcaps);
gst_element_add_pad (GST_ELEMENT (freeimagedec), freeimagedec->sinkpad);
freeimagedec->srcpad = gst_pad_new_from_template (
gst_element_class_get_pad_template (klass, "src"), "src");
freeimagedec->srcpad =
gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
"src"), "src");
gst_pad_use_fixed_caps (freeimagedec->srcpad);
gst_element_add_pad (GST_ELEMENT (freeimagedec), freeimagedec->srcpad);
@@ -261,12 +271,12 @@ gst_freeimagedec_caps_create_and_set (GstFreeImageDec * freeimagedec)
if (caps == NULL) {
/* we have an unsupported type, we'll try converting to RGB/RGBA */
FIBITMAP * dib;
FIBITMAP *dib;
if (FreeImage_IsTransparent (freeimagedec->dib)) {
GST_DEBUG ("Image is non-standard format with transparency, convert to 32-bit RGB");
GST_DEBUG
("Image is non-standard format with transparency, convert to 32-bit RGB");
dib = FreeImage_ConvertTo32Bits (freeimagedec->dib);
}
else {
} else {
GST_DEBUG ("Image is non-standard format, convert to 24-bit RGB");
dib = FreeImage_ConvertTo24Bits (freeimagedec->dib);
}
@@ -330,9 +340,11 @@ gst_freeimagedec_task (GstPad * pad)
format = GST_FORMAT_BYTES;
gst_pad_query_peer_duration (pad, &format, &freeimagedec->length);
imagetype = FreeImage_GetFileTypeFromHandle (&freeimagedec->fiio, freeimagedec, 0);
freeimagedec->dib = FreeImage_LoadFromHandle (imagetype, &freeimagedec->fiio,
freeimagedec, 0);
imagetype =
FreeImage_GetFileTypeFromHandle (&freeimagedec->fiio, freeimagedec, 0);
freeimagedec->dib =
FreeImage_LoadFromHandle (imagetype, &freeimagedec->fiio, freeimagedec,
0);
ret = gst_freeimagedec_push_dib (freeimagedec);
if (ret != GST_FLOW_OK)
@@ -362,12 +374,13 @@ gst_freeimagedec_chain (GstPad * pad, GstBuffer * buffer)
{
GstFreeImageDec *freeimagedec;
GstFlowReturn ret = GST_FLOW_OK;
FIMEMORY * fimem;
FIMEMORY *fimem;
FREE_IMAGE_FORMAT format;
freeimagedec = GST_FREEIMAGEDEC (gst_pad_get_parent (pad));
GST_LOG_OBJECT (freeimagedec, "Got buffer, size=%u", GST_BUFFER_SIZE (buffer));
GST_LOG_OBJECT (freeimagedec, "Got buffer, size=%u",
GST_BUFFER_SIZE (buffer));
if (G_UNLIKELY (!freeimagedec->setup))
goto not_configured;
@@ -375,12 +388,14 @@ gst_freeimagedec_chain (GstPad * pad, GstBuffer * buffer)
/* Return if we have bad flow conditions */
ret = freeimagedec->ret;
if (G_UNLIKELY (ret != GST_FLOW_OK)) {
GST_WARNING_OBJECT (freeimagedec, "we have a pending return code of %d", ret);
GST_WARNING_OBJECT (freeimagedec, "we have a pending return code of %d",
ret);
goto beach;
}
/* Decode image to DIB */
fimem = FreeImage_OpenMemory (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
fimem =
FreeImage_OpenMemory (GST_BUFFER_DATA (buffer), GST_BUFFER_SIZE (buffer));
format = FreeImage_GetFileTypeFromMemory (fimem, 0);
GST_LOG ("FreeImage format is %d", format);
freeimagedec->dib = FreeImage_LoadFromMemory (format, fimem, 0);
@@ -401,7 +416,8 @@ gst_freeimagedec_chain (GstPad * pad, GstBuffer * buffer)
gst_freeimagedec_freeimage_init (freeimagedec);
} else {
GST_LOG_OBJECT (freeimagedec, "sending EOS");
freeimagedec->ret = gst_pad_push_event (freeimagedec->srcpad, gst_event_new_eos ());
freeimagedec->ret =
gst_pad_push_event (freeimagedec->srcpad, gst_event_new_eos ());
}
/* grab new return code */
@@ -474,10 +490,11 @@ gst_freeimagedec_sink_event (GstPad * pad, GstEvent * event)
gst_event_parse_new_segment_full (event, &update, &rate, &arate, &fmt,
&start, &stop, &position);
gst_segment_set_newsegment_full (&freeimagedec->segment, update, rate, arate,
fmt, start, stop, position);
gst_segment_set_newsegment_full (&freeimagedec->segment, update, rate,
arate, fmt, start, stop, position);
GST_LOG_OBJECT (freeimagedec, "NEWSEGMENT (%s)", gst_format_get_name (fmt));
GST_LOG_OBJECT (freeimagedec, "NEWSEGMENT (%s)",
gst_format_get_name (fmt));
if (fmt == GST_FORMAT_TIME) {
freeimagedec->need_newsegment = FALSE;
@@ -663,19 +680,18 @@ gst_freeimagedec_push_dib (GstFreeImageDec * freeimagedec)
/* flip image and copy to buffer */
for (i = 0; i < height; i++) {
memcpy (GST_BUFFER_DATA (buffer) + i * pitch,
FreeImage_GetBits (freeimagedec->dib) + (height - i - 1) * pitch, pitch);
FreeImage_GetBits (freeimagedec->dib) + (height - i - 1) * pitch,
pitch);
}
if (GST_BUFFER_TIMESTAMP_IS_VALID (freeimagedec->in_timestamp))
GST_BUFFER_TIMESTAMP (buffer) = freeimagedec->in_timestamp;
else
if (freeimagedec->fps_d != 0)
else if (freeimagedec->fps_d != 0)
GST_BUFFER_TIMESTAMP (buffer) =
(freeimagedec->in_offset * freeimagedec->fps_n) / freeimagedec->fps_d;
if (GST_BUFFER_TIMESTAMP_IS_VALID (freeimagedec->in_duration))
GST_BUFFER_DURATION (buffer) = freeimagedec->in_duration;
else
if (freeimagedec->fps_n != 0)
else if (freeimagedec->fps_n != 0)
GST_BUFFER_DURATION (buffer) = freeimagedec->fps_d / freeimagedec->fps_n;
GST_BUFFER_OFFSET (buffer) = freeimagedec->in_offset;
GST_BUFFER_OFFSET_END (buffer) = freeimagedec->in_offset;
@@ -743,11 +759,13 @@ gst_freeimagedec_register_plugins (GstPlugin * plugin)
gint i;
gint nloaded = 0;
GST_LOG ("FreeImage indicates there are %d formats supported", FreeImage_GetFIFCount());
GST_LOG ("FreeImage indicates there are %d formats supported",
FreeImage_GetFIFCount ());
for (i = 0; i < FreeImage_GetFIFCount(); i++) {
if (FreeImage_FIFSupportsReading ((FREE_IMAGE_FORMAT)i)) {
if (gst_freeimagedec_register_plugin (plugin, (FREE_IMAGE_FORMAT)i) == TRUE)
for (i = 0; i < FreeImage_GetFIFCount (); i++) {
if (FreeImage_FIFSupportsReading ((FREE_IMAGE_FORMAT) i)) {
if (gst_freeimagedec_register_plugin (plugin,
(FREE_IMAGE_FORMAT) i) == TRUE)
nloaded += 1;
}
}

View File

@@ -52,8 +52,10 @@ static gboolean gst_freeimageenc_sink_setcaps (GstPad * pad, GstCaps * caps);
static void gst_freeimageenc_task (GstPad * pad);
static gboolean gst_freeimageenc_freeimage_init (GstFreeImageEnc * freeimageenc);
static gboolean gst_freeimageenc_freeimage_clear (GstFreeImageEnc * freeimageenc);
static gboolean gst_freeimageenc_freeimage_init (GstFreeImageEnc *
freeimageenc);
static gboolean gst_freeimageenc_freeimage_clear (GstFreeImageEnc *
freeimageenc);
static GstFlowReturn gst_freeimageenc_push_dib (GstFreeImageEnc * freeimageenc);
static GstElementClass *parent_class = NULL;
@@ -96,14 +98,14 @@ gst_freeimageenc_class_init (GstFreeImageEncClass * klass,
GstFreeImageEncClassData * class_data)
{
GstElementClass *gstelement_class;
GstCaps * caps;
GstCaps *caps;
GstPadTemplate *templ;
const gchar * mimetype;
const gchar * format;
const gchar * format_description;
const gchar * extensions;
gchar * description;
gchar * longname;
const gchar *mimetype;
const gchar *format;
const gchar *format_description;
const gchar *extensions;
gchar *description;
gchar *longname;
klass->fif = class_data->fif;
@@ -135,8 +137,7 @@ gst_freeimageenc_class_init (GstFreeImageEncClass * klass,
format_description, extensions);
gst_element_class_set_details_simple (gstelement_class, longname,
"Codec/Encoder/Image",
description,
"Joshua M. Doe <oss@nvl.army.mil>");
description, "Joshua M. Doe <oss@nvl.army.mil>");
g_free (longname);
g_free (description);
}
@@ -144,16 +145,19 @@ gst_freeimageenc_class_init (GstFreeImageEncClass * klass,
static void
gst_freeimageenc_init (GstFreeImageEnc * freeimageenc)
{
GstElementClass * klass = GST_ELEMENT_GET_CLASS (freeimageenc);
GstElementClass *klass = GST_ELEMENT_GET_CLASS (freeimageenc);
freeimageenc->sinkpad = gst_pad_new_from_template (
gst_element_class_get_pad_template (klass, "sink"), "sink");
freeimageenc->sinkpad =
gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
"sink"), "sink");
gst_pad_set_chain_function (freeimageenc->sinkpad, gst_freeimageenc_chain);
gst_pad_set_setcaps_function (freeimageenc->sinkpad, gst_freeimageenc_sink_setcaps);
gst_pad_set_setcaps_function (freeimageenc->sinkpad,
gst_freeimageenc_sink_setcaps);
gst_element_add_pad (GST_ELEMENT (freeimageenc), freeimageenc->sinkpad);
freeimageenc->srcpad = gst_pad_new_from_template (
gst_element_class_get_pad_template (klass, "src"), "src");
freeimageenc->srcpad =
gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
"src"), "src");
gst_pad_use_fixed_caps (freeimageenc->srcpad);
gst_element_add_pad (GST_ELEMENT (freeimageenc), freeimageenc->srcpad);
@@ -174,23 +178,24 @@ gst_freeimageenc_init (GstFreeImageEnc * freeimageenc)
static GstFlowReturn
gst_freeimageenc_chain (GstPad * pad, GstBuffer * buffer)
{
{
GstFreeImageEnc *freeimageenc;
GstFreeImageEncClass *klass;
GstFlowReturn ret = GST_FLOW_OK;
GstBuffer * buffer_out;
GstBuffer *buffer_out;
FIMEMORY *hmem = NULL;
gint srcPitch, dstPitch;
guint8 * pSrc, * pDst;
guint8 *pSrc, *pDst;
gint width, height, bpp;
size_t y;
BYTE * mem_buffer;
BYTE *mem_buffer;
DWORD size_in_bytes;
freeimageenc = GST_FREEIMAGEENC (gst_pad_get_parent (pad));
klass = GST_FREEIMAGEENC_GET_CLASS (freeimageenc);
GST_LOG_OBJECT (freeimageenc, "Got buffer, size=%u", GST_BUFFER_SIZE (buffer));
GST_LOG_OBJECT (freeimageenc, "Got buffer, size=%u",
GST_BUFFER_SIZE (buffer));
/* convert raw buffer to FIBITMAP */
width = FreeImage_GetWidth (freeimageenc->dib);
@@ -201,10 +206,10 @@ gst_freeimageenc_chain (GstPad * pad, GstBuffer * buffer)
srcPitch = GST_ROUND_UP_4 (width * bpp / 8);
/* Copy data, invert scanlines and respect FreeImage pitch */
pDst = FreeImage_GetBits(freeimageenc->dib);
pDst = FreeImage_GetBits (freeimageenc->dib);
for (y = 0; y < height; ++y) {
pSrc = GST_BUFFER_DATA (buffer) + (height - y - 1) * srcPitch;
memcpy(pDst, pSrc, srcPitch);
memcpy (pDst, pSrc, srcPitch);
pDst += dstPitch;
}
@@ -219,7 +224,7 @@ gst_freeimageenc_chain (GstPad * pad, GstBuffer * buffer)
return GST_FLOW_ERROR;
}
if (!FreeImage_AcquireMemory(hmem, &mem_buffer, &size_in_bytes)) {
if (!FreeImage_AcquireMemory (hmem, &mem_buffer, &size_in_bytes)) {
GST_ERROR ("Failed to acquire encoded image");
FreeImage_CloseMemory (hmem);
gst_buffer_unref (buffer);
@@ -382,11 +387,13 @@ gst_freeimageenc_register_plugins (GstPlugin * plugin)
gint i;
gint nloaded = 0;
GST_LOG ("FreeImage indicates there are %d formats supported", FreeImage_GetFIFCount());
GST_LOG ("FreeImage indicates there are %d formats supported",
FreeImage_GetFIFCount ());
for (i = 0; i < FreeImage_GetFIFCount(); i++) {
if (FreeImage_FIFSupportsWriting ((FREE_IMAGE_FORMAT)i)) {
if (gst_freeimageenc_register_plugin (plugin, (FREE_IMAGE_FORMAT)i) == TRUE)
for (i = 0; i < FreeImage_GetFIFCount (); i++) {
if (FreeImage_FIFSupportsWriting ((FREE_IMAGE_FORMAT) i)) {
if (gst_freeimageenc_register_plugin (plugin,
(FREE_IMAGE_FORMAT) i) == TRUE)
nloaded += 1;
}
}

View File

@@ -8,7 +8,7 @@ gst_freeimageutils_caps_from_dib (FIBITMAP * dib, gint fps_n, gint fps_d)
FREE_IMAGE_TYPE image_type;
guint width, height, bpp;
gint video_format = -1;
GstCaps * caps = NULL;
GstCaps *caps = NULL;
gint endianness;
if (dib == NULL)
@@ -33,40 +33,33 @@ gst_freeimageutils_caps_from_dib (FIBITMAP * dib, gint fps_n, gint fps_d)
FreeImage_GetGreenMask (dib) == GST_VIDEO_BYTE2_MASK_24_INT &&
FreeImage_GetBlueMask (dib) == GST_VIDEO_BYTE3_MASK_24_INT) {
video_format = GST_VIDEO_FORMAT_RGB;
}
else if (FreeImage_GetRedMask (dib) == GST_VIDEO_BYTE3_MASK_24_INT &&
} else if (FreeImage_GetRedMask (dib) == GST_VIDEO_BYTE3_MASK_24_INT &&
FreeImage_GetGreenMask (dib) == GST_VIDEO_BYTE2_MASK_24_INT &&
FreeImage_GetBlueMask (dib) == GST_VIDEO_BYTE1_MASK_24_INT) {
video_format = GST_VIDEO_FORMAT_BGR;
}
else {
} else {
return NULL;
}
}
else if (bpp == 32) {
} else if (bpp == 32) {
if (FreeImage_GetRedMask (dib) == GST_VIDEO_BYTE1_MASK_32_INT &&
FreeImage_GetGreenMask (dib) == GST_VIDEO_BYTE2_MASK_32_INT &&
FreeImage_GetBlueMask (dib) == GST_VIDEO_BYTE3_MASK_32_INT) {
video_format = GST_VIDEO_FORMAT_RGBA;
}
else if (FreeImage_GetRedMask (dib) == GST_VIDEO_BYTE3_MASK_32_INT &&
} else if (FreeImage_GetRedMask (dib) == GST_VIDEO_BYTE3_MASK_32_INT &&
FreeImage_GetGreenMask (dib) == GST_VIDEO_BYTE2_MASK_32_INT &&
FreeImage_GetBlueMask (dib) == GST_VIDEO_BYTE1_MASK_32_INT) {
video_format = GST_VIDEO_FORMAT_BGRA;
}
else {
} else {
return NULL;
}
}
else {
} else {
return NULL;
}
/* We could not find a supported format */
if (video_format == -1) {
caps = NULL;
}
else {
} else {
caps = gst_video_format_new_caps (video_format, width, height,
fps_n, fps_d, 1, 1);
}
@@ -80,8 +73,7 @@ gst_freeimageutils_caps_from_dib (FIBITMAP * dib, gint fps_n, gint fps_d)
"bpp", G_TYPE_INT, 16,
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, endianness,
"framerate", GST_TYPE_FRACTION, fps_n, fps_d,
NULL);
"framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
break;
case FIT_INT16:
endianness = G_BYTE_ORDER;
@@ -93,8 +85,7 @@ gst_freeimageutils_caps_from_dib (FIBITMAP * dib, gint fps_n, gint fps_d)
"depth", G_TYPE_INT, 16,
"endianness", G_TYPE_INT, endianness,
"framerate", GST_TYPE_FRACTION, fps_n, fps_d,
"signed", G_TYPE_BOOLEAN, TRUE,
NULL);
"signed", G_TYPE_BOOLEAN, TRUE, NULL);
break;
default:
caps = NULL;
@@ -105,7 +96,7 @@ gst_freeimageutils_caps_from_dib (FIBITMAP * dib, gint fps_n, gint fps_d)
GstCaps *
gst_freeimageutils_caps_from_freeimage_format (FREE_IMAGE_FORMAT fif)
{
GstCaps * caps = gst_caps_new_empty ();
GstCaps *caps = gst_caps_new_empty ();
if (FreeImage_FIFSupportsExportType (fif, FIT_BITMAP)) {
if (FreeImage_FIFSupportsExportBPP (fif, 1) ||
@@ -114,8 +105,7 @@ gst_freeimageutils_caps_from_freeimage_format (FREE_IMAGE_FORMAT fif)
FreeImage_FIFSupportsExportBPP (fif, 24)) {
if (G_BYTE_ORDER == G_LITTLE_ENDIAN) {
gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_BGR));
}
else {
} else {
gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_RGB));
}
@@ -127,8 +117,7 @@ gst_freeimageutils_caps_from_freeimage_format (FREE_IMAGE_FORMAT fif)
if (FreeImage_FIFSupportsExportBPP (fif, 32)) {
if (G_BYTE_ORDER == G_LITTLE_ENDIAN) {
gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_BGRA));
}
else {
} else {
gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_RGBA));
}
@@ -136,11 +125,11 @@ gst_freeimageutils_caps_from_freeimage_format (FREE_IMAGE_FORMAT fif)
}
if (FreeImage_FIFSupportsExportType (fif, FIT_UINT16)) {
if (G_BYTE_ORDER == G_BIG_ENDIAN)
gst_caps_append (caps, gst_caps_from_string (
GST_VIDEO_CAPS_GRAY16 ("BIG_ENDIAN")));
gst_caps_append (caps,
gst_caps_from_string (GST_VIDEO_CAPS_GRAY16 ("BIG_ENDIAN")));
else
gst_caps_append (caps, gst_caps_from_string (
GST_VIDEO_CAPS_GRAY16 ("LITTLE_ENDIAN")));
gst_caps_append (caps,
gst_caps_from_string (GST_VIDEO_CAPS_GRAY16 ("LITTLE_ENDIAN")));
}
if (FreeImage_FIFSupportsExportType (fif, FIT_INT16)) {
}
@@ -168,8 +157,7 @@ gst_freeimageutils_caps_from_freeimage_format (FREE_IMAGE_FORMAT fif)
if (G_BYTE_ORDER == G_LITTLE_ENDIAN) {
gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_BGR));
gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_BGRA));
}
else {
} else {
gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_RGB));
gst_caps_append (caps, gst_caps_from_string (GST_VIDEO_CAPS_RGBA));
}
@@ -183,7 +171,7 @@ gst_freeimageutils_parse_caps (const GstCaps * caps, FREE_IMAGE_TYPE * type,
gint * width, gint * height, gint * bpp, guint32 * red_mask,
guint32 * green_mask, guint32 * blue_mask)
{
GstStructure * s;
GstStructure *s;
s = gst_caps_get_structure (caps, 0);
@@ -196,8 +184,7 @@ gst_freeimageutils_parse_caps (const GstCaps * caps, FREE_IMAGE_TYPE * type,
gst_structure_get_int (s, "red_mask", red_mask);
gst_structure_get_int (s, "green_mask", green_mask);
gst_structure_get_int (s, "blue_mask", blue_mask);
}
else if (g_strcmp0 (gst_structure_get_name (s), "video/x-raw-gray") == 0) {
} else if (g_strcmp0 (gst_structure_get_name (s), "video/x-raw-gray") == 0) {
gboolean is_signed;
if (!gst_structure_get_boolean (s, "signed", &is_signed))
is_signed = FALSE;

View File

@@ -16,7 +16,7 @@ plugin_init (GstPlugin * plugin)
GST_DEBUG ("plugin_init");
if ( !gst_element_register (plugin, "sfx3dnoise", GST_RANK_NONE,
if (!gst_element_register (plugin, "sfx3dnoise", GST_RANK_NONE,
GST_TYPE_SFX3DNOISE)) {
return FALSE;
}
@@ -28,9 +28,4 @@ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"sensorfx",
"Filters to simulate the effects of real sensors",
plugin_init,
VERSION,
GST_LICENSE,
PACKAGE_NAME,
GST_PACKAGE_ORIGIN
);
plugin_init, VERSION, GST_LICENSE, PACKAGE_NAME, GST_PACKAGE_ORIGIN);

View File

@@ -90,23 +90,28 @@ static void gst_sfx3dnoise_set_property (GObject * object, guint prop_id,
static void gst_sfx3dnoise_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstFlowReturn gst_sfx3dnoise_cv_transform (GstOpencvBaseTransform * filter,
GstBuffer * buf, IplImage * img, GstBuffer * outbuf, IplImage * outimg);
static GstFlowReturn gst_sfx3dnoise_cv_transform (GstOpencvBaseTransform *
filter, GstBuffer * buf, IplImage * img, GstBuffer * outbuf,
IplImage * outimg);
static gboolean gst_sfx3dnoise_cv_set_caps (GstOpencvBaseTransform * trans,
gint in_width, gint in_height, gint in_depth, gint in_channels,
gint out_width, gint out_height, gint out_depth, gint out_channels);
void gst_sfx3dnoise_create_fixed_noise (GstSfx3DNoise * filter, CvMat * arr);
void gst_sfx3dnoise_add_sigma_t (GstSfx3DNoise * filter, CvMat * arr, double sigma);
void gst_sfx3dnoise_add_sigma_tv (GstSfx3DNoise * filter, CvMat * arr, double sigma);
void gst_sfx3dnoise_add_sigma_th (GstSfx3DNoise * filter, CvMat * arr, double sigma);
void gst_sfx3dnoise_add_sigma_tvh (GstSfx3DNoise * filter, CvMat * arr, double sigma);
void gst_sfx3dnoise_add_sigma_t (GstSfx3DNoise * filter, CvMat * arr,
double sigma);
void gst_sfx3dnoise_add_sigma_tv (GstSfx3DNoise * filter, CvMat * arr,
double sigma);
void gst_sfx3dnoise_add_sigma_th (GstSfx3DNoise * filter, CvMat * arr,
double sigma);
void gst_sfx3dnoise_add_sigma_tvh (GstSfx3DNoise * filter, CvMat * arr,
double sigma);
/* Clean up */
static void
gst_sfx3dnoise_finalize (GObject * obj)
{
GstSfx3DNoise * filter = GST_SFX3DNOISE (obj);
GstSfx3DNoise *filter = GST_SFX3DNOISE (obj);
if (filter->fixed_noise)
cvReleaseMat (&filter->fixed_noise);
@@ -129,7 +134,7 @@ gst_sfx3dnoise_base_init (gpointer gclass)
GstPadTemplate *templ;
/* add sink and source pad templates */
caps = gst_caps_from_string (GST_VIDEO_CAPS_GRAY16("1234"));
caps = gst_caps_from_string (GST_VIDEO_CAPS_GRAY16 ("1234"));
templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (element_class, templ);
@@ -139,8 +144,7 @@ gst_sfx3dnoise_base_init (gpointer gclass)
gst_element_class_set_details_simple (element_class,
"sfx3dnoise",
"Transform/Effect/Video",
"Add 3D noise to video",
"Joshua M. Doe <oss@nvl.army.mil>");
"Add 3D noise to video", "Joshua M. Doe <oss@nvl.army.mil>");
}
static void
@@ -221,7 +225,7 @@ gst_sfx3dnoise_init (GstSfx3DNoise * filter, GstSfx3DNoiseClass * gclass)
filter->sigma_vh = filter->sigma_vh_old = DEFAULT_SIGMA_VH;
filter->sigma_tvh = DEFAULT_SIGMA_TVH;
filter->rng = cvRNG(-1);
filter->rng = cvRNG (-1);
filter->fixed_noise = NULL;
filter->intermediary = NULL;
@@ -239,25 +243,25 @@ gst_sfx3dnoise_set_property (GObject * object, guint prop_id,
switch (prop_id) {
case PROP_SIGMA_T:
filter->sigma_t= g_value_get_double (value);
filter->sigma_t = g_value_get_double (value);
break;
case PROP_SIGMA_V:
filter->sigma_v= g_value_get_double (value);
filter->sigma_v = g_value_get_double (value);
break;
case PROP_SIGMA_H:
filter->sigma_h= g_value_get_double (value);
filter->sigma_h = g_value_get_double (value);
break;
case PROP_SIGMA_TV:
filter->sigma_tv= g_value_get_double (value);
filter->sigma_tv = g_value_get_double (value);
break;
case PROP_SIGMA_TH:
filter->sigma_th= g_value_get_double (value);
filter->sigma_th = g_value_get_double (value);
break;
case PROP_SIGMA_VH:
filter->sigma_vh= g_value_get_double (value);
filter->sigma_vh = g_value_get_double (value);
break;
case PROP_SIGMA_TVH:
filter->sigma_tvh= g_value_get_double (value);
filter->sigma_tvh = g_value_get_double (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
@@ -320,20 +324,24 @@ gst_sfx3dnoise_cv_transform (GstOpencvBaseTransform * base, GstBuffer * buf,
filter->sigma_vh_old = filter->sigma_vh;
}
if (filter->sigma_h != 0.0 || filter->sigma_v != 0.0 || filter->sigma_vh != 0.0) {
if (filter->sigma_h != 0.0 || filter->sigma_v != 0.0
|| filter->sigma_vh != 0.0) {
cvAdd (filter->intermediary, filter->fixed_noise, filter->intermediary, 0);
}
if (filter->sigma_tvh > 0.0) {
gst_sfx3dnoise_add_sigma_tvh (filter, filter->intermediary, filter->sigma_tvh);
gst_sfx3dnoise_add_sigma_tvh (filter, filter->intermediary,
filter->sigma_tvh);
}
if (filter->sigma_tv > 0.0) {
gst_sfx3dnoise_add_sigma_tv (filter, filter->intermediary, filter->sigma_tv);
gst_sfx3dnoise_add_sigma_tv (filter, filter->intermediary,
filter->sigma_tv);
}
if (filter->sigma_th > 0.0) {
gst_sfx3dnoise_add_sigma_th (filter, filter->intermediary, filter->sigma_th);
gst_sfx3dnoise_add_sigma_th (filter, filter->intermediary,
filter->sigma_th);
}
if (filter->sigma_t > 0.0) {
@@ -400,12 +408,13 @@ gst_sfx3dnoise_create_fixed_noise (GstSfx3DNoise * filter, CvMat * arr)
/* Add sigma-vh or sigma-tvh noise, which adds random noise to every pixel */
void
gst_sfx3dnoise_add_sigma_tvh (GstSfx3DNoise * filter, CvMat * arr, double sigma) {
CvMat * noise = cvCreateMat (filter->height, filter->width, CV_32F);
gst_sfx3dnoise_add_sigma_tvh (GstSfx3DNoise * filter, CvMat * arr, double sigma)
{
CvMat *noise = cvCreateMat (filter->height, filter->width, CV_32F);
/* TODO move scaling of sigma somewhere else? */
cvRandArr (&filter->rng, noise, CV_RAND_NORMAL, cvScalarAll (0),
cvScalarAll (sigma*(G_MAXUINT16-1)));
cvScalarAll (sigma * (G_MAXUINT16 - 1)));
cvAdd (arr, noise, arr, 0);
@@ -415,24 +424,25 @@ gst_sfx3dnoise_add_sigma_tvh (GstSfx3DNoise * filter, CvMat * arr, double sigma)
/* Add sigma-v or sigma-tv noise, which adds random horizontal lines */
void
gst_sfx3dnoise_add_sigma_tv (GstSfx3DNoise * filter, CvMat * arr, double sigma) {
CvMat * noise = cvCreateMat (1, filter->height, CV_32F);
gfloat * data;
gfloat * noisedata;
gst_sfx3dnoise_add_sigma_tv (GstSfx3DNoise * filter, CvMat * arr, double sigma)
{
CvMat *noise = cvCreateMat (1, filter->height, CV_32F);
gfloat *data;
gfloat *noisedata;
int step, x, y;
/* TODO move scaling of sigma somewhere else? */
cvRandArr (&filter->rng, noise, CV_RAND_NORMAL, cvScalarAll (0),
cvScalarAll (sigma*(G_MAXUINT16-1)));
cvScalarAll (sigma * (G_MAXUINT16 - 1)));
cvGetRawData (arr, (uchar**)&data, &step, NULL);
cvGetRawData (noise, (uchar**)&noisedata, NULL, NULL);
cvGetRawData (arr, (uchar **) & data, &step, NULL);
cvGetRawData (noise, (uchar **) & noisedata, NULL, NULL);
step /= sizeof(gfloat);
step /= sizeof (gfloat);
for (y = 0; y < filter->height; y++) {
for (x = 0; x < filter->width; x++) {
data[y*step + x] += noisedata[y];
data[y * step + x] += noisedata[y];
}
}
@@ -442,24 +452,25 @@ gst_sfx3dnoise_add_sigma_tv (GstSfx3DNoise * filter, CvMat * arr, double sigma)
/* Add sigma-h and sigma-th noise, which adds random vertical lines */
void
gst_sfx3dnoise_add_sigma_th (GstSfx3DNoise * filter, CvMat * arr, double sigma) {
CvMat * noise = cvCreateMat (1, filter->width, CV_32F);
gfloat * data;
gfloat * noisedata;
gst_sfx3dnoise_add_sigma_th (GstSfx3DNoise * filter, CvMat * arr, double sigma)
{
CvMat *noise = cvCreateMat (1, filter->width, CV_32F);
gfloat *data;
gfloat *noisedata;
int step, x, y;
/* TODO move scaling of sigma somewhere else? */
cvRandArr (&filter->rng, noise, CV_RAND_NORMAL, cvScalarAll (0),
cvScalarAll (sigma*(G_MAXUINT16-1)));
cvScalarAll (sigma * (G_MAXUINT16 - 1)));
cvGetRawData (arr, (uchar**)&data, &step, NULL);
cvGetRawData (noise, (uchar**)&noisedata, NULL, NULL);
cvGetRawData (arr, (uchar **) & data, &step, NULL);
cvGetRawData (noise, (uchar **) & noisedata, NULL, NULL);
step /= sizeof(gfloat);
step /= sizeof (gfloat);
for (y = 0; y < filter->height; y++) {
for (x = 0; x < filter->width; x++) {
data[y*step + x] += noisedata[x];
data[y * step + x] += noisedata[x];
}
}
@@ -470,14 +481,14 @@ gst_sfx3dnoise_add_sigma_th (GstSfx3DNoise * filter, CvMat * arr, double sigma)
void
gst_sfx3dnoise_add_sigma_t (GstSfx3DNoise * filter, CvMat * arr, double sigma)
{
CvMat * noise = cvCreateMat (1, 1, CV_32F);
float * data;
CvMat *noise = cvCreateMat (1, 1, CV_32F);
float *data;
/* TODO move scaling of sigma somewhere else? */
cvRandArr (&filter->rng, noise, CV_RAND_NORMAL, cvScalarAll (0),
cvScalarAll (sigma*(G_MAXUINT16-1)));
cvScalarAll (sigma * (G_MAXUINT16 - 1)));
cvGetRawData (noise, (uchar**)&data, NULL, NULL);
cvGetRawData (noise, (uchar **) & data, NULL, NULL);
cvAddS (arr, cvScalarAll (*data), arr, 0);

View File

@@ -71,55 +71,51 @@ GST_ELEMENT_DETAILS ("Blurs video",
/* the capabilities of the inputs and outputs */
static GstStaticPadTemplate gst_sfxblur_src_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (
"video/x-raw-gray, " \
"bpp = (int) 16, " \
"depth = (int) 16, " \
"endianness = (int) BYTE_ORDER, " \
"width = " GST_VIDEO_SIZE_RANGE ", " \
"height = " GST_VIDEO_SIZE_RANGE ", " \
"framerate = " GST_VIDEO_FPS_RANGE ";" \
"video/x-raw-gray-float, " \
"bpp = (int) 32, " \
"depth = (int) 32, " \
"endianness = (int) BYTE_ORDER, " \
"width = " GST_VIDEO_SIZE_RANGE ", " \
"height = " GST_VIDEO_SIZE_RANGE ", " \
"framerate = " GST_VIDEO_FPS_RANGE
)
);
GST_STATIC_CAPS ("video/x-raw-gray, "
"bpp = (int) 16, "
"depth = (int) 16, "
"endianness = (int) BYTE_ORDER, "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", "
"framerate = " GST_VIDEO_FPS_RANGE ";"
"video/x-raw-gray-float, "
"bpp = (int) 32, "
"depth = (int) 32, "
"endianness = (int) BYTE_ORDER, "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", "
"framerate = " GST_VIDEO_FPS_RANGE)
);
static GstStaticPadTemplate gst_sfxblur_sink_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (
"video/x-raw-gray, " \
"bpp = (int) 16, " \
"depth = (int) 16, " \
"endianness = (int) BYTE_ORDER, " \
"width = " GST_VIDEO_SIZE_RANGE ", " \
"height = " GST_VIDEO_SIZE_RANGE ", " \
"framerate = " GST_VIDEO_FPS_RANGE ";" \
"video/x-raw-gray-float, " \
"bpp = (int) 32, " \
"depth = (int) 32, " \
"endianness = (int) BYTE_ORDER, " \
"width = " GST_VIDEO_SIZE_RANGE ", " \
"height = " GST_VIDEO_SIZE_RANGE ", " \
"framerate = " GST_VIDEO_FPS_RANGE
)
);
GST_STATIC_CAPS ("video/x-raw-gray, "
"bpp = (int) 16, "
"depth = (int) 16, "
"endianness = (int) BYTE_ORDER, "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", "
"framerate = " GST_VIDEO_FPS_RANGE ";"
"video/x-raw-gray-float, "
"bpp = (int) 32, "
"depth = (int) 32, "
"endianness = (int) BYTE_ORDER, "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", "
"framerate = " GST_VIDEO_FPS_RANGE)
);
/* GObject vmethod declarations */
static void gst_sfxblur_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_sfxblur_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_sfxblur_finalize (GObject *object);
static void gst_sfxblur_finalize (GObject * object);
/* GstBaseTransform vmethod declarations */
static gboolean gst_sfxblur_set_caps (GstBaseTransform * base,
@@ -128,7 +124,7 @@ static GstFlowReturn gst_sfxblur_transform_ip (GstBaseTransform * base,
GstBuffer * buf);
/* GstSensorFxBlur method declarations */
static void gst_sfxblur_reset(GstSensorFxBlur* filter);
static void gst_sfxblur_reset (GstSensorFxBlur * filter);
/* setup debug */
GST_DEBUG_CATEGORY_STATIC (sfxblur_debug);
@@ -138,7 +134,7 @@ GST_DEBUG_CATEGORY_STATIC (sfxblur_debug);
"sfxblur");
GST_BOILERPLATE_FULL (GstSensorFxBlur, gst_sfxblur, GstVideoFilter,
GST_TYPE_VIDEO_FILTER, DEBUG_INIT);\
GST_TYPE_VIDEO_FILTER, DEBUG_INIT);
/************************************************************************/
@@ -171,7 +167,7 @@ gst_sfxblur_base_init (gpointer klass)
*
*/
static void
gst_sfxblur_finalize (GObject *object)
gst_sfxblur_finalize (GObject * object)
{
GstSensorFxBlur *sfxblur = GST_SENSORFXBLUR (object);
@@ -204,8 +200,9 @@ gst_sfxblur_class_init (GstSensorFxBlurClass * object)
/* Install GObject properties */
g_object_class_install_property (obj_class, PROP_LOWIN,
g_param_spec_double ("lower-input-level", "Lower Input Level", "Lower Input Level",
0.0, 1.0, DEFAULT_PROP_LOWIN, G_PARAM_READWRITE));
g_param_spec_double ("lower-input-level", "Lower Input Level",
"Lower Input Level", 0.0, 1.0, DEFAULT_PROP_LOWIN,
G_PARAM_READWRITE));
/* Register GstBaseTransform vmethods */
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_sfxblur_set_caps);
@@ -220,8 +217,7 @@ gst_sfxblur_class_init (GstSensorFxBlurClass * object)
* Initialize the new element
*/
static void
gst_sfxblur_init (GstSensorFxBlur * sfxblur,
GstSensorFxBlurClass * g_class)
gst_sfxblur_init (GstSensorFxBlur * sfxblur, GstSensorFxBlurClass * g_class)
{
GST_DEBUG_OBJECT (sfxblur, "init class instance");
@@ -317,17 +313,17 @@ gst_sfxblur_set_caps (GstBaseTransform * base, GstCaps * incaps,
"height", G_TYPE_INT, &levels->height,
"bpp", G_TYPE_INT, &levels->bpp,
"depth", G_TYPE_INT, &levels->depth,
"endianness", G_TYPE_INT, &levels->endianness,
NULL);
"endianness", G_TYPE_INT, &levels->endianness, NULL);
if (!res)
return FALSE;
levels->stride = GST_ROUND_UP_4 (levels->width * levels->depth/8);
levels->stride = GST_ROUND_UP_4 (levels->width * levels->depth / 8);
return res;
}
GstFlowReturn gst_sfxblur_transform_ip( GstBaseTransform * base, GstBuffer * buf )
GstFlowReturn
gst_sfxblur_transform_ip (GstBaseTransform * base, GstBuffer * buf)
{
return GST_FLOW_OK;
@@ -344,7 +340,7 @@ GstFlowReturn gst_sfxblur_transform_ip( GstBaseTransform * base, GstBuffer * buf
* Reset instance variables and free memory
*/
static void
gst_sfxblur_reset(GstSensorFxBlur* sfxblur)
gst_sfxblur_reset (GstSensorFxBlur * sfxblur)
{
sfxblur->width = 0;
sfxblur->height = 0;

View File

@@ -18,7 +18,7 @@ plugin_init (GstPlugin * plugin)
GST_CAT_INFO (GST_CAT_DEFAULT, "registering videolevels element");
if ( !gst_element_register (plugin, "videolevels", GST_RANK_NONE,
if (!gst_element_register (plugin, "videolevels", GST_RANK_NONE,
GST_TYPE_VIDEOLEVELS)) {
return FALSE;
}
@@ -30,9 +30,4 @@ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"videoadjust",
"Filters that apply transform from 16-bit to 8-bit video",
plugin_init,
VERSION,
GST_LICENSE,
PACKAGE_NAME,
GST_PACKAGE_ORIGIN
);
plugin_init, VERSION, GST_LICENSE, PACKAGE_NAME, GST_PACKAGE_ORIGIN);

View File

@@ -60,7 +60,7 @@ enum
PROP_LOWOUT,
PROP_HIGHOUT,
PROP_AUTO,
PROP_INTERVAL/*,
PROP_INTERVAL /*,
PROP_NPIXSAT_LOW,
PROP_NPIXSAT_HIGH,
PROP_ROI_X,
@@ -91,30 +91,26 @@ static GstStaticPadTemplate gst_videolevels_src_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (
"video/x-raw-gray, " \
"bpp = (int) [1, 16], " \
"depth = (int) 16, " \
"endianness = (int) {LITTLE_ENDIAN, BIG_ENDIAN}, " \
"width = " GST_VIDEO_SIZE_RANGE ", " \
"height = " GST_VIDEO_SIZE_RANGE ", " \
"framerate = " GST_VIDEO_FPS_RANGE
)
);
GST_STATIC_CAPS ("video/x-raw-gray, "
"bpp = (int) [1, 16], "
"depth = (int) 16, "
"endianness = (int) {LITTLE_ENDIAN, BIG_ENDIAN}, "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", "
"framerate = " GST_VIDEO_FPS_RANGE)
);
static GstStaticPadTemplate gst_videolevels_sink_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (
"video/x-raw-gray, " \
"bpp = (int) 8, " \
"depth = (int) 8, " \
"width = " GST_VIDEO_SIZE_RANGE ", " \
"height = " GST_VIDEO_SIZE_RANGE ", " \
"framerate = " GST_VIDEO_FPS_RANGE
)
);
GST_STATIC_CAPS ("video/x-raw-gray, "
"bpp = (int) 8, "
"depth = (int) 8, "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", "
"framerate = " GST_VIDEO_FPS_RANGE)
);
#define GST_TYPE_VIDEOLEVELS_AUTO (gst_videolevels_auto_get_type())
static GType
@@ -140,10 +136,10 @@ static void gst_videolevels_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_videolevels_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_videolevels_finalize (GObject *object);
static void gst_videolevels_finalize (GObject * object);
/* GstBaseTransform vmethod declarations */
static GstCaps * gst_videolevels_transform_caps (GstBaseTransform * trans,
static GstCaps *gst_videolevels_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps);
static gboolean gst_videolevels_set_caps (GstBaseTransform * base,
GstCaps * incaps, GstCaps * outcaps);
@@ -155,12 +151,12 @@ static gboolean gst_videolevels_get_unit_size (GstBaseTransform * base,
GstCaps * caps, guint * size);
/* GstVideoLevels method declarations */
static void gst_videolevels_reset(GstVideoLevels* filter);
static void gst_videolevels_reset (GstVideoLevels * filter);
static void gst_videolevels_calculate_tables (GstVideoLevels * videolevels);
static gboolean gst_videolevels_do_levels (GstVideoLevels * videolevels,
gpointer indata, gpointer outdata);
static gboolean gst_videolevels_calculate_histogram (GstVideoLevels * videolevels,
guint16 * data);
static gboolean gst_videolevels_calculate_histogram (GstVideoLevels *
videolevels, guint16 * data);
static gboolean gst_videolevels_auto_adjust (GstVideoLevels * videolevels,
guint16 * data);
@@ -204,7 +200,7 @@ gst_videolevels_base_init (gpointer klass)
*
*/
static void
gst_videolevels_finalize (GObject *object)
gst_videolevels_finalize (GObject * object)
{
GstVideoLevels *videolevels = GST_VIDEOLEVELS (object);
@@ -237,31 +233,37 @@ gst_videolevels_class_init (GstVideoLevelsClass * object)
/* Install GObject properties */
g_object_class_install_property (obj_class, PROP_LOWIN,
g_param_spec_double ("lower-input-level", "Lower Input Level", "Lower Input Level",
0.0, 1.0, DEFAULT_PROP_LOWIN, G_PARAM_READWRITE));
g_param_spec_double ("lower-input-level", "Lower Input Level",
"Lower Input Level", 0.0, 1.0, DEFAULT_PROP_LOWIN,
G_PARAM_READWRITE));
g_object_class_install_property (obj_class, PROP_HIGHIN,
g_param_spec_double ("upper-input-level", "Upper Input Level", "Upper Input Level",
0.0, 1.0, DEFAULT_PROP_HIGHIN, G_PARAM_READWRITE));
g_param_spec_double ("upper-input-level", "Upper Input Level",
"Upper Input Level", 0.0, 1.0, DEFAULT_PROP_HIGHIN,
G_PARAM_READWRITE));
g_object_class_install_property (obj_class, PROP_LOWOUT,
g_param_spec_double ("lower-output-level", "Lower Output Level", "Lower Output Level",
0.0, 1.0, DEFAULT_PROP_LOWOUT, G_PARAM_READWRITE));
g_param_spec_double ("lower-output-level", "Lower Output Level",
"Lower Output Level", 0.0, 1.0, DEFAULT_PROP_LOWOUT,
G_PARAM_READWRITE));
g_object_class_install_property (obj_class, PROP_HIGHOUT,
g_param_spec_double ("upper-output-level", "Upper Output Level", "Upper Output Level",
0.0, 1.0, DEFAULT_PROP_HIGHOUT, G_PARAM_READWRITE));
g_param_spec_double ("upper-output-level", "Upper Output Level",
"Upper Output Level", 0.0, 1.0, DEFAULT_PROP_HIGHOUT,
G_PARAM_READWRITE));
g_object_class_install_property (obj_class, PROP_AUTO,
g_param_spec_enum ("auto", "Auto Adjust",
"Auto adjust contrast", GST_TYPE_VIDEOLEVELS_AUTO,
DEFAULT_PROP_AUTO, G_PARAM_READWRITE));
g_param_spec_enum ("auto", "Auto Adjust", "Auto adjust contrast",
GST_TYPE_VIDEOLEVELS_AUTO, DEFAULT_PROP_AUTO, G_PARAM_READWRITE));
g_object_class_install_property (obj_class, PROP_INTERVAL,
g_param_spec_uint64 ("interval", "Interval", "Interval of time between adjustments (in nanoseconds)",
1, G_MAXUINT64, DEFAULT_PROP_INTERVAL, G_PARAM_READWRITE));
g_param_spec_uint64 ("interval", "Interval",
"Interval of time between adjustments (in nanoseconds)", 1,
G_MAXUINT64, DEFAULT_PROP_INTERVAL, G_PARAM_READWRITE));
/* Register GstBaseTransform vmethods */
trans_class->transform_caps = GST_DEBUG_FUNCPTR (gst_videolevels_transform_caps);
trans_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_videolevels_transform_caps);
trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_videolevels_set_caps);
trans_class->transform = GST_DEBUG_FUNCPTR (gst_videolevels_transform);
trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_videolevels_transform_ip);
trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_videolevels_get_unit_size);
trans_class->get_unit_size =
GST_DEBUG_FUNCPTR (gst_videolevels_get_unit_size);
/* simply pass the data through if in/out caps are the same */
trans_class->passthrough_on_same_caps = TRUE;
@@ -317,7 +319,7 @@ gst_videolevels_set_property (GObject * object, guint prop_id,
videolevels->upper_output = g_value_get_double (value);
//gst_videolevels_calculate_tables (videolevels);
break;
case PROP_AUTO: {
case PROP_AUTO:{
videolevels->auto_adjust = g_value_get_enum (value);
break;
}
@@ -404,22 +406,17 @@ gst_videolevels_transform_caps (GstBaseTransform * base,
if (direction == GST_PAD_SINK) {
GST_DEBUG ("Pad direction is sink");
gst_caps_set_simple (newcaps,
"bpp", G_TYPE_INT, 8,
"depth", G_TYPE_INT, 8,
NULL);
"bpp", G_TYPE_INT, 8, "depth", G_TYPE_INT, 8, NULL);
structure = gst_caps_get_structure (newcaps, 0);
gst_structure_remove_field (structure, "endianness");
}
else {
GValue endianness = {0};
GValue ival = {0};
} else {
GValue endianness = { 0 };
GValue ival = { 0 };
GST_DEBUG ("Pad direction is src");
gst_caps_set_simple (newcaps,
"bpp", GST_TYPE_INT_RANGE, 1, 16,
"depth", G_TYPE_INT, 16,
NULL);
"bpp", GST_TYPE_INT_RANGE, 1, 16, "depth", G_TYPE_INT, 16, NULL);
structure = gst_caps_get_structure (newcaps, 0);
/* add BIG/LITTLE endianness to caps */
@@ -469,8 +466,7 @@ gst_videolevels_set_caps (GstBaseTransform * base, GstCaps * incaps,
"height", G_TYPE_INT, &levels->height,
"bpp", G_TYPE_INT, &levels->bpp_in,
"depth", G_TYPE_INT, &levels->depth_in,
"endianness", G_TYPE_INT, &levels->endianness_in,
NULL);
"endianness", G_TYPE_INT, &levels->endianness_in, NULL);
if (!res)
return FALSE;
@@ -478,13 +474,12 @@ gst_videolevels_set_caps (GstBaseTransform * base, GstCaps * incaps,
structure = gst_caps_get_structure (outcaps, 0);
res = gst_structure_get (structure,
"bpp", G_TYPE_INT, &levels->bpp_out,
"depth", G_TYPE_INT, &levels->depth_out,
NULL);
"depth", G_TYPE_INT, &levels->depth_out, NULL);
if (!res)
return FALSE;
levels->stride_in = GST_ROUND_UP_4 (levels->width * levels->depth_in/8);
levels->stride_out = GST_ROUND_UP_4 (levels->width * levels->depth_out/8);
levels->stride_in = GST_ROUND_UP_4 (levels->width * levels->depth_in / 8);
levels->stride_out = GST_ROUND_UP_4 (levels->width * levels->depth_out / 8);
//gst_videolevels_calculate_tables (levels);
@@ -517,7 +512,7 @@ gst_videolevels_get_unit_size (GstBaseTransform * base, GstCaps * caps,
if (gst_structure_get_int (structure, "width", &width) &&
gst_structure_get_int (structure, "height", &height) &&
gst_structure_get_int (structure, "depth", &depth)) {
guint stride = GST_ROUND_UP_4 (width*depth/8); /* need 4-byte alignment */
guint stride = GST_ROUND_UP_4 (width * depth / 8); /* need 4-byte alignment */
*size = stride * height;
GST_DEBUG ("Get unit size %dx%d, stride %u, %u bytes", width, height,
stride, *size);
@@ -562,13 +557,13 @@ gst_videolevels_transform (GstBaseTransform * base, GstBuffer * inbuf,
gst_videolevels_auto_adjust (videolevels, input);
videolevels->auto_adjust = 0;
g_object_notify (G_OBJECT (videolevels), "auto");
}
else if (videolevels->auto_adjust == 2) {
GST_DEBUG_OBJECT (videolevels, "Auto adjusting levels (every %d ns)", videolevels->interval);
elapsed = GST_CLOCK_DIFF (videolevels->last_auto_timestamp, inbuf->timestamp);
if (videolevels->last_auto_timestamp == GST_CLOCK_TIME_NONE ||
elapsed >= (GstClockTimeDiff)videolevels->interval ||
elapsed < 0) {
} else if (videolevels->auto_adjust == 2) {
GST_DEBUG_OBJECT (videolevels, "Auto adjusting levels (every %d ns)",
videolevels->interval);
elapsed =
GST_CLOCK_DIFF (videolevels->last_auto_timestamp, inbuf->timestamp);
if (videolevels->last_auto_timestamp == GST_CLOCK_TIME_NONE
|| elapsed >= (GstClockTimeDiff) videolevels->interval || elapsed < 0) {
gst_videolevels_auto_adjust (videolevels, input);
videolevels->last_auto_timestamp = GST_BUFFER_TIMESTAMP (inbuf);
}
@@ -584,7 +579,8 @@ gst_videolevels_transform (GstBaseTransform * base, GstBuffer * inbuf,
return GST_FLOW_ERROR;
}
GstFlowReturn gst_videolevels_transform_ip( GstBaseTransform * base, GstBuffer * buf )
GstFlowReturn
gst_videolevels_transform_ip (GstBaseTransform * base, GstBuffer * buf)
{
return GST_FLOW_OK;
@@ -600,7 +596,8 @@ GstFlowReturn gst_videolevels_transform_ip( GstBaseTransform * base, GstBuffer *
*
* Reset instance variables and free memory
*/
static void gst_videolevels_reset(GstVideoLevels* videolevels)
static void
gst_videolevels_reset (GstVideoLevels * videolevels)
{
videolevels->width = 0;
videolevels->height = 0;
@@ -700,7 +697,7 @@ gst_videolevels_calculate_tables (GstVideoLevels * videolevels)
/* TODO: use orc */
void
gst_videolevels_convert_uint16le_to_uint8(GstVideoLevels * videolevels,
gst_videolevels_convert_uint16le_to_uint8 (GstVideoLevels * videolevels,
guint16 * in, guint8 * out)
{
gint i;
@@ -721,7 +718,7 @@ gst_videolevels_convert_uint16le_to_uint8(GstVideoLevels * videolevels,
if (low_in == high_in)
m = 0.0;
else
m = (high_out - low_out) / (gdouble)(high_in - low_in);
m = (high_out - low_out) / (gdouble) (high_in - low_in);
b = low_out - m * low_in;
@@ -731,7 +728,7 @@ gst_videolevels_convert_uint16le_to_uint8(GstVideoLevels * videolevels,
/* TODO: use orc */
void
gst_videolevels_convert_uint16be_to_uint8(GstVideoLevels * videolevels,
gst_videolevels_convert_uint16be_to_uint8 (GstVideoLevels * videolevels,
guint16 * in, guint8 * out)
{
gint i;
@@ -751,7 +748,7 @@ gst_videolevels_convert_uint16be_to_uint8(GstVideoLevels * videolevels,
if (low_in == high_in)
m = 0.0;
else
m = (high_out - low_out) / (gdouble)(high_in - low_in);
m = (high_out - low_out) / (gdouble) (high_in - low_in);
b = low_out - m * low_in;
@@ -807,15 +804,16 @@ gst_videolevels_do_levels (GstVideoLevels * videolevels, gpointer indata,
* Returns: TRUE on success
*/
gboolean
gst_videolevels_calculate_histogram (GstVideoLevels * videolevels, guint16 * data)
gst_videolevels_calculate_histogram (GstVideoLevels * videolevels,
guint16 * data)
{
gint * hist;
gint *hist;
gint nbins = videolevels->nbins;
gint r;
gint c;
gfloat factor;
factor = nbins/(gfloat)(1 << videolevels->bpp_in);
factor = nbins / (gfloat) (1 << videolevels->bpp_in);
if (videolevels->histogram == NULL) {
GST_DEBUG ("First call, allocate memory for histogram (%d bins)", nbins);
@@ -825,7 +823,7 @@ gst_videolevels_calculate_histogram (GstVideoLevels * videolevels, guint16 * dat
hist = videolevels->histogram;
/* reset histogram */
memset (hist, 0, sizeof(gint)*nbins);
memset (hist, 0, sizeof (gint) * nbins);
GST_DEBUG ("Calculating histogram");
if (videolevels->endianness_in == G_BYTE_ORDER) {
@@ -833,14 +831,16 @@ gst_videolevels_calculate_histogram (GstVideoLevels * videolevels, guint16 * dat
for (c = 0; c < videolevels->width; c++) {
/* GST_DEBUG ("(%d, %d) = %d, hist[%d] = %d", r, c, data [c + r * videolevels->stride_in / 2], GINT_CLAMP (data [c + r * videolevels->stride_in / 2] * factor, 0, nbins - 1),
hist [GINT_CLAMP (data [c + r * videolevels->stride_in / 2] * factor, 0, nbins - 1)] + 1);*/
hist [GINT_CLAMP (data [c + r * videolevels->stride_in / 2] * factor, 0, nbins - 1)]++;
hist[GINT_CLAMP (data[c + r * videolevels->stride_in / 2] * factor, 0,
nbins - 1)]++;
}
}
}
else {
} else {
for (r = 0; r < videolevels->height; r++) {
for (c = 0; c < videolevels->width; c++) {
hist [GINT_CLAMP (GUINT16_FROM_BE (data [c + r * videolevels->stride_in / 2]) * factor, 0, nbins - 1)]++;
hist[GINT_CLAMP (GUINT16_FROM_BE (data[c +
r * videolevels->stride_in / 2]) * factor, 0,
nbins - 1)]++;
}
}
}
@@ -862,8 +862,7 @@ gst_videolevels_calculate_histogram (GstVideoLevels * videolevels, guint16 * dat
* Returns: TRUE on success
*/
gboolean
gst_videolevels_auto_adjust (GstVideoLevels * videolevels,
guint16 * data)
gst_videolevels_auto_adjust (GstVideoLevels * videolevels, guint16 * data)
{
guint npixsat;
guint sum;
@@ -877,25 +876,25 @@ gst_videolevels_auto_adjust (GstVideoLevels * videolevels,
size = videolevels->width * videolevels->height;
/* pixels to saturate on low end */
npixsat = (guint)(videolevels->lower_pix_sat * size);
npixsat = (guint) (videolevels->lower_pix_sat * size);
sum = 0;
for (i = 0; i < videolevels->nbins; i++) {
sum += videolevels->histogram[i];
if (sum > npixsat) {
videolevels->lower_input =
CLAMP (i / (gdouble)videolevels->nbins, 0.0, 1.0);
CLAMP (i / (gdouble) videolevels->nbins, 0.0, 1.0);
break;
}
}
/* pixels to saturate on high end */
npixsat = (guint)(videolevels->upper_pix_sat * size);
npixsat = (guint) (videolevels->upper_pix_sat * size);
sum = 0;
for (i = videolevels->nbins - 1; i >= 0; i--) {
sum += videolevels->histogram[i];
if (sum > npixsat) {
videolevels->upper_input =
CLAMP ((i + 1) / (gdouble)videolevels->nbins, 0.0, 1.0);
CLAMP ((i + 1) / (gdouble) videolevels->nbins, 0.0, 1.0);
break;
}
}

View File

@@ -93,15 +93,12 @@ enum
/* pad templates */
static GstStaticPadTemplate gst_euresys_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (
GST_VIDEO_CAPS_GRAY8 ";"
GST_STATIC_CAPS (GST_VIDEO_CAPS_GRAY8 ";"
GST_VIDEO_CAPS_RGB ";"
GST_VIDEO_CAPS_ARGB ";"
GST_VIDEO_CAPS_RGB_15 ";"
GST_VIDEO_CAPS_RGB_16)
GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_RGB_15 ";" GST_VIDEO_CAPS_RGB_16)
);
@@ -140,8 +137,10 @@ gst_euresys_connector_get_type (void)
{MC_Connector_X2, "X2", "X2 input"},
{MC_Connector_Y1, "Y1", "Y1 input"},
{MC_Connector_Y2, "Y2", "Y2 input"},
{MC_Connector_A, "A", "A input (Grablink Expert 2 DuoCam mode, connector A)"},
{MC_Connector_B, "B", "B input (Grablink Expert 2 DuoCam mode, connector B)"},
{MC_Connector_A, "A",
"A input (Grablink Expert 2 DuoCam mode, connector A)"},
{MC_Connector_B, "B",
"B input (Grablink Expert 2 DuoCam mode, connector B)"},
{MC_Connector_M, "M", "M input (Grablink in MonoCam mode)"},
{0, NULL, NULL},
};
@@ -176,8 +175,7 @@ gst_euresys_camera_get_type (void)
/* class initialization */
GST_BOILERPLATE (GstEuresys, gst_euresys, GstPushSrc,
GST_TYPE_PUSH_SRC);
GST_BOILERPLATE (GstEuresys, gst_euresys, GstPushSrc, GST_TYPE_PUSH_SRC);
static GstVideoFormat
@@ -199,13 +197,16 @@ gst_euresys_color_format_to_video_format (INT32 color_format)
return GST_VIDEO_FORMAT_GRAY16_LE;
case MC_ColorFormat_Y41P:
case MC_ColorFormat_YUV411:
return gst_video_format_from_fourcc (GST_MAKE_FOURCC ('Y', '4', '1', 'P'));
return gst_video_format_from_fourcc (GST_MAKE_FOURCC ('Y', '4', '1',
'P'));
case MC_ColorFormat_YUV422:
case MC_ColorFormat_Y42P:
return gst_video_format_from_fourcc (GST_MAKE_FOURCC ('Y', '4', '2', 'P'));
return gst_video_format_from_fourcc (GST_MAKE_FOURCC ('Y', '4', '2',
'P'));
case MC_ColorFormat_YUV444:
case MC_ColorFormat_IYU2:
return gst_video_format_from_fourcc (GST_MAKE_FOURCC ('I', 'Y', 'U', '2'));
return gst_video_format_from_fourcc (GST_MAKE_FOURCC ('I', 'Y', 'U',
'2'));
case MC_ColorFormat_YUV411PL:
case MC_ColorFormat_Y41B:
return GST_VIDEO_FORMAT_Y41B;
@@ -222,7 +223,8 @@ gst_euresys_color_format_to_video_format (INT32 color_format)
case MC_ColorFormat_YUV411PL_DEC:
case MC_ColorFormat_YUV9:
case MC_ColorFormat_YVU9:
return gst_video_format_from_fourcc (GST_MAKE_FOURCC ('Y', 'V', 'U', '9'));
return gst_video_format_from_fourcc (GST_MAKE_FOURCC ('Y', 'V', 'U',
'9'));
case MC_ColorFormat_RGB15:
return GST_VIDEO_FORMAT_RGB15;
case MC_ColorFormat_RGB16:
@@ -271,7 +273,8 @@ gst_euresys_class_init (GstEuresysClass * klass)
base_src_class->get_size = GST_DEBUG_FUNCPTR (gst_euresys_get_size);
base_src_class->is_seekable = GST_DEBUG_FUNCPTR (gst_euresys_is_seekable);
base_src_class->query = GST_DEBUG_FUNCPTR (gst_euresys_query);
base_src_class->check_get_range = GST_DEBUG_FUNCPTR (gst_euresys_check_get_range);
base_src_class->check_get_range =
GST_DEBUG_FUNCPTR (gst_euresys_check_get_range);
base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_euresys_fixate);
push_src_class->create = GST_DEBUG_FUNCPTR (gst_euresys_create);
@@ -295,8 +298,8 @@ gst_euresys_class_init (GstEuresysClass * klass)
static void
gst_euresys_init (GstEuresys * euresys, GstEuresysClass * euresys_class)
{
euresys->srcpad = gst_pad_new_from_static_template (&gst_euresys_src_template
, "src");
euresys->srcpad =
gst_pad_new_from_static_template (&gst_euresys_src_template, "src");
/* set source as live (no preroll) */
gst_base_src_set_live (GST_BASE_SRC (euresys), TRUE);
@@ -405,7 +408,9 @@ gst_euresys_get_caps (GstBaseSrc * src)
/* return template caps if we don't know the actual camera caps */
if (!euresys->caps) {
return gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (euresys)));
return
gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD
(euresys)));
}
return gst_caps_copy (euresys->caps);
@@ -436,9 +441,7 @@ gst_euresys_set_caps (GstBaseSrc * src, GstCaps * caps)
structure = gst_caps_get_structure (caps, 0);
ret = gst_structure_get (structure,
"width", G_TYPE_INT, &width,
"height", G_TYPE_INT, &height,
NULL);
"width", G_TYPE_INT, &width, "height", G_TYPE_INT, &height, NULL);
if (!ret) {
GST_DEBUG ("Failed to retrieve width and height");
@@ -476,7 +479,9 @@ gst_euresys_start (GstBaseSrc * src)
return FALSE;
}
status = McGetParamInt (MC_BOARD + euresys->boardIdx, MC_BoardType, &euresys->boardType);
status =
McGetParamInt (MC_BOARD + euresys->boardIdx, MC_BoardType,
&euresys->boardType);
if (status != MC_OK) {
GST_ELEMENT_ERROR (euresys, RESOURCE, SETTINGS,
(_("Failed to get board type.")), (NULL));
@@ -490,7 +495,7 @@ gst_euresys_start (GstBaseSrc * src)
/* McSetParamStr (MC_CONFIGURATION, MC_ErrorLog, "mc_error.log"); */
/* Create a channel */
status = McCreate(MC_CHANNEL, &euresys->hChannel);
status = McCreate (MC_CHANNEL, &euresys->hChannel);
if (status != MC_OK) {
GST_ELEMENT_ERROR (euresys, RESOURCE, FAILED,
(_("Failed to create channel.")), (NULL));
@@ -498,7 +503,7 @@ gst_euresys_start (GstBaseSrc * src)
}
/* Link the channel to a board */
status = McSetParamInt(euresys->hChannel, MC_DriverIndex, euresys->boardIdx);
status = McSetParamInt (euresys->hChannel, MC_DriverIndex, euresys->boardIdx);
if (status != MC_OK) {
GST_ELEMENT_ERROR (euresys, RESOURCE, SETTINGS,
(_("Failed to link channel to board.")), (NULL));
@@ -508,7 +513,7 @@ gst_euresys_start (GstBaseSrc * src)
}
/* Select the video connector */
status = McSetParamInt(euresys->hChannel, MC_Connector, euresys->connector);
status = McSetParamInt (euresys->hChannel, MC_Connector, euresys->connector);
if (status != MC_OK) {
GST_ELEMENT_ERROR (euresys, RESOURCE, SETTINGS,
(_("Failed to set connector to channel.")), (NULL));
@@ -518,7 +523,7 @@ gst_euresys_start (GstBaseSrc * src)
}
/* Select the video signal type */
status = McSetParamInt(euresys->hChannel, MC_Camera, euresys->cameraType);
status = McSetParamInt (euresys->hChannel, MC_Camera, euresys->cameraType);
if (status != MC_OK) {
GST_ELEMENT_ERROR (euresys, RESOURCE, SETTINGS,
(_("Failed to set camera type = %d."), euresys->cameraType), (NULL));
@@ -528,7 +533,7 @@ gst_euresys_start (GstBaseSrc * src)
}
/* Set the color format */
status = McSetParamInt(euresys->hChannel, MC_ColorFormat, MC_ColorFormat_Y8);
status = McSetParamInt (euresys->hChannel, MC_ColorFormat, MC_ColorFormat_Y8);
if (status != MC_OK) {
GST_ELEMENT_ERROR (euresys, RESOURCE, SETTINGS,
(_("Failed to set color format = %d."), MC_ColorFormat_Y8), (NULL));
@@ -548,8 +553,12 @@ gst_euresys_start (GstBaseSrc * src)
}
/* Enable signals */
status = McSetParamInt(euresys->hChannel, MC_SignalEnable + MC_SIG_SURFACE_PROCESSING, MC_SignalEnable_ON);
status |= McSetParamInt(euresys->hChannel, MC_SignalEnable + MC_SIG_ACQUISITION_FAILURE, MC_SignalEnable_ON);
status =
McSetParamInt (euresys->hChannel,
MC_SignalEnable + MC_SIG_SURFACE_PROCESSING, MC_SignalEnable_ON);
status |=
McSetParamInt (euresys->hChannel,
MC_SignalEnable + MC_SIG_ACQUISITION_FAILURE, MC_SignalEnable_ON);
if (status != MC_OK) {
GST_ELEMENT_ERROR (euresys, RESOURCE, SETTINGS,
(_("Failed to enable signals.")), (NULL));
@@ -581,7 +590,8 @@ gst_euresys_start (GstBaseSrc * src)
if (euresys->caps)
gst_caps_unref (euresys->caps);
euresys->caps = gst_video_format_new_caps (videoFormat, width, height, 30, 1, 1, 1);
euresys->caps =
gst_video_format_new_caps (videoFormat, width, height, 30, 1, 1, 1);
if (euresys->caps == NULL) {
GST_ELEMENT_ERROR (euresys, STREAM, TOO_LAZY,
@@ -606,11 +616,11 @@ gst_euresys_stop (GstBaseSrc * src)
McSetParamInt (euresys->hChannel, MC_ChannelState, MC_ChannelState_IDLE);
/* Close the MultiCam driver */
McCloseDriver();
McCloseDriver ();
/* Delete the channel */
if (euresys->hChannel)
McDelete(euresys->hChannel);
McDelete (euresys->hChannel);
euresys->hChannel = 0;
gst_caps_unref (euresys->caps);
@@ -695,7 +705,9 @@ gst_euresys_create (GstPushSrc * src, GstBuffer ** buf)
/* Start acquisition */
if (!euresys->acq_started) {
status = McSetParamInt(euresys->hChannel, MC_ChannelState, MC_ChannelState_ACTIVE);
status =
McSetParamInt (euresys->hChannel, MC_ChannelState,
MC_ChannelState_ACTIVE);
if (status != MC_OK) {
GST_ELEMENT_ERROR (euresys, RESOURCE, FAILED,
(_("Failed to set channel state to ACTIVE.")), (NULL));
@@ -710,33 +722,31 @@ gst_euresys_create (GstPushSrc * src, GstBuffer ** buf)
status = McWaitSignal (euresys->hChannel, MC_SIG_ANY, 5000, &siginfo);
if (status == MC_TIMEOUT) {
GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
(_("Timeout waiting for signal.")), (_("Timeout waiting for signal.")));
(_("Timeout waiting for signal.")),
(_("Timeout waiting for signal.")));
return GST_FLOW_ERROR;
}
else if (siginfo.Signal == MC_SIG_ACQUISITION_FAILURE) {
} else if (siginfo.Signal == MC_SIG_ACQUISITION_FAILURE) {
GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
(_("Acquisition failure due to timeout.")), (NULL));
return GST_FLOW_ERROR;
}
else if (siginfo.Signal == MC_SIG_SURFACE_PROCESSING) {
} else if (siginfo.Signal == MC_SIG_SURFACE_PROCESSING) {
break;
}
else {
} else {
continue;
}
}
/* Get pointer to image data and other info*/
/* Get pointer to image data and other info */
hSurface = (MCHANDLE) siginfo.SignalInfo;
/* "number of bytes actually written into the surface" */
status = McGetParamInt (hSurface, MC_FillCount, &newsize);
/* "Internal numbering of surface during acquisition sequence" (zero-based)*/
/* "Internal numbering of surface during acquisition sequence" (zero-based) */
status |= McGetParamInt (hSurface, MC_TimeCode, &timeCode);
/* "number of microseconds elapsed since midnight (00:00:00),
* January 1, 1970, coordinated universal time (UTC), according
* to the system clock when the surface is filled" */
status |= McGetParamInt64 (hSurface, MC_TimeStamp_us, &timeStamp);
status |= McGetParamPtr (hSurface, MC_SurfaceAddr, (PVOID*)&pImage);
status |= McGetParamPtr (hSurface, MC_SurfaceAddr, (PVOID *) & pImage);
if (G_UNLIKELY (status != MC_OK)) {
GST_ELEMENT_ERROR (euresys, RESOURCE, FAILED,
(_("Failed to read surface parameter.")), (NULL));
@@ -748,7 +758,8 @@ gst_euresys_create (GstPushSrc * src, GstBuffer ** buf)
dropped_frame_count = timeCode - (euresys->last_time_code + 1);
if (dropped_frame_count != 0) {
euresys->dropped_frame_count += dropped_frame_count;
GST_WARNING ("Dropped %d frames (%d total)", dropped_frame_count, euresys->dropped_frame_count);
GST_WARNING ("Dropped %d frames (%d total)", dropped_frame_count,
euresys->dropped_frame_count);
/* TODO: emit message here about dropped frames */
}
euresys->last_time_code = timeCode;
@@ -779,7 +790,7 @@ gst_euresys_create (GstPushSrc * src, GstBuffer ** buf)
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (gst_euresys_debug, "euresys", 0, \
GST_DEBUG_CATEGORY_INIT (gst_euresys_debug, "euresys", 0,
"debug category for euresys element");
gst_element_register (plugin, "euresys", GST_RANK_NONE,
gst_euresys_get_type ());
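A plugin_init like the one above is normally paired with GST_PLUGIN_DEFINE to form the complete plugin entry point. A minimal 0.10-style sketch; the description, version, license, package and origin strings below are illustrative placeholders, not values taken from these elements:
static gboolean
plugin_init (GstPlugin * plugin)
{
  /* each element provided by the plugin is registered here, as above */
  return gst_element_register (plugin, "euresys", GST_RANK_NONE,
      gst_euresys_get_type ());
}

GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "euresys",
    "Euresys MultiCam video source",
    plugin_init, "0.1.0", "LGPL", "gst-plugins", "http://example.org/")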


@@ -85,44 +85,69 @@ GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_ALWAYS,
GST_STATIC_CAPS (
//GST_AUDIO_INT_PAD_TEMPLATE_CAPS)
"audio/x-raw-int, " \
"rate = (int) 10000, " \
"channels = (int) 1, " \
"endianness = (int) BYTE_ORDER, " \
"width = (int) 16, " \
"depth = (int) 16, " \
"signed = (boolean) true")
"audio/x-raw-int, "
"rate = (int) 10000, "
"channels = (int) 1, "
"endianness = (int) BYTE_ORDER, "
"width = (int) 16, " "depth = (int) 16, " "signed = (boolean) true")
);
/* class initialization */
GST_BOILERPLATE (GstIOtechDaqX, gst_iotechdaqx, GstPushSrc,
GST_TYPE_PUSH_SRC);
GST_BOILERPLATE (GstIOtechDaqX, gst_iotechdaqx, GstPushSrc, GST_TYPE_PUSH_SRC);
char* GetProtocol(DaqProtocol protocol){
char *GetProtocol (DaqProtocol protocol)
{
/* This function is used to display the protocol name; since the
   protocol itself is stored as a number, this function associates
   the protocol number with a string.
 */
char* protocolName[64];
char *protocolName[64];
switch (protocol)
{
case DaqProtocolNone: *protocolName="DaqProtocolNone";break;
case DaqProtocol4: *protocolName="DaqProtocol4";break;
case DaqProtocol8: *protocolName="DaqProtocol8";break;
case DaqProtocolSMC666: *protocolName="DaqProtocolSMC666";break;
case DaqProtocolFastEPP: *protocolName="DaqProtocolFastEPP";break;
case DaqProtocolECP: *protocolName="DaqProtocolECP";break;
case DaqProtocol8BitEPP: *protocolName="DaqProtocol8BitEPP";break;
case DaqProtocolTCPIP: *protocolName="DaqProtocolTCPIP";break;
case DaqProtocolISA: *protocolName="DaqProtocolISA";break;
case DaqProtocolPcCard: *protocolName="DaqProtocolPcCard";break;
case DaqProtocolUSB: *protocolName="DaqProtocolUSB";break;
case DaqProtocolPCI: *protocolName="DaqProtocolPCI";break;
case DaqProtocolCPCI: *protocolName="DaqProtocolCPCI";break;
default: *protocolName="Unknown";break;
switch (protocol) {
case DaqProtocolNone:
*protocolName = "DaqProtocolNone";
break;
case DaqProtocol4:
*protocolName = "DaqProtocol4";
break;
case DaqProtocol8:
*protocolName = "DaqProtocol8";
break;
case DaqProtocolSMC666:
*protocolName = "DaqProtocolSMC666";
break;
case DaqProtocolFastEPP:
*protocolName = "DaqProtocolFastEPP";
break;
case DaqProtocolECP:
*protocolName = "DaqProtocolECP";
break;
case DaqProtocol8BitEPP:
*protocolName = "DaqProtocol8BitEPP";
break;
case DaqProtocolTCPIP:
*protocolName = "DaqProtocolTCPIP";
break;
case DaqProtocolISA:
*protocolName = "DaqProtocolISA";
break;
case DaqProtocolPcCard:
*protocolName = "DaqProtocolPcCard";
break;
case DaqProtocolUSB:
*protocolName = "DaqProtocolUSB";
break;
case DaqProtocolPCI:
*protocolName = "DaqProtocolPCI";
break;
case DaqProtocolCPCI:
*protocolName = "DaqProtocolCPCI";
break;
default:
*protocolName = "Unknown";
break;
}
return *protocolName;
}
@@ -138,8 +163,7 @@ gst_iotechdaqx_base_init (gpointer g_class)
gst_element_class_set_details_simple (element_class,
"IOtechDaqX Data Source", "Source/Audio",
"IOtechDaqX data source",
"Joshua Doe <oss@nvl.army.mil>");
"IOtechDaqX data source", "Joshua Doe <oss@nvl.army.mil>");
}
static void
@@ -162,7 +186,8 @@ gst_iotechdaqx_class_init (GstIOtechDaqXClass * klass)
base_src_class->get_size = GST_DEBUG_FUNCPTR (gst_iotechdaqx_get_size);
base_src_class->is_seekable = GST_DEBUG_FUNCPTR (gst_iotechdaqx_is_seekable);
base_src_class->query = GST_DEBUG_FUNCPTR (gst_iotechdaqx_query);
base_src_class->check_get_range = GST_DEBUG_FUNCPTR (gst_iotechdaqx_check_get_range);
base_src_class->check_get_range =
GST_DEBUG_FUNCPTR (gst_iotechdaqx_check_get_range);
base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_iotechdaqx_fixate);
push_src_class->create = GST_DEBUG_FUNCPTR (gst_iotechdaqx_create);
@@ -176,10 +201,11 @@ gst_iotechdaqx_class_init (GstIOtechDaqXClass * klass)
}
static void
gst_iotechdaqx_init (GstIOtechDaqX * iotechdaqx, GstIOtechDaqXClass * iotechdaqx_class)
gst_iotechdaqx_init (GstIOtechDaqX * iotechdaqx,
GstIOtechDaqXClass * iotechdaqx_class)
{
iotechdaqx->srcpad = gst_pad_new_from_static_template (&gst_iotechdaqx_src_template
, "src");
iotechdaqx->srcpad =
gst_pad_new_from_static_template (&gst_iotechdaqx_src_template, "src");
/* set source as live (no preroll) */
gst_base_src_set_live (GST_BASE_SRC (iotechdaqx), TRUE);
@@ -263,7 +289,9 @@ gst_iotechdaqx_get_caps (GstBaseSrc * src)
/* return template caps if we don't know the actual camera caps */
if (!iotechdaqx->caps) {
return gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (iotechdaqx)));
return
gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD
(iotechdaqx)));
}
return gst_caps_copy (iotechdaqx->caps);
@@ -295,8 +323,7 @@ gst_iotechdaqx_set_caps (GstBaseSrc * src, GstCaps * caps)
ret = gst_structure_get (structure,
"width", G_TYPE_INT, &iotechdaqx->width,
"rate", G_TYPE_INT, &iotechdaqx->rate,
"channels", G_TYPE_INT, &iotechdaqx->channels,
NULL);
"channels", G_TYPE_INT, &iotechdaqx->channels, NULL);
if (!ret) {
GST_DEBUG ("Failed to retrieve width and height");
@@ -328,18 +355,18 @@ gst_iotechdaqx_start (GstBaseSrc * src)
// Find out how many devices are installed and allocate memory for device list
daqGetDeviceCount(&devCount);
devList = (DaqDeviceListT*)malloc(sizeof(DaqDeviceListT)*devCount);
daqGetDeviceCount (&devCount);
devList = (DaqDeviceListT *) malloc (sizeof (DaqDeviceListT) * devCount);
GST_DEBUG ("Found %d devices", devCount);
// Get the names of all installed devices and the device count
daqGetDeviceList(devList, &devCount);
daqGetDeviceList (devList, &devCount);
deviceIndex = 0;
do {
// Get the device properties for each device
daqGetDeviceProperties(devList[deviceIndex].daqName, &devProps);
daqGetDeviceProperties (devList[deviceIndex].daqName, &devProps);
GST_DEBUG ("Device %i: %s", deviceIndex, devList[deviceIndex].daqName);
deviceIndex++;
@@ -348,11 +375,12 @@ gst_iotechdaqx_start (GstBaseSrc * src)
} while (deviceIndex < devCount);
// We are done with the device list
free(devList);
free (devList);
iotechdaqx->handle = daqOpen(devProps.daqName);
GST_DEBUG ("Connected to %s on LPT%d\n", devProps.daqName, devProps.basePortAddress+1);
GST_DEBUG ("Protocol: %s \n", GetProtocol(devProps.protocol));
iotechdaqx->handle = daqOpen (devProps.daqName);
GST_DEBUG ("Connected to %s on LPT%d\n", devProps.daqName,
devProps.basePortAddress + 1);
GST_DEBUG ("Protocol: %s \n", GetProtocol (devProps.protocol));
//FIXME check for errors!
@@ -444,18 +472,20 @@ gst_iotechdaqx_create (GstPushSrc * src, GstBuffer ** buf)
//GST_DEBUG ("create (handle=%d)", iotechdaqx->handle);
if (!iotechdaqx->opened) {
DWORD channels[1] = {1};
DaqAdcGain gains[1] = {DgainX1};
DWORD flags[1] = {DafAnalog|DafBipolar};
DWORD channels[1] = { 1 };
DaqAdcGain gains[1] = { DgainX1 };
DWORD flags[1] = { DafAnalog | DafBipolar };
GST_DEBUG ("Setting up acquisition: rate=%d", iotechdaqx->rate);
daqAdcSetScan (iotechdaqx->handle, channels, gains, flags, 1);
daqAdcSetAcq (iotechdaqx->handle, DaamInfinitePost, 0, 0);
daqSetTriggerEvent (iotechdaqx->handle, DatsSoftware, NULL, NULL, NULL, NULL, NULL, NULL, NULL, DaqStartEvent);
daqSetTriggerEvent (iotechdaqx->handle, DatsSoftware, NULL, NULL, NULL,
NULL, NULL, NULL, NULL, DaqStartEvent);
//daqSetTriggerEvent (iotechdaqx->handle, DatsSoftware, NULL, NULL, NULL, NULL, NULL, NULL, NULL, DaqStopEvent);
daqAdcSetFreq (iotechdaqx->handle, (float)iotechdaqx->rate);
daqAdcTransferSetBuffer (iotechdaqx->handle, NULL, 44100, DatmCycleOn|DatmDriverBuf);
daqAdcSetFreq (iotechdaqx->handle, (float) iotechdaqx->rate);
daqAdcTransferSetBuffer (iotechdaqx->handle, NULL, 44100,
DatmCycleOn | DatmDriverBuf);
daqAdcTransferStart (iotechdaqx->handle);
daqAdcArm (iotechdaqx->handle);
@@ -464,9 +494,11 @@ gst_iotechdaqx_create (GstPushSrc * src, GstBuffer ** buf)
iotechdaqx->opened = TRUE;
}
gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (GST_BASE_SRC (src)), 0, 2048*2, iotechdaqx->caps, buf);
gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (GST_BASE_SRC (src)), 0,
2048 * 2, iotechdaqx->caps, buf);
daqAdcTransferBufData (iotechdaqx->handle, GST_BUFFER_DATA(*buf), 2048, DabtmOldest|DabtmWait, &retCount);
daqAdcTransferBufData (iotechdaqx->handle, GST_BUFFER_DATA (*buf), 2048,
DabtmOldest | DabtmWait, &retCount);
//GST_DEBUG ("Asked for %d samples, got %d", 2048, retCount);
@@ -478,7 +510,7 @@ gst_iotechdaqx_create (GstPushSrc * src, GstBuffer ** buf)
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (gst_iotechdaqx_debug, "iotechdaqx", 0, \
GST_DEBUG_CATEGORY_INIT (gst_iotechdaqx_debug, "iotechdaqx", 0,
"debug category for iotechdaqx element");
gst_element_register (plugin, "iotechdaqx", GST_RANK_NONE,
gst_iotechdaqx_get_type ());
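The fixed caps above (10 kHz, mono, 16 bit) and the 2048-sample reads in _create imply 4096-byte buffers of about 204.8 ms each; a small standalone sketch of that arithmetic (values hard-coded from the pad template, not queried from hardware):
#include <gst/gst.h>

/* Fixed format from the pad template above: 10 kHz, mono, 16-bit. */
#define DAQ_RATE     10000
#define DAQ_SAMPLES  2048       /* samples per daqAdcTransferBufData call */

int
main (void)
{
  guint bytes = DAQ_SAMPLES * 2;        /* 16-bit mono: 4096 bytes, the 2048*2 above */
  GstClockTime duration =
      gst_util_uint64_scale_int (DAQ_SAMPLES, GST_SECOND, DAQ_RATE);

  g_print ("buffer: %u bytes, %" GST_TIME_FORMAT "\n",
      bytes, GST_TIME_ARGS (duration)); /* prints ~0:00:00.204800000 */
  return 0;
}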


@@ -68,8 +68,7 @@ enum
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (
"video/x-raw-gray, "
GST_STATIC_CAPS ("video/x-raw-gray, "
"bpp = (int) 8, "
"depth = (int) 8, "
"width = (int) [ 1, max ], "
@@ -81,10 +80,8 @@ static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
"depth = (int) 16, "
"endianness = (int) LITTLE_ENDIAN, "
"width = (int) [ 1, max ], "
"height = (int) [ 1, max ], "
"framerate = (fraction) [ 0, max ]"
)
);
"height = (int) [ 1, max ], " "framerate = (fraction) [ 0, max ]")
);
static void gst_niimaqsrc_init_interfaces (GType type);
@@ -107,13 +104,15 @@ static gboolean gst_niimaqsrc_start (GstBaseSrc * src);
static gboolean gst_niimaqsrc_stop (GstBaseSrc * src);
/* GstPushSrc virtual methods */
static GstFlowReturn gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer);
static GstFlowReturn gst_niimaqsrc_create (GstPushSrc * psrc,
GstBuffer ** buffer);
/* GstNiImaq methods */
static gboolean gst_niimaqsrc_parse_caps (const GstCaps * caps,
gint * width, gint * height, gint * depth, gint * bpp);
static gboolean gst_niimaqsrc_set_caps_color (GstStructure * gs, gint bpp, gint depth);
static gboolean gst_niimaqsrc_set_caps_color (GstStructure * gs, gint bpp,
gint depth);
static gboolean gst_niimaqsrc_set_caps_framesize (GstStructure * gs, gint width,
gint height);
@@ -121,7 +120,8 @@ static GstCaps *gst_niimaqsrc_get_cam_caps (GstNiImaqSrc * src);
static void gst_niimaqsrc_close_interface (GstNiImaqSrc * niimaqsrc);
uInt32
gst_niimaqsrc_report_imaq_error (uInt32 code) {
gst_niimaqsrc_report_imaq_error (uInt32 code)
{
static char imaq_error_string[256];
if (code) {
imgShowError (code, imaq_error_string);
@@ -132,13 +132,15 @@ gst_niimaqsrc_report_imaq_error (uInt32 code) {
typedef struct _GstNiImaqSrcFrameTime GstNiImaqSrcFrameTime;
struct _GstNiImaqSrcFrameTime {
struct _GstNiImaqSrcFrameTime
{
guint32 number;
GstClockTime time;
};
uInt32 gst_niimaqsrc_frame_start_callback (SESSION_ID sid, IMG_ERR err,
IMG_SIGNAL_TYPE signal_type, uInt32 signal_identifier, void * userdata)
uInt32
gst_niimaqsrc_frame_start_callback (SESSION_ID sid, IMG_ERR err,
IMG_SIGNAL_TYPE signal_type, uInt32 signal_identifier, void *userdata)
{
GstNiImaqSrc *niimaqsrc = GST_NIIMAQSRC (userdata);
GstClock *clock;
@@ -163,7 +165,7 @@ uInt32 gst_niimaqsrc_frame_start_callback (SESSION_ID sid, IMG_ERR err,
return 1;
}
static void _____BEGIN_FUNCTIONS_____();
static void _____BEGIN_FUNCTIONS_____ ();
/**
* gst_niimaqsrc_probe_get_properties:
@@ -180,7 +182,8 @@ gst_niimaqsrc_probe_get_properties (GstPropertyProbe * probe)
static GList *list = NULL;
if (!list) {
list = g_list_append (NULL, g_object_class_find_property (klass, "interface"));
list =
g_list_append (NULL, g_object_class_find_property (klass, "interface"));
}
return list;
@@ -219,7 +222,7 @@ gst_niimaqsrc_class_probe_interfaces (GstNiImaqSrcClass * klass, gboolean check)
guint32 iid;
guint32 nports;
guint32 port;
gchar * iname;
gchar *iname;
uInt32 rval;
/* get interface names until there are no more */
@@ -245,7 +248,7 @@ gst_niimaqsrc_class_probe_interfaces (GstNiImaqSrcClass * klass, gboolean check)
gst_niimaqsrc_report_imaq_error (rval);
/* iterate over all the available ports */
for (port=0; port < nports; port++) {
for (port = 0; port < nports; port++) {
/* if the there are multiple ports append the port number */
if (nports > 1)
iname = g_strdup_printf ("%s::%d", name, port);
@@ -459,17 +462,18 @@ gst_niimaqsrc_class_init (GstNiImaqSrcClass * klass)
g_object_class_install_property (G_OBJECT_CLASS (klass),
PROP_INTERFACE, g_param_spec_string ("interface",
"Interface",
"NI-IMAQ interface to open", DEFAULT_PROP_INTERFACE, G_PARAM_READWRITE));
"NI-IMAQ interface to open", DEFAULT_PROP_INTERFACE,
G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass),
PROP_TIMESTAMP_OFFSET, g_param_spec_int64 ("timestamp-offset",
"Timestamp offset",
"An offset added to timestamps set on buffers (in ns)", G_MININT64,
G_MAXINT64, DEFAULT_PROP_TIMESTAMP_OFFSET, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass),
PROP_BUFSIZE, g_param_spec_int ("buffer-size",
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BUFSIZE,
g_param_spec_int ("buffer-size",
"Number of frames in the IMAQ ringbuffer",
"The number of frames in the IMAQ ringbuffer", 1,
G_MAXINT, DEFAULT_PROP_BUFSIZE, G_PARAM_READWRITE));
"The number of frames in the IMAQ ringbuffer", 1, G_MAXINT,
DEFAULT_PROP_BUFSIZE, G_PARAM_READWRITE));
/* install GstBaseSrc vmethod implementations */
gstbasesrc_class->get_caps = gst_niimaqsrc_get_caps;
@@ -604,7 +608,9 @@ gst_niimaqsrc_get_caps (GstBaseSrc * bsrc)
/* return template caps if we don't know the actual camera caps */
if (!niimaqsrc->caps) {
return gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (niimaqsrc)));
return
gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD
(niimaqsrc)));
}
return gst_caps_copy (niimaqsrc->caps);
@@ -631,7 +637,8 @@ gst_niimaqsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
niimaqsrc->caps = gst_caps_copy (caps);
}
res = gst_niimaqsrc_parse_caps (niimaqsrc->caps, &width, &height, &depth, &bpp);
res =
gst_niimaqsrc_parse_caps (niimaqsrc->caps, &width, &height, &depth, &bpp);
if (res) {
/* looks ok here */
@@ -639,7 +646,7 @@ gst_niimaqsrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
niimaqsrc->height = height;
niimaqsrc->depth = depth;
niimaqsrc->bpp = bpp;
niimaqsrc->framesize = width * height * (depth/8);
niimaqsrc->framesize = width * height * (depth / 8);
}
return res;
@@ -675,7 +682,7 @@ gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer)
gpointer data;
GstFlowReturn res = GST_FLOW_OK;
guint i;
GstNiImaqSrcFrameTime * frametime;
GstNiImaqSrcFrameTime *frametime;
GstClockTime timestamp = GST_CLOCK_TIME_NONE;
GstClockTime timestamp2 = GST_CLOCK_TIME_NONE;
GstClockTime duration = GST_CLOCK_TIME_NONE;
@@ -694,8 +701,7 @@ gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer)
rval = imgSessionStartAcquisition (niimaqsrc->sid);
if (rval == 0) {
break;
}
else {
} else {
gst_niimaqsrc_report_imaq_error (rval);
GST_LOG_OBJECT (niimaqsrc, "camera is still off , wait 50ms and retry");
g_usleep (50000);
@@ -715,11 +721,12 @@ gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer)
niimaqsrc->session_started = TRUE;
}
data = g_malloc(niimaqsrc->framesize);
data = g_malloc (niimaqsrc->framesize);
GST_DEBUG_OBJECT(niimaqsrc, "Copying IMAQ buffer %d", niimaqsrc->cumbufnum);
GST_DEBUG_OBJECT (niimaqsrc, "Copying IMAQ buffer %d", niimaqsrc->cumbufnum);
rval = imgSessionCopyBufferByNumber (niimaqsrc->sid, niimaqsrc->cumbufnum, data,
rval =
imgSessionCopyBufferByNumber (niimaqsrc->sid, niimaqsrc->cumbufnum, data,
IMG_OVERWRITE_GET_OLDEST, &copied_number, &copied_index);
if (rval) {
gst_niimaqsrc_report_imaq_error (rval);
@@ -733,9 +740,9 @@ gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer)
/*clock = gst_element_get_clock (GST_ELEMENT (niimaqsrc));
timestamp2 =
GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (niimaqsrc)), gst_clock_get_time (clock));
gst_object_unref (clock);*/
gst_object_unref (clock); */
GST_DEBUG_OBJECT(niimaqsrc, "Creating buffer");
GST_DEBUG_OBJECT (niimaqsrc, "Creating buffer");
*buffer = gst_buffer_new ();
@@ -745,7 +752,7 @@ gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer)
GST_BUFFER_OFFSET (*buffer) = copied_number;
GST_BUFFER_OFFSET_END (*buffer) = copied_number;
GST_DEBUG_OBJECT(niimaqsrc, "Associating time with buffer");
GST_DEBUG_OBJECT (niimaqsrc, "Associating time with buffer");
/* search linked list for frame time */
g_mutex_lock (niimaqsrc->frametime_mutex);
@@ -753,7 +760,8 @@ gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer)
/* remove all old frametimes from the list */
frametime = niimaqsrc->timelist->data;
while (frametime->number < copied_number) {
niimaqsrc->timelist = g_slist_delete_link (niimaqsrc->timelist, niimaqsrc->timelist);
niimaqsrc->timelist =
g_slist_delete_link (niimaqsrc->timelist, niimaqsrc->timelist);
frametime = niimaqsrc->timelist->data;
}
@@ -761,9 +769,9 @@ gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer)
timestamp = frametime->time;
/* remove frame time as we no longer need it */
niimaqsrc->timelist = g_slist_delete_link (niimaqsrc->timelist, niimaqsrc->timelist);
}
else {
niimaqsrc->timelist =
g_slist_delete_link (niimaqsrc->timelist, niimaqsrc->timelist);
} else {
timestamp = GST_CLOCK_TIME_NONE;
}
}
@@ -776,23 +784,22 @@ gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer)
/*clock = gst_element_get_clock (GST_ELEMENT (niimaqsrc));
GST_BUFFER_TIMESTAMP (*buffer) =
GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (niimaqsrc)), gst_clock_get_time (clock));
gst_object_unref (clock);*/
}
else {
gst_object_unref (clock); */
} else {
timestamp =
GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (niimaqsrc)), timestamp);
GST_CLOCK_DIFF (gst_element_get_base_time (GST_ELEMENT (niimaqsrc)),
timestamp);
}
/* make guess of duration from timestamp and cumulative buffer number */
if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
duration = timestamp / (copied_number + 1);
}
else {
} else {
duration = 33 * GST_MSECOND;
}
/* TODO, DEBUG: set duration to see what the difference is between callback and create */
/*duration = GST_CLOCK_DIFF (timestamp, timestamp2);*/
/*duration = GST_CLOCK_DIFF (timestamp, timestamp2); */
GST_BUFFER_TIMESTAMP (*buffer) = timestamp;
@@ -805,14 +812,15 @@ gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer)
if (src->rate_numerator != 0) {
GST_BUFFER_DURATION (outbuf) = gst_util_uint64_scale_int (GST_SECOND,
src->rate_denominator, src->rate_numerator);
}*/
} */
dropped = copied_number - niimaqsrc->cumbufnum;
if (dropped > 0) {
niimaqsrc->n_dropped_frames += dropped;
GST_WARNING_OBJECT (niimaqsrc, "Asked to copy buffer %d but was given %d",
niimaqsrc->cumbufnum, copied_number);
GST_WARNING_OBJECT (niimaqsrc, "Dropped %d frames (%d total)", dropped, niimaqsrc->n_dropped_frames);
GST_WARNING_OBJECT (niimaqsrc, "Dropped %d frames (%d total)", dropped,
niimaqsrc->n_dropped_frames);
}
/* set cumulative buffer number to get next frame */
@@ -822,7 +830,7 @@ gst_niimaqsrc_create (GstPushSrc * psrc, GstBuffer ** buffer)
/*if (src->rate_numerator != 0) {
src->running_time = gst_util_uint64_scale_int (src->n_frames * GST_SECOND,
src->rate_denominator, src->rate_numerator);
}*/
} */
return res;
@@ -859,9 +867,7 @@ gst_niimaqsrc_parse_caps (const GstCaps * caps, gint * width, gint * height,
ret = gst_structure_get (structure,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"depth", G_TYPE_INT, depth,
"bpp", G_TYPE_INT, bpp,
NULL);
"depth", G_TYPE_INT, depth, "bpp", G_TYPE_INT, bpp, NULL);
if (!ret) {
GST_DEBUG ("Failed to retrieve width, height, depth, or bpp");
@@ -888,12 +894,9 @@ gst_niimaqsrc_set_caps_color (GstStructure * gs, gint bpp, gint depth)
gst_structure_set_name (gs, "video/x-raw-gray");
gst_structure_set (gs,
"bpp", G_TYPE_INT, bpp,
"depth", G_TYPE_INT, depth, NULL);
"bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, depth, NULL);
if (depth > 8) {
gst_structure_set(gs,
"endianness", G_TYPE_INT, G_LITTLE_ENDIAN,
NULL);
gst_structure_set (gs, "endianness", G_TYPE_INT, G_LITTLE_ENDIAN, NULL);
}
return ret;
@@ -913,9 +916,7 @@ static gboolean
gst_niimaqsrc_set_caps_framesize (GstStructure * gs, gint width, gint height)
{
gst_structure_set (gs,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
NULL);
"width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
return TRUE;
}
@@ -940,8 +941,8 @@ gst_niimaqsrc_get_cam_caps (GstNiImaqSrc * niimaqsrc)
gcaps = gst_caps_new_empty ();
if (!niimaqsrc->iid) {
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED, ("Camera interface not open"),
("Camera interface not open"));
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED,
("Camera interface not open"), ("Camera interface not open"));
goto error;
}
@@ -951,7 +952,7 @@ gst_niimaqsrc_get_cam_caps (GstNiImaqSrc * niimaqsrc)
bpp = val;
rval &= imgGetAttribute (niimaqsrc->iid, IMG_ATTR_BYTESPERPIXEL, &val);
gst_niimaqsrc_report_imaq_error (rval);
depth = val*8;
depth = val * 8;
rval &= imgGetAttribute (niimaqsrc->iid, IMG_ATTR_ROI_WIDTH, &val);
gst_niimaqsrc_report_imaq_error (rval);
width = val;
@@ -972,7 +973,8 @@ gst_niimaqsrc_get_cam_caps (GstNiImaqSrc * niimaqsrc)
!gst_niimaqsrc_set_caps_framesize (gs, width, height)) {
GST_ELEMENT_ERROR (niimaqsrc, STREAM, FAILED,
("attempt to set caps %dx%dx%d (%d) failed", width, height, depth, bpp),
("attempt to set caps %dx%dx%d (%d) failed", width, height, depth, bpp));
("attempt to set caps %dx%dx%d (%d) failed", width, height, depth,
bpp));
goto error;
}
@@ -1017,32 +1019,36 @@ error:
static gboolean
gst_niimaqsrc_start (GstBaseSrc * src)
{
GstNiImaqSrc* niimaqsrc = GST_NIIMAQSRC(src);
GstNiImaqSrc *niimaqsrc = GST_NIIMAQSRC (src);
Int32 rval;
gint i;
niimaqsrc->iid = 0;
niimaqsrc->sid = 0;
GST_DEBUG_OBJECT (niimaqsrc, "Opening IMAQ interface: %s", niimaqsrc->interface_name);
GST_DEBUG_OBJECT (niimaqsrc, "Opening IMAQ interface: %s",
niimaqsrc->interface_name);
/* open IMAQ interface */
rval=imgInterfaceOpen(niimaqsrc->interface_name,&(niimaqsrc->iid));
rval = imgInterfaceOpen (niimaqsrc->interface_name, &(niimaqsrc->iid));
if (rval) {
gst_niimaqsrc_report_imaq_error (rval);
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED, ("Failed to open IMAQ interface"),
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED,
("Failed to open IMAQ interface"),
("Failed to open camera interface %s", niimaqsrc->interface_name));
goto error;
}
GST_DEBUG_OBJECT (niimaqsrc, "Opening IMAQ session: %s", niimaqsrc->interface_name);
GST_DEBUG_OBJECT (niimaqsrc, "Opening IMAQ session: %s",
niimaqsrc->interface_name);
/* open IMAQ session */
rval=imgSessionOpen(niimaqsrc->iid, &(niimaqsrc->sid));
rval = imgSessionOpen (niimaqsrc->iid, &(niimaqsrc->sid));
if (rval) {
gst_niimaqsrc_report_imaq_error (rval);
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED, ("Failed to open IMAQ session"),
("Failed to open IMAQ session %d", niimaqsrc->sid));
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED,
("Failed to open IMAQ session"), ("Failed to open IMAQ session %d",
niimaqsrc->sid));
goto error;
}
@@ -1056,22 +1062,26 @@ gst_niimaqsrc_start (GstBaseSrc * src)
/* get caps from camera and set to src pad */
niimaqsrc->caps = gst_niimaqsrc_get_cam_caps (niimaqsrc);
if (niimaqsrc->caps == NULL) {
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED, ("Failed to get caps from IMAQ"),
("Failed to get caps from IMAQ"));
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED,
("Failed to get caps from IMAQ"), ("Failed to get caps from IMAQ"));
goto error;
}
GST_LOG_OBJECT (niimaqsrc, "Creating ring with %d buffers", niimaqsrc->bufsize);
GST_LOG_OBJECT (niimaqsrc, "Creating ring with %d buffers",
niimaqsrc->bufsize);
/* create array of pointers to give to IMAQ for creating internal buffers */
niimaqsrc->buflist = g_new (guint32*, niimaqsrc->bufsize);
for (i=0; i < niimaqsrc->bufsize; i++) {
niimaqsrc->buflist = g_new (guint32 *, niimaqsrc->bufsize);
for (i = 0; i < niimaqsrc->bufsize; i++) {
niimaqsrc->buflist[i] = 0;
}
rval=imgRingSetup (niimaqsrc->sid, niimaqsrc->bufsize, (void**)(niimaqsrc->buflist), 0, FALSE);
rval =
imgRingSetup (niimaqsrc->sid, niimaqsrc->bufsize,
(void **) (niimaqsrc->buflist), 0, FALSE);
if (rval) {
gst_niimaqsrc_report_imaq_error (rval);
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED, ("Failed to create ring buffer"),
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED,
("Failed to create ring buffer"),
("Failed to create ring buffer with %d buffers", niimaqsrc->bufsize));
goto error;
}
@@ -1082,7 +1092,8 @@ gst_niimaqsrc_start (GstBaseSrc * src)
gst_niimaqsrc_frame_start_callback, niimaqsrc);
if (rval) {
gst_niimaqsrc_report_imaq_error (rval);
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED, ("Failed to register BUF_COMPLETE callback"),
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED,
("Failed to register BUF_COMPLETE callback"),
("Failed to register BUF_COMPLETE callback"));
goto error;
}
@@ -1107,7 +1118,7 @@ error:
static gboolean
gst_niimaqsrc_stop (GstBaseSrc * src)
{
GstNiImaqSrc* niimaqsrc = GST_NIIMAQSRC (src);
GstNiImaqSrc *niimaqsrc = GST_NIIMAQSRC (src);
Int32 rval;
/* stop IMAQ session */
@@ -1115,14 +1126,14 @@ gst_niimaqsrc_stop (GstBaseSrc * src)
rval = imgSessionStopAcquisition (niimaqsrc->sid);
if (rval) {
gst_niimaqsrc_report_imaq_error (rval);
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED, ("Unable to stop acquisition"),
("Unable to stop acquisition"));
GST_ELEMENT_ERROR (niimaqsrc, RESOURCE, FAILED,
("Unable to stop acquisition"), ("Unable to stop acquisition"));
}
niimaqsrc->session_started = FALSE;
GST_DEBUG_OBJECT (niimaqsrc, "Acquisition stopped");
}
gst_niimaqsrc_close_interface(niimaqsrc);
gst_niimaqsrc_close_interface (niimaqsrc);
if (niimaqsrc->caps) {
gst_caps_unref (niimaqsrc->caps);
@@ -1143,14 +1154,14 @@ gst_niimaqsrc_close_interface (GstNiImaqSrc * niimaqsrc)
Int32 rval;
/* close IMAQ session and interface */
if(niimaqsrc->sid) {
rval = imgClose(niimaqsrc->sid,TRUE);
if (niimaqsrc->sid) {
rval = imgClose (niimaqsrc->sid, TRUE);
gst_niimaqsrc_report_imaq_error (rval);
niimaqsrc->sid = 0;
GST_DEBUG_OBJECT (niimaqsrc, "IMAQ session closed");
}
if(niimaqsrc->iid) {
rval = imgClose(niimaqsrc->iid,TRUE);
if (niimaqsrc->iid) {
rval = imgClose (niimaqsrc->iid, TRUE);
gst_niimaqsrc_report_imaq_error (rval);
niimaqsrc->iid = 0;
GST_DEBUG_OBJECT (niimaqsrc, "IMAQ interface closed");
@@ -1168,7 +1179,8 @@ gst_niimaqsrc_close_interface (GstNiImaqSrc * niimaqsrc)
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (niimaqsrc_debug, "niimaqsrc", 0, "NI-IMAQ interface");
GST_DEBUG_CATEGORY_INIT (niimaqsrc_debug, "niimaqsrc", 0,
"NI-IMAQ interface");
/* we only have one element in this plugin */
return gst_element_register (plugin, "niimaqsrc", GST_RANK_NONE,
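Both the frame-start callback and the commented-out fallbacks above compute timestamps the same way: take the element clock and subtract the element's base time to get running time. A minimal sketch of that pattern (it assumes the element is in a running pipeline and therefore has a clock):
static GstClockTime
get_running_time (GstElement * element)
{
  GstClock *clock = gst_element_get_clock (element);
  GstClockTime running;

  if (clock == NULL)
    return GST_CLOCK_TIME_NONE;

  /* running time = current clock time minus the element's base time */
  running = GST_CLOCK_DIFF (gst_element_get_base_time (element),
      gst_clock_get_time (clock));
  gst_object_unref (clock);

  return running;
}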


@@ -88,23 +88,19 @@ enum
/* pad templates */
static GstStaticPadTemplate gst_phoenixsrc_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (
GST_VIDEO_CAPS_GRAY8 ";"
GST_VIDEO_CAPS_GRAY16("BIG_ENDIAN") ";"
GST_VIDEO_CAPS_GRAY16("LITTLE_ENDIAN") ";"
GST_STATIC_CAPS (GST_VIDEO_CAPS_GRAY8 ";"
GST_VIDEO_CAPS_GRAY16 ("BIG_ENDIAN") ";"
GST_VIDEO_CAPS_GRAY16 ("LITTLE_ENDIAN") ";"
GST_VIDEO_CAPS_RGB ";"
GST_VIDEO_CAPS_xRGB ";"
GST_VIDEO_CAPS_RGB_15 ";"
GST_VIDEO_CAPS_RGB_16)
GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_RGB_15 ";" GST_VIDEO_CAPS_RGB_16)
);
/* class initialization */
GST_BOILERPLATE (GstPhoenixSrc, gst_phoenixsrc, GstPushSrc,
GST_TYPE_PUSH_SRC);
GST_BOILERPLATE (GstPhoenixSrc, gst_phoenixsrc, GstPushSrc, GST_TYPE_PUSH_SRC);
static GstVideoFormat
@@ -173,7 +169,8 @@ gst_phoenixsrc_class_init (GstPhoenixSrcClass * klass)
base_src_class->get_size = GST_DEBUG_FUNCPTR (gst_phoenixsrc_get_size);
base_src_class->is_seekable = GST_DEBUG_FUNCPTR (gst_phoenixsrc_is_seekable);
base_src_class->query = GST_DEBUG_FUNCPTR (gst_phoenixsrc_query);
base_src_class->check_get_range = GST_DEBUG_FUNCPTR (gst_phoenixsrc_check_get_range);
base_src_class->check_get_range =
GST_DEBUG_FUNCPTR (gst_phoenixsrc_check_get_range);
base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_phoenixsrc_fixate);
push_src_class->create = GST_DEBUG_FUNCPTR (gst_phoenixsrc_create);
@@ -187,15 +184,16 @@ gst_phoenixsrc_class_init (GstPhoenixSrcClass * klass)
g_param_spec_uint ("num-capture-buffers", "Number of capture buffers",
"Number of capture buffers", 1, G_MAXUINT,
DEFAULT_PROP_NUM_CAPTURE_BUFFERS,
(GParamFlags) (G_PARAM_READWRITE |G_PARAM_STATIC_STRINGS)));
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
}
static void
gst_phoenixsrc_init (GstPhoenixSrc * phoenixsrc, GstPhoenixSrcClass * phoenixsrc_class)
gst_phoenixsrc_init (GstPhoenixSrc * phoenixsrc,
GstPhoenixSrcClass * phoenixsrc_class)
{
phoenixsrc->srcpad = gst_pad_new_from_static_template (&gst_phoenixsrc_src_template
, "src");
phoenixsrc->srcpad =
gst_pad_new_from_static_template (&gst_phoenixsrc_src_template, "src");
/* set source as live (no preroll) */
gst_base_src_set_live (GST_BASE_SRC (phoenixsrc), TRUE);
@@ -208,8 +206,10 @@ gst_phoenixsrc_init (GstPhoenixSrc * phoenixsrc, GstPhoenixSrcClass * phoenixsrc
phoenixsrc->num_capture_buffers = DEFAULT_PROP_NUM_CAPTURE_BUFFERS;
phoenixsrc->first_phoenix_ts = GST_CLOCK_TIME_NONE;
phoenixsrc->frame_start_times = g_new (guint64, phoenixsrc->num_capture_buffers);
phoenixsrc->frame_end_times = g_new (guint64, phoenixsrc->num_capture_buffers);
phoenixsrc->frame_start_times =
g_new (guint64, phoenixsrc->num_capture_buffers);
phoenixsrc->frame_end_times =
g_new (guint64, phoenixsrc->num_capture_buffers);
phoenixsrc->buffer_ready = FALSE;
phoenixsrc->timeout_occurred = FALSE;
phoenixsrc->fifo_overflow_occurred = FALSE;
@@ -218,7 +218,7 @@ gst_phoenixsrc_init (GstPhoenixSrc * phoenixsrc, GstPhoenixSrcClass * phoenixsrc
phoenixsrc->buffer_processed_count = 0;
phoenixsrc->frame_end_count = 0;
phoenixsrc->frame_start_count = 0;
/*phoenixsrc->frame_count = 0;*/
/*phoenixsrc->frame_count = 0; */
phoenixsrc->mutex = g_mutex_new ();
phoenixsrc->cond = g_cond_new ();
@@ -243,15 +243,16 @@ gst_phoenixsrc_set_property (GObject * object, guint property_id,
GST_ELEMENT_WARNING (phoenixsrc, RESOURCE, SETTINGS,
("Number of capture buffers cannot be changed after acquisition has started."),
(NULL));
}
else {
} else {
phoenixsrc->num_capture_buffers = g_value_get_uint (value);
g_free (phoenixsrc->frame_start_times);
phoenixsrc->frame_start_times = g_new (guint64, phoenixsrc->num_capture_buffers);
phoenixsrc->frame_start_times =
g_new (guint64, phoenixsrc->num_capture_buffers);
g_free (phoenixsrc->frame_end_times);
phoenixsrc->frame_end_times = g_new (guint64, phoenixsrc->num_capture_buffers);
phoenixsrc->frame_end_times =
g_new (guint64, phoenixsrc->num_capture_buffers);
}
break;
default:
@@ -325,7 +326,9 @@ gst_phoenixsrc_get_caps (GstBaseSrc * src)
/* return template caps if we don't know the actual camera caps */
if (!phoenixsrc->caps) {
return gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (phoenixsrc)));
return
gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD
(phoenixsrc)));
}
return gst_caps_copy (phoenixsrc->caps);
@@ -356,9 +359,7 @@ gst_phoenixsrc_set_caps (GstBaseSrc * src, GstCaps * caps)
structure = gst_caps_get_structure (caps, 0);
ret = gst_structure_get (structure,
"width", G_TYPE_INT, &width,
"height", G_TYPE_INT, &height,
NULL);
"width", G_TYPE_INT, &width, "height", G_TYPE_INT, &height, NULL);
if (!ret) {
GST_DEBUG ("Failed to retrieve width and height");
@@ -379,7 +380,7 @@ gst_phoenixsrc_newsegment (GstBaseSrc * src)
}
static inline GstClockTime
gst_phoenix_get_timestamp (GstPhoenixSrc *phoenixsrc)
gst_phoenix_get_timestamp (GstPhoenixSrc * phoenixsrc)
{
ui32 dwParam;
guint64 timestamp;
@@ -387,7 +388,7 @@ gst_phoenix_get_timestamp (GstPhoenixSrc *phoenixsrc)
/* get time in microseconds from start of acquisition */
/* TODO: check for rollover */
PHX_ParameterGet (phoenixsrc->hCamera, PHX_EVENTCOUNT, &dwParam);
timestamp = (guint64)1000 * dwParam;
timestamp = (guint64) 1000 *dwParam;
if (phoenixsrc->first_phoenix_ts == GST_CLOCK_TIME_NONE) {
phoenixsrc->first_phoenix_ts = timestamp;
@@ -397,7 +398,7 @@ gst_phoenix_get_timestamp (GstPhoenixSrc *phoenixsrc)
/* Callback function to handle image capture events. */
void
phx_callback (tHandle hCamera, ui32 dwMask, void* pvParams )
phx_callback (tHandle hCamera, ui32 dwMask, void *pvParams)
{
GstPhoenixSrc *phoenixsrc = GST_PHOENIX_SRC (pvParams);
GstClockTime ct = gst_phoenix_get_timestamp (phoenixsrc);
@@ -466,9 +467,9 @@ gst_phoenixsrc_start (GstBaseSrc * src)
GST_DEBUG_OBJECT (phoenixsrc, "start");
if (phoenixsrc->config_filepath == NULL) {
GST_WARNING_OBJECT (phoenixsrc, "No config file set, using default 640x480x8bpp");
}
else if (!g_file_test (phoenixsrc->config_filepath, G_FILE_TEST_EXISTS)) {
GST_WARNING_OBJECT (phoenixsrc,
"No config file set, using default 640x480x8bpp");
} else if (!g_file_test (phoenixsrc->config_filepath, G_FILE_TEST_EXISTS)) {
GST_ELEMENT_ERROR (phoenixsrc, RESOURCE, NOT_FOUND,
("Camera config file does not exist: %s", phoenixsrc->config_filepath),
(NULL));
@@ -477,7 +478,8 @@ gst_phoenixsrc_start (GstBaseSrc * src)
/* Initialize board */
/* TODO: this picks first digital board using default settings, parameterize this! */
eStat = PHX_CameraConfigLoad (&phoenixsrc->hCamera, phoenixsrc->config_filepath,
eStat =
PHX_CameraConfigLoad (&phoenixsrc->hCamera, phoenixsrc->config_filepath,
PHX_BOARD_AUTO | PHX_DIGITAL, PHX_ErrHandlerDefault);
if (eStat != PHX_OK) {
GST_ELEMENT_ERROR (phoenixsrc, LIBRARY, INIT, (NULL), (NULL));
@@ -486,48 +488,70 @@ gst_phoenixsrc_start (GstBaseSrc * src)
/* capture frames continuously */
eParamValue = PHX_ENABLE;
eStat = PHX_ParameterSet (phoenixsrc->hCamera, PHX_ACQ_CONTINUOUS, &eParamValue);
if (PHX_OK != eStat) goto ResourceSettingsError;
eStat =
PHX_ParameterSet (phoenixsrc->hCamera, PHX_ACQ_CONTINUOUS, &eParamValue);
if (PHX_OK != eStat)
goto ResourceSettingsError;
/* capture in blocking fashion, i.e. don't overwrite un-processed buffers */
eParamValue = PHX_DISABLE;
eStat = PHX_ParameterSet (phoenixsrc->hCamera, PHX_ACQ_BLOCKING, &eParamValue);
if (PHX_OK != eStat) goto ResourceSettingsError;
eStat =
PHX_ParameterSet (phoenixsrc->hCamera, PHX_ACQ_BLOCKING, &eParamValue);
if (PHX_OK != eStat)
goto ResourceSettingsError;
/* use event counter to count time from start of acquisition */
eParamValue = PHX_EVENTCOUNT_TIME;
eStat = PHX_ParameterSet (phoenixsrc->hCamera, PHX_EVENTCOUNT_SRC, &eParamValue);
if (PHX_OK != eStat) goto ResourceSettingsError;
eStat =
PHX_ParameterSet (phoenixsrc->hCamera, PHX_EVENTCOUNT_SRC, &eParamValue);
if (PHX_OK != eStat)
goto ResourceSettingsError;
eParamValue = PHX_EVENTGATE_ACQ;
eStat = PHX_ParameterSet (phoenixsrc->hCamera, PHX_EVENTGATE_SRC, &eParamValue);
if (PHX_OK != eStat) goto ResourceSettingsError;
eStat =
PHX_ParameterSet (phoenixsrc->hCamera, PHX_EVENTGATE_SRC, &eParamValue);
if (PHX_OK != eStat)
goto ResourceSettingsError;
/* Get format (mono, Bayer, RBG, etc.) */
eStat = PHX_ParameterGet (phoenixsrc->hCamera, PHX_DST_FORMAT, &dwParamValue);
if (PHX_OK != eStat) goto ResourceSettingsError;
if (PHX_OK != eStat)
goto ResourceSettingsError;
phx_format = dwParamValue;
/* Get endianness */
eStat = PHX_ParameterGet (phoenixsrc->hCamera, PHX_DST_ENDIAN, &dwParamValue);
if (PHX_OK != eStat) goto ResourceSettingsError;
if (PHX_OK != eStat)
goto ResourceSettingsError;
phx_endian = dwParamValue;
/* get width */
eStat = PHX_ParameterGet (phoenixsrc->hCamera, PHX_ROI_XLENGTH_SCALED, &dwParamValue);
if (PHX_OK != eStat) goto ResourceSettingsError;
eStat =
PHX_ParameterGet (phoenixsrc->hCamera, PHX_ROI_XLENGTH_SCALED,
&dwParamValue);
if (PHX_OK != eStat)
goto ResourceSettingsError;
width = dwParamValue;
/* get height */
eStat = PHX_ParameterGet (phoenixsrc->hCamera, PHX_ROI_YLENGTH_SCALED, &dwParamValue);
if (PHX_OK != eStat) goto ResourceSettingsError;
eStat =
PHX_ParameterGet (phoenixsrc->hCamera, PHX_ROI_YLENGTH_SCALED,
&dwParamValue);
if (PHX_OK != eStat)
goto ResourceSettingsError;
height = dwParamValue;
/* get buffer size; width (in bytes) and height (in lines) */
eStat = PHX_ParameterGet (phoenixsrc->hCamera, PHX_BUF_DST_XLENGTH, &dwBufferWidth);
if (PHX_OK != eStat) goto ResourceSettingsError;
eStat = PHX_ParameterGet (phoenixsrc->hCamera, PHX_BUF_DST_YLENGTH, &dwBufferHeight);
if (PHX_OK != eStat) goto ResourceSettingsError;
eStat =
PHX_ParameterGet (phoenixsrc->hCamera, PHX_BUF_DST_XLENGTH,
&dwBufferWidth);
if (PHX_OK != eStat)
goto ResourceSettingsError;
eStat =
PHX_ParameterGet (phoenixsrc->hCamera, PHX_BUF_DST_YLENGTH,
&dwBufferHeight);
if (PHX_OK != eStat)
goto ResourceSettingsError;
phoenixsrc->buffer_size = dwBufferHeight * dwBufferWidth;
/* Tell Phoenix to use N buffers. */
@@ -536,16 +560,25 @@ gst_phoenixsrc_start (GstBaseSrc * src)
/* Setup a one second timeout value (milliseconds) */
dwParamValue = 1000;
eStat = PHX_ParameterSet (phoenixsrc->hCamera, PHX_TIMEOUT_DMA, (void *) &dwParamValue);
if (PHX_OK != eStat) goto ResourceSettingsError;
eStat =
PHX_ParameterSet (phoenixsrc->hCamera, PHX_TIMEOUT_DMA,
(void *) &dwParamValue);
if (PHX_OK != eStat)
goto ResourceSettingsError;
/* The BUFFER_READY interrupt is already enabled by default,
* but we must enable other interrupts here. */
eParamValue = PHX_INTRPT_TIMEOUT | PHX_INTRPT_FIFO_OVERFLOW | PHX_INTRPT_FRAME_END | PHX_INTRPT_FRAME_START;
eStat = PHX_ParameterSet (phoenixsrc->hCamera, PHX_INTRPT_SET, (void *) &eParamValue);
if (PHX_OK != eStat) goto ResourceSettingsError;
eParamValue =
PHX_INTRPT_TIMEOUT | PHX_INTRPT_FIFO_OVERFLOW | PHX_INTRPT_FRAME_END |
PHX_INTRPT_FRAME_START;
eStat =
PHX_ParameterSet (phoenixsrc->hCamera, PHX_INTRPT_SET,
(void *) &eParamValue);
if (PHX_OK != eStat)
goto ResourceSettingsError;
videoFormat = gst_phoenixsrc_color_format_to_video_format (phx_format, phx_endian);
videoFormat =
gst_phoenixsrc_color_format_to_video_format (phx_format, phx_endian);
if (videoFormat == GST_VIDEO_FORMAT_UNKNOWN) {
GST_ELEMENT_ERROR (phoenixsrc, STREAM, WRONG_TYPE,
(_("Unknown or unsupported color format.")), (NULL));
@@ -554,7 +587,8 @@ gst_phoenixsrc_start (GstBaseSrc * src)
if (phoenixsrc->caps)
gst_caps_unref (phoenixsrc->caps);
phoenixsrc->caps = gst_video_format_new_caps (videoFormat, width, height, 30, 1, 1, 1);
phoenixsrc->caps =
gst_video_format_new_caps (videoFormat, width, height, 30, 1, 1, 1);
if (phoenixsrc->caps == NULL) {
GST_ELEMENT_ERROR (phoenixsrc, STREAM, TOO_LAZY,
@@ -570,7 +604,8 @@ ResourceSettingsError:
Error:
/* Now cease all captures */
if (phoenixsrc->hCamera) PHX_Acquire(phoenixsrc->hCamera, PHX_ABORT, NULL );
if (phoenixsrc->hCamera)
PHX_Acquire (phoenixsrc->hCamera, PHX_ABORT, NULL);
/* TODO Free all the user allocated memory */
//psImageBuff = pasImageBuffs;
@@ -583,7 +618,8 @@ Error:
//}
/* Release the Phoenix board */
if (phoenixsrc->hCamera) PHX_CameraRelease(&phoenixsrc->hCamera);
if (phoenixsrc->hCamera)
PHX_CameraRelease (&phoenixsrc->hCamera);
return FALSE;
}
@@ -608,7 +644,7 @@ gst_phoenixsrc_stop (GstBaseSrc * src)
phoenixsrc->caps = NULL;
phoenixsrc->dropped_frame_count = 0;
/*phoenixsrc->last_time_code = -1;*/
/*phoenixsrc->last_time_code = -1; */
return TRUE;
}
@@ -692,15 +728,16 @@ gst_phoenixsrc_create (GstPushSrc * src, GstBuffer ** buf)
/* Start acquisition */
if (!phoenixsrc->acq_started) {
/* make class instance pointer available to the callback, and flush cache*/
PHX_ParameterSet (phoenixsrc->hCamera, PHX_EVENT_CONTEXT | PHX_CACHE_FLUSH, (void *) phoenixsrc);
/* make class instance pointer available to the callback, and flush cache */
PHX_ParameterSet (phoenixsrc->hCamera, PHX_EVENT_CONTEXT | PHX_CACHE_FLUSH,
(void *) phoenixsrc);
/* Now start our capture */
eStat = PHX_Acquire (phoenixsrc->hCamera, PHX_START, (void*)phx_callback);
eStat = PHX_Acquire (phoenixsrc->hCamera, PHX_START, (void *) phx_callback);
if (PHX_OK != eStat) {
GST_ELEMENT_ERROR (phoenixsrc, RESOURCE, FAILED,
(_("Failed to start acquisition.")), (NULL));
return GST_FLOW_ERROR; /* TODO: make sure _stop is called if this happens to release resources*/
return GST_FLOW_ERROR; /* TODO: make sure _stop is called if this happens to release resources */
}
phoenixsrc->acq_started = TRUE;
}
@@ -729,17 +766,21 @@ gst_phoenixsrc_create (GstPushSrc * src, GstBuffer ** buf)
if (!phoenixsrc->buffer_ready) {
GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
(_("You should not see this error, something very bad happened.")), (NULL));
(_("You should not see this error, something very bad happened.")),
(NULL));
g_mutex_unlock (phoenixsrc->mutex);
return GST_FLOW_ERROR;
}
GST_INFO_OBJECT (phoenixsrc, "Processing new buffer %d (Frame start: %d), ready-processed = %d",
phoenixsrc->buffer_ready_count, phoenixsrc->frame_start_count, phoenixsrc->buffer_ready_count-phoenixsrc->buffer_processed_count);
GST_INFO_OBJECT (phoenixsrc,
"Processing new buffer %d (Frame start: %d), ready-processed = %d",
phoenixsrc->buffer_ready_count, phoenixsrc->frame_start_count,
phoenixsrc->buffer_ready_count - phoenixsrc->buffer_processed_count);
phoenixsrc->buffer_ready = FALSE;
/* frame_start is always >= buffer_ready */
dropped_frame_count = phoenixsrc->frame_start_count - phoenixsrc->buffer_ready_count;
dropped_frame_count =
phoenixsrc->frame_start_count - phoenixsrc->buffer_ready_count;
g_mutex_unlock (phoenixsrc->mutex);
@@ -750,8 +791,9 @@ gst_phoenixsrc_create (GstPushSrc * src, GstBuffer ** buf)
return GST_FLOW_ERROR;
}
/* Copy image to buffer from surface TODO: use orc_memcpy*/
memcpy (GST_BUFFER_DATA (*buf), phx_buffer.pvAddress, phoenixsrc->buffer_size);
/* Copy image to buffer from surface TODO: use orc_memcpy */
memcpy (GST_BUFFER_DATA (*buf), phx_buffer.pvAddress,
phoenixsrc->buffer_size);
/* Having processed the data, release the buffer ready for further image data */
eStat = PHX_Acquire (phoenixsrc->hCamera, PHX_BUFFER_RELEASE, NULL);
@@ -768,7 +810,8 @@ gst_phoenixsrc_create (GstPushSrc * src, GstBuffer ** buf)
GST_BUFFER_SIZE (*buf) = phoenixsrc->buffer_size;
/* use time from capture board */
n = (phoenixsrc->buffer_processed_count - 1) % phoenixsrc->num_capture_buffers;
n = (phoenixsrc->buffer_processed_count -
1) % phoenixsrc->num_capture_buffers;
GST_BUFFER_TIMESTAMP (*buf) = phoenixsrc->frame_start_times[n];
GST_BUFFER_DURATION (*buf) = GST_CLOCK_DIFF (phoenixsrc->frame_start_times[n],
phoenixsrc->frame_end_times[n]);
@@ -782,7 +825,7 @@ gst_phoenixsrc_create (GstPushSrc * src, GstBuffer ** buf)
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (gst_phoenixsrc_debug, "phoenixsrc", 0, \
GST_DEBUG_CATEGORY_INIT (gst_phoenixsrc_debug, "phoenixsrc", 0,
"debug category for phoenixsrc element");
gst_element_register (plugin, "phoenixsrc", GST_RANK_NONE,
gst_phoenixsrc_get_type ());
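The timestamp hunk above looks up per-frame start/end times in arrays sized to the capture ring, indexed modulo the ring size; a small standalone sketch of that arithmetic with the element's fields passed in as parameters (the example numbers in the comment are made up):
/* Requires <gst/gst.h>. */
static GstClockTimeDiff
frame_duration (const guint64 * frame_start_times,
    const guint64 * frame_end_times, guint buffer_processed_count,
    guint num_capture_buffers)
{
  /* e.g. with a 4-buffer ring and buffer_processed_count = 6, the frame just
   * handled used slot (6 - 1) % 4 = 1 */
  guint n = (buffer_processed_count - 1) % num_capture_buffers;

  /* GST_CLOCK_DIFF (a, b) expands to (b) - (a), i.e. frame end minus start */
  return GST_CLOCK_DIFF (frame_start_times[n], frame_end_times[n]);
}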