/**
 * Type definition for the custom bounding box parsing function.
 *
 * @param[in]  outputLayersInfo A vector containing information on the output
 *                              layers of the model.
 * @param[in]  networkInfo      Network information.
 * @param[in]  detectionParams  Detection parameters required for parsing
 *                              objects.
 * @param[out] objectList       A reference to a vector in which the function
 *                              is to add parsed objects.
 */
typedef bool (* NvDsInferParseCustomFunc) (
    std::vector<NvDsInferLayerInfo> const &outputLayersInfo,
    NvDsInferNetworkInfo const &networkInfo,
    NvDsInferParseDetectionParams const &detectionParams,
    std::vector<NvDsInferObjectDetectionInfo> &objectList);
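A minimal sketch of a function matching this typedef is shown below. The output-layer layout (one row of [classId, confidence, left, top, width, height] per detection), the fixed threshold, the function name, and the use of inferDims (the field name in recent DeepStream releases) are illustrative assumptions, not any real model's format:

/* Hypothetical parser sketch: assumes a single output layer laid out as
 * rows of [classId, confidence, left, top, width, height]. */
extern "C" bool NvDsInferParseCustomSketch (
    std::vector<NvDsInferLayerInfo> const &outputLayersInfo,
    NvDsInferNetworkInfo const &networkInfo,
    NvDsInferParseDetectionParams const &detectionParams,
    std::vector<NvDsInferObjectDetectionInfo> &objectList)
{
  if (outputLayersInfo.empty ())
    return false;

  const float *data = (const float *) outputLayersInfo[0].buffer;
  /* Assumption: the layer's first dimension is the detection count */
  unsigned int numDets = outputLayersInfo[0].inferDims.d[0];

  for (unsigned int i = 0; i < numDets; i++) {
    const float *row = data + i * 6;
    NvDsInferObjectDetectionInfo obj{};
    obj.classId = (unsigned int) row[0];
    obj.detectionConfidence = row[1];
    obj.left = row[2];
    obj.top = row[3];
    obj.width = row[4];
    obj.height = row[5];
    /* Fixed threshold for illustration; a real parser would consult
     * detectionParams instead. */
    if (obj.detectionConfidence >= 0.5f)
      objectList.push_back (obj);
  }
  return true;
}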
if (obj_meta->class_id == PGIE_CLASS_ID_VEHICLE)
  vehicle_count++;
if (obj_meta->class_id == PGIE_CLASS_ID_PERSON)
  person_count++;
/* Now set the offsets where the string should appear */
txt_params->x_offset = obj_meta->rect_params.left;
txt_params->y_offset = obj_meta->rect_params.top - 25;
/* Text background color */
txt_params->set_bg_clr = 1;
txt_params->text_bg_clr.red = 0.0;
txt_params->text_bg_clr.green = 0.0;
txt_params->text_bg_clr.blue = 0.0;
txt_params->text_bg_clr.alpha = 1.0;
/*
 * Ideally NVDS_EVENT_MSG_META should be attached to the buffer by the
 * component implementing the detection / recognition logic.
 * Here we demonstrate how to use / attach that metadata.
 */
if (is_first_object && !(frame_number % frame_interval)) {
  /* The frequency of the messages to be sent depends on the use case.
   * Here a message is sent for the first object every frame_interval
   * (default = 30) frames. */
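Inside this block, the attachment looks roughly like the sketch below, following the pattern of the deepstream-test4 sample. generate_event_msg_meta(), meta_copy_func() and meta_free_func() are assumed helpers, and obj_meta, batch_meta and frame_meta come from the surrounding probe code:

/* Sketch: fill an NvDsEventMsgMeta and attach it as frame-level user meta */
NvDsEventMsgMeta *msg_meta = (NvDsEventMsgMeta *) g_malloc0 (sizeof (NvDsEventMsgMeta));
msg_meta->bbox.top = obj_meta->rect_params.top;
msg_meta->bbox.left = obj_meta->rect_params.left;
msg_meta->bbox.width = obj_meta->rect_params.width;
msg_meta->bbox.height = obj_meta->rect_params.height;
msg_meta->frameId = frame_number;
msg_meta->trackingId = obj_meta->object_id;
generate_event_msg_meta (msg_meta, obj_meta->class_id, obj_meta);

NvDsUserMeta *user_event_meta = nvds_acquire_user_meta_from_pool (batch_meta);
if (user_event_meta) {
  user_event_meta->user_meta_data = (void *) msg_meta;
  user_event_meta->base_meta.meta_type = NVDS_EVENT_MSG_META;
  user_event_meta->base_meta.copy_func = (NvDsMetaCopyFunc) meta_copy_func;
  user_event_meta->base_meta.release_func = (NvDsMetaReleaseFunc) meta_free_func;
  nvds_add_user_meta_to_frame (frame_meta, user_event_meta);
}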
/* pgie_src_pad_buffer_probe will extract metadata received on pgie src pad
 * and update params for drawing rectangle, object information etc. We also
 * iterate through the object list and encode the cropped objects as jpeg
 * images and attach it as user meta to the respective objects. */
static GstPadProbeReturn
pgie_src_pad_buffer_probe (GstPad * pad, GstPadProbeInfo * info, gpointer ctx)
{
  GstBuffer *buf = (GstBuffer *) info->data;
  GstMapInfo inmap = GST_MAP_INFO_INIT;

  if (!gst_buffer_map (buf, &inmap, GST_MAP_READ)) {
    GST_ERROR ("input buffer mapinfo failed");
    return GST_PAD_PROBE_DROP;
  }
  NvBufSurface *ip_surf = (NvBufSurface *) inmap.data;
  gst_buffer_unmap (buf, &inmap);
  NvDsObjectMeta *obj_meta = NULL;
  guint vehicle_count = 0;
  guint person_count = 0;
  NvDsMetaList *l_frame = NULL;
  NvDsMetaList *l_obj = NULL;
  NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);

  for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
      l_frame = l_frame->next) {
    NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
    /* For demonstration purposes, we will encode the first 10 frames. */
    if (frame_count <= 10) {
      NvDsObjEncUsrArgs frameData = { 0 };
      /* Preset */
      frameData.isFrame = 1;
      /* To be set by user */
      frameData.saveImg = save_img;
      frameData.attachUsrMeta = attach_user_meta;
      /* Set if image scaling is required */
      frameData.scaleImg = FALSE;
      frameData.scaledWidth = 0;
      frameData.scaledHeight = 0;
      /* Quality */
      frameData.quality = 80;
      /* Main function call */
      nvds_obj_enc_process (ctx, &frameData, ip_surf, NULL, frame_meta);
    }
    guint num_rects = 0;
    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next) {
      obj_meta = (NvDsObjectMeta *) (l_obj->data);
      if (obj_meta->class_id == PGIE_CLASS_ID_VEHICLE) {
        vehicle_count++;
        num_rects++;
      }
      if (obj_meta->class_id == PGIE_CLASS_ID_PERSON) {
        person_count++;
        num_rects++;
      }
      /* Conditions the user sets to encode the detected objects of interest.
       * For demonstration, we encode only the first detected person or
       * vehicle in each frame. */
      if ((obj_meta->class_id == PGIE_CLASS_ID_PERSON
              || obj_meta->class_id == PGIE_CLASS_ID_VEHICLE)
          && num_rects == 1) {
        NvDsObjEncUsrArgs objData = { 0 };
        /* To be set by user */
        objData.saveImg = save_img;
        objData.attachUsrMeta = attach_user_meta;
        /* Set if image scaling is required */
        objData.scaleImg = FALSE;
        objData.scaledWidth = 0;
        objData.scaledHeight = 0;
        /* Preset */
        objData.objNum = num_rects;
        /* Quality */
        objData.quality = 80;
        /* Main function call */
        nvds_obj_enc_process (ctx, &objData, ip_surf, obj_meta, frame_meta);
      }
    }
  }
  nvds_obj_enc_finish (ctx);
  frame_count++;
  return GST_PAD_PROBE_OK;
}
/* osd_sink_pad_buffer_probe will extract metadata received on OSD sink pad
 * and update params for drawing rectangle, object information. We also iterate
 * through the user meta of type "NVDS_CROP_IMAGE_META" to find image crop meta
 * and demonstrate how to access it. */
static GstPadProbeReturn
osd_sink_pad_buffer_probe (GstPad * pad, GstPadProbeInfo * info, gpointer u_data)
{
  GstBuffer *buf = (GstBuffer *) info->data;
  guint num_rects = 0;
  NvDsObjectMeta *obj_meta = NULL;
  guint vehicle_count = 0;
  guint person_count = 0;
  NvDsMetaList *l_frame = NULL;
  NvDsMetaList *l_obj = NULL;
  NvDsDisplayMeta *display_meta = NULL;
  NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);

  g_print ("Running osd_sink_pad_buffer_probe...\n");

  for (l_frame = batch_meta->frame_meta_list; l_frame != NULL;
      l_frame = l_frame->next) {
    NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
    int offset = 0;
    /* To verify encoded metadata of cropped frames, we iterate through the
     * user metadata of each frame and if a metadata of the type
     * 'NVDS_CROP_IMAGE_META' is found then we write that to a file as
     * implemented below. */
    char fileFrameNameString[FILE_NAME_SIZE];
    const char *osd_string = "OSD";
    /* For demonstration purposes we write the encoded jpeg images of the
     * first 10 frames only. The files generated have an 'OSD' prefix. */
    if (frame_number < 11) {
      NvDsUserMetaList *usrMetaList = frame_meta->frame_user_meta_list;
      FILE *file;
      int stream_num = 0;
      while (usrMetaList != NULL) {
        NvDsUserMeta *usrMetaData = (NvDsUserMeta *) usrMetaList->data;
        if (usrMetaData->base_meta.meta_type == NVDS_CROP_IMAGE_META) {
          snprintf (fileFrameNameString, FILE_NAME_SIZE, "%s_frame_%d_%d.jpg",
              osd_string, frame_number, stream_num++);
          NvDsObjEncOutParams *enc_jpeg_image =
              (NvDsObjEncOutParams *) usrMetaData->user_meta_data;
          /* Write to file (check the handle before using it) */
          file = fopen (fileFrameNameString, "wb");
          if (file) {
            fwrite (enc_jpeg_image->outBuffer, sizeof (uint8_t),
                enc_jpeg_image->outLen, file);
            fclose (file);
          }
        }
        usrMetaList = usrMetaList->next;
      }
    }
    for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next) {
      obj_meta = (NvDsObjectMeta *) (l_obj->data);
      if (obj_meta->class_id == PGIE_CLASS_ID_VEHICLE) {
        vehicle_count++;
        num_rects++;
      }
      if (obj_meta->class_id == PGIE_CLASS_ID_PERSON) {
        person_count++;
        num_rects++;
      }
      /* To verify encoded metadata of cropped objects, we iterate through the
       * user metadata of each object and if a metadata of the type
       * 'NVDS_CROP_IMAGE_META' is found then we write that to a file as
       * implemented below. */
      char fileObjNameString[FILE_NAME_SIZE];
      /* For demonstration purposes we write the encoded jpeg images of
       * vehicles or persons for the first 100 frames only.
       * The files generated have an 'OSD' prefix. */
      if (frame_number < 100 &&
          (obj_meta->class_id == PGIE_CLASS_ID_PERSON
              || obj_meta->class_id == PGIE_CLASS_ID_VEHICLE)) {
        NvDsUserMetaList *usrMetaList = obj_meta->obj_user_meta_list;
        FILE *file;
        while (usrMetaList != NULL) {
          NvDsUserMeta *usrMetaData = (NvDsUserMeta *) usrMetaList->data;
          if (usrMetaData->base_meta.meta_type == NVDS_CROP_IMAGE_META) {
            NvDsObjEncOutParams *enc_jpeg_image =
                (NvDsObjEncOutParams *) usrMetaData->user_meta_data;
user_meta_data : pointer to user-specific metadata
meta_type : metadata type that the user sets to identify its metadata
copy_func : metadata copy or transform function, to be provided when there is a buffer transformation
release_func : metadata release function, to be provided for when the metadata is no longer required
This example attaches an array of random numbers to the metadata. The following are the functions we need to prepare to achieve this:
user_meta_data
void *set_metadata_ptr()
{
  int i = 0;
  gchar *user_metadata = (gchar *) g_malloc0 (USER_ARRAY_SIZE);

  g_print ("\n**************** Setting user metadata array of 16 on nvinfer src pad\n");
  for (i = 0; i < USER_ARRAY_SIZE; i++) {
    user_metadata[i] = rand() % 255;
    g_print ("user_meta_data [%d] = %d\n", i, user_metadata[i]);
  }
  return (void *) user_metadata;
}
meta_type
Remember to define this variable inside the probe function:
/** set the user metadata type */
#define NVDS_USER_FRAME_META_EXAMPLE (nvds_get_user_meta_type("NVIDIA.NVINFER.USER_META"))

NvDsMetaType user_meta_type = NVDS_USER_FRAME_META_EXAMPLE;
copy_func
/* copy function set by user. "data" holds a pointer to NvDsUserMeta */
static gpointer copy_user_meta(gpointer data, gpointer user_data)
{
  NvDsUserMeta *user_meta = (NvDsUserMeta *) data;
  gchar *src_user_metadata = (gchar *) user_meta->user_meta_data;
  gchar *dst_user_metadata = (gchar *) g_malloc0 (USER_ARRAY_SIZE);
  memcpy (dst_user_metadata, src_user_metadata, USER_ARRAY_SIZE);
  return (gpointer) dst_user_metadata;
}
release_func
/* release function set by user. "data" holds a pointer to NvDsUserMeta */
static void release_user_meta(gpointer data, gpointer user_data)
{
  NvDsUserMeta *user_meta = (NvDsUserMeta *) data;
  if (user_meta->user_meta_data) {
    g_free (user_meta->user_meta_data);
    user_meta->user_meta_data = NULL;
  }
}
/* Set nvds user metadata at frame level. The user needs to set 4 parameters
 * after acquiring user meta from the pool using
 * nvds_acquire_user_meta_from_pool():
 *
 * 1. user_meta_data : pointer to user-specific metadata
 * 2. meta_type      : metadata type that the user sets to identify its metadata
 * 3. copy_func      : metadata copy or transform function, provided when there
 *                     is a buffer transformation
 * 4. release_func   : metadata release function, provided for when the
 *                     metadata is no longer required
 *
 * osd_sink_pad_buffer_probe will extract metadata received on OSD sink pad
 * and update params for drawing rectangle, object information etc. */
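Putting the four pieces together inside the probe, the attachment itself looks roughly like this (mirroring the deepstream-user-metadata sample; batch_meta and frame_meta come from the surrounding probe code):

/* Acquire a user meta slot from the batch pool */
NvDsUserMeta *user_meta = nvds_acquire_user_meta_from_pool (batch_meta);

/* Set the four required fields */
user_meta->user_meta_data = (void *) set_metadata_ptr ();
user_meta->base_meta.meta_type = user_meta_type;
user_meta->base_meta.copy_func = (NvDsMetaCopyFunc) copy_user_meta;
user_meta->base_meta.release_func = (NvDsMetaReleaseFunc) release_user_meta;

/* Attach the user meta to the frame */
nvds_add_user_meta_to_frame (frame_meta, user_meta);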
# Install rabbitmq on your ubuntu system: https://www.rabbitmq.com/install-debian.html
# The "Using rabbitmq.com APT Repository" procedure is known to work well
sudo apt-get install rabbitmq-server
# Ensure the rabbitmq service has started by running (it should already be the case):
sudo service rabbitmq-server status
# Rabbitmq management: It comes with a command line tool which you can use to create/configure all of your queues/exchanges/etc https://www.rabbitmq.com/management.html
# Use the default exchange amq.topic OR create an exchange as below, with the
# same name as the one you specify within cfg_amqp.txt:
# sudo rabbitmqadmin -u guest -p guest -V / declare exchange name=myexchange type=topic
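For reference, a plausible cfg_amqp.txt pointing at this local broker is sketched below. The key names are assumptions based on the DeepStream AMQP adapter and may differ between releases, so check the adapter's documentation for the exact set:

[message-broker]
hostname = localhost
port = 5672
username = guest
password = guest
exchange = myexchange
topic = topicname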
/* Start playing */ gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
    GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* See next tutorial for proper error message handling/parsing */
if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
  g_error ("An error occurred! Re-run with the GST_DEBUG=*:WARN environment "
      "variable set for more details.");
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
    GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
  g_printerr ("Unable to set the pipeline to the playing state.\n");
  gst_object_unref (pipeline);
  return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
    GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
switch (GST_MESSAGE_TYPE (msg)) {
  case GST_MESSAGE_ERROR:
    gst_message_parse_error (msg, &err, &debug_info);
    g_printerr ("Error received from element %s: %s\n",
        GST_OBJECT_NAME (msg->src), err->message);
    g_printerr ("Debugging information: %s\n",
        debug_info ? debug_info : "none");
    g_clear_error (&err);
    g_free (debug_info);
    break;
  case GST_MESSAGE_EOS:
    g_print ("End-Of-Stream reached.\n");
    break;
  default:
    /* We should not reach here because we only asked for ERRORs and EOS */
    g_printerr ("Unexpected message received.\n");
    break;
}
gst_message_unref (msg);
}
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
  g_printerr ("Unable to set the pipeline to the playing state.\n");
  gst_object_unref (pipeline);
  return -1;
}
switch (GST_MESSAGE_TYPE (msg)) {
  case GST_MESSAGE_ERROR:
    gst_message_parse_error (msg, &err, &debug_info);
    g_printerr ("Error received from element %s: %s\n",
        GST_OBJECT_NAME (msg->src), err->message);
    g_printerr ("Debugging information: %s\n",
        debug_info ? debug_info : "none");
    g_clear_error (&err);
    g_free (debug_info);
    break;
  case GST_MESSAGE_EOS:
    g_print ("End-Of-Stream reached.\n");
    break;
  default:
    /* We should not reach here because we only asked for ERRORs and EOS */
    g_printerr ("Unexpected message received.\n");
    break;
}
gst_message_unref (msg);
}
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *source;
  GstElement *convert;
  GstElement *resample;
  GstElement *sink;
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
/* Create the empty pipeline */ data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.resample || !data.sink) {
  g_printerr ("Not all elements could be created.\n");
  return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
 * point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert,
    data.resample, data.sink, NULL);
if (!gst_element_link_many (data.convert, data.resample, data.sink, NULL)) {
  g_printerr ("Elements could not be linked.\n");
  gst_object_unref (data.pipeline);
  return -1;
}
/* Set the URI to play */ g_object_set (data.source, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
/* Connect to the pad-added signal */ g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
  g_printerr ("Unable to set the pipeline to the playing state.\n");
  gst_object_unref (data.pipeline);
  return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data)
{
  GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
  g_print ("We are already linked. Ignoring.\n");
  goto exit;
}
/* Check the new pad's type */
new_pad_caps = gst_pad_get_current_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
  g_print ("It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
  goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
  g_print ("Type is '%s' but link failed.\n", new_pad_type);
} else {
  g_print ("Link succeeded (type '%s').\n", new_pad_type);
}
exit:
  /* Unreference the new pad's caps, if we got them */
  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);

  /* Unreference the sink pad */
  gst_object_unref (sink_pad);
}
(basic-tutorial-3.c)
Walkthrough
First, we gather our data into a struct so it can be passed to the callbacks later:
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *source;
  GstElement *convert;
  GstElement *resample;
  GstElement *sink;
} CustomData;
The next line is a forward reference; we will implement this function later:
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
Next we create the elements. uridecodebin automatically instantiates internally all the elements it needs (sources, demuxers and decoders) to turn a URI into an audio/video stream. Compared with playbin it does only half the job: because it contains demuxers, its source pads are not available initially and only appear at runtime.
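The element creation in the tutorial looks like this:

/* Create the elements */
data.source = gst_element_factory_make ("uridecodebin", "source");
data.convert = gst_element_factory_make ("audioconvert", "convert");
data.resample = gst_element_factory_make ("audioresample", "resample");
data.sink = gst_element_factory_make ("autoaudiosink", "sink");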
Next we link the converter, resample and sink elements. Note that we must not link the source at this point, since it does not yet have any source pads.
if (!gst_element_link_many (data.convert, data.resample, data.sink, NULL)) {
  g_printerr ("Elements could not be linked.\n");
  gst_object_unref (data.pipeline);
  return -1;
}
Then we set the URI the source should read:
/* Set the URI to play */ g_object_set (data.source, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
  g_print ("We are already linked. Ignoring.\n");
  goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
  g_print ("Type is '%s' but link failed.\n", new_pad_type);
} else {
  g_print ("Link succeeded (type '%s').\n", new_pad_type);
}
case GST_MESSAGE_STATE_CHANGED:
  /* We are only interested in state-changed messages from the pipeline */
  if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
    GstState old_state, new_state, pending_state;
    gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
    g_print ("Pipeline state changed from %s to %s:\n",
        gst_element_state_get_name (old_state),
        gst_element_state_get_name (new_state));
  }
  break;
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstElement *playbin;   /* Our one and only element */
  gboolean playing;      /* Are we in the PLAYING state? */
  gboolean terminate;    /* Should we terminate execution? */
  gboolean seek_enabled; /* Is seeking enabled for this media? */
  gboolean seek_done;    /* Have we performed the seek already? */
  gint64 duration;       /* How long does this media last, in nanoseconds */
} CustomData;
/* Forward definition of the message processing function */
static void handle_message (CustomData *data, GstMessage *msg);
/* Create the elements */ data.playbin = gst_element_factory_make ("playbin", "playbin");
if (!data.playbin) {
  g_printerr ("Not all elements could be created.\n");
  return -1;
}
/* Set the URI to play */ g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
/* Start playing */
ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
  g_printerr ("Unable to set the pipeline to the playing state.\n");
  gst_object_unref (data.playbin);
  return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.playbin);
do {
  msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
      GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS |
      GST_MESSAGE_DURATION);
  /* Parse message */
  if (msg != NULL) {
    handle_message (&data, msg);
  } else {
    /* We got no message, this means the timeout expired */
    if (data.playing) {
      gint64 current = -1;
      /* Query the current position of the stream */
      if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME, &current)) {
        g_printerr ("Could not query current position.\n");
      }
      /* If we didn't know it yet, query the stream duration */
      if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {
        if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME, &data.duration)) {
          g_printerr ("Could not query current duration.\n");
        }
      }
      /* Print current position and total duration */
      g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
          GST_TIME_ARGS (current), GST_TIME_ARGS (data.duration));
      /* If seeking is enabled, we have not done it yet, and the time is right, seek */
      if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) {
        g_print ("\nReached 10s, performing seek...\n");
        gst_element_seek_simple (data.playbin, GST_FORMAT_TIME,
            GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 30 * GST_SECOND);
        data.seek_done = TRUE;
      }
    }
  }
} while (!data.terminate);
switch (GST_MESSAGE_TYPE (msg)) {
  case GST_MESSAGE_ERROR:
    gst_message_parse_error (msg, &err, &debug_info);
    g_printerr ("Error received from element %s: %s\n",
        GST_OBJECT_NAME (msg->src), err->message);
    g_printerr ("Debugging information: %s\n",
        debug_info ? debug_info : "none");
    g_clear_error (&err);
    g_free (debug_info);
    data->terminate = TRUE;
    break;
  case GST_MESSAGE_EOS:
    g_print ("End-Of-Stream reached.\n");
    data->terminate = TRUE;
    break;
  case GST_MESSAGE_DURATION:
    /* The duration has changed, mark the current one as invalid */
    data->duration = GST_CLOCK_TIME_NONE;
    break;
  case GST_MESSAGE_STATE_CHANGED: {
    GstState old_state, new_state, pending_state;
    gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
    if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
      g_print ("Pipeline state changed from %s to %s:\n",
          gst_element_state_get_name (old_state),
          gst_element_state_get_name (new_state));
/* Remember whether we are in the PLAYING state or not */ data->playing = (new_state == GST_STATE_PLAYING);
      if (data->playing) {
        /* We just moved to PLAYING. Check if seeking is possible */
        GstQuery *query;
        gint64 start, end;
        query = gst_query_new_seeking (GST_FORMAT_TIME);
        if (gst_element_query (data->playbin, query)) {
          gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
          if (data->seek_enabled) {
            g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
                GST_TIME_ARGS (start), GST_TIME_ARGS (end));
          } else {
            g_print ("Seeking is DISABLED for this stream.\n");
          }
        } else {
          g_printerr ("Seeking query failed.");
        }
        gst_query_unref (query);
      }
    }
  } break;
  default:
    /* We should not reach here */
    g_printerr ("Unexpected message received.\n");
    break;
}
gst_message_unref (msg);
}
Define the data struct:
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstElement *playbin;   /* Our one and only element */
  gboolean playing;      /* Are we in the PLAYING state? */
  gboolean terminate;    /* Should we terminate execution? */
  gboolean seek_enabled; /* Is seeking enabled for this media? */
  gboolean seek_done;    /* Have we performed the seek already? */
  gint64 duration;       /* How long does this media last, in nanoseconds */
} CustomData;
/* Forward definition of the message processing function */
static void handle_message (CustomData *data, GstMessage *msg);
/* We got no message, this means the timeout expired */
if (data.playing) {
Next we use the methods GstElement provides to query the current position:
/* Query the current position of the stream */
if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME, &current)) {
  g_printerr ("Could not query current position.\n");
}
If we do not know the stream duration yet, we query it as well:
/* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {
  if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME, &data.duration)) {
    g_printerr ("Could not query current duration.\n");
  }
}
Now we can print the current position together with the total duration:
/* Print current position and total duration */
g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
    GST_TIME_ARGS (current), GST_TIME_ARGS (data.duration));
The next fragment jumps along the media timeline, using gst_element_seek_simple():
/* If seeking is enabled, we have not done it yet, and the time is right, seek */
if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) {
  g_print ("\nReached 10s, performing seek...\n");
  gst_element_seek_simple (data.playbin, GST_FORMAT_TIME,
      GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 30 * GST_SECOND);
  data.seek_done = TRUE;
}
GST_FORMAT_TIME: the format in which the target position is expressed (here, time).
GstSeekFlags: controls how the seek is performed, for example:
GST_SEEK_FLAG_FLUSH: discards all data currently flowing through the pipeline before the seek.
GST_SEEK_FLAG_KEY_UNIT: seeks to the key frame nearest the target position.
GST_SEEK_FLAG_ACCURATE: seeks to exactly the target position (can be slower).
Target position: the position on the timeline to jump to (see the sketch after this list).
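For example, replacing GST_SEEK_FLAG_KEY_UNIT with GST_SEEK_FLAG_ACCURATE trades speed for precision. A sketch, using the same playbin as above:

/* Accurate (possibly slower) seek to exactly 30 seconds */
gst_element_seek_simple (data.playbin, GST_FORMAT_TIME,
    GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, 30 * GST_SECOND);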
Message Pump
First, if the media duration changes we invalidate the cached value so that it will be queried again later:
case GST_MESSAGE_DURATION:
  /* The duration has changed, mark the current one as invalid */
  data->duration = GST_CLOCK_TIME_NONE;
  break;
case GST_MESSAGE_STATE_CHANGED: {
  GstState old_state, new_state, pending_state;
  gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
  if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
    g_print ("Pipeline state changed from %s to %s:\n",
        gst_element_state_get_name (old_state),
        gst_element_state_get_name (new_state));
/* Remember whether we are in the PLAYING state or not */ data->playing = (new_state == GST_STATE_PLAYING);
    if (data->playing) {
      /* We just moved to PLAYING. Check if seeking is possible */
      GstQuery *query;
      gint64 start, end;
      query = gst_query_new_seeking (GST_FORMAT_TIME);
      if (gst_element_query (data->playbin, query)) {
        gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
        if (data->seek_enabled) {
          g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
              GST_TIME_ARGS (start), GST_TIME_ARGS (end));
        } else {
          g_print ("Seeking is DISABLED for this stream.\n");
        }
      } else {
        g_printerr ("Seeking query failed.");
      }
      gst_query_unref (query);
    }
  }
}
/* Prints information about a Pad Template, including its Capabilities */
static void print_pad_templates_information (GstElementFactory * factory)
{
  const GList *pads;
  GstStaticPadTemplate *padtemplate;
g_print ("Pad Templates for %s:\n", gst_element_factory_get_longname (factory)); if (!gst_element_factory_get_num_pad_templates (factory)) { g_print (" none\n"); return; }
/* Shows the CURRENT capabilities of the requested pad in the given element */
static void print_pad_capabilities (GstElement *element, gchar *pad_name)
{
  GstPad *pad = NULL;
  GstCaps *caps = NULL;
  /* Retrieve pad */
  pad = gst_element_get_static_pad (element, pad_name);
  if (!pad) {
    g_printerr ("Could not retrieve pad '%s'\n", pad_name);
    return;
  }
  /* Retrieve negotiated caps (or acceptable caps if negotiation is not finished yet) */
  caps = gst_pad_get_current_caps (pad);
  if (!caps)
    caps = gst_pad_query_caps (pad, NULL);
  /* Print and free */
  g_print ("Caps for the %s pad:\n", pad_name);
  print_caps (caps, " ");
  gst_caps_unref (caps);
  gst_object_unref (pad);
}
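print_caps() is not shown above; for reference, the tutorial implements it by walking every GstStructure in the caps and serializing each field:

/* Prints each field of a GstStructure (callback for gst_structure_foreach) */
static gboolean print_field (GQuark field, const GValue * value, gpointer pfx)
{
  gchar *str = gst_value_serialize (value);
  g_print ("%s  %15s: %s\n", (gchar *) pfx, g_quark_to_string (field), str);
  g_free (str);
  return TRUE;
}

static void print_caps (const GstCaps * caps, const gchar * pfx)
{
  guint i;
  g_return_if_fail (caps != NULL);
  if (gst_caps_is_any (caps)) {
    g_print ("%sANY\n", pfx);
    return;
  }
  if (gst_caps_is_empty (caps)) {
    g_print ("%sEMPTY\n", pfx);
    return;
  }
  for (i = 0; i < gst_caps_get_size (caps); i++) {
    GstStructure *structure = gst_caps_get_structure (caps, i);
    g_print ("%s%s\n", pfx, gst_structure_get_name (structure));
    gst_structure_foreach (structure, print_field, (gpointer) pfx);
  }
}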
/* Create the element factories */
source_factory = gst_element_factory_find ("audiotestsrc");
sink_factory = gst_element_factory_find ("autoaudiosink");
if (!source_factory || !sink_factory) {
  g_printerr ("Not all element factories could be created.\n");
  return -1;
}
/* Print information about the pad templates of these factories */ print_pad_templates_information (source_factory); print_pad_templates_information (sink_factory);
/* Ask the factories to instantiate actual elements */ source = gst_element_factory_create (source_factory, "source"); sink = gst_element_factory_create (sink_factory, "sink");
/* Create the empty pipeline */ pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !sink) {
  g_printerr ("Not all elements could be created.\n");
  return -1;
}
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
if (gst_element_link (source, sink) != TRUE) {
  g_printerr ("Elements could not be linked.\n");
  gst_object_unref (pipeline);
  return -1;
}
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
  g_printerr ("Unable to set the pipeline to the playing state (check the bus for error messages).\n");
}
/* Wait until error, EOS or State Change */
bus = gst_element_get_bus (pipeline);
do {
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_STATE_CHANGED);
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (msg, &err, &debug_info);
      g_printerr ("Error received from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), err->message);
      g_printerr ("Debugging information: %s\n",
          debug_info ? debug_info : "none");
      g_clear_error (&err);
      g_free (debug_info);
      terminate = TRUE;
      break;
    case GST_MESSAGE_EOS:
      g_print ("End-Of-Stream reached.\n");
      terminate = TRUE;
      break;
    case GST_MESSAGE_STATE_CHANGED:
      /* We are only interested in state-changed messages from the pipeline */
      if (GST_MESSAGE_SRC (msg) == GST_OBJECT (pipeline)) {
        GstState old_state, new_state, pending_state;
        gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
        g_print ("\nPipeline state changed from %s to %s:\n",
            gst_element_state_get_name (old_state),
            gst_element_state_get_name (new_state));
        /* Print the current capabilities of the sink element */
        print_pad_capabilities (sink, "sink");
      }
      break;
    default:
      /* We should not reach here because we only asked for ERRORs, EOS and STATE_CHANGED */
      g_printerr ("Unexpected message received.\n");
      break;
  }
  gst_message_unref (msg);
  }
} while (!terminate);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue,
    audio_convert, audio_resample, audio_sink, video_queue, visual,
    video_convert, video_sink, NULL);
if (gst_element_link_many (audio_source, tee, NULL) != TRUE ||
    gst_element_link_many (audio_queue, audio_convert, audio_resample, audio_sink, NULL) != TRUE ||
    gst_element_link_many (video_queue, visual, video_convert, video_sink, NULL) != TRUE) {
  g_printerr ("Elements could not be linked.\n");
  gst_object_unref (pipeline);
  return -1;
}
/* Manually link the Tee, which has "Request" pads */
tee_audio_pad = gst_element_get_request_pad (tee, "src_%u");
g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad));
queue_audio_pad = gst_element_get_static_pad (audio_queue, "sink");
tee_video_pad = gst_element_get_request_pad (tee, "src_%u");
g_print ("Obtained request pad %s for video branch.\n", gst_pad_get_name (tee_video_pad));
queue_video_pad = gst_element_get_static_pad (video_queue, "sink");
if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK ||
    gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK) {
  g_printerr ("Tee could not be linked.\n");
  gst_object_unref (pipeline);
  return -1;
}
gst_object_unref (queue_audio_pad);
gst_object_unref (queue_video_pad);
/* Start playing the pipeline */ gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
    GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Release the request pads from the Tee, and unref them */
gst_element_release_request_pad (tee, tee_audio_pad);
gst_element_release_request_pad (tee, tee_video_pad);
gst_object_unref (tee_audio_pad);
gst_object_unref (tee_video_pad);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue,
    audio_convert, audio_sink, video_queue, visual, video_convert,
    video_sink, NULL);
if (gst_element_link_many (audio_source, tee, NULL) != TRUE ||
    gst_element_link_many (audio_queue, audio_convert, audio_sink, NULL) != TRUE ||
    gst_element_link_many (video_queue, visual, video_convert, video_sink, NULL) != TRUE) {
  g_printerr ("Elements could not be linked.\n");
  gst_object_unref (pipeline);
  return -1;
}
/* Manually link the Tee, which has "Request" pads */
tee_audio_pad = gst_element_get_request_pad (tee, "src_%u");
g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad));
queue_audio_pad = gst_element_get_static_pad (audio_queue, "sink");
tee_video_pad = gst_element_get_request_pad (tee, "src_%u");
g_print ("Obtained request pad %s for video branch.\n", gst_pad_get_name (tee_video_pad));
queue_video_pad = gst_element_get_static_pad (video_queue, "sink");
if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK ||
    gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK) {
  g_printerr ("Tee could not be linked.\n");
  gst_object_unref (pipeline);
  return -1;
}
gst_object_unref (queue_audio_pad);
gst_object_unref (queue_video_pad);
Finally, once the program is done, remember to release the request pads:
/* Release the request pads from the Tee, and unref them */
gst_element_release_request_pad (tee, tee_audio_pad);
gst_element_release_request_pad (tee, tee_video_pad);
gst_object_unref (tee_audio_pad);
gst_object_unref (tee_video_pad);
#define CHUNK_SIZE 1024   /* Amount of bytes we are sending in each buffer */
#define SAMPLE_RATE 44100 /* Samples per second we are sending */
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline, *app_source, *tee, *audio_queue, *audio_convert1,
      *audio_resample, *audio_sink;
  GstElement *video_queue, *audio_convert2, *visual, *video_convert, *video_sink;
  GstElement *app_queue, *app_sink;

  guint64 num_samples;  /* Number of samples generated so far (for timestamp generation) */
  gfloat a, b, c, d;    /* For waveform generation */

  guint sourceid;       /* To control the GSource */

  GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;
/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
 * The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
 * and is removed when appsrc has enough data (enough-data signal). */
static gboolean push_data (CustomData *data)
{
  GstBuffer *buffer;
  GstFlowReturn ret;
  int i;
  GstMapInfo map;
  gint16 *raw;
  gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
  gfloat freq;
/* Create a new empty buffer */ buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);
  /* Set its timestamp and duration */
  GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (num_samples, GST_SECOND, SAMPLE_RATE);
/* Push the buffer into the appsrc */ g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);
/* Free the buffer now that we are done with it */ gst_buffer_unref (buffer);
  if (ret != GST_FLOW_OK) {
    /* We got some error, stop sending data */
    return FALSE;
  }

  return TRUE;
}
/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
 * to the mainloop to start pushing data into the appsrc */
static void start_feed (GstElement *source, guint size, CustomData *data)
{
  if (data->sourceid == 0) {
    g_print ("Start feeding\n");
    data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
  }
}
/* This callback triggers when appsrc has enough data and we can stop sending.
 * We remove the idle handler from the mainloop */
static void stop_feed (GstElement *source, CustomData *data)
{
  if (data->sourceid != 0) {
    g_print ("Stop feeding\n");
    g_source_remove (data->sourceid);
    data->sourceid = 0;
  }
}
/* The appsink has received a buffer */
static GstFlowReturn new_sample (GstElement *sink, CustomData *data)
{
  GstSample *sample;
  /* Retrieve the buffer */
  g_signal_emit_by_name (sink, "pull-sample", &sample);
  if (sample) {
    /* The only thing we do in this example is print a * to indicate a received buffer */
    g_print ("*");
    gst_sample_unref (sample);
    return GST_FLOW_OK;
  }

  return GST_FLOW_ERROR;
}
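For these callbacks to fire, main() must connect them to the appsrc/appsink signals. In the tutorial this is done roughly as follows (audio_caps is assumed to have been built beforehand from a GstAudioInfo describing the S16 mono stream):

/* Configure appsrc and hook up the flow-control callbacks */
g_object_set (data.app_source, "caps", audio_caps, "format", GST_FORMAT_TIME, NULL);
g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed), &data);
g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed), &data);

/* Configure appsink: emit-signals must be TRUE for "new-sample" to fire */
g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL);
g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);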
/* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data)
{
  GError *err;
  gchar *debug_info;
  /* Print error details on the screen */
  gst_message_parse_error (msg, &err, &debug_info);
  g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
  g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
  g_clear_error (&err);
  g_free (debug_info);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (data.pipeline), data.app_source, data.tee,
    data.audio_queue, data.audio_convert1, data.audio_resample, data.audio_sink,
    data.video_queue, data.audio_convert2, data.visual, data.video_convert,
    data.video_sink, data.app_queue, data.app_sink, NULL);
if (gst_element_link_many (data.app_source, data.tee, NULL) != TRUE ||
    gst_element_link_many (data.audio_queue, data.audio_convert1, data.audio_resample, data.audio_sink, NULL) != TRUE ||
    gst_element_link_many (data.video_queue, data.audio_convert2, data.visual, data.video_convert, data.video_sink, NULL) != TRUE ||
    gst_element_link_many (data.app_queue, data.app_sink, NULL) != TRUE) {
  g_printerr ("Elements could not be linked.\n");
  gst_object_unref (data.pipeline);
  return -1;
}
/* Manually link the Tee, which has "Request" pads */
tee_audio_pad = gst_element_request_pad_simple (data.tee, "src_%u");
g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad));
queue_audio_pad = gst_element_get_static_pad (data.audio_queue, "sink");
tee_video_pad = gst_element_request_pad_simple (data.tee, "src_%u");
g_print ("Obtained request pad %s for video branch.\n", gst_pad_get_name (tee_video_pad));
queue_video_pad = gst_element_get_static_pad (data.video_queue, "sink");
tee_app_pad = gst_element_request_pad_simple (data.tee, "src_%u");
g_print ("Obtained request pad %s for app branch.\n", gst_pad_get_name (tee_app_pad));
queue_app_pad = gst_element_get_static_pad (data.app_queue, "sink");
if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK ||
    gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK ||
    gst_pad_link (tee_app_pad, queue_app_pad) != GST_PAD_LINK_OK) {
  g_printerr ("Tee could not be linked\n");
  gst_object_unref (data.pipeline);
  return -1;
}
gst_object_unref (queue_audio_pad);
gst_object_unref (queue_video_pad);
gst_object_unref (queue_app_pad);
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
bus = gst_element_get_bus (data.pipeline);
gst_bus_add_signal_watch (bus);
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback) error_cb, &data);
gst_object_unref (bus);
/* Start playing the pipeline */ gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
/* Create a GLib Main Loop and set it to run */ data.main_loop = g_main_loop_new (NULL, FALSE); g_main_loop_run (data.main_loop);
/* Release the request pads from the Tee, and unref them */
gst_element_release_request_pad (data.tee, tee_audio_pad);
gst_element_release_request_pad (data.tee, tee_video_pad);
gst_element_release_request_pad (data.tee, tee_app_pad);
gst_object_unref (tee_audio_pad);
gst_object_unref (tee_video_pad);
gst_object_unref (tee_app_pad);
/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
 * to the mainloop to start pushing data into the appsrc */
static void start_feed (GstElement *source, guint size, CustomData *data)
{
  if (data->sourceid == 0) {
    g_print ("Start feeding\n");
    data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
  }
}
The following callback is invoked when appsrc's internal queue is full. Here we simply remove the idle function with g_source_remove():
/* This callback triggers when appsrc has enough data and we can stop sending.
 * We remove the idle handler from the mainloop */
static void stop_feed (GstElement *source, CustomData *data)
{
  if (data->sourceid != 0) {
    g_print ("Stop feeding\n");
    g_source_remove (data->sourceid);
    data->sourceid = 0;
  }
}
/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
 * The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
 * and is removed when appsrc has enough data (enough-data signal). */
static gboolean push_data (CustomData *data)
{
  GstBuffer *buffer;
  GstFlowReturn ret;
  int i;
  GstMapInfo map;
  gint16 *raw;
  gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
  gfloat freq;
/* Create a new empty buffer */ buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);
  /* Set its timestamp and duration */
  GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (num_samples, GST_SECOND, SAMPLE_RATE);
  /* Generate some psychodelic waveforms */
  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  raw = (gint16 *) map.data;
Finally, the generated data is pushed into appsrc by emitting the push-buffer signal:
/* Push the buffer into the appsrc */ g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);
/* Free the buffer now that we are done with it */ gst_buffer_unref (buffer);
| # | Name | Description |
|---|---------|-------------|
| 0 | none | No debug information is output. |
| 1 | ERROR | Logs all fatal errors. These are errors that do not allow the core or elements to perform the requested action. The application can still recover if programmed to handle the conditions that triggered the error. |
| 2 | WARNING | Logs all warnings. Typically these are non-fatal, but user-visible problems are expected to happen. |
| 3 | FIXME | Logs all "fixme" messages. These typically indicate that a codepath known to be incomplete has been triggered. It may work in most cases, but may cause problems in specific instances. |
| 4 | INFO | Logs all informational messages. These are typically used for events in the system that only happen once, or are important and rare enough to be logged at this level. |
| 5 | DEBUG | Logs all debug messages. These are general debug messages for events that happen only a limited number of times during an object's lifetime; these include setup, teardown, change of parameters, etc. |
| 6 | LOG | Logs all log messages. These are messages for events that happen repeatedly during an object's lifetime; these include streaming and steady-state conditions. This is used for log messages that happen on every buffer in an element, for example. |
| 7 | TRACE | Logs all trace messages. These are messages that happen very, very often; for example, each time the reference count of a GstMiniObject, such as a GstBuffer or GstEvent, is modified. |
| 9 | MEMDUMP | Logs all memory dump messages. This is the heaviest logging and may include dumping the content of blocks of memory. |
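Levels can be set per debug category as well as globally via the GST_DEBUG environment variable. For example (the binary name is just a placeholder):

# Default to WARNING everywhere, but full DEBUG output for the audiotestsrc category
GST_DEBUG=2,audiotestsrc:5 ./my-app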
/* Build the pipeline */ pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL); bus = gst_element_get_bus (pipeline);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
  g_printerr ("Unable to set the pipeline to the playing state.\n");
  gst_object_unref (pipeline);
  return -1;
} else if (ret == GST_STATE_CHANGE_NO_PREROLL) {
  data.is_live = TRUE;
}
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
  g_printerr ("Unable to set the pipeline to the playing state.\n");
  gst_object_unref (pipeline);
  return -1;
} else if (ret == GST_STATE_CHANGE_NO_PREROLL) {
  data.is_live = TRUE;
}
case GST_MESSAGE_CLOCK_LOST:
  /* Get a new clock */
  gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
  gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
  break;