decodebin2 works with video files but not audio files

Peter Quiring pquiring at gmail.com
Sat Jul 21 14:44:18 PDT 2012


I'm using decodebin2 to decode files, but it doesn't work with audio-only
files.  The pipeline never moves from READY to PAUSED/PLAYING.  I tried
adding 'queue' elements between the pads that decodebin2 creates and my
sinks, as the FAQ suggests, but it still doesn't work.
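
For reference, the stall can be confirmed by querying the pipeline state
with a timeout (a minimal sketch, not part of my actual JNI code; it would
go right after the gst_element_set_state() call shown below):

  /* Diagnostic sketch only: ask for the state with a 5 second timeout.
   * With the audio-only files I would expect GST_STATE_CHANGE_ASYNC and a
   * current state that is still READY. */
  GstState cur, pending;
  GstStateChangeReturn ret;

  ret = gst_element_get_state(GST_ELEMENT(inst[id].pipeline), &cur, &pending,
    5 * GST_SECOND);
  g_print("state change ret=%d current=%s pending=%s\n", ret,
    gst_element_state_get_name(cur), gst_element_state_get_name(pending));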

I don't see any WARNs or ERRORs with GST_DEBUG set.
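
As an aside, the same debug thresholds can also be raised from C rather than
through the GST_DEBUG environment variable, which is sometimes easier to
arrange inside a JVM.  A sketch:

  /* Sketch: default everything to WARNING, verbose output for decodebin2. */
  gst_debug_set_default_threshold(GST_LEVEL_WARNING);
  gst_debug_set_threshold_for_name("decodebin2", GST_LEVEL_DEBUG);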

Any ideas?  I know it should work because I tried using gst-launch and it
works there.  What am I doing wrong?

Here is my source (native C called from Java via JNI).  The call to
gst_app_sink_pull_buffer() blocks forever with audio-only files (i.e. wav,
mp3).

JNIEXPORT jboolean JNICALL Java_javaforce_jni_JNI_gst_1read_1file
  (JNIEnv *env, jclass cls, jint id, jstring _file)
{
  GstBus *bus;

  inst[id].pipeline = gst_pipeline_new("my-pipeline");
  inst[id].filesrc = gst_element_factory_make("filesrc", "my-filesrc");
  inst[id].decoder = gst_element_factory_make("decodebin2", "my-decoder");

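  /* Audio branch: queue -> audioconvert -> appsink. */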
  inst[id].audio_queue = gst_element_factory_make("queue", "my-aqueue");
  inst[id].audioconvert = gst_element_factory_make("audioconvert",
    "my-audioconvert");
  inst[id].appsink_audio = gst_element_factory_make("appsink",
    "my-appsink-audio");

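  /* Video branch: queue -> image encoder -> appsink. */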
  inst[id].video_queue = gst_element_factory_make("queue", "my-vqueue");
  inst[id].imgenc = gst_element_factory_make(imageEncoder, "my-imgenc");
  inst[id].appsink_video = gst_element_factory_make("appsink",
    "my-appsink-video");

  inst[id].audio_sink = inst[id].audio_queue;
  inst[id].video_sink = inst[id].video_queue;

  g_signal_connect(inst[id].decoder, "pad-added",
    G_CALLBACK(cb_newpad_decoder), &inst[id]);

  gst_bin_add_many(GST_BIN(inst[id].pipeline),
    inst[id].filesrc, inst[id].decoder,
    inst[id].appsink_audio, inst[id].appsink_video,
    inst[id].imgenc, inst[id].audioconvert,
    inst[id].audio_queue, inst[id].video_queue,
    NULL);
  gst_element_link(inst[id].filesrc, inst[id].decoder);
  gst_element_link(inst[id].video_queue, inst[id].imgenc);
  gst_element_link(inst[id].imgenc, inst[id].appsink_video);

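  /* Caps filter for the audio appsink: signed raw integer audio, little-endian (1234). */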
  GstCaps *caps = gst_caps_new_simple("audio/x-raw-int"
    , "signed", G_TYPE_BOOLEAN, TRUE
    , "endianness", G_TYPE_INT, 1234
    , NULL);

  gst_element_link(inst[id].audio_queue, inst[id].audioconvert);
  gst_element_link_filtered(inst[id].audioconvert, inst[id].appsink_audio,
    caps);

  gst_caps_unref(caps);

  const char *file = (*env)->GetStringUTFChars(env,_file,NULL);
  if (file) g_object_set(G_OBJECT(inst[id].filesrc), "location", file, NULL);
  (*env)->ReleaseStringUTFChars(env, _file, file);

  bus = gst_pipeline_get_bus(GST_PIPELINE(inst[id].pipeline));
  gst_bus_add_watch(bus, bus_call, &inst[id]);
  gst_object_unref(bus);

  gst_element_set_state(GST_ELEMENT(inst[id].pipeline), GST_STATE_PLAYING);

  return TRUE;
}
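
I didn't include cb_newpad_decoder above; it follows the usual pattern of
linking the new decodebin2 pad to the audio or video queue based on its
caps.  A rough sketch (not my exact code; 'instance_t' is a placeholder for
whatever the inst[] struct is actually called):

static void cb_newpad_decoder(GstElement *decoder, GstPad *pad, gpointer data)
{
  instance_t *in = (instance_t *)data;  /* placeholder type name */
  GstCaps *caps = gst_pad_get_caps(pad);
  const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
  GstPad *sinkpad = NULL;

  /* Dispatch on the media type of the newly created pad. */
  if (g_str_has_prefix(name, "audio/"))
    sinkpad = gst_element_get_static_pad(in->audio_sink, "sink");
  else if (g_str_has_prefix(name, "video/"))
    sinkpad = gst_element_get_static_pad(in->video_sink, "sink");

  if (sinkpad != NULL) {
    if (!gst_pad_is_linked(sinkpad))
      gst_pad_link(pad, sinkpad);
    gst_object_unref(sinkpad);
  }
  gst_caps_unref(caps);
}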

JNIEXPORT jbyteArray JNICALL Java_javaforce_jni_JNI_gst_1read_1audio
  (JNIEnv *env, jclass cls, jint id)
{
  GstBuffer *buf = (GstBuffer *)
    gst_app_sink_pull_buffer((GstAppSink*)inst[id].appsink_audio);
  if (buf == NULL) return NULL;
  gsize bufsize = GST_BUFFER_SIZE(buf);
  void *bufdata = GST_BUFFER_DATA(buf);
  if (inst[id].caps_audio == NULL) {
    inst[id].caps_audio = gst_caps_copy(GST_BUFFER_CAPS(buf));
  }
  jbyteArray ba = (*env)->NewByteArray(env, bufsize);
  (*env)->SetByteArrayRegion(env, ba, 0, bufsize, bufdata);
  gst_buffer_unref(buf);
  return ba;
}
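
bus_call isn't shown either; it is meant to be the usual bus watch that just
logs errors and EOS, roughly like this (sketch, not verbatim):

static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
  switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_ERROR: {
      GError *err = NULL;
      gchar *dbg = NULL;
      gst_message_parse_error(msg, &err, &dbg);
      g_print("ERROR: %s (%s)\n", err->message, dbg ? dbg : "no debug info");
      g_error_free(err);
      g_free(dbg);
      break;
    }
    case GST_MESSAGE_EOS:
      g_print("EOS\n");
      break;
    default:
      break;
  }
  return TRUE;  /* keep the watch installed */
}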



