Sound support working!
diff --git a/libchimara/schannel.c b/libchimara/schannel.c
index 726b08dc9338ca177d33efa162fc8647c28e7e16..4ec75c0985c1df4ea8555ffc2434a0fa2155dc1c 100644
--- a/libchimara/schannel.c
+++ b/libchimara/schannel.c
@@ -9,9 +9,143 @@
 #include "schannel.h"
 #include "chimara-glk-private.h"
 #include "gi_dispa.h"
+#include "gi_blorb.h"
+#include "resource.h"
 
 extern GPrivate *glk_data_key;
 
+#ifdef GSTREAMER_SOUND
+/* Stop any currently playing sound on this channel, and remove any
+ format-specific GStreamer elements from the channel. */
+static void
+clean_up_after_playing_sound(schanid_t chan)
+{
+       if(!gst_element_set_state(chan->pipeline, GST_STATE_NULL))
+               WARNING_S(_("Could not set GstElement state to"), "NULL");
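+       /* The demuxer and decoder are added per-sound by on_type_found(), so
+        drop them here before the next sound is played */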
+       if(chan->demux)
+       {
+               gst_bin_remove(GST_BIN(chan->pipeline), chan->demux);
+               chan->demux = NULL;
+       }
+       if(chan->decode)
+       {
+               gst_bin_remove(GST_BIN(chan->pipeline), chan->decode);
+               chan->decode = NULL;
+       }
+}
+
+/* This signal is emitted whenever the GStreamer pipeline generates a message.
+ Most messages are harmless. */
+static void
+on_pipeline_message(GstBus *bus, GstMessage *message, schanid_t s)
+{
+       /* g_printerr("Got %s message\n", GST_MESSAGE_TYPE_NAME(message)); */
+
+       GError *err;
+       gchar *debug_message;
+       
+       switch(GST_MESSAGE_TYPE(message)) {
+       case GST_MESSAGE_ERROR: 
+       {
+               gst_message_parse_error(message, &err, &debug_message);
+               IO_WARNING(_("GStreamer error"), err->message, debug_message);
+               g_error_free(err);
+               g_free(debug_message);
+               clean_up_after_playing_sound(s);
+       }
+               break;
+       case GST_MESSAGE_WARNING:
+       {
+               gst_message_parse_warning(message, &err, &debug_message);
+               IO_WARNING(_("GStreamer warning"), err->message, debug_message);
+               g_error_free(err);
+               g_free(debug_message);
+       }
+               break;
+       case GST_MESSAGE_INFO:
+       {
+               gst_message_parse_info(message, &err, &debug_message);
+               g_message("GStreamer info \"%s\": %s", err->message, debug_message);
+               g_error_free(err);
+               g_free(debug_message);
+       }
+               break;
+       case GST_MESSAGE_EOS:
+               /* end-of-stream */
+               clean_up_after_playing_sound(s);
+               break;
+       default:
+               /* unhandled message */
+               break;
+       }
+}
+
+/* This signal is emitted when the OGG demuxer element has decided what output
+ pads it will create. We then connect the decoder element dynamically. */
+static void
+on_ogg_demuxer_pad_added(GstElement *demux, GstPad *pad, schanid_t s)
+{
+       GstPad *sinkpad;
+       
+       /* We can now link this pad with the vorbis-decoder sink pad */
+       sinkpad = gst_element_get_static_pad(s->decode, "sink");
+       if(gst_pad_link(pad, sinkpad) != GST_PAD_LINK_OK)
+               WARNING(_("Could not link OGG demuxer with Vorbis decoder"));
+       gst_object_unref(sinkpad);
+}
+
+/* This signal is emitted when the typefinder element has found the type of
+ its input. Now that we know what kind of input stream we have, we can connect
+ the proper demuxer/decoder elements. */
+static void
+on_type_found(GstElement *typefind, guint probability, GstCaps *caps, schanid_t s)
+{
+       gchar *type = gst_caps_to_string(caps);
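+       /* Pick demuxer/decoder elements based on the MIME type reported by typefind */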
+       if(strcmp(type, "application/ogg") == 0) 
+       {
+               s->demux = gst_element_factory_make("oggdemux", NULL);
+               s->decode = gst_element_factory_make("vorbisdec", NULL);
+               if(!s->demux || !s->decode)
+               {
+                       WARNING(_("Could not create one or more GStreamer elements"));
+                       goto finally;
+               }
+               gst_bin_add_many(GST_BIN(s->pipeline), s->demux, s->decode, NULL);
+               if(!gst_element_link(s->typefind, s->demux) || !gst_element_link(s->decode, s->convert))
+               {
+                       WARNING(_("Could not link GStreamer elements"));
+                       goto finally;
+               }
+               /* We link the demuxer and decoder together dynamically, since the
+                demuxer doesn't know what source pads it will have until it starts
+                demuxing the stream */
+               g_signal_connect(s->demux, "pad-added", G_CALLBACK(on_ogg_demuxer_pad_added), s);
+       }
+       else if(strcmp(type, "audio/x-aiff") == 0)
+       {
+               s->decode = gst_element_factory_make("aiffparse", NULL);
+               if(!s->decode)
+               {
+                       WARNING(_("Could not create 'aiffparse' GStreamer element"));
+                       goto finally;
+               }
+               gst_bin_add(GST_BIN(s->pipeline), s->decode);
+               if(!gst_element_link_many(s->typefind, s->decode, s->convert, NULL))
+               {
+                       WARNING(_("Could not link GStreamer elements"));
+                       goto finally;
+               }
+       }
+       else
+       {
+               WARNING_S(_("Unexpected audio type in blorb"), type);
+       }
+
+finally:
+       g_free(type);
+}
+#endif /* GSTREAMER_SOUND */
+
 /**
  * glk_schannel_create:
  * @rock: The rock value to give the new sound channel.
@@ -44,20 +178,32 @@ glk_schannel_create(glui32 rock)
        s->pipeline = gst_pipeline_new(pipeline_name);
        g_free(pipeline_name);
 
+       /* Watch for messages from the pipeline */
+       GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(s->pipeline));
+       gst_bus_add_signal_watch(bus);
+       g_signal_connect(bus, "message", G_CALLBACK(on_pipeline_message), s);
+       gst_object_unref(bus);
+
        /* Create GStreamer elements to put in the pipeline */
-       s->source = gst_element_factory_make("audiotestsrc", NULL);
+       s->source = gst_element_factory_make("giostreamsrc", NULL);
+       s->typefind = gst_element_factory_make("typefind", NULL);
+       s->convert = gst_element_factory_make("audioconvert", NULL);
        s->filter = gst_element_factory_make("volume", NULL);
        s->sink = gst_element_factory_make("autoaudiosink", NULL);
-       if(!s->source || !s->filter || !s->sink) {
-               WARNING("Could not create one or more GStreamer elements");
+       if(!s->source || !s->typefind || !s->convert || !s->filter || !s->sink) {
+               WARNING(_("Could not create one or more GStreamer elements"));
                goto fail;
        }
-               
-       gst_bin_add_many(GST_BIN(s->pipeline), s->source, s->filter, s->sink, NULL);
-       if(!gst_element_link_many(s->source, s->filter, s->sink, NULL)) {
-               WARNING("Could not link GStreamer elements");
+
+       /* Put the elements in the pipeline and link as many together as we can
+        without knowing the type of the audio stream */
+       gst_bin_add_many(GST_BIN(s->pipeline), s->source, s->typefind, s->convert, s->filter, s->sink, NULL);
+       /* Link elements: Source -> typefinder -> ??? -> Converter -> Volume filter -> Sink */
+       if(!gst_element_link(s->source, s->typefind) || !gst_element_link_many(s->convert, s->filter, s->sink, NULL)) {
+               WARNING(_("Could not link GStreamer elements"));
                goto fail;
        }
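+       /* The typefinder tells us how to fill in the "???" link above once the
+        type of the stream is known */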
+       g_signal_connect(s->typefind, "have-type", G_CALLBACK(on_type_found), s);
        
        return s;
 
@@ -94,7 +240,8 @@ glk_schannel_destroy(schanid_t chan)
                (*glk_data->unregister_obj)(chan, gidisp_Class_Schannel, chan->disprock);
                chan->disprock.ptr = NULL;
        }
-       
+
+       /* This also frees all the objects inside the pipeline */
        if(chan->pipeline)
                gst_object_unref(chan->pipeline);
        
@@ -225,6 +372,26 @@ glk_schannel_play_ext(schanid_t chan, glui32 snd, glui32 repeats, glui32 notify)
 {
        VALID_SCHANNEL(chan, return 0);
 #ifdef GSTREAMER_SOUND
+       ChimaraGlkPrivate *glk_data = g_private_get(glk_data_key);
+       
+       if(!glk_data->resource_map) {
+               if(!glk_data->resource_load_callback) {
+                       WARNING(_("No resource map has been loaded yet."));
+                       return 0;
+               }
+               WARNING(_("Loading sound resources from an alternative location is not yet supported."));
+               return 0;
+       }
+       
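+       /* Pull the Snd resource out of the Blorb resource map as an in-memory chunk */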
+       giblorb_result_t resource;
+       giblorb_err_t result = giblorb_load_resource(glk_data->resource_map, giblorb_method_Memory, &resource, giblorb_ID_Snd, snd);
+       if(result != giblorb_err_None) {
+               WARNING_S( _("Error loading resource"), giblorb_get_error_message(result) );
+               return 0;
+       }
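+       /* Wrap the resource data in a GInputStream and hand it to the giostreamsrc element */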
+       GInputStream *stream = g_memory_input_stream_new_from_data(resource.data.ptr, resource.length, NULL);
+       g_object_set(chan->source, "stream", stream, NULL);
+       
        if(!gst_element_set_state(chan->pipeline, GST_STATE_PLAYING)) {
                WARNING_S(_("Could not set GstElement state to"), "PLAYING");
                return 0;
@@ -247,8 +414,7 @@ glk_schannel_stop(schanid_t chan)
 {
        VALID_SCHANNEL(chan, return);
 #ifdef GSTREAMER_SOUND
-       if(!gst_element_set_state(chan->pipeline, GST_STATE_READY))
-               WARNING_S(_("Could not set GstElement state to"), "READY");
+       clean_up_after_playing_sound(chan);
 #endif
 }
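
From the Glk API side, the code above is driven through the standard sound-channel calls. Below is a minimal sketch of a Glk program that would exercise the new path; it assumes the interpreter has already loaded a Blorb file as its resource map, and the resource number 3 and the key-wait loop are purely illustrative.

#include "glk.h"

void
glk_main(void)
{
    winid_t win;
    schanid_t chan;
    event_t ev;

    win = glk_window_open(0, 0, 0, wintype_TextBuffer, 0);

    /* Sound channels are optional; check the gestalt selector first */
    if(!glk_gestalt(gestalt_Sound, 0))
        return;

    chan = glk_schannel_create(0);            /* rock = 0 */
    if(!chan)
        return;

    glk_schannel_set_volume(chan, 0x10000);   /* full volume, 16.16 fixed point */

    /* Play Snd resource 3 from the Blorb resource map once;
       glk_schannel_play() returns 0 on failure */
    if(glk_schannel_play(chan, 3)) {
        /* Keep the interpreter alive until a key is pressed, so the
           sound has a chance to play */
        glk_request_char_event(win);
        do {
            glk_select(&ev);
        } while(ev.type != evtype_CharInput);
    }

    glk_schannel_stop(chan);
    glk_schannel_destroy(chan);
}

glk_schannel_play() is the Glk shorthand for glk_schannel_play_ext() with one repeat and no notification, so it lands in the code added above: the resource data is handed to the giostreamsrc element, and the typefinder then selects the Ogg Vorbis or AIFF branch in on_type_found().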