
RTSP streaming in Java

For the question "RTSP streaming in Java", one good answer is collected below.

Can anyone point me to a tutorial on how to implement an RTSP streaming server in Java using GStreamer?




1> enthusiasticgeek:

The RTSP server build process (the code is in C, but look at my client code below to see how to use the gstreamer-java API; it should be fairly simple).

I modified the code from this URL:

http://www.ip-sense.com/linuxsense/how-to-develop-a-rtsp-server-in-linux-using-gstreamer/

/* GStreamer
 * Copyright (C) 2008 Wim Taymans 
 * Copyright (c) 2012 enthusiasticgeek 
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */


//Edited by: enthusiasticgeek (c) 2012 for Stack Overflow Sept 11, 2012

//###########################################################################
//Important
//###########################################################################

//On ubuntu: sudo apt-get install libgstrtspserver-0.10-0 libgstrtspserver-0.10-dev

//Play with VLC
//rtsp://localhost:8554/test

//video decode only:  gst-launch -v rtspsrc location="rtsp://localhost:8554/test" ! rtph264depay ! ffdec_h264 ! autovideosink
//audio and video: 
//gst-launch -v rtspsrc location="rtsp://localhost:8554/test" name=demux demux. ! queue ! rtph264depay ! ffdec_h264 ! ffmpegcolorspace ! autovideosink sync=false demux. ! queue ! rtppcmadepay  ! alawdec ! autoaudiosink

//###########################################################################
#include <gst/gst.h>

#include <gst/rtsp-server/rtsp-server.h>

/* define this if you want the resource to only be available when using
 * user/admin as the password */
#undef WITH_AUTH

/* this timeout is periodically run to clean up the expired sessions from the
 * pool. This needs to be run explicitly currently but might be done
 * automatically as part of the mainloop. */
static gboolean
timeout (GstRTSPServer * server, gboolean ignored)
{
  GstRTSPSessionPool *pool;

  pool = gst_rtsp_server_get_session_pool (server);
  gst_rtsp_session_pool_cleanup (pool);
  g_object_unref (pool);

  return TRUE;
}

int
main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstRTSPServer *server;
  GstRTSPMediaMapping *mapping;
  GstRTSPMediaFactory *factory;
#ifdef WITH_AUTH
  GstRTSPAuth *auth;
  gchar *basic;
#endif

  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);

  /* create a server instance */
  server = gst_rtsp_server_new ();
  gst_rtsp_server_set_service(server,"8554"); //set the port #

  /* get the mapping for this server, every server has a default mapper object
   * that be used to map uri mount points to media factories */
  mapping = gst_rtsp_server_get_media_mapping (server);

#ifdef WITH_AUTH
  /* make a new authentication manager. it can be added to control access to all
   * the factories on the server or on individual factories. */
  auth = gst_rtsp_auth_new ();
  basic = gst_rtsp_auth_make_basic ("user", "admin");
  gst_rtsp_auth_set_basic (auth, basic);
  g_free (basic);
  /* configure in the server */
  gst_rtsp_server_set_auth (server, auth);
#endif

  /* make a media factory for a test stream. The default media factory can use
   * gst-launch syntax to create pipelines.
   * any launch line works as long as it contains elements named pay%d. Each
   * element with pay%d names will be a stream */
  factory = gst_rtsp_media_factory_new ();

  /* Note: the video caps values below (width/height/framerate) are assumed;
   * the originals were lost in formatting. The audio branch is a separate
   * stream, so it is not linked with '!' after the video payloader. */
  gst_rtsp_media_factory_set_launch (factory, "( "
      "videotestsrc ! video/x-raw-yuv,width=352,height=288,framerate=15/1 ! "
      "x264enc ! queue ! rtph264pay name=pay0 pt=96 "
      "audiotestsrc ! audio/x-raw-int,rate=8000 ! alawenc ! rtppcmapay name=pay1 pt=97 "
      ")");

  /* attach the test factory to the /test url */
  gst_rtsp_media_mapping_add_factory (mapping, "/test", factory);

  /* don't need the ref to the mapper anymore */
  g_object_unref (mapping);

  /* attach the server to the default maincontext */
  if (gst_rtsp_server_attach (server, NULL) == 0)
    goto failed;

  /* add a timeout for the session cleanup */
  g_timeout_add_seconds (2, (GSourceFunc) timeout, server);

  /* start serving, this never stops */
  g_main_loop_run (loop);

  return 0;

  /* ERRORS */
failed:
  {
    g_print ("failed to attach the server\n");
    return -1;
  }
}

Makefile

# Copyright (c) 2012 enthusiasticgeek
# RTSP demo for Stack Overflow

sample:
    gcc -Wall -I/usr/include/gstreamer-0.10 rtsp.c -o rtsp `pkg-config --libs --cflags gstreamer-0.10 gstreamer-rtsp-0.10` -lglib-2.0 -lgstrtspserver-0.10 -lgstreamer-0.10

After building the binary, simply run ./rtsp, then open another terminal tab and test the server with the following pipeline.

I tested the decode pipeline; it works fine!

gst-launch -v rtspsrc location="rtsp://localhost:8554/test" name=demux demux. ! queue ! rtph264depay ! ffdec_h264 ! ffmpegcolorspace ! autovideosink sync=false demux. ! queue ! rtppcmadepay  ! alawdec ! autoaudiosink
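
Before the full Swing client below, here is a minimal sketch of consuming a stream with the gstreamer-java API: it hands the RTSP URI to a playbin2 element and lets GStreamer assemble the receive/decode pipeline itself. The class name, the playbin2 element, and the localhost URI are my assumptions (they are not in the original post), but they match the GStreamer 0.10 bindings used throughout this answer.

import org.gstreamer.Element;
import org.gstreamer.ElementFactory;
import org.gstreamer.Gst;

public class SimpleRtspPlay {
    public static void main(String[] args) {
        // Initialize GStreamer, as the full client below also does.
        args = Gst.init("SimpleRtspPlay", args);

        // playbin2 builds the RTSP receive/depay/decode/display pipeline internally.
        Element playbin = ElementFactory.make("playbin2", "player");

        // Point it at the /test mount served by the C server above.
        playbin.set("uri", "rtsp://localhost:8554/test");

        // Start playback and run the GLib main loop until Gst.quit() is called.
        playbin.play();
        Gst.main();
    }
}

Unlike the Swing client below, this plays in a native video window created by the default sink rather than inside a JFrame.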

Java client code

 // Display RTSP streaming of video
 // (c) 2011 enthusiasticgeek
 // This code is distributed in the hope that it will be useful,
 // but WITHOUT ANY WARRANTY; without even the implied warranty of
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE
 // Leave Credits intact

package video2; //replace this with your package
import java.awt.BorderLayout;
import java.awt.Dimension;

import javax.swing.JFrame;
import javax.swing.SwingUtilities;

//import org.gstreamer.Caps;
import org.gstreamer.Bus;
import org.gstreamer.Element;
import org.gstreamer.ElementFactory;
import org.gstreamer.Gst;
import org.gstreamer.GstObject;
import org.gstreamer.Pad;
import org.gstreamer.PadDirection;
import org.gstreamer.Pipeline;
import org.gstreamer.State;
import org.gstreamer.TagList;
import org.gstreamer.swing.VideoComponent;

/**
 * A Simple videotest example.
 */
public class Main {
    public Main() {
    }
    private static Pipeline pipe;
    public static void main(String[] args) {
    // Quartz is abysmally slow at scaling video for some reason, so turn it off.
    System.setProperty("apple.awt.graphics.UseQuartz", "false");

    args = Gst.init("SwingVideoTest", args);

    pipe = new Pipeline("pipeline");
    /*
    final Element videosrc = ElementFactory.make("videotestsrc", "source");
    final Element videofilter = ElementFactory.make("capsfilter", "flt");
    videofilter.setCaps(Caps.fromString("video/x-raw-yuv, width=720, height=576"
            + ", bpp=32, depth=32, framerate=25/1"));
    */

     pipe.getBus().connect(new Bus.ERROR() {
        public void errorMessage(GstObject source, int code, String message) {
            System.out.println("Error occurred: " + message);
            Gst.quit();
        }
    });
    pipe.getBus().connect(new Bus.STATE_CHANGED() {
        public void stateChanged(GstObject source, State old, State current, State pending) {
            if (source == pipe) {
                System.out.println("Pipeline state changed from " + old + " to " + current);
            }
        }
    });
    pipe.getBus().connect(new Bus.EOS() {
        public void endOfStream(GstObject source) {
            System.out.println("Finished playing file");
            Gst.quit();
        }
    });        

     pipe.getBus().connect(new Bus.TAG() {
        public void tagsFound(GstObject source, TagList tagList) {
            for (String tag : tagList.getTagNames()) {
                System.out.println("Found tag " + tag + " = "
                        + tagList.getValue(tag, 0));
            }
        }
    });

    final Element source = ElementFactory.make("rtspsrc", "Source");
    final Element demux = ElementFactory.make("rtpmp4vdepay", "Depay");
    final Element decoder=ElementFactory.make("ffdec_mpeg4", "Decoder");
    final Element colorspace = ElementFactory.make("ffmpegcolorspace",  "Colorspace");
    //final Element sink = ElementFactory.make ("autovideosink", "Output");

    SwingUtilities.invokeLater(new Runnable() {

        public void run() {
            // Create the video component and link it in
            VideoComponent videoComponent = new VideoComponent();
            Element videosink = videoComponent.getElement();

           source.connect(new Element.PAD_ADDED() {
           public void padAdded(Element element, Pad pad) {
            pad.link(demux.getStaticPad("sink"));
           }
            });

           Pad p = new Pad(null, PadDirection.SRC);
           source.addPad(p);

            source.set("location","rtsp://:@/mpeg4/1/media.amp");  //replace this with your source

            pipe.addMany(source, demux, decoder, colorspace, videosink);
            Element.linkMany(demux, decoder, colorspace, videosink);

            // Now create a JFrame to display the video output
            JFrame frame = new JFrame("Swing Video Test");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.add(videoComponent, BorderLayout.CENTER);
            videoComponent.setPreferredSize(new Dimension(720, 576));
            frame.pack();
            frame.setVisible(true);

            // Start the pipeline processing
            pipe.play();
        }
    });
    }
}
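
Note that the client above is written for an MPEG-4 camera stream (rtpmp4vdepay and ffdec_mpeg4). To point it at the /test mount served by the C server, which payloads H.264, swap the depayloader and decoder for the ones already used in the gst-launch test line and change the location. A sketch of just the lines that change (element names taken from the test pipeline above; the URL assumes the server runs locally on port 8554):

// Swap the depayloader/decoder to match the H.264 stream served at /test.
final Element demux = ElementFactory.make("rtph264depay", "Depay");
final Element decoder = ElementFactory.make("ffdec_h264", "Decoder");

// ...and point rtspsrc at the local server instead of the camera.
source.set("location", "rtsp://localhost:8554/test");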
