neuray API Programmer's Manual

Extended example for the RTMP server

[Previous] [Next] [Up]

This example renders a scene and serves an interactive video stream over RTMP.

New Topics

  • Running an RTMP server together with an HTTP server and rendering a scene with mouse interaction.

Detailed Description

Running an RTMP and HTTP server with mouse interaction

This example first starts an HTTP server from which a Flash application is fetched, either with a browser or with a standalone Flash client; the client then uses this application to view and interact with the video stream.

The stream will by default use the "screen video" codec, which is provided with the library, to encode the canvas produced by rendering the scene. Each encoded frame is then sent over the RTMP stream to the flash client.

On the connection, a so-called Remote Procedure Call (RPC) handler is installed, which gets called when the client interacts with the video stream using the mouse.

The provided flash file (the .swf file) can be reproduced running the free Adobe Flex SDK compiler mxmlc on the included .mxml file.

Example Source

Source Code Location: examples/example_rtmp_server_render.cpp

‎/******************************************************************************
 * Copyright 1986, 2011 NVIDIA Corporation. All rights reserved.
 *****************************************************************************/

// examples/example_rtmp_server_render.cpp
//
// Serves a flash player (.swf file) over HTTP to a client browser which then connects to the RTMP
// server which produces a video stream from the rendering of a scene.

#include <mi/neuraylib.h>

// Include code shared by all examples.
#include "example_shared.h"
// Include an implementation of ITile, ICanvas, and IRender_target.
#include "example_render_target.h"

#include <fstream>
#include <iostream>
#include <string>
#include <vector>

// HTTP server implementation
//
// The HTTP server just serves the .swf file.

// A simple implementation of the IBuffer interface backed by a std::vector.
class Buffer : public mi::base::Interface_implement<mi::IBuffer>
{
public:
    // Returns a pointer to the buffer contents, or 0 for an empty buffer.
    // (Dereferencing &m_buffer[0] on an empty vector is undefined behavior.)
    const mi::Uint8* get_data() { return m_buffer.empty() ? 0 : &m_buffer[0]; }

    // Returns the size of the buffer in bytes.
    mi::Size get_data_size() const { return m_buffer.size(); }

    // Copies \p content into the buffer (member-initializer instead of
    // default-construct-then-assign).
    Buffer( const std::vector<mi::Uint8>& content) : m_buffer( content) { }

private:
    std::vector<mi::Uint8> m_buffer;
};

// An HTTP response handler which always sets the content type for flash.
class Response_handler : public mi::base::Interface_implement<mi::http::IResponse_handler>
{
public:
    void handle( mi::http::IConnection* connection)
    {
        // Hold the response in a Handle: get_response() returns a new
        // reference, which the original raw pointer leaked on every request.
        mi::base::Handle< mi::http::IResponse> iresponse( connection->get_response());
        iresponse->set_header( "Content-Type", "application/x-shockwave-flash");
    }
};

// An HTTP request handler which always sends the .swf file.
class Request_handler : public mi::base::Interface_implement<mi::http::IRequest_handler>
{
public:
    // \p swf_file must outlive this handler; only the pointer is stored.
    Request_handler( const char* swf_file) : m_swf_file( swf_file) { }

    // Reads the whole .swf file into memory and enqueues it on the connection.
    bool handle( mi::http::IConnection* connection)
    {
        // Open at the end (std::ios::ate) so tellg() yields the file size.
        std::ifstream file( m_swf_file, std::ios::in|std::ios::binary|std::ios::ate);
        check_success( file);

        std::ifstream::pos_type size = file.tellg();
        std::vector<mi::Uint8> data( size);
        if( !data.empty()) {
            // Guard: &data[0] is undefined behavior on an empty vector.
            file.seekg( 0, std::ios::beg);
            file.read( reinterpret_cast<char*>( &data[0]), size);
            // Verify the read actually delivered the requested bytes.
            check_success( file);
        }
        file.close();

        mi::base::Handle< mi::IBuffer> buffer( new Buffer( data));
        connection->enqueue( buffer.get());
        return true;
    }

private:
    const char* m_swf_file;  // path to the .swf file served to every client
};

// RTMP server implementation
//
// The RTMP server renders a given scene and interprets mouse movements as camera movements.

// An RTMP play event handler that chooses the screen video codec and initializes it with a
// predefined window size.
class Play_event_handler : public mi::base::Interface_implement<mi::rtmp::IPlay_event_handler>
{
public:
    // Initializes the codec on play start and closes it on play stop.
    // Note: the parameter type contained a documentation-generation artifact
    // ("INVALID_DOXYREF") in the original listing; it is plain mi::IVideo_data**.
    bool handle( bool is_start, mi::rtmp::IStream* stream, mi::IVideo_data** out)
    {
        if( is_start) {
            // The codec must be selected before it can be retrieved.
            check_success( stream->use_codec( "screen video"));
            mi::base::Handle< mi::IVideo_encoder> codec( stream->get_video_codec());
            check_success( codec->init( 512, 384, out));
        }
        else {
            mi::base::Handle< mi::IVideo_encoder> codec( stream->get_video_codec());
            check_success( codec->close( out));
        }
        return true;
    }
};

// An RTMP frame event handler that encodes a frame and gives it to the RTMP server for
// sending. Note that this event runs in another thread than the other event handlers, most
// importantly the render handler, so care needs to be taken to avoid synchronization issues.
class Frame_event_handler : public mi::base::Interface_implement<mi::rtmp::IFrame_event_handler>
{
public:
    // Encodes the most recently rendered canvas into *out.
    // (The original listing contained an "INVALID_DOXYREF" documentation
    // artifact in this signature; the type is plain mi::IVideo_data**.)
    bool handle( mi::rtmp::IStream* stream, mi::IVideo_data** out, bool send_queue_is_full)
    {
        if( send_queue_is_full) // we do not want to increase buffering
            return true;
        mi::base::Handle< mi::IVideo_encoder> codec( stream->get_video_codec());
        // Copy the cached canvas handle under the lock; the Handle copy
        // retains the canvas, replacing the manual retain()/release() pair
        // and staying exception-safe.
        mi::base::Handle< mi::neuraylib::ICanvas> canvas;
        {
            mi::base::Lock::Block block( &m_cached_canvas_lock);
            canvas = m_cached_canvas;
        }
        if( !canvas)
            return true; // nothing rendered yet
        return codec->encode_canvas( canvas.get(), out);
    }

    // Called from the render thread to publish a freshly rendered canvas.
    void update_canvas(mi::base::Handle< mi::neuraylib::ICanvas> new_canvas)
    {
        mi::base::Lock::Block block( &m_cached_canvas_lock);
        m_cached_canvas = new_canvas;
    }
private:
    mi::base::Lock m_cached_canvas_lock;              // guards m_cached_canvas
    mi::base::Handle< mi::neuraylib::ICanvas> m_cached_canvas;
};


// An RTMP render event handler: renders the stored scene into a canvas and
// publishes the result to the frame event handler for encoding.
class Render_event_handler : public mi::base::Interface_implement<mi::rtmp::IRender_event_handler>
{
public:
    Render_event_handler( mi::base::Handle< mi::neuraylib::IScope> scope,
                          mi::base::Handle< Frame_event_handler> handler)
        : m_scope( scope), m_frame_handler(handler) { }

    bool handle( mi::rtmp::IStream* stream)
    {
        // Every render pass gets its own transaction on the stored scope.
        mi::base::Handle< mi::neuraylib::ITransaction> trans( m_scope->create_transaction());
        {
            mi::base::Handle< mi::neuraylib::IScene> scene(
                trans->edit<mi::neuraylib::IScene>( "the_scene"));
            mi::base::Handle< mi::neuraylib::IRender_context> context(
                scene->get_render_context( trans.get(), "rt_bsp"));
            check_success( context.is_valid_interface());

            // Render into a fixed 512x384 target and hand the canvas over.
            Render_target target( 512, 384);
            context->render( trans.get(), &target, 0);
            mi::base::Handle< mi::neuraylib::ICanvas> canvas( target.get_canvas( 0));
            m_frame_handler->update_canvas( canvas);
        }
        trans->commit();
        return true;
    }

private:
    mi::base::Handle< mi::neuraylib::IScope> m_scope;          // scope for transactions
    mi::base::Handle< Frame_event_handler> m_frame_handler;    // receives rendered canvases
};

// An RTMP stream event handler: wires up the play, render, and frame event
// handlers defined above whenever a new stream is created.
class Stream_event_handler : public mi::base::Interface_implement<mi::rtmp::IStream_event_handler>
{
public:
    Stream_event_handler( mi::base::Handle< mi::neuraylib::IScope> scope) : m_scope( scope) { }

    bool handle(
        bool is_create, mi::rtmp::IStream* stream,
        const mi::IData* command_arguments)
    {
        // Only stream creation needs work; teardown is handled by the server.
        if( !is_create)
            return true;

        mi::base::Handle< mi::rtmp::IPlay_event_handler> play_handler(
            new Play_event_handler());
        stream->register_play_event_handler( play_handler.get());

        // The frame handler is shared with the render handler, which feeds it
        // freshly rendered canvases.
        mi::base::Handle< Frame_event_handler> frame_handler( new Frame_event_handler());
        mi::base::Handle< mi::rtmp::IRender_event_handler> render_handler(
            new Render_event_handler( m_scope, frame_handler));
        stream->register_render_event_handler( render_handler.get());
        stream->register_frame_event_handler( frame_handler.get());
        return true;
    }

private:
    mi::base::Handle< mi::neuraylib::IScope> m_scope;  // passed on to the render handler
};

// An RTMP call event handler that moves the camera according to the arguments 'pan_x' and 'pan_y'.
class Call_event_handler : public mi::base::Interface_implement<mi::rtmp::ICall_event_handler>
{
public:
    Call_event_handler( mi::base::Handle< mi::neuraylib::IScope> scope) : m_scope( scope) { }

    // Invoked for the "moveCamera" remote call; applies the pan deltas from
    // \p user_arguments to the camera named "cam".
    bool handle(
        mi::rtmp::IConnection* connection,
        const char* procedure_name,
        const mi::IData* command_arguments,
        const mi::IData* user_arguments,
        mi::IData** response_arguments)
    {
        mi::base::Handle< mi::neuraylib::ITransaction> transaction( m_scope->create_transaction());
        {
            // The original listing contained an "INVALID_DOXYREF" documentation
            // artifact in this edit<> call; the type is plain mi::ICamera.
            mi::base::Handle< mi::ICamera> camera( transaction->edit<mi::ICamera>( "cam"));
            check_success( camera.is_valid_interface());
            mi::base::Handle< const mi::IMap> imap( user_arguments->get_interface<const mi::IMap>());
            check_success( imap.is_valid_interface());
            // Horizontal pan: subtract the client-reported delta.
            mi::base::Handle< const mi::ISint32> pan_x( imap->get_value<mi::ISint32>( "pan_x"));
            if ( pan_x) {
                mi::Float64 x = camera->get_offset_x();
                camera->set_offset_x( x - pan_x->get_value<mi::Sint32>());
                // The example client also demonstrates how to send/parse a double.
                mi::base::Handle< const mi::IFloat64> pan_xd(
                    imap->get_value<mi::IFloat64>( "pan_xd"));
                if( pan_xd) {
                    mi::Float64 xd = pan_xd->get_value<mi::Float64>();
                    check_success( mi::Sint32(xd) == pan_x->get_value<mi::Sint32>());
                }
            }
            // Vertical pan, analogous to pan_x.
            mi::base::Handle< const mi::ISint32> pan_y( imap->get_value<mi::ISint32>( "pan_y"));
            if( pan_y) {
                mi::Float64 y = camera->get_offset_y();
                camera->set_offset_y( y - pan_y->get_value<mi::Sint32>());
            }
            // Demonstrate getting a bool from the example client
            mi::base::Handle< const mi::IBoolean> dir(
                imap->get_value<mi::IBoolean>( "going_right"));
            if ( dir) {
                bool going_right = dir->get_value<bool>();
                going_right = !going_right; // avoid compiler warning
            }
        }
        transaction->commit();
        return true;
    }

private:
    mi::base::Handle< mi::neuraylib::IScope> m_scope;
};

// An RTMP connect event handler: on each new connection it installs the
// stream event handler and the "moveCamera" remote-call handler above.
class Connect_event_handler : public mi::base::Interface_implement<mi::rtmp::IConnect_event_handler>
{
public:
    Connect_event_handler( mi::base::Handle< mi::neuraylib::IScope> scope) : m_scope( scope) { }

    bool handle(
        bool is_create, mi::rtmp::IConnection* connection,
        const mi::IData* command_arguments,
        const mi::IData* user_arguments)
    {
        // Nothing to do when a connection goes away.
        if( !is_create)
            return true;

        mi::base::Handle< mi::rtmp::IStream_event_handler> stream_handler(
            new Stream_event_handler( m_scope));
        connection->register_stream_event_handler( stream_handler.get());

        // Route the client's "moveCamera" RPC to our camera-pan handler.
        mi::base::Handle< mi::rtmp::ICall_event_handler> call_handler(
            new Call_event_handler( m_scope));
        connection->register_remote_call_handler( call_handler.get(), "moveCamera");
        return true;
    }

private:
    mi::base::Handle< mi::neuraylib::IScope> m_scope;  // forwarded to the handlers
};

// Configures the neuray library: registers the shader search path and loads
// the plugins this example depends on.
void configuration( mi::base::Handle< mi::neuraylib::INeuray> neuray, const char* shader_path)
{
    // The renderer needs to be able to locate its shaders.
    mi::base::Handle< mi::neuraylib::IRendering_configuration> render_config(
        neuray->get_api_component<mi::neuraylib::IRendering_configuration>());
    check_success( render_config.is_valid_interface());
    check_success( render_config->add_shader_path( shader_path) == 0);

    // Load the FreeImage image plugin, the LLVM backend for MetaSL, and the
    // default video codec plugin used to encode the rendered frames.
    mi::base::Handle< mi::neuraylib::IPlugin_configuration> plugin_config(
        neuray->get_api_component<mi::neuraylib::IPlugin_configuration>());
#ifndef MI_PLATFORM_WINDOWS
    const char* const plugins[] = { "freeimage.so", "gen_llvm.so", "screen_video.so" };
#else
    const char* const plugins[] = { "freeimage.dll", "gen_llvm.dll", "screen_video.dll" };
#endif
    for( int i = 0; i < 3; ++i)
        check_success( plugin_config->load_plugin_library( plugins[i]) == 0);
}

// Imports the given scene file and stores a scene object under the name
// "the_scene" so that the render loop can access it later.
void prepare_rendering( mi::base::Handle< mi::neuraylib::INeuray> neuray,
                        const char* scene_file)
{
    // All database edits happen inside a transaction on the global scope.
    mi::base::Handle< mi::neuraylib::IDatabase> db(
        neuray->get_api_component<mi::neuraylib::IDatabase>());
    check_success( db.is_valid_interface());
    mi::base::Handle< mi::neuraylib::IScope> global_scope( db->get_global_scope());
    mi::base::Handle< mi::neuraylib::ITransaction> trans(
        global_scope->create_transaction());
    check_success( trans.is_valid_interface());

    // Bring the scene elements into the database.
    mi::base::Handle< mi::neuraylib::IImport_api> importer(
        neuray->get_api_component<mi::neuraylib::IImport_api>());
    check_success( importer.is_valid_interface());
    mi::base::Handle< const mi::IImport_result> result(
        importer->import_elements( trans.get(), scene_file));
    check_success( result->get_error_number() == 0);

    // Assemble a scene object from the imported root group, options, and camera.
    mi::base::Handle< mi::neuraylib::IScene> scene(
        trans->create<mi::neuraylib::IScene>( "Scene"));
    scene->set_rootgroup(       result->get_rootgroup());
    scene->set_options(         result->get_options());
    scene->set_camera_instance( result->get_camera_inst());

    // Publish the scene under a well-known name for the render handler.
    trans->store( scene.get(), "the_scene");
    trans->commit();
}

// Starts the HTTP server (serving the .swf file on \p port) and the RTMP
// server (streaming on port 1935), runs both for a fixed interval, and then
// shuts them down.
void run_http_and_rtmp_server( mi::base::Handle< mi::neuraylib::INeuray> neuray,
                               const char* port, const char* swf_file)
{
    // Create an HTTP server instance
    mi::base::Handle< mi::http::IFactory> http_factory(
        neuray->get_api_component<mi::http::IFactory>());
    mi::base::Handle< mi::http::IServer> http_server(
        http_factory->create_server());

    // Install our HTTP request and response handlers
    mi::base::Handle< mi::http::IRequest_handler> request_handler(
        new Request_handler( swf_file));
    http_server->install( request_handler.get());
    mi::base::Handle< mi::http::IResponse_handler> response_handler(
        new Response_handler());
    http_server->install( response_handler.get());

    // Assemble the HTTP server address. std::string avoids the fixed-size
    // buffer and silent-truncation pitfalls of the strncat() approach.
    std::string address( "0.0.0.0:");
    address += port;

    // Start HTTP server
    http_server->start( address.c_str());

    // Create an RTMP server instance
    mi::base::Handle< mi::rtmp::IFactory> rtmp_factory(
        neuray->get_api_component<mi::rtmp::IFactory>());
    mi::base::Handle< mi::rtmp::IServer> rtmp_server( rtmp_factory->create_server());

    // Install our RTMP connect handler (the original comment wrongly said HTTP)
    mi::base::Handle< mi::neuraylib::IDatabase> database(
        neuray->get_api_component<mi::neuraylib::IDatabase>());
    mi::base::Handle< mi::neuraylib::IScope> scope(
        database->get_global_scope());
    mi::base::Handle< mi::rtmp::IConnect_event_handler> connect_handler(
        new Connect_event_handler( scope));
    rtmp_server->install( connect_handler.get());

    // Start RTMP server on the standard RTMP port
    rtmp_server->start( "0.0.0.0:1935");

    // Run both servers for a fixed time interval, then shut them down
    sleep_seconds( 3000);
    http_server->shutdown();
    rtmp_server->shutdown();
}

// The example takes the following command line arguments:
//
//   example_rtmp_server_render <swf_file> <scene_file> <shader_path> <port>
//
// swf_file         the flash player, the .swf-file included in the examples directory
// scene_file       some scene file, e.g., main.mi
// shader_path      path to the shaders, e.g., neuray-<version>/shaders
// port             port for the HTTP server
//
int main( int argc, char* argv[])
{
    // Validate the command line before touching the library.
    if( argc != 5) {
        std::cerr << "Usage: example_rtmp_server_render <swf_file> <scene_file> <shader_path> "
                  << "<port>" << std::endl;
        return EXIT_FAILURE;
    }
    const char* const swf_file    = argv[1];
    const char* const scene_file  = argv[2];
    const char* const shader_path = argv[3];
    const char* const port        = argv[4];

    // Access the neuray library
    mi::base::Handle< mi::neuraylib::INeuray> neuray( load_and_get_ineuray());
    check_success( neuray.is_valid_interface());

    // Configure paths and plugins, then start the library (blocking start).
    configuration ( neuray, shader_path);
    check_success( neuray->start( true) == 0);

    // Import the scene and store it in the database.
    prepare_rendering( neuray, scene_file);

    // Serve the video stream via the RTMP server until the timeout expires.
    run_http_and_rtmp_server( neuray, port, swf_file);

    // Shut down and release the neuray library.
    check_success( neuray->shutdown() == 0);
    neuray = 0;

    // Unload the neuray library
    check_success( unload());

    return EXIT_SUCCESS;
}

Flex Source Code Location: examples/example_rtmp_server_render.mxml and examples/example_rtmp_server_render_actionscript.as

‎<?xml version="1.0" encoding="utf-8"?>
<!--
/******************************************************************************
 * Copyright 1986, 2011 NVIDIA Corporation. All rights reserved.
 *****************************************************************************/
-->

<mx:Application
    xmlns:mx="http://www.adobe.com/2006/mxml"
    layout="horizontal"
    initialize="init()" xmlns:local="*">

<mx:Script>
    <![CDATA[
        import mx.core.Application;

        // Called once on startup: connect the video player component to the
        // RTMP server on the host this .swf file was loaded from.
        public function init():void {
            vidplayer.makeConnection("rtmp://" + getHost());
        }

        // Parses the application URL ("protocol://host:port/path") and
        // returns its host part; falls back to "localhost" when the URL
        // does not have the expected shape.
        public function getHost():String  {
            var location:String = Application.application.url;
            var components:Array = location.split("/");
            if (components.length < 3)
                return "localhost";
            var host_port:Array = components[2].split(":");
            if (host_port.length <= 1)
               return "localhost";
            return host_port[0];
        }
    ]]>
</mx:Script>

<!-- refer to the actionscript object -->
<local:example_rtmp_server_render_actionscript includeInLayout="true" id="vidplayer" width="1024" height="786" />
</mx:Application>
‎/******************************************************************************
 * Copyright 1986, 2011 NVIDIA Corporation. All rights reserved.
 * Germany. All rights reserved
 *****************************************************************************/

package {
    import flash.events.MouseEvent;
    import flash.events.NetStatusEvent;
    import flash.events.SecurityErrorEvent;
    import flash.media.Video;
    import flash.net.NetConnection;
    import flash.net.NetStream;
    import mx.core.Application;
    import mx.core.UIComponent;

    // Video player component for the RTMP example: connects to the RTMP
    // server, plays the "example_rtmp_server_render" stream, and translates
    // mouse drags into "moveCamera" remote calls on the connection.
    public class example_rtmp_server_render_actionscript extends UIComponent {
        private var streamName:String = "example_rtmp_server_render";
        public var connection:NetConnection = null;
        private var video:Video = null;
        private var mystream:NetStream = null;
        private var client:Object = null;
        private var mouseButton:Boolean = false;   // true while a drag is in progress
        private var mousePosX:int = 0;             // last mouse position, component coords
        private var mousePosY:int = 0;

        public function example_rtmp_server_render_actionscript() {
            super();
            this.addEventListener(MouseEvent.MOUSE_DOWN, this.onMouseDown);
        }

        // Opens (or re-opens) the NetConnection to the given RTMP URL and
        // sends the desired stream resolution as connect arguments.
        public function makeConnection(url:String):void {
            if (connection != null) {
                mystream = null;
                connection.close();
            } else {
                connection = new NetConnection();
            }
            connection.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
            connection.addEventListener(SecurityErrorEvent.SECURITY_ERROR, securityErrorHandler);
            var args:Object = new Object();
            // The resolution is rounded down to a multiple of 16 (codec-friendly).
            args["resolution_x"] = floor16(this.width).toString();
            args["resolution_y"] = floor16(this.height).toString();
            connection.connect(url,args);
        }

        // Rounds val down to the nearest multiple of 16.
        private function floor16(val:int):int  {
            return int(val/16) * 16;
        }

        // Drops the stream and closes the connection, if one exists.
        public function closeConnection():void {
            if (connection != null) {
                mystream = null;
                connection.close();
            }
        }

        // Starts playing the stream once the connection succeeds.
        private function netStatusHandler(event:NetStatusEvent):void {
            switch (event.info.code) {
                case "NetConnection.Connect.Success":
                    connectStream();
                    break;
                case "NetStream.Play.StreamNotFound":
                    trace("Stream not found: " + streamName);
                    break;
            }
        }

        private function securityErrorHandler(event:SecurityErrorEvent):void {
            trace("securityErrorHandler: " + event);
        }

        // Creates the NetStream and Video objects and begins playback.
        private function connectStream():void {
            mystream = new NetStream(connection);
            mystream.addEventListener(NetStatusEvent.NET_STATUS, netStatusHandler);
            if (video == null) {
                video = new Video(this.width,this.height);
                video.smoothing = true;
            }
            video.attachNetStream(mystream);
            addChild(video);
            mystream.play(streamName);
        }

        // Begins a drag: record the mouse position and start tracking
        // move/up events at application level so the drag can leave the
        // component without being lost.
        public function onMouseDown(event: MouseEvent):void {
            var x: int = event.stageX - (event.target as UIComponent).parent.x;
            var y: int = event.stageY - (event.target as UIComponent).parent.y;
            mousePosX = x;
            mousePosY = y;
            Application.application.addEventListener(MouseEvent.MOUSE_UP, this.onMouseUp);
            Application.application.addEventListener(MouseEvent.MOUSE_MOVE, this.onMouseMove);
            mouseButton = true;
        }

        // Ends a drag and removes the application-level listeners.
        public function onMouseUp(event: MouseEvent):void {
            if (mouseButton) {
                mouseButton = false;
                Application.application.removeEventListener(MouseEvent.MOUSE_UP, this.onMouseUp);
                Application.application.removeEventListener(MouseEvent.MOUSE_MOVE, this.onMouseMove);
            }
        }

        // While dragging, send the mouse delta to the server as a
        // "moveCamera" remote call (pan_x/pan_y plus demo double/bool args).
        public function onMouseMove(event: MouseEvent):void
        {
            var x: int = event.stageX - (event.target as UIComponent).parent.x;
            var y: int = event.stageY - (event.target as UIComponent).parent.y;
            if (mouseButton && connection && connection.connected && mystream) {
                var diff_x:int = x-mousePosX;
                var diff_y:int = y-mousePosY;
                var args:Object = new Object();
                if (diff_x != 0) args["pan_x"] = diff_x;
                if (diff_y != 0) args["pan_y"] = -diff_y;
                if (diff_x || diff_y) {
                   // For demonstration purposes also send a double..
                   args["pan_xd"] = (diff_x < 0) ? diff_x - 0.1 : diff_x + 0.1
                   // ..and some bool
                   args["going_right"] = diff_x > 0 ? true : false;
                   connection.call("moveCamera",null,args);
                }
                mousePosX = x;
                mousePosY = y;
            }
        }
    }
}

[Previous] [Next] [Up]