[FFmpeg-user] RTMP stream not working on LAN
Loadlinx
limiteddi at gmail.com
Sun Jan 12 00:05:44 CET 2014
Hi all!
I am successfully able to launch ffmpeg, pick up a stream from DVBS and
transcode it with H264/AAC. The problem is that it only works on the PC I
launch ffmpeg on.
Here is the FFMPEG command
./ffmpeg -re -i http://192.168.1.50:6002 -vcodec libx264 -vprofile baseline
-b:v 1500k -b:a 32k -s 320x180 -acodec libfdk_aac -ar 44100 -ac 1 -f flv
rtmp://192.168.1.50:1935/big/rt
On 192.168.1.11 I can't pick this stream up. On .50 the stream works.
Am I doing something wrong?
Here is my nginx config
#user body;
worker_processes 1;
#error_log logs/error.log;
#error_log logs/error.log notice;
#error_log logs/error.log info;
#pid logs/nginx.pid;
events {
worker_connections 1024;
}
http {
include mime.types;
default_type application/octet-stream;
#log_format main '$remote_addr - $remote_user [$time_local] "$request" '
# '$status $body_bytes_sent "$http_referer" '
# '"$http_user_agent" "$http_x_forwarded_for"';
#access_log logs/access.log main;
sendfile on;
#tcp_nopush on;
#keepalive_timeout 0;
keepalive_timeout 65;
#gzip on;
# HTTP can be used for accessing RTMP stats
server {
listen 80;
location / {
root /usr/local/nginx/html;
index index.html index.htm;
}
# This URL provides RTMP statistics in XML
location /stat {
rtmp_stat all;
# Use this stylesheet to view XML as web page
# in browser
rtmp_stat_stylesheet stat.xsl;
#allow 127.0.0.1;
}
location /stat.xsl {
# XML stylesheet to view RTMP stats.
# Copy stat.xsl wherever you want
# and put the full directory path here
root /usr/apps/nginx-rtmp-module;
}
location /nclients {
proxy_pass http://127.0.0.1/stat;
xslt_stylesheet /usr/local/nginx/html/nclients.xsl app='$arg_app'
name='$arg_name';
add_header Refresh "3; $request_uri";
}
# rtmp control
location /control {
rtmp_control all;
}
location /hls {
# Serve HLS fragments
# alias /tmp/app;
alias /usr/local/nginx/html/video;
expires -1;
}
}
}
rtmp {
server {
listen 1935;
chunk_size 4000;
# TV mode: one publisher, many subscribers
application mytv {
# enable live streaming
live on;
# record first 1K of stream
record all;
record_path /tmp/av;
record_max_size 1K;
# append current timestamp to each flv
record_unique on;
# publish only from localhost
allow publish 127.0.0.1;
deny publish all;
#allow play all;
}
# Transcoding (ffmpeg needed)
application big {
live on;
# On every published stream run this command (ffmpeg)
# with substitutions: $app/${app}, $name/${name} for application & stream
name.
#
# This ffmpeg call receives stream from this application &
# reduces the resolution down to 32x32. The stream is then published to
# 'small' application (see below) under the same name.
#
# ffmpeg can do anything with the stream like video/audio
# transcoding, resizing, altering container/codec params etc
#
# Multiple exec lines can be specified.
# exec /usr/local/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name
-vcodec libx264 -threads 0 -r 25 -g 50 -b 500k -bt 500k -s 320x180 -acodec
mp3 -ar 44100 -ab 64k -f flv rtmp://localhost:1935/small/${name};
exec /usr/local/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name -vcodec
libx264 -threads 0 -r 25 -g 50 -b 500k -bt 500k -s 320x180 -acodec mp3 -ar
44100 -ab 64k -f flv rtmp://
1.13577498.fme.ustream.tv/ustreamVideo/13577498/RrHUx94pJWjaEhPjnQYwUxcU2T2w8CaX
;
# 2>>/tmp/ffmpeg.log;
# exec /usr/local/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name
# -vcodec libx264 -vprofile baseline -b:v 128k -b:a 32k -s 320x180 -acodec
libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/small/${name}_low
# -vcodec libx264 -vprofile baseline -b:v 384k -b:a 64k -s 640x360 -acodec
libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/small/${name}_mid
# -vcodec libx264 -vprofile baseline -b:v 1024k -b:a 128k -s 1280x720
-acodec libvo_aacenc -ar 44100 -ac 1 -f flv
rtmp://localhost:1935/small/${name}_hi;
# exec /usr/local/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name
# -vcodec libx264 -vprofile baseline -b:v 128k -b:a 32k -s 320x180 -acodec
libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/hls/${name}_low
# -vcodec libx264 -vprofile baseline -b:v 384k -b:a 64k -s 640x360 -acodec
libvo_aacenc -ar 44100 -ac 1 -f flv rtmp://localhost:1935/hls/${name}_mid
# -vcodec libx264 -vprofile baseline -b:v 1024k -b:a 128k -s 1280x720
-acodec libvo_aacenc -ar 44100 -ac 1 -f flv
rtmp://localhost:1935/hls/${name}_hi;
# 2>>/tmp/ffmpeg.log;
# exec /usr/local/bin/ffmpeg -re -i rtmp://localhost:1935/$app/$name
# -vcodec libx264 -vprofile baseline -acodec libvo_aacenc -ar 44100 -ac 1
-f flv rtmp://localhost:1935/hls/${name};
# record first 1K of stream
record all;
record_path /usr/local/nginx/html/video;
# record_max_size 1K;
# append current timestamp to each flv
record_unique on;
}
application small {
live on;
# Video with reduced resolution comes here from ffmpeg
}
application webcam {
live on;
# Stream from local webcam
exec_static ffmpeg -f video4linux2 -i /dev/video0 -c:v libx264 -an -f flv
rtmp://localhost:1935/webcam/mystream;
}
application mypush {
live on;
# Every stream published here
# is automatically pushed to
# these two machines
# push rtmp1.example.com;
# push rtmp2.example.com:1934;
}
application mypull {
live on;
# Pull all streams from remote machine
# and play locally
# pull rtmp://rtmp3.example.com pageUrl=www.example.com/index.html;
}
application mystaticpull {
live on;
# Static pull is started at nginx start
# pull rtmp://rtmp4.example.com
pageUrl=www.example.com/index.htmlname=mystream static;
}
# video on demand
application vod {
play /usr/local/nginx/html/video;
}
application vod2 {
play /var/mp4s;
}
# Many publishers, many subscribers
# no checks, no recording
application videochat {
live on;
# The following notifications receive all
# the session variables as well as
# particular call arguments in HTTP POST
# request
# Make HTTP request & use HTTP retcode
# to decide whether to allow publishing
# from this connection or not
on_publish http://localhost:80/publish;
# Same with playing
on_play http://localhost:80/play;
# Publish/play end (repeats on disconnect)
on_done http://localhost:80/done;
# All above mentioned notifications receive
# standard connect() arguments as well as
# play/publish ones. If any arguments are sent
# with GET-style syntax to play & publish
# these are also included.
# Example URL:
# rtmp://localhost/myapp/mystream?a=b&c=d
# record 10 video keyframes (no audio) every 2 minutes
record keyframes;
record_path /tmp/vc;
record_max_frames 10;
record_interval 2m;
# Async notify about an flv recorded
on_record_done http://localhost:80/record_done;
}
# HLS
# For HLS to work please create a directory in tmpfs (/tmp/app here)
# for the fragments. The directory contents is served via HTTP (see
# http{} section in config)
#
# Incoming stream must be in H264/AAC. For iPhones use baseline H264
# profile (see ffmpeg example).
# This example creates RTMP stream from movie ready for HLS:
#
# ffmpeg -loglevel verbose -re -i movie.avi -vcodec libx264
# -vprofile baseline -acodec libmp3lame -ar 44100 -ac 1
# -f flv rtmp://localhost:1935/hls/movie
#
# If you need to transcode live stream use the 'exec' feature.
#
application hls {
live on;
hls on;
# hls_path /tmp/app;
hls_path /usr/local/nginx/html/video;
hls_fragment 15s;
hls_playlist_length 10m;
hls_continuous on;
hls_nested on;
hls_variant _low BANDWIDTH=160000;
hls_variant _mid BANDWIDTH=448000;
hls_variant _hi BANDWIDTH=1152000;
}
}
}
More information about the ffmpeg-user
mailing list