Works on laptop. Not on Pi.

A project log for ISS HDEV image availability

The "High Definition Earth Viewing" experiment onboard the ISS runs a few cameras, but not always. Let's find out when video is available!

Christoph • 10/19/2016 at 20:22 • 4 Comments

This is the current script, which outputs the expected difference between the current stream image and a reference:

#!/usr/bin/env python

# GST_DEBUG=3,python:5,gnl*:5 python worst novideo.png

from __future__ import print_function

import sys

import gi

from gi.repository import GObject as gobject, Gst as gst
from livestreamer import Livestreamer, StreamError, PluginError, NoPluginError

import cv2
import numpy

def exit(msg):
    """Print *msg* to stderr and terminate the script with status 1.

    Used as the fatal-error path throughout main(); without the sys.exit()
    call, execution would fall through after reporting the error.
    """
    print(msg, file=sys.stderr)
    sys.exit(1)

class Player(object):
    """Feed a livestreamer stream into a GStreamer pipeline and, once per
    second, print the L2 difference between the current video frame and the
    global reference image ``refArray``.

    NOTE(review): several method bodies in the posted listing were truncated
    by the site's code formatting; they are reconstructed here from the
    livestreamer player example this script is based on — confirm against the
    original working copy.
    """

    def __init__(self):
        self.fd = None  # stream file-like object, set by play()
        self.mainloop = gobject.MainLoop()

        # This creates a playbin pipeline and using the appsrc source
        # we can feed it our stream data
        self.pipeline = gst.parse_launch('uridecodebin uri=appsrc:// name=decoder \
            decoder. ! videorate ! video/x-raw,framerate=1/1 ! tee name=t \
              t. ! queue ! videoconvert ! video/x-raw,format=RGB ! appsink name=appsink \
            decoder. ! queue ! audioconvert ! fakesink')
        if self.pipeline is None:
            exit("couldn't build pipeline")
        decoder = self.pipeline.get_by_name('decoder')
        if decoder is None:
            exit("couldn't get decoder")
        decoder.connect("source-setup", self.on_source_setup)
        vsink = self.pipeline.get_by_name('appsink')
        if vsink is None:
            exit("couldn't get sink")
        vsink.set_property("emit-signals", True)
        vsink.set_property("max-buffers", 1)
        vsink.connect("new-sample", self.on_new_sample)

        # Creates a bus and set callbacks to receive errors.
        # add_signal_watch() is required: without it the "message::*"
        # signals below are never emitted.
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect("message::eos", self.on_eos)
        self.bus.connect("message::error", self.on_error)

    def on_new_sample(self, sink):
        """appsink callback: pull one RGB frame and print its L2 distance
        from the (resized) reference image."""
        sample = sink.emit("pull-sample")
        buf = sample.get_buffer()
        caps = sample.get_caps()
        height = caps.get_structure(0).get_value('height')
        width = caps.get_structure(0).get_value('width')
        (result, mapinfo) = buf.map(gst.MapFlags.READ)
        if result:
            try:
                # View the mapped buffer as an RGB frame (appsink caps force
                # video/x-raw,format=RGB, hence 3 channels).
                arr = numpy.ndarray(
                    shape=(height, width, 3),
                    dtype=numpy.uint8,
                    buffer=mapinfo.data)
                resized_refimage = cv2.resize(refArray, (width, height))
                diff = cv2.norm(arr, resized_refimage, cv2.NORM_L2)
                print("diff = " + str(diff))
            finally:
                # Always release the mapping, even if OpenCV raises.
                buf.unmap(mapinfo)
        return gst.FlowReturn.OK

    def exit(self, msg):
        """Stop playback, report *msg* on stderr and terminate."""
        self.stop()
        print(msg, file=sys.stderr)
        sys.exit(1)

    def stop(self):
        # Stop playback and exit mainloop
        self.pipeline.set_state(gst.State.NULL)
        self.mainloop.quit()

        # Close the stream
        if self.fd:
            self.fd.close()

    def play(self, stream):
        """Open *stream* (a livestreamer Stream) and run until EOS/error."""
        # Attempt to open the stream
        try:
            self.fd = stream.open()
        except StreamError as err:
            self.exit("Failed to open stream: {0}".format(err))

        # Start playback; mainloop.run() blocks until stop() is called
        self.pipeline.set_state(gst.State.PLAYING)
        self.mainloop.run()

    def on_source_setup(self, element, source):
        # When this callback is called the appsrc expects
        # us to feed it more data
        print("source setup")
        source.connect("need-data", self.on_source_need_data)

    def on_pad_added(self, element, pad):
        # NOTE(review): not connected anywhere in this script; appears to be
        # left over from an older decodebin-based version — confirm it can
        # be removed.
        string = pad.query_caps(None).to_string()
        if string.startswith('video/'):
            pass  # original body lost; would handle the new video pad here

    def on_source_need_data(self, source, length):
        """appsrc callback: read up to *length* bytes from the stream and
        push them into the pipeline."""
        # Attempt to read data from the stream
        try:
            data = self.fd.read(length)
        except IOError as err:
            self.exit("Failed to read data from stream: {0}".format(err))

        # If data is empty it's the end of stream
        if not data:
            source.emit("end-of-stream")
            return

        # Convert the Python bytes into a GStreamer Buffer
        # and then push it to the appsrc
        buf = gst.Buffer.new_wrapped(data)
        source.emit("push-buffer", buf)
        #print("sent " + str(length) + " bytes")

    def on_eos(self, bus, msg):
        # Stop playback on end of stream
        self.stop()

    def on_error(self, bus, msg):
        # Print error message and exit on error
        error = msg.parse_error()[1]
        self.exit(error)

def main():
    """Entry point: parse CLI arguments, load the reference image, fetch the
    requested stream quality with livestreamer and play it until done.

    Usage: script <url> <quality> <reference png image path>
    """
    if len(sys.argv) < 4:
        exit("Usage: {0} <url> <quality> <reference png image path>".format(sys.argv[0]))

    # Initialize and check GStreamer version.
    # NOTE(review): require_version() should normally run before
    # "from gi.repository import Gst" at the top of the file — confirm the
    # import order on a working setup.
    gi.require_version("Gst", "1.0")
    gobject.threads_init()
    gst.init(None)

    # Collect arguments
    url = sys.argv[1]
    quality = sys.argv[2]
    refImage = sys.argv[3]

    # Prepare the blurred RGB reference frame used by Player.on_new_sample()
    global refArray
    image = cv2.imread(refImage)
    if image is None:
        exit("couldn't read reference image '{0}'".format(refImage))
    refArray = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    refArray = cv2.blur(refArray, (3, 3))

    # Create the Livestreamer session
    livestreamer = Livestreamer()

    # Enable logging
    livestreamer.set_loglevel("info")
    livestreamer.set_logoutput(sys.stdout)

    # Attempt to fetch streams
    try:
        streams = livestreamer.streams(url)
    except NoPluginError:
        exit("Livestreamer is unable to handle the URL '{0}'".format(url))
    except PluginError as err:
        exit("Plugin error: {0}".format(err))

    if not streams:
        exit("No streams found on URL '{0}'".format(url))

    # Look for specified stream
    if quality not in streams:
        exit("Unable to find '{0}' stream on URL '{1}'".format(quality, url))

    # We found the stream
    stream = streams[quality]

    # Create the player and start playback
    player = Player()

    # Blocks until playback is done
    player.play(stream)
if __name__ == "__main__":
    main()
Unfortunately, it doesn't really work on the Pi. Probably relevant warnings and errors:

from livestreamer:

[plugin.ustreamtv][warning] python-librtmp is not installed, but is needed to access the desktop streams
from gstreamer:
0:00:17.111833668  9423 0xb0641520 ERROR            vaapidecode ../../../gst/vaapi/gstvaapidecode.c:1025:gst_vaapidecode_ensure_allowed_caps: failed to retrieve VA display
0:00:17.130139346  9423 0xb0641520 WARN            uridecodebin gsturidecodebin.c:939:unknown_type_cb:<decoder> warning: No decoder available for type 'video/x-h264, stream-format=(string)byte-stream, alignment=(string)nal, width=(int)426, height=(int)240, framerate=(fraction)30/1, parsed=(boolean)true, pixel-aspect-ratio=(fraction)1/1, level=(string)2.1, profile=(string)main'.

python-librtmp couldn't be found so I couldn't install it. The error about a "VA display" seems to be related to h264 decoding (as per google), so fixing h264 might solve that. I just don't know how.

The reference image I use is in the project files.


Arsenijs wrote 10/21/2016 at 00:47 point

sudo apt-get install librtmp-dev
sudo pip install python-librtmp

Found by "pip search rtmp"

  Are you sure? yes | no

Christoph wrote 10/21/2016 at 21:22 point

Thank you, python-librtmp is now installed (it needed libffi-dev as well). I don't know how you did that, but now the script has stopped working on my laptop as well — with a different error, though.

  Are you sure? yes | no

Arsenijs wrote 10/21/2016 at 23:20 point

Oh, right, needed libffi for paramiko.
Wait, you installed stuff on your laptop and it stopped working? Or are you hinting at my well-hidden supernatural powers in electronics? ;-)

  Are you sure? yes | no

Christoph wrote 10/22/2016 at 20:32 point

I installed librtmp-dev on the Pi and it stopped working on both the Pi and the laptop. So your supernatural powers are definitely involved. Maybe some other change affected both systems, though.

  Are you sure? yes | no