
Processing-Color-Based-Object-Tracking/ObjectTrackingAndDrawing.pde

objColorDetect.pde
import processing.video.*;
 
// Variable for capture device
Capture video;
 
// A variable for the color we are searching for.
color trackColor; 
 
int closestX = 0;
int closestY = 0;
float pclosestX = 0;
float pclosestY = 0;
 
void setup() {
  size(1280, 480);
  background(255);
  video = new Capture(this, 640, 480);
  video.start();
  // Start off tracking for red
  trackColor = color(255, 0, 0);
}
 
void captureEvent(Capture video) {
  // Read image from the camera
  video.read();
}
 
void draw() {
  video.loadPixels();
  image(video, 0, 0);
 
  // worldRecord keeps the smallest color distance seen this frame; it
  // starts at a high value and is only used below as a "did any pixel
  // match?" test.
  float worldRecord = 500;
 
  // Accumulators for averaging the position of every matching pixel
  float sumX = 0;
  float sumY = 0;
  int sameColorCount = 0;
 
  // Begin loop to walk through every pixel
  for (int x = 0; x < video.width; x ++ ) {
    for (int y = 0; y < video.height; y ++ ) {
      int loc = x + y*video.width;
      // What is current color
      color currentColor = video.pixels[loc];
      float r1 = red(currentColor);
      float g1 = green(currentColor);
      float b1 = blue(currentColor);
      float r2 = red(trackColor);
      float g2 = green(trackColor);
      float b2 = blue(trackColor);
 
      // Using euclidean distance to compare colors
      float d = dist(r1, g1, b1, r2, g2, b2); // We are using the dist( ) function to compare the current color with the color we are tracking.
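      // (dist() here is the Euclidean distance between the two colors
      // treated as points in RGB space:
      //   sqrt((r1-r2)^2 + (g1-g2)^2 + (b1-b2)^2),
      // so 0 means identical colors and larger values mean less alike.)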
 
      // If the current color is within the threshold of the tracked
      // color, count it as a match and accumulate its position
      if (d < 20) {
        worldRecord = min(worldRecord, d);
        sumX += x;
        sumY += y;
        sameColorCount++;
      }
    }
  }
  // Average the matching positions to get the blob's centroid; guard
  // against dividing by zero when no pixel matched
  if (sameColorCount > 0) {
    closestX = int(sumX / sameColorCount);
    closestY = int(sumY / sameColorCount);
  }
 
  // We only consider the color found if at least one pixel came within
  // a color distance of 20. This threshold is arbitrary; adjust it
  // depending on how accurate you require the tracking to be.
  if (worldRecord < 20) { 
    // Draw a circle at the tracked pixel
    fill(trackColor);
    strokeWeight(3.0);
    stroke(0);
    ellipse(closestX, closestY, 16, 16);
    // Draw the trail on the right half of the window (x offset 640),
    // but only when the tracked point hasn't jumped too far in one frame
    if (abs(closestX - pclosestX) < 40 && abs(closestY - pclosestY) < 40) {
      line(closestX + 640, closestY, pclosestX + 640, pclosestY);
    }
  }
 
  pclosestX = closestX;
  pclosestY = closestY;
}
 
void mousePressed() {
  // Save the color under the mouse in trackColor. Constrain the
  // coordinates to the video area (the left half of the window) so we
  // never index outside video.pixels.
  int x = constrain(mouseX, 0, video.width - 1);
  int y = constrain(mouseY, 0, video.height - 1);
  trackColor = video.pixels[x + y * video.width];
  background(255);
}
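
Unlike Shiffman's original example further below, which keeps only the single closest pixel, this sketch averages the positions of all pixels within the threshold, i.e. it tracks the centroid of the colored blob. Distilled into a stand-alone helper (a sketch of the idea, not part of the code above; the name centroidOf is made up):

PVector centroidOf(PImage img, color target, float threshold) {
  // Average the coordinates of every pixel whose color lies within
  // 'threshold' (in RGB distance) of the target color.
  float sumX = 0, sumY = 0;
  int count = 0;
  img.loadPixels();
  for (int x = 0; x < img.width; x++) {
    for (int y = 0; y < img.height; y++) {
      color c = img.pixels[x + y * img.width];
      float d = dist(red(c), green(c), blue(c),
                     red(target), green(target), blue(target));
      if (d < threshold) {
        sumX += x;
        sumY += y;
        count++;
      }
    }
  }
  if (count == 0) return null;  // no pixel matched
  return new PVector(sumX / count, sumY / count);
}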

OpenCV-Tracking-Processing-UI

Stop Motion Movie Maker

import processing.video.*;

// Time bookkeeping (variable names are Dutch: "vorige" = previous,
// "gemaakt" = taken, "beginpunt" = starting point in seconds)
int beginh = hour();
int beginm = minute();
int begins = second();
int vorigeh = beginh;
int vorigem = beginm;
int voriges = begins;
int beginpunt = beginh*3600 + beginm*60 + begins;
int timediff = 0;
boolean gemaakt = false;
String filenaam = "";
Capture cam;

void setup() {
  size(640, 480);
  String[] cameras = Capture.list();
      //int[] rood;
      
  if (cameras.length == 0) {
    println("There are no cameras available for capture.");
    exit();
  } else {
    println("Available cameras:");
    for (int i = 0; i < cameras.length; i++) {
      println(i + ": " + cameras[i]);
    }
    
    // The camera can be initialized directly using an 
    // element from the array returned by list():
    cam = new Capture(this, cameras[0]);
    cam.start();     
  }      
}

void draw() {
  if (cam.available() == true) {
    cam.read();//Get the image
  }
  //get the time
  int h  = hour();
  int m = minute();
  int s = second();
  // If the clock has moved on since the last picture was taken, reset
  // the boolean 'gemaakt' so we know it's allowed to take a new one
  if (vorigeh != h || vorigem != m || voriges != s) {
    gemaakt = false;
  }
  int step = 5;//time step size in seconds
  timediff = (h*3600 + m*60 + s) - beginpunt;
  image(cam, 0, 0);

  // Take a picture when the elapsed time is a multiple of the step
  // size (% is the modulo operator) and none was taken this second yet
  if (timediff % step == 0 && !gemaakt) {
    // Build the file name from the current time, zero-padded with nf()
    // so the frames sort chronologically (e.g. images/090505.jpg)
    filenaam = "images/" + nf(h, 2) + nf(m, 2) + nf(s, 2) + ".jpg";
    println("time is: "+ h + ":" + m + ":" + s);
    saveFrame(filenaam);
    println(filenaam);
    gemaakt = true;
    vorigeh = h;
    vorigem = m;
    voriges = s;
  }
}
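
To preview the captured frames as a stop-motion sequence, a separate minimal playback sketch can load them back (a sketch, assuming the images/ folder produced above sits in this sketch's folder):

import java.io.File;

PImage[] frames;
int current = 0;

void setup() {
  size(640, 480);
  frameRate(12);  // playback speed, tweak to taste
  // saveFrame() wrote relative to the capture sketch's folder, so list
  // whatever ended up in images/
  File dir = new File(sketchPath("images"));
  String[] names = dir.list();
  java.util.Arrays.sort(names);  // zero-padded names sort chronologically
  frames = new PImage[names.length];
  for (int i = 0; i < names.length; i++) {
    frames[i] = loadImage(sketchPath("images/" + names[i]));
  }
}

void draw() {
  image(frames[current], 0, 0);
  current = (current + 1) % frames.length;
}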

Motion Tracking in Processing

IpCapture

We can use a mobile phone with an IP camera app installed and process its images; it is also a way to recycle old phones.

/* IPCapture sample sketch for Java and Android     *
 *                                                  *
 * === IMPORTANT ===                                *
 * In Android mode, remember to enable the INTERNET *
 * permission in the Android -> Sketch Permissions  *
 * menu before using IP Webcam.                     */

import ipcapture.*;

IPCapture cam;

void setup() {
  size(820,600);
  //cam = new IPCapture(this, "http://192.168.39.71:8080/axis-cgi/mjpg/video.cgi?resolution=320x240", "", "");
  cam = new IPCapture(this, "http://192.168.39.71:8080/videofeed", "", "");
  cam.start();
  
  // this works as well:
  
  // cam = new IPCapture(this);
  // cam.start("url", "username", "password");
  
  // It is possible to change the MJPEG stream by calling stop()
  // on a running camera, and then start() it with the new
  // url, username and password.
}

void draw() {
  if (cam.isAvailable()) {
    cam.read();
    image(cam,0,0);
  }
}

void keyPressed() {
  if (key == ' ') {
    if (cam.isAlive()) cam.stop();
    else cam.start();
  }
}
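
Following the comment in setup(), switching to a different stream at run time is just a stop()/start() pair. For example, extending keyPressed() above (the second URL is a made-up placeholder for a second phone):

void keyPressed() {
  if (key == ' ') {          // pause / resume
    if (cam.isAlive()) cam.stop();
    else cam.start();
  } else if (key == 'n') {   // switch to another stream (hypothetical URL)
    if (cam.isAlive()) cam.stop();
    cam.start("http://192.168.39.72:8080/videofeed", "", "");
  }
}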

void mouseMoved() {
  // Print the RGB components of the pixel under the mouse
  loadPixels();
  println("Red: " + red(pixels[mouseX + mouseY * width]));
  println("Green: " + green(pixels[mouseX + mouseY * width]));
  println("Blue: " + blue(pixels[mouseX + mouseY * width]));
  println();
}
    
/* To detect colors and their position:


// Learning Processing
// Daniel Shiffman
// http://www.learningprocessing.com

// Example 16-11: Simple color tracking

import processing.video.*;

// Variable for capture device
Capture video;
PImage room;

// A variable for the color we are searching for.
//color trackColor;
int node1x = 40;
int node1y = 40;
int radius = 40;

void setup() {
  size(320, 240);
  video = new Capture(this, width, height, 15);
  // Start off tracking for red
  // trackColor = color(255,0,0);
  video.start();
  smooth();
  room = loadImage("room.png");
}

void draw() {
  // Capture and display the video
  if (video.available()) {
    video.read();
  }
  video.loadPixels();
  room.loadPixels();
  image(video, 0, 0);

  // Before we begin searching, the "world record" for closest color is
  // set to a high number that is easy for the first pixel to beat.
  float worldRecord = 1000;

  // XY coordinate of closest color
  int closestX = 0;
  int closestY = 0;

  // Begin loop to walk through every pixel
  for (int x = 0; x < video.width; x++) {
    for (int y = 0; y < video.height; y++) {
      int loc = x + y*video.width;
      // What is current color
      color currentColor = video.pixels[loc];
      float r1 = red(currentColor);
      float g1 = green(currentColor);
      float b1 = blue(currentColor);
      // Compare against the reference image of the room instead of a
      // fixed trackColor (trackColor is commented out above)
      float r2 = red(room.pixels[loc]);
      float g2 = green(room.pixels[loc]);
      float b2 = blue(room.pixels[loc]);

      // Using euclidean distance to compare colors
      float d = dist(r1, g1, b1, r2, g2, b2);

      // If the current color is more similar to the reference color
      // than the closest so far, save current location and difference
      if (d < worldRecord) {
        worldRecord = d;
        closestX = x;
        closestY = y;
      }
    }
  }

  // We only consider the color found if its color distance is less
  // than 5. This threshold is arbitrary; adjust it depending on how
  // accurate you require the tracking to be.
  if (worldRecord < 5) {
    // Draw a circle at the tracked pixel
    fill(0);
    strokeWeight(4.0);
    stroke(0);
    ellipse(closestX, closestY, 16, 16);
  }
}

//void mousePressed() {
// // Save color where the mouse is clicked in trackColor variable
// int loc = mouseX + mouseY*video.width;
// trackColor = video.pixels[loc];
//}

*/

PROBLEMS: CAMERA NOT DETECTED

I ran into several problems: Processing did not detect the webcam or the laptop's built-in camera. I suspect the cause is that the device name has a trailing blank space… but in any case the following line solves it.

   cam = new Capture(this, "pipeline:autovideosrc");
   

I found the solution at https://stackoverflow.com/questions/66065614/processing-cant-access-built-in-webcam, posted by Neil C Smith, and on the Processing forum at https://discourse.processing.org/t/processing-cant-find-the-camera-video-libraries-dont-work/25128/12.

All of this was needed to get the example programs in processing-manikantapabba.zip running. I have found many example programs via Twitter at https://twitter.com/processing_org, where they simply post the link to the GitHub repository.

pipeline:autovideosrc uses GStreamer's automatic webcam detection; the underlying problem is that the webcam supports both the yuyv and mjpeg formats.

I have run into this problem a few different times. I think it may have to do with cameras which support both yuyv and mjpeg. Anyway, I finally found this solution: instead of Capture.list(), use a custom GStreamer pipeline. It defaults to yuyv, which is annoying if you want HD video at more than 2 fps. After much trial and error, here are some pipelines that worked for me. Generic yuyv:

cam = new Capture(this, "pipeline:autovideosrc");

yuyv with some parameters:

  cam = new Capture(this, 640, 480, "pipeline: ksvideosrc device-index=0 ! video/x-raw,width=640,height=480");

mjpeg:

 cam = new Capture(this, 1920, 1080, "pipeline: ksvideosrc device-index=0 ! image/jpeg, width=1920, height=1080, framerate=30/1 ! jpegdec ! videoconvert");

Note that you should specify your resolution twice so that Processing and gstreamer are on the same page. It's not necessary in all cases, but you might save yourself a headache.
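
A quick way to confirm that both sides agree is to print the size Processing actually negotiated. A minimal check, reusing the mjpeg pipeline from above:

import processing.video.*;

Capture cam;

void setup() {
  size(1920, 1080);
  cam = new Capture(this, 1920, 1080, "pipeline: ksvideosrc device-index=0 ! image/jpeg, width=1920, height=1080, framerate=30/1 ! jpegdec ! videoconvert");
  cam.start();
}

void draw() {
  if (cam.available()) {
    cam.read();
    image(cam, 0, 0);
    // Should show 1920x1080; anything else means Processing and
    // gstreamer disagreed about the caps
    surface.setTitle(cam.width + "x" + cam.height);
  }
}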

In fact, with GStreamer we can send video over the internet; JavaScript as well as other programs actually use it under the hood to open and send a pipeline (a stream of bits):

 gst-launch-1.0 v4l2src device=/dev/video0 ! videoconvert ! ximagesink
 

which is the same as:

gst-launch-1.0 autovideosrc ! videoconvert ! ximagesink
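
These same source elements can be dropped into Processing's pipeline string. For instance, a hedged equivalent of the v4l2src command above inside a Capture constructor (assuming /dev/video0 is the webcam):

cam = new Capture(this, 640, 480, "pipeline: v4l2src device=/dev/video0 ! video/x-raw,width=640,height=480");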

Summary:

Video and audio source elements

Here’s a list of alternative video source elements

  autovideosrc - automatically detects and chooses a video source
  ksvideosrc - video capture from cameras on Windows
  v4l2src - obtains video stream from a Video 4 Linux 2 device, such as a webcam
  ximagesrc - video stream is produced from screenshots

Here’s a list of alternative audio source elements

  autoaudiosrc - automatically detects and chooses an audio source
  alsasrc - captures audio stream from a specific device using alsa
  pulsesrc - captures audio stream from the default mic, based on system settings

An important point to note is that all these sources are live sources. GStreamer defines live sources as sources that discard data when paused, and produce data at a fixed rate thus providing a clock to publish this rate.

Stream live WebM video to browser using Node.js and GStreamer

var express = require('express');
var http = require('http');
var net = require('net');
var child = require('child_process');

var app = express();
var httpServer = http.createServer(app);

app.get('/', function (req, res) {
    var date = new Date();

    res.writeHead(200, {
        'Date': date.toUTCString(),
        'Connection': 'close',
        'Cache-Control': 'private',
        'Content-Type': 'video/webm',
        'Server': 'CustomStreamer/0.0.1',
    });

    var tcpServer = net.createServer(function (socket) {
        socket.on('data', function (data) {
            res.write(data);
        });
        socket.on('close', function (had_error) {
            res.end();
        });
    });

    tcpServer.maxConnections = 1;

    tcpServer.listen(function () {
        var cmd = 'gst-launch-1.0';
        var args =
            ['autovideosrc',
                '!', 'video/x-raw,framerate=30/1,width=320,height=240',
                '!', 'videoconvert',
                '!', 'queue', 'leaky=1',
                '!', 'vp8enc',
                '!', 'queue', 'leaky=1',
                '!', 'm.', 'autoaudiosrc',
                '!', 'queue', 'leaky=1',
                '!', 'audioconvert',
                '!', 'vorbisenc',
                '!', 'queue', 'leaky=1',
                '!', 'm.', 'webmmux', 'name=m', 'streamable=true',
                '!', 'queue', 'leaky=1',
                '!', 'tcpclientsink', 'host=localhost',
                'port=' + tcpServer.address().port];

        var gstMuxer = child.spawn(cmd, args);

        gstMuxer.stderr.on('data', onSpawnError);
        gstMuxer.on('exit', onSpawnExit);

        res.connection.on('close', function () {
            gstMuxer.kill();
        });
    });
});

httpServer.listen(9001);

function onSpawnError(data) {
    console.log(data.toString());
}

function onSpawnExit(code) {
    if (code != null) {
        console.log('GStreamer error, exit code ' + code);
    }
}

process.on('uncaughtException', function (err) {
    console.log(err);
});

Execute

Assuming you have saved the code to a file called script.js, run Node.js thus

node script.js

Now, you can play the WebM stream in Chrome by accessing http://localhost:9001/.

Debug

You may want to trace all system calls, especially if you change the args to GStreamer and get a cryptic message such as

execvp(): No such file or directory

You can execute Node.js with strace

strace -fF -o strace.log node script.js

Limitations

GStreamer pipelines cannot simultaneously capture streams using sources that access the same device, hence tcpServer.maxConnections has been restricted to 1. Even assuming that simultaneous access to the device was possible, the code above is CPU intensive since audio and video encoding is done once per viewer.

Easier: with a script and a simple HTML web page

index.html

<!DOCTYPE html>
<html>
        <head>
                <meta http-equiv="content-type" content="text/html; charset=utf-8">
                <title>gst-stream</title>
        </head>
        <body>
                <video width=320 height=240 autoplay>
                        <source src="http://localhost:8080">
                </video>
        </body>
</html>

streaming-linux.sh

#!/bin/sh
# for Linux

gst-launch-1.0 \
        v4l2src device=/dev/video0 \
        ! videoconvert ! videoscale ! video/x-raw,width=320,height=240 \
        ! clockoverlay shaded-background=true font-desc="Sans 38" \
        ! theoraenc ! oggmux ! tcpserversink host=127.0.0.1 port=8080

streaming-windows.sh

#!/bin/sh
# for Windows

gst-launch-1.0 \
        ksvideosrc device-index=0 \
        ! videoconvert ! videoscale ! video/x-raw,width=320,height=240 \
        ! clockoverlay shaded-background=true font-desc="Sans 38" \
        ! theoraenc ! oggmux ! tcpserversink host=127.0.0.1 port=8080
        

Other libraries and programs: http://webcamxtra.sourceforge.net/
