Quantcast
Channel: Library Questions - Processing 2.x and 3.x Forum
Viewing all articles
Browse latest Browse all 2896

Toxiclibs- get boundary edges only

$
0
0

Hello,

this should be a simple task, but I appear to be missing something. I am trying to get just the boundary edges of a WETriangleMesh plane — the edges and vertices which make up the 2D bounding box of the plane. I would then like to displace the vertices which make up that boundary and fit them onto a blob contour from OpenCV.

My plan was to get those edges, and turn only the bounding edges into particles, then create attractor particles for each point in the blob contour. However, I can't seem to work out the best way to get only the bounding edges.

From the AABB of the bounding box I could get the closest point, but that seems to work only for the corner points. I also tried getting the closest line.

Any advice?

` import toxi.geom.*; import toxi.geom.mesh.subdiv.*; import toxi.geom.mesh.*; import toxi.physics.*; import toxi.physics.behaviors.*; import toxi.physics.constraints.*; import toxi.processing.*; import toxi.math.*; import toxi.volume.*;

import processing.opengl.*; import gab.opencv.*; import processing.video.*; import java.awt.Rectangle; import java.util.Iterator;

import gab.opencv.*; import processing.video.*; import java.awt.Rectangle;

// Two independent particle systems: 'physics' drives the mesh vertices,
// 'blobphysics' holds particles sampled from the detected blob contour.
VerletPhysics physics, blobphysics; AttractionBehavior contract, contractor, attractor;

// NOTE(review): 'phys' is declared but never initialised anywhere in this
// sketch -- keyPressed('d') dereferences it and will NPE. Confirm intent.
VerletPhysics phys; AttractionBehavior mouseAttractor;

// NOTE(review): 'blob' is apparently meant to track the latest contour point,
// but draw() declares a local of the same name, so this field stays null.
Vec3D mousePos, blob;

// The winged-edge plane mesh, the toxiclibs render helper, and two helpers
// ('gravity', 'volume') that are declared but never used in this sketch.
WETriangleMesh plane; ToxiclibsSupport gfx; GravityBehavior gravity; VolumetricSpaceArray volume;

// Video capture + OpenCV pipeline state; 'contours' is a raw ArrayList of
// gab.opencv Contour objects refreshed every frame in draw().
Capture video; OpenCV opencv; PImage src, colorFilteredImage; ArrayList contours;

// Hue window (OpenCV's 0-180 hue scale) used by opencv.inRange();
// re-centred on the clicked pixel's hue in mousePressed().
int rangeLow = 20; int rangeHigh = 35;

// 'drawblob' toggles contour rendering (keys 'd'/'s'); 'test' is unused.
boolean test = false; boolean drawblob = false;

// Grid resolution of the generated plane mesh: w quads along z, h along x.
int w= 50; int h= 30;

// Unused alternative grid dimensions.
int w1 = 90; int h1 = 70;

// Uniform scale factor applied to the plane after centering.
int scaled= 10;

// Builds the plane mesh, wires up the two physics worlds and the OpenCV
// capture pipeline, and turns ONLY the mesh's boundary vertices into
// Verlet particles.
void setup() {
  size(900, 900, P3D);

  physics = new VerletPhysics();
  physics.setDrag(0.05f);

  gfx = new ToxiclibsSupport(this);

  blobphysics = new VerletPhysics();

  // OpenCV: capture at 640x480 and size the pipeline to match.
  video = new Capture(this, 640, 480);
  video.start();

  opencv = new OpenCV(this, video.width, video.height);
  contours = new ArrayList();

  // Build a flat triangulated plane: a w-by-h grid of quads, two
  // triangles per quad, lying in the XZ plane at y == 0.
  plane = new WETriangleMesh();
  for (int z = 0; z < w; z++) {
    for (int x = 0; x < h; x++) {
      plane.addFace(new Vec3D(x-1, 0, z-1), new Vec3D(x, 0, z-1), new Vec3D(x, 0, z));
      plane.addFace(new Vec3D(x-1, 0, z-1), new Vec3D(x, 0, z), new Vec3D(x-1, 0, z));
    }
  }

  // Too many subdivisions will make it slow.
  plane.subdivide();
  plane.center(null);
  plane.scale(scaled);
  println("box", plane.getVertices());

  // FIX: in a winged-edge mesh, a BOUNDARY edge is adjacent to exactly one
  // face (interior edges have two). Filtering on e.faces.size() == 1 yields
  // precisely the outline edges the sketch is after. This also replaces the
  // old pair of loops that added every vertex to 'physics' twice (once per
  // edge endpoint, once per mesh vertex) and the spurious (WEVertex) cast.
  for (WingedEdge e : plane.edges.values()) {
    if (e.faces.size() == 1) {
      physics.addParticle(new VerletParticle(e.a));
      physics.addParticle(new VerletParticle(e.b));
    }
  }

  // Attractor is re-aimed every frame in draw() at the latest contour point.
  attractor = new AttractionBehavior(new Vec3D(0, 0, 0), 50, 250, 0.01f);
}

////////////////////////////////////--------------/////-------------------/////////////////////////////////////////

void draw() { //openCV

//open CV // Read last captured frame if (video.available()) { video.read(); }

// <2> Load the new frame of our movie in to OpenCV opencv.loadImage(video);

// Tell OpenCV to use color information opencv.useColor(); src = opencv.getSnapshot();

// <3> Tell OpenCV to work in HSV color space. opencv.useColor(HSB);

// <4> Copy the Hue channel of our image into // the gray channel, which we process. opencv.setGray(opencv.getH().clone());

// <5> Filter the image based on the range of // hue values that match the object we want to track. opencv.inRange(rangeLow, rangeHigh);

// <6> Get the processed image for reference. colorFilteredImage = opencv.getSnapshot();

/////////////////////////////////////////// // We could process our image here! // See ImageFiltering.pde ///////////////////////////////////////////

// <7> Find contours in our range image. // Passing 'true' sorts them by descending area. contours = opencv.findContours(true, true);

// <8> Display background images image(src, 0, 0); image(colorFilteredImage, src.width, 0);

// <9> Check to make sure we've found any contours if (contours.size() > 0) { // <9> Get the first contour, which will be the largest one Contour biggestContour = contours.get(0);

// <10> Find the bounding box of the largest contour,
//      and hence our object.
Rectangle r = biggestContour.getBoundingBox();
//draw contour
if (drawblob == true) {
stroke(0, 255, 0);
biggestContour.draw();
stroke(255, 0, 0);
}


  for (PVector point : biggestContour.getPoints()) {

  vertex(point.x, point.y, point.z);
   Vec3D blob = new Vec3D(point.x, point.y, point.z);
   blobphysics.addParticle(new VerletParticle(blob));
   attractor.getAttractor().set(blob.x, blob.y, blob.z);
   //phys.addBehavior(new AttractionBehavior(blob, 20, -1.2f, 0.01f));
    //contractor = new AttractionBehavior(blob, 250, 0.9f);
   // phys.addBehavior(contractor);

}

 //phys.update();
  // update physics
  blobphysics.update();




// <11> Draw the bounding box of our object
noFill();
strokeWeight(2);
stroke(255, 0, 0);
rect(r.x, r.y, r.width, r.height);

// <12> Draw a dot in the middle of the bounding box, on the object.
noStroke();
fill(255, 0, 0);
ellipse(r.x + r.width/2, r.y + r.height/2, 30, 30);

}

// update mesh normals

// moving them to the current position
  // of their associated particles
  /*for (int i=0, num=phys.particles.size(); i<num; i++) {
    plane.getVertexForID(i).set(phys.particles.get(i));
  }
  */
  plane.computeFaceNormals();
  plane.computeVertexNormals();

//background(255, 255, 255);

noFill();
strokeWeight(2);
stroke(255, 0, 0);

//draw mesh
lights();
translate(width/2, height/2, 0);
rotateX((height / 2 - mouseY) * 0.01f);
rotateY((width / 2 - mouseX) * 0.01f);

directionalLight(255, 255, 255, -200, 1000, 500);
specular(255);
shininess(90);

fill(255, 25, 25);
stroke(190, 190, 190);
gfx.mesh(plane, true, 0);

}

// On click: sample the pixel under the cursor, report its RGB components,
// and re-centre the hue detection window (+/- 5) on that pixel's hue,
// converted from Processing's 0-255 hue scale to OpenCV's 0-180 scale.
void mousePressed() {
  color picked = get(mouseX, mouseY);
  println("r: " + red(picked) + " g: " + green(picked) + " b: " + blue(picked));

  int targetHue = int(map(hue(picked), 0, 255, 0, 180));
  println("hue to detect: " + targetHue);

  rangeLow = targetHue - 5;
  rangeHigh = targetHue + 5;
}

void keyPressed() { if (key == 'r') { //initPhysics(); }

if (key == 'p') {
      //inflate=new AttractionBehavior(new Vec3D(), 150, -0.3f, 0.001f);
      // physics.addBehavior(inflate);
}

if (key == 'd')  {
    drawblob = true;
    contractor = new AttractionBehavior(blob, 250, 0.9f);
    phys.addBehavior(contractor);

} if (key == 's') { drawblob = false; } }

`


Viewing all articles
Browse latest Browse all 2896

Trending Articles