import java.awt.image.BufferedImage;

String name = "imageProvider"
AbstractImageProvider camera0 = null;
if(DeviceManager.getSpecificDevice(AbstractImageProvider.class, name) == null){
    //BowlerStudio.speak("Connecting to camera 0.");
    camera0 = new OpenCVImageProvider(0); // grab the first camera
    DeviceManager.addConnection(camera0, name);
    /** Static File Image Provider example
    FileChooser fileChooser = new FileChooser();
    fileChooser.setTitle("Open Image File");
    Platform.runLater({
        File f = fileChooser.showOpenDialog(BowlerStudio.getPrimaryStage());
        if(f != null){
            camera0 = new StaticFileProvider(f);
            DeviceManager.addConnection(camera0, name);
        }else
            return null;
    })
    */
    /** URL Image Provider example
    TextInputDialog dialog = new TextInputDialog("http://neuronrobotics.com/img/AndrewHarrington/2014-09-15-86.jpg");
    dialog.setTitle("URL Image Source");
    dialog.setHeaderText("This url will be loaded each capture.");
    dialog.setContentText("URL ");
    // Traditional way to get the response value.
    Optional result = dialog.showAndWait();
    if (result.isPresent()){
        URLImageProvider p;
        try {
            p = new URLImageProvider(new URL(result.get()));
            String name = "url";
            DeviceManager.addConnection(p, name);
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
    }
    */
}else{
    // The camera is already connected; reuse the existing device
    camera0 = (AbstractImageProvider) DeviceManager.getSpecificDevice(AbstractImageProvider.class, name);
}
// Block until the image provider is registered with the DeviceManager
while(DeviceManager.getSpecificDevice(AbstractImageProvider.class, name) == null){
    ThreadUtil.wait(100)
}
// Starting with the connected camera from BowlerStudio
println(camera0)

// Pull the Haar cascade definition file from Git
File haarCascadeFile = ScriptingEngine.fileFromGit(
        "https://github.com/madhephaestus/DefaultHaarCascade.git",
        "lbpcascade_frontalface.xml")

// Create the default detector using "lbpcascade_frontalface.xml"
IObjectDetector detector = new HaarDetector(haarCascadeFile)

// Create the input and display images. The display image is where the detector
// draws its detection overlay on top of the input image.
BufferedImage inputImage = AbstractImageProvider.newBufferImage(640, 480)
BufferedImage displayImage = AbstractImageProvider.newBufferImage(640, 480)

// Set up a cache directory for optionally saving frames
ArrayList inFiles = new ArrayList();
String dir = ScriptingEngine.getWorkspace().getAbsolutePath() + "/imageCache/"
File dirFile = new File(dir)
if(!dirFile.exists()){
    dirFile.mkdir();
}

// Loop checking the camera for faces
int i = 0;
while(!Thread.interrupted() && i < 200){
    camera0.getLatestImage(inputImage, displayImage) // capture an image
    List data = detector.getObjects(inputImage, displayImage)
    if(data.size() > 0){
        println("Got: " + data.size() +
                " x location = " + data.get(0).getX() +
                " y location = " + data.get(0).getY() +
                " size = " + data.get(0).getSize())
        // Example: map the detected x location to an output value for a DyIO channel
        int position = (int) ((data.get(0).getX() / 320.0) * 255);
        //dyio.setValue(0,position)
    }
    /* Save each annotated frame to the image cache
    try {
        File outputfile = new File(dir + "/saved_" + i + ".jpg");
        ImageIO.write(displayImage, "jpg", outputfile);
        inFiles.add(outputfile)
    } catch (IOException e) {
        e.printStackTrace()
    }
    */
    i++
}
/* Compile the saved images into a video
println "Compiling to video"
File outFile = new File(dir + "/Video.mov")
ImagesToVideo imageToMovie = new ImagesToVideo();
imageToMovie.run(640, 480, 30, inFiles, outFile);
println "Video exported to " + outFile.getAbsolutePath()
*/