This article collects typical usage examples of the Java class org.openimaj.video.capture.VideoCapture. If you are wondering what the VideoCapture class does, how it is used, or where to find examples of it in practice, the curated class examples below should help.
The VideoCapture class belongs to the org.openimaj.video.capture package. Twenty code examples of the class are listed below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
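Before the individual examples, here is a minimal sketch of the pattern that almost all of them share: open a capture device at a chosen resolution and hand it to a VideoDisplay. The 320x240 resolution and the use of the system's default device are assumptions made for illustration only.

import org.openimaj.video.VideoDisplay;
import org.openimaj.video.capture.VideoCapture;
import org.openimaj.video.capture.VideoCaptureException;

public class MinimalVideoCaptureSketch {
    public static void main(String[] args) throws VideoCaptureException {
        // Open the system's default capture device at an assumed 320x240 resolution
        final VideoCapture capture = new VideoCapture(320, 240);

        // Show the live stream; the display drives capture on its own thread
        VideoDisplay.createVideoDisplay(capture);
    }
}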
Example 1: itemStateChanged
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
@Override
public void itemStateChanged(ItemEvent event) {
    if (event.getStateChange() == ItemEvent.SELECTED) {
        final String item = (String) event.getItem();
        final Device d = VideoCapture.getVideoDevices().get(sources.getSelectedIndex());

        if (d.getNameStr().equals(item) && !currentDevice.equals(d)) {
            try {
                currentDevice = d;
                display.setMode(Mode.STOP);
                display.getVideo().close();
                display.changeVideo(new VideoCapture(width, height, currentDevice));
                display.setMode(Mode.PLAY);
            } catch (final VideoCaptureException e) {
                e.printStackTrace();
            }
        }
    }
}
Developer: jonhare, Project: ecs-summer-school-vision-lecture, Lines of code: 19, Source file: VideoCaptureComponent.java
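Example 1 only switches the capture device behind an existing VideoDisplay; the devices still have to be discovered and offered to the user somewhere. A hedged sketch of that companion step, populating a combo box (assumed here to correspond to the `sources` component referenced above) from VideoCapture.getVideoDevices(), might look like this:

import java.util.List;
import javax.swing.JComboBox;
import org.openimaj.video.capture.Device;
import org.openimaj.video.capture.VideoCapture;

public class DeviceListSketch {
    // Build a combo box listing every attached capture device by name;
    // selecting an entry could then drive the itemStateChanged logic of Example 1.
    public static JComboBox<String> buildDeviceCombo() {
        final JComboBox<String> sources = new JComboBox<String>();
        final List<Device> devices = VideoCapture.getVideoDevices();

        for (final Device device : devices) {
            sources.addItem(device.getNameStr());
        }
        return sources;
    }
}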
Example 2: VideoMustache
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * Default constructor
 *
 * @throws IOException
 */
public VideoMustache() throws IOException {
    final VideoDisplay<MBFImage> vd = VideoDisplay.createVideoDisplay(new VideoCapture(320, 240));

    vd.addVideoListener(new VideoDisplayListener<MBFImage>() {
        @Override
        public void beforeUpdate(MBFImage frame) {
            frame.internalAssign(m.addMustaches(frame));
        }

        @Override
        public void afterUpdate(VideoDisplay<MBFImage> display) {
        }
    });
}
Developer: openimaj, Project: openimaj, Lines of code: 23, Source file: Mustache.java
Example 3: useWebcam
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * Set the video source to be the webcam
 *
 * @throws IOException
 */
public void useWebcam() throws IOException {
    // Stop any existing video
    this.stopVideo();

    // Setup a new video from the VideoCapture class
    this.video = new VideoCapture(320, 240);

    // Reset the video displayer to use the capture class
    this.videoDisplay = new VideoDisplay<MBFImage>(this.video, this.ic);

    // Make sure the listeners are sorted
    this.addListeners();

    // Start the new video playback thread
    this.videoThread = new Thread(this.videoDisplay);
    this.videoThread.start();
}
Developer: openimaj, Project: openimaj, Lines of code: 23, Source file: VideoProcessingDemo.java
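Example 3 comes from a demo that can also take its frames from sources other than the webcam. As a hedged illustration of what a file-based counterpart might look like, the sketch below swaps VideoCapture for an org.openimaj.video.xuggle.XuggleVideo; the fields (video, videoDisplay, ic, videoThread) and addListeners() are the ones used in Example 3, while the method itself is an assumption rather than a verbatim part of the original class.

import java.io.File;
import org.openimaj.image.MBFImage;
import org.openimaj.video.VideoDisplay;
import org.openimaj.video.xuggle.XuggleVideo;

/**
 * Set the video source to be a file on disk (sketch).
 */
public void useFile(final File file) {
    // Stop any existing video
    this.stopVideo();

    // Read frames from the file via Xuggle instead of the capture device
    this.video = new XuggleVideo(file);

    // Reset the video displayer to use the new source
    this.videoDisplay = new VideoDisplay<MBFImage>(this.video, this.ic);

    // Make sure the listeners are sorted
    this.addListeners();

    // Start the new video playback thread
    this.videoThread = new Thread(this.videoDisplay);
    this.videoThread.start();
}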
Example 4: VideoCaptureFramesExample
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * @throws VideoCaptureException
 */
public VideoCaptureFramesExample() throws VideoCaptureException {
    this.fpsDelayLabel = new JLabel(VideoCaptureFramesExample.DELAY_LBL + this.fpsDelayMillis);

    // open the capture device at 1024x768 and create a window to display it in
    this.vc = new VideoCapture(1024, 768);

    final FrameDemoImageComponent ic = new FrameDemoImageComponent();
    ic.setAllowZoom(false);
    ic.setAllowPanning(false);
    ic.setTransparencyGrid(false);
    ic.setShowPixelColours(false);
    ic.setShowXYPosition(true);

    this.buildGui(ic).setVisible(true);

    this.display = new VideoDisplay<MBFImage>(this.vc, null, ic);
    this.display.addVideoListener(ic);

    this.displayThread = new Thread(this.display);
    this.displayThread.start();
}
Developer: openimaj, Project: openimaj, Lines of code: 27, Source file: VideoCaptureFramesExample.java
Example 5: main
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * Run the example
 *
 * @param args
 */
public static void main(String[] args) {
    try {
        // Open the video. Here we're using the default webcam, but this could be
        // any type of video, such as a XuggleVideo which reads from a file.
        final Video<MBFImage> video = new VideoCapture(320, 240);

        // create and display the UI
        createUI(video);
    } catch (final IOException e) {
        // an error occurred
        JOptionPane.showMessageDialog(null, "Unable to open video.");
    }
}
Developer: openimaj, Project: openimaj, Lines of code: 21, Source file: VideoSnapshotExample.java
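Example 5 treats the capture device as a plain Video&lt;MBFImage&gt; and hands it to a UI. For reference, a minimal, hedged sketch of taking a single snapshot from such a video and writing it to disk is shown below; the 320x240 resolution and the output file name are assumptions.

import java.io.File;
import java.io.IOException;
import org.openimaj.image.ImageUtilities;
import org.openimaj.image.MBFImage;
import org.openimaj.video.Video;
import org.openimaj.video.capture.VideoCapture;

public class SnapshotSketch {
    public static void main(String[] args) throws IOException {
        // Open the default webcam at an assumed 320x240 resolution
        final Video<MBFImage> video = new VideoCapture(320, 240);

        // Grab the next available frame and save it as a PNG
        final MBFImage frame = video.getNextFrame();
        ImageUtilities.write(frame, new File("snapshot.png"));

        video.close();
    }
}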
Example 6: VideoRecordingExample
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * Default constructor
 *
 * @throws IOException
 */
public VideoRecordingExample() throws IOException {
    // open webcam
    video = new VideoCapture(320, 240);

    // open display
    display = VideoDisplay.createVideoDisplay(video);

    // open a writer
    writer = new XuggleVideoWriter("video.flv", video.getWidth(), video.getHeight(), 30);

    // set this class to listen to video display events
    display.addVideoListener(this);

    // set this class to listen to keyboard events
    SwingUtilities.getRoot(display.getScreen()).addKeyListener(this);
}
Developer: openimaj, Project: openimaj, Lines of code: 22, Source file: VideoRecordingExample.java
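Example 6 only shows the constructor of the recording demo; the captured frames still need to be handed to the XuggleVideoWriter from one of the VideoDisplayListener callbacks. A hedged sketch of what such a callback might look like is shown below; the `recording` flag (presumably toggled from the key listener) is an assumption, and `writer` and `video` are the fields initialised in Example 6.

@Override
public void beforeUpdate(MBFImage frame) {
    // Append each captured frame to the output file while recording is enabled.
    // `recording` is assumed; `writer` comes from Example 6's constructor.
    if (recording) {
        writer.addFrame(frame);
    }
}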
Example 7: setupVideo
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
private void setupVideo() {
    if (comboBox.getSelectedItem().equals("None"))
        return;

    final Device dev = (Device) comboBox.getSelectedItem();

    if (display != null) {
        ((VideoCapture) display.getVideo()).stopCapture();
        panel.removeAll();
    }

    System.out.println(dev);

    try {
        display = VideoDisplay.createVideoDisplay(new VideoCapture(capWidth, capHeight, capRate, dev), panel);
    } catch (final VideoCaptureException e) {
        throw new RuntimeException(e);
    }

    revalidate();
    repaint();
}
Developer: openimaj, Project: openimaj, Lines of code: 23, Source file: CaptureComponent.java
Example 8: Main
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
public Main() throws Exception {
    capture = new VideoCapture(640, 480);
    videoFrame = VideoDisplay.createVideoDisplay(capture);
    videoFrame.addVideoListener(this);
    SwingUtilities.getRoot(videoFrame.getScreen()).addKeyListener(this);
    // videoFrame.getScreen().setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

    final TrackingContext tc = new TrackingContext();
    fl1 = new FeatureList(nFeatures);
    tracker = new KLTTracker(tc, fl1);

    tc.setSequentialMode(true);
    tc.setWriteInternalImages(false);
    tc.setAffineConsistencyCheck(-1);

    // Rough intrinsics for a 640x480 capture (the original snippet set the X focal
    // length twice and used 320/2 for the Y principal point, which look like typos)
    final Matrix m = new Matrix(3, 3);
    this.cam = new CameraIntrinsics(m, 640, 480);
    cam.setFocalLengthX(640);
    cam.setFocalLengthY(480);
    cam.setPrincipalPointX(640 / 2);
    cam.setPrincipalPointY(480 / 2);
}
Developer: openimaj, Project: openimaj, Lines of code: 23, Source file: Main.java
Example 9: main
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
public static void main(String[] args) throws VideoCaptureException {
    if (args.length == 0) {
        System.out.println("Usage: VideoTest device [width [height [rate]]]");

        int i = 0;
        for (final Device d : VideoCapture.getVideoDevices()) {
            System.out.println(i + "\t" + d.getNameStr());
            i++;
        }
        return;
    }

    final Device dev = VideoCapture.getVideoDevices().get(Integer.parseInt(args[0]));

    // Optional width/height/rate arguments; each index is checked before it is read
    final int width = args.length > 1 ? Integer.parseInt(args[1]) : 320;
    final int height = args.length > 2 ? Integer.parseInt(args[2]) : 240;
    final double rate = args.length > 3 ? Double.parseDouble(args[3]) : 30;

    final VideoCapture c = new VideoCapture(width, height, rate, dev);
    VideoDisplay.createVideoDisplay(c);
}
Developer: openimaj, Project: openimaj, Lines of code: 24, Source file: VideoTest.java
Example 10: main
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
public static void main(String[] args) throws VideoCaptureException {
    final SWTTextDetector detector = new SWTTextDetector();
    detector.getOptions().direction = SWTTextDetector.Direction.LightOnDark;

    VideoDisplay.createVideoDisplay(new VideoCapture(640, 480)).addVideoListener(new VideoDisplayAdapter<MBFImage>() {
        @Override
        public void beforeUpdate(MBFImage frame) {
            if (frame == null)
                return;

            detector.analyseImage(frame.flatten());

            for (final LineCandidate line : detector.getLines()) {
                frame.drawShape(line.getRegularBoundingBox(), RGBColour.RED);

                for (final WordCandidate wc : line.getWords()) {
                    frame.drawShape(wc.getRegularBoundingBox(), RGBColour.BLUE);

                    for (final LetterCandidate lc : wc.getLetters())
                        frame.drawShape(lc.getRegularBoundingBox(), RGBColour.GREEN);
                }
            }
        }
    });
}
Developer: openimaj, Project: openimaj, Lines of code: 27, Source file: SWTVideoTest.java
Example 11: VideoKLT
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
public VideoKLT() throws Exception {
    capture = new VideoCapture(640, 480);
    videoFrame = VideoDisplay.createVideoDisplay(capture);
    videoFrame.addVideoListener(this);
    SwingUtilities.getRoot(videoFrame.getScreen()).addKeyListener(this);
    // videoFrame.getScreen().setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

    final TrackingContext tc = new TrackingContext();
    fl = new FeatureList(nFeatures);
    ft = new FeatureTable(nFeatures);
    tracker = new KLTTracker(tc, fl);

    tc.setSequentialMode(true);
    tc.setWriteInternalImages(false);
    tc.setAffineConsistencyCheck(-1); /* set this to 2 to turn on affine consistency check */
}
Developer: openimaj, Project: openimaj, Lines of code: 17, Source file: VideoKLT.java
Example 12: VideoFacialExpressionRecognition
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
public VideoFacialExpressionRecognition() throws Exception {
    capture = new VideoCapture(320, 240);
    engine = new CLMFaceTracker();
    engine.fpd = 120;
    // engine.fcheck = true;

    videoFrame = VideoDisplay.createVideoDisplay(capture);
    videoFrame.addVideoListener(this);
    SwingUtilities.getRoot(videoFrame.getScreen()).addKeyListener(this);

    final CLMShapeFeature.Extractor extractor = new CLMShapeFeature.Extractor();
    final FacialFeatureComparator<CLMShapeFeature> comparator =
            new FaceFVComparator<CLMShapeFeature, DoubleFV>(DoubleFVComparison.EUCLIDEAN);
    final KNNAnnotator<CLMDetectedFace, String, CLMShapeFeature> knn =
            KNNAnnotator.create(extractor, comparator, 1, 5f);

    recogniser = AnnotatorFaceRecogniser.create(knn);
}
Developer: openimaj, Project: openimaj, Lines of code: 22, Source file: VideoFacialExpressionRecognition.java
Example 13: main
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
public static void main(String[] args) throws IOException {
    final VideoCapture vc = new VideoCapture(1280, 960);
    final VideoDisplay<MBFImage> vd = VideoDisplay.createVideoDisplay(vc);

    vd.addVideoListener(new VideoDisplayListener<MBFImage>() {
        long lastTime = 0;

        @Override
        public void afterUpdate(VideoDisplay<MBFImage> display) {
        }

        @Override
        public void beforeUpdate(MBFImage frame) {
            // Print the instantaneous frame rate from the time between successive frames
            final long thisTime = System.currentTimeMillis();
            if (lastTime != 0 && thisTime != lastTime) {
                System.out.println(1000 / (thisTime - lastTime));
            }
            lastTime = thisTime;
        }
    });
}
Developer: openimaj, Project: openimaj, Lines of code: 27, Source file: SimpleVideoSpeedTest.java
Example 14: main
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * Main method
 *
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    final VideoCapture vc = new VideoCapture(320, 240);
    final VideoDisplay<MBFImage> vd = VideoDisplay.createVideoDisplay(vc);

    vd.addVideoListener(new VideoDisplayListener<MBFImage>() {
        @Override
        public void beforeUpdate(MBFImage frame) {
            final FaceDetector<DetectedFace, FImage> fd = new HaarCascadeDetector(40);
            final List<DetectedFace> faces = fd.detectFaces(Transforms.calculateIntensity(frame));

            for (final DetectedFace face : faces) {
                frame.drawShape(face.getBounds(), RGBColour.RED);
            }
        }

        @Override
        public void afterUpdate(VideoDisplay<MBFImage> display) {
        }
    });
}
Developer: openimaj, Project: openimaj, Lines of code: 27, Source file: App.java
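Note that Example 14 constructs a new HaarCascadeDetector inside beforeUpdate, i.e. once per frame, which is relatively expensive. A hedged variant that creates the detector once and reuses it across frames could look like this:

public static void main(String[] args) throws IOException {
    final VideoCapture vc = new VideoCapture(320, 240);
    final VideoDisplay<MBFImage> vd = VideoDisplay.createVideoDisplay(vc);

    // Create the detector once, outside the per-frame callback
    final FaceDetector<DetectedFace, FImage> fd = new HaarCascadeDetector(40);

    vd.addVideoListener(new VideoDisplayListener<MBFImage>() {
        @Override
        public void beforeUpdate(MBFImage frame) {
            // Detect faces on the intensity image and draw their bounding boxes
            final List<DetectedFace> faces = fd.detectFaces(Transforms.calculateIntensity(frame));
            for (final DetectedFace face : faces) {
                frame.drawShape(face.getBounds(), RGBColour.RED);
            }
        }

        @Override
        public void afterUpdate(VideoDisplay<MBFImage> display) {
        }
    });
}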
Example 15: main
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * Simple test program
 *
 * @param args
 * @throws MalformedURLException
 * @throws IOException
 */
public static void main(String[] args) throws MalformedURLException, IOException {
    final FastChessboardDetector fcd = new FastChessboardDetector(9, 6);
    final VideoDisplay<MBFImage> vd = VideoDisplay.createVideoDisplay(new VideoCapture(640, 480));
    vd.setCalculateFPS(true);

    vd.addVideoListener(new VideoDisplayAdapter<MBFImage>() {
        @Override
        public void beforeUpdate(MBFImage frame) {
            fcd.analyseImage(frame.flatten());
            frame.drawText(fcd.result + "", 100, 100, HersheyFont.FUTURA_LIGHT, 20, RGBColour.RED);
            System.out.println(vd.getDisplayFPS());
        }
    });
}
Developer: openimaj, Project: openimaj, Lines of code: 23, Source file: FastChessboardDetector.java
Example 16: main
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * The main method.
 *
 * @param args
 * @throws IOException
 * @throws MalformedURLException
 */
public static void main(String[] args) throws MalformedURLException, IOException {
    try {
        final MultiPuppeteer puppeteer = new MultiPuppeteer();
        VideoDisplay.createVideoDisplay(new VideoCapture(640, 480)).addVideoListener(puppeteer);
    } catch (final VideoCaptureException e) {
        JOptionPane.showMessageDialog(null, "No video capture devices were found!");
    }
}
Developer: openimaj, Project: openimaj, Lines of code: 17, Source file: MultiPuppeteer.java
Example 17: main
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * The main method.
 *
 * @param args
 */
public static void main(String[] args) {
    try {
        VideoDisplay.createVideoDisplay(new VideoCapture(640, 480)).addVideoListener(new CLMMultiTrackerDemo());
    } catch (final VideoCaptureException e) {
        JOptionPane.showMessageDialog(null, "No video capture devices were found!");
    }
}
Developer: openimaj, Project: openimaj, Lines of code: 13, Source file: CLMMultiTrackerDemo.java
Example 18: stopVideo
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * Stops the current video.
 */
private void stopVideo() {
    if (this.video instanceof VideoCapture)
        ((VideoCapture) this.video).stopCapture();

    if (this.videoDisplay != null)
        this.videoDisplay.setMode(Mode.STOP);
}
Developer: openimaj, Project: openimaj, Lines of code: 11, Source file: VideoProcessingDemo.java
Example 19: VideoFeatureExtraction
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * Default constructor
 *
 * @param window
 *            The window to display the demo in
 * @throws IOException
 */
public VideoFeatureExtraction(final JComponent window) throws IOException {
    this.capture = new VideoCapture(640, 480);

    window.setLayout(new GridBagLayout());

    final JPanel vidPanel = new JPanel(new GridBagLayout());
    vidPanel.setBorder(BorderFactory.createTitledBorder("Live Video"));
    this.videoDisplay = VideoDisplay.createVideoDisplay(this.capture, vidPanel);
    this.videoDisplay.addVideoListener(this);

    GridBagConstraints gbc = new GridBagConstraints();
    gbc.anchor = GridBagConstraints.PAGE_START;
    window.add(vidPanel, gbc);

    this.modelPanel = new JPanel(new GridBagLayout());
    this.modelPanel.setBorder(BorderFactory.createTitledBorder("Feature type: " + this.mode.toString()));
    gbc = new GridBagConstraints();
    gbc.anchor = GridBagConstraints.PAGE_END;
    gbc.gridy = 1;
    window.add(this.modelPanel, gbc);

    this.modelFrame = new ImageComponent(true, false);
    this.modelPanel.add(this.modelFrame);

    this.histogramImage = new MBFImage(640, 60, ColourSpace.RGB);
    this.modelFrame.setImage(ImageUtilities.createBufferedImageForDisplay(this.histogramImage));

    ((JFrame) SwingUtilities.getRoot(this.videoDisplay.getScreen())).addKeyListener(this);
}
Developer: openimaj, Project: openimaj, Lines of code: 35, Source file: VideoFeatureExtraction.java
Example 20: main
import org.openimaj.video.capture.VideoCapture; // import the dependent package/class
/**
 * Main method
 *
 * @param args
 *            ignored
 * @throws VideoCaptureException
 */
public static void main(String[] args) throws VideoCaptureException {
    final int w = 640;
    final int h = 480;
    final Line2d axis = new Line2d(w / 2, h / 2, w / 2, h);

    VideoDisplay.createVideoDisplay(new VideoFrameProcessor<MBFImage>(new VideoCapture(w, h),
            ProcessorUtilities.wrap(new DioramaEffect(axis))));
}
Developer: openimaj, Project: openimaj, Lines of code: 16, Source file: VideoMiniatureFakingDemo.java
Note: The org.openimaj.video.capture.VideoCapture examples in this article were collected from GitHub, MSDocs and other source-code and documentation platforms. The snippets are taken from open-source projects contributed by their respective developers, and copyright remains with the original authors. Please consult each project's license before using or redistributing the code; do not reproduce without permission.