本文整理了Java中org.openimaj.video.Video.getCurrentFrame()
方法的一些代码示例,展示了Video.getCurrentFrame()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。Video.getCurrentFrame()
方法的具体详情如下:
包路径:org.openimaj.video.Video
类名称:Video
方法名:getCurrentFrame
[英]Get the current frame
[中]获取当前帧
代码示例来源:origin: openimaj/openimaj
/**
 * Get the frame currently shown by the attached display, if any.
 *
 * @return the current frame, or {@code null} when no display is attached
 */
public MBFImage getCurrentFrame() {
    if (display == null)
        return null;
    return display.getVideo().getCurrentFrame();
}
}
代码示例来源:origin: org.openimaj/sandbox
@Override
public List<Identifiable> getIdentifiers()
{
    // A single identifiable is produced: the current video frame paired
    // with a timecode built from the frame index and frame rate.
    final List<Identifiable> identifiers = new ArrayList<Identifiable>();
    identifiers.add(new IdentifiableVideoFrame(
            video.getCurrentFrame(),
            new HrsMinSecFrameTimecode(video.getCurrentFrameIndex(), video.getFPS())));
    return identifiers;
}
}, sp);
代码示例来源:origin: openimaj/openimaj
/**
 * Handle the snapshot button: pause the video, let the user pick a file,
 * write the current frame as a JPEG, then resume playback.
 *
 * @param evt the action event fired by the button
 */
@Override
public void actionPerformed(ActionEvent evt) {
    // BUG FIX: the original compared Strings with ==, which only tests
    // reference identity. Use equals() on the constant side (null-safe,
    // since getActionCommand() may return null).
    if (SNAPSHOT_COMMAND.equals(evt.getActionCommand())) {
        // pause the video while the user chooses a destination
        display.setMode(VideoDisplay.Mode.PAUSE);
        // display a file save dialog
        final JFileChooser saveFile = new JFileChooser();
        if (saveFile.showSaveDialog(frame) == JFileChooser.APPROVE_OPTION) {
            // if a file was selected write the image
            try {
                // we're just going to add .jpg to the filename and
                // save it... should be much more intelligent here
                // in reality...
                File outfile = saveFile.getSelectedFile();
                outfile = new File(outfile.getParentFile(), outfile.getName() + ".jpg");
                ImageUtilities.write(video.getCurrentFrame(), outfile);
            } catch (final IOException ioe) {
                // display an error if the file couldn't be saved
                JOptionPane.showMessageDialog(null, "Unable to save file.");
            }
        }
        // start the video playing again (even if the save was cancelled)
        display.setMode(VideoDisplay.Mode.PLAY);
    }
}
});
代码示例来源:origin: openimaj/openimaj
@Override
public List<Identifiable> getIdentifiers()
{
    // A single identifiable is produced: the current video frame paired
    // with a timecode built from the frame index and frame rate.
    final List<Identifiable> identifiers = new ArrayList<Identifiable>();
    identifiers.add(new IdentifiableVideoFrame(
            video.getCurrentFrame(),
            new HrsMinSecFrameTimecode(video.getCurrentFrameIndex(), video.getFPS())));
    return identifiers;
}
}, sp);
代码示例来源:origin: openimaj/openimaj
/**
 * Select the object to track in the current frame, then start playback.
 *
 * @param r the bounding box of the object to track
 * @throws Exception if the underlying TLD tracker fails to initialise
 */
public void selectObject(Rectangle r) throws Exception{
// flatten() collapses the multi-band frame to a single-band image for the tracker
this.tld.selectObject(this.source.getCurrentFrame().flatten(),r);
// resume the display once the target has been selected
this.disp.setMode(Mode.PLAY);
}
代码示例来源:origin: org.openimaj/sandbox
/**
 * Track KLT features through a video, starting from its current frame.
 *
 * @param video     the video to track through
 * @param nFeatures number of features to track
 * @param replace   whether to re-seed features that are lost
 * @return a table of the feature positions for every processed frame
 */
FeatureTable trackFeatures(Video<FImage> video, int nFeatures, boolean replace) {
    // Configure a sequential tracking context with no affine consistency
    // check and no debug images written to disk.
    final TrackingContext context = new TrackingContext();
    context.setSequentialMode(true);
    context.setWriteInternalImages(false);
    context.setAffineConsistencyCheck(-1);

    final FeatureList features = new FeatureList(nFeatures);
    final FeatureTable table = new FeatureTable(nFeatures);
    final KLTTracker klt = new KLTTracker(context, features);

    // Seed the tracker on the current frame and record it as frame 0.
    FImage previous = video.getCurrentFrame();
    klt.selectGoodFeatures(previous);
    table.storeFeatureList(features, 0);

    // Track frame-to-frame through the rest of the video.
    while (video.hasNextFrame()) {
        final FImage current = video.getNextFrame();
        klt.trackFeatures(previous, current);
        if (replace)
            klt.replaceLostFeatures(current);
        previous = current;
        table.storeFeatureList(features, video.getCurrentFrameIndex());
    }
    return table;
}
代码示例来源:origin: org.openimaj/sandbox
/**
 * Build an orthographic structure-from-motion reconstruction from a video.
 * Tracks features through the whole video, triangulates the first-frame
 * points, then factorises the measurement matrix (presumably a
 * Tomasi-Kanade-style factorisation — confirm against the factorise()
 * implementation).
 *
 * @param video     the input video; its current frame is kept as the texture
 * @param nFeatures the number of features to track
 */
public SFMOrtho(Video<FImage> video, int nFeatures) {
// keep a copy of the starting frame for texturing the reconstruction
texture = video.getCurrentFrame().clone();
FeatureTable features = trackFeatures(video, nFeatures, false);
// discard features that were not tracked through the full sequence
features = filterNonTracked(features);
pts = features.features.get(0);
// triangulate the first-frame feature positions into a mesh
final List<Triangle> tris = DelaunayTriangulator.triangulate(pts);
triangleDefs = new int[tris.size()][3];
for (int i = 0; i < tris.size(); i++) {
final Triangle t = tris.get(i);
for (int j = 0; j < 3; j++) {
// store each triangle as indices into the point list
triangleDefs[i][j] = pts.indexOf(t.vertices[j]);
}
}
// factorise the measurement matrix, enforce the metric constraint and
// align the reconstruction with the first frame
final Matrix w = buildMeasurementMatrix(features);
factorise(w);
applyMetricConstraint();
alignWithFrame(0);
}
代码示例来源:origin: openimaj/openimaj
/**
 * Track KLT features through a video, starting from its current frame.
 *
 * @param video     the video to track through
 * @param nFeatures number of features to track
 * @param replace   whether to re-seed features that are lost
 * @return a table of the feature positions for every processed frame
 */
FeatureTable trackFeatures(Video<FImage> video, int nFeatures, boolean replace) {
    // Configure a sequential tracking context with no affine consistency
    // check and no debug images written to disk.
    final TrackingContext context = new TrackingContext();
    context.setSequentialMode(true);
    context.setWriteInternalImages(false);
    context.setAffineConsistencyCheck(-1);

    final FeatureList features = new FeatureList(nFeatures);
    final FeatureTable table = new FeatureTable(nFeatures);
    final KLTTracker klt = new KLTTracker(context, features);

    // Seed the tracker on the current frame and record it as frame 0.
    FImage previous = video.getCurrentFrame();
    klt.selectGoodFeatures(previous);
    table.storeFeatureList(features, 0);

    // Track frame-to-frame through the rest of the video.
    while (video.hasNextFrame()) {
        final FImage current = video.getNextFrame();
        klt.trackFeatures(previous, current);
        if (replace)
            klt.replaceLostFeatures(current);
        previous = current;
        table.storeFeatureList(features, video.getCurrentFrameIndex());
    }
    return table;
}
代码示例来源:origin: openimaj/openimaj
/**
 * Build an orthographic structure-from-motion reconstruction from a video.
 * Tracks features through the whole video, triangulates the first-frame
 * points, then factorises the measurement matrix (presumably a
 * Tomasi-Kanade-style factorisation — confirm against the factorise()
 * implementation).
 *
 * @param video     the input video; its current frame is kept as the texture
 * @param nFeatures the number of features to track
 */
public SFMOrtho(Video<FImage> video, int nFeatures) {
// keep a copy of the starting frame for texturing the reconstruction
texture = video.getCurrentFrame().clone();
FeatureTable features = trackFeatures(video, nFeatures, false);
// discard features that were not tracked through the full sequence
features = filterNonTracked(features);
pts = features.features.get(0);
// triangulate the first-frame feature positions into a mesh
final List<Triangle> tris = DelaunayTriangulator.triangulate(pts);
triangleDefs = new int[tris.size()][3];
for (int i = 0; i < tris.size(); i++) {
final Triangle t = tris.get(i);
for (int j = 0; j < 3; j++) {
// store each triangle as indices into the point list
triangleDefs[i][j] = pts.indexOf(t.vertices[j]);
}
}
// factorise the measurement matrix, enforce the metric constraint and
// align the reconstruction with the first frame
final Matrix w = buildMeasurementMatrix(features);
factorise(w);
applyMetricConstraint();
alignWithFrame(0);
}
代码示例来源:origin: openimaj/openimaj
I image = video.getCurrentFrame();
I keyframeImage = image.clone();
代码示例来源:origin: org.openimaj/core-video
this.currentFrame = this.video.getCurrentFrame();
this.currentFrameTimestamp = this.video.getTimeStamp();
代码示例来源:origin: openimaj/openimaj
this.currentFrame = this.video.getCurrentFrame();
this.currentFrameTimestamp = this.video.getTimeStamp();
代码示例来源:origin: openimaj/openimaj
/**
 * Per-frame callback: match the model image against the new frame and
 * publish the matched region (or clear it when no match is found).
 *
 * @param display the display that was updated (state is read from fields)
 */
@Override
public void afterUpdate(VideoDisplay<MBFImage> display) {
// only run while RANSAC reading is enabled, a matcher exists, and playback is live
if (ransacReader && matcher != null && !videoFrame.isPaused()) {
final MBFImage capImg = videoFrame.getVideo().getCurrentFrame();
// features are extracted from an NTSC intensity (single-band) conversion of the frame
final LocalFeatureList<Keypoint> kpl = engine.findFeatures(Transforms.calculateIntensityNTSC(capImg));
if (matcher.findMatches(kpl)) {
try {
// project the model image bounds into frame coordinates using the
// inverse of the fitted model transform
final Polygon poly = modelImage.getBounds()
.transform(((MatrixTransformProvider) matcher.getModel()).getTransform().inverse())
.asPolygon();
this.vwv.targetArea = poly;
} catch (final RuntimeException e) {
// best-effort: a degenerate transform cannot be inverted, so the
// previous target area is kept. NOTE(review): confirm swallowing
// the exception here is intentional.
}
} else {
// no match this frame — clear the published target area
this.vwv.targetArea = null;
}
}
}
代码示例来源:origin: org.openimaj/sandbox
/**
 * Per-frame callback: match the model image against the new frame and
 * publish the matched region (or clear it when no match is found).
 *
 * @param display the display that was updated (state is read from fields)
 */
@Override
public void afterUpdate(VideoDisplay<MBFImage> display) {
// only run while RANSAC reading is enabled, a matcher exists, and playback is live
if (ransacReader && matcher != null && !videoFrame.isPaused()) {
final MBFImage capImg = videoFrame.getVideo().getCurrentFrame();
// features are extracted from an NTSC intensity (single-band) conversion of the frame
final LocalFeatureList<Keypoint> kpl = engine.findFeatures(Transforms.calculateIntensityNTSC(capImg));
if (matcher.findMatches(kpl)) {
try {
// project the model image bounds into frame coordinates using the
// inverse of the fitted model transform
final Polygon poly = modelImage.getBounds()
.transform(((MatrixTransformProvider) matcher.getModel()).getTransform().inverse())
.asPolygon();
this.vwv.targetArea = poly;
} catch (final RuntimeException e) {
// best-effort: a degenerate transform cannot be inverted, so the
// previous target area is kept. NOTE(review): confirm swallowing
// the exception here is intentional.
}
} else {
// no match this frame — clear the published target area
this.vwv.targetArea = null;
}
}
}
代码示例来源:origin: openimaj/openimaj
/**
 * Per-frame callback: find keypoint matches between the model image and the
 * new frame, render the projected model bounds when a match is found, and
 * always update the match visualisation panel.
 *
 * @param display the display that was updated (state is read from fields)
 */
@Override
public synchronized void afterUpdate(final VideoDisplay<MBFImage> display) {
    // Guard clause: nothing to do until a matcher is configured, or while paused.
    if (this.matcher == null || this.videoFrame.isPaused())
        return;

    final MBFImage capImg = this.videoFrame.getVideo().getCurrentFrame();
    final LocalFeatureList<Keypoint> kpl = this.engine.findFeatures(capImg);
    final MBFImageRenderer renderer = capImg.createRenderer();
    renderer.drawPoints(kpl, RGBColour.MAGENTA, 3);

    if (this.matcher.findMatches(kpl)) {
        try {
            // Project the model image bounds into frame coordinates via the
            // inverse of the fitted model transform and render them.
            final Matrix boundsToPoly = ((MatrixTransformProvider) this.matcher.getModel()).getTransform()
                    .inverse();
            this.renderMode.render(renderer, boundsToPoly, this.modelImage.getBounds());
        } catch (final RuntimeException e) {
            // Best-effort: a degenerate transform cannot be inverted; still
            // fall through to draw the match visualisation below.
        }
    }

    // DEDUPLICATED: both branches of the original if/else built the identical
    // visualisation expression, so it is now computed once unconditionally.
    final MBFImage matches = MatchingUtilities
            .drawMatches(this.modelImage, capImg, this.matcher.getMatches(), RGBColour.RED);

    this.matchPanel.setPreferredSize(this.matchPanel.getSize());
    this.matchFrame.setImage(ImageUtilities.createBufferedImageForDisplay(matches));
}
代码示例来源:origin: org.openimaj/demos
/**
 * Per-frame callback: find keypoint matches between the model image and the
 * new frame, render the projected model bounds when a match is found, and
 * update the match visualisation panel.
 *
 * @param display the display that was updated (state is read from fields)
 */
@Override
public synchronized void afterUpdate(final VideoDisplay<MBFImage> display) {
// only run when a matcher is configured and playback is live
if (this.matcher != null && !this.videoFrame.isPaused()) {
final MBFImage capImg = this.videoFrame.getVideo().getCurrentFrame();
final LocalFeatureList<Keypoint> kpl = this.engine.findFeatures(capImg);
// draw the detected keypoints directly onto the captured frame
final MBFImageRenderer renderer = capImg.createRenderer();
renderer.drawPoints(kpl, RGBColour.MAGENTA, 3);
MBFImage matches;
if (this.matcher.findMatches(kpl)) {
try {
// Shape sh =
// modelImage.getBounds().transform(((MatrixTransformProvider)
// matcher.getModel()).getTransform().inverse());
// renderer.drawShape(sh, 3, RGBColour.BLUE);
// project the model bounds into frame coordinates and render them
final Matrix boundsToPoly = ((MatrixTransformProvider) this.matcher.getModel()).getTransform()
.inverse();
this.renderMode.render(renderer, boundsToPoly, this.modelImage.getBounds());
} catch (final RuntimeException e) {
// best-effort: a degenerate transform cannot be inverted; the match
// visualisation below is still produced
}
matches = MatchingUtilities
.drawMatches(this.modelImage, capImg, this.matcher.getMatches(), RGBColour.RED);
} else {
// NOTE(review): this branch is identical to the one above — the
// visualisation could be computed once after the if/else
matches = MatchingUtilities
.drawMatches(this.modelImage, capImg, this.matcher.getMatches(), RGBColour.RED);
}
this.matchPanel.setPreferredSize(this.matchPanel.getSize());
this.matchFrame.setImage(ImageUtilities.createBufferedImageForDisplay(matches));
}
}
代码示例来源:origin: openimaj/openimaj
/**
 * Per-frame callback: find interest-point matches between the model image
 * and the new frame, draw the projected model bounds when matched, and show
 * the match visualisation in a (lazily created) display window.
 *
 * @param display the display that was updated (state is read from fields)
 */
@Override
public void afterUpdate(VideoDisplay<MBFImage> display) {
// only run when a matcher is configured and playback is live
if (matcher != null && !videoFrame.isPaused()) {
final MBFImage capImg = videoFrame.getVideo().getCurrentFrame();
// features are extracted from an NTSC intensity (single-band) conversion of the frame
final LocalFeatureList<InterestPointKeypoint<InterestPointData>> kpl = engine.findFeatures(Transforms
.calculateIntensityNTSC(capImg));
// draw the detected interest points directly onto the captured frame
final MBFImageRenderer renderer = capImg.createRenderer();
renderer.drawPoints(kpl, RGBColour.MAGENTA, 3);
MBFImage matches;
if (matcher.findMatches(kpl)) {
try {
// project the model bounds into frame coordinates and outline them
final Shape sh = modelImage.getBounds().transform(
((MatrixTransformProvider) matcher.getModel()).getTransform().inverse());
renderer.drawShape(sh, 3, RGBColour.BLUE);
} catch (final RuntimeException e) {
// best-effort: a degenerate transform cannot be inverted; the match
// visualisation below is still produced
}
matches = MatchingUtilities.drawMatches(modelImage, capImg, matcher.getMatches(), RGBColour.RED);
} else {
// no matches: draw the side-by-side images with no match lines
matches = MatchingUtilities.drawMatches(modelImage, capImg, null, RGBColour.RED);
}
if (matchFrame == null) {
// first time through: create the matches window just below this one
matchFrame = DisplayUtilities.display(matches, "matches");
matchFrame.addKeyListener(this);
final Point pt = matchFrame.getLocation();
matchFrame.setLocation(pt.x, pt.y + matchFrame.getHeight());
} else {
// subsequently, just refresh the existing window
DisplayUtilities.display(matches, matchFrame);
}
}
}
代码示例来源:origin: openimaj/openimaj
MBFImage image = video.getCurrentFrame();
代码示例来源:origin: org.openimaj/sandbox
MBFImage image = video.getCurrentFrame();
代码示例来源:origin: org.openimaj/sandbox
/**
 * Per-frame callback: find interest-point matches between the model image
 * and the new frame, draw the projected model bounds when matched, and show
 * the match visualisation in a (lazily created) display window.
 *
 * @param display the display that was updated (state is read from fields)
 */
@Override
public void afterUpdate(VideoDisplay<MBFImage> display) {
// only run when a matcher is configured and playback is live
if (matcher != null && !videoFrame.isPaused()) {
final MBFImage capImg = videoFrame.getVideo().getCurrentFrame();
// features are extracted from an NTSC intensity (single-band) conversion of the frame
final LocalFeatureList<InterestPointKeypoint<InterestPointData>> kpl = engine.findFeatures(Transforms
.calculateIntensityNTSC(capImg));
// draw the detected interest points directly onto the captured frame
final MBFImageRenderer renderer = capImg.createRenderer();
renderer.drawPoints(kpl, RGBColour.MAGENTA, 3);
MBFImage matches;
if (matcher.findMatches(kpl)) {
try {
// project the model bounds into frame coordinates and outline them
final Shape sh = modelImage.getBounds().transform(
((MatrixTransformProvider) matcher.getModel()).getTransform().inverse());
renderer.drawShape(sh, 3, RGBColour.BLUE);
} catch (final RuntimeException e) {
// best-effort: a degenerate transform cannot be inverted; the match
// visualisation below is still produced
}
matches = MatchingUtilities.drawMatches(modelImage, capImg, matcher.getMatches(), RGBColour.RED);
} else {
// no matches: draw the side-by-side images with no match lines
matches = MatchingUtilities.drawMatches(modelImage, capImg, null, RGBColour.RED);
}
if (matchFrame == null) {
// first time through: create the matches window just below this one
matchFrame = DisplayUtilities.display(matches, "matches");
matchFrame.addKeyListener(this);
final Point pt = matchFrame.getLocation();
matchFrame.setLocation(pt.x, pt.y + matchFrame.getHeight());
} else {
// subsequently, just refresh the existing window
DisplayUtilities.display(matches, matchFrame);
}
}
}
内容来源于网络,如有侵权,请联系作者删除!