This article collects a number of Java code examples for the org.openimaj.video.Video class and shows how the class is used in practice. The examples are drawn from selected open-source projects found on GitHub, Stack Overflow, Maven and similar platforms, so they should serve as useful references. Details of the Video class are as follows:
Package: org.openimaj.video
Class name: Video
Description: Abstract base class for videos.
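Before the collected snippets, here is a minimal, self-contained sketch of opening a video and iterating over its frames. It assumes the xuggle-video module is on the classpath and uses XuggleVideo as a concrete Video<MBFImage> implementation; the file path is a placeholder.

import org.openimaj.image.MBFImage;
import org.openimaj.video.Video;
import org.openimaj.video.xuggle.XuggleVideo;

public class VideoIterationSketch {
    public static void main(String[] args) {
        // Open a video file (placeholder path); XuggleVideo decodes it frame by frame
        final Video<MBFImage> video = new XuggleVideo("/path/to/video.mp4");

        // Video implements Iterable, so frames can be consumed with a for-each loop
        for (final MBFImage frame : video) {
            System.out.println("frame " + video.getCurrentFrameIndex()
                    + " (" + frame.getWidth() + "x" + frame.getHeight()
                    + ") @ " + video.getFPS() + " fps");
        }

        video.close();
    }
}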
Code example source: openimaj/openimaj

/**
 * @return the current frame
 */
public MBFImage getCurrentFrame() {
    if (display != null)
        return display.getVideo().getCurrentFrame();
    return null;
}
}
Code example source: openimaj/openimaj

/**
 * Cache the given time range from the given video.
 *
 * @param <I> The type of the video frames
 * @param video The video to cache
 * @param start The start of the video to cache
 * @param end The end of the video to cache
 * @return A {@link VideoCache}
 */
public static <I extends Image<?, I>> VideoCache<I> cacheVideo( Video<I> video,
        VideoTimecode start, VideoTimecode end )
{
    VideoCache<I> vc = new VideoCache<I>( video.getWidth(),
            video.getHeight(), video.getFPS() );
    video.setCurrentFrameIndex( start.getFrameNumber() );
    while( video.hasNextFrame() &&
            video.getCurrentFrameIndex() < end.getFrameNumber() )
        vc.addFrame( video.getNextFrame().clone() );
    return vc;
}
}
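A hedged usage sketch for the helper above: it assumes the xuggle-video module for the concrete video source and caches roughly the first ten seconds of the clip; the path and range are placeholders.

import org.openimaj.image.MBFImage;
import org.openimaj.video.Video;
import org.openimaj.video.VideoCache;
import org.openimaj.video.timecode.HrsMinSecFrameTimecode;
import org.openimaj.video.timecode.VideoTimecode;
import org.openimaj.video.xuggle.XuggleVideo;

public class CacheRangeSketch {
    public static void main(String[] args) {
        final Video<MBFImage> video = new XuggleVideo("/path/to/video.mp4");

        // Timecodes are built from a frame number and the frame rate
        final VideoTimecode start = new HrsMinSecFrameTimecode(0, video.getFPS());
        final VideoTimecode end =
                new HrsMinSecFrameTimecode((long) (10 * video.getFPS()), video.getFPS());

        // Cache the selected range of frames in memory
        final VideoCache<MBFImage> cache = VideoCache.cacheVideo(video, start, end);
        System.out.println("cached range " + start + " -> " + end);
    }
}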
Code example source: openimaj/openimaj

/**
 * Set the position of the play head to the given percentage. If the video
 * is a live video this method will have no effect.
 *
 * @param pc
 *            The percentage to set the play head to.
 */
public void setPosition(final double pc) {
    if (pc > 100 || pc < 0)
        throw new IllegalArgumentException("Percentage must be less than " +
                "or equal to 100 and greater than or equal to 0. Given " + pc);

    // If it's a live video we cannot do anything
    if (this.video.countFrames() == -1)
        return;

    // We have to seek to a millisecond position, so we work out the length
    // of the video in ms (frames divided by frames-per-second, times 1000)
    // and then take the requested percentage of it
    final double nMillis = this.video.countFrames() / this.video.getFPS() * 1000d;
    final long msPos = (long) (nMillis * pc / 100d);
    System.out.println("msPos = " + msPos + " (" + pc + "%)");
    this.seek(msPos);
}
Code example source: openimaj/openimaj

/**
 * Returns the position of the play head in this video as a percentage of
 * the length of the video. If the video is a live video, this method will
 * always return 0.
 *
 * @return The percentage through the video.
 */
public double getPosition() {
    final long nFrames = this.video.countFrames();
    if (nFrames == -1)
        return 0;
    return this.video.getCurrentFrameIndex() * 100d / nFrames;
}
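A hedged usage sketch for the two play-head methods above: it assumes they live on VideoDisplay (as in OpenIMAJ's display utilities), uses XuggleVideo as the source, and simply jumps to the middle of the clip; the path and percentage are placeholders.

import org.openimaj.image.MBFImage;
import org.openimaj.video.Video;
import org.openimaj.video.VideoDisplay;
import org.openimaj.video.xuggle.XuggleVideo;

public class PlayHeadSketch {
    public static void main(String[] args) {
        final Video<MBFImage> video = new XuggleVideo("/path/to/video.mp4");

        // Create a display that plays the video in a window
        final VideoDisplay<MBFImage> display = VideoDisplay.createVideoDisplay(video);

        // Jump to the middle of the clip, then read the position back
        display.setPosition(50);
        System.out.println("play head at " + display.getPosition() + "%");
    }
}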
Code example source: openimaj/openimaj

/**
 * Set the current frame index (i.e. skips to a certain frame). If your
 * video subclass can implement this in a cleverer way, then override this
 * method; otherwise this method will simply grab frames until it gets to
 * the given frame index. This method is naive and may take some time, as
 * each frame will be decoded by the video decoder.
 *
 * @param newFrame
 *            the new index
 */
public synchronized void setCurrentFrameIndex(long newFrame)
{
    // We're already at the frame?
    if (this.currentFrame == newFrame)
        return;

    // If we're ahead of where we want to be, rewind to the start
    if (this.currentFrame > newFrame)
    {
        this.reset();
    }

    // Grab frames until we reach the new frame counter
    // (or until the getNextFrame() method returns null)
    while (this.currentFrame < newFrame && getNextFrame() != null)
        ;
}
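A hedged usage sketch of seeking by frame index with the method above, again assuming XuggleVideo as the source; the target frame number and path are placeholders.

import org.openimaj.image.MBFImage;
import org.openimaj.video.Video;
import org.openimaj.video.xuggle.XuggleVideo;

public class SeekByFrameSketch {
    public static void main(String[] args) {
        final Video<MBFImage> video = new XuggleVideo("/path/to/video.mp4");

        // Naive seek: frames up to index 150 are decoded and discarded
        video.setCurrentFrameIndex(150);

        final MBFImage frame = video.getCurrentFrame();
        System.out.println("now at frame " + video.getCurrentFrameIndex()
                + ", frame is " + (frame == null ? "null"
                        : frame.getWidth() + "x" + frame.getHeight()));

        video.close();
    }
}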
Code example source: openimaj/openimaj

public TLDMain(Video<MBFImage> imageSource){
    source = imageSource;
    tld = new TLD(imageSource.getWidth(), imageSource.getHeight());
}
void doWork() throws FileNotFoundException {
Code example source: openimaj/openimaj
keyframeTime.getFrameNumber(), video.getFPS() );
HrsMinSecFrameTimecode endTime = new HrsMinSecFrameTimecode(
keyframeTime.getFrameNumber(), video.getFPS() );
video.setCurrentFrameIndex( startTime.getFrameNumber() );
I image = video.getCurrentFrame();
I keyframeImage = image.clone();
image = video.getNextFrame();
endTime.setFrameNumber( video.getCurrentFrameIndex() );
int nFramesToJumpBack = (int)video.getFPS()*2; // We'll go 2 seconds at a time
video.getFPS() );
Code example source: openimaj/openimaj

/**
 * Cache the whole of the given video.
 *
 * @param <I> Type of {@link Image}
 * @param video The video to cache
 * @return A {@link VideoCache}
 */
public static <I extends Image<?, I>> VideoCache<I> cacheVideo( Video<I> video )
{
    VideoCache<I> vc = new VideoCache<I>( video.getWidth(),
            video.getHeight(), video.getFPS() );
    video.reset();
    while( video.hasNextFrame() )
        vc.addFrame( video.getNextFrame().clone() );
    return vc;
}
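Usage of this whole-video overload mirrors the ranged version shown earlier; a minimal sketch under the same XuggleVideo assumption:

import org.openimaj.image.MBFImage;
import org.openimaj.video.Video;
import org.openimaj.video.VideoCache;
import org.openimaj.video.xuggle.XuggleVideo;

public class CacheWholeVideoSketch {
    public static void main(String[] args) {
        // Caching every frame keeps them all in memory, so only do this for short clips
        final Video<MBFImage> video = new XuggleVideo("/path/to/short-clip.mp4");
        final VideoCache<MBFImage> cache = VideoCache.cacheVideo(video);
        System.out.println("finished caching: " + cache);
    }
}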
Code example source: org.openimaj/core-video
this.currentFrame = this.video.getCurrentFrame();
if (this.video.countFrames() != -1 && this.currentFrame != null) {
final long t = this.timeKeeper.getTime().getTimecodeInMilliseconds();
nextFrame = this.video.getNextFrame();
nextFrameTimestamp = this.video.getTimeStamp();
nextFrame = this.video.getNextFrame();
nextFrameTimestamp = this.video.getTimeStamp();
if (this.currentFrame == null && (this.timeKeeper instanceof VideoDisplay.BasicVideoTimeKeeper))
((VideoDisplay.BasicVideoTimeKeeper) this.timeKeeper).setTimeOffset(-nextFrameTimestamp);
roughSleepTime = (long) (1000 / this.video.getFPS()) - tolerance;
Code example source: org.openimaj/sandbox

FeatureTable trackFeatures(Video<FImage> video, int nFeatures, boolean replace) {
    final TrackingContext tc = new TrackingContext();
    final FeatureList fl = new FeatureList(nFeatures);
    final FeatureTable ft = new FeatureTable(nFeatures);
    final KLTTracker tracker = new KLTTracker(tc, fl);

    tc.setSequentialMode(true);
    tc.setWriteInternalImages(false);
    tc.setAffineConsistencyCheck(-1);

    FImage prev = video.getCurrentFrame();
    tracker.selectGoodFeatures(prev);
    ft.storeFeatureList(fl, 0);

    while (video.hasNextFrame()) {
        final FImage next = video.getNextFrame();

        tracker.trackFeatures(prev, next);

        if (replace)
            tracker.replaceLostFeatures(next);

        prev = next;
        ft.storeFeatureList(fl, video.getCurrentFrameIndex());
    }

    return ft;
}
Code example source: openimaj/openimaj

/**
 * Process the video and provide a URI to which all relations will be linked.
 *
 * @param v The video to process
 * @param uri The URI of the video
 */
public void processVideo( final Video<MBFImage> v, final String uri )
{
    this.videoURI = uri;

    for (final MBFImage frame : v)
        this.processFrame( frame,
                new HrsMinSecFrameTimecode( v.getCurrentFrameIndex(), v.getFPS() ) );
}
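The method above relies on two things the earlier snippets also use: Video is Iterable over its frames, and a HrsMinSecFrameTimecode can be built from the current frame index and the frame rate. A standalone, hedged sketch of that pattern (XuggleVideo assumed, path is a placeholder):

import org.openimaj.image.MBFImage;
import org.openimaj.video.Video;
import org.openimaj.video.timecode.HrsMinSecFrameTimecode;
import org.openimaj.video.xuggle.XuggleVideo;

public class FrameTimecodeSketch {
    public static void main(String[] args) {
        final Video<MBFImage> video = new XuggleVideo("/path/to/video.mp4");

        for (final MBFImage frame : video) {
            // Build a human-readable timecode for the frame we just read
            final HrsMinSecFrameTimecode tc =
                    new HrsMinSecFrameTimecode(video.getCurrentFrameIndex(), video.getFPS());
            System.out.println(tc + " -> " + frame.getWidth() + "x" + frame.getHeight());
        }

        video.close();
    }
}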
Code example source: org.openimaj/sandbox

@Override
public List<Identifiable> getIdentifiers()
{
    final List<Identifiable> r = new ArrayList<Identifiable>();
    r.add(new IdentifiableVideoFrame(video.getCurrentFrame(),
            new HrsMinSecFrameTimecode(video.getCurrentFrameIndex(),
                    video.getFPS())));
    return r;
}
}, sp);
Code example source: openimaj/openimaj

@Override
public double getFPS()
{
    return video.getFPS();
}
Code example source: openimaj/openimaj
nFrames = video.countFrames();
video.setCurrentFrameIndex( keyframeTime.getFrameNumber() );
MBFImage image = video.getCurrentFrame();
Code example source: openimaj/openimaj
this.currentFrame = this.video.getCurrentFrame();
this.currentFrameTimestamp = this.video.getTimeStamp();
this.audioPlayer.reset();
this.video.reset();
this.currentFrameTimestamp = 0;
break;
this.video.close();
break;
Code example source: openimaj/openimaj

@Override
public ImageCollectionEntry<T> next() {
    final T image = video.getNextFrame();
    final ImageCollectionEntry<T> entry = new ImageCollectionEntry<T>();
    entry.meta = new HashMap<String, String>();
    entry.meta.put("timestamp", "" + this.frameCount / this.video.getFPS());
    entry.accepted = selection.acceptEntry(image);
    entry.image = image;

    this.frameCount++;
    // Hack to stop the iterator at the end until hasNext() works properly
    if (image == null)
        frameCount = -1;
    return entry;
}
Code example source: openimaj/openimaj

@Override
public T next() {
    return video.getNextFrame();
}
Code example source: openimaj/openimaj

@Override
public long countFrames()
{
    return video.countFrames();
}
Code example source: openimaj/openimaj

/**
 * @param video The video to visualise
 */
protected VideoObjectVisualisation( final Video<MBFImage> video )
{
    super( video );
    this.frameHeight = video.getHeight();
}
Code example source: openimaj/openimaj

@Override
public boolean hasNextFrame()
{
    return video.hasNextFrame();
}
This content was collected from the internet; if it infringes any rights, please contact the author to have it removed.