Diffstat (limited to 'gtk2_ardour/video_timeline.cc')
-rw-r--r--  gtk2_ardour/video_timeline.cc  118
1 file changed, 59 insertions(+), 59 deletions(-)
diff --git a/gtk2_ardour/video_timeline.cc b/gtk2_ardour/video_timeline.cc
index 83d85f84fc..e336d59534 100644
--- a/gtk2_ardour/video_timeline.cc
+++ b/gtk2_ardour/video_timeline.cc
@@ -62,7 +62,7 @@ VideoTimeLine::VideoTimeLine (PublicEditor *ed, ArdourCanvas::Container *vbg, in
video_filename = "";
local_file = true;
video_file_fps = 25.0;
- flush_frames = false;
+ flush_samples = false;
vmonitor=0;
reopen_vmonitor=false;
find_xjadeo();
@@ -119,7 +119,7 @@ VideoTimeLine::close_session ()
sessionsave.disconnect();
close_video_monitor();
- remove_frames();
+ remove_samples();
video_filename = "";
video_duration = 0;
GuiUpdate("set-xjadeo-sensitive-off");
@@ -231,24 +231,24 @@ VideoTimeLine::get_state ()
}
void
-VideoTimeLine::remove_frames ()
+VideoTimeLine::remove_samples ()
{
- for (VideoFrames::iterator i = video_frames.begin(); i != video_frames.end(); ++i ) {
- VideoImageFrame *frame = (*i);
- delete frame;
+ for (VideoSamples::iterator i = video_frames.begin(); i != video_frames.end(); ++i ) {
+ VideoImageFrame *sample = (*i);
+ delete sample;
(*i) = 0;
}
video_frames.clear();
}
VideoImageFrame *
-VideoTimeLine::get_video_frame (framepos_t vfn, int cut, int rightend)
+VideoTimeLine::get_video_frame (samplepos_t vfn, int cut, int rightend)
{
if (vfn==0) cut=0;
- for (VideoFrames::iterator i = video_frames.begin(); i != video_frames.end(); ++i) {
- VideoImageFrame *frame = (*i);
- if (abs(frame->get_video_frame_number()-vfn)<=cut
- && frame->get_rightend() == rightend) { return frame; }
+ for (VideoSamples::iterator i = video_frames.begin(); i != video_frames.end(); ++i) {
+ VideoImageFrame *sample = (*i);
+ if (abs(sample->get_video_frame_number()-vfn)<=cut
+ && sample->get_rightend() == rightend) { return sample; }
}
return 0;
}
@@ -267,9 +267,9 @@ VideoTimeLine::get_apv()
}
if (_session->config.get_videotimeline_pullup()) {
- apv = _session->frame_rate();
+ apv = _session->sample_rate();
} else {
- apv = _session->nominal_frame_rate();
+ apv = _session->nominal_sample_rate();
}
if (_session->config.get_use_video_file_fps()) {
apv /= video_file_fps;
@@ -291,30 +291,30 @@ VideoTimeLine::update_video_timeline()
}
const double samples_per_pixel = editor->get_current_zoom();
- const framepos_t leftmost_sample = editor->leftmost_sample();
+ const samplepos_t leftmost_sample = editor->leftmost_sample();
/* Outline:
- * 1) calculate how many frames there should be in current zoom (plus 1 page on each side)
- * 2) calculate first frame and distance between video-frames (according to zoom)
- * 3) destroy/add frames
- * 4) reposition existing frames
- * 5) assign framenumber to frames -> request/decode video.
+ * 1) calculate how many samples there should be in current zoom (plus 1 page on each side)
+ * 2) calculate first sample and distance between video-frames (according to zoom)
+ * 3) destroy/add samples
+ * 4) reposition existing samples
+ * 5) assign framenumber to samples -> request/decode video.
*/
/* video-file and session properties */
double display_vframe_width; /* unit: pixels ; width of one thumbnail in the timeline */
float apv; /* audio samples per video frame; */
- framepos_t leftmost_video_frame; /* unit: video-frame number ; temporary var -> vtl_start */
+ samplepos_t leftmost_video_frame; /* unit: video-frame number ; temporary var -> vtl_start */
/* variables needed to render videotimeline -- what needs to computed first */
- framepos_t vtl_start; /* unit: audio-samples ; first displayed video-frame */
- framepos_t vtl_dist; /* unit: audio-samples ; distance between displayed video-frames */
- unsigned int visible_video_frames; /* number of frames that fit on current canvas */
+ samplepos_t vtl_start; /* unit: audio-samples ; first displayed video-frame */
+ samplepos_t vtl_dist; /* unit: audio-samples ; distance between displayed video-frames */
+ unsigned int visible_video_frames; /* number of samples that fit on current canvas */
if (_session->config.get_videotimeline_pullup()) {
- apv = _session->frame_rate();
+ apv = _session->sample_rate();
} else {
- apv = _session->nominal_frame_rate();
+ apv = _session->nominal_sample_rate();
}
if (_session->config.get_use_video_file_fps()) {
apv /= video_file_fps;
@@ -338,51 +338,51 @@ VideoTimeLine::update_video_timeline()
leftmost_video_frame = floor (floor((long double)(leftmost_sample - video_start_offset - video_offset ) / vtl_dist) * vtl_dist / apv);
vtl_start = rint (video_offset + video_start_offset + leftmost_video_frame * apv);
- visible_video_frames = 2 + ceil((double)editor->current_page_samples() / vtl_dist); /* +2 left+right partial frames */
+ visible_video_frames = 2 + ceil((double)editor->current_page_samples() / vtl_dist); /* +2 left+right partial samples */
/* expand timeline (cache next/prev page images) */
vtl_start -= visible_video_frames * vtl_dist;
visible_video_frames *=3;
- /* don't request frames that are too far to the right */
+ /* don't request samples that are too far to the right */
if (vtl_start < video_offset) {
visible_video_frames = std::max((double)0.0, (double)visible_video_frames + ceil((double)(vtl_start - video_offset)/vtl_dist));
vtl_start = video_offset;
}
/* apply video-file constraints
- * (first frame in video is at video_start_offset) */
+ * (first sample in video is at video_start_offset) */
if (vtl_start > video_start_offset + video_duration + video_offset ) {
visible_video_frames = 0;
}
/* trim end.
- * end = position on timeline (video-offset) minus video-file's first frame position
+ * end = position on timeline (video-offset) minus video-file's first sample position
* TODO optimize: compute rather than iterate */
while (visible_video_frames > 0 && vtl_start + (visible_video_frames-1) * vtl_dist >= video_start_offset + video_duration + video_offset) {
--visible_video_frames;
}
- if (flush_frames) {
- remove_frames();
- flush_frames=false;
+ if (flush_samples) {
+ remove_samples();
+ flush_samples=false;
}
while (video_frames.size() < visible_video_frames) {
- VideoImageFrame *frame;
- frame = new VideoImageFrame(*editor, *videotl_group, display_vframe_width, bar_height, video_server_url, translated_filename());
- frame->ImgChanged.connect (*this, invalidator (*this), boost::bind (&PublicEditor::queue_visual_videotimeline_update, editor), gui_context());
- video_frames.push_back(frame);
+ VideoImageFrame *sample;
+ sample = new VideoImageFrame(*editor, *videotl_group, display_vframe_width, bar_height, video_server_url, translated_filename());
+ sample->ImgChanged.connect (*this, invalidator (*this), boost::bind (&PublicEditor::queue_visual_videotimeline_update, editor), gui_context());
+ video_frames.push_back(sample);
}
- VideoFrames outdated_video_frames;
+ VideoSamples outdated_video_frames;
std::list<int> remaining;
outdated_video_frames = video_frames;
#if 1
- /* when zoomed out, ignore shifts by +-1 frame
+ /* when zoomed out, ignore shifts by +-1 sample
* which can occur due to rounding errors when
- * scrolling to a new leftmost-audio frame.
+ * scrolling to a new leftmost-audio sample.
*/
int cut =1;
if (vtl_dist/apv < 3.0) cut =0;
@@ -391,40 +391,40 @@ VideoTimeLine::update_video_timeline()
#endif
for (unsigned int vfcount=0; vfcount < visible_video_frames; ++vfcount){
- framepos_t vfpos = vtl_start + vfcount * vtl_dist; /* unit: audio-frames */
- framepos_t vframeno = rint ( (vfpos - video_offset) / apv); /* unit: video-frames */
- vfpos = (vframeno * apv ) + video_offset; /* audio-frame corresponding to /rounded/ video-frame */
+ samplepos_t vfpos = vtl_start + vfcount * vtl_dist; /* unit: audio-samples */
+ samplepos_t vframeno = rint ( (vfpos - video_offset) / apv); /* unit: video-frames */
+ vfpos = (vframeno * apv ) + video_offset; /* audio-sample corresponding to /rounded/ video-frame */
int rightend = -1; /* unit: pixels */
if (vfpos + vtl_dist > video_start_offset + video_duration + video_offset) {
rightend = display_vframe_width * (video_start_offset + video_duration + video_offset - vfpos) / vtl_dist;
//printf("lf(e): %lu\n", vframeno); // XXX
}
- VideoImageFrame * frame = get_video_frame(vframeno, cut, rightend);
- if (frame) {
- frame->set_position(vfpos);
- outdated_video_frames.remove(frame);
+ VideoImageFrame * sample = get_video_frame(vframeno, cut, rightend);
+ if (sample) {
+ sample->set_position(vfpos);
+ outdated_video_frames.remove(sample);
} else {
remaining.push_back(vfcount);
}
}
- for (VideoFrames::iterator i = outdated_video_frames.begin(); i != outdated_video_frames.end(); ++i ) {
- VideoImageFrame *frame = (*i);
+ for (VideoSamples::iterator i = outdated_video_frames.begin(); i != outdated_video_frames.end(); ++i ) {
+ VideoImageFrame *sample = (*i);
if (remaining.empty()) {
- frame->set_position(-2 * vtl_dist + leftmost_sample); /* move off screen */
+ sample->set_position(-2 * vtl_dist + leftmost_sample); /* move off screen */
} else {
int vfcount=remaining.front();
remaining.pop_front();
- framepos_t vfpos = vtl_start + vfcount * vtl_dist; /* unit: audio-frames */
- framepos_t vframeno = rint ((vfpos - video_offset) / apv); /* unit: video-frames */
+ samplepos_t vfpos = vtl_start + vfcount * vtl_dist; /* unit: audio-samples */
+ samplepos_t vframeno = rint ((vfpos - video_offset) / apv); /* unit: video-frames */
int rightend = -1; /* unit: pixels */
if (vfpos + vtl_dist > video_start_offset + video_duration + video_offset) {
rightend = display_vframe_width * (video_start_offset + video_duration + video_offset - vfpos) / vtl_dist;
//printf("lf(n): %lu\n", vframeno); // XXX
}
- frame->set_position(vfpos);
- frame->set_videoframe(vframeno, rightend);
+ sample->set_position(vfpos);
+ sample->set_videoframe(vframeno, rightend);
}
}
}
@@ -463,8 +463,8 @@ VideoTimeLine::video_file_info (std::string filename, bool local)
GuiUpdate("video-unavailable");
return false;
}
- video_duration = _duration * _session->nominal_frame_rate() / video_file_fps;
- video_start_offset = _start_offset * _session->nominal_frame_rate();
+ video_duration = _duration * _session->nominal_sample_rate() / video_file_fps;
+ video_start_offset = _start_offset * _session->nominal_sample_rate();
if (auto_set_session_fps && video_file_fps != _session->timecode_frames_per_second()) {
switch ((int)floorf(video_file_fps*1000.0)) {
@@ -609,9 +609,9 @@ VideoTimeLine::gui_update(std::string const & t) {
editor->toggle_xjadeo_viewoption(2, 1);
} else if (t == "xjadeo-window-osd-timecode-off") {
editor->toggle_xjadeo_viewoption(2, 0);
- } else if (t == "xjadeo-window-osd-frame-on") {
+ } else if (t == "xjadeo-window-osd-sample-on") {
editor->toggle_xjadeo_viewoption(3, 1);
- } else if (t == "xjadeo-window-osd-frame-off") {
+ } else if (t == "xjadeo-window-osd-sample-off") {
editor->toggle_xjadeo_viewoption(3, 0);
} else if (t == "xjadeo-window-osd-box-on") {
editor->toggle_xjadeo_viewoption(4, 1);
@@ -650,7 +650,7 @@ VideoTimeLine::vmon_update () {
void
VideoTimeLine::flush_local_cache () {
- flush_frames = true;
+ flush_samples = true;
vmon_update();
}
@@ -915,7 +915,7 @@ VideoTimeLine::terminated_video_monitor () {
}
void
-VideoTimeLine::manual_seek_video_monitor (framepos_t pos)
+VideoTimeLine::manual_seek_video_monitor (samplepos_t pos)
{
if (!vmonitor) { return; }
if (!vmonitor->is_started()) { return; }
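
The hunks in get_apv() and update_video_timeline() above all revolve around one piece of arithmetic: converting between timeline positions (now samplepos_t) and video-frame numbers via apv, the number of audio samples per video frame. The standalone sketch below restates that mapping outside the class for reference; the 48 kHz session rate, the 25 fps file rate and the offset value are assumptions chosen only for illustration and are not taken from this commit.

// Standalone sketch (not part of the commit) of the sample <-> video-frame
// arithmetic that update_video_timeline() relies on after the rename.
#include <cmath>
#include <cstdint>
#include <cstdio>

typedef int64_t samplepos_t; /* as used throughout the renamed API */

int main ()
{
	const double      sample_rate    = 48000.0; /* assumed; _session->sample_rate() */
	const double      video_file_fps = 25.0;    /* assumed file fps */
	const samplepos_t video_offset   = 12345;   /* assumed position of the video on the timeline */

	/* audio samples per video frame, cf. get_apv() with use_video_file_fps enabled */
	const double apv = sample_rate / video_file_fps; /* 1920 samples per frame */

	/* map a timeline position to the nearest video frame, cf. update_video_timeline() */
	const samplepos_t vfpos    = 100000;
	const samplepos_t vframeno = rint ((vfpos - video_offset) / apv);

	/* and back: the audio-sample position corresponding to the /rounded/ video frame */
	const samplepos_t snapped = (samplepos_t) (vframeno * apv) + video_offset;

	printf ("frame %lld starts at sample %lld\n", (long long)vframeno, (long long)snapped);
	return 0;
}

With these example values apv is 1920, so timeline sample 100000 rounds to video frame 46, which itself starts at sample 100665; the commit changes only the names of the types and accessors involved, not this arithmetic.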