OpenShot Library | libopenshot 0.3.3
Timeline.cpp
// Copyright (c) 2008-2019 OpenShot Studios, LLC
//
// SPDX-License-Identifier: LGPL-3.0-or-later
#include "Timeline.h"

#include "CacheBase.h"
#include "CacheDisk.h"
#include "CacheMemory.h"
#include "CrashHandler.h"
#include "FrameMapper.h"
#include "Exceptions.h"

#include <QDir>
// QFile and QRegularExpression are used directly below (project loading and
// path substitution); included explicitly so this file does not depend on
// transitive includes.
#include <QFile>
#include <QFileInfo>
#include <QRegularExpression>

using namespace openshot;

// Default Constructor for the timeline (which sets the canvas width and height)
Timeline::Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout) :
    is_open(false), auto_map_clips(true), managed_cache(true), path(""),
    max_concurrent_frames(OPEN_MP_NUM_PROCESSORS), max_time(0.0)
{
    // Create CrashHandler and attach it (in case of errors)
    CrashHandler::Instance();

    // Init viewport size (curve based, because it can be animated)
    viewport_scale = Keyframe(100.0);
    viewport_x = Keyframe(0.0);
    viewport_y = Keyframe(0.0);

    // Init background color
    color.red = Keyframe(0.0);
    color.green = Keyframe(0.0);
    color.blue = Keyframe(0.0);

    // Init FileInfo struct (clear all values)
    info.width = width;
    info.height = height;
    preview_width = info.width;
    preview_height = info.height;
    info.fps = fps;
    info.sample_rate = sample_rate;
    info.channels = channels;
    info.channel_layout = channel_layout;
    info.video_timebase = fps.Reciprocal();
    info.duration = 60 * 30; // 30 minute default duration
    info.has_audio = true;
    info.has_video = true;
    info.video_length = info.fps.ToFloat() * info.duration;
    info.display_ratio = openshot::Fraction(width, height);
    info.display_ratio.Reduce();
    info.pixel_ratio = openshot::Fraction(1, 1);
    info.acodec = "openshot::timeline";
    info.vcodec = "openshot::timeline";

    // Init max image size
    SetMaxSize(info.width, info.height);

    // Init cache
    final_cache = new CacheMemory();
    final_cache->SetMaxBytesFromInfo(max_concurrent_frames * 4, info.width, info.height, info.sample_rate, info.channels);
}
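// ---------------------------------------------------------------------------
// Illustrative usage sketch (added; not part of the original file): building a
// timeline with the constructor above and pulling one composited frame. The
// 30/1 fps and stereo layout values are arbitrary example choices.
//
//   openshot::Timeline t(1920, 1080, openshot::Fraction(30, 1),
//                        44100, 2, openshot::LAYOUT_STEREO);
//   t.Open();                                            // mark timeline as open
//   std::shared_ptr<openshot::Frame> f = t.GetFrame(1);  // composite frame #1
//   t.Close();                                           // close clips, clear caches
// ---------------------------------------------------------------------------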

// Delegating constructor that copies parameters from a provided ReaderInfo
Timeline::Timeline(const ReaderInfo info) : Timeline::Timeline(
    info.width, info.height, info.fps, info.sample_rate,
    info.channels, info.channel_layout) {}

// Constructor for the timeline (which loads a JSON structure from a file path, and initializes a timeline)
Timeline::Timeline(const std::string& projectPath, bool convert_absolute_paths) :
    is_open(false), auto_map_clips(true), managed_cache(true), path(projectPath),
    max_concurrent_frames(OPEN_MP_NUM_PROCESSORS), max_time(0.0) {

    // Create CrashHandler and attach it (in case of errors)
    CrashHandler::Instance();

    // Init final cache as NULL (will be created after loading json)
    final_cache = NULL;

    // Init viewport size (curve based, because it can be animated)
    viewport_scale = Keyframe(100.0);
    viewport_x = Keyframe(0.0);
    viewport_y = Keyframe(0.0);

    // Init background color
    color.red = Keyframe(0.0);
    color.green = Keyframe(0.0);
    color.blue = Keyframe(0.0);

    // Check if path exists
    QFileInfo filePath(QString::fromStdString(path));
    if (!filePath.exists()) {
        throw InvalidFile("File could not be opened.", path);
    }

    // Check OpenShot Install Path exists
    Settings *s = Settings::Instance();
    QDir openshotPath(QString::fromStdString(s->PATH_OPENSHOT_INSTALL));
    if (!openshotPath.exists()) {
        throw InvalidFile("PATH_OPENSHOT_INSTALL could not be found.", s->PATH_OPENSHOT_INSTALL);
    }
    QDir openshotTransPath(openshotPath.filePath("transitions"));
    if (!openshotTransPath.exists()) {
        throw InvalidFile("PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
    }

    // Determine asset path
    QString asset_name = filePath.baseName().left(30) + "_assets";
    QDir asset_folder(filePath.dir().filePath(asset_name));
    if (!asset_folder.exists()) {
        // Create directory if needed
        asset_folder.mkpath(".");
    }

    // Load UTF-8 project file into QString
    QFile projectFile(QString::fromStdString(path));
    projectFile.open(QFile::ReadOnly);
    QString projectContents = QString::fromUtf8(projectFile.readAll());

    // Convert all relative paths into absolute paths (if requested)
    if (convert_absolute_paths) {

        // Find all "image" or "path" references in the JSON (using a regex). We must loop
        // through the match results, because this kind of path matching is not possible
        // with the QString::replace() function.
        QRegularExpression allPathsRegex(QStringLiteral("\"(image|path)\":.*?\"(.*?)\""));
        std::vector<QRegularExpressionMatch> matchedPositions;
        QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
        while (i.hasNext()) {
            QRegularExpressionMatch match = i.next();
            if (match.hasMatch()) {
                // Push all match objects into a vector (so we can reverse them later)
                matchedPositions.push_back(match);
            }
        }

        // Reverse the matches (bottom of file to top, so our replacements don't break our match positions)
        std::vector<QRegularExpressionMatch>::reverse_iterator itr;
        for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
            QRegularExpressionMatch match = *itr;
            QString relativeKey = match.captured(1); // image or path
            QString relativePath = match.captured(2); // relative file path
            QString absolutePath = "";

            // Find absolute path of all path/image references (including special replacements of @assets and @transitions)
            if (relativePath.startsWith("@assets")) {
                absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace("@assets", "."))).canonicalFilePath();
            } else if (relativePath.startsWith("@transitions")) {
                absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace("@transitions", "."))).canonicalFilePath();
            } else {
                absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
            }

            // Replace path in JSON content, if an absolute path was successfully found
            if (!absolutePath.isEmpty()) {
                projectContents.replace(match.capturedStart(0), match.capturedLength(0), "\"" + relativeKey + "\": \"" + absolutePath + "\"");
            }
        }
        // Clear matches
        matchedPositions.clear();
    }

    // Set JSON of project
    SetJson(projectContents.toStdString());

    // Calculate valid duration and set has_audio and has_video
    // based on content inside this Timeline's clips.
    float calculated_duration = 0.0;
    for (auto clip : clips)
    {
        float clip_last_frame = clip->Position() + clip->Duration();
        if (clip_last_frame > calculated_duration)
            calculated_duration = clip_last_frame;
        if (clip->Reader() && clip->Reader()->info.has_audio)
            info.has_audio = true;
        if (clip->Reader() && clip->Reader()->info.has_video)
            info.has_video = true;
    }
    info.video_length = calculated_duration * info.fps.ToFloat();
    info.duration = calculated_duration;

    // Init FileInfo settings
    info.acodec = "openshot::timeline";
    info.vcodec = "openshot::timeline";
    info.video_timebase = info.fps.Reciprocal();
    info.has_video = true;
    info.has_audio = true;

    // Init max image size
    SetMaxSize(info.width, info.height);

    // Init cache
    final_cache = new CacheMemory();
    final_cache->SetMaxBytesFromInfo(max_concurrent_frames * 4, info.width, info.height, info.sample_rate, info.channels);
}
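// ---------------------------------------------------------------------------
// Illustrative sketch (added; not part of the original file): how the regex
// above rewrites relative paths when convert_absolute_paths is true. Given
// project JSON containing
//
//   "path": "@assets/title.png"
//
// and a project file at /home/user/demo.osp, the "@assets" prefix resolves
// against the "demo_assets" folder next to the project, producing e.g.
//
//   "path": "/home/user/demo_assets/title.png"
//
// "@transitions" resolves against PATH_OPENSHOT_INSTALL/transitions, and any
// other relative path resolves against the project file's own directory.
// ---------------------------------------------------------------------------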

// Destructor for the timeline (which releases all resources)
Timeline::~Timeline() {
    if (is_open) {
        // Auto Close if not already
        Close();
    }

    // Remove all clips, effects, and frame mappers
    Clear();

    // Destroy previous cache (if managed by timeline)
    if (managed_cache && final_cache) {
        delete final_cache;
        final_cache = NULL;
    }
}

// Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
void Timeline::AddTrackedObject(std::shared_ptr<openshot::TrackedObjectBase> trackedObject){

    // Search for the tracked object on the map
    auto iterator = tracked_objects.find(trackedObject->Id());

    if (iterator != tracked_objects.end()){
        // Tracked object's id is already present in the map; overwrite it
        iterator->second = trackedObject;
    }
    else{
        // Tracked object's id is not present -> insert it into the map
        tracked_objects[trackedObject->Id()] = trackedObject;
    }

    return;
}

// Return tracked object pointer by its id
std::shared_ptr<openshot::TrackedObjectBase> Timeline::GetTrackedObject(std::string id) const{

    // Search for the tracked object on the map
    auto iterator = tracked_objects.find(id);

    if (iterator != tracked_objects.end()){
        // Id found, return the pointer to the tracked object
        std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
        return trackedObject;
    }
    else {
        // Id not found, return a null pointer
        return nullptr;
    }
}

// Return the IDs of the tracked objects as a list of strings
std::list<std::string> Timeline::GetTrackedObjectsIds() const{

    // Create a list of strings
    std::list<std::string> trackedObjects_ids;

    // Iterate through the tracked_objects map
    for (auto const& it: tracked_objects){
        // Add the IDs to the list
        trackedObjects_ids.push_back(it.first);
    }

    return trackedObjects_ids;
}

#ifdef USE_OPENCV
// Return the trackedObject's properties as a JSON string
std::string Timeline::GetTrackedObjectValues(std::string id, int64_t frame_number) const {

    // Initialize the JSON object
    Json::Value trackedObjectJson;

    // Search for the tracked object on the map
    auto iterator = tracked_objects.find(id);

    if (iterator != tracked_objects.end())
    {
        // Id found; get the object pointer and cast it as a TrackedObjectBBox
        std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);

        if (trackedObject->ExactlyContains(frame_number)){
            // Get the tracked object's box for the requested frame
            BBox box = trackedObject->GetBox(frame_number);
            float x1 = box.cx - (box.width/2);
            float y1 = box.cy - (box.height/2);
            float x2 = box.cx + (box.width/2);
            float y2 = box.cy + (box.height/2);
            float rotation = box.angle;

            trackedObjectJson["x1"] = x1;
            trackedObjectJson["y1"] = y1;
            trackedObjectJson["x2"] = x2;
            trackedObjectJson["y2"] = y2;
            trackedObjectJson["rotation"] = rotation;

        } else {
            // Requested frame not tracked; fall back to the object's first frame
            BBox box = trackedObject->BoxVec.begin()->second;
            float x1 = box.cx - (box.width/2);
            float y1 = box.cy - (box.height/2);
            float x2 = box.cx + (box.width/2);
            float y2 = box.cy + (box.height/2);
            float rotation = box.angle;

            trackedObjectJson["x1"] = x1;
            trackedObjectJson["y1"] = y1;
            trackedObjectJson["x2"] = x2;
            trackedObjectJson["y2"] = y2;
            trackedObjectJson["rotation"] = rotation;
        }

    }
    else {
        // Id not found, return all 0 values
        trackedObjectJson["x1"] = 0;
        trackedObjectJson["y1"] = 0;
        trackedObjectJson["x2"] = 0;
        trackedObjectJson["y2"] = 0;
        trackedObjectJson["rotation"] = 0;
    }

    return trackedObjectJson.toStyledString();
}
#endif
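// ---------------------------------------------------------------------------
// Worked example (added; not part of the original file): the JSON returned by
// GetTrackedObjectValues() above. For a box centered at (cx, cy) = (0.5, 0.5)
// with width 0.2, height 0.4, and no rotation, the corner math yields
//
//   { "x1": 0.4, "y1": 0.3, "x2": 0.6, "y2": 0.7, "rotation": 0.0 }
//
// i.e. x1 = cx - width/2, y1 = cy - height/2, x2 = cx + width/2,
// y2 = cy + height/2.
// ---------------------------------------------------------------------------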

// Add an openshot::Clip to the timeline
void Timeline::AddClip(Clip* clip)
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Assign timeline to clip
    clip->ParentTimeline(this);

    // Clear cache of clip and nested reader (if any)
    if (clip->Reader() && clip->Reader()->GetCache())
        clip->Reader()->GetCache()->Clear();

    // All clips should be converted to the frame rate of this timeline
    if (auto_map_clips) {
        // Apply framemapper (or update existing framemapper)
        apply_mapper_to_clip(clip);
    }

    // Add clip to list
    clips.push_back(clip);

    // Sort clips
    sort_clips();
}
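// ---------------------------------------------------------------------------
// Illustrative usage sketch (added; not part of the original file): adding a
// clip to an existing `timeline`. The FFmpegReader and the file path are
// example assumptions; any openshot::ReaderBase works.
//
//   openshot::FFmpegReader reader("/path/to/video.mp4");  // hypothetical path
//   openshot::Clip clip(&reader);
//   clip.Position(2.0);        // start 2 seconds into the timeline
//   clip.Layer(1);
//   timeline.AddClip(&clip);   // caller keeps ownership (not in allocated_clips)
// ---------------------------------------------------------------------------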

// Add an effect to the timeline
void Timeline::AddEffect(EffectBase* effect)
{
    // Assign timeline to effect
    effect->ParentTimeline(this);

    // Add effect to list
    effects.push_back(effect);

    // Sort effects
    sort_effects();
}

// Remove an effect from the timeline
void Timeline::RemoveEffect(EffectBase* effect)
{
    effects.remove(effect);

    // Delete effect object (if timeline allocated it)
    bool allocated = allocated_effects.count(effect);
    if (allocated) {
        // Erase from the tracking set *before* deleting (erasing after
        // nulling the pointer would remove nothing from the set)
        allocated_effects.erase(effect);
        delete effect;
        effect = NULL;
    }

    // Sort effects
    sort_effects();
}

// Remove an openshot::Clip from the timeline
void Timeline::RemoveClip(Clip* clip)
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    clips.remove(clip);

    // Delete clip object (if timeline allocated it)
    bool allocated = allocated_clips.count(clip);
    if (allocated) {
        // Erase from the tracking set *before* deleting (same reasoning as above)
        allocated_clips.erase(clip);
        delete clip;
        clip = NULL;
    }

    // Sort clips
    sort_clips();
}

// Look up a clip
openshot::Clip* Timeline::GetClip(const std::string& id)
{
    // Find the matching clip (if any)
    for (const auto& clip : clips) {
        if (clip->Id() == id) {
            return clip;
        }
    }
    return nullptr;
}

// Look up a timeline effect
openshot::EffectBase* Timeline::GetEffect(const std::string& id)
{
    // Find the matching effect (if any)
    for (const auto& effect : effects) {
        if (effect->Id() == id) {
            return effect;
        }
    }
    return nullptr;
}

// Look up an effect (by ID) attached to any clip on the timeline
openshot::EffectBase* Timeline::GetClipEffect(const std::string& id)
{
    // Search all clips for matching effect ID
    for (const auto& clip : clips) {
        const auto e = clip->GetEffect(id);
        if (e != nullptr) {
            return e;
        }
    }
    return nullptr;
}

// Return the list of effects on all clips
std::list<openshot::EffectBase*> Timeline::ClipEffects() const {

    // Initialize the list
    std::list<EffectBase*> timelineEffectsList;

    // Loop through all clips
    for (const auto& clip : clips) {

        // Get the clip's list of effects
        std::list<EffectBase*> clipEffectsList = clip->Effects();

        // Append the clip's effects to the list
        timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());
    }

    return timelineEffectsList;
}

// Compute the end time of the latest timeline element
double Timeline::GetMaxTime() {
    // Return cached max_time variable (threadsafe)
    return max_time;
}

// Compute the highest frame# based on the latest time and FPS
int64_t Timeline::GetMaxFrame() {
    double fps = info.fps.ToDouble();
    auto max_time = GetMaxTime();
    return std::round(max_time * fps) + 1;
}
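// ---------------------------------------------------------------------------
// Worked example (added; not part of the original file): with max_time = 10.0
// seconds and 30000/1001 fps (~29.97), GetMaxFrame() returns
// round(10.0 * 29.97) + 1 = 300 + 1 = 301. Frame numbers are 1-based, so the
// max frame sits one past the rounded end time.
// ---------------------------------------------------------------------------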

// Apply a FrameMapper to a clip which matches the settings of this timeline
void Timeline::apply_mapper_to_clip(Clip* clip)
{
    // Determine type of reader
    ReaderBase* clip_reader = NULL;
    if (clip->Reader()->Name() == "FrameMapper")
    {
        // Get the existing reader
        clip_reader = (ReaderBase*) clip->Reader();

        // Update the mapping
        FrameMapper* clip_mapped_reader = (FrameMapper*) clip_reader;
        clip_mapped_reader->ChangeMapping(info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);

    } else {

        // Create a new FrameMapper to wrap the current reader
        FrameMapper* mapper = new FrameMapper(clip->Reader(), info.fps, PULLDOWN_NONE, info.sample_rate, info.channels, info.channel_layout);
        allocated_frame_mappers.insert(mapper);
        clip_reader = (ReaderBase*) mapper;
    }

    // Update clip reader
    clip->Reader(clip_reader);
}

// Apply the timeline's framerate and samplerate to all clips
void Timeline::ApplyMapperToClips()
{
    // Clear all cached frames
    ClearAllCache();

    // Loop through all clips
    for (auto clip : clips)
    {
        // Apply framemapper (or update existing framemapper)
        apply_mapper_to_clip(clip);
    }
}

// Calculate time of a frame number, based on a framerate
double Timeline::calculate_time(int64_t number, Fraction rate)
{
    // Get float version of fps fraction
    double raw_fps = rate.ToFloat();

    // Return the time (in seconds) of this frame
    return double(number - 1) / raw_fps;
}
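// ---------------------------------------------------------------------------
// Worked example (added; not part of the original file): frame numbers are
// 1-based, so calculate_time(1, Fraction(30, 1)) = (1 - 1) / 30 = 0.0 seconds,
// and calculate_time(31, Fraction(30, 1)) = 30 / 30 = 1.0 second.
// ---------------------------------------------------------------------------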

// Apply effects to the source frame (if any)
std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number, int layer, TimelineInfoStruct* options)
{
    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::apply_effects",
        "frame->number", frame->number,
        "timeline_frame_number", timeline_frame_number,
        "layer", layer);

    // Find Effects at this position and layer
    for (auto effect : effects)
    {
        // Does the effect intersect the current requested time
        long effect_start_position = round(effect->Position() * info.fps.ToDouble()) + 1;
        long effect_end_position = round((effect->Position() + (effect->Duration())) * info.fps.ToDouble());

        bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);

        // Effect is visible
        if (does_effect_intersect)
        {
            // Determine the frame needed for this effect (based on the position on the timeline)
            long effect_start_frame = (effect->Start() * info.fps.ToDouble()) + 1;
            long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;

            if (!options->is_top_clip)
                continue; // skip effect, if overlapped/covered by another clip on same layer

            if (options->is_before_clip_keyframes != effect->info.apply_before_clip)
                continue; // skip effect, if this filter does not match

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::apply_effects (Process Effect)",
                "effect_frame_number", effect_frame_number,
                "does_effect_intersect", does_effect_intersect);

            // Apply the effect to this frame
            frame = effect->GetFrame(frame, effect_frame_number);
        }

    } // end effect loop

    // Return modified frame
    return frame;
}

// Get or generate a blank frame
std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame, Clip* clip, int64_t number, openshot::TimelineInfoStruct* options)
{
    std::shared_ptr<Frame> new_frame;

    // Init some basic properties about this frame
    int samples_in_frame = Frame::GetSamplesPerFrame(number, info.fps, info.sample_rate, info.channels);

    try {
        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::GetOrCreateFrame (from reader)",
            "number", number,
            "samples_in_frame", samples_in_frame);

        // Attempt to get a frame (but this could fail if a reader has just been closed)
        new_frame = std::shared_ptr<Frame>(clip->GetFrame(background_frame, number, options));

        // Return real frame
        return new_frame;

    } catch (const ReaderClosed & e) {
        // ...
    } catch (const OutOfBoundsFrame & e) {
        // ...
    }

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::GetOrCreateFrame (create blank)",
        "number", number,
        "samples_in_frame", samples_in_frame);

    // No frame could be retrieved; return the (still empty) frame pointer,
    // which the caller treats as "no frame found"
    return new_frame;
}

// Process a new layer of video or audio
void Timeline::add_layer(std::shared_ptr<Frame> new_frame, Clip* source_clip, int64_t clip_frame_number, bool is_top_clip, float max_volume)
{
    // Create timeline options (with details about this current frame request)
    TimelineInfoStruct* options = new TimelineInfoStruct();
    options->is_top_clip = is_top_clip;
    options->is_before_clip_keyframes = true;

    // Get the clip's frame, composited on top of the current timeline frame
    std::shared_ptr<Frame> source_frame;
    source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, options);
    delete options;

    // No frame found... so bail
    if (!source_frame)
        return;

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::add_layer",
        "new_frame->number", new_frame->number,
        "clip_frame_number", clip_frame_number);

    /* COPY AUDIO - with correct volume */
    if (source_clip->Reader()->info.has_audio) {
        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::add_layer (Copy Audio)",
            "source_clip->Reader()->info.has_audio", source_clip->Reader()->info.has_audio,
            "source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
            "info.channels", info.channels,
            "clip_frame_number", clip_frame_number);

        if (source_frame->GetAudioChannelsCount() == info.channels && source_clip->has_audio.GetInt(clip_frame_number) != 0)
            for (int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
            {
                // Get volume from previous frame and this frame
                float previous_volume = source_clip->volume.GetValue(clip_frame_number - 1);
                float volume = source_clip->volume.GetValue(clip_frame_number);
                int channel_filter = source_clip->channel_filter.GetInt(clip_frame_number); // optional channel to filter (if not -1)
                int channel_mapping = source_clip->channel_mapping.GetInt(clip_frame_number); // optional channel to map this channel to (if not -1)

                // Apply volume mixing strategy
                if (source_clip->mixing == VOLUME_MIX_AVERAGE && max_volume > 1.0) {
                    // Don't allow this clip to exceed 100% (divide volume equally between all overlapping clips with volume)
                    previous_volume = previous_volume / max_volume;
                    volume = volume / max_volume;
                }
                else if (source_clip->mixing == VOLUME_MIX_REDUCE && max_volume > 1.0) {
                    // Reduce clip volume by a bit, hoping it will prevent exceeding 100% (but it is very possible it will)
                    previous_volume = previous_volume * 0.77;
                    volume = volume * 0.77;
                }

                // If channel filter enabled, check for correct channel (and skip non-matching channels)
                if (channel_filter != -1 && channel_filter != channel)
                    continue; // skip to next channel

                // If no volume on this frame or previous frame, do nothing
                if (previous_volume == 0.0 && volume == 0.0)
                    continue; // skip to next channel

                // If channel mapping disabled, just use the current channel
                if (channel_mapping == -1)
                    channel_mapping = channel;

                // Apply ramp to source frame (if needed)
                if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
                    source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);

                // TODO: Improve FrameMapper (or Timeline) to always get the correct number of samples per frame.
                // Currently, the ResampleContext sometimes leaves behind a few samples for the next call, and the
                // number of samples returned is variable... and does not match the number expected.
                // This is a crude solution at best. =)
                if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()){
                    // Force timeline frame to match the source frame
                    new_frame->ResizeAudio(info.channels, source_frame->GetAudioSamplesCount(), info.sample_rate, info.channel_layout);
                }
                // Copy audio samples (and set initial volume). Mix samples with existing audio samples. The gains are
                // added together, so be sure to set the gains correctly, so the sum does not exceed 1.0 (or audio
                // distortion will happen).
                new_frame->AddAudio(false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
            }
        else
            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
                "source_clip->Reader()->info.has_audio",
                source_clip->Reader()->info.has_audio,
                "source_frame->GetAudioChannelsCount()",
                source_frame->GetAudioChannelsCount(),
                "info.channels", info.channels,
                "clip_frame_number", clip_frame_number);
    }

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::add_layer (Transform: Composite Image Layer: Completed)",
        "source_frame->number", source_frame->number,
        "new_frame->GetImage()->width()", new_frame->GetWidth(),
        "new_frame->GetImage()->height()", new_frame->GetHeight());
}
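// ---------------------------------------------------------------------------
// Worked example (added; not part of the original file): three overlapping
// clips, each at volume 1.0, give max_volume = 3.0. With VOLUME_MIX_AVERAGE
// each clip is scaled to 1.0 / 3.0 ≈ 0.33 so the mixed sum stays near 1.0;
// with VOLUME_MIX_REDUCE each is scaled to 1.0 * 0.77, which merely reduces
// (but does not prevent) clipping, as the comment above admits.
// ---------------------------------------------------------------------------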

// Update the list of 'opened' clips
void Timeline::update_open_clips(Clip *clip, bool does_clip_intersect)
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::update_open_clips (before)",
        "does_clip_intersect", does_clip_intersect,
        "closing_clips.size()", closing_clips.size(),
        "open_clips.size()", open_clips.size());

    // Is the clip already in the list?
    bool clip_found = open_clips.count(clip);

    if (clip_found && !does_clip_intersect)
    {
        // Remove clip from 'opened' list, because it's closed now
        open_clips.erase(clip);

        // Close clip
        clip->Close();
    }
    else if (!clip_found && does_clip_intersect)
    {
        // Add clip to 'opened' list, because it's missing
        open_clips[clip] = clip;

        try {
            // Open the clip
            clip->Open();

        } catch (const InvalidFile & e) {
            // ...
        }
    }

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::update_open_clips (after)",
        "does_clip_intersect", does_clip_intersect,
        "clip_found", clip_found,
        "closing_clips.size()", closing_clips.size(),
        "open_clips.size()", open_clips.size());
}

// Calculate the max duration (in seconds) of the timeline, based on all the clips, and cache the value
void Timeline::calculate_max_duration() {
    double last_clip = 0.0;
    double last_effect = 0.0;

    if (!clips.empty()) {
        const auto max_clip = std::max_element(
            clips.begin(), clips.end(), CompareClipEndFrames());
        last_clip = (*max_clip)->Position() + (*max_clip)->Duration();
    }
    if (!effects.empty()) {
        const auto max_effect = std::max_element(
            effects.begin(), effects.end(), CompareEffectEndFrames());
        last_effect = (*max_effect)->Position() + (*max_effect)->Duration();
    }
    max_time = std::max(last_clip, last_effect);
}

// Sort clips by position on the timeline
void Timeline::sort_clips()
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Debug output
    ZmqLogger::Instance()->AppendDebugMethod(
        "Timeline::SortClips",
        "clips.size()", clips.size());

    // Sort clips
    clips.sort(CompareClips());

    // Calculate max timeline duration
    calculate_max_duration();
}

// Sort effects by position on the timeline
void Timeline::sort_effects()
{
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Sort effects
    effects.sort(CompareEffects());

    // Calculate max timeline duration
    calculate_max_duration();
}

// Clear all clips from timeline
void Timeline::Clear()
{
    ZmqLogger::Instance()->AppendDebugMethod("Timeline::Clear");

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Close all open clips
    for (auto clip : clips)
    {
        update_open_clips(clip, false);

        // Delete clip object (if timeline allocated it)
        bool allocated = allocated_clips.count(clip);
        if (allocated) {
            delete clip;
        }
    }
    // Clear all clips
    clips.clear();
    allocated_clips.clear();

    // Close all effects
    for (auto effect : effects)
    {
        // Delete effect object (if timeline allocated it)
        bool allocated = allocated_effects.count(effect);
        if (allocated) {
            delete effect;
        }
    }
    // Clear all effects
    effects.clear();
    allocated_effects.clear();

    // Delete all FrameMappers
    for (auto mapper : allocated_frame_mappers)
    {
        mapper->Reader(NULL);
        mapper->Close();
        delete mapper;
    }
    allocated_frame_mappers.clear();
}

// Close the reader (and any resources it was consuming)
void Timeline::Close()
{
    ZmqLogger::Instance()->AppendDebugMethod("Timeline::Close");

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> guard(getFrameMutex);

    // Close all open clips
    for (auto clip : clips)
    {
        // Open or Close this clip, based on if it's intersecting or not
        update_open_clips(clip, false);
    }

    // Mark timeline as closed
    is_open = false;

    // Clear all cache (deep clear, including nested Readers)
    ClearAllCache(true);
}

// Open the reader (and start consuming resources)
void Timeline::Open()
{
    is_open = true;
}

// Compare 2 floating point numbers for equality
bool Timeline::isEqual(double a, double b)
{
    return fabs(a - b) < 0.000001;
}

// Get an openshot::Frame object for a specific frame number of this reader.
std::shared_ptr<Frame> Timeline::GetFrame(int64_t requested_frame)
{
    // Adjust out of bounds frame number
    if (requested_frame < 1)
        requested_frame = 1;

    // Check cache
    std::shared_ptr<Frame> frame;
    frame = final_cache->GetFrame(requested_frame);
    if (frame) {
        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::GetFrame (Cached frame found)",
            "requested_frame", requested_frame);

        // Return cached frame
        return frame;
    }
    else
    {
        // Prevent async calls to the following code
        const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

        // Check cache a 2nd time (a concurrent call may have just added this frame)
        std::shared_ptr<Frame> frame;
        frame = final_cache->GetFrame(requested_frame);
        if (frame) {
            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Cached frame found on 2nd check)",
                "requested_frame", requested_frame);

            // Return cached frame
            return frame;
        } else {
            // Get a list of clips that intersect with the requested section of timeline
            // This also opens the readers for intersecting clips, and marks non-intersecting clips as 'needs closing'
            std::vector<Clip *> nearby_clips;
            nearby_clips = find_intersecting_clips(requested_frame, 1, true);

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (processing frame)",
                "requested_frame", requested_frame,
                "omp_get_thread_num()", omp_get_thread_num());

            // Init some basic properties about this frame
            int samples_in_frame = Frame::GetSamplesPerFrame(requested_frame, info.fps, info.sample_rate, info.channels);

            // Create blank frame (which will become the requested frame)
            std::shared_ptr<Frame> new_frame(std::make_shared<Frame>(requested_frame, preview_width, preview_height, "#000000", samples_in_frame, info.channels));
            new_frame->AddAudioSilence(samples_in_frame);
            new_frame->SampleRate(info.sample_rate);
            new_frame->ChannelsLayout(info.channel_layout);

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Adding solid color)",
                "requested_frame", requested_frame,
                "info.width", info.width,
                "info.height", info.height);

            // Add Background Color to 1st layer (if animated or not black)
            if ((color.red.GetCount() > 1 || color.green.GetCount() > 1 || color.blue.GetCount() > 1) ||
                (color.red.GetValue(requested_frame) != 0.0 || color.green.GetValue(requested_frame) != 0.0 ||
                 color.blue.GetValue(requested_frame) != 0.0))
                new_frame->AddColor(preview_width, preview_height, color.GetColorHex(requested_frame));

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Loop through clips)",
                "requested_frame", requested_frame,
                "clips.size()", clips.size(),
                "nearby_clips.size()", nearby_clips.size());

            // Find Clips near this time
            for (auto clip : nearby_clips) {
                long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1;
                long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble());
                bool does_clip_intersect = (clip_start_position <= requested_frame && clip_end_position >= requested_frame);

                // Debug output
                ZmqLogger::Instance()->AppendDebugMethod(
                    "Timeline::GetFrame (Does clip intersect)",
                    "requested_frame", requested_frame,
                    "clip->Position()", clip->Position(),
                    "clip->Duration()", clip->Duration(),
                    "does_clip_intersect", does_clip_intersect);

                // Clip is visible
                if (does_clip_intersect) {
                    // Determine if clip is "top" clip on this layer (only happens when multiple clips are overlapping)
                    bool is_top_clip = true;
                    float max_volume = 0.0;
                    for (auto nearby_clip : nearby_clips) {
                        long nearby_clip_start_position = round(nearby_clip->Position() * info.fps.ToDouble()) + 1;
                        long nearby_clip_end_position = round((nearby_clip->Position() + nearby_clip->Duration()) * info.fps.ToDouble()) + 1;
                        long nearby_clip_start_frame = (nearby_clip->Start() * info.fps.ToDouble()) + 1;
                        long nearby_clip_frame_number = requested_frame - nearby_clip_start_position + nearby_clip_start_frame;

                        // Determine if top clip
                        if (clip->Id() != nearby_clip->Id() && clip->Layer() == nearby_clip->Layer() &&
                            nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame &&
                            nearby_clip_start_position > clip_start_position && is_top_clip == true) {
                            is_top_clip = false;
                        }

                        // Determine max volume of overlapping clips
                        if (nearby_clip->Reader() && nearby_clip->Reader()->info.has_audio &&
                            nearby_clip->has_audio.GetInt(nearby_clip_frame_number) != 0 &&
                            nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame) {
                            max_volume += nearby_clip->volume.GetValue(nearby_clip_frame_number);
                        }
                    }

                    // Determine the frame needed for this clip (based on the position on the timeline)
                    long clip_start_frame = (clip->Start() * info.fps.ToDouble()) + 1;
                    long clip_frame_number = requested_frame - clip_start_position + clip_start_frame;

                    // Debug output
                    ZmqLogger::Instance()->AppendDebugMethod(
                        "Timeline::GetFrame (Calculate clip's frame #)",
                        "clip->Position()", clip->Position(),
                        "clip->Start()", clip->Start(),
                        "info.fps.ToFloat()", info.fps.ToFloat(),
                        "clip_frame_number", clip_frame_number);

                    // Add clip's frame as layer
                    add_layer(new_frame, clip, clip_frame_number, is_top_clip, max_volume);

                } else {
                    // Debug output
                    ZmqLogger::Instance()->AppendDebugMethod(
                        "Timeline::GetFrame (clip does not intersect)",
                        "requested_frame", requested_frame,
                        "does_clip_intersect", does_clip_intersect);
                }

            } // end clip loop

            // Debug output
            ZmqLogger::Instance()->AppendDebugMethod(
                "Timeline::GetFrame (Add frame to cache)",
                "requested_frame", requested_frame,
                "info.width", info.width,
                "info.height", info.height);

            // Set frame # on mapped frame
            new_frame->SetFrameNumber(requested_frame);

            // Add final frame to cache
            final_cache->Add(new_frame);

            // Return frame (or blank frame)
            return new_frame;
        }
    }
}
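// ---------------------------------------------------------------------------
// Design note (added; not part of the original file): GetFrame() uses a
// double-checked pattern around final_cache: one lock-free lookup for the hot
// path, then a second lookup after acquiring getFrameMutex, so a frame just
// rendered by a concurrent caller is not composited twice. Only cache misses
// pay for the mutex and the full clip/effect compositing work.
// ---------------------------------------------------------------------------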

// Find intersecting clips (or non-intersecting clips)
std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame, int number_of_frames, bool include)
{
    // Find matching clips
    std::vector<Clip*> matching_clips;

    // Calculate the requested frame range
    float min_requested_frame = requested_frame;
    float max_requested_frame = requested_frame + (number_of_frames - 1);

    // Find Clips at this time
    for (auto clip : clips)
    {
        // Does clip intersect the current requested time
        long clip_start_position = round(clip->Position() * info.fps.ToDouble()) + 1;
        long clip_end_position = round((clip->Position() + clip->Duration()) * info.fps.ToDouble()) + 1;

        bool does_clip_intersect =
            (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
            (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);

        // Debug output
        ZmqLogger::Instance()->AppendDebugMethod(
            "Timeline::find_intersecting_clips (Is clip near or intersecting)",
            "requested_frame", requested_frame,
            "min_requested_frame", min_requested_frame,
            "max_requested_frame", max_requested_frame,
            "clip->Position()", clip->Position(),
            "does_clip_intersect", does_clip_intersect);

        // Open (or schedule for closing) this clip, based on if it's intersecting or not
        update_open_clips(clip, does_clip_intersect);

        // Clip is visible
        if (does_clip_intersect && include)
            // Add the intersecting clip
            matching_clips.push_back(clip);

        else if (!does_clip_intersect && !include)
            // Add the non-intersecting clip
            matching_clips.push_back(clip);

    } // end clip loop

    // Return list
    return matching_clips;
}
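// ---------------------------------------------------------------------------
// Worked example (added; not part of the original file): at 30 fps, a clip
// with Position() = 1.0 and Duration() = 2.0 yields clip_start_position =
// round(1.0 * 30) + 1 = 31 and clip_end_position = round(3.0 * 30) + 1 = 91.
// A call with requested_frame = 40 and number_of_frames = 1 checks the window
// [40, 40]; since 31 <= 40 and 91 >= 40, the clip intersects and (with
// include = true) it is returned.
// ---------------------------------------------------------------------------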

// Set the cache object used by this reader
void Timeline::SetCache(CacheBase* new_cache) {
    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Destroy previous cache (if managed by timeline)
    if (managed_cache && final_cache) {
        delete final_cache;
        final_cache = NULL;
        managed_cache = false;
    }

    // Set new cache
    final_cache = new_cache;
}
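// ---------------------------------------------------------------------------
// Usage note (added; not part of the original file): after SetCache(),
// managed_cache is false, so the timeline will NOT delete the supplied cache;
// the caller keeps ownership. A minimal sketch, keeping the cache alive for
// the timeline's lifetime:
//
//   openshot::CacheMemory my_cache;
//   my_cache.SetMaxBytesFromInfo(48, 1920, 1080, 44100, 2);
//   timeline.SetCache(&my_cache);  // timeline uses, but never frees, my_cache
// ---------------------------------------------------------------------------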

// Generate JSON string of this object
std::string Timeline::Json() const {

    // Return formatted string
    return JsonValue().toStyledString();
}

// Generate Json::Value for this object
Json::Value Timeline::JsonValue() const {

    // Create root json object
    Json::Value root = ReaderBase::JsonValue(); // get parent properties
    root["type"] = "Timeline";
    root["viewport_scale"] = viewport_scale.JsonValue();
    root["viewport_x"] = viewport_x.JsonValue();
    root["viewport_y"] = viewport_y.JsonValue();
    root["color"] = color.JsonValue();
    root["path"] = path;

    // Add array of clips
    root["clips"] = Json::Value(Json::arrayValue);

    // Loop through clips
    for (const auto existing_clip : clips)
    {
        root["clips"].append(existing_clip->JsonValue());
    }

    // Add array of effects
    root["effects"] = Json::Value(Json::arrayValue);

    // Loop through effects
    for (const auto existing_effect: effects)
    {
        root["effects"].append(existing_effect->JsonValue());
    }

    // Return JsonValue
    return root;
}

// Load JSON string into this object
void Timeline::SetJson(const std::string value) {

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Parse JSON string into JSON objects
    try
    {
        const Json::Value root = openshot::stringToJson(value);
        // Set all values that match
        SetJsonValue(root);
    }
    catch (const std::exception& e)
    {
        // Error parsing JSON (or missing keys)
        throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
    }
}

// Load Json::Value into this object
void Timeline::SetJsonValue(const Json::Value root) {

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Close timeline before we do anything (this closes all clips)
    bool was_open = is_open;
    Close();

    // Set parent data
    ReaderBase::SetJsonValue(root);

    // Set data from Json (if key is found)
    if (!root["path"].isNull())
        path = root["path"].asString();

    if (!root["clips"].isNull()) {
        // Clear existing clips
        clips.clear();

        // Loop through clips
        for (const Json::Value existing_clip : root["clips"]) {
            // Skip NULL nodes
            if (existing_clip.isNull()) {
                continue;
            }

            // Create Clip
            Clip *c = new Clip();

            // Keep track of allocated clip objects
            allocated_clips.insert(c);

            // When a clip is attached to an object, it searches for the object
            // on its parent timeline. Setting the parent timeline of the clip here
            // allows attaching it to an object when exporting the project (because
            // the exporter script initializes the clip and its effects
            // before setting its parent timeline).
            c->ParentTimeline(this);

            // Load Json into Clip
            c->SetJsonValue(existing_clip);

            // Add Clip to Timeline
            AddClip(c);
        }
    }

    if (!root["effects"].isNull()) {
        // Clear existing effects
        effects.clear();

        // Loop through effects
        for (const Json::Value existing_effect : root["effects"]) {
            // Skip NULL nodes
            if (existing_effect.isNull()) {
                continue;
            }

            // Create Effect
            EffectBase *e = NULL;

            if (!existing_effect["type"].isNull()) {
                // Create instance of effect
                if ( (e = EffectInfo().CreateEffect(existing_effect["type"].asString())) ) {

                    // Keep track of allocated effect objects
                    allocated_effects.insert(e);

                    // Load Json into Effect
                    e->SetJsonValue(existing_effect);

                    // Add Effect to Timeline
                    AddEffect(e);
                }
            }
        }
    }

    if (!root["duration"].isNull()) {
        // Update duration of timeline
        info.duration = root["duration"].asDouble();
        max_time = info.duration;
    }

    // Update preview settings
    preview_width = info.width;
    preview_height = info.height;

    // Re-open if needed
    if (was_open)
        Open();
}

// Apply a special formatted JSON object, which represents a change to the timeline (insert, update, delete)
void Timeline::ApplyJsonDiff(std::string value) {

    // Get lock (prevent getting frames while this happens)
    const std::lock_guard<std::recursive_mutex> lock(getFrameMutex);

    // Parse JSON string into JSON objects
    try
    {
        const Json::Value root = openshot::stringToJson(value);
        // Process the JSON change array, loop through each item
        for (const Json::Value change : root) {
            std::string change_key = change["key"][(uint)0].asString();

            // Process each type of change
            if (change_key == "clips")
                // Apply to CLIPS
                apply_json_to_clips(change);

            else if (change_key == "effects")
                // Apply to EFFECTS
                apply_json_to_effects(change);

            else
                // Apply to TIMELINE
                apply_json_to_timeline(change);

        }
    }
    catch (const std::exception& e)
    {
        // Error parsing JSON (or missing keys)
        throw InvalidJSON("JSON is invalid (missing keys or invalid data types)");
    }
}
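// ---------------------------------------------------------------------------
// Illustrative sketch (added; not part of the original file): the diff format
// expected above is a JSON array of changes, each with "type", "key", and
// "value". The id and property values below are made up. For example:
//
//   [{
//     "type": "update",
//     "key": ["clips", {"id": "CLIP-1"}],
//     "value": {"id": "CLIP-1", "position": 4.0, "start": 0.0, "end": 10.0}
//   }]
//
// "type" is one of insert / update / delete; the first element of "key"
// routes the change to clips, effects, or the timeline itself.
// ---------------------------------------------------------------------------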

// Apply JSON diff to clips
void Timeline::apply_json_to_clips(Json::Value change) {

    // Get key and type of change
    std::string change_type = change["type"].asString();
    std::string clip_id = "";
    Clip *existing_clip = NULL;

    // Find id of clip (if any)
    for (auto key_part : change["key"]) {
        // Get each change
        if (key_part.isObject()) {
            // Check for id
            if (!key_part["id"].isNull()) {
                // Set the id
                clip_id = key_part["id"].asString();

                // Find matching clip in timeline (if any)
                for (auto c : clips)
                {
                    if (c->Id() == clip_id) {
                        existing_clip = c;
                        break; // clip found, exit loop
                    }
                }
                break; // id found, exit loop
            }
        }
    }

    // Check for a more specific key (targeting this clip's effects)
    // For example: ["clips", {"id": "123"}, "effects", {"id": "432"}]
    if (existing_clip && change["key"].size() == 4 && change["key"][2] == "effects")
    {
        // This change is actually targeting a specific effect under a clip (and not the clip)
        Json::Value key_part = change["key"][3];

        if (key_part.isObject()) {
            // Check for id
            if (!key_part["id"].isNull())
            {
                // Set the id
                std::string effect_id = key_part["id"].asString();

                // Find matching effect in timeline (if any)
                std::list<EffectBase*> effect_list = existing_clip->Effects();
                for (auto e : effect_list)
                {
                    if (e->Id() == effect_id) {
                        // Apply the change to the effect directly
                        apply_json_to_effects(change, e);

                        // Calculate start and end frames that this impacts, and remove those frames from the cache
                        int64_t new_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
                        int64_t new_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
                        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);

                        return; // effect found, don't update clip
                    }
                }
            }
        }
    }

    // Calculate start and end frames that this impacts, and remove those frames from the cache
    if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
        int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
        int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
    }

    // Determine type of change operation
    if (change_type == "insert") {

        // Create clip
        Clip *clip = new Clip();

        // Keep track of allocated clip objects
        allocated_clips.insert(clip);

        // Set properties of clip from JSON
        clip->SetJsonValue(change["value"]);

        // Add clip to timeline
        AddClip(clip);

    } else if (change_type == "update") {

        // Update existing clip
        if (existing_clip) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Remove cache on clip's Reader (if found)
            if (existing_clip->Reader() && existing_clip->Reader()->GetCache())
                existing_clip->Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Update clip properties from JSON
            existing_clip->SetJsonValue(change["value"]);

            // Apply framemapper (or update existing framemapper)
            if (auto_map_clips) {
                apply_mapper_to_clip(existing_clip);
            }
        }

    } else if (change_type == "delete") {

        // Remove existing clip
        if (existing_clip) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_clip->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_clip->Position() + existing_clip->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Remove clip from timeline
            RemoveClip(existing_clip);
        }

    }

    // Re-Sort Clips (since they likely changed)
    sort_clips();
}

// Apply JSON diff to effects
void Timeline::apply_json_to_effects(Json::Value change) {

    // Get key and type of change
    std::string change_type = change["type"].asString();
    EffectBase *existing_effect = NULL;

    // Find id of an effect (if any)
    for (auto key_part : change["key"]) {

        if (key_part.isObject()) {
            // Check for id
            if (!key_part["id"].isNull())
            {
                // Set the id
                std::string effect_id = key_part["id"].asString();

                // Find matching effect in timeline (if any)
                for (auto e : effects)
                {
                    if (e->Id() == effect_id) {
                        existing_effect = e;
                        break; // effect found, exit loop
                    }
                }
                break; // id found, exit loop
            }
        }
    }

    // Now that we found the effect, apply the change to it
    if (existing_effect || change_type == "insert") {
        // Apply change to effect
        apply_json_to_effects(change, existing_effect);
    }
}

// Apply JSON diff to effects (if you already know which effect needs to be updated)
void Timeline::apply_json_to_effects(Json::Value change, EffectBase* existing_effect) {

    // Get key and type of change
    std::string change_type = change["type"].asString();

    // Calculate start and end frames that this impacts, and remove those frames from the cache
    if (!change["value"].isArray() && !change["value"]["position"].isNull()) {
        int64_t new_starting_frame = (change["value"]["position"].asDouble() * info.fps.ToDouble()) + 1;
        int64_t new_ending_frame = ((change["value"]["position"].asDouble() + change["value"]["end"].asDouble() - change["value"]["start"].asDouble()) * info.fps.ToDouble()) + 1;
        final_cache->Remove(new_starting_frame - 8, new_ending_frame + 8);
    }

    // Determine type of change operation
    if (change_type == "insert") {

        // Determine type of effect
        std::string effect_type = change["value"]["type"].asString();

        // Create Effect
        EffectBase *e = NULL;

        // Init the matching effect object
        if ( (e = EffectInfo().CreateEffect(effect_type)) ) {

            // Keep track of allocated effect objects
            allocated_effects.insert(e);

            // Load Json into Effect
            e->SetJsonValue(change["value"]);

            // Add Effect to Timeline
            AddEffect(e);
        }

    } else if (change_type == "update") {

        // Update existing effect
        if (existing_effect) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Update effect properties from JSON
            existing_effect->SetJsonValue(change["value"]);
        }

    } else if (change_type == "delete") {

        // Remove existing effect
        if (existing_effect) {

            // Calculate start and end frames that this impacts, and remove those frames from the cache
            int64_t old_starting_frame = (existing_effect->Position() * info.fps.ToDouble()) + 1;
            int64_t old_ending_frame = ((existing_effect->Position() + existing_effect->Duration()) * info.fps.ToDouble()) + 1;
            final_cache->Remove(old_starting_frame - 8, old_ending_frame + 8);

            // Remove effect from timeline
            RemoveEffect(existing_effect);
        }

    }

    // Re-Sort Effects (since they likely changed)
    sort_effects();
}

// Apply JSON diff to timeline properties
void Timeline::apply_json_to_timeline(Json::Value change) {
    bool cache_dirty = true;

    // Get key and type of change
    std::string change_type = change["type"].asString();
    std::string root_key = change["key"][(uint)0].asString();
    std::string sub_key = "";
    if (change["key"].size() >= 2)
        sub_key = change["key"][(uint)1].asString();

    // Determine type of change operation
    if (change_type == "insert" || change_type == "update") {

        // INSERT / UPDATE
        // Check for valid property
        if (root_key == "color")
            // Set color
            color.SetJsonValue(change["value"]);
        else if (root_key == "viewport_scale")
            // Set viewport scale
            viewport_scale.SetJsonValue(change["value"]);
        else if (root_key == "viewport_x")
            // Set viewport x offset
            viewport_x.SetJsonValue(change["value"]);
        else if (root_key == "viewport_y")
            // Set viewport y offset
            viewport_y.SetJsonValue(change["value"]);
        else if (root_key == "duration") {
            // Update duration of timeline
            info.duration = change["value"].asDouble();
            max_time = info.duration;

            // We don't want to clear cache for duration adjustments
            cache_dirty = false;
        }
        else if (root_key == "width") {
            // Set width
            info.width = change["value"].asInt();
            preview_width = info.width;
        }
        else if (root_key == "height") {
            // Set height
            info.height = change["value"].asInt();
            preview_height = info.height;
        }
        else if (root_key == "fps" && sub_key == "" && change["value"].isObject()) {
            // Set fps fraction
            if (!change["value"]["num"].isNull())
                info.fps.num = change["value"]["num"].asInt();
            if (!change["value"]["den"].isNull())
                info.fps.den = change["value"]["den"].asInt();
        }
        else if (root_key == "fps" && sub_key == "num")
            // Set fps.num
            info.fps.num = change["value"].asInt();
        else if (root_key == "fps" && sub_key == "den")
            // Set fps.den
            info.fps.den = change["value"].asInt();
        else if (root_key == "display_ratio" && sub_key == "" && change["value"].isObject()) {
            // Set display_ratio fraction
            if (!change["value"]["num"].isNull())
                info.display_ratio.num = change["value"]["num"].asInt();
            if (!change["value"]["den"].isNull())
                info.display_ratio.den = change["value"]["den"].asInt();
        }
        else if (root_key == "display_ratio" && sub_key == "num")
            // Set display_ratio.num
            info.display_ratio.num = change["value"].asInt();
        else if (root_key == "display_ratio" && sub_key == "den")
            // Set display_ratio.den
            info.display_ratio.den = change["value"].asInt();
        else if (root_key == "pixel_ratio" && sub_key == "" && change["value"].isObject()) {
            // Set pixel_ratio fraction
            if (!change["value"]["num"].isNull())
                info.pixel_ratio.num = change["value"]["num"].asInt();
            if (!change["value"]["den"].isNull())
                info.pixel_ratio.den = change["value"]["den"].asInt();
        }
        else if (root_key == "pixel_ratio" && sub_key == "num")
            // Set pixel_ratio.num
            info.pixel_ratio.num = change["value"].asInt();
        else if (root_key == "pixel_ratio" && sub_key == "den")
            // Set pixel_ratio.den
            info.pixel_ratio.den = change["value"].asInt();

        else if (root_key == "sample_rate")
            // Set sample rate
            info.sample_rate = change["value"].asInt();
        else if (root_key == "channels")
            // Set channels
            info.channels = change["value"].asInt();
        else if (root_key == "channel_layout")
            // Set channel layout
            info.channel_layout = (ChannelLayout) change["value"].asInt();
        else
            // Error parsing JSON (or missing keys)
            throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());

    } else if (change["type"].asString() == "delete") {

        // DELETE / RESET
        // Reset the following properties (since we can't delete them)
        if (root_key == "color") {
            color = Color();
            color.red = Keyframe(0.0);
            color.green = Keyframe(0.0);
            color.blue = Keyframe(0.0);
        }
        else if (root_key == "viewport_scale")
            viewport_scale = Keyframe(1.0);
        else if (root_key == "viewport_x")
            viewport_x = Keyframe(0.0);
        else if (root_key == "viewport_y")
            viewport_y = Keyframe(0.0);
        else
            // Error parsing JSON (or missing keys)
            throw InvalidJSONKey("JSON change key is invalid", change.toStyledString());

    }

    if (cache_dirty) {
        // Clear entire cache
        ClearAllCache();
    }
}

// Clear all caches
void Timeline::ClearAllCache(bool deep) {

    // Clear primary cache
    if (final_cache) {
        final_cache->Clear();
    }

    // Loop through all clips
    try {
        for (const auto clip : clips) {
            // Clear the cache on the clip's reader
            clip->Reader()->GetCache()->Clear();

            // Clear nested Reader (if deep clear requested)
            if (deep && clip->Reader()->Name() == "FrameMapper") {
                FrameMapper *nested_reader = static_cast<FrameMapper *>(clip->Reader());
                if (nested_reader->Reader() && nested_reader->Reader()->GetCache())
                    nested_reader->Reader()->GetCache()->Clear();
            }

            // Clear clip cache
            clip->GetCache()->Clear();
        }
    } catch (const ReaderClosed & e) {
        // ...
    }
}
1695 
1696 // Set Max Image Size (used for performance optimization). Convenience function for setting
1697 // Settings::Instance()->MAX_WIDTH and Settings::Instance()->MAX_HEIGHT.
1698 void Timeline::SetMaxSize(int width, int height) {
1699  // Maintain aspect ratio regardless of what size is passed in
1700  QSize display_ratio_size = QSize(info.width, info.height);
1701  QSize proposed_size = QSize(std::min(width, info.width), std::min(height, info.height));
1702 
 1703  // Scale QSize to fit within the proposed size (preserving aspect ratio)
1704  display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);
1705 
1706  // Update preview settings
1707  preview_width = display_ratio_size.width();
1708  preview_height = display_ratio_size.height();
1709 }
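Because the proposed size is clamped by std::min to the timeline's own dimensions and then scaled with Qt::KeepAspectRatio, SetMaxSize never upscales and always preserves the timeline's aspect ratio. A worked sketch with assumed numbers:

    // Hypothetical 16:9 timeline at 1920x1080.
    openshot::Timeline t(1920, 1080, openshot::Fraction(30, 1),
                         44100, 2, openshot::LAYOUT_STEREO);

    // The proposed size clamps to (1280, 1080); scaling 1920x1080 to
    // fit that box while keeping 16:9 yields a 1280x720 preview.
    t.SetMaxSize(1280, 1280);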