46Timeline::Timeline(
int width,
int height,
Fraction fps,
int sample_rate,
int channels,
ChannelLayout channel_layout) :
47 is_open(false), auto_map_clips(true), managed_cache(true),
path(
""),
93 info.channels, info.channel_layout) {};
97 is_open(false), auto_map_clips(true), managed_cache(true),
path(projectPath),
117 QFileInfo filePath(QString::fromStdString(path));
118 if (!filePath.exists()) {
119 throw InvalidFile(
"File could not be opened.", path);
125 if (!openshotPath.exists()) {
128 QDir openshotTransPath(openshotPath.filePath(
"transitions"));
129 if (!openshotTransPath.exists()) {
130 throw InvalidFile(
"PATH_OPENSHOT_INSTALL/transitions could not be found.", openshotTransPath.path().toStdString());
134 QString asset_name = filePath.baseName().left(30) +
"_assets";
135 QDir asset_folder(filePath.dir().filePath(asset_name));
136 if (!asset_folder.exists()) {
138 asset_folder.mkpath(
".");
142 QFile projectFile(QString::fromStdString(path));
143 projectFile.open(QFile::ReadOnly);
144 QString projectContents = QString::fromUtf8(projectFile.readAll());
147 if (convert_absolute_paths) {
151 QRegularExpression allPathsRegex(QStringLiteral(
"\"(image|path)\":.*?\"(.*?)\""));
152 std::vector<QRegularExpressionMatch> matchedPositions;
153 QRegularExpressionMatchIterator i = allPathsRegex.globalMatch(projectContents);
154 while (i.hasNext()) {
155 QRegularExpressionMatch match = i.next();
156 if (match.hasMatch()) {
158 matchedPositions.push_back(match);
163 std::vector<QRegularExpressionMatch>::reverse_iterator itr;
164 for (itr = matchedPositions.rbegin(); itr != matchedPositions.rend(); itr++) {
165 QRegularExpressionMatch match = *itr;
166 QString relativeKey = match.captured(1);
167 QString relativePath = match.captured(2);
168 QString absolutePath =
"";
171 if (relativePath.startsWith(
"@assets")) {
172 absolutePath = QFileInfo(asset_folder.absoluteFilePath(relativePath.replace(
"@assets",
"."))).canonicalFilePath();
173 }
else if (relativePath.startsWith(
"@transitions")) {
174 absolutePath = QFileInfo(openshotTransPath.absoluteFilePath(relativePath.replace(
"@transitions",
"."))).canonicalFilePath();
176 absolutePath = QFileInfo(filePath.absoluteDir().absoluteFilePath(relativePath)).canonicalFilePath();
180 if (!absolutePath.isEmpty()) {
181 projectContents.replace(match.capturedStart(0), match.capturedLength(0),
"\"" + relativeKey +
"\": \"" + absolutePath +
"\"");
185 matchedPositions.clear();
189 SetJson(projectContents.toStdString());
193 float calculated_duration = 0.0;
194 for (
auto clip : clips)
197 if (clip_last_frame > calculated_duration)
198 calculated_duration = clip_last_frame;
199 if (
clip->Reader() &&
clip->Reader()->info.has_audio)
201 if (
clip->Reader() &&
clip->Reader()->info.has_video)
228 std::set<FrameMapper *>::iterator it;
229 for (it = allocated_frame_mappers.begin(); it != allocated_frame_mappers.end(); ) {
236 it = allocated_frame_mappers.erase(it);
240 if (managed_cache && final_cache) {
250 auto iterator = tracked_objects.find(trackedObject->Id());
252 if (iterator != tracked_objects.end()){
254 iterator->second = trackedObject;
258 tracked_objects[trackedObject->Id()] = trackedObject;
268 auto iterator = tracked_objects.find(
id);
270 if (iterator != tracked_objects.end()){
272 std::shared_ptr<openshot::TrackedObjectBase> trackedObject = iterator->second;
273 return trackedObject;
285 std::list<std::string> trackedObjects_ids;
288 for (
auto const& it: tracked_objects){
290 trackedObjects_ids.push_back(it.first);
293 return trackedObjects_ids;
301 Json::Value trackedObjectJson;
304 auto iterator = tracked_objects.find(
id);
306 if (iterator != tracked_objects.end())
309 std::shared_ptr<TrackedObjectBBox> trackedObject = std::static_pointer_cast<TrackedObjectBBox>(iterator->second);
312 if (trackedObject->ExactlyContains(frame_number)){
313 BBox box = trackedObject->GetBox(frame_number);
314 float x1 = box.
cx - (box.
width/2);
316 float x2 = box.
cx + (box.
width/2);
318 float rotation = box.
angle;
320 trackedObjectJson[
"x1"] = x1;
321 trackedObjectJson[
"y1"] = y1;
322 trackedObjectJson[
"x2"] = x2;
323 trackedObjectJson[
"y2"] = y2;
324 trackedObjectJson[
"rotation"] = rotation;
327 BBox box = trackedObject->BoxVec.begin()->second;
328 float x1 = box.
cx - (box.
width/2);
330 float x2 = box.
cx + (box.
width/2);
332 float rotation = box.
angle;
334 trackedObjectJson[
"x1"] = x1;
335 trackedObjectJson[
"y1"] = y1;
336 trackedObjectJson[
"x2"] = x2;
337 trackedObjectJson[
"y2"] = y2;
338 trackedObjectJson[
"rotation"] = rotation;
344 trackedObjectJson[
"x1"] = 0;
345 trackedObjectJson[
"y1"] = 0;
346 trackedObjectJson[
"x2"] = 0;
347 trackedObjectJson[
"y2"] = 0;
348 trackedObjectJson[
"rotation"] = 0;
351 return trackedObjectJson.toStyledString();
363 if (
clip->Reader() &&
clip->Reader()->GetCache())
364 clip->Reader()->GetCache()->Clear();
369 apply_mapper_to_clip(
clip);
372 clips.push_back(
clip);
385 effects.push_back(effect);
394 effects.remove(effect);
407 for (
const auto&
clip : clips) {
419 for (
const auto& effect : effects) {
420 if (effect->Id() ==
id) {
430 for (
const auto&
clip : clips) {
431 const auto e =
clip->GetEffect(
id);
443 std::list<EffectBase*> timelineEffectsList;
446 for (
const auto&
clip : clips) {
449 std::list<EffectBase*> clipEffectsList =
clip->Effects();
452 timelineEffectsList.insert(timelineEffectsList.end(), clipEffectsList.begin(), clipEffectsList.end());
455 return timelineEffectsList;
460 double last_clip = 0.0;
461 double last_effect = 0.0;
463 if (!clips.empty()) {
464 const auto max_clip = std::max_element(
466 last_clip = (*max_clip)->Position() + (*max_clip)->Duration();
468 if (!effects.empty()) {
469 const auto max_effect = std::max_element(
471 last_effect = (*max_effect)->Position() + (*max_effect)->Duration();
473 return std::max(last_clip, last_effect);
480 return std::round(max_time * fps) + 1;
484void Timeline::apply_mapper_to_clip(
Clip* clip)
491 if (
clip->Reader()->Name() ==
"FrameMapper")
500 allocated_frame_mappers.insert(mapper);
509 clip->Reader(clip_reader);
519 for (
auto clip : clips)
522 apply_mapper_to_clip(
clip);
527double Timeline::calculate_time(int64_t number,
Fraction rate)
530 double raw_fps = rate.
ToFloat();
533 return double(number - 1) / raw_fps;
543 for (
auto effect : effects)
546 long effect_start_position = round(effect->Position() *
info.
fps.
ToDouble()) + 1;
547 long effect_end_position = round((effect->Position() + (effect->Duration())) *
info.
fps.
ToDouble()) + 1;
549 bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->Layer() == layer);
552 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Does effect intersect)",
"effect->Position()", effect->Position(),
"does_effect_intersect", does_effect_intersect,
"timeline_frame_number", timeline_frame_number,
"layer", layer);
555 if (does_effect_intersect)
559 long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;
562 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Process Effect)",
"effect_frame_number", effect_frame_number,
"does_effect_intersect", does_effect_intersect);
565 frame = effect->GetFrame(frame, effect_frame_number);
575std::shared_ptr<Frame> Timeline::GetOrCreateFrame(std::shared_ptr<Frame> background_frame,
Clip* clip, int64_t number,
openshot::TimelineInfoStruct* options)
577 std::shared_ptr<Frame> new_frame;
587 new_frame = std::shared_ptr<Frame>(
clip->
GetFrame(background_frame, number, options));
606void Timeline::add_layer(std::shared_ptr<Frame> new_frame,
Clip* source_clip, int64_t clip_frame_number,
bool is_top_clip,
float max_volume)
613 std::shared_ptr<Frame> source_frame;
614 source_frame = GetOrCreateFrame(new_frame, source_clip, clip_frame_number, options);
625 if (source_clip->
Reader()->info.has_audio) {
627 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Copy Audio)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number);
630 for (
int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
633 float previous_volume = source_clip->
volume.
GetValue(clip_frame_number - 1);
641 previous_volume = previous_volume / max_volume;
642 volume = volume / max_volume;
646 previous_volume = previous_volume * 0.77;
647 volume = volume * 0.77;
651 if (channel_filter != -1 && channel_filter != channel)
655 if (previous_volume == 0.0 && volume == 0.0)
659 if (channel_mapping == -1)
660 channel_mapping = channel;
663 if (!isEqual(previous_volume, 1.0) || !isEqual(volume, 1.0))
664 source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
670 if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount()){
676 new_frame->AddAudio(
false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), 1.0);
680 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number);
684 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Completed)",
"source_frame->number", source_frame->number,
"new_frame->GetImage()->width()", new_frame->GetImage()->width());
688void Timeline::update_open_clips(
Clip *clip,
bool does_clip_intersect)
690 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (before)",
"does_clip_intersect", does_clip_intersect,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size());
693 bool clip_found = open_clips.count(
clip);
695 if (clip_found && !does_clip_intersect)
698 open_clips.erase(
clip);
703 else if (!clip_found && does_clip_intersect)
718 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (after)",
"does_clip_intersect", does_clip_intersect,
"clip_found", clip_found,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size());
722void Timeline::sort_clips()
732void Timeline::sort_effects()
744 for (
auto clip : clips)
747 update_open_clips(
clip,
false);
755 final_cache->
Clear();
765bool Timeline::isEqual(
double a,
double b)
767 return fabs(a - b) < 0.000001;
775 if (requested_frame < 1)
779 std::shared_ptr<Frame> frame;
780 std::lock_guard<std::mutex> guard(get_frame_mutex);
781 frame = final_cache->
GetFrame(requested_frame);
796 throw ReaderClosed(
"The Timeline is closed. Call Open() before calling this method.");
799 frame = final_cache->
GetFrame(requested_frame);
811 std::shared_ptr<Frame> previous_frame = final_cache->
GetFrame(requested_frame - 1);
812 if (!previous_frame) {
822 std::vector<Clip*> nearby_clips;
823 nearby_clips = find_intersecting_clips(requested_frame, 1,
true);
833 new_frame->AddAudioSilence(samples_in_frame);
846 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Loop through clips)",
"requested_frame", requested_frame,
"clips.size()", clips.size(),
"nearby_clips.size()", nearby_clips.size());
849 for (
auto clip : nearby_clips)
854 bool does_clip_intersect = (clip_start_position <= requested_frame && clip_end_position >= requested_frame);
860 if (does_clip_intersect)
863 bool is_top_clip =
true;
864 float max_volume = 0.0;
865 for (
auto nearby_clip : nearby_clips)
867 long nearby_clip_start_position = round(nearby_clip->Position() *
info.
fps.
ToDouble()) + 1;
868 long nearby_clip_end_position = round((nearby_clip->Position() + nearby_clip->Duration()) *
info.
fps.
ToDouble()) + 1;
869 long nearby_clip_start_frame = (nearby_clip->Start() *
info.
fps.
ToDouble()) + 1;
870 long nearby_clip_frame_number = requested_frame - nearby_clip_start_position + nearby_clip_start_frame;
873 if (
clip->
Id() != nearby_clip->Id() &&
clip->
Layer() == nearby_clip->Layer() &&
874 nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame &&
875 nearby_clip_start_position > clip_start_position && is_top_clip ==
true) {
880 if (nearby_clip->Reader() && nearby_clip->Reader()->info.has_audio &&
881 nearby_clip->has_audio.GetInt(nearby_clip_frame_number) != 0 &&
882 nearby_clip_start_position <= requested_frame && nearby_clip_end_position >= requested_frame) {
883 max_volume += nearby_clip->volume.GetValue(nearby_clip_frame_number);
889 long clip_frame_number = requested_frame - clip_start_position + clip_start_frame;
895 add_layer(new_frame,
clip, clip_frame_number, is_top_clip, max_volume);
900 "requested_frame", requested_frame,
"does_clip_intersect",
901 does_clip_intersect);
910 new_frame->SetFrameNumber(requested_frame);
913 final_cache->
Add(new_frame);
916 return final_cache->
GetFrame(requested_frame);
922std::vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame,
int number_of_frames,
bool include)
925 std::vector<Clip*> matching_clips;
928 float min_requested_frame = requested_frame;
929 float max_requested_frame = requested_frame + (number_of_frames - 1);
935 for (
auto clip : clips)
941 bool does_clip_intersect =
942 (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
943 (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);
946 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::find_intersecting_clips (Is clip near or intersecting)",
"requested_frame", requested_frame,
"min_requested_frame", min_requested_frame,
"max_requested_frame", max_requested_frame,
"clip->Position()",
clip->
Position(),
"does_clip_intersect", does_clip_intersect);
949 update_open_clips(
clip, does_clip_intersect);
952 if (does_clip_intersect && include)
954 matching_clips.push_back(
clip);
956 else if (!does_clip_intersect && !include)
958 matching_clips.push_back(
clip);
963 return matching_clips;
969 if (managed_cache && final_cache) {
972 managed_cache =
false;
976 final_cache = new_cache;
991 root[
"type"] =
"Timeline";
999 root[
"clips"] = Json::Value(Json::arrayValue);
1002 for (
const auto existing_clip : clips)
1004 root[
"clips"].append(existing_clip->JsonValue());
1008 root[
"effects"] = Json::Value(Json::arrayValue);
1011 for (
const auto existing_effect: effects)
1013 root[
"effects"].append(existing_effect->JsonValue());
1033 catch (
const std::exception& e)
1036 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)");
1044 bool was_open = is_open;
1051 if (!root[
"path"].isNull())
1052 path = root[
"path"].asString();
1054 if (!root[
"clips"].isNull()) {
1059 for (
const Json::Value existing_clip : root[
"clips"]) {
1078 if (!root[
"effects"].isNull()) {
1083 for (
const Json::Value existing_effect :root[
"effects"]) {
1087 if (!existing_effect[
"type"].isNull()) {
1089 if ( (e =
EffectInfo().CreateEffect(existing_effect[
"type"].asString())) ) {
1101 if (!root[
"duration"].isNull()) {
1127 for (
const Json::Value change : root) {
1128 std::string change_key = change[
"key"][(uint)0].asString();
1131 if (change_key ==
"clips")
1133 apply_json_to_clips(change);
1135 else if (change_key ==
"effects")
1137 apply_json_to_effects(change);
1141 apply_json_to_timeline(change);
1145 catch (
const std::exception& e)
1148 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)");
1153void Timeline::apply_json_to_clips(Json::Value change) {
1156 std::string change_type = change[
"type"].asString();
1157 std::string clip_id =
"";
1158 Clip *existing_clip = NULL;
1161 for (
auto key_part : change[
"key"]) {
1163 if (key_part.isObject()) {
1165 if (!key_part[
"id"].isNull()) {
1167 clip_id = key_part[
"id"].asString();
1170 for (
auto c : clips)
1172 if (c->Id() == clip_id) {
1184 if (existing_clip && change[
"key"].size() == 4 && change[
"key"][2] ==
"effects")
1187 Json::Value key_part = change[
"key"][3];
1189 if (key_part.isObject()) {
1191 if (!key_part[
"id"].isNull())
1194 std::string effect_id = key_part[
"id"].asString();
1197 std::list<EffectBase*> effect_list = existing_clip->
Effects();
1198 for (
auto e : effect_list)
1200 if (e->Id() == effect_id) {
1202 apply_json_to_effects(change, e);
1207 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1217 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1218 int64_t new_starting_frame = (change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble()) + 1;
1219 int64_t new_ending_frame = ((change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble()) + 1;
1220 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1224 if (change_type ==
"insert") {
1232 apply_mapper_to_clip(
clip);
1234 }
else if (change_type ==
"update") {
1237 if (existing_clip) {
1242 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1245 if (existing_clip->
Reader() && existing_clip->
Reader()->GetCache())
1246 existing_clip->
Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);
1252 apply_mapper_to_clip(existing_clip);
1255 }
else if (change_type ==
"delete") {
1258 if (existing_clip) {
1263 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1274void Timeline::apply_json_to_effects(Json::Value change) {
1277 std::string change_type = change[
"type"].asString();
1281 for (
auto key_part : change[
"key"]) {
1283 if (key_part.isObject()) {
1285 if (!key_part[
"id"].isNull())
1288 std::string effect_id = key_part[
"id"].asString();
1291 for (
auto e : effects)
1293 if (e->Id() == effect_id) {
1294 existing_effect = e;
1304 if (existing_effect || change_type ==
"insert")
1306 apply_json_to_effects(change, existing_effect);
1310void Timeline::apply_json_to_effects(Json::Value change,
EffectBase* existing_effect) {
1313 std::string change_type = change[
"type"].asString();
1316 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1317 int64_t new_starting_frame = (change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble()) + 1;
1318 int64_t new_ending_frame = ((change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble()) + 1;
1319 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1323 if (change_type ==
"insert") {
1326 std::string effect_type = change[
"value"][
"type"].asString();
1341 }
else if (change_type ==
"update") {
1344 if (existing_effect) {
1349 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1355 }
else if (change_type ==
"delete") {
1358 if (existing_effect) {
1363 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1373void Timeline::apply_json_to_timeline(Json::Value change) {
1376 std::string change_type = change[
"type"].asString();
1377 std::string root_key = change[
"key"][(uint)0].asString();
1378 std::string sub_key =
"";
1379 if (change[
"key"].size() >= 2)
1380 sub_key = change[
"key"][(uint)1].asString();
1386 if (change_type ==
"insert" || change_type ==
"update") {
1390 if (root_key ==
"color")
1393 else if (root_key ==
"viewport_scale")
1396 else if (root_key ==
"viewport_x")
1399 else if (root_key ==
"viewport_y")
1402 else if (root_key ==
"duration") {
1407 else if (root_key ==
"width") {
1412 else if (root_key ==
"height") {
1417 else if (root_key ==
"fps" && sub_key ==
"" && change[
"value"].isObject()) {
1419 if (!change[
"value"][
"num"].isNull())
1420 info.
fps.
num = change[
"value"][
"num"].asInt();
1421 if (!change[
"value"][
"den"].isNull())
1422 info.
fps.
den = change[
"value"][
"den"].asInt();
1424 else if (root_key ==
"fps" && sub_key ==
"num")
1427 else if (root_key ==
"fps" && sub_key ==
"den")
1430 else if (root_key ==
"display_ratio" && sub_key ==
"" && change[
"value"].isObject()) {
1432 if (!change[
"value"][
"num"].isNull())
1434 if (!change[
"value"][
"den"].isNull())
1437 else if (root_key ==
"display_ratio" && sub_key ==
"num")
1440 else if (root_key ==
"display_ratio" && sub_key ==
"den")
1443 else if (root_key ==
"pixel_ratio" && sub_key ==
"" && change[
"value"].isObject()) {
1445 if (!change[
"value"][
"num"].isNull())
1447 if (!change[
"value"][
"den"].isNull())
1450 else if (root_key ==
"pixel_ratio" && sub_key ==
"num")
1453 else if (root_key ==
"pixel_ratio" && sub_key ==
"den")
1457 else if (root_key ==
"sample_rate")
1460 else if (root_key ==
"channels")
1463 else if (root_key ==
"channel_layout")
1468 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
1471 }
else if (change[
"type"].asString() ==
"delete") {
1475 if (root_key ==
"color") {
1481 else if (root_key ==
"viewport_scale")
1483 else if (root_key ==
"viewport_x")
1485 else if (root_key ==
"viewport_y")
1489 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
1502 final_cache->
Clear();
1505 for (
auto clip : clips)
1508 clip->Reader()->GetCache()->Clear();
1511 if (
clip->Reader()->Name() ==
"FrameMapper") {
1528 display_ratio_size.scale(proposed_size, Qt::KeepAspectRatio);
Header file for CacheBase class.
Header file for CacheDisk class.
Header file for CacheMemory class.
Header file for CrashHandler class.
Header file for all Exception classes.
Header file for the FrameMapper class.
#define OPEN_MP_NUM_PROCESSORS
Header file for Timeline class.
All cache managers in libopenshot are based on this CacheBase class.
virtual void Clear()=0
Clear the cache of all frames.
virtual void Remove(int64_t frame_number)=0
Remove a specific frame.
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
Get a frame from the cache.
virtual void Add(std::shared_ptr< openshot::Frame > frame)=0
Add a Frame to the cache.
void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
This class is a memory-based cache manager for Frame objects.
void Clear()
Clear the cache of all frames.
openshot::TimelineBase * ParentTimeline()
Get the associated Timeline pointer (if any)
float Start() const
Get start position (in seconds) of clip (trim start of video)
float Duration() const
Get the length of this clip (in seconds)
virtual std::shared_ptr< openshot::Frame > GetFrame(int64_t frame_number)=0
This method is required for all derived classes of ClipBase, and returns a new openshot::Frame object...
std::string Id() const
Get the Id of this clip object.
int Layer() const
Get layer of clip on timeline (lower number is covered by higher numbers)
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
float Position() const
Get position on timeline (in seconds)
This class represents a clip (used to arrange readers on the timeline)
openshot::VolumeMixType mixing
What strategy should be followed when mixing audio with other clips.
openshot::Keyframe channel_filter
A number representing an audio channel to filter (clears all other channels)
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
openshot::Keyframe has_audio
An optional override to determine if this clip has audio (-1=undefined, 0=no, 1=yes)
std::list< openshot::EffectBase * > Effects()
Return the list of effects on the timeline.
openshot::Keyframe volume
Curve representing the volume (0 to 1)
openshot::Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel)
void Reader(openshot::ReaderBase *new_reader)
Set the current reader.
This class represents a color (used on the timeline and clips)
std::string GetColorHex(int64_t frame_number)
Get the HEX value of a color at a specific frame.
openshot::Keyframe blue
Curve representing the blue value (0 - 255)
openshot::Keyframe red
Curve representing the red value (0 - 255)
openshot::Keyframe green
Curve representing the green value (0 - 255)
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
Json::Value JsonValue() const
Generate Json::Value for this object.
static CrashHandler * Instance()
This abstract class is the base class, used by all effects in libopenshot.
virtual void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
This class returns a listing of all effects supported by libopenshot.
EffectBase * CreateEffect(std::string effect_type)
Create an instance of an effect (factory style)
This class represents a fraction.
int num
Numerator for the fraction.
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
double ToDouble() const
Return this fraction as a double (i.e. 1/2 = 0.5)
void Reduce()
Reduce this fraction (i.e. 640/480 = 4/3)
Fraction Reciprocal() const
Return the reciprocal as a Fraction.
int den
Denominator for the fraction.
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
ReaderBase * Reader()
Get the current reader.
void Close() override
Close the openshot::FrameMapper and internal reader.
int GetSamplesPerFrame(openshot::Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Exception for files that can not be found or opened.
Exception for missing JSON Change key.
Exception for invalid JSON.
A Keyframe is a collection of Point instances, which is used to vary a number or property over time.
int GetInt(int64_t index) const
Get the rounded INT value at a specific index.
void SetJsonValue(const Json::Value root)
Load Json::Value into this object.
double GetValue(int64_t index) const
Get the value at a specific index.
Json::Value JsonValue() const
Generate Json::Value for this object.
int64_t GetCount() const
Get the number of points (i.e. # of points)
Exception for frames that are out of bounds.
This abstract class is the base class, used by all readers in libopenshot.
juce::CriticalSection getFrameCriticalSection
Section lock for multiple threads.
openshot::ReaderInfo info
Information about the current media file.
virtual void SetJsonValue(const Json::Value root)=0
Load Json::Value into this object.
virtual Json::Value JsonValue() const =0
Generate Json::Value for this object.
virtual openshot::CacheBase * GetCache()=0
Get the cache object used by this reader (note: not all readers use cache)
openshot::ClipBase * clip
Pointer to the parent clip instance (if any)
Exception when a reader is closed, and a frame is requested.
This class is contains settings used by libopenshot (and can be safely toggled at any point)
std::string PATH_OPENSHOT_INSTALL
static Settings * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
int preview_height
Optional preview height of timeline image. If your preview window is smaller than the timeline,...
int preview_width
Optional preview width of timeline image. If your preview window is smaller than the timeline,...
This class represents a timeline.
void AddTrackedObject(std::shared_ptr< openshot::TrackedObjectBase > trackedObject)
Add to the tracked_objects map a pointer to a tracked object (TrackedObjectBBox)
Json::Value JsonValue() const override
Generate Json::Value for this object.
openshot::Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
void ApplyJsonDiff(std::string value)
Apply a special formatted JSON object, which represents a change to the timeline (add,...
openshot::EffectBase * GetClipEffect(const std::string &id)
Look up a clip effect by ID.
void AddClip(openshot::Clip *clip)
Add an openshot::Clip to the timeline.
std::list< openshot::EffectBase * > ClipEffects() const
Return the list of effects on all clips.
std::list< std::string > GetTrackedObjectsIds() const
Return the ID's of the tracked objects as a list of strings.
std::string Json() const override
Generate JSON string of this object.
int64_t GetMaxFrame()
Look up the end frame number of the latest element on the timeline.
std::shared_ptr< openshot::Frame > GetFrame(int64_t requested_frame) override
void ClearAllCache()
Clear all cache for this timeline instance, and all clips, mappers, and readers under it.
void ApplyMapperToClips()
Apply the timeline's framerate and samplerate to all clips.
openshot::Color color
Background color of timeline canvas.
std::string GetTrackedObjectValues(std::string id, int64_t frame_number) const
Return the trackedObject's properties as a JSON string.
Timeline(int width, int height, openshot::Fraction fps, int sample_rate, int channels, openshot::ChannelLayout channel_layout)
Constructor for the timeline (which configures the default frame properties)
std::shared_ptr< openshot::TrackedObjectBase > GetTrackedObject(std::string id) const
Return tracked object pointer by its id.
openshot::EffectBase * GetEffect(const std::string &id)
Look up a timeline effect by ID.
void SetJsonValue(const Json::Value root) override
Load Json::Value into this object.
openshot::Clip * GetClip(const std::string &id)
Look up a single clip by ID.
void AddEffect(openshot::EffectBase *effect)
Add an effect to the timeline.
std::shared_ptr< openshot::Frame > apply_effects(std::shared_ptr< openshot::Frame > frame, int64_t timeline_frame_number, int layer)
Apply global/timeline effects to the source frame (if any)
void SetCache(openshot::CacheBase *new_cache)
openshot::Keyframe viewport_x
Curve representing the x coordinate for the viewport.
void RemoveClip(openshot::Clip *clip)
Remove an openshot::Clip from the timeline.
void SetMaxSize(int width, int height)
double GetMaxTime()
Look up the end time of the latest timeline element.
void RemoveEffect(openshot::EffectBase *effect)
Remove an effect from the timeline.
void Open() override
Open the reader (and start consuming resources)
void SetJson(const std::string value) override
Load JSON string into this object.
openshot::Keyframe viewport_y
Curve representing the y coordinate for the viewport.
void Close() override
Close the timeline reader (and any resources it was consuming)
void AppendDebugMethod(std::string method_name, std::string arg1_name="", float arg1_value=-1.0, std::string arg2_name="", float arg2_value=-1.0, std::string arg3_name="", float arg3_value=-1.0, std::string arg4_name="", float arg4_value=-1.0, std::string arg5_name="", float arg5_value=-1.0, std::string arg6_name="", float arg6_value=-1.0)
Append debug information.
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method)
This namespace is the default namespace for all code in the openshot library.
@ PULLDOWN_NONE
Do not apply pull-down techniques, just repeat or skip entire frames.
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround,...
@ VOLUME_MIX_AVERAGE
Evenly divide the overlapping clips volume keyframes, so that the sum does not exceed 100%.
@ VOLUME_MIX_REDUCE
Reduce volume by about 25%, and then mix (louder, but could cause pops if the sum exceeds 100%)
const Json::Value stringToJson(const std::string value)
This struct holds the information of a bounding-box.
float cy
y-coordinate of the bounding box center
float height
bounding box height
float cx
x-coordinate of the bounding box center
float width
bounding box width
float angle
bounding box rotation angle [degrees]
Like CompareClipEndFrames, but for effects.
This struct contains info about a media file, such as height, width, frames per second,...
float duration
Length of time (in seconds)
int width
The width of the video (in pixels)
int channels
The number of audio channels used in the audio stream.
openshot::Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
openshot::Fraction display_ratio
The ratio of width to height of the video stream (i.e. 640x480 has a ratio of 4/3)
int height
The height of the video (in pixels)
int64_t video_length
The number of frames in the video stream.
std::string acodec
The name of the audio codec used to encode / decode the video stream.
std::string vcodec
The name of the video codec used to encode / decode the video stream.
openshot::Fraction pixel_ratio
The pixel ratio of the video stream as a fraction (i.e. some pixels are not square)
openshot::ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
bool has_video
Determines if this file has a video stream.
bool has_audio
Determines if this file has an audio stream.
openshot::Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
This struct contains info about the current Timeline clip instance.
bool is_top_clip
Is clip on top (if overlapping another clip)