28 #include "../include/Timeline.h" 34 is_open(false), auto_map_clips(true)
76 apply_mapper_to_clip(clip);
79 clips.push_back(clip);
89 effects.push_back(effect);
98 effects.remove(effect);
108 void Timeline::apply_mapper_to_clip(
Clip* clip)
115 if (clip->
Reader()->Name() ==
"FrameMapper")
134 if (clip_offset != 0)
141 clip->
Reader(clip_reader);
151 list<Clip*>::iterator clip_itr;
152 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
155 Clip *clip = (*clip_itr);
158 apply_mapper_to_clip(clip);
163 double Timeline::calculate_time(int64_t number,
Fraction rate)
166 double raw_fps = rate.
ToFloat();
169 return double(number - 1) / raw_fps;
173 std::shared_ptr<Frame> Timeline::apply_effects(std::shared_ptr<Frame> frame, int64_t timeline_frame_number,
int layer)
176 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects",
"frame->number", frame->number,
"timeline_frame_number", timeline_frame_number,
"layer", layer,
"", -1,
"", -1,
"", -1);
179 list<EffectBase*>::iterator effect_itr;
180 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
189 bool does_effect_intersect = (effect_start_position <= timeline_frame_number && effect_end_position >= timeline_frame_number && effect->
Layer() == layer);
192 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Does effect intersect)",
"effect->Position()", effect->
Position(),
"does_effect_intersect", does_effect_intersect,
"timeline_frame_number", timeline_frame_number,
"layer", layer,
"", -1,
"", -1);
195 if (does_effect_intersect)
199 long effect_frame_number = timeline_frame_number - effect_start_position + effect_start_frame;
202 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::apply_effects (Process Effect)",
"effect_frame_number", effect_frame_number,
"does_effect_intersect", does_effect_intersect,
"", -1,
"", -1,
"", -1,
"", -1);
205 frame = effect->
GetFrame(frame, effect_frame_number);
215 std::shared_ptr<Frame> Timeline::GetOrCreateFrame(
Clip* clip, int64_t number)
217 std::shared_ptr<Frame> new_frame;
224 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetOrCreateFrame (from reader)",
"number", number,
"samples_in_frame", samples_in_frame,
"", -1,
"", -1,
"", -1,
"", -1);
230 #pragma omp critical (T_GetOtCreateFrame) 231 new_frame = std::shared_ptr<Frame>(clip->
GetFrame(number));
245 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetOrCreateFrame (create blank)",
"number", number,
"samples_in_frame", samples_in_frame,
"", -1,
"", -1,
"", -1,
"", -1);
249 #pragma omp critical (T_GetOtCreateFrame) 258 void Timeline::add_layer(std::shared_ptr<Frame> new_frame,
Clip* source_clip, int64_t clip_frame_number, int64_t timeline_frame_number,
bool is_top_clip)
261 std::shared_ptr<Frame> source_frame;
262 #pragma omp critical (T_addLayer) 263 source_frame = GetOrCreateFrame(source_clip, clip_frame_number);
270 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer",
"new_frame->number", new_frame->number,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1,
"", -1,
"", -1);
276 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Generate Waveform Image)",
"source_frame->number", source_frame->number,
"source_clip->Waveform()", source_clip->
Waveform(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
285 std::shared_ptr<QImage> source_image;
286 #pragma omp critical (T_addLayer) 288 source_frame->AddImage(std::shared_ptr<QImage>(source_image));
293 if (is_top_clip && source_frame)
294 source_frame = apply_effects(source_frame, timeline_frame_number, source_clip->
Layer());
297 std::shared_ptr<QImage> source_image;
300 if (source_clip->
Reader()->info.has_audio) {
303 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Copy Audio)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1);
305 if (source_frame->GetAudioChannelsCount() ==
info.
channels)
306 for (
int channel = 0; channel < source_frame->GetAudioChannelsCount(); channel++)
308 float initial_volume = 1.0f;
309 float previous_volume = source_clip->
volume.
GetValue(clip_frame_number - 1);
315 if (channel_filter != -1 && channel_filter != channel)
319 if (channel_mapping == -1)
320 channel_mapping = channel;
323 if (isEqual(previous_volume, volume))
324 initial_volume = volume;
327 if (!isEqual(previous_volume, volume))
328 source_frame->ApplyGainRamp(channel_mapping, 0, source_frame->GetAudioSamplesCount(), previous_volume, volume);
334 if (new_frame->GetAudioSamplesCount() != source_frame->GetAudioSamplesCount())
336 #pragma omp critical (T_addLayer)
341 #pragma omp critical (T_addLayer) 342 new_frame->AddAudio(
false, channel_mapping, 0, source_frame->GetAudioSamples(channel), source_frame->GetAudioSamplesCount(), initial_volume);
347 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (No Audio Copied - Wrong # of Channels)",
"source_clip->Reader()->info.has_audio", source_clip->
Reader()->info.has_audio,
"source_frame->GetAudioChannelsCount()", source_frame->GetAudioChannelsCount(),
"info.channels",
info.
channels,
"clip_frame_number", clip_frame_number,
"timeline_frame_number", timeline_frame_number,
"", -1);
352 if (!source_clip->
Waveform() && !source_clip->
Reader()->info.has_video)
357 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Get Source Image)",
"source_frame->number", source_frame->number,
"source_clip->Waveform()", source_clip->
Waveform(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
360 source_image = source_frame->GetImage();
365 float alpha = source_clip->
alpha.
GetValue(clip_frame_number);
368 unsigned char *pixels = (
unsigned char *) source_image->bits();
371 for (
int pixel = 0, byte_index=0; pixel < source_image->width() * source_image->height(); pixel++, byte_index+=4)
374 int A = pixels[byte_index + 3];
377 pixels[byte_index + 3] *= alpha;
381 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Set Alpha & Opacity)",
"alpha", alpha,
"source_frame->number", source_frame->number,
"clip_frame_number", clip_frame_number,
"", -1,
"", -1,
"", -1);
385 QSize source_size = source_image->size();
386 switch (source_clip->
scale)
393 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_FIT)",
"source_frame->number", source_frame->number,
"source_width", source_size.width(),
"source_height", source_size.height(),
"", -1,
"", -1,
"", -1);
401 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_STRETCH)",
"source_frame->number", source_frame->number,
"source_width", source_size.width(),
"source_height", source_size.height(),
"", -1,
"", -1,
"", -1);
405 QSize width_size(
max_width, round(
max_width / (
float(source_size.width()) /
float(source_size.height()))));
406 QSize height_size(round(
max_height / (
float(source_size.height()) /
float(source_size.width()))),
max_height);
410 source_size.scale(width_size.width(), width_size.height(), Qt::KeepAspectRatio);
412 source_size.scale(height_size.width(), height_size.height(), Qt::KeepAspectRatio);
415 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Scale: SCALE_CROP)",
"source_frame->number", source_frame->number,
"source_width", source_size.width(),
"source_height", source_size.height(),
"", -1,
"", -1,
"", -1);
426 float scaled_source_width = source_size.width() * sx;
427 float scaled_source_height = source_size.height() * sy;
432 x = (
max_width - scaled_source_width) / 2.0;
438 y = (
max_height - scaled_source_height) / 2.0;
441 x = (
max_width - scaled_source_width) / 2.0;
442 y = (
max_height - scaled_source_height) / 2.0;
446 y = (
max_height - scaled_source_height) / 2.0;
452 x = (
max_width - scaled_source_width) / 2.0;
462 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Gravity)",
"source_frame->number", source_frame->number,
"source_clip->gravity", source_clip->
gravity,
"info.width",
info.
width,
"scaled_source_width", scaled_source_width,
"info.height",
info.
height,
"scaled_source_height", scaled_source_height);
471 bool transformed =
false;
472 QTransform transform;
475 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Build QTransform - if needed)",
"source_frame->number", source_frame->number,
"x", x,
"y", y,
"r", r,
"sx", sx,
"sy", sy);
477 if (!isEqual(r, 0)) {
479 float origin_x = x + (scaled_source_width / 2.0);
480 float origin_y = y + (scaled_source_height / 2.0);
481 transform.translate(origin_x, origin_y);
483 transform.translate(-origin_x,-origin_y);
487 if (!isEqual(x, 0) || !isEqual(y, 0)) {
489 transform.translate(x, y);
494 float source_width_scale = (float(source_size.width()) /
float(source_image->width())) * sx;
495 float source_height_scale = (float(source_size.height()) /
float(source_image->height())) * sy;
497 if (!isEqual(source_width_scale, 1.0) || !isEqual(source_height_scale, 1.0)) {
498 transform.scale(source_width_scale, source_height_scale);
502 if (!isEqual(shear_x, 0) || !isEqual(shear_y, 0)) {
504 transform.shear(shear_x, shear_y);
509 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Prepare)",
"source_frame->number", source_frame->number,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1,
"", -1,
"", -1);
512 std::shared_ptr<QImage> new_image;
513 #pragma omp critical (T_addLayer) 514 new_image = new_frame->GetImage();
517 QPainter painter(new_image.get());
518 painter.setRenderHints(QPainter::Antialiasing | QPainter::SmoothPixmapTransform | QPainter::TextAntialiasing,
true);
522 painter.setTransform(transform);
525 painter.setCompositionMode(QPainter::CompositionMode_SourceOver);
526 painter.drawImage(0, 0, *source_image);
530 stringstream frame_number_str;
534 frame_number_str << clip_frame_number;
538 frame_number_str << timeline_frame_number;
542 frame_number_str << timeline_frame_number <<
" (" << clip_frame_number <<
")";
547 painter.setPen(QColor(
"#ffffff"));
548 painter.drawText(20, 20, QString(frame_number_str.str().c_str()));
554 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::add_layer (Transform: Composite Image Layer: Completed)",
"source_frame->number", source_frame->number,
"new_frame->GetImage()->width()", new_frame->GetImage()->width(),
"transformed", transformed,
"", -1,
"", -1,
"", -1);
558 void Timeline::update_open_clips(
Clip *clip,
bool does_clip_intersect)
560 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (before)",
"does_clip_intersect", does_clip_intersect,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size(),
"", -1,
"", -1,
"", -1);
563 bool clip_found = open_clips.count(clip);
565 if (clip_found && !does_clip_intersect)
568 open_clips.erase(clip);
573 else if (!clip_found && does_clip_intersect)
576 open_clips[clip] = clip;
583 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::update_open_clips (after)",
"does_clip_intersect", does_clip_intersect,
"clip_found", clip_found,
"closing_clips.size()", closing_clips.size(),
"open_clips.size()", open_clips.size(),
"", -1,
"", -1);
587 void Timeline::sort_clips()
590 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::SortClips",
"clips.size()", clips.size(),
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
597 void Timeline::sort_effects()
606 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::Close",
"", -1,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
609 list<Clip*>::iterator clip_itr;
610 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
613 Clip *clip = (*clip_itr);
616 update_open_clips(clip,
false);
623 final_cache->
Clear();
633 bool Timeline::isEqual(
double a,
double b)
635 return fabs(a - b) < 0.000001;
642 if (requested_frame < 1)
646 std::shared_ptr<Frame> frame;
647 #pragma omp critical (T_GetFrame) 648 frame = final_cache->
GetFrame(requested_frame);
651 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Cached frame found)",
"requested_frame", requested_frame,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
663 throw ReaderClosed(
"The Timeline is closed. Call Open() before calling this method.",
"");
666 #pragma omp critical (T_GetFrame) 667 frame = final_cache->
GetFrame(requested_frame);
670 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Cached frame found on 2nd look)",
"requested_frame", requested_frame,
"", -1,
"", -1,
"", -1,
"", -1,
"", -1);
681 vector<Clip*> nearby_clips;
682 #pragma omp critical (T_GetFrame) 683 nearby_clips = find_intersecting_clips(requested_frame, minimum_frames,
true);
687 omp_set_nested(
true);
690 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame",
"requested_frame", requested_frame,
"minimum_frames", minimum_frames,
"OPEN_MP_NUM_PROCESSORS",
OPEN_MP_NUM_PROCESSORS,
"", -1,
"", -1,
"", -1);
694 for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
697 for (
int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
700 Clip *clip = nearby_clips[clip_index];
704 bool does_clip_intersect = (clip_start_position <= frame_number && clip_end_position >= frame_number);
705 if (does_clip_intersect)
709 long clip_frame_number = frame_number - clip_start_position + clip_start_frame;
719 #pragma omp for ordered firstprivate(nearby_clips, requested_frame, minimum_frames) 720 for (int64_t frame_number = requested_frame; frame_number < requested_frame + minimum_frames; frame_number++)
723 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (processing frame)",
"frame_number", frame_number,
"omp_get_thread_num()", omp_get_thread_num(),
"", -1,
"", -1,
"", -1,
"", -1);
730 #pragma omp critical (T_GetFrame) 732 new_frame->AddAudioSilence(samples_in_frame);
738 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Adding solid color)",
"frame_number", frame_number,
"info.width",
info.
width,
"info.height",
info.
height,
"", -1,
"", -1,
"", -1);
746 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Loop through clips)",
"frame_number", frame_number,
"clips.size()", clips.size(),
"nearby_clips.size()", nearby_clips.size(),
"", -1,
"", -1,
"", -1);
749 for (
int clip_index = 0; clip_index < nearby_clips.size(); clip_index++)
752 Clip *clip = nearby_clips[clip_index];
756 bool does_clip_intersect = (clip_start_position <= frame_number && clip_end_position >= frame_number);
759 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Does clip intersect)",
"frame_number", frame_number,
"clip->Position()", clip->
Position(),
"clip->Duration()", clip->
Duration(),
"does_clip_intersect", does_clip_intersect,
"", -1,
"", -1);
762 if (does_clip_intersect)
765 bool is_top_clip =
true;
766 for (
int top_clip_index = 0; top_clip_index < nearby_clips.size(); top_clip_index++)
768 Clip *nearby_clip = nearby_clips[top_clip_index];
772 if (clip->
Id() != nearby_clip->
Id() && clip->
Layer() == nearby_clip->
Layer() &&
773 nearby_clip_start_position <= frame_number && nearby_clip_end_position >= frame_number &&
774 nearby_clip_start_position > clip_start_position) {
782 long clip_frame_number = frame_number - clip_start_position + clip_start_frame;
785 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Calculate clip's frame #)",
"clip->Position()", clip->
Position(),
"clip->Start()", clip->
Start(),
"info.fps.ToFloat()",
info.
fps.
ToFloat(),
"clip_frame_number", clip_frame_number,
"", -1,
"", -1);
788 add_layer(new_frame, clip, clip_frame_number, frame_number, is_top_clip);
792 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (clip does not intersect)",
"frame_number", frame_number,
"does_clip_intersect", does_clip_intersect,
"", -1,
"", -1,
"", -1,
"", -1);
797 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (Add frame to cache)",
"frame_number", frame_number,
"info.width",
info.
width,
"info.height",
info.
height,
"", -1,
"", -1,
"", -1);
800 #pragma omp critical (T_GetFrame) 802 new_frame->SetFrameNumber(frame_number);
805 final_cache->
Add(new_frame);
812 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::GetFrame (end parallel region)",
"requested_frame", requested_frame,
"omp_get_thread_num()", omp_get_thread_num(),
"", -1,
"", -1,
"", -1,
"", -1);
815 return final_cache->
GetFrame(requested_frame);
821 vector<Clip*> Timeline::find_intersecting_clips(int64_t requested_frame,
int number_of_frames,
bool include)
824 vector<Clip*> matching_clips;
827 float min_requested_frame = requested_frame;
828 float max_requested_frame = requested_frame + (number_of_frames - 1);
834 list<Clip*>::iterator clip_itr;
835 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
838 Clip *clip = (*clip_itr);
844 bool does_clip_intersect =
845 (clip_start_position <= min_requested_frame || clip_start_position <= max_requested_frame) &&
846 (clip_end_position >= min_requested_frame || clip_end_position >= max_requested_frame);
849 ZmqLogger::Instance()->
AppendDebugMethod(
"Timeline::find_intersecting_clips (Is clip near or intersecting)",
"requested_frame", requested_frame,
"min_requested_frame", min_requested_frame,
"max_requested_frame", max_requested_frame,
"clip->Position()", clip->
Position(),
"does_clip_intersect", does_clip_intersect,
"", -1);
852 #pragma omp critical (reader_lock) 853 update_open_clips(clip, does_clip_intersect);
856 if (does_clip_intersect && include)
858 matching_clips.push_back(clip);
860 else if (!does_clip_intersect && !include)
862 matching_clips.push_back(clip);
867 return matching_clips;
873 final_cache = new_cache;
888 root[
"type"] =
"Timeline";
895 root[
"clips"] = Json::Value(Json::arrayValue);
898 list<Clip*>::iterator clip_itr;
899 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
902 Clip *existing_clip = (*clip_itr);
903 root[
"clips"].append(existing_clip->
JsonValue());
907 root[
"effects"] = Json::Value(Json::arrayValue);
910 list<EffectBase*>::iterator effect_itr;
911 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
915 root[
"effects"].append(existing_effect->
JsonValue());
931 bool success = reader.parse( value, root );
934 throw InvalidJSON(
"JSON could not be parsed (or is invalid)",
"");
944 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
952 bool was_open = is_open;
958 if (!root[
"clips"].isNull()) {
963 for (
int x = 0; x < root[
"clips"].size(); x++) {
965 Json::Value existing_clip = root[
"clips"][x];
978 if (!root[
"effects"].isNull()) {
983 for (
int x = 0; x < root[
"effects"].size(); x++) {
985 Json::Value existing_effect = root[
"effects"][x];
990 if (!existing_effect[
"type"].isNull()) {
1003 if (!root[
"duration"].isNull()) {
1022 Json::Reader reader;
1023 bool success = reader.parse( value, root );
1024 if (!success || !root.isArray())
1026 throw InvalidJSON(
"JSON could not be parsed (or is invalid).",
"");
1031 for (
int x = 0; x < root.size(); x++) {
1033 Json::Value change = root[x];
1034 string root_key = change[
"key"][(uint)0].asString();
1037 if (root_key ==
"clips")
1039 apply_json_to_clips(change);
1041 else if (root_key ==
"effects")
1043 apply_json_to_effects(change);
1047 apply_json_to_timeline(change);
1054 throw InvalidJSON(
"JSON is invalid (missing keys or invalid data types)",
"");
1059 void Timeline::apply_json_to_clips(Json::Value change) {
1062 string change_type = change[
"type"].asString();
1063 string clip_id =
"";
1064 Clip *existing_clip = NULL;
1067 for (
int x = 0; x < change[
"key"].size(); x++) {
1069 Json::Value key_part = change[
"key"][x];
1071 if (key_part.isObject()) {
1073 if (!key_part[
"id"].isNull()) {
1075 clip_id = key_part[
"id"].asString();
1078 list<Clip*>::iterator clip_itr;
1079 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
1082 Clip *c = (*clip_itr);
1083 if (c->
Id() == clip_id) {
1095 if (existing_clip && change[
"key"].size() == 4 && change[
"key"][2] ==
"effects")
1098 Json::Value key_part = change[
"key"][3];
1100 if (key_part.isObject()) {
1102 if (!key_part[
"id"].isNull())
1105 string effect_id = key_part[
"id"].asString();
1108 list<EffectBase*> effect_list = existing_clip->
Effects();
1109 list<EffectBase*>::iterator effect_itr;
1110 for (effect_itr=effect_list.begin(); effect_itr != effect_list.end(); ++effect_itr)
1114 if (e->
Id() == effect_id) {
1116 apply_json_to_effects(change, e);
1121 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1131 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1132 int64_t new_starting_frame = (change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble()) + 1;
1133 int64_t new_ending_frame = ((change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble()) + 1;
1134 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1138 if (change_type ==
"insert") {
1145 }
else if (change_type ==
"update") {
1148 if (existing_clip) {
1153 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1156 if (existing_clip->
Reader() && existing_clip->
Reader()->GetCache())
1157 existing_clip->
Reader()->GetCache()->Remove(old_starting_frame - 8, old_ending_frame + 8);
1164 if (existing_clip->
Reader()) {
1165 existing_clip->
Reader()->SetMaxSize(0, 0);
1166 if (existing_clip->
Reader()->Name() ==
"FrameMapper") {
1168 if (nested_reader->
Reader())
1174 }
else if (change_type ==
"delete") {
1177 if (existing_clip) {
1182 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1193 void Timeline::apply_json_to_effects(Json::Value change) {
1196 string change_type = change[
"type"].asString();
1200 for (
int x = 0; x < change[
"key"].size(); x++) {
1202 Json::Value key_part = change[
"key"][x];
1204 if (key_part.isObject()) {
1206 if (!key_part[
"id"].isNull())
1209 string effect_id = key_part[
"id"].asString();
1212 list<EffectBase*>::iterator effect_itr;
1213 for (effect_itr=effects.begin(); effect_itr != effects.end(); ++effect_itr)
1217 if (e->
Id() == effect_id) {
1218 existing_effect =
e;
1228 if (existing_effect || change_type ==
"insert")
1230 apply_json_to_effects(change, existing_effect);
1234 void Timeline::apply_json_to_effects(Json::Value change,
EffectBase* existing_effect) {
1237 string change_type = change[
"type"].asString();
1240 if (!change[
"value"].isArray() && !change[
"value"][
"position"].isNull()) {
1241 int64_t new_starting_frame = (change[
"value"][
"position"].asDouble() *
info.
fps.
ToDouble()) + 1;
1242 int64_t new_ending_frame = ((change[
"value"][
"position"].asDouble() + change[
"value"][
"end"].asDouble() - change[
"value"][
"start"].asDouble()) *
info.
fps.
ToDouble()) + 1;
1243 final_cache->
Remove(new_starting_frame - 8, new_ending_frame + 8);
1247 if (change_type ==
"insert") {
1250 string effect_type = change[
"value"][
"type"].asString();
1264 }
else if (change_type ==
"update") {
1267 if (existing_effect) {
1272 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1278 }
else if (change_type ==
"delete") {
1281 if (existing_effect) {
1286 final_cache->
Remove(old_starting_frame - 8, old_ending_frame + 8);
1296 void Timeline::apply_json_to_timeline(Json::Value change) {
1299 string change_type = change[
"type"].asString();
1300 string root_key = change[
"key"][(uint)0].asString();
1301 string sub_key =
"";
1302 if (change[
"key"].size() >= 2)
1303 sub_key = change[
"key"][(uint)1].asString();
1306 final_cache->
Clear();
1309 if (change_type ==
"insert" || change_type ==
"update") {
1313 if (root_key ==
"color")
1316 else if (root_key ==
"viewport_scale")
1319 else if (root_key ==
"viewport_x")
1322 else if (root_key ==
"viewport_y")
1325 else if (root_key ==
"duration") {
1330 else if (root_key ==
"width")
1333 else if (root_key ==
"height")
1336 else if (root_key ==
"fps" && sub_key ==
"" && change[
"value"].isObject()) {
1338 if (!change[
"value"][
"num"].isNull())
1339 info.
fps.
num = change[
"value"][
"num"].asInt();
1340 if (!change[
"value"][
"den"].isNull())
1341 info.
fps.
den = change[
"value"][
"den"].asInt();
1343 else if (root_key ==
"fps" && sub_key ==
"num")
1346 else if (root_key ==
"fps" && sub_key ==
"den")
1349 else if (root_key ==
"sample_rate")
1352 else if (root_key ==
"channels")
1355 else if (root_key ==
"channel_layout")
1362 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
1365 }
else if (change[
"type"].asString() ==
"delete") {
1369 if (root_key ==
"color") {
1375 else if (root_key ==
"viewport_scale")
1377 else if (root_key ==
"viewport_x")
1379 else if (root_key ==
"viewport_y")
1383 throw InvalidJSONKey(
"JSON change key is invalid", change.toStyledString());
1396 final_cache->
Clear();
1399 list<Clip*>::iterator clip_itr;
1400 for (clip_itr=clips.begin(); clip_itr != clips.end(); ++clip_itr)
1403 Clip *clip = (*clip_itr);
1406 clip->
Reader()->GetCache()->Clear();
1409 if (clip->
Reader()->Name() ==
"FrameMapper") {
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
int max_height
The maximum image height needed by this clip (used for optimizations)
Display the timeline's frame number.
void Close()
Close the internal reader.
string Json()
Get and Set JSON methods.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
int num
Numerator for the fraction.
Keyframe scale_y
Curve representing the vertical scaling in percent (0 to 1)
ReaderBase * Reader()
Get the current reader.
CriticalSection getFrameCriticalSection
Section lock for multiple threads.
This abstract class is the base class, used by all effects in libopenshot.
EffectBase * CreateEffect(string effect_type)
Align clip to the right of its parent (middle aligned)
Keyframe green
Curve representing the green value (0 - 255)
Keyframe viewport_scale
Curve representing the scale of the viewport (0 to 100)
Align clip to the bottom right of its parent.
void SetCache(CacheBase *new_cache)
Set the cache object used by this reader.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
ChannelLayout channel_layout
The channel layout (mono, stereo, 5 point surround, etc...)
GravityType gravity
The gravity of a clip determines where it snaps to its parent.
Keyframe alpha
Curve representing the alpha value (0 - 255)
int width
The width of the video (in pixels)
Keyframe volume
Curve representing the volume (0 to 1)
virtual std::shared_ptr< Frame > GetFrame(std::shared_ptr< Frame > frame, int64_t frame_number)=0
This method is required for all derived classes of EffectBase, and returns a modified openshot::Frame...
float ToFloat()
Return this fraction as a float (i.e. 1/2 = 0.5)
Keyframe red
Curve representing the red value (0 - 255)
float duration
Length of time (in seconds)
string GetColorHex(int64_t frame_number)
Get the HEX value of a color at a specific frame.
Json::Value JsonValue()
Generate Json::JsonValue for this object.
void SetMaxBytesFromInfo(int64_t number_of_frames, int width, int height, int sample_rate, int channels)
Set maximum bytes to a different amount based on a ReaderInfo struct.
Scale the clip until both height and width fill the canvas (cropping the overlap) ...
Keyframe viewport_y
Curve representing the y coordinate for the viewport.
Fraction Reciprocal()
Return the reciprocal as a Fraction.
This abstract class is the base class, used by all readers in libopenshot.
int Layer()
Get layer of clip on timeline (lower number is covered by higher numbers)
#define OPEN_MP_NUM_PROCESSORS
Exception when a reader is closed, and a frame is requested.
bool has_video
Determines if this file has a video stream.
Do not display the frame number.
std::shared_ptr< Frame > GetFrame(int64_t requested_frame)
Get an openshot::Frame object for a specific frame number of this timeline.
Color wave_color
Curve representing the color of the audio wave form.
Align clip to the top right of its parent.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
Align clip to the bottom left of its parent.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Exception for missing JSON Change key.
Keyframe location_x
Curve representing the relative X position in percent based on the gravity (-1 to 1) ...
Keyframe location_y
Curve representing the relative Y position in percent based on the gravity (-1 to 1) ...
void SetMaxSize(int width, int height)
Set Max Image Size (used for performance optimization)
bool has_audio
Determines if this file has an audio stream.
This class represents a clip (used to arrange readers on the timeline)
void ChangeMapping(Fraction target_fps, PulldownType pulldown, int target_sample_rate, int target_channels, ChannelLayout target_channel_layout)
Change frame rate or audio mapping details.
Keyframe blue
Curve representing the blue value (0 - 255)
virtual std::shared_ptr< Frame > GetFrame(int64_t frame_number)=0
Get a frame from the cache.
Keyframe shear_x
Curve representing X shear angle in degrees (-45.0=left, 45.0=right)
bool Waveform()
Waveform property.
void SetMaxSize(int width, int height)
Set Max Image Size (used for performance optimization)
int64_t video_length
The number of frames in the video stream.
ScaleType scale
The scale determines how a clip should be resized to fit its parent.
int height
The height of the video (in pixels)
Align clip to the bottom center of its parent.
virtual void Remove(int64_t frame_number)=0
Remove a specific frame.
string Id()
Get basic properties.
Keyframe channel_filter
Audio channel filter and mappings.
void ClearAllCache()
Clear all cache for this timeline instance, and all clips, mappers, and readers under it...
float Position()
Get position on timeline (in seconds)
static CrashHandler * Instance()
void ApplyMapperToClips()
Apply the timeline's framerate and samplerate to all clips.
void Reader(ReaderBase *new_reader)
Set the current reader.
list< EffectBase * > Effects()
Return the list of effects on the timeline.
void AppendDebugMethod(string method_name, string arg1_name, float arg1_value, string arg2_name, float arg2_value, string arg3_name, float arg3_value, string arg4_name, float arg4_value, string arg5_name, float arg5_value, string arg6_name, float arg6_value)
Append debug information.
FrameDisplayType display
The format to display the frame number (if any)
std::shared_ptr< Frame > GetFrame(int64_t requested_frame)
This class represents a fraction.
All cache managers in libopenshot are based on this CacheBase class.
Keyframe channel_mapping
A number representing an audio channel to output (only works when filtering a channel) ...
virtual void Add(std::shared_ptr< Frame > frame)=0
Add a Frame to the cache.
ChannelLayout
This enumeration determines the audio channel layout (such as stereo, mono, 5 point surround...
Align clip to the left of its parent (middle aligned)
void AddClip(Clip *clip)
Add an openshot::Clip to the timeline.
virtual Json::Value JsonValue()=0
Generate Json::JsonValue for this object.
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
void Close()
Close the timeline reader (and any resources it was consuming)
int GetInt(int64_t index)
Get the rounded INT value at a specific index.
Keyframe rotation
Curve representing the rotation (0 to 360)
virtual void SetJsonValue(Json::Value root)=0
Load Json::JsonValue into this object.
Scale the clip until both height and width fill the canvas (distort to fit)
Display the clip's internal frame number.
vector< Point > Points
Vector of all Points.
ReaderInfo info
Information about the current media file.
Keyframe shear_y
Curve representing Y shear angle in degrees (-45.0=down, 45.0=up)
Fraction fps
Frames per second, as a fraction (i.e. 24/1 = 24 fps)
Fraction video_timebase
The video timebase determines how long each frame stays on the screen.
Exception for frames that are out of bounds.
This class creates a mapping between 2 different frame rates, applying a specific pull-down technique...
void Open()
Open the internal reader.
This class represents a color (used on the timeline and clips)
static ZmqLogger * Instance()
Create or get an instance of this logger singleton (invoke the class with this method) ...
Align clip to the center of its parent (middle aligned)
void Open()
Open the reader (and start consuming resources)
void ApplyJsonDiff(string value)
Apply a special formatted JSON object, which represents a change to the timeline (add, update, delete) This is primarily designed to keep the timeline (and its child objects... such as clips and effects) in sync with another application... such as OpenShot Video Editor (http://www.openshot.org).
double GetValue(int64_t index)
Get the value at a specific index.
Display both the clip's and timeline's frame number.
This namespace is the default namespace for all code in the openshot library.
Do not apply pull-down techniques, just repeat or skip entire frames.
virtual void Clear()=0
Clear the cache of all frames.
void RemoveClip(Clip *clip)
Remove an openshot::Clip from the timeline.
void RemoveEffect(EffectBase *effect)
Remove an effect from the timeline.
Exception for invalid JSON.
Keyframe alpha
Curve representing the alpha (1 to 0)
virtual CacheBase * GetCache()=0
Get the cache object used by this reader (note: not all readers use cache)
Keyframe viewport_x
Curve representing the x coordinate for the viewport.
void SetJsonValue(Json::Value root)
Load Json::JsonValue into this object.
Keyframe scale_x
Curve representing the horizontal scaling in percent (0 to 1)
Color color
Background color of timeline canvas.
Timeline(int width, int height, Fraction fps, int sample_rate, int channels, ChannelLayout channel_layout)
Default Constructor for the timeline (which sets the canvas width and height and FPS) ...
This class returns a listing of all effects supported by libopenshot.
Align clip to the top center of its parent.
void SetJson(string value)
Load JSON string into this object.
int den
Denominator for the fraction.
int channels
The number of audio channels used in the audio stream.
A Keyframe is a collection of Point instances, which is used to vary a number or property over time...
Scale the clip until either height or width fills the canvas (with no cropping)
void AddEffect(EffectBase *effect)
Add an effect to the timeline.
int max_width
The maximum image width needed by this clip (used for optimizations)
int GetSamplesPerFrame(Fraction fps, int sample_rate, int channels)
Calculate the # of samples per video frame (for the current frame number)
Json::Value JsonValue()
Generate Json::JsonValue for this object.
float Duration()
Get the length of this clip (in seconds)
This class is a memory-based cache manager for Frame objects.
float Start()
Get start position (in seconds) of clip (trim start of video)
double ToDouble()
Return this fraction as a double (i.e. 1/2 = 0.5)
int sample_rate
The number of audio samples per second (44100 is a common sample rate)
Exception when too many seek attempts happen.
void SetTimelineFrameOffset(int64_t offset)