Mirror of https://gitlab.com/mbunkus/mkvtoolnix.git (synced 2024-12-24 11:54:01 +00:00)
Implemented appending chapters.
This commit is contained in:
parent 03184bb462
commit 92eb6cad48
@@ -589,7 +589,7 @@ move_chapters_by_edition(KaxChapters &dst,

for (i = 0; i < src.ListSize(); i++) {
KaxEditionEntry *ee_dst;
KaxEditionUID *euid_src, *euid_dst;
KaxEditionUID *euid_src;
EbmlMaster *m;

m = static_cast<EbmlMaster *>(src[i]);
@@ -597,16 +597,9 @@ move_chapters_by_edition(KaxChapters &dst,
// Find an edition to which these atoms will be added.
ee_dst = NULL;
euid_src = FINDFIRST(m, KaxEditionUID);
if (euid_src != NULL) {
for (k = 0; k < dst.ListSize(); k++) {
euid_dst = FINDFIRST(static_cast<EbmlMaster *>(dst[k]),
KaxEditionUID);
if ((euid_dst != NULL) && (uint32(*euid_src) == uint32(*euid_dst))) {
ee_dst = static_cast<KaxEditionEntry *>(dst[k]);
break;
}
}
}
if (euid_src != NULL)
ee_dst = find_edition_with_uid(dst, uint32(*euid_src));

// No edition with the same UID found as the one we want to handle?
// Then simply move the complete edition over.
if (ee_dst == NULL)
@@ -614,10 +607,68 @@ move_chapters_by_edition(KaxChapters &dst,
else {
// Move all atoms from the old edition to the new one.
for (k = 0; k < m->ListSize(); k++)
if (is_id((*m)[k], KaxChapterAtom))
ee_dst->PushElement(*(*m)[k]);
else
delete (*m)[k];
m->RemoveAll();
delete m;
}
}
src.RemoveAll();
}
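The UID-matching loop removed above is factored out into find_edition_with_uid(), whose definition is not part of these hunks. A minimal sketch consistent with the removed code could look like the following (the exact signature and the uint64 comparison are assumptions):

KaxEditionEntry *
find_edition_with_uid(KaxChapters &chapters,
                      uint64_t uid) {
  // Hypothetical sketch, not part of the commit: return the edition
  // entry whose KaxEditionUID matches uid, or NULL if there is none.
  int i;

  for (i = 0; i < chapters.ListSize(); i++) {
    KaxEditionEntry *ee = static_cast<KaxEditionEntry *>(chapters[i]);
    KaxEditionUID *euid = FINDFIRST(ee, KaxEditionUID);
    if ((euid != NULL) && (uint64(*euid) == uid))
      return ee;
  }

  return NULL;
}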

/** \brief Adjust all start and end timecodes by an offset
*
* All start and end timecodes are adjusted by an offset. This is done
* recursively.
*
* \param master A master containing the elements to adjust. This can be
* a KaxChapters, KaxEditionEntry or KaxChapterAtom object.
* \param offset The offset to add to each timecode. Can be negative. If
* the resulting timecode would be smaller than zero then it will be set
* to zero.
*/
void
adjust_chapter_timecodes(EbmlMaster &master,
int64_t offset) {
int i;

for (i = 0; i < master.ListSize(); i++) {
KaxChapterAtom *atom;
KaxChapterTimeStart *start;
KaxChapterTimeEnd *end;
int64_t new_value;

if (!is_id(master[i], KaxChapterAtom))
continue;

atom = static_cast<KaxChapterAtom *>(master[i]);
start = FINDFIRST(atom, KaxChapterTimeStart);
if (start != NULL) {
new_value = uint64(*start);
new_value += offset;
if (new_value < 0)
new_value = 0;
*static_cast<EbmlUInteger *>(start) = new_value;
}

end = FINDFIRST(atom, KaxChapterTimeEnd);
if (end != NULL) {
new_value = uint64(*end);
new_value += offset;
if (new_value < 0)
new_value = 0;
*static_cast<EbmlUInteger *>(end) = new_value;
}
}

for (i = 0; i < master.ListSize(); i++) {
EbmlMaster *m;

m = dynamic_cast<EbmlMaster *>(master[i]);
if (m != NULL)
adjust_chapter_timecodes(*m, offset);
}
}
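A minimal usage sketch of the new helper (illustration only, not part of the commit; chapters is assumed to be a pointer to an already populated KaxChapters):

  // Hypothetical example: move every chapter start and end two seconds
  // (2 * 10^9 ns) later. With a negative offset, timecodes that would
  // drop below zero are clamped to zero by the function itself.
  adjust_chapter_timecodes(*chapters, 2000000000ll);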
@@ -23,6 +23,10 @@
#include "common.h"
#include "mm_io.h"

namespace libebml {
class EbmlMaster;
};

namespace libmatroska {
class KaxChapters;
class KaxTags;
@@ -95,6 +99,6 @@ KaxChapterAtom *MTX_DLL_API find_chapter_with_uid(KaxChapters &chapters,
uint64_t uid);

void MTX_DLL_API move_chapters_by_edition(KaxChapters &dst, KaxChapters &src);

void MTX_DLL_API adjust_chapter_timecodes(EbmlMaster &master, int64_t offset);
#endif // __CHAPTERS_H
@@ -1864,8 +1864,11 @@ kax_reader_c::read(generic_packetizer_c *,

cluster_tc = uint64(*ctc);
cluster->InitTimecode(cluster_tc, tc_scale);
if (first_timecode == -1)
if (first_timecode == -1) {
first_timecode = cluster_tc * tc_scale;
if ((chapters != NULL) && (first_timecode > 0))
adjust_chapter_timecodes(*chapters, -first_timecode);
}

for (bgidx = 0; bgidx < cluster->ListSize(); bgidx++) {
if (!(EbmlId(*(*cluster)[bgidx]) ==
@@ -49,7 +49,7 @@ cluster_helper_c::cluster_helper_c() {
num_clusters = 0;
clusters = NULL;
cluster_content_size = 0;
max_timecode = 0;
max_timecode_and_duration = 0;
last_cluster_tc = 0;
num_cue_elements = 0;
header_overhead = -1;
@@ -102,7 +102,7 @@ void
cluster_helper_c::add_packet(packet_t *packet) {
ch_contents_t *c;
packet_t *p;
int64_t timecode, old_max_timecode, additional_size;
int64_t timecode, additional_size;
int i;
bool split;

@@ -181,12 +181,6 @@ cluster_helper_c::add_packet(packet_t *packet) {
if (split) {
render();

old_max_timecode = max_timecode;
if ((packet->unmodified_assigned_timecode +
packet->unmodified_duration) >
max_timecode)
max_timecode = packet->unmodified_assigned_timecode +
packet->unmodified_duration;
num_cue_elements = 0;

mxinfo("\n");
@@ -198,16 +192,20 @@ cluster_helper_c::add_packet(packet_t *packet) {

bytes_in_file = 0;
first_timecode_in_file = -1;
max_timecode = old_max_timecode;

if (no_linking) {
timecode_offset = -1;
timecode_offset = packet->assigned_timecode;
first_timecode = 0;
} else
first_timecode = -1;
}
}

if ((packet->unmodified_assigned_timecode + packet->unmodified_duration) >
max_timecode_and_duration)
max_timecode_and_duration = packet->unmodified_assigned_timecode +
packet->unmodified_duration;

packet->packet_num = packet_num;
packet_num++;

@@ -219,11 +217,6 @@ cluster_helper_c::add_packet(packet_t *packet) {
c->num_packets++;
cluster_content_size += packet->length;

if ((packet->unmodified_assigned_timecode + packet->unmodified_duration) >
max_timecode)
max_timecode = packet->unmodified_assigned_timecode +
packet->unmodified_duration;

walk_clusters();

// Render the cluster if it is full (according to my many criteria).
@@ -433,8 +426,6 @@ cluster_helper_c::render_cluster(ch_contents_t *clstr) {

if (first_timecode == -1)
first_timecode = pack->assigned_timecode;
if (timecode_offset == -1)
timecode_offset = pack->assigned_timecode;
if (i == 0)
static_cast<kax_cluster_c *>
(cluster)->set_min_timecode(pack->assigned_timecode - timecode_offset);
@@ -760,8 +751,8 @@ cluster_helper_c::free_ref(int64_t ref_timecode,
}

int64_t
cluster_helper_c::get_max_timecode() {
return max_timecode - timecode_offset;
cluster_helper_c::get_duration() {
return max_timecode_and_duration - timecode_offset;
}
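To put the renamed accessor into numbers (hypothetical values, not from the commit): with timecode_offset == 0 and a final packet whose assigned timecode is 9500 and whose duration is 500, max_timecode_and_duration ends up as 9500 + 500 = 10000, which is exactly what get_duration() returns and what the callers shown further down feed into KaxDuration.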

int64_t
@@ -49,7 +49,8 @@ class cluster_helper_c {
private:
ch_contents_t **clusters;
int num_clusters, cluster_content_size;
int64_t max_timecode, last_cluster_tc, num_cue_elements, header_overhead;
int64_t max_timecode_and_duration;
int64_t last_cluster_tc, num_cue_elements, header_overhead;
int64_t packet_num, timecode_offset, *last_packets, first_timecode;
int64_t bytes_in_file, first_timecode_in_file;
mm_io_c *out;
@@ -69,7 +70,7 @@ public:
int free_ref(int64_t ref_timecode, generic_packetizer_c *source);
int free_clusters();
int get_cluster_content_size();
int64_t get_max_timecode();
int64_t get_duration();
int64_t get_first_timecode();
int64_t get_timecode_offset();
@@ -206,8 +206,7 @@ sighandler(int signum) {
// as the file's duration.
out->save_pos(kax_duration->GetElementPosition());
*(static_cast<EbmlFloat *>(kax_duration)) =
irnd((double)(cluster_helper->get_max_timecode() -
cluster_helper->get_first_timecode()) /
irnd((double)cluster_helper->get_duration() /
(double)((int64_t)timecode_scale));
kax_duration->Render(*out);
out->restore_pos();
@@ -1339,14 +1338,12 @@ finish_file(bool last_file) {
// Now re-render the kax_duration and fill in the biggest timecode
// as the file's duration.
out->save_pos(kax_duration->GetElementPosition());
mxverb(3, "mkvmerge: kax_duration: gmt %lld tcs %f du %lld\n",
cluster_helper->get_max_timecode(), timecode_scale,
irnd((double)(cluster_helper->get_max_timecode() -
cluster_helper->get_first_timecode()) /
mxverb(3, "mkvmerge: kax_duration: gdur %lld tcs %f du %lld\n",
cluster_helper->get_duration(), timecode_scale,
irnd((double)cluster_helper->get_duration() /
(double)((int64_t)timecode_scale)));
*(static_cast<EbmlFloat *>(kax_duration)) =
irnd((double)(cluster_helper->get_max_timecode() -
cluster_helper->get_first_timecode()) /
irnd((double)cluster_helper->get_duration() /
(double)((int64_t)timecode_scale));
kax_duration->Render(*out);

@@ -1390,7 +1387,7 @@ finish_file(bool last_file) {
else
offset = 0;
start = cluster_helper->get_first_timecode() + offset;
end = cluster_helper->get_max_timecode() + offset;
end = start + cluster_helper->get_duration();

chapters_here = copy_chapters(kax_chapters);
if (splitting)
@@ -1498,15 +1495,30 @@ void
append_track(packetizer_t &ptzr,
const append_spec_t &amap) {
vector<generic_packetizer_c *>::const_iterator gptzr;
filelist_t &src_file = files[amap.src_file_id];
filelist_t &dst_file = files[amap.dst_file_id];

foreach(gptzr, files[amap.src_file_id].reader->reader_packetizers)
foreach(gptzr, src_file.reader->reader_packetizers)
if (amap.src_track_id == (*gptzr)->ti->id)
break;
if (gptzr == files[amap.src_file_id].reader->reader_packetizers.end())
if (gptzr == src_file.reader->reader_packetizers.end())
die("Could not find gptzr when appending. %s\n", BUGMSG);

if ((*gptzr)->get_track_type() == track_subtitle)
files[ptzr.file].reader->read_all();
if (((*gptzr)->get_track_type() == track_subtitle) ||
(src_file.reader->chapters != NULL))
dst_file.reader->read_all();

// Append some more chapters and adjust their timecodes by the highest
// timecode seen in the previous file.
if (src_file.reader->chapters != NULL) {
if (kax_chapters == NULL)
kax_chapters = new KaxChapters;
adjust_chapter_timecodes(*src_file.reader->chapters,
dst_file.reader->max_timecode_seen);
move_chapters_by_edition(*kax_chapters, *src_file.reader->chapters);
delete src_file.reader->chapters;
src_file.reader->chapters = NULL;
}
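A hypothetical timeline makes the two adjustments concrete: suppose the appended file's chapters sit at 0 s and 300 s after its reader has normalised them (the kax_reader_c hunk above subtracts first_timecode for exactly that), and the previous file's reader reports a max_timecode_seen of 600 s. adjust_chapter_timecodes() then shifts those chapters to 600 s and 900 s before move_chapters_by_edition() folds them into kax_chapters, so they line up with where the appended content begins in the merged output.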

mxinfo("Appending track %lld from file no. %lld ('%s') to track %lld from "
"file no. %lld ('%s').\n",
@@ -1514,9 +1526,9 @@ append_track(packetizer_t &ptzr,
ptzr.packetizer->ti->id, amap.dst_file_id,
ptzr.packetizer->ti->fname);

if (display_reader == files[amap.dst_file_id].reader) {
if (display_reader == dst_file.reader) {
display_files_done++;
display_reader = files[amap.src_file_id].reader;
display_reader = src_file.reader;
}

(*gptzr)->connect(ptzr.packetizer);
@@ -11,12 +11,12 @@ T_010realvideo_4:0d25e4137c50066f43fd12da6a4871af:passed:20040825-175700
T_011srt:d7ac4923916c695a9b47425a90e39a0b:passed:20040825-175700
T_012ssa:9ecbc6bdfa5dec6495f99c7a97342844:passed:20040825-175700
T_013vobsubs:8983288ea21b811fbb85d2ea272ccfe5:passed:20040825-175700
T_014splitting_by_size:feeb1aba481b5a51d7142599359f2eac-5c639e8a08cf526d051fdf64f856c0d2:passed:20040825-175700
T_015splitting_by_time:d0e769a906ec4eacc9c9af929add6474-c32e2d8c31e21e71f68875f2d9735565:passed:20040825-175700
T_014splitting_by_size:feeb1aba481b5a51d7142599359f2eac-5c639e8a08cf526d051fdf64f856c0d2:failed:20040825-175700
T_015splitting_by_time:d0e769a906ec4eacc9c9af929add6474-c32e2d8c31e21e71f68875f2d9735565:failed:20040825-175700
T_016cuesheet:61b3a09c06ba2a8c7018d045d80f9c30:passed:20040825-175700
T_017chapters:562b2281edfa5acca556c3f77f60a291-4b54a7add7783cb99d83dd476a50e135:passed:20040825-175700
T_018attachments:bac27359516bab4984df3021aa295213-7e8e1f17615f157db0e98fba9ad88bad:passed:20040825-175700
T_019attachments2:8d9ba46c4edbf3daf50175f9de2f87b4-94349d68c9b7d493ec0d5a49f7291e40-8d9ba46c4edbf3daf50175f9de2f87b4-5c639e8a08cf526d051fdf64f856c0d2:passed:20040825-175700
T_019attachments2:8d9ba46c4edbf3daf50175f9de2f87b4-94349d68c9b7d493ec0d5a49f7291e40-8d9ba46c4edbf3daf50175f9de2f87b4-5c639e8a08cf526d051fdf64f856c0d2:failed:20040825-175700
T_020languages:a2ade9796f89718812d0ef9926922c38:passed:20040825-234208
T_021aspect_ratio:f6e8aa4cfd776d99ff824f21d4e3c640-990a5f94678b5c8886dc8c3a5c6a22dd:passed:20040825-234244
T_022display_dimensions:108880396ffe5244465a3d25e8a57f93:passed:20040825-234339