From a5f837fb44ac5d3c2f29b11829bb2805021cb76c Mon Sep 17 00:00:00 2001
From: Moritz Bunkus
Date: Sun, 6 Mar 2005 14:28:26 +0000
Subject: [PATCH] Concatenating + splitting at the same time is now possible.
 Track entries are only created once and not for each new file.

---
 ChangeLog                          |  6 +++
 src/merge/output_control.cpp       | 59 ++++++++++++++++--------------
 src/merge/output_control.h         |  2 +-
 src/merge/pr_generic.cpp           |  4 +-
 src/merge/pr_generic.h             |  3 --
 tests/results.txt                  |  1 +
 tests/test-208cat_and_splitting.rb | 25 +++++++++++++
 7 files changed, 66 insertions(+), 34 deletions(-)
 create mode 100644 tests/test-208cat_and_splitting.rb

diff --git a/ChangeLog b/ChangeLog
index 2935cdfea..0b1d0172f 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,9 @@
+2005-03-06  Moritz Bunkus
+
+	* mkvmerge: bug fix: Appending + splitting was segfaulting if used
+	together and at least one split occurred after a track had been
+	appended.
+
 2005-03-02  Moritz Bunkus
 
 	* mkvmerge: Added more descriptive error messages if two tracks
diff --git a/src/merge/output_control.cpp b/src/merge/output_control.cpp
index b33e746f0..864675d0a 100644
--- a/src/merge/output_control.cpp
+++ b/src/merge/output_control.cpp
@@ -139,7 +139,7 @@ bool identifying = false, identify_verbose = false;
 cluster_helper_c *cluster_helper = NULL;
 
 KaxSegment *kax_segment;
-KaxTracks *kax_tracks;
+KaxTracks kax_tracks;
 KaxTrackEntry *kax_last_entry;
 KaxCues *kax_cues;
 KaxSeekHead *kax_sh_main = NULL, *kax_sh_cues = NULL;
@@ -621,30 +621,30 @@ render_headers(mm_io_c *rout) {
   if (write_meta_seek_for_clusters)
     kax_sh_cues = new KaxSeekHead();
 
-  kax_tracks = &GetChild<KaxTracks>(*kax_segment);
-  kax_last_entry = NULL;
-
-  for (i = 0; i < track_order.size(); i++)
-    if ((track_order[i].file_id >= 0) &&
-        (track_order[i].file_id < files.size()) &&
-        !files[track_order[i].file_id].appending)
-      files[track_order[i].file_id].reader->
-        set_headers_for_track(track_order[i].track_id);
-  for (i = 0; i < files.size(); i++)
-    if (!files[i].appending)
-      files[i].reader->set_headers();
-  set_timecode_scale();
-  for (i = 0; i < packetizers.size(); i++)
-    if (packetizers[i].packetizer != NULL)
-      packetizers[i].packetizer->fix_headers();
+  if (first_file) {
+    kax_last_entry = NULL;
+    for (i = 0; i < track_order.size(); i++)
+      if ((track_order[i].file_id >= 0) &&
+          (track_order[i].file_id < files.size()) &&
+          !files[track_order[i].file_id].appending)
+        files[track_order[i].file_id].reader->
+          set_headers_for_track(track_order[i].track_id);
+    for (i = 0; i < files.size(); i++)
+      if (!files[i].appending)
+        files[i].reader->set_headers();
+    set_timecode_scale();
+    for (i = 0; i < packetizers.size(); i++)
+      if (packetizers[i].packetizer != NULL)
+        packetizers[i].packetizer->fix_headers();
+  }
 
   kax_infos->Render(*rout, true);
   kax_sh_main->IndexThis(*kax_infos, *kax_segment);
 
   if (packetizers.size() > 0) {
-    kax_tracks->Render(*rout,
-                       !hack_engaged(ENGAGE_NO_DEFAULT_HEADER_VALUES));
-    kax_sh_main->IndexThis(*kax_tracks, *kax_segment);
+    kax_segment->PushElement(kax_tracks);
+    kax_tracks.Render(*rout, !hack_engaged(ENGAGE_NO_DEFAULT_HEADER_VALUES));
+    kax_sh_main->IndexThis(kax_tracks, *kax_segment);
 
     // Reserve some small amount of space for header changes by the
     // packetizers.
@@ -668,13 +668,12 @@ void
 rerender_track_headers() {
   int64_t new_void_size;
 
-  kax_tracks->UpdateSize(!hack_engaged(ENGAGE_NO_DEFAULT_HEADER_VALUES));
+  kax_tracks.UpdateSize(!hack_engaged(ENGAGE_NO_DEFAULT_HEADER_VALUES));
   new_void_size = void_after_track_headers->GetElementPosition() +
     void_after_track_headers->GetSize() -
-    kax_tracks->GetElementPosition() -
-    kax_tracks->ElementSize();
-  out->save_pos(kax_tracks->GetElementPosition());
-  kax_tracks->Render(*out, !hack_engaged(ENGAGE_NO_DEFAULT_HEADER_VALUES));
+    kax_tracks.GetElementPosition() - kax_tracks.ElementSize();
+  out->save_pos(kax_tracks.GetElementPosition());
+  kax_tracks.Render(*out, !hack_engaged(ENGAGE_NO_DEFAULT_HEADER_VALUES));
   delete void_after_track_headers;
   void_after_track_headers = new EbmlVoid;
   void_after_track_headers->SetSize(new_void_size);
@@ -1589,6 +1588,13 @@ finish_file(bool last_file) {
 
   kax_segment->OverwriteHead(*out);
   delete out;
+
+  // The tracks element must not be deleted.
+  for (i = 0; i < kax_segment->ListSize(); ++i)
+    if (NULL == dynamic_cast<KaxTracks *>((*kax_segment)[i]))
+      delete (*kax_segment)[i];
+  kax_segment->RemoveAll();
+
   delete kax_segment;
   delete kax_cues;
   delete kax_sh_void;
@@ -1596,9 +1602,6 @@ finish_file(bool last_file) {
   delete void_after_track_headers;
   if (kax_sh_cues != NULL)
     delete kax_sh_cues;
-
-  for (i = 0; i < packetizers.size(); i++)
-    packetizers[i].packetizer->reset();
 }
 
 static void establish_deferred_connections(filelist_t &file);
diff --git a/src/merge/output_control.h b/src/merge/output_control.h
index d56f38fb3..9f8507405 100644
--- a/src/merge/output_control.h
+++ b/src/merge/output_control.h
@@ -145,7 +145,7 @@ extern string segmentinfo_file_name;
 extern KaxTags *tags_from_cue_chapters;
 
 extern KaxSegment *kax_segment;
-extern KaxTracks *kax_tracks;
+extern KaxTracks kax_tracks;
 extern KaxTrackEntry *kax_last_entry;
 extern KaxCues *kax_cues;
 extern KaxSeekHead *kax_sh_main, *kax_sh_cues;
diff --git a/src/merge/pr_generic.cpp b/src/merge/pr_generic.cpp
index 5154cef14..debabeae3 100644
--- a/src/merge/pr_generic.cpp
+++ b/src/merge/pr_generic.cpp
@@ -567,10 +567,10 @@ generic_packetizer_c::set_headers() {
 
   if (track_entry == NULL) {
     if (kax_last_entry == NULL)
-      track_entry = &GetChild<KaxTrackEntry>(*kax_tracks);
+      track_entry = &GetChild<KaxTrackEntry>(kax_tracks);
     else
       track_entry =
-        &GetNextChild<KaxTrackEntry>(*kax_tracks, *kax_last_entry);
+        &GetNextChild<KaxTrackEntry>(kax_tracks, *kax_last_entry);
     kax_last_entry = track_entry;
     track_entry->SetGlobalTimecodeScale((int64_t)timecode_scale);
   }
diff --git a/src/merge/pr_generic.h b/src/merge/pr_generic.h
index b2fb1367c..80054bf31 100644
--- a/src/merge/pr_generic.h
+++ b/src/merge/pr_generic.h
@@ -496,9 +496,6 @@ public:
   virtual file_status_e read() {
     return reader->read(this);
   }
-  virtual void reset() {
-    track_entry = NULL;
-  }
 
   virtual void add_packet(memory_c &mem, int64_t timecode, int64_t duration,
                           bool duration_mandatory = false,
diff --git a/tests/results.txt b/tests/results.txt
index 47132683d..c4d6b3fc3 100644
--- a/tests/results.txt
+++ b/tests/results.txt
@@ -54,3 +54,4 @@ T_204wavpack_without_correctiondata:0d01f5162a71fedc934d7e8a675a0004:passed:2005
 T_205X_cuesheets:3b00b00c7d185137e30d7e95e3123d33-b3bb67d316e20da12926d5c1d628f6e5:passed:20050210-211853
 T_206X_vobsub:1871f1e7e83dc90ba918a1337bc8eb30-287d40b353664d122f12dfeae3c84888-115f77a674d29a530d6aefc386ed5e85-a6c19d7405f1f9cb8227ac53a3ad23f8-24836c79a8fea4e0da5081e5ea8a643f:passed:20050211-231728
 T_207segmentinfo:6f78ae296efa17b704ceca71de9ff728:passed:20050211-234856
+T_208cat_and_splitting:55c3d406e2b4f793f7d0e1f0547d66b3-2d5670d74da87a4ed48e00720fd8c16f:new:20050306-152640
diff --git a/tests/test-208cat_and_splitting.rb b/tests/test-208cat_and_splitting.rb
new file mode 100644
index 000000000..e29da801a
--- /dev/null
+++ b/tests/test-208cat_and_splitting.rb
@@ -0,0 +1,25 @@
+#!/usr/bin/ruby -w
+
+class T_208cat_and_splitting < Test
+  def description
+    return "mkvmerge / concatenation and splitting at the same time"
+  end
+
+  def run
+    merge(tmp + "-%03d ", "--split-max-files 2 --split 4m data/avi/v.avi " +
+          "+data/avi/v.avi")
+    if (!FileTest.exist?(tmp + "-001"))
+      error("First split file does not exist.")
+    end
+    if (!FileTest.exist?(tmp + "-002"))
+      File.unlink(tmp + "-001")
+      error("Second split file does not exist.")
+    end
+    hash = hash_file(tmp + "-001") + "-" + hash_file(tmp + "-002")
+    File.unlink(tmp + "-001")
+    File.unlink(tmp + "-002")
+
+    return hash
+  end
+end
+
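
The crux of the change is an ownership shift: kax_tracks is now a statically
allocated KaxTracks object that gets pushed into each split file's KaxSegment,
its track entries are created only once (guarded by first_file), and
finish_file() therefore frees every child of the segment except the tracks
element so the next output file can reuse it. The following is a minimal,
self-contained C++ sketch of that cleanup pattern; element_c, tracks_c and
segment_c are simplified stand-ins invented for illustration, not the real
libebml/libmatroska classes.

#include <cstddef>
#include <vector>

// Simplified stand-ins for EbmlElement, KaxTracks and KaxSegment.  They only
// model the ownership relation that matters for the fix.
struct element_c {
  virtual ~element_c() {
  }
};

struct tracks_c: public element_c {
};

struct segment_c {
  std::vector<element_c *> children;

  void push_element(element_c &elt) {
    children.push_back(&elt);
  }

  // Mirrors the new finish_file() logic: delete every child except the
  // shared tracks element, then forget all children so that nothing is
  // freed twice when the segment itself is deleted.
  void clear_owned_children() {
    for (std::size_t i = 0; i < children.size(); ++i)
      if (NULL == dynamic_cast<tracks_c *>(children[i]))
        delete children[i];
    children.clear();            // corresponds to kax_segment->RemoveAll()
  }
};

static tracks_c s_tracks;        // like the new global `KaxTracks kax_tracks;`

int
main() {
  for (int file = 0; file < 2; ++file) {        // two split output files
    segment_c *segment = new segment_c;
    segment->push_element(s_tracks);            // PushElement(kax_tracks)
    segment->children.push_back(new element_c); // per-file data (cues, infos, ...)

    // ... render the headers and clusters, write the file ...

    segment->clear_owned_children();            // keep s_tracks for the next file
    delete segment;
  }

  return 0;
}

For reference, the new test drives this path with a command line roughly
equivalent to "mkvmerge -o out-%03d --split-max-files 2 --split 4m v.avi
+v.avi": the leading '+' appends the second source file to the first, while
the split options force the output into two files, which is exactly the
combination that used to segfault.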