Implemented a more flexible system for splitting as a basis for split point and splitting-by-chapters support.
commit 62f78d92fb
parent 5d5aeaf3ce
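The gist of the change: the single global split_after / split_by_time / splitting flags are replaced by a list of split_point_t entries that cluster_helper_c owns and walks through one at a time. Below is a minimal, self-contained sketch of that interface, assembled from the declarations in this patch; the includes and the main() at the end are only illustrative scaffolding and not part of the commit.

// Sketch only: split_point_t and the cluster_helper_c members below are taken
// from the patch; the rest is scaffolding so the example compiles on its own.
#include <cstdint>
#include <vector>

using namespace std;

struct split_point_t {
  enum split_point_type_e {
    SPT_DURATION,
    SPT_SIZE,
    SPT_TIME,
    SPT_CHAPTER
  };

  int64_t m_point;
  split_point_type_e m_type;
  bool m_use_once;

  split_point_t(int64_t point, split_point_type_e type, bool use_once):
    m_point(point), m_type(type), m_use_once(use_once) { }
};

class cluster_helper_c {
private:
  vector<split_point_t> split_points;
  vector<split_point_t>::iterator current_split_point;

public:
  // Registering the first split point also primes the iterator.
  void add_split_point(const split_point_t &split_point) {
    split_points.push_back(split_point);
    if (1 == split_points.size())
      current_split_point = split_points.begin();
  }

  // "Are we splitting at all?" is now derived from the list, not from a flag.
  bool splitting() {
    return !split_points.empty();
  }
};

int main() {
  cluster_helper_c cluster_helper;

  // What "--split 30s" boils down to after parse_split() in this patch:
  // a reusable duration-based split point of 30000 ms.
  cluster_helper.add_split_point(split_point_t(30000,
                                               split_point_t::SPT_DURATION,
                                               false));
  return cluster_helper.splitting() ? 0 : 1;
}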
@@ -46,7 +46,9 @@ public:
 // #define walk_clusters() check_clusters(__LINE__)
 #define walk_clusters()
 
-cluster_helper_c::cluster_helper_c() {
+cluster_helper_c::cluster_helper_c():
+  current_split_point(split_points.begin()) {
+
   cluster_content_size = 0;
   max_timecode_and_duration = 0;
   last_cluster_tc = 0;
@@ -112,7 +114,9 @@ cluster_helper_c::add_packet(packet_cptr packet) {
     add_cluster(new kax_cluster_c());
   }
 
-  if (splitting && (file_num <= split_max_num_files) &&
+  if (splitting() &&
+      (split_points.end() != current_split_point) &&
+      (file_num <= split_max_num_files) &&
       (packet->bref == -1) &&
       ((packet->source->get_track_type() == track_video) ||
        (video_packetizer == NULL))) {
@@ -120,7 +124,7 @@ cluster_helper_c::add_packet(packet_cptr packet) {
     c = clusters[clusters.size() - 1];
 
     // Maybe we want to start a new file now.
-    if (!split_by_time) {
+    if (split_point_t::SPT_SIZE == current_split_point->m_type) {
 
       if (c->packets.size() > 0) {
         // Cluster + Cluster timecode (roughly)
@@ -146,12 +150,14 @@ cluster_helper_c::add_packet(packet_cptr packet) {
              "additional_size: %lld, bytes_in_file: %lld, sum: %lld\n",
              header_overhead, additional_size, bytes_in_file,
              header_overhead + additional_size + bytes_in_file);
-      if ((header_overhead + additional_size + bytes_in_file) >= split_after)
+      if ((header_overhead + additional_size + bytes_in_file) >=
+          current_split_point->m_point)
         split = true;
 
-    } else if ((0 <= first_timecode_in_file) &&
+    } else if ((split_point_t::SPT_DURATION == current_split_point->m_type) &&
+               (0 <= first_timecode_in_file) &&
                (packet->assigned_timecode - first_timecode_in_file) >=
-               (split_after * 1000000ull))
+               (current_split_point->m_point * 1000000ull))
       split = true;
 
     if (split) {
@@ -171,6 +177,9 @@ cluster_helper_c::add_packet(packet_cptr packet) {
 
       if (no_linking)
         timecode_offset = packet->assigned_timecode;
+
+      if (current_split_point->m_use_once)
+        ++current_split_point;
     }
   }
 
@@ -345,7 +354,7 @@ cluster_helper_c::render_cluster(ch_contents_t *clstr) {
   cluster = clstr->cluster;
 
   // Splitpoint stuff
-  if ((header_overhead == -1) && splitting)
+  if ((header_overhead == -1) && splitting())
     header_overhead = out->getFilePointer() + tags_size;
 
   elements_in_cluster = 0;
@@ -724,3 +733,10 @@ cluster_helper_c::get_duration() {
          max_timecode_and_duration - first_timecode_in_file);
   return max_timecode_and_duration - first_timecode_in_file;
 }
+
+void
+cluster_helper_c::add_split_point(const split_point_t &split_point) {
+  split_points.push_back(split_point);
+  if (1 == split_points.size())
+    current_split_point = split_points.begin();
+}
@@ -55,6 +55,22 @@ struct render_groups_t {
   bool more_data, duration_mandatory;
 };
 
+struct split_point_t {
+  enum split_point_type_e {
+    SPT_DURATION,
+    SPT_SIZE,
+    SPT_TIME,
+    SPT_CHAPTER
+  };
+
+  int64_t m_point;
+  split_point_type_e m_type;
+  bool m_use_once;
+
+  split_point_t(int64_t point, split_point_type_e type, bool use_once):
+    m_point(point), m_type(type), m_use_once(use_once) { }
+};
+
 class cluster_helper_c {
 private:
   vector<ch_contents_t *> clusters;
@@ -65,6 +81,9 @@ private:
   int64_t bytes_in_file, first_timecode_in_file;
   mm_io_c *out;
 
+  vector<split_point_t> split_points;
+  vector<split_point_t>::iterator current_split_point;
+
 public:
   cluster_helper_c();
   virtual ~cluster_helper_c();
@@ -85,6 +104,11 @@ public:
     return first_timecode_in_file;
   }
 
+  void add_split_point(const split_point_t &split_point);
+  bool splitting() {
+    return !split_points.empty();
+  }
+
 private:
   int find_cluster(KaxCluster *cluster);
   ch_contents_t *find_packet_cluster(int64_t ref_timecode,
@@ -43,6 +43,7 @@
 #include <matroska/KaxTags.h>
 
 #include "chapters.h"
+#include "cluster_helper.h"
 #include "common.h"
 #include "commonebml.h"
 #include "extern_data.h"
@@ -655,7 +656,7 @@ parse_cropping(const string &s,
 */
 static void
 parse_split(const string &arg) {
-  int64_t modifier;
+  int64_t split_after, modifier;
   char mod;
   string s;
 
@@ -680,13 +681,14 @@ parse_split(const string &arg) {
       msecs = 0;
 
     mxsscanf(s.c_str(), "%d:%d:%d", &hours, &mins, &secs);
-    split_after = secs + mins * 60 + hours * 3600;
     if ((hours < 0) || (mins < 0) || (mins > 59) || (secs < 0) || (secs > 59))
       mxerror(_("Invalid time for '--split' in '--split %s'.\n"),
               arg.c_str());
 
-    split_after = split_after * 1000 + msecs;
-    split_by_time = true;
+    split_after = (int64_t)(secs + mins * 60 + hours * 3600) * 1000ll + msecs;
+    cluster_helper->add_split_point(split_point_t(split_after,
+                                                  split_point_t::SPT_DURATION,
+                                                  false));
     return;
   }
 
@@ -700,7 +702,9 @@ parse_split(const string &arg) {
               arg.c_str());
 
     split_after *= 1000;
-    split_by_time = true;
+    cluster_helper->add_split_point(split_point_t(split_after,
+                                                  split_point_t::SPT_DURATION,
+                                                  false));
     return;
   }
 
@@ -719,8 +723,9 @@ parse_split(const string &arg) {
   if (!parse_int(s, split_after))
     mxerror(_("Invalid split size in '--split %s'.\n"), arg.c_str());
 
-  split_after *= modifier;
-  split_by_time = false;
+  cluster_helper->add_split_point(split_point_t(split_after * modifier,
+                                                split_point_t::SPT_SIZE,
+                                                false));
 }
 
 /** \brief Parse the \c --delay argument
@@ -1927,7 +1932,7 @@ parse_args(vector<string> args) {
     }
   }
 
-  if ((split_after <= 0) && !no_linking)
+  if (!cluster_helper->splitting() && !no_linking)
     mxwarn(_("'--link' is only useful in combination with '--split'.\n"));
 
   delete ti;
@@ -1959,9 +1964,6 @@ main(int argc,
 
   parse_args(command_line_utf8(argc, argv));
 
-  if (split_after > 0)
-    splitting = true;
-
   start = time(NULL);
 
   create_readers();
@@ -125,9 +125,6 @@ bool write_cues = true, cue_writing_requested = false;
 generic_packetizer_c *video_packetizer = NULL;
 bool write_meta_seek_for_clusters = true;
 bool no_lacing = false, no_linking = true;
-int64_t split_after = -1;
-bool split_by_time = false;
-int split_max_num_files = 65535;
 bool use_durations = false;
 
 double timecode_scale = TIMECODE_SCALE;
@@ -173,7 +170,8 @@ int64_t tags_size = 0;
 static bool accept_tags = true;
 
 int file_num = 1;
-bool splitting = false;
+
+int split_max_num_files = 65535;
 
 string default_language = "und";
 
@@ -601,7 +599,7 @@ render_headers(mm_io_c *rout) {
       KaxNextUID &kax_nextuid = GetChild<KaxNextUID>(*kax_infos);
       kax_nextuid.CopyBuffer(seguid_link_next->data(), 128 / 8);
     }
-    if (!no_linking && splitting) {
+    if (!no_linking && cluster_helper->splitting()) {
       KaxNextUID &kax_nextuid = GetChild<KaxNextUID>(*kax_infos);
       kax_nextuid.CopyBuffer(seguid_next.data(), 128 / 8);
 
@@ -1380,7 +1378,7 @@ create_next_output_file() {
   kax_cues = new KaxCues();
   kax_cues->SetGlobalTimecodeScale((int64_t)timecode_scale);
 
-  if (splitting)
+  if (cluster_helper->splitting())
     this_outfile = create_output_name();
   else
     this_outfile = outfile;
@@ -1528,7 +1526,7 @@ finish_file(bool last_file) {
     end = start + cluster_helper->get_duration();
 
     chapters_here = copy_chapters(kax_chapters);
-    if (splitting)
+    if (cluster_helper->splitting())
       chapters_here = select_chapters_in_timeframe(chapters_here, start, end,
                                                    offset);
 
@@ -1584,7 +1582,7 @@ finish_file(bool last_file) {
     if (!hack_engaged(ENGAGE_NO_CHAPTERS_IN_META_SEEK))
       kax_sh_main->IndexThis(*chapters_here, *kax_segment);
     delete chapters_here;
-  } else if (!splitting && (kax_chapters != NULL))
+  } else if (!cluster_helper->splitting() && (kax_chapters != NULL))
     if (!hack_engaged(ENGAGE_NO_CHAPTERS_IN_META_SEEK))
       kax_sh_main->IndexThis(*kax_chapters, *kax_segment);
 
@@ -172,14 +172,13 @@ extern bool no_lacing, no_linking, use_durations;
 extern bool identifying, identify_verbose;
 
 extern int file_num;
-extern bool splitting;
 
 extern int64_t max_ns_per_cluster;
 extern int max_blocks_per_cluster;
 extern int default_tracks[3], default_tracks_priority[3];
-extern int64_t split_after;
+
+extern bool splitting;
 extern int split_max_num_files;
-extern bool split_by_time;
 
 extern double timecode_scale;
 
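For a rough illustration of how the reworked add_packet() logic is meant to walk that list, here is a heavily simplified sketch: the real packet, cluster, and file bookkeeping is replaced by plain millisecond timecodes, only the SPT_DURATION case is shown, and the names mirror the patch but nothing below is part of the commit itself.

// Simplified consumption loop: check the current split point for each
// "packet", and advance past it only if it is a one-shot (m_use_once) point.
#include <cstdint>
#include <cstdio>
#include <vector>

struct split_point_t {
  enum split_point_type_e { SPT_DURATION, SPT_SIZE };

  int64_t m_point;
  split_point_type_e m_type;
  bool m_use_once;

  split_point_t(int64_t point, split_point_type_e type, bool use_once):
    m_point(point), m_type(type), m_use_once(use_once) { }
};

int main() {
  std::vector<split_point_t> split_points;
  // A reusable "every 5 seconds" split point, as --split duration would add.
  split_points.push_back(split_point_t(5000, split_point_t::SPT_DURATION, false));
  std::vector<split_point_t>::iterator current_split_point = split_points.begin();

  int64_t first_timecode_in_file = 0;
  // Pretend packets arrive with these timecodes (ms).
  int64_t timecodes[] = { 0, 2000, 4000, 6000, 8000, 12000 };

  for (int i = 0; 6 > i; ++i) {
    bool split = false;

    if ((split_points.end() != current_split_point) &&
        (split_point_t::SPT_DURATION == current_split_point->m_type) &&
        ((timecodes[i] - first_timecode_in_file) >= current_split_point->m_point))
      split = true;

    if (split) {
      std::printf("would start a new file before timecode %lld\n",
                  (long long)timecodes[i]);
      first_timecode_in_file = timecodes[i];
      // One-shot split points are consumed; reusable ones keep firing.
      if (current_split_point->m_use_once)
        ++current_split_point;
    }
  }

  return 0;
}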