chore: separate large files

This commit is contained in:
CDN 2025-04-23 19:22:41 +08:00
parent ebbf516689
commit 76e1298ded
Signed by: CDN
GPG key ID: 0C656827F9F80080
44 changed files with 5745 additions and 4173 deletions

internal/sync/srt.go Normal file

@@ -0,0 +1,100 @@
package sync

import (
	"fmt"

	"sub-cli/internal/format/srt"
	"sub-cli/internal/model"
)

// syncSRTFiles synchronizes two SRT files
func syncSRTFiles(sourceFile, targetFile string) error {
	sourceEntries, err := srt.Parse(sourceFile)
	if err != nil {
		return fmt.Errorf("error parsing source SRT file: %w", err)
	}

	targetEntries, err := srt.Parse(targetFile)
	if err != nil {
		return fmt.Errorf("error parsing target SRT file: %w", err)
	}

	// Check if entry counts match
	if len(sourceEntries) != len(targetEntries) {
		fmt.Printf("Warning: Source (%d entries) and target (%d entries) have different entry counts. Timeline will be adjusted.\n",
			len(sourceEntries), len(targetEntries))
	}

	// Sync the timelines
	syncedEntries := syncSRTTimeline(sourceEntries, targetEntries)

	// Write the synced entries to the target file
	return srt.Generate(syncedEntries, targetFile)
}

// syncSRTTimeline applies the timing from source SRT entries to target SRT entries
func syncSRTTimeline(sourceEntries, targetEntries []model.SRTEntry) []model.SRTEntry {
	result := make([]model.SRTEntry, len(targetEntries))

	// Copy target entries
	copy(result, targetEntries)

	// If source is empty, just return the target entries as is
	if len(sourceEntries) == 0 {
		// Ensure proper sequence numbering
		for i := range result {
			result[i].Number = i + 1
		}
		return result
	}

	// If source and target have the same number of entries, directly apply timings
	if len(sourceEntries) == len(targetEntries) {
		for i := range result {
			result[i].StartTime = sourceEntries[i].StartTime
			result[i].EndTime = sourceEntries[i].EndTime
		}
	} else {
		// If entry counts differ, scale the timing
		for i := range result {
			// Calculate scaled index; also guard against a single-entry target,
			// which would otherwise divide by zero
			sourceIdx := 0
			if len(sourceEntries) > 1 && len(targetEntries) > 1 {
				sourceIdx = i * (len(sourceEntries) - 1) / (len(targetEntries) - 1)
			}
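			// For example, mapping 5 source entries onto 3 target entries yields
			// sourceIdx 0, 2, 4 for i = 0, 1, 2, sampling timings evenly across the source.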
			// Ensure the index is within bounds
			if sourceIdx >= len(sourceEntries) {
				sourceIdx = len(sourceEntries) - 1
			}
			// Apply the scaled timing
			result[i].StartTime = sourceEntries[sourceIdx].StartTime

			// Calculate end time: if not the last entry, derive the duration from the source
			if i < len(result)-1 {
				var duration model.Timestamp
				if sourceIdx+1 < len(sourceEntries) {
					// If a next source entry exists, use the gap between consecutive source start times
					duration = calculateDuration(sourceEntries[sourceIdx].StartTime, sourceEntries[sourceIdx+1].StartTime)
				} else {
					// If no next source entry, use the source entry's own start-to-end duration
					duration = calculateDuration(sourceEntries[sourceIdx].StartTime, sourceEntries[sourceIdx].EndTime)
				}
				// Add the duration to this entry's start time to get its end time
				result[i].EndTime = addDuration(result[i].StartTime, duration)
			} else {
				// For the last entry, reuse the mapped source entry's end time
				result[i].EndTime = sourceEntries[sourceIdx].EndTime
			}
		}
	}
	// Ensure proper sequence numbering
	for i := range result {
		result[i].Number = i + 1
	}

	return result
}
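For reference, a minimal usage sketch of the new entry point. syncSRTFiles is unexported, so the call below assumes it is made from inside the sync package (for example, from an exported dispatcher or a test); the function name and file names are hypothetical, not part of this commit.

	// syncExample is a hypothetical caller inside the sync package, shown only to
	// illustrate how syncSRTFiles is intended to be used; the file names are made up.
	func syncExample() error {
		// Re-time translated.srt using the timings of original.srt, writing the
		// result back to translated.srt.
		return syncSRTFiles("original.srt", "translated.srt")
	}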