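// Package dl wraps yt-dlp invocations and plain HTTP downloads for subsyt.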
package dl

import (
	"bufio"
	"fmt"
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
	"os/exec"
	"path/filepath"
	"strconv"
	"strings"
	"sync"

	"git.meatbag.se/varl/subsyt/internal/config"
)
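
// Download describes a single download job: the source URL, the output
// directory, the target file name (used by Fetch), and flags for dry-run
// and metadata-only runs.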
type Download struct {
	Url      string
	OutDir   string
	Name     string
	DryRun   bool
	Metadata bool
}
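
// Youtube shells out to the configured yt-dlp command (p.Cmd) to download a
// single video or, when d.Metadata is set, only its metadata and thumbnail.
// Provider settings control verbosity, throttling, quality, cookies, PO
// tokens, and the output path template. Output from the child process is
// streamed to the log, prefixed with the output directory.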
func Youtube(d Download, p config.Provider) {
	if p.Bgutil_server != "" && p.Po_token != "" {
		log.Fatal("please only provide bgutil_server OR po_token, not both")
	}

	vUrl, err := url.Parse(d.Url)
	if err != nil {
		panic(err)
	}

	// Arguments common to both metadata-only and full video runs.
	args := []string{
		"--no-progress",
		"--paths", d.OutDir,
		"--restrict-filenames",
		"--write-thumbnail",
		"--write-info-json",
		"--convert-thumbnails", "jpg",
	}

	if p.Verbose {
		args = append(args, "--verbose")
	}

	if d.DryRun {
		log.Println("/!\\ DRY RUN ENABLED /!\\")
		args = append(args, "--simulate")
	} else {
		args = append(args, "--no-simulate")
	}

	if d.Metadata {
		log.Println("Downloading metadata")
		mArgs := []string{
			"--skip-download",
			"--no-overwrites",
			"--playlist-items", "0:0:1",
		}
		args = append(args, mArgs...)
	} else {
		log.Println("Downloading video")
		archive := filepath.Join(d.OutDir, "archive.txt")
		throttle := strconv.Itoa(p.Throttle)

		dArgs := []string{
			"--no-playlist",
			"--sleep-interval", throttle,
			"--sleep-subtitles", throttle,
			"--sleep-requests", throttle,
			"--max-sleep-interval", "90",
			"--embed-metadata",
			"--write-subs",
			"--no-write-automatic-subs",
			"--sub-langs", "en",
			"--prefer-free-formats",
			"--download-archive", archive,
			"--break-on-existing",
			"--match-filters", "!is_live & duration>?60",
		}
		args = append(args, dArgs...)

		if p.Quality != "" {
			args = append(args, "--format-sort", p.Quality)
		} else {
			args = append(args, "--format-sort", "res:1080")
		}
	}

	if p.Cookies_file != "" {
		args = append(args, "--cookies", p.Cookies_file)
	} else {
		args = append(args, "--no-cookies")
	}

	// Extractor-specific arguments for the YouTube extractor (PO token,
	// bgutil provider base URL, player client), joined into a single
	// "youtube:key=value;key=value" string.
	var youtubeArgs []string
	if p.Po_token != "" {
		youtubeArgs = append(youtubeArgs, fmt.Sprintf("po_token=web.gvs+%s", p.Po_token))
	}
	if p.Bgutil_server != "" {
		youtubeArgs = append(youtubeArgs, fmt.Sprintf("getpot_bgutil_baseurl=%s", p.Bgutil_server))
	}
	if p.Player_client != "" {
		youtubeArgs = append(youtubeArgs, fmt.Sprintf("player_client=%s", p.Player_client))
	}
	if len(youtubeArgs) > 0 {
		args = append(args, "--extractor-args", fmt.Sprintf("youtube:%s", strings.Join(youtubeArgs, ";")))
	}

	if p.Output_path_template != "" {
		args = append(args, "--output", p.Output_path_template)
	} else {
		args = append(args, "--output", "s%(upload_date>%Y)s/%(channel)s.s%(upload_date>%Y)Se%(upload_date>%m%d)S.%(title)s.%(id)s.%(ext)s")
	}

	args = append(args, vUrl.String())

	cmd := exec.Command(p.Cmd, args...)

	stdout, err := cmd.StdoutPipe()
	if err != nil {
		log.Fatal(err)
	}

	stderr, err := cmd.StderrPipe()
	if err != nil {
		log.Fatal(err)
	}

	log.Printf("[%s] running yt-dlp with args: %v\n", d.OutDir, args)

	// Stream stdout and stderr line by line into the log, prefixed with the
	// output directory, until both pipes are closed.
	var wg sync.WaitGroup
	wg.Add(2)

	go func() {
		defer wg.Done()
		scanner := bufio.NewScanner(stdout)
		for scanner.Scan() {
			log.Printf("[%s] %s\n", d.OutDir, scanner.Text())
		}
	}()

	go func() {
		defer wg.Done()
		scanner := bufio.NewScanner(stderr)
		for scanner.Scan() {
			log.Printf("[%s] %s\n", d.OutDir, scanner.Text())
		}
	}()

	err = cmd.Start()
	if err != nil {
		log.Fatal(err)
	}

	// Drain both pipes before calling Wait, as required by os/exec.
	wg.Wait()
	err = cmd.Wait()
	if err != nil {
		log.Printf("Error: %s\n", err)
	}
}
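
// Fetch downloads d.Url over HTTP(S) and writes the response body to
// d.OutDir/d.Name, creating the output directory if needed. Errors are
// logged and the function returns without reporting them to the caller.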
func Fetch(d Download) {
	// Create the output directory if it doesn't exist.
	if err := os.MkdirAll(d.OutDir, 0755); err != nil {
		log.Printf("failed to create directory '%s': %s\n", d.OutDir, err)
		return
	}

	resp, err := http.Get(d.Url)
	if err != nil {
		log.Printf("failed to download '%s'\n", d.Url)
		return
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		log.Printf("failed to download '%s': %s\n", d.Url, resp.Status)
		return
	}

	outputPath := filepath.Join(d.OutDir, d.Name)

	out, err := os.Create(outputPath)
	if err != nil {
		log.Printf("failed to create '%s'\n", outputPath)
		return
	}
	defer out.Close()

	_, err = io.Copy(out, resp.Body)
	if err != nil {
		log.Printf("failed to write '%s'\n", outputPath)
	}
}
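
// RssDownloader fetches the RSS feed at url and returns the raw response
// body, or an error if the request fails, returns a non-200 status, or the
// body cannot be read.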
func RssDownloader(url string) ([]byte, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, fmt.Errorf("failed to fetch RSS feed: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("failed to fetch RSS feed: %s", resp.Status)
	}

	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("failed to read RSS data: %w", err)
	}

	return data, nil
}