package dl

import (
	"bufio"
	"fmt"
	"io"
	"log"
	"net/http"
	"net/url"
	"os"
	"os/exec"
	"path/filepath"
	"strconv"
	"strings"
	"sync"

	"git.meatbag.se/varl/subsyt/internal/config"
)
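
// Download describes a single download job: the source URL, the output
// directory, the output filename used by Fetch, and flags selecting
// dry-run and metadata-only modes.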
type Download struct {
	Url      string
	OutDir   string
	Name     string
	DryRun   bool
	Metadata bool
}
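
// upgradeMu serializes upgrade runs so concurrent callers do not race on
// the yt-dlp binary.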
var upgradeMu sync.Mutex
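
// UpgradeYtDlp upgrades the yt-dlp installation in place. Installs managed
// by pipx are upgraded via `pipx upgrade-all`; anything else is updated
// with yt-dlp's built-in --update flag.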
func UpgradeYtDlp(cmd string) error {
	upgradeMu.Lock()
	defer upgradeMu.Unlock()

	resolved, err := resolveYtDlpPath(cmd)
	if err != nil {
		return err
	}

	if strings.Contains(resolved, filepath.Join("pipx", "venvs")) {
		return runLoggedCommand("pipx", exec.Command("pipx", "upgrade-all"))
	}

	return runLoggedCommand("yt-dlp", exec.Command(resolved, "--update"))
}
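
// resolveYtDlpPath turns the configured command into an absolute,
// symlink-free path: absolute paths are used as-is, relative paths
// containing a separator are resolved against the working directory, and
// bare names are looked up on PATH.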
func resolveYtDlpPath(cmd string) (string, error) {
	if cmd == "" {
		cmd = "yt-dlp"
	}

	if filepath.IsAbs(cmd) {
		return evalPath(cmd)
	}

	if strings.Contains(cmd, string(os.PathSeparator)) {
		abs, err := filepath.Abs(cmd)
		if err != nil {
			return "", fmt.Errorf("resolve yt-dlp path: %w", err)
		}
		return evalPath(abs)
	}

	located, err := exec.LookPath(cmd)
	if err != nil {
		return "", fmt.Errorf("locate yt-dlp: %w", err)
	}

	return evalPath(located)
}
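
// evalPath resolves any symlinks in path so that checks against the real
// install location (e.g. a pipx venv) see the final target.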
func evalPath(path string) (string, error) {
	resolved, err := filepath.EvalSymlinks(path)
	if err != nil {
		return "", fmt.Errorf("resolve yt-dlp symlink: %w", err)
	}
	return resolved, nil
}
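
// runLoggedCommand starts cmd, streams its stdout and stderr line by line
// through the logger under the given tag, and waits for it to finish. Both
// pipes are fully drained before Wait is called, as os/exec requires.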
func runLoggedCommand(tag string, cmd *exec.Cmd) error {
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		return fmt.Errorf("%s stdout: %w", tag, err)
	}

	stderr, err := cmd.StderrPipe()
	if err != nil {
		return fmt.Errorf("%s stderr: %w", tag, err)
	}

	log.Printf("[%s] running %s %v\n", tag, cmd.Path, cmd.Args[1:])

	var wg sync.WaitGroup
	wg.Add(2)

	go func() {
		defer wg.Done()
		scanner := bufio.NewScanner(stdout)
		for scanner.Scan() {
			log.Printf("[%s] %s\n", tag, scanner.Text())
		}
		if err := scanner.Err(); err != nil {
			log.Printf("[%s] stdout error: %v\n", tag, err)
		}
	}()

	go func() {
		defer wg.Done()
		scanner := bufio.NewScanner(stderr)
		for scanner.Scan() {
			log.Printf("[%s] %s\n", tag, scanner.Text())
		}
		if err := scanner.Err(); err != nil {
			log.Printf("[%s] stderr error: %v\n", tag, err)
		}
	}()

	if err := cmd.Start(); err != nil {
		return fmt.Errorf("%s start: %w", tag, err)
	}

	wg.Wait()

	if err := cmd.Wait(); err != nil {
		return fmt.Errorf("%s failed: %w", tag, err)
	}

	return nil
}
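
// Youtube builds a yt-dlp argument list from the Download request and the
// provider config, then runs yt-dlp and streams its output to the log.
// Metadata-only runs fetch channel metadata without downloading any videos.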
func Youtube(d Download, p config.Provider) {
	if p.Bgutil_server != "" && p.Po_token != "" {
		log.Fatal("please only provide bgutil_server OR po_token, not both")
	}

	vUrl, err := url.Parse(d.Url)
	if err != nil {
		log.Fatalf("invalid download URL %q: %v", d.Url, err)
	}

	args := []string{
		"--no-progress",
		"--paths", d.OutDir,
		"--restrict-filenames",
		"--write-thumbnail",
		"--write-info-json",
		"--convert-thumbnails", "jpg",
	}

	if p.Verbose {
		args = append(args, "--verbose")
	}

	if d.DryRun {
		log.Println("/!\\ DRY RUN ENABLED /!\\")
		args = append(args, "--simulate")
	} else {
		args = append(args, "--no-simulate")
	}

	// NOTE: we want to download the metadata for a channel without
	// downloading every video in the channel. --playlist-items 0:0:1
	// selects video indices 0 to 0 with a step of 1, which matches zero
	// videos, so only the channel-level metadata is written.
	if d.Metadata {
		log.Println("Downloading channel metadata")
		mArgs := []string{
			"--skip-download",
			"--no-overwrites",
			"--playlist-items", "0:0:1",
		}
		args = append(args, mArgs...)
	} else {
		log.Println("Downloading video")
		archive := filepath.Join(d.OutDir, "archive.txt")
		throttle := strconv.Itoa(p.Throttle)

		dArgs := []string{
			"--no-playlist",
			"--sleep-interval", throttle,
			"--sleep-subtitles", throttle,
			"--sleep-requests", throttle,
			"--max-sleep-interval", "90",
			"--embed-metadata",
			"--write-subs",
			"--no-write-automatic-subs",
			"--sub-langs", "en",
			"--prefer-free-formats",
			"--download-archive", archive,
			"--break-on-existing",
			"--match-filters", "!is_live & duration>?60",
		}
		args = append(args, dArgs...)

		if p.Format != "" {
			args = append(args, "--format", p.Format)
		}

		if p.Format_sort != "" {
			args = append(args, "--format-sort", p.Format_sort)
		}
	}

	if p.Cookies_file != "" {
		args = append(args, "--cookies", p.Cookies_file)
	} else {
		args = append(args, "--no-cookies")
	}

	if p.Bgutil_server != "" {
		args = append(args, "--extractor-args", fmt.Sprintf("youtubepot-bgutilhttp:base_url=%s", p.Bgutil_server))
	}

	// NOTE: every unique extractor's args need to be combined into one
	// semicolon-separated string.
	var youtubeArgs []string
	if p.Po_token != "" {
		youtubeArgs = append(youtubeArgs, fmt.Sprintf("po_token=web.gvs+%s", p.Po_token))
	}
	if p.Player_client != "" {
		youtubeArgs = append(youtubeArgs, fmt.Sprintf("player_client=%s", p.Player_client))
	}

	if len(youtubeArgs) > 0 {
		args = append(args, "--extractor-args", fmt.Sprintf("youtube:%s", strings.Join(youtubeArgs, ";")))
	}

	if p.Output_path_template != "" {
		args = append(args, "--output", p.Output_path_template)
	} else {
		args = append(args, "--output", "s%(upload_date>%Y)s/%(channel)s.s%(upload_date>%Y)Se%(upload_date>%m%d)S.%(title)s.%(id)s.%(ext)s")
	}

	args = append(args, vUrl.String())

	// Stream yt-dlp's output through the shared logging runner, tagged
	// with the output directory so concurrent downloads stay readable.
	if err := runLoggedCommand(d.OutDir, exec.Command(p.Cmd, args...)); err != nil {
		log.Printf("Error: %s\n", err)
	}
}
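
// Fetch downloads the file at d.Url into d.OutDir under d.Name. Errors are
// logged rather than returned, and the output file is only created once the
// HTTP request has succeeded.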
func Fetch(d Download) {
	// Create the output directory if it doesn't exist.
	if err := os.MkdirAll(d.OutDir, 0755); err != nil {
		log.Printf("failed to create directory '%s': %v\n", d.OutDir, err)
		return
	}

	resp, err := http.Get(d.Url)
	if err != nil {
		log.Printf("failed to download '%s': %v\n", d.Url, err)
		return
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		log.Printf("failed to download '%s': %s\n", d.Url, resp.Status)
		return
	}

	outputPath := filepath.Join(d.OutDir, d.Name)

	out, err := os.Create(outputPath)
	if err != nil {
		log.Printf("failed to create '%s': %v\n", outputPath, err)
		return
	}
	defer out.Close()

	if _, err := io.Copy(out, resp.Body); err != nil {
		log.Printf("failed to write '%s': %v\n", outputPath, err)
	}
}
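
// RssDownloader fetches the RSS feed at url and returns the raw body bytes.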
func RssDownloader(url string) ([]byte, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, fmt.Errorf("failed to fetch RSS feed: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("failed to fetch RSS feed: %s", resp.Status)
	}

	data, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("failed to read RSS data: %w", err)
	}

	return data, nil
}