Some checks are pending
build / build (push) Waiting to run
This uses RSS to fetch the list of videos, avoiding videos being invisible due to "restrictions", and then downloads each video one-by-one instead of scraping and parsing the channel page with yt-dlp. Because this loses the channel-level (show-level) metadata, a small hack is introduced to download just the channel's metadata separately.
39 lines
817 B
Go
39 lines
817 B
Go
package nfo
|
|
|
|
import (
|
|
"encoding/xml"
|
|
"log"
|
|
"os"
|
|
"strings"
|
|
|
|
"git.meatbag.se/varl/subsyt/internal/model"
|
|
)
|
|
|
|
func WriteEpisodeNFO(ep model.Episode, info_path string) {
|
|
out_path := strings.Replace(info_path, ".info.json", ".nfo", 1)
|
|
|
|
log.Printf("writing info from '%s' to '%s'\n", info_path, out_path)
|
|
|
|
xmlData, err := xml.MarshalIndent(ep, "", " ")
|
|
if err != nil {
|
|
panic(err)
|
|
}
|
|
|
|
complete := xml.Header + string(xmlData)
|
|
log.Printf("%s", complete)
|
|
os.WriteFile(out_path, xmlData, 0644)
|
|
}
|
|
|
|
func WriteShowInfo(show model.Show, out_path string) {
|
|
log.Printf("writing info from '%v' to '%s'\n", show, out_path)
|
|
|
|
xmlData, err := xml.MarshalIndent(show, "", " ")
|
|
if err != nil {
|
|
panic(err)
|
|
}
|
|
|
|
complete := xml.Header + string(xmlData)
|
|
log.Printf("%s", complete)
|
|
os.WriteFile(out_path, xmlData, 0644)
|
|
}
|