storage rework

parent 06153d61c9 → commit e9df8925d1
49 changed files with 1825 additions and 1303 deletions
Changed packages: pkg/cowutils, pkg/go-nfs, pkg/kvsingle, pkg/kvtrace, pkg/rlog, pkg/slicesutils, pkg/ytdlp
14 pkg/cowutils/cowutils.go Normal file
@@ -0,0 +1,14 @@
package cowutils

import (
    "errors"
)

// ErrNotSupported is returned by Always() if the operation is not
// supported on the current operating system. Auto() will never return this
// error.
var (
    ErrNotSupported = errors.New("cow is not supported on this OS")
    ErrFailed       = errors.New("cow is not supported on this OS or file")
    ErrTooSmall     = errors.New("file is smaller than the filesystem block size")
)
88 pkg/cowutils/dedupe.go Normal file
@@ -0,0 +1,88 @@
package cowutils

import (
    "context"
    "os"

    "golang.org/x/sys/unix"
)

func DedupeFiles(ctx context.Context, paths []string) (deduped uint64, err error) {
    srcF, err := os.Open(paths[0])
    if err != nil {
        return deduped, err
    }
    defer srcF.Close()
    srcStat, err := srcF.Stat()
    if err != nil {
        return deduped, err
    }

    srcFd := int(srcF.Fd())
    srcSize := srcStat.Size()

    fsStat := unix.Statfs_t{}
    err = unix.Fstatfs(srcFd, &fsStat)
    if err != nil {
        return deduped, err
    }

    if int64(fsStat.Bsize) > srcSize { // for btrfs this means the file resides in metadata and can't be deduplicated
        return deduped, nil
    }

    // the dedupe length must be a whole multiple of the filesystem block size
    blockSize := uint64((srcSize / int64(fsStat.Bsize)) * int64(fsStat.Bsize))

    fdr := unix.FileDedupeRange{
        Src_offset: 0,
        Src_length: blockSize,
        Info:       []unix.FileDedupeRangeInfo{},
    }

    for _, dst := range paths[1:] {
        if ctx.Err() != nil {
            return deduped, ctx.Err()
        }

        destF, err := os.OpenFile(dst, os.O_RDWR, os.ModePerm)
        if err != nil {
            return deduped, err
        }

        // defer in a loop is intended: the file must be closed only at the end of the function,
        // and, most importantly, this keeps the GC from closing the descriptor while the dedupe is in progress
        defer destF.Close()

        fdr.Info = append(fdr.Info, unix.FileDedupeRangeInfo{
            Dest_fd:     int64(destF.Fd()),
            Dest_offset: 0,
        })
    }

    if len(fdr.Info) == 0 {
        return deduped, nil
    }

    if ctx.Err() != nil {
        return deduped, ctx.Err()
    }

    fdr.Src_offset = 0
    for i := range fdr.Info {
        fdr.Info[i].Dest_offset = 0
    }

    err = unix.IoctlFileDedupeRange(srcFd, &fdr)
    if err != nil {
        return deduped, err
    }

    for i := range fdr.Info {
        deduped += fdr.Info[i].Bytes_deduped

        fdr.Info[i].Status = 0
        fdr.Info[i].Bytes_deduped = 0
    }

    return deduped, nil
}
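A minimal usage sketch for the new DedupeFiles helper, assuming the import path git.kmsign.ru/royalcat/tstor/pkg/cowutils (inferred from the module layout seen elsewhere in this commit) and a filesystem with dedupe support such as btrfs; the paths are illustrative only:

package main

import (
    "context"
    "fmt"
    "log"

    "git.kmsign.ru/royalcat/tstor/pkg/cowutils" // assumed import path
)

func main() {
    // The first path is the dedupe source; the remaining paths are files
    // expected to share identical content with it.
    deduped, err := cowutils.DedupeFiles(context.Background(),
        []string{"/data/a.bin", "/data/b.bin", "/data/c.bin"})
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("deduplicated %d bytes\n", deduped)
}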
54 pkg/cowutils/reflink.go Normal file
@@ -0,0 +1,54 @@
package cowutils

import (
    "context"
    "fmt"
    "io"
    "io/fs"
    "os"
)

// Reflink performs the reflink operation on the passed files, replacing
// dst's contents with src. If fallback is true and reflink fails,
// copy_file_range will be used first, and if that fails too io.Copy will
// be used to copy the data.
func Reflink(ctx context.Context, dst, src *os.File, fallback bool) error {
    err := reflink(dst, src)
    if (err != nil) && fallback {
        // reflink failed, but we can fall back; first we need to know the file's size
        var st fs.FileInfo
        st, err = src.Stat()
        if err != nil {
            // couldn't stat the source, this can't be helped
            return fmt.Errorf("failed to stat source: %w", err)
        }
        _, err = copyFileRange(dst, src, 0, 0, st.Size())
        if err != nil {
            // copyFileRange failed too, switch to a simple io copy
            reader := io.NewSectionReader(src, 0, st.Size())
            writer := &sectionWriter{w: dst}
            _ = dst.Truncate(0) // assuming any error in truncate will result in a copy error
            _, err = io.Copy(writer, reader)
        }
    }
    return err
}

// ReflinkRange performs a range reflink operation on the passed files, replacing
// part of dst's contents with data from src. If fallback is true and reflink
// fails, copy_file_range will be used first, and if that fails too io.CopyN
// will be used to copy the data.
func ReflinkRange(ctx context.Context, dst, src *os.File, dstOffset, srcOffset, n int64, fallback bool) error {
    err := reflinkRange(dst, src, dstOffset, srcOffset, n)
    if (err != nil) && fallback {
        _, err = copyFileRange(dst, src, dstOffset, srcOffset, n)
    }

    if (err != nil) && fallback {
        // seek both src & dst
        reader := io.NewSectionReader(src, srcOffset, n)
        writer := &sectionWriter{w: dst, base: dstOffset}
        _, err = io.CopyN(writer, reader, n)
    }
    return err
}
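A minimal usage sketch for the new Reflink helper, again assuming the git.kmsign.ru/royalcat/tstor/pkg/cowutils import path; with fallback enabled the call degrades to copy_file_range and then a plain copy on filesystems without reflink support:

package main

import (
    "context"
    "log"
    "os"

    "git.kmsign.ru/royalcat/tstor/pkg/cowutils" // assumed import path
)

func main() {
    src, err := os.Open("/data/source.img")
    if err != nil {
        log.Fatal(err)
    }
    defer src.Close()

    dst, err := os.Create("/data/clone.img")
    if err != nil {
        log.Fatal(err)
    }
    defer dst.Close()

    // fallback=true: try reflink, then copy_file_range, then io.Copy.
    if err := cowutils.Reflink(context.Background(), dst, src, true); err != nil {
        log.Fatal(err)
    }
}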
53 pkg/cowutils/reflink_unix.go Normal file
@@ -0,0 +1,53 @@
//go:build unix

package cowutils

import (
    "errors"
    "os"

    "golang.org/x/sys/unix"
)

// reflink performs the actual reflink action without worrying about fallback
func reflink(dst, src *os.File) error {
    srcFd := int(src.Fd())
    dstFd := int(dst.Fd())

    err := unix.IoctlFileClone(dstFd, srcFd)

    if err != nil && errors.Is(err, unix.ENOTSUP) {
        return ErrNotSupported
    }

    return err
}

func reflinkRange(dst, src *os.File, dstOffset, srcOffset, n int64) error {
    srcFd := int(src.Fd())
    dstFd := int(dst.Fd())

    req := &unix.FileCloneRange{
        Src_fd:      int64(srcFd),
        Src_offset:  uint64(srcOffset),
        Src_length:  uint64(n),
        Dest_offset: uint64(dstOffset),
    }

    err := unix.IoctlFileCloneRange(dstFd, req)
    if err != nil && errors.Is(err, unix.ENOTSUP) {
        return ErrNotSupported
    }

    return err
}

func copyFileRange(dst, src *os.File, dstOffset, srcOffset, n int64) (int64, error) {
    srcFd := int(src.Fd())
    dstFd := int(dst.Fd())

    resN, err := unix.CopyFileRange(srcFd, &srcOffset, dstFd, &dstOffset, int(n), 0)

    return int64(resN), err
}
39 pkg/cowutils/writer.go Normal file
@@ -0,0 +1,39 @@
package cowutils

import (
    "errors"
    "io"
)

// sectionWriter is a helper used when we need to fall back to copying data manually
type sectionWriter struct {
    w    io.WriterAt // target file
    base int64       // base position in file
    off  int64       // current relative offset
}

// Write writes & updates the offset
func (s *sectionWriter) Write(p []byte) (int, error) {
    n, err := s.w.WriteAt(p, s.base+s.off)
    s.off += int64(n)
    return n, err
}

func (s *sectionWriter) Seek(offset int64, whence int) (int64, error) {
    switch whence {
    case io.SeekStart:
        // nothing needed
    case io.SeekCurrent:
        offset += s.off
    case io.SeekEnd:
        // we don't support io.SeekEnd
        fallthrough
    default:
        return s.off, errors.New("Seek: invalid whence")
    }
    if offset < 0 {
        return s.off, errors.New("Seek: invalid offset")
    }
    s.off = offset
    return offset, nil
}
@@ -14,6 +14,7 @@ import (
    "github.com/willscott/go-nfs-client/nfs/rpc"
    "github.com/willscott/go-nfs-client/nfs/xdr"
    "go.opentelemetry.io/otel"
+   "go.opentelemetry.io/otel/codes"
)

var (

@@ -144,6 +145,8 @@ func (c *conn) handle(ctx context.Context, w *response) error {
            return err
        }
    }
+
+   span.SetStatus(codes.Ok, "")
    return nil
}

24 pkg/kvsingle/single.go Normal file
@@ -0,0 +1,24 @@
package kvsingle

import (
    "context"

    "github.com/royalcat/kv"
)

type Value[K, V any] struct {
    Key K
    db  kv.Store[K, V]
}

func New[K, V any](db kv.Store[K, V], key K) *Value[K, V] {
    return &Value[K, V]{Key: key, db: db}
}

func (s *Value[K, V]) Get(ctx context.Context) (V, bool, error) {
    return s.db.Get(ctx, s.Key)
}

func (s *Value[K, V]) Set(ctx context.Context, value V) error {
    return s.db.Set(ctx, s.Key, value)
}
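A minimal usage sketch for the new kvsingle.Value wrapper, which pins one well-known key of a kv.Store; the store itself is assumed to come from elsewhere (github.com/royalcat/kv constructors are not shown in this commit), and the tstor import path and the "schema_version" key are illustrative assumptions:

package main

import (
    "context"

    "github.com/royalcat/kv"

    "git.kmsign.ru/royalcat/tstor/pkg/kvsingle" // assumed import path
)

// loadSchemaVersion wraps a single key of the store and reads/writes it as a value.
func loadSchemaVersion(ctx context.Context, store kv.Store[string, int]) (int, error) {
    version := kvsingle.New(store, "schema_version")

    v, found, err := version.Get(ctx)
    if err != nil {
        return 0, err
    }
    if !found {
        v = 1
        if err := version.Set(ctx, v); err != nil {
            return 0, err
        }
    }
    return v, nil
}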
@@ -53,7 +53,7 @@ func (m *traceSrtore[K, V]) Range(ctx context.Context, iter kv.Iter[K, V]) error
    defer span.End()

    count := 0
-   iterCount := func(k K, v V) bool {
+   iterCount := func(k K, v V) error {
        count++
        return iter(k, v)
    }

@@ -69,7 +69,7 @@ func (m *traceSrtore[K, V]) RangeWithPrefix(ctx context.Context, k K, iter kv.It
    defer span.End()

    count := 0
-   iterCount := func(k K, v V) bool {
+   iterCount := func(k K, v V) error {
        count++
        return iter(k, v)
    }

@@ -91,6 +91,12 @@ func (l *Logger) With(attrs ...slog.Attr) *Logger {
    }
}

+// returns a new slog logger with the same attributes as the original logger
+// TODO currently not logging function name
+func (l *Logger) Slog() *slog.Logger {
+   return slog.New(l.handler)
+}
+
const endpointKey = "endpoint"

func (l *Logger) WithEndpoint(name string) *Logger {

@@ -126,9 +132,9 @@ func errValue(err error) slog.Value {
    return slog.GroupValue(groupValues...)
}

-func Component(name string) *Logger {
+func Component(name ...string) *Logger {
    return &Logger{
        handler:   handler,
-       component: []string{name},
+       component: name,
    }
}

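A small sketch of the now-variadic Component constructor together with the new Slog accessor; the rlog import path is inferred from the pkg layout and the component names are examples only:

package main

import (
    "git.kmsign.ru/royalcat/tstor/pkg/rlog" // assumed import path
)

func main() {
    // Multiple component segments can now be passed in one call and are
    // stored as-is instead of being wrapped in a single-element slice.
    log := rlog.Component("storage", "dedupe")

    // Slog() exposes the underlying *slog.Logger added in this commit.
    log.Slog().Info("cow dedupe finished")
}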
38 pkg/slicesutils/intersections.go Normal file
@@ -0,0 +1,38 @@
package slicesutils

func Intersection[T comparable](slices ...[]T) []T {
    counts := map[T]int{}
    result := []T{}

    for _, slice := range slices {
        for _, val := range slice {
            counts[val]++
        }
    }

    for val, count := range counts {
        if count == len(slices) {
            result = append(result, val)
        }
    }

    return result
}

func IntersectionFunc[T any](s1 []T, s2 []T, cmp func(T, T) bool) []T {
    set := make([]T, 0)

    for _, a := range s1 {
        for _, b := range s2 {
            if cmp(a, b) {
                set = append(set, a)
            }
        }
    }

    return set
}
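A quick usage sketch of the new slicesutils helpers (import path assumed as git.kmsign.ru/royalcat/tstor/pkg/slicesutils); note that Intersection counts occurrences across all slices, so each input is expected to hold unique elements:

package main

import (
    "fmt"
    "strings"

    "git.kmsign.ru/royalcat/tstor/pkg/slicesutils" // assumed import path
)

func main() {
    common := slicesutils.Intersection(
        []string{"a", "b", "c"},
        []string{"b", "c", "d"},
        []string{"c", "b"},
    )
    fmt.Println(common) // "b" and "c", in map iteration order

    // IntersectionFunc takes an explicit comparison, e.g. case-insensitive match.
    ci := slicesutils.IntersectionFunc(
        []string{"Foo", "Bar"},
        []string{"bar", "baz"},
        strings.EqualFold,
    )
    fmt.Println(ci) // [Bar]
}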
@@ -1,12 +1,10 @@
package ytdlp

import (
    "bytes"
    "context"
    "encoding/json"
    "fmt"
    "io"
    "os/exec"
    "path"
    "strings"

    "github.com/royalcat/ctxprogress"

@@ -63,29 +61,27 @@ const rawProgressTemplate = `download:

var progressTemplate = strings.NewReplacer("\n", "", "\t", "", " ", "").Replace(rawProgressTemplate)

-func (c *Client) Download(ctx context.Context, url string, dir string) error {
+func (c *Client) Download(ctx context.Context, url string, w io.Writer) error {
    args := []string{
        "--no-simulate", "-j",
        "--progress", "--newline", "--progress-template", progressTemplate,
-       "-o", path.Join(dir, "%(title)s.%(ext)s"),
+       "-o", "-",
        url,
    }

    group, ctx := errgroup.WithContext(ctx)

-   w, lines, err := lineReader(group)
+   stderr, lines, err := lineReader(group)
    if err != nil {
        return err
    }
-   var stderr bytes.Buffer
    cmd := exec.CommandContext(ctx, c.binary, args...)

    cmd.Stdout = w
-   cmd.Stderr = &stderr
+   cmd.Stderr = stderr

    group.Go(func() error {
        err := cmd.Run()
-       defer w.Close()
+       stderr.Close()
        if err != nil {
            return err
        }

@@ -105,361 +101,5 @@ func (c *Client) Download(ctx context.Context, url string, dir string) error {
        }
    }

-   err = group.Wait()
-   if err != nil {
-       if _, ok := err.(*exec.ExitError); ok {
-           return fmt.Errorf("yt-dlp error: %s", stderr.Bytes())
-       }
-       return err
-   }
-
-   return nil
+   return group.Wait()
}

|
||||
// // Used to deser the yt-dlp -J output
|
||||
// type DownloadInfo struct {
|
||||
// URL string `json:"url"`
|
||||
// Title string `json:"title"`
|
||||
// Thumbnail string `json:"thumbnail"`
|
||||
// Resolution string `json:"resolution"`
|
||||
// Size int32 `json:"filesize_approx"`
|
||||
// VCodec string `json:"vcodec"`
|
||||
// ACodec string `json:"acodec"`
|
||||
// Extension string `json:"ext"`
|
||||
// OriginalURL string `json:"original_url"`
|
||||
// CreatedAt time.Time `json:"created_at"`
|
||||
// }
|
||||
|
||||
// // Process descriptor
|
||||
// type Process struct {
|
||||
// Id string
|
||||
// Url string
|
||||
// Params []string
|
||||
// OutputDir string
|
||||
// Info DownloadInfo
|
||||
// Progress DownloadProgress
|
||||
// proc *os.Process
|
||||
// Logger *slog.Logger
|
||||
// }
|
||||
|
||||
// func NewProcess(dir string) (*Process, error) {
|
||||
|
||||
// }
|
||||
|
||||
// // Starts spawns/forks a new yt-dlp process and parse its stdout.
|
||||
// // The process is spawned to outputting a custom progress text that
|
||||
// // Resembles a JSON Object in order to Unmarshal it later.
|
||||
// // This approach is anyhow not perfect: quotes are not escaped properly.
|
||||
// // Each process is not identified by its PID but by a UUIDv4
|
||||
// func (p *Process) Start() {
|
||||
// // escape bash variable escaping and command piping, you'll never know
|
||||
// // what they might come with...
|
||||
// p.Params = slices.DeleteFunc(p.Params, func(e string) bool {
|
||||
// match, _ := regexp.MatchString(`(\$\{)|(\&\&)`, e)
|
||||
// return match
|
||||
// })
|
||||
|
||||
// p.Params = slices.DeleteFunc(p.Params, func(e string) bool {
|
||||
// return e == ""
|
||||
// })
|
||||
|
||||
// if p.Output.Path != "" {
|
||||
// out.Path = p.Output.Path
|
||||
// }
|
||||
|
||||
// if p.Output.Filename != "" {
|
||||
// out.Filename = p.Output.Filename
|
||||
// }
|
||||
|
||||
// buildFilename(&p.Output)
|
||||
|
||||
// go p.GetFileName(&out)
|
||||
|
||||
// params := []string{
|
||||
// strings.Split(p.Url, "?list")[0], //no playlist
|
||||
// "--newline",
|
||||
// "--no-colors",
|
||||
// "--no-playlist",
|
||||
// "--progress-template",
|
||||
// strings.NewReplacer("\n", "", "\t", "", " ", "").Replace(template),
|
||||
// }
|
||||
|
||||
// // if user asked to manually override the output path...
|
||||
// if !(slices.Contains(params, "-P") || slices.Contains(params, "--paths")) {
|
||||
// params = append(params, "-o")
|
||||
// params = append(params, fmt.Sprintf("%s/%s", out.Path, out.Filename))
|
||||
// }
|
||||
|
||||
// params = append(params, p.Params...)
|
||||
|
||||
// // ----------------- main block ----------------- //
|
||||
// cmd := exec.Command(config.Instance().DownloaderPath, params...)
|
||||
// cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
|
||||
|
||||
// r, err := cmd.StdoutPipe()
|
||||
// if err != nil {
|
||||
// p.Logger.Error(
|
||||
// "failed to connect to stdout",
|
||||
// slog.String("err", err.Error()),
|
||||
// )
|
||||
// panic(err)
|
||||
// }
|
||||
|
||||
// err = cmd.Start()
|
||||
// if err != nil {
|
||||
// p.Logger.Error(
|
||||
// "failed to start yt-dlp process",
|
||||
// slog.String("err", err.Error()),
|
||||
// )
|
||||
// panic(err)
|
||||
// }
|
||||
|
||||
// p.proc = cmd.Process
|
||||
|
||||
// // --------------- progress block --------------- //
|
||||
// var (
|
||||
// sourceChan = make(chan []byte)
|
||||
// doneChan = make(chan struct{})
|
||||
// )
|
||||
|
||||
// // spawn a goroutine that does the dirty job of parsing the stdout
|
||||
// // filling the channel with as many stdout line as yt-dlp produces (producer)
|
||||
// go func() {
|
||||
// scan := bufio.NewScanner(r)
|
||||
|
||||
// defer func() {
|
||||
// r.Close()
|
||||
// p.Complete()
|
||||
// doneChan <- struct{}{}
|
||||
// close(sourceChan)
|
||||
// close(doneChan)
|
||||
// }()
|
||||
|
||||
// for scan.Scan() {
|
||||
// sourceChan <- scan.Bytes()
|
||||
// }
|
||||
// }()
|
||||
|
||||
// // Slows down the unmarshal operation to every 500ms
|
||||
// go func() {
|
||||
// rx.Sample(time.Millisecond*500, sourceChan, doneChan, func(event []byte) {
|
||||
// var progress ProgressTemplate
|
||||
|
||||
// if err := json.Unmarshal(event, &progress); err != nil {
|
||||
// return
|
||||
// }
|
||||
|
||||
// p.Progress = DownloadProgress{
|
||||
// Status: StatusDownloading,
|
||||
// Percentage: progress.Percentage,
|
||||
// Speed: progress.Speed,
|
||||
// ETA: progress.Eta,
|
||||
// }
|
||||
|
||||
// p.Logger.Info("progress",
|
||||
// slog.String("id", p.getShortId()),
|
||||
// slog.String("url", p.Url),
|
||||
// slog.String("percentage", progress.Percentage),
|
||||
// )
|
||||
// })
|
||||
// }()
|
||||
|
||||
// // ------------- end progress block ------------- //
|
||||
// cmd.Wait()
|
||||
// }
|
||||
|
||||
// // Keep process in the memoryDB but marks it as complete
|
||||
// // Convention: All completed processes has progress -1
|
||||
// // and speed 0 bps.
|
||||
// func (p *Process) Complete() {
|
||||
// p.Progress = DownloadProgress{
|
||||
// Status: StatusCompleted,
|
||||
// Percentage: "-1",
|
||||
// Speed: 0,
|
||||
// ETA: 0,
|
||||
// }
|
||||
|
||||
// p.Logger.Info("finished",
|
||||
// slog.String("id", p.getShortId()),
|
||||
// slog.String("url", p.Url),
|
||||
// )
|
||||
// }
|
||||
|
||||
// // Kill a process and remove it from the memory
|
||||
// func (p *Process) Kill() error {
|
||||
// // yt-dlp uses multiple child process the parent process
|
||||
// // has been spawned with setPgid = true. To properly kill
|
||||
// // all subprocesses a SIGTERM need to be sent to the correct
|
||||
// // process group
|
||||
// if p.proc != nil {
|
||||
// pgid, err := syscall.Getpgid(p.proc.Pid)
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
// err = syscall.Kill(-pgid, syscall.SIGTERM)
|
||||
|
||||
// p.Logger.Info("killed process", slog.String("id", p.Id))
|
||||
// return err
|
||||
// }
|
||||
|
||||
// return nil
|
||||
// }
|
||||
|
||||
// // Returns the available format for this URL
|
||||
// func (p *Process) GetFormatsSync() (DownloadFormats, error) {
|
||||
// cmd := exec.Command(config.Instance().DownloaderPath, p.Url, "-J")
|
||||
|
||||
// stdout, err := cmd.Output()
|
||||
// if err != nil {
|
||||
// p.Logger.Error(
|
||||
// "failed to retrieve metadata",
|
||||
// slog.String("err", err.Error()),
|
||||
// )
|
||||
// return DownloadFormats{}, err
|
||||
// }
|
||||
|
||||
// info := DownloadFormats{URL: p.Url}
|
||||
// best := Format{}
|
||||
|
||||
// var (
|
||||
// wg sync.WaitGroup
|
||||
// decodingError error
|
||||
// )
|
||||
|
||||
// wg.Add(2)
|
||||
|
||||
// log.Println(
|
||||
// cli.BgRed, "Metadata", cli.Reset,
|
||||
// cli.BgBlue, "Formats", cli.Reset,
|
||||
// p.Url,
|
||||
// )
|
||||
|
||||
// p.Logger.Info(
|
||||
// "retrieving metadata",
|
||||
// slog.String("caller", "getFormats"),
|
||||
// slog.String("url", p.Url),
|
||||
// )
|
||||
|
||||
// go func() {
|
||||
// decodingError = json.Unmarshal(stdout, &info)
|
||||
// wg.Done()
|
||||
// }()
|
||||
|
||||
// go func() {
|
||||
// decodingError = json.Unmarshal(stdout, &best)
|
||||
// wg.Done()
|
||||
// }()
|
||||
|
||||
// wg.Wait()
|
||||
|
||||
// if decodingError != nil {
|
||||
// return DownloadFormats{}, err
|
||||
// }
|
||||
|
||||
// info.Best = best
|
||||
|
||||
// return info, nil
|
||||
// }
|
||||
|
||||
// func (p *Process) GetFileName(o *DownloadOutput) error {
|
||||
// cmd := exec.Command(
|
||||
// config.Instance().DownloaderPath,
|
||||
// "--print", "filename",
|
||||
// "-o", fmt.Sprintf("%s/%s", o.Path, o.Filename),
|
||||
// p.Url,
|
||||
// )
|
||||
// cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
|
||||
|
||||
// out, err := cmd.Output()
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
|
||||
// p.Output.SavedFilePath = strings.Trim(string(out), "\n")
|
||||
// return nil
|
||||
// }
|
||||
|
||||
// func (p *Process) SetPending() {
|
||||
// // Since video's title isn't available yet, fill in with the URL.
|
||||
// p.Info = DownloadInfo{
|
||||
// URL: p.Url,
|
||||
// Title: p.Url,
|
||||
// CreatedAt: time.Now(),
|
||||
// }
|
||||
// p.Progress.Status = StatusPending
|
||||
// }
|
||||
|
||||
// func (p *Process) SetMetadata() error {
|
||||
// cmd := exec.Command(config.Instance().DownloaderPath, p.Url, "-J")
|
||||
// cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
|
||||
|
||||
// stdout, err := cmd.StdoutPipe()
|
||||
// if err != nil {
|
||||
// p.Logger.Error("failed to connect to stdout",
|
||||
// slog.String("id", p.getShortId()),
|
||||
// slog.String("url", p.Url),
|
||||
// slog.String("err", err.Error()),
|
||||
// )
|
||||
// return err
|
||||
// }
|
||||
|
||||
// stderr, err := cmd.StderrPipe()
|
||||
// if err != nil {
|
||||
// p.Logger.Error("failed to connect to stderr",
|
||||
// slog.String("id", p.getShortId()),
|
||||
// slog.String("url", p.Url),
|
||||
// slog.String("err", err.Error()),
|
||||
// )
|
||||
// return err
|
||||
// }
|
||||
|
||||
// info := DownloadInfo{
|
||||
// URL: p.Url,
|
||||
// CreatedAt: time.Now(),
|
||||
// }
|
||||
|
||||
// if err := cmd.Start(); err != nil {
|
||||
// return err
|
||||
// }
|
||||
|
||||
// var bufferedStderr bytes.Buffer
|
||||
|
||||
// go func() {
|
||||
// io.Copy(&bufferedStderr, stderr)
|
||||
// }()
|
||||
|
||||
// p.Logger.Info("retrieving metadata",
|
||||
// slog.String("id", p.getShortId()),
|
||||
// slog.String("url", p.Url),
|
||||
// )
|
||||
|
||||
// if err := json.NewDecoder(stdout).Decode(&info); err != nil {
|
||||
// return err
|
||||
// }
|
||||
|
||||
// p.Info = info
|
||||
// p.Progress.Status = StatusPending
|
||||
|
||||
// if err := cmd.Wait(); err != nil {
|
||||
// return errors.New(bufferedStderr.String())
|
||||
// }
|
||||
|
||||
// return nil
|
||||
// }
|
||||
|
||||
// func (p *Process) getShortId() string {
|
||||
// return strings.Split(p.Id, "-")[0]
|
||||
// }
|
||||
|
||||
// func buildFilename(o *DownloadOutput) {
|
||||
// if o.Filename != "" && strings.Contains(o.Filename, ".%(ext)s") {
|
||||
// o.Filename += ".%(ext)s"
|
||||
// }
|
||||
|
||||
// o.Filename = strings.Replace(
|
||||
// o.Filename,
|
||||
// ".%(ext)s.%(ext)s",
|
||||
// ".%(ext)s",
|
||||
// 1,
|
||||
// )
|
||||
// }
|
||||
|
|
|
@@ -3,6 +3,7 @@ package ytdlp_test
import (
    "context"
    "fmt"
+   "io"
    "testing"

    "git.kmsign.ru/royalcat/tstor/pkg/ytdlp"

@@ -10,7 +11,7 @@
    "github.com/stretchr/testify/require"
)

-func TestYtDlp(t *testing.T) {
+func TestDownload(t *testing.T) {
    require := require.New(t)

    ctx := context.Background()

@@ -21,6 +22,6 @@ func TestYtDlp(t *testing.T) {
        cur, total := p.Progress()
        fmt.Printf("%d/%d\n", cur, total)
    })
-   err = c.Download(ctx, "https://www.youtube.com/watch?v=dQw4w9WgXcQ", "rickroll3")
+   err = c.Download(ctx, "https://www.youtube.com/watch?v=dQw4w9WgXcQ", io.Discard)
    require.NoError(err)
}

33 pkg/ytdlp/info.go Normal file
@@ -0,0 +1,33 @@
package ytdlp

import (
    "bytes"
    "context"
    "encoding/json"
    "os/exec"
)

func (c *Client) Info(ctx context.Context, url string) (*Info, error) {
    args := []string{
        "-q", "-J", url,
    }

    cmd := exec.CommandContext(ctx, c.binary, args...)
    var stdout bytes.Buffer
    var stderr bytes.Buffer
    cmd.Stdout = &stdout
    cmd.Stderr = &stderr

    err := cmd.Run()
    if err != nil {
        return nil, err
    }

    var info Info
    err = json.Unmarshal(stdout.Bytes(), &info)
    if err != nil {
        return nil, err
    }

    return &info, nil
}
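A small sketch of the new Info call, written against the exported surface visible in this diff; the ytdlp.Client constructor is not shown here, so the client is taken as a parameter, and the printed fields are just two of the parsed Info members:

package main

import (
    "context"
    "fmt"

    "git.kmsign.ru/royalcat/tstor/pkg/ytdlp"
)

// printTitle fetches metadata for a single URL with the new Info method
// and prints a couple of the parsed fields.
func printTitle(ctx context.Context, c *ytdlp.Client, url string) error {
    info, err := c.Info(ctx, url)
    if err != nil {
        return err
    }
    fmt.Println(info.Title, info.Channel)
    return nil
}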
|
@ -1,31 +1,223 @@
|
|||
package ytdlp
|
||||
|
||||
type Info struct {
|
||||
ID string `json:"id"`
|
||||
Title string `json:"title"`
|
||||
Availability string `json:"availability"`
|
||||
ChannelFollowerCount *int64 `json:"channel_follower_count"`
|
||||
Description string `json:"description"`
|
||||
Tags []string `json:"tags"`
|
||||
Thumbnails []Thumbnail `json:"thumbnails"`
|
||||
ModifiedDate *string `json:"modified_date,omitempty"`
|
||||
ViewCount int64 `json:"view_count"`
|
||||
PlaylistCount *int64 `json:"playlist_count,omitempty"`
|
||||
Channel string `json:"channel"`
|
||||
ChannelID string `json:"channel_id"`
|
||||
UploaderID string `json:"uploader_id"`
|
||||
Uploader string `json:"uploader"`
|
||||
ChannelURL string `json:"channel_url"`
|
||||
UploaderURL string `json:"uploader_url"`
|
||||
Type string `json:"_type"`
|
||||
Entries []Entry `json:"entries,omitempty"`
|
||||
ExtractorKey string `json:"extractor_key"`
|
||||
Extractor string `json:"extractor"`
|
||||
WebpageURL string `json:"webpage_url"`
|
||||
OriginalURL string `json:"original_url"`
|
||||
WebpageURLBasename string `json:"webpage_url_basename"`
|
||||
WebpageURLDomain string `json:"webpage_url_domain"`
|
||||
ReleaseYear interface{} `json:"release_year"`
|
||||
Epoch int64 `json:"epoch"`
|
||||
FilesToMove *FilesToMove `json:"__files_to_move,omitempty"`
|
||||
Version Version `json:"_version"`
|
||||
Formats []Format `json:"formats,omitempty"`
|
||||
Thumbnail *string `json:"thumbnail,omitempty"`
|
||||
Duration *int64 `json:"duration,omitempty"`
|
||||
AverageRating interface{} `json:"average_rating"`
|
||||
AgeLimit *int64 `json:"age_limit,omitempty"`
|
||||
Categories []string `json:"categories,omitempty"`
|
||||
PlayableInEmbed *bool `json:"playable_in_embed,omitempty"`
|
||||
LiveStatus *string `json:"live_status,omitempty"`
|
||||
ReleaseTimestamp interface{} `json:"release_timestamp"`
|
||||
FormatSortFields []string `json:"_format_sort_fields,omitempty"`
|
||||
AutomaticCaptions map[string][]AutomaticCaption `json:"automatic_captions,omitempty"`
|
||||
Subtitles *FilesToMove `json:"subtitles,omitempty"`
|
||||
CommentCount *int64 `json:"comment_count,omitempty"`
|
||||
Chapters interface{} `json:"chapters"`
|
||||
Heatmap []Heatmap `json:"heatmap,omitempty"`
|
||||
LikeCount *int64 `json:"like_count,omitempty"`
|
||||
ChannelIsVerified *bool `json:"channel_is_verified,omitempty"`
|
||||
UploadDate *string `json:"upload_date,omitempty"`
|
||||
Timestamp *int64 `json:"timestamp,omitempty"`
|
||||
Playlist interface{} `json:"playlist"`
|
||||
PlaylistIndex interface{} `json:"playlist_index"`
|
||||
DisplayID *string `json:"display_id,omitempty"`
|
||||
Fulltitle *string `json:"fulltitle,omitempty"`
|
||||
DurationString *string `json:"duration_string,omitempty"`
|
||||
IsLive *bool `json:"is_live,omitempty"`
|
||||
WasLive *bool `json:"was_live,omitempty"`
|
||||
RequestedSubtitles interface{} `json:"requested_subtitles"`
|
||||
HasDRM interface{} `json:"_has_drm"`
|
||||
RequestedDownloads []RequestedDownload `json:"requested_downloads,omitempty"`
|
||||
RequestedFormats []Format `json:"requested_formats,omitempty"`
|
||||
Format *string `json:"format,omitempty"`
|
||||
FormatID *string `json:"format_id,omitempty"`
|
||||
EXT *MediaEXT `json:"ext,omitempty"`
|
||||
Protocol *string `json:"protocol,omitempty"`
|
||||
Language *Language `json:"language,omitempty"`
|
||||
FormatNote *string `json:"format_note,omitempty"`
|
||||
FilesizeApprox *int64 `json:"filesize_approx,omitempty"`
|
||||
Tbr *float64 `json:"tbr,omitempty"`
|
||||
Width *int64 `json:"width,omitempty"`
|
||||
Height *int64 `json:"height,omitempty"`
|
||||
Resolution *Resolution `json:"resolution,omitempty"`
|
||||
FPS *int64 `json:"fps,omitempty"`
|
||||
DynamicRange *DynamicRange `json:"dynamic_range,omitempty"`
|
||||
Vcodec *string `json:"vcodec,omitempty"`
|
||||
Vbr *float64 `json:"vbr,omitempty"`
|
||||
StretchedRatio interface{} `json:"stretched_ratio"`
|
||||
AspectRatio *float64 `json:"aspect_ratio,omitempty"`
|
||||
Acodec *Acodec `json:"acodec,omitempty"`
|
||||
ABR *float64 `json:"abr,omitempty"`
|
||||
ASR *int64 `json:"asr,omitempty"`
|
||||
AudioChannels *int64 `json:"audio_channels,omitempty"`
|
||||
}
|
||||
|
||||
type AutomaticCaption struct {
|
||||
EXT AutomaticCaptionEXT `json:"ext"`
|
||||
URL string `json:"url"`
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
type Entry struct {
|
||||
ID string `json:"id"`
|
||||
Title string `json:"title"`
|
||||
Formats []Format `json:"formats"`
|
||||
Thumbnails []Thumbnail `json:"thumbnails"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Description string `json:"description"`
|
||||
ChannelID string `json:"channel_id"`
|
||||
ChannelURL string `json:"channel_url"`
|
||||
Duration int64 `json:"duration"`
|
||||
ViewCount int64 `json:"view_count"`
|
||||
AverageRating interface{} `json:"average_rating"`
|
||||
AgeLimit int64 `json:"age_limit"`
|
||||
WebpageURL string `json:"webpage_url"`
|
||||
Categories []string `json:"categories"`
|
||||
Tags []string `json:"tags"`
|
||||
PlayableInEmbed bool `json:"playable_in_embed"`
|
||||
LiveStatus string `json:"live_status"`
|
||||
ReleaseTimestamp interface{} `json:"release_timestamp"`
|
||||
FormatSortFields []string `json:"_format_sort_fields"`
|
||||
AutomaticCaptions map[string][]AutomaticCaption `json:"automatic_captions"`
|
||||
Subtitles FilesToMove `json:"subtitles"`
|
||||
CommentCount int64 `json:"comment_count"`
|
||||
Chapters interface{} `json:"chapters"`
|
||||
Heatmap interface{} `json:"heatmap"`
|
||||
LikeCount int64 `json:"like_count"`
|
||||
Channel string `json:"channel"`
|
||||
ChannelFollowerCount int64 `json:"channel_follower_count"`
|
||||
Uploader string `json:"uploader"`
|
||||
UploaderID string `json:"uploader_id"`
|
||||
UploaderURL string `json:"uploader_url"`
|
||||
UploadDate string `json:"upload_date"`
|
||||
Timestamp int64 `json:"timestamp"`
|
||||
Availability string `json:"availability"`
|
||||
OriginalURL string `json:"original_url"`
|
||||
WebpageURLBasename string `json:"webpage_url_basename"`
|
||||
WebpageURLDomain string `json:"webpage_url_domain"`
|
||||
Extractor string `json:"extractor"`
|
||||
ExtractorKey string `json:"extractor_key"`
|
||||
PlaylistCount int64 `json:"playlist_count"`
|
||||
Playlist string `json:"playlist"`
|
||||
PlaylistID string `json:"playlist_id"`
|
||||
PlaylistTitle string `json:"playlist_title"`
|
||||
PlaylistUploader string `json:"playlist_uploader"`
|
||||
PlaylistUploaderID string `json:"playlist_uploader_id"`
|
||||
NEntries int64 `json:"n_entries"`
|
||||
PlaylistIndex int64 `json:"playlist_index"`
|
||||
LastPlaylistIndex int64 `json:"__last_playlist_index"`
|
||||
PlaylistAutonumber int64 `json:"playlist_autonumber"`
|
||||
DisplayID string `json:"display_id"`
|
||||
Fulltitle string `json:"fulltitle"`
|
||||
DurationString string `json:"duration_string"`
|
||||
ReleaseYear interface{} `json:"release_year"`
|
||||
IsLive bool `json:"is_live"`
|
||||
WasLive bool `json:"was_live"`
|
||||
RequestedSubtitles interface{} `json:"requested_subtitles"`
|
||||
HasDRM interface{} `json:"_has_drm"`
|
||||
Epoch int64 `json:"epoch"`
|
||||
RequestedDownloads []RequestedDownload `json:"requested_downloads"`
|
||||
RequestedFormats []Format `json:"requested_formats"`
|
||||
Format string `json:"format"`
|
||||
FormatID string `json:"format_id"`
|
||||
EXT string `json:"ext"`
|
||||
Protocol string `json:"protocol"`
|
||||
Language *Language `json:"language"`
|
||||
FormatNote string `json:"format_note"`
|
||||
FilesizeApprox int64 `json:"filesize_approx"`
|
||||
Tbr float64 `json:"tbr"`
|
||||
Width int64 `json:"width"`
|
||||
Height int64 `json:"height"`
|
||||
Resolution Resolution `json:"resolution"`
|
||||
FPS int64 `json:"fps"`
|
||||
DynamicRange DynamicRange `json:"dynamic_range"`
|
||||
Vcodec string `json:"vcodec"`
|
||||
Vbr float64 `json:"vbr"`
|
||||
StretchedRatio interface{} `json:"stretched_ratio"`
|
||||
AspectRatio float64 `json:"aspect_ratio"`
|
||||
Acodec Acodec `json:"acodec"`
|
||||
ABR float64 `json:"abr"`
|
||||
ASR int64 `json:"asr"`
|
||||
AudioChannels int64 `json:"audio_channels"`
|
||||
}
|
||||
|
||||
type Format struct {
|
||||
URL string `json:"url"`
|
||||
FormatID string `json:"format_id"`
|
||||
Height int64 `json:"height"`
|
||||
EXT EXT `json:"ext"`
|
||||
Protocol Protocol `json:"protocol"`
|
||||
Resolution string `json:"resolution"`
|
||||
DynamicRange DynamicRange `json:"dynamic_range"`
|
||||
AspectRatio *float64 `json:"aspect_ratio"`
|
||||
FilesizeApprox any `json:"filesize_approx"`
|
||||
HTTPHeaders HTTPHeaders `json:"http_headers"`
|
||||
VideoEXT EXT `json:"video_ext"`
|
||||
AudioEXT AudioEXT `json:"audio_ext"`
|
||||
Vbr any `json:"vbr"`
|
||||
ABR any `json:"abr"`
|
||||
Tbr *float64 `json:"tbr"`
|
||||
Format string `json:"format"`
|
||||
FormatIndex any `json:"format_index"`
|
||||
ManifestURL *string `json:"manifest_url,omitempty"`
|
||||
FPS *float64 `json:"fps,omitempty"`
|
||||
Preference any `json:"preference"`
|
||||
Quality any `json:"quality"`
|
||||
HasDRM *bool `json:"has_drm,omitempty"`
|
||||
Width *int64 `json:"width,omitempty"`
|
||||
Vcodec *string `json:"vcodec,omitempty"`
|
||||
Acodec *string `json:"acodec,omitempty"`
|
||||
FormatID string `json:"format_id"`
|
||||
FormatNote *FormatNote `json:"format_note,omitempty"`
|
||||
EXT MediaEXT `json:"ext"`
|
||||
Protocol Protocol `json:"protocol"`
|
||||
Acodec *Acodec `json:"acodec,omitempty"`
|
||||
Vcodec string `json:"vcodec"`
|
||||
URL string `json:"url"`
|
||||
Width *int64 `json:"width"`
|
||||
Height *int64 `json:"height"`
|
||||
FPS *float64 `json:"fps"`
|
||||
Rows *int64 `json:"rows,omitempty"`
|
||||
Columns *int64 `json:"columns,omitempty"`
|
||||
Fragments []Fragment `json:"fragments,omitempty"`
|
||||
Resolution Resolution `json:"resolution"`
|
||||
AspectRatio *float64 `json:"aspect_ratio"`
|
||||
FilesizeApprox *int64 `json:"filesize_approx"`
|
||||
HTTPHeaders HTTPHeaders `json:"http_headers"`
|
||||
AudioEXT MediaEXT `json:"audio_ext"`
|
||||
VideoEXT MediaEXT `json:"video_ext"`
|
||||
Vbr *float64 `json:"vbr"`
|
||||
ABR *float64 `json:"abr"`
|
||||
Tbr *float64 `json:"tbr"`
|
||||
Format string `json:"format"`
|
||||
FormatIndex interface{} `json:"format_index"`
|
||||
ManifestURL *string `json:"manifest_url,omitempty"`
|
||||
Language *Language `json:"language"`
|
||||
Preference interface{} `json:"preference"`
|
||||
Quality *int64 `json:"quality,omitempty"`
|
||||
HasDRM *bool `json:"has_drm,omitempty"`
|
||||
SourcePreference *int64 `json:"source_preference,omitempty"`
|
||||
ASR *int64 `json:"asr"`
|
||||
Filesize *int64 `json:"filesize"`
|
||||
AudioChannels *int64 `json:"audio_channels"`
|
||||
LanguagePreference *int64 `json:"language_preference,omitempty"`
|
||||
DynamicRange *DynamicRange `json:"dynamic_range"`
|
||||
Container *Container `json:"container,omitempty"`
|
||||
DownloaderOptions *DownloaderOptions `json:"downloader_options,omitempty"`
|
||||
}
|
||||
|
||||
type DownloaderOptions struct {
|
||||
HTTPChunkSize int64 `json:"http_chunk_size"`
|
||||
}
|
||||
|
||||
type Fragment struct {
|
||||
URL string `json:"url"`
|
||||
Duration float64 `json:"duration"`
|
||||
}
|
||||
|
||||
type HTTPHeaders struct {
|
||||
|
@ -35,25 +227,76 @@ type HTTPHeaders struct {
|
|||
SECFetchMode SECFetchMode `json:"Sec-Fetch-Mode"`
|
||||
}
|
||||
|
||||
type Subtitles struct {
|
||||
type RequestedDownload struct {
|
||||
RequestedFormats []Format `json:"requested_formats"`
|
||||
Format string `json:"format"`
|
||||
FormatID string `json:"format_id"`
|
||||
EXT string `json:"ext"`
|
||||
Protocol string `json:"protocol"`
|
||||
FormatNote string `json:"format_note"`
|
||||
FilesizeApprox int64 `json:"filesize_approx"`
|
||||
Tbr float64 `json:"tbr"`
|
||||
Width int64 `json:"width"`
|
||||
Height int64 `json:"height"`
|
||||
Resolution Resolution `json:"resolution"`
|
||||
FPS int64 `json:"fps"`
|
||||
DynamicRange DynamicRange `json:"dynamic_range"`
|
||||
Vcodec string `json:"vcodec"`
|
||||
Vbr float64 `json:"vbr"`
|
||||
AspectRatio float64 `json:"aspect_ratio"`
|
||||
Acodec Acodec `json:"acodec"`
|
||||
ABR float64 `json:"abr"`
|
||||
ASR int64 `json:"asr"`
|
||||
AudioChannels int64 `json:"audio_channels"`
|
||||
FilenameOld string `json:"_filename"`
|
||||
Filename string `json:"filename"`
|
||||
WriteDownloadArchive bool `json:"__write_download_archive"`
|
||||
Language *Language `json:"language,omitempty"`
|
||||
}
|
||||
|
||||
type FilesToMove struct {
|
||||
}
|
||||
|
||||
type Thumbnail struct {
|
||||
URL string `json:"url"`
|
||||
ID string `json:"id"`
|
||||
URL string `json:"url"`
|
||||
Preference *int64 `json:"preference,omitempty"`
|
||||
ID string `json:"id"`
|
||||
Height *int64 `json:"height,omitempty"`
|
||||
Width *int64 `json:"width,omitempty"`
|
||||
Resolution *string `json:"resolution,omitempty"`
|
||||
}
|
||||
|
||||
type Heatmap struct {
|
||||
StartTime float64 `json:"start_time"`
|
||||
EndTime float64 `json:"end_time"`
|
||||
Value float64 `json:"value"`
|
||||
}
|
||||
|
||||
type Version struct {
|
||||
Version string `json:"version"`
|
||||
CurrentGitHead string `json:"current_git_head"`
|
||||
ReleaseGitHead string `json:"release_git_head"`
|
||||
Repository string `json:"repository"`
|
||||
Version string `json:"version"`
|
||||
CurrentGitHead interface{} `json:"current_git_head"`
|
||||
ReleaseGitHead string `json:"release_git_head"`
|
||||
Repository string `json:"repository"`
|
||||
}
|
||||
|
||||
type AudioEXT string
|
||||
type Acodec string
|
||||
|
||||
const (
|
||||
None AudioEXT = "none"
|
||||
AcodecNone Acodec = "none"
|
||||
Mp4A402 Acodec = "mp4a.40.2"
|
||||
Mp4A405 Acodec = "mp4a.40.5"
|
||||
Opus Acodec = "opus"
|
||||
)
|
||||
|
||||
type AutomaticCaptionEXT string
|
||||
|
||||
const (
|
||||
Json3 AutomaticCaptionEXT = "json3"
|
||||
Srv1 AutomaticCaptionEXT = "srv1"
|
||||
Srv2 AutomaticCaptionEXT = "srv2"
|
||||
Srv3 AutomaticCaptionEXT = "srv3"
|
||||
Ttml AutomaticCaptionEXT = "ttml"
|
||||
Vtt AutomaticCaptionEXT = "vtt"
|
||||
)
|
||||
|
||||
type DynamicRange string
|
||||
|
@ -63,10 +306,38 @@ const (
|
|||
HDR DynamicRange = "HDR"
|
||||
)
|
||||
|
||||
type EXT string
|
||||
type MediaEXT string
|
||||
|
||||
const (
|
||||
Mp4 EXT = "mp4"
|
||||
EXTNone MediaEXT = "none"
|
||||
EXTMhtml MediaEXT = "mhtml"
|
||||
M4A MediaEXT = "m4a"
|
||||
Mp4 MediaEXT = "mp4"
|
||||
Webm MediaEXT = "webm"
|
||||
)
|
||||
|
||||
type Container string
|
||||
|
||||
const (
|
||||
M4ADash Container = "m4a_dash"
|
||||
Mp4Dash Container = "mp4_dash"
|
||||
WebmDash Container = "webm_dash"
|
||||
)
|
||||
|
||||
type FormatNote string
|
||||
|
||||
const (
|
||||
Default FormatNote = "Default"
|
||||
Low FormatNote = "low"
|
||||
Medium FormatNote = "medium"
|
||||
Premium FormatNote = "Premium"
|
||||
Storyboard FormatNote = "storyboard"
|
||||
The1080P FormatNote = "1080p"
|
||||
The144P FormatNote = "144p"
|
||||
The240P FormatNote = "240p"
|
||||
The360P FormatNote = "360p"
|
||||
The480P FormatNote = "480p"
|
||||
The720P FormatNote = "720p"
|
||||
)
|
||||
|
||||
type Accept string
|
||||
|
@ -87,9 +358,32 @@ const (
|
|||
Navigate SECFetchMode = "navigate"
|
||||
)
|
||||
|
||||
type Language string
|
||||
|
||||
const (
|
||||
En Language = "en"
|
||||
)
|
||||
|
||||
type Protocol string
|
||||
|
||||
const (
|
||||
HTTPS Protocol = "https"
|
||||
M3U8Native Protocol = "m3u8_native"
|
||||
HTTPS Protocol = "https"
|
||||
M3U8Native Protocol = "m3u8_native"
|
||||
ProtocolMhtml Protocol = "mhtml"
|
||||
)
|
||||
|
||||
type Resolution string
|
||||
|
||||
const (
|
||||
AudioOnly Resolution = "audio only"
|
||||
The1280X720 Resolution = "1280x720"
|
||||
The160X90 Resolution = "160x90"
|
||||
The1920X1080 Resolution = "1920x1080"
|
||||
The256X144 Resolution = "256x144"
|
||||
The320X180 Resolution = "320x180"
|
||||
The426X240 Resolution = "426x240"
|
||||
The48X27 Resolution = "48x27"
|
||||
The640X360 Resolution = "640x360"
|
||||
The80X45 Resolution = "80x45"
|
||||
The854X480 Resolution = "854x480"
|
||||
)
|
||||
|
|
|
@ -14,91 +14,26 @@ import (
|
|||
"golang.org/x/sync/errgroup"
|
||||
)
|
||||
|
||||
type PlaylistEntry struct {
|
||||
ID string `json:"id"`
|
||||
Uploader string `json:"uploader"`
|
||||
UploaderID string `json:"uploader_id"`
|
||||
UploadDate string `json:"upload_date"`
|
||||
Title string `json:"title"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Duration int64 `json:"duration"`
|
||||
LikeCount int64 `json:"like_count"`
|
||||
DislikeCount int64 `json:"dislike_count"`
|
||||
CommentCount int64 `json:"comment_count"`
|
||||
Formats []Format `json:"formats"`
|
||||
AgeLimit int64 `json:"age_limit"`
|
||||
Tags []string `json:"tags"`
|
||||
Categories []string `json:"categories"`
|
||||
Cast []any `json:"cast"`
|
||||
Subtitles Subtitles `json:"subtitles"`
|
||||
Thumbnails []Thumbnail `json:"thumbnails"`
|
||||
Timestamp int64 `json:"timestamp"`
|
||||
ViewCount int64 `json:"view_count"`
|
||||
WebpageURL string `json:"webpage_url"`
|
||||
OriginalURL string `json:"original_url"`
|
||||
WebpageURLBasename string `json:"webpage_url_basename"`
|
||||
WebpageURLDomain string `json:"webpage_url_domain"`
|
||||
Extractor string `json:"extractor"`
|
||||
ExtractorKey string `json:"extractor_key"`
|
||||
PlaylistCount int64 `json:"playlist_count"`
|
||||
Playlist string `json:"playlist"`
|
||||
PlaylistID string `json:"playlist_id"`
|
||||
PlaylistTitle string `json:"playlist_title"`
|
||||
PlaylistUploader string `json:"playlist_uploader"`
|
||||
PlaylistUploaderID string `json:"playlist_uploader_id"`
|
||||
NEntries int64 `json:"n_entries"`
|
||||
PlaylistIndex int64 `json:"playlist_index"`
|
||||
PlaylistAutonumber int64 `json:"playlist_autonumber"`
|
||||
DisplayID string `json:"display_id"`
|
||||
Fulltitle string `json:"fulltitle"`
|
||||
DurationString string `json:"duration_string"`
|
||||
ReleaseYear int `json:"release_year"`
|
||||
Epoch int64 `json:"epoch"`
|
||||
FormatID string `json:"format_id"`
|
||||
URL string `json:"url"`
|
||||
ManifestURL string `json:"manifest_url"`
|
||||
Tbr float64 `json:"tbr"`
|
||||
EXT EXT `json:"ext"`
|
||||
FPS float64 `json:"fps"`
|
||||
Protocol Protocol `json:"protocol"`
|
||||
VideoHasDRM bool `json:"has_drm"`
|
||||
Width int64 `json:"width"`
|
||||
Height int64 `json:"height"`
|
||||
Vcodec string `json:"vcodec"`
|
||||
Acodec string `json:"acodec"`
|
||||
DynamicRange DynamicRange `json:"dynamic_range"`
|
||||
Resolution string `json:"resolution"`
|
||||
AspectRatio float64 `json:"aspect_ratio"`
|
||||
HTTPHeaders HTTPHeaders `json:"http_headers"`
|
||||
VideoEXT EXT `json:"video_ext"`
|
||||
AudioEXT AudioEXT `json:"audio_ext"`
|
||||
Format string `json:"format"`
|
||||
Filename string `json:"_filename"`
|
||||
VideoFilename string `json:"filename"`
|
||||
Type string `json:"_type"`
|
||||
Version Version `json:"_version"`
|
||||
}
|
// Progress implements ctxprogress.Progress.
-func (p PlaylistEntry) Progress() (current int, total int) {
+func (p Entry) Progress() (current int, total int) {
    return int(p.PlaylistIndex), int(p.PlaylistCount)
}

-func (p PlaylistEntry) Url() string {
-   if p.URL != "" {
-       return p.URL
-   }
-   if p.WebpageURL != "" {
-       return p.WebpageURL
-   }
-   if p.OriginalURL != "" {
-       return p.OriginalURL
-   }
+// func (p PlaylistEntry) Url() string {
+//     if p.URL != "" {
+//         return p.URL
+//     }
+//     if p.WebpageURL != "" {
+//         return p.WebpageURL
+//     }
+//     if p.OriginalURL != "" {
+//         return p.OriginalURL
+//     }

-   return ""
-}
+//     return ""
+// }

-func (yt *Client) Playlist(ctx context.Context, url string) ([]PlaylistEntry, error) {
+func (yt *Client) Playlist(ctx context.Context, url string) ([]Entry, error) {
    group, ctx := errgroup.WithContext(ctx)
    w, lines, err := lineReader(group)
    if err != nil {

@@ -117,9 +52,9 @@ func (yt *Client) Playlist(ctx context.Context, url string) ([]PlaylistEntry, er
        return w.Close()
    })

-   playlists := []PlaylistEntry{}
+   playlists := []Entry{}
    for line := range lines {
-       entry := PlaylistEntry{}
+       entry := Entry{}
        err = json.Unmarshal([]byte(line), &entry)
        if err != nil {
            return nil, err

@@ -163,7 +98,7 @@ func lineReader(group *errgroup.Group) (io.WriteCloser, <-chan string, error) {
    return w, lines, nil
}

-var _ ctxprogress.Progress = (*PlaylistEntry)(nil)
+var _ ctxprogress.Progress = (*Entry)(nil)

var _ ctxprogress.Progress = (*DownloadProgress)(nil)