package torrent

import (
	"bufio"
	"context"
	"errors"
	"fmt"
	"log/slog"
	"os"
	"path/filepath"
	"slices"
	"strings"
	"sync"
	"time"

	"git.kmsign.ru/royalcat/tstor/pkg/ctxio"
	"git.kmsign.ru/royalcat/tstor/pkg/rlog"
	"git.kmsign.ru/royalcat/tstor/src/config"
	"git.kmsign.ru/royalcat/tstor/src/host/store"
	"git.kmsign.ru/royalcat/tstor/src/host/tkv"
	"git.kmsign.ru/royalcat/tstor/src/host/vfs"
	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/trace"
	"golang.org/x/exp/maps"

	"github.com/anacrolix/torrent"
	"github.com/anacrolix/torrent/bencode"
	"github.com/anacrolix/torrent/metainfo"
	"github.com/anacrolix/torrent/types/infohash"
	"github.com/go-git/go-billy/v5"
	"github.com/go-git/go-billy/v5/util"
	"github.com/royalcat/kv"
)

var tracer = otel.Tracer("git.kmsign.ru/royalcat/tstor/host/torrent")
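
// DirAquire records which torrent info-hashes have been accepted into the
// data directory with the given name, so torrents that share a directory are
// checked for compatibility only once per hash.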
type DirAquire struct {
	Name   string
	Hashes []infohash.T
}

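// Service owns the torrent client together with the metadata stores around
// it: excluded-file mappings, cached info bytes, file items and acquired
// directories. Torrents are loaded from .torrent files found on sourceFs.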
type Service struct {
	client        *torrent.Client
	excludedFiles *filesMappingsStore
	infoBytes     *infoBytesStore
	Storage       *DataStorage
	fis           *store.FileItemStore
	dirsAquire    kv.Store[string, DirAquire]

	loadMutex     sync.Mutex
	torrentLoaded chan struct{}

	sourceFs billy.Filesystem

	log *rlog.Logger
}

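// NewService prepares the metadata folder and its stores, starts the torrent
// client and launches a background scan of sourceFs for .torrent files.
// torrentLoaded is closed once that initial scan has finished.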
func NewService(sourceFs billy.Filesystem, conf config.TorrentClient) (*Service, error) {
	s := &Service{
		log:           rlog.Component("torrent-service"),
		sourceFs:      sourceFs,
		torrentLoaded: make(chan struct{}),
		loadMutex:     sync.Mutex{},
	}

	err := os.MkdirAll(conf.MetadataFolder, 0744)
	if err != nil {
		return nil, fmt.Errorf("error creating metadata folder: %w", err)
	}

	s.fis, err = store.NewFileItemStore(filepath.Join(conf.MetadataFolder, "items"), 2*time.Hour)
	if err != nil {
		return nil, fmt.Errorf("error starting item store: %w", err)
	}

	s.Storage, _, err = setupStorage(conf)
	if err != nil {
		return nil, err
	}

	s.excludedFiles, err = newFileMappingsStore(conf.MetadataFolder, s.Storage)
	if err != nil {
		return nil, err
	}

	s.infoBytes, err = newInfoBytesStore(conf.MetadataFolder)
	if err != nil {
		return nil, err
	}

	id, err := store.GetOrCreatePeerID(filepath.Join(conf.MetadataFolder, "ID"))
	if err != nil {
		return nil, fmt.Errorf("error creating node ID: %w", err)
	}

	client, err := store.NewClient(s.Storage, s.fis, &conf, id)
	if err != nil {
		return nil, fmt.Errorf("error starting torrent client: %w", err)
	}
	client.AddDhtNodes(conf.DHTNodes)
	// The client must be kept on the service; without this assignment every
	// later use of s.client would hit a nil pointer.
	s.client = client

	s.dirsAquire, err = tkv.New[string, DirAquire](conf.MetadataFolder, "dir-acquire")
	if err != nil {
		return nil, err
	}

	go func() {
		ctx := context.Background()
		err := s.loadTorrentFiles(ctx)
		if err != nil {
			s.log.Error(ctx, "initial torrent load failed", rlog.Error(err))
		}
		close(s.torrentLoaded)
	}()

	return s, nil
}

var _ vfs.FsFactory = (*Service)(nil).NewTorrentFs
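
// Close shuts down the torrent client and every store owned by the service,
// joining all returned errors into one.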
func (s *Service) Close(ctx context.Context) error {
	return errors.Join(append(
		s.client.Close(),
		s.Storage.Close(),
		s.dirsAquire.Close(ctx),
		s.excludedFiles.Close(ctx),
		s.infoBytes.Close(),
		s.fis.Close(),
	)...)
}

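// LoadTorrent reads torrent metadata from f and returns a controller for it.
// If the torrent is not yet known to the client it is added, its info is
// awaited and it is checked for compatibility with torrents that share the
// same directory name.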
func (s *Service) LoadTorrent(ctx context.Context, f vfs.File) (*Controller, error) {
	ctx, span := tracer.Start(ctx, "LoadTorrent")
	defer span.End()
	log := s.log

	defer f.Close(ctx)

	stat, err := f.Info()
	if err != nil {
		return nil, fmt.Errorf("call stat failed: %w", err)
	}

	span.SetAttributes(attribute.String("filename", stat.Name()))

	mi, err := metainfo.Load(bufio.NewReader(ctxio.IoReader(ctx, f)))
	if err != nil {
		return nil, fmt.Errorf("loading torrent metadata from file %s, error: %w", stat.Name(), err)
	}

	t, ok := s.client.Torrent(mi.HashInfoBytes())
	if !ok {
		span.AddEvent("torrent not found, loading from file")
		log.Info(ctx, "torrent not found, loading from file")

		spec, err := torrent.TorrentSpecFromMetaInfoErr(mi)
		if err != nil {
			return nil, fmt.Errorf("parse spec from metadata: %w", err)
		}
		infoBytes := spec.InfoBytes

		if !isValidInfoHashBytes(infoBytes) {
			log.Warn(ctx, "info loaded from spec is not valid")
			infoBytes = nil
		}

		if len(infoBytes) == 0 {
			log.Info(ctx, "no info loaded from file, trying to load from cache")
			infoBytes, err = s.infoBytes.GetBytes(spec.InfoHash)
			if err != nil && err != errNotFound {
				return nil, fmt.Errorf("get info bytes from database: %w", err)
			}
		}

		t, _ = s.client.AddTorrentOpt(torrent.AddTorrentOpts{
			InfoHash:  spec.InfoHash,
			Storage:   s.Storage,
			InfoBytes: infoBytes,
			ChunkSize: spec.ChunkSize,
		})
		t.AllowDataDownload()
		t.AllowDataUpload()

		span.AddEvent("torrent added to client")

		select {
		case <-ctx.Done():
			return nil, ctx.Err()
		case <-t.GotInfo():
			err := s.infoBytes.Set(t.InfoHash(), t.Metainfo())
			if err != nil {
				log.Error(ctx, "error setting info bytes for torrent",
					slog.String("torrent-name", t.Name()),
					rlog.Error(err),
				)
			}
		}
		span.AddEvent("got info")

		info := t.Info()
		if info == nil {
			return nil, fmt.Errorf("info is nil")
		}

		compatable, _, err := s.checkTorrentCompatable(ctx, spec.InfoHash, *info)
		if err != nil {
			return nil, err
		}
		if !compatable {
			return nil, fmt.Errorf(
				"torrent with name '%s' is not compatible; existing infohash: %s, new: %s",
				t.Name(), t.InfoHash().HexString(), spec.InfoHash.HexString(),
			)
		}
	}

	return newController(t, s.excludedFiles), nil
}

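// checkTorrentCompatable reports whether the torrent described by ih and info
// may share its directory with the torrents already known under the same
// name. Accepted hashes are persisted in dirsAquire so the check runs only
// once per hash.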
func (s *Service) checkTorrentCompatable(ctx context.Context, ih infohash.T, info metainfo.Info) (compatable bool, tryLater bool, err error) {
	log := s.log.With(
		slog.String("new-name", info.BestName()),
		slog.String("new-infohash", ih.String()),
	)

	name := info.BestName()

	aq, found, err := s.dirsAquire.Get(ctx, info.BestName())
	if err != nil {
		return false, false, err
	}
	if !found {
		err = s.dirsAquire.Set(ctx, name, DirAquire{
			Name:   name,
			Hashes: slices.Compact([]infohash.T{ih}),
		})
		if err != nil {
			return false, false, err
		}

		log.Debug(ctx, "acquire entry not found, created a new one")
		return true, false, nil
	}

	if slices.Contains(aq.Hashes, ih) {
		log.Debug(ctx, "hash already known to be compatible")
		return true, false, nil
	}

	for _, existingTorrent := range s.client.Torrents() {
		if existingTorrent.Name() != name || existingTorrent.InfoHash() == ih {
			continue
		}

		existingInfo := existingTorrent.Info()

		existingFiles := slices.Clone(existingInfo.Files)
		newFiles := slices.Clone(info.Files)

		if !s.checkTorrentFilesCompatable(ctx, aq, existingFiles, newFiles) {
			return false, false, nil
		}

		aq.Hashes = slicesUnique(append(aq.Hashes, ih))
		err = s.dirsAquire.Set(ctx, aq.Name, aq)
		if err != nil {
			log.Warn(ctx, "failed to save acquire entry", rlog.Error(err))
			return false, false, err
		}
	}

	if slices.Contains(aq.Hashes, ih) {
		log.Debug(ctx, "hash is compatible")
		return true, false, nil
	}

	log.Debug(ctx, "torrent with the same name not found, try later")
	return false, true, nil
}

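// checkTorrentFilesCompatable compares the file lists of an existing and a
// new torrent that share a directory: identical lists are compatible, and a
// larger new torrent is compatible as long as it does not give an already
// existing path a different length.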
func (s *Service) checkTorrentFilesCompatable(ctx context.Context, aq DirAquire, existingFiles, newFiles []metainfo.FileInfo) bool {
	log := s.log.With(slog.String("name", aq.Name))

	pathCmp := func(a, b metainfo.FileInfo) int {
		return slices.Compare(a.BestPath(), b.BestPath())
	}
	slices.SortStableFunc(existingFiles, pathCmp)
	slices.SortStableFunc(newFiles, pathCmp)

	// torrents are essentially identical
	if slices.EqualFunc(existingFiles, newFiles, func(fi1, fi2 metainfo.FileInfo) bool {
		return fi1.Length == fi2.Length && slices.Equal(fi1.BestPath(), fi2.BestPath())
	}) {
		return true
	}

	if len(newFiles) > len(existingFiles) {
		type fileInfo struct {
			Path   string
			Length int64
		}
		mapInfo := func(fi metainfo.FileInfo) fileInfo {
			return fileInfo{
				Path:   strings.Join(fi.BestPath(), "/"),
				Length: fi.Length,
			}
		}

		existingFiles := apply(existingFiles, mapInfo)
		newFiles := apply(newFiles, mapInfo)

		for _, n := range newFiles {
			if slices.Contains(existingFiles, n) {
				continue
			}

			for _, e := range existingFiles {
				if e.Path == n.Path && e.Length != n.Length {
					log.Warn(ctx, "torrents not compatible, same path has different lengths",
						slog.String("path", n.Path),
						slog.Int64("existing-length", e.Length),
						slog.Int64("new-length", n.Length),
					)
					return false
				}
			}
		}
	}

	return true
}

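// isValidInfoHashBytes reports whether d can be bencode-decoded into a
// metainfo.Info.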
func isValidInfoHashBytes(d []byte) bool {
	var info metainfo.Info
	err := bencode.Unmarshal(d, &info)
	return err == nil
}

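// Stats returns an empty Stats value; detailed per-service statistics are not
// collected here yet.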
func (s *Service) Stats() (*Stats, error) {
	return &Stats{}, nil
}

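// GetStats returns the aggregated connection statistics of the underlying
// torrent client.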
func (s *Service) GetStats() torrent.ConnStats {
	return s.client.ConnStats()
}

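// loadWorkers is the number of goroutines used to parse and add torrent files
// during the initial scan.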
const loadWorkers = 5
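
// loadTorrentFiles walks sourceFs and feeds every *.torrent file it finds to
// a pool of loadWorkers workers, each of which calls LoadTorrent.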
func (s *Service) loadTorrentFiles(ctx context.Context) error {
	ctx, span := tracer.Start(ctx, "loadTorrentFiles", trace.WithAttributes(
		attribute.Int("workers", loadWorkers),
	))
	defer span.End()
	log := s.log

	loaderPaths := make(chan string)
	wg := sync.WaitGroup{}

	defer func() {
		close(loaderPaths)
		wg.Wait()
	}()

	loaderWorker := func() {
		defer wg.Done()
		for path := range loaderPaths {
			file, err := vfs.NewLazyOsFile(path)
			if err != nil {
				log.Error(ctx, "error opening torrent file", slog.String("filename", path), rlog.Error(err))
				continue
			}
			defer file.Close(ctx)

			_, err = s.LoadTorrent(ctx, file)
			if err != nil {
				log.Error(ctx, "failed adding torrent", rlog.Error(err))
			}
		}
	}

	for range loadWorkers {
		// Register each worker before it starts so wg.Wait cannot race with wg.Add.
		wg.Add(1)
		go loaderWorker()
	}

	return util.Walk(s.sourceFs, "/", func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return fmt.Errorf("fs walk error: %w", err)
		}

		if ctx.Err() != nil {
			return ctx.Err()
		}

		if info.IsDir() {
			return nil
		}

		if strings.HasSuffix(path, ".torrent") {
			loaderPaths <- path
		}

		return nil
	})
}

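// ListTorrents waits for the initial load to finish and returns a controller
// for every torrent currently registered in the client.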
func (s *Service) ListTorrents(ctx context.Context) ([]*Controller, error) {
	<-s.torrentLoaded

	out := []*Controller{}
	for _, v := range s.client.Torrents() {
		out = append(out, newController(v, s.excludedFiles))
	}
	return out, nil
}

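// GetTorrent waits for the initial load to finish and returns a controller
// for the torrent with the given infohash, or nil if it is not known.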
func (s *Service) GetTorrent(infohashHex string) (*Controller, error) {
	<-s.torrentLoaded

	t, ok := s.client.Torrent(infohash.FromHexString(infohashHex))
	if !ok {
		return nil, nil
	}

	return newController(t, s.excludedFiles), nil
}

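// slicesUnique returns the distinct elements of in; the order of the result
// is unspecified.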
func slicesUnique[S ~[]E, E comparable](in S) S {
	m := map[E]struct{}{}
	for _, v := range in {
		m[v] = struct{}{}
	}

	return maps.Keys(m)
}

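// apply maps every element of in through f and returns the resulting slice.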
func apply[I, O any](in []I, f func(e I) O) []O {
	out := []O{}
	for _, v := range in {
		out = append(out, f(v))
	}
	return out
}