torrent list
All checks were successful
docker / build-docker (linux/amd64) (push) Successful in 1m39s
docker / build-docker (linux/386) (push) Successful in 1m46s
docker / build-docker (linux/arm64/v8) (push) Successful in 8m18s
docker / build-docker (linux/arm64) (push) Successful in 8m29s
docker / build-docker (linux/arm/v7) (push) Successful in 8m49s
This commit is contained in: parent d8ee8a3a24 · commit 0d7aac068c
23 changed files with 1285 additions and 698 deletions
@@ -1,5 +1,5 @@
 type Query {
-  torrents(filter: TorrentsFilter, pagination: Pagination): [Torrent!]!
+  torrents(filter: TorrentsFilter): [Torrent!]!
   fsEntry(path: String!): FsEntry
 }

@@ -10,6 +10,7 @@ input TorrentsFilter {
   bytesMissing: IntFilter
   peersCount: IntFilter
+  downloading: BooleanFilter
 }

 input Pagination {
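The schema change above drops the unused pagination argument from torrents and adds a downloading flag to TorrentsFilter. On the server this input decodes into the generated Go model types that appear later in this diff; a minimal sketch of the value the resolver receives for a client filter like {downloading: {eq: true}} (sketch only, not code from this commit):

package main

import "git.kmsign.ru/royalcat/tstor/src/delivery/graphql/model"

func main() {
	// What the Torrents resolver receives for: torrents(filter: {downloading: {eq: true}})
	eq := true
	filter := &model.TorrentsFilter{
		Downloading: &model.BooleanFilter{Eq: &eq},
	}
	_ = filter
}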
@@ -7,6 +7,9 @@ type Torrent {
   files: [TorrentFile!]!
   excludedFiles: [TorrentFile!]!
   peers: [TorrentPeer!]!
+
+  # if at least one piece of the torrent is requested to download and not already downloaded
+  downloading: Boolean!
 }

 type TorrentFile {

@ -80,7 +80,7 @@ type ComplexityRoot struct {
|
|||
|
||||
Query struct {
|
||||
FsEntry func(childComplexity int, path string) int
|
||||
Torrents func(childComplexity int, filter *model.TorrentsFilter, pagination *model.Pagination) int
|
||||
Torrents func(childComplexity int, filter *model.TorrentsFilter) int
|
||||
}
|
||||
|
||||
ResolverFS struct {
|
||||
|
@ -115,6 +115,7 @@ type ComplexityRoot struct {
|
|||
Torrent struct {
|
||||
BytesCompleted func(childComplexity int) int
|
||||
BytesMissing func(childComplexity int) int
|
||||
Downloading func(childComplexity int) int
|
||||
ExcludedFiles func(childComplexity int) int
|
||||
Files func(childComplexity int) int
|
||||
Infohash func(childComplexity int) int
|
||||
|
@ -166,7 +167,7 @@ type MutationResolver interface {
|
|||
DedupeStorage(ctx context.Context) (int64, error)
|
||||
}
|
||||
type QueryResolver interface {
|
||||
Torrents(ctx context.Context, filter *model.TorrentsFilter, pagination *model.Pagination) ([]*model.Torrent, error)
|
||||
Torrents(ctx context.Context, filter *model.TorrentsFilter) ([]*model.Torrent, error)
|
||||
FsEntry(ctx context.Context, path string) (model.FsEntry, error)
|
||||
}
|
||||
type ResolverFSResolver interface {
|
||||
|
@ -316,7 +317,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
|
|||
return 0, false
|
||||
}
|
||||
|
||||
return e.complexity.Query.Torrents(childComplexity, args["filter"].(*model.TorrentsFilter), args["pagination"].(*model.Pagination)), true
|
||||
return e.complexity.Query.Torrents(childComplexity, args["filter"].(*model.TorrentsFilter)), true
|
||||
|
||||
case "ResolverFS.entries":
|
||||
if e.complexity.ResolverFS.Entries == nil {
|
||||
|
@ -414,6 +415,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
|
|||
|
||||
return e.complexity.Torrent.BytesMissing(childComplexity), true
|
||||
|
||||
case "Torrent.downloading":
|
||||
if e.complexity.Torrent.Downloading == nil {
|
||||
break
|
||||
}
|
||||
|
||||
return e.complexity.Torrent.Downloading(childComplexity), true
|
||||
|
||||
case "Torrent.excludedFiles":
|
||||
if e.complexity.Torrent.ExcludedFiles == nil {
|
||||
break
|
||||
|
@ -731,7 +739,7 @@ type Task {
|
|||
}
|
||||
`, BuiltIn: false},
|
||||
{Name: "../../../graphql/query.graphql", Input: `type Query {
|
||||
torrents(filter: TorrentsFilter, pagination: Pagination): [Torrent!]!
|
||||
torrents(filter: TorrentsFilter): [Torrent!]!
|
||||
fsEntry(path: String!): FsEntry
|
||||
}
|
||||
|
||||
|
@ -742,6 +750,7 @@ input TorrentsFilter {
|
|||
bytesMissing: IntFilter
|
||||
|
||||
peersCount: IntFilter
|
||||
downloading: BooleanFilter
|
||||
}
|
||||
|
||||
input Pagination {
|
||||
|
@ -859,6 +868,9 @@ type TorrentFileEntry implements File & FsEntry {
|
|||
files: [TorrentFile!]!
|
||||
excludedFiles: [TorrentFile!]!
|
||||
peers: [TorrentPeer!]!
|
||||
|
||||
# if at least one piece of the torrent is request to download and not already downloaded
|
||||
downloading: Boolean!
|
||||
}
|
||||
|
||||
type TorrentFile {
|
||||
|
@ -873,7 +885,8 @@ type TorrentPeer {
|
|||
discovery: String!
|
||||
port: Int!
|
||||
clientName: String!
|
||||
}`, BuiltIn: false},
|
||||
}
|
||||
`, BuiltIn: false},
|
||||
}
|
||||
var parsedSchema = gqlparser.MustLoadSchema(sources...)
|
||||
|
||||
|
@ -986,15 +999,6 @@ func (ec *executionContext) field_Query_torrents_args(ctx context.Context, rawAr
|
|||
}
|
||||
}
|
||||
args["filter"] = arg0
|
||||
var arg1 *model.Pagination
|
||||
if tmp, ok := rawArgs["pagination"]; ok {
|
||||
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pagination"))
|
||||
arg1, err = ec.unmarshalOPagination2ᚖgitᚗkmsignᚗruᚋroyalcatᚋtstorᚋsrcᚋdeliveryᚋgraphqlᚋmodelᚐPagination(ctx, tmp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
args["pagination"] = arg1
|
||||
return args, nil
|
||||
}
|
||||
|
||||
|
@ -1546,7 +1550,7 @@ func (ec *executionContext) _Query_torrents(ctx context.Context, field graphql.C
|
|||
}()
|
||||
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
|
||||
ctx = rctx // use context from middleware stack in children
|
||||
return ec.resolvers.Query().Torrents(rctx, fc.Args["filter"].(*model.TorrentsFilter), fc.Args["pagination"].(*model.Pagination))
|
||||
return ec.resolvers.Query().Torrents(rctx, fc.Args["filter"].(*model.TorrentsFilter))
|
||||
})
|
||||
if err != nil {
|
||||
ec.Error(ctx, err)
|
||||
|
@ -1587,6 +1591,8 @@ func (ec *executionContext) fieldContext_Query_torrents(ctx context.Context, fie
|
|||
return ec.fieldContext_Torrent_excludedFiles(ctx, field)
|
||||
case "peers":
|
||||
return ec.fieldContext_Torrent_peers(ctx, field)
|
||||
case "downloading":
|
||||
return ec.fieldContext_Torrent_downloading(ctx, field)
|
||||
}
|
||||
return nil, fmt.Errorf("no field named %q was found under type Torrent", field.Name)
|
||||
},
|
||||
|
@ -2683,6 +2689,50 @@ func (ec *executionContext) fieldContext_Torrent_peers(ctx context.Context, fiel
|
|||
return fc, nil
|
||||
}
|
||||
|
||||
func (ec *executionContext) _Torrent_downloading(ctx context.Context, field graphql.CollectedField, obj *model.Torrent) (ret graphql.Marshaler) {
|
||||
fc, err := ec.fieldContext_Torrent_downloading(ctx, field)
|
||||
if err != nil {
|
||||
return graphql.Null
|
||||
}
|
||||
ctx = graphql.WithFieldContext(ctx, fc)
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
ec.Error(ctx, ec.Recover(ctx, r))
|
||||
ret = graphql.Null
|
||||
}
|
||||
}()
|
||||
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
|
||||
ctx = rctx // use context from middleware stack in children
|
||||
return obj.Downloading, nil
|
||||
})
|
||||
if err != nil {
|
||||
ec.Error(ctx, err)
|
||||
return graphql.Null
|
||||
}
|
||||
if resTmp == nil {
|
||||
if !graphql.HasFieldError(ctx, fc) {
|
||||
ec.Errorf(ctx, "must not be null")
|
||||
}
|
||||
return graphql.Null
|
||||
}
|
||||
res := resTmp.(bool)
|
||||
fc.Result = res
|
||||
return ec.marshalNBoolean2bool(ctx, field.Selections, res)
|
||||
}
|
||||
|
||||
func (ec *executionContext) fieldContext_Torrent_downloading(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
|
||||
fc = &graphql.FieldContext{
|
||||
Object: "Torrent",
|
||||
Field: field,
|
||||
IsMethod: false,
|
||||
IsResolver: false,
|
||||
Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
|
||||
return nil, errors.New("field of type Boolean does not have child fields")
|
||||
},
|
||||
}
|
||||
return fc, nil
|
||||
}
|
||||
|
||||
func (ec *executionContext) _TorrentFS_name(ctx context.Context, field graphql.CollectedField, obj *model.TorrentFs) (ret graphql.Marshaler) {
|
||||
fc, err := ec.fieldContext_TorrentFS_name(ctx, field)
|
||||
if err != nil {
|
||||
|
@ -2782,6 +2832,8 @@ func (ec *executionContext) fieldContext_TorrentFS_torrent(ctx context.Context,
|
|||
return ec.fieldContext_Torrent_excludedFiles(ctx, field)
|
||||
case "peers":
|
||||
return ec.fieldContext_Torrent_peers(ctx, field)
|
||||
case "downloading":
|
||||
return ec.fieldContext_Torrent_downloading(ctx, field)
|
||||
}
|
||||
return nil, fmt.Errorf("no field named %q was found under type Torrent", field.Name)
|
||||
},
|
||||
|
@ -3064,6 +3116,8 @@ func (ec *executionContext) fieldContext_TorrentFileEntry_torrent(ctx context.Co
|
|||
return ec.fieldContext_Torrent_excludedFiles(ctx, field)
|
||||
case "peers":
|
||||
return ec.fieldContext_Torrent_peers(ctx, field)
|
||||
case "downloading":
|
||||
return ec.fieldContext_Torrent_downloading(ctx, field)
|
||||
}
|
||||
return nil, fmt.Errorf("no field named %q was found under type Torrent", field.Name)
|
||||
},
|
||||
|
@ -3390,6 +3444,8 @@ func (ec *executionContext) fieldContext_TorrentProgress_torrent(ctx context.Con
|
|||
return ec.fieldContext_Torrent_excludedFiles(ctx, field)
|
||||
case "peers":
|
||||
return ec.fieldContext_Torrent_peers(ctx, field)
|
||||
case "downloading":
|
||||
return ec.fieldContext_Torrent_downloading(ctx, field)
|
||||
}
|
||||
return nil, fmt.Errorf("no field named %q was found under type Torrent", field.Name)
|
||||
},
|
||||
|
@ -5773,7 +5829,7 @@ func (ec *executionContext) unmarshalInputTorrentsFilter(ctx context.Context, ob
|
|||
asMap[k] = v
|
||||
}
|
||||
|
||||
fieldsInOrder := [...]string{"infohash", "name", "bytesCompleted", "bytesMissing", "peersCount"}
|
||||
fieldsInOrder := [...]string{"infohash", "name", "bytesCompleted", "bytesMissing", "peersCount", "downloading"}
|
||||
for _, k := range fieldsInOrder {
|
||||
v, ok := asMap[k]
|
||||
if !ok {
|
||||
|
@ -5815,6 +5871,13 @@ func (ec *executionContext) unmarshalInputTorrentsFilter(ctx context.Context, ob
|
|||
return it, err
|
||||
}
|
||||
it.PeersCount = data
|
||||
case "downloading":
|
||||
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("downloading"))
|
||||
data, err := ec.unmarshalOBooleanFilter2ᚖgitᚗkmsignᚗruᚋroyalcatᚋtstorᚋsrcᚋdeliveryᚋgraphqlᚋmodelᚐBooleanFilter(ctx, v)
|
||||
if err != nil {
|
||||
return it, err
|
||||
}
|
||||
it.Downloading = data
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -6752,6 +6815,11 @@ func (ec *executionContext) _Torrent(ctx context.Context, sel ast.SelectionSet,
|
|||
}
|
||||
|
||||
out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) })
|
||||
case "downloading":
|
||||
out.Values[i] = ec._Torrent_downloading(ctx, field, obj)
|
||||
if out.Values[i] == graphql.Null {
|
||||
atomic.AddUint32(&out.Invalids, 1)
|
||||
}
|
||||
default:
|
||||
panic("unknown field " + strconv.Quote(field.Name))
|
||||
}
|
||||
|
@ -8008,6 +8076,14 @@ func (ec *executionContext) marshalOBoolean2ᚖbool(ctx context.Context, sel ast
|
|||
return res
|
||||
}
|
||||
|
||||
func (ec *executionContext) unmarshalOBooleanFilter2ᚖgitᚗkmsignᚗruᚋroyalcatᚋtstorᚋsrcᚋdeliveryᚋgraphqlᚋmodelᚐBooleanFilter(ctx context.Context, v interface{}) (*model.BooleanFilter, error) {
|
||||
if v == nil {
|
||||
return nil, nil
|
||||
}
|
||||
res, err := ec.unmarshalInputBooleanFilter(ctx, v)
|
||||
return &res, graphql.ErrorOnPath(ctx, err)
|
||||
}
|
||||
|
||||
func (ec *executionContext) unmarshalODateTime2ᚖtimeᚐTime(ctx context.Context, v interface{}) (*time.Time, error) {
|
||||
if v == nil {
|
||||
return nil, nil
|
||||
|
@ -8107,14 +8183,6 @@ func (ec *executionContext) marshalOMutation2ᚖgitᚗkmsignᚗruᚋroyalcatᚋt
|
|||
return ec._Mutation(ctx, sel)
|
||||
}
|
||||
|
||||
func (ec *executionContext) unmarshalOPagination2ᚖgitᚗkmsignᚗruᚋroyalcatᚋtstorᚋsrcᚋdeliveryᚋgraphqlᚋmodelᚐPagination(ctx context.Context, v interface{}) (*model.Pagination, error) {
|
||||
if v == nil {
|
||||
return nil, nil
|
||||
}
|
||||
res, err := ec.unmarshalInputPagination(ctx, v)
|
||||
return &res, graphql.ErrorOnPath(ctx, err)
|
||||
}
|
||||
|
||||
func (ec *executionContext) marshalOProgress2gitᚗkmsignᚗruᚋroyalcatᚋtstorᚋsrcᚋdeliveryᚋgraphqlᚋmodelᚐProgress(ctx context.Context, sel ast.SelectionSet, v model.Progress) graphql.Marshaler {
|
||||
if v == nil {
|
||||
return graphql.Null
|
||||
|
|
|
@@ -1,6 +1,8 @@
 package model

 import (
+    "context"
+
     "git.kmsign.ru/royalcat/tstor/src/host/vfs"
 )

@@ -9,7 +11,7 @@ type FsElem interface {
     IsDir() bool
 }

-func FillFsEntry(e FsElem, fs vfs.Filesystem, path string) FsEntry {
+func FillFsEntry(ctx context.Context, e FsElem, fs vfs.Filesystem, path string) (FsEntry, error) {
     switch e.(type) {
     case *vfs.ArchiveFS:
         e := e.(*vfs.ArchiveFS)
@@ -17,31 +19,35 @@ func FillFsEntry(e FsElem, fs vfs.Filesystem, path string) FsEntry {
             Name: e.Name(),
             Size: e.Size(),
             FS:   e,
-        }
+        }, nil
     case *vfs.ResolverFS:
         e := e.(*vfs.ResolverFS)
         return ResolverFs{
             Name: e.Name(),
             FS:   e,
-        }
+        }, nil
     case *vfs.TorrentFS:
         e := e.(*vfs.TorrentFS)
+        torrent, err := MapTorrent(ctx, e.Torrent)
+        if err != nil {
+            return nil, err
+        }
         return TorrentFs{
             Name:    e.Name(),
-            Torrent: MapTorrent(e.Torrent),
+            Torrent: torrent,
             FS:      e,
-        }
+        }, nil
     default:
         if e.IsDir() {
             return SimpleDir{
                 Name: e.Name(),
                 FS:   fs,
                 Path: path,
-            }
+            }, nil
         } else {
             return SimpleFile{
                 Name: e.Name(),
-            }
+            }, nil
         }
     }
 }
@@ -42,3 +42,13 @@ func (f *StringFilter) Include(v string) bool {

     return true
 }
+
+func (f *BooleanFilter) Include(v bool) bool {
+    if f == nil {
+        return true
+    } else if f.Eq != nil {
+        return v == *f.Eq
+    }
+
+    return true
+}
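The new BooleanFilter.Include follows the same convention as the other filters: a nil filter (field omitted in the query) matches everything, and only an explicit eq value restricts the result. A small illustrative sketch of that behaviour:

package main

import (
	"fmt"

	"git.kmsign.ru/royalcat/tstor/src/delivery/graphql/model"
)

func main() {
	var noFilter *model.BooleanFilter // filter field omitted in the GraphQL query
	eq := true
	onlyDownloading := &model.BooleanFilter{Eq: &eq}

	fmt.Println(noFilter.Include(true), noFilter.Include(false))               // true true: nil filter matches everything
	fmt.Println(onlyDownloading.Include(true), onlyDownloading.Include(false)) // true false
}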
@@ -1,6 +1,8 @@
 package model

 import (
+    "context"
+
     "git.kmsign.ru/royalcat/tstor/src/host/controller"
     "github.com/anacrolix/torrent"
 )

@@ -26,12 +28,25 @@ func MapPeerSource(source torrent.PeerSource) string {
     }
 }

-func MapTorrent(t *controller.Torrent) *Torrent {
+func MapTorrent(ctx context.Context, t *controller.Torrent) (*Torrent, error) {
+    downloading := false
+    files, err := t.Files(ctx)
+    if err != nil {
+        return nil, err
+    }
+    for _, file := range files {
+        if file.Priority() > torrent.PiecePriorityNone && file.BytesCompleted() < file.Length() {
+            downloading = true
+            break
+        }
+    }
+
     return &Torrent{
         Infohash:       t.InfoHash(),
         Name:           t.Name(),
         BytesCompleted: t.BytesCompleted(),
         BytesMissing:   t.BytesMissing(),
         T:              t,
-    }
+        Downloading:    downloading,
+    }, nil
 }
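MapTorrent now has to ask the controller for the file list (hence the added ctx and error return): a torrent is reported as downloading when at least one file is still requested (priority above PiecePriorityNone) and not yet fully fetched. The same rule, restated as a standalone predicate over anacrolix/torrent files (a sketch, not code from this commit):

package sketch

import "github.com/anacrolix/torrent"

// anyFileDownloading reports whether any file is still requested and incomplete.
// It restates the rule used inside MapTorrent above.
func anyFileDownloading(files []*torrent.File) bool {
	for _, f := range files {
		if f.Priority() > torrent.PiecePriorityNone && f.BytesCompleted() < f.Length() {
			return true
		}
	}
	return false
}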
@@ -179,6 +179,7 @@ type Torrent struct {
     Files          []*TorrentFile      `json:"files"`
     ExcludedFiles  []*TorrentFile      `json:"excludedFiles"`
     Peers          []*TorrentPeer      `json:"peers"`
+    Downloading    bool                `json:"downloading"`
     T              *controller.Torrent `json:"-"`
 }

@@ -253,4 +254,5 @@ type TorrentsFilter struct {
     BytesCompleted *IntFilter     `json:"bytesCompleted,omitempty"`
     BytesMissing   *IntFilter     `json:"bytesMissing,omitempty"`
     PeersCount     *IntFilter     `json:"peersCount,omitempty"`
+    Downloading    *BooleanFilter `json:"downloading,omitempty"`
 }
@@ -19,7 +19,11 @@ func (r *archiveFSResolver) Entries(ctx context.Context, obj *model.ArchiveFs) (
     }
     out := []model.FsEntry{}
     for _, e := range entries {
-        out = append(out, model.FillFsEntry(e, obj.FS, "."))
+        entry, err := model.FillFsEntry(ctx, e, obj.FS, ".")
+        if err != nil {
+            return nil, err
+        }
+        out = append(out, entry)
     }
     return out, nil
 }
@@ -32,7 +36,11 @@ func (r *resolverFSResolver) Entries(ctx context.Context, obj *model.ResolverFs)
     }
     out := []model.FsEntry{}
     for _, e := range entries {
-        out = append(out, model.FillFsEntry(e, obj.FS, "."))
+        entry, err := model.FillFsEntry(ctx, e, obj.FS, ".")
+        if err != nil {
+            return nil, err
+        }
+        out = append(out, entry)
     }
     return out, nil
 }
@@ -45,7 +53,11 @@ func (r *simpleDirResolver) Entries(ctx context.Context, obj *model.SimpleDir) (
     }
     out := []model.FsEntry{}
     for _, e := range entries {
-        out = append(out, model.FillFsEntry(e, obj.FS, obj.Path))
+        entry, err := model.FillFsEntry(ctx, e, obj.FS, obj.Path)
+        if err != nil {
+            return nil, err
+        }
+        out = append(out, entry)
     }
     return out, nil
 }
@@ -58,7 +70,11 @@ func (r *torrentFSResolver) Entries(ctx context.Context, obj *model.TorrentFs) (
     }
     out := []model.FsEntry{}
     for _, e := range entries {
-        out = append(out, model.FillFsEntry(e, obj.FS, "."))
+        entry, err := model.FillFsEntry(ctx, e, obj.FS, ".")
+        if err != nil {
+            return nil, err
+        }
+        out = append(out, entry)
     }
     return out, nil
 }
@@ -15,7 +15,7 @@ import (
 )

 // Torrents is the resolver for the torrents field.
-func (r *queryResolver) Torrents(ctx context.Context, filter *model.TorrentsFilter, pagination *model.Pagination) ([]*model.Torrent, error) {
+func (r *queryResolver) Torrents(ctx context.Context, filter *model.TorrentsFilter) ([]*model.Torrent, error) {
     torrents, err := r.Service.ListTorrents(ctx)
     if err != nil {
         return nil, err
@@ -49,6 +49,13 @@ func (r *queryResolver) Torrents(ctx context.Context, filter *model.TorrentsFilt
             })
         }

+        if filter.Downloading != nil {
+            filterFuncs = append(filterFuncs, func(torrent *model.Torrent) bool {
+                return filter.Downloading.Include(
+                    torrent.Downloading,
+                )
+            })
+        }
     }

     filterFunc := func(torrent *model.Torrent) bool {
@@ -62,7 +69,10 @@ func (r *queryResolver) Torrents(ctx context.Context, filter *model.TorrentsFilt

     tr := []*model.Torrent{}
     for _, t := range torrents {
-        d := model.MapTorrent(t)
+        d, err := model.MapTorrent(ctx, t)
+        if err != nil {
+            return nil, err
+        }

         if !filterFunc(d) {
             continue
@@ -71,7 +81,7 @@ func (r *queryResolver) Torrents(ctx context.Context, filter *model.TorrentsFilt
     }

     slices.SortStableFunc(torrents, func(t1, t2 *controller.Torrent) int {
-        return strings.Compare(t1.InfoHash(), t2.InfoHash())
+        return strings.Compare(t1.Name(), t2.Name())
     })

     return tr, nil
@@ -84,7 +94,7 @@ func (r *queryResolver) FsEntry(ctx context.Context, path string) (model.FsEntry
         return nil, err
     }

-    return model.FillFsEntry(entry, r.VFS, path), nil
+    return model.FillFsEntry(ctx, entry, r.VFS, path)
 }

 // Query returns graph.QueryResolver implementation.
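In the resolver above each non-nil filter field appends one predicate to filterFuncs, and the surrounding filterFunc (only partially visible in these hunks) presumably accepts a torrent only when every predicate passes; the new Downloading case simply reuses BooleanFilter.Include on the mapped model. A sketch of that all-predicates-must-pass composition, under that assumption:

package sketch

import "git.kmsign.ru/royalcat/tstor/src/delivery/graphql/model"

// allOf combines per-field predicates the way the Torrents resolver appears to:
// a torrent is kept only when every registered filter function accepts it.
func allOf(filterFuncs []func(*model.Torrent) bool) func(*model.Torrent) bool {
	return func(t *model.Torrent) bool {
		for _, f := range filterFuncs {
			if !f(t) {
				return false
			}
		}
		return true
	}
}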
@@ -32,8 +32,13 @@ func (r *subscriptionResolver) TorrentDownloadUpdates(ctx context.Context) (<-ch
                 fmt.Println("nil torrent")
                 continue
             }
+            torrent, err := model.MapTorrent(ctx, p.Torrent)
+            if err != nil {
+                // TODO logs
+                continue
+            }
             po := &model.TorrentProgress{
-                Torrent: model.MapTorrent(p.Torrent),
+                Torrent: torrent,
                 Current: p.Current,
                 Total:   p.Total,
             }
@@ -20,7 +20,7 @@ func New(fc *filecache.Cache, ss *service.Stats, s *service.Service, vfs vfs.Fil

     r := echo.New()
     r.Use(
-        middleware.Recover(),
+        // middleware.Recover(),
         middleware.Gzip(),
         middleware.Decompress(),
         Logger(),
@@ -62,9 +62,18 @@ func (s *Torrent) Files(ctx context.Context) ([]*torrent.File, error) {
         return nil, err
     }

-    <-s.t.GotInfo()
+    select {
+    case <-ctx.Done():
+        return nil, ctx.Err()
+    case <-s.t.GotInfo():
+    }
+
     files := s.t.Files()
     files = slices.DeleteFunc(files, func(file *torrent.File) bool {
         if file == nil {
             return true
         }

         p := file.Path()
         if strings.Contains(p, "/.pad/") {
             return true
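The controller change replaces the bare <-s.t.GotInfo() wait with a select, so Files no longer blocks forever when the caller's context is cancelled before the torrent metadata arrives. The same pattern as a reusable helper (a sketch along the lines of the code above):

package sketch

import (
	"context"

	"github.com/anacrolix/torrent"
)

// waitGotInfo blocks until the torrent's info is available or ctx is cancelled.
func waitGotInfo(ctx context.Context, t *torrent.Torrent) error {
	select {
	case <-ctx.Done():
		return ctx.Err()
	case <-t.GotInfo():
		return nil
	}
}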
@@ -3,6 +3,7 @@ targets:
   builders:
     graphql_codegen:
       options:
+        generatedFileHeader: "// ignore_for_file: type=lint\n"
         scalars:
           URL:
             type: String
@@ -1,18 +1,8 @@
 import 'package:flutter/foundation.dart';
 import 'package:graphql/client.dart';

-final client = GraphQLClient(
-  link: _loggerLink.concat(HttpLink("http://localhost:4444/graphql")),
-  cache: GraphQLCache(store: null),
-  defaultPolicies: DefaultPolicies(
-    query: Policies(
-      fetch: FetchPolicy.noCache,
-    ),
-  ),
-);
-
 // final client = GraphQLClient(
-//   link: HttpLink("http://192.168.217.150:4444/graphql"),
+//   link: _loggerLink.concat(HttpLink("http://localhost:4444/graphql")),
 //   cache: GraphQLCache(store: null),
 //   defaultPolicies: DefaultPolicies(
 //     query: Policies(
@@ -21,6 +11,16 @@ final client = GraphQLClient(
 //     ),
 //   ),
 // );

+final client = GraphQLClient(
+  link: HttpLink("http://192.168.217.150:4444/graphql"),
+  cache: GraphQLCache(store: null),
+  defaultPolicies: DefaultPolicies(
+    query: Policies(
+      fetch: FetchPolicy.noCache,
+    ),
+  ),
+);
+
 class LoggerLink extends Link {
   @override
   Stream<Response> request(
File diff suppressed because it is too large
@@ -57,7 +57,7 @@ interface Progress {
   total: Int!
 }
 type Query {
-  torrents(filter: TorrentsFilter, pagination: Pagination): [Torrent!]!
+  torrents(filter: TorrentsFilter): [Torrent!]!
   fsEntry(path: String!): FsEntry
 }
 type ResolverFS implements Dir & FsEntry {
@@ -97,6 +97,7 @@ type Torrent {
   files: [TorrentFile!]!
   excludedFiles: [TorrentFile!]!
   peers: [TorrentPeer!]!
+  downloading: Boolean!
 }
 type TorrentFS implements Dir & FsEntry {
   name: String!
@@ -135,4 +136,5 @@ input TorrentsFilter {
   bytesCompleted: IntFilter
   bytesMissing: IntFilter
   peersCount: IntFilter
+  downloading: BooleanFilter
 }
|
@ -1,3 +1,4 @@
|
|||
// ignore_for_file: type=lint
|
||||
class Input$BooleanFilter {
|
||||
factory Input$BooleanFilter({bool? eq}) => Input$BooleanFilter._({
|
||||
if (eq != null) r'eq': eq,
|
||||
|
@ -38,7 +39,7 @@ class Input$BooleanFilter {
|
|||
if (identical(this, other)) {
|
||||
return true;
|
||||
}
|
||||
if (other is! Input$BooleanFilter || runtimeType != other.runtimeType) {
|
||||
if (!(other is Input$BooleanFilter) || runtimeType != other.runtimeType) {
|
||||
return false;
|
||||
}
|
||||
final l$eq = eq;
|
||||
|
@ -84,7 +85,6 @@ class _CopyWithImpl$Input$BooleanFilter<TRes>
|
|||
|
||||
static const _undefined = <dynamic, dynamic>{};
|
||||
|
||||
@override
|
||||
TRes call({Object? eq = _undefined}) => _then(Input$BooleanFilter._({
|
||||
..._instance._$data,
|
||||
if (eq != _undefined) 'eq': (eq as bool?),
|
||||
|
@ -95,9 +95,8 @@ class _CopyWithStubImpl$Input$BooleanFilter<TRes>
|
|||
implements CopyWith$Input$BooleanFilter<TRes> {
|
||||
_CopyWithStubImpl$Input$BooleanFilter(this._res);
|
||||
|
||||
final TRes _res;
|
||||
TRes _res;
|
||||
|
||||
@override
|
||||
call({bool? eq}) => _res;
|
||||
}
|
||||
|
||||
|
@ -197,7 +196,7 @@ class Input$DateTimeFilter {
|
|||
if (identical(this, other)) {
|
||||
return true;
|
||||
}
|
||||
if (other is! Input$DateTimeFilter || runtimeType != other.runtimeType) {
|
||||
if (!(other is Input$DateTimeFilter) || runtimeType != other.runtimeType) {
|
||||
return false;
|
||||
}
|
||||
final l$eq = eq;
|
||||
|
@ -291,7 +290,6 @@ class _CopyWithImpl$Input$DateTimeFilter<TRes>
|
|||
|
||||
static const _undefined = <dynamic, dynamic>{};
|
||||
|
||||
@override
|
||||
TRes call({
|
||||
Object? eq = _undefined,
|
||||
Object? gt = _undefined,
|
||||
|
@ -313,9 +311,8 @@ class _CopyWithStubImpl$Input$DateTimeFilter<TRes>
|
|||
implements CopyWith$Input$DateTimeFilter<TRes> {
|
||||
_CopyWithStubImpl$Input$DateTimeFilter(this._res);
|
||||
|
||||
final TRes _res;
|
||||
TRes _res;
|
||||
|
||||
@override
|
||||
call({
|
||||
DateTime? eq,
|
||||
DateTime? gt,
|
||||
|
@ -430,7 +427,7 @@ class Input$IntFilter {
|
|||
if (identical(this, other)) {
|
||||
return true;
|
||||
}
|
||||
if (other is! Input$IntFilter || runtimeType != other.runtimeType) {
|
||||
if (!(other is Input$IntFilter) || runtimeType != other.runtimeType) {
|
||||
return false;
|
||||
}
|
||||
final l$eq = eq;
|
||||
|
@ -550,7 +547,6 @@ class _CopyWithImpl$Input$IntFilter<TRes>
|
|||
|
||||
static const _undefined = <dynamic, dynamic>{};
|
||||
|
||||
@override
|
||||
TRes call({
|
||||
Object? eq = _undefined,
|
||||
Object? gt = _undefined,
|
||||
|
@ -574,9 +570,8 @@ class _CopyWithStubImpl$Input$IntFilter<TRes>
|
|||
implements CopyWith$Input$IntFilter<TRes> {
|
||||
_CopyWithStubImpl$Input$IntFilter(this._res);
|
||||
|
||||
final TRes _res;
|
||||
TRes _res;
|
||||
|
||||
@override
|
||||
call({
|
||||
int? eq,
|
||||
int? gt,
|
||||
|
@ -635,7 +630,7 @@ class Input$Pagination {
|
|||
if (identical(this, other)) {
|
||||
return true;
|
||||
}
|
||||
if (other is! Input$Pagination || runtimeType != other.runtimeType) {
|
||||
if (!(other is Input$Pagination) || runtimeType != other.runtimeType) {
|
||||
return false;
|
||||
}
|
||||
final l$offset = offset;
|
||||
|
@ -690,7 +685,6 @@ class _CopyWithImpl$Input$Pagination<TRes>
|
|||
|
||||
static const _undefined = <dynamic, dynamic>{};
|
||||
|
||||
@override
|
||||
TRes call({
|
||||
Object? offset = _undefined,
|
||||
Object? limit = _undefined,
|
||||
|
@ -706,9 +700,8 @@ class _CopyWithStubImpl$Input$Pagination<TRes>
|
|||
implements CopyWith$Input$Pagination<TRes> {
|
||||
_CopyWithStubImpl$Input$Pagination(this._res);
|
||||
|
||||
final TRes _res;
|
||||
TRes _res;
|
||||
|
||||
@override
|
||||
call({
|
||||
int? offset,
|
||||
int? limit,
|
||||
|
@ -784,7 +777,7 @@ class Input$StringFilter {
|
|||
if (identical(this, other)) {
|
||||
return true;
|
||||
}
|
||||
if (other is! Input$StringFilter || runtimeType != other.runtimeType) {
|
||||
if (!(other is Input$StringFilter) || runtimeType != other.runtimeType) {
|
||||
return false;
|
||||
}
|
||||
final l$eq = eq;
|
||||
|
@ -871,7 +864,6 @@ class _CopyWithImpl$Input$StringFilter<TRes>
|
|||
|
||||
static const _undefined = <dynamic, dynamic>{};
|
||||
|
||||
@override
|
||||
TRes call({
|
||||
Object? eq = _undefined,
|
||||
Object? substr = _undefined,
|
||||
|
@ -889,9 +881,8 @@ class _CopyWithStubImpl$Input$StringFilter<TRes>
|
|||
implements CopyWith$Input$StringFilter<TRes> {
|
||||
_CopyWithStubImpl$Input$StringFilter(this._res);
|
||||
|
||||
final TRes _res;
|
||||
TRes _res;
|
||||
|
||||
@override
|
||||
call({
|
||||
String? eq,
|
||||
String? substr,
|
||||
|
@ -955,7 +946,7 @@ class Input$TorrentFilter {
|
|||
if (identical(this, other)) {
|
||||
return true;
|
||||
}
|
||||
if (other is! Input$TorrentFilter || runtimeType != other.runtimeType) {
|
||||
if (!(other is Input$TorrentFilter) || runtimeType != other.runtimeType) {
|
||||
return false;
|
||||
}
|
||||
final l$everything = everything;
|
||||
|
@ -1018,7 +1009,6 @@ class _CopyWithImpl$Input$TorrentFilter<TRes>
|
|||
|
||||
static const _undefined = <dynamic, dynamic>{};
|
||||
|
||||
@override
|
||||
TRes call({
|
||||
Object? everything = _undefined,
|
||||
Object? infohash = _undefined,
|
||||
|
@ -1034,9 +1024,8 @@ class _CopyWithStubImpl$Input$TorrentFilter<TRes>
|
|||
implements CopyWith$Input$TorrentFilter<TRes> {
|
||||
_CopyWithStubImpl$Input$TorrentFilter(this._res);
|
||||
|
||||
final TRes _res;
|
||||
TRes _res;
|
||||
|
||||
@override
|
||||
call({
|
||||
bool? everything,
|
||||
String? infohash,
|
||||
|
@ -1046,22 +1035,32 @@ class _CopyWithStubImpl$Input$TorrentFilter<TRes>
|
|||
|
||||
class Input$TorrentsFilter {
|
||||
factory Input$TorrentsFilter({
|
||||
Input$StringFilter? infohash,
|
||||
Input$StringFilter? name,
|
||||
Input$IntFilter? bytesCompleted,
|
||||
Input$IntFilter? bytesMissing,
|
||||
Input$IntFilter? peersCount,
|
||||
Input$BooleanFilter? downloading,
|
||||
}) =>
|
||||
Input$TorrentsFilter._({
|
||||
if (infohash != null) r'infohash': infohash,
|
||||
if (name != null) r'name': name,
|
||||
if (bytesCompleted != null) r'bytesCompleted': bytesCompleted,
|
||||
if (bytesMissing != null) r'bytesMissing': bytesMissing,
|
||||
if (peersCount != null) r'peersCount': peersCount,
|
||||
if (downloading != null) r'downloading': downloading,
|
||||
});
|
||||
|
||||
Input$TorrentsFilter._(this._$data);
|
||||
|
||||
factory Input$TorrentsFilter.fromJson(Map<String, dynamic> data) {
|
||||
final result$data = <String, dynamic>{};
|
||||
if (data.containsKey('infohash')) {
|
||||
final l$infohash = data['infohash'];
|
||||
result$data['infohash'] = l$infohash == null
|
||||
? null
|
||||
: Input$StringFilter.fromJson((l$infohash as Map<String, dynamic>));
|
||||
}
|
||||
if (data.containsKey('name')) {
|
||||
final l$name = data['name'];
|
||||
result$data['name'] = l$name == null
|
||||
|
@ -1087,11 +1086,21 @@ class Input$TorrentsFilter {
|
|||
? null
|
||||
: Input$IntFilter.fromJson((l$peersCount as Map<String, dynamic>));
|
||||
}
|
||||
if (data.containsKey('downloading')) {
|
||||
final l$downloading = data['downloading'];
|
||||
result$data['downloading'] = l$downloading == null
|
||||
? null
|
||||
: Input$BooleanFilter.fromJson(
|
||||
(l$downloading as Map<String, dynamic>));
|
||||
}
|
||||
return Input$TorrentsFilter._(result$data);
|
||||
}
|
||||
|
||||
Map<String, dynamic> _$data;
|
||||
|
||||
Input$StringFilter? get infohash =>
|
||||
(_$data['infohash'] as Input$StringFilter?);
|
||||
|
||||
Input$StringFilter? get name => (_$data['name'] as Input$StringFilter?);
|
||||
|
||||
Input$IntFilter? get bytesCompleted =>
|
||||
|
@ -1102,8 +1111,15 @@ class Input$TorrentsFilter {
|
|||
|
||||
Input$IntFilter? get peersCount => (_$data['peersCount'] as Input$IntFilter?);
|
||||
|
||||
Input$BooleanFilter? get downloading =>
|
||||
(_$data['downloading'] as Input$BooleanFilter?);
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final result$data = <String, dynamic>{};
|
||||
if (_$data.containsKey('infohash')) {
|
||||
final l$infohash = infohash;
|
||||
result$data['infohash'] = l$infohash?.toJson();
|
||||
}
|
||||
if (_$data.containsKey('name')) {
|
||||
final l$name = name;
|
||||
result$data['name'] = l$name?.toJson();
|
||||
|
@ -1120,6 +1136,10 @@ class Input$TorrentsFilter {
|
|||
final l$peersCount = peersCount;
|
||||
result$data['peersCount'] = l$peersCount?.toJson();
|
||||
}
|
||||
if (_$data.containsKey('downloading')) {
|
||||
final l$downloading = downloading;
|
||||
result$data['downloading'] = l$downloading?.toJson();
|
||||
}
|
||||
return result$data;
|
||||
}
|
||||
|
||||
|
@ -1134,7 +1154,16 @@ class Input$TorrentsFilter {
|
|||
if (identical(this, other)) {
|
||||
return true;
|
||||
}
|
||||
if (other is! Input$TorrentsFilter || runtimeType != other.runtimeType) {
|
||||
if (!(other is Input$TorrentsFilter) || runtimeType != other.runtimeType) {
|
||||
return false;
|
||||
}
|
||||
final l$infohash = infohash;
|
||||
final lOther$infohash = other.infohash;
|
||||
if (_$data.containsKey('infohash') !=
|
||||
other._$data.containsKey('infohash')) {
|
||||
return false;
|
||||
}
|
||||
if (l$infohash != lOther$infohash) {
|
||||
return false;
|
||||
}
|
||||
final l$name = name;
|
||||
|
@ -1172,20 +1201,33 @@ class Input$TorrentsFilter {
|
|||
if (l$peersCount != lOther$peersCount) {
|
||||
return false;
|
||||
}
|
||||
final l$downloading = downloading;
|
||||
final lOther$downloading = other.downloading;
|
||||
if (_$data.containsKey('downloading') !=
|
||||
other._$data.containsKey('downloading')) {
|
||||
return false;
|
||||
}
|
||||
if (l$downloading != lOther$downloading) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode {
|
||||
final l$infohash = infohash;
|
||||
final l$name = name;
|
||||
final l$bytesCompleted = bytesCompleted;
|
||||
final l$bytesMissing = bytesMissing;
|
||||
final l$peersCount = peersCount;
|
||||
final l$downloading = downloading;
|
||||
return Object.hashAll([
|
||||
_$data.containsKey('infohash') ? l$infohash : const {},
|
||||
_$data.containsKey('name') ? l$name : const {},
|
||||
_$data.containsKey('bytesCompleted') ? l$bytesCompleted : const {},
|
||||
_$data.containsKey('bytesMissing') ? l$bytesMissing : const {},
|
||||
_$data.containsKey('peersCount') ? l$peersCount : const {},
|
||||
_$data.containsKey('downloading') ? l$downloading : const {},
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
@ -1200,15 +1242,19 @@ abstract class CopyWith$Input$TorrentsFilter<TRes> {
|
|||
_CopyWithStubImpl$Input$TorrentsFilter;
|
||||
|
||||
TRes call({
|
||||
Input$StringFilter? infohash,
|
||||
Input$StringFilter? name,
|
||||
Input$IntFilter? bytesCompleted,
|
||||
Input$IntFilter? bytesMissing,
|
||||
Input$IntFilter? peersCount,
|
||||
Input$BooleanFilter? downloading,
|
||||
});
|
||||
CopyWith$Input$StringFilter<TRes> get infohash;
|
||||
CopyWith$Input$StringFilter<TRes> get name;
|
||||
CopyWith$Input$IntFilter<TRes> get bytesCompleted;
|
||||
CopyWith$Input$IntFilter<TRes> get bytesMissing;
|
||||
CopyWith$Input$IntFilter<TRes> get peersCount;
|
||||
CopyWith$Input$BooleanFilter<TRes> get downloading;
|
||||
}
|
||||
|
||||
class _CopyWithImpl$Input$TorrentsFilter<TRes>
|
||||
|
@ -1224,15 +1270,18 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
|
|||
|
||||
static const _undefined = <dynamic, dynamic>{};
|
||||
|
||||
@override
|
||||
TRes call({
|
||||
Object? infohash = _undefined,
|
||||
Object? name = _undefined,
|
||||
Object? bytesCompleted = _undefined,
|
||||
Object? bytesMissing = _undefined,
|
||||
Object? peersCount = _undefined,
|
||||
Object? downloading = _undefined,
|
||||
}) =>
|
||||
_then(Input$TorrentsFilter._({
|
||||
..._instance._$data,
|
||||
if (infohash != _undefined)
|
||||
'infohash': (infohash as Input$StringFilter?),
|
||||
if (name != _undefined) 'name': (name as Input$StringFilter?),
|
||||
if (bytesCompleted != _undefined)
|
||||
'bytesCompleted': (bytesCompleted as Input$IntFilter?),
|
||||
|
@ -1240,9 +1289,17 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
|
|||
'bytesMissing': (bytesMissing as Input$IntFilter?),
|
||||
if (peersCount != _undefined)
|
||||
'peersCount': (peersCount as Input$IntFilter?),
|
||||
if (downloading != _undefined)
|
||||
'downloading': (downloading as Input$BooleanFilter?),
|
||||
}));
|
||||
|
||||
@override
|
||||
CopyWith$Input$StringFilter<TRes> get infohash {
|
||||
final local$infohash = _instance.infohash;
|
||||
return local$infohash == null
|
||||
? CopyWith$Input$StringFilter.stub(_then(_instance))
|
||||
: CopyWith$Input$StringFilter(local$infohash, (e) => call(infohash: e));
|
||||
}
|
||||
|
||||
CopyWith$Input$StringFilter<TRes> get name {
|
||||
final local$name = _instance.name;
|
||||
return local$name == null
|
||||
|
@ -1250,7 +1307,6 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
|
|||
: CopyWith$Input$StringFilter(local$name, (e) => call(name: e));
|
||||
}
|
||||
|
||||
@override
|
||||
CopyWith$Input$IntFilter<TRes> get bytesCompleted {
|
||||
final local$bytesCompleted = _instance.bytesCompleted;
|
||||
return local$bytesCompleted == null
|
||||
|
@ -1259,7 +1315,6 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
|
|||
local$bytesCompleted, (e) => call(bytesCompleted: e));
|
||||
}
|
||||
|
||||
@override
|
||||
CopyWith$Input$IntFilter<TRes> get bytesMissing {
|
||||
final local$bytesMissing = _instance.bytesMissing;
|
||||
return local$bytesMissing == null
|
||||
|
@ -1268,7 +1323,6 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
|
|||
local$bytesMissing, (e) => call(bytesMissing: e));
|
||||
}
|
||||
|
||||
@override
|
||||
CopyWith$Input$IntFilter<TRes> get peersCount {
|
||||
final local$peersCount = _instance.peersCount;
|
||||
return local$peersCount == null
|
||||
|
@ -1276,38 +1330,49 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
|
|||
: CopyWith$Input$IntFilter(
|
||||
local$peersCount, (e) => call(peersCount: e));
|
||||
}
|
||||
|
||||
CopyWith$Input$BooleanFilter<TRes> get downloading {
|
||||
final local$downloading = _instance.downloading;
|
||||
return local$downloading == null
|
||||
? CopyWith$Input$BooleanFilter.stub(_then(_instance))
|
||||
: CopyWith$Input$BooleanFilter(
|
||||
local$downloading, (e) => call(downloading: e));
|
||||
}
|
||||
}
|
||||
|
||||
class _CopyWithStubImpl$Input$TorrentsFilter<TRes>
|
||||
implements CopyWith$Input$TorrentsFilter<TRes> {
|
||||
_CopyWithStubImpl$Input$TorrentsFilter(this._res);
|
||||
|
||||
final TRes _res;
|
||||
TRes _res;
|
||||
|
||||
@override
|
||||
call({
|
||||
Input$StringFilter? infohash,
|
||||
Input$StringFilter? name,
|
||||
Input$IntFilter? bytesCompleted,
|
||||
Input$IntFilter? bytesMissing,
|
||||
Input$IntFilter? peersCount,
|
||||
Input$BooleanFilter? downloading,
|
||||
}) =>
|
||||
_res;
|
||||
|
||||
@override
|
||||
CopyWith$Input$StringFilter<TRes> get infohash =>
|
||||
CopyWith$Input$StringFilter.stub(_res);
|
||||
|
||||
CopyWith$Input$StringFilter<TRes> get name =>
|
||||
CopyWith$Input$StringFilter.stub(_res);
|
||||
|
||||
@override
|
||||
CopyWith$Input$IntFilter<TRes> get bytesCompleted =>
|
||||
CopyWith$Input$IntFilter.stub(_res);
|
||||
|
||||
@override
|
||||
CopyWith$Input$IntFilter<TRes> get bytesMissing =>
|
||||
CopyWith$Input$IntFilter.stub(_res);
|
||||
|
||||
@override
|
||||
CopyWith$Input$IntFilter<TRes> get peersCount =>
|
||||
CopyWith$Input$IntFilter.stub(_res);
|
||||
|
||||
CopyWith$Input$BooleanFilter<TRes> get downloading =>
|
||||
CopyWith$Input$BooleanFilter.stub(_res);
|
||||
}
|
||||
|
||||
enum Enum$__TypeKind {
|
||||
|
|
|
@ -6,11 +6,20 @@ mutation MarkTorrentDownload($infohash: String!) {
|
|||
}
|
||||
}
|
||||
|
||||
query ListTorrents {
|
||||
torrents {
|
||||
query ListTorrents($downloading: Boolean) {
|
||||
torrents(filter: {
|
||||
downloading: {
|
||||
eq: $downloading
|
||||
}
|
||||
}) {
|
||||
name
|
||||
infohash
|
||||
bytesCompleted
|
||||
bytesMissing
|
||||
peers {
|
||||
ip
|
||||
downloadRate
|
||||
clientName
|
||||
}
|
||||
}
|
||||
}
|
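The Flutter client drives this query through the generated query$ListTorrents helper (see downloads.dart below), but the same request can be issued against the server's /graphql endpoint directly. A hedged Go sketch, assuming the default http://localhost:4444/graphql address used elsewhere in this diff and a standard GraphQL-over-HTTP POST handler:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Build a standard GraphQL POST body with the ListTorrents query and its variable.
	body, _ := json.Marshal(map[string]any{
		"query": `query ListTorrents($downloading: Boolean) {
  torrents(filter: {downloading: {eq: $downloading}}) {
    name
    infohash
    bytesCompleted
    bytesMissing
  }
}`,
		"variables": map[string]any{"downloading": true},
	})

	resp, err := http.Post("http://localhost:4444/graphql", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out map[string]any
	_ = json.NewDecoder(resp.Body).Decode(&out)
	fmt.Println(out["data"])
}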
File diff suppressed because it is too large
ui/lib/components/sliver_header.dart (new file, 91 lines)
@@ -0,0 +1,91 @@
import 'package:flutter/material.dart';

class HideableHeaderSliver extends StatelessWidget {
  final Widget? leading;
  final Widget body;
  final double height;
  final List<Widget>? actions;

  const HideableHeaderSliver({
    super.key,
    this.leading,
    required this.body,
    this.actions,
    this.height = 150,
  });

  @override
  Widget build(BuildContext context) {
    return SliverPersistentHeader(
      floating: true,
      pinned: false,
      delegate: _HideableHeaderSliverDelegate(
        leading: leading,
        body: body,
        actions: actions,
        height: height,
      ),
    );
  }
}

class _HideableHeaderSliverDelegate extends SliverPersistentHeaderDelegate {
  final Widget? leading;
  final Widget body;
  final List<Widget>? actions;
  final double height;

  const _HideableHeaderSliverDelegate({
    required this.leading,
    required this.body,
    required this.actions,
    required this.height,
  });

  @override
  double get maxExtent => height;

  @override
  double get minExtent => height;

  @override
  bool shouldRebuild(covariant SliverPersistentHeaderDelegate oldDelegate) => true;

  @override
  Widget build(BuildContext context, double shrinkOffset, bool overlapsContent) {
    final content = <Widget>[
      if (leading != null) leading!,
      Expanded(child: body),
      if (actions != null && actions!.isNotEmpty) ButtonBar(children: actions!),
    ];

    final appBarTheme = AppBarTheme.of(context);
    final colorScheme = Theme.of(context).colorScheme;
    final onTop = (shrinkOffset == 0);

    return Material(
      color:
          onTop ? appBarTheme.backgroundColor ?? colorScheme.surface : colorScheme.surfaceContainer,
      elevation: onTop ? 0 : appBarTheme.elevation ?? 3,
      surfaceTintColor: appBarTheme.surfaceTintColor ?? colorScheme.surfaceTint,
      child: ClipRect(
        child: SizedBox(
          height: maxExtent,
          child: Column(
            children: [
              const Spacer(),
              Row(
                children: content,
              ),
              const Spacer(),
              const Divider(
                height: 1,
                thickness: 1,
              ),
            ],
          ),
        ),
      ),
    );
  }
}
@@ -43,9 +43,6 @@ class _MyHomePageState extends State<MyHomePage> {
   @override
   Widget build(BuildContext context) {
     return Scaffold(
-      appBar: AppBar(
-        title: const Text("tStor"),
-      ),
       body: <Widget>[
         const FileViewScreen(),
         const DownloadsScreen(),
@@ -2,6 +2,7 @@ import 'package:flutter/material.dart';
import 'package:tstor_ui/api/client.dart';
import 'package:tstor_ui/api/torrent.graphql.dart';
import 'package:tstor_ui/components/download.dart';
import 'package:tstor_ui/components/sliver_header.dart';

class DownloadsScreen extends StatefulWidget {
  const DownloadsScreen({super.key});
@@ -11,25 +12,82 @@ class DownloadsScreen extends StatefulWidget {
}

class _DownloadsScreenState extends State<DownloadsScreen> {
  bool filterDownloading = false;

  @override
  Widget build(BuildContext context) {
    return FutureBuilder(
      future: client.query$ListTorrents(),
      key: GlobalKey(),
      future: client.query$ListTorrents(Options$Query$ListTorrents(
        variables: Variables$Query$ListTorrents(downloading: filterDownloading),
      )),
      builder: (context, snapshot) {
        if (!snapshot.hasData || snapshot.data == null) {
          return const Center(child: CircularProgressIndicator());
        }
        final torrents = snapshot.data?.parsedData?.torrents;

        final torrents = snapshot.data!.parsedData!.torrents;

        return ListView.builder(
        return NestedScrollView(
          floatHeaderSlivers: true,
          headerSliverBuilder: (context, innerBoxIsScrolled) => [
            HideableHeaderSliver(
              height: 80,
              body: Padding(
                padding: const EdgeInsets.all(8.0),
                child: Wrap(
                  spacing: 8,
                  runSpacing: 8,
                  children: [
                    FilterChip(
                      label: const Text("Downloading"),
                      selected: filterDownloading,
                      onSelected: (value) => setState(() {
                        filterDownloading = value;
                      }),
                    ),
                  ],
                ),
              ),
              actions: [
                IconButton(
                  icon: const Icon(Icons.refresh),
                  onPressed: () => setState(() {}),
                ),
              ],
            ),
          ],
          body: snapshot.hasData && torrents != null
              ? ListView.builder(
                  itemCount: torrents.length,
                  itemBuilder: (context, index) {
                    final torrent = torrents[index];
                  itemBuilder: (context, index) => TorrentTile(torrent: torrents[index]),
                )
              : const Center(child: CircularProgressIndicator()),
        );
      },
    );
  }
}

class TorrentTile extends StatelessWidget {
  final Query$ListTorrents$torrents torrent;

  const TorrentTile({super.key, required this.torrent});

  @override
  Widget build(BuildContext context) {
    return ListTile(
      title: Text(torrent.name),
      subtitle: DownloadProgress(
          torrent.bytesCompleted, torrent.bytesCompleted + torrent.bytesMissing),
      isThreeLine: true,
      subtitle: Column(
        children: [
          DownloadProgress(
            torrent.bytesCompleted,
            torrent.bytesCompleted + torrent.bytesMissing,
          ),
          Row(
            children: [
              Text("Peers: ${torrent.peers.length}"),
            ],
          ),
        ],
      ),
      trailing: Column(
        mainAxisSize: MainAxisSize.max,
        mainAxisAlignment: MainAxisAlignment.spaceAround,
@@ -47,9 +105,5 @@ class _DownloadsScreenState extends State<DownloadsScreen> {
        ],
      ),
    );
  },
  );
  },
  );
  }
}
|
@ -5,6 +5,7 @@ import 'package:tstor_ui/api/client.dart';
|
|||
import 'package:tstor_ui/api/fs_entry.graphql.dart';
|
||||
import 'package:tstor_ui/api/torrent.graphql.dart';
|
||||
import 'package:tstor_ui/components/download.dart';
|
||||
import 'package:tstor_ui/components/sliver_header.dart';
|
||||
|
||||
import 'package:tstor_ui/font/t_icons_icons.dart';
|
||||
import 'package:path/path.dart' as p;
|
||||
|
@ -116,7 +117,7 @@ class _FileViewScreenState extends State<FileViewScreen> {
|
|||
|
||||
return CustomScrollView(
|
||||
slivers: [
|
||||
EntryInfoSliver(entry: entry),
|
||||
EntryHeaderSliver(entry: entry),
|
||||
SliverList.builder(
|
||||
itemCount: entries.length,
|
||||
itemBuilder: (context, index) {
|
||||
|
@ -214,20 +215,18 @@ class DirEntry extends StatelessWidget {
|
|||
}
|
||||
}
|
||||
|
||||
class EntryInfoSliver extends StatelessWidget {
|
||||
class EntryHeaderSliver extends StatelessWidget {
|
||||
final Query$ListDir$fsEntry entry;
|
||||
|
||||
const EntryInfoSliver({super.key, required this.entry});
|
||||
const EntryHeaderSliver({super.key, required this.entry});
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
switch (entry) {
|
||||
case Query$ListDir$fsEntry$$TorrentFS entry:
|
||||
final total = entry.torrent.bytesCompleted + entry.torrent.bytesMissing;
|
||||
|
||||
return EntryInfoHeader(
|
||||
icon: TIcons.bittorrent_bttold_logo,
|
||||
title: Text(entry.torrent.name),
|
||||
return HideableHeaderSliver(
|
||||
leading: const Icon(TIcons.bittorrent_bttold_logo),
|
||||
body: Column(
|
||||
crossAxisAlignment: CrossAxisAlignment.start,
|
||||
children: [
|
||||
|
@ -252,101 +251,10 @@ class EntryInfoSliver extends StatelessWidget {
|
|||
);
|
||||
|
||||
default:
|
||||
return EntryInfoHeader(
|
||||
icon: Icons.folder,
|
||||
title: Text(entry.name),
|
||||
return HideableHeaderSliver(
|
||||
leading: const Icon(Icons.folder),
|
||||
body: Text(entry.name),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class EntryInfoHeader extends StatelessWidget {
|
||||
final IconData icon;
|
||||
final Widget title;
|
||||
final Widget body;
|
||||
final List<Widget>? actions;
|
||||
|
||||
const EntryInfoHeader({
|
||||
super.key,
|
||||
required this.icon,
|
||||
required this.title,
|
||||
required this.body,
|
||||
this.actions,
|
||||
});
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
return SliverPersistentHeader(
|
||||
floating: true,
|
||||
pinned: false,
|
||||
delegate: EntryInfoSliverHeaderDelegate(icon: icon, title: title, body: body),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
class EntryInfoSliverHeaderDelegate extends SliverPersistentHeaderDelegate {
|
||||
final IconData icon;
|
||||
final Widget title;
|
||||
final Widget body;
|
||||
final List<Widget>? actions;
|
||||
final double size;
|
||||
|
||||
const EntryInfoSliverHeaderDelegate({
|
||||
required this.icon,
|
||||
required this.title,
|
||||
required this.body,
|
||||
this.actions,
|
||||
this.size = 150,
|
||||
});
|
||||
|
||||
@override
|
||||
double get maxExtent => size;
|
||||
|
||||
@override
|
||||
double get minExtent => size;
|
||||
|
||||
@override
|
||||
bool shouldRebuild(covariant SliverPersistentHeaderDelegate oldDelegate) => true;
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context, double shrinkOffset, bool overlapsContent) {
|
||||
final content = [
|
||||
Icon(icon, size: 50),
|
||||
Expanded(child: body),
|
||||
];
|
||||
|
||||
if (actions != null && actions!.isNotEmpty) {
|
||||
content.add(ButtonBar(children: actions!));
|
||||
}
|
||||
|
||||
final appBarTheme = AppBarTheme.of(context);
|
||||
final colorScheme = Theme.of(context).colorScheme;
|
||||
final onTop = (shrinkOffset == 0);
|
||||
|
||||
return Material(
|
||||
color:
|
||||
onTop ? appBarTheme.backgroundColor ?? colorScheme.surface : colorScheme.surfaceContainer,
|
||||
elevation: onTop ? 0 : appBarTheme.elevation ?? 3,
|
||||
surfaceTintColor: appBarTheme.surfaceTintColor ?? colorScheme.surfaceTint,
|
||||
child: ClipRect(
|
||||
child: SizedBox(
|
||||
height: maxExtent,
|
||||
child: Column(
|
||||
children: [
|
||||
const Spacer(),
|
||||
Row(
|
||||
children: content,
|
||||
),
|
||||
const Spacer(),
|
||||
const Divider(
|
||||
height: 1,
|
||||
thickness: 1,
|
||||
),
|
||||
],
|
||||
),
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|