torrent list
All checks were successful
docker / build-docker (linux/amd64) (push) Successful in 1m39s
docker / build-docker (linux/386) (push) Successful in 1m46s
docker / build-docker (linux/arm64/v8) (push) Successful in 8m18s
docker / build-docker (linux/arm64) (push) Successful in 8m29s
docker / build-docker (linux/arm/v7) (push) Successful in 8m49s
This commit is contained in:
parent d8ee8a3a24
commit 0d7aac068c
23 changed files with 1285 additions and 698 deletions

@@ -1,5 +1,5 @@
 type Query {
-  torrents(filter: TorrentsFilter, pagination: Pagination): [Torrent!]!
+  torrents(filter: TorrentsFilter): [Torrent!]!
   fsEntry(path: String!): FsEntry
 }
@@ -10,6 +10,7 @@ input TorrentsFilter {
   bytesMissing: IntFilter

   peersCount: IntFilter
+  downloading: BooleanFilter
 }

 input Pagination {

@@ -7,6 +7,9 @@ type Torrent {
   files: [TorrentFile!]!
   excludedFiles: [TorrentFile!]!
   peers: [TorrentPeer!]!
+
+  # if at least one piece of the torrent is requested to download and not already downloaded
+  downloading: Boolean!
 }

 type TorrentFile {
@@ -21,4 +24,4 @@ type TorrentPeer {
   discovery: String!
   port: Int!
   clientName: String!
 }

@@ -80,7 +80,7 @@ type ComplexityRoot struct {

 	Query struct {
 		FsEntry  func(childComplexity int, path string) int
-		Torrents func(childComplexity int, filter *model.TorrentsFilter, pagination *model.Pagination) int
+		Torrents func(childComplexity int, filter *model.TorrentsFilter) int
 	}

 	ResolverFS struct {
@@ -115,6 +115,7 @@ type ComplexityRoot struct {
 	Torrent struct {
 		BytesCompleted func(childComplexity int) int
 		BytesMissing   func(childComplexity int) int
+		Downloading    func(childComplexity int) int
 		ExcludedFiles  func(childComplexity int) int
 		Files          func(childComplexity int) int
 		Infohash       func(childComplexity int) int
@@ -166,7 +167,7 @@ type MutationResolver interface {
 	DedupeStorage(ctx context.Context) (int64, error)
 }
 type QueryResolver interface {
-	Torrents(ctx context.Context, filter *model.TorrentsFilter, pagination *model.Pagination) ([]*model.Torrent, error)
+	Torrents(ctx context.Context, filter *model.TorrentsFilter) ([]*model.Torrent, error)
 	FsEntry(ctx context.Context, path string) (model.FsEntry, error)
 }
 type ResolverFSResolver interface {
@@ -316,7 +317,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
 			return 0, false
 		}

-		return e.complexity.Query.Torrents(childComplexity, args["filter"].(*model.TorrentsFilter), args["pagination"].(*model.Pagination)), true
+		return e.complexity.Query.Torrents(childComplexity, args["filter"].(*model.TorrentsFilter)), true

 	case "ResolverFS.entries":
 		if e.complexity.ResolverFS.Entries == nil {
@@ -414,6 +415,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in

 		return e.complexity.Torrent.BytesMissing(childComplexity), true

+	case "Torrent.downloading":
+		if e.complexity.Torrent.Downloading == nil {
+			break
+		}
+
+		return e.complexity.Torrent.Downloading(childComplexity), true
+
 	case "Torrent.excludedFiles":
 		if e.complexity.Torrent.ExcludedFiles == nil {
 			break
@@ -731,7 +739,7 @@ type Task {
 }
 `, BuiltIn: false},
 	{Name: "../../../graphql/query.graphql", Input: `type Query {
-  torrents(filter: TorrentsFilter, pagination: Pagination): [Torrent!]!
+  torrents(filter: TorrentsFilter): [Torrent!]!
   fsEntry(path: String!): FsEntry
 }

@@ -742,6 +750,7 @@ input TorrentsFilter {
   bytesMissing: IntFilter

   peersCount: IntFilter
+  downloading: BooleanFilter
 }

 input Pagination {
@@ -859,6 +868,9 @@ type TorrentFileEntry implements File & FsEntry {
   files: [TorrentFile!]!
   excludedFiles: [TorrentFile!]!
   peers: [TorrentPeer!]!
+
+  # if at least one piece of the torrent is requested to download and not already downloaded
+  downloading: Boolean!
 }

 type TorrentFile {
@@ -873,7 +885,8 @@ type TorrentPeer {
   discovery: String!
   port: Int!
   clientName: String!
-}`, BuiltIn: false},
+}
+`, BuiltIn: false},
 }
 var parsedSchema = gqlparser.MustLoadSchema(sources...)

@@ -986,15 +999,6 @@ func (ec *executionContext) field_Query_torrents_args(ctx context.Context, rawAr
 		}
 	}
 	args["filter"] = arg0
-	var arg1 *model.Pagination
-	if tmp, ok := rawArgs["pagination"]; ok {
-		ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("pagination"))
-		arg1, err = ec.unmarshalOPagination2ᚖgitᚗkmsignᚗruᚋroyalcatᚋtstorᚋsrcᚋdeliveryᚋgraphqlᚋmodelᚐPagination(ctx, tmp)
-		if err != nil {
-			return nil, err
-		}
-	}
-	args["pagination"] = arg1
 	return args, nil
 }

@@ -1546,7 +1550,7 @@ func (ec *executionContext) _Query_torrents(ctx context.Context, field graphql.C
 	}()
 	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
 		ctx = rctx // use context from middleware stack in children
-		return ec.resolvers.Query().Torrents(rctx, fc.Args["filter"].(*model.TorrentsFilter), fc.Args["pagination"].(*model.Pagination))
+		return ec.resolvers.Query().Torrents(rctx, fc.Args["filter"].(*model.TorrentsFilter))
 	})
 	if err != nil {
 		ec.Error(ctx, err)
@@ -1587,6 +1591,8 @@ func (ec *executionContext) fieldContext_Query_torrents(ctx context.Context, fie
 				return ec.fieldContext_Torrent_excludedFiles(ctx, field)
 			case "peers":
 				return ec.fieldContext_Torrent_peers(ctx, field)
+			case "downloading":
+				return ec.fieldContext_Torrent_downloading(ctx, field)
 			}
 			return nil, fmt.Errorf("no field named %q was found under type Torrent", field.Name)
 		},
@@ -2683,6 +2689,50 @@ func (ec *executionContext) fieldContext_Torrent_peers(ctx context.Context, fiel
 	return fc, nil
 }

+func (ec *executionContext) _Torrent_downloading(ctx context.Context, field graphql.CollectedField, obj *model.Torrent) (ret graphql.Marshaler) {
+	fc, err := ec.fieldContext_Torrent_downloading(ctx, field)
+	if err != nil {
+		return graphql.Null
+	}
+	ctx = graphql.WithFieldContext(ctx, fc)
+	defer func() {
+		if r := recover(); r != nil {
+			ec.Error(ctx, ec.Recover(ctx, r))
+			ret = graphql.Null
+		}
+	}()
+	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
+		ctx = rctx // use context from middleware stack in children
+		return obj.Downloading, nil
+	})
+	if err != nil {
+		ec.Error(ctx, err)
+		return graphql.Null
+	}
+	if resTmp == nil {
+		if !graphql.HasFieldError(ctx, fc) {
+			ec.Errorf(ctx, "must not be null")
+		}
+		return graphql.Null
+	}
+	res := resTmp.(bool)
+	fc.Result = res
+	return ec.marshalNBoolean2bool(ctx, field.Selections, res)
+}
+
+func (ec *executionContext) fieldContext_Torrent_downloading(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
+	fc = &graphql.FieldContext{
+		Object:     "Torrent",
+		Field:      field,
+		IsMethod:   false,
+		IsResolver: false,
+		Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
+			return nil, errors.New("field of type Boolean does not have child fields")
+		},
+	}
+	return fc, nil
+}
+
 func (ec *executionContext) _TorrentFS_name(ctx context.Context, field graphql.CollectedField, obj *model.TorrentFs) (ret graphql.Marshaler) {
 	fc, err := ec.fieldContext_TorrentFS_name(ctx, field)
 	if err != nil {
@@ -2782,6 +2832,8 @@ func (ec *executionContext) fieldContext_TorrentFS_torrent(ctx context.Context,
 				return ec.fieldContext_Torrent_excludedFiles(ctx, field)
 			case "peers":
 				return ec.fieldContext_Torrent_peers(ctx, field)
+			case "downloading":
+				return ec.fieldContext_Torrent_downloading(ctx, field)
 			}
 			return nil, fmt.Errorf("no field named %q was found under type Torrent", field.Name)
 		},
@@ -3064,6 +3116,8 @@ func (ec *executionContext) fieldContext_TorrentFileEntry_torrent(ctx context.Co
 				return ec.fieldContext_Torrent_excludedFiles(ctx, field)
 			case "peers":
 				return ec.fieldContext_Torrent_peers(ctx, field)
+			case "downloading":
+				return ec.fieldContext_Torrent_downloading(ctx, field)
 			}
 			return nil, fmt.Errorf("no field named %q was found under type Torrent", field.Name)
 		},
@@ -3390,6 +3444,8 @@ func (ec *executionContext) fieldContext_TorrentProgress_torrent(ctx context.Con
 				return ec.fieldContext_Torrent_excludedFiles(ctx, field)
 			case "peers":
 				return ec.fieldContext_Torrent_peers(ctx, field)
+			case "downloading":
+				return ec.fieldContext_Torrent_downloading(ctx, field)
 			}
 			return nil, fmt.Errorf("no field named %q was found under type Torrent", field.Name)
 		},
@@ -5773,7 +5829,7 @@ func (ec *executionContext) unmarshalInputTorrentsFilter(ctx context.Context, ob
 		asMap[k] = v
 	}

-	fieldsInOrder := [...]string{"infohash", "name", "bytesCompleted", "bytesMissing", "peersCount"}
+	fieldsInOrder := [...]string{"infohash", "name", "bytesCompleted", "bytesMissing", "peersCount", "downloading"}
 	for _, k := range fieldsInOrder {
 		v, ok := asMap[k]
 		if !ok {
@@ -5815,6 +5871,13 @@ func (ec *executionContext) unmarshalInputTorrentsFilter(ctx context.Context, ob
 				return it, err
 			}
 			it.PeersCount = data
+		case "downloading":
+			ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("downloading"))
+			data, err := ec.unmarshalOBooleanFilter2ᚖgitᚗkmsignᚗruᚋroyalcatᚋtstorᚋsrcᚋdeliveryᚋgraphqlᚋmodelᚐBooleanFilter(ctx, v)
+			if err != nil {
+				return it, err
+			}
+			it.Downloading = data
 		}
 	}

@@ -6752,6 +6815,11 @@ func (ec *executionContext) _Torrent(ctx context.Context, sel ast.SelectionSet,
 			}

 			out.Concurrently(i, func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) })
+		case "downloading":
+			out.Values[i] = ec._Torrent_downloading(ctx, field, obj)
+			if out.Values[i] == graphql.Null {
+				atomic.AddUint32(&out.Invalids, 1)
+			}
 		default:
 			panic("unknown field " + strconv.Quote(field.Name))
 		}
@@ -8008,6 +8076,14 @@ func (ec *executionContext) marshalOBoolean2ᚖbool(ctx context.Context, sel ast
 	return res
 }

+func (ec *executionContext) unmarshalOBooleanFilter2ᚖgitᚗkmsignᚗruᚋroyalcatᚋtstorᚋsrcᚋdeliveryᚋgraphqlᚋmodelᚐBooleanFilter(ctx context.Context, v interface{}) (*model.BooleanFilter, error) {
+	if v == nil {
+		return nil, nil
+	}
+	res, err := ec.unmarshalInputBooleanFilter(ctx, v)
+	return &res, graphql.ErrorOnPath(ctx, err)
+}
+
 func (ec *executionContext) unmarshalODateTime2ᚖtimeᚐTime(ctx context.Context, v interface{}) (*time.Time, error) {
 	if v == nil {
 		return nil, nil
@@ -8107,14 +8183,6 @@ func (ec *executionContext) marshalOMutation2ᚖgitᚗkmsignᚗruᚋroyalcatᚋt
 	return ec._Mutation(ctx, sel)
 }

-func (ec *executionContext) unmarshalOPagination2ᚖgitᚗkmsignᚗruᚋroyalcatᚋtstorᚋsrcᚋdeliveryᚋgraphqlᚋmodelᚐPagination(ctx context.Context, v interface{}) (*model.Pagination, error) {
-	if v == nil {
-		return nil, nil
-	}
-	res, err := ec.unmarshalInputPagination(ctx, v)
-	return &res, graphql.ErrorOnPath(ctx, err)
-}
-
 func (ec *executionContext) marshalOProgress2gitᚗkmsignᚗruᚋroyalcatᚋtstorᚋsrcᚋdeliveryᚋgraphqlᚋmodelᚐProgress(ctx context.Context, sel ast.SelectionSet, v model.Progress) graphql.Marshaler {
 	if v == nil {
 		return graphql.Null

@@ -1,6 +1,8 @@
 package model

 import (
+	"context"
+
 	"git.kmsign.ru/royalcat/tstor/src/host/vfs"
 )

@@ -9,7 +11,7 @@ type FsElem interface {
 	IsDir() bool
 }

-func FillFsEntry(e FsElem, fs vfs.Filesystem, path string) FsEntry {
+func FillFsEntry(ctx context.Context, e FsElem, fs vfs.Filesystem, path string) (FsEntry, error) {
 	switch e.(type) {
 	case *vfs.ArchiveFS:
 		e := e.(*vfs.ArchiveFS)
@@ -17,31 +19,35 @@ func FillFsEntry(e FsElem, fs vfs.Filesystem, path string) FsEntry {
 			Name: e.Name(),
 			Size: e.Size(),
 			FS:   e,
-		}
+		}, nil
 	case *vfs.ResolverFS:
 		e := e.(*vfs.ResolverFS)
 		return ResolverFs{
 			Name: e.Name(),
 			FS:   e,
-		}
+		}, nil
 	case *vfs.TorrentFS:
 		e := e.(*vfs.TorrentFS)
+		torrent, err := MapTorrent(ctx, e.Torrent)
+		if err != nil {
+			return nil, err
+		}
 		return TorrentFs{
 			Name:    e.Name(),
-			Torrent: MapTorrent(e.Torrent),
+			Torrent: torrent,
 			FS:      e,
-		}
+		}, nil
 	default:
 		if e.IsDir() {
 			return SimpleDir{
 				Name: e.Name(),
 				FS:   fs,
 				Path: path,
-			}
+			}, nil
 		} else {
 			return SimpleFile{
 				Name: e.Name(),
-			}
+			}, nil
 		}
 	}
 }

@@ -42,3 +42,13 @@ func (f *StringFilter) Include(v string) bool {

 	return true
 }
+
+func (f *BooleanFilter) Include(v bool) bool {
+	if f == nil {
+		return true
+	} else if f.Eq != nil {
+		return v == *f.Eq
+	}
+
+	return true
+}
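
Read on its own, the new BooleanFilter keeps the convention of the existing filters: a nil filter (field not set in the query) matches every value, and only an explicit eq constrains the result. A small self-contained sketch of that behaviour, using local copies of the types purely for illustration (not part of the commit):

package main

import "fmt"

// BooleanFilter mirrors the generated model type used above; sketch only.
type BooleanFilter struct{ Eq *bool }

// Include follows the semantics added in this commit: a nil filter matches
// everything, otherwise only an explicit Eq constrains the value.
func (f *BooleanFilter) Include(v bool) bool {
	if f == nil {
		return true
	} else if f.Eq != nil {
		return v == *f.Eq
	}
	return true
}

func main() {
	tr := true
	var unset *BooleanFilter
	fmt.Println(unset.Include(false))                     // true: no filter set
	fmt.Println((&BooleanFilter{Eq: &tr}).Include(true))  // true: matches eq
	fmt.Println((&BooleanFilter{Eq: &tr}).Include(false)) // false: rejected
}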

@@ -1,6 +1,8 @@
 package model

 import (
+	"context"
+
 	"git.kmsign.ru/royalcat/tstor/src/host/controller"
 	"github.com/anacrolix/torrent"
 )
@@ -26,12 +28,25 @@ func MapPeerSource(source torrent.PeerSource) string {
 	}
 }

-func MapTorrent(t *controller.Torrent) *Torrent {
+func MapTorrent(ctx context.Context, t *controller.Torrent) (*Torrent, error) {
+	downloading := false
+	files, err := t.Files(ctx)
+	if err != nil {
+		return nil, err
+	}
+	for _, file := range files {
+		if file.Priority() > torrent.PiecePriorityNone && file.BytesCompleted() < file.Length() {
+			downloading = true
+			break
+		}
+	}
+
 	return &Torrent{
 		Infohash:       t.InfoHash(),
 		Name:           t.Name(),
 		BytesCompleted: t.BytesCompleted(),
 		BytesMissing:   t.BytesMissing(),
 		T:              t,
-	}
+		Downloading:    downloading,
+	}, nil
 }
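
The rule MapTorrent now applies can be stated in isolation: a torrent counts as downloading when at least one of its files has a priority above PiecePriorityNone and is not yet fully downloaded. A hedged sketch of that predicate against the anacrolix/torrent file API used above; the helper name is illustrative, the commit inlines this loop directly in MapTorrent:

package model

import "github.com/anacrolix/torrent"

// isDownloading reports whether any file is both requested and incomplete.
// Illustrative helper only; MapTorrent above performs the same check inline.
func isDownloading(files []*torrent.File) bool {
	for _, file := range files {
		if file == nil {
			continue
		}
		if file.Priority() > torrent.PiecePriorityNone && file.BytesCompleted() < file.Length() {
			return true
		}
	}
	return false
}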

@@ -179,6 +179,7 @@ type Torrent struct {
 	Files          []*TorrentFile      `json:"files"`
 	ExcludedFiles  []*TorrentFile      `json:"excludedFiles"`
 	Peers          []*TorrentPeer      `json:"peers"`
+	Downloading    bool                `json:"downloading"`
 	T              *controller.Torrent `json:"-"`
 }

@@ -248,9 +249,10 @@ func (this TorrentProgress) GetCurrent() int64 { return this.Current }
 func (this TorrentProgress) GetTotal() int64   { return this.Total }

 type TorrentsFilter struct {
 	Infohash       *StringFilter  `json:"infohash,omitempty"`
 	Name           *StringFilter  `json:"name,omitempty"`
 	BytesCompleted *IntFilter     `json:"bytesCompleted,omitempty"`
 	BytesMissing   *IntFilter     `json:"bytesMissing,omitempty"`
 	PeersCount     *IntFilter     `json:"peersCount,omitempty"`
+	Downloading    *BooleanFilter `json:"downloading,omitempty"`
 }


@@ -19,7 +19,11 @@ func (r *archiveFSResolver) Entries(ctx context.Context, obj *model.ArchiveFs) (
 	}
 	out := []model.FsEntry{}
 	for _, e := range entries {
-		out = append(out, model.FillFsEntry(e, obj.FS, "."))
+		entry, err := model.FillFsEntry(ctx, e, obj.FS, ".")
+		if err != nil {
+			return nil, err
+		}
+		out = append(out, entry)
 	}
 	return out, nil
 }
@@ -32,7 +36,11 @@ func (r *resolverFSResolver) Entries(ctx context.Context, obj *model.ResolverFs)
 	}
 	out := []model.FsEntry{}
 	for _, e := range entries {
-		out = append(out, model.FillFsEntry(e, obj.FS, "."))
+		entry, err := model.FillFsEntry(ctx, e, obj.FS, ".")
+		if err != nil {
+			return nil, err
+		}
+		out = append(out, entry)
 	}
 	return out, nil
 }
@@ -45,7 +53,11 @@ func (r *simpleDirResolver) Entries(ctx context.Context, obj *model.SimpleDir) (
 	}
 	out := []model.FsEntry{}
 	for _, e := range entries {
-		out = append(out, model.FillFsEntry(e, obj.FS, obj.Path))
+		entry, err := model.FillFsEntry(ctx, e, obj.FS, obj.Path)
+		if err != nil {
+			return nil, err
+		}
+		out = append(out, entry)
 	}
 	return out, nil
 }
@@ -58,7 +70,11 @@ func (r *torrentFSResolver) Entries(ctx context.Context, obj *model.TorrentFs) (
 	}
 	out := []model.FsEntry{}
 	for _, e := range entries {
-		out = append(out, model.FillFsEntry(e, obj.FS, "."))
+		entry, err := model.FillFsEntry(ctx, e, obj.FS, ".")
+		if err != nil {
+			return nil, err
+		}
+		out = append(out, entry)
 	}
 	return out, nil
 }

@@ -15,7 +15,7 @@ import (
 )

 // Torrents is the resolver for the torrents field.
-func (r *queryResolver) Torrents(ctx context.Context, filter *model.TorrentsFilter, pagination *model.Pagination) ([]*model.Torrent, error) {
+func (r *queryResolver) Torrents(ctx context.Context, filter *model.TorrentsFilter) ([]*model.Torrent, error) {
 	torrents, err := r.Service.ListTorrents(ctx)
 	if err != nil {
 		return nil, err
@@ -49,6 +49,13 @@ func (r *queryResolver) Torrents(ctx context.Context, filter *model.TorrentsFilt
 			})
 		}

+		if filter.Downloading != nil {
+			filterFuncs = append(filterFuncs, func(torrent *model.Torrent) bool {
+				return filter.Downloading.Include(
+					torrent.Downloading,
+				)
+			})
+		}
 	}

 	filterFunc := func(torrent *model.Torrent) bool {
@@ -62,7 +69,10 @@ func (r *queryResolver) Torrents(ctx context.Context, filter *model.TorrentsFilt

 	tr := []*model.Torrent{}
 	for _, t := range torrents {
-		d := model.MapTorrent(t)
+		d, err := model.MapTorrent(ctx, t)
+		if err != nil {
+			return nil, err
+		}

 		if !filterFunc(d) {
 			continue
@@ -71,7 +81,7 @@ func (r *queryResolver) Torrents(ctx context.Context, filter *model.TorrentsFilt
 	}

 	slices.SortStableFunc(torrents, func(t1, t2 *controller.Torrent) int {
-		return strings.Compare(t1.InfoHash(), t2.InfoHash())
+		return strings.Compare(t1.Name(), t2.Name())
 	})

 	return tr, nil
@@ -84,7 +94,7 @@ func (r *queryResolver) FsEntry(ctx context.Context, path string) (model.FsEntry
 		return nil, err
 	}

-	return model.FillFsEntry(entry, r.VFS, path), nil
+	return model.FillFsEntry(ctx, entry, r.VFS, path)
 }

 // Query returns graph.QueryResolver implementation.
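
For orientation, the Torrents resolver above collects one predicate per set filter field and returns a torrent only when every predicate accepts it. A standalone sketch of that AND-composition; the generic helper below is illustrative and not part of the commit:

package filters

// composeAnd mirrors how the resolver's filterFuncs are applied: a value
// passes only if every registered predicate returns true.
func composeAnd[T any](predicates []func(T) bool) func(T) bool {
	return func(v T) bool {
		for _, p := range predicates {
			if !p(v) {
				return false
			}
		}
		return true
	}
}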

@@ -32,8 +32,13 @@ func (r *subscriptionResolver) TorrentDownloadUpdates(ctx context.Context) (<-ch
 			fmt.Println("nil torrent")
 			continue
 		}
+		torrent, err := model.MapTorrent(ctx, p.Torrent)
+		if err != nil {
+			// TODO logs
+			continue
+		}
 		po := &model.TorrentProgress{
-			Torrent: model.MapTorrent(p.Torrent),
+			Torrent: torrent,
 			Current: p.Current,
 			Total:   p.Total,
 		}

@@ -20,7 +20,7 @@ func New(fc *filecache.Cache, ss *service.Stats, s *service.Service, vfs vfs.Fil

 	r := echo.New()
 	r.Use(
-		middleware.Recover(),
+		// middleware.Recover(),
 		middleware.Gzip(),
 		middleware.Decompress(),
 		Logger(),

@@ -62,9 +62,18 @@ func (s *Torrent) Files(ctx context.Context) ([]*torrent.File, error) {
 		return nil, err
 	}

-	<-s.t.GotInfo()
+	select {
+	case <-ctx.Done():
+		return nil, ctx.Err()
+	case <-s.t.GotInfo():
+	}
+
 	files := s.t.Files()
 	files = slices.DeleteFunc(files, func(file *torrent.File) bool {
+		if file == nil {
+			return true
+		}
+
 		p := file.Path()
 		if strings.Contains(p, "/.pad/") {
 			return true
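
The change above replaces a bare <-s.t.GotInfo() with a select, so waiting for torrent metadata can be abandoned when the caller's context is cancelled. The general shape of that pattern as a standalone sketch (names are illustrative, not from the repository):

package main

import (
	"context"
	"errors"
)

// waitOrCancel blocks until done is closed or ctx is cancelled, mirroring
// the select added to (*Torrent).Files above; sketch only.
func waitOrCancel(ctx context.Context, done <-chan struct{}) error {
	select {
	case <-ctx.Done():
		return ctx.Err()
	case <-done:
		return nil
	}
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	cancel()
	if err := waitOrCancel(ctx, make(chan struct{})); !errors.Is(err, context.Canceled) {
		panic("expected context.Canceled")
	}
}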

@@ -3,6 +3,7 @@ targets:
   builders:
     graphql_codegen:
       options:
+        generatedFileHeader: "// ignore_for_file: type=lint\n"
         scalars:
           URL:
             type: String

@@ -1,18 +1,8 @@
 import 'package:flutter/foundation.dart';
 import 'package:graphql/client.dart';

-final client = GraphQLClient(
-  link: _loggerLink.concat(HttpLink("http://localhost:4444/graphql")),
-  cache: GraphQLCache(store: null),
-  defaultPolicies: DefaultPolicies(
-    query: Policies(
-      fetch: FetchPolicy.noCache,
-    ),
-  ),
-);
-
 // final client = GraphQLClient(
-//   link: HttpLink("http://192.168.217.150:4444/graphql"),
+//   link: _loggerLink.concat(HttpLink("http://localhost:4444/graphql")),
 //   cache: GraphQLCache(store: null),
 //   defaultPolicies: DefaultPolicies(
 //     query: Policies(
@@ -21,6 +11,16 @@ final client = GraphQLClient(
 // ),
 // );

+final client = GraphQLClient(
+  link: HttpLink("http://192.168.217.150:4444/graphql"),
+  cache: GraphQLCache(store: null),
+  defaultPolicies: DefaultPolicies(
+    query: Policies(
+      fetch: FetchPolicy.noCache,
+    ),
+  ),
+);
+
 class LoggerLink extends Link {
   @override
   Stream<Response> request(
File diff suppressed because it is too large

@@ -57,7 +57,7 @@ interface Progress {
   total: Int!
 }
 type Query {
-  torrents(filter: TorrentsFilter, pagination: Pagination): [Torrent!]!
+  torrents(filter: TorrentsFilter): [Torrent!]!
   fsEntry(path: String!): FsEntry
 }
 type ResolverFS implements Dir & FsEntry {
@@ -97,6 +97,7 @@ type Torrent {
   files: [TorrentFile!]!
   excludedFiles: [TorrentFile!]!
   peers: [TorrentPeer!]!
+  downloading: Boolean!
 }
 type TorrentFS implements Dir & FsEntry {
   name: String!
@@ -135,4 +136,5 @@ input TorrentsFilter {
   bytesCompleted: IntFilter
   bytesMissing: IntFilter
   peersCount: IntFilter
+  downloading: BooleanFilter
 }

@@ -1,3 +1,4 @@
+// ignore_for_file: type=lint
 class Input$BooleanFilter {
   factory Input$BooleanFilter({bool? eq}) => Input$BooleanFilter._({
         if (eq != null) r'eq': eq,
@@ -38,7 +39,7 @@ class Input$BooleanFilter {
     if (identical(this, other)) {
       return true;
     }
-    if (other is! Input$BooleanFilter || runtimeType != other.runtimeType) {
+    if (!(other is Input$BooleanFilter) || runtimeType != other.runtimeType) {
       return false;
     }
     final l$eq = eq;
@@ -84,7 +85,6 @@ class _CopyWithImpl$Input$BooleanFilter<TRes>

   static const _undefined = <dynamic, dynamic>{};

-  @override
   TRes call({Object? eq = _undefined}) => _then(Input$BooleanFilter._({
         ..._instance._$data,
         if (eq != _undefined) 'eq': (eq as bool?),
@@ -95,9 +95,8 @@ class _CopyWithStubImpl$Input$BooleanFilter<TRes>
     implements CopyWith$Input$BooleanFilter<TRes> {
   _CopyWithStubImpl$Input$BooleanFilter(this._res);

-  final TRes _res;
+  TRes _res;

-  @override
   call({bool? eq}) => _res;
 }

@@ -197,7 +196,7 @@ class Input$DateTimeFilter {
     if (identical(this, other)) {
       return true;
     }
-    if (other is! Input$DateTimeFilter || runtimeType != other.runtimeType) {
+    if (!(other is Input$DateTimeFilter) || runtimeType != other.runtimeType) {
      return false;
     }
     final l$eq = eq;
@@ -291,7 +290,6 @@ class _CopyWithImpl$Input$DateTimeFilter<TRes>

   static const _undefined = <dynamic, dynamic>{};

-  @override
   TRes call({
     Object? eq = _undefined,
     Object? gt = _undefined,
@@ -313,9 +311,8 @@ class _CopyWithStubImpl$Input$DateTimeFilter<TRes>
     implements CopyWith$Input$DateTimeFilter<TRes> {
   _CopyWithStubImpl$Input$DateTimeFilter(this._res);

-  final TRes _res;
+  TRes _res;

-  @override
   call({
     DateTime? eq,
     DateTime? gt,
@@ -430,7 +427,7 @@ class Input$IntFilter {
     if (identical(this, other)) {
       return true;
     }
-    if (other is! Input$IntFilter || runtimeType != other.runtimeType) {
+    if (!(other is Input$IntFilter) || runtimeType != other.runtimeType) {
       return false;
     }
     final l$eq = eq;
@@ -550,7 +547,6 @@ class _CopyWithImpl$Input$IntFilter<TRes>

   static const _undefined = <dynamic, dynamic>{};

-  @override
   TRes call({
     Object? eq = _undefined,
     Object? gt = _undefined,
@@ -574,9 +570,8 @@ class _CopyWithStubImpl$Input$IntFilter<TRes>
     implements CopyWith$Input$IntFilter<TRes> {
   _CopyWithStubImpl$Input$IntFilter(this._res);

-  final TRes _res;
+  TRes _res;

-  @override
   call({
     int? eq,
     int? gt,
@@ -635,7 +630,7 @@ class Input$Pagination {
     if (identical(this, other)) {
       return true;
     }
-    if (other is! Input$Pagination || runtimeType != other.runtimeType) {
+    if (!(other is Input$Pagination) || runtimeType != other.runtimeType) {
       return false;
     }
     final l$offset = offset;
@@ -690,7 +685,6 @@ class _CopyWithImpl$Input$Pagination<TRes>

   static const _undefined = <dynamic, dynamic>{};

-  @override
   TRes call({
     Object? offset = _undefined,
     Object? limit = _undefined,
@@ -706,9 +700,8 @@ class _CopyWithStubImpl$Input$Pagination<TRes>
     implements CopyWith$Input$Pagination<TRes> {
   _CopyWithStubImpl$Input$Pagination(this._res);

-  final TRes _res;
+  TRes _res;

-  @override
   call({
     int? offset,
     int? limit,
@@ -784,7 +777,7 @@ class Input$StringFilter {
     if (identical(this, other)) {
       return true;
     }
-    if (other is! Input$StringFilter || runtimeType != other.runtimeType) {
+    if (!(other is Input$StringFilter) || runtimeType != other.runtimeType) {
       return false;
     }
     final l$eq = eq;
@@ -871,7 +864,6 @@ class _CopyWithImpl$Input$StringFilter<TRes>

   static const _undefined = <dynamic, dynamic>{};

-  @override
   TRes call({
     Object? eq = _undefined,
     Object? substr = _undefined,
@@ -889,9 +881,8 @@ class _CopyWithStubImpl$Input$StringFilter<TRes>
     implements CopyWith$Input$StringFilter<TRes> {
   _CopyWithStubImpl$Input$StringFilter(this._res);

-  final TRes _res;
+  TRes _res;

-  @override
   call({
     String? eq,
     String? substr,
@@ -955,7 +946,7 @@ class Input$TorrentFilter {
     if (identical(this, other)) {
       return true;
     }
-    if (other is! Input$TorrentFilter || runtimeType != other.runtimeType) {
+    if (!(other is Input$TorrentFilter) || runtimeType != other.runtimeType) {
       return false;
     }
     final l$everything = everything;
@@ -1018,7 +1009,6 @@ class _CopyWithImpl$Input$TorrentFilter<TRes>

   static const _undefined = <dynamic, dynamic>{};

-  @override
   TRes call({
     Object? everything = _undefined,
     Object? infohash = _undefined,
@@ -1034,9 +1024,8 @@ class _CopyWithStubImpl$Input$TorrentFilter<TRes>
     implements CopyWith$Input$TorrentFilter<TRes> {
   _CopyWithStubImpl$Input$TorrentFilter(this._res);

-  final TRes _res;
+  TRes _res;

-  @override
   call({
     bool? everything,
     String? infohash,
@@ -1046,22 +1035,32 @@ class _CopyWithStubImpl$Input$TorrentFilter<TRes>

 class Input$TorrentsFilter {
   factory Input$TorrentsFilter({
+    Input$StringFilter? infohash,
     Input$StringFilter? name,
     Input$IntFilter? bytesCompleted,
     Input$IntFilter? bytesMissing,
     Input$IntFilter? peersCount,
+    Input$BooleanFilter? downloading,
   }) =>
       Input$TorrentsFilter._({
+        if (infohash != null) r'infohash': infohash,
         if (name != null) r'name': name,
         if (bytesCompleted != null) r'bytesCompleted': bytesCompleted,
         if (bytesMissing != null) r'bytesMissing': bytesMissing,
         if (peersCount != null) r'peersCount': peersCount,
+        if (downloading != null) r'downloading': downloading,
       });

   Input$TorrentsFilter._(this._$data);

   factory Input$TorrentsFilter.fromJson(Map<String, dynamic> data) {
     final result$data = <String, dynamic>{};
+    if (data.containsKey('infohash')) {
+      final l$infohash = data['infohash'];
+      result$data['infohash'] = l$infohash == null
+          ? null
+          : Input$StringFilter.fromJson((l$infohash as Map<String, dynamic>));
+    }
     if (data.containsKey('name')) {
       final l$name = data['name'];
       result$data['name'] = l$name == null
@@ -1087,11 +1086,21 @@ class Input$TorrentsFilter {
           ? null
           : Input$IntFilter.fromJson((l$peersCount as Map<String, dynamic>));
     }
+    if (data.containsKey('downloading')) {
+      final l$downloading = data['downloading'];
+      result$data['downloading'] = l$downloading == null
+          ? null
+          : Input$BooleanFilter.fromJson(
+              (l$downloading as Map<String, dynamic>));
+    }
     return Input$TorrentsFilter._(result$data);
   }

   Map<String, dynamic> _$data;

+  Input$StringFilter? get infohash =>
+      (_$data['infohash'] as Input$StringFilter?);
+
   Input$StringFilter? get name => (_$data['name'] as Input$StringFilter?);

   Input$IntFilter? get bytesCompleted =>
@@ -1102,8 +1111,15 @@ class Input$TorrentsFilter {

   Input$IntFilter? get peersCount => (_$data['peersCount'] as Input$IntFilter?);

+  Input$BooleanFilter? get downloading =>
+      (_$data['downloading'] as Input$BooleanFilter?);
+
   Map<String, dynamic> toJson() {
     final result$data = <String, dynamic>{};
+    if (_$data.containsKey('infohash')) {
+      final l$infohash = infohash;
+      result$data['infohash'] = l$infohash?.toJson();
+    }
     if (_$data.containsKey('name')) {
       final l$name = name;
       result$data['name'] = l$name?.toJson();
@@ -1120,6 +1136,10 @@ class Input$TorrentsFilter {
       final l$peersCount = peersCount;
       result$data['peersCount'] = l$peersCount?.toJson();
     }
+    if (_$data.containsKey('downloading')) {
+      final l$downloading = downloading;
+      result$data['downloading'] = l$downloading?.toJson();
+    }
     return result$data;
   }

@@ -1134,7 +1154,16 @@ class Input$TorrentsFilter {
     if (identical(this, other)) {
       return true;
     }
-    if (other is! Input$TorrentsFilter || runtimeType != other.runtimeType) {
+    if (!(other is Input$TorrentsFilter) || runtimeType != other.runtimeType) {
+      return false;
+    }
+    final l$infohash = infohash;
+    final lOther$infohash = other.infohash;
+    if (_$data.containsKey('infohash') !=
+        other._$data.containsKey('infohash')) {
+      return false;
+    }
+    if (l$infohash != lOther$infohash) {
       return false;
     }
     final l$name = name;
@@ -1172,20 +1201,33 @@ class Input$TorrentsFilter {
     if (l$peersCount != lOther$peersCount) {
       return false;
     }
+    final l$downloading = downloading;
+    final lOther$downloading = other.downloading;
+    if (_$data.containsKey('downloading') !=
+        other._$data.containsKey('downloading')) {
+      return false;
+    }
+    if (l$downloading != lOther$downloading) {
+      return false;
+    }
     return true;
   }

   @override
   int get hashCode {
+    final l$infohash = infohash;
     final l$name = name;
     final l$bytesCompleted = bytesCompleted;
     final l$bytesMissing = bytesMissing;
     final l$peersCount = peersCount;
+    final l$downloading = downloading;
     return Object.hashAll([
+      _$data.containsKey('infohash') ? l$infohash : const {},
      _$data.containsKey('name') ? l$name : const {},
      _$data.containsKey('bytesCompleted') ? l$bytesCompleted : const {},
      _$data.containsKey('bytesMissing') ? l$bytesMissing : const {},
      _$data.containsKey('peersCount') ? l$peersCount : const {},
+      _$data.containsKey('downloading') ? l$downloading : const {},
     ]);
   }
 }
@@ -1200,15 +1242,19 @@ abstract class CopyWith$Input$TorrentsFilter<TRes> {
       _CopyWithStubImpl$Input$TorrentsFilter;

   TRes call({
+    Input$StringFilter? infohash,
     Input$StringFilter? name,
     Input$IntFilter? bytesCompleted,
     Input$IntFilter? bytesMissing,
     Input$IntFilter? peersCount,
+    Input$BooleanFilter? downloading,
   });
+  CopyWith$Input$StringFilter<TRes> get infohash;
   CopyWith$Input$StringFilter<TRes> get name;
   CopyWith$Input$IntFilter<TRes> get bytesCompleted;
   CopyWith$Input$IntFilter<TRes> get bytesMissing;
   CopyWith$Input$IntFilter<TRes> get peersCount;
+  CopyWith$Input$BooleanFilter<TRes> get downloading;
 }

 class _CopyWithImpl$Input$TorrentsFilter<TRes>
@@ -1224,15 +1270,18 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>

   static const _undefined = <dynamic, dynamic>{};

-  @override
   TRes call({
+    Object? infohash = _undefined,
     Object? name = _undefined,
     Object? bytesCompleted = _undefined,
     Object? bytesMissing = _undefined,
     Object? peersCount = _undefined,
+    Object? downloading = _undefined,
   }) =>
       _then(Input$TorrentsFilter._({
         ..._instance._$data,
+        if (infohash != _undefined)
+          'infohash': (infohash as Input$StringFilter?),
         if (name != _undefined) 'name': (name as Input$StringFilter?),
         if (bytesCompleted != _undefined)
           'bytesCompleted': (bytesCompleted as Input$IntFilter?),
@@ -1240,9 +1289,17 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
           'bytesMissing': (bytesMissing as Input$IntFilter?),
         if (peersCount != _undefined)
           'peersCount': (peersCount as Input$IntFilter?),
+        if (downloading != _undefined)
+          'downloading': (downloading as Input$BooleanFilter?),
       }));

-  @override
+  CopyWith$Input$StringFilter<TRes> get infohash {
+    final local$infohash = _instance.infohash;
+    return local$infohash == null
+        ? CopyWith$Input$StringFilter.stub(_then(_instance))
+        : CopyWith$Input$StringFilter(local$infohash, (e) => call(infohash: e));
+  }
+
   CopyWith$Input$StringFilter<TRes> get name {
     final local$name = _instance.name;
     return local$name == null
@@ -1250,7 +1307,6 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
         : CopyWith$Input$StringFilter(local$name, (e) => call(name: e));
   }

-  @override
   CopyWith$Input$IntFilter<TRes> get bytesCompleted {
     final local$bytesCompleted = _instance.bytesCompleted;
     return local$bytesCompleted == null
@@ -1259,7 +1315,6 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
             local$bytesCompleted, (e) => call(bytesCompleted: e));
   }

-  @override
   CopyWith$Input$IntFilter<TRes> get bytesMissing {
     final local$bytesMissing = _instance.bytesMissing;
     return local$bytesMissing == null
@@ -1268,7 +1323,6 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
             local$bytesMissing, (e) => call(bytesMissing: e));
   }

-  @override
   CopyWith$Input$IntFilter<TRes> get peersCount {
     final local$peersCount = _instance.peersCount;
     return local$peersCount == null
@@ -1276,38 +1330,49 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
         : CopyWith$Input$IntFilter(
             local$peersCount, (e) => call(peersCount: e));
   }
+
+  CopyWith$Input$BooleanFilter<TRes> get downloading {
+    final local$downloading = _instance.downloading;
+    return local$downloading == null
+        ? CopyWith$Input$BooleanFilter.stub(_then(_instance))
+        : CopyWith$Input$BooleanFilter(
+            local$downloading, (e) => call(downloading: e));
+  }
 }

 class _CopyWithStubImpl$Input$TorrentsFilter<TRes>
     implements CopyWith$Input$TorrentsFilter<TRes> {
   _CopyWithStubImpl$Input$TorrentsFilter(this._res);

-  final TRes _res;
+  TRes _res;

-  @override
   call({
+    Input$StringFilter? infohash,
     Input$StringFilter? name,
     Input$IntFilter? bytesCompleted,
     Input$IntFilter? bytesMissing,
     Input$IntFilter? peersCount,
+    Input$BooleanFilter? downloading,
   }) =>
       _res;

-  @override
+  CopyWith$Input$StringFilter<TRes> get infohash =>
+      CopyWith$Input$StringFilter.stub(_res);
+
   CopyWith$Input$StringFilter<TRes> get name =>
       CopyWith$Input$StringFilter.stub(_res);

-  @override
   CopyWith$Input$IntFilter<TRes> get bytesCompleted =>
       CopyWith$Input$IntFilter.stub(_res);

-  @override
   CopyWith$Input$IntFilter<TRes> get bytesMissing =>
       CopyWith$Input$IntFilter.stub(_res);

-  @override
   CopyWith$Input$IntFilter<TRes> get peersCount =>
       CopyWith$Input$IntFilter.stub(_res);

+  CopyWith$Input$BooleanFilter<TRes> get downloading =>
+      CopyWith$Input$BooleanFilter.stub(_res);
 }

 enum Enum$__TypeKind {

@@ -6,11 +6,20 @@ mutation MarkTorrentDownload($infohash: String!) {
   }
 }

-query ListTorrents {
-  torrents {
+query ListTorrents($downloading: Boolean) {
+  torrents(filter: {
+    downloading: {
+      eq: $downloading
+    }
+  }) {
     name
     infohash
     bytesCompleted
     bytesMissing
+    peers {
+      ip
+      downloadRate
+      clientName
+    }
   }
 }
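
A minimal sketch (not part of the commit) of calling the reworked ListTorrents operation over plain HTTP from Go; the localhost:4444/graphql address mirrors the endpoint seen elsewhere in this commit and is an assumption, as is everything else in the snippet:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// The operation body mirrors the ListTorrents query above.
	query := `query ListTorrents($downloading: Boolean) {
  torrents(filter: { downloading: { eq: $downloading } }) {
    name
    infohash
    bytesCompleted
    bytesMissing
  }
}`
	body, _ := json.Marshal(map[string]any{
		"query":     query,
		"variables": map[string]any{"downloading": true},
	})
	// Assumed endpoint; adjust to the actual server address.
	resp, err := http.Post("http://localhost:4444/graphql", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	var out map[string]any
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out["data"])
}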
File diff suppressed because it is too large
ui/lib/components/sliver_header.dart (Normal file, 91 additions)
@@ -0,0 +1,91 @@
import 'package:flutter/material.dart';

class HideableHeaderSliver extends StatelessWidget {
  final Widget? leading;
  final Widget body;
  final double height;
  final List<Widget>? actions;

  const HideableHeaderSliver({
    super.key,
    this.leading,
    required this.body,
    this.actions,
    this.height = 150,
  });

  @override
  Widget build(BuildContext context) {
    return SliverPersistentHeader(
      floating: true,
      pinned: false,
      delegate: _HideableHeaderSliverDelegate(
        leading: leading,
        body: body,
        actions: actions,
        height: height,
      ),
    );
  }
}

class _HideableHeaderSliverDelegate extends SliverPersistentHeaderDelegate {
  final Widget? leading;
  final Widget body;
  final List<Widget>? actions;
  final double height;

  const _HideableHeaderSliverDelegate({
    required this.leading,
    required this.body,
    required this.actions,
    required this.height,
  });

  @override
  double get maxExtent => height;

  @override
  double get minExtent => height;

  @override
  bool shouldRebuild(covariant SliverPersistentHeaderDelegate oldDelegate) => true;

  @override
  Widget build(BuildContext context, double shrinkOffset, bool overlapsContent) {
    final content = <Widget>[
      if (leading != null) leading!,
      Expanded(child: body),
      if (actions != null && actions!.isNotEmpty) ButtonBar(children: actions!),
    ];

    final appBarTheme = AppBarTheme.of(context);
    final colorScheme = Theme.of(context).colorScheme;
    final onTop = (shrinkOffset == 0);

    return Material(
      color:
          onTop ? appBarTheme.backgroundColor ?? colorScheme.surface : colorScheme.surfaceContainer,
      elevation: onTop ? 0 : appBarTheme.elevation ?? 3,
      surfaceTintColor: appBarTheme.surfaceTintColor ?? colorScheme.surfaceTint,
      child: ClipRect(
        child: SizedBox(
          height: maxExtent,
          child: Column(
            children: [
              const Spacer(),
              Row(
                children: content,
              ),
              const Spacer(),
              const Divider(
                height: 1,
                thickness: 1,
              ),
            ],
          ),
        ),
      ),
    );
  }
}
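This commit uses the new widget as a floating header in both the downloads screen and the file view. A minimal usage sketch, not part of the diff (ExampleScreen and the fixed item count are made up for illustration):

import 'package:flutter/material.dart';
import 'package:tstor_ui/components/sliver_header.dart';

// Illustrative only: a HideableHeaderSliver floating above a sliver list,
// similar to how DownloadsScreen and the file view embed it in this commit.
class ExampleScreen extends StatelessWidget {
  const ExampleScreen({super.key});

  @override
  Widget build(BuildContext context) {
    return CustomScrollView(
      slivers: [
        const HideableHeaderSliver(
          height: 80,
          leading: Icon(Icons.filter_list),
          body: Text("Header content"),
        ),
        SliverList.builder(
          itemCount: 50,
          itemBuilder: (context, index) => ListTile(title: Text("Item $index")),
        ),
      ],
    );
  }
}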
@@ -43,9 +43,6 @@ class _MyHomePageState extends State<MyHomePage> {
   @override
   Widget build(BuildContext context) {
     return Scaffold(
-      appBar: AppBar(
-        title: const Text("tStor"),
-      ),
       body: <Widget>[
         const FileViewScreen(),
         const DownloadsScreen(),
@@ -2,6 +2,7 @@ import 'package:flutter/material.dart';
 import 'package:tstor_ui/api/client.dart';
 import 'package:tstor_ui/api/torrent.graphql.dart';
 import 'package:tstor_ui/components/download.dart';
+import 'package:tstor_ui/components/sliver_header.dart';
 
 class DownloadsScreen extends StatefulWidget {
   const DownloadsScreen({super.key});
@@ -11,45 +12,98 @@ class DownloadsScreen extends StatefulWidget {
 }
 
 class _DownloadsScreenState extends State<DownloadsScreen> {
+  bool filterDownloading = false;
+
   @override
   Widget build(BuildContext context) {
     return FutureBuilder(
-      future: client.query$ListTorrents(),
+      key: GlobalKey(),
+      future: client.query$ListTorrents(Options$Query$ListTorrents(
+        variables: Variables$Query$ListTorrents(downloading: filterDownloading),
+      )),
       builder: (context, snapshot) {
-        if (!snapshot.hasData || snapshot.data == null) {
-          return const Center(child: CircularProgressIndicator());
-        }
-
-        final torrents = snapshot.data!.parsedData!.torrents;
-
-        return ListView.builder(
-          itemCount: torrents.length,
-          itemBuilder: (context, index) {
-            final torrent = torrents[index];
-            return ListTile(
-              title: Text(torrent.name),
-              subtitle: DownloadProgress(
-                  torrent.bytesCompleted, torrent.bytesCompleted + torrent.bytesMissing),
-              trailing: Column(
-                mainAxisSize: MainAxisSize.max,
-                mainAxisAlignment: MainAxisAlignment.spaceAround,
-                children: [
-                  IconButton(
-                    onPressed: () => client.mutate$MarkTorrentDownload(
-                      Options$Mutation$MarkTorrentDownload(
-                        variables: Variables$Mutation$MarkTorrentDownload(
-                          infohash: torrent.infohash,
-                        ),
-                      ),
-                    ),
-                    icon: const Icon(Icons.download),
-                  )
-                ],
-              ),
-            );
-          },
+        final torrents = snapshot.data?.parsedData?.torrents;
+
+        return NestedScrollView(
+          floatHeaderSlivers: true,
+          headerSliverBuilder: (context, innerBoxIsScrolled) => [
+            HideableHeaderSliver(
+              height: 80,
+              body: Padding(
+                padding: const EdgeInsets.all(8.0),
+                child: Wrap(
+                  spacing: 8,
+                  runSpacing: 8,
+                  children: [
+                    FilterChip(
+                      label: const Text("Downloading"),
+                      selected: filterDownloading,
+                      onSelected: (value) => setState(() {
+                        filterDownloading = value;
+                      }),
+                    ),
+                  ],
+                ),
+              ),
+              actions: [
+                IconButton(
+                  icon: const Icon(Icons.refresh),
+                  onPressed: () => setState(() {}),
+                ),
+              ],
+            ),
+          ],
+          body: snapshot.hasData && torrents != null
+              ? ListView.builder(
+                  itemCount: torrents.length,
+                  itemBuilder: (context, index) => TorrentTile(torrent: torrents[index]),
+                )
+              : const Center(child: CircularProgressIndicator()),
         );
       },
     );
   }
 }
+
+class TorrentTile extends StatelessWidget {
+  final Query$ListTorrents$torrents torrent;
+
+  const TorrentTile({super.key, required this.torrent});
+
+  @override
+  Widget build(BuildContext context) {
+    return ListTile(
+      title: Text(torrent.name),
+      isThreeLine: true,
+      subtitle: Column(
+        children: [
+          DownloadProgress(
+            torrent.bytesCompleted,
+            torrent.bytesCompleted + torrent.bytesMissing,
+          ),
+          Row(
+            children: [
+              Text("Peers: ${torrent.peers.length}"),
+            ],
+          ),
+        ],
+      ),
+      trailing: Column(
+        mainAxisSize: MainAxisSize.max,
+        mainAxisAlignment: MainAxisAlignment.spaceAround,
+        children: [
+          IconButton(
+            onPressed: () => client.mutate$MarkTorrentDownload(
+              Options$Mutation$MarkTorrentDownload(
+                variables: Variables$Mutation$MarkTorrentDownload(
+                  infohash: torrent.infohash,
+                ),
+              ),
+            ),
+            icon: const Icon(Icons.download),
+          )
+        ],
+      ),
+    );
+  }
+}
@@ -5,6 +5,7 @@ import 'package:tstor_ui/api/client.dart';
 import 'package:tstor_ui/api/fs_entry.graphql.dart';
 import 'package:tstor_ui/api/torrent.graphql.dart';
 import 'package:tstor_ui/components/download.dart';
+import 'package:tstor_ui/components/sliver_header.dart';
 
 import 'package:tstor_ui/font/t_icons_icons.dart';
 import 'package:path/path.dart' as p;
@@ -116,7 +117,7 @@ class _FileViewScreenState extends State<FileViewScreen> {
 
     return CustomScrollView(
       slivers: [
-        EntryInfoSliver(entry: entry),
+        EntryHeaderSliver(entry: entry),
         SliverList.builder(
           itemCount: entries.length,
           itemBuilder: (context, index) {
@@ -214,20 +215,18 @@ class DirEntry extends StatelessWidget {
   }
 }
 
-class EntryInfoSliver extends StatelessWidget {
+class EntryHeaderSliver extends StatelessWidget {
   final Query$ListDir$fsEntry entry;
 
-  const EntryInfoSliver({super.key, required this.entry});
+  const EntryHeaderSliver({super.key, required this.entry});
 
   @override
   Widget build(BuildContext context) {
     switch (entry) {
       case Query$ListDir$fsEntry$$TorrentFS entry:
         final total = entry.torrent.bytesCompleted + entry.torrent.bytesMissing;
-
-        return EntryInfoHeader(
-          icon: TIcons.bittorrent_bttold_logo,
-          title: Text(entry.torrent.name),
+        return HideableHeaderSliver(
+          leading: const Icon(TIcons.bittorrent_bttold_logo),
           body: Column(
             crossAxisAlignment: CrossAxisAlignment.start,
             children: [
@@ -252,101 +251,10 @@ class EntryInfoSliver extends StatelessWidget {
         );
 
       default:
-        return EntryInfoHeader(
-          icon: Icons.folder,
-          title: Text(entry.name),
+        return HideableHeaderSliver(
+          leading: const Icon(Icons.folder),
           body: Text(entry.name),
         );
     }
   }
 }
-
-class EntryInfoHeader extends StatelessWidget {
-  final IconData icon;
-  final Widget title;
-  final Widget body;
-  final List<Widget>? actions;
-
-  const EntryInfoHeader({
-    super.key,
-    required this.icon,
-    required this.title,
-    required this.body,
-    this.actions,
-  });
-
-  @override
-  Widget build(BuildContext context) {
-    return SliverPersistentHeader(
-      floating: true,
-      pinned: false,
-      delegate: EntryInfoSliverHeaderDelegate(icon: icon, title: title, body: body),
-    );
-  }
-}
-
-class EntryInfoSliverHeaderDelegate extends SliverPersistentHeaderDelegate {
-  final IconData icon;
-  final Widget title;
-  final Widget body;
-  final List<Widget>? actions;
-  final double size;
-
-  const EntryInfoSliverHeaderDelegate({
-    required this.icon,
-    required this.title,
-    required this.body,
-    this.actions,
-    this.size = 150,
-  });
-
-  @override
-  double get maxExtent => size;
-
-  @override
-  double get minExtent => size;
-
-  @override
-  bool shouldRebuild(covariant SliverPersistentHeaderDelegate oldDelegate) => true;
-
-  @override
-  Widget build(BuildContext context, double shrinkOffset, bool overlapsContent) {
-    final content = [
-      Icon(icon, size: 50),
-      Expanded(child: body),
-    ];
-
-    if (actions != null && actions!.isNotEmpty) {
-      content.add(ButtonBar(children: actions!));
-    }
-
-    final appBarTheme = AppBarTheme.of(context);
-    final colorScheme = Theme.of(context).colorScheme;
-    final onTop = (shrinkOffset == 0);
-
-    return Material(
-      color:
-          onTop ? appBarTheme.backgroundColor ?? colorScheme.surface : colorScheme.surfaceContainer,
-      elevation: onTop ? 0 : appBarTheme.elevation ?? 3,
-      surfaceTintColor: appBarTheme.surfaceTintColor ?? colorScheme.surfaceTint,
-      child: ClipRect(
-        child: SizedBox(
-          height: maxExtent,
-          child: Column(
-            children: [
-              const Spacer(),
-              Row(
-                children: content,
-              ),
-              const Spacer(),
-              const Divider(
-                height: 1,
-                thickness: 1,
-              ),
-            ],
-          ),
-        ),
-      ),
-    );
-  }
-}