Compare commits

..

2 commits

Author SHA1 Message Date
ae4501ae21 seek, load only with priority, qbittorrent
All checks were successful
docker / build-docker (push) Successful in 4m25s
2024-08-23 01:16:16 +03:00
e517332a65 torrent fix 2024-08-15 11:23:44 +03:00
42 changed files with 3568 additions and 1444 deletions

View file

@ -11,7 +11,10 @@ COPY ./src ./src
COPY ./cmd ./cmd
ARG TARGETOS TARGETARCH
RUN --mount=type=cache,mode=0777,target=/go/pkg/mod CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH go build -tags timetzdata -o /tstor ./cmd/tstor/main.go
RUN --mount=type=cache,mode=0777,target=/go/pkg/mod \
CGO_ENABLED=0 GOOS=$TARGETOS GOARCH=$TARGETARCH \
go build -tags timetzdata \
-o /tstor ./cmd/tstor/main.go
FROM scratch

View file

@ -2,6 +2,7 @@ package main
import (
"context"
"errors"
"fmt"
"log/slog"
@ -75,23 +76,22 @@ func run(configPath string) error {
if err != nil {
return err
}
defer client.Shutdown(ctx)
log := rlog.Component("run")
// TODO make optional
err = syscall.Setpriority(syscall.PRIO_PGRP, 0, 19)
if err != nil {
log.Error(ctx, "set priority failed", rlog.Error(err))
}
// err = syscall.Setpriority(syscall.PRIO_PGRP, 0, 19)
// if err != nil {
// log.Error(ctx, "set priority failed", rlog.Error(err))
// }
if err := os.MkdirAll(conf.SourceDir, 0744); err != nil {
return fmt.Errorf("error creating data folder: %w", err)
}
sourceFs := osfs.New(conf.SourceDir, osfs.WithBoundOS())
tsrv, err := torrent.NewService(sourceFs, conf.TorrentClient)
tsrv, err := torrent.NewDaemon(sourceFs, conf.TorrentClient)
if err != nil {
return fmt.Errorf("error creating service: %w", err)
}
@ -187,7 +187,7 @@ func run(configPath string) error {
go func() {
logFilename := filepath.Join(conf.Log.Path, "logs")
err := delivery.New(nil, tsrv, sfs, logFilename, conf)
err := delivery.Run(tsrv, sfs, logFilename, conf)
if err != nil {
log.Error(ctx, "error initializing HTTP server", rlog.Error(err))
}
@ -197,5 +197,7 @@ func run(configPath string) error {
signal.Notify(sigChan, os.Interrupt, syscall.SIGINT, syscall.SIGTERM)
<-sigChan
return tsrv.Close(ctx)
return errors.Join(
tsrv.Close(ctx),
)
}

63
go.mod
View file

@ -2,27 +2,27 @@ module git.kmsign.ru/royalcat/tstor
go 1.22.3
replace github.com/bytedance/sonic v1.11.9 => github.com/bytedance/sonic v1.12.1
require (
github.com/99designs/gqlgen v0.17.49
github.com/agoda-com/opentelemetry-go/otelslog v0.1.1
github.com/agoda-com/opentelemetry-logs-go v0.5.0
github.com/anacrolix/dht/v2 v2.21.1
github.com/anacrolix/log v0.15.2
github.com/anacrolix/missinggo/v2 v2.7.3
github.com/anacrolix/torrent v1.56.1
github.com/anacrolix/log v0.15.3-0.20240627045001-cd912c641d83
github.com/anacrolix/torrent v1.56.2-0.20240813010934-f4711825e84e
github.com/billziss-gh/cgofuse v1.5.0
github.com/bodgit/sevenzip v1.5.1
github.com/cyphar/filepath-securejoin v0.2.5
github.com/dgraph-io/badger/v4 v4.2.0
github.com/dustin/go-humanize v1.0.1
github.com/gin-gonic/gin v1.9.1
github.com/go-git/go-billy/v5 v5.5.0
github.com/gofrs/uuid/v5 v5.1.0
github.com/google/go-github/v63 v63.0.0
github.com/google/uuid v1.6.0
github.com/grafana/otel-profiling-go v0.5.1
github.com/grafana/pyroscope-go v1.1.1
github.com/grafana/pyroscope-go v1.1.2
github.com/hashicorp/golang-lru/v2 v2.0.7
github.com/iceber/iouring-go v0.0.0-20230403020409-002cfd2e2a90
github.com/knadh/koanf/parsers/yaml v0.1.0
github.com/knadh/koanf/providers/env v0.1.0
github.com/knadh/koanf/providers/file v0.1.0
@ -32,6 +32,7 @@ require (
github.com/labstack/echo/v4 v4.12.0
github.com/mattetti/filebuffer v1.0.1
github.com/nwaples/rardecode/v2 v2.0.0-beta.2
github.com/prometheus/client_golang v1.19.1
github.com/rasky/go-xdr v0.0.0-20170124162913-1a41d1a06c93
github.com/ravilushqa/otelgqlgen v0.15.0
github.com/royalcat/ctxio v0.0.0-20240602060200-590d464c39be
@ -46,17 +47,19 @@ require (
github.com/vektah/gqlparser/v2 v2.5.16
github.com/willscott/go-nfs-client v0.0.0-20240104095149-b44639837b00
github.com/willscott/memphis v0.0.0-20210922141505-529d4987ab7e
go.opentelemetry.io/otel v1.27.0
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.27.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0
go.opentelemetry.io/otel/metric v1.27.0
go.opentelemetry.io/otel/sdk v1.27.0
go.opentelemetry.io/otel/sdk/metric v1.27.0
go.opentelemetry.io/otel/trace v1.27.0
github.com/xuthus5/qbittorrent-client-go v0.0.0-20240710140754-51c95559ea0a
go.opentelemetry.io/otel v1.28.0
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.28.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0
go.opentelemetry.io/otel/exporters/prometheus v0.50.0
go.opentelemetry.io/otel/metric v1.28.0
go.opentelemetry.io/otel/sdk v1.28.0
go.opentelemetry.io/otel/sdk/metric v1.28.0
go.opentelemetry.io/otel/trace v1.28.0
golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842
golang.org/x/net v0.26.0
golang.org/x/sync v0.7.0
golang.org/x/sys v0.21.0
golang.org/x/sys v0.22.0
)
require (
@ -64,12 +67,13 @@ require (
github.com/agnivade/levenshtein v1.1.1 // indirect
github.com/ajwerner/btree v0.0.0-20211221152037-f427b3e689c0 // indirect
github.com/alecthomas/atomic v0.1.0-alpha2 // indirect
github.com/anacrolix/chansync v0.4.0 // indirect
github.com/anacrolix/chansync v0.4.1-0.20240627045151-1aa1ac392fe8 // indirect
github.com/anacrolix/envpprof v1.3.0 // indirect
github.com/anacrolix/generics v0.0.2-0.20240227122613-f95486179cab // indirect
github.com/anacrolix/go-libutp v1.3.1 // indirect
github.com/anacrolix/missinggo v1.3.0 // indirect
github.com/anacrolix/missinggo/perf v1.0.0 // indirect
github.com/anacrolix/missinggo/v2 v2.7.3 // indirect
github.com/anacrolix/mmsg v1.0.0 // indirect
github.com/anacrolix/multiless v0.3.1-0.20230203023154-f3d27407d8f1 // indirect
github.com/anacrolix/stm v0.5.0 // indirect
@ -79,18 +83,20 @@ require (
github.com/andybalholm/brotli v1.1.0 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/benbjohnson/immutable v0.4.3 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.13.0 // indirect
github.com/bodgit/plumbing v1.3.0 // indirect
github.com/bodgit/windows v1.0.1 // indirect
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 // indirect
github.com/bytedance/sonic v1.11.6 // indirect
github.com/bytedance/sonic/loader v0.1.1 // indirect
github.com/bytedance/sonic v1.11.9 // indirect
github.com/bytedance/sonic/loader v0.2.0 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cespare/xxhash v1.1.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudwego/base64x v0.1.4 // indirect
github.com/cloudwego/iasm v0.2.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect
github.com/cyphar/filepath-securejoin v0.2.5 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dgraph-io/ristretto v0.1.1 // indirect
github.com/edsrzf/mmap-go v1.1.0 // indirect
@ -100,7 +106,7 @@ require (
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-llsqlite/adapter v0.1.0 // indirect
github.com/go-llsqlite/crawshaw v0.5.2-0.20240425034140-f30eb7704568 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
@ -115,15 +121,17 @@ require (
github.com/golang/snappy v0.0.4 // indirect
github.com/google/btree v1.1.2 // indirect
github.com/google/flatbuffers v24.3.25+incompatible // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/gorilla/schema v1.4.1 // indirect
github.com/gorilla/websocket v1.5.1 // indirect
github.com/grafana/pyroscope-go/godeltaprof v0.1.7 // indirect
github.com/grafana/pyroscope-go/godeltaprof v0.1.8 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/huandu/xstrings v1.4.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.17.8 // indirect
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
github.com/klauspost/cpuid/v2 v2.2.8 // indirect
github.com/knadh/koanf/maps v0.1.1 // indirect
github.com/labstack/gommon v0.4.2 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
@ -139,6 +147,7 @@ require (
github.com/mschoch/smat v0.2.0 // indirect
github.com/multiformats/go-multihash v0.2.3 // indirect
github.com/multiformats/go-varint v0.0.6 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
@ -163,10 +172,12 @@ require (
github.com/polydawn/go-timeless-api v0.0.0-20220821201550-b93919e12c56 // indirect
github.com/polydawn/refmt v0.89.0 // indirect
github.com/polydawn/rio v0.0.0-20220823181337-7c31ad9831a4 // indirect
github.com/prometheus/client_model v0.6.1 // indirect
github.com/prometheus/common v0.55.0 // indirect
github.com/prometheus/procfs v0.15.1 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/rs/dnscache v0.0.0-20230804202142-fc85eb664529 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46 // indirect
github.com/samber/lo v1.39.0 // indirect
github.com/sosodev/duration v1.3.1 // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect
@ -181,8 +192,8 @@ require (
go.etcd.io/bbolt v1.3.9 // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/contrib v1.26.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0 // indirect
go.opentelemetry.io/proto/otlp v1.2.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 // indirect
go.opentelemetry.io/proto/otlp v1.3.1 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/arch v0.8.0 // indirect
golang.org/x/crypto v0.24.0 // indirect
@ -190,10 +201,10 @@ require (
golang.org/x/text v0.16.0 // indirect
golang.org/x/time v0.5.0 // indirect
golang.org/x/tools v0.22.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240520151616-dc85e6b867a5 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240515191416-fc5f0ca64291 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094 // indirect
google.golang.org/grpc v1.64.0 // indirect
google.golang.org/protobuf v1.34.1 // indirect
google.golang.org/protobuf v1.34.2 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
lukechampine.com/blake3 v1.1.6 // indirect
modernc.org/libc v1.50.5 // indirect

115
go.sum
View file

@ -50,8 +50,8 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/anacrolix/chansync v0.4.0 h1:Md0HM7zYCAO4KwNwgcIRgxNsMxiRuk7D1Ha0Uo+2y60=
github.com/anacrolix/chansync v0.4.0/go.mod h1:DZsatdsdXxD0WiwcGl0nJVwyjCKMDv+knl1q2iBjA2k=
github.com/anacrolix/chansync v0.4.1-0.20240627045151-1aa1ac392fe8 h1:eyb0bBaQKMOh5Se/Qg54shijc8K4zpQiOjEhKFADkQM=
github.com/anacrolix/chansync v0.4.1-0.20240627045151-1aa1ac392fe8/go.mod h1:DZsatdsdXxD0WiwcGl0nJVwyjCKMDv+knl1q2iBjA2k=
github.com/anacrolix/dht/v2 v2.21.1 h1:s1rKkfLLcmBHKv4v/mtMkIeHIEptzEFiB6xVu54+5/o=
github.com/anacrolix/dht/v2 v2.21.1/go.mod h1:SDGC+sEs1pnO2sJGYuhvIis7T8749dDHNfcjtdH4e3g=
github.com/anacrolix/envpprof v0.0.0-20180404065416-323002cec2fa/go.mod h1:KgHhUaQMc8cC0+cEflSgCFNFbKwi5h54gqtVn8yhP7c=
@ -68,8 +68,8 @@ github.com/anacrolix/log v0.3.0/go.mod h1:lWvLTqzAnCWPJA08T2HCstZi0L1y2Wyvm3FJgw
github.com/anacrolix/log v0.6.0/go.mod h1:lWvLTqzAnCWPJA08T2HCstZi0L1y2Wyvm3FJgwU9jwU=
github.com/anacrolix/log v0.13.1/go.mod h1:D4+CvN8SnruK6zIFS/xPoRJmtvtnxs+CSfDQ+BFxZ68=
github.com/anacrolix/log v0.14.2/go.mod h1:1OmJESOtxQGNMlUO5rcv96Vpp9mfMqXXbe2RdinFLdY=
github.com/anacrolix/log v0.15.2 h1:LTSf5Wm6Q4GNWPFMBP7NPYV6UBVZzZLKckL+/Lj72Oo=
github.com/anacrolix/log v0.15.2/go.mod h1:m0poRtlr41mriZlXBQ9SOVZ8yZBkLjOkDhd5Li5pITA=
github.com/anacrolix/log v0.15.3-0.20240627045001-cd912c641d83 h1:9o/yVzzLzYaBDFx8B27yhkvBLhNnRAuSTK7Y+yZKVtU=
github.com/anacrolix/log v0.15.3-0.20240627045001-cd912c641d83/go.mod h1:xvHjsYWWP7yO8PZwtuIp/k0DBlu07pSJqH4SEC78Vwc=
github.com/anacrolix/lsan v0.0.0-20211126052245-807000409a62 h1:P04VG6Td13FHMgS5ZBcJX23NPC/fiC4cp9bXwYujdYM=
github.com/anacrolix/lsan v0.0.0-20211126052245-807000409a62/go.mod h1:66cFKPCO7Sl4vbFnAaSq7e4OXtdMhRSBagJGWgmpJbM=
github.com/anacrolix/missinggo v0.0.0-20180725070939-60ef2fbf63df/go.mod h1:kwGiTUTZ0+p4vAz3VbAI5a30t2YbvemcmspjKwrAz5s=
@ -99,8 +99,8 @@ github.com/anacrolix/sync v0.5.1/go.mod h1:BbecHL6jDSExojhNtgTFSBcdGerzNc64tz3DC
github.com/anacrolix/tagflag v0.0.0-20180109131632-2146c8d41bf0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw=
github.com/anacrolix/tagflag v1.0.0/go.mod h1:1m2U/K6ZT+JZG0+bdMK6qauP49QT4wE5pmhJXOKKCHw=
github.com/anacrolix/tagflag v1.1.0/go.mod h1:Scxs9CV10NQatSmbyjqmqmeQNwGzlNe0CMUMIxqHIG8=
github.com/anacrolix/torrent v1.56.1 h1:QeJMOP0NuhpQ5dATsOqEL0vUO85aPMNMGP2FACNt0Eg=
github.com/anacrolix/torrent v1.56.1/go.mod h1:5DMHbeIM1TuC5wTQ99XieKKLiYZYz6iB2lyZpKZEr6w=
github.com/anacrolix/torrent v1.56.2-0.20240813010934-f4711825e84e h1:gfu86Ozd6rvq4mwSgy1s6SRlS8UeeCORKoqnXvlXtY0=
github.com/anacrolix/torrent v1.56.2-0.20240813010934-f4711825e84e/go.mod h1:m6Jl1mdUG3wcapLuvn8ZwENi49DUCmiacom6plQ5rcI=
github.com/anacrolix/upnp v0.1.4 h1:+2t2KA6QOhm/49zeNyeVwDu1ZYS9dB9wfxyVvh/wk7U=
github.com/anacrolix/upnp v0.1.4/go.mod h1:Qyhbqo69gwNWvEk1xNTXsS5j7hMHef9hdr984+9fIic=
github.com/anacrolix/utp v0.2.0 h1:65Cdmr6q9WSw2KsM+rtJFu7rqDzLl2bdysf4KlNPcFI=
@ -119,6 +119,7 @@ github.com/benbjohnson/immutable v0.4.3 h1:GYHcksoJ9K6HyAUpGxwZURrbTkXA0Dh4otXGq
github.com/benbjohnson/immutable v0.4.3/go.mod h1:qJIKKSmdqz1tVzNtst1DZzvaqOU1onk1rc03IeM3Owk=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/billziss-gh/cgofuse v1.5.0 h1:kH516I/s+Ab4diL/Y/ayFeUjjA8ey+JK12xDfBf4HEs=
github.com/billziss-gh/cgofuse v1.5.0/go.mod h1:LJjoaUojlVjgo5GQoEJTcJNqZJeRU0nCR84CyxKt2YM=
@ -135,10 +136,11 @@ github.com/bradfitz/iter v0.0.0-20140124041915-454541ec3da2/go.mod h1:PyRFw1Lt2w
github.com/bradfitz/iter v0.0.0-20190303215204-33e6a9893b0c/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo=
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 h1:GKTyiRCL6zVf5wWaqKnf+7Qs6GbEPfd4iMOitWzXJx8=
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8/go.mod h1:spo1JLcs67NmW1aVLEgtA8Yy1elc+X8y5SRW1sFW4Og=
github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
github.com/bytedance/sonic v1.12.1 h1:jWl5Qz1fy7X1ioY74WqO0KjAMtAGQs4sYnjiEBiyX24=
github.com/bytedance/sonic v1.12.1/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM=
github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
@ -225,8 +227,8 @@ github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
@ -293,10 +295,15 @@ github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMyw
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-github/v63 v63.0.0 h1:13xwK/wk9alSokujB9lJkuzdmQuVn2QCPeck76wR3nE=
github.com/google/go-github/v63 v63.0.0/go.mod h1:IqbcrgUmIcEaioWrGYei/09o+ge5vhffGOcxrO0AfmA=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
@ -316,14 +323,16 @@ github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99 h1:twflg0XRTjwKp
github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/schema v1.4.1 h1:jUg5hUjCSDZpNGLuXQOgIWGdlgrIdYvgQ0wZtdK1M3E=
github.com/gorilla/schema v1.4.1/go.mod h1:Dg5SSm5PV60mhF2NFaTV1xuYYj8tV8NOPRo4FggUMnM=
github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
github.com/grafana/otel-profiling-go v0.5.1 h1:stVPKAFZSa7eGiqbYuG25VcqYksR6iWvF3YH66t4qL8=
github.com/grafana/otel-profiling-go v0.5.1/go.mod h1:ftN/t5A/4gQI19/8MoWurBEtC6gFw8Dns1sJZ9W4Tls=
github.com/grafana/pyroscope-go v1.1.1 h1:PQoUU9oWtO3ve/fgIiklYuGilvsm8qaGhlY4Vw6MAcQ=
github.com/grafana/pyroscope-go v1.1.1/go.mod h1:Mw26jU7jsL/KStNSGGuuVYdUq7Qghem5P8aXYXSXG88=
github.com/grafana/pyroscope-go/godeltaprof v0.1.7 h1:C11j63y7gymiW8VugJ9ZW0pWfxTZugdSJyC48olk5KY=
github.com/grafana/pyroscope-go/godeltaprof v0.1.7/go.mod h1:Tk376Nbldo4Cha9RgiU7ik8WKFkNpfds98aUzS8omLE=
github.com/grafana/pyroscope-go v1.1.2 h1:7vCfdORYQMCxIzI3NlYAs3FcBP760+gWuYWOyiVyYx8=
github.com/grafana/pyroscope-go v1.1.2/go.mod h1:HSSmHo2KRn6FasBA4vK7BMiQqyQq8KSuBKvrhkXxYPU=
github.com/grafana/pyroscope-go/godeltaprof v0.1.8 h1:iwOtYXeeVSAeYefJNaxDytgjKtUuKQbJqgAIjlnicKg=
github.com/grafana/pyroscope-go/godeltaprof v0.1.8/go.mod h1:2+l7K7twW49Ct4wFluZD3tZ6e0SjanjcUUBPVD/UuGU=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1YCS1PXdKYWi8FsN0=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@ -345,8 +354,6 @@ github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq
github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU=
github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/iceber/iouring-go v0.0.0-20230403020409-002cfd2e2a90 h1:xrtfZokN++5kencK33hn2Kx3Uj8tGnjMEhdt6FMvHD0=
github.com/iceber/iouring-go v0.0.0-20230403020409-002cfd2e2a90/go.mod h1:LEzdaZarZ5aqROlLIwJ4P7h3+4o71008fSy6wpaEB+s=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
@ -359,13 +366,12 @@ github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfV
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU=
github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM=
github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/knadh/koanf/maps v0.1.1 h1:G5TjmUh2D7G2YWf5SQQqSiHRJEjaicvU0KpypqB3NIs=
github.com/knadh/koanf/maps v0.1.1/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI=
github.com/knadh/koanf/parsers/yaml v0.1.0 h1:ZZ8/iGfRLvKSaMEECEBPM1HQslrZADk8fP1XFUxVI5w=
@ -431,6 +437,8 @@ github.com/multiformats/go-multihash v0.2.3 h1:7Lyc8XfX/IY2jWb/gI7JP+o7JEq9hOa7B
github.com/multiformats/go-multihash v0.2.3/go.mod h1:dXgKXCXjBzdscBLk9JkjINiEsCKRVch90MdaGiKsvSM=
github.com/multiformats/go-varint v0.0.6 h1:gk85QWKxh3TazbLxED/NlDVv8+q+ReFJk7Y2W/KhfNY=
github.com/multiformats/go-varint v0.0.6/go.mod h1:3Ls8CIEsrijN6+B7PbrXRPxHRPuXSrVKRY101jdMZYE=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
@ -515,19 +523,27 @@ github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXP
github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_golang v1.5.1/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE=
github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
github.com/prometheus/common v0.55.0 h1:KEi6DK7lXW/m7Ig5i47x0vRzuBsHuvJdi5ee6Y3G1dc=
github.com/prometheus/common v0.55.0/go.mod h1:2SECS4xJG1kd8XF9IcM1gMX6510RAEL65zxzNImwdc8=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/prometheus/procfs v0.0.11/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
github.com/rasky/go-xdr v0.0.0-20170124162913-1a41d1a06c93 h1:UVArwN/wkKjMVhh2EQGC0tEc1+FqiLlvYXY5mQ2f8Wg=
github.com/rasky/go-xdr v0.0.0-20170124162913-1a41d1a06c93/go.mod h1:Nfe4efndBz4TibWycNE+lqyJZiMX4ycx+QKV8Ta0f/o=
github.com/ravilushqa/otelgqlgen v0.15.0 h1:U85nrlweMXTGaMChUViYM39/MXBZVeVVlpuHq+6eECQ=
@ -637,6 +653,8 @@ github.com/willscott/memphis v0.0.0-20210922141505-529d4987ab7e h1:1eHCP4w7tMmpf
github.com/willscott/memphis v0.0.0-20210922141505-529d4987ab7e/go.mod h1:59vHBW4EpjiL5oiqgCrBp1Tc9JXRzKCNMEOaGmNfSHo=
github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 h1:+qGGcbkzsfDQNPPe9UDgpxAWQrhbbBXOYJFQDq/dtJw=
github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913/go.mod h1:4aEEwZQutDLsQv2Deui4iYQ6DWTxR14g6m8Wv88+Xqk=
github.com/xuthus5/qbittorrent-client-go v0.0.0-20240710140754-51c95559ea0a h1:/3NF320wvXk5nm9Ng02eKTiWSYf20r4acufqecGLpfo=
github.com/xuthus5/qbittorrent-client-go v0.0.0-20240710140754-51c95559ea0a/go.mod h1:lP2yxMU6WGTmHqI9T+SrEw3wo7k5kUyiA9FBOK9NKMQ=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
@ -654,32 +672,33 @@ go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/contrib v1.26.0 h1:8/CmxLl5uDm37V9rdqbZcVLvYigAE2vMostBq3nGDrI=
go.opentelemetry.io/contrib v1.26.0/go.mod h1:Tmhw9grdWtmXy6DxZNpIAudzYJqLeEM2P6QTZQSRwU8=
go.opentelemetry.io/otel v1.21.0/go.mod h1:QZzNPQPm1zLX4gZK4cMi+71eaorMSGT3A4znnUvNNEo=
go.opentelemetry.io/otel v1.27.0 h1:9BZoF3yMK/O1AafMiQTVu0YDj5Ea4hPhxCs7sGva+cg=
go.opentelemetry.io/otel v1.27.0/go.mod h1:DMpAK8fzYRzs+bi3rS5REupisuqTheUlSZJ1WnZaPAQ=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.27.0 h1:CIHWikMsN3wO+wq1Tp5VGdVRTcON+DmOJSfDjXypKOc=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.27.0/go.mod h1:TNupZ6cxqyFEpLXAZW7On+mLFL0/g0TE3unIYL91xWc=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0 h1:1u/AyyOqAWzy+SkPxDpahCNZParHV8Vid1RnI2clyDE=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.26.0/go.mod h1:z46paqbJ9l7c9fIPCXTqTGwhQZ5XoTIsfeFYWboizjs=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0 h1:1wp/gyxsuYtuE/JFxsQRtcCDtMrO2qMvlfXALU5wkzI=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.26.0/go.mod h1:gbTHmghkGgqxMomVQQMur1Nba4M0MQ8AYThXDUjsJ38=
go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo=
go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.28.0 h1:aLmmtjRke7LPDQ3lvpFz+kNEH43faFhzW7v8BFIEydg=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.28.0/go.mod h1:TC1pyCt6G9Sjb4bQpShH+P5R53pO6ZuGnHuuln9xMeE=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 h1:3Q/xZUyC1BBkualc9ROb4G8qkH90LXEIICcs5zv1OYY=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0/go.mod h1:s75jGIWA9OfCMzF0xr+ZgfrB5FEbbV7UuYo32ahUiFI=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0 h1:j9+03ymgYhPKmeXGk5Zu+cIZOlVzd9Zv7QIiyItjFBU=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0/go.mod h1:Y5+XiUG4Emn1hTfciPzGPJaSI+RpDts6BnCIir0SLqk=
go.opentelemetry.io/otel/exporters/prometheus v0.50.0 h1:2Ewsda6hejmbhGFyUvWZjUThC98Cf8Zy6g0zkIimOng=
go.opentelemetry.io/otel/exporters/prometheus v0.50.0/go.mod h1:pMm5PkUo5YwbLiuEf7t2xg4wbP0/eSJrMxIMxKosynY=
go.opentelemetry.io/otel/metric v1.21.0/go.mod h1:o1p3CA8nNHW8j5yuQLdc1eeqEaPfzug24uvsyIEJRWM=
go.opentelemetry.io/otel/metric v1.27.0 h1:hvj3vdEKyeCi4YaYfNjv2NUje8FqKqUY8IlF0FxV/ik=
go.opentelemetry.io/otel/metric v1.27.0/go.mod h1:mVFgmRlhljgBiuk/MP/oKylr4hs85GZAylncepAX/ak=
go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q=
go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s=
go.opentelemetry.io/otel/sdk v1.21.0/go.mod h1:Nna6Yv7PWTdgJHVRD9hIYywQBRx7pbox6nwBnZIxl/E=
go.opentelemetry.io/otel/sdk v1.27.0 h1:mlk+/Y1gLPLn84U4tI8d3GNJmGT/eXe3ZuOXN9kTWmI=
go.opentelemetry.io/otel/sdk v1.27.0/go.mod h1:Ha9vbLwJE6W86YstIywK2xFfPjbWlCuwPtMkKdz/Y4A=
go.opentelemetry.io/otel/sdk/metric v1.27.0 h1:5uGNOlpXi+Hbo/DRoI31BSb1v+OGcpv2NemcCrOL8gI=
go.opentelemetry.io/otel/sdk/metric v1.27.0/go.mod h1:we7jJVrYN2kh3mVBlswtPU22K0SA+769l93J6bsyvqw=
go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE=
go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg=
go.opentelemetry.io/otel/sdk/metric v1.28.0 h1:OkuaKgKrgAbYrrY0t92c+cC+2F6hsFNnCQArXCKlg08=
go.opentelemetry.io/otel/sdk/metric v1.28.0/go.mod h1:cWPjykihLAPvXKi4iZc1dpER3Jdq2Z0YLse3moQUCpg=
go.opentelemetry.io/otel/trace v1.21.0/go.mod h1:LGbsEB0f9LGjN+OZaQQ26sohbOmiMR+BaslueVtS/qQ=
go.opentelemetry.io/otel/trace v1.27.0 h1:IqYb813p7cmbHk0a5y6pD5JPakbVfftRXABGt5/Rscw=
go.opentelemetry.io/otel/trace v1.27.0/go.mod h1:6RiD1hkAprV4/q+yd2ln1HG9GoPx39SuvvstaLBl+l4=
go.opentelemetry.io/proto/otlp v1.2.0 h1:pVeZGk7nXDC9O2hncA6nHldxEjm6LByfA2aN8IOkz94=
go.opentelemetry.io/proto/otlp v1.2.0/go.mod h1:gGpR8txAl5M03pDhMC79G6SdqNV26naRm/KDsgaHD8A=
go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g=
go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI=
go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0=
go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go4.org v0.0.0-20230225012048-214862532bf5 h1:nifaUDeh+rPaBCMPMQHZmvJf+QdpLFnuQPwx+LxVmtc=
go4.org v0.0.0-20230225012048-214862532bf5/go.mod h1:F57wTi5Lrj6WLyswp5EYV1ncrEbFGHD4hhz6S1ZYeaU=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
@ -804,7 +823,6 @@ golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200923182605-d9f96fdee20d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -826,8 +844,8 @@ golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@ -921,10 +939,10 @@ google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvx
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/genproto/googleapis/api v0.0.0-20240520151616-dc85e6b867a5 h1:P8OJ/WCl/Xo4E4zoe4/bifHpSmmKwARqyqE4nW6J2GQ=
google.golang.org/genproto/googleapis/api v0.0.0-20240520151616-dc85e6b867a5/go.mod h1:RGnPtTG7r4i8sPlNyDeikXF99hMM+hN6QMm4ooG9g2g=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240515191416-fc5f0ca64291 h1:AgADTJarZTBqgjiUzRgfaBchgYB3/WFTC80GPwsMcRI=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240515191416-fc5f0ca64291/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0=
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094 h1:0+ozOGcrp+Y8Aq8TLNN2Aliibms5LEzsq99ZZmAGYm0=
google.golang.org/genproto/googleapis/api v0.0.0-20240701130421-f6361c86f094/go.mod h1:fJ/e3If/Q67Mj99hin0hMhiNyCRmt6BQ2aWIJshUSJw=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094 h1:BwIjyKYGsK9dMCBOorzRri8MQwmi7mT9rGHsCEinZkA=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
@ -946,8 +964,8 @@ google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@ -1001,7 +1019,6 @@ modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
zombiezen.com/go/sqlite v1.3.0 h1:98g1gnCm+CNz6AuQHu0gqyw7gR2WU3O3PJufDOStpUs=

View file

@ -1,355 +1,355 @@
package ctxbilly
import (
"context"
"errors"
"fmt"
"os"
"path/filepath"
"strings"
// import (
// "context"
// "errors"
// "fmt"
// "os"
// "path/filepath"
// "strings"
securejoin "github.com/cyphar/filepath-securejoin"
"github.com/iceber/iouring-go"
)
// securejoin "github.com/cyphar/filepath-securejoin"
// "github.com/iceber/iouring-go"
// )
func NewURingFS() (*UringFS, error) {
ur, err := iouring.New(64, iouring.WithAsync())
if err != nil {
return nil, err
}
return &UringFS{
ur: ur,
}, nil
}
var _ Filesystem = (*UringFS)(nil)
const (
defaultDirectoryMode = 0o755
defaultCreateMode = 0o666
)
// UringFS is a fs implementation based on the OS filesystem which is bound to
// a base dir.
// Prefer this fs implementation over ChrootOS.
//
// Behaviours of note:
// 1. Read and write operations can only be directed to files which descends
// from the base dir.
// 2. Symlinks don't have their targets modified, and therefore can point
// to locations outside the base dir or to non-existent paths.
// 3. Readlink and Lstat ensures that the link file is located within the base
// dir, evaluating any symlinks that file or base dir may contain.
type UringFS struct {
ur *iouring.IOURing
baseDir string
}
func newBoundOS(d string) *UringFS {
return &UringFS{baseDir: d}
}
func (fs *UringFS) Create(ctx context.Context, filename string) (File, error) {
return fs.OpenFile(ctx, filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, defaultCreateMode)
}
func (fs *UringFS) OpenFile(ctx context.Context, filename string, flag int, perm os.FileMode) (File, error) {
fn, err := fs.abs(filename)
if err != nil {
return nil, err
}
f, err := os.OpenFile(fn, flag, perm)
if err != nil {
return nil, err
}
return newFile(fs.ur, f)
}
func (fs *UringFS) ReadDir(ctx context.Context, path string) ([]os.FileInfo, error) {
dir, err := fs.abs(path)
if err != nil {
return nil, err
}
entries, err := os.ReadDir(dir)
if err != nil {
return nil, err
}
infos := make([]os.FileInfo, 0, len(entries))
for _, v := range entries {
info, err := v.Info()
if err != nil {
return nil, err
}
infos = append(infos, info)
}
return infos, nil
}
func (fs *UringFS) Rename(ctx context.Context, from, to string) error {
f, err := fs.abs(from)
if err != nil {
return err
}
t, err := fs.abs(to)
if err != nil {
return err
}
// MkdirAll for target name.
if err := fs.createDir(t); err != nil {
return err
}
return os.Rename(f, t)
}
func (fs *UringFS) MkdirAll(ctx context.Context, path string, perm os.FileMode) error {
dir, err := fs.abs(path)
if err != nil {
return err
}
return os.MkdirAll(dir, perm)
}
func (fs *UringFS) Stat(ctx context.Context, filename string) (os.FileInfo, error) {
filename, err := fs.abs(filename)
if err != nil {
return nil, err
}
return os.Stat(filename)
}
func (fs *UringFS) Remove(ctx context.Context, filename string) error {
fn, err := fs.abs(filename)
if err != nil {
return err
}
return os.Remove(fn)
}
func (fs *UringFS) Join(elem ...string) string {
return filepath.Join(elem...)
}
func (fs *UringFS) RemoveAll(path string) error {
dir, err := fs.abs(path)
if err != nil {
return err
}
return os.RemoveAll(dir)
}
func (fs *UringFS) Symlink(ctx context.Context, target, link string) error {
ln, err := fs.abs(link)
if err != nil {
return err
}
// MkdirAll for containing dir.
if err := fs.createDir(ln); err != nil {
return err
}
return os.Symlink(target, ln)
}
func (fs *UringFS) Lstat(ctx context.Context, filename string) (os.FileInfo, error) {
filename = filepath.Clean(filename)
if !filepath.IsAbs(filename) {
filename = filepath.Join(fs.baseDir, filename)
}
if ok, err := fs.insideBaseDirEval(filename); !ok {
return nil, err
}
return os.Lstat(filename)
}
func (fs *UringFS) Readlink(ctx context.Context, link string) (string, error) {
if !filepath.IsAbs(link) {
link = filepath.Clean(filepath.Join(fs.baseDir, link))
}
if ok, err := fs.insideBaseDirEval(link); !ok {
return "", err
}
return os.Readlink(link)
}
// Chroot returns a new OS filesystem, with the base dir set to the
// result of joining the provided path with the underlying base dir.
// func (fs *UringFS) Chroot(path string) (Filesystem, error) {
// joined, err := securejoin.SecureJoin(fs.baseDir, path)
// func NewURingFS() (*UringFS, error) {
// ur, err := iouring.New(64, iouring.WithAsync())
// if err != nil {
// return nil, err
// }
// return newBoundOS(joined), nil
// return &UringFS{
// ur: ur,
// }, nil
// }
// Root returns the current base dir of the billy.Filesystem.
// This is required in order for this implementation to be a drop-in
// replacement for other upstream implementations (e.g. memory and osfs).
func (fs *UringFS) Root() string {
return fs.baseDir
}
// var _ Filesystem = (*UringFS)(nil)
func (fs *UringFS) createDir(fullpath string) error {
dir := filepath.Dir(fullpath)
if dir != "." {
if err := os.MkdirAll(dir, defaultDirectoryMode); err != nil {
return err
}
}
// const (
// defaultDirectoryMode = 0o755
// defaultCreateMode = 0o666
// )
return nil
}
// // UringFS is a fs implementation based on the OS filesystem which is bound to
// // a base dir.
// // Prefer this fs implementation over ChrootOS.
// //
// // Behaviours of note:
// // 1. Read and write operations can only be directed to files which descends
// // from the base dir.
// // 2. Symlinks don't have their targets modified, and therefore can point
// // to locations outside the base dir or to non-existent paths.
// // 3. Readlink and Lstat ensures that the link file is located within the base
// // dir, evaluating any symlinks that file or base dir may contain.
// type UringFS struct {
// ur *iouring.IOURing
// baseDir string
// }
// abs transforms filename to an absolute path, taking into account the base dir.
// Relative paths won't be allowed to ascend the base dir, so `../file` will become
// `/working-dir/file`.
//
// Note that if filename is a symlink, the returned address will be the target of the
// symlink.
func (fs *UringFS) abs(filename string) (string, error) {
if filename == fs.baseDir {
filename = string(filepath.Separator)
}
// func newBoundOS(d string) *UringFS {
// return &UringFS{baseDir: d}
// }
path, err := securejoin.SecureJoin(fs.baseDir, filename)
if err != nil {
return "", nil
}
// func (fs *UringFS) Create(ctx context.Context, filename string) (File, error) {
// return fs.OpenFile(ctx, filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, defaultCreateMode)
// }
return path, nil
}
// func (fs *UringFS) OpenFile(ctx context.Context, filename string, flag int, perm os.FileMode) (File, error) {
// fn, err := fs.abs(filename)
// if err != nil {
// return nil, err
// }
// insideBaseDirEval checks whether filename is contained within
// a dir that is within the fs.baseDir, by first evaluating any symlinks
// that either filename or fs.baseDir may contain.
func (fs *UringFS) insideBaseDirEval(filename string) (bool, error) {
dir, err := filepath.EvalSymlinks(filepath.Dir(filename))
if dir == "" || os.IsNotExist(err) {
dir = filepath.Dir(filename)
}
wd, err := filepath.EvalSymlinks(fs.baseDir)
if wd == "" || os.IsNotExist(err) {
wd = fs.baseDir
}
if filename != wd && dir != wd && !strings.HasPrefix(dir, wd+string(filepath.Separator)) {
return false, fmt.Errorf("path outside base dir")
}
return true, nil
}
// f, err := os.OpenFile(fn, flag, perm)
// if err != nil {
// return nil, err
// }
func newFile(fsur *iouring.IOURing, f *os.File) (*URingFile, error) {
ur, err := iouring.New(64, iouring.WithAttachWQ(fsur))
if err != nil {
return nil, err
}
// return newFile(fs.ur, f)
// }
return &URingFile{
ur: ur,
f: f,
}, nil
}
// func (fs *UringFS) ReadDir(ctx context.Context, path string) ([]os.FileInfo, error) {
// dir, err := fs.abs(path)
// if err != nil {
// return nil, err
// }
type URingFile struct {
ur *iouring.IOURing
f *os.File
}
// entries, err := os.ReadDir(dir)
// if err != nil {
// return nil, err
// }
// infos := make([]os.FileInfo, 0, len(entries))
// for _, v := range entries {
// info, err := v.Info()
// if err != nil {
// return nil, err
// }
// Close implements File.
func (o *URingFile) Close(ctx context.Context) error {
return errors.Join(o.ur.UnregisterFile(o.f), o.Close(ctx))
}
// infos = append(infos, info)
// }
// Name implements File.
func (o *URingFile) Name() string {
return o.f.Name()
}
// return infos, nil
// }
// Read implements File.
func (o *URingFile) Read(ctx context.Context, p []byte) (n int, err error) {
req, err := o.ur.Read(o.f, p, nil)
if err != nil {
return 0, err
}
defer req.Cancel()
// func (fs *UringFS) Rename(ctx context.Context, from, to string) error {
// f, err := fs.abs(from)
// if err != nil {
// return err
// }
// t, err := fs.abs(to)
// if err != nil {
// return err
// }
select {
case <-req.Done():
return req.GetRes()
case <-ctx.Done():
req.Cancel()
<-req.Done()
return req.GetRes()
}
}
// // MkdirAll for target name.
// if err := fs.createDir(t); err != nil {
// return err
// }
// ReadAt implements File.
func (o *URingFile) ReadAt(ctx context.Context, p []byte, off int64) (n int, err error) {
req, err := o.ur.Pread(o.f, p, uint64(off), nil)
if err != nil {
return 0, err
}
defer req.Cancel()
// return os.Rename(f, t)
// }
select {
case <-req.Done():
return req.GetRes()
case <-ctx.Done():
req.Cancel()
<-req.Done()
return req.GetRes()
}
}
// func (fs *UringFS) MkdirAll(ctx context.Context, path string, perm os.FileMode) error {
// dir, err := fs.abs(path)
// if err != nil {
// return err
// }
// return os.MkdirAll(dir, perm)
// }
// Write implements File.
func (o *URingFile) Write(ctx context.Context, p []byte) (n int, err error) {
req, err := o.ur.Write(o.f, p, nil)
if err != nil {
return 0, err
}
defer req.Cancel()
// func (fs *UringFS) Stat(ctx context.Context, filename string) (os.FileInfo, error) {
// filename, err := fs.abs(filename)
// if err != nil {
// return nil, err
// }
// return os.Stat(filename)
// }
select {
case <-req.Done():
return req.GetRes()
case <-ctx.Done():
req.Cancel()
<-req.Done()
return req.GetRes()
}
}
// func (fs *UringFS) Remove(ctx context.Context, filename string) error {
// fn, err := fs.abs(filename)
// if err != nil {
// return err
// }
// return os.Remove(fn)
// }
// WriteAt implements File.
func (o *URingFile) WriteAt(ctx context.Context, p []byte, off int64) (n int, err error) {
req, err := o.ur.Pwrite(o.f, p, uint64(off), nil)
if err != nil {
return 0, err
}
defer req.Cancel()
// func (fs *UringFS) Join(elem ...string) string {
// return filepath.Join(elem...)
// }
select {
case <-req.Done():
return req.GetRes()
case <-ctx.Done():
req.Cancel()
<-req.Done()
return req.GetRes()
}
}
// func (fs *UringFS) RemoveAll(path string) error {
// dir, err := fs.abs(path)
// if err != nil {
// return err
// }
// return os.RemoveAll(dir)
// }
// Seek implements File.
func (o *URingFile) Seek(offset int64, whence int) (int64, error) {
return o.f.Seek(offset, whence)
}
// func (fs *UringFS) Symlink(ctx context.Context, target, link string) error {
// ln, err := fs.abs(link)
// if err != nil {
// return err
// }
// // MkdirAll for containing dir.
// if err := fs.createDir(ln); err != nil {
// return err
// }
// return os.Symlink(target, ln)
// }
// Truncate implements File.
func (o *URingFile) Truncate(ctx context.Context, size int64) error {
return o.f.Truncate(size)
}
// func (fs *UringFS) Lstat(ctx context.Context, filename string) (os.FileInfo, error) {
// filename = filepath.Clean(filename)
// if !filepath.IsAbs(filename) {
// filename = filepath.Join(fs.baseDir, filename)
// }
// if ok, err := fs.insideBaseDirEval(filename); !ok {
// return nil, err
// }
// return os.Lstat(filename)
// }
var _ File = (*URingFile)(nil)
// func (fs *UringFS) Readlink(ctx context.Context, link string) (string, error) {
// if !filepath.IsAbs(link) {
// link = filepath.Clean(filepath.Join(fs.baseDir, link))
// }
// if ok, err := fs.insideBaseDirEval(link); !ok {
// return "", err
// }
// return os.Readlink(link)
// }
// // Chroot returns a new OS filesystem, with the base dir set to the
// // result of joining the provided path with the underlying base dir.
// // func (fs *UringFS) Chroot(path string) (Filesystem, error) {
// // joined, err := securejoin.SecureJoin(fs.baseDir, path)
// // if err != nil {
// // return nil, err
// // }
// // return newBoundOS(joined), nil
// // }
// // Root returns the current base dir of the billy.Filesystem.
// // This is required in order for this implementation to be a drop-in
// // replacement for other upstream implementations (e.g. memory and osfs).
// func (fs *UringFS) Root() string {
// return fs.baseDir
// }
// func (fs *UringFS) createDir(fullpath string) error {
// dir := filepath.Dir(fullpath)
// if dir != "." {
// if err := os.MkdirAll(dir, defaultDirectoryMode); err != nil {
// return err
// }
// }
// return nil
// }
// // abs transforms filename to an absolute path, taking into account the base dir.
// // Relative paths won't be allowed to ascend the base dir, so `../file` will become
// // `/working-dir/file`.
// //
// // Note that if filename is a symlink, the returned address will be the target of the
// // symlink.
// func (fs *UringFS) abs(filename string) (string, error) {
// if filename == fs.baseDir {
// filename = string(filepath.Separator)
// }
// path, err := securejoin.SecureJoin(fs.baseDir, filename)
// if err != nil {
// return "", nil
// }
// return path, nil
// }
// // insideBaseDirEval checks whether filename is contained within
// // a dir that is within the fs.baseDir, by first evaluating any symlinks
// // that either filename or fs.baseDir may contain.
// func (fs *UringFS) insideBaseDirEval(filename string) (bool, error) {
// dir, err := filepath.EvalSymlinks(filepath.Dir(filename))
// if dir == "" || os.IsNotExist(err) {
// dir = filepath.Dir(filename)
// }
// wd, err := filepath.EvalSymlinks(fs.baseDir)
// if wd == "" || os.IsNotExist(err) {
// wd = fs.baseDir
// }
// if filename != wd && dir != wd && !strings.HasPrefix(dir, wd+string(filepath.Separator)) {
// return false, fmt.Errorf("path outside base dir")
// }
// return true, nil
// }
// func newFile(fsur *iouring.IOURing, f *os.File) (*URingFile, error) {
// ur, err := iouring.New(64, iouring.WithAttachWQ(fsur))
// if err != nil {
// return nil, err
// }
// return &URingFile{
// ur: ur,
// f: f,
// }, nil
// }
// type URingFile struct {
// ur *iouring.IOURing
// f *os.File
// }
// // Close implements File.
// func (o *URingFile) Close(ctx context.Context) error {
// return errors.Join(o.ur.UnregisterFile(o.f), o.Close(ctx))
// }
// // Name implements File.
// func (o *URingFile) Name() string {
// return o.f.Name()
// }
// // Read implements File.
// func (o *URingFile) Read(ctx context.Context, p []byte) (n int, err error) {
// req, err := o.ur.Read(o.f, p, nil)
// if err != nil {
// return 0, err
// }
// defer req.Cancel()
// select {
// case <-req.Done():
// return req.GetRes()
// case <-ctx.Done():
// req.Cancel()
// <-req.Done()
// return req.GetRes()
// }
// }
// // ReadAt implements File.
// func (o *URingFile) ReadAt(ctx context.Context, p []byte, off int64) (n int, err error) {
// req, err := o.ur.Pread(o.f, p, uint64(off), nil)
// if err != nil {
// return 0, err
// }
// defer req.Cancel()
// select {
// case <-req.Done():
// return req.GetRes()
// case <-ctx.Done():
// req.Cancel()
// <-req.Done()
// return req.GetRes()
// }
// }
// // Write implements File.
// func (o *URingFile) Write(ctx context.Context, p []byte) (n int, err error) {
// req, err := o.ur.Write(o.f, p, nil)
// if err != nil {
// return 0, err
// }
// defer req.Cancel()
// select {
// case <-req.Done():
// return req.GetRes()
// case <-ctx.Done():
// req.Cancel()
// <-req.Done()
// return req.GetRes()
// }
// }
// // WriteAt implements File.
// func (o *URingFile) WriteAt(ctx context.Context, p []byte, off int64) (n int, err error) {
// req, err := o.ur.Pwrite(o.f, p, uint64(off), nil)
// if err != nil {
// return 0, err
// }
// defer req.Cancel()
// select {
// case <-req.Done():
// return req.GetRes()
// case <-ctx.Done():
// req.Cancel()
// <-req.Done()
// return req.GetRes()
// }
// }
// // Seek implements File.
// func (o *URingFile) Seek(offset int64, whence int) (int64, error) {
// return o.f.Seek(offset, whence)
// }
// // Truncate implements File.
// func (o *URingFile) Truncate(ctx context.Context, size int64) error {
// return o.f.Truncate(size)
// }
// var _ File = (*URingFile)(nil)

View file

@ -9,13 +9,13 @@ import (
"git.kmsign.ru/royalcat/tstor/src/config"
"git.kmsign.ru/royalcat/tstor/src/sources/torrent"
"git.kmsign.ru/royalcat/tstor/src/vfs"
"github.com/anacrolix/missinggo/v2/filecache"
echopprof "github.com/labstack/echo-contrib/pprof"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
"github.com/prometheus/client_golang/prometheus/promhttp"
)
func New(fc *filecache.Cache, s *torrent.Daemon, vfs vfs.Filesystem, logPath string, cfg *config.Settings) error {
func Run(s *torrent.Daemon, vfs vfs.Filesystem, logPath string, cfg *config.Settings) error {
log := slog.With()
r := echo.New()
@ -29,12 +29,11 @@ func New(fc *filecache.Cache, s *torrent.Daemon, vfs vfs.Filesystem, logPath str
echopprof.Register(r)
r.Any("/graphql", echo.WrapHandler((GraphQLHandler(s, vfs))))
r.GET("/metrics", echo.WrapHandler(promhttp.Handler()))
log.Info("starting webserver", "host", fmt.Sprintf("%s:%d", cfg.WebUi.IP, cfg.WebUi.Port))
go r.Start((fmt.Sprintf("%s:%d", cfg.WebUi.IP, cfg.WebUi.Port)))
return nil
return r.Start((fmt.Sprintf("%s:%d", cfg.WebUi.IP, cfg.WebUi.Port)))
}
func Logger() echo.MiddlewareFunc {

View file

@ -0,0 +1,85 @@
package qbittorrent
import (
"context"
"fmt"
"slices"
"time"
"github.com/xuthus5/qbittorrent-client-go/qbittorrent"
)
// client is a thin helper wrapper around the qBittorrent Web API client,
// adding torrent-content and piece-state lookups used by the torrent FS.
type client struct {
	qb qbittorrent.Client
}

// wrapClient wraps an existing qBittorrent API client in the local helper type.
func wrapClient(qb qbittorrent.Client) *client {
	return &client{qb: qb}
}
// getFileContent returns the metadata of a single file inside the torrent
// identified by hash, looked up by its content index as reported by
// qBittorrent. It returns an error when the index is not present.
func (f *client) getFileContent(ctx context.Context, hash string, contentIndex int) (*qbittorrent.TorrentContent, error) {
	contents, err := f.qb.Torrent().GetContents(hash)
	if err != nil {
		return nil, fmt.Errorf("getting contents of torrent %s: %w", hash, err)
	}

	// The parameter was previously (mis)named contextIndex; it is the
	// qBittorrent content index of the file within the torrent.
	i := slices.IndexFunc(contents, func(c *qbittorrent.TorrentContent) bool {
		return c.Index == contentIndex
	})
	if i == -1 {
		return nil, fmt.Errorf("content with index %d not found in torrent %s", contentIndex, hash)
	}

	return contents[i], nil
}
// isPieceComplete reports whether piece pieceIndex of the torrent identified
// by hash has been fully downloaded. It returns an error when the piece
// index is outside the torrent's piece range (previously this indexed the
// slice unchecked and could panic).
func (f *client) isPieceComplete(ctx context.Context, hash string, pieceIndex int) (bool, error) {
	completion, err := f.qb.Torrent().GetPiecesStates(hash)
	if err != nil {
		return false, err
	}
	if pieceIndex < 0 || pieceIndex >= len(completion) {
		return false, fmt.Errorf("piece index %d out of range (torrent %s has %d pieces)", pieceIndex, hash, len(completion))
	}

	// qBittorrent piece state 2 means "already downloaded".
	return completion[pieceIndex] == 2, nil
}
// waitPieceToComplete blocks until piece pieceIndex of the torrent identified
// by hash is fully downloaded, polling qBittorrent once per checkingInterval.
// It returns context.DeadlineExceeded early when the context deadline is
// closer than one polling interval (the next poll could not finish in time
// anyway), and ctx.Err() when the context is cancelled while waiting.
func (f *client) waitPieceToComplete(ctx context.Context, hash string, pieceIndex int) error {
	const checkingInterval = 1 * time.Second

	ticker := time.NewTicker(checkingInterval)
	defer ticker.Stop()

	// The first check happens immediately; the ticker only paces retries.
	for {
		ok, err := f.isPieceComplete(ctx, hash, pieceIndex)
		if err != nil {
			return err
		}
		if ok {
			return nil
		}

		if deadline, ok := ctx.Deadline(); ok && time.Until(deadline) < checkingInterval {
			return context.DeadlineExceeded
		}

		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-ticker.C:
		}
	}
}

View file

@ -0,0 +1,101 @@
package qbittorrent
import (
"bytes"
"context"
"path"
"git.kmsign.ru/royalcat/tstor/src/vfs"
"github.com/anacrolix/torrent/metainfo"
"github.com/anacrolix/torrent/types/infohash"
"github.com/royalcat/ctxio"
"github.com/xuthus5/qbittorrent-client-go/qbittorrent"
)
// Daemon manages torrents through an external qBittorrent instance and
// exposes their downloaded payload as virtual filesystems.
type Daemon struct {
	qb      qbittorrent.Client // raw qBittorrent Web API client
	client  *client            // helper wrapper around qb
	dataDir string             // base directory all torrent payloads are stored under
}
// NewDaemon connects to the qBittorrent Web API and returns a Daemon that
// stores torrent payloads under dir/data.
//
// NOTE(review): the API address is hard-coded to localhost:8080 — confirm
// qBittorrent is always colocated, or make this configurable.
func NewDaemon(dir string) (*Daemon, error) {
	// Join instead of string concatenation keeps the path clean for any dir.
	dataDir := path.Join(dir, "data")
	qb, err := qbittorrent.NewClient(&qbittorrent.Config{
		Address: "localhost:8080",
	})
	if err != nil {
		return nil, err
	}

	return &Daemon{
		qb:      qb,
		dataDir: dataDir,
		client:  wrapClient(qb),
	}, nil
}
// torrentPath returns the per-torrent directory under dataDir where the
// payload of the torrent with infohash ih is stored.
func (fs *Daemon) torrentPath(ih infohash.T) string {
	return path.Join(fs.dataDir, ih.HexString())
}
// addTorrent registers the torrent described by the metainfo file f with
// qBittorrent, directing its payload into this daemon's per-torrent
// directory.
func (fs *Daemon) addTorrent(ctx context.Context, f vfs.File) error {
	file, err := ctxio.ReadAll(ctx, f)
	if err != nil {
		return err
	}
	// Parse the metainfo only to derive the infohash for the save path.
	mi, err := metainfo.Load(bytes.NewReader(file))
	if err != nil {
		return err
	}
	ih := mi.HashInfoBytes()

	return fs.qb.Torrent().AddNewTorrent(&qbittorrent.TorrentAddOption{
		Torrents: []*qbittorrent.TorrentAddFileMetadata{
			{
				Data: file,
			},
		},
		SavePath: fs.torrentPath(ih),
		// SequentialDownload: "true",
		// FirstLastPiecePrio: "true",
	})
}
// TorrentFS returns a filesystem view of the torrent described by the given
// metainfo file, registering the torrent with qBittorrent first when it is
// not already known.
func (fs *Daemon) TorrentFS(ctx context.Context, file vfs.File) (*FS, error) {
	ih, err := readInfoHash(ctx, file)
	if err != nil {
		return nil, err
	}
	hash := ih.HexString()

	known, err := fs.qb.Torrent().GetTorrents(&qbittorrent.TorrentOption{
		Hashes: []string{hash},
	})
	if err != nil {
		return nil, err
	}

	if len(known) == 0 {
		if err := fs.addTorrent(ctx, file); err != nil {
			return nil, err
		}
	}

	return newTorrentFS(fs.client, file.Name(), hash, fs.torrentPath(ih))
}
// TODO caching
// readInfoHash parses the metainfo from file and returns its infohash.
func readInfoHash(ctx context.Context, file vfs.File) (infohash.T, error) {
	var zero infohash.T
	mi, err := metainfo.Load(ctxio.IoReader(ctx, file))
	if err != nil {
		return zero, err
	}
	return mi.HashInfoBytes(), nil
}

View file

@ -0,0 +1,228 @@
package qbittorrent
import (
"context"
"io"
"io/fs"
"os"
"path"
"time"
"git.kmsign.ru/royalcat/tstor/src/vfs"
)
// FS exposes a single qBittorrent-managed torrent as a read-only virtual
// filesystem rooted at the torrent's data directory.
type FS struct {
	client  *client // qBittorrent API helper
	name    string  // display name (the source torrent file name)
	hash    string  // torrent infohash (hex)
	dataDir string  // directory the torrent payload is downloaded into
}
var _ vfs.Filesystem = (*FS)(nil)
// newTorrentFS builds a filesystem view over one qBittorrent-managed
// torrent. The error return is reserved for future validation and is
// currently always nil.
func newTorrentFS(client *client, name string, hash string, dataDir string) (*FS, error) {
	tfs := &FS{
		client:  client,
		name:    name,
		hash:    hash,
		dataDir: dataDir,
	}
	return tfs, nil
}
// Info implements vfs.Filesystem. The root is presented as a directory
// named after the source torrent file.
func (f *FS) Info() (fs.FileInfo, error) {
	return vfs.NewDirInfo(f.name), nil
}

// IsDir implements vfs.Filesystem. The root of a torrent is always a directory.
func (f *FS) IsDir() bool {
	return true
}

// Name implements vfs.Filesystem.
//
// NOTE(review): this returns the base of the data directory (the hex
// infohash), while Info and Stat report f.name (the torrent file name) —
// confirm which is intended and make them consistent.
func (f *FS) Name() string {
	return path.Base(f.dataDir)
}

// Open implements vfs.Filesystem.
//
// TODO: not implemented yet; calling it panics.
func (f *FS) Open(ctx context.Context, filename string) (vfs.File, error) {
	panic("unimplemented")
}

// ReadDir implements vfs.Filesystem.
//
// TODO: not implemented yet; calling it panics.
func (f *FS) ReadDir(ctx context.Context, path string) ([]fs.DirEntry, error) {
	panic("unimplemented")
}

// Stat implements vfs.Filesystem.
//
// NOTE(review): root directory info is returned regardless of filename;
// per-file stat is not implemented yet.
func (f *FS) Stat(ctx context.Context, filename string) (fs.FileInfo, error) {
	return vfs.NewDirInfo(f.name), nil
}

// Type implements vfs.Filesystem. The filesystem is read-only.
func (f *FS) Type() fs.FileMode {
	return vfs.ROMode
}

// Unlink implements vfs.Filesystem.
//
// TODO: not implemented yet; calling it panics.
func (f *FS) Unlink(ctx context.Context, filename string) error {
	panic("unimplemented")
}
// openFile creates a File handle for filePath inside the torrent identified
// by hash.
//
// NOTE(review): the result AND error of getFileContent are discarded, and
// the returned File has zero contentIndex, pieceSize, fileSize and dataDir —
// Read/ReadAt will divide by a zero pieceSize. This looks unfinished;
// confirm and populate the fields from the torrent content.
func openFile(ctx context.Context, client client, hash, filePath string) *File {
	client.getFileContent(ctx, hash, 0)

	return &File{
		client:   client,
		hash:     hash,
		filePath: filePath,
	}
}
// File is a read-only handle to a single file inside a qBittorrent-managed
// torrent, backed by the (partially) downloaded payload on disk.
type File struct {
	client       client
	hash         string   // torrent infohash (hex)
	dataDir      string   // directory the torrent payload lives in
	filePath     string   // path inside a torrent directory
	contentIndex int      // index of this file in the torrent's content list
	pieceSize    int      // torrent piece length in bytes
	fileSize     int64    // total size of this file in bytes
	offset       int64    // current position used by Read/Seek
	osfile       *os.File // lazily opened descriptor of the on-disk payload
}
var _ vfs.File = (*File)(nil)
// Close implements vfs.File. It releases the underlying OS file descriptor
// if one was opened; closing an unopened handle is a no-op.
func (f *File) Close(ctx context.Context) error {
	if f.osfile == nil {
		return nil
	}
	opened := f.osfile
	f.osfile = nil
	return opened.Close()
}
// Info implements vfs.File, describing this entry by base name and size.
func (f *File) Info() (fs.FileInfo, error) {
	return &fileInfo{name: path.Base(f.filePath), size: f.fileSize}, nil
}

// IsDir implements vfs.File. A torrent file entry is never a directory.
func (f *File) IsDir() bool {
	return false
}
// Seek implements vfs.File. It moves the position used by Read, interpreting
// offset relative to the start, current position, or end of the file per
// whence, and returns the new position. Unknown whence values and positions
// before the start of the file now return os.ErrInvalid (previously they
// were silently ignored or accepted).
func (f *File) Seek(offset int64, whence int) (int64, error) {
	var abs int64
	switch whence {
	case io.SeekStart:
		abs = offset
	case io.SeekCurrent:
		abs = f.offset + offset
	case io.SeekEnd:
		abs = f.fileSize + offset
	default:
		return 0, os.ErrInvalid
	}
	if abs < 0 {
		return 0, os.ErrInvalid
	}
	f.offset = abs
	return abs, nil
}
// Name implements vfs.File, returning the base name of the file inside the
// torrent.
func (f *File) Name() string {
	return path.Base(f.filePath)
}
// Read implements vfs.File. It waits until the torrent piece containing the
// current offset is downloaded, then reads from the on-disk payload and
// advances the offset by the number of bytes read.
//
// NOTE(review): the piece index math assumes the file starts at the
// torrent's piece boundary 0 (single-file torrent) and that pieceSize is
// non-zero; a read spanning several pieces only waits for the first one.
// Confirm these assumptions against how File is constructed.
func (f *File) Read(ctx context.Context, p []byte) (n int, err error) {
	pieceIndex := int(f.offset / int64(f.pieceSize))
	err = f.client.waitPieceToComplete(ctx, f.hash, pieceIndex)
	if err != nil {
		return 0, err
	}

	descriptor, err := f.descriptor()
	if err != nil {
		return 0, err
	}

	n, err = descriptor.ReadAt(p, f.offset)
	f.offset += int64(n)
	return n, err
}
// ReadAt implements vfs.File. It waits until the torrent piece containing
// off is downloaded, then reads from the on-disk payload at that offset
// without touching the handle's current position.
//
// NOTE(review): same piece-index assumptions as Read — file aligned to
// piece 0, non-zero pieceSize, only the first spanned piece is awaited.
func (f *File) ReadAt(ctx context.Context, p []byte, off int64) (n int, err error) {
	pieceIndex := int(off / int64(f.pieceSize))
	err = f.client.waitPieceToComplete(ctx, f.hash, pieceIndex)
	if err != nil {
		return 0, err
	}

	descriptor, err := f.descriptor()
	if err != nil {
		return 0, err
	}

	return descriptor.ReadAt(p, off)
}
// Size implements vfs.File, returning the file's total size in bytes.
func (f *File) Size() int64 {
	return f.fileSize
}

// Type implements vfs.File. Torrent files are exposed read-only.
func (f *File) Type() fs.FileMode {
	return vfs.ROMode
}
// descriptor returns the OS file backing this torrent file, opening it on
// first use and caching it on the handle for subsequent reads.
func (f *File) descriptor() (*os.File, error) {
	if f.osfile == nil {
		opened, err := os.Open(path.Join(f.dataDir, f.filePath))
		if err != nil {
			return nil, err
		}
		f.osfile = opened
	}
	return f.osfile, nil
}
// fileInfo is a minimal fs.FileInfo describing a file inside a torrent.
type fileInfo struct {
	name string // base name of the file
	size int64  // size in bytes
}
var _ fs.FileInfo = (*fileInfo)(nil)
// IsDir implements fs.FileInfo. Entries described here are always regular files.
func (f *fileInfo) IsDir() bool {
	return false
}

// ModTime implements fs.FileInfo. No modification time is tracked, so the
// zero time is returned.
func (f *fileInfo) ModTime() time.Time {
	return time.Time{}
}

// Mode implements fs.FileInfo. All torrent files are read-only.
func (f *fileInfo) Mode() fs.FileMode {
	return vfs.ROMode
}

// Name implements fs.FileInfo.
func (f *fileInfo) Name() string {
	return f.name
}

// Size implements fs.FileInfo, returning the file size in bytes.
func (f *fileInfo) Size() int64 {
	return f.size
}

// Sys implements fs.FileInfo. There is no underlying system value.
func (f *fileInfo) Sys() any {
	return nil
}

View file

@ -0,0 +1,139 @@
package qbittorrent
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"net/http"
"os"
"os/exec"
"path"
"runtime"
"time"
"github.com/google/go-github/v63/github"
"golang.org/x/sys/cpu"
)
// GitHub coordinates of the prebuilt static qbittorrent-nox binaries and the
// name the binary is installed under.
const (
	repoOwner = "userdocs"
	repoName  = "qbittorrent-nox-static"
	binName   = "qbittorrent-nox"
)
// runQBittorrent starts the qbittorrent-nox binary from binDir with the
// given profile directory, wiring its output to stdout/stderr, and returns
// the started process without waiting for it to exit.
func runQBittorrent(binDir string, profileDir string, stdout, stderr io.Writer) (*os.Process, error) {
	bin := path.Join(binDir, binName)
	cmd := exec.Command(bin, fmt.Sprintf("--profile=%s", profileDir))
	// Feed an affirmative answer on stdin — presumably for qbittorrent-nox's
	// first-start confirmation prompt; confirm.
	cmd.Stdin = bytes.NewReader([]byte("y\n"))
	cmd.Stdout = stdout
	cmd.Stderr = stderr
	if err := cmd.Start(); err != nil {
		return nil, err
	}
	return cmd.Process, nil
}
// downloadLatestRelease fetches the latest prebuilt qbittorrent-nox binary
// for the current architecture from GitHub and stores it at binPath,
// creating parent directories as needed.
func downloadLatestRelease(ctx context.Context, binPath string) error {
	client := github.NewClient(nil)
	rel, _, err := client.Repositories.GetLatestRelease(ctx, repoOwner, repoName)
	if err != nil {
		return fmt.Errorf("fetching latest %s/%s release: %w", repoOwner, repoName, err)
	}

	arch := ""
	switch runtime.GOARCH {
	case "amd64":
		arch = "x86_64"
	case "arm":
		arch = "armhf" // this is a safe version, go does not distinguish between armv6 and armv7
		if cpu.ARM.HasNEON {
			arch = "armv7"
		}
	case "arm64":
		arch = "aarch64"
	}
	if arch == "" {
		return errors.New("unsupported architecture")
	}

	// Release assets are named "<arch>-qbittorrent-nox". Named distinctly so
	// it no longer shadows the package-level binName constant.
	assetName := arch + "-" + binName
	var targetRelease *github.ReleaseAsset
	for _, v := range rel.Assets {
		if v.GetName() == assetName {
			targetRelease = v
			break
		}
	}
	if targetRelease == nil {
		return fmt.Errorf("target asset %s not found", assetName)
	}

	downloadUrl := targetRelease.GetBrowserDownloadURL()
	if downloadUrl == "" {
		return errors.New("download url is empty")
	}

	err = os.MkdirAll(path.Dir(binPath), 0755)
	if err != nil {
		return err
	}

	return downloadFile(binPath, downloadUrl)
}
func downloadFile(filepath string, webUrl string) error {
if stat, err := os.Stat(filepath); err == nil {
resp, err := http.Head(webUrl)
if err != nil {
return err
}
defer resp.Body.Close()
var lastModified time.Time
lastModifiedHeader := resp.Header.Get("Last-Modified")
if lastModifiedHeader != "" {
lastModified, err = time.Parse(http.TimeFormat, lastModifiedHeader)
if err != nil {
return err
}
}
if resp.ContentLength == stat.Size() && lastModified.Before(stat.ModTime()) {
return nil
}
}
// Create the file
out, err := os.Create(filepath)
if err != nil {
return err
}
defer out.Close()
// Get the data
resp, err := http.Get(webUrl)
if err != nil {
return err
}
defer resp.Body.Close()
// Check server response
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("bad status: %s", resp.Status)
}
// Writer the body to file
_, err = io.Copy(out, resp.Body)
if err != nil {
return err
}
return nil
}

View file

@ -0,0 +1,18 @@
package qbittorrent
import (
	"context"
	"path/filepath"
	"testing"

	"github.com/stretchr/testify/require"
)
// TestDownloadQBittorent downloads the latest qbittorrent-nox build twice:
// the first call fetches the binary, the second must hit downloadFile's
// freshness check and succeed without re-downloading. Requires network
// access to GitHub.
func TestDownloadQBittorent(t *testing.T) {
	ctx := context.Background()
	tempDir := t.TempDir()
	require := require.New(t)

	// downloadLatestRelease expects a destination *file* path; passing the
	// directory itself made os.Create fail with "is a directory".
	binPath := filepath.Join(tempDir, "qbittorrent-nox")

	err := downloadLatestRelease(ctx, binPath)
	require.NoError(err)

	err = downloadLatestRelease(ctx, binPath)
	require.NoError(err)
}

View file

@ -1,71 +1,110 @@
package torrent
import (
"crypto/rand"
"log/slog"
"os"
"github.com/anacrolix/dht/v2"
"git.kmsign.ru/royalcat/tstor/src/config"
dlog "git.kmsign.ru/royalcat/tstor/src/log"
"github.com/anacrolix/dht/v2/bep44"
tlog "github.com/anacrolix/log"
"github.com/anacrolix/torrent"
"github.com/anacrolix/torrent/storage"
"git.kmsign.ru/royalcat/tstor/src/config"
dlog "git.kmsign.ru/royalcat/tstor/src/log"
"github.com/anacrolix/torrent/types/infohash"
)
func newClient(st storage.ClientImpl, fis bep44.Store, cfg *config.TorrentClient, id [20]byte) (*torrent.Client, error) {
func newClientConfig(st storage.ClientImpl, fis bep44.Store, cfg *config.TorrentClient, id [20]byte) *torrent.ClientConfig {
l := slog.With("component", "torrent-client")
// TODO download and upload limits
torrentCfg := torrent.NewDefaultClientConfig()
torrentCfg.PeerID = string(id[:])
torrentCfg.DefaultStorage = st
torrentCfg.AlwaysWantConns = true
// torrentCfg.AlwaysWantConns = true
torrentCfg.DropMutuallyCompletePeers = true
torrentCfg.TorrentPeersLowWater = 100
torrentCfg.TorrentPeersHighWater = 1000
torrentCfg.AcceptPeerConnections = true
// torrentCfg.TorrentPeersLowWater = 100
// torrentCfg.TorrentPeersHighWater = 1000
// torrentCfg.AcceptPeerConnections = true
torrentCfg.Seed = true
torrentCfg.DisableAggressiveUpload = false
tl := tlog.NewLogger()
torrentCfg.PeriodicallyAnnounceTorrentsToDht = true
// torrentCfg.ConfigureAnacrolixDhtServer = func(cfg *dht.ServerConfig) {
// cfg.Store = fis
// cfg.Exp = dhtTTL
// cfg.PeerStore = fis
// }
tl := tlog.NewLogger("torrent-client")
tl.SetHandlers(&dlog.Torrent{L: l})
torrentCfg.Logger = tl
torrentCfg.Callbacks.NewPeer = append(torrentCfg.Callbacks.NewPeer, func(p *torrent.Peer) {
l := l.With("ip", p.RemoteAddr.String())
if p.Torrent() != nil {
l = l.With("torrent", p.Torrent().Name())
}
l.Debug("new peer")
l.With(peerAttrs(p)...).Debug("new peer")
})
torrentCfg.Callbacks.PeerClosed = append(torrentCfg.Callbacks.PeerClosed, func(p *torrent.Peer) {
l := l.With("ip", p.RemoteAddr.String())
if p.Torrent() != nil {
l = l.With("torrent", p.Torrent().Name())
}
l.Debug("peer closed")
l.With(peerAttrs(p)...).Debug("peer closed")
})
torrentCfg.Callbacks.CompletedHandshake = func(pc *torrent.PeerConn, ih infohash.T) {
attrs := append(peerAttrs(&pc.Peer), slog.String("infohash", ih.HexString()))
l.With(attrs...).Debug("completed handshake")
}
torrentCfg.Callbacks.PeerConnAdded = append(torrentCfg.Callbacks.PeerConnAdded, func(pc *torrent.PeerConn) {
l.With(peerAttrs(&pc.Peer)...).Debug("peer conn added")
})
torrentCfg.Callbacks.PeerConnClosed = func(pc *torrent.PeerConn) {
l.With(peerAttrs(&pc.Peer)...).Debug("peer conn closed")
}
torrentCfg.Callbacks.CompletedHandshake = func(pc *torrent.PeerConn, ih infohash.T) {
attrs := append(peerAttrs(&pc.Peer), slog.String("infohash", ih.HexString()))
l.With(attrs...).Debug("completed handshake")
}
torrentCfg.Callbacks.ReceivedRequested = append(torrentCfg.Callbacks.ReceivedRequested, func(pme torrent.PeerMessageEvent) {
l.With(peerAttrs(pme.Peer)...).Debug("received requested")
})
torrentCfg.Callbacks.ReceivedUsefulData = append(torrentCfg.Callbacks.ReceivedUsefulData, func(pme torrent.PeerMessageEvent) {
l.With(peerAttrs(pme.Peer)...).Debug("received useful data")
})
// torrentCfg.Callbacks.PeerConnClosed = append(torrentCfg.Callbacks.PeerConnClosed, func(c *torrent.PeerConn) {
// l.Debug("peer closed", "ip", c.RemoteAddr.String())
// })
torrentCfg.PeriodicallyAnnounceTorrentsToDht = true
torrentCfg.ConfigureAnacrolixDhtServer = func(cfg *dht.ServerConfig) {
cfg.Store = fis
cfg.Exp = dhtTTL
cfg.NoSecurity = false
}
t, err := torrent.NewClient(torrentCfg)
if err != nil {
return nil, err
}
return t, nil
return torrentCfg
}
var emptyBytes [20]byte
func getOrCreatePeerID(p string) ([20]byte, error) {
idb, err := os.ReadFile(p)
if err == nil {
var out [20]byte
copy(out[:], idb)
return out, nil
}
if !os.IsNotExist(err) {
return emptyBytes, err
}
var out [20]byte
_, err = rand.Read(out[:])
if err != nil {
return emptyBytes, err
}
return out, os.WriteFile(p, out[:], 0755)
}
// peerAttrs builds the common slog attributes used by the torrent-client
// callback logging: remote address, discovery source, the peer's advertised
// max request count and encryption preference, plus the torrent name when
// the peer is already associated with a torrent.
func peerAttrs(peer *torrent.Peer) []any {
	out := []any{
		slog.String("ip", peer.RemoteAddr.String()),
		slog.String("discovery", string(peer.Discovery)),
		slog.Int("max-requests", peer.PeerMaxRequests),
		slog.Bool("prefers-encryption", peer.PeerPrefersEncryption),
	}
	// Torrent() may be nil for peers not yet attached to a torrent.
	if peer.Torrent() != nil {
		out = append(out, slog.String("torrent", peer.Torrent().Name()))
	}
	return out
}

View file

@ -1,25 +1,21 @@
package torrent
import (
"bufio"
"context"
"errors"
"fmt"
"log/slog"
"os"
"path/filepath"
"strings"
"sync"
"time"
"git.kmsign.ru/royalcat/tstor/pkg/ctxbilly"
"git.kmsign.ru/royalcat/tstor/pkg/rlog"
"git.kmsign.ru/royalcat/tstor/src/config"
"git.kmsign.ru/royalcat/tstor/src/tkv"
"git.kmsign.ru/royalcat/tstor/src/vfs"
"github.com/royalcat/ctxio"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/metric"
"go.opentelemetry.io/otel/trace"
"golang.org/x/exp/maps"
@ -28,12 +24,14 @@ import (
"github.com/anacrolix/torrent/metainfo"
"github.com/anacrolix/torrent/types/infohash"
"github.com/go-git/go-billy/v5"
"github.com/go-git/go-billy/v5/util"
"github.com/royalcat/kv"
)
var tracer = otel.Tracer("git.kmsign.ru/royalcat/tstor/sources/torrent",
trace.WithInstrumentationAttributes(attribute.String("component", "torrent-daemon")),
const instrument = "git.kmsign.ru/royalcat/tstor/sources/torrent"
var (
tracer = otel.Tracer(instrument, trace.WithInstrumentationAttributes(attribute.String("component", "torrent-daemon")))
meter = otel.Meter(instrument, metric.WithInstrumentationAttributes(attribute.String("component", "torrent-daemon")))
)
type DirAquire struct {
@ -50,22 +48,20 @@ type Daemon struct {
fileProperties kv.Store[string, FileProperties]
statsStore *statsStore
loadMutex sync.Mutex
torrentLoaded chan struct{}
loadMutex sync.Mutex
sourceFs billy.Filesystem
log *rlog.Logger
}
const dhtTTL = 24 * time.Hour
const dhtTTL = 180 * 24 * time.Hour
func NewService(sourceFs billy.Filesystem, conf config.TorrentClient) (*Daemon, error) {
func NewDaemon(sourceFs billy.Filesystem, conf config.TorrentClient) (*Daemon, error) {
s := &Daemon{
log: rlog.Component("torrent-service"),
sourceFs: sourceFs,
torrentLoaded: make(chan struct{}),
loadMutex: sync.Mutex{},
log: rlog.Component("torrent-service"),
sourceFs: sourceFs,
loadMutex: sync.Mutex{},
}
err := os.MkdirAll(conf.MetadataFolder, 0744)
@ -103,10 +99,21 @@ func NewService(sourceFs billy.Filesystem, conf config.TorrentClient) (*Daemon,
return nil, err
}
s.client, err = newClient(s.Storage, s.fis, &conf, id)
clientConfig := newClientConfig(s.Storage, s.fis, &conf, id)
s.client, err = torrent.NewClient(clientConfig)
if err != nil {
return nil, fmt.Errorf("error starting torrent client: %w", err)
return nil, err
}
// TODO move to config
s.client.AddDhtNodes([]string{
"router.bittorrent.com:6881",
"router.utorrent.com:6881",
"dht.transmissionbt.com:6881",
"router.bitcomet.com:6881",
"dht.aelitis.com6881",
})
s.client.AddDhtNodes(conf.DHTNodes)
s.dirsAquire, err = tkv.NewKV[string, DirAquire](conf.MetadataFolder, "dir-acquire")
@ -116,17 +123,24 @@ func NewService(sourceFs billy.Filesystem, conf config.TorrentClient) (*Daemon,
go func() {
ctx := context.Background()
err := s.loadTorrentFiles(ctx)
err := s.backgroudFileLoad(ctx)
if err != nil {
s.log.Error(ctx, "initial torrent load failed", rlog.Error(err))
}
close(s.torrentLoaded)
}()
go func() {
ctx := context.Background()
const period = time.Second * 10
<-s.torrentLoaded
err := registerTorrentMetrics(s.client)
if err != nil {
s.log.Error(ctx, "error registering torrent metrics", rlog.Error(err))
}
err = registerDhtMetrics(s.client)
if err != nil {
s.log.Error(ctx, "error registering dht metrics", rlog.Error(err))
}
timer := time.NewTicker(period)
for {
@ -134,7 +148,7 @@ func NewService(sourceFs billy.Filesystem, conf config.TorrentClient) (*Daemon,
case <-s.client.Closed():
return
case <-timer.C:
s.updateStats(context.Background())
s.updateStats(ctx)
}
}
}()
@ -142,58 +156,6 @@ func NewService(sourceFs billy.Filesystem, conf config.TorrentClient) (*Daemon,
return s, nil
}
// updateStats samples per-torrent and aggregate transfer statistics from the
// torrent client and persists them via statsStore. Persistence errors are
// logged rather than returned, since this runs from a background ticker.
// NOTE(review): peer counts are narrowed to uint16 — counts above 65535
// would wrap; presumably acceptable for this deployment, confirm.
func (s *Daemon) updateStats(ctx context.Context) {
	log := s.log

	totalPeers := 0
	activePeers := 0
	connectedSeeders := 0

	for _, v := range s.client.Torrents() {
		stats := v.Stats()
		err := s.statsStore.AddTorrentStats(v.InfoHash(), TorrentStats{
			Timestamp:        time.Now(),
			DownloadedBytes:  uint64(stats.BytesRead.Int64()),
			UploadedBytes:    uint64(stats.BytesWritten.Int64()),
			TotalPeers:       uint16(stats.TotalPeers),
			ActivePeers:      uint16(stats.ActivePeers),
			ConnectedSeeders: uint16(stats.ConnectedSeeders),
		})
		if err != nil {
			log.Error(ctx, "error saving torrent stats", rlog.Error(err))
		}

		totalPeers += stats.TotalPeers
		activePeers += stats.ActivePeers
		connectedSeeders += stats.ConnectedSeeders
	}

	// Byte totals come from the client's aggregate counters; peer totals are
	// the sums accumulated over the per-torrent stats above.
	totalStats := s.client.Stats()
	err := s.statsStore.AddTotalStats(TorrentStats{
		Timestamp:        time.Now(),
		DownloadedBytes:  uint64(totalStats.BytesRead.Int64()),
		UploadedBytes:    uint64(totalStats.BytesWritten.Int64()),
		TotalPeers:       uint16(totalPeers),
		ActivePeers:      uint16(activePeers),
		ConnectedSeeders: uint16(connectedSeeders),
	})
	if err != nil {
		log.Error(ctx, "error saving total stats", rlog.Error(err))
	}
}
// TotalStatsHistory returns the aggregate stats samples recorded since the
// given time.
func (s *Daemon) TotalStatsHistory(ctx context.Context, since time.Time) ([]TorrentStats, error) {
	return s.statsStore.ReadTotalStatsHistory(ctx, since)
}
// TorrentStatsHistory returns the stats samples recorded for one torrent
// (by infohash) since the given time.
func (s *Daemon) TorrentStatsHistory(ctx context.Context, since time.Time, ih infohash.T) ([]TorrentStats, error) {
	return s.statsStore.ReadTorrentStatsHistory(ctx, since, ih)
}
// StatsHistory returns all stats samples recorded since the given time.
func (s *Daemon) StatsHistory(ctx context.Context, since time.Time) ([]TorrentStats, error) {
	return s.statsStore.ReadStatsHistory(ctx, since)
}
var _ vfs.FsFactory = (*Daemon)(nil).NewTorrentFs
func (s *Daemon) Close(ctx context.Context) error {
@ -207,104 +169,6 @@ func (s *Daemon) Close(ctx context.Context) error {
)...)
}
// loadTorrent parses the torrent metainfo from f and returns a Controller for
// it. If the torrent is already registered in the client it is reused;
// otherwise it is added, with the info bytes taken from the file when valid
// or from the infoBytes cache as a fallback, and the call blocks until the
// torrent info is available (or ctx is cancelled).
func (s *Daemon) loadTorrent(ctx context.Context, f vfs.File) (*Controller, error) {
	ctx, span := tracer.Start(ctx, "loadTorrent")
	defer span.End()
	log := s.log

	stat, err := f.Info()
	if err != nil {
		return nil, fmt.Errorf("call stat failed: %w", err)
	}
	span.SetAttributes(attribute.String("filename", stat.Name()))

	mi, err := metainfo.Load(bufio.NewReader(ctxio.IoReader(ctx, f)))
	if err != nil {
		return nil, fmt.Errorf("loading torrent metadata from file %s, error: %w", stat.Name(), err)
	}

	var ctl *Controller
	t, ok := s.client.Torrent(mi.HashInfoBytes())
	if ok {
		// Torrent already known to the client: just wrap it.
		ctl = s.newController(t)
	} else {
		span.AddEvent("torrent not found, loading from file")
		log.Info(ctx, "torrent not found, loading from file")

		spec, err := torrent.TorrentSpecFromMetaInfoErr(mi)
		if err != nil {
			return nil, fmt.Errorf("parse spec from metadata: %w", err)
		}
		infoBytes := spec.InfoBytes

		// Discard info bytes that do not decode as a valid info dict.
		if !isValidInfoHashBytes(infoBytes) {
			log.Warn(ctx, "info loaded from spec not valid")
			infoBytes = nil
		}

		// Fall back to the cached info bytes for this infohash, if any.
		if len(infoBytes) == 0 {
			log.Info(ctx, "no info loaded from file, try to load from cache")
			infoBytes, err = s.infoBytes.GetBytes(spec.InfoHash)
			if err != nil && err != errNotFound {
				return nil, fmt.Errorf("get info bytes from database: %w", err)
			}
		}

		t, _ = s.client.AddTorrentOpt(torrent.AddTorrentOpts{
			InfoHash:   spec.InfoHash,
			InfoHashV2: spec.InfoHashV2,
			Storage:    s.Storage,
			InfoBytes:  infoBytes,
			ChunkSize:  spec.ChunkSize,
		})
		t.AllowDataDownload()
		t.AllowDataUpload()
		span.AddEvent("torrent added to client")

		// Block until the torrent's info is resolved (possibly via DHT/peers),
		// then persist it so future loads can skip the wait.
		select {
		case <-ctx.Done():
			return nil, ctx.Err()
		case <-t.GotInfo():
			err := s.infoBytes.Set(t.InfoHash(), t.Metainfo())
			if err != nil {
				log.Error(ctx, "error setting info bytes for torrent",
					slog.String("torrent-name", t.Name()),
					rlog.Error(err),
				)
			}
		}
		span.AddEvent("got info")

		ctl = s.newController(t)

		err = ctl.initializeTorrentPriories(ctx)
		if err != nil {
			return nil, fmt.Errorf("initialize torrent priorities: %w", err)
		}

		// info := t.Info()
		// if info == nil {
		// 	return nil, fmt.Errorf("info is nil")
		// }

		// compatable, _, err := s.checkTorrentCompatable(ctx, spec.InfoHash, *info)
		// if err != nil {
		// 	return nil, err
		// }
		// if !compatable {
		// 	return nil, fmt.Errorf(
		// 		"torrent with name '%s' not compatable existing infohash: %s, new: %s",
		// 		t.Name(), t.InfoHash().HexString(), spec.InfoHash.HexString(),
		// 	)
		// }
	}

	return ctl, nil
}
func isValidInfoHashBytes(d []byte) bool {
var info metainfo.Info
err := bencode.Unmarshal(d, &info)
@ -315,74 +179,6 @@ func (s *Daemon) Stats() torrent.ConnStats {
return s.client.Stats().ConnStats
}
const loadWorkers = 5
// loadTorrentFiles walks sourceFs and loads every *.torrent file found,
// fanning the work out over loadWorkers goroutines through a buffered
// channel. It returns the walk error, if any; per-file failures are logged
// and skipped. The deferred close/wait pair guarantees all workers finish
// before the function returns.
func (s *Daemon) loadTorrentFiles(ctx context.Context) error {
	ctx, span := tracer.Start(ctx, "loadTorrentFiles", trace.WithAttributes(
		attribute.Int("workers", loadWorkers),
	))
	defer span.End()

	log := s.log

	loaderPaths := make(chan string, loadWorkers*5)
	wg := sync.WaitGroup{}
	defer func() {
		close(loaderPaths)
		wg.Wait()
	}()

	// loadFile handles a single torrent file. It is a separate function so
	// the deferred Close fires when each file is done — the previous
	// `defer file.Close()` inside the worker loop kept every file open
	// until the worker goroutine exited (a file-handle leak across the
	// whole load).
	loadFile := func(path string) {
		info, err := s.sourceFs.Stat(path)
		if err != nil {
			log.Error(ctx, "error stat torrent file", slog.String("filename", path), rlog.Error(err))
			return
		}

		file, err := s.sourceFs.Open(path)
		if err != nil {
			log.Error(ctx, "error opening torrent file", slog.String("filename", path), rlog.Error(err))
			return
		}
		defer file.Close()

		vfile := vfs.NewCtxBillyFile(info, ctxbilly.WrapFile(file))

		_, err = s.loadTorrent(ctx, vfile)
		if err != nil {
			log.Error(ctx, "failed adding torrent", rlog.Error(err))
		}
	}

	loaderWorker := func() {
		defer wg.Done()
		for path := range loaderPaths {
			loadFile(path)
		}
	}

	wg.Add(loadWorkers)
	for range loadWorkers {
		go loaderWorker()
	}

	return util.Walk(s.sourceFs, "", func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return fmt.Errorf("fs walk error: %w", err)
		}
		if ctx.Err() != nil {
			return ctx.Err()
		}
		if info.IsDir() {
			return nil
		}
		if strings.HasSuffix(path, ".torrent") {
			loaderPaths <- path
		}
		return nil
	})
}
// storeByTorrent returns a view of s namespaced to one torrent by prefixing
// every key with the torrent's hex infohash and a "/" separator.
func storeByTorrent[K kv.Bytes, V any](s kv.Store[K, V], infohash infohash.T) kv.Store[K, V] {
	return kv.PrefixBytes[K, V](s, K(infohash.HexString()+"/"))
}
@ -396,8 +192,6 @@ func (s *Daemon) newController(t *torrent.Torrent) *Controller {
}
func (s *Daemon) ListTorrents(ctx context.Context) ([]*Controller, error) {
<-s.torrentLoaded
out := []*Controller{}
for _, v := range s.client.Torrents() {
out = append(out, s.newController(v))
@ -406,8 +200,6 @@ func (s *Daemon) ListTorrents(ctx context.Context) ([]*Controller, error) {
}
func (s *Daemon) GetTorrent(infohashHex string) (*Controller, error) {
<-s.torrentLoaded
t, ok := s.client.Torrent(infohash.FromHexString(infohashHex))
if !ok {
return nil, nil

View file

@ -0,0 +1,246 @@
package torrent
import (
"bufio"
"context"
"fmt"
"io"
"log/slog"
"os"
"strings"
"sync"
"time"
"git.kmsign.ru/royalcat/tstor/pkg/ctxbilly"
"git.kmsign.ru/royalcat/tstor/pkg/rlog"
"git.kmsign.ru/royalcat/tstor/src/vfs"
"github.com/anacrolix/torrent"
"github.com/anacrolix/torrent/metainfo"
"github.com/go-git/go-billy/v5/util"
"github.com/royalcat/ctxio"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/trace"
)
const activityTimeout = time.Minute * 15
// readInfoHash parses the metainfo from f and returns its infohash.
// It consumes the reader; callers that reuse f must rewind it afterwards.
func readInfoHash(ctx context.Context, f vfs.File) (metainfo.Hash, error) {
	ctx, span := tracer.Start(ctx, "readInfoHash")
	defer span.End()

	mi, err := metainfo.Load(ctxio.IoReader(ctx, f))
	if err != nil {
		return metainfo.Hash{}, fmt.Errorf("loading metainfo: %w", err)
	}
	return mi.HashInfoBytes(), nil
}
// loadTorrent parses the torrent metainfo from f and returns a Controller for
// it. An already-registered torrent is reused; otherwise the torrent is added
// to the client — info bytes come from the file when valid, else from the
// infoBytes cache — and the call blocks until the torrent info is available
// (or ctx is cancelled).
func (s *Daemon) loadTorrent(ctx context.Context, f vfs.File) (*Controller, error) {
	ctx, span := tracer.Start(ctx, "loadTorrent")
	defer span.End()
	log := s.log

	stat, err := f.Info()
	if err != nil {
		return nil, fmt.Errorf("call stat failed: %w", err)
	}
	span.SetAttributes(attribute.String("filename", stat.Name()))

	mi, err := metainfo.Load(bufio.NewReader(ctxio.IoReader(ctx, f)))
	if err != nil {
		return nil, fmt.Errorf("loading torrent metadata from file %s, error: %w", stat.Name(), err)
	}
	log = log.With(slog.String("info-hash", mi.HashInfoBytes().HexString()))

	var ctl *Controller
	t, ok := s.client.Torrent(mi.HashInfoBytes())
	if ok {
		// Torrent already known to the client: just wrap it.
		log = log.With(slog.String("torrent-name", t.Name()))
		ctl = s.newController(t)
	} else {
		span.AddEvent("torrent not found, loading from file")
		log.Info(ctx, "torrent not found, loading from file")

		spec, err := torrent.TorrentSpecFromMetaInfoErr(mi)
		if err != nil {
			return nil, fmt.Errorf("parse spec from metadata: %w", err)
		}
		infoBytes := spec.InfoBytes

		// Discard info bytes that do not decode as a valid info dict.
		if !isValidInfoHashBytes(infoBytes) {
			log.Warn(ctx, "info loaded from spec not valid")
			infoBytes = nil
		}

		// Fall back to the cached info bytes for this infohash, if any.
		if len(infoBytes) == 0 {
			log.Info(ctx, "no info loaded from file, try to load from cache")
			infoBytes, err = s.infoBytes.GetBytes(spec.InfoHash)
			if err != nil && err != errNotFound {
				return nil, fmt.Errorf("get info bytes from database: %w", err)
			}
		}

		t, _ = s.client.AddTorrentOpt(torrent.AddTorrentOpts{
			InfoHash:   spec.InfoHash,
			InfoHashV2: spec.InfoHashV2,
			Storage:    s.Storage,
			InfoBytes:  infoBytes,
			ChunkSize:  spec.ChunkSize,
		})
		log = log.With(slog.String("torrent-name", t.Name()))
		t.AllowDataDownload()
		t.AllowDataUpload()
		span.AddEvent("torrent added to client")

		// Block until the torrent's info is resolved (possibly via DHT/peers),
		// then persist it so future loads can skip the wait.
		select {
		case <-ctx.Done():
			return nil, ctx.Err()
		case <-t.GotInfo():
			err := s.infoBytes.Set(t.InfoHash(), t.Metainfo())
			if err != nil {
				log.Error(ctx, "error setting info bytes for torrent",
					slog.String("torrent-name", t.Name()),
					rlog.Error(err),
				)
			}
		}
		span.AddEvent("got info")

		ctl = s.newController(t)

		err = ctl.initializeTorrentPriories(ctx)
		if err != nil {
			return nil, fmt.Errorf("initialize torrent priorities: %w", err)
		}

		// go func() {
		// 	subscr := ctl.t.SubscribePieceStateChanges()
		// 	defer subscr.Close()
		// 	dropTimer := time.NewTimer(activityTimeout)
		// 	defer dropTimer.Stop()
		// 	for {
		// 		select {
		// 		case <-subscr.Values:
		// 			dropTimer.Reset(activityTimeout)
		// 		case <-dropTimer.C:
		// 			log.Info(ctx, "torrent dropped by activity timeout")
		// 			select {
		// 			case <-ctl.t.Closed():
		// 				return
		// 			case <-time.After(time.Second):
		// 				ctl.t.Drop()
		// 			}
		// 		case <-ctl.t.Closed():
		// 			return
		// 		}
		// 	}
		// }()
	}

	return ctl, nil
}
const loadWorkers = 5
// backgroudFileLoad walks sourceFs and loads every *.torrent file that has at
// least one prioritized file recorded in fileProperties, fanning the work out
// over loadWorkers goroutines. Torrents with no prioritized files are skipped
// at startup. It returns the walk error, if any; per-file failures are logged
// and skipped. The deferred close/wait pair guarantees all workers finish
// before the function returns.
func (s *Daemon) backgroudFileLoad(ctx context.Context) error {
	ctx, span := tracer.Start(ctx, "loadTorrentFiles", trace.WithAttributes(
		attribute.Int("workers", loadWorkers),
	))
	defer span.End()

	log := s.log

	loaderPaths := make(chan string, loadWorkers*5)
	wg := sync.WaitGroup{}
	defer func() {
		close(loaderPaths)
		wg.Wait()
	}()

	// loadFile handles a single torrent file. It is a separate function so
	// the deferred Close fires when each file is done — the previous
	// `defer file.Close()` inside the worker loop kept every file open
	// until the worker goroutine exited (a file-handle leak across the
	// whole load).
	loadFile := func(path string) {
		info, err := s.sourceFs.Stat(path)
		if err != nil {
			log.Error(ctx, "error stat torrent file", slog.String("filename", path), rlog.Error(err))
			return
		}

		file, err := s.sourceFs.Open(path)
		if err != nil {
			log.Error(ctx, "error opening torrent file", slog.String("filename", path), rlog.Error(err))
			return
		}
		defer file.Close()

		vfile := vfs.NewCtxBillyFile(info, ctxbilly.WrapFile(file))

		ih, err := readInfoHash(ctx, vfile)
		if err != nil {
			log.Error(ctx, "error reading info hash", slog.String("filename", path), rlog.Error(err))
			return
		}

		// readInfoHash consumed the reader; rewind before handing the file
		// on to loadTorrent.
		if _, err := vfile.Seek(0, io.SeekStart); err != nil {
			log.Error(ctx, "error seeking file", slog.String("filename", path), rlog.Error(err))
			return
		}

		// Only torrents with at least one prioritized file are loaded at
		// startup; io.EOF is used as an early-exit sentinel for Range.
		props := storeByTorrent(s.fileProperties, ih)
		isPrioritized := false
		err = props.Range(ctx, func(k string, v FileProperties) error {
			if v.Priority > 0 {
				isPrioritized = true
				return io.EOF
			}
			return nil
		})
		if err != nil && err != io.EOF {
			log.Error(ctx, "error checking file priority", slog.String("filename", path), rlog.Error(err))
			return
		}
		if !isPrioritized {
			log.Debug(ctx, "file not prioritized, skipping", slog.String("filename", path))
			return
		}

		if _, err := s.loadTorrent(ctx, vfile); err != nil {
			log.Error(ctx, "failed adding torrent", rlog.Error(err))
		}
	}

	loaderWorker := func() {
		defer wg.Done()
		for path := range loaderPaths {
			loadFile(path)
		}
	}

	wg.Add(loadWorkers)
	for range loadWorkers {
		go loaderWorker()
	}

	return util.Walk(s.sourceFs, "", func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return fmt.Errorf("fs walk error: %w", err)
		}
		if ctx.Err() != nil {
			return ctx.Err()
		}
		if info.IsDir() {
			return nil
		}
		if strings.HasSuffix(path, ".torrent") {
			loaderPaths <- path
		}
		return nil
	})
}

View file

@ -0,0 +1,73 @@
package torrent
import (
"context"
"time"
"git.kmsign.ru/royalcat/tstor/pkg/rlog"
"github.com/anacrolix/torrent/types/infohash"
)
// allStats samples current client statistics and returns a per-torrent map
// keyed by infohash plus an aggregate entry. Byte totals in the aggregate
// come from the client's own counters; peer counts are the sums over the
// per-torrent samples.
// NOTE(review): peer counts are narrowed to uint16 — counts above 65535
// would wrap; presumably acceptable for this deployment, confirm.
func (s *Daemon) allStats(ctx context.Context) (map[infohash.T]TorrentStats, TorrentStats) {
	totalPeers := 0
	activePeers := 0
	connectedSeeders := 0

	perTorrentStats := map[infohash.T]TorrentStats{}

	for _, v := range s.client.Torrents() {
		stats := v.Stats()
		perTorrentStats[v.InfoHash()] = TorrentStats{
			Timestamp:        time.Now(),
			DownloadedBytes:  uint64(stats.BytesRead.Int64()),
			UploadedBytes:    uint64(stats.BytesWritten.Int64()),
			TotalPeers:       uint16(stats.TotalPeers),
			ActivePeers:      uint16(stats.ActivePeers),
			ConnectedSeeders: uint16(stats.ConnectedSeeders),
		}

		totalPeers += stats.TotalPeers
		activePeers += stats.ActivePeers
		connectedSeeders += stats.ConnectedSeeders
	}

	totalStats := s.client.Stats()
	return perTorrentStats, TorrentStats{
		Timestamp:        time.Now(),
		DownloadedBytes:  uint64(totalStats.BytesRead.Int64()),
		UploadedBytes:    uint64(totalStats.BytesWritten.Int64()),
		TotalPeers:       uint16(totalPeers),
		ActivePeers:      uint16(activePeers),
		ConnectedSeeders: uint16(connectedSeeders),
	}
}
// updateStats samples all torrent statistics via allStats and persists both
// the per-torrent and aggregate entries. Persistence errors are logged, not
// returned, since this runs from a background ticker.
func (s *Daemon) updateStats(ctx context.Context) {
	log := s.log

	perTorrentStats, totalStats := s.allStats(ctx)

	for ih, v := range perTorrentStats {
		err := s.statsStore.AddTorrentStats(ih, v)
		if err != nil {
			log.Error(ctx, "error saving torrent stats", rlog.Error(err))
		}
	}

	err := s.statsStore.AddTotalStats(totalStats)
	if err != nil {
		log.Error(ctx, "error saving total stats", rlog.Error(err))
	}
}
// TotalStatsHistory returns the aggregate stats samples recorded since the
// given time.
func (s *Daemon) TotalStatsHistory(ctx context.Context, since time.Time) ([]TorrentStats, error) {
	return s.statsStore.ReadTotalStatsHistory(ctx, since)
}
// TorrentStatsHistory returns the stats samples recorded for one torrent
// (by infohash) since the given time.
func (s *Daemon) TorrentStatsHistory(ctx context.Context, since time.Time, ih infohash.T) ([]TorrentStats, error) {
	return s.statsStore.ReadTorrentStatsHistory(ctx, since, ih)
}
// StatsHistory returns all stats samples recorded since the given time.
func (s *Daemon) StatsHistory(ctx context.Context, since time.Time) ([]TorrentStats, error) {
	return s.statsStore.ReadStatsHistory(ctx, since)
}

View file

@ -410,7 +410,6 @@ func openTorrentFile(ctx context.Context, name string, file *torrent.File, lastT
}
r := file.NewReader()
r.SetReadahead(1024 * 1024 * 16) // TODO configurable
_, err := r.ReadContext(ctx, make([]byte, 128))
if err != nil && err != io.EOF {
return nil, fmt.Errorf("failed initial file read: %w", err)
@ -433,6 +432,14 @@ func (tf *torrentFile) Name() string {
return tf.name
}
// Seek implements vfs.File. It takes the file's write lock (the underlying
// torrent reader's position is shared state also moved by Read) and delegates
// to the torrent reader.
func (tf *torrentFile) Seek(offset int64, whence int) (int64, error) {
	tf.mu.Lock()
	defer tf.mu.Unlock()
	return tf.tr.Seek(offset, whence)
}
// Type implements File.
func (tf *torrentFile) Type() fs.FileMode {
return vfs.ROMode | fs.ModeDir
@ -483,8 +490,8 @@ func (tf *torrentFile) Read(ctx context.Context, p []byte) (n int, err error) {
span.End()
}()
tf.mu.RLock()
defer tf.mu.RUnlock()
tf.mu.Lock()
defer tf.mu.Unlock()
ctx, cancel := tf.readTimeout(ctx)
defer cancel()

View file

@ -1,30 +0,0 @@
package torrent
import (
"crypto/rand"
"os"
)
var emptyBytes [20]byte
// getOrCreatePeerID returns the 20-byte BitTorrent peer id persisted at path
// p, generating and persisting a new random one when the file is missing.
// NOTE(review): a file shorter than 20 bytes is silently zero-padded rather
// than regenerated, and the id file is written 0755 (exec bit on a data
// file) — both look unintentional; confirm.
func getOrCreatePeerID(p string) ([20]byte, error) {
	idb, err := os.ReadFile(p)
	if err == nil {
		var out [20]byte
		copy(out[:], idb)
		return out, nil
	}
	if !os.IsNotExist(err) {
		return emptyBytes, err
	}

	// No persisted id: generate a fresh random one and write it back.
	var out [20]byte
	_, err = rand.Read(out[:])
	if err != nil {
		return emptyBytes, err
	}
	return out, os.WriteFile(p, out[:], 0755)
}

View file

@ -0,0 +1,69 @@
package torrent
import (
"context"
"encoding/base64"
"github.com/anacrolix/dht/v2"
"github.com/anacrolix/torrent"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/metric"
)
// registerTorrentMetrics registers OpenTelemetry observable gauges for the
// torrent client: loaded-torrent count plus per-torrent peer counts, seeder
// count, completed bytes, and IO bytes (labelled by direction). Values are
// sampled from the client on each collection.
// NOTE(review): errors from the gauge constructors are discarded with `_`;
// a failed instrument would surface only as a missing metric — confirm this
// is intended.
func registerTorrentMetrics(client *torrent.Client) error {
	meterTotalPeers, _ := meter.Int64ObservableGauge("torrent.peers.total")
	meterActivePeers, _ := meter.Int64ObservableGauge("torrent.peers.active")
	meterSeeders, _ := meter.Int64ObservableGauge("torrent.seeders")
	meterDownloaded, _ := meter.Int64ObservableGauge("torrent.downloaded", metric.WithUnit("By"))
	meterIO, _ := meter.Int64ObservableGauge("torrent.io", metric.WithUnit("By"))
	meterLoaded, _ := meter.Int64ObservableGauge("torrent.loaded")
	_, err := meter.RegisterCallback(func(ctx context.Context, o metric.Observer) error {
		o.ObserveInt64(meterLoaded, int64(len(client.Torrents())))

		for _, v := range client.Torrents() {
			// Common attribute set identifying the torrent.
			as := attribute.NewSet(
				attribute.String("infohash", v.InfoHash().HexString()),
				attribute.String("name", v.Name()),
				attribute.Int64("size", v.Length()),
			)

			stats := v.Stats()
			o.ObserveInt64(meterTotalPeers, int64(stats.TotalPeers), metric.WithAttributeSet(as))
			o.ObserveInt64(meterActivePeers, int64(stats.ActivePeers), metric.WithAttributeSet(as))
			o.ObserveInt64(meterSeeders, int64(stats.ConnectedSeeders), metric.WithAttributeSet(as))
			o.ObserveInt64(meterIO, stats.BytesRead.Int64(), metric.WithAttributeSet(as), metric.WithAttributes(attribute.String("direction", "download")))
			o.ObserveInt64(meterIO, stats.BytesWritten.Int64(), metric.WithAttributeSet(as), metric.WithAttributes(attribute.String("direction", "upload")))
			o.ObserveInt64(meterDownloaded, v.BytesCompleted(), metric.WithAttributeSet(as))
		}

		return nil
	}, meterTotalPeers, meterActivePeers, meterSeeders, meterIO, meterDownloaded, meterLoaded)
	if err != nil {
		return err
	}

	return nil
}
// registerDhtMetrics registers an OpenTelemetry observable gauge reporting
// the routing-table node count of each DHT server run by the client,
// labelled with the server's id (base64) and bind address. Servers whose
// Stats() are not dht.ServerStats are skipped.
func registerDhtMetrics(client *torrent.Client) error {
	meterDhtNodes, _ := meter.Int64ObservableGauge("torrent.dht.nodes")
	_, err := meter.RegisterCallback(func(ctx context.Context, o metric.Observer) error {
		servers := client.DhtServers()
		for _, dhtSrv := range servers {
			stats, ok := dhtSrv.Stats().(dht.ServerStats)
			if !ok {
				continue
			}

			id := dhtSrv.ID()
			as := attribute.NewSet(
				attribute.String("id", base64.StdEncoding.EncodeToString(id[:])),
				attribute.String("address", dhtSrv.Addr().String()),
			)
			o.ObserveInt64(meterDhtNodes, int64(stats.Nodes), metric.WithAttributeSet(as))
		}
		return nil
	}, meterDhtNodes)
	return err
}

View file

@ -0,0 +1,24 @@
package torrent
import (
"github.com/anacrolix/dht/v2/krpc"
peer_store "github.com/anacrolix/dht/v2/peer-store"
"github.com/anacrolix/torrent/types/infohash"
"github.com/royalcat/kv"
)
// peerStore is a kv-backed implementation of the DHT peer_store.Interface.
// NOTE(review): both methods currently panic("unimplemented") — this is a
// stub and must not be wired into a live DHT server yet.
type peerStore struct {
	store kv.Store[infohash.T, []krpc.NodeAddr]
}

// Compile-time check that peerStore satisfies peer_store.Interface.
var _ peer_store.Interface = (*peerStore)(nil)

// AddPeer implements peer_store.Interface. Unimplemented stub.
func (p *peerStore) AddPeer(ih infohash.T, node krpc.NodeAddr) {
	panic("unimplemented")
}

// GetPeers implements peer_store.Interface. Unimplemented stub.
func (p *peerStore) GetPeers(ih infohash.T) []krpc.NodeAddr {
	panic("unimplemented")
}

View file

@ -0,0 +1,36 @@
package torrent
import (
"testing"
"github.com/anacrolix/torrent/metainfo"
"github.com/anacrolix/torrent/storage"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestBoltPieceCompletion exercises the piece-completion store round trip:
// an unknown key reports Ok=false, and Set(false)/Set(true) are read back
// exactly via Get.
func TestBoltPieceCompletion(t *testing.T) {
	td := t.TempDir()

	pc, err := newPieceCompletion(td)
	require.NoError(t, err)
	defer pc.Close()

	pk := metainfo.PieceKey{}

	// Zero-value key has no recorded completion yet.
	b, err := pc.Get(pk)
	require.NoError(t, err)
	assert.False(t, b.Ok)

	require.NoError(t, pc.Set(pk, false))

	b, err = pc.Get(pk)
	require.NoError(t, err)
	assert.Equal(t, storage.Completion{Complete: false, Ok: true}, b)

	require.NoError(t, pc.Set(pk, true))

	b, err = pc.Get(pk)
	require.NoError(t, err)
	assert.Equal(t, storage.Completion{Complete: true, Ok: true}, b)
}

View file

@ -19,36 +19,14 @@ import (
)
// OpenTorrent implements storage.ClientImplCloser.
func (me *fileStorage) OpenTorrent(info *metainfo.Info, infoHash infohash.T) (storage.TorrentImpl, error) {
ctx := context.Background()
log := me.log.With(slog.String("infohash", infoHash.HexString()))
func (me *fileStorage) OpenTorrent(ctx context.Context, info *metainfo.Info, infoHash infohash.T) (storage.TorrentImpl, error) {
ctx, span := tracer.Start(ctx, "OpenTorrent")
defer span.End()
log := me.log.With(slog.String("infohash", infoHash.HexString()), slog.String("name", info.BestName()))
// dir := torrentDir(me.baseDir, infoHash)
// legacyDir := filepath.Join(me.baseDir, info.Name)
log.Debug(ctx, "opening torrent")
// log = log.With(slog.String("legacy_dir", legacyDir), slog.String("dir", dir))
// if _, err := os.Stat(legacyDir); err == nil {
// log.Warn(ctx, "legacy torrent dir found, renaming", slog.String("dir", dir))
// err = os.Rename(legacyDir, dir)
// if err != nil {
// return storage.TorrentImpl{}, fmt.Errorf("error renaming legacy torrent dir: %w", err)
// }
// }
// if _, err := os.Stat(dir); errors.Is(err, fs.ErrNotExist) {
// log.Info(ctx, "new torrent, trying copy files from existing")
// dups := me.dupIndex.Includes(infoHash, info.Files)
// for _, dup := range dups {
// err := me.copyDup(ctx, infoHash, dup)
// if err != nil {
// log.Error(ctx, "error copying file", slog.String("file", dup.fileinfo.DisplayPath(info)), rlog.Error(err))
// }
// }
// }
impl, err := me.client.OpenTorrent(info, infoHash)
impl, err := me.client.OpenTorrent(ctx, info, infoHash)
if err != nil {
log.Error(ctx, "error opening torrent", rlog.Error(err))
}

View file

@ -12,15 +12,17 @@ import (
"github.com/agoda-com/opentelemetry-logs-go/exporters/otlp/otlplogs"
"github.com/agoda-com/opentelemetry-logs-go/exporters/otlp/otlplogs/otlplogshttp"
logsdk "github.com/agoda-com/opentelemetry-logs-go/sdk/logs"
"github.com/google/uuid"
otelpyroscope "github.com/grafana/otel-profiling-go"
"github.com/grafana/pyroscope-go"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp"
"go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp"
"go.opentelemetry.io/otel/exporters/prometheus"
"go.opentelemetry.io/otel/sdk/metric"
"go.opentelemetry.io/otel/sdk/resource"
"go.opentelemetry.io/otel/sdk/trace"
semconv "go.opentelemetry.io/otel/semconv/v1.25.0"
semconv "go.opentelemetry.io/otel/semconv/v1.26.0"
)
type Client struct {
@ -70,6 +72,7 @@ func Setup(ctx context.Context, endpoint string) (*Client, error) {
semconv.SchemaURL,
semconv.ServiceName(appName),
semconv.HostName(hostName),
semconv.ServiceInstanceID(uuid.NewString()),
),
)
if err != nil {
@ -86,8 +89,13 @@ func Setup(ctx context.Context, endpoint string) (*Client, error) {
return nil, err
}
promExporter, err := prometheus.New()
if err != nil {
return nil, fmt.Errorf("failed to initialize prometheus exporter: %w", err)
}
client.metricProvider = metric.NewMeterProvider(
metric.WithReader(metric.NewPeriodicReader(meticExporter)),
metric.WithReader(promExporter),
metric.WithResource(r),
)
otel.SetMeterProvider(client.metricProvider)

View file

@ -193,6 +193,20 @@ type archiveFile struct {
buffer *filebuffer.Buffer
}
// Seek implements File by updating the logical read offset; the next Read
// presumably consumes from d.offset.
// NOTE(review): no validation — an unknown whence leaves the offset
// unchanged without error, and a negative resulting offset is accepted;
// io.Seeker semantics call for an error in both cases. Confirm and harden.
func (d *archiveFile) Seek(offset int64, whence int) (int64, error) {
	switch whence {
	case io.SeekStart:
		d.offset = offset
	case io.SeekCurrent:
		d.offset += offset
	case io.SeekEnd:
		d.offset = d.size + offset
	}
	return d.offset, nil
}
// Name implements File.
func (d *archiveFile) Name() string {
return d.name

View file

@ -110,6 +110,11 @@ type CtxBillyFile struct {
file ctxbilly.File
}
// Seek implements File by delegating directly to the wrapped billy file.
func (c *CtxBillyFile) Seek(offset int64, whence int) (int64, error) {
	return c.file.Seek(offset, whence)
}
// Close implements File.
func (c *CtxBillyFile) Close(ctx context.Context) error {
return c.file.Close(ctx)

View file

@ -33,6 +33,11 @@ func (d *dirFile) IsDir() bool {
return true
}
// Seek implements File. A directory has no byte stream to position
// within, so every call is rejected with fs.ErrInvalid; offset and
// whence are ignored.
func (d *dirFile) Seek(offset int64, whence int) (int64, error) {
	return 0, fs.ErrInvalid
}
// Name implements File.
func (d *dirFile) Name() string {
return d.name

View file

@ -79,6 +79,11 @@ type DummyFile struct {
name string
}
// Seek implements File. DummyFile carries no data, so seeking is a
// no-op that always reports position 0 and success, regardless of
// offset and whence.
func (d *DummyFile) Seek(offset int64, whence int) (int64, error) {
	return 0, nil
}
// Name implements File.
func (d *DummyFile) Name() string {
panic("unimplemented")

View file

@ -19,6 +19,7 @@ type File interface {
ctxio.Reader
ctxio.ReaderAt
ctxio.Closer
ctxio.Seeker
}
var ErrNotImplemented = errors.New("not implemented")

View file

@ -2,9 +2,7 @@ package vfs
import (
"context"
"errors"
"fmt"
"io"
"io/fs"
"log/slog"
"reflect"
@ -36,7 +34,7 @@ type LogFS struct {
}
func isLoggableError(err error) bool {
return err != nil && !errors.Is(err, fs.ErrNotExist) && !errors.Is(err, io.EOF)
return err != nil // && !errors.Is(err, fs.ErrNotExist) && !errors.Is(err, io.EOF)
}
var _ Filesystem = (*LogFS)(nil)
@ -209,6 +207,11 @@ type LogFile struct {
timeout time.Duration
}
// Seek implements File by delegating to the wrapped file; the seek
// itself is not logged.
func (f *LogFile) Seek(offset int64, whence int) (int64, error) {
	pos, err := f.f.Seek(offset, whence)
	return pos, err
}
// Name implements File.
func (f *LogFile) Name() string {
return f.f.Name()

View file

@ -108,6 +108,11 @@ func (d *MemoryFile) Name() string {
return d.name
}
// Seek implements File by delegating to the in-memory data buffer,
// which maintains its own cursor.
func (d *MemoryFile) Seek(offset int64, whence int) (int64, error) {
	pos, err := d.data.Seek(offset, whence)
	return pos, err
}
// Type implements File.
func (d *MemoryFile) Type() fs.FileMode {
return ROMode

View file

@ -122,6 +122,11 @@ func (f *LazyOsFile) Type() fs.FileMode {
return f.info.Mode()
}
// Seek implements File by delegating to the underlying OS file.
//
// The file is opened lazily, so f.file may still be nil here (Close in
// this file tolerates exactly that state). Previously a Seek before the
// lazy open dereferenced the nil handle and panicked; now it returns
// fs.ErrInvalid instead.
func (f *LazyOsFile) Seek(offset int64, whence int) (int64, error) {
	if f.file == nil {
		// Nothing is open yet — there is no stream to position within.
		return 0, fs.ErrInvalid
	}
	return f.file.Seek(offset, whence)
}
// Close implements File.
func (f *LazyOsFile) Close(ctx context.Context) error {
if f.file == nil {

View file

@ -11,6 +11,8 @@ targets:
type: String
DateTime:
type: DateTime
UInt:
type: int
clients:
- graphql
- graphql_flutter

View file

@ -1033,6 +1033,282 @@ class _CopyWithStubImpl$Input$TorrentFilter<TRes>
_res;
}
class Input$TorrentPriorityFilter {
factory Input$TorrentPriorityFilter({
Enum$TorrentPriority? eq,
Enum$TorrentPriority? gt,
Enum$TorrentPriority? lt,
Enum$TorrentPriority? gte,
Enum$TorrentPriority? lte,
List<Enum$TorrentPriority>? $in,
}) =>
Input$TorrentPriorityFilter._({
if (eq != null) r'eq': eq,
if (gt != null) r'gt': gt,
if (lt != null) r'lt': lt,
if (gte != null) r'gte': gte,
if (lte != null) r'lte': lte,
if ($in != null) r'in': $in,
});
Input$TorrentPriorityFilter._(this._$data);
factory Input$TorrentPriorityFilter.fromJson(Map<String, dynamic> data) {
final result$data = <String, dynamic>{};
if (data.containsKey('eq')) {
final l$eq = data['eq'];
result$data['eq'] =
l$eq == null ? null : fromJson$Enum$TorrentPriority((l$eq as String));
}
if (data.containsKey('gt')) {
final l$gt = data['gt'];
result$data['gt'] =
l$gt == null ? null : fromJson$Enum$TorrentPriority((l$gt as String));
}
if (data.containsKey('lt')) {
final l$lt = data['lt'];
result$data['lt'] =
l$lt == null ? null : fromJson$Enum$TorrentPriority((l$lt as String));
}
if (data.containsKey('gte')) {
final l$gte = data['gte'];
result$data['gte'] = l$gte == null
? null
: fromJson$Enum$TorrentPriority((l$gte as String));
}
if (data.containsKey('lte')) {
final l$lte = data['lte'];
result$data['lte'] = l$lte == null
? null
: fromJson$Enum$TorrentPriority((l$lte as String));
}
if (data.containsKey('in')) {
final l$$in = data['in'];
result$data['in'] = (l$$in as List<dynamic>?)
?.map((e) => fromJson$Enum$TorrentPriority((e as String)))
.toList();
}
return Input$TorrentPriorityFilter._(result$data);
}
Map<String, dynamic> _$data;
Enum$TorrentPriority? get eq => (_$data['eq'] as Enum$TorrentPriority?);
Enum$TorrentPriority? get gt => (_$data['gt'] as Enum$TorrentPriority?);
Enum$TorrentPriority? get lt => (_$data['lt'] as Enum$TorrentPriority?);
Enum$TorrentPriority? get gte => (_$data['gte'] as Enum$TorrentPriority?);
Enum$TorrentPriority? get lte => (_$data['lte'] as Enum$TorrentPriority?);
List<Enum$TorrentPriority>? get $in =>
(_$data['in'] as List<Enum$TorrentPriority>?);
Map<String, dynamic> toJson() {
final result$data = <String, dynamic>{};
if (_$data.containsKey('eq')) {
final l$eq = eq;
result$data['eq'] =
l$eq == null ? null : toJson$Enum$TorrentPriority(l$eq);
}
if (_$data.containsKey('gt')) {
final l$gt = gt;
result$data['gt'] =
l$gt == null ? null : toJson$Enum$TorrentPriority(l$gt);
}
if (_$data.containsKey('lt')) {
final l$lt = lt;
result$data['lt'] =
l$lt == null ? null : toJson$Enum$TorrentPriority(l$lt);
}
if (_$data.containsKey('gte')) {
final l$gte = gte;
result$data['gte'] =
l$gte == null ? null : toJson$Enum$TorrentPriority(l$gte);
}
if (_$data.containsKey('lte')) {
final l$lte = lte;
result$data['lte'] =
l$lte == null ? null : toJson$Enum$TorrentPriority(l$lte);
}
if (_$data.containsKey('in')) {
final l$$in = $in;
result$data['in'] =
l$$in?.map((e) => toJson$Enum$TorrentPriority(e)).toList();
}
return result$data;
}
CopyWith$Input$TorrentPriorityFilter<Input$TorrentPriorityFilter>
get copyWith => CopyWith$Input$TorrentPriorityFilter(
this,
(i) => i,
);
@override
bool operator ==(Object other) {
if (identical(this, other)) {
return true;
}
if (!(other is Input$TorrentPriorityFilter) ||
runtimeType != other.runtimeType) {
return false;
}
final l$eq = eq;
final lOther$eq = other.eq;
if (_$data.containsKey('eq') != other._$data.containsKey('eq')) {
return false;
}
if (l$eq != lOther$eq) {
return false;
}
final l$gt = gt;
final lOther$gt = other.gt;
if (_$data.containsKey('gt') != other._$data.containsKey('gt')) {
return false;
}
if (l$gt != lOther$gt) {
return false;
}
final l$lt = lt;
final lOther$lt = other.lt;
if (_$data.containsKey('lt') != other._$data.containsKey('lt')) {
return false;
}
if (l$lt != lOther$lt) {
return false;
}
final l$gte = gte;
final lOther$gte = other.gte;
if (_$data.containsKey('gte') != other._$data.containsKey('gte')) {
return false;
}
if (l$gte != lOther$gte) {
return false;
}
final l$lte = lte;
final lOther$lte = other.lte;
if (_$data.containsKey('lte') != other._$data.containsKey('lte')) {
return false;
}
if (l$lte != lOther$lte) {
return false;
}
final l$$in = $in;
final lOther$$in = other.$in;
if (_$data.containsKey('in') != other._$data.containsKey('in')) {
return false;
}
if (l$$in != null && lOther$$in != null) {
if (l$$in.length != lOther$$in.length) {
return false;
}
for (int i = 0; i < l$$in.length; i++) {
final l$$in$entry = l$$in[i];
final lOther$$in$entry = lOther$$in[i];
if (l$$in$entry != lOther$$in$entry) {
return false;
}
}
} else if (l$$in != lOther$$in) {
return false;
}
return true;
}
@override
int get hashCode {
final l$eq = eq;
final l$gt = gt;
final l$lt = lt;
final l$gte = gte;
final l$lte = lte;
final l$$in = $in;
return Object.hashAll([
_$data.containsKey('eq') ? l$eq : const {},
_$data.containsKey('gt') ? l$gt : const {},
_$data.containsKey('lt') ? l$lt : const {},
_$data.containsKey('gte') ? l$gte : const {},
_$data.containsKey('lte') ? l$lte : const {},
_$data.containsKey('in')
? l$$in == null
? null
: Object.hashAll(l$$in.map((v) => v))
: const {},
]);
}
}
/// Copy-with helper interface for [Input$TorrentPriorityFilter].
///
/// The `stub` factory produces a no-op helper for when there is no
/// instance to copy.
abstract class CopyWith$Input$TorrentPriorityFilter<TRes> {
  factory CopyWith$Input$TorrentPriorityFilter(
    Input$TorrentPriorityFilter instance,
    TRes Function(Input$TorrentPriorityFilter) then,
  ) = _CopyWithImpl$Input$TorrentPriorityFilter;

  factory CopyWith$Input$TorrentPriorityFilter.stub(TRes res) =
      _CopyWithStubImpl$Input$TorrentPriorityFilter;

  TRes call({
    Enum$TorrentPriority? eq,
    Enum$TorrentPriority? gt,
    Enum$TorrentPriority? lt,
    Enum$TorrentPriority? gte,
    Enum$TorrentPriority? lte,
    List<Enum$TorrentPriority>? $in,
  });
}
/// Concrete copy-with for [Input$TorrentPriorityFilter]: rebuilds the
/// underlying data map, overriding only the fields that were actually
/// passed, then feeds the copy through [_then].
class _CopyWithImpl$Input$TorrentPriorityFilter<TRes>
    implements CopyWith$Input$TorrentPriorityFilter<TRes> {
  _CopyWithImpl$Input$TorrentPriorityFilter(
    this._instance,
    this._then,
  );

  final Input$TorrentPriorityFilter _instance;

  final TRes Function(Input$TorrentPriorityFilter) _then;

  // Sentinel distinguishing "argument omitted" from an explicit null.
  static const _undefined = <dynamic, dynamic>{};

  TRes call({
    Object? eq = _undefined,
    Object? gt = _undefined,
    Object? lt = _undefined,
    Object? gte = _undefined,
    Object? lte = _undefined,
    Object? $in = _undefined,
  }) =>
      _then(Input$TorrentPriorityFilter._({
        ..._instance._$data,
        if (eq != _undefined) 'eq': (eq as Enum$TorrentPriority?),
        if (gt != _undefined) 'gt': (gt as Enum$TorrentPriority?),
        if (lt != _undefined) 'lt': (lt as Enum$TorrentPriority?),
        if (gte != _undefined) 'gte': (gte as Enum$TorrentPriority?),
        if (lte != _undefined) 'lte': (lte as Enum$TorrentPriority?),
        if ($in != _undefined) 'in': ($in as List<Enum$TorrentPriority>?),
      }));
}
/// No-op copy-with used when there is no filter instance to copy;
/// every call ignores its arguments and returns the pre-supplied
/// [_res].
class _CopyWithStubImpl$Input$TorrentPriorityFilter<TRes>
    implements CopyWith$Input$TorrentPriorityFilter<TRes> {
  _CopyWithStubImpl$Input$TorrentPriorityFilter(this._res);

  TRes _res;

  call({
    Enum$TorrentPriority? eq,
    Enum$TorrentPriority? gt,
    Enum$TorrentPriority? lt,
    Enum$TorrentPriority? gte,
    Enum$TorrentPriority? lte,
    List<Enum$TorrentPriority>? $in,
  }) =>
      _res;
}
class Input$TorrentsFilter {
factory Input$TorrentsFilter({
Input$StringFilter? infohash,
@ -1040,7 +1316,7 @@ class Input$TorrentsFilter {
Input$IntFilter? bytesCompleted,
Input$IntFilter? bytesMissing,
Input$IntFilter? peersCount,
Input$BooleanFilter? downloading,
Input$TorrentPriorityFilter? priority,
}) =>
Input$TorrentsFilter._({
if (infohash != null) r'infohash': infohash,
@ -1048,7 +1324,7 @@ class Input$TorrentsFilter {
if (bytesCompleted != null) r'bytesCompleted': bytesCompleted,
if (bytesMissing != null) r'bytesMissing': bytesMissing,
if (peersCount != null) r'peersCount': peersCount,
if (downloading != null) r'downloading': downloading,
if (priority != null) r'priority': priority,
});
Input$TorrentsFilter._(this._$data);
@ -1086,12 +1362,12 @@ class Input$TorrentsFilter {
? null
: Input$IntFilter.fromJson((l$peersCount as Map<String, dynamic>));
}
if (data.containsKey('downloading')) {
final l$downloading = data['downloading'];
result$data['downloading'] = l$downloading == null
if (data.containsKey('priority')) {
final l$priority = data['priority'];
result$data['priority'] = l$priority == null
? null
: Input$BooleanFilter.fromJson(
(l$downloading as Map<String, dynamic>));
: Input$TorrentPriorityFilter.fromJson(
(l$priority as Map<String, dynamic>));
}
return Input$TorrentsFilter._(result$data);
}
@ -1111,8 +1387,8 @@ class Input$TorrentsFilter {
Input$IntFilter? get peersCount => (_$data['peersCount'] as Input$IntFilter?);
Input$BooleanFilter? get downloading =>
(_$data['downloading'] as Input$BooleanFilter?);
Input$TorrentPriorityFilter? get priority =>
(_$data['priority'] as Input$TorrentPriorityFilter?);
Map<String, dynamic> toJson() {
final result$data = <String, dynamic>{};
@ -1136,9 +1412,9 @@ class Input$TorrentsFilter {
final l$peersCount = peersCount;
result$data['peersCount'] = l$peersCount?.toJson();
}
if (_$data.containsKey('downloading')) {
final l$downloading = downloading;
result$data['downloading'] = l$downloading?.toJson();
if (_$data.containsKey('priority')) {
final l$priority = priority;
result$data['priority'] = l$priority?.toJson();
}
return result$data;
}
@ -1201,13 +1477,13 @@ class Input$TorrentsFilter {
if (l$peersCount != lOther$peersCount) {
return false;
}
final l$downloading = downloading;
final lOther$downloading = other.downloading;
if (_$data.containsKey('downloading') !=
other._$data.containsKey('downloading')) {
final l$priority = priority;
final lOther$priority = other.priority;
if (_$data.containsKey('priority') !=
other._$data.containsKey('priority')) {
return false;
}
if (l$downloading != lOther$downloading) {
if (l$priority != lOther$priority) {
return false;
}
return true;
@ -1220,14 +1496,14 @@ class Input$TorrentsFilter {
final l$bytesCompleted = bytesCompleted;
final l$bytesMissing = bytesMissing;
final l$peersCount = peersCount;
final l$downloading = downloading;
final l$priority = priority;
return Object.hashAll([
_$data.containsKey('infohash') ? l$infohash : const {},
_$data.containsKey('name') ? l$name : const {},
_$data.containsKey('bytesCompleted') ? l$bytesCompleted : const {},
_$data.containsKey('bytesMissing') ? l$bytesMissing : const {},
_$data.containsKey('peersCount') ? l$peersCount : const {},
_$data.containsKey('downloading') ? l$downloading : const {},
_$data.containsKey('priority') ? l$priority : const {},
]);
}
}
@ -1247,14 +1523,14 @@ abstract class CopyWith$Input$TorrentsFilter<TRes> {
Input$IntFilter? bytesCompleted,
Input$IntFilter? bytesMissing,
Input$IntFilter? peersCount,
Input$BooleanFilter? downloading,
Input$TorrentPriorityFilter? priority,
});
CopyWith$Input$StringFilter<TRes> get infohash;
CopyWith$Input$StringFilter<TRes> get name;
CopyWith$Input$IntFilter<TRes> get bytesCompleted;
CopyWith$Input$IntFilter<TRes> get bytesMissing;
CopyWith$Input$IntFilter<TRes> get peersCount;
CopyWith$Input$BooleanFilter<TRes> get downloading;
CopyWith$Input$TorrentPriorityFilter<TRes> get priority;
}
class _CopyWithImpl$Input$TorrentsFilter<TRes>
@ -1276,7 +1552,7 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
Object? bytesCompleted = _undefined,
Object? bytesMissing = _undefined,
Object? peersCount = _undefined,
Object? downloading = _undefined,
Object? priority = _undefined,
}) =>
_then(Input$TorrentsFilter._({
..._instance._$data,
@ -1289,8 +1565,8 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
'bytesMissing': (bytesMissing as Input$IntFilter?),
if (peersCount != _undefined)
'peersCount': (peersCount as Input$IntFilter?),
if (downloading != _undefined)
'downloading': (downloading as Input$BooleanFilter?),
if (priority != _undefined)
'priority': (priority as Input$TorrentPriorityFilter?),
}));
CopyWith$Input$StringFilter<TRes> get infohash {
@ -1331,12 +1607,12 @@ class _CopyWithImpl$Input$TorrentsFilter<TRes>
local$peersCount, (e) => call(peersCount: e));
}
CopyWith$Input$BooleanFilter<TRes> get downloading {
final local$downloading = _instance.downloading;
return local$downloading == null
? CopyWith$Input$BooleanFilter.stub(_then(_instance))
: CopyWith$Input$BooleanFilter(
local$downloading, (e) => call(downloading: e));
CopyWith$Input$TorrentPriorityFilter<TRes> get priority {
final local$priority = _instance.priority;
return local$priority == null
? CopyWith$Input$TorrentPriorityFilter.stub(_then(_instance))
: CopyWith$Input$TorrentPriorityFilter(
local$priority, (e) => call(priority: e));
}
}
@ -1352,7 +1628,7 @@ class _CopyWithStubImpl$Input$TorrentsFilter<TRes>
Input$IntFilter? bytesCompleted,
Input$IntFilter? bytesMissing,
Input$IntFilter? peersCount,
Input$BooleanFilter? downloading,
Input$TorrentPriorityFilter? priority,
}) =>
_res;
@ -1371,8 +1647,56 @@ class _CopyWithStubImpl$Input$TorrentsFilter<TRes>
CopyWith$Input$IntFilter<TRes> get peersCount =>
CopyWith$Input$IntFilter.stub(_res);
CopyWith$Input$BooleanFilter<TRes> get downloading =>
CopyWith$Input$BooleanFilter.stub(_res);
CopyWith$Input$TorrentPriorityFilter<TRes> get priority =>
CopyWith$Input$TorrentPriorityFilter.stub(_res);
}
/// Torrent download priority, mirroring the server-side
/// `TorrentPriority` GraphQL enum.
///
/// `$unknown` is a client-only fallback produced by
/// [fromJson$Enum$TorrentPriority] when the server sends a value this
/// build does not recognize.
enum Enum$TorrentPriority {
  NONE,
  NORMAL,
  HIGH,
  READAHEAD,
  NOW,
  $unknown;

  factory Enum$TorrentPriority.fromJson(String value) =>
      fromJson$Enum$TorrentPriority(value);

  String toJson() => toJson$Enum$TorrentPriority(this);
}
/// Serializes [e] into the string form used on the GraphQL wire.
///
/// `$unknown` serializes to the literal string `$unknown`; it only
/// exists as a client-side fallback and is never sent by the server.
String toJson$Enum$TorrentPriority(Enum$TorrentPriority e) {
  const wireNames = <Enum$TorrentPriority, String>{
    Enum$TorrentPriority.NONE: r'NONE',
    Enum$TorrentPriority.NORMAL: r'NORMAL',
    Enum$TorrentPriority.HIGH: r'HIGH',
    Enum$TorrentPriority.READAHEAD: r'READAHEAD',
    Enum$TorrentPriority.NOW: r'NOW',
    Enum$TorrentPriority.$unknown: r'$unknown',
  };
  // Every enum member has an entry above, so the lookup cannot miss.
  return wireNames[e]!;
}
/// Parses the GraphQL wire name [value] into an [Enum$TorrentPriority].
///
/// Any unrecognized value maps to [Enum$TorrentPriority.$unknown], so a
/// server that grows new priorities does not break deserialization.
Enum$TorrentPriority fromJson$Enum$TorrentPriority(String value) {
  const byWireName = <String, Enum$TorrentPriority>{
    r'NONE': Enum$TorrentPriority.NONE,
    r'NORMAL': Enum$TorrentPriority.NORMAL,
    r'HIGH': Enum$TorrentPriority.HIGH,
    r'READAHEAD': Enum$TorrentPriority.READAHEAD,
    r'NOW': Enum$TorrentPriority.NOW,
  };
  return byWireName[value] ?? Enum$TorrentPriority.$unknown;
}
enum Enum$__TypeKind {

View file

@ -1,25 +1,21 @@
mutation MarkTorrentDownload($infohash: String!) {
downloadTorrent(infohash: $infohash) {
task {
id
}
mutation MarkTorrentDownload($infohash: String!, $priority: TorrentPriority! = NORMAL) {
torrentDaemon {
setTorrentPriority(infohash: $infohash, priority: $priority)
}
}
query ListTorrents($downloading: Boolean) {
torrents(filter: {
downloading: {
eq: $downloading
}
}) {
name
infohash
bytesCompleted
bytesMissing
peers {
ip
downloadRate
clientName
query ListTorrents {
torrentDaemon {
torrents {
name
infohash
bytesCompleted
bytesMissing
peers {
ip
downloadRate
clientName
}
}
}
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,13 @@
# Fetches the torrent daemon's aggregated transfer statistics sampled
# since the given timestamp — byte counters and peer counts per sample —
# suitable for plotting rate/peer history.
query TorrentTotalStats($since: DateTime!) {
  torrentDaemon {
    statsHistory(since: $since) {
      timestamp
      downloadedBytes
      uploadedBytes
      totalPeers
      activePeers
      connectedSeeders
    }
  }
}

View file

@ -0,0 +1,901 @@
// ignore_for_file: type=lint
import 'dart:async';
import 'package:flutter/widgets.dart' as widgets;
import 'package:gql/ast.dart';
import 'package:graphql/client.dart' as graphql;
import 'package:graphql_flutter/graphql_flutter.dart' as graphql_flutter;
/// Typed variables for the `TorrentTotalStats` query.
///
/// Wraps the single required `since` variable and handles its
/// (de)serialization to/from the ISO-8601 string form used on the wire.
class Variables$Query$TorrentTotalStats {
  factory Variables$Query$TorrentTotalStats({required DateTime since}) =>
      Variables$Query$TorrentTotalStats._({
        r'since': since,
      });

  Variables$Query$TorrentTotalStats._(this._$data);

  factory Variables$Query$TorrentTotalStats.fromJson(
      Map<String, dynamic> data) {
    final result$data = <String, dynamic>{};
    final l$since = data['since'];
    // The wire format carries DateTime values as ISO-8601 strings.
    result$data['since'] = DateTime.parse((l$since as String));
    return Variables$Query$TorrentTotalStats._(result$data);
  }

  // Backing store for all variables, keyed by GraphQL variable name.
  Map<String, dynamic> _$data;

  DateTime get since => (_$data['since'] as DateTime);

  Map<String, dynamic> toJson() {
    final result$data = <String, dynamic>{};
    final l$since = since;
    result$data['since'] = l$since.toIso8601String();
    return result$data;
  }

  CopyWith$Variables$Query$TorrentTotalStats<Variables$Query$TorrentTotalStats>
      get copyWith => CopyWith$Variables$Query$TorrentTotalStats(
            this,
            (i) => i,
          );

  @override
  bool operator ==(Object other) {
    if (identical(this, other)) {
      return true;
    }
    if (!(other is Variables$Query$TorrentTotalStats) ||
        runtimeType != other.runtimeType) {
      return false;
    }
    final l$since = since;
    final lOther$since = other.since;
    if (l$since != lOther$since) {
      return false;
    }
    return true;
  }

  @override
  int get hashCode {
    final l$since = since;
    return Object.hashAll([l$since]);
  }
}
/// Copy-with helper interface for [Variables$Query$TorrentTotalStats];
/// the `stub` factory produces a no-op helper when there is no instance
/// to copy.
abstract class CopyWith$Variables$Query$TorrentTotalStats<TRes> {
  factory CopyWith$Variables$Query$TorrentTotalStats(
    Variables$Query$TorrentTotalStats instance,
    TRes Function(Variables$Query$TorrentTotalStats) then,
  ) = _CopyWithImpl$Variables$Query$TorrentTotalStats;

  factory CopyWith$Variables$Query$TorrentTotalStats.stub(TRes res) =
      _CopyWithStubImpl$Variables$Query$TorrentTotalStats;

  TRes call({DateTime? since});
}
/// Concrete copy-with: rebuilds the variables map with any overridden
/// fields, then passes the copy through [_then].
class _CopyWithImpl$Variables$Query$TorrentTotalStats<TRes>
    implements CopyWith$Variables$Query$TorrentTotalStats<TRes> {
  _CopyWithImpl$Variables$Query$TorrentTotalStats(
    this._instance,
    this._then,
  );

  final Variables$Query$TorrentTotalStats _instance;

  final TRes Function(Variables$Query$TorrentTotalStats) _then;

  // Sentinel distinguishing "argument omitted" from an explicit null.
  static const _undefined = <dynamic, dynamic>{};

  TRes call({Object? since = _undefined}) =>
      _then(Variables$Query$TorrentTotalStats._({
        ..._instance._$data,
        if (since != _undefined && since != null) 'since': (since as DateTime),
      }));
}
/// No-op copy-with used when there is no variables instance to copy;
/// every call returns the pre-supplied [_res].
class _CopyWithStubImpl$Variables$Query$TorrentTotalStats<TRes>
    implements CopyWith$Variables$Query$TorrentTotalStats<TRes> {
  _CopyWithStubImpl$Variables$Query$TorrentTotalStats(this._res);

  TRes _res;

  call({DateTime? since}) => _res;
}
class Query$TorrentTotalStats {
Query$TorrentTotalStats({
this.torrentDaemon,
this.$__typename = 'Query',
});
factory Query$TorrentTotalStats.fromJson(Map<String, dynamic> json) {
final l$torrentDaemon = json['torrentDaemon'];
final l$$__typename = json['__typename'];
return Query$TorrentTotalStats(
torrentDaemon: l$torrentDaemon == null
? null
: Query$TorrentTotalStats$torrentDaemon.fromJson(
(l$torrentDaemon as Map<String, dynamic>)),
$__typename: (l$$__typename as String),
);
}
final Query$TorrentTotalStats$torrentDaemon? torrentDaemon;
final String $__typename;
Map<String, dynamic> toJson() {
final _resultData = <String, dynamic>{};
final l$torrentDaemon = torrentDaemon;
_resultData['torrentDaemon'] = l$torrentDaemon?.toJson();
final l$$__typename = $__typename;
_resultData['__typename'] = l$$__typename;
return _resultData;
}
@override
int get hashCode {
final l$torrentDaemon = torrentDaemon;
final l$$__typename = $__typename;
return Object.hashAll([
l$torrentDaemon,
l$$__typename,
]);
}
@override
bool operator ==(Object other) {
if (identical(this, other)) {
return true;
}
if (!(other is Query$TorrentTotalStats) ||
runtimeType != other.runtimeType) {
return false;
}
final l$torrentDaemon = torrentDaemon;
final lOther$torrentDaemon = other.torrentDaemon;
if (l$torrentDaemon != lOther$torrentDaemon) {
return false;
}
final l$$__typename = $__typename;
final lOther$$__typename = other.$__typename;
if (l$$__typename != lOther$$__typename) {
return false;
}
return true;
}
}
/// Convenience accessor exposing [copyWith] directly on the query
/// result type.
extension UtilityExtension$Query$TorrentTotalStats on Query$TorrentTotalStats {
  CopyWith$Query$TorrentTotalStats<Query$TorrentTotalStats> get copyWith =>
      CopyWith$Query$TorrentTotalStats(
        this,
        (i) => i,
      );
}
abstract class CopyWith$Query$TorrentTotalStats<TRes> {
factory CopyWith$Query$TorrentTotalStats(
Query$TorrentTotalStats instance,
TRes Function(Query$TorrentTotalStats) then,
) = _CopyWithImpl$Query$TorrentTotalStats;
factory CopyWith$Query$TorrentTotalStats.stub(TRes res) =
_CopyWithStubImpl$Query$TorrentTotalStats;
TRes call({
Query$TorrentTotalStats$torrentDaemon? torrentDaemon,
String? $__typename,
});
CopyWith$Query$TorrentTotalStats$torrentDaemon<TRes> get torrentDaemon;
}
class _CopyWithImpl$Query$TorrentTotalStats<TRes>
implements CopyWith$Query$TorrentTotalStats<TRes> {
_CopyWithImpl$Query$TorrentTotalStats(
this._instance,
this._then,
);
final Query$TorrentTotalStats _instance;
final TRes Function(Query$TorrentTotalStats) _then;
static const _undefined = <dynamic, dynamic>{};
TRes call({
Object? torrentDaemon = _undefined,
Object? $__typename = _undefined,
}) =>
_then(Query$TorrentTotalStats(
torrentDaemon: torrentDaemon == _undefined
? _instance.torrentDaemon
: (torrentDaemon as Query$TorrentTotalStats$torrentDaemon?),
$__typename: $__typename == _undefined || $__typename == null
? _instance.$__typename
: ($__typename as String),
));
CopyWith$Query$TorrentTotalStats$torrentDaemon<TRes> get torrentDaemon {
final local$torrentDaemon = _instance.torrentDaemon;
return local$torrentDaemon == null
? CopyWith$Query$TorrentTotalStats$torrentDaemon.stub(_then(_instance))
: CopyWith$Query$TorrentTotalStats$torrentDaemon(
local$torrentDaemon, (e) => call(torrentDaemon: e));
}
}
class _CopyWithStubImpl$Query$TorrentTotalStats<TRes>
implements CopyWith$Query$TorrentTotalStats<TRes> {
_CopyWithStubImpl$Query$TorrentTotalStats(this._res);
TRes _res;
call({
Query$TorrentTotalStats$torrentDaemon? torrentDaemon,
String? $__typename,
}) =>
_res;
CopyWith$Query$TorrentTotalStats$torrentDaemon<TRes> get torrentDaemon =>
CopyWith$Query$TorrentTotalStats$torrentDaemon.stub(_res);
}
const documentNodeQueryTorrentTotalStats = DocumentNode(definitions: [
OperationDefinitionNode(
type: OperationType.query,
name: NameNode(value: 'TorrentTotalStats'),
variableDefinitions: [
VariableDefinitionNode(
variable: VariableNode(name: NameNode(value: 'since')),
type: NamedTypeNode(
name: NameNode(value: 'DateTime'),
isNonNull: true,
),
defaultValue: DefaultValueNode(value: null),
directives: [],
)
],
directives: [],
selectionSet: SelectionSetNode(selections: [
FieldNode(
name: NameNode(value: 'torrentDaemon'),
alias: null,
arguments: [],
directives: [],
selectionSet: SelectionSetNode(selections: [
FieldNode(
name: NameNode(value: 'statsHistory'),
alias: null,
arguments: [
ArgumentNode(
name: NameNode(value: 'since'),
value: VariableNode(name: NameNode(value: 'since')),
)
],
directives: [],
selectionSet: SelectionSetNode(selections: [
FieldNode(
name: NameNode(value: 'timestamp'),
alias: null,
arguments: [],
directives: [],
selectionSet: null,
),
FieldNode(
name: NameNode(value: 'downloadedBytes'),
alias: null,
arguments: [],
directives: [],
selectionSet: null,
),
FieldNode(
name: NameNode(value: 'uploadedBytes'),
alias: null,
arguments: [],
directives: [],
selectionSet: null,
),
FieldNode(
name: NameNode(value: 'totalPeers'),
alias: null,
arguments: [],
directives: [],
selectionSet: null,
),
FieldNode(
name: NameNode(value: 'activePeers'),
alias: null,
arguments: [],
directives: [],
selectionSet: null,
),
FieldNode(
name: NameNode(value: 'connectedSeeders'),
alias: null,
arguments: [],
directives: [],
selectionSet: null,
),
FieldNode(
name: NameNode(value: '__typename'),
alias: null,
arguments: [],
directives: [],
selectionSet: null,
),
]),
),
FieldNode(
name: NameNode(value: '__typename'),
alias: null,
arguments: [],
directives: [],
selectionSet: null,
),
]),
),
FieldNode(
name: NameNode(value: '__typename'),
alias: null,
arguments: [],
directives: [],
selectionSet: null,
),
]),
),
]);
/// Parses a raw GraphQL response map into a typed
/// [Query$TorrentTotalStats].
Query$TorrentTotalStats _parserFn$Query$TorrentTotalStats(
        Map<String, dynamic> data) =>
    Query$TorrentTotalStats.fromJson(data);
/// Completion callback receiving both the raw response map and (when
/// the raw data is non-null) its parsed typed form.
typedef OnQueryComplete$Query$TorrentTotalStats = FutureOr<void> Function(
  Map<String, dynamic>?,
  Query$TorrentTotalStats?,
);
class Options$Query$TorrentTotalStats
extends graphql.QueryOptions<Query$TorrentTotalStats> {
Options$Query$TorrentTotalStats({
String? operationName,
required Variables$Query$TorrentTotalStats variables,
graphql.FetchPolicy? fetchPolicy,
graphql.ErrorPolicy? errorPolicy,
graphql.CacheRereadPolicy? cacheRereadPolicy,
Object? optimisticResult,
Query$TorrentTotalStats? typedOptimisticResult,
Duration? pollInterval,
graphql.Context? context,
OnQueryComplete$Query$TorrentTotalStats? onComplete,
graphql.OnQueryError? onError,
}) : onCompleteWithParsed = onComplete,
super(
variables: variables.toJson(),
operationName: operationName,
fetchPolicy: fetchPolicy,
errorPolicy: errorPolicy,
cacheRereadPolicy: cacheRereadPolicy,
optimisticResult: optimisticResult ?? typedOptimisticResult?.toJson(),
pollInterval: pollInterval,
context: context,
onComplete: onComplete == null
? null
: (data) => onComplete(
data,
data == null
? null
: _parserFn$Query$TorrentTotalStats(data),
),
onError: onError,
document: documentNodeQueryTorrentTotalStats,
parserFn: _parserFn$Query$TorrentTotalStats,
);
final OnQueryComplete$Query$TorrentTotalStats? onCompleteWithParsed;
@override
List<Object?> get properties => [
...super.onComplete == null
? super.properties
: super.properties.where((property) => property != onComplete),
onCompleteWithParsed,
];
}
class WatchOptions$Query$TorrentTotalStats
extends graphql.WatchQueryOptions<Query$TorrentTotalStats> {
WatchOptions$Query$TorrentTotalStats({
String? operationName,
required Variables$Query$TorrentTotalStats variables,
graphql.FetchPolicy? fetchPolicy,
graphql.ErrorPolicy? errorPolicy,
graphql.CacheRereadPolicy? cacheRereadPolicy,
Object? optimisticResult,
Query$TorrentTotalStats? typedOptimisticResult,
graphql.Context? context,
Duration? pollInterval,
bool? eagerlyFetchResults,
bool carryForwardDataOnException = true,
bool fetchResults = false,
}) : super(
variables: variables.toJson(),
operationName: operationName,
fetchPolicy: fetchPolicy,
errorPolicy: errorPolicy,
cacheRereadPolicy: cacheRereadPolicy,
optimisticResult: optimisticResult ?? typedOptimisticResult?.toJson(),
context: context,
document: documentNodeQueryTorrentTotalStats,
pollInterval: pollInterval,
eagerlyFetchResults: eagerlyFetchResults,
carryForwardDataOnException: carryForwardDataOnException,
fetchResults: fetchResults,
parserFn: _parserFn$Query$TorrentTotalStats,
);
}
/// `fetchMore` options preconfigured with the TorrentTotalStats
/// document, so callers only supply the variables and the merge
/// strategy ([updateQuery]).
class FetchMoreOptions$Query$TorrentTotalStats
    extends graphql.FetchMoreOptions {
  FetchMoreOptions$Query$TorrentTotalStats({
    required graphql.UpdateQuery updateQuery,
    required Variables$Query$TorrentTotalStats variables,
  }) : super(
          updateQuery: updateQuery,
          variables: variables.toJson(),
          document: documentNodeQueryTorrentTotalStats,
        );
}
extension ClientExtension$Query$TorrentTotalStats on graphql.GraphQLClient {
Future<graphql.QueryResult<Query$TorrentTotalStats>> query$TorrentTotalStats(
Options$Query$TorrentTotalStats options) async =>
await this.query(options);
graphql.ObservableQuery<Query$TorrentTotalStats> watchQuery$TorrentTotalStats(
WatchOptions$Query$TorrentTotalStats options) =>
this.watchQuery(options);
void writeQuery$TorrentTotalStats({
required Query$TorrentTotalStats data,
required Variables$Query$TorrentTotalStats variables,
bool broadcast = true,
}) =>
this.writeQuery(
graphql.Request(
operation:
graphql.Operation(document: documentNodeQueryTorrentTotalStats),
variables: variables.toJson(),
),
data: data.toJson(),
broadcast: broadcast,
);
Query$TorrentTotalStats? readQuery$TorrentTotalStats({
required Variables$Query$TorrentTotalStats variables,
bool optimistic = true,
}) {
final result = this.readQuery(
graphql.Request(
operation:
graphql.Operation(document: documentNodeQueryTorrentTotalStats),
variables: variables.toJson(),
),
optimistic: optimistic,
);
return result == null ? null : Query$TorrentTotalStats.fromJson(result);
}
}
/// Flutter hook that runs the TorrentTotalStats query with the given
/// [options].
graphql_flutter.QueryHookResult<Query$TorrentTotalStats>
    useQuery$TorrentTotalStats(Options$Query$TorrentTotalStats options) =>
        graphql_flutter.useQuery(options);
/// Flutter hook that creates a watchable (observable) TorrentTotalStats
/// query with the given [options].
graphql.ObservableQuery<Query$TorrentTotalStats>
    useWatchQuery$TorrentTotalStats(
            WatchOptions$Query$TorrentTotalStats options) =>
        graphql_flutter.useWatchQuery(options);
/// `Query` widget wired to the TorrentTotalStats operation; [builder]
/// receives the typed result.
class Query$TorrentTotalStats$Widget
    extends graphql_flutter.Query<Query$TorrentTotalStats> {
  Query$TorrentTotalStats$Widget({
    widgets.Key? key,
    required Options$Query$TorrentTotalStats options,
    required graphql_flutter.QueryBuilder<Query$TorrentTotalStats> builder,
  }) : super(
          key: key,
          options: options,
          builder: builder,
        );
}
/// Model for the `torrentDaemon` selection of the TorrentTotalStats query.
/// Generated-style GraphQL data class (graphql_codegen pattern); prefer
/// editing the .graphql document over hand-editing this class.
class Query$TorrentTotalStats$torrentDaemon {
  Query$TorrentTotalStats$torrentDaemon({
    required this.statsHistory,
    this.$__typename = 'TorrentDaemonQuery',
  });

  /// Decodes a GraphQL response map. Unconditional casts mean a schema
  /// mismatch surfaces as a TypeError at decode time.
  factory Query$TorrentTotalStats$torrentDaemon.fromJson(
      Map<String, dynamic> json) {
    final l$statsHistory = json['statsHistory'];
    final l$$__typename = json['__typename'];
    return Query$TorrentTotalStats$torrentDaemon(
      statsHistory: (l$statsHistory as List<dynamic>)
          .map((e) =>
              Query$TorrentTotalStats$torrentDaemon$statsHistory.fromJson(
                  (e as Map<String, dynamic>)))
          .toList(),
      $__typename: (l$$__typename as String),
    );
  }

  /// Stats samples as delivered by the server (order preserved).
  final List<Query$TorrentTotalStats$torrentDaemon$statsHistory> statsHistory;

  final String $__typename;

  /// Serializes back to the same wire shape [fromJson] accepts.
  Map<String, dynamic> toJson() {
    final _resultData = <String, dynamic>{};
    final l$statsHistory = statsHistory;
    _resultData['statsHistory'] =
        l$statsHistory.map((e) => e.toJson()).toList();
    final l$$__typename = $__typename;
    _resultData['__typename'] = l$$__typename;
    return _resultData;
  }

  // Deep hash over the list elements plus __typename, consistent with the
  // element-wise comparison in operator ==.
  @override
  int get hashCode {
    final l$statsHistory = statsHistory;
    final l$$__typename = $__typename;
    return Object.hashAll([
      Object.hashAll(l$statsHistory.map((v) => v)),
      l$$__typename,
    ]);
  }

  // Deep structural equality: same runtime type, same list length, equal
  // elements in order, and equal __typename.
  @override
  bool operator ==(Object other) {
    if (identical(this, other)) {
      return true;
    }
    if (!(other is Query$TorrentTotalStats$torrentDaemon) ||
        runtimeType != other.runtimeType) {
      return false;
    }
    final l$statsHistory = statsHistory;
    final lOther$statsHistory = other.statsHistory;
    if (l$statsHistory.length != lOther$statsHistory.length) {
      return false;
    }
    for (int i = 0; i < l$statsHistory.length; i++) {
      final l$statsHistory$entry = l$statsHistory[i];
      final lOther$statsHistory$entry = lOther$statsHistory[i];
      if (l$statsHistory$entry != lOther$statsHistory$entry) {
        return false;
      }
    }
    final l$$__typename = $__typename;
    final lOther$$__typename = other.$__typename;
    if (l$$__typename != lOther$$__typename) {
      return false;
    }
    return true;
  }
}
/// Adds a `copyWith` entry point to [Query$TorrentTotalStats$torrentDaemon].
extension UtilityExtension$Query$TorrentTotalStats$torrentDaemon
    on Query$TorrentTotalStats$torrentDaemon {
  /// Returns a copy-with helper whose terminal callback hands back the
  /// freshly built instance unchanged.
  CopyWith$Query$TorrentTotalStats$torrentDaemon<
      Query$TorrentTotalStats$torrentDaemon> get copyWith {
    return CopyWith$Query$TorrentTotalStats$torrentDaemon(this, (i) => i);
  }
}
/// copyWith contract for [Query$TorrentTotalStats$torrentDaemon]. The main
/// factory wraps a live instance; the `.stub` factory yields a helper whose
/// every method returns a fixed result (used when no instance exists).
abstract class CopyWith$Query$TorrentTotalStats$torrentDaemon<TRes> {
  factory CopyWith$Query$TorrentTotalStats$torrentDaemon(
    Query$TorrentTotalStats$torrentDaemon instance,
    TRes Function(Query$TorrentTotalStats$torrentDaemon) then,
  ) = _CopyWithImpl$Query$TorrentTotalStats$torrentDaemon;

  factory CopyWith$Query$TorrentTotalStats$torrentDaemon.stub(TRes res) =
      _CopyWithStubImpl$Query$TorrentTotalStats$torrentDaemon;

  /// Copy, replacing any argument that is passed (see impl for the
  /// omitted-vs-null distinction).
  TRes call({
    List<Query$TorrentTotalStats$torrentDaemon$statsHistory>? statsHistory,
    String? $__typename,
  });

  /// Copy, transforming the statsHistory list element-wise through their
  /// own copy-with helpers.
  TRes statsHistory(
      Iterable<Query$TorrentTotalStats$torrentDaemon$statsHistory> Function(
              Iterable<
                  CopyWith$Query$TorrentTotalStats$torrentDaemon$statsHistory<
                      Query$TorrentTotalStats$torrentDaemon$statsHistory>>)
          _fn);
}
/// Working copyWith implementation: builds a modified copy of [_instance]
/// and feeds it through [_then] (identity at the top level, or a parent
/// setter when nested).
class _CopyWithImpl$Query$TorrentTotalStats$torrentDaemon<TRes>
    implements CopyWith$Query$TorrentTotalStats$torrentDaemon<TRes> {
  _CopyWithImpl$Query$TorrentTotalStats$torrentDaemon(
    this._instance,
    this._then,
  );

  final Query$TorrentTotalStats$torrentDaemon _instance;

  final TRes Function(Query$TorrentTotalStats$torrentDaemon) _then;

  // Sentinel default distinguishing "argument omitted" from an explicit
  // null; compared by identity via ==.
  static const _undefined = <dynamic, dynamic>{};

  TRes call({
    Object? statsHistory = _undefined,
    Object? $__typename = _undefined,
  }) =>
      _then(Query$TorrentTotalStats$torrentDaemon(
        // Omitted or null keeps the existing value; otherwise the argument
        // is downcast to the field's real type.
        statsHistory: statsHistory == _undefined || statsHistory == null
            ? _instance.statsHistory
            : (statsHistory
                as List<Query$TorrentTotalStats$torrentDaemon$statsHistory>),
        $__typename: $__typename == _undefined || $__typename == null
            ? _instance.$__typename
            : ($__typename as String),
      ));

  // Wraps each element in its own copy-with helper, lets the caller map the
  // whole iterable, then stores the materialized result via call().
  TRes statsHistory(
          Iterable<Query$TorrentTotalStats$torrentDaemon$statsHistory> Function(
                  Iterable<
                      CopyWith$Query$TorrentTotalStats$torrentDaemon$statsHistory<
                          Query$TorrentTotalStats$torrentDaemon$statsHistory>>)
              _fn) =>
      call(
          statsHistory: _fn(_instance.statsHistory.map((e) =>
              CopyWith$Query$TorrentTotalStats$torrentDaemon$statsHistory(
                e,
                (i) => i,
              ))).toList());
}
/// No-op copyWith stub: every method ignores its arguments and yields the
/// pre-supplied result. Used when there is no source instance to copy.
class _CopyWithStubImpl$Query$TorrentTotalStats$torrentDaemon<TRes>
    implements CopyWith$Query$TorrentTotalStats$torrentDaemon<TRes> {
  _CopyWithStubImpl$Query$TorrentTotalStats$torrentDaemon(this._res);

  TRes _res;

  call({
    List<Query$TorrentTotalStats$torrentDaemon$statsHistory>? statsHistory,
    String? $__typename,
  }) {
    return _res;
  }

  statsHistory(_fn) {
    return _res;
  }
}
/// One sampled stats point of the torrent daemon: byte counters and peer
/// counts at [timestamp]. Generated-style GraphQL data class.
class Query$TorrentTotalStats$torrentDaemon$statsHistory {
  Query$TorrentTotalStats$torrentDaemon$statsHistory({
    required this.timestamp,
    required this.downloadedBytes,
    required this.uploadedBytes,
    required this.totalPeers,
    required this.activePeers,
    required this.connectedSeeders,
    this.$__typename = 'TorrentStats',
  });

  /// Decodes a GraphQL response map; timestamp arrives as an ISO-8601
  /// string. Unconditional casts throw on schema mismatch.
  factory Query$TorrentTotalStats$torrentDaemon$statsHistory.fromJson(
      Map<String, dynamic> json) {
    final l$timestamp = json['timestamp'];
    final l$downloadedBytes = json['downloadedBytes'];
    final l$uploadedBytes = json['uploadedBytes'];
    final l$totalPeers = json['totalPeers'];
    final l$activePeers = json['activePeers'];
    final l$connectedSeeders = json['connectedSeeders'];
    final l$$__typename = json['__typename'];
    return Query$TorrentTotalStats$torrentDaemon$statsHistory(
      timestamp: DateTime.parse((l$timestamp as String)),
      downloadedBytes: (l$downloadedBytes as int),
      uploadedBytes: (l$uploadedBytes as int),
      totalPeers: (l$totalPeers as int),
      activePeers: (l$activePeers as int),
      connectedSeeders: (l$connectedSeeders as int),
      $__typename: (l$$__typename as String),
    );
  }

  /// Sample time, parsed from the server's ISO-8601 string.
  final DateTime timestamp;

  final int downloadedBytes;

  final int uploadedBytes;

  final int totalPeers;

  final int activePeers;

  final int connectedSeeders;

  final String $__typename;

  /// Serializes back to the wire shape [fromJson] accepts; timestamp is
  /// re-encoded with toIso8601String().
  Map<String, dynamic> toJson() {
    final _resultData = <String, dynamic>{};
    final l$timestamp = timestamp;
    _resultData['timestamp'] = l$timestamp.toIso8601String();
    final l$downloadedBytes = downloadedBytes;
    _resultData['downloadedBytes'] = l$downloadedBytes;
    final l$uploadedBytes = uploadedBytes;
    _resultData['uploadedBytes'] = l$uploadedBytes;
    final l$totalPeers = totalPeers;
    _resultData['totalPeers'] = l$totalPeers;
    final l$activePeers = activePeers;
    _resultData['activePeers'] = l$activePeers;
    final l$connectedSeeders = connectedSeeders;
    _resultData['connectedSeeders'] = l$connectedSeeders;
    final l$$__typename = $__typename;
    _resultData['__typename'] = l$$__typename;
    return _resultData;
  }

  // Hash over every field, matching the field-by-field operator ==.
  @override
  int get hashCode {
    final l$timestamp = timestamp;
    final l$downloadedBytes = downloadedBytes;
    final l$uploadedBytes = uploadedBytes;
    final l$totalPeers = totalPeers;
    final l$activePeers = activePeers;
    final l$connectedSeeders = connectedSeeders;
    final l$$__typename = $__typename;
    return Object.hashAll([
      l$timestamp,
      l$downloadedBytes,
      l$uploadedBytes,
      l$totalPeers,
      l$activePeers,
      l$connectedSeeders,
      l$$__typename,
    ]);
  }

  // Structural equality: same runtime type and all seven fields equal.
  @override
  bool operator ==(Object other) {
    if (identical(this, other)) {
      return true;
    }
    if (!(other is Query$TorrentTotalStats$torrentDaemon$statsHistory) ||
        runtimeType != other.runtimeType) {
      return false;
    }
    final l$timestamp = timestamp;
    final lOther$timestamp = other.timestamp;
    if (l$timestamp != lOther$timestamp) {
      return false;
    }
    final l$downloadedBytes = downloadedBytes;
    final lOther$downloadedBytes = other.downloadedBytes;
    if (l$downloadedBytes != lOther$downloadedBytes) {
      return false;
    }
    final l$uploadedBytes = uploadedBytes;
    final lOther$uploadedBytes = other.uploadedBytes;
    if (l$uploadedBytes != lOther$uploadedBytes) {
      return false;
    }
    final l$totalPeers = totalPeers;
    final lOther$totalPeers = other.totalPeers;
    if (l$totalPeers != lOther$totalPeers) {
      return false;
    }
    final l$activePeers = activePeers;
    final lOther$activePeers = other.activePeers;
    if (l$activePeers != lOther$activePeers) {
      return false;
    }
    final l$connectedSeeders = connectedSeeders;
    final lOther$connectedSeeders = other.connectedSeeders;
    if (l$connectedSeeders != lOther$connectedSeeders) {
      return false;
    }
    final l$$__typename = $__typename;
    final lOther$$__typename = other.$__typename;
    if (l$$__typename != lOther$$__typename) {
      return false;
    }
    return true;
  }
}
/// Adds a `copyWith` entry point to
/// [Query$TorrentTotalStats$torrentDaemon$statsHistory].
extension UtilityExtension$Query$TorrentTotalStats$torrentDaemon$statsHistory
    on Query$TorrentTotalStats$torrentDaemon$statsHistory {
  /// Returns a copy-with helper whose terminal callback hands back the
  /// freshly built instance unchanged.
  CopyWith$Query$TorrentTotalStats$torrentDaemon$statsHistory<
      Query$TorrentTotalStats$torrentDaemon$statsHistory> get copyWith {
    return CopyWith$Query$TorrentTotalStats$torrentDaemon$statsHistory(
      this,
      (i) => i,
    );
  }
}
/// copyWith contract for
/// [Query$TorrentTotalStats$torrentDaemon$statsHistory]. The main factory
/// wraps a live instance; the `.stub` factory yields a helper whose methods
/// return a fixed result.
abstract class CopyWith$Query$TorrentTotalStats$torrentDaemon$statsHistory<
    TRes> {
  factory CopyWith$Query$TorrentTotalStats$torrentDaemon$statsHistory(
    Query$TorrentTotalStats$torrentDaemon$statsHistory instance,
    TRes Function(Query$TorrentTotalStats$torrentDaemon$statsHistory) then,
  ) = _CopyWithImpl$Query$TorrentTotalStats$torrentDaemon$statsHistory;

  factory CopyWith$Query$TorrentTotalStats$torrentDaemon$statsHistory.stub(
          TRes res) =
      _CopyWithStubImpl$Query$TorrentTotalStats$torrentDaemon$statsHistory;

  /// Copy, replacing any argument that is passed (see impl for the
  /// omitted-vs-null distinction).
  TRes call({
    DateTime? timestamp,
    int? downloadedBytes,
    int? uploadedBytes,
    int? totalPeers,
    int? activePeers,
    int? connectedSeeders,
    String? $__typename,
  });
}
/// Working copyWith implementation: builds a modified copy of [_instance]
/// and feeds it through [_then].
class _CopyWithImpl$Query$TorrentTotalStats$torrentDaemon$statsHistory<TRes>
    implements
        CopyWith$Query$TorrentTotalStats$torrentDaemon$statsHistory<TRes> {
  _CopyWithImpl$Query$TorrentTotalStats$torrentDaemon$statsHistory(
    this._instance,
    this._then,
  );

  final Query$TorrentTotalStats$torrentDaemon$statsHistory _instance;

  final TRes Function(Query$TorrentTotalStats$torrentDaemon$statsHistory) _then;

  // Sentinel default distinguishing "argument omitted" from an explicit
  // null.
  static const _undefined = <dynamic, dynamic>{};

  TRes call({
    Object? timestamp = _undefined,
    Object? downloadedBytes = _undefined,
    Object? uploadedBytes = _undefined,
    Object? totalPeers = _undefined,
    Object? activePeers = _undefined,
    Object? connectedSeeders = _undefined,
    Object? $__typename = _undefined,
  }) =>
      _then(Query$TorrentTotalStats$torrentDaemon$statsHistory(
        // Omitted or null keeps the existing value; otherwise the argument
        // is downcast to the field's real type.
        timestamp: timestamp == _undefined || timestamp == null
            ? _instance.timestamp
            : (timestamp as DateTime),
        downloadedBytes:
            downloadedBytes == _undefined || downloadedBytes == null
                ? _instance.downloadedBytes
                : (downloadedBytes as int),
        uploadedBytes: uploadedBytes == _undefined || uploadedBytes == null
            ? _instance.uploadedBytes
            : (uploadedBytes as int),
        totalPeers: totalPeers == _undefined || totalPeers == null
            ? _instance.totalPeers
            : (totalPeers as int),
        activePeers: activePeers == _undefined || activePeers == null
            ? _instance.activePeers
            : (activePeers as int),
        connectedSeeders:
            connectedSeeders == _undefined || connectedSeeders == null
                ? _instance.connectedSeeders
                : (connectedSeeders as int),
        $__typename: $__typename == _undefined || $__typename == null
            ? _instance.$__typename
            : ($__typename as String),
      ));
}
/// No-op copyWith stub: ignores all arguments and yields the pre-supplied
/// result. Used when there is no source instance to copy.
class _CopyWithStubImpl$Query$TorrentTotalStats$torrentDaemon$statsHistory<TRes>
    implements
        CopyWith$Query$TorrentTotalStats$torrentDaemon$statsHistory<TRes> {
  _CopyWithStubImpl$Query$TorrentTotalStats$torrentDaemon$statsHistory(
      this._res);

  TRes _res;

  call({
    DateTime? timestamp,
    int? downloadedBytes,
    int? uploadedBytes,
    int? totalPeers,
    int? activePeers,
    int? connectedSeeders,
    String? $__typename,
  }) {
    return _res;
  }
}

View file

@ -20,10 +20,10 @@ class _DownloadsScreenState extends State<DownloadsScreen> {
child: FutureBuilder(
key: GlobalKey(),
future: client.query$ListTorrents(Options$Query$ListTorrents(
variables: Variables$Query$ListTorrents(downloading: filterDownloading),
)),
// variables: Variables$Query$ListTorrents(downloading: filterDownloading),
)),
builder: (context, snapshot) {
final torrents = snapshot.data?.parsedData?.torrents;
final torrents = snapshot.data?.parsedData?.torrentDaemon?.torrents;
return NestedScrollView(
floatHeaderSlivers: true,
@ -68,7 +68,7 @@ class _DownloadsScreenState extends State<DownloadsScreen> {
}
class TorrentTile extends StatelessWidget {
final Query$ListTorrents$torrents torrent;
final Query$ListTorrents$torrentDaemon$torrents torrent;
const TorrentTile({super.key, required this.torrent});

View file

@ -9,6 +9,7 @@ import 'package:tstor_ui/components/sliver_header.dart';
import 'package:tstor_ui/font/t_icons_icons.dart';
import 'package:path/path.dart' as p;
import 'package:tstor_ui/screens/torrent_stats.dart';
import 'package:tstor_ui/utils/bytes.dart';
class FileViewScreen extends StatefulWidget {
@ -88,6 +89,13 @@ class _FileViewScreenState extends State<FileViewScreen> {
icon: const Icon(Icons.arrow_upward),
),
actions: [
IconButton(
onPressed: () => Navigator.push(
context,
MaterialPageRoute(builder: (context) => const TorrentStatsScreen()),
),
icon: const Icon(Icons.trending_up),
),
IconButton(
icon: const Icon(Icons.refresh),
onPressed: refresh,

View file

@ -0,0 +1,62 @@
import 'package:fl_chart/fl_chart.dart';
import 'package:flutter/material.dart';
import 'package:graphql/client.dart';
import 'package:tstor_ui/api/client.dart';
import 'package:tstor_ui/api/torrent_stats.graphql.dart';
/// Screen that renders daemon-wide torrent statistics as a line chart.
class TorrentStatsScreen extends StatefulWidget {
  const TorrentStatsScreen({super.key});

  @override
  State<TorrentStatsScreen> createState() {
    return _TorrentStatsScreenState();
  }
}
class _TorrentStatsScreenState extends State<TorrentStatsScreen> {
  /// Fetches the daemon-wide stats history for the last hour and converts
  /// it into [LineChartData] with one line: active peers over time.
  Future<LineChartData> _totalStats() async {
    final since = DateTime.now().subtract(const Duration(hours: 1));
    final data = await client.query$TorrentTotalStats(
      Options$Query$TorrentTotalStats(
        variables: Variables$Query$TorrentTotalStats(
          since: since,
        ),
        // Always hit the server: cached history would hide fresh samples.
        fetchPolicy: FetchPolicy.networkOnly,
      ),
    );

    return LineChartData(
      lineBarsData: [
        LineChartBarData(
          spots: data.parsedData!.torrentDaemon!.statsHistory
              .map(
                (e) => FlSpot(
                  // NOTE(review): since precedes e.timestamp, so this value
                  // is negative and the x axis runs right-to-left in time —
                  // confirm whether e.timestamp.difference(since) was meant.
                  since.difference(e.timestamp).inSeconds.toDouble(),
                  e.activePeers.toDouble(),
                ),
              )
              .toList(),
        ),
      ],
    );
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text("Torrent Stats"),
      ),
      body: FutureBuilder(
        future: _totalStats(),
        builder: (context, snapshot) {
          // Surface query failures; previously a failed future left the
          // spinner showing forever because only hasData was checked.
          if (snapshot.hasError) {
            return Center(
              child: Text("Error: ${snapshot.error}"),
            );
          }
          if (!snapshot.hasData) {
            return const Center(
              child: CircularProgressIndicator(),
            );
          }
          return LineChart(snapshot.data!);
        },
      ),
    );
  }
}

View file

@ -10,7 +10,7 @@ import dynamic_color
import path_provider_foundation
func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) {
ConnectivityPlugin.register(with: registry.registrar(forPlugin: "ConnectivityPlugin"))
ConnectivityPlusPlugin.register(with: registry.registrar(forPlugin: "ConnectivityPlusPlugin"))
DynamicColorPlugin.register(with: registry.registrar(forPlugin: "DynamicColorPlugin"))
PathProviderPlugin.register(with: registry.registrar(forPlugin: "PathProviderPlugin"))
}

View file

@ -5,18 +5,23 @@ packages:
dependency: transitive
description:
name: _fe_analyzer_shared
sha256: "0b2f2bd91ba804e53a61d757b986f89f1f9eaed5b11e4b2f5a2468d86d6c9fc7"
sha256: "5aaf60d96c4cd00fe7f21594b5ad6a1b699c80a27420f8a837f4d68473ef09e3"
url: "https://pub.dev"
source: hosted
version: "67.0.0"
version: "68.0.0"
_macros:
dependency: transitive
description: dart
source: sdk
version: "0.1.5"
analyzer:
dependency: transitive
description:
name: analyzer
sha256: "37577842a27e4338429a1cbc32679d508836510b056f1eedf0c8d20e39c1383d"
sha256: "21f1d3720fd1c70316399d5e2bccaebb415c434592d778cce8acb967b8578808"
url: "https://pub.dev"
source: hosted
version: "6.4.1"
version: "6.5.0"
args:
dependency: transitive
description:
@ -61,10 +66,10 @@ packages:
dependency: transitive
description:
name: build_daemon
sha256: "0343061a33da9c5810b2d6cee51945127d8f4c060b7fbdd9d54917f0a3feaaa1"
sha256: "79b2aef6ac2ed00046867ed354c88778c9c0f029df8a20fe10b5436826721ef9"
url: "https://pub.dev"
source: hosted
version: "4.0.1"
version: "4.0.2"
build_resolvers:
dependency: transitive
description:
@ -77,18 +82,18 @@ packages:
dependency: "direct dev"
description:
name: build_runner
sha256: "3ac61a79bfb6f6cc11f693591063a7f19a7af628dc52f141743edac5c16e8c22"
sha256: "644dc98a0f179b872f612d3eb627924b578897c629788e858157fa5e704ca0c7"
url: "https://pub.dev"
source: hosted
version: "2.4.9"
version: "2.4.11"
build_runner_core:
dependency: transitive
description:
name: build_runner_core
sha256: "4ae8ffe5ac758da294ecf1802f2aff01558d8b1b00616aa7538ea9a8a5d50799"
sha256: e3c79f69a64bdfcd8a776a3c28db4eb6e3fb5356d013ae5eb2e52007706d5dbe
url: "https://pub.dev"
source: hosted
version: "7.3.0"
version: "7.3.1"
built_collection:
dependency: transitive
description:
@ -149,18 +154,18 @@ packages:
dependency: transitive
description:
name: connectivity_plus
sha256: "224a77051d52a11fbad53dd57827594d3bd24f945af28bd70bab376d68d437f0"
sha256: db7a4e143dc72cc3cb2044ef9b052a7ebfe729513e6a82943bc3526f784365b8
url: "https://pub.dev"
source: hosted
version: "5.0.2"
version: "6.0.3"
connectivity_plus_platform_interface:
dependency: transitive
description:
name: connectivity_plus_platform_interface
sha256: cf1d1c28f4416f8c654d7dc3cd638ec586076255d407cef3ddbdaf178272a71a
sha256: b6a56efe1e6675be240de39107281d4034b64ac23438026355b4234042a35adb
url: "https://pub.dev"
source: hosted
version: "1.2.4"
version: "2.0.0"
convert:
dependency: transitive
description:
@ -209,6 +214,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "1.7.0"
equatable:
dependency: transitive
description:
name: equatable
sha256: c2b87cb7756efdf69892005af546c56c0b5037f54d2a88269b4f347a505e3ca2
url: "https://pub.dev"
source: hosted
version: "2.0.5"
fake_async:
dependency: transitive
description:
@ -241,6 +254,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "1.1.0"
fl_chart:
dependency: "direct main"
description:
name: fl_chart
sha256: d0f0d49112f2f4b192481c16d05b6418bd7820e021e265a3c22db98acf7ed7fb
url: "https://pub.dev"
source: hosted
version: "0.68.0"
flutter:
dependency: "direct main"
description: flutter
@ -258,10 +279,10 @@ packages:
dependency: "direct dev"
description:
name: flutter_lints
sha256: "9e8c3858111da373efc5aa341de011d9bd23e2c5c5e0c62bccf32438e192d7b1"
sha256: "3f41d009ba7172d5ff9be5f6e6e6abb4300e263aab8866d2a0842ed2a70f8f0c"
url: "https://pub.dev"
source: hosted
version: "3.0.2"
version: "4.0.0"
flutter_test:
dependency: "direct dev"
description: flutter
@ -292,10 +313,10 @@ packages:
dependency: "direct main"
description:
name: gql
sha256: afe032332ddfa69b79f1dea2ad7d95923d4993c1b269b224fc7bb3d17e32d33c
sha256: "8ecd3585bb9e40d671aa58f52575d950670f99e5ffab18e2b34a757e071a6693"
url: "https://pub.dev"
source: hosted
version: "1.0.1-alpha+1709845491443"
version: "1.0.1-alpha+1717789143880"
gql_code_builder:
dependency: transitive
description:
@ -308,10 +329,10 @@ packages:
dependency: transitive
description:
name: gql_dedupe_link
sha256: "2971173c68623d5c43f5327ea899bd2ee64ce3461c1263f240b4bb6211f667be"
sha256: "10bee0564d67c24e0c8bd08bd56e0682b64a135e58afabbeed30d85d5e9fea96"
url: "https://pub.dev"
source: hosted
version: "2.0.4-alpha+1709845491527"
version: "2.0.4-alpha+1715521079596"
gql_error_link:
dependency: transitive
description:
@ -332,18 +353,18 @@ packages:
dependency: transitive
description:
name: gql_http_link
sha256: "1f922eed1b7078fdbfd602187663026f9f659fe9a9499e2207b5d5e01617f658"
sha256: ef6ad24d31beb5a30113e9b919eec20876903cc4b0ee0d31550047aaaba7d5dd
url: "https://pub.dev"
source: hosted
version: "1.0.1+1"
version: "1.1.0"
gql_link:
dependency: transitive
description:
name: gql_link
sha256: "177500e250b3742d6d2673d57961e8413b6593dc6bd6a512c51865b6cf096f7e"
sha256: "70fd5b5cbcc50601679f4b9fea3bcc994e583f59cfec7e1fec11113074b1a565"
url: "https://pub.dev"
source: hosted
version: "1.0.1-alpha+1709845491457"
version: "1.0.1-alpha+1717789143896"
gql_transform_link:
dependency: transitive
description:
@ -364,10 +385,10 @@ packages:
dependency: "direct main"
description:
name: graphql
sha256: d066e53446166c12537458386b507f7426f2b8801ebafc184576aab3cbc64d56
sha256: "62f31433ba194eda7b81a812a83c3d9560766cec5ac0210ea4a3e677c91b8df4"
url: "https://pub.dev"
source: hosted
version: "5.2.0-beta.7"
version: "5.2.0-beta.8"
graphql_codegen:
dependency: "direct dev"
description:
@ -380,10 +401,10 @@ packages:
dependency: "direct main"
description:
name: graphql_flutter
sha256: "39b5e830bc654ab02c5b776c31675841d1a8c95840fdd284efba713b1d47e65d"
sha256: "2423b394465e7d83a5e708cd2f5b37b54e7ae9900abfbf0948d512fa46961acb"
url: "https://pub.dev"
source: hosted
version: "5.2.0-beta.6"
version: "5.2.0-beta.7"
graphs:
dependency: transitive
description:
@ -404,10 +425,10 @@ packages:
dependency: transitive
description:
name: http
sha256: "761a297c042deedc1ffbb156d6e2af13886bb305c2a343a4d972504cd67dd938"
sha256: b9c29a161230ee03d3ccf545097fccd9b87a5264228c5d348202e0f0c28f9010
url: "https://pub.dev"
source: hosted
version: "1.2.1"
version: "1.2.2"
http_multi_server:
dependency: transitive
description:
@ -436,34 +457,34 @@ packages:
dependency: transitive
description:
name: js
sha256: f2c445dce49627136094980615a031419f7f3eb393237e4ecd97ac15dea343f3
sha256: c1b2e9b5ea78c45e1a0788d29606ba27dc5f71f019f32ca5140f61ef071838cf
url: "https://pub.dev"
source: hosted
version: "0.6.7"
version: "0.7.1"
json_annotation:
dependency: transitive
description:
name: json_annotation
sha256: b10a7b2ff83d83c777edba3c6a0f97045ddadd56c944e1a23a3fdf43a1bf4467
sha256: "1ce844379ca14835a50d2f019a3099f419082cfdd231cd86a142af94dd5c6bb1"
url: "https://pub.dev"
source: hosted
version: "4.8.1"
version: "4.9.0"
leak_tracker:
dependency: transitive
description:
name: leak_tracker
sha256: "7f0df31977cb2c0b88585095d168e689669a2cc9b97c309665e3386f3e9d341a"
sha256: "3f87a60e8c63aecc975dda1ceedbc8f24de75f09e4856ea27daf8958f2f0ce05"
url: "https://pub.dev"
source: hosted
version: "10.0.4"
version: "10.0.5"
leak_tracker_flutter_testing:
dependency: transitive
description:
name: leak_tracker_flutter_testing
sha256: "06e98f569d004c1315b991ded39924b21af84cf14cc94791b8aea337d25b57f8"
sha256: "932549fb305594d82d7183ecd9fa93463e9914e1b67cacc34bc40906594a1806"
url: "https://pub.dev"
source: hosted
version: "3.0.3"
version: "3.0.5"
leak_tracker_testing:
dependency: transitive
description:
@ -476,10 +497,10 @@ packages:
dependency: transitive
description:
name: lints
sha256: cbf8d4b858bb0134ef3ef87841abdf8d63bfc255c266b7bf6b39daa1085c4290
sha256: "976c774dd944a42e83e2467f4cc670daef7eed6295b10b36ae8c85bcbf828235"
url: "https://pub.dev"
source: hosted
version: "3.0.0"
version: "4.0.0"
logging:
dependency: transitive
description:
@ -488,6 +509,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "1.2.0"
macros:
dependency: transitive
description:
name: macros
sha256: a8403c89b36483b4cbf9f1fcd24562f483cb34a5c9bf101cf2b0d8a083cf1239
url: "https://pub.dev"
source: hosted
version: "0.1.0-main.5"
matcher:
dependency: transitive
description:
@ -500,18 +529,18 @@ packages:
dependency: transitive
description:
name: material_color_utilities
sha256: "0e0a020085b65b6083975e499759762399b4475f766c21668c4ecca34ea74e5a"
sha256: f7142bb1154231d7ea5f96bc7bde4bda2a0945d2806bb11670e30b850d56bdec
url: "https://pub.dev"
source: hosted
version: "0.8.0"
version: "0.11.1"
meta:
dependency: transitive
description:
name: meta
sha256: "7687075e408b093f36e6bbf6c91878cc0d4cd10f409506f7bc996f68220b9136"
sha256: "25dfcaf170a0190f47ca6355bdd4552cb8924b430512ff0cafb8db9bd41fe33b"
url: "https://pub.dev"
source: hosted
version: "1.12.0"
version: "1.14.0"
mime:
dependency: transitive
description:
@ -572,18 +601,18 @@ packages:
dependency: transitive
description:
name: path_provider_android
sha256: a248d8146ee5983446bf03ed5ea8f6533129a12b11f12057ad1b4a67a2b3b41d
sha256: "30c5aa827a6ae95ce2853cdc5fe3971daaac00f6f081c419c013f7f57bff2f5e"
url: "https://pub.dev"
source: hosted
version: "2.2.4"
version: "2.2.7"
path_provider_foundation:
dependency: transitive
description:
name: path_provider_foundation
sha256: "5a7999be66e000916500be4f15a3633ebceb8302719b47b9cc49ce924125350f"
sha256: f234384a3fdd67f989b4d54a5d73ca2a6c422fa55ae694381ae0f4375cd1ea16
url: "https://pub.dev"
source: hosted
version: "2.3.2"
version: "2.4.0"
path_provider_linux:
dependency: transitive
description:
@ -604,10 +633,10 @@ packages:
dependency: transitive
description:
name: path_provider_windows
sha256: "8bc9f22eee8690981c22aa7fc602f5c85b497a6fb2ceb35ee5a5e5ed85ad8170"
sha256: bd6f00dbd873bfb70d0761682da2b3a2c2fccc2b9e84c495821639601d81afe7
url: "https://pub.dev"
source: hosted
version: "2.2.1"
version: "2.3.0"
petitparser:
dependency: transitive
description:
@ -620,10 +649,10 @@ packages:
dependency: transitive
description:
name: platform
sha256: "12220bb4b65720483f8fa9450b4332347737cf8213dd2840d8b2c823e47243ec"
sha256: "9b71283fc13df574056616011fb138fd3b793ea47cc509c189a6c3fa5f8a1a65"
url: "https://pub.dev"
source: hosted
version: "3.1.4"
version: "3.1.5"
plugin_platform_interface:
dependency: transitive
description:
@ -660,10 +689,10 @@ packages:
dependency: transitive
description:
name: pubspec_parse
sha256: c63b2876e58e194e4b0828fcb080ad0e06d051cb607a6be51a9e084f47cb9367
sha256: c799b721d79eb6ee6fa56f00c04b472dcd44a30d258fac2174a6ec57302678f8
url: "https://pub.dev"
source: hosted
version: "1.2.3"
version: "1.3.0"
recase:
dependency: transitive
description:
@ -692,10 +721,10 @@ packages:
dependency: transitive
description:
name: shelf_web_socket
sha256: "9ca081be41c60190ebcb4766b2486a7d50261db7bd0f5d9615f2d653637a84c1"
sha256: "073c147238594ecd0d193f3456a5fe91c4b0abbcc68bf5cd95b36c4e194ac611"
url: "https://pub.dev"
source: hosted
version: "1.0.4"
version: "2.0.0"
sky_engine:
dependency: transitive
description: flutter
@ -761,10 +790,10 @@ packages:
dependency: transitive
description:
name: test_api
sha256: "9955ae474176f7ac8ee4e989dadfb411a58c30415bcfb648fa04b2b8a03afa7f"
sha256: "2419f20b0c8677b2d67c8ac4d1ac7372d862dc6c460cdbb052b40155408cd794"
url: "https://pub.dev"
source: hosted
version: "0.7.0"
version: "0.7.1"
timing:
dependency: transitive
description:
@ -785,10 +814,10 @@ packages:
dependency: transitive
description:
name: uuid
sha256: "814e9e88f21a176ae1359149021870e87f7cddaf633ab678a5d2b0bff7fd1ba8"
sha256: "83d37c7ad7aaf9aa8e275490669535c8080377cfa7a7004c24dfac53afffaa90"
url: "https://pub.dev"
source: hosted
version: "4.4.0"
version: "4.4.2"
vector_math:
dependency: transitive
description:
@ -801,10 +830,10 @@ packages:
dependency: transitive
description:
name: vm_service
sha256: a75f83f14ad81d5fe4b3319710b90dec37da0e22612326b696c9e1b8f34bbf48
sha256: "7475cb4dd713d57b6f7464c0e13f06da0d535d8b2067e188962a59bac2cf280b"
url: "https://pub.dev"
source: hosted
version: "14.2.0"
version: "14.2.2"
watcher:
dependency: transitive
description:
@ -825,18 +854,10 @@ packages:
dependency: transitive
description:
name: web_socket_channel
sha256: "58c6666b342a38816b2e7e50ed0f1e261959630becd4c879c4f26bfa14aa5a42"
sha256: d88238e5eac9a42bb43ca4e721edba3c08c6354d4a53063afaa568516217621b
url: "https://pub.dev"
source: hosted
version: "2.4.5"
win32:
dependency: transitive
description:
name: win32
sha256: "0a989dc7ca2bb51eac91e8fd00851297cfffd641aa7538b165c62637ca0eaa4a"
url: "https://pub.dev"
source: hosted
version: "5.4.0"
version: "2.4.0"
xdg_directories:
dependency: transitive
description:
@ -862,5 +883,5 @@ packages:
source: hosted
version: "3.1.2"
sdks:
dart: ">=3.4.0-282.1.beta <4.0.0"
flutter: ">=3.18.0-18.0.pre.54"
dart: ">=3.4.0 <4.0.0"
flutter: ">=3.22.0"

View file

@ -38,11 +38,12 @@ dependencies:
path: any
dynamic_color: ^1.7.0
fl_chart: ^0.68.0
dev_dependencies:
flutter_test:
sdk: flutter
flutter_lints: ^3.0.0
flutter_lints: ^4.0.0
build_runner: ^2.4.9
graphql_codegen: ^0.14.0