mirror of
https://github.com/bitmagnet-io/bitmagnet.git
synced 2025-12-28 06:34:17 +00:00
Use budgeted count for aggregations (#128)
This PR introduces a significant optimisation of aggregations (counts). It takes advantage of the fact that a Postgres query plan can report the cost of a query up-front, along with a rough estimate of the matching row count based on indexes. All count queries now have a "budget", defaulting to 5,000. If the query plan indicates the budget would be exceeded, the planner's estimate is returned (and the UI displays an estimate symbol `~` next to the associated count); otherwise the query is executed and an exact count is returned. The estimate is usually within 10-20% of the exact count, though accuracy depends on the selected filter criteria and on what is being counted; I've noticed bigger discrepancies in some cases, but overall it seems like an acceptable trade-off.

The background cache warmer has been removed and aggregations are real time again (the cache warmer was at best a short-term mitigation while I figured out a better solution). The cache TTL has been reduced to 10 minutes; it was previously increased to allow the cache warmer to run less frequently.

There are also some adjustments to the indexes that improve performance and the accuracy of estimations. For large indexes the migration may take a while to run: in my tests on 12 million torrents it took 15 minutes.
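For context, here is a minimal Go sketch of the budgeting idea. The actual implementation in this PR delegates to a `budgeted_count` Postgres function added by the migration, which is not visible in this diff; the EXPLAIN parsing, the count wrapper query, and the function names below are illustrative assumptions rather than the repository's code.

package example

import (
	"context"
	"database/sql"
	"encoding/json"
)

// planEstimate asks Postgres to plan (but not execute) selectSQL and returns the
// planner's total cost and its estimated number of matching rows.
func planEstimate(ctx context.Context, db *sql.DB, selectSQL string) (cost float64, rows int64, err error) {
	var planJSON string
	if err = db.QueryRowContext(ctx, "EXPLAIN (FORMAT JSON) "+selectSQL).Scan(&planJSON); err != nil {
		return 0, 0, err
	}
	var plans []struct {
		Plan struct {
			TotalCost float64 `json:"Total Cost"`
			PlanRows  int64   `json:"Plan Rows"`
		} `json:"Plan"`
	}
	if err = json.Unmarshal([]byte(planJSON), &plans); err != nil {
		return 0, 0, err
	}
	return plans[0].Plan.TotalCost, plans[0].Plan.PlanRows, nil
}

// budgetedCount returns the planner's row estimate when the plan cost exceeds the
// budget, and an exact count otherwise; isEstimate reports which one was returned.
func budgetedCount(ctx context.Context, db *sql.DB, selectSQL string, budget float64) (count int64, isEstimate bool, err error) {
	cost, rows, err := planEstimate(ctx, db, selectSQL)
	if err != nil {
		return 0, false, err
	}
	if cost > budget {
		return rows, true, nil // too expensive to count exactly: settle for the estimate
	}
	err = db.QueryRowContext(ctx, "SELECT count(*) FROM ("+selectSQL+") AS q").Scan(&count)
	return count, false, err
}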
This commit is contained in:
parent 48acef4d03
commit e471dd2ddf
@@ -5,52 +5,62 @@ fragment TorrentContentSearchResult on TorrentContentSearchResult {
    ...TorrentContent
  }
  totalCount
  totalCountIsEstimate
  hasNextPage
  aggregations {
    contentType {
      value
      label
      count
      isEstimate
    }
    torrentSource {
      value
      label
      count
      isEstimate
    }
    torrentTag {
      value
      label
      count
      isEstimate
    }
    torrentFileType {
      value
      label
      count
      isEstimate
    }
    language {
      value
      label
      count
      isEstimate
    }
    genre {
      value
      label
      count
      isEstimate
    }
    releaseYear {
      value
      label
      count
      isEstimate
    }
    videoResolution {
      value
      label
      count
      isEstimate
    }
    videoSource {
      value
      label
      count
      isEstimate
    }
  }
}
@@ -8,6 +8,7 @@ input SearchQueryInput {
  """
  hasNextPage: Boolean
  cached: Boolean
  aggregationBudget: Float
}

input ContentTypeFacetInput {
@@ -75,54 +76,63 @@ type ContentTypeAgg {
  value: ContentType
  label: String!
  count: Int!
  isEstimate: Boolean!
}

type TorrentSourceAgg {
  value: String!
  label: String!
  count: Int!
  isEstimate: Boolean!
}

type TorrentTagAgg {
  value: String!
  label: String!
  count: Int!
  isEstimate: Boolean!
}

type TorrentFileTypeAgg {
  value: FileType!
  label: String!
  count: Int!
  isEstimate: Boolean!
}

type LanguageAgg {
  value: Language!
  label: String!
  count: Int!
  isEstimate: Boolean!
}

type GenreAgg {
  value: String!
  label: String!
  count: Int!
  isEstimate: Boolean!
}

type ReleaseYearAgg {
  value: Year
  label: String!
  count: Int!
  isEstimate: Boolean!
}

type VideoResolutionAgg {
  value: VideoResolution
  label: String!
  count: Int!
  isEstimate: Boolean!
}

type VideoSourceAgg {
  value: VideoSource
  label: String!
  count: Int!
  isEstimate: Boolean!
}

type TorrentContentAggregations {
@@ -139,6 +149,7 @@ type TorrentContentAggregations {

type TorrentContentSearchResult {
  totalCount: Int!
  totalCountIsEstimate: Boolean!
  """
  hasNextPage is true if there are more results to fetch
  """
2
internal/database/cache/config.go
vendored
@@ -16,7 +16,7 @@ func NewDefaultConfig() Config {
		// if I can get time to understand the problem better I may open an issue in https://github.com/go-gorm/caches, though they
		// don't seem very responsive to issues, hence why bitmagnet uses a forked version of this library...
		EaserEnabled: false,
		Ttl: time.Minute * 60,
		Ttl: time.Minute * 10,
		MaxKeys: 1000,
	}
}
24
internal/database/dao/budgeted_count.go
Normal file
@@ -0,0 +1,24 @@
package dao

import (
	"gorm.io/gorm"
)

func ToSQL(db *gorm.DB) string {
	return db.ToSQL(func(tx *gorm.DB) *gorm.DB {
		return tx.Find(&[]interface{}{})
	})
}

type BudgetedCountResult struct {
	Count int64
	Cost float64
	BudgetExceeded bool
}

func BudgetedCount(db *gorm.DB, budget float64) (BudgetedCountResult, error) {
	row := db.Raw("SELECT count, cost, budget_exceeded from budgeted_count(?, ?)", ToSQL(db), budget).Row()
	result := BudgetedCountResult{}
	err := row.Scan(&result.Count, &result.Cost, &result.BudgetExceeded)
	return result, err
}
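A short illustrative call site for the new helper (the gorm query below is made up for the example; the real call sites are in the facet aggregation code later in this diff, and the 5,000 budget mirrors the default set in `DefaultOption`):

// Hypothetical call site; db is a *gorm.DB and the table/filter are invented for the example.
func countMovies(db *gorm.DB) (count uint, isEstimate bool, err error) {
	q := db.Table("torrent_contents").Where("content_type = ?", "movie")
	result, err := dao.BudgetedCount(q, 5_000) // 5,000 mirrors the DefaultOption budget
	if err != nil {
		return 0, false, err
	}
	// Count holds either the exact count or the planner's estimate;
	// BudgetExceeded tells us which one we got.
	return uint(result.Count), result.BudgetExceeded, nil
}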
@@ -32,7 +32,7 @@ func newContent(db *gorm.DB, opts ...gen.DOOption) content {
	_content.ID = field.NewString(tableName, "id")
	_content.Title = field.NewString(tableName, "title")
	_content.ReleaseDate = field.NewTime(tableName, "release_date")
	_content.ReleaseYear = field.NewField(tableName, "release_year")
	_content.ReleaseYear = field.NewUint16(tableName, "release_year")
	_content.Adult = field.NewField(tableName, "adult")
	_content.OriginalLanguage = field.NewField(tableName, "original_language")
	_content.OriginalTitle = field.NewField(tableName, "original_title")
@@ -86,7 +86,7 @@ type content struct {
	ID field.String
	Title field.String
	ReleaseDate field.Time
	ReleaseYear field.Field
	ReleaseYear field.Uint16
	Adult field.Field
	OriginalLanguage field.Field
	OriginalTitle field.Field
@@ -124,7 +124,7 @@ func (c *content) updateTableName(table string) *content {
	c.ID = field.NewString(table, "id")
	c.Title = field.NewString(table, "title")
	c.ReleaseDate = field.NewTime(table, "release_date")
	c.ReleaseYear = field.NewField(table, "release_year")
	c.ReleaseYear = field.NewUint16(table, "release_year")
	c.Adult = field.NewField(table, "adult")
	c.OriginalLanguage = field.NewField(table, "original_language")
	c.OriginalTitle = field.NewField(table, "original_title")
15
internal/database/dao/torrent_contents.go
Normal file
@@ -0,0 +1,15 @@
package dao

import (
	"fmt"
	"gorm.io/gorm/callbacks"
)

func (t torrentContent) CountEstimate() (int64, error) {
	db := t.UnderlyingDB()
	callbacks.BuildQuerySQL(db)
	query := db.Statement.SQL.String()
	args := db.Statement.Vars
	fmt.Printf("query: %s, args: %v", query, args)
	return 0, nil
}
@@ -9,7 +9,6 @@ import (
	"github.com/bitmagnet-io/bitmagnet/internal/database/migrations"
	"github.com/bitmagnet-io/bitmagnet/internal/database/postgres"
	"github.com/bitmagnet-io/bitmagnet/internal/database/search"
	"github.com/bitmagnet-io/bitmagnet/internal/database/search/warmer"
	"go.uber.org/fx"
)

@@ -18,7 +17,6 @@ func New() fx.Option {
		"database",
		configfx.NewConfigModule[postgres.Config]("postgres", postgres.NewDefaultConfig()),
		configfx.NewConfigModule[cache.Config]("gorm_cache", cache.NewDefaultConfig()),
		configfx.NewConfigModule[warmer.Config]("search_warmer", warmer.NewDefaultConfig()),
		fx.Provide(
			cache.NewInMemoryCacher,
			cache.NewPlugin,
@@ -28,7 +26,6 @@ func New() fx.Option {
			migrations.New,
			postgres.New,
			search.New,
			warmer.New,
		),
		fx.Decorate(
			cache.NewDecorator,
@@ -254,6 +254,7 @@ func BuildGenerator(db *gorm.DB) *gen.Generator {
		gen.FieldType("release_date", "Date"),
		gen.FieldGenType("release_date", "Time"),
		gen.FieldType("release_year", "Year"),
		gen.FieldGenType("release_year", "Uint16"),
		gen.FieldType("original_language", "NullLanguage"),
		gen.FieldType("popularity", "NullFloat32"),
		gen.FieldType("vote_average", "NullFloat32"),
@@ -27,7 +27,7 @@ func (l gooseLogger) Println(v ...interface{}) {

func (l gooseLogger) Printf(format string, v ...interface{}) {
	fn := l.l.Debugf
	if strings.HasPrefix(format, "goose: successfully migrated") {
	if strings.HasPrefix(format, "goose: successfully migrated") || strings.HasPrefix(format, "goose: no migrations to run") {
		fn = l.l.Infof
	}
	fn(strings.TrimSpace(format), v...)
@@ -33,16 +33,18 @@ func New(p Params) Result {
			if err != nil {
				return nil, err
			}
			initGoose(p.Logger)
			logger := p.Logger.Named("migrator")
			initGoose(logger)
			return &migrator{
				db: db,
				db:     db,
				logger: logger,
			}, nil
		}),
	}
}

func initGoose(logger *zap.SugaredLogger) {
	goose.SetLogger(gooseLogger{logger.Named("migrator")})
	goose.SetLogger(gooseLogger{logger})
	goose.SetBaseFS(migrationssql.FS)
	err := goose.SetDialect("postgres")
	if err != nil {
@@ -58,10 +60,12 @@ type Migrator interface {
}

type migrator struct {
	db *sql.DB
	db     *sql.DB
	logger *zap.SugaredLogger
}

func (m *migrator) Up(ctx context.Context) error {
	m.logger.Info("checking and applying migrations...")
	return goose.UpContext(ctx, m.db, ".")
}

@@ -22,8 +22,8 @@ type FacetConfig interface {

type Facet interface {
	FacetConfig
	Aggregate(ctx FacetContext) (AggregationItems, error)
	Criteria() []Criteria
	Values(ctx FacetContext) (map[string]string, error)
	Criteria(filter FacetFilter) []Criteria
}

type FacetFilter map[string]struct{}
@@ -38,6 +38,11 @@ func (f FacetFilter) Values() []string {
	return values
}

func (f FacetFilter) HasKey(key string) bool {
	_, ok := f[key]
	return ok
}

type facetConfig struct {
	key string
	label string
@@ -58,8 +63,9 @@ func NewFacetConfig(options ...FacetOption) FacetConfig {
}

type AggregationItem struct {
	Label string
	Count uint
	Label string
	Count uint
	IsEstimate bool
}

type AggregationItems = map[string]AggregationItem
@@ -189,7 +195,7 @@ func (c facetConfig) Filter() FacetFilter {
func (b optionBuilder) createFacetsFilterCriteria() (c Criteria, err error) {
	cs := make([]Criteria, 0, len(b.facets))
	for _, facet := range b.facets {
		cr := facet.Criteria()
		cr := facet.Criteria(facet.Filter())
		switch facet.Logic() {
		case model.FacetLogicAnd:
			cs = append(cs, AndCriteria{cr})
@ -211,39 +217,90 @@ func withCurrentFacet(facetKey string) Option {
|
||||
|
||||
func (b optionBuilder) calculateAggregations(ctx context.Context) (Aggregations, error) {
|
||||
aggregations := make(Aggregations, len(b.facets))
|
||||
wg := sync.WaitGroup{}
|
||||
wg.Add(len(b.facets))
|
||||
wgOuter := sync.WaitGroup{}
|
||||
wgOuter.Add(len(b.facets))
|
||||
mtx := sync.Mutex{}
|
||||
var errs []error
|
||||
addErr := func(err error) {
|
||||
mtx.Lock()
|
||||
defer mtx.Unlock()
|
||||
errs = append(errs, err)
|
||||
}
|
||||
addAggregation := func(key string, aggregation AggregationGroup) {
|
||||
mtx.Lock()
|
||||
defer mtx.Unlock()
|
||||
aggregations[key] = aggregation
|
||||
}
|
||||
for _, facet := range b.facets {
|
||||
go (func(facet Facet) {
|
||||
defer wg.Done()
|
||||
defer wgOuter.Done()
|
||||
if !facet.IsAggregated() {
|
||||
return
|
||||
}
|
||||
aggBuilder, aggBuilderErr := Options(facet.AggregationOption, withCurrentFacet(facet.Key()))(b)
|
||||
if aggBuilderErr != nil {
|
||||
errs = append(errs, fmt.Errorf("failed to create aggregation option for key '%s': %w", facet.Key(), aggBuilderErr))
|
||||
values, valuesErr := facet.Values(facetContext{
|
||||
optionBuilder: b,
|
||||
ctx: ctx,
|
||||
})
|
||||
if valuesErr != nil {
|
||||
addErr(fmt.Errorf("failed to get values for key '%s': %w", facet.Key(), valuesErr))
|
||||
return
|
||||
}
|
||||
aggCtx := facetContext{
|
||||
optionBuilder: aggBuilder,
|
||||
ctx: ctx,
|
||||
filter := facet.Filter()
|
||||
items := make(AggregationItems, len(values))
|
||||
addItem := func(key string, item AggregationItem) {
|
||||
mtx.Lock()
|
||||
defer mtx.Unlock()
|
||||
items[key] = item
|
||||
}
|
||||
aggregation, aggregateErr := facet.Aggregate(aggCtx)
|
||||
mtx.Lock()
|
||||
defer mtx.Unlock()
|
||||
if aggregateErr != nil {
|
||||
errs = append(errs, fmt.Errorf("failed to aggregate key '%s': %w", facet.Key(), aggregateErr))
|
||||
} else {
|
||||
aggregations[facet.Key()] = AggregationGroup{
|
||||
Label: facet.Label(),
|
||||
Logic: facet.Logic(),
|
||||
Items: aggregation,
|
||||
}
|
||||
wgInner := sync.WaitGroup{}
|
||||
wgInner.Add(len(values))
|
||||
for key, label := range values {
|
||||
go func(key, label string) {
|
||||
defer wgInner.Done()
|
||||
criterias := facet.Criteria(FacetFilter{key: struct{}{}})
|
||||
var criteria Criteria
|
||||
switch facet.Logic() {
|
||||
case model.FacetLogicAnd:
|
||||
criteria = AndCriteria{criterias}
|
||||
case model.FacetLogicOr:
|
||||
criteria = OrCriteria{criterias}
|
||||
}
|
||||
aggBuilder, aggBuilderErr := Options(
|
||||
facet.AggregationOption,
|
||||
withCurrentFacet(facet.Key()),
|
||||
Where(criteria),
|
||||
)(b)
|
||||
if aggBuilderErr != nil {
|
||||
addErr(fmt.Errorf("failed to create aggregation option for key '%s': %w", facet.Key(), aggBuilderErr))
|
||||
return
|
||||
}
|
||||
q := aggBuilder.NewSubQuery(ctx)
|
||||
if preErr := aggBuilder.applyPre(q); preErr != nil {
|
||||
addErr(fmt.Errorf("failed to apply pre for key '%s': %w", facet.Key(), preErr))
|
||||
return
|
||||
}
|
||||
countResult, countErr := dao.BudgetedCount(q.UnderlyingDB(), b.aggregationBudget)
|
||||
if countErr != nil {
|
||||
addErr(fmt.Errorf("failed to get count for key '%s': %w", facet.Key(), countErr))
|
||||
return
|
||||
}
|
||||
if countResult.Count > 0 || countResult.BudgetExceeded || filter.HasKey(key) {
|
||||
addItem(key, AggregationItem{
|
||||
Label: label,
|
||||
Count: uint(countResult.Count),
|
||||
IsEstimate: countResult.BudgetExceeded,
|
||||
})
|
||||
}
|
||||
}(key, label)
|
||||
}
|
||||
wgInner.Wait()
|
||||
addAggregation(facet.Key(), AggregationGroup{
|
||||
Label: facet.Label(),
|
||||
Logic: facet.Logic(),
|
||||
Items: items,
|
||||
})
|
||||
})(facet)
|
||||
}
|
||||
wg.Wait()
|
||||
wgOuter.Wait()
|
||||
return aggregations, errors.Join(errs...)
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ type Option = func(ctx OptionBuilder) (OptionBuilder, error)
func DefaultOption() Option {
	return Options(
		Limit(10),
		WithAggregationBudget(5_000),
	)
}

@@ -183,6 +184,12 @@ func WithHasNextPage(bl bool) Option {
	}
}

func WithAggregationBudget(budget float64) Option {
	return func(ctx OptionBuilder) (OptionBuilder, error) {
		return ctx.WithAggregationBudget(budget), nil
	}
}

func Context(fn func(ctx context.Context) context.Context) Option {
	return func(b OptionBuilder) (OptionBuilder, error) {
		return b.Context(fn), nil
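For orientation, a hedged sketch of how a caller could override the default budget when issuing a search. The option names come from the diff above; `s` is assumed to be a `search.Search` instance, and the surrounding call is illustrative rather than taken from the repository:

// s is assumed to be a search.Search; the handler around this snippet is illustrative.
res, err := s.TorrentContent(
	ctx,
	query.Limit(20),
	query.WithTotalCount(true),
	query.WithAggregationBudget(1_000_000), // well above the 5,000 default, so counts stay exact
)
if err == nil {
	fmt.Println(res.TotalCount, res.TotalCountIsEstimate)
}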
@ -1,41 +1,45 @@
|
||||
package query
|
||||
|
||||
import (
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/model"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/model"
|
||||
)
|
||||
|
||||
type SearchParams struct {
|
||||
QueryString model.NullString
|
||||
Limit model.NullUint
|
||||
Offset model.NullUint
|
||||
TotalCount model.NullBool
|
||||
HasNextPage model.NullBool
|
||||
Cached model.NullBool
|
||||
QueryString model.NullString
|
||||
Limit model.NullUint
|
||||
Offset model.NullUint
|
||||
TotalCount model.NullBool
|
||||
HasNextPage model.NullBool
|
||||
Cached model.NullBool
|
||||
AggregationBudget model.NullFloat64
|
||||
}
|
||||
|
||||
func (s SearchParams) Option() Option {
|
||||
var options []Option
|
||||
if s.QueryString.Valid {
|
||||
options = append(options, QueryString(s.QueryString.String), OrderByQueryStringRank())
|
||||
}
|
||||
if s.Limit.Valid {
|
||||
options = append(options, Limit(s.Limit.Uint))
|
||||
}
|
||||
if s.Offset.Valid {
|
||||
options = append(options, Offset(s.Offset.Uint))
|
||||
}
|
||||
if s.TotalCount.Valid {
|
||||
options = append(options, WithTotalCount(s.TotalCount.Bool))
|
||||
}
|
||||
if s.HasNextPage.Valid {
|
||||
options = append(options, WithHasNextPage(s.HasNextPage.Bool))
|
||||
}
|
||||
if s.Cached.Valid {
|
||||
if s.Cached.Bool {
|
||||
options = append(options, Cached())
|
||||
} else {
|
||||
options = append(options, CacheWarm())
|
||||
}
|
||||
}
|
||||
return Options(options...)
|
||||
var options []Option
|
||||
if s.QueryString.Valid {
|
||||
options = append(options, QueryString(s.QueryString.String), OrderByQueryStringRank())
|
||||
}
|
||||
if s.Limit.Valid {
|
||||
options = append(options, Limit(s.Limit.Uint))
|
||||
}
|
||||
if s.Offset.Valid {
|
||||
options = append(options, Offset(s.Offset.Uint))
|
||||
}
|
||||
if s.TotalCount.Valid {
|
||||
options = append(options, WithTotalCount(s.TotalCount.Bool))
|
||||
}
|
||||
if s.HasNextPage.Valid {
|
||||
options = append(options, WithHasNextPage(s.HasNextPage.Bool))
|
||||
}
|
||||
if s.Cached.Valid {
|
||||
if s.Cached.Bool {
|
||||
options = append(options, Cached())
|
||||
} else {
|
||||
options = append(options, CacheWarm())
|
||||
}
|
||||
}
|
||||
if s.AggregationBudget.Valid {
|
||||
options = append(options, WithAggregationBudget(s.AggregationBudget.Float64))
|
||||
}
|
||||
return Options(options...)
|
||||
}
|
||||
|
||||
@ -20,10 +20,11 @@ type ResultItem struct {
|
||||
}
|
||||
|
||||
type GenericResult[T interface{}] struct {
|
||||
TotalCount uint
|
||||
HasNextPage bool
|
||||
Items []T
|
||||
Aggregations Aggregations
|
||||
TotalCount uint
|
||||
TotalCountIsEstimate bool
|
||||
HasNextPage bool
|
||||
Items []T
|
||||
Aggregations Aggregations
|
||||
}
|
||||
|
||||
type SubQueryFactory = func(context.Context, *dao.Query) SubQuery
|
||||
@ -72,10 +73,11 @@ func GenericQuery[T interface{}](
|
||||
addErr(sqErr)
|
||||
return
|
||||
}
|
||||
if tc, countErr := sq.Count(); countErr != nil {
|
||||
if countResult, countErr := dao.BudgetedCount(sq.UnderlyingDB(), builder.AggregationBudget()); countErr != nil {
|
||||
addErr(countErr)
|
||||
} else {
|
||||
r.TotalCount = uint(tc)
|
||||
r.TotalCount = uint(countResult.Count)
|
||||
r.TotalCountIsEstimate = countResult.BudgetExceeded
|
||||
}
|
||||
}
|
||||
})()
|
||||
@ -222,6 +224,8 @@ type OptionBuilder interface {
|
||||
calculateAggregations(context.Context) (Aggregations, error)
|
||||
WithTotalCount(bool) OptionBuilder
|
||||
WithHasNextPage(bool) OptionBuilder
|
||||
WithAggregationBudget(float64) OptionBuilder
|
||||
AggregationBudget() float64
|
||||
withTotalCount() bool
|
||||
applyCallbacks(context.Context, any) error
|
||||
hasZeroLimit() bool
|
||||
@ -233,21 +237,22 @@ type OptionBuilder interface {
|
||||
|
||||
type optionBuilder struct {
|
||||
dbContext
|
||||
joins map[string]TableJoin
|
||||
requiredJoins maps.InsertMap[string, struct{}]
|
||||
scopes []Scope
|
||||
selections []clause.Expr
|
||||
groupBy []clause.Column
|
||||
orderBy []clause.OrderByColumn
|
||||
limit model.NullUint
|
||||
nextPage bool
|
||||
offset uint
|
||||
facets []Facet
|
||||
currentFacet string
|
||||
preloads []field.RelationField
|
||||
totalCount bool
|
||||
callbacks []Callback
|
||||
contextFn func(context.Context) context.Context
|
||||
joins map[string]TableJoin
|
||||
requiredJoins maps.InsertMap[string, struct{}]
|
||||
scopes []Scope
|
||||
selections []clause.Expr
|
||||
groupBy []clause.Column
|
||||
orderBy []clause.OrderByColumn
|
||||
limit model.NullUint
|
||||
nextPage bool
|
||||
offset uint
|
||||
facets []Facet
|
||||
currentFacet string
|
||||
preloads []field.RelationField
|
||||
totalCount bool
|
||||
aggregationBudget float64
|
||||
callbacks []Callback
|
||||
contextFn func(context.Context) context.Context
|
||||
}
|
||||
|
||||
type RawJoin struct {
|
||||
@ -385,6 +390,15 @@ func (b optionBuilder) hasNextPage(nItems int) bool {
|
||||
return nItems > int(b.limit.Uint)
|
||||
}
|
||||
|
||||
func (b optionBuilder) WithAggregationBudget(budget float64) OptionBuilder {
|
||||
b.aggregationBudget = budget
|
||||
return b
|
||||
}
|
||||
|
||||
func (b optionBuilder) AggregationBudget() float64 {
|
||||
return b.aggregationBudget
|
||||
}
|
||||
|
||||
func (b optionBuilder) withCurrentFacet(facet string) OptionBuilder {
|
||||
b.currentFacet = facet
|
||||
return b
|
||||
|
||||
@ -1,9 +1,9 @@
|
||||
package search
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"fmt"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/query"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/maps"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/model"
|
||||
"gorm.io/gen/field"
|
||||
"strconv"
|
||||
@ -34,46 +34,12 @@ type yearFacet struct {
|
||||
field string
|
||||
}
|
||||
|
||||
func (r yearFacet) Aggregate(ctx query.FacetContext) (query.AggregationItems, error) {
|
||||
var results []struct {
|
||||
Year string
|
||||
Count uint
|
||||
}
|
||||
q, qErr := ctx.NewAggregationQuery()
|
||||
if qErr != nil {
|
||||
return nil, qErr
|
||||
}
|
||||
if txErr := q.UnderlyingDB().Select(
|
||||
fmt.Sprintf("%s.%s as year", ctx.TableName(), r.field),
|
||||
"count(*) as count",
|
||||
).Group(
|
||||
"year",
|
||||
).Find(&results).Error; txErr != nil {
|
||||
return nil, txErr
|
||||
}
|
||||
agg := make(query.AggregationItems, len(results))
|
||||
for _, item := range results {
|
||||
key := item.Year
|
||||
label := item.Year
|
||||
if key == "" {
|
||||
key = "null"
|
||||
label = "Unknown"
|
||||
}
|
||||
agg[key] = query.AggregationItem{
|
||||
Label: label,
|
||||
Count: item.Count,
|
||||
}
|
||||
}
|
||||
return agg, nil
|
||||
}
|
||||
|
||||
func (r yearFacet) Criteria() []query.Criteria {
|
||||
func (r yearFacet) Criteria(filter query.FacetFilter) []query.Criteria {
|
||||
return []query.Criteria{
|
||||
query.GenCriteria(func(ctx query.DbContext) (query.Criteria, error) {
|
||||
filter := r.Filter().Values()
|
||||
years := make([]uint16, 0, len(filter))
|
||||
hasNull := false
|
||||
for _, v := range filter {
|
||||
for _, v := range filter.Values() {
|
||||
if v == "null" {
|
||||
hasNull = true
|
||||
continue
|
||||
@ -88,15 +54,18 @@ func (r yearFacet) Criteria() []query.Criteria {
|
||||
years = append(years, uint16(vInt))
|
||||
}
|
||||
yearField := ctx.Query().Content.ReleaseYear
|
||||
joins := maps.NewInsertMap(maps.MapEntry[string, struct{}]{Key: model.TableNameContent})
|
||||
var or []query.Criteria
|
||||
if len(years) > 0 {
|
||||
or = append(or, query.RawCriteria{
|
||||
Query: ctx.Query().Content.UnderlyingDB().Where(yearCondition(yearField, years...).RawExpr()),
|
||||
Joins: joins,
|
||||
})
|
||||
}
|
||||
if hasNull {
|
||||
or = append(or, query.RawCriteria{
|
||||
Query: ctx.Query().Content.UnderlyingDB().Where(yearField.IsNull().RawExpr()),
|
||||
Joins: joins,
|
||||
})
|
||||
}
|
||||
return query.Or(or...), nil
|
||||
@ -104,10 +73,24 @@ func (r yearFacet) Criteria() []query.Criteria {
|
||||
}
|
||||
}
|
||||
|
||||
func yearCondition(target field.Field, years ...uint16) field.Expr {
|
||||
valuers := make([]driver.Valuer, 0, len(years))
|
||||
for _, year := range years {
|
||||
valuers = append(valuers, model.NewNullUint16(year))
|
||||
}
|
||||
return target.In(valuers...)
|
||||
func yearCondition(target field.Uint16, years ...uint16) field.Expr {
|
||||
return target.In(years...)
|
||||
}
|
||||
|
||||
func (yearFacet) Values(ctx query.FacetContext) (map[string]string, error) {
|
||||
q := ctx.Query().Content
|
||||
var years []model.Year
|
||||
err := q.WithContext(ctx.Context()).Where(
|
||||
q.ReleaseYear.Gte(1000),
|
||||
q.ReleaseYear.Lte(9999),
|
||||
).Distinct(q.ReleaseYear).Pluck(q.ReleaseYear, &years)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
values := make(map[string]string, len(years)+1)
|
||||
values["null"] = "Unknown"
|
||||
for _, y := range years {
|
||||
values[y.String()] = y.String()
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
@ -22,51 +22,17 @@ type attribute interface {
|
||||
Label() string
|
||||
}
|
||||
|
||||
func (f torrentContentAttributeFacet[T]) Aggregate(ctx query.FacetContext) (query.AggregationItems, error) {
|
||||
var results []struct {
|
||||
Value *T
|
||||
Count uint
|
||||
}
|
||||
q, qErr := ctx.NewAggregationQuery()
|
||||
if qErr != nil {
|
||||
return nil, qErr
|
||||
}
|
||||
fld := f.field(ctx.Query())
|
||||
if err := q.UnderlyingDB().Select(
|
||||
ctx.TableName()+"."+string(fld.ColumnName())+" as value",
|
||||
"count(*) as count",
|
||||
).Group(
|
||||
"value",
|
||||
).Find(&results).Error; err != nil {
|
||||
return nil, fmt.Errorf("failed to aggregate: %w", err)
|
||||
}
|
||||
agg := make(query.AggregationItems, len(results))
|
||||
for _, item := range results {
|
||||
var key, label string
|
||||
if item.Value == nil {
|
||||
key = "null"
|
||||
label = "Unknown"
|
||||
} else {
|
||||
vV := *item.Value
|
||||
key = vV.String()
|
||||
label = vV.Label()
|
||||
}
|
||||
agg[key] = query.AggregationItem{
|
||||
Label: label,
|
||||
Count: item.Count,
|
||||
}
|
||||
}
|
||||
return agg, nil
|
||||
func (torrentContentAttributeFacet[T]) Values(query.FacetContext) (map[string]string, error) {
|
||||
return map[string]string{}, nil
|
||||
}
|
||||
|
||||
func (f torrentContentAttributeFacet[T]) Criteria() []query.Criteria {
|
||||
func (f torrentContentAttributeFacet[T]) Criteria(filter query.FacetFilter) []query.Criteria {
|
||||
return []query.Criteria{
|
||||
query.GenCriteria(func(ctx query.DbContext) (query.Criteria, error) {
|
||||
fld := f.field(ctx.Query())
|
||||
filter := f.Filter().Values()
|
||||
values := make([]driver.Valuer, 0, len(filter))
|
||||
hasNull := false
|
||||
for _, v := range filter {
|
||||
for _, v := range filter.Values() {
|
||||
if v == "null" {
|
||||
hasNull = true
|
||||
continue
|
||||
|
||||
@ -1,74 +1,34 @@
|
||||
package search
|
||||
|
||||
import (
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/dao"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/query"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/model"
|
||||
"gorm.io/gen/field"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type contentCollectionFacet struct {
|
||||
type torrentContentCollectionFacet struct {
|
||||
query.FacetConfig
|
||||
collectionType string
|
||||
}
|
||||
|
||||
func (r contentCollectionFacet) Aggregate(ctx query.FacetContext) (items query.AggregationItems, err error) {
|
||||
var results []struct {
|
||||
ConcatId string
|
||||
Name string
|
||||
Count uint
|
||||
func (f torrentContentCollectionFacet) Values(ctx query.FacetContext) (map[string]string, error) {
|
||||
q := ctx.Query().ContentCollection
|
||||
colls, err := ctx.Query().ContentCollection.WithContext(ctx.Context()).Where(
|
||||
q.Type.Eq(f.collectionType),
|
||||
).Find()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
sq, sqErr := ctx.NewAggregationQuery(
|
||||
query.Table(model.TableNameContentCollectionContent),
|
||||
query.Join(func(q *dao.Query) []query.TableJoin {
|
||||
return []query.TableJoin{
|
||||
{
|
||||
Table: q.TorrentContent,
|
||||
On: []field.Expr{
|
||||
q.TorrentContent.ContentType.EqCol(q.ContentCollectionContent.ContentType),
|
||||
q.TorrentContent.ContentSource.EqCol(q.ContentCollectionContent.ContentSource),
|
||||
q.TorrentContent.ContentID.EqCol(q.ContentCollectionContent.ContentID),
|
||||
},
|
||||
Type: query.TableJoinTypeInner,
|
||||
},
|
||||
}
|
||||
}),
|
||||
query.RequireJoin(model.TableNameContentCollection),
|
||||
query.Where(query.RawCriteria{
|
||||
Query: "content_collections_content.content_collection_type = ?",
|
||||
Args: []interface{}{r.collectionType},
|
||||
}),
|
||||
)
|
||||
if sqErr != nil {
|
||||
err = sqErr
|
||||
return
|
||||
values := make(map[string]string, len(colls))
|
||||
for _, coll := range colls {
|
||||
values[coll.Source+":"+coll.ID] = coll.Name
|
||||
}
|
||||
tx := sq.UnderlyingDB().Select(
|
||||
"(content_collections_content.content_collection_source || ':' ||content_collections_content.content_collection_id) as concat_id",
|
||||
"MIN(content_collections.name) as name",
|
||||
"count(distinct(content_collections_content.content_source, content_collections_content.content_id)) as count",
|
||||
).Group(
|
||||
"concat_id",
|
||||
).Find(&results)
|
||||
if tx.Error != nil {
|
||||
err = tx.Error
|
||||
return
|
||||
}
|
||||
agg := make(query.AggregationItems, len(results))
|
||||
for _, item := range results {
|
||||
agg[item.ConcatId] = query.AggregationItem{
|
||||
Label: item.Name,
|
||||
Count: item.Count,
|
||||
}
|
||||
}
|
||||
return agg, nil
|
||||
return values, nil
|
||||
}
|
||||
|
||||
func (r contentCollectionFacet) Criteria() []query.Criteria {
|
||||
func (f torrentContentCollectionFacet) Criteria(filter query.FacetFilter) []query.Criteria {
|
||||
sourceMap := make(map[string]map[string]struct{})
|
||||
filter := r.Filter().Values()
|
||||
for _, value := range filter {
|
||||
for _, value := range filter.Values() {
|
||||
parts := strings.Split(value, ":")
|
||||
if len(parts) != 2 {
|
||||
continue
|
||||
@ -84,12 +44,12 @@ func (r contentCollectionFacet) Criteria() []query.Criteria {
|
||||
refs := make([]model.ContentCollectionRef, 0, len(idMap))
|
||||
for id := range idMap {
|
||||
refs = append(refs, model.ContentCollectionRef{
|
||||
Type: r.collectionType,
|
||||
Type: f.collectionType,
|
||||
Source: source,
|
||||
ID: id,
|
||||
})
|
||||
}
|
||||
switch r.Logic() {
|
||||
switch f.Logic() {
|
||||
case model.FacetLogicOr:
|
||||
criteria = append(criteria, ContentCollectionCriteria(refs...))
|
||||
case model.FacetLogicAnd:
|
||||
|
||||
@ -8,7 +8,7 @@ import (
|
||||
const ContentGenreFacetKey = "content_genre"
|
||||
|
||||
func TorrentContentGenreFacet(options ...query.FacetOption) query.Facet {
|
||||
return contentCollectionFacet{
|
||||
return torrentContentCollectionFacet{
|
||||
FacetConfig: query.NewFacetConfig(
|
||||
append([]query.FacetOption{
|
||||
query.FacetHasKey(ContentGenreFacetKey),
|
||||
|
||||
@ -26,40 +26,20 @@ type torrentContentLanguageFacet struct {
|
||||
query.FacetConfig
|
||||
}
|
||||
|
||||
func (f torrentContentLanguageFacet) Aggregate(ctx query.FacetContext) (query.AggregationItems, error) {
|
||||
var results []struct {
|
||||
Language model.Language
|
||||
Count uint
|
||||
func (torrentContentLanguageFacet) Values(query.FacetContext) (map[string]string, error) {
|
||||
languageValues := model.LanguageValues()
|
||||
values := make(map[string]string, len(languageValues))
|
||||
for _, l := range languageValues {
|
||||
values[l.Id()] = l.Name()
|
||||
}
|
||||
q, qErr := ctx.NewAggregationQuery()
|
||||
if qErr != nil {
|
||||
return nil, qErr
|
||||
}
|
||||
tx := q.UnderlyingDB().Select(
|
||||
"jsonb_array_elements(torrent_contents.languages) as language",
|
||||
"count(*) as count",
|
||||
).Group(
|
||||
"language",
|
||||
).Find(&results)
|
||||
if tx.Error != nil {
|
||||
return nil, fmt.Errorf("failed to aggregate languages: %w", tx.Error)
|
||||
}
|
||||
agg := make(query.AggregationItems, len(results))
|
||||
for _, item := range results {
|
||||
agg[item.Language.Id()] = query.AggregationItem{
|
||||
Label: item.Language.Name(),
|
||||
Count: item.Count,
|
||||
}
|
||||
}
|
||||
return agg, nil
|
||||
return values, nil
|
||||
}
|
||||
|
||||
func (f torrentContentLanguageFacet) Criteria() []query.Criteria {
|
||||
func (f torrentContentLanguageFacet) Criteria(filter query.FacetFilter) []query.Criteria {
|
||||
return []query.Criteria{
|
||||
query.GenCriteria(func(ctx query.DbContext) (query.Criteria, error) {
|
||||
filter := f.Filter().Values()
|
||||
langs := make([]model.Language, 0, len(filter))
|
||||
for _, v := range filter {
|
||||
for _, v := range filter.Values() {
|
||||
lang := model.ParseLanguage(v)
|
||||
if !lang.Valid {
|
||||
return nil, errors.New("invalid language filter specified")
|
||||
|
||||
@ -10,17 +10,32 @@ import (
|
||||
const TorrentContentTypeFacetKey = "content_type"
|
||||
|
||||
func TorrentContentTypeFacet(options ...query.FacetOption) query.Facet {
|
||||
return torrentContentAttributeFacet[model.ContentType]{
|
||||
FacetConfig: query.NewFacetConfig(
|
||||
append([]query.FacetOption{
|
||||
query.FacetHasKey(TorrentContentTypeFacetKey),
|
||||
query.FacetHasLabel("Content Type"),
|
||||
query.FacetUsesOrLogic(),
|
||||
}, options...)...,
|
||||
),
|
||||
field: func(q *dao.Query) field.Field {
|
||||
return field.Field(q.TorrentContent.ContentType)
|
||||
return torrentContentTypeFacet{
|
||||
torrentContentAttributeFacet[model.ContentType]{
|
||||
FacetConfig: query.NewFacetConfig(
|
||||
append([]query.FacetOption{
|
||||
query.FacetHasKey(TorrentContentTypeFacetKey),
|
||||
query.FacetHasLabel("Content Type"),
|
||||
query.FacetUsesOrLogic(),
|
||||
}, options...)...,
|
||||
),
|
||||
field: func(q *dao.Query) field.Field {
|
||||
return field.Field(q.TorrentContent.ContentType)
|
||||
},
|
||||
parse: model.ParseContentType,
|
||||
},
|
||||
parse: model.ParseContentType,
|
||||
}
|
||||
}
|
||||
|
||||
type torrentContentTypeFacet struct {
|
||||
torrentContentAttributeFacet[model.ContentType]
|
||||
}
|
||||
|
||||
func (f torrentContentTypeFacet) Values(query.FacetContext) (map[string]string, error) {
|
||||
values := make(map[string]string)
|
||||
values["null"] = "Unknown"
|
||||
for _, contentType := range model.ContentTypeValues() {
|
||||
values[string(contentType)] = contentType.Label()
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
@ -14,7 +14,7 @@ func video3dField(q *dao.Query) field.Field {
|
||||
}
|
||||
|
||||
func Video3dFacet(options ...query.FacetOption) query.Facet {
|
||||
return torrentContentAttributeFacet[model.Video3d]{
|
||||
return video3dFacet{torrentContentAttributeFacet[model.Video3d]{
|
||||
FacetConfig: query.NewFacetConfig(
|
||||
append([]query.FacetOption{
|
||||
query.FacetHasKey(Video3dFacetKey),
|
||||
@ -24,5 +24,18 @@ func Video3dFacet(options ...query.FacetOption) query.Facet {
|
||||
),
|
||||
field: video3dField,
|
||||
parse: model.ParseVideo3d,
|
||||
}
|
||||
}}
|
||||
}
|
||||
|
||||
type video3dFacet struct {
|
||||
torrentContentAttributeFacet[model.Video3d]
|
||||
}
|
||||
|
||||
func (f video3dFacet) Values(query.FacetContext) (map[string]string, error) {
|
||||
v3ds := model.Video3dValues()
|
||||
values := make(map[string]string, len(v3ds))
|
||||
for _, vr := range v3ds {
|
||||
values[vr.String()] = vr.Label()
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
@ -10,7 +10,7 @@ import (
|
||||
const VideoCodecFacetKey = "video_codec"
|
||||
|
||||
func VideoCodecFacet(options ...query.FacetOption) query.Facet {
|
||||
return torrentContentAttributeFacet[model.VideoCodec]{
|
||||
return videoCodecFacet{torrentContentAttributeFacet[model.VideoCodec]{
|
||||
FacetConfig: query.NewFacetConfig(
|
||||
append([]query.FacetOption{
|
||||
query.FacetHasKey(VideoCodecFacetKey),
|
||||
@ -22,5 +22,18 @@ func VideoCodecFacet(options ...query.FacetOption) query.Facet {
|
||||
return q.TorrentContent.VideoCodec
|
||||
},
|
||||
parse: model.ParseVideoCodec,
|
||||
}
|
||||
}}
|
||||
}
|
||||
|
||||
type videoCodecFacet struct {
|
||||
torrentContentAttributeFacet[model.VideoCodec]
|
||||
}
|
||||
|
||||
func (f videoCodecFacet) Values(query.FacetContext) (map[string]string, error) {
|
||||
vcs := model.VideoCodecValues()
|
||||
values := make(map[string]string, len(vcs))
|
||||
for _, vr := range vcs {
|
||||
values[vr.String()] = vr.Label()
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
@ -10,7 +10,7 @@ import (
|
||||
const VideoModifierFacetKey = "video_modifier"
|
||||
|
||||
func VideoModifierFacet(options ...query.FacetOption) query.Facet {
|
||||
return torrentContentAttributeFacet[model.VideoModifier]{
|
||||
return videoModifierFacet{torrentContentAttributeFacet[model.VideoModifier]{
|
||||
FacetConfig: query.NewFacetConfig(
|
||||
append([]query.FacetOption{
|
||||
query.FacetHasKey(VideoModifierFacetKey),
|
||||
@ -22,5 +22,18 @@ func VideoModifierFacet(options ...query.FacetOption) query.Facet {
|
||||
return q.TorrentContent.VideoModifier
|
||||
},
|
||||
parse: model.ParseVideoModifier,
|
||||
}
|
||||
}}
|
||||
}
|
||||
|
||||
type videoModifierFacet struct {
|
||||
torrentContentAttributeFacet[model.VideoModifier]
|
||||
}
|
||||
|
||||
func (f videoModifierFacet) Values(query.FacetContext) (map[string]string, error) {
|
||||
vms := model.VideoModifierValues()
|
||||
values := make(map[string]string, len(vms))
|
||||
for _, vr := range vms {
|
||||
values[vr.String()] = vr.Label()
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
@ -14,7 +14,7 @@ func videoResolutionField(q *dao.Query) field.Field {
|
||||
}
|
||||
|
||||
func VideoResolutionFacet(options ...query.FacetOption) query.Facet {
|
||||
return torrentContentAttributeFacet[model.VideoResolution]{
|
||||
return videoResolutionFacet{torrentContentAttributeFacet[model.VideoResolution]{
|
||||
FacetConfig: query.NewFacetConfig(
|
||||
append([]query.FacetOption{
|
||||
query.FacetHasKey(VideoResolutionFacetKey),
|
||||
@ -24,5 +24,18 @@ func VideoResolutionFacet(options ...query.FacetOption) query.Facet {
|
||||
),
|
||||
field: videoResolutionField,
|
||||
parse: model.ParseVideoResolution,
|
||||
}
|
||||
}}
|
||||
}
|
||||
|
||||
type videoResolutionFacet struct {
|
||||
torrentContentAttributeFacet[model.VideoResolution]
|
||||
}
|
||||
|
||||
func (f videoResolutionFacet) Values(query.FacetContext) (map[string]string, error) {
|
||||
vrs := model.VideoResolutionValues()
|
||||
values := make(map[string]string, len(vrs))
|
||||
for _, vr := range vrs {
|
||||
values[vr.String()] = vr.Label()
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
@ -10,7 +10,7 @@ import (
|
||||
const VideoSourceFacetKey = "video_source"
|
||||
|
||||
func VideoSourceFacet(options ...query.FacetOption) query.Facet {
|
||||
return torrentContentAttributeFacet[model.VideoSource]{
|
||||
return videoSourceFacet{torrentContentAttributeFacet[model.VideoSource]{
|
||||
FacetConfig: query.NewFacetConfig(
|
||||
append([]query.FacetOption{
|
||||
query.FacetHasKey(VideoSourceFacetKey),
|
||||
@ -22,5 +22,18 @@ func VideoSourceFacet(options ...query.FacetOption) query.Facet {
|
||||
return q.TorrentContent.VideoSource
|
||||
},
|
||||
parse: model.ParseVideoSource,
|
||||
}
|
||||
}}
|
||||
}
|
||||
|
||||
type videoSourceFacet struct {
|
||||
torrentContentAttributeFacet[model.VideoSource]
|
||||
}
|
||||
|
||||
func (f videoSourceFacet) Values(query.FacetContext) (map[string]string, error) {
|
||||
vsrcs := model.VideoSourceValues()
|
||||
values := make(map[string]string, len(vsrcs))
|
||||
for _, vr := range vsrcs {
|
||||
values[vr.String()] = vr.Label()
|
||||
}
|
||||
return values, nil
|
||||
}
|
||||
|
||||
@ -2,12 +2,8 @@ package search
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/dao"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/query"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/model"
|
||||
"gorm.io/gen/field"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
const TorrentFileTypeFacetKey = "file_type"
|
||||
@ -29,126 +25,22 @@ type torrentFileTypeFacet struct {
|
||||
query.FacetConfig
|
||||
}
|
||||
|
||||
func (f torrentFileTypeFacet) Aggregate(ctx query.FacetContext) (query.AggregationItems, error) {
|
||||
type result struct {
|
||||
FileType model.FileType
|
||||
Count uint
|
||||
func (torrentFileTypeFacet) Values(query.FacetContext) (map[string]string, error) {
|
||||
fts := model.FileTypeValues()
|
||||
values := make(map[string]string, len(fts))
|
||||
for _, vr := range fts {
|
||||
values[vr.String()] = vr.Label()
|
||||
}
|
||||
var allExts []string
|
||||
ftFieldTemplate := "case "
|
||||
for _, ft := range model.FileTypeValues() {
|
||||
exts := ft.Extensions()
|
||||
allExts = append(allExts, exts...)
|
||||
ftFieldTemplate += "when {table}.extension in " + makeStringList(exts...) + " then '" + ft.String() + "' "
|
||||
}
|
||||
ftFieldTemplate += "end as file_type"
|
||||
var fileResults []result
|
||||
var torrentResults []result
|
||||
var errs []error
|
||||
// we need to gather aggregations from both the torrent_files table and the torrents table (for the case when the torrent is a single file)
|
||||
wg := sync.WaitGroup{}
|
||||
wg.Add(2)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
q, qErr := ctx.NewAggregationQuery(
|
||||
query.Table(model.TableNameTorrentFile),
|
||||
query.Join(func(q *dao.Query) []query.TableJoin {
|
||||
return []query.TableJoin{
|
||||
{
|
||||
Table: q.TorrentContent,
|
||||
On: []field.Expr{
|
||||
q.TorrentContent.InfoHash.EqCol(q.TorrentFile.InfoHash),
|
||||
},
|
||||
Type: query.TableJoinTypeInner,
|
||||
},
|
||||
}
|
||||
}),
|
||||
)
|
||||
if qErr != nil {
|
||||
errs = append(errs, qErr)
|
||||
return
|
||||
}
|
||||
if err := q.UnderlyingDB().Select(
|
||||
strings.Replace(ftFieldTemplate, "{table}", "torrent_files", -1),
|
||||
"count(distinct(torrent_files.info_hash)) as count",
|
||||
).Where("torrent_files.extension in " + makeStringList(allExts...)).Group(
|
||||
"file_type",
|
||||
).Find(&fileResults).Error; err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
}()
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
q, qErr := ctx.NewAggregationQuery(
|
||||
query.Table(model.TableNameTorrent),
|
||||
query.Join(func(q *dao.Query) []query.TableJoin {
|
||||
return []query.TableJoin{
|
||||
{
|
||||
Table: q.TorrentContent,
|
||||
On: []field.Expr{
|
||||
q.TorrentContent.InfoHash.EqCol(q.Torrent.InfoHash),
|
||||
},
|
||||
Type: query.TableJoinTypeInner,
|
||||
},
|
||||
}
|
||||
}),
|
||||
)
|
||||
if qErr != nil {
|
||||
errs = append(errs, qErr)
|
||||
return
|
||||
}
|
||||
if err := q.UnderlyingDB().Select(
|
||||
strings.Replace(ftFieldTemplate, "{table}", "torrents", -1),
|
||||
"count(*) as count",
|
||||
).Where("torrents.extension in " + makeStringList(allExts...)).Group(
|
||||
"file_type",
|
||||
).Find(&torrentResults).Error; err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
}()
|
||||
wg.Wait()
|
||||
if len(errs) > 0 {
|
||||
return nil, errors.Join(errs...)
|
||||
}
|
||||
allResults := make([]result, 0, len(fileResults)+len(torrentResults))
|
||||
allResults = append(allResults, fileResults...)
|
||||
allResults = append(allResults, torrentResults...)
|
||||
agg := make(query.AggregationItems, len(allResults))
|
||||
for _, item := range allResults {
|
||||
key := item.FileType.String()
|
||||
if existing, ok := agg[key]; !ok {
|
||||
agg[key] = query.AggregationItem{
|
||||
Label: item.FileType.Label(),
|
||||
Count: item.Count,
|
||||
}
|
||||
} else {
|
||||
existing.Count += item.Count
|
||||
agg[key] = existing
|
||||
}
|
||||
}
|
||||
return agg, nil
|
||||
return values, nil
|
||||
}
|
||||
|
||||
func makeStringList(values ...string) string {
|
||||
strs := "("
|
||||
for i, ext := range values {
|
||||
if i > 0 {
|
||||
strs += ","
|
||||
}
|
||||
strs += "'" + ext + "'"
|
||||
}
|
||||
strs += ")"
|
||||
return strs
|
||||
}
|
||||
|
||||
func (f torrentFileTypeFacet) Criteria() []query.Criteria {
|
||||
func (f torrentFileTypeFacet) Criteria(filter query.FacetFilter) []query.Criteria {
|
||||
return []query.Criteria{query.GenCriteria(func(ctx query.DbContext) (query.Criteria, error) {
|
||||
filter := f.Filter().Values()
|
||||
if len(filter) == 0 {
|
||||
return query.AndCriteria{}, nil
|
||||
}
|
||||
fileTypes := make([]model.FileType, 0, len(filter))
|
||||
for _, v := range filter {
|
||||
for _, v := range filter.Values() {
|
||||
ft, ftErr := model.ParseFileType(v)
|
||||
if ftErr != nil {
|
||||
return nil, errors.New("invalid file type filter specified")
|
||||
|
||||
@ -1,12 +1,8 @@
|
||||
package search
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/dao"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/query"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/model"
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gen/field"
|
||||
)
|
||||
|
||||
const TorrentSourceFacetKey = "torrent_source"
|
||||
@ -27,74 +23,25 @@ type torrentSourceFacet struct {
|
||||
query.FacetConfig
|
||||
}
|
||||
|
||||
func (f torrentSourceFacet) Aggregate(ctx query.FacetContext) (query.AggregationItems, error) {
|
||||
var results []struct {
|
||||
Value string
|
||||
Count uint
|
||||
func (torrentSourceFacet) Values(ctx query.FacetContext) (map[string]string, error) {
|
||||
q := ctx.Query().TorrentSource
|
||||
sources, sourcesErr := q.WithContext(ctx.Context()).Find()
|
||||
if sourcesErr != nil {
|
||||
return nil, sourcesErr
|
||||
}
|
||||
q, qErr := ctx.NewAggregationQuery(
|
||||
query.Table(model.TableNameTorrentsTorrentSource),
|
||||
query.Join(func(daoQ *dao.Query) []query.TableJoin {
|
||||
return []query.TableJoin{
|
||||
{
|
||||
Table: daoQ.Torrent,
|
||||
On: []field.Expr{
|
||||
daoQ.Torrent.InfoHash.EqCol(daoQ.TorrentsTorrentSource.InfoHash),
|
||||
},
|
||||
Type: query.TableJoinTypeInner,
|
||||
},
|
||||
{
|
||||
Table: daoQ.TorrentContent,
|
||||
On: []field.Expr{
|
||||
daoQ.TorrentContent.InfoHash.EqCol(daoQ.TorrentsTorrentSource.InfoHash),
|
||||
},
|
||||
Type: query.TableJoinTypeInner,
|
||||
},
|
||||
}
|
||||
}),
|
||||
)
|
||||
if qErr != nil {
|
||||
return nil, qErr
|
||||
values := make(map[string]string, len(sources))
|
||||
for _, s := range sources {
|
||||
values[s.Key] = s.Name
|
||||
}
|
||||
if err := q.UnderlyingDB().Select(
|
||||
fmt.Sprintf("%s.source as value", model.TableNameTorrentsTorrentSource),
|
||||
"count(*) as count",
|
||||
).Group(
|
||||
"value",
|
||||
).Find(&results).Error; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
agg := make(query.AggregationItems, len(results))
|
||||
var values []string
|
||||
for _, item := range results {
|
||||
agg[item.Value] = query.AggregationItem{
|
||||
Count: item.Count,
|
||||
}
|
||||
values = append(values, item.Value)
|
||||
}
|
||||
if len(values) > 0 {
|
||||
sources, sourcesErr := ctx.Query().TorrentSource.WithContext(ctx.Context()).Where(
|
||||
ctx.Query().TorrentSource.Key.In(values...),
|
||||
).Find()
|
||||
if sourcesErr != nil {
|
||||
return nil, sourcesErr
|
||||
}
|
||||
for _, source := range sources {
|
||||
thisAgg := agg[source.Key]
|
||||
thisAgg.Label = source.Name
|
||||
agg[source.Key] = thisAgg
|
||||
}
|
||||
}
|
||||
return agg, nil
|
||||
return values, nil
|
||||
}
|
||||
|
||||
func (f torrentSourceFacet) Criteria() []query.Criteria {
|
||||
filter := f.Filter().Values()
|
||||
func (f torrentSourceFacet) Criteria(filter query.FacetFilter) []query.Criteria {
|
||||
if len(filter) == 0 {
|
||||
return []query.Criteria{}
|
||||
}
|
||||
return []query.Criteria{
|
||||
TorrentSourceCriteria(filter...),
|
||||
TorrentSourceCriteria(filter.Values()...),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,11 +1,8 @@
|
||||
package search
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/dao"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/query"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/model"
|
||||
"gorm.io/gen/field"
|
||||
)
|
||||
|
||||
const TorrentTagFacetKey = "torrent_tag"
|
||||
@ -27,64 +24,22 @@ type torrentTagFacet struct {
|
||||
query.FacetConfig
|
||||
}
|
||||
|
||||
func (f torrentTagFacet) Aggregate(ctx query.FacetContext) (query.AggregationItems, error) {
|
||||
var results []struct {
|
||||
Value string
|
||||
Count uint
|
||||
func (torrentTagFacet) Values(ctx query.FacetContext) (map[string]string, error) {
|
||||
q := ctx.Query().TorrentTag
|
||||
tags, tagsErr := q.WithContext(ctx.Context()).Distinct(q.Name).Find()
|
||||
if tagsErr != nil {
|
||||
return nil, tagsErr
|
||||
}
|
||||
q, qErr := ctx.NewAggregationQuery(
|
||||
query.Table(model.TableNameTorrentTag),
|
||||
query.Join(func(daoQ *dao.Query) []query.TableJoin {
|
||||
return []query.TableJoin{
|
||||
{
|
||||
Table: daoQ.TorrentContent,
|
||||
On: []field.Expr{
|
||||
daoQ.TorrentContent.InfoHash.EqCol(daoQ.TorrentTag.InfoHash),
|
||||
},
|
||||
Type: query.TableJoinTypeInner,
|
||||
},
|
||||
}
|
||||
}),
|
||||
)
|
||||
if qErr != nil {
|
||||
return nil, qErr
|
||||
values := make(map[string]string, len(tags))
|
||||
for _, tag := range tags {
|
||||
values[tag.Name] = tag.Name
|
||||
}
|
||||
if err := q.UnderlyingDB().Select(
|
||||
fmt.Sprintf("%s.name as value", model.TableNameTorrentTag),
|
||||
"count(*) as count",
|
||||
).Group(
|
||||
"value",
|
||||
).Find(&results).Error; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
agg := make(query.AggregationItems, len(results))
|
||||
var values []string
|
||||
for _, item := range results {
|
||||
agg[item.Value] = query.AggregationItem{
|
||||
Count: item.Count,
|
||||
}
|
||||
values = append(values, item.Value)
|
||||
}
|
||||
if len(values) > 0 {
|
||||
tags, tagsErr := ctx.Query().TorrentTag.WithContext(ctx.Context()).Where(
|
||||
ctx.Query().TorrentTag.Name.In(values...),
|
||||
).Find()
|
||||
if tagsErr != nil {
|
||||
return nil, tagsErr
|
||||
}
|
||||
for _, tag := range tags {
|
||||
thisAgg := agg[tag.Name]
|
||||
thisAgg.Label = tag.Name
|
||||
agg[tag.Name] = thisAgg
|
||||
}
|
||||
}
|
||||
return agg, nil
|
||||
return values, nil
|
||||
}
|
||||
|
||||
func (f torrentTagFacet) Criteria() []query.Criteria {
|
||||
filter := f.Filter().Values()
|
||||
func (f torrentTagFacet) Criteria(filter query.FacetFilter) []query.Criteria {
|
||||
criteria := make([]query.Criteria, len(filter))
|
||||
for i, tag := range filter {
|
||||
for i, tag := range filter.Values() {
|
||||
criteria[i] = TorrentTagCriteria(tag)
|
||||
}
|
||||
return criteria
|
||||
|
||||
@ -1,15 +0,0 @@
|
||||
package warmer
|
||||
|
||||
import "time"
|
||||
|
||||
type Config struct {
|
||||
Enabled bool
|
||||
Interval time.Duration
|
||||
}
|
||||
|
||||
func NewDefaultConfig() Config {
|
||||
return Config{
|
||||
Enabled: true,
|
||||
Interval: 50 * time.Minute,
|
||||
}
|
||||
}
|
||||
@ -1,55 +0,0 @@
|
||||
package warmer
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/boilerplate/lazy"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/boilerplate/worker"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/search"
|
||||
"go.uber.org/fx"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type DecoratorParams struct {
|
||||
fx.In
|
||||
Config Config
|
||||
Search lazy.Lazy[search.Search]
|
||||
Logger *zap.SugaredLogger
|
||||
}
|
||||
|
||||
type DecoratorResult struct {
|
||||
fx.Out
|
||||
Decorator worker.Decorator `group:"worker_decorators"`
|
||||
}
|
||||
|
||||
func New(params DecoratorParams) DecoratorResult {
|
||||
var w warmer
|
||||
return DecoratorResult{
|
||||
Decorator: worker.Decorator{
|
||||
Key: "http_server",
|
||||
Decorate: func(hook fx.Hook) fx.Hook {
|
||||
return fx.Hook{
|
||||
OnStart: func(ctx context.Context) error {
|
||||
s, err := params.Search.Get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
w = warmer{
|
||||
stopped: make(chan struct{}),
|
||||
interval: params.Config.Interval,
|
||||
search: s,
|
||||
logger: params.Logger.Named("search_warmer"),
|
||||
}
|
||||
go w.start()
|
||||
return hook.OnStart(ctx)
|
||||
},
|
||||
OnStop: func(ctx context.Context) error {
|
||||
if w.stopped != nil {
|
||||
close(w.stopped)
|
||||
}
|
||||
return hook.OnStop(ctx)
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -1,95 +0,0 @@
|
||||
package warmer
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/query"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/database/search"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/maps"
|
||||
"github.com/bitmagnet-io/bitmagnet/internal/model"
|
||||
"go.uber.org/zap"
|
||||
"time"
|
||||
)
|
||||
|
||||
type warmer struct {
|
||||
stopped chan struct{}
|
||||
interval time.Duration
|
||||
search search.Search
|
||||
logger *zap.SugaredLogger
|
||||
}
|
||||
|
||||
func (w warmer) start() {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
ticker := time.NewTicker(w.interval)
|
||||
go func() {
|
||||
for {
|
||||
warmed := make(chan struct{})
|
||||
go func() {
|
||||
w.warm(ctx)
|
||||
close(warmed)
|
||||
}()
|
||||
// wait for warming to complete
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-warmed:
|
||||
}
|
||||
// then wait for the next tick
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-ticker.C:
|
||||
}
|
||||
}
|
||||
}()
|
||||
<-w.stopped
|
||||
}
|
||||
|
||||
func (w warmer) warm(ctx context.Context) {
|
||||
for _, e := range warmers.Entries() {
|
||||
w.logger.Debugw("warming", "warmer", e.Key)
|
||||
if _, err := w.search.TorrentContent(
|
||||
ctx,
|
||||
query.Limit(0),
|
||||
search.TorrentContentCoreJoins(),
|
||||
e.Value,
|
||||
query.CacheWarm(),
|
||||
); err != nil {
|
||||
w.logger.Errorw("error warming", "warmer", e.Key, "error", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var warmers = maps.NewInsertMap[string, query.Option]()
|
||||
|
||||
func init() {
|
||||
|
||||
facets := maps.NewInsertMap[string, func(options ...query.FacetOption) query.Facet]()
|
||||
facets.Set(search.TorrentContentTypeFacetKey, search.TorrentContentTypeFacet)
|
||||
facets.Set(search.ContentGenreFacetKey, search.TorrentContentGenreFacet)
|
||||
facets.Set(search.LanguageFacetKey, search.TorrentContentLanguageFacet)
|
||||
facets.Set(search.Video3dFacetKey, search.Video3dFacet)
|
||||
facets.Set(search.VideoCodecFacetKey, search.VideoCodecFacet)
|
||||
facets.Set(search.VideoModifierFacetKey, search.VideoModifierFacet)
|
||||
facets.Set(search.VideoResolutionFacetKey, search.VideoResolutionFacet)
|
||||
facets.Set(search.VideoSourceFacetKey, search.VideoSourceFacet)
|
||||
facets.Set(search.TorrentFileTypeFacetKey, search.TorrentFileTypeFacet)
|
||||
facets.Set(search.TorrentSourceFacetKey, search.TorrentSourceFacet)
|
||||
facets.Set(search.TorrentTagFacetKey, search.TorrentTagsFacet)
|
||||
|
||||
// All the top-level facets should be warmed:
|
||||
for _, f := range facets.Entries() {
|
||||
warmers.Set("aggs:"+f.Key, query.WithFacet(
|
||||
f.Value(query.FacetIsAggregated()),
|
||||
))
|
||||
}
|
||||
// All the top-level facets within each content type should be warmed:
|
||||
for _, ct := range model.ContentTypeValues() {
|
||||
for _, f := range facets.Entries()[1:] {
|
||||
warmers.Set("aggs:"+ct.String()+"/"+f.Key, query.Options(query.WithFacet(
|
||||
search.TorrentContentTypeFacet(query.FacetHasFilter(query.FacetFilter{
|
||||
ct.String(): struct{}{},
|
||||
}))), query.WithFacet(f.Value(query.FacetIsAggregated()))))
|
||||
}
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -99,6 +99,7 @@ models:
    model:
      - github.com/99designs/gqlgen/graphql.Float
      - github.com/bitmagnet-io/bitmagnet/internal/model.NullFloat32
      - github.com/bitmagnet-io/bitmagnet/internal/model.NullFloat64
  Boolean:
    model:
      - github.com/99designs/gqlgen/graphql.Boolean
@ -137,7 +137,7 @@ func videoSourceFacet(input gen.VideoSourceFacetInput) q.Facet {
func aggs[T any, Agg comparable](
    items q.AggregationItems,
    parse func(string) (T, error),
    newAgg func(value *T, label string, count uint) Agg,
    newAgg func(value *T, label string, count uint, isEstimate bool) Agg,
) ([]Agg, error) {
    r := make([]Agg, 0, len(items))
    labelMap := make(map[Agg]string, len(items))
@ -147,7 +147,7 @@ func aggs[T any, Agg comparable](
        if err != nil {
            return nil, fmt.Errorf("error parsing aggregation item: %w", err)
        }
        agg := newAgg(&v, item.Label, item.Count)
        agg := newAgg(&v, item.Label, item.Count, item.IsEstimate)
        r = append(r, agg)
        labelMap[agg] = item.Label
    }
@ -156,32 +156,32 @@ func aggs[T any, Agg comparable](
        return natsort.Compare(labelMap[r[i]], labelMap[r[j]])
    })
    if null, nullOk := items["null"]; nullOk {
        r = append(r, newAgg(nil, null.Label, null.Count))
        r = append(r, newAgg(nil, null.Label, null.Count, null.IsEstimate))
    }
    return r, nil
}

func contentTypeAggs(items q.AggregationItems) ([]gen.ContentTypeAgg, error) {
    return aggs(items, model.ParseContentType, func(value *model.ContentType, label string, count uint) gen.ContentTypeAgg {
        return gen.ContentTypeAgg{Value: value, Label: label, Count: int(count)}
    return aggs(items, model.ParseContentType, func(value *model.ContentType, label string, count uint, isEstimate bool) gen.ContentTypeAgg {
        return gen.ContentTypeAgg{Value: value, Label: label, Count: int(count), IsEstimate: isEstimate}
    })
}

func torrentSourceAggs(items q.AggregationItems) ([]gen.TorrentSourceAgg, error) {
    return aggs(items, func(s string) (string, error) { return s, nil }, func(value *string, label string, count uint) gen.TorrentSourceAgg {
        return gen.TorrentSourceAgg{Value: *value, Label: label, Count: int(count)}
    return aggs(items, func(s string) (string, error) { return s, nil }, func(value *string, label string, count uint, isEstimate bool) gen.TorrentSourceAgg {
        return gen.TorrentSourceAgg{Value: *value, Label: label, Count: int(count), IsEstimate: isEstimate}
    })
}

func torrentTagAggs(items q.AggregationItems) ([]gen.TorrentTagAgg, error) {
    return aggs(items, func(s string) (string, error) { return s, nil }, func(value *string, label string, count uint) gen.TorrentTagAgg {
        return gen.TorrentTagAgg{Value: *value, Label: label, Count: int(count)}
    return aggs(items, func(s string) (string, error) { return s, nil }, func(value *string, label string, count uint, isEstimate bool) gen.TorrentTagAgg {
        return gen.TorrentTagAgg{Value: *value, Label: label, Count: int(count), IsEstimate: isEstimate}
    })
}

func torrentFileTypeAggs(items q.AggregationItems) ([]gen.TorrentFileTypeAgg, error) {
    return aggs(items, model.ParseFileType, func(value *model.FileType, label string, count uint) gen.TorrentFileTypeAgg {
        return gen.TorrentFileTypeAgg{Value: *value, Label: label, Count: int(count)}
    return aggs(items, model.ParseFileType, func(value *model.FileType, label string, count uint, isEstimate bool) gen.TorrentFileTypeAgg {
        return gen.TorrentFileTypeAgg{Value: *value, Label: label, Count: int(count), IsEstimate: isEstimate}
    })
}

@ -192,31 +192,31 @@ func languageAggs(items q.AggregationItems) ([]gen.LanguageAgg, error) {
            return "", errors.New("invalid language")
        }
        return lang.Language, nil
    }, func(value *model.Language, label string, count uint) gen.LanguageAgg {
        return gen.LanguageAgg{Value: *value, Label: label, Count: int(count)}
    }, func(value *model.Language, label string, count uint, isEstimate bool) gen.LanguageAgg {
        return gen.LanguageAgg{Value: *value, Label: label, Count: int(count), IsEstimate: isEstimate}
    })
}

func genreAggs(items q.AggregationItems) ([]gen.GenreAgg, error) {
    return aggs(items, func(s string) (string, error) { return s, nil }, func(value *string, label string, count uint) gen.GenreAgg {
        return gen.GenreAgg{Value: *value, Label: label, Count: int(count)}
    return aggs(items, func(s string) (string, error) { return s, nil }, func(value *string, label string, count uint, isEstimate bool) gen.GenreAgg {
        return gen.GenreAgg{Value: *value, Label: label, Count: int(count), IsEstimate: isEstimate}
    })
}

func releaseYearAggs(items q.AggregationItems) ([]gen.ReleaseYearAgg, error) {
    return aggs(items, model.ParseYear, func(value *model.Year, label string, count uint) gen.ReleaseYearAgg {
        return gen.ReleaseYearAgg{Value: value, Label: label, Count: int(count)}
    return aggs(items, model.ParseYear, func(value *model.Year, label string, count uint, isEstimate bool) gen.ReleaseYearAgg {
        return gen.ReleaseYearAgg{Value: value, Label: label, Count: int(count), IsEstimate: isEstimate}
    })
}

func videoResolutionAggs(items q.AggregationItems) ([]gen.VideoResolutionAgg, error) {
    return aggs(items, model.ParseVideoResolution, func(value *model.VideoResolution, label string, count uint) gen.VideoResolutionAgg {
        return gen.VideoResolutionAgg{Value: value, Label: label, Count: int(count)}
    return aggs(items, model.ParseVideoResolution, func(value *model.VideoResolution, label string, count uint, isEstimate bool) gen.VideoResolutionAgg {
        return gen.VideoResolutionAgg{Value: value, Label: label, Count: int(count), IsEstimate: isEstimate}
    })
}

func videoSourceAggs(items q.AggregationItems) ([]gen.VideoSourceAgg, error) {
    return aggs(items, model.ParseVideoSource, func(value *model.VideoSource, label string, count uint) gen.VideoSourceAgg {
        return gen.VideoSourceAgg{Value: value, Label: label, Count: int(count)}
    return aggs(items, model.ParseVideoSource, func(value *model.VideoSource, label string, count uint, isEstimate bool) gen.VideoSourceAgg {
        return gen.VideoSourceAgg{Value: value, Label: label, Count: int(count), IsEstimate: isEstimate}
    })
}
@ -8,9 +8,10 @@ import (
)

type ContentTypeAgg struct {
    Value *model.ContentType `json:"value,omitempty"`
    Label string `json:"label"`
    Count int `json:"count"`
    Value *model.ContentType `json:"value,omitempty"`
    Label string `json:"label"`
    Count int `json:"count"`
    IsEstimate bool `json:"isEstimate"`
}

type ContentTypeFacetInput struct {
@ -19,9 +20,10 @@ type ContentTypeFacetInput struct {
}

type GenreAgg struct {
    Value string `json:"value"`
    Label string `json:"label"`
    Count int `json:"count"`
    Value string `json:"value"`
    Label string `json:"label"`
    Count int `json:"count"`
    IsEstimate bool `json:"isEstimate"`
}

type GenreFacetInput struct {
@ -31,9 +33,10 @@ type GenreFacetInput struct {
}

type LanguageAgg struct {
    Value model.Language `json:"value"`
    Label string `json:"label"`
    Count int `json:"count"`
    Value model.Language `json:"value"`
    Label string `json:"label"`
    Count int `json:"count"`
    IsEstimate bool `json:"isEstimate"`
}

type LanguageFacetInput struct {
@ -48,9 +51,10 @@ type Query struct {
}

type ReleaseYearAgg struct {
    Value *model.Year `json:"value,omitempty"`
    Label string `json:"label"`
    Count int `json:"count"`
    Value *model.Year `json:"value,omitempty"`
    Label string `json:"label"`
    Count int `json:"count"`
    IsEstimate bool `json:"isEstimate"`
}

type ReleaseYearFacetInput struct {
@ -88,9 +92,10 @@ type TorrentContentFacetsInput struct {
}

type TorrentFileTypeAgg struct {
    Value model.FileType `json:"value"`
    Label string `json:"label"`
    Count int `json:"count"`
    Value model.FileType `json:"value"`
    Label string `json:"label"`
    Count int `json:"count"`
    IsEstimate bool `json:"isEstimate"`
}

type TorrentFileTypeFacetInput struct {
@ -100,9 +105,10 @@ type TorrentFileTypeFacetInput struct {
}

type TorrentSourceAgg struct {
    Value string `json:"value"`
    Label string `json:"label"`
    Count int `json:"count"`
    Value string `json:"value"`
    Label string `json:"label"`
    Count int `json:"count"`
    IsEstimate bool `json:"isEstimate"`
}

type TorrentSourceFacetInput struct {
@ -112,9 +118,10 @@ type TorrentSourceFacetInput struct {
}

type TorrentTagAgg struct {
    Value string `json:"value"`
    Label string `json:"label"`
    Count int `json:"count"`
    Value string `json:"value"`
    Label string `json:"label"`
    Count int `json:"count"`
    IsEstimate bool `json:"isEstimate"`
}

type TorrentTagFacetInput struct {
@ -124,9 +131,10 @@ type TorrentTagFacetInput struct {
}

type VideoResolutionAgg struct {
    Value *model.VideoResolution `json:"value,omitempty"`
    Label string `json:"label"`
    Count int `json:"count"`
    Value *model.VideoResolution `json:"value,omitempty"`
    Label string `json:"label"`
    Count int `json:"count"`
    IsEstimate bool `json:"isEstimate"`
}

type VideoResolutionFacetInput struct {
@ -135,9 +143,10 @@ type VideoResolutionFacetInput struct {
}

type VideoSourceAgg struct {
    Value *model.VideoSource `json:"value,omitempty"`
    Label string `json:"label"`
    Count int `json:"count"`
    Value *model.VideoSource `json:"value,omitempty"`
    Label string `json:"label"`
    Count int `json:"count"`
    IsEstimate bool `json:"isEstimate"`
}

type VideoSourceFacetInput struct {
@ -98,10 +98,11 @@ func TorrentSourcesFromTorrent(t model.Torrent) []TorrentSource {
}

type TorrentContentSearchResult struct {
    TotalCount uint
    HasNextPage bool
    Items []TorrentContent
    Aggregations gen.TorrentContentAggregations
    TotalCount uint
    TotalCountIsEstimate bool
    HasNextPage bool
    Items []TorrentContent
    Aggregations gen.TorrentContentAggregations
}

func (t TorrentContentQuery) Search(ctx context.Context, query *q.SearchParams, facets *gen.TorrentContentFacetsInput) (TorrentContentSearchResult, error) {
@ -159,10 +160,11 @@ func transformTorrentContentSearchResult(result q.GenericResult[search.TorrentCo
        items = append(items, NewTorrentContentFromResultItem(item))
    }
    return TorrentContentSearchResult{
        TotalCount: result.TotalCount,
        HasNextPage: result.HasNextPage,
        Items: items,
        Aggregations: aggs,
        TotalCount: result.TotalCount,
        TotalCountIsEstimate: result.TotalCountIsEstimate,
        HasNextPage: result.HasNextPage,
        Items: items,
        Aggregations: aggs,
    }, nil
}
@ -245,6 +245,79 @@ func (n NullFloat32) MarshalGQL(w io.Writer) {
    _, _ = fmt.Fprintf(w, "%f", n.Float32)
}

// NullFloat64 - nullable float64
type NullFloat64 struct {
    Float64 float64
    Valid bool // Valid is true if Float64 is not NULL
}

func NewNullFloat64(f float64) NullFloat64 {
    return NullFloat64{
        Float64: f,
        Valid: true,
    }
}

func (n *NullFloat64) Scan(value interface{}) error {
    v, ok := value.(float64)
    if !ok {
        n.Valid = false
    } else {
        n.Float64 = v
        n.Valid = true
    }
    return nil
}

func (n NullFloat64) Value() (driver.Value, error) {
    if !n.Valid {
        return nil, nil
    }
    return n.Float64, nil
}

func (n *NullFloat64) UnmarshalGQL(v interface{}) error {
    if v == nil {
        n.Valid = false
        return nil
    }
    switch v := v.(type) {
    case int:
        n.Float64 = float64(v)
    case int32:
        n.Float64 = float64(v)
    case int64:
        n.Float64 = float64(v)
    case uint:
        n.Float64 = float64(v)
    case uint32:
        n.Float64 = float64(v)
    case uint64:
        n.Float64 = float64(v)
    case float32:
        n.Float64 = float64(v)
    case float64:
        n.Float64 = v
    case string:
        _, err := fmt.Sscanf(v, "%f", &n.Float64)
        if err != nil {
            return err
        }
    default:
        return fmt.Errorf("wrong type")
    }
    n.Valid = true
    return nil
}

func (n NullFloat64) MarshalGQL(w io.Writer) {
    if !n.Valid {
        _, _ = w.Write([]byte("null"))
        return
    }
    _, _ = fmt.Fprintf(w, "%f", n.Float64)
}

// NullUint64 - nullable uint64
type NullUint64 struct {
    Uint64 uint64
migrations/00010_budgeted_count.sql (new file, 32 lines)
@ -0,0 +1,32 @@
-- +goose Up
-- +goose StatementBegin

CREATE OR REPLACE FUNCTION budgeted_count(
    query text,
    budget double precision,
    OUT count integer,
    OUT cost double precision,
    OUT budget_exceeded boolean,
    OUT plan jsonb
) LANGUAGE plpgsql AS $$
BEGIN
    EXECUTE 'EXPLAIN (FORMAT JSON) ' || query INTO plan;
    cost := plan->0->'Plan'->'Total Cost';
    IF cost > budget THEN
        count := plan->0->'Plan'->'Plan Rows';
        budget_exceeded := true;
    ELSE
        EXECUTE 'SELECT count(*) FROM (' || query || ') AS subquery' INTO count;
        budget_exceeded := false;
    END IF;
END;
$$;

-- +goose StatementEnd

-- +goose Down
-- +goose StatementBegin

drop function if exists budgeted_count(text, double precision, OUT integer, OUT double precision, OUT boolean, OUT jsonb);

-- +goose StatementEnd
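A minimal sketch of how this function can be exercised from psql; the filter in the inner query and the 5,000 budget below are illustrative assumptions, not part of this change:

-- Illustrative call (assumed filter value and budget): if the planner's estimated
-- total cost exceeds the budget, `count` carries the planner's row estimate and
-- budget_exceeded is true; otherwise an exact count(*) of the subquery is executed.
SELECT count, cost, budget_exceeded
FROM budgeted_count(
    'SELECT * FROM torrent_contents WHERE content_type = ''movie''',
    5000
);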
migrations/00011_indexes.sql (new file, 27 lines)
@ -0,0 +1,27 @@
-- +goose Up
-- +goose StatementBegin

drop index if exists torrent_contents_id_idx;
drop index if exists torrent_contents_languages_idx;
drop index if exists torrent_contents_tsv_idx;

create extension if not exists btree_gin;

create index on torrent_contents using gin(content_type, tsv);
create index on torrent_contents using gin(content_type, languages);

-- +goose StatementEnd

-- +goose Down
-- +goose StatementBegin

drop index if exists torrent_contents_content_type_tsv_idx;
drop index if exists torrent_contents_content_type_languages_idx;

CREATE INDEX on torrent_contents USING gist (id gist_trgm_ops);
create index on torrent_contents (languages);
CREATE INDEX on torrent_contents USING GIN(tsv);

drop extension if exists btree_gin;

-- +goose StatementEnd
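For context, a hedged sketch of the query shape these composite GIN indexes (enabled by btree_gin) are intended to serve; the text-search configuration and search term are made up for illustration:

-- Illustrative: filtering on content_type together with a full-text match on tsv,
-- which the new gin(content_type, tsv) index can satisfy in a single index scan.
EXPLAIN (FORMAT JSON)
SELECT count(*)
FROM torrent_contents
WHERE content_type = 'movie'
  AND tsv @@ plainto_tsquery('simple', 'ubuntu');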
webui/dist/bitmagnet/index.html (vendored, 4 lines): file diff suppressed because one or more lines are too long
webui/dist/bitmagnet/main.b93d3476d106fc6d.js (vendored, new file, 228 lines): file diff suppressed because one or more lines are too long
webui/dist/bitmagnet/main.b9db37cab9a35084.js (vendored, 218 lines): file diff suppressed because one or more lines are too long
@ -81,6 +81,7 @@ export type ContentType =
export type ContentTypeAgg = {
  __typename?: 'ContentTypeAgg';
  count: Scalars['Int']['output'];
  isEstimate: Scalars['Boolean']['output'];
  label: Scalars['String']['output'];
  value?: Maybe<ContentType>;
};
@ -125,6 +126,7 @@ export type FilesStatus =
export type GenreAgg = {
  __typename?: 'GenreAgg';
  count: Scalars['Int']['output'];
  isEstimate: Scalars['Boolean']['output'];
  label: Scalars['String']['output'];
  value: Scalars['String']['output'];
};
@ -202,6 +204,7 @@ export type Language =
export type LanguageAgg = {
  __typename?: 'LanguageAgg';
  count: Scalars['Int']['output'];
  isEstimate: Scalars['Boolean']['output'];
  label: Scalars['String']['output'];
  value: Language;
};
@ -237,6 +240,7 @@ export type Query = {
export type ReleaseYearAgg = {
  __typename?: 'ReleaseYearAgg';
  count: Scalars['Int']['output'];
  isEstimate: Scalars['Boolean']['output'];
  label: Scalars['String']['output'];
  value?: Maybe<Scalars['Year']['output']>;
};
@ -247,6 +251,7 @@ export type ReleaseYearFacetInput = {
};

export type SearchQueryInput = {
  aggregationBudget?: InputMaybe<Scalars['Float']['input']>;
  cached?: InputMaybe<Scalars['Boolean']['input']>;
  /** hasNextPage if true, the search result will include the hasNextPage field, indicating if there are more results to fetch */
  hasNextPage?: InputMaybe<Scalars['Boolean']['input']>;
@ -360,6 +365,7 @@ export type TorrentContentSearchResult = {
  hasNextPage?: Maybe<Scalars['Boolean']['output']>;
  items: Array<TorrentContent>;
  totalCount: Scalars['Int']['output'];
  totalCountIsEstimate: Scalars['Boolean']['output'];
};

export type TorrentFile = {
@ -377,6 +383,7 @@ export type TorrentFile = {
export type TorrentFileTypeAgg = {
  __typename?: 'TorrentFileTypeAgg';
  count: Scalars['Int']['output'];
  isEstimate: Scalars['Boolean']['output'];
  label: Scalars['String']['output'];
  value: FileType;
};
@ -440,6 +447,7 @@ export type TorrentSource = {
export type TorrentSourceAgg = {
  __typename?: 'TorrentSourceAgg';
  count: Scalars['Int']['output'];
  isEstimate: Scalars['Boolean']['output'];
  label: Scalars['String']['output'];
  value: Scalars['String']['output'];
};
@ -458,6 +466,7 @@ export type TorrentSuggestTagsResult = {
export type TorrentTagAgg = {
  __typename?: 'TorrentTagAgg';
  count: Scalars['Int']['output'];
  isEstimate: Scalars['Boolean']['output'];
  label: Scalars['String']['output'];
  value: Scalars['String']['output'];
};
@ -503,6 +512,7 @@ export type VideoResolution =
export type VideoResolutionAgg = {
  __typename?: 'VideoResolutionAgg';
  count: Scalars['Int']['output'];
  isEstimate: Scalars['Boolean']['output'];
  label: Scalars['String']['output'];
  value?: Maybe<VideoResolution>;
};
@ -526,6 +536,7 @@ export type VideoSource =
export type VideoSourceAgg = {
  __typename?: 'VideoSourceAgg';
  count: Scalars['Int']['output'];
  isEstimate: Scalars['Boolean']['output'];
  label: Scalars['String']['output'];
  value?: Maybe<VideoSource>;
};
@ -541,7 +552,7 @@ export type TorrentFragment = { __typename?: 'Torrent', infoHash: string, name:

export type TorrentContentFragment = { __typename?: 'TorrentContent', id: string, infoHash: string, contentType?: ContentType | null, title: string, video3d?: Video3d | null, videoCodec?: VideoCodec | null, videoModifier?: VideoModifier | null, videoResolution?: VideoResolution | null, videoSource?: VideoSource | null, createdAt: string, updatedAt: string, torrent: { __typename?: 'Torrent', infoHash: string, name: string, size: number, private: boolean, filesStatus: FilesStatus, hasFilesInfo: boolean, singleFile?: boolean | null, fileType?: FileType | null, seeders?: number | null, leechers?: number | null, tagNames: Array<string>, magnetUri: string, createdAt: string, updatedAt: string, files?: Array<{ __typename?: 'TorrentFile', infoHash: string, index: number, path: string, size: number, fileType?: FileType | null, createdAt: string, updatedAt: string }> | null, sources: Array<{ __typename?: 'TorrentSource', key: string, name: string }> }, content?: { __typename?: 'Content', type: ContentType, source: string, id: string, title: string, releaseDate?: string | null, releaseYear?: number | null, overview?: string | null, runtime?: number | null, voteAverage?: number | null, voteCount?: number | null, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string }, originalLanguage?: { __typename?: 'LanguageInfo', id: string, name: string } | null, attributes: Array<{ __typename?: 'ContentAttribute', source: string, key: string, value: string, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }>, collections: Array<{ __typename?: 'ContentCollection', type: string, source: string, id: string, name: string, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }>, externalLinks: Array<{ __typename?: 'ExternalLink', url: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }> } | null, languages?: Array<{ __typename?: 'LanguageInfo', id: string, name: string }> | null, episodes?: { __typename?: 'Episodes', label: string, seasons: Array<{ __typename?: 'Season', season: number, episodes?: Array<number> | null }> } | null };
export type TorrentContentSearchResultFragment = { __typename?: 'TorrentContentSearchResult', totalCount: number, hasNextPage?: boolean | null, items: Array<{ __typename?: 'TorrentContent', id: string, infoHash: string, contentType?: ContentType | null, title: string, video3d?: Video3d | null, videoCodec?: VideoCodec | null, videoModifier?: VideoModifier | null, videoResolution?: VideoResolution | null, videoSource?: VideoSource | null, createdAt: string, updatedAt: string, torrent: { __typename?: 'Torrent', infoHash: string, name: string, size: number, private: boolean, filesStatus: FilesStatus, hasFilesInfo: boolean, singleFile?: boolean | null, fileType?: FileType | null, seeders?: number | null, leechers?: number | null, tagNames: Array<string>, magnetUri: string, createdAt: string, updatedAt: string, files?: Array<{ __typename?: 'TorrentFile', infoHash: string, index: number, path: string, size: number, fileType?: FileType | null, createdAt: string, updatedAt: string }> | null, sources: Array<{ __typename?: 'TorrentSource', key: string, name: string }> }, content?: { __typename?: 'Content', type: ContentType, source: string, id: string, title: string, releaseDate?: string | null, releaseYear?: number | null, overview?: string | null, runtime?: number | null, voteAverage?: number | null, voteCount?: number | null, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string }, originalLanguage?: { __typename?: 'LanguageInfo', id: string, name: string } | null, attributes: Array<{ __typename?: 'ContentAttribute', source: string, key: string, value: string, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }>, collections: Array<{ __typename?: 'ContentCollection', type: string, source: string, id: string, name: string, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }>, externalLinks: Array<{ __typename?: 'ExternalLink', url: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }> } | null, languages?: Array<{ __typename?: 'LanguageInfo', id: string, name: string }> | null, episodes?: { __typename?: 'Episodes', label: string, seasons: Array<{ __typename?: 'Season', season: number, episodes?: Array<number> | null }> } | null }>, aggregations: { __typename?: 'TorrentContentAggregations', contentType?: Array<{ __typename?: 'ContentTypeAgg', value?: ContentType | null, label: string, count: number }> | null, torrentSource?: Array<{ __typename?: 'TorrentSourceAgg', value: string, label: string, count: number }> | null, torrentTag?: Array<{ __typename?: 'TorrentTagAgg', value: string, label: string, count: number }> | null, torrentFileType?: Array<{ __typename?: 'TorrentFileTypeAgg', value: FileType, label: string, count: number }> | null, language?: Array<{ __typename?: 'LanguageAgg', value: Language, label: string, count: number }> | null, genre?: Array<{ __typename?: 'GenreAgg', value: string, label: string, count: number }> | null, releaseYear?: Array<{ __typename?: 'ReleaseYearAgg', value?: number | null, label: string, count: number }> | null, videoResolution?: Array<{ __typename?: 'VideoResolutionAgg', value?: VideoResolution | null, label: string, count: number }> | null, videoSource?: Array<{ __typename?: 'VideoSourceAgg', value?: VideoSource | null, label: string, count: number }> | null } };
export type TorrentContentSearchResultFragment = { __typename?: 'TorrentContentSearchResult', totalCount: number, totalCountIsEstimate: boolean, hasNextPage?: boolean | null, items: Array<{ __typename?: 'TorrentContent', id: string, infoHash: string, contentType?: ContentType | null, title: string, video3d?: Video3d | null, videoCodec?: VideoCodec | null, videoModifier?: VideoModifier | null, videoResolution?: VideoResolution | null, videoSource?: VideoSource | null, createdAt: string, updatedAt: string, torrent: { __typename?: 'Torrent', infoHash: string, name: string, size: number, private: boolean, filesStatus: FilesStatus, hasFilesInfo: boolean, singleFile?: boolean | null, fileType?: FileType | null, seeders?: number | null, leechers?: number | null, tagNames: Array<string>, magnetUri: string, createdAt: string, updatedAt: string, files?: Array<{ __typename?: 'TorrentFile', infoHash: string, index: number, path: string, size: number, fileType?: FileType | null, createdAt: string, updatedAt: string }> | null, sources: Array<{ __typename?: 'TorrentSource', key: string, name: string }> }, content?: { __typename?: 'Content', type: ContentType, source: string, id: string, title: string, releaseDate?: string | null, releaseYear?: number | null, overview?: string | null, runtime?: number | null, voteAverage?: number | null, voteCount?: number | null, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string }, originalLanguage?: { __typename?: 'LanguageInfo', id: string, name: string } | null, attributes: Array<{ __typename?: 'ContentAttribute', source: string, key: string, value: string, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }>, collections: Array<{ __typename?: 'ContentCollection', type: string, source: string, id: string, name: string, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }>, externalLinks: Array<{ __typename?: 'ExternalLink', url: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }> } | null, languages?: Array<{ __typename?: 'LanguageInfo', id: string, name: string }> | null, episodes?: { __typename?: 'Episodes', label: string, seasons: Array<{ __typename?: 'Season', season: number, episodes?: Array<number> | null }> } | null }>, aggregations: { __typename?: 'TorrentContentAggregations', contentType?: Array<{ __typename?: 'ContentTypeAgg', value?: ContentType | null, label: string, count: number, isEstimate: boolean }> | null, torrentSource?: Array<{ __typename?: 'TorrentSourceAgg', value: string, label: string, count: number, isEstimate: boolean }> | null, torrentTag?: Array<{ __typename?: 'TorrentTagAgg', value: string, label: string, count: number, isEstimate: boolean }> | null, torrentFileType?: Array<{ __typename?: 'TorrentFileTypeAgg', value: FileType, label: string, count: number, isEstimate: boolean }> | null, language?: Array<{ __typename?: 'LanguageAgg', value: Language, label: string, count: number, isEstimate: boolean }> | null, genre?: Array<{ __typename?: 'GenreAgg', value: string, label: string, count: number, isEstimate: boolean }> | null, releaseYear?: Array<{ __typename?: 'ReleaseYearAgg', value?: number | null, label: string, count: number, isEstimate: boolean }> | null, videoResolution?: Array<{ __typename?: 'VideoResolutionAgg', value?: VideoResolution | null, label: string, count: number, isEstimate: boolean }> | null, 
videoSource?: Array<{ __typename?: 'VideoSourceAgg', value?: VideoSource | null, label: string, count: number, isEstimate: boolean }> | null } };
export type TorrentFileFragment = { __typename?: 'TorrentFile', infoHash: string, index: number, path: string, size: number, fileType?: FileType | null, createdAt: string, updatedAt: string };

@ -582,7 +593,7 @@ export type TorrentContentSearchQueryVariables = Exact<{
}>;

export type TorrentContentSearchQuery = { __typename?: 'Query', torrentContent: { __typename?: 'TorrentContentQuery', search: { __typename?: 'TorrentContentSearchResult', totalCount: number, hasNextPage?: boolean | null, items: Array<{ __typename?: 'TorrentContent', id: string, infoHash: string, contentType?: ContentType | null, title: string, video3d?: Video3d | null, videoCodec?: VideoCodec | null, videoModifier?: VideoModifier | null, videoResolution?: VideoResolution | null, videoSource?: VideoSource | null, createdAt: string, updatedAt: string, torrent: { __typename?: 'Torrent', infoHash: string, name: string, size: number, private: boolean, filesStatus: FilesStatus, hasFilesInfo: boolean, singleFile?: boolean | null, fileType?: FileType | null, seeders?: number | null, leechers?: number | null, tagNames: Array<string>, magnetUri: string, createdAt: string, updatedAt: string, files?: Array<{ __typename?: 'TorrentFile', infoHash: string, index: number, path: string, size: number, fileType?: FileType | null, createdAt: string, updatedAt: string }> | null, sources: Array<{ __typename?: 'TorrentSource', key: string, name: string }> }, content?: { __typename?: 'Content', type: ContentType, source: string, id: string, title: string, releaseDate?: string | null, releaseYear?: number | null, overview?: string | null, runtime?: number | null, voteAverage?: number | null, voteCount?: number | null, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string }, originalLanguage?: { __typename?: 'LanguageInfo', id: string, name: string } | null, attributes: Array<{ __typename?: 'ContentAttribute', source: string, key: string, value: string, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }>, collections: Array<{ __typename?: 'ContentCollection', type: string, source: string, id: string, name: string, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }>, externalLinks: Array<{ __typename?: 'ExternalLink', url: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }> } | null, languages?: Array<{ __typename?: 'LanguageInfo', id: string, name: string }> | null, episodes?: { __typename?: 'Episodes', label: string, seasons: Array<{ __typename?: 'Season', season: number, episodes?: Array<number> | null }> } | null }>, aggregations: { __typename?: 'TorrentContentAggregations', contentType?: Array<{ __typename?: 'ContentTypeAgg', value?: ContentType | null, label: string, count: number }> | null, torrentSource?: Array<{ __typename?: 'TorrentSourceAgg', value: string, label: string, count: number }> | null, torrentTag?: Array<{ __typename?: 'TorrentTagAgg', value: string, label: string, count: number }> | null, torrentFileType?: Array<{ __typename?: 'TorrentFileTypeAgg', value: FileType, label: string, count: number }> | null, language?: Array<{ __typename?: 'LanguageAgg', value: Language, label: string, count: number }> | null, genre?: Array<{ __typename?: 'GenreAgg', value: string, label: string, count: number }> | null, releaseYear?: Array<{ __typename?: 'ReleaseYearAgg', value?: number | null, label: string, count: number }> | null, videoResolution?: Array<{ __typename?: 'VideoResolutionAgg', value?: VideoResolution | null, label: string, count: number }> | null, videoSource?: Array<{ __typename?: 'VideoSourceAgg', value?: VideoSource | null, label: string, count: number }> | null } } } };
export type TorrentContentSearchQuery = { __typename?: 'Query', torrentContent: { __typename?: 'TorrentContentQuery', search: { __typename?: 'TorrentContentSearchResult', totalCount: number, totalCountIsEstimate: boolean, hasNextPage?: boolean | null, items: Array<{ __typename?: 'TorrentContent', id: string, infoHash: string, contentType?: ContentType | null, title: string, video3d?: Video3d | null, videoCodec?: VideoCodec | null, videoModifier?: VideoModifier | null, videoResolution?: VideoResolution | null, videoSource?: VideoSource | null, createdAt: string, updatedAt: string, torrent: { __typename?: 'Torrent', infoHash: string, name: string, size: number, private: boolean, filesStatus: FilesStatus, hasFilesInfo: boolean, singleFile?: boolean | null, fileType?: FileType | null, seeders?: number | null, leechers?: number | null, tagNames: Array<string>, magnetUri: string, createdAt: string, updatedAt: string, files?: Array<{ __typename?: 'TorrentFile', infoHash: string, index: number, path: string, size: number, fileType?: FileType | null, createdAt: string, updatedAt: string }> | null, sources: Array<{ __typename?: 'TorrentSource', key: string, name: string }> }, content?: { __typename?: 'Content', type: ContentType, source: string, id: string, title: string, releaseDate?: string | null, releaseYear?: number | null, overview?: string | null, runtime?: number | null, voteAverage?: number | null, voteCount?: number | null, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string }, originalLanguage?: { __typename?: 'LanguageInfo', id: string, name: string } | null, attributes: Array<{ __typename?: 'ContentAttribute', source: string, key: string, value: string, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }>, collections: Array<{ __typename?: 'ContentCollection', type: string, source: string, id: string, name: string, createdAt: string, updatedAt: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }>, externalLinks: Array<{ __typename?: 'ExternalLink', url: string, metadataSource: { __typename?: 'MetadataSource', key: string, name: string } }> } | null, languages?: Array<{ __typename?: 'LanguageInfo', id: string, name: string }> | null, episodes?: { __typename?: 'Episodes', label: string, seasons: Array<{ __typename?: 'Season', season: number, episodes?: Array<number> | null }> } | null }>, aggregations: { __typename?: 'TorrentContentAggregations', contentType?: Array<{ __typename?: 'ContentTypeAgg', value?: ContentType | null, label: string, count: number, isEstimate: boolean }> | null, torrentSource?: Array<{ __typename?: 'TorrentSourceAgg', value: string, label: string, count: number, isEstimate: boolean }> | null, torrentTag?: Array<{ __typename?: 'TorrentTagAgg', value: string, label: string, count: number, isEstimate: boolean }> | null, torrentFileType?: Array<{ __typename?: 'TorrentFileTypeAgg', value: FileType, label: string, count: number, isEstimate: boolean }> | null, language?: Array<{ __typename?: 'LanguageAgg', value: Language, label: string, count: number, isEstimate: boolean }> | null, genre?: Array<{ __typename?: 'GenreAgg', value: string, label: string, count: number, isEstimate: boolean }> | null, releaseYear?: Array<{ __typename?: 'ReleaseYearAgg', value?: number | null, label: string, count: number, isEstimate: boolean }> | null, videoResolution?: Array<{ __typename?: 'VideoResolutionAgg', value?: 
VideoResolution | null, label: string, count: number, isEstimate: boolean }> | null, videoSource?: Array<{ __typename?: 'VideoSourceAgg', value?: VideoSource | null, label: string, count: number, isEstimate: boolean }> | null } } } };

export type TorrentSuggestTagsQueryVariables = Exact<{
  query: SuggestTagsQueryInput;
@ -720,52 +731,62 @@ export const TorrentContentSearchResultFragmentDoc = gql`
      ...TorrentContent
    }
    totalCount
    totalCountIsEstimate
    hasNextPage
    aggregations {
      contentType {
        value
        label
        count
        isEstimate
      }
      torrentSource {
        value
        label
        count
        isEstimate
      }
      torrentTag {
        value
        label
        count
        isEstimate
      }
      torrentFileType {
        value
        label
        count
        isEstimate
      }
      language {
        value
        label
        count
        isEstimate
      }
      genre {
        value
        label
        count
        isEstimate
      }
      releaseYear {
        value
        label
        count
        isEstimate
      }
      videoResolution {
        value
        label
        count
        isEstimate
      }
      videoSource {
        value
        label
        count
        isEstimate
      }
    }
  }
@ -14,7 +14,7 @@
    {{ firstItemIndex | number }} - {{ lastItemIndex | number
    }}{{
      hasTotalLength
        ? " of " + (totalLessThanOrEqual ? "≤ " : "") + (totalLength | number)
        ? " of " + (totalIsEstimate ? "~" : "") + (totalLength | number)
        : ""
    }}
  </p>
@ -18,7 +18,8 @@ export class PaginatorComponent {
  @Input() pageSizes: number[] = [10, 20, 50, 100];
  @Input({ transform: numberAttribute }) pageLength = 0;
  @Input() totalLength: number | null = null;
  @Input() totalLessThanOrEqual = false;
  @Input() totalIsEstimate = false;
  @Input() hasNextPage: boolean | null | undefined = null;

  @Output() page = new EventEmitter<PageEvent>();

@ -38,10 +39,6 @@ export class PaginatorComponent {
    return this.pageIndex > 0;
  }

  get hasNextPage() {
    return this.firstItemIndex + this.pageSize <= this.totalLength!;
  }

  emitChange() {
    this.page.emit({
      pageIndex: this.pageIndex,
@ -5,6 +5,7 @@ export type Agg<T, _allowNull extends boolean> = {
  count: number;
  label: string;
  value: FacetValue<T, _allowNull>;
  isEstimate: boolean;
};

export type GenreAgg = Agg<string, false>;
@ -1,5 +1,5 @@
import { CollectionViewer, DataSource } from "@angular/cdk/collections";
import { BehaviorSubject, catchError, EMPTY, map, Observable, zip } from "rxjs";
import { BehaviorSubject, catchError, EMPTY, map, Observable } from "rxjs";
import * as generated from "../../graphql/generated";
import { GraphQLService } from "../../graphql/graphql.service";
import { AppErrorsService } from "../../app-errors.service";
@ -9,9 +9,20 @@ import { Facet, VideoResolutionAgg, VideoSourceAgg } from "./facet";
const emptyResult: generated.TorrentContentSearchResult = {
  items: [],
  totalCount: 0,
  totalCountIsEstimate: false,
  aggregations: {},
};

type BudgetedCount = {
  count: number;
  isEstimate: boolean;
};

const emptyBudgetedCount = {
  count: 0,
  isEstimate: false,
};

export class TorrentContentSearchEngine
  implements DataSource<generated.TorrentContent>
{
@ -42,6 +53,9 @@ export class TorrentContentSearchEngine
    map((result) => result.aggregations),
  );
  public loading$ = this.loadingSubject.asObservable();
  public hasNextPage$ = this.itemsResultSubject.pipe(
    map((result) => result.hasNextPage),
  );

  private torrentSourceFacet = new Facet<string, false>(
    "Torrent Source",
@ -100,11 +114,15 @@ export class TorrentContentSearchEngine
    this.genreFacet,
  ];

  private overallTotalCountSubject = new BehaviorSubject<number>(0);
  private overallTotalCountSubject = new BehaviorSubject<BudgetedCount>(
    emptyBudgetedCount,
  );
  public overallTotalCount$ = this.overallTotalCountSubject.asObservable();

  private maxTotalCountSubject = new BehaviorSubject<number>(0);
  public maxTotalCount$ = this.maxTotalCountSubject.asObservable();
  private totalCountSubject = new BehaviorSubject<BudgetedCount>(
    emptyBudgetedCount,
  );
  public totalCount$ = this.totalCountSubject.asObservable();

  public contentTypes = contentTypes;

@ -136,7 +154,6 @@ export class TorrentContentSearchEngine
    const pageSize = this.pageSizeSubject.getValue();
    const queryString = this.queryStringSubject.getValue() || undefined;
    const offset = this.pageIndexSubject.getValue() * pageSize;
    const contentType = this.contentTypeSubject.getValue();
    const items = this.graphQLService
      .torrentContentSearch({
        query: {
@ -145,8 +162,9 @@ export class TorrentContentSearchEngine
          offset,
          hasNextPage: true,
          cached,
          totalCount: true,
        },
        facets: this.facetsInput(false),
        facets: this.facetsInput(true),
      })
      .pipe(
        catchError((err: Error) => {
@ -156,59 +174,29 @@ export class TorrentContentSearchEngine
          return EMPTY;
        }),
      );
    const aggs = this.graphQLService
      .torrentContentSearch({
        query: {
          limit: 0,
          cached: true,
        },
        facets: this.facetsInput(true),
      })
      .pipe(
        catchError((err: Error) => {
          this.errorsService.addError(
            `Error loading aggregation results: ${err.message}`,
          );
          return EMPTY;
        }),
      );
    items.subscribe((result) => {
      const lastRequestTime = this.lastRequestTimeSubject.getValue();
      if (requestTime >= lastRequestTime) {
        this.itemsResultSubject.next(result);
      }
    });
    aggs.subscribe((result) => {
      const lastRequestTime = this.lastRequestTimeSubject.getValue();
      if (requestTime >= lastRequestTime) {
        this.aggsResultSubject.next(result);
        this.loadingSubject.next(false);
        this.lastRequestTimeSubject.next(requestTime);
        this.pageLengthSubject.next(result.items.length);
        this.totalCountSubject.next({
          count: result.totalCount,
          isEstimate: result.totalCountIsEstimate,
        });
        this.overallTotalCountSubject.next(
          (result.aggregations.contentType ?? []).reduce(
            (acc, next) => ({
              count: acc.count + next.count,
              isEstimate: acc.isEstimate || next.isEstimate,
            }),
            emptyBudgetedCount,
          ),
        );
      }
    });
    zip(items, aggs).subscribe(([i, a]) => {
      this.loadingSubject.next(false);
      this.lastRequestTimeSubject.next(requestTime);
      const overallTotalCount =
        a.aggregations.contentType
          ?.map((c) => c.count)
          .reduce((a, b) => a + b, 0) ?? 0;
      let maxTotalCount: number;
      if (!i.hasNextPage) {
        maxTotalCount = offset + i.items.length;
      } else if (contentType === undefined) {
        maxTotalCount =
          a.aggregations.contentType
            ?.map((c) => c.count)
            .reduce((a, b) => a + b, 0) ?? 0;
      } else {
        maxTotalCount =
          a.aggregations.contentType?.find(
            (a) => (a.value ?? "null") === (contentType ?? undefined),
          )?.count ?? overallTotalCount;
      }
      this.pageLengthSubject.next(i.items.length);
      this.maxTotalCountSubject.next(maxTotalCount);
      this.overallTotalCountSubject.next(overallTotalCount);
    });
  }

  private facetsInput(aggregate: boolean): generated.TorrentContentFacetsInput {
@ -233,13 +221,6 @@ export class TorrentContentSearchEngine
    };
  }

  get isDeepFiltered(): boolean {
    return (
      !!this.queryStringSubject.getValue() ||
      this.facets.some((f) => f.isActive() && !f.isEmpty())
    );
  }

  selectContentType(
    contentType: generated.ContentType | "null" | null | undefined,
  ) {
@ -264,13 +245,15 @@ export class TorrentContentSearchEngine
    this.loadResult();
  }

  contentTypeCount(type: string): Observable<number> {
  contentTypeCount(type: string): Observable<{
    count: number;
    isEstimate: boolean;
  }> {
    return this.aggregations$.pipe(
      map(
        (aggs) =>
          aggs.contentType?.find((a) => (a.value ?? "null") === type)?.count ??
          0,
      ),
      map((aggs) => {
        const agg = aggs.contentType?.find((a) => (a.value ?? "null") === type);
        return agg ?? { count: 0, isEstimate: false };
      }),
    );
  }

@ -335,7 +318,7 @@ function facetInput<T = unknown, _allowNull extends boolean = true>(
  return facet.isActive()
    ? {
        aggregate,
        filter: aggregate ? undefined : facet.filterValues(),
        filter: facet.filterValues(),
      }
    : undefined;
}
@ -10,9 +10,9 @@
  <mat-radio-group [formControl]="contentType">
    <mat-radio-button>
      <mat-icon fontSet="material-icons">emergency</mat-icon>All
      <small
        >{{ isDeepFiltered ? "≤ " : ""
        }}{{ search.overallTotalCount$ | async | number }}</small
      <small *ngIf="search.overallTotalCount$ | async as count"
        >{{ count.isEstimate ? "~" : ""
        }}{{ count.count | number }}</small
      >
    </mat-radio-button>
    <mat-radio-button
@ -21,9 +21,8 @@
    >
      <mat-icon>{{ t.value.icon }}</mat-icon>
      {{ t.value.plural }}
      <small
        >{{ isDeepFiltered ? "≤ " : ""
        }}{{ search.contentTypeCount(t.key) | async | number }}</small
      <small *ngIf="search.contentTypeCount(t.key) | async as agg"
        >{{ agg.isEstimate ? "~" : "" }}{{ agg.count | number }}</small
      >
    </mat-radio-button>
  </mat-radio-group>
@ -56,9 +55,7 @@
    [style.display]="'block'"
  >
    {{ r.label }}
    <small
      >{{ isDeepFiltered ? "≤ " : "" }}{{ r.count | number }}</small
    >
    <small>{{ r.isEstimate ? "~" : "" }}{{ r.count | number }}</small>
  </mat-checkbox>
  <span
    *ngIf="!(search.loading$ | async) && !facet.aggregations?.length"
@ -622,12 +619,14 @@
  </table>
  <span class="spacer"></span>
  <app-paginator
    *ngIf="search.totalCount$ | async as totalCount"
    (page)="search.handlePageEvent($event)"
    [pageIndex]="search.pageIndex$ | async"
    [pageSize]="search.pageSize$ | async"
    [pageLength]="search.pageLength$ | async"
    [totalLength]="search.maxTotalCount$ | async"
    [totalLessThanOrEqual]="search.isDeepFiltered"
    [totalLength]="totalCount.count"
    [totalIsEstimate]="totalCount.isEstimate"
    [hasNextPage]="search.hasNextPage$ | async"
  />
</div>
</mat-drawer-content>
@ -87,10 +87,6 @@ export class TorrentContentComponent
    this.updateSuggestedTags();
  }

  get isDeepFiltered(): boolean {
    return this.search.isDeepFiltered;
  }

  loadResult(cached = true) {
    this.search.loadResult(cached);
  }
@ -20,12 +20,12 @@
    "useDefineForClassFields": false,
    "lib": ["ES2022", "dom", "esnext.asynciterable"],
    "allowSyntheticDefaultImports": true,
    "strictPropertyInitialization": false,
    "strictPropertyInitialization": false
  },
  "angularCompilerOptions": {
    "enableI18nLegacyMessageIdFormat": false,
    "strictInjectionParameters": true,
    "strictInputAccessModifiers": true,
    "strictTemplates": true,
  },
    "strictTemplates": true
  }
}