Mirror of https://github.com/miniflux/v2.git (synced 2025-07-12 16:58:36 +00:00)

refactor: use min/max instead of math.Min/math.Max

This saves a couple of back'n'forth casts.

Commit: e6185b1393 (parent: 1b0b8b9c42)
3 changed files with 5 additions and 6 deletions
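All of the replaced calls follow the same pattern: an int (or float32) value is converted to float64 so it can be passed through math.Min/math.Max, and the result is cast back. The built-in generic min and max functions (available since Go 1.21) operate directly on any ordered type, so the round-trip disappears. A minimal, self-contained sketch of the difference (the variable names and values here are illustrative, not taken from the repository):

package main

import (
	"fmt"
	"math"
)

func main() {
	intervalMinutes := 90
	maxInterval := 60
	minInterval := 15

	// Old style: math.Min/math.Max only accept float64, so every int
	// operand has to be converted in and the result converted back out.
	clampedOld := int(math.Min(float64(intervalMinutes), float64(maxInterval)))
	clampedOld = int(math.Max(float64(clampedOld), float64(minInterval)))

	// New style: the generic min/max builtins (Go 1.21+) work on ints
	// directly, so no conversions are needed.
	clampedNew := min(intervalMinutes, maxInterval)
	clampedNew = max(clampedNew, minInterval)

	fmt.Println(clampedOld, clampedNew) // both print 60
}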
@@ -123,8 +123,8 @@ func (f *Feed) ScheduleNextCheck(weeklyCount int, refreshDelayInMinutes int) {
 		intervalMinutes = config.Opts.SchedulerEntryFrequencyMaxInterval()
 	} else {
 		intervalMinutes = int(math.Round(float64(7*24*60) / float64(weeklyCount*config.Opts.SchedulerEntryFrequencyFactor())))
-		intervalMinutes = int(math.Min(float64(intervalMinutes), float64(config.Opts.SchedulerEntryFrequencyMaxInterval())))
-		intervalMinutes = int(math.Max(float64(intervalMinutes), float64(config.Opts.SchedulerEntryFrequencyMinInterval())))
+		intervalMinutes = min(intervalMinutes, config.Opts.SchedulerEntryFrequencyMaxInterval())
+		intervalMinutes = max(intervalMinutes, config.Opts.SchedulerEntryFrequencyMinInterval())
 	}
 }
@@ -8,7 +8,6 @@ import (
 	"fmt"
 	"io"
 	"log/slog"
-	"math"
 	"regexp"
 	"strings"
@@ -108,7 +107,7 @@ func ExtractContent(page io.Reader) (baseURL string, extractedContent string, err error) {
 // Things like preambles, content split by ads that we removed, etc.
 func getArticle(topCandidate *candidate, candidates candidateList) string {
 	output := bytes.NewBufferString("<div>")
-	siblingScoreThreshold := float32(math.Max(10, float64(topCandidate.score*.2)))
+	siblingScoreThreshold := max(10, topCandidate.score*.2)

 	topCandidate.selection.Siblings().Union(topCandidate.selection).Each(func(i int, s *goquery.Selection) {
 		append := false
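In the getArticle change above, the float32() and float64() conversions in the removed line imply that topCandidate.score is a float32, so the untyped constants 10 and .2 adapt to float32 and the generic max returns a float32 directly, with no float64 round-trip. A small sketch of that behaviour (the score value is illustrative, not from the repository):

package main

import "fmt"

func main() {
	var score float32 = 120

	// With the generic max builtin, the untyped constants 10 and .2
	// take on float32 from score, so the result is already a float32
	// and no float64 conversion or final float32() cast is needed.
	threshold := max(10, score*.2)

	fmt.Printf("%T %v\n", threshold, threshold) // float32 24
}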
@@ -223,7 +222,7 @@ func getCandidates(document *goquery.Document) candidateList {
 		contentScore += float32(strings.Count(text, ",") + 1)

 		// For every 100 characters in this paragraph, add another point. Up to 3 points.
-		contentScore += float32(math.Min(float64(int(len(text)/100.0)), 3))
+		contentScore += float32(min(int(len(text)/100.0), 3))

 		candidates[parentNode].score += contentScore
 		if grandParentNode != nil {
@@ -19,7 +19,7 @@ func EstimateReadingTime(content string, defaultReadingSpeed, cjkReadingSpeed int
 	sanitizedContent := sanitizer.StripTags(content)

 	// Litterature on language detection says that around 100 signes is enough, we're safe here.
-	truncationPoint := int(math.Min(float64(len(sanitizedContent)), 250))
+	truncationPoint := min(len(sanitizedContent), 250)

 	// We're only interested in identifying Japanse/Chinese/Korean
 	options := whatlanggo.Options{