diff --git a/cache/dynacache/dynacache_test.go b/cache/dynacache/dynacache_test.go index 14abf240d..78b2fc82e 100644 --- a/cache/dynacache/dynacache_test.go +++ b/cache/dynacache/dynacache_test.go @@ -191,16 +191,16 @@ func TestPanicInCreate(t *testing.T) { return err } - for i := 0; i < 3; i++ { - for j := 0; j < 3; j++ { + for i := range 3 { + for range 3 { c.Assert(willPanic(i), qt.PanicMatches, fmt.Sprintf("panic-%d", i)) c.Assert(willErr(i), qt.ErrorMatches, fmt.Sprintf("error-%d", i)) } } // Test the same keys again without the panic. - for i := 0; i < 3; i++ { - for j := 0; j < 3; j++ { + for i := range 3 { + for range 3 { v, err := p1.GetOrCreate(fmt.Sprintf("panic-%d", i), func(key string) (testItem, error) { return testItem{ name: key, diff --git a/cache/filecache/filecache_pruner_test.go b/cache/filecache/filecache_pruner_test.go index f0cecfe9f..b49ba7645 100644 --- a/cache/filecache/filecache_pruner_test.go +++ b/cache/filecache/filecache_pruner_test.go @@ -59,7 +59,7 @@ dir = ":resourceDir/_gen" caches, err := filecache.NewCaches(p) c.Assert(err, qt.IsNil) cache := caches[name] - for i := 0; i < 10; i++ { + for i := range 10 { id := fmt.Sprintf("i%d", i) cache.GetOrCreateBytes(id, func() ([]byte, error) { return []byte("abc"), nil @@ -74,7 +74,7 @@ dir = ":resourceDir/_gen" c.Assert(err, qt.IsNil) c.Assert(count, qt.Equals, 5, msg) - for i := 0; i < 10; i++ { + for i := range 10 { id := fmt.Sprintf("i%d", i) v := cache.GetString(id) if i < 5 { @@ -97,7 +97,7 @@ dir = ":resourceDir/_gen" c.Assert(count, qt.Equals, 4) // Now only the i5 should be left. - for i := 0; i < 10; i++ { + for i := range 10 { id := fmt.Sprintf("i%d", i) v := cache.GetString(id) if i != 5 { diff --git a/cache/filecache/filecache_test.go b/cache/filecache/filecache_test.go index 59fb09276..a30aaa50b 100644 --- a/cache/filecache/filecache_test.go +++ b/cache/filecache/filecache_test.go @@ -105,7 +105,7 @@ dir = ":cacheDir/c" } for _, ca := range []*filecache.Cache{caches.ImageCache(), caches.AssetsCache(), caches.GetJSONCache(), caches.GetCSVCache()} { - for i := 0; i < 2; i++ { + for range 2 { info, r, err := ca.GetOrCreate("a", rf("abc")) c.Assert(err, qt.IsNil) c.Assert(r, qt.Not(qt.IsNil)) @@ -193,11 +193,11 @@ dir = "/cache/c" var wg sync.WaitGroup - for i := 0; i < 50; i++ { + for i := range 50 { wg.Add(1) go func(i int) { defer wg.Done() - for j := 0; j < 20; j++ { + for range 20 { ca := caches.Get(cacheName) c.Assert(ca, qt.Not(qt.IsNil)) filename, data := filenameData(i) diff --git a/codegen/methods.go b/codegen/methods.go index 299063bb5..08ac97b00 100644 --- a/codegen/methods.go +++ b/codegen/methods.go @@ -26,6 +26,7 @@ import ( "path/filepath" "reflect" "regexp" + "slices" "sort" "strings" "sync" @@ -102,7 +103,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T } for _, t := range include { - for i := 0; i < t.NumMethod(); i++ { + for i := range t.NumMethod() { m := t.Method(i) if excludes[m.Name] || seen[m.Name] { @@ -122,7 +123,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T method := Method{Owner: t, OwnerName: ownerName, Name: m.Name} - for i := 0; i < numIn; i++ { + for i := range numIn { in := m.Type.In(i) name, pkg := nameAndPackage(in) @@ -137,7 +138,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T numOut := m.Type.NumOut() if numOut > 0 { - for i := 0; i < numOut; i++ { + for i := range numOut { out := m.Type.Out(i) name, pkg := nameAndPackage(out) @@ -304,7 +305,7 @@ func (m 
Method) inOutStr() string { } args := make([]string, len(m.In)) - for i := 0; i < len(args); i++ { + for i := range args { args[i] = fmt.Sprintf("arg%d", i) } return "(" + strings.Join(args, ", ") + ")" @@ -316,7 +317,7 @@ func (m Method) inStr() string { } args := make([]string, len(m.In)) - for i := 0; i < len(args); i++ { + for i := range args { args[i] = fmt.Sprintf("arg%d %s", i, m.In[i]) } return "(" + strings.Join(args, ", ") + ")" @@ -339,7 +340,7 @@ func (m Method) outStrNamed() string { } outs := make([]string, len(m.Out)) - for i := 0; i < len(outs); i++ { + for i := range outs { outs[i] = fmt.Sprintf("o%d %s", i, m.Out[i]) } @@ -435,7 +436,7 @@ func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (st // Exclude self for i, pkgImp := range pkgImports { if pkgImp == pkgPath { - pkgImports = append(pkgImports[:i], pkgImports[i+1:]...) + pkgImports = slices.Delete(pkgImports, i, i+1) } } } diff --git a/commands/commandeer.go b/commands/commandeer.go index 697ece1f0..bf9655637 100644 --- a/commands/commandeer.go +++ b/commands/commandeer.go @@ -101,8 +101,8 @@ type configKey struct { // This is the root command. type rootCommand struct { - Printf func(format string, v ...interface{}) - Println func(a ...interface{}) + Printf func(format string, v ...any) + Println func(a ...any) StdOut io.Writer StdErr io.Writer @@ -431,12 +431,12 @@ func (r *rootCommand) PreRun(cd, runner *simplecobra.Commandeer) error { // Used by mkcert (server). log.SetOutput(r.StdOut) - r.Printf = func(format string, v ...interface{}) { + r.Printf = func(format string, v ...any) { if !r.quiet { fmt.Fprintf(r.StdOut, format, v...) } } - r.Println = func(a ...interface{}) { + r.Println = func(a ...any) { if !r.quiet { fmt.Fprintln(r.StdOut, a...) } diff --git a/commands/config.go b/commands/config.go index 89f14e0e6..7d166b9b8 100644 --- a/commands/config.go +++ b/commands/config.go @@ -90,7 +90,7 @@ func (c *configCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, arg os.Stdout.Write(buf.Bytes()) default: // Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format. - var m map[string]interface{} + var m map[string]any if err := json.Unmarshal(buf.Bytes(), &m); err != nil { return err } diff --git a/commands/gen.go b/commands/gen.go index 5e49d9e7d..b77deeeb7 100644 --- a/commands/gen.go +++ b/commands/gen.go @@ -222,7 +222,7 @@ url: %s } // Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format. 
- var m map[string]interface{} + var m map[string]any if err := json.Unmarshal(buf.Bytes(), &m); err != nil { return err } diff --git a/commands/server.go b/commands/server.go index d42ce6d29..d3a72ec9a 100644 --- a/commands/server.go +++ b/commands/server.go @@ -65,6 +65,7 @@ import ( "github.com/spf13/fsync" "golang.org/x/sync/errgroup" "golang.org/x/sync/semaphore" + "maps" ) var ( @@ -195,9 +196,7 @@ func (f *fileChangeDetector) PrepareNew() { } f.prev = make(map[string]uint64) - for k, v := range f.current { - f.prev[k] = v - } + maps.Copy(f.prev, f.current) f.current = make(map[string]uint64) } @@ -759,7 +758,7 @@ func (c *serverCommand) createServerPorts(cd *simplecobra.Commandeer) error { c.serverPorts = make([]serverPortListener, len(conf.configs.Languages)) } currentServerPort := c.serverPort - for i := 0; i < len(c.serverPorts); i++ { + for i := range c.serverPorts { l, err := net.Listen("tcp", net.JoinHostPort(c.serverInterface, strconv.Itoa(currentServerPort))) if err == nil { c.serverPorts[i] = serverPortListener{ln: l, p: currentServerPort} diff --git a/common/collections/append.go b/common/collections/append.go index 8f1e21ea3..db9db8bf3 100644 --- a/common/collections/append.go +++ b/common/collections/append.go @@ -117,7 +117,7 @@ func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, erro tos = append(tos, nil) continue } - for i := 0; i < slice.Len(); i++ { + for i := range slice.Len() { tos = append(tos, slice.Index(i).Interface()) } } @@ -128,7 +128,7 @@ func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, erro func appendToInterfaceSlice(tov reflect.Value, from ...any) ([]any, error) { var tos []any - for i := 0; i < tov.Len(); i++ { + for i := range tov.Len() { tos = append(tos, tov.Index(i).Interface()) } diff --git a/common/collections/stack.go b/common/collections/stack.go index 96d32fe4b..ff0db2f02 100644 --- a/common/collections/stack.go +++ b/common/collections/stack.go @@ -13,6 +13,8 @@ package collections +import "slices" + import "sync" // Stack is a simple LIFO stack that is safe for concurrent use. @@ -73,7 +75,7 @@ func (s *Stack[T]) DrainMatching(predicate func(T) bool) []T { for i := len(s.items) - 1; i >= 0; i-- { if predicate(s.items[i]) { items = append(items, s.items[i]) - s.items = append(s.items[:i], s.items[i+1:]...) 
+ s.items = slices.Delete(s.items, i, i+1) } } return items diff --git a/common/hashing/hashing_test.go b/common/hashing/hashing_test.go index bd66f3ebf..105b6d8b5 100644 --- a/common/hashing/hashing_test.go +++ b/common/hashing/hashing_test.go @@ -37,12 +37,12 @@ func TestXxHashFromReaderPara(t *testing.T) { c := qt.New(t) var wg sync.WaitGroup - for i := 0; i < 10; i++ { + for i := range 10 { i := i wg.Add(1) go func() { defer wg.Done() - for j := 0; j < 100; j++ { + for j := range 100 { s := strings.Repeat("Hello ", i+j+1*42) r := strings.NewReader(s) got, size, err := XXHashFromReader(r) @@ -144,8 +144,8 @@ func BenchmarkHashString(b *testing.B) { } func BenchmarkHashMap(b *testing.B) { - m := map[string]interface{}{} - for i := 0; i < 1000; i++ { + m := map[string]any{} + for i := range 1000 { m[fmt.Sprintf("key%d", i)] = i } diff --git a/common/herrors/error_locator.go b/common/herrors/error_locator.go index 1ece0cca4..acaebb4bc 100644 --- a/common/herrors/error_locator.go +++ b/common/herrors/error_locator.go @@ -152,10 +152,7 @@ func locateError(r io.Reader, le FileError, matches LineMatcherFn) *ErrorContext } if ectx.Position.LineNumber > 0 { - low := ectx.Position.LineNumber - 3 - if low < 0 { - low = 0 - } + low := max(ectx.Position.LineNumber-3, 0) if ectx.Position.LineNumber > 2 { ectx.LinesPos = 2 @@ -163,10 +160,7 @@ func locateError(r io.Reader, le FileError, matches LineMatcherFn) *ErrorContext ectx.LinesPos = ectx.Position.LineNumber - 1 } - high := ectx.Position.LineNumber + 2 - if high > len(lines) { - high = len(lines) - } + high := min(ectx.Position.LineNumber+2, len(lines)) ectx.Lines = lines[low:high] diff --git a/common/hreflect/helpers.go b/common/hreflect/helpers.go index 4d7339b5b..ab7883a47 100644 --- a/common/hreflect/helpers.go +++ b/common/hreflect/helpers.go @@ -245,7 +245,7 @@ func ToSliceAny(v any) ([]any, bool) { vvv := reflect.ValueOf(v) if vvv.Kind() == reflect.Slice { out := make([]any, vvv.Len()) - for i := 0; i < vvv.Len(); i++ { + for i := range vvv.Len() { out[i] = vvv.Index(i).Interface() } return out, true diff --git a/common/hstrings/strings.go b/common/hstrings/strings.go index 1232eee37..2df3486fc 100644 --- a/common/hstrings/strings.go +++ b/common/hstrings/strings.go @@ -20,6 +20,7 @@ import ( "sync" "github.com/gohugoio/hugo/compare" + "slices" ) var _ compare.Eqer = StringEqualFold("") @@ -50,12 +51,7 @@ func (s StringEqualFold) Eq(s2 any) bool { // EqualAny returns whether a string is equal to any of the given strings. func EqualAny(a string, b ...string) bool { - for _, s := range b { - if a == s { - return true - } - } - return false + return slices.Contains(b, a) } // regexpCache represents a cache of regexp objects protected by a mutex. @@ -103,12 +99,7 @@ func GetOrCompileRegexp(pattern string) (re *regexp.Regexp, err error) { // InSlice checks if a string is an element of a slice of strings // and returns a boolean value. 
func InSlice(arr []string, el string) bool { - for _, v := range arr { - if v == el { - return true - } - } - return false + return slices.Contains(arr, el) } // InSlicEqualFold checks if a string is an element of a slice of strings diff --git a/common/hugio/hasBytesWriter_test.go b/common/hugio/hasBytesWriter_test.go index f0d6c3a7b..9e689a112 100644 --- a/common/hugio/hasBytesWriter_test.go +++ b/common/hugio/hasBytesWriter_test.go @@ -46,7 +46,7 @@ func TestHasBytesWriter(t *testing.T) { return strings.Repeat("ab cfo", r.Intn(33)) } - for i := 0; i < 22; i++ { + for range 22 { h, w := neww() fmt.Fprint(w, rndStr()+"abc __foobar"+rndStr()) c.Assert(h.Patterns[0].Match, qt.Equals, true) diff --git a/common/hugo/hugo.go b/common/hugo/hugo.go index 815c25fa7..e745e5e90 100644 --- a/common/hugo/hugo.go +++ b/common/hugo/hugo.go @@ -416,10 +416,7 @@ func Deprecate(item, alternative string, version string) { // DeprecateLevelMin informs about a deprecation starting at the given version, but with a minimum log level. func DeprecateLevelMin(item, alternative string, version string, minLevel logg.Level) { - level := deprecationLogLevelFromVersion(version) - if level < minLevel { - level = minLevel - } + level := max(deprecationLogLevelFromVersion(version), minLevel) DeprecateLevel(item, alternative, version, level) } diff --git a/common/loggers/logger_test.go b/common/loggers/logger_test.go index b03e6d903..bc8975b06 100644 --- a/common/loggers/logger_test.go +++ b/common/loggers/logger_test.go @@ -37,7 +37,7 @@ func TestLogDistinct(t *testing.T) { l := loggers.New(opts) - for i := 0; i < 10; i++ { + for range 10 { l.Errorln("error 1") l.Errorln("error 2") l.Warnln("warn 1") @@ -137,7 +137,7 @@ func TestReset(t *testing.T) { l := loggers.New(opts) - for i := 0; i < 3; i++ { + for range 3 { l.Errorln("error 1") l.Errorln("error 2") l.Errorln("error 1") diff --git a/common/maps/ordered.go b/common/maps/ordered.go index eaa4d73c6..08dd77919 100644 --- a/common/maps/ordered.go +++ b/common/maps/ordered.go @@ -15,6 +15,7 @@ package maps import ( "github.com/gohugoio/hugo/common/hashing" + "slices" ) // Ordered is a map that can be iterated in the order of insertion. @@ -64,7 +65,7 @@ func (m *Ordered[K, T]) Delete(key K) { delete(m.values, key) for i, k := range m.keys { if k == key { - m.keys = append(m.keys[:i], m.keys[i+1:]...) 
+ m.keys = slices.Delete(m.keys, i, i+1) break } } diff --git a/common/maps/scratch_test.go b/common/maps/scratch_test.go index 88fd73f2b..f07169e61 100644 --- a/common/maps/scratch_test.go +++ b/common/maps/scratch_test.go @@ -140,7 +140,7 @@ func TestScratchInParallel(t *testing.T) { for i := 1; i <= 10; i++ { wg.Add(1) go func(j int) { - for k := 0; k < 10; k++ { + for k := range 10 { newVal := int64(k + j) _, err := scratch.Add(key, newVal) diff --git a/common/para/para_test.go b/common/para/para_test.go index 2d9188ecf..cf24a4e37 100644 --- a/common/para/para_test.go +++ b/common/para/para_test.go @@ -42,7 +42,7 @@ func TestPara(t *testing.T) { c.Run("Order", func(c *qt.C) { n := 500 ints := make([]int, n) - for i := 0; i < n; i++ { + for i := range n { ints[i] = i } @@ -51,7 +51,7 @@ func TestPara(t *testing.T) { var result []int var mu sync.Mutex - for i := 0; i < n; i++ { + for i := range n { i := i r.Run(func() error { mu.Lock() @@ -78,7 +78,7 @@ func TestPara(t *testing.T) { var counter int64 - for i := 0; i < n; i++ { + for range n { r.Run(func() error { atomic.AddInt64(&counter, 1) time.Sleep(1 * time.Millisecond) diff --git a/common/rungroup/rungroup.go b/common/rungroup/rungroup.go index 96ec57883..80a730ca9 100644 --- a/common/rungroup/rungroup.go +++ b/common/rungroup/rungroup.go @@ -51,7 +51,7 @@ func Run[T any](ctx context.Context, cfg Config[T]) Group[T] { // Buffered for performance. ch := make(chan T, cfg.NumWorkers) - for i := 0; i < cfg.NumWorkers; i++ { + for range cfg.NumWorkers { g.Go(func() error { for { select { diff --git a/common/tasks/tasks.go b/common/tasks/tasks.go index 1f7e061f9..3f8a754e9 100644 --- a/common/tasks/tasks.go +++ b/common/tasks/tasks.go @@ -103,10 +103,7 @@ func (r *RunEvery) Add(name string, f Func) { f.IntervalHigh = 20 * time.Second } - start := f.IntervalHigh / 3 - if start < f.IntervalLow { - start = f.IntervalLow - } + start := max(f.IntervalHigh/3, f.IntervalLow) f.interval = start f.last = time.Now() diff --git a/common/types/convert.go b/common/types/convert.go index 0cb5035df..6b1750376 100644 --- a/common/types/convert.go +++ b/common/types/convert.go @@ -69,7 +69,7 @@ func ToStringSlicePreserveStringE(v any) ([]string, error) { switch vv.Kind() { case reflect.Slice, reflect.Array: result = make([]string, vv.Len()) - for i := 0; i < vv.Len(); i++ { + for i := range vv.Len() { s, err := cast.ToStringE(vv.Index(i).Interface()) if err != nil { return nil, err diff --git a/common/types/evictingqueue.go b/common/types/evictingqueue.go index c3598f19f..a335be3b2 100644 --- a/common/types/evictingqueue.go +++ b/common/types/evictingqueue.go @@ -15,6 +15,7 @@ package types import ( + "slices" "sync" ) @@ -45,7 +46,7 @@ func (q *EvictingQueue[T]) Add(v T) *EvictingQueue[T] { if len(q.set) == q.size { // Full delete(q.set, q.vals[0]) - q.vals = append(q.vals[:0], q.vals[1:]...) 
+ q.vals = slices.Delete(q.vals, 0, 1) } q.set[v] = true q.vals = append(q.vals, v) diff --git a/common/types/evictingqueue_test.go b/common/types/evictingqueue_test.go index cd10d3d8e..b93243f3c 100644 --- a/common/types/evictingqueue_test.go +++ b/common/types/evictingqueue_test.go @@ -55,7 +55,7 @@ func TestEvictingStringQueueConcurrent(t *testing.T) { queue := NewEvictingQueue[string](3) - for j := 0; j < 100; j++ { + for range 100 { wg.Add(1) go func() { defer wg.Done() diff --git a/common/types/types.go b/common/types/types.go index 062ecc403..082c058ff 100644 --- a/common/types/types.go +++ b/common/types/types.go @@ -59,7 +59,7 @@ func (k KeyValues) String() string { // KeyValues struct. func NewKeyValuesStrings(key string, values ...string) KeyValues { iv := make([]any, len(values)) - for i := 0; i < len(values); i++ { + for i := range values { iv[i] = values[i] } return KeyValues{Key: key, Values: iv} diff --git a/config/allconfig/allconfig.go b/config/allconfig/allconfig.go index af841fb54..3b3109aeb 100644 --- a/config/allconfig/allconfig.go +++ b/config/allconfig/allconfig.go @@ -82,7 +82,7 @@ func init() { } configLanguageKeys = make(map[string]bool) addKeys := func(v reflect.Value) { - for i := 0; i < v.NumField(); i++ { + for i := range v.NumField() { name := strings.ToLower(v.Type().Field(i).Name) if skip[name] { continue diff --git a/config/allconfig/load.go b/config/allconfig/load.go index 9e9c7a42a..f224009ac 100644 --- a/config/allconfig/load.go +++ b/config/allconfig/load.go @@ -305,7 +305,7 @@ func (l configLoader) applyOsEnvOverrides(environ []string) error { _, ok := allDecoderSetups[key] if ok { // A map. - if v, err := metadecoders.Default.UnmarshalStringTo(env.Value, map[string]interface{}{}); err == nil { + if v, err := metadecoders.Default.UnmarshalStringTo(env.Value, map[string]any{}); err == nil { val = v } } diff --git a/config/commonConfig.go b/config/commonConfig.go index a31c2312e..3dfd9b409 100644 --- a/config/commonConfig.go +++ b/config/commonConfig.go @@ -28,6 +28,7 @@ import ( "github.com/gohugoio/hugo/common/herrors" "github.com/mitchellh/mapstructure" "github.com/spf13/cast" + "slices" ) type BaseConfig struct { @@ -128,7 +129,7 @@ func (w BuildStats) Enabled() bool { } func (b BuildConfig) clone() BuildConfig { - b.CacheBusters = append([]CacheBuster{}, b.CacheBusters...) 
+ b.CacheBusters = slices.Clone(b.CacheBusters) return b } diff --git a/config/commonConfig_test.go b/config/commonConfig_test.go index ce68cec15..05ba185e3 100644 --- a/config/commonConfig_test.go +++ b/config/commonConfig_test.go @@ -166,7 +166,7 @@ func TestBuildConfigCacheBusters(t *testing.T) { func TestBuildConfigCacheBusterstTailwindSetup(t *testing.T) { c := qt.New(t) cfg := New() - cfg.Set("build", map[string]interface{}{ + cfg.Set("build", map[string]any{ "cacheBusters": []map[string]string{ { "source": "assets/watching/hugo_stats\\.json", diff --git a/config/defaultConfigProvider.go b/config/defaultConfigProvider.go index bb7c47412..8c1d63851 100644 --- a/config/defaultConfigProvider.go +++ b/config/defaultConfigProvider.go @@ -345,7 +345,7 @@ func (c *defaultConfigProvider) getNestedKeyAndMap(key string, create bool) (str c.keyCache.Store(key, parts) } current := c.root - for i := 0; i < len(parts)-1; i++ { + for i := range len(parts) - 1 { next, found := current[parts[i]] if !found { if create { diff --git a/config/defaultConfigProvider_test.go b/config/defaultConfigProvider_test.go index 65f10ec6a..cd6247e60 100644 --- a/config/defaultConfigProvider_test.go +++ b/config/defaultConfigProvider_test.go @@ -332,7 +332,7 @@ func TestDefaultConfigProvider(t *testing.T) { return nil } - for i := 0; i < 20; i++ { + for i := range 20 { i := i r.Run(func() error { const v = 42 diff --git a/config/namespace_test.go b/config/namespace_test.go index df27ae05c..f443523a4 100644 --- a/config/namespace_test.go +++ b/config/namespace_test.go @@ -29,7 +29,7 @@ func TestNamespace(t *testing.T) { // ns, err := config.DecodeNamespace[map[string]DocsMediaTypeConfig](in, defaultMediaTypesConfig, buildConfig) ns, err := DecodeNamespace[[]*tstNsExt]( - map[string]interface{}{"foo": "bar"}, + map[string]any{"foo": "bar"}, func(v any) (*tstNsExt, any, error) { t := &tstNsExt{} m, err := maps.ToStringMapE(v) @@ -42,7 +42,7 @@ func TestNamespace(t *testing.T) { c.Assert(err, qt.IsNil) c.Assert(ns, qt.Not(qt.IsNil)) - c.Assert(ns.SourceStructure, qt.DeepEquals, map[string]interface{}{"foo": "bar"}) + c.Assert(ns.SourceStructure, qt.DeepEquals, map[string]any{"foo": "bar"}) c.Assert(ns.SourceHash, qt.Equals, "1420f6c7782f7459") c.Assert(ns.Config, qt.DeepEquals, &tstNsExt{Foo: "bar"}) c.Assert(ns.Signature(), qt.DeepEquals, []*tstNsExt(nil)) diff --git a/config/security/whitelist.go b/config/security/whitelist.go index 92eb3102f..5ce369a1f 100644 --- a/config/security/whitelist.go +++ b/config/security/whitelist.go @@ -73,7 +73,7 @@ func NewWhitelist(patterns ...string) (Whitelist, error) { var patternsr []*regexp.Regexp - for i := 0; i < len(patterns); i++ { + for i := range patterns { p := strings.TrimSpace(patterns[i]) if p == "" { continue diff --git a/create/content_test.go b/create/content_test.go index 63045cbea..429edfc26 100644 --- a/create/content_test.go +++ b/create/content_test.go @@ -129,7 +129,7 @@ site RegularPages: {{ len site.RegularPages }} ` - c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil) + c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), fmt.Appendf(nil, contentFile, "index.md"), 0o755), qt.IsNil) c.Assert(afero.WriteFile(mm, filepath.Join(defaultArchetypeDir, "index.md"), []byte("default archetype index.md"), 0o755), qt.IsNil) c.Assert(initFs(mm), qt.IsNil) diff --git a/helpers/content.go b/helpers/content.go index 9d74a3d31..6edcf88d4 100644 --- a/helpers/content.go +++ 
b/helpers/content.go @@ -109,10 +109,7 @@ func ExtractTOC(content []byte) (newcontent []byte, toc []byte) { startOfTOC := bytes.Index(content, first) - peekEnd := len(content) - if peekEnd > 70+startOfTOC { - peekEnd = 70 + startOfTOC - } + peekEnd := min(len(content), 70+startOfTOC) if startOfTOC < 0 { return stripEmptyNav(content), toc diff --git a/helpers/emoji.go b/helpers/emoji.go index c103a5479..aa5540dca 100644 --- a/helpers/emoji.go +++ b/helpers/emoji.go @@ -43,11 +43,7 @@ func Emojify(source []byte) []byte { j := start + k - upper := j + emojiMaxSize - - if upper > len(source) { - upper = len(source) - } + upper := min(j+emojiMaxSize, len(source)) endEmoji := bytes.Index(source[j+1:upper], emojiDelim) nextWordDelim := bytes.Index(source[j:upper], emojiWordDelim) diff --git a/helpers/general.go b/helpers/general.go index 11cc185a8..76275a6b9 100644 --- a/helpers/general.go +++ b/helpers/general.go @@ -63,7 +63,7 @@ func UniqueStrings(s []string) []string { unique := make([]string, 0, len(s)) for i, val := range s { var seen bool - for j := 0; j < i; j++ { + for j := range i { if s[j] == val { seen = true break @@ -83,7 +83,7 @@ func UniqueStringsReuse(s []string) []string { for i, val := range s { var seen bool - for j := 0; j < i; j++ { + for j := range i { if s[j] == val { seen = true break diff --git a/helpers/processing_stats.go b/helpers/processing_stats.go index 540060aa2..3f48466c7 100644 --- a/helpers/processing_stats.go +++ b/helpers/processing_stats.go @@ -89,7 +89,7 @@ func ProcessingStatsTable(w io.Writer, stats ...*ProcessingStats) { var data [][]string - for i := 0; i < len(stats); i++ { + for i := range stats { stat := stats[i] names[i+1] = stat.Name diff --git a/helpers/url_test.go b/helpers/url_test.go index ce1b24487..7f625035c 100644 --- a/helpers/url_test.go +++ b/helpers/url_test.go @@ -101,10 +101,10 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, v := config.New() if multilingual { v.Set("languages", map[string]any{ - "fr": map[string]interface{}{ + "fr": map[string]any{ "weight": 20, }, - "en": map[string]interface{}{ + "en": map[string]any{ "weight": 10, }, }) @@ -112,7 +112,7 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, } else { v.Set("defaultContentLanguage", lang) v.Set("languages", map[string]any{ - lang: map[string]interface{}{ + lang: map[string]any{ "weight": 10, }, }) @@ -167,10 +167,10 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool, v := config.New() if multilingual { v.Set("languages", map[string]any{ - "fr": map[string]interface{}{ + "fr": map[string]any{ "weight": 20, }, - "en": map[string]interface{}{ + "en": map[string]any{ "weight": 10, }, }) @@ -178,7 +178,7 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool, } else { v.Set("defaultContentLanguage", lang) v.Set("languages", map[string]any{ - lang: map[string]interface{}{ + lang: map[string]any{ "weight": 10, }, }) diff --git a/htesting/hqt/checkers.go b/htesting/hqt/checkers.go index a05206535..b06185756 100644 --- a/htesting/hqt/checkers.go +++ b/htesting/hqt/checkers.go @@ -151,7 +151,7 @@ func structTypes(v reflect.Value, m map[reflect.Type]struct{}) { structTypes(v.Elem(), m) } case reflect.Slice, reflect.Array: - for i := 0; i < v.Len(); i++ { + for i := range v.Len() { structTypes(v.Index(i), m) } case reflect.Map: @@ -160,7 +160,7 @@ func structTypes(v reflect.Value, m map[reflect.Type]struct{}) { } case reflect.Struct: m[v.Type()] = 
struct{}{} - for i := 0; i < v.NumField(); i++ { + for i := range v.NumField() { structTypes(v.Field(i), m) } } diff --git a/hugofs/fileinfo.go b/hugofs/fileinfo.go index 60d2a38df..5e6a87acc 100644 --- a/hugofs/fileinfo.go +++ b/hugofs/fileinfo.go @@ -93,7 +93,7 @@ func (m *FileMeta) Merge(from *FileMeta) { dstv := reflect.Indirect(reflect.ValueOf(m)) srcv := reflect.Indirect(reflect.ValueOf(from)) - for i := 0; i < dstv.NumField(); i++ { + for i := range dstv.NumField() { v := dstv.Field(i) if !v.CanSet() { continue diff --git a/hugofs/fs.go b/hugofs/fs.go index fab0d3886..aecf72a7f 100644 --- a/hugofs/fs.go +++ b/hugofs/fs.go @@ -214,7 +214,7 @@ func WalkFilesystems(fs afero.Fs, fn WalkFn) bool { } } } else if cfs, ok := fs.(overlayfs.FilesystemIterator); ok { - for i := 0; i < cfs.NumFilesystems(); i++ { + for i := range cfs.NumFilesystems() { if WalkFilesystems(cfs.Filesystem(i), fn) { return true } diff --git a/hugofs/glob/glob.go b/hugofs/glob/glob.go index 42aa1fa3b..a4e5c49e8 100644 --- a/hugofs/glob/glob.go +++ b/hugofs/glob/glob.go @@ -166,7 +166,7 @@ func FilterGlobParts(a []string) []string { // HasGlobChar returns whether s contains any glob wildcards. func HasGlobChar(s string) bool { - for i := 0; i < len(s); i++ { + for i := range len(s) { if syntax.Special(s[i]) { return true } diff --git a/hugofs/walk_test.go b/hugofs/walk_test.go index 7366d008d..03b808533 100644 --- a/hugofs/walk_test.go +++ b/hugofs/walk_test.go @@ -91,7 +91,7 @@ func TestWalkRootMappingFs(t *testing.T) { p := para.New(4) r, _ := p.Start(context.Background()) - for i := 0; i < 8; i++ { + for range 8 { r.Run(func() error { _, err := collectPaths(bfs, "") if err != nil { @@ -153,7 +153,7 @@ func BenchmarkWalk(b *testing.B) { fs := NewBaseFileDecorator(afero.NewMemMapFs()) writeFiles := func(dir string, numfiles int) { - for i := 0; i < numfiles; i++ { + for i := range numfiles { filename := filepath.Join(dir, fmt.Sprintf("file%d.txt", i)) c.Assert(afero.WriteFile(fs, filename, []byte("content"), 0o777), qt.IsNil) } diff --git a/hugolib/cascade_test.go b/hugolib/cascade_test.go index f3060814c..d0a6730db 100644 --- a/hugolib/cascade_test.go +++ b/hugolib/cascade_test.go @@ -871,7 +871,7 @@ Background: {{ .Params.background }}| {{ .Title }}| ` - for i := 0; i < 10; i++ { + for range 10 { b := Test(t, files) b.AssertFileContent("public/p1/index.html", "Background: yosemite.jpg") } diff --git a/hugolib/config_test.go b/hugolib/config_test.go index c0bfde37d..cbf821ee7 100644 --- a/hugolib/config_test.go +++ b/hugolib/config_test.go @@ -793,7 +793,7 @@ Single. 
files := strings.ReplaceAll(filesTemplate, "WEIGHT_EN", "2") files = strings.ReplaceAll(files, "WEIGHT_SV", "1") - for i := 0; i < 20; i++ { + for range 20 { cfg := config.New() b, err := NewIntegrationTestBuilder( IntegrationTestConfig{ diff --git a/hugolib/content_map_test.go b/hugolib/content_map_test.go index 4cbdaf53a..7202840f3 100644 --- a/hugolib/content_map_test.go +++ b/hugolib/content_map_test.go @@ -323,7 +323,7 @@ R: {{ with $r }}{{ .Content }}{{ end }}|Len: {{ len $bundle.Resources }}|$ ` - for i := 0; i < 3; i++ { + for range 3 { b := Test(t, files) b.AssertFileContent("public/index.html", "R: Data 1.txt|", "Len: 1|") } @@ -435,14 +435,14 @@ func TestContentTreeReverseIndex(t *testing.T) { pageReverseIndex := newContentTreeTreverseIndex( func(get func(key any) (contentNodeI, bool), set func(key any, val contentNodeI)) { - for i := 0; i < 10; i++ { + for i := range 10 { key := fmt.Sprint(i) set(key, &testContentNode{key: key}) } }, ) - for i := 0; i < 10; i++ { + for i := range 10 { key := fmt.Sprint(i) v := pageReverseIndex.Get(key) c.Assert(v, qt.Not(qt.IsNil)) @@ -456,17 +456,17 @@ func TestContentTreeReverseIndexPara(t *testing.T) { var wg sync.WaitGroup - for i := 0; i < 10; i++ { + for range 10 { pageReverseIndex := newContentTreeTreverseIndex( func(get func(key any) (contentNodeI, bool), set func(key any, val contentNodeI)) { - for i := 0; i < 10; i++ { + for i := range 10 { key := fmt.Sprint(i) set(key, &testContentNode{key: key}) } }, ) - for j := 0; j < 10; j++ { + for j := range 10 { wg.Add(1) go func(i int) { defer wg.Done() diff --git a/hugolib/doctree/nodeshiftree_test.go b/hugolib/doctree/nodeshiftree_test.go index ac89037ac..0f4fb6f68 100644 --- a/hugolib/doctree/nodeshiftree_test.go +++ b/hugolib/doctree/nodeshiftree_test.go @@ -193,7 +193,7 @@ func TestTreePara(t *testing.T) { }, ) - for i := 0; i < 8; i++ { + for i := range 8 { i := i r.Run(func() error { a := &testValue{ID: "/a"} @@ -289,7 +289,7 @@ func BenchmarkTreeInsert(b *testing.B) { }, ) - for i := 0; i < numElements; i++ { + for i := range numElements { lang := rand.Intn(2) tree.InsertIntoValuesDimension(fmt.Sprintf("/%d", i), &testValue{ID: fmt.Sprintf("/%d", i), Lang: lang, Weight: i, NoCopy: true}) } @@ -323,7 +323,7 @@ func BenchmarkWalk(b *testing.B) { }, ) - for i := 0; i < numElements; i++ { + for i := range numElements { lang := rand.Intn(2) tree.InsertIntoValuesDimension(fmt.Sprintf("/%d", i), &testValue{ID: fmt.Sprintf("/%d", i), Lang: lang, Weight: i, NoCopy: true}) } @@ -355,8 +355,8 @@ func BenchmarkWalk(b *testing.B) { base := createTree() b.ResetTimer() for i := 0; i < b.N; i++ { - for d1 := 0; d1 < 1; d1++ { - for d2 := 0; d2 < 2; d2++ { + for d1 := range 1 { + for d2 := range 2 { tree := base.Shape(d1, d2) w := &doctree.NodeShiftTreeWalker[*testValue]{ Tree: tree, diff --git a/hugolib/doctree/nodeshifttree.go b/hugolib/doctree/nodeshifttree.go index 497e9f02e..298b24d1b 100644 --- a/hugolib/doctree/nodeshifttree.go +++ b/hugolib/doctree/nodeshifttree.go @@ -363,7 +363,7 @@ func (r *NodeShiftTreeWalker[T]) Walk(ctx context.Context) error { main := r.Tree var err error - fnMain := func(s string, v interface{}) bool { + fnMain := func(s string, v any) bool { if r.ShouldSkip(s) { return false } diff --git a/hugolib/doctree/treeshifttree.go b/hugolib/doctree/treeshifttree.go index cd13b9f61..059eaaf88 100644 --- a/hugolib/doctree/treeshifttree.go +++ b/hugolib/doctree/treeshifttree.go @@ -34,7 +34,7 @@ func NewTreeShiftTree[T comparable](d, length int) *TreeShiftTree[T] { panic("length 
must be > 0") } trees := make([]*SimpleTree[T], length) - for i := 0; i < length; i++ { + for i := range length { trees[i] = NewSimpleTree[T]() } return &TreeShiftTree[T]{d: d, trees: trees} diff --git a/hugolib/filesystems/basefs.go b/hugolib/filesystems/basefs.go index cb7846cd1..3e9b92087 100644 --- a/hugolib/filesystems/basefs.go +++ b/hugolib/filesystems/basefs.go @@ -634,7 +634,7 @@ func (b *sourceFilesystemsBuilder) createMainOverlayFs(p *paths.Paths) (*filesys mounts := make([]mountsDescriptor, len(mods)) - for i := 0; i < len(mods); i++ { + for i := range mods { mod := mods[i] dir := mod.Dir() diff --git a/hugolib/filesystems/basefs_test.go b/hugolib/filesystems/basefs_test.go index 3f189c860..abe06ac4a 100644 --- a/hugolib/filesystems/basefs_test.go +++ b/hugolib/filesystems/basefs_test.go @@ -57,14 +57,14 @@ func TestNewBaseFs(t *testing.T) { filenameTheme := filepath.Join(base, fmt.Sprintf("theme-file-%s.txt", theme)) filenameOverlap := filepath.Join(base, "f3.txt") afs.Mkdir(base, 0o755) - content := []byte(fmt.Sprintf("content:%s:%s", theme, dir)) + content := fmt.Appendf(nil, "content:%s:%s", theme, dir) afero.WriteFile(afs, filenameTheme, content, 0o755) afero.WriteFile(afs, filenameOverlap, content, 0o755) } // Write some files to the root of the theme base := filepath.Join(workingDir, "themes", theme) - afero.WriteFile(afs, filepath.Join(base, fmt.Sprintf("theme-root-%s.txt", theme)), []byte(fmt.Sprintf("content:%s", theme)), 0o755) - afero.WriteFile(afs, filepath.Join(base, "file-theme-root.txt"), []byte(fmt.Sprintf("content:%s", theme)), 0o755) + afero.WriteFile(afs, filepath.Join(base, fmt.Sprintf("theme-root-%s.txt", theme)), fmt.Appendf(nil, "content:%s", theme), 0o755) + afero.WriteFile(afs, filepath.Join(base, "file-theme-root.txt"), fmt.Appendf(nil, "content:%s", theme), 0o755) } afero.WriteFile(afs, filepath.Join(workingDir, "file-root.txt"), []byte("content-project"), 0o755) @@ -683,8 +683,8 @@ func setConfigAndWriteSomeFilesTo(fs afero.Fs, v config.Provider, key, val strin workingDir := v.GetString("workingDir") v.Set(key, val) fs.Mkdir(val, 0o755) - for i := 0; i < num; i++ { + for i := range num { filename := filepath.Join(workingDir, val, fmt.Sprintf("f%d.txt", i+1)) - afero.WriteFile(fs, filename, []byte(fmt.Sprintf("content:%s:%d", key, i+1)), 0o755) + afero.WriteFile(fs, filename, fmt.Appendf(nil, "content:%s:%d", key, i+1), 0o755) } } diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go index b17c761aa..5373f9832 100644 --- a/hugolib/hugo_sites.go +++ b/hugolib/hugo_sites.go @@ -311,7 +311,7 @@ func (h *HugoSites) NumLogErrors() int { func (h *HugoSites) PrintProcessingStats(w io.Writer) { stats := make([]*helpers.ProcessingStats, len(h.Sites)) - for i := 0; i < len(h.Sites); i++ { + for i := range h.Sites { stats[i] = h.Sites[i].PathSpec.ProcessingStats } helpers.ProcessingStatsTable(w, stats...) diff --git a/hugolib/page.go b/hugolib/page.go index 701e0f11b..de64767df 100644 --- a/hugolib/page.go +++ b/hugolib/page.go @@ -707,7 +707,7 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error { cp := p.pageOutput.pco if cp == nil && p.reusePageOutputContent() { // Look for content to reuse. 
- for i := 0; i < len(p.pageOutputs); i++ { + for i := range p.pageOutputs { if i == idx { continue } diff --git a/hugolib/page__content.go b/hugolib/page__content.go index f7579f182..3cfea1727 100644 --- a/hugolib/page__content.go +++ b/hugolib/page__content.go @@ -45,6 +45,7 @@ import ( "github.com/gohugoio/hugo/tpl" "github.com/mitchellh/mapstructure" "github.com/spf13/cast" + maps0 "maps" ) const ( @@ -696,9 +697,7 @@ func (c *cachedContentScope) contentToC(ctx context.Context) (contentTableOfCont cp.otherOutputs.Set(cp2.po.p.pid, cp2) // Merge content placeholders - for k, v := range ct2.contentPlaceholders { - ct.contentPlaceholders[k] = v - } + maps0.Copy(ct.contentPlaceholders, ct2.contentPlaceholders) if p.s.conf.Internal.Watch { for _, s := range cp2.po.p.m.content.shortcodeState.shortcodes { diff --git a/hugolib/pagebundler_test.go b/hugolib/pagebundler_test.go index e4da75a72..e5521412b 100644 --- a/hugolib/pagebundler_test.go +++ b/hugolib/pagebundler_test.go @@ -690,7 +690,7 @@ bundle min min key: {{ $jsonMinMin.Key }} `) - for i := 0; i < 3; i++ { + for range 3 { b.Build(BuildCfg{}) diff --git a/hugolib/pagecollections_test.go b/hugolib/pagecollections_test.go index 692ae9ef6..10c973b7e 100644 --- a/hugolib/pagecollections_test.go +++ b/hugolib/pagecollections_test.go @@ -47,8 +47,8 @@ func BenchmarkGetPage(b *testing.B) { b.Fatal(err) } - for i := 0; i < 10; i++ { - for j := 0; j < 100; j++ { + for i := range 10 { + for j := range 100 { writeSource(b, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), "CONTENT") } } @@ -91,8 +91,8 @@ func createGetPageRegularBenchmarkSite(t testing.TB) *Site { return fmt.Sprintf(pageCollectionsPageTemplate, title) } - for i := 0; i < 10; i++ { - for j := 0; j < 100; j++ { + for i := range 10 { + for j := range 100 { content := pc(fmt.Sprintf("Title%d_%d", i, j)) writeSource(c, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content) } @@ -105,7 +105,7 @@ func TestBenchmarkGetPageRegular(t *testing.T) { c := qt.New(t) s := createGetPageRegularBenchmarkSite(t) - for i := 0; i < 10; i++ { + for i := range 10 { pp := path.Join("/", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", i)) page, _ := s.getPage(nil, pp) c.Assert(page, qt.Not(qt.IsNil), qt.Commentf(pp)) @@ -192,8 +192,8 @@ func TestGetPage(t *testing.T) { return fmt.Sprintf(pageCollectionsPageTemplate, title) } - for i := 0; i < 10; i++ { - for j := 0; j < 10; j++ { + for i := range 10 { + for j := range 10 { content := pc(fmt.Sprintf("Title%d_%d", i, j)) writeSource(t, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content) } diff --git a/hugolib/pages_language_merge_test.go b/hugolib/pages_language_merge_test.go index 3862d7cf0..ba1ed83de 100644 --- a/hugolib/pages_language_merge_test.go +++ b/hugolib/pages_language_merge_test.go @@ -42,7 +42,7 @@ func TestMergeLanguages(t *testing.T) { c.Assert(len(frSite.RegularPages()), qt.Equals, 6) c.Assert(len(nnSite.RegularPages()), qt.Equals, 12) - for i := 0; i < 2; i++ { + for range 2 { mergedNN := nnSite.RegularPages().MergeByLanguage(enSite.RegularPages()) c.Assert(len(mergedNN), qt.Equals, 31) for i := 1; i <= 31; i++ { @@ -163,7 +163,7 @@ date: "2018-02-28" // Add a bundles j := 100 contentPairs = append(contentPairs, []string{"bundle/index.md", fmt.Sprintf(contentTemplate, j, j)}...) 
- for i := 0; i < 6; i++ { + for i := range 6 { contentPairs = append(contentPairs, []string{fmt.Sprintf("bundle/pb%d.md", i), fmt.Sprintf(contentTemplate, i+j, i+j)}...) } contentPairs = append(contentPairs, []string{"bundle/index.nn.md", fmt.Sprintf(contentTemplate, j, j)}...) diff --git a/hugolib/paginator_test.go b/hugolib/paginator_test.go index dcee6e38e..2470a9046 100644 --- a/hugolib/paginator_test.go +++ b/hugolib/paginator_test.go @@ -40,7 +40,7 @@ contentDir = "content/nn" ` b := newTestSitesBuilder(t).WithConfigFile("toml", configFile) var content []string - for i := 0; i < 9; i++ { + for i := range 9 { for _, contentDir := range []string{"content/en", "content/nn"} { content = append(content, fmt.Sprintf(contentDir+"/blog/page%d.md", i), fmt.Sprintf(`--- title: Page %d @@ -118,7 +118,7 @@ cascade: - JSON ---`) - for i := 0; i < 22; i++ { + for i := range 22 { b.WithContent(fmt.Sprintf("p%d.md", i+1), fmt.Sprintf(`--- title: "Page" weight: %d diff --git a/hugolib/rebuild_test.go b/hugolib/rebuild_test.go index dc2c6524f..fab47679f 100644 --- a/hugolib/rebuild_test.go +++ b/hugolib/rebuild_test.go @@ -124,7 +124,7 @@ func TestRebuildEditTextFileInLeafBundle(t *testing.T) { func TestRebuildEditTextFileInShortcode(t *testing.T) { t.Parallel() - for i := 0; i < 3; i++ { + for range 3 { b := TestRunning(t, rebuildFilesSimple) b.AssertFileContent("public/mythirdsection/mythirdsectionpage/index.html", "Text: Assets My Shortcode Text.") @@ -138,7 +138,7 @@ func TestRebuildEditTextFileInShortcode(t *testing.T) { func TestRebuildEditTextFileInHook(t *testing.T) { t.Parallel() - for i := 0; i < 3; i++ { + for range 3 { b := TestRunning(t, rebuildFilesSimple) b.AssertFileContent("public/mythirdsection/mythirdsectionpage/index.html", "Text: Assets My Other Text.") @@ -1545,7 +1545,7 @@ title: "P%d" P%d Content. ` - for i := 0; i < count; i++ { + for i := range count { files += fmt.Sprintf("-- content/mysect/p%d/index.md --\n%s", i, fmt.Sprintf(contentTemplate, i, i)) } diff --git a/hugolib/resource_chain_test.go b/hugolib/resource_chain_test.go index 942873ae4..00e4c0060 100644 --- a/hugolib/resource_chain_test.go +++ b/hugolib/resource_chain_test.go @@ -99,7 +99,7 @@ FAILED REMOTE ERROR DETAILS CONTENT: {{ with $failedImg }}{{ with .Err }}{{ with b.Running() - for i := 0; i < 2; i++ { + for i := range 2 { b.Logf("Test run %d", i) b.Build(BuildCfg{}) @@ -200,7 +200,7 @@ func BenchmarkResourceChainPostProcess(b *testing.B) { for i := 0; i < b.N; i++ { b.StopTimer() s := newTestSitesBuilder(b) - for i := 0; i < 300; i++ { + for i := range 300 { s.WithContent(fmt.Sprintf("page%d.md", i+1), "---\ntitle: Page\n---") } s.WithTemplates("_default/single.html", `Start. 
diff --git a/hugolib/shortcode_test.go b/hugolib/shortcode_test.go index 5799de452..93edd9345 100644 --- a/hugolib/shortcode_test.go +++ b/hugolib/shortcode_test.go @@ -865,13 +865,13 @@ Content: {{ .Content }}| func TestShortcodeStableOutputFormatTemplates(t *testing.T) { t.Parallel() - for i := 0; i < 5; i++ { + for range 5 { b := newTestSitesBuilder(t) const numPages = 10 - for i := 0; i < numPages; i++ { + for i := range numPages { b.WithContent(fmt.Sprintf("page%d.md", i), `--- title: "Page" outputs: ["html", "css", "csv", "json"] @@ -894,14 +894,14 @@ outputs: ["html", "css", "csv", "json"] // helpers.PrintFs(b.Fs.Destination, "public", os.Stdout) - for i := 0; i < numPages; i++ { + for i := range numPages { b.AssertFileContent(fmt.Sprintf("public/page%d/index.html", i), "Short-HTML") b.AssertFileContent(fmt.Sprintf("public/page%d/index.csv", i), "Short-CSV") b.AssertFileContent(fmt.Sprintf("public/page%d/index.json", i), "Short-HTML") } - for i := 0; i < numPages; i++ { + for i := range numPages { b.AssertFileContent(fmt.Sprintf("public/page%d/styles.css", i), "Short-HTML") } diff --git a/hugolib/site.go b/hugolib/site.go index 7c09ba346..dab23d670 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -330,10 +330,7 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) { func newHugoSites(cfg deps.DepsCfg, d *deps.Deps, pageTrees *pageTrees, sites []*Site) (*HugoSites, error) { numWorkers := config.GetNumWorkerMultiplier() - numWorkersSite := numWorkers - if numWorkersSite > len(sites) { - numWorkersSite = len(sites) - } + numWorkersSite := min(numWorkers, len(sites)) workersSite := para.New(numWorkersSite) h := &HugoSites{ diff --git a/hugolib/site_render.go b/hugolib/site_render.go index 5ac3d5a75..c88036f26 100644 --- a/hugolib/site_render.go +++ b/hugolib/site_render.go @@ -78,7 +78,7 @@ func (s *Site) renderPages(ctx *siteRenderContext) error { wg := &sync.WaitGroup{} - for i := 0; i < numWorkers; i++ { + for range numWorkers { wg.Add(1) go pageRenderer(ctx, s, pages, results, wg) } diff --git a/hugolib/site_stats_test.go b/hugolib/site_stats_test.go index da09ec368..02f2c0a8c 100644 --- a/hugolib/site_stats_test.go +++ b/hugolib/site_stats_test.go @@ -69,15 +69,15 @@ aliases: [/Ali%d] "_default/terms.html", "Terms List|{{ .Title }}|{{ .Content }}", ) - for i := 0; i < 2; i++ { - for j := 0; j < 2; j++ { + for i := range 2 { + for j := range 2 { pageID := i + j + 1 b.WithContent(fmt.Sprintf("content/sect/p%d.md", pageID), fmt.Sprintf(pageTemplate, pageID, fmt.Sprintf("- tag%d", j), fmt.Sprintf("- category%d", j), pageID)) } } - for i := 0; i < 5; i++ { + for i := range 5 { b.WithContent(fmt.Sprintf("assets/image%d.png", i+1), "image") } diff --git a/hugolib/site_test.go b/hugolib/site_test.go index 2ee33da24..e611897fe 100644 --- a/hugolib/site_test.go +++ b/hugolib/site_test.go @@ -372,14 +372,14 @@ func TestMainSections(t *testing.T) { b := newTestSitesBuilder(c).WithViper(v) - for i := 0; i < 20; i++ { + for i := range 20 { b.WithContent(fmt.Sprintf("page%d.md", i), `--- title: "Page" --- `) } - for i := 0; i < 5; i++ { + for i := range 5 { b.WithContent(fmt.Sprintf("blog/page%d.md", i), `--- title: "Page" tags: ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"] @@ -387,7 +387,7 @@ tags: ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"] `) } - for i := 0; i < 3; i++ { + for i := range 3 { b.WithContent(fmt.Sprintf("docs/page%d.md", i), `--- title: "Page" --- diff --git a/hugolib/site_url_test.go b/hugolib/site_url_test.go index 29170118f..091251f80 100644 --- 
a/hugolib/site_url_test.go +++ b/hugolib/site_url_test.go @@ -97,7 +97,7 @@ Do not go gentle into that good night. writeSource(t, fs, filepath.Join("content", "sect1", "_index.md"), fmt.Sprintf(st, "/ss1/")) writeSource(t, fs, filepath.Join("content", "sect2", "_index.md"), fmt.Sprintf(st, "/ss2/")) - for i := 0; i < 5; i++ { + for i := range 5 { writeSource(t, fs, filepath.Join("content", "sect1", fmt.Sprintf("p%d.md", i+1)), pt) writeSource(t, fs, filepath.Join("content", "sect2", fmt.Sprintf("p%d.md", i+1)), pt) } diff --git a/hugolib/taxonomy_test.go b/hugolib/taxonomy_test.go index 26148dd1b..6577a22c1 100644 --- a/hugolib/taxonomy_test.go +++ b/hugolib/taxonomy_test.go @@ -314,7 +314,7 @@ func TestTaxonomiesNextGenLoops(t *testing.T) { `) - for i := 0; i < 10; i++ { + for i := range 10 { b.WithContent(fmt.Sprintf("page%d.md", i+1), ` --- Title: "Taxonomy!" diff --git a/hugolib/template_test.go b/hugolib/template_test.go index 055d9593c..01dfc7eba 100644 --- a/hugolib/template_test.go +++ b/hugolib/template_test.go @@ -250,7 +250,7 @@ Content. Base %d: {{ block "main" . }}FOO{{ end }} ` - for i := 0; i < numPages; i++ { + for i := range numPages { id := i + 1 b.WithContent(fmt.Sprintf("page%d.md", id), fmt.Sprintf(pageTemplate, id, id)) b.WithTemplates(fmt.Sprintf("_default/layout%d.html", id), fmt.Sprintf(singleTemplate, id)) @@ -258,7 +258,7 @@ Base %d: {{ block "main" . }}FOO{{ end }} } b.Build(BuildCfg{}) - for i := 0; i < numPages; i++ { + for i := range numPages { id := i + 1 b.AssertFileContent(fmt.Sprintf("public/page%d/index.html", id), fmt.Sprintf(`Base %d: %d`, id, id)) } diff --git a/identity/finder.go b/identity/finder.go index b1a08d061..fd1055aef 100644 --- a/identity/finder.go +++ b/identity/finder.go @@ -27,7 +27,7 @@ func NewFinder(cfg FinderConfig) *Finder { } var searchIDPool = sync.Pool{ - New: func() interface{} { + New: func() any { return &searchID{seen: make(map[Manager]bool)} }, } diff --git a/identity/identity_test.go b/identity/identity_test.go index d003caaf0..f9b04aa14 100644 --- a/identity/identity_test.go +++ b/identity/identity_test.go @@ -25,7 +25,7 @@ import ( func BenchmarkIdentityManager(b *testing.B) { createIds := func(num int) []identity.Identity { ids := make([]identity.Identity, num) - for i := 0; i < num; i++ { + for i := range num { name := fmt.Sprintf("id%d", i) ids[i] = &testIdentity{base: name, name: name} } @@ -108,10 +108,10 @@ func BenchmarkIsNotDependent(b *testing.B) { newNestedManager := func(depth, count int) identity.Manager { m1 := identity.NewManager("") - for i := 0; i < depth; i++ { + for range depth { m2 := identity.NewManager("") m1.AddIdentity(m2) - for j := 0; j < count; j++ { + for j := range count { id := fmt.Sprintf("id%d", j) m2.AddIdentity(&testIdentity{id, id, "", ""}) } diff --git a/internal/js/esbuild/resolve.go b/internal/js/esbuild/resolve.go index 8ceec97ef..a2516dbd2 100644 --- a/internal/js/esbuild/resolve.go +++ b/internal/js/esbuild/resolve.go @@ -27,6 +27,7 @@ import ( "github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources/resource" "github.com/spf13/afero" + "slices" ) const ( @@ -167,15 +168,11 @@ func createBuildPlugins(rs *resources.Spec, assetsResolver *fsResolver, depsMana } } - for _, ext := range opts.Externals { - // ESBuild will do a more thorough check for packages resolved in node_modules, - // but we need to make sure that we don't try to resolve these in the /assets folder. 
- if ext == impPath { - return api.OnResolveResult{ - Path: impPath, - External: true, - }, nil - } + if slices.Contains(opts.Externals, impPath) { + return api.OnResolveResult{ + Path: impPath, + External: true, + }, nil } if opts.ImportOnResolveFunc != nil { diff --git a/internal/warpc/warpc.go b/internal/warpc/warpc.go index 1159944a4..f2dfc6244 100644 --- a/internal/warpc/warpc.go +++ b/internal/warpc/warpc.go @@ -384,7 +384,7 @@ func newDispatcher[Q, R any](opts Options) (*dispatcherPool[Q, R], error) { } inOuts := make([]*inOut, opts.PoolSize) - for i := 0; i < opts.PoolSize; i++ { + for i := range opts.PoolSize { var stdin, stdout hugio.ReadWriteCloser stdin = hugio.NewPipeReadWriteCloser() @@ -478,7 +478,7 @@ func newDispatcher[Q, R any](opts Options) (*dispatcherPool[Q, R], error) { close(dp.donec) }() - for i := 0; i < len(inOuts); i++ { + for i := range inOuts { d := &dispatcher[Q, R]{ pending: make(map[uint32]*call[Q, R]), inOut: inOuts[i], diff --git a/internal/warpc/warpc_test.go b/internal/warpc/warpc_test.go index 613a9706f..2ee4c3de5 100644 --- a/internal/warpc/warpc_test.go +++ b/internal/warpc/warpc_test.go @@ -101,7 +101,7 @@ func TestGreet(t *testing.T) { Infof: t.Logf, } - for i := 0; i < 2; i++ { + for range 2 { func() { d, err := Start[person, greeting](opts) if err != nil { @@ -123,7 +123,7 @@ func TestGreet(t *testing.T) { }, } - for j := 0; j < 20; j++ { + for j := range 20 { inputMessage.Header.ID = uint32(j + 1) g, err := d.Execute(ctx, inputMessage) if err != nil { @@ -163,7 +163,7 @@ func TestGreetParallel(t *testing.T) { ctx := context.Background() - for j := 0; j < 5; j++ { + for j := range 5 { base := i * 100 id := uint32(base + j) @@ -217,7 +217,7 @@ func TestKatexParallel(t *testing.T) { ctx := context.Background() - for j := 0; j < 1; j++ { + for j := range 1 { base := i * 100 id := uint32(base + j) diff --git a/langs/language_test.go b/langs/language_test.go index 543f4a133..33240f3f4 100644 --- a/langs/language_test.go +++ b/langs/language_test.go @@ -29,13 +29,13 @@ func TestCollator(t *testing.T) { coll := &Collator{c: collate.New(language.English, collate.Loose)} - for i := 0; i < 10; i++ { + for range 10 { wg.Add(1) go func() { coll.Lock() defer coll.Unlock() defer wg.Done() - for j := 0; j < 10; j++ { + for range 10 { k := coll.CompareStrings("abc", "def") c.Assert(k, qt.Equals, -1) } @@ -48,7 +48,7 @@ func BenchmarkCollator(b *testing.B) { s := []string{"foo", "bar", "éntre", "baz", "qux", "quux", "corge", "grault", "garply", "waldo", "fred", "plugh", "xyzzy", "thud"} doWork := func(coll *Collator) { - for i := 0; i < len(s); i++ { + for i := range s { for j := i + 1; j < len(s); j++ { _ = coll.CompareStrings(s[i], s[j]) } diff --git a/lazy/init_test.go b/lazy/init_test.go index 96a959494..94736fab8 100644 --- a/lazy/init_test.go +++ b/lazy/init_test.go @@ -79,7 +79,7 @@ func TestInit(t *testing.T) { // Add some concurrency and randomness to verify thread safety and // init order. 
- for i := 0; i < 100; i++ { + for i := range 100 { wg.Add(1) go func(i int) { defer wg.Done() diff --git a/markup/goldmark/codeblocks/render.go b/markup/goldmark/codeblocks/render.go index 4164f0e0a..c29632b90 100644 --- a/markup/goldmark/codeblocks/render.go +++ b/markup/goldmark/codeblocks/render.go @@ -77,7 +77,7 @@ func (r *htmlRenderer) renderCodeBlock(w util.BufWriter, src []byte, node ast.No var buff bytes.Buffer l := n.Lines().Len() - for i := 0; i < l; i++ { + for i := range l { line := n.Lines().At(i) buff.Write(line.Value(src)) } diff --git a/markup/goldmark/hugocontext/hugocontext.go b/markup/goldmark/hugocontext/hugocontext.go index e68acb8c3..7a556083c 100644 --- a/markup/goldmark/hugocontext/hugocontext.go +++ b/markup/goldmark/hugocontext/hugocontext.go @@ -182,7 +182,7 @@ func (r *hugoContextRenderer) renderHTMLBlock( if entering { if r.Unsafe { l := n.Lines().Len() - for i := 0; i < l; i++ { + for i := range l { line := n.Lines().At(i) linev := line.Value(source) var stripped bool @@ -226,7 +226,7 @@ func (r *hugoContextRenderer) renderRawHTML( n := node.(*ast.RawHTML) l := n.Segments.Len() if r.Unsafe { - for i := 0; i < l; i++ { + for i := range l { segment := n.Segments.At(i) _, _ = w.Write(segment.Value(source)) } diff --git a/markup/goldmark/passthrough/passthrough.go b/markup/goldmark/passthrough/passthrough.go index 4d72e7c80..c56842f3d 100644 --- a/markup/goldmark/passthrough/passthrough.go +++ b/markup/goldmark/passthrough/passthrough.go @@ -110,7 +110,7 @@ func (r *htmlRenderer) renderPassthroughBlock(w util.BufWriter, src []byte, node case (*passthrough.PassthroughBlock): l := nn.Lines().Len() var buff bytes.Buffer - for i := 0; i < l; i++ { + for i := range l { line := nn.Lines().At(i) buff.Write(line.Value(src)) } diff --git a/markup/rst/convert.go b/markup/rst/convert.go index 398f5eb0c..5bb0adb15 100644 --- a/markup/rst/convert.go +++ b/markup/rst/convert.go @@ -100,10 +100,7 @@ func (c *rstConverter) getRstContent(src []byte, ctx converter.DocumentContext) bodyEnd := bytes.Index(result, []byte("\n")) if bodyEnd < 0 || bodyEnd >= len(result) { - bodyEnd = len(result) - 1 - if bodyEnd < 0 { - bodyEnd = 0 - } + bodyEnd = max(len(result)-1, 0) } return result[bodyStart+7 : bodyEnd], err diff --git a/markup/tableofcontents/tableofcontents.go b/markup/tableofcontents/tableofcontents.go index 741179d96..6c40c9a59 100644 --- a/markup/tableofcontents/tableofcontents.go +++ b/markup/tableofcontents/tableofcontents.go @@ -250,7 +250,7 @@ func (b *tocBuilder) writeHeading(level, indent int, h *Heading) { } func (b *tocBuilder) indent(n int) { - for i := 0; i < n; i++ { + for range n { b.s.WriteString(" ") } } diff --git a/markup/tableofcontents/tableofcontents_test.go b/markup/tableofcontents/tableofcontents_test.go index 9ec7ec293..b07d9e3ad 100644 --- a/markup/tableofcontents/tableofcontents_test.go +++ b/markup/tableofcontents/tableofcontents_test.go @@ -196,7 +196,7 @@ func TestTocMisc(t *testing.T) { func BenchmarkToc(b *testing.B) { newTocs := func(n int) []*Fragments { var tocs []*Fragments - for i := 0; i < n; i++ { + for range n { tocs = append(tocs, newTestToc()) } return tocs diff --git a/media/config.go b/media/config.go index e50d8499d..394159d04 100644 --- a/media/config.go +++ b/media/config.go @@ -26,6 +26,7 @@ import ( "github.com/mitchellh/mapstructure" "github.com/spf13/cast" + "slices" ) // DefaultTypes is the default media types supported by Hugo. @@ -46,7 +47,7 @@ func init() { // Initialize the Builtin types with values from DefaultTypes. 
v := reflect.ValueOf(&Builtin).Elem() - for i := 0; i < v.NumField(); i++ { + for i := range v.NumField() { f := v.Field(i) fieldName := v.Type().Field(i).Name builtinType := f.Interface().(Type) @@ -149,12 +150,7 @@ func (t ContentTypes) IsIndexContentFile(filename string) bool { // IsHTMLSuffix returns whether the given suffix is a HTML media type. func (t ContentTypes) IsHTMLSuffix(suffix string) bool { - for _, s := range t.HTML.Suffixes() { - if s == suffix { - return true - } - } - return false + return slices.Contains(t.HTML.Suffixes(), suffix) } // Types is a slice of media types. diff --git a/modules/client.go b/modules/client.go index 011d43014..a8998bb8d 100644 --- a/modules/client.go +++ b/modules/client.go @@ -380,14 +380,12 @@ func (c *Client) Verify(clean bool) error { if err != nil { if clean { m := verifyErrorDirRe.FindAllStringSubmatch(err.Error(), -1) - if m != nil { - for i := 0; i < len(m); i++ { - c, err := hugofs.MakeReadableAndRemoveAllModulePkgDir(c.fs, m[i][1]) - if err != nil { - return err - } - fmt.Println("Cleaned", c) + for i := range m { + c, err := hugofs.MakeReadableAndRemoveAllModulePkgDir(c.fs, m[i][1]) + if err != nil { + return err } + fmt.Println("Cleaned", c) } // Try to verify it again. err = c.runVerify() diff --git a/navigation/menu.go b/navigation/menu.go index 3802014b1..a971f2e74 100644 --- a/navigation/menu.go +++ b/navigation/menu.go @@ -25,6 +25,7 @@ import ( "github.com/mitchellh/mapstructure" "github.com/spf13/cast" + "slices" ) var smc = newMenuCache() @@ -267,7 +268,7 @@ func (m Menu) Reverse() Menu { // Clone clones the menu entries. // This is for internal use only. func (m Menu) Clone() Menu { - return append(Menu(nil), m...) + return slices.Clone(m) } func DecodeConfig(in any) (*config.ConfigNamespace[map[string]MenuConfig, Menus], error) { diff --git a/navigation/menu_cache.go b/navigation/menu_cache.go index b2c46f7ac..065781780 100644 --- a/navigation/menu_cache.go +++ b/navigation/menu_cache.go @@ -14,6 +14,7 @@ package navigation import ( + "slices" "sync" ) @@ -84,7 +85,7 @@ func (c *menuCache) getP(key string, apply func(m *Menu), menuLists ...Menu) (Me } m := menuLists[0] - menuCopy := append(Menu(nil), m...) 
+	menuCopy := slices.Clone(m)
 
 	if apply != nil {
 		apply(&menuCopy)
diff --git a/navigation/menu_cache_test.go b/navigation/menu_cache_test.go
index 9943db517..8fa17ffc3 100644
--- a/navigation/menu_cache_test.go
+++ b/navigation/menu_cache_test.go
@@ -23,7 +23,7 @@ import (
 
 func createSortTestMenu(num int) Menu {
 	menu := make(Menu, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		m := &MenuEntry{}
 		menu[i] = m
 	}
@@ -49,11 +49,11 @@ func TestMenuCache(t *testing.T) {
 
 	var testMenuSets []Menu
 
-	for i := 0; i < 50; i++ {
+	for i := range 50 {
 		testMenuSets = append(testMenuSets, createSortTestMenu(i+1))
 	}
 
-	for j := 0; j < 100; j++ {
+	for range 100 {
 		wg.Add(1)
 		go func() {
 			defer wg.Done()
diff --git a/output/layouts/layout.go b/output/layouts/layout.go
index 09606dba1..79f718dda 100644
--- a/output/layouts/layout.go
+++ b/output/layouts/layout.go
@@ -321,7 +321,7 @@ func uniqueStringsReuse(s []string) []string {
 	for i, val := range s {
 		var seen bool
 
-		for j := 0; j < i; j++ {
+		for j := range i {
 			if s[j] == val {
 				seen = true
 				break
diff --git a/parser/lowercase_camel_json.go b/parser/lowercase_camel_json.go
index 468c1a8fe..9d89ff020 100644
--- a/parser/lowercase_camel_json.go
+++ b/parser/lowercase_camel_json.go
@@ -99,7 +99,7 @@ func (c ReplacingJSONMarshaller) MarshalJSON() ([]byte, error) {
 
 	if c.OmitEmpty {
 		// It's tricky to do this with a regexp, so convert it to a map, remove zero values and convert back.
-		var m map[string]interface{}
+		var m map[string]any
 		err = json.Unmarshal(converted, &m)
 		if err != nil {
 			return nil, err
@@ -111,9 +111,9 @@ func (c ReplacingJSONMarshaller) MarshalJSON() ([]byte, error) {
 					delete(m, k)
 				} else {
 					switch vv := v.(type) {
-					case map[string]interface{}:
+					case map[string]any:
 						removeZeroVAlues(vv)
-					case []interface{}:
+					case []any:
 						for _, vvv := range vv {
 							if m, ok := vvv.(map[string]any); ok {
 								removeZeroVAlues(m)
diff --git a/parser/pageparser/pagelexer_intro.go b/parser/pageparser/pagelexer_intro.go
index 334d9a79c..a68a9e03a 100644
--- a/parser/pageparser/pagelexer_intro.go
+++ b/parser/pageparser/pagelexer_intro.go
@@ -123,7 +123,7 @@ LOOP:
 
 // Handle YAML or TOML front matter.
 func (l *pageLexer) lexFrontMatterSection(tp ItemType, delimr rune, name string, delim []byte) stateFunc {
-	for i := 0; i < 2; i++ {
+	for range 2 {
 		if r := l.next(); r != delimr {
 			return l.errorf("invalid %s delimiter", name)
 		}
diff --git a/parser/pageparser/pageparser.go b/parser/pageparser/pageparser.go
index 1cf87bb70..5c6f4b2ff 100644
--- a/parser/pageparser/pageparser.go
+++ b/parser/pageparser/pageparser.go
@@ -192,7 +192,7 @@ func (t *Iterator) PeekWalk(walkFn func(item Item) bool) {
 // Consume is a convenience method to consume the next n tokens,
 // but back off Errors and EOF.
 func (t *Iterator) Consume(cnt int) {
-	for i := 0; i < cnt; i++ {
+	for range cnt {
 		token := t.Next()
 		if token.Type == tError || token.Type == tEOF {
 			t.Backup()
diff --git a/related/inverted_index.go b/related/inverted_index.go
index 9197a6135..b8f1ad3e2 100644
--- a/related/inverted_index.go
+++ b/related/inverted_index.go
@@ -292,7 +292,7 @@ func (r *rank) addWeight(w int) {
 }
 
 var rankPool = sync.Pool{
-	New: func() interface{} {
+	New: func() any {
 		return &rank{}
 	},
 }
@@ -433,7 +433,7 @@ func (cfg IndexConfig) ToKeywords(v any) ([]Keyword, error) {
 		keywords = append(keywords, cfg.stringToKeyword(vv))
 	case []string:
 		vvv := make([]Keyword, len(vv))
-		for i := 0; i < len(vvv); i++ {
+		for i := range vvv {
 			vvv[i] = cfg.stringToKeyword(vv[i])
 		}
 		keywords = append(keywords, vvv...)
@@ -623,7 +623,7 @@ type Keyword interface {
 func (cfg IndexConfig) StringsToKeywords(s ...string) []Keyword {
 	kw := make([]Keyword, len(s))
 
-	for i := 0; i < len(s); i++ {
+	for i := range s {
 		kw[i] = cfg.stringToKeyword(s[i])
 	}
 
diff --git a/related/inverted_index_test.go b/related/inverted_index_test.go
index 568486d1f..d57237e11 100644
--- a/related/inverted_index_test.go
+++ b/related/inverted_index_test.go
@@ -65,7 +65,7 @@ func (d *testDoc) addKeywords(name string, keywords ...string) *testDoc {
 	for k, v := range keywordm {
 		keywords := make([]Keyword, len(v))
-		for i := 0; i < len(v); i++ {
+		for i := range v {
 			keywords[i] = StringKeyword(v[i])
 		}
 		d.keywords[k] = keywords
@@ -221,7 +221,7 @@ func TestSearch(t *testing.T) {
 		doc := newTestDocWithDate("keywords", date, "a", "b")
 		doc.name = "thedoc"
 
-		for i := 0; i < 10; i++ {
+		for i := range 10 {
 			docc := *doc
 			docc.name = fmt.Sprintf("doc%d", i)
 			idx.Add(context.Background(), &docc)
@@ -230,7 +230,7 @@ func TestSearch(t *testing.T) {
 		m, err := idx.Search(context.Background(), SearchOpts{Document: doc, Indices: []string{"keywords"}})
 		c.Assert(err, qt.IsNil)
 		c.Assert(len(m), qt.Equals, 10)
-		for i := 0; i < 10; i++ {
+		for i := range 10 {
 			c.Assert(m[i].Name(), qt.Equals, fmt.Sprintf("doc%d", i))
 		}
 	})
@@ -311,11 +311,11 @@ func BenchmarkRelatedNewIndex(b *testing.B) {
 	pages := make([]*testDoc, 100)
 	numkeywords := 30
 	allKeywords := make([]string, numkeywords)
-	for i := 0; i < numkeywords; i++ {
+	for i := range numkeywords {
 		allKeywords[i] = fmt.Sprintf("keyword%d", i+1)
 	}
 
-	for i := 0; i < len(pages); i++ {
+	for i := range pages {
 		start := rand.Intn(len(allKeywords))
 		end := start + 3
 		if end >= len(allKeywords) {
@@ -356,7 +356,7 @@ func BenchmarkRelatedNewIndex(b *testing.B) {
 	for i := 0; i < b.N; i++ {
 		idx := NewInvertedIndex(cfg)
 		docs := make([]Document, len(pages))
-		for i := 0; i < len(pages); i++ {
+		for i := range pages {
 			docs[i] = pages[i]
 		}
 		idx.Add(context.Background(), docs...)
@@ -372,7 +372,7 @@ func BenchmarkRelatedMatchesIn(b *testing.B) {
 	docs := make([]*testDoc, 1000)
 	numkeywords := 20
 	allKeywords := make([]string, numkeywords)
-	for i := 0; i < numkeywords; i++ {
+	for i := range numkeywords {
 		allKeywords[i] = fmt.Sprintf("keyword%d", i+1)
 	}
 
@@ -386,7 +386,7 @@ func BenchmarkRelatedMatchesIn(b *testing.B) {
 
 	idx := NewInvertedIndex(cfg)
 
-	for i := 0; i < len(docs); i++ {
+	for i := range docs {
 		start := rand.Intn(len(allKeywords))
 		end := start + 3
 		if end >= len(allKeywords) {
diff --git a/related/related_integration_test.go b/related/related_integration_test.go
index 291bfdbf7..6d3c6d6de 100644
--- a/related/related_integration_test.go
+++ b/related/related_integration_test.go
@@ -160,7 +160,7 @@ keywords: ['k%d']
 ---
 `
 
-	for i := 0; i < 32; i++ {
+	for range 32 {
 		base += fmt.Sprintf("\n## Title %d", rand.Intn(100))
 	}
 
diff --git a/releaser/releaser.go b/releaser/releaser.go
index f2244842a..4c3db2c14 100644
--- a/releaser/releaser.go
+++ b/releaser/releaser.go
@@ -230,10 +230,10 @@ func git(args ...string) (string, error) {
 	return string(out), nil
 }
 
-func logf(format string, args ...interface{}) {
+func logf(format string, args ...any) {
 	fmt.Fprintf(os.Stderr, format, args...)
 }
 
-func logln(args ...interface{}) {
+func logln(args ...any) {
 	fmt.Fprintln(os.Stderr, args...)
 }
diff --git a/resources/image_test.go b/resources/image_test.go
index 1ba5a149a..ee5de8bec 100644
--- a/resources/image_test.go
+++ b/resources/image_test.go
@@ -348,13 +348,13 @@ func TestImageTransformConcurrent(t *testing.T) {
 
 	image := fetchImageForSpec(spec, c, "sunset.jpg")
 
-	for i := 0; i < 4; i++ {
+	for i := range 4 {
 		wg.Add(1)
 		go func(id int) {
 			defer wg.Done()
-			for j := 0; j < 5; j++ {
+			for j := range 5 {
 				img := image
-				for k := 0; k < 2; k++ {
+				for k := range 2 {
 					r1, err := img.Resize(fmt.Sprintf("%dx", id-k))
 					if err != nil {
 						t.Error(err)
@@ -499,7 +499,7 @@ func BenchmarkImageExif(b *testing.B) {
 	b.StartTimer()
 
 	for i := 0; i < b.N; i++ {
-		for j := 0; j < 10; j++ {
+		for range 10 {
 			getAndCheckExif(c, images[i])
 		}
 	}
diff --git a/resources/images/color.go b/resources/images/color.go
index e2ff2377f..c7f3b9eb6 100644
--- a/resources/images/color.go
+++ b/resources/images/color.go
@@ -22,6 +22,7 @@ import (
 	"strings"
 
 	"github.com/gohugoio/hugo/common/hstrings"
+	"slices"
 )
 
 type colorGoProvider interface {
@@ -91,11 +92,8 @@ func (c Color) toSRGB(i uint8) float64 {
 // that the palette is valid for the relevant format.
 func AddColorToPalette(c color.Color, p color.Palette) color.Palette {
 	var found bool
-	for _, cc := range p {
-		if c == cc {
-			found = true
-			break
-		}
+	if slices.Contains(p, c) {
+		found = true
 	}
 
 	if !found {
diff --git a/resources/images/imagetesting/testing.go b/resources/images/imagetesting/testing.go
index 22a2317a1..25f2ab087 100644
--- a/resources/images/imagetesting/testing.go
+++ b/resources/images/imagetesting/testing.go
@@ -209,7 +209,7 @@ func goldenEqual(img1, img2 *image.NRGBA) bool {
 	if len(img1.Pix) != len(img2.Pix) {
 		return false
 	}
-	for i := 0; i < len(img1.Pix); i++ {
+	for i := range img1.Pix {
 		diff := int(img1.Pix[i]) - int(img2.Pix[i])
 		if diff < 0 {
 			diff = -diff
diff --git a/resources/page/page_matcher.go b/resources/page/page_matcher.go
index 8155be99d..1e98b0836 100644
--- a/resources/page/page_matcher.go
+++ b/resources/page/page_matcher.go
@@ -24,6 +24,7 @@ import (
 	"github.com/gohugoio/hugo/hugofs/glob"
 	"github.com/gohugoio/hugo/resources/kinds"
 	"github.com/mitchellh/mapstructure"
+	"slices"
 )
 
 // A PageMatcher can be used to match a Page with Glob patterns.
@@ -208,13 +209,7 @@ func decodePageMatcher(m any, v *PageMatcher) error {
 	v.Kind = strings.ToLower(v.Kind)
 	if v.Kind != "" {
 		g, _ := glob.GetGlob(v.Kind)
-		found := false
-		for _, k := range kinds.AllKindsInPages {
-			if g.Match(k) {
-				found = true
-				break
-			}
-		}
+		found := slices.ContainsFunc(kinds.AllKindsInPages, g.Match)
 		if !found {
 			return fmt.Errorf("%q did not match a valid Page Kind", v.Kind)
 		}
diff --git a/resources/page/pagemeta/page_frontmatter.go b/resources/page/pagemeta/page_frontmatter.go
index c26662fc2..7f98b1b88 100644
--- a/resources/page/pagemeta/page_frontmatter.go
+++ b/resources/page/pagemeta/page_frontmatter.go
@@ -539,7 +539,7 @@ func expandDefaultValues(values []string, defaults []string) []string {
 
 func toLowerSlice(in any) []string {
 	out := cast.ToStringSlice(in)
-	for i := 0; i < len(out); i++ {
+	for i := range out {
 		out[i] = strings.ToLower(out[i])
 	}
 
diff --git a/resources/page/pagemeta/page_frontmatter_test.go b/resources/page/pagemeta/page_frontmatter_test.go
index 18f9e5aa1..fe9d3d99c 100644
--- a/resources/page/pagemeta/page_frontmatter_test.go
+++ b/resources/page/pagemeta/page_frontmatter_test.go
@@ -31,7 +31,7 @@ import (
 func newTestFd() *pagemeta.FrontMatterDescriptor {
 	return &pagemeta.FrontMatterDescriptor{
 		PageConfig: &pagemeta.PageConfig{
-			Params: make(map[string]interface{}),
+			Params: make(map[string]any),
 		},
 		Location: time.UTC,
 	}
diff --git a/resources/page/pages_cache.go b/resources/page/pages_cache.go
index 9435cb308..5300d5521 100644
--- a/resources/page/pages_cache.go
+++ b/resources/page/pages_cache.go
@@ -14,6 +14,7 @@ package page
 
 import (
+	"slices"
 	"sync"
 )
 
@@ -92,7 +93,7 @@ func (c *pageCache) getP(key string, apply func(p *Pages), pageLists ...Pages) (
 	}
 
 	p := pageLists[0]
-	pagesCopy := append(Pages(nil), p...)
+	pagesCopy := slices.Clone(p)
 
 	if apply != nil {
 		apply(&pagesCopy)
@@ -126,7 +127,7 @@ func pagesEqual(p1, p2 Pages) bool {
 		return true
 	}
 
-	for i := 0; i < len(p1); i++ {
+	for i := range p1 {
 		if p1[i] != p2[i] {
 			return false
 		}
diff --git a/resources/page/pages_cache_test.go b/resources/page/pages_cache_test.go
index 825bdc31f..9e6af1c28 100644
--- a/resources/page/pages_cache_test.go
+++ b/resources/page/pages_cache_test.go
@@ -41,11 +41,11 @@ func TestPageCache(t *testing.T) {
 
 	var testPageSets []Pages
 
-	for i := 0; i < 50; i++ {
+	for i := range 50 {
 		testPageSets = append(testPageSets, createSortTestPages(i+1))
 	}
 
-	for j := 0; j < 100; j++ {
+	for range 100 {
 		wg.Add(1)
 		go func() {
 			defer wg.Done()
@@ -75,7 +75,7 @@ func TestPageCache(t *testing.T) {
 func BenchmarkPageCache(b *testing.B) {
 	cache := newPageCache()
 	pages := make(Pages, 30)
-	for i := 0; i < 30; i++ {
+	for i := range 30 {
 		pages[i] = &testPage{title: "p" + strconv.Itoa(i)}
 	}
 	key := "key"
diff --git a/resources/page/pages_sort_test.go b/resources/page/pages_sort_test.go
index 12fa4a1e1..70c7bc8a8 100644
--- a/resources/page/pages_sort_test.go
+++ b/resources/page/pages_sort_test.go
@@ -139,7 +139,7 @@ func TestLimit(t *testing.T) {
 	p := createSortTestPages(10)
 	firstFive := p.Limit(5)
 	c.Assert(len(firstFive), qt.Equals, 5)
-	for i := 0; i < 5; i++ {
+	for i := range 5 {
 		c.Assert(firstFive[i], qt.Equals, p[i])
 	}
 	c.Assert(p.Limit(10), eq, p)
@@ -197,7 +197,7 @@ func TestPageSortByParamNumeric(t *testing.T) {
 
 	n := 10
 	unsorted := createSortTestPages(n)
-	for i := 0; i < n; i++ {
+	for i := range n {
 		v := 100 - i
 		if i%2 == 0 {
 			v = 100.0 - i
@@ -269,7 +269,7 @@ func setSortVals(dates [4]time.Time, titles [4]string, weights [4]int, pages Pag
 
 func createSortTestPages(num int) Pages {
 	pages := make(Pages, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		p := newTestPage()
 		p.path = fmt.Sprintf("/x/y/p%d.md", i)
 		p.title = fmt.Sprintf("Title %d", i%((num+1)/2))
diff --git a/resources/page/pagination_test.go b/resources/page/pagination_test.go
index d3a770eaf..64ee9a998 100644
--- a/resources/page/pagination_test.go
+++ b/resources/page/pagination_test.go
@@ -28,7 +28,7 @@ func TestSplitPages(t *testing.T) {
 	chunks := splitPages(pages, 5)
 	c.Assert(len(chunks), qt.Equals, 5)
 
-	for i := 0; i < 4; i++ {
+	for i := range 4 {
 		c.Assert(chunks[i].Len(), qt.Equals, 5)
 	}
 
diff --git a/resources/page/site.go b/resources/page/site.go
index 47e1454c8..3c9e9e78c 100644
--- a/resources/page/site.go
+++ b/resources/page/site.go
@@ -105,7 +105,7 @@ type Site interface {
 	Config() SiteConfig
 
 	// Deprecated: Use taxonomies instead.
-	Author() map[string]interface{}
+	Author() map[string]any
 
 	// Deprecated: Use taxonomies instead.
 	Authors() AuthorList
@@ -173,7 +173,7 @@ func (s *siteWrapper) Social() map[string]string {
 }
 
 // Deprecated: Use taxonomies instead.
-func (s *siteWrapper) Author() map[string]interface{} {
+func (s *siteWrapper) Author() map[string]any {
 	return s.s.Author()
 }
 
@@ -316,7 +316,7 @@ type testSite struct {
 }
 
 // Deprecated: Use taxonomies instead.
-func (s testSite) Author() map[string]interface{} {
+func (s testSite) Author() map[string]any {
 	return nil
 }
 
diff --git a/resources/page/testhelpers_test.go b/resources/page/testhelpers_test.go
index 59b8cf0e8..19bdc0068 100644
--- a/resources/page/testhelpers_test.go
+++ b/resources/page/testhelpers_test.go
@@ -587,7 +587,7 @@ func (p *testPage) WordCount(context.Context) int {
 
 func createTestPages(num int) Pages {
 	pages := make(Pages, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		m := &testPage{
 			path:   fmt.Sprintf("/x/y/z/p%d.md", i),
 			weight: 5,
diff --git a/resources/postpub/fields.go b/resources/postpub/fields.go
index 13b2963ce..12b3be2eb 100644
--- a/resources/postpub/fields.go
+++ b/resources/postpub/fields.go
@@ -31,7 +31,7 @@ func structToMap(s any) map[string]any {
 	m := make(map[string]any)
 	t := reflect.TypeOf(s)
 
-	for i := 0; i < t.NumMethod(); i++ {
+	for i := range t.NumMethod() {
 		method := t.Method(i)
 		if method.PkgPath != "" {
 			continue
@@ -41,7 +41,7 @@ func structToMap(s any) map[string]any {
 		}
 	}
 
-	for i := 0; i < t.NumField(); i++ {
+	for i := range t.NumField() {
 		field := t.Field(i)
 		if field.PkgPath != "" {
 			continue
diff --git a/resources/resource/resources.go b/resources/resource/resources.go
index 480c703b5..6b7311bad 100644
--- a/resources/resource/resources.go
+++ b/resources/resource/resources.go
@@ -24,6 +24,7 @@ import (
 	"github.com/gohugoio/hugo/common/paths"
 	"github.com/gohugoio/hugo/hugofs/glob"
 	"github.com/spf13/cast"
+	"slices"
 )
 
 var _ ResourceFinder = (*Resources)(nil)
@@ -222,7 +223,7 @@ type translatedResource interface {
 
 // MergeByLanguage adds missing translations in r1 from r2.
 func (r Resources) MergeByLanguage(r2 Resources) Resources {
-	result := append(Resources(nil), r...)
+	result := slices.Clone(r)
 	m := make(map[string]bool)
 	for _, rr := range r {
 		if translated, ok := rr.(translatedResource); ok {
diff --git a/resources/resource_factories/bundler/bundler.go b/resources/resource_factories/bundler/bundler.go
index 8b268ebbe..aef644b7f 100644
--- a/resources/resource_factories/bundler/bundler.go
+++ b/resources/resource_factories/bundler/bundler.go
@@ -141,7 +141,7 @@ func (c *Client) Concat(targetPath string, r resource.Resources) (resource.Resou
 		if resolvedm.MainType == media.Builtin.JavascriptType.MainType && resolvedm.SubType == media.Builtin.JavascriptType.SubType {
 			readers := make([]hugio.ReadSeekCloser, 2*len(rcsources)-1)
 			j := 0
-			for i := 0; i < len(rcsources); i++ {
+			for i := range rcsources {
 				if i > 0 {
 					readers[j] = hugio.NewReadSeekerNoOpCloserFromString("\n;\n")
 					j++
diff --git a/resources/resource_factories/bundler/bundler_test.go b/resources/resource_factories/bundler/bundler_test.go
index 17a74cc88..66f0c2340 100644
--- a/resources/resource_factories/bundler/bundler_test.go
+++ b/resources/resource_factories/bundler/bundler_test.go
@@ -31,7 +31,7 @@ func TestMultiReadSeekCloser(t *testing.T) {
 		hugio.NewReadSeekerNoOpCloserFromString("C"),
 	)
 
-	for i := 0; i < 3; i++ {
+	for range 3 {
 		s1 := helpers.ReaderToString(rc)
 		c.Assert(s1, qt.Equals, "ABC")
 		_, err := rc.Seek(0, 0)
diff --git a/resources/resource_factories/create/create_integration_test.go b/resources/resource_factories/create/create_integration_test.go
index faa2de565..0ed43721c 100644
--- a/resources/resource_factories/create/create_integration_test.go
+++ b/resources/resource_factories/create/create_integration_test.go
@@ -136,7 +136,7 @@ mediaTypes = ['text/plain']
 {{ end }}
 `
 
-	for i := 0; i < numPages; i++ {
+	for i := range numPages {
 		filesTemplate += fmt.Sprintf("-- content/post/p%d.md --\n", i)
fmt.Sprintf("-- content/post/p%d.md --\n", i) } @@ -153,7 +153,7 @@ mediaTypes = ['text/plain'] b.Build() - for i := 0; i < numPages; i++ { + for i := range numPages { b.AssertFileContent(fmt.Sprintf("public/post/p%d/index.html", i), fmt.Sprintf("Content: Response for /post/p%d/.", i)) } }) diff --git a/resources/resource_metadata.go b/resources/resource_metadata.go index 8861ded5c..2a4faa315 100644 --- a/resources/resource_metadata.go +++ b/resources/resource_metadata.go @@ -28,6 +28,7 @@ import ( "github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/common/paths" + maps0 "maps" ) var ( @@ -85,11 +86,9 @@ func (r *metaResource) setName(name string) { func (r *metaResource) updateParams(params map[string]any) { if r.params == nil { - r.params = make(map[string]interface{}) - } - for k, v := range params { - r.params[k] = v + r.params = make(map[string]any) } + maps0.Copy(r.params, params) r.changed = true } diff --git a/resources/resource_transformers/cssjs/postcss_integration_test.go b/resources/resource_transformers/cssjs/postcss_integration_test.go index 8f2132789..a05f340fd 100644 --- a/resources/resource_transformers/cssjs/postcss_integration_test.go +++ b/resources/resource_transformers/cssjs/postcss_integration_test.go @@ -239,7 +239,7 @@ func TestTransformPostCSSResourceCacheWithPathInBaseURL(t *testing.T) { c.Assert(err, qt.IsNil) c.Cleanup(clean) - for i := 0; i < 2; i++ { + for i := range 2 { files := postCSSIntegrationTestFiles if i == 1 { diff --git a/resources/resources_integration_test.go b/resources/resources_integration_test.go index 0c45b775a..0d02b45d5 100644 --- a/resources/resources_integration_test.go +++ b/resources/resources_integration_test.go @@ -122,7 +122,7 @@ iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAA ` - for i := 0; i < 3; i++ { + for range 3 { b := hugolib.NewIntegrationTestBuilder( hugolib.IntegrationTestConfig{ diff --git a/resources/transform_test.go b/resources/transform_test.go index 79d4841b1..eac85ada9 100644 --- a/resources/transform_test.go +++ b/resources/transform_test.go @@ -207,7 +207,7 @@ func TestTransform(t *testing.T) { fs := afero.NewMemMapFs() - for i := 0; i < 2; i++ { + for i := range 2 { spec := newTestResourceSpec(specDescriptor{c: c, fs: fs}) r := createTransformer(c, spec, "f1.txt", "color is blue") @@ -337,12 +337,12 @@ func TestTransform(t *testing.T) { const count = 26 // A-Z transformations := make([]resources.ResourceTransformation, count) - for i := 0; i < count; i++ { + for i := range count { transformations[i] = createContentReplacer(fmt.Sprintf("t%d", i), fmt.Sprint(i), string(rune(i+65))) } var countstr strings.Builder - for i := 0; i < count; i++ { + for i := range count { countstr.WriteString(fmt.Sprint(i)) } @@ -405,18 +405,18 @@ func TestTransform(t *testing.T) { transformers := make([]resources.Transformer, 10) transformations := make([]resources.ResourceTransformation, 10) - for i := 0; i < 10; i++ { + for i := range 10 { transformers[i] = createTransformer(c, spec, fmt.Sprintf("f%d.txt", i), fmt.Sprintf("color is %d", i)) transformations[i] = createContentReplacer("test", strconv.Itoa(i), "blue") } var wg sync.WaitGroup - for i := 0; i < 13; i++ { + for i := range 13 { wg.Add(1) go func(i int) { defer wg.Done() - for j := 0; j < 23; j++ { + for j := range 23 { id := (i + j) % 10 tr, err := transformers[id].Transform(transformations[id]) c.Assert(err, qt.IsNil) diff --git a/tpl/collections/apply.go b/tpl/collections/apply.go index 3d50395b9..39b66a27f 100644 --- 
+++ b/tpl/collections/apply.go
@@ -48,7 +48,7 @@ func (ns *Namespace) Apply(ctx context.Context, c any, fname string, args ...any
 	switch seqv.Kind() {
 	case reflect.Array, reflect.Slice:
 		r := make([]any, seqv.Len())
-		for i := 0; i < seqv.Len(); i++ {
+		for i := range seqv.Len() {
 			vv := seqv.Index(i)
 
 			vvv, err := applyFnToThis(ctx, fnv, vv, args...)
@@ -91,7 +91,7 @@ func applyFnToThis(ctx context.Context, fn, this reflect.Value, args ...any) (re
 		return reflect.ValueOf(nil), errors.New("Too many arguments")
 	}*/
 
-	for i := 0; i < num; i++ {
+	for i := range num {
 		// AssignableTo reports whether xt is assignable to type targ.
 		if xt, targ := n[i].Type(), fn.Type().In(i); !xt.AssignableTo(targ) {
 			return reflect.ValueOf(nil), errors.New("called apply using " + xt.String() + " as type " + targ.String())
diff --git a/tpl/collections/collections.go b/tpl/collections/collections.go
index c1e7286ce..0653a453a 100644
--- a/tpl/collections/collections.go
+++ b/tpl/collections/collections.go
@@ -125,7 +125,7 @@ func (ns *Namespace) Delimit(ctx context.Context, l, sep any, last ...any) (stri
 		lv = reflect.ValueOf(sortSeq)
 		fallthrough
 	case reflect.Array, reflect.Slice, reflect.String:
-		for i := 0; i < lv.Len(); i++ {
+		for i := range lv.Len() {
 			val := lv.Index(i).Interface()
 			valStr, err := cast.ToStringE(val)
 			if err != nil {
@@ -165,7 +165,7 @@ func (ns *Namespace) Dictionary(values ...any) (map[string]any, error) {
 		case string:
 			key = v
 		case []string:
-			for i := 0; i < len(v)-1; i++ {
+			for i := range len(v) - 1 {
 				key = v[i]
 				var m map[string]any
 				v, found := dict[key]
@@ -235,7 +235,7 @@ func (ns *Namespace) In(l any, v any) (bool, error) {
 
 	switch lv.Kind() {
 	case reflect.Array, reflect.Slice:
-		for i := 0; i < lv.Len(); i++ {
+		for i := range lv.Len() {
 			lvv, isNil := indirectInterface(lv.Index(i))
 			if isNil {
 				continue
@@ -277,13 +277,13 @@ func (ns *Namespace) Intersect(l1, l2 any) (any, error) {
 		ins = &intersector{r: reflect.MakeSlice(l1v.Type(), 0, 0), seen: make(map[any]bool)}
 		switch l2v.Kind() {
 		case reflect.Array, reflect.Slice:
-			for i := 0; i < l1v.Len(); i++ {
+			for i := range l1v.Len() {
 				l1vv := l1v.Index(i)
 				if !l1vv.Type().Comparable() {
 					return make([]any, 0), errors.New("intersect does not support slices or arrays of uncomparable types")
 				}
 
-				for j := 0; j < l2v.Len(); j++ {
+				for j := range l2v.Len() {
 					l2vv := l2v.Index(j)
 					if !l2vv.Type().Comparable() {
 						return make([]any, 0), errors.New("intersect does not support slices or arrays of uncomparable types")
@@ -590,7 +590,7 @@ func (ns *Namespace) Union(l1, l2 any) (any, error) {
 		isNil bool
 	)
 
-	for i := 0; i < l1v.Len(); i++ {
+	for i := range l1v.Len() {
 		l1vv, isNil = indirectInterface(l1v.Index(i))
 
 		if !l1vv.Type().Comparable() {
@@ -610,7 +610,7 @@ func (ns *Namespace) Union(l1, l2 any) (any, error) {
 		}
 	}
 
-	for j := 0; j < l2v.Len(); j++ {
+	for j := range l2v.Len() {
 		l2vv := l2v.Index(j)
 
 		switch kind := l1vv.Kind(); {
@@ -661,7 +661,7 @@ func (ns *Namespace) Uniq(l any) (any, error) {
 
 	seen := make(map[any]bool)
 
-	for i := 0; i < v.Len(); i++ {
+	for i := range v.Len() {
 		ev, _ := indirectInterface(v.Index(i))
 		key := normalize(ev)
 
diff --git a/tpl/collections/collections_integration_test.go b/tpl/collections/collections_integration_test.go
index 2aabee03e..cc60770f9 100644
--- a/tpl/collections/collections_integration_test.go
+++ b/tpl/collections/collections_integration_test.go
@@ -52,7 +52,7 @@ Desc: {{ sort (sort $values "b" "desc") "a" "desc" }}
 `
 
-	for i := 0; i < 4; i++ {
+	for range 4 {
 
 		b := hugolib.NewIntegrationTestBuilder(
 			hugolib.IntegrationTestConfig{
@@ -122,7 +122,7 @@ func TestAppendNilsToSliceWithNils(t *testing.T) {
 `
 
-	for i := 0; i < 4; i++ {
+	for range 4 {
 
 		b := hugolib.NewIntegrationTestBuilder(
 			hugolib.IntegrationTestConfig{
diff --git a/tpl/collections/collections_test.go b/tpl/collections/collections_test.go
index 2cd6bfc3f..fe7f2144d 100644
--- a/tpl/collections/collections_test.go
+++ b/tpl/collections/collections_test.go
@@ -856,7 +856,7 @@ func ToTstXIs(slice any) []TstXI {
 	}
 
 	tis := make([]TstXI, s.Len())
-	for i := 0; i < s.Len(); i++ {
+	for i := range s.Len() {
 		tsti, ok := s.Index(i).Interface().(TstXI)
 		if !ok {
 			return nil
diff --git a/tpl/collections/complement.go b/tpl/collections/complement.go
index 0cc2b5857..606d77dde 100644
--- a/tpl/collections/complement.go
+++ b/tpl/collections/complement.go
@@ -44,7 +44,7 @@ func (ns *Namespace) Complement(ls ...any) (any, error) {
 		switch v.Kind() {
 		case reflect.Array, reflect.Slice:
 			sl := reflect.MakeSlice(v.Type(), 0, 0)
-			for i := 0; i < v.Len(); i++ {
+			for i := range v.Len() {
 				ev, _ := indirectInterface(v.Index(i))
 				if _, found := aset[normalize(ev)]; !found {
 					sl = reflect.Append(sl, ev)
diff --git a/tpl/collections/index.go b/tpl/collections/index.go
index df932f7c6..a319ea298 100644
--- a/tpl/collections/index.go
+++ b/tpl/collections/index.go
@@ -52,7 +52,7 @@ func (ns *Namespace) doIndex(item any, args ...any) (any, error) {
 	if len(args) == 1 {
 		v := reflect.ValueOf(args[0])
 		if v.Kind() == reflect.Slice {
-			for i := 0; i < v.Len(); i++ {
+			for i := range v.Len() {
 				indices = append(indices, v.Index(i).Interface())
 			}
 		} else {
diff --git a/tpl/collections/reflect_helpers.go b/tpl/collections/reflect_helpers.go
index 6b986cbc4..05816a009 100644
--- a/tpl/collections/reflect_helpers.go
+++ b/tpl/collections/reflect_helpers.go
@@ -74,7 +74,7 @@ func collectIdentities(seqs ...any) (map[any]bool, error) {
 		v := reflect.ValueOf(seq)
 		switch v.Kind() {
 		case reflect.Array, reflect.Slice:
-			for i := 0; i < v.Len(); i++ {
+			for i := range v.Len() {
 				ev, _ := indirectInterface(v.Index(i))
 
 				if !ev.Type().Comparable() {
diff --git a/tpl/collections/sort.go b/tpl/collections/sort.go
index 20862a451..0c09f6af4 100644
--- a/tpl/collections/sort.go
+++ b/tpl/collections/sort.go
@@ -73,7 +73,7 @@ func (ns *Namespace) Sort(ctx context.Context, l any, args ...any) (any, error)
 
 	switch seqv.Kind() {
 	case reflect.Array, reflect.Slice:
-		for i := 0; i < seqv.Len(); i++ {
+		for i := range seqv.Len() {
 			p.Pairs[i].Value = seqv.Index(i)
 			if sortByField == "" || sortByField == "value" {
 				p.Pairs[i].Key = p.Pairs[i].Value
diff --git a/tpl/collections/symdiff.go b/tpl/collections/symdiff.go
index 8ecee3c4a..4b9dc6e42 100644
--- a/tpl/collections/symdiff.go
+++ b/tpl/collections/symdiff.go
@@ -44,7 +44,7 @@ func (ns *Namespace) SymDiff(s2, s1 any) (any, error) {
 			slice = reflect.MakeSlice(sliceType, 0, 0)
 		}
 
-		for i := 0; i < v.Len(); i++ {
+		for i := range v.Len() {
 			ev, _ := indirectInterface(v.Index(i))
 			key := normalize(ev)
 
diff --git a/tpl/collections/where.go b/tpl/collections/where.go
index a14a4863d..b15cfe781 100644
--- a/tpl/collections/where.go
+++ b/tpl/collections/where.go
@@ -148,7 +148,7 @@ func (ns *Namespace) checkCondition(v, mv reflect.Value, op string) (bool, error
 	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
 		iv := v.Int()
 		ivp = &iv
-		for i := 0; i < mv.Len(); i++ {
+		for i := range mv.Len() {
 			if anInt, err := toInt(mv.Index(i)); err == nil {
 				ima = append(ima, anInt)
 			}
@@ -156,7 +156,7 @@ func (ns *Namespace) checkCondition(v, mv reflect.Value, op string) (bool, error
 	case reflect.String:
 		sv := v.String()
 		svp = &sv
-		for i := 0; i < mv.Len(); i++ {
+		for i := range mv.Len() {
 			if aString, err := toString(mv.Index(i)); err == nil {
 				sma = append(sma, aString)
 			}
@@ -164,7 +164,7 @@ func (ns *Namespace) checkCondition(v, mv reflect.Value, op string) (bool, error
 	case reflect.Float64:
 		fv := v.Float()
 		fvp = &fv
-		for i := 0; i < mv.Len(); i++ {
+		for i := range mv.Len() {
 			if aFloat, err := toFloat(mv.Index(i)); err == nil {
 				fma = append(fma, aFloat)
 			}
@@ -173,7 +173,7 @@ func (ns *Namespace) checkCondition(v, mv reflect.Value, op string) (bool, error
 		if hreflect.IsTime(v.Type()) {
 			iv := ns.toTimeUnix(v)
 			ivp = &iv
-			for i := 0; i < mv.Len(); i++ {
+			for i := range mv.Len() {
 				ima = append(ima, ns.toTimeUnix(mv.Index(i)))
 			}
 		}
@@ -397,7 +397,7 @@ func parseWhereArgs(args ...any) (mv reflect.Value, op string, err error) {
 func (ns *Namespace) checkWhereArray(ctxv, seqv, kv, mv reflect.Value, path []string, op string) (any, error) {
 	rv := reflect.MakeSlice(seqv.Type(), 0, 0)
 
-	for i := 0; i < seqv.Len(); i++ {
+	for i := range seqv.Len() {
 		var vvv reflect.Value
 		rvv := seqv.Index(i)
 
diff --git a/tpl/collections/where_test.go b/tpl/collections/where_test.go
index c66a5d608..60f97e607 100644
--- a/tpl/collections/where_test.go
+++ b/tpl/collections/where_test.go
@@ -865,10 +865,10 @@ func BenchmarkWhereOps(b *testing.B) {
 	ns := newNs()
 	var seq []map[string]string
 	ctx := context.Background()
-	for i := 0; i < 500; i++ {
+	for range 500 {
 		seq = append(seq, map[string]string{"foo": "bar"})
 	}
-	for i := 0; i < 500; i++ {
+	for range 500 {
 		seq = append(seq, map[string]string{"foo": "baz"})
 	}
 	// Shuffle the sequence.
@@ -907,7 +907,7 @@ func BenchmarkWhereMap(b *testing.B) {
 	ns := newNs()
 	seq := map[string]string{}
 
-	for i := 0; i < 1000; i++ {
+	for i := range 1000 {
 		seq[fmt.Sprintf("key%d", i)] = "value"
 	}
 
diff --git a/tpl/data/data.go b/tpl/data/data.go
index 097cfe4a8..ca1796826 100644
--- a/tpl/data/data.go
+++ b/tpl/data/data.go
@@ -36,6 +36,7 @@ import (
 	"github.com/spf13/cast"
 
 	"github.com/gohugoio/hugo/deps"
+	"slices"
 )
 
 // New returns a new instance of the data-namespaced template functions.
@@ -170,12 +171,7 @@ func hasHeaderValue(m http.Header, key, value string) bool {
 		return false
 	}
 
-	for _, v := range s {
-		if v == value {
-			return true
-		}
-	}
-	return false
+	return slices.Contains(s, value)
 }
 
 func hasHeaderKey(m http.Header, key string) bool {
diff --git a/tpl/data/resources_test.go b/tpl/data/resources_test.go
index b8003bf43..d49e74d4c 100644
--- a/tpl/data/resources_test.go
+++ b/tpl/data/resources_test.go
@@ -155,11 +155,11 @@ func TestScpGetRemoteParallel(t *testing.T) {
 
 	var wg sync.WaitGroup
 
-	for i := 0; i < 1; i++ {
+	for i := range 1 {
 		wg.Add(1)
 		go func(gor int) {
 			defer wg.Done()
-			for j := 0; j < 10; j++ {
+			for range 10 {
 				var cb []byte
 				f := func(b []byte) (bool, error) {
 					cb = b
diff --git a/tpl/internal/templatefuncsRegistry.go b/tpl/internal/templatefuncsRegistry.go
index 1b74bf443..425938b07 100644
--- a/tpl/internal/templatefuncsRegistry.go
+++ b/tpl/internal/templatefuncsRegistry.go
@@ -213,7 +213,7 @@ func (t *TemplateFuncsNamespace) toJSON(ctx context.Context) ([]byte, error) {
 			return nil, nil
 		}
 		ctxType := reflect.TypeOf(tctx)
-		for i := 0; i < ctxType.NumMethod(); i++ {
+		for i := range ctxType.NumMethod() {
 			method := ctxType.Method(i)
 			if ignoreFuncs[method.Name] {
 				continue
diff --git a/tpl/math/math.go b/tpl/math/math.go
index 15c0db22c..5a5e42f4e 100644
--- a/tpl/math/math.go
+++ b/tpl/math/math.go
@@ -314,7 +314,7 @@ func (ns *Namespace) toFloatsE(v any) ([]float64, bool, error) {
 	switch vv.Kind() {
 	case reflect.Slice, reflect.Array:
 		var floats []float64
-		for i := 0; i < vv.Len(); i++ {
+		for i := range vv.Len() {
 			f, err := cast.ToFloat64E(vv.Index(i).Interface())
 			if err != nil {
 				return nil, true, err
diff --git a/tpl/page/init.go b/tpl/page/init.go
index 826aa45d3..106552630 100644
--- a/tpl/page/init.go
+++ b/tpl/page/init.go
@@ -31,7 +31,7 @@ func init() {
 	f := func(d *deps.Deps) *internal.TemplateFuncsNamespace {
 		ns := &internal.TemplateFuncsNamespace{
 			Name: name,
-			Context: func(ctx context.Context, args ...interface{}) (interface{}, error) {
+			Context: func(ctx context.Context, args ...any) (any, error) {
 				v := tpl.Context.Page.Get(ctx)
 				if v == nil {
 					// The multilingual sitemap does not have a page as its context.
diff --git a/tpl/templates/defer_integration_test.go b/tpl/templates/defer_integration_test.go
index 77be91cee..27b8fbf8c 100644
--- a/tpl/templates/defer_integration_test.go
+++ b/tpl/templates/defer_integration_test.go
@@ -87,7 +87,7 @@ func TestDeferRepeatedBuildsEditOutside(t *testing.T) {
 
 	b := hugolib.TestRunning(t, deferFilesCommon)
 
-	for i := 0; i < 5; i++ {
+	for i := range 5 {
 		old := fmt.Sprintf("EDIT_COUNTER_OUTSIDE_%d", i)
 		new := fmt.Sprintf("EDIT_COUNTER_OUTSIDE_%d", i+1)
 		b.EditFileReplaceAll("layouts/index.html", old, new).Build()
@@ -100,7 +100,7 @@ func TestDeferRepeatedBuildsEditDefer(t *testing.T) {
 
 	b := hugolib.TestRunning(t, deferFilesCommon)
 
-	for i := 0; i < 8; i++ {
+	for i := range 8 {
 		old := fmt.Sprintf("EDIT_COUNTER_DEFER_%d", i)
 		new := fmt.Sprintf("EDIT_COUNTER_DEFER_%d", i+1)
 		b.EditFileReplaceAll("layouts/index.html", old, new).Build()
diff --git a/tpl/tplimpl/template.go b/tpl/tplimpl/template.go
index 0ea7117a3..3b643162a 100644
--- a/tpl/tplimpl/template.go
+++ b/tpl/tplimpl/template.go
@@ -431,7 +431,7 @@ func (t *templateHandler) LookupVariants(name string) []tpl.Template {
 	}
 
 	variants := make([]tpl.Template, len(s.variants))
-	for i := 0; i < len(variants); i++ {
+	for i := range variants {
 		variants[i] = s.variants[i].ts
 	}
 
@@ -599,7 +599,7 @@ func (t *templateHandler) addFileContext(templ tpl.Template, inerr error) error
 func (t *templateHandler) extractIdentifiers(line string) []string {
 	m := identifiersRe.FindAllStringSubmatch(line, -1)
 	identifiers := make([]string, len(m))
-	for i := 0; i < len(m); i++ {
+	for i := range m {
 		identifiers[i] = m[i][1]
 	}
 	return identifiers
diff --git a/tpl/tplimpl/template_ast_transformers.go b/tpl/tplimpl/template_ast_transformers.go
index f95335779..4deadd052 100644
--- a/tpl/tplimpl/template_ast_transformers.go
+++ b/tpl/tplimpl/template_ast_transformers.go
@@ -27,6 +27,7 @@ import (
 	"github.com/gohugoio/hugo/common/maps"
 	"github.com/gohugoio/hugo/tpl"
 	"github.com/mitchellh/mapstructure"
+	"slices"
 )
 
 type templateType int
@@ -187,7 +188,7 @@ func (c *templateContext) applyTransformations(n parse.Node) (bool, error) {
 		for i, cmd := range x.Cmds {
 			keep, _ := c.applyTransformations(cmd)
 			if !keep {
-				x.Cmds = append(x.Cmds[:i], x.Cmds[i+1:]...)
+				x.Cmds = slices.Delete(x.Cmds, i, i+1)
 			}
 		}
 
@@ -271,12 +272,7 @@ func (c *templateContext) applyTransformationsToNodes(nodes ...parse.Node) {
 }
 
 func (c *templateContext) hasIdent(idents []string, ident string) bool {
-	for _, id := range idents {
-		if id == ident {
-			return true
-		}
-	}
-	return false
+	return slices.Contains(idents, ident)
 }
 
 // collectConfig collects and parses any leading template config variable declaration.
diff --git a/tpl/tplimpl/template_funcs.go b/tpl/tplimpl/template_funcs.go
index d73d4d336..b181db061 100644
--- a/tpl/tplimpl/template_funcs.go
+++ b/tpl/tplimpl/template_funcs.go
@@ -64,6 +64,7 @@ import (
 	_ "github.com/gohugoio/hugo/tpl/time"
 	_ "github.com/gohugoio/hugo/tpl/transform"
 	_ "github.com/gohugoio/hugo/tpl/urls"
+	maps0 "maps"
 )
 
 var (
@@ -290,9 +291,7 @@ func createFuncMap(d *deps.Deps) map[string]any {
 	}
 
 	if d.OverloadedTemplateFuncs != nil {
-		for k, v := range d.OverloadedTemplateFuncs {
-			funcMap[k] = v
-		}
+		maps0.Copy(funcMap, d.OverloadedTemplateFuncs)
 	}
 
 	d.TmplFuncMap = funcMap
diff --git a/tpl/transform/unmarshal_test.go b/tpl/transform/unmarshal_test.go
index d65f05fd4..9b34e1daa 100644
--- a/tpl/transform/unmarshal_test.go
+++ b/tpl/transform/unmarshal_test.go
@@ -192,7 +192,7 @@ func BenchmarkUnmarshalString(b *testing.B) {
 
 	const numJsons = 100
 	var jsons [numJsons]string
-	for i := 0; i < numJsons; i++ {
+	for i := range numJsons {
 		jsons[i] = strings.Replace(testJSON, "ROOT_KEY", fmt.Sprintf("root%d", i), 1)
 	}
 
@@ -220,7 +220,7 @@ func BenchmarkUnmarshalResource(b *testing.B) {
 
 	const numJsons = 100
 	var jsons [numJsons]testContentResource
-	for i := 0; i < numJsons; i++ {
+	for i := range numJsons {
 		key := fmt.Sprintf("root%d", i)
 		jsons[i] = testContentResource{key: key, content: strings.Replace(testJSON, "ROOT_KEY", key, 1), mime: media.Builtin.JSONType}
 	}
diff --git a/transform/livereloadinject/livereloadinject.go b/transform/livereloadinject/livereloadinject.go
index e88e3895b..425d268b3 100644
--- a/transform/livereloadinject/livereloadinject.go
+++ b/transform/livereloadinject/livereloadinject.go
@@ -56,7 +56,7 @@ func New(baseURL *url.URL) transform.Transformer {
 		src += "&port=" + baseURL.Port()
 		src += "&path=" + strings.TrimPrefix(path+"/livereload", "/")
 
-		script := []byte(fmt.Sprintf(``, html.EscapeString(src)))
+		script := fmt.Appendf(nil, ``, html.EscapeString(src))
 
 		c := make([]byte, len(b)+len(script))
 		copy(c, b[:idx])
diff --git a/transform/metainject/hugogenerator.go b/transform/metainject/hugogenerator.go
index 43a477354..b3dda9b15 100644
--- a/transform/metainject/hugogenerator.go
+++ b/transform/metainject/hugogenerator.go
@@ -39,12 +39,12 @@ func HugoGenerator(ft transform.FromTo) error {
 	}
 
 	head := "<head>"
-	replace := []byte(fmt.Sprintf("%s\n\t%s", head, hugoGeneratorTag))
+	replace := fmt.Appendf(nil, "%s\n\t%s", head, hugoGeneratorTag)
 	newcontent := bytes.Replace(b, []byte(head), replace, 1)
 
 	if len(newcontent) == len(b) {
 		head := "<HEAD>"
-		replace := []byte(fmt.Sprintf("%s\n\t%s", head, hugoGeneratorTag))
+		replace := fmt.Appendf(nil, "%s\n\t%s", head, hugoGeneratorTag)
 		newcontent = bytes.Replace(b, []byte(head), replace, 1)
 	}
 
diff --git a/watcher/filenotify/poller_test.go b/watcher/filenotify/poller_test.go
index 9b52b9780..77feb459d 100644
--- a/watcher/filenotify/poller_test.go
+++ b/watcher/filenotify/poller_test.go
@@ -220,11 +220,11 @@ func prepareTestDirWithSomeFiles(c *qt.C, id string) string {
 	c.Assert(os.MkdirAll(filepath.Join(dir, subdir1), 0o777), qt.IsNil)
 	c.Assert(os.MkdirAll(filepath.Join(dir, subdir2), 0o777), qt.IsNil)
 
-	for i := 0; i < 3; i++ {
+	for i := range 3 {
 		c.Assert(os.WriteFile(filepath.Join(dir, subdir1, fmt.Sprintf("file%d", i)), []byte("hello1"), 0o600), qt.IsNil)
 	}
 
-	for i := 0; i < 3; i++ {
+	for i := range 3 {
 		c.Assert(os.WriteFile(filepath.Join(dir, subdir2, fmt.Sprintf("file%d", i)), []byte("hello2"), 0o600), qt.IsNil)
 	}