diff --git a/common/paths/pathparser.go b/common/paths/pathparser.go index 951501406c6..d62b08e1e2b 100644 --- a/common/paths/pathparser.go +++ b/common/paths/pathparser.go @@ -108,7 +108,6 @@ func (pp *PathParser) parse(component, s string) (*Path, error) { var err error // Preserve the original case for titles etc. p.unnormalized, err = pp.doParse(component, s, pp.newPath(component)) - if err != nil { return nil, err } @@ -195,23 +194,26 @@ func (pp *PathParser) doParse(component, s string, p *Path) (*Path, error) { } } - isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes - isContent := isContentComponent && files.IsContentExt(p.Ext()) - - if isContent { + if len(p.identifiers) > 0 { + isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes + isContent := isContentComponent && files.IsContentExt(p.Ext()) id := p.identifiers[len(p.identifiers)-1] b := p.s[p.posContainerHigh : id.Low-1] - switch b { - case "index": - p.bundleType = PathTypeLeaf - case "_index": - p.bundleType = PathTypeBranch - default: - p.bundleType = PathTypeContentSingle - } + if isContent { + switch b { + case "index": + p.bundleType = PathTypeLeaf + case "_index": + p.bundleType = PathTypeBranch + default: + p.bundleType = PathTypeContentSingle + } - if slashCount == 2 && p.IsLeafBundle() { - p.posSectionHigh = 0 + if slashCount == 2 && p.IsLeafBundle() { + p.posSectionHigh = 0 + } + } else if b == files.NameContentData && files.IsContentDataExt(p.Ext()) { + p.bundleType = PathTypeContentData } } @@ -246,6 +248,9 @@ const ( // Branch bundles, e.g. /blog/_index.md PathTypeBranch + + // Content data file, _content.gotmpl. 
+ PathTypeContentData ) type Path struct { @@ -541,6 +546,10 @@ func (p *Path) IsLeafBundle() bool { return p.bundleType == PathTypeLeaf } +func (p *Path) IsContentData() bool { + return p.bundleType == PathTypeContentData +} + func (p Path) ForBundleType(t PathType) *Path { p.bundleType = t return &p diff --git a/common/paths/pathparser_test.go b/common/paths/pathparser_test.go index 8c89ddd4109..11bfcca4f21 100644 --- a/common/paths/pathparser_test.go +++ b/common/paths/pathparser_test.go @@ -333,6 +333,22 @@ func TestParse(t *testing.T) { c.Assert(p.Path(), qt.Equals, "/a/b/c.txt") }, }, + { + "Content data file gotmpl", + "/a/b/_content.gotmpl", + func(c *qt.C, p *Path) { + c.Assert(p.Path(), qt.Equals, "/a/b/_content.gotmpl") + c.Assert(p.Ext(), qt.Equals, "gotmpl") + c.Assert(p.IsContentData(), qt.IsTrue) + }, + }, + { + "Content data file yaml", + "/a/b/_content.yaml", + func(c *qt.C, p *Path) { + c.Assert(p.IsContentData(), qt.IsFalse) + }, + }, } for _, test := range tests { c.Run(test.name, func(c *qt.C) { diff --git a/hugofs/files/classifier.go b/hugofs/files/classifier.go index a8d231f7338..4012e6dadf9 100644 --- a/hugofs/files/classifier.go +++ b/hugofs/files/classifier.go @@ -82,6 +82,15 @@ func IsContentExt(ext string) bool { return contentFileExtensionsSet[ext] } +func IsGoTmplExt(ext string) bool { + return ext == "gotmpl" +} + +// Supported data file extensions for _content.* files. +func IsContentDataExt(ext string) bool { + return IsGoTmplExt(ext) +} + const ( ComponentFolderArchetypes = "archetypes" ComponentFolderStatic = "static" @@ -93,6 +102,8 @@ const ( FolderResources = "resources" FolderJSConfig = "_jsconfig" // Mounted below /assets with postcss.config.js etc. 
+ + NameContentData = "_content" ) var ( diff --git a/hugolib/content_map.go b/hugolib/content_map.go index 62cabec514c..89db4d9aad6 100644 --- a/hugolib/content_map.go +++ b/hugolib/content_map.go @@ -23,6 +23,8 @@ import ( "github.com/bep/logg" "github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/paths" + "github.com/gohugoio/hugo/hugofs/files" + "github.com/gohugoio/hugo/hugolib/pagesfromdata" "github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/source" @@ -162,11 +164,13 @@ func (cfg contentMapConfig) getTaxonomyConfig(s string) (v viewName) { return } -func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error { +func (m *pageMap) AddFi(fi hugofs.FileMetaInfo, whatChanged *whatChanged) error { if fi.IsDir() { return nil } + rebuilding := m.s.h.buildCounter.Load() > 0 + insertResource := func(fim hugofs.FileMetaInfo) error { pi := fi.Meta().PathInfo key := pi.Base() @@ -222,6 +226,57 @@ func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error { if err := insertResource(fi); err != nil { return err } + case paths.PathTypeContentData: + m.s.Log.Trace(logg.StringFunc( + func() string { + return fmt.Sprintf("insert pages from data file: %q", fi.Meta().Filename) + }, + )) + + if !files.IsGoTmplExt(pi.Ext()) { + return fmt.Errorf("unsupported data file extension %q", pi.Ext()) + } + + f := source.NewFileInfo(fi) + if err := func() error { + return pagesfromdata.PagesFromTemplate( + pagesfromdata.PagesFromTemplateOptions{ + Fi: fi, + TmplFinder: m.s.TextTmpl(), + TmplExec: m.s.Tmpl(), + Site: m.s, // TODO1 wrapper without RegularPages etc. + Handlepage: func(p pagesfromdata.PageData) error { + pc := p.PageConfig + pc.Path = path.Join(pi.Base(), pc.Path) + ps, pi, err := m.s.h.newPage( + &pageMeta{ + f: f, + pageMetaParams: pageMetaParams{ + pageConfig: pc, + }, + }, + ) + if err != nil { + return err + } + + if ps == nil { + // Disabled page. 
+ return nil + } + + n, _, replaced := m.treePages.InsertIntoValuesDimension(pi.Base(), ps) + + if rebuilding && replaced { + whatChanged.Add(n.GetIdentity()) + } + return nil + }, + }, + ) + }(); err != nil { + return err + } default: m.s.Log.Trace(logg.StringFunc( func() string { @@ -244,7 +299,7 @@ func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error { return nil } - m.treePages.InsertWithLock(pi.Base(), p) + m.treePages.InsertIntoValuesDimensionWithLock(pi.Base(), p) } return nil diff --git a/hugolib/content_map_page.go b/hugolib/content_map_page.go index 50e1bc35dcb..0270bba50a2 100644 --- a/hugolib/content_map_page.go +++ b/hugolib/content_map_page.go @@ -100,6 +100,8 @@ type pageMap struct { cacheContentPlain *dynacache.Partition[string, *resources.StaleValue[contentPlainPlainWords]] contentTableOfContents *dynacache.Partition[string, *resources.StaleValue[contentTableOfContents]] + contentDataFileSeenItems *maps.Cache[string, map[uint64]bool] + cfg contentMapConfig } @@ -587,9 +589,9 @@ func (m *pageMap) getOrCreateResourcesForPage(ps *pageState) resource.Resources sort.SliceStable(res, lessFunc) - if len(ps.m.pageConfig.Resources) > 0 { + if len(ps.m.pageConfig.ResourcesMeta) > 0 { for i, r := range res { - res[i] = resources.CloneWithMetadataIfNeeded(ps.m.pageConfig.Resources, r) + res[i] = resources.CloneWithMetadataIfNeeded(ps.m.pageConfig.ResourcesMeta, r) } sort.SliceStable(res, lessFunc) } @@ -778,7 +780,7 @@ func (s *contentNodeShifter) ForEeachInDimension(n contentNodeI, d int, f func(c } } -func (s *contentNodeShifter) InsertInto(old, new contentNodeI, dimension doctree.Dimension) contentNodeI { +func (s *contentNodeShifter) InsertInto(old, new contentNodeI, dimension doctree.Dimension) (contentNodeI, contentNodeI, bool) { langi := dimension[doctree.DimensionLanguage.Index()] switch vv := old.(type) { case *pageState: @@ -787,37 +789,39 @@ func (s *contentNodeShifter) InsertInto(old, new contentNodeI, dimension doctree panic(fmt.Sprintf("unknown 
type %T", new)) } if vv.s.languagei == newp.s.languagei && newp.s.languagei == langi { - return new + return new, vv, true } is := make(contentNodeIs, s.numLanguages) is[vv.s.languagei] = old is[langi] = new - return is + return is, old, false case contentNodeIs: + oldv := vv[langi] vv[langi] = new - return vv + return vv, oldv, oldv != nil case resourceSources: + oldv := vv[langi] vv[langi] = new.(*resourceSource) - return vv + return vv, oldv, oldv != nil case *resourceSource: newp, ok := new.(*resourceSource) if !ok { panic(fmt.Sprintf("unknown type %T", new)) } if vv.LangIndex() == newp.LangIndex() && newp.LangIndex() == langi { - return new + return new, vv, true } rs := make(resourceSources, s.numLanguages) rs[vv.LangIndex()] = vv rs[langi] = newp - return rs + return rs, vv, false default: panic(fmt.Sprintf("unknown type %T", old)) } } -func (s *contentNodeShifter) Insert(old, new contentNodeI) contentNodeI { +func (s *contentNodeShifter) Insert(old, new contentNodeI) (contentNodeI, contentNodeI, bool) { switch vv := old.(type) { case *pageState: newp, ok := new.(*pageState) @@ -825,40 +829,42 @@ func (s *contentNodeShifter) Insert(old, new contentNodeI) contentNodeI { panic(fmt.Sprintf("unknown type %T", new)) } if vv.s.languagei == newp.s.languagei { - return new + return new, vv, true } is := make(contentNodeIs, s.numLanguages) is[newp.s.languagei] = new is[vv.s.languagei] = old - return is + return is, old, false case contentNodeIs: newp, ok := new.(*pageState) if !ok { panic(fmt.Sprintf("unknown type %T", new)) } - resource.MarkStale(vv[newp.s.languagei]) + oldv := vv[newp.s.languagei] + resource.MarkStale(oldv) vv[newp.s.languagei] = new - return vv + return vv, oldv, oldv != nil case *resourceSource: newp, ok := new.(*resourceSource) if !ok { panic(fmt.Sprintf("unknown type %T", new)) } if vv.LangIndex() == newp.LangIndex() { - return new + return new, vv, true } rs := make(resourceSources, s.numLanguages) rs[newp.LangIndex()] = newp 
rs[vv.LangIndex()] = vv - return rs + return rs, vv, false case resourceSources: newp, ok := new.(*resourceSource) if !ok { panic(fmt.Sprintf("unknown type %T", new)) } - resource.MarkStale(vv[newp.LangIndex()]) + oldv := vv[newp.LangIndex()] + resource.MarkStale(oldv) vv[newp.LangIndex()] = newp - return vv + return vv, oldv, oldv != nil default: panic(fmt.Sprintf("unknown type %T", old)) } @@ -878,6 +884,8 @@ func newPageMap(i int, s *Site, mcache *dynacache.Cache, pageTrees *pageTrees) * cacheContentPlain: dynacache.GetOrCreatePartition[string, *resources.StaleValue[contentPlainPlainWords]](mcache, fmt.Sprintf("/cont/pla/%d", i), dynacache.OptionsPartition{Weight: 70, ClearWhen: dynacache.ClearOnChange}), contentTableOfContents: dynacache.GetOrCreatePartition[string, *resources.StaleValue[contentTableOfContents]](mcache, fmt.Sprintf("/cont/toc/%d", i), dynacache.OptionsPartition{Weight: 70, ClearWhen: dynacache.ClearOnChange}), + contentDataFileSeenItems: maps.NewCache[string, map[uint64]bool](), + cfg: contentMapConfig{ lang: s.Lang(), taxonomyConfig: taxonomiesConfig.Values(), @@ -948,8 +956,6 @@ type contentTreeReverseIndexMap struct { type sitePagesAssembler struct { *Site - watching bool - incomingChanges *whatChanged assembleChanges *whatChanged ctx context.Context } @@ -1020,11 +1026,13 @@ func (m *pageMap) debugPrint(prefix string, maxLevel int, w io.Writer) { } } +// TODO1 do once? func (h *HugoSites) resolveAndClearStateForIdentities( ctx context.Context, l logg.LevelLogger, cachebuster func(s string) bool, changes []identity.Identity, ) error { + // TODO1 add a threshold for len(changes) => Djengis Khan. Check if we have a similar somewhere else. 
h.Log.Debug().Log(logg.StringFunc( func() string { var sb strings.Builder @@ -1257,6 +1265,7 @@ func (sa *sitePagesAssembler) applyAggregates() error { rw := pw.Extend() rw.Tree = sa.pageMap.treeResources sa.lastmod = time.Time{} + rebuild := sa.s.h.buildCounter.Load() > 0 pw.Handle = func(keyPage string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { pageBundle := n.(*pageState) @@ -1288,18 +1297,20 @@ func (sa *sitePagesAssembler) applyAggregates() error { } } - if (pageBundle.IsHome() || pageBundle.IsSection()) && pageBundle.m.setMetaPostCount > 0 { - oldDates := pageBundle.m.pageConfig.Dates + if rebuild { + if (pageBundle.IsHome() || pageBundle.IsSection()) && pageBundle.m.setMetaPostCount > 0 { + oldDates := pageBundle.m.pageConfig.Dates - // We need to wait until after the walk to determine if any of the dates have changed. - pw.WalkContext.AddPostHook( - func() error { - if oldDates != pageBundle.m.pageConfig.Dates { - sa.assembleChanges.Add(pageBundle) - } - return nil - }, - ) + // We need to wait until after the walk to determine if any of the dates have changed. + pw.WalkContext.AddPostHook( + func() error { + if oldDates != pageBundle.m.pageConfig.Dates { + sa.assembleChanges.Add(pageBundle) + } + return nil + }, + ) + } } // Combine the cascade map with front matter. @@ -1309,7 +1320,7 @@ func (sa *sitePagesAssembler) applyAggregates() error { // We receive cascade values from above. If this leads to a change compared // to the previous value, we need to mark the page and its dependencies as changed. 
- if pageBundle.m.setMetaPostCascadeChanged { + if rebuild && pageBundle.m.setMetaPostCascadeChanged { sa.assembleChanges.Add(pageBundle) } @@ -1542,7 +1553,7 @@ func (sa *sitePagesAssembler) assembleTermsAndTranslations() error { s: sa.Site, pathInfo: pi, pageMetaParams: pageMetaParams{ - pageConfig: &pagemeta.PageConfig{ + pageConfig: pagemeta.PageConfig{ Kind: kinds.KindTerm, }, }, @@ -1764,7 +1775,7 @@ func (sa *sitePagesAssembler) addStandalonePages() error { s: s, pathInfo: s.Conf.PathParser().Parse(files.ComponentFolderContent, key+f.MediaType.FirstSuffix.FullSuffix), pageMetaParams: pageMetaParams{ - pageConfig: &pagemeta.PageConfig{ + pageConfig: pagemeta.PageConfig{ Kind: kind, }, }, @@ -1882,7 +1893,7 @@ func (sa *sitePagesAssembler) addMissingRootSections() error { s: sa.Site, pathInfo: p, pageMetaParams: pageMetaParams{ - pageConfig: &pagemeta.PageConfig{ + pageConfig: pagemeta.PageConfig{ Kind: kinds.KindHome, }, }, @@ -1891,7 +1902,7 @@ func (sa *sitePagesAssembler) addMissingRootSections() error { if err != nil { return err } - w.Tree.InsertWithLock(p.Base(), n) + w.Tree.InsertIntoValuesDimensionWithLock(p.Base(), n) sa.home = n } @@ -1915,7 +1926,7 @@ func (sa *sitePagesAssembler) addMissingTaxonomies() error { s: sa.Site, pathInfo: sa.Conf.PathParser().Parse(files.ComponentFolderContent, key+"/_index.md"), pageMetaParams: pageMetaParams{ - pageConfig: &pagemeta.PageConfig{ + pageConfig: pagemeta.PageConfig{ Kind: kinds.KindTaxonomy, }, }, diff --git a/hugolib/doctree/nodeshiftree_test.go b/hugolib/doctree/nodeshiftree_test.go index 313be0bc4f7..13a84d5fa84 100644 --- a/hugolib/doctree/nodeshiftree_test.go +++ b/hugolib/doctree/nodeshiftree_test.go @@ -173,7 +173,7 @@ func TestTreeInsert(t *testing.T) { c.Assert(tree.Get("/notfound"), qt.IsNil) ab2 := &testValue{ID: "/a/b", Lang: 0} - v, ok := tree.InsertIntoValuesDimension("/a/b", ab2) + v, _, ok := tree.InsertIntoValuesDimension("/a/b", ab2) c.Assert(ok, qt.IsTrue) c.Assert(v, qt.DeepEquals, ab2) 
@@ -239,12 +239,12 @@ func (s *testShifter) ForEeachInDimension(n *testValue, d int, f func(n *testVal f(n) } -func (s *testShifter) Insert(old, new *testValue) *testValue { - return new +func (s *testShifter) Insert(old, new *testValue) (*testValue, *testValue, bool) { + return new, old, true } -func (s *testShifter) InsertInto(old, new *testValue, dimension doctree.Dimension) *testValue { - return new +func (s *testShifter) InsertInto(old, new *testValue, dimension doctree.Dimension) (*testValue, *testValue, bool) { + return new, old, true } func (s *testShifter) Delete(n *testValue, dimension doctree.Dimension) (bool, bool) { diff --git a/hugolib/doctree/nodeshifttree.go b/hugolib/doctree/nodeshifttree.go index 1c11753055a..bc24f93fc39 100644 --- a/hugolib/doctree/nodeshifttree.go +++ b/hugolib/doctree/nodeshifttree.go @@ -38,13 +38,15 @@ type ( // Insert inserts new into the tree into the dimension it provides. // It may replace old. - // It returns a T (can be the same as old). - Insert(old, new T) T + // It returns the updated and existing T + // and a bool indicating if an existing record is updated. + Insert(old, new T) (T, T, bool) // Insert inserts new into the given dimension. // It may replace old. - // It returns a T (can be the same as old). - InsertInto(old, new T, dimension Dimension) T + // It returns the updated and existing T + // and a bool indicating if an existing record is updated. + InsertInto(old, new T, dimension Dimension) (T, T, bool) // Delete deletes T from the given dimension and returns whether the dimension was deleted and if it's empty after the delete. 
Delete(v T, dimension Dimension) (bool, bool) @@ -141,22 +143,30 @@ func (t *NodeShiftTree[T]) Increment(d int) *NodeShiftTree[T] { return t.Shape(d, t.dims[d]+1) } -func (r *NodeShiftTree[T]) InsertIntoCurrentDimension(s string, v T) (T, bool) { +func (r *NodeShiftTree[T]) InsertIntoCurrentDimension(s string, v T) (T, T, bool) { s = mustValidateKey(cleanKey(s)) + var ( + updated bool + existing T + ) if vv, ok := r.tree.Get(s); ok { - v = r.shifter.InsertInto(vv.(T), v, r.dims) + v, existing, updated = r.shifter.InsertInto(vv.(T), v, r.dims) } r.tree.Insert(s, v) - return v, true + return v, existing, updated } -func (r *NodeShiftTree[T]) InsertIntoValuesDimension(s string, v T) (T, bool) { +func (r *NodeShiftTree[T]) InsertIntoValuesDimension(s string, v T) (T, T, bool) { s = mustValidateKey(cleanKey(s)) + var ( + updated bool + existing T + ) if vv, ok := r.tree.Get(s); ok { - v = r.shifter.Insert(vv.(T), v) + v, existing, updated = r.shifter.Insert(vv.(T), v) } r.tree.Insert(s, v) - return v, true + return v, existing, updated } func (r *NodeShiftTree[T]) InsertRawWithLock(s string, v any) (any, bool) { @@ -165,7 +175,7 @@ func (r *NodeShiftTree[T]) InsertRawWithLock(s string, v any) (any, bool) { return r.tree.Insert(s, v) } -func (r *NodeShiftTree[T]) InsertWithLock(s string, v T) (T, bool) { +func (r *NodeShiftTree[T]) InsertIntoValuesDimensionWithLock(s string, v T) (T, T, bool) { r.mu.Lock() defer r.mu.Unlock() return r.InsertIntoValuesDimension(s, v) diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go index 3beb072e3c7..b19d6cd2e44 100644 --- a/hugolib/hugo_sites_build.go +++ b/hugolib/hugo_sites_build.go @@ -251,15 +251,11 @@ func (h *HugoSites) assemble(ctx context.Context, l logg.LevelLogger, bcfg *Buil h.translationKeyPages.Reset() assemblers := make([]*sitePagesAssembler, len(h.Sites)) // Changes detected during assembly (e.g. 
aggregate date changes) - assembleChanges := &whatChanged{ - identitySet: make(map[identity.Identity]bool), - } + for i, s := range h.Sites { assemblers[i] = &sitePagesAssembler{ Site: s, - watching: s.watching(), - incomingChanges: bcfg.whatChanged, - assembleChanges: assembleChanges, + assembleChanges: bcfg.whatChanged, ctx: ctx, } } @@ -275,7 +271,7 @@ func (h *HugoSites) assemble(ctx context.Context, l logg.LevelLogger, bcfg *Buil return err } - changes := assembleChanges.Changes() + changes := bcfg.whatChanged.Changes() // Changes from the assemble step (e.g. lastMod, cascase) needs a re-calculation // of what needs to be re-built. @@ -695,12 +691,16 @@ func (h *HugoSites) processPartial(ctx context.Context, l logg.LevelLogger, conf switch pathInfo.Component() { case files.ComponentFolderContent: logger.Println("Source changed", pathInfo.Path()) - if ids := h.pageTrees.collectAndMarkStaleIdentities(pathInfo); len(ids) > 0 { - changes = append(changes, ids...) + isContentDataFile := pathInfo.IsContentData() + if !isContentDataFile { + if ids := h.pageTrees.collectAndMarkStaleIdentities(pathInfo); len(ids) > 0 { + changes = append(changes, ids...) + } } contentChanged = true + // TODO1 move? if config.RecentlyVisited != nil { // Fast render mode. Adding them to the visited queue // avoids rerendering them on navigation. @@ -985,7 +985,7 @@ func (s *HugoSites) processFiles(ctx context.Context, l logg.LevelLogger, buildC // For inserts, we can pick an arbitrary pageMap. 
pageMap := s.Sites[0].pageMap - c := newPagesCollector(ctx, s.h, sourceSpec, s.Log, l, pageMap, filenames) + c := newPagesCollector(ctx, s.h, sourceSpec, s.Log, l, pageMap, buildConfig.whatChanged, filenames) if err := c.Collect(); err != nil { return err diff --git a/hugolib/page__content.go b/hugolib/page__content.go index f10c25d7b8b..73066783517 100644 --- a/hugolib/page__content.go +++ b/hugolib/page__content.go @@ -20,6 +20,7 @@ import ( "fmt" "html/template" "io" + "path/filepath" "strconv" "strings" "unicode/utf8" @@ -54,9 +55,19 @@ type pageContentReplacement struct { source pageparser.Item } -func (m *pageMeta) parseFrontMatter(h *HugoSites, pid uint64, sourceKey string) (*contentParseInfo, error) { - var openSource hugio.OpenReadSeekCloser - if m.f != nil { +func (m *pageMeta) parseFrontMatter(h *HugoSites, pid uint64) (*contentParseInfo, error) { + var ( + sourceKey string + openSource hugio.OpenReadSeekCloser + isDataFile = !m.pageConfig.Content.IsZero() + ) + + if isDataFile { + sourceKey = strconv.FormatUint(m.pageConfig.SourceHash, 10) + // Only text values implemented for now. + openSource = hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromString(m.pageConfig.Content.Value)) + } else if m.f != nil { + sourceKey = filepath.ToSlash(m.f.Filename()) meta := m.f.FileInfo().Meta() openSource = func() (hugio.ReadSeekCloser, error) { r, err := meta.Open() @@ -68,7 +79,7 @@ func (m *pageMeta) parseFrontMatter(h *HugoSites, pid uint64, sourceKey string) } if sourceKey == "" { - sourceKey = strconv.Itoa(int(pid)) + sourceKey = strconv.FormatUint(pid, 10) } pi := &contentParseInfo{ @@ -93,6 +104,11 @@ func (m *pageMeta) parseFrontMatter(h *HugoSites, pid uint64, sourceKey string) pi.itemsStep1 = items + if isDataFile { + // No front matter. 
+ return pi, nil + } + if err := pi.mapFrontMatter(source); err != nil { return nil, err } diff --git a/hugolib/page__meta.go b/hugolib/page__meta.go index d8203fe751f..70a7dca782a 100644 --- a/hugolib/page__meta.go +++ b/hugolib/page__meta.go @@ -82,7 +82,7 @@ type pageMetaParams struct { setMetaPostCount int setMetaPostCascadeChanged bool - pageConfig *pagemeta.PageConfig + pageConfig pagemeta.PageConfig // These are only set in watch mode. datesOriginal pagemeta.Dates @@ -135,19 +135,19 @@ func (p *pageMeta) BundleType() string { } func (p *pageMeta) Date() time.Time { - return p.pageConfig.Date + return p.pageConfig.Dates.Date } func (p *pageMeta) PublishDate() time.Time { - return p.pageConfig.PublishDate + return p.pageConfig.Dates.PublishDate } func (p *pageMeta) Lastmod() time.Time { - return p.pageConfig.Lastmod + return p.pageConfig.Dates.Lastmod } func (p *pageMeta) ExpiryDate() time.Time { - return p.pageConfig.ExpiryDate + return p.pageConfig.Dates.ExpiryDate } func (p *pageMeta) Description() string { @@ -276,10 +276,7 @@ func (p *pageMeta) Weight() int { func (p *pageMeta) setMetaPre(pi *contentParseInfo, logger loggers.Logger, conf config.AllProvider) error { frontmatter := pi.frontMatter if frontmatter != nil { - pcfg := p.pageConfig - if pcfg == nil { - panic("pageConfig not set") - } + pcfg := &p.pageConfig // Needed for case insensitive fetching of params values maps.PrepareParams(frontmatter) pcfg.Params = frontmatter @@ -413,7 +410,7 @@ func (p *pageState) setMetaPostParams() error { } descriptor := &pagemeta.FrontMatterDescriptor{ - PageConfig: pm.pageConfig, + PageConfig: &pm.pageConfig, BaseFilename: contentBaseName, ModTime: mtime, GitAuthorDate: gitAuthorDate, @@ -455,9 +452,11 @@ params: var sitemapSet bool - pcfg := pm.pageConfig - + pcfg := &pm.pageConfig params := pcfg.Params + if params == nil { + panic("params not set for " + p.Title()) + } var draft, published, isCJKLanguage *bool var userParams map[string]any @@ -601,7 +600,7 @@ 
params: } if handled { - pcfg.Resources = resources + pcfg.ResourcesMeta = resources break } fallthrough diff --git a/hugolib/page__new.go b/hugolib/page__new.go index e14de692549..69695c131ad 100644 --- a/hugolib/page__new.go +++ b/hugolib/page__new.go @@ -15,7 +15,6 @@ package hugolib import ( "fmt" - "path/filepath" "sync" "sync/atomic" @@ -29,28 +28,18 @@ import ( "github.com/gohugoio/hugo/resources/kinds" "github.com/gohugoio/hugo/resources/page" - "github.com/gohugoio/hugo/resources/page/pagemeta" ) var pageIDCounter atomic.Uint64 func (h *HugoSites) newPage(m *pageMeta) (*pageState, *paths.Path, error) { m.Staler = &resources.AtomicStaler{} - if m.pageConfig == nil { - m.pageMetaParams = pageMetaParams{ - pageConfig: &pagemeta.PageConfig{ - Params: maps.Params{}, - }, - } - } - - var sourceKey string - if m.f != nil { - sourceKey = filepath.ToSlash(m.f.Filename()) + if m.pageConfig.Params == nil { + m.pageConfig.Params = maps.Params{} } pid := pageIDCounter.Add(1) - pi, err := m.parseFrontMatter(h, pid, sourceKey) + pi, err := m.parseFrontMatter(h, pid) if err != nil { return nil, nil, err } diff --git a/hugolib/pages_capture.go b/hugolib/pages_capture.go index 231c2efad15..80a0908ffef 100644 --- a/hugolib/pages_capture.go +++ b/hugolib/pages_capture.go @@ -42,18 +42,20 @@ func newPagesCollector( logger loggers.Logger, infoLogger logg.LevelLogger, m *pageMap, + whatChanged *whatChanged, ids []pathChange, ) *pagesCollector { return &pagesCollector{ - ctx: ctx, - h: h, - fs: sp.BaseFs.Content.Fs, - m: m, - sp: sp, - logger: logger, - infoLogger: infoLogger, - ids: ids, - seenDirs: make(map[string]bool), + ctx: ctx, + h: h, + fs: sp.BaseFs.Content.Fs, + m: m, + sp: sp, + logger: logger, + infoLogger: infoLogger, + whatChanged: whatChanged, + ids: ids, + seenDirs: make(map[string]bool), } } @@ -68,6 +70,8 @@ type pagesCollector struct { fs afero.Fs + whatChanged *whatChanged + // List of paths that have changed. Used in partial builds. 
ids []pathChange seenDirs map[string]bool @@ -113,7 +117,7 @@ func (c *pagesCollector) Collect() (collectErr error) { c.g = rungroup.Run[hugofs.FileMetaInfo](c.ctx, rungroup.Config[hugofs.FileMetaInfo]{ NumWorkers: numWorkers, Handle: func(ctx context.Context, fi hugofs.FileMetaInfo) error { - if err := c.m.AddFi(fi); err != nil { + if err := c.m.AddFi(fi, c.whatChanged); err != nil { return hugofs.AddFileInfoToError(err, fi, c.fs) } numFilesProcessedTotal.Add(1) @@ -243,6 +247,20 @@ func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, in return nil, nil } + n := 0 + for _, fi := range readdir { + if fi.Meta().PathInfo.IsContentData() { + // _content.json + // These are not part of any bundle, so just add them directly and remove them from the readdir slice. + if err := c.g.Enqueue(fi); err != nil { + return nil, err + } + } else { + n++ + } + } + readdir = readdir[:n] + // Pick the first regular file. var first hugofs.FileMetaInfo for _, fi := range readdir { @@ -260,6 +278,7 @@ func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, in // Any bundle file will always be first. firstPi := first.Meta().PathInfo + if firstPi == nil { panic(fmt.Sprintf("collectDirDir: no path info for %q", first.Meta().Filename)) } diff --git a/hugolib/pagesfromdata/pagesfromdata.go b/hugolib/pagesfromdata/pagesfromdata.go new file mode 100644 index 00000000000..881d6a71e0e --- /dev/null +++ b/hugolib/pagesfromdata/pagesfromdata.go @@ -0,0 +1,133 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pagesfromdata + +import ( + "context" + "io" + + "github.com/gohugoio/hugo/common/maps" + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/resources/page" + "github.com/gohugoio/hugo/resources/page/pagemeta" + "github.com/gohugoio/hugo/tpl" + "github.com/mitchellh/mapstructure" +) + +type PagesFromDataTemplateContext interface { + // UseCached returns whether Hugo can use a cached version + // matching the given ETag. + UseCached(eTag any) bool + + // AddPage adds a new page to the site. + // The first return value will always be an empty string. + AddPage(any) (string, error) + + // AddResource adds a new resource to the site. + // The first return value will always be an empty string. + AddResource(any) (string, error) + + // The site to which the pages will be added. + Site() page.Site + + // The same template may be executed multiple times for multiple languages. + // The Scratch can be used to store state between these invocations. + Scratch() *maps.Scratch + + // By default, the template will be executed for the language + // defined by the _content.gotmpl file (e.g. its mount definition). + // This method can be used to activate the template for all languages. + // The return value will always be an empty string. 
+ SetForAllLanguages() string +} + +var _ PagesFromDataTemplateContext = (*pagesFromDataTemplateContext)(nil) + +type pagesFromDataTemplateContext struct { + allLanguages bool + opts PagesFromTemplateOptions +} + +func (pagesFromDataTemplateContext) UseCached(eTag any) bool { + return false +} + +func (p pagesFromDataTemplateContext) AddPage(v any) (string, error) { + var pd PageData + if err := mapstructure.WeakDecode(v, &pd); err != nil { + return "", err + } + + if err := pd.Compile(true); err != nil { + return "", err + } + + return "", p.opts.Handlepage(pd) +} + +func (pagesFromDataTemplateContext) AddResource(any) (string, error) { + return "", nil +} + +func (p pagesFromDataTemplateContext) Site() page.Site { + return p.opts.Site +} + +func (pagesFromDataTemplateContext) Scratch() *maps.Scratch { + return nil +} + +func (p *pagesFromDataTemplateContext) SetForAllLanguages() string { + p.allLanguages = true + return "" +} + +type PagesFromTemplateOptions struct { + Fi hugofs.FileMetaInfo + TmplFinder tpl.TemplateParseFinder + TmplExec tpl.TemplateExecutor + Site page.Site + Handlepage func(p PageData) error +} + +func PagesFromTemplate(opts PagesFromTemplateOptions) error { + f, err := opts.Fi.Meta().Open() + if err != nil { + return err + } + defer f.Close() + + tmpl, err := opts.TmplFinder.Parse(opts.Fi.Meta().Filename, helpers.ReaderToString(f)) + if err != nil { + return err + } + + data := pagesFromDataTemplateContext{ + opts: opts, + } + + if err := opts.TmplExec.ExecuteWithContext(context.TODO(), tmpl, io.Discard, data); err != nil { + return err + } + + return nil +} + +////////////// + +type PageData struct { + skip map[uint64]bool + pagemeta.PageConfig `mapstructure:",squash"` +} diff --git a/hugolib/pagesfromdata/pagesfromdata_integration_test.go b/hugolib/pagesfromdata/pagesfromdata_integration_test.go new file mode 100644 index 00000000000..12614278076 --- /dev/null +++ b/hugolib/pagesfromdata/pagesfromdata_integration_test.go @@ -0,0 +1,43 @@ 
+// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package pagesfromdata_test + +import ( + "testing" + + "github.com/gohugoio/hugo/hugolib" +) + +func TestPagesFromDataTempleBasic(t *testing.T) { + t.Parallel() + + files := ` +-- hugo.toml -- +disableKinds = ["home", "taxonomy", "term", "rss", "sitemap"] +baseURL = "https://example.com" +-- layouts/_default/single.html -- +Single: {{ .Title }}|{{ .Content }}| +-- layouts/_default/list.html -- +List: {{ .Title }}|{{ .Content }}| +-- content/docs/_content.gotmpl -- +{{ $eTag := "abcde" }} +{{ if not ($.UseCached $eTag) }} + {{ $.AddPage (dict "kind" "page" "path" "p1" "title" "P1") }} +{{ end }} +` + + b := hugolib.Test(t, files) + + b.AssertFileContent("public/docs/p1/index.html", "Single: P1|") +} diff --git a/hugolib/pagesfromdata/pagesfromdata_test.go b/hugolib/pagesfromdata/pagesfromdata_test.go new file mode 100644 index 00000000000..07a8c5d0e69 --- /dev/null +++ b/hugolib/pagesfromdata/pagesfromdata_test.go @@ -0,0 +1,14 @@ +// Copyright 2024 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pagesfromdata
diff --git a/hugolib/site.go b/hugolib/site.go
index 2803878388d..922ae5b0b68 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -385,6 +385,12 @@ func (w *whatChanged) Add(ids ...identity.Identity) {
 	}
 }
 
+func (w *whatChanged) Clear() {
+	w.mu.Lock()
+	defer w.mu.Unlock()
+	w.identitySet = identity.Identities{}
+}
+
 func (w *whatChanged) Changes() []identity.Identity {
 	if w == nil || w.identitySet == nil {
 		return nil
diff --git a/parser/frontmatter.go b/parser/frontmatter.go
index ced8b84fc47..18e55f9ad4f 100644
--- a/parser/frontmatter.go
+++ b/parser/frontmatter.go
@@ -104,7 +104,6 @@ func InterfaceToFrontMatter(in any, format metadecoders.Format, w io.Writer) err
 	}
 
 	err = InterfaceToConfig(in, format, w)
-
 	if err != nil {
 		return err
 	}
diff --git a/resources/page/pagemeta/page_frontmatter.go b/resources/page/pagemeta/page_frontmatter.go
index 123dd4b704d..9729abe0f60 100644
--- a/resources/page/pagemeta/page_frontmatter.go
+++ b/resources/page/pagemeta/page_frontmatter.go
@@ -14,6 +14,8 @@ package pagemeta
 
 import (
+	"errors"
+	"fmt"
 	"strings"
 	"time"
 
@@ -29,6 +31,13 @@ import (
 	"github.com/spf13/cast"
 )
 
+type DatesStrings struct {
+	Date        string `json:"date"`
+	Lastmod     string `json:"lastMod"`
+	PublishDate string `json:"publishDate"`
+	ExpiryDate  string `json:"expiryDate"`
+}
+
 type Dates struct {
 	Date        time.Time
 	Lastmod     time.Time
@@ -36,6 +45,8 @@ type Dates struct {
 	ExpiryDate time.Time
 }
 
+// date, err = htime.ToTimeInDefaultLocationE(v, d.Location)
+
 func (d Dates) IsDateOrLastModAfter(in Dates) bool {
 	return d.Date.After(in.Date) || d.Lastmod.After(in.Lastmod)
 }
@@ -57,7 +68,8 @@ func (d Dates) IsAllDatesZero() bool {
 // Note that all the top level fields are reserved Hugo keywords.
 // Any custom configuration needs to be set in the Params map.
 type PageConfig struct {
-	Dates // Dates holds the four core dates for this page.
+	Dates Dates `json:"-"` // Dates holds the four core dates for this page.
+	DatesStrings
 	Title     string // The title of the page.
 	LinkTitle string // The link title of the page.
 	Type      string // The content type of the page.
@@ -72,25 +84,72 @@
 	Description string // The description for this page.
 	Summary     string // The summary for this page.
 	Draft       bool   // Whether or not the content is a draft.
-	Headless bool // Whether or not the page should be rendered.
+	Headless bool `json:"-"` // Whether or not the page should be rendered.
 	IsCJKLanguage  bool     // Whether or not the content is in a CJK language.
 	TranslationKey string   // The translation key for this page.
 	Keywords       []string // The keywords for this page.
 	Aliases        []string // The aliases for this page.
 	Outputs        []string // The output formats to render this page in. If not set, the site's configured output formats for this page kind will be used.
 
+	FrontMatterOnlyValues `json:"-"`
+
 	// These build options are set in the front matter,
 	// but not passed on to .Params.
-	Resources []map[string]any
-	Cascade map[page.PageMatcher]maps.Params // Only relevant for branch nodes.
-	Sitemap config.SitemapConfig
-	Build BuildConfig
+	// TODO1
+	Cascade map[page.PageMatcher]maps.Params // Only relevant for branch nodes.
+	Sitemap config.SitemapConfig
+	Build   BuildConfig
 
 	// User defined params.
 	Params maps.Params
 
 	// Compiled values.
 	IsGoldmark bool `json:"-"`
+
+	// Only from data files.
+	Content Source
+
+	SourceHash uint64 `json:"-"`
+}
+
+// Compile validates and sets defaults etc.
+func (p *PageConfig) Compile(pagesFromData bool) error {
+	if p.Path == "" {
+		return errors.New("path must be set")
+	}
+	if pagesFromData {
+		if strings.HasPrefix(p.Path, "/") {
+			return fmt.Errorf("path %q must not start with a /", p.Path)
+		}
+		if p.Content.Type == "" {
+			p.Content.Type = SourceTypeText
+		}
+		if p.Content.Type != SourceTypeText {
+			return errors.New("only text content is implemented in data files")
+		}
+	}
+
+	return nil
+}
+
+type SourceType string
+
+const (
+	SourceTypeText SourceType = "text"
+	SourceTypeURL  SourceType = "url"
+)
+
+type Source struct {
+	// Type may be either "text" or "url".
+	Type  SourceType
+	Value string
+}
+
+func (s Source) IsZero() bool {
+	return s.Type == ""
+}
+
+type FrontMatterOnlyValues struct {
+	ResourcesMeta []map[string]any
+}
 
 // FrontMatterHandler maps front matter into Page fields and .Params.
@@ -354,7 +413,7 @@ func (f *FrontMatterHandler) createHandlers() error {
 
 	if f.dateHandler, err = f.createDateHandler(f.fmConfig.Date,
 		func(d *FrontMatterDescriptor, t time.Time) {
-			d.PageConfig.Date = t
+			d.PageConfig.Dates.Date = t
 			setParamIfNotSet(fmDate, t, d)
 		}); err != nil {
 		return err
@@ -363,7 +422,7 @@
 	if f.lastModHandler, err = f.createDateHandler(f.fmConfig.Lastmod,
 		func(d *FrontMatterDescriptor, t time.Time) {
 			setParamIfNotSet(fmLastmod, t, d)
-			d.PageConfig.Lastmod = t
+			d.PageConfig.Dates.Lastmod = t
 		}); err != nil {
 		return err
 	}
@@ -371,7 +430,7 @@
 	if f.publishDateHandler, err = f.createDateHandler(f.fmConfig.PublishDate,
 		func(d *FrontMatterDescriptor, t time.Time) {
 			setParamIfNotSet(fmPubDate, t, d)
-			d.PageConfig.PublishDate = t
+			d.PageConfig.Dates.PublishDate = t
 		}); err != nil {
 		return err
 	}
@@ -379,7 +438,7 @@
 	if f.expiryDateHandler, err = f.createDateHandler(f.fmConfig.ExpiryDate,
 		func(d *FrontMatterDescriptor, t time.Time) {
 			setParamIfNotSet(fmExpiryDate, t, d)
-			d.PageConfig.ExpiryDate = t
+			d.PageConfig.Dates.ExpiryDate = t
 		}); err != nil {
 		return err
 	}
diff --git a/source/fileInfo.go b/source/fileInfo.go
index 44d08e62080..3263428cc28 100644
--- a/source/fileInfo.go
+++ b/source/fileInfo.go
@@ -37,6 +37,11 @@ type File struct {
 	lazyInit sync.Once
 }
 
+// TODO1 name.
+func (fi *File) IsMultipart() bool {
+	return fi.fim.Meta().PathInfo.IsContentData()
+}
+
 // Filename returns a file's absolute path and filename on disk.
 func (fi *File) Filename() string { return fi.fim.Meta().Filename }
diff --git a/tpl/template.go b/tpl/template.go
index 5ef0eecb840..0ab1abf2f93 100644
--- a/tpl/template.go
+++ b/tpl/template.go
@@ -65,10 +65,14 @@ type TemplateHandlers struct {
 	TxtTmpl TemplateParseFinder
 }
 
+type TemplateExecutor interface {
+	ExecuteWithContext(ctx context.Context, t Template, wr io.Writer, data any) error
+}
+
 // TemplateHandler finds and executes templates.
 type TemplateHandler interface {
 	TemplateFinder
-	ExecuteWithContext(ctx context.Context, t Template, wr io.Writer, data any) error
+	TemplateExecutor
 	LookupLayout(d layouts.LayoutDescriptor, f output.Format) (Template, bool, error)
 	HasTemplate(name string) bool
 	GetIdentity(name string) (identity.Identity, bool)