diff --git a/Gopkg.lock b/Gopkg.lock index 4233d927872..44f957d186d 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -21,7 +21,13 @@ [[projects]] name = "github.com/alecthomas/chroma" - packages = [".","formatters","formatters/html","lexers","styles"] + packages = [ + ".", + "formatters", + "formatters/html", + "lexers", + "styles" + ] revision = "v0.2.0" [[projects]] @@ -54,21 +60,26 @@ version = "v1.1.0" [[projects]] - branch = "master" - name = "github.com/dchest/cssmin" + name = "github.com/disintegration/imaging" packages = ["."] - revision = "fb8d9b44afdc258bfff6052d3667521babcb2239" + revision = "v1.2.4" [[projects]] name = "github.com/dlclark/regexp2" - packages = [".","syntax"] + packages = [ + ".", + "syntax" + ] revision = "487489b64fb796de2e55f4e8a4ad1e145f80e957" version = "v1.1.6" [[projects]] branch = "master" name = "github.com/eknkc/amber" - packages = [".","parser"] + packages = [ + ".", + "parser" + ] revision = "cdade1c073850f4ffc70a829e31235ea6892853b" [[projects]] @@ -104,7 +115,17 @@ [[projects]] branch = "master" name = "github.com/hashicorp/hcl" - packages = [".","hcl/ast","hcl/parser","hcl/scanner","hcl/strconv","hcl/token","json/parser","json/scanner","json/token"] + packages = [ + ".", + "hcl/ast", + "hcl/parser", + "hcl/scanner", + "hcl/strconv", + "hcl/token", + "json/parser", + "json/scanner", + "json/token" + ] revision = "23c074d0eceb2b8a5bfdbb271ab780cde70f05a8" [[projects]] @@ -115,7 +136,10 @@ [[projects]] name = "github.com/jdkato/prose" - packages = ["internal/util","transform"] + packages = [ + "internal/util", + "transform" + ] revision = "20d3663d4bc9dd10d75abcde9d92e04b4861c674" version = "v1.1.0" @@ -133,7 +157,10 @@ [[projects]] name = "github.com/magefile/mage" - packages = ["mg","sh"] + packages = [ + "mg", + "sh" + ] revision = "2f974307b636f59c13b88704cf350a4772fef271" version = "v1.0.2" @@ -149,6 +176,12 @@ packages = ["."] revision = "54ffb37507cd7d2ccd4768855e84f9ff6c4352b6" +[[projects]] + name = "github.com/mattn/go-runewidth" + packages = ["."] + revision = "9e777a8366cce605130a531d2cd6363d07ad7317" + version = "v0.0.2" + [[projects]] name = "github.com/miekg/mmark" packages = ["."] @@ -163,10 +196,20 @@ [[projects]] name = "github.com/nicksnyder/go-i18n" - packages = ["i18n/bundle","i18n/language","i18n/translation"] + packages = [ + "i18n/bundle", + "i18n/language", + "i18n/translation" + ] revision = "0dc1626d56435e9d605a29875701721c54bc9bbd" version = "v1.10.0" +[[projects]] + branch = "master" + name = "github.com/olekukonko/tablewriter" + packages = ["."] + revision = "65fec0d89a572b4367094e2058d3ebe667de3b60" + [[projects]] name = "github.com/pelletier/go-toml" packages = ["."] @@ -193,7 +236,10 @@ [[projects]] name = "github.com/spf13/afero" - packages = [".","mem"] + packages = [ + ".", + "mem" + ] revision = "8d919cbe7e2627e417f3e45c3c0e489a5b7e2536" version = "v1.0.0" @@ -206,7 +252,10 @@ [[projects]] branch = "master" name = "github.com/spf13/cobra" - packages = [".","doc"] + packages = [ + ".", + "doc" + ] revision = "7b2c5ac9fc04fc5efafb60700713d4fa609b777b" [[projects]] @@ -241,7 +290,10 @@ [[projects]] name = "github.com/stretchr/testify" - packages = ["assert","require"] + packages = [ + "assert", + "require" + ] revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0" version = "v1.1.4" @@ -254,15 +306,32 @@ [[projects]] branch = "master" name = "golang.org/x/image" - packages = ["riff","vp8","vp8l","webp"] + packages = [ + "bmp", + "riff", + "tiff", + "tiff/lzw", + "vp8", + "vp8l", + "webp" + ] revision = 
"f7e31b4ea2e3413ab91b4e7d2dc83e5f8d19a44c" [[projects]] branch = "master" name = "golang.org/x/net" - packages = ["idna"] + packages = [ + "context", + "idna" + ] revision = "cd69bc3fc700721b709c3a59e16e24c67b58f6ff" +[[projects]] + branch = "master" + name = "golang.org/x/sync" + packages = ["errgroup"] + revision = "fd80eb99c8f653c847d294a001bdf2a3a6f768f5" + [[projects]] branch = "master" name = "golang.org/x/sys" @@ -272,7 +341,23 @@ [[projects]] branch = "master" name = "golang.org/x/text" - packages = ["collate","collate/build","internal/colltab","internal/gen","internal/tag","internal/triegen","internal/ucd","language","secure/bidirule","transform","unicode/bidi","unicode/cldr","unicode/norm","unicode/rangetable","width"] + packages = [ + "collate", + "collate/build", + "internal/colltab", + "internal/gen", + "internal/tag", + "internal/triegen", + "internal/ucd", + "language", + "secure/bidirule", + "transform", + "unicode/bidi", + "unicode/cldr", + "unicode/norm", + "unicode/rangetable", + "width" + ] revision = "c01e4764d870b77f8abe5096ee19ad20d80e8075" [[projects]] @@ -284,6 +369,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "d75b02c8a7c7d724120447dd438e7bef140d0f4d4986adda52eabbfe3db8271a" + inputs-digest = "2d9c34c260bc26814a0635c93009daeb9d8ffa56c29c0cff6827ae2d3e9ef96d" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index c8f19297430..cef7427cece 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -21,8 +21,8 @@ revision = "v1.1.0" [[constraint]] - branch = "master" - name = "github.com/dchest/cssmin" + name = "github.com/disintegration/imaging" + revision = "v1.2.4" [[constraint]] name = "github.com/magefile/mage" @@ -116,6 +116,10 @@ name = "github.com/stretchr/testify" version = "1.1.4" +[[constraint]] + branch = "master" + name = "github.com/olekukonko/tablewriter" + [[constraint]] name = "github.com/yosssi/ace" version = "0.0.5" diff --git a/commands/benchmark.go b/commands/benchmark.go index 51f2be8766a..f5ebf7b003c 100644 --- a/commands/benchmark.go +++ b/commands/benchmark.go @@ -53,7 +53,7 @@ func benchmark(cmd *cobra.Command, args []string) error { return err } - c, err := newCommandeer(cfg) + c, err := newCommandeer(cfg, false) if err != nil { return err } @@ -84,7 +84,7 @@ func benchmark(cmd *cobra.Command, args []string) error { t := time.Now() for i := 0; i < benchmarkTimes; i++ { - if err = c.resetAndBuildSites(false); err != nil { + if err = c.resetAndBuildSites(); err != nil { return err } } diff --git a/commands/commandeer.go b/commands/commandeer.go index f538ba61997..bb0a19075a8 100644 --- a/commands/commandeer.go +++ b/commands/commandeer.go @@ -18,6 +18,7 @@ import ( "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" + src "github.com/gohugoio/hugo/source" ) type commandeer struct { @@ -25,6 +26,8 @@ type commandeer struct { pathSpec *helpers.PathSpec visitedURLs *types.EvictingStringQueue + staticDirsConfig []*src.Dirs + serverPorts []int configured bool @@ -44,10 +47,6 @@ func (c *commandeer) PathSpec() *helpers.PathSpec { return c.pathSpec } -func (c *commandeer) languages() helpers.Languages { - return c.Cfg.Get("languagesSorted").(helpers.Languages) -} - func (c *commandeer) initFs(fs *hugofs.Fs) error { c.DepsCfg.Fs = fs ps, err := helpers.NewPathSpec(fs, c.Cfg) @@ -55,10 +54,18 @@ func (c *commandeer) initFs(fs *hugofs.Fs) error { return err } c.pathSpec = ps + + dirsConfig, err := c.createStaticDirsConfig() + if err != nil { + return err 
+ } + c.staticDirsConfig = dirsConfig + return nil } -func newCommandeer(cfg *deps.DepsCfg) (*commandeer, error) { +func newCommandeer(cfg *deps.DepsCfg, watching bool) (*commandeer, error) { + cfg.Watching = watching l := cfg.Language if l == nil { l = helpers.NewDefaultLanguage(cfg.Cfg) @@ -68,5 +75,7 @@ func newCommandeer(cfg *deps.DepsCfg) (*commandeer, error) { return nil, err } - return &commandeer{DepsCfg: cfg, pathSpec: ps, visitedURLs: types.NewEvictingStringQueue(10)}, nil + c := &commandeer{DepsCfg: cfg, pathSpec: ps, visitedURLs: types.NewEvictingStringQueue(10)} + + return c, nil } diff --git a/commands/convert.go b/commands/convert.go index 298ff6019b3..dbd64da9446 100644 --- a/commands/convert.go +++ b/commands/convert.go @@ -14,12 +14,15 @@ package commands import ( - "errors" "fmt" - "path/filepath" "time" + src "github.com/gohugoio/hugo/source" + "github.com/gohugoio/hugo/hugolib" + + "path/filepath" + "github.com/gohugoio/hugo/parser" "github.com/spf13/cast" "github.com/spf13/cobra" @@ -78,6 +81,10 @@ func init() { } func convertContents(mark rune) error { + if outputDir == "" && !unsafe { + return newUserError("Unsafe operation not allowed, use --unsafe or set a different output path") + } + cfg, err := InitializeConfig() if err != nil { return err @@ -88,71 +95,89 @@ func convertContents(mark rune) error { return err } - site := h.Sites[0] - - if err = site.Initialise(); err != nil { + if err := h.Build(hugolib.BuildCfg{SkipRender: true}); err != nil { return err } - if site.Source == nil { - panic("site.Source not set") - } - if len(site.Source.Files()) < 1 { - return errors.New("No source files found") - } + site := h.Sites[0] - contentDir := site.PathSpec.AbsPathify(site.Cfg.GetString("contentDir")) - site.Log.FEEDBACK.Println("processing", len(site.Source.Files()), "content files") - for _, file := range site.Source.Files() { - site.Log.INFO.Println("Attempting to convert", file.LogicalName()) - page, err := site.NewPage(file.LogicalName()) - if err != nil { + site.Log.FEEDBACK.Println("processing", len(site.AllPages), "content files") + for _, p := range site.AllPages { + if err := convertAndSavePage(p, site, mark); err != nil { return err } + } + return nil +} - psr, err := parser.ReadFrom(file.Contents) - if err != nil { - site.Log.ERROR.Println("Error processing file:", file.Path()) - return err - } - metadata, err := psr.Metadata() - if err != nil { - site.Log.ERROR.Println("Error processing file:", file.Path()) +func convertAndSavePage(p *hugolib.Page, site *hugolib.Site, mark rune) error { + // The resources are not in .Site.AllPages. + for _, r := range p.Resources.ByType("page") { + if err := convertAndSavePage(r.(*hugolib.Page), site, mark); err != nil { return err } + } - // better handling of dates in formats that don't have support for them - if mark == parser.FormatToLeadRune("json") || mark == parser.FormatToLeadRune("yaml") || mark == parser.FormatToLeadRune("toml") { - newMetadata := cast.ToStringMap(metadata) - for k, v := range newMetadata { - switch vv := v.(type) { - case time.Time: - newMetadata[k] = vv.Format(time.RFC3339) - } - } - metadata = newMetadata - } + if p.Filename() == "" { + // No content file. + return nil + } - page.SetDir(filepath.Join(contentDir, file.Dir())) - page.SetSourceContent(psr.Content()) - if err = page.SetSourceMetaData(metadata, mark); err != nil { - site.Log.ERROR.Printf("Failed to set source metadata for file %q: %s. 
For more info see For more info see https://github.com/gohugoio/hugo/issues/2458", page.FullFilePath(), err) - continue - } + site.Log.INFO.Println("Attempting to convert", p.LogicalName()) + newPage, err := site.NewPage(p.LogicalName()) + if err != nil { + return err + } - if outputDir != "" { - if err = page.SaveSourceAs(filepath.Join(outputDir, page.FullFilePath())); err != nil { - return fmt.Errorf("Failed to save file %q: %s", page.FullFilePath(), err) - } - } else { - if unsafe { - if err = page.SaveSource(); err != nil { - return fmt.Errorf("Failed to save file %q: %s", page.FullFilePath(), err) - } - } else { - site.Log.FEEDBACK.Println("Unsafe operation not allowed, use --unsafe or set a different output path") + f, _ := p.File.(src.ReadableFile) + file, err := f.Open() + if err != nil { + site.Log.ERROR.Println("Error reading file:", p.Path()) + file.Close() + return nil + } + + psr, err := parser.ReadFrom(file) + if err != nil { + site.Log.ERROR.Println("Error processing file:", p.Path()) + file.Close() + return err + } + + file.Close() + + metadata, err := psr.Metadata() + if err != nil { + site.Log.ERROR.Println("Error processing file:", p.Path()) + return err + } + + // better handling of dates in formats that don't have support for them + if mark == parser.FormatToLeadRune("json") || mark == parser.FormatToLeadRune("yaml") || mark == parser.FormatToLeadRune("toml") { + newMetadata := cast.ToStringMap(metadata) + for k, v := range newMetadata { + switch vv := v.(type) { + case time.Time: + newMetadata[k] = vv.Format(time.RFC3339) } } + metadata = newMetadata + } + + newPage.SetSourceContent(psr.Content()) + if err = newPage.SetSourceMetaData(metadata, mark); err != nil { + site.Log.ERROR.Printf("Failed to set source metadata for file %q: %s. For more info see https://github.com/gohugoio/hugo/issues/2458", newPage.FullFilePath(), err) + return nil } + + newFilename := p.Filename() + if outputDir != "" { + newFilename = filepath.Join(outputDir, p.Dir(), newPage.LogicalName()) + } + + if err = newPage.SaveSourceAs(newFilename); err != nil { + return fmt.Errorf("Failed to save file %q: %s", newFilename, err) + } + return nil } diff --git a/commands/hugo.go b/commands/hugo.go index 7b50d0bb344..2049613bf5a 100644 --- a/commands/hugo.go +++ b/commands/hugo.go @@ -18,6 +18,9 @@ package commands import ( "fmt" "io/ioutil" + "sync/atomic" + + "golang.org/x/sync/errgroup" "github.com/gohugoio/hugo/hugofs" @@ -58,6 +61,13 @@ import ( // provide a cleaner external API, but until then, this is it. var Hugo *hugolib.HugoSites +const ( + ansiEsc = "\u001B" + clearLine = "\r\033[K" + hideCursor = ansiEsc + "[?25l" + showCursor = ansiEsc + "[?25h" +) + // Reset resets Hugo ready for a new full build. This is mainly only useful // for benchmark testing etc. via the CLI commands. 
func Reset() error { @@ -121,7 +131,7 @@ Complete documentation is available at http://gohugo.io/.`, return err } - c, err := newCommandeer(cfg) + c, err := newCommandeer(cfg, buildWatch) if err != nil { return err } @@ -149,6 +159,7 @@ var ( ) var ( + gc bool baseURL string cacheDir string contentDir string @@ -201,6 +212,7 @@ func AddCommands() { genCmd.AddCommand(genmanCmd) genCmd.AddCommand(createGenDocsHelper().cmd) genCmd.AddCommand(createGenChromaStyles().cmd) + } // initHugoBuilderFlags initializes all common flags, typically used by the @@ -240,6 +252,7 @@ func initHugoBuildCommonFlags(cmd *cobra.Command) { cmd.Flags().Bool("canonifyURLs", false, "if true, all relative URLs will be canonicalized using baseURL") cmd.Flags().StringVarP(&baseURL, "baseURL", "b", "", "hostname (and path) to the root, e.g. http://spf13.com/") cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date and author info to the pages") + cmd.Flags().BoolVar(&gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build") cmd.Flags().BoolVar(&nitro.AnalysisOn, "stepAnalysis", false, "display memory and timing of different steps of the program") cmd.Flags().Bool("templateMetrics", false, "display metrics about template executions") @@ -300,7 +313,7 @@ func InitializeConfig(subCmdVs ...*cobra.Command) (*deps.DepsCfg, error) { // Init file systems. This may be changed at a later point. cfg.Cfg = config - c, err := newCommandeer(cfg) + c, err := newCommandeer(cfg, false) if err != nil { return nil, err } @@ -482,17 +495,17 @@ func (c *commandeer) initializeFlags(cmd *cobra.Command) { "templateMetricsHints", } - // Remove these in Hugo 0.23. + // Remove these in Hugo 0.33. if cmd.Flags().Changed("disable404") { - helpers.Deprecated("command line", "--disable404", "Use --disableKinds=404", false) + helpers.Deprecated("command line", "--disable404", "Use --disableKinds=404", true) } if cmd.Flags().Changed("disableRSS") { - helpers.Deprecated("command line", "--disableRSS", "Use --disableKinds=RSS", false) + helpers.Deprecated("command line", "--disableRSS", "Use --disableKinds=RSS", true) } if cmd.Flags().Changed("disableSitemap") { - helpers.Deprecated("command line", "--disableSitemap", "Use --disableKinds=sitemap", false) + helpers.Deprecated("command line", "--disableSitemap", "Use --disableKinds=sitemap", true) } for _, key := range persFlagKeys { @@ -525,16 +538,71 @@ func (c *commandeer) watchConfig() { }) } -func (c *commandeer) build(watches ...bool) error { - if err := c.copyStatic(); err != nil { - return fmt.Errorf("Error copying static files: %s", err) +func (c *commandeer) fullBuild(watches ...bool) error { + var ( + g errgroup.Group + langCount map[string]uint64 + ) + + if !quiet { + fmt.Print(hideCursor + "Building sites … ") + defer func() { + fmt.Print(showCursor + clearLine) + }() + } + + g.Go(func() error { + cnt, err := c.copyStatic() + if err != nil { + return fmt.Errorf("Error copying static files: %s", err) + } + langCount = cnt + return nil + }) + + g.Go(func() error { + if err := c.buildSites(); err != nil { + return fmt.Errorf("Error building site: %s", err) + } + + return nil + }) + + if err := g.Wait(); err != nil { + return err + } + + for _, s := range Hugo.Sites { + s.ProcessingStats.Static = langCount[s.Language.Lang] + } + + if gc { + count, err := Hugo.GC() + if err != nil { + return err + } + for _, s := range Hugo.Sites { + // We have no way of knowing what site the garbage belonged to. 
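Aside (not part of the patch): the new fullBuild above coordinates the static-file sync and the site render with golang.org/x/sync/errgroup. A minimal, self-contained sketch of that pattern, with placeholder task bodies instead of Hugo calls:

package main

import (
	"fmt"

	"golang.org/x/sync/errgroup"
)

func main() {
	var g errgroup.Group

	// Placeholder for the static-file sync step.
	g.Go(func() error {
		fmt.Println("copying static files")
		return nil
	})

	// Placeholder for the site render step.
	g.Go(func() error {
		fmt.Println("building sites")
		return nil
	})

	// Wait returns the first non-nil error from either goroutine.
	if err := g.Wait(); err != nil {
		fmt.Println("build failed:", err)
	}
}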
+ s.ProcessingStats.Cleaned = uint64(count) + } } - watch := false - if len(watches) > 0 && watches[0] { - watch = true + + return nil + +} + +func (c *commandeer) build(watches ...bool) error { + defer c.timeTrack(time.Now(), "Total") + + if err := c.fullBuild(watches...); err != nil { + return err } - if err := c.buildSites(buildWatch || watch); err != nil { - return fmt.Errorf("Error building site: %s", err) + + // TODO(bep) Feedback? + if !quiet { + fmt.Println() + Hugo.PrintProcessingStats(os.Stdout) + fmt.Println() } if buildWatch { @@ -550,42 +618,76 @@ func (c *commandeer) build(watches ...bool) error { return nil } -func (c *commandeer) copyStatic() error { +func (c *commandeer) copyStatic() (map[string]uint64, error) { return c.doWithPublishDirs(c.copyStaticTo) } -func (c *commandeer) doWithPublishDirs(f func(dirs *src.Dirs, publishDir string) error) error { - publishDir := c.PathSpec().AbsPathify(c.Cfg.GetString("publishDir")) + helpers.FilePathSeparator - // If root, remove the second '/' - if publishDir == "//" { - publishDir = helpers.FilePathSeparator - } - - languages := c.languages() +func (c *commandeer) createStaticDirsConfig() ([]*src.Dirs, error) { + var dirsConfig []*src.Dirs + languages := c.PathSpec().Languages if !languages.IsMultihost() { dirs, err := src.NewDirs(c.Fs, c.Cfg, c.DepsCfg.Logger) if err != nil { - return err + return nil, err } - return f(dirs, publishDir) + dirsConfig = append(dirsConfig, dirs) } for _, l := range languages { - dir := filepath.Join(publishDir, l.Lang) dirs, err := src.NewDirs(c.Fs, l, c.DepsCfg.Logger) if err != nil { - return err + return nil, err } - if err := f(dirs, dir); err != nil { - return err + dirsConfig = append(dirsConfig, dirs) + } + + return dirsConfig, nil + +} + +func (c *commandeer) doWithPublishDirs(f func(dirs *src.Dirs, publishDir string) (uint64, error)) (map[string]uint64, error) { + + languages := c.PathSpec().Languages + langCount := make(map[string]uint64) + + for _, dirs := range c.staticDirsConfig { + + cnt, err := f(dirs, c.pathSpec.PublishDir) + if err != nil { + return langCount, err } + + if dirs.Language == nil { + // Not multihost + for _, l := range languages { + langCount[l.Lang] = cnt + } + } else { + langCount[dirs.Language.Lang] = cnt + } + } - return nil + return langCount, nil +} + +type countingStatFs struct { + afero.Fs + statCounter uint64 } -func (c *commandeer) copyStaticTo(dirs *src.Dirs, publishDir string) error { +func (fs *countingStatFs) Stat(name string) (os.FileInfo, error) { + f, err := fs.Fs.Stat(name) + if err == nil { + if !f.IsDir() { + atomic.AddUint64(&fs.statCounter, 1) + } + } + return f, err +} + +func (c *commandeer) copyStaticTo(dirs *src.Dirs, publishDir string) (uint64, error) { // If root, remove the second '/' if publishDir == "//" { @@ -594,18 +696,20 @@ func (c *commandeer) copyStaticTo(dirs *src.Dirs, publishDir string) error { staticSourceFs, err := dirs.CreateStaticFs() if err != nil { - return err + return 0, err } if staticSourceFs == nil { c.Logger.WARN.Println("No static directories found to sync") - return nil + return 0, nil } + fs := &countingStatFs{Fs: staticSourceFs} + syncer := fsync.NewSyncer() syncer.NoTimes = c.Cfg.GetBool("noTimes") syncer.NoChmod = c.Cfg.GetBool("noChmod") - syncer.SrcFs = staticSourceFs + syncer.SrcFs = fs syncer.DestFs = c.Fs.Destination // Now that we are using a unionFs for the static directories // We can effectively clean the publishDir on initial sync @@ -622,7 +726,20 @@ func (c *commandeer) copyStaticTo(dirs *src.Dirs, 
publishDir string) error { // because we are using a baseFs (to get the union right). // set sync src to root - return syncer.Sync(publishDir, helpers.FilePathSeparator) + err = syncer.Sync(publishDir, helpers.FilePathSeparator) + if err != nil { + return 0, err + } + + // Sync runs Stat 3 times for every source file (which sounds much) + numFiles := fs.statCounter / 3 + + return numFiles, err +} + +func (c *commandeer) timeTrack(start time.Time, name string) { + elapsed := time.Since(start) + c.Logger.FEEDBACK.Printf("%s in %v ms", name, int(1000*elapsed.Seconds())) } // getDirList provides NewWatcher() with a list of directories to watch for changes. @@ -638,84 +755,88 @@ func (c *commandeer) getDirList() ([]string, error) { layoutDir := c.PathSpec().GetLayoutDirPath() staticDirs := staticSyncer.d.AbsStaticDirs - walker := func(path string, fi os.FileInfo, err error) error { - if err != nil { - if path == dataDir && os.IsNotExist(err) { - c.Logger.WARN.Println("Skip dataDir:", err) - return nil - } + newWalker := func(allowSymbolicDirs bool) func(path string, fi os.FileInfo, err error) error { + return func(path string, fi os.FileInfo, err error) error { + if err != nil { + if path == dataDir && os.IsNotExist(err) { + c.Logger.WARN.Println("Skip dataDir:", err) + return nil + } - if path == i18nDir && os.IsNotExist(err) { - c.Logger.WARN.Println("Skip i18nDir:", err) - return nil - } + if path == i18nDir && os.IsNotExist(err) { + c.Logger.WARN.Println("Skip i18nDir:", err) + return nil + } - if path == layoutDir && os.IsNotExist(err) { - c.Logger.WARN.Println("Skip layoutDir:", err) - return nil - } + if path == layoutDir && os.IsNotExist(err) { + c.Logger.WARN.Println("Skip layoutDir:", err) + return nil + } - if os.IsNotExist(err) { - for _, staticDir := range staticDirs { - if path == staticDir && os.IsNotExist(err) { - c.Logger.WARN.Println("Skip staticDir:", err) + if os.IsNotExist(err) { + for _, staticDir := range staticDirs { + if path == staticDir && os.IsNotExist(err) { + c.Logger.WARN.Println("Skip staticDir:", err) + } } + // Ignore. + return nil } - // Ignore. + + c.Logger.ERROR.Println("Walker: ", err) return nil } - c.Logger.ERROR.Println("Walker: ", err) - return nil - } - - // Skip .git directories. - // Related to https://github.com/gohugoio/hugo/issues/3468. - if fi.Name() == ".git" { - return nil - } - - if fi.Mode()&os.ModeSymlink == os.ModeSymlink { - link, err := filepath.EvalSymlinks(path) - if err != nil { - c.Logger.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", path, err) + // Skip .git directories. + // Related to https://github.com/gohugoio/hugo/issues/3468. 
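Aside (not part of the patch): the countingStatFs introduced above is a thin afero.Fs wrapper that counts Stat calls on regular files so the sync step can report how many static files it touched. A rough standalone sketch of the same idea, assuming an in-memory afero filesystem; the type and file names here are illustrative:

package main

import (
	"fmt"
	"os"
	"sync/atomic"

	"github.com/spf13/afero"
)

// statCountingFs wraps any afero.Fs and counts Stat calls on regular files.
type statCountingFs struct {
	afero.Fs
	statCounter uint64
}

func (fs *statCountingFs) Stat(name string) (os.FileInfo, error) {
	fi, err := fs.Fs.Stat(name)
	if err == nil && !fi.IsDir() {
		atomic.AddUint64(&fs.statCounter, 1)
	}
	return fi, err
}

func main() {
	base := afero.NewMemMapFs()
	afero.WriteFile(base, "static/logo.png", []byte("fake"), 0644)

	fs := &statCountingFs{Fs: base}
	fs.Stat("static/logo.png")
	fs.Stat("static/logo.png")

	fmt.Println("stat calls:", atomic.LoadUint64(&fs.statCounter)) // 2
}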
+ if fi.Name() == ".git" { return nil } - linkfi, err := c.Fs.Source.Stat(link) - if err != nil { - c.Logger.ERROR.Printf("Cannot stat '%s', error was: %s", link, err) - return nil + + if fi.Mode()&os.ModeSymlink == os.ModeSymlink { + link, err := filepath.EvalSymlinks(path) + if err != nil { + c.Logger.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", path, err) + return nil + } + linkfi, err := c.Fs.Source.Stat(link) + if err != nil { + c.Logger.ERROR.Printf("Cannot stat '%s', error was: %s", link, err) + return nil + } + if !allowSymbolicDirs && !linkfi.Mode().IsRegular() { + c.Logger.ERROR.Printf("Symbolic links for directories not supported, skipping '%s'", path) + return nil + } + } - if !linkfi.Mode().IsRegular() { - c.Logger.ERROR.Printf("Symbolic links for directories not supported, skipping '%s'", path) + + if fi.IsDir() { + if fi.Name() == ".git" || + fi.Name() == "node_modules" || fi.Name() == "bower_components" { + return filepath.SkipDir + } + a = append(a, path) } return nil } - if fi.IsDir() { - if fi.Name() == ".git" || - fi.Name() == "node_modules" || fi.Name() == "bower_components" { - return filepath.SkipDir - } - a = append(a, path) - } - return nil } // SymbolicWalk will log anny ERRORs - _ = helpers.SymbolicWalk(c.Fs.Source, dataDir, walker) - _ = helpers.SymbolicWalk(c.Fs.Source, c.PathSpec().AbsPathify(c.Cfg.GetString("contentDir")), walker) - _ = helpers.SymbolicWalk(c.Fs.Source, i18nDir, walker) - _ = helpers.SymbolicWalk(c.Fs.Source, layoutDir, walker) + _ = helpers.SymbolicWalk(c.Fs.Source, dataDir, newWalker(false)) + _ = helpers.SymbolicWalk(c.Fs.Source, c.PathSpec().AbsPathify(c.Cfg.GetString("contentDir")), newWalker(true)) + _ = helpers.SymbolicWalk(c.Fs.Source, i18nDir, newWalker(false)) + _ = helpers.SymbolicWalk(c.Fs.Source, layoutDir, newWalker(false)) for _, staticDir := range staticDirs { - _ = helpers.SymbolicWalk(c.Fs.Source, staticDir, walker) + _ = helpers.SymbolicWalk(c.Fs.Source, staticDir, newWalker(false)) } if c.PathSpec().ThemeSet() { themesDir := c.PathSpec().GetThemeDir() - _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "layouts"), walker) - _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "i18n"), walker) - _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "data"), walker) + _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "layouts"), newWalker(false)) + _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "i18n"), newWalker(false)) + _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "data"), newWalker(false)) } return a, nil @@ -728,17 +849,17 @@ func (c *commandeer) recreateAndBuildSites(watching bool) (err error) { if !quiet { c.Logger.FEEDBACK.Println("Started building sites ...") } - return Hugo.Build(hugolib.BuildCfg{CreateSitesFromConfig: true, Watching: watching, PrintStats: !quiet}) + return Hugo.Build(hugolib.BuildCfg{CreateSitesFromConfig: true}) } -func (c *commandeer) resetAndBuildSites(watching bool) (err error) { +func (c *commandeer) resetAndBuildSites() (err error) { if err = c.initSites(); err != nil { return } if !quiet { c.Logger.FEEDBACK.Println("Started building sites ...") } - return Hugo.Build(hugolib.BuildCfg{ResetState: true, Watching: watching, PrintStats: !quiet}) + return Hugo.Build(hugolib.BuildCfg{ResetState: true}) } func (c *commandeer) initSites() error { @@ -755,14 +876,14 @@ func (c *commandeer) initSites() error { return nil } -func (c *commandeer) buildSites(watching bool) (err error) { +func (c *commandeer) 
buildSites() (err error) { if err := c.initSites(); err != nil { return err } if !quiet { - c.Logger.FEEDBACK.Println("Started building sites ...") + //c.Logger.FEEDBACK.Println("Started building sites ...") } - return Hugo.Build(hugolib.BuildCfg{Watching: watching, PrintStats: !quiet}) + return Hugo.Build(hugolib.BuildCfg{}) } func (c *commandeer) rebuildSites(events []fsnotify.Event) error { @@ -776,7 +897,7 @@ func (c *commandeer) rebuildSites(events []fsnotify.Event) error { // Make sure we always render the home page visited[home] = true } - return Hugo.Build(hugolib.BuildCfg{PrintStats: !quiet, Watching: true, RecentlyVisited: visited}, events...) + return Hugo.Build(hugolib.BuildCfg{RecentlyVisited: visited}, events...) } // newWatcher creates a new watcher to watch filesystem events. @@ -818,6 +939,20 @@ func (c *commandeer) newWatcher(serve bool, dirList ...string) error { staticEvents := []fsnotify.Event{} dynamicEvents := []fsnotify.Event{} + // Special handling for symbolic links inside /content. + for i, ev := range evs { + dir, name := filepath.Split(ev.Name) + contentMapped := Hugo.ContentChanges.GetSymbolicLinkMappings(dir) + if len(contentMapped) == 0 { + continue + } + evs = append(evs[:i], evs[i+1:]...) + for _, mapped := range contentMapped { + mappedFilename := filepath.Join(mapped, name) + evs = append(evs, fsnotify.Event{Name: mappedFilename, Op: ev.Op}) + } + } + for _, ev := range evs { ext := filepath.Ext(ev.Name) baseName := filepath.Base(ev.Name) @@ -894,7 +1029,7 @@ func (c *commandeer) newWatcher(serve bool, dirList ...string) error { if c.Cfg.GetBool("forceSyncStatic") { c.Logger.FEEDBACK.Printf("Syncing all static files\n") - err := c.copyStatic() + _, err := c.copyStatic() if err != nil { utils.StopOnErr(c.Logger, err, "Error copying static files to publish dir") } @@ -950,6 +1085,7 @@ func (c *commandeer) newWatcher(serve bool, dirList ...string) error { if onePageName != "" { p = Hugo.GetContentPage(onePageName) } + } if p != nil { diff --git a/commands/import_jekyll.go b/commands/import_jekyll.go index 3d89fee0d70..98094dbb7be 100644 --- a/commands/import_jekyll.go +++ b/commands/import_jekyll.go @@ -468,7 +468,6 @@ func convertJekyllPost(s *hugolib.Site, path, relPath, targetDir string, draft b return err } - page.SetDir(targetParentDir) page.SetSourceContent([]byte(content)) page.SetSourceMetaData(newmetadata, parser.FormatToLeadRune("yaml")) page.SaveSourceAs(targetFile) diff --git a/commands/list.go b/commands/list.go index b2a6b539564..245ec146c87 100644 --- a/commands/list.go +++ b/commands/list.go @@ -49,7 +49,7 @@ var listDraftsCmd = &cobra.Command{ return err } - c, err := newCommandeer(cfg) + c, err := newCommandeer(cfg, false) if err != nil { return err } @@ -90,7 +90,7 @@ posted in the future.`, return err } - c, err := newCommandeer(cfg) + c, err := newCommandeer(cfg, false) if err != nil { return err } @@ -131,7 +131,7 @@ expired.`, return err } - c, err := newCommandeer(cfg) + c, err := newCommandeer(cfg, false) if err != nil { return err } diff --git a/commands/new.go b/commands/new.go index b30c49aa201..b0a682e30d2 100644 --- a/commands/new.go +++ b/commands/new.go @@ -92,7 +92,7 @@ func NewContent(cmd *cobra.Command, args []string) error { return err } - c, err := newCommandeer(cfg) + c, err := newCommandeer(cfg, false) if err != nil { return err } @@ -130,7 +130,7 @@ func NewContent(cmd *cobra.Command, args []string) error { return nil, err } - if err := Hugo.Build(hugolib.BuildCfg{SkipRender: true, PrintStats: false}); err != nil { + 
if err := Hugo.Build(hugolib.BuildCfg{SkipRender: true}); err != nil { return nil, err } @@ -250,7 +250,7 @@ func NewTheme(cmd *cobra.Command, args []string) error { return newUserError("theme name needs to be provided") } - c, err := newCommandeer(cfg) + c, err := newCommandeer(cfg, false) if err != nil { return err } @@ -375,7 +375,11 @@ func newContentPathSection(path string) (string, string) { var section string // assume the first directory is the section (kind) if strings.Contains(createpath[1:], helpers.FilePathSeparator) { - section = helpers.GuessSection(createpath) + parts := strings.Split(strings.TrimPrefix(createpath, helpers.FilePathSeparator), helpers.FilePathSeparator) + if len(parts) > 0 { + section = parts[0] + } + } return createpath, section diff --git a/commands/server.go b/commands/server.go index e1cc5674957..1a9733f8aa5 100644 --- a/commands/server.go +++ b/commands/server.go @@ -115,7 +115,7 @@ func server(cmd *cobra.Command, args []string) error { return err } - c, err := newCommandeer(cfg) + c, err := newCommandeer(cfg, serverWatch) if err != nil { return err } @@ -141,7 +141,7 @@ func server(cmd *cobra.Command, args []string) error { c.watchConfig() } - languages := c.languages() + languages := c.pathSpec.Languages serverPorts := make([]int, 1) if languages.IsMultihost() { diff --git a/commands/static_syncer.go b/commands/static_syncer.go index 98b745e4c0e..49b118bd16a 100644 --- a/commands/static_syncer.go +++ b/commands/static_syncer.go @@ -44,15 +44,15 @@ func (s *staticSyncer) isStatic(path string) bool { func (s *staticSyncer) syncsStaticEvents(staticEvents []fsnotify.Event) error { c := s.c - syncFn := func(dirs *src.Dirs, publishDir string) error { + syncFn := func(dirs *src.Dirs, publishDir string) (uint64, error) { staticSourceFs, err := dirs.CreateStaticFs() if err != nil { - return err + return 0, err } if staticSourceFs == nil { c.Logger.WARN.Println("No static directories found to sync") - return nil + return 0, nil } syncer := fsync.NewSyncer() @@ -127,9 +127,10 @@ func (s *staticSyncer) syncsStaticEvents(staticEvents []fsnotify.Event) error { } } - return nil + return 0, nil } - return c.doWithPublishDirs(syncFn) + _, err := c.doWithPublishDirs(syncFn) + return err } diff --git a/create/content_template_handler.go b/create/content_template_handler.go index d73c52a243f..0a444abf65c 100644 --- a/create/content_template_handler.go +++ b/create/content_template_handler.go @@ -45,7 +45,7 @@ type ArchetypeFileData struct { // The target content file. Note that the .Content will be empty, as that // has not been created yet. 
- *source.File + source.File } const ( @@ -82,7 +82,7 @@ func executeArcheTypeAsTemplate(s *hugolib.Site, kind, targetPath, archetypeFile ) sp := source.NewSourceSpec(s.Deps.Cfg, s.Deps.Fs) - f := sp.NewFile(targetPath) + f := sp.NewFileInfo("", targetPath, nil) data := ArchetypeFileData{ Type: kind, diff --git a/deps/deps.go b/deps/deps.go index a7d77b5b10f..6136bc993b2 100644 --- a/deps/deps.go +++ b/deps/deps.go @@ -10,6 +10,7 @@ import ( "github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/metrics" "github.com/gohugoio/hugo/output" + "github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/tpl" jww "github.com/spf13/jwalterweatherman" ) @@ -33,6 +34,9 @@ type Deps struct { // The ContentSpec to use *helpers.ContentSpec `json:"-"` + // The SourceSpec to use + SourceSpec *source.SourceSpec `json:"-"` + // The configuration to use Cfg config.Provider `json:"-"` @@ -122,6 +126,8 @@ func New(cfg DepsCfg) (*Deps, error) { return nil, err } + sp := source.NewSourceSpec(cfg.Language, fs) + d := &Deps{ Fs: fs, Log: logger, @@ -130,6 +136,7 @@ func New(cfg DepsCfg) (*Deps, error) { WithTemplate: cfg.WithTemplate, PathSpec: ps, ContentSpec: contentSpec, + SourceSpec: sp, Cfg: cfg.Language, Language: cfg.Language, } @@ -194,4 +201,7 @@ type DepsCfg struct { // i18n handling. TranslationProvider ResourceProvider + + // Whether we are in watch (server) mode + Watching bool } diff --git a/helpers/content.go b/helpers/content.go index ca93d7d9e4c..f2cfc9b0ffa 100644 --- a/helpers/content.go +++ b/helpers/content.go @@ -47,6 +47,10 @@ type ContentSpec struct { // SummaryLength is the length of the summary that Hugo extracts from a content. summaryLength int + BuildFuture bool + BuildExpired bool + BuildDrafts bool + Highlight func(code, lang, optsStr string) (string, error) defatultPygmentsOpts map[string]string @@ -62,6 +66,9 @@ func NewContentSpec(cfg config.Provider) (*ContentSpec, error) { footnoteAnchorPrefix: cfg.GetString("footnoteAnchorPrefix"), footnoteReturnLinkContents: cfg.GetString("footnoteReturnLinkContents"), summaryLength: cfg.GetInt("summaryLength"), + BuildFuture: cfg.GetBool("buildFuture"), + BuildExpired: cfg.GetBool("buildExpired"), + BuildDrafts: cfg.GetBool("buildDrafts"), cfg: cfg, } diff --git a/helpers/content_test.go b/helpers/content_test.go index c10ad881ba7..2f8884c55f2 100644 --- a/helpers/content_test.go +++ b/helpers/content_test.go @@ -19,9 +19,12 @@ import ( "strings" "testing" + "github.com/spf13/viper" + "github.com/miekg/mmark" "github.com/russross/blackfriday" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) const tstHTMLContent = "
This is some text.
And some more.
some content
\n"}, - {filepath.FromSlash("public/sect/doc2.html"), "more content"}, - {filepath.FromSlash("public/sect/doc3.html"), "\n\nsome content
\n"}, - {filepath.FromSlash("public/sect/doc3/img1.png"), string([]byte("‰PNG ��� IHDR����������:~›U��� IDATWcø��ZMoñ����IEND®B`‚"))}, - {filepath.FromSlash("public/sect/img2.gif"), string([]byte("GIF89a��€��ÿÿÿ���,�������D�;"))}, - {filepath.FromSlash("public/sect/img2.spf"), string([]byte("****FAKE-FILETYPE****"))}, - {filepath.FromSlash("public/doc7.html"), "doc7 content"}, - {filepath.FromSlash("public/sect/doc8.html"), "\n\nsome content
\n"}, - } - - for _, test := range tests { - file, err := fs.Destination.Open(test.doc) - if err != nil { - t.Fatalf("Did not find %s in target.", test.doc) - } - - content := helpers.ReaderToString(file) - - if content != test.expected { - t.Errorf("%s content expected:\n%q\ngot:\n%q", test.doc, test.expected, content) - } - } - -} diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go index bf488b9be75..088f35b7a0b 100644 --- a/hugolib/hugo_sites.go +++ b/hugolib/hugo_sites.go @@ -15,10 +15,13 @@ package hugolib import ( "errors" + "io" + "path/filepath" + "sort" "strings" "sync" - "path/filepath" + "github.com/gohugoio/hugo/resource" "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/helpers" @@ -32,20 +35,40 @@ import ( type HugoSites struct { Sites []*Site - runMode runmode - multilingual *Multilingual // Multihost is set if multilingual and baseURL set on the language level. multihost bool + // If this is built in "watch mode". + watching bool + *deps.Deps + + // Keeps track of bundle directories and symlinks to enable partial rebuilding. + ContentChanges *contentChangeMap } func (h *HugoSites) IsMultihost() bool { return h != nil && h.multihost } +func (h *HugoSites) PrintProcessingStats(w io.Writer) { + stats := make([]*helpers.ProcessingStats, len(h.Sites)) + for i := 0; i < len(h.Sites); i++ { + stats[i] = h.Sites[i].PathSpec.ProcessingStats + } + helpers.ProcessingStatsTable(w, stats...) +} + +func (h *HugoSites) langSite() map[string]*Site { + m := make(map[string]*Site) + for _, s := range h.Sites { + m[s.Language.Lang] = s + } + return m +} + // GetContentPage finds a Page with content given the absolute filename. // Returns nil if none found. func (h *HugoSites) GetContentPage(filename string) *Page { @@ -58,12 +81,17 @@ func (h *HugoSites) GetContentPage(filename string) *Page { rel := strings.TrimPrefix(filename, contendDir) rel = strings.TrimPrefix(rel, helpers.FilePathSeparator) - pos := s.rawAllPages.findPagePosByFilePath(rel) + for _, s := range h.Sites { - if pos == -1 { - return nil + pos := s.rawAllPages.findPagePosByFilePath(rel) + + if pos == -1 { + continue + } + return s.rawAllPages[pos] } - return s.rawAllPages[pos] + + return nil } @@ -81,10 +109,19 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) { return nil, err } + var contentChangeTracker *contentChangeMap + + // Only needed in server mode. + if cfg.Watching { + contentChangeTracker = &contentChangeMap{symContentDirs: make(map[string]map[string]bool)} + } + h := &HugoSites{ - multilingual: langConfig, - multihost: cfg.Cfg.GetBool("multihost"), - Sites: sites} + watching: cfg.Watching, + multilingual: langConfig, + multihost: cfg.Cfg.GetBool("multihost"), + ContentChanges: contentChangeTracker, + Sites: sites} for _, s := range sites { s.owner = h @@ -143,6 +180,10 @@ func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error { d.OutputFormatsConfig = s.outputFormatsConfig s.Deps = d } + s.resourceSpec, err = resource.NewSpec(s.Deps.PathSpec, s.mediaTypesConfig) + if err != nil { + return err + } } @@ -258,10 +299,6 @@ func (h *HugoSites) toSiteInfos() []*SiteInfo { // BuildCfg holds build options used to, as an example, skip the render step. type BuildCfg struct { - // Whether we are in watch (server) mode - Watching bool - // Print build stats at the end of a build - PrintStats bool // Reset site state before build. Use to force full rebuilds. ResetState bool // Re-creates the sites from configuration before a build. 
@@ -304,11 +341,12 @@ func (h *HugoSites) renderCrossSitesArtifacts() error { smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"} - return s.renderAndWriteXML("sitemapindex", + return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex", sitemapDefault.Filename, h.toSiteInfos(), s.appendThemeTemplates(smLayouts)...) } func (h *HugoSites) assignMissingTranslations() error { + // This looks heavy, but it should be a small number of nodes by now. allPages := h.findAllPagesByKindNotIn(KindPage) for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} { @@ -427,73 +465,57 @@ func (h *HugoSites) createMissingPages() error { return nil } -func (s *Site) assignSiteByLanguage(p *Page) { - - pageLang := p.Lang() - - if pageLang == "" { - panic("Page language missing: " + p.Title) +func (h *HugoSites) removePageByPathPrefix(path string) { + for _, s := range h.Sites { + s.removePageByPathPrefix(path) } +} - for _, site := range s.owner.Sites { - if strings.HasPrefix(site.Language.Lang, pageLang) { - p.s = site - p.Site = &site.Info - return - } +func (h *HugoSites) removePageByPath(path string) { + for _, s := range h.Sites { + s.removePageByPath(path) } - } func (h *HugoSites) setupTranslations() { - - master := h.Sites[0] - - for _, p := range master.rawAllPages { - if p.Lang() == "" { - panic("Page language missing: " + p.Title) - } - - if p.Kind == kindUnknown { - p.Kind = p.s.kindFromSections(p.sections) - } - - if !p.s.isEnabled(p.Kind) { - continue - } - - shouldBuild := p.shouldBuild() - - for i, site := range h.Sites { - // The site is assigned by language when read. - if site == p.s { - site.updateBuildStats(p) - if shouldBuild { - site.Pages = append(site.Pages, p) - } + for _, s := range h.Sites { + for _, p := range s.rawAllPages { + if p.Kind == kindUnknown { + p.Kind = p.s.kindFromSections(p.sections) } - if !shouldBuild { + if !p.s.isEnabled(p.Kind) { continue } - if i == 0 { - site.AllPages = append(site.AllPages, p) + shouldBuild := p.shouldBuild() + s.updateBuildStats(p) + if shouldBuild { + s.Pages = append(s.Pages, p) } } + } + + allPages := make(Pages, 0) + for _, s := range h.Sites { + allPages = append(allPages, s.Pages...) + } + + allPages.Sort() + + for _, s := range h.Sites { + s.AllPages = allPages } // Pull over the collections from the master site for i := 1; i < len(h.Sites); i++ { - h.Sites[i].AllPages = h.Sites[0].AllPages h.Sites[i].Data = h.Sites[0].Data } if len(h.Sites) > 1 { - pages := h.Sites[0].AllPages - allTranslations := pagesToTranslationsMap(pages) - assignTranslationsToPages(allTranslations, pages) + allTranslations := pagesToTranslationsMap(allPages) + assignTranslationsToPages(allTranslations, allPages) } } @@ -501,6 +523,7 @@ func (s *Site) preparePagesForRender(cfg *BuildCfg) { pageChan := make(chan *Page) wg := &sync.WaitGroup{} + numWorkers := getGoMaxProcs() * 4 for i := 0; i < numWorkers; i++ { @@ -508,77 +531,10 @@ func (s *Site) preparePagesForRender(cfg *BuildCfg) { go func(pages <-chan *Page, wg *sync.WaitGroup) { defer wg.Done() for p := range pages { - if !p.shouldRenderTo(s.rc.Format) { - // No need to prepare - continue - } - var shortcodeUpdate bool - if p.shortcodeState != nil { - shortcodeUpdate = p.shortcodeState.updateDelta() - } - - if !shortcodeUpdate && !cfg.whatChanged.other && p.rendered { - // No need to process it again. 
- continue - } - - // If we got this far it means that this is either a new Page pointer - // or a template or similar has changed so wee need to do a rerendering - // of the shortcodes etc. - - // Mark it as rendered - p.rendered = true - - // If in watch mode or if we have multiple output formats, - // we need to keep the original so we can - // potentially repeat this process on rebuild. - needsACopy := cfg.Watching || len(p.outputFormats) > 1 - var workContentCopy []byte - if needsACopy { - workContentCopy = make([]byte, len(p.workContent)) - copy(workContentCopy, p.workContent) - } else { - // Just reuse the same slice. - workContentCopy = p.workContent - } - - if p.Markup == "markdown" { - tmpContent, tmpTableOfContents := helpers.ExtractTOC(workContentCopy) - p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents) - workContentCopy = tmpContent - } - - var err error - if workContentCopy, err = handleShortcodes(p, workContentCopy); err != nil { - s.Log.ERROR.Printf("Failed to handle shortcodes for page %s: %s", p.BaseFileName(), err) - } - - if p.Markup != "html" { - - // Now we know enough to create a summary of the page and count some words - summaryContent, err := p.setUserDefinedSummaryIfProvided(workContentCopy) - - if err != nil { - s.Log.ERROR.Printf("Failed to set user defined summary for page %q: %s", p.Path(), err) - } else if summaryContent != nil { - workContentCopy = summaryContent.content - } + if err := p.prepareForRender(cfg); err != nil { + s.Log.ERROR.Printf("Failed to prepare page %q for render: %s", p.BaseFileName(), err) - p.Content = helpers.BytesToHTML(workContentCopy) - - if summaryContent == nil { - if err := p.setAutoSummary(); err != nil { - s.Log.ERROR.Printf("Failed to set user auto summary for page %q: %s", p.pathOrTitle(), err) - } - } - - } else { - p.Content = helpers.BytesToHTML(workContentCopy) } - - //analyze for raw stats - p.analyzePage() - } }(pageChan, wg) } @@ -646,3 +602,112 @@ func (h *HugoSites) findAllPagesByKind(kind string) Pages { func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages { return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages) } + +func (h *HugoSites) findPagesByShortcode(shortcode string) Pages { + var pages Pages + for _, s := range h.Sites { + pages = append(pages, s.findPagesByShortcode(shortcode)...) + } + return pages +} + +// Used in partial reloading to determine if the change is in a bundle. +type contentChangeMap struct { + mu sync.RWMutex + branches []string + leafs []string + + // Hugo supports symlinked content (both directories and files). This + // can lead to situations where the same file can be referenced from several + // locations in /content -- which is really cool, but also means we have to + // go an extra mile to handle changes. + // This map is only used in watch mode. + // It maps the real dir to a set of content directories where it is in use. + symContentDirs map[string]map[string]bool + symContentDirsMu sync.Mutex +} + +func (m *contentChangeMap) add(filename string, tp bundleDirType) { + m.mu.Lock() + dir := filepath.Dir(filename) + switch tp { + case bundleBranch: + m.branches = append(m.branches, dir) + case bundleLeaf: + m.leafs = append(m.leafs, dir) + default: + panic("invalid bundle type") + } + m.mu.Unlock() +} + +// Track the addition of bundle dirs. 
+func (m *contentChangeMap) handleBundles(b *bundleDirs) { + for _, bd := range b.bundles { + m.add(bd.fi.Filename(), bd.tp) + } +} + +// resolveAndRemove resolves the given filename to the root folder of a bundle, if relevant. +// It also removes the entry from the map. It will be re-added again by the partial +// build if it still is a bundle. +func (m *contentChangeMap) resolveAndRemove(filename string) (string, bundleDirType) { + m.mu.RLock() + defer m.mu.RUnlock() + + dir, name := filepath.Split(filename) + fileTp, isContent := classifyBundledFile(name) + + // If the file itself is a bundle, no need to look further: + if fileTp > bundleNot { + return dir, fileTp + } + + // This may be a member of a bundle. Start with branch bundles, the most specific. + if !isContent { + for i, b := range m.branches { + if b == dir { + m.branches = append(m.branches[:i], m.branches[i+1:]...) + return dir, bundleBranch + } + } + } + + // And finally the leaf bundles, which can contain anything. + for i, l := range m.leafs { + if strings.HasPrefix(dir, l) { + m.leafs = append(m.leafs[:i], m.leafs[i+1:]...) + return dir, bundleLeaf + } + } + + // Not part of any bundle + return filename, bundleNot +} + +func (m *contentChangeMap) addSymbolicLinkDirMapping(from, to string) { + m.symContentDirsMu.Lock() + mm, found := m.symContentDirs[from] + if !found { + mm = make(map[string]bool) + m.symContentDirs[from] = mm + } + mm[to] = true + m.symContentDirsMu.Unlock() +} + +func (m *contentChangeMap) GetSymbolicLinkMappings(dir string) []string { + mm, found := m.symContentDirs[dir] + if !found { + return nil + } + dirs := make([]string, len(mm)) + i := 0 + for dir, _ := range mm { + dirs[i] = dir + i++ + } + + sort.Strings(dirs) + return dirs +} diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go index c0749e388c9..4f6d55ae171 100644 --- a/hugolib/hugo_sites_build.go +++ b/hugolib/hugo_sites_build.go @@ -15,7 +15,6 @@ package hugolib import ( "bytes" - "time" "errors" @@ -30,7 +29,7 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { h.Metrics.Reset() } - t0 := time.Now() + //t0 := time.Now() // Need a pointer as this may be modified. conf := &config @@ -63,10 +62,6 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error { return err } - if config.PrintStats { - h.Log.FEEDBACK.Printf("total in %v ms\n", int(1000*time.Since(t0).Seconds())) - } - if h.Metrics != nil { var b bytes.Buffer h.Metrics.WriteMetrics(&b) @@ -101,8 +96,6 @@ func (h *HugoSites) init(config *BuildCfg) error { } } - h.runMode.Watching = config.Watching - return nil } @@ -115,12 +108,10 @@ func (h *HugoSites) initRebuild(config *BuildCfg) error { return errors.New("Rebuild does not support 'ResetState'.") } - if !config.Watching { + if !h.watching { return errors.New("Rebuild called when not in watch mode") } - h.runMode.Watching = config.Watching - if config.whatChanged.source { // This is for the non-renderable content pages (rarely used, I guess). 
// We could maybe detect if this is really needed, but it should be @@ -147,7 +138,7 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error { if len(events) > 0 { // This is a rebuild - changed, err := firstSite.reProcess(events) + changed, err := firstSite.processPartial(events) config.whatChanged = &changed return err } @@ -188,25 +179,19 @@ func (h *HugoSites) assemble(config *BuildCfg) error { } for _, s := range h.Sites { - s.siteStats = &siteStats{} for _, p := range s.Pages { // May have been set in front matter if len(p.outputFormats) == 0 { p.outputFormats = s.outputFormats[p.Kind] } - - cnt := len(p.outputFormats) - if p.Kind == KindPage { - s.siteStats.pageCountRegular += cnt + for _, r := range p.Resources.ByType(pageResourceType) { + r.(*Page).outputFormats = p.outputFormats } - s.siteStats.pageCount += cnt - if err := p.initTargetPathDescriptor(); err != nil { - return err - } - if err := p.initURLs(); err != nil { + if err := p.initPaths(); err != nil { return err } + } s.assembleMenus() s.refreshPageCaches() @@ -222,7 +207,6 @@ func (h *HugoSites) assemble(config *BuildCfg) error { } func (h *HugoSites) render(config *BuildCfg) error { - for _, s := range h.Sites { s.initRenderFormats() for i, rf := range s.renderFormats { @@ -235,10 +219,6 @@ func (h *HugoSites) render(config *BuildCfg) error { } } } - - if !config.SkipRender && config.PrintStats { - s.Stats() - } } if !config.SkipRender { diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go index 6b3522d6fb3..e7ea9c018ed 100644 --- a/hugolib/hugo_sites_build_test.go +++ b/hugolib/hugo_sites_build_test.go @@ -16,7 +16,6 @@ import ( "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs" - "github.com/gohugoio/hugo/source" "github.com/spf13/afero" "github.com/spf13/viper" "github.com/stretchr/testify/require" @@ -26,6 +25,7 @@ type testSiteConfig struct { DefaultContentLanguage string DefaultContentLanguageInSubdir bool Fs afero.Fs + Watching bool } func TestMultiSitesMainLangInRoot(t *testing.T) { @@ -238,7 +238,6 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { if len(enSite.RegularPages) != 4 { t.Fatal("Expected 4 english pages") } - require.Len(t, enSite.Source.Files(), 14, "should have 13 source files") require.Len(t, enSite.AllPages, 28, "should have 28 total pages (including translations and index types)") doc1en := enSite.RegularPages[0] @@ -401,12 +400,11 @@ func TestMultiSitesRebuild(t *testing.T) { if !isCI() { defer leaktest.CheckTimeout(t, 30*time.Second)() } - siteConfig := testSiteConfig{Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: true} + siteConfig := testSiteConfig{Watching: true, Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: true} sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate) fs := sites.Fs - cfg := BuildCfg{Watching: true} th := testHelper{sites.Cfg, fs, t} - + cfg := BuildCfg{} err := sites.Build(cfg) if err != nil { @@ -447,7 +445,7 @@ func TestMultiSitesRebuild(t *testing.T) { // * Change language file { nil, - []fsnotify.Event{{Name: "content/sect/doc2.en.md", Op: fsnotify.Remove}}, + []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc2.en.md"), Op: fsnotify.Remove}}, func(t *testing.T) { require.Len(t, enSite.RegularPages, 3, "1 en removed") @@ -467,9 +465,9 @@ func TestMultiSitesRebuild(t *testing.T) { writeNewContentFile(t, fs, "new_fr_1", 
"2016-07-30", "content/new1.fr.md", 10) }, []fsnotify.Event{ - {Name: "content/new1.en.md", Op: fsnotify.Create}, - {Name: "content/new2.en.md", Op: fsnotify.Create}, - {Name: "content/new1.fr.md", Op: fsnotify.Create}, + {Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Create}, + {Name: filepath.FromSlash("content/new2.en.md"), Op: fsnotify.Create}, + {Name: filepath.FromSlash("content/new1.fr.md"), Op: fsnotify.Create}, }, func(t *testing.T) { require.Len(t, enSite.RegularPages, 5) @@ -490,7 +488,7 @@ func TestMultiSitesRebuild(t *testing.T) { doc1 += "CHANGED" writeSource(t, fs, p, doc1) }, - []fsnotify.Event{{Name: "content/sect/doc1.en.md", Op: fsnotify.Write}}, + []fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}}, func(t *testing.T) { require.Len(t, enSite.RegularPages, 5) doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html") @@ -506,8 +504,8 @@ func TestMultiSitesRebuild(t *testing.T) { } }, []fsnotify.Event{ - {Name: "content/new1renamed.en.md", Op: fsnotify.Rename}, - {Name: "content/new1.en.md", Op: fsnotify.Rename}, + {Name: filepath.FromSlash("content/new1renamed.en.md"), Op: fsnotify.Rename}, + {Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Rename}, }, func(t *testing.T) { require.Len(t, enSite.RegularPages, 5, "Rename") @@ -523,7 +521,7 @@ func TestMultiSitesRebuild(t *testing.T) { templateContent += "{{ print \"Template Changed\"}}" writeSource(t, fs, template, templateContent) }, - []fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}}, + []fsnotify.Event{{Name: filepath.FromSlash("layouts/_default/single.html"), Op: fsnotify.Write}}, func(t *testing.T) { require.Len(t, enSite.RegularPages, 5) require.Len(t, enSite.AllPages, 30) @@ -540,7 +538,7 @@ func TestMultiSitesRebuild(t *testing.T) { langContent = strings.Replace(langContent, "Bonjour", "Salut", 1) writeSource(t, fs, languageFile, langContent) }, - []fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}}, + []fsnotify.Event{{Name: filepath.FromSlash("i18n/fr.yaml"), Op: fsnotify.Write}}, func(t *testing.T) { require.Len(t, enSite.RegularPages, 5) require.Len(t, enSite.AllPages, 30) @@ -563,7 +561,7 @@ func TestMultiSitesRebuild(t *testing.T) { writeSource(t, fs, "layouts/shortcodes/shortcode.html", "Modified Shortcode: {{ i18n \"hello\" }}") }, []fsnotify.Event{ - {Name: "layouts/shortcodes/shortcode.html", Op: fsnotify.Write}, + {Name: filepath.FromSlash("layouts/shortcodes/shortcode.html"), Op: fsnotify.Write}, }, func(t *testing.T) { require.Len(t, enSite.RegularPages, 5) @@ -1097,16 +1095,16 @@ hello: } // Sources - sources := []source.ByteSource{ - {Name: filepath.FromSlash("root.en.md"), Content: []byte(`--- + sources := [][2]string{ + {filepath.FromSlash("root.en.md"), `--- title: root weight: 10000 slug: root publishdate: "2000-01-01" --- # root -`)}, - {Name: filepath.FromSlash("sect/doc1.en.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("sect/doc1.en.md"), `--- title: doc1 weight: 1 slug: doc1-slug @@ -1122,8 +1120,8 @@ publishdate: "2000-01-01" {{< lingo >}} NOTE: slug should be used as URL -`)}, - {Name: filepath.FromSlash("sect/doc1.fr.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("sect/doc1.fr.md"), `--- title: doc1 weight: 1 plaques: @@ -1140,8 +1138,8 @@ publishdate: "2000-01-04" NOTE: should be in the 'en' Page's 'Translations' field. 
NOTE: date is after "doc3" -`)}, - {Name: filepath.FromSlash("sect/doc2.en.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("sect/doc2.en.md"), `--- title: doc2 weight: 2 publishdate: "2000-01-02" @@ -1149,8 +1147,8 @@ publishdate: "2000-01-02" # doc2 *some content* NOTE: without slug, "doc2" should be used, without ".en" as URL -`)}, - {Name: filepath.FromSlash("sect/doc3.en.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("sect/doc3.en.md"), `--- title: doc3 weight: 3 publishdate: "2000-01-03" @@ -1163,8 +1161,8 @@ url: /superbob # doc3 *some content* NOTE: third 'en' doc, should trigger pagination on home page. -`)}, - {Name: filepath.FromSlash("sect/doc4.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("sect/doc4.md"), `--- title: doc4 weight: 4 plaques: @@ -1175,8 +1173,8 @@ publishdate: "2000-01-05" *du contenu francophone* NOTE: should use the defaultContentLanguage and mark this doc as 'fr'. NOTE: doesn't have any corresponding translation in 'en' -`)}, - {Name: filepath.FromSlash("other/doc5.fr.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("other/doc5.fr.md"), `--- title: doc5 weight: 5 publishdate: "2000-01-06" @@ -1184,45 +1182,45 @@ publishdate: "2000-01-06" # doc5 *autre contenu francophone* NOTE: should use the "permalinks" configuration with :filename -`)}, +`}, // Add some for the stats - {Name: filepath.FromSlash("stats/expired.fr.md"), Content: []byte(`--- + {filepath.FromSlash("stats/expired.fr.md"), `--- title: expired publishdate: "2000-01-06" expiryDate: "2001-01-06" --- # Expired -`)}, - {Name: filepath.FromSlash("stats/future.fr.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("stats/future.fr.md"), `--- title: future weight: 6 publishdate: "2100-01-06" --- # Future -`)}, - {Name: filepath.FromSlash("stats/expired.en.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("stats/expired.en.md"), `--- title: expired weight: 7 publishdate: "2000-01-06" expiryDate: "2001-01-06" --- # Expired -`)}, - {Name: filepath.FromSlash("stats/future.en.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("stats/future.en.md"), `--- title: future weight: 6 publishdate: "2100-01-06" --- # Future -`)}, - {Name: filepath.FromSlash("stats/draft.en.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("stats/draft.en.md"), `--- title: expired publishdate: "2000-01-06" draft: true --- # Draft -`)}, - {Name: filepath.FromSlash("stats/tax.nn.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("stats/tax.nn.md"), `--- title: Tax NN weight: 8 publishdate: "2000-01-06" @@ -1231,8 +1229,8 @@ lag: - Sogndal --- # Tax NN -`)}, - {Name: filepath.FromSlash("stats/tax.nb.md"), Content: []byte(`--- +`}, + {filepath.FromSlash("stats/tax.nb.md"), `--- title: Tax NB weight: 8 publishdate: "2000-01-06" @@ -1241,7 +1239,7 @@ lag: - Sogndal --- # Tax NB -`)}, +`}, } configFile := "multilangconfig." + configSuffix @@ -1252,10 +1250,8 @@ lag: fs := hugofs.NewFrom(mf, cfg) - // Hugo support using ByteSource's directly (for testing), - // but to make it more real, we write them to the mem file system. 
for _, s := range sources { - if err := afero.WriteFile(mf, filepath.Join("content", s.Name), s.Content, 0755); err != nil { + if err := afero.WriteFile(mf, filepath.Join("content", s[0]), []byte(s[1]), 0755); err != nil { t.Fatalf("Failed to write file: %s", err) } } @@ -1263,7 +1259,7 @@ lag: // Add some data writeSource(t, fs, "data/hugo.toml", "slogan = \"Hugo Rocks!\"") - sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg}) //, Logger: newDebugLogger()}) + sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg, Watching: siteConfig.Watching}) //, Logger: newDebugLogger()}) if err != nil { t.Fatalf("Failed to create sites: %s", err) @@ -1311,7 +1307,7 @@ func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string { b, err := afero.ReadFile(fs, filename) if err != nil { // Print some debug info - root := strings.Split(filename, helpers.FilePathSeparator)[0] + root := "/" //strings.Split(filename, helpers.FilePathSeparator)[0] afero.Walk(fs, root, func(path string, info os.FileInfo, err error) error { if info != nil && !info.IsDir() { fmt.Println(" ", path) diff --git a/hugolib/hugo_sites_multihost_test.go b/hugolib/hugo_sites_multihost_test.go index af96c766d9d..fd4d514a884 100644 --- a/hugolib/hugo_sites_multihost_test.go +++ b/hugolib/hugo_sites_multihost_test.go @@ -47,13 +47,12 @@ languageName = "Nynorsk" ` - siteConfig := testSiteConfig{Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: false} + siteConfig := testSiteConfig{Watching: true, Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: false} sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate) fs := sites.Fs - cfg := BuildCfg{Watching: true} th := testHelper{sites.Cfg, fs, t} assert := require.New(t) - + cfg := BuildCfg{} err := sites.Build(cfg) assert.NoError(err) diff --git a/hugolib/hugolib.debug b/hugolib/hugolib.debug new file mode 100644 index 00000000000..fa1575f5824 Binary files /dev/null and b/hugolib/hugolib.debug differ diff --git a/hugolib/menu_old_test.go b/hugolib/menu_old_test.go deleted file mode 100644 index 65db315880e..00000000000 --- a/hugolib/menu_old_test.go +++ /dev/null @@ -1,654 +0,0 @@ -// Copyright 2016 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package hugolib - -// TODO(bep) remove this file when the reworked tests in menu_test.go is done. -// NOTE: Do not add more tests to this file! - -import ( - "fmt" - "strings" - "testing" - - "github.com/gohugoio/hugo/deps" - - "path/filepath" - - "github.com/BurntSushi/toml" - "github.com/gohugoio/hugo/source" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -const ( - confMenu1 = ` -[[menu.main]] - name = "Go Home" - url = "/" - weight = 1 - pre = "Tags: 2
\n"}, } - sources := make([]source.ByteSource, len(tests)) + sources := make([][2]string, len(tests)) for i, test := range tests { - sources[i] = source.ByteSource{Name: filepath.FromSlash(test.contentPath), Content: []byte(test.content)} + sources[i] = [2]string{filepath.FromSlash(test.contentPath), test.content} } addTemplates := func(templ tpl.TemplateHandler) error { diff --git a/hugolib/site.go b/hugolib/site.go index 936584580f4..a86ada4a318 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -25,16 +25,18 @@ import ( "sort" "strconv" "strings" - "sync" "time" + "github.com/gohugoio/hugo/resource" + + "golang.org/x/sync/errgroup" + "github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/media" "github.com/markbates/inflect" - - "sync/atomic" + "golang.org/x/net/context" "github.com/fsnotify/fsnotify" bp "github.com/gohugoio/hugo/bufferpool" @@ -81,7 +83,6 @@ type Site struct { *PageCollections - Files []*source.File Taxonomies TaxonomyList // Plural is what we get in the folder, so keep track of this mapping @@ -93,7 +94,6 @@ type Site struct { // is set. taxonomiesOrigKey map[string]string - Source source.Input Sections Taxonomy Info SiteInfo Menus Menus @@ -104,8 +104,9 @@ type Site struct { draftCount int futureCount int expiredCount int - Data map[string]interface{} - Language *helpers.Language + + Data map[string]interface{} + Language *helpers.Language disabledKinds map[string]bool @@ -131,14 +132,13 @@ type Site struct { renderFormats output.Formats // Logger etc. - *deps.Deps `json:"-"` + *deps.Deps `json:"-"` + resourceSpec *resource.Spec // The func used to title case titles. titleFunc func(s string) string relatedDocsHandler *relatedDocsHandler - - siteStats *siteStats } type siteRenderingContext struct { @@ -161,11 +161,6 @@ func (s *Site) initRenderFormats() { s.renderFormats = formats } -type siteStats struct { - pageCount int - pageCountRegular int -} - func (s *Site) isEnabled(kind string) bool { if kind == kindUnknown { panic("Unknown kind") @@ -183,6 +178,7 @@ func (s *Site) reset() *Site { outputFormats: s.outputFormats, outputFormatsConfig: s.outputFormatsConfig, mediaTypesConfig: s.mediaTypesConfig, + resourceSpec: s.resourceSpec, Language: s.Language, owner: s.owner, PageCollections: newPageCollections()} @@ -342,20 +338,10 @@ func NewSiteForCfg(cfg deps.DepsCfg) (*Site, error) { } type SiteInfo struct { - // atomic requires 64-bit alignment for struct field access - // According to the docs, " The first word in a global variable or in an - // allocated struct or slice can be relied upon to be 64-bit aligned." - // Moving paginationPageCount to the top of this struct didn't do the - // magic, maybe due to the way SiteInfo is embedded. - // Adding the 4 byte padding below does the trick. 
- _ [4]byte - paginationPageCount uint64 - Taxonomies TaxonomyList Authors AuthorList Social SiteSocial *PageCollections - Files *[]*source.File Menus *Menus Hugo *HugoInfo Title string @@ -385,6 +371,11 @@ type SiteInfo struct { sectionPagesMenu string } +func (s *SiteInfo) Files() []source.File { + helpers.Deprecated(".Site", "Files", "", true) + return nil +} + func (s *SiteInfo) String() string { return fmt.Sprintf("Site(%q)", s.Title) } @@ -530,16 +521,8 @@ func (s *SiteInfo) RelRef(ref string, page *Page, options ...string) (string, er return s.refLink(ref, page, true, outputFormat) } -func (s *SiteInfo) addToPaginationPageCount(cnt uint64) { - atomic.AddUint64(&s.paginationPageCount, cnt) -} - -type runmode struct { - Watching bool -} - func (s *Site) running() bool { - return s.owner.runMode.Watching + return s.owner.watching } func init() { @@ -569,15 +552,19 @@ func (s *Site) RegisterMediaTypes() { // reBuild partially rebuilds a site given the filesystem events. // It returns whetever the content source was changed. -func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) { +// TODO(bep) clean up/rewrite this method. +func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) { s.Log.DEBUG.Printf("Rebuild for events %q", events) + h := s.owner + s.timerStep("initialize rebuild") // First we need to determine what changed sourceChanged := []fsnotify.Event{} sourceReallyChanged := []fsnotify.Event{} + var contentFilesChanged []string tmplChanged := []fsnotify.Event{} dataChanged := []fsnotify.Event{} i18nChanged := []fsnotify.Event{} @@ -647,90 +634,34 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) { } } - // If a content file changes, we need to reload only it and re-render the entire site. - - // First step is to read the changed files and (re)place them in site.AllPages - // This includes processing any meta-data for that content - - // The second step is to convert the content into HTML - // This includes processing any shortcodes that may be present. - - // We do this in parallel... even though it's likely only one file at a time. - // We need to process the reading prior to the conversion for each file, but - // we can convert one file while another one is still reading. - errs := make(chan error, 2) - readResults := make(chan HandledResult) - filechan := make(chan *source.File) - convertResults := make(chan HandledResult) - pageChan := make(chan *Page) - fileConvChan := make(chan *source.File) - coordinator := make(chan bool) - - wg := &sync.WaitGroup{} - wg.Add(2) - for i := 0; i < 2; i++ { - go sourceReader(s, filechan, readResults, wg) - } - - wg2 := &sync.WaitGroup{} - wg2.Add(4) - for i := 0; i < 2; i++ { - go fileConverter(s, fileConvChan, convertResults, wg2) - go pageConverter(pageChan, convertResults, wg2) - } - - sp := source.NewSourceSpec(s.Cfg, s.Fs) - fs := sp.NewFilesystem("") - for _, ev := range sourceChanged { - // The incrementalReadCollator below will also make changes to the site's pages, - // so we do this first to prevent races. 
- if ev.Op&fsnotify.Remove == fsnotify.Remove { - //remove the file & a create will follow - path, _ := helpers.GetRelativePath(ev.Name, s.getContentDir(ev.Name)) - s.removePageByPathPrefix(path) - continue - } + isContent := isContentFile(ev.Name) + if isContent { + if ev.Op&fsnotify.Remove == fsnotify.Remove { + //remove the file & a create will follow - // Some editors (Vim) sometimes issue only a Rename operation when writing an existing file - // Sometimes a rename operation means that file has been renamed other times it means - // it's been updated - if ev.Op&fsnotify.Rename == fsnotify.Rename { - // If the file is still on disk, it's only been updated, if it's not, it's been moved - if ex, err := afero.Exists(s.Fs.Source, ev.Name); !ex || err != nil { path, _ := helpers.GetRelativePath(ev.Name, s.getContentDir(ev.Name)) - s.removePageByPath(path) - continue - } - } + h.removePageByPathPrefix(path) - // ignore files shouldn't be proceed - if fi, err := s.Fs.Source.Stat(ev.Name); err != nil { - continue - } else { - if ok, err := fs.ShouldRead(ev.Name, fi); err != nil || !ok { continue } - } - - sourceReallyChanged = append(sourceReallyChanged, ev) - } - - go incrementalReadCollator(s, readResults, pageChan, fileConvChan, coordinator, errs) - go converterCollator(convertResults, errs) - - for _, ev := range sourceReallyChanged { - file, err := s.reReadFile(ev.Name) - - if err != nil { - s.Log.ERROR.Println("Error reading file", ev.Name, ";", err) + // Some editors (Vim) sometimes issue only a Rename operation when writing an existing file + // Sometimes a rename operation means that file has been renamed other times it means + // it's been updated + if ev.Op&fsnotify.Rename == fsnotify.Rename { + // If the file is still on disk, it's only been updated, if it's not, it's been moved + if ex, err := afero.Exists(s.Fs.Source, ev.Name); !ex || err != nil { + path, _ := helpers.GetRelativePath(ev.Name, s.getContentDir(ev.Name)) + h.removePageByPath(path) + continue + } + } } - if file != nil { - filechan <- file + if !s.SourceSpec.IgnoreFile(ev.Name) { + sourceReallyChanged = append(sourceReallyChanged, ev) } - } for shortcode := range shortcodesChanged { @@ -740,39 +671,23 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) { // and then creates the shortcode on the file system. // To handle these scenarios, we must do a full reprocessing of the // pages that keeps a reference to the changed shortcode. - pagesWithShortcode := s.findPagesByShortcode(shortcode) + pagesWithShortcode := h.findPagesByShortcode(shortcode) for _, p := range pagesWithShortcode { - p.rendered = false - pageChan <- p + contentFilesChanged = append(contentFilesChanged, p.File.Filename()) } } - // we close the filechan as we have sent everything we want to send to it. - // this will tell the sourceReaders to stop iterating on that channel - close(filechan) - - // waiting for the sourceReaders to all finish - wg.Wait() - // Now closing readResults as this will tell the incrementalReadCollator to - // stop iterating over that. - close(readResults) - - // once readResults is finished it will close coordinator and move along - <-coordinator - // allow that routine to finish, then close page & fileconvchan as we've sent - // everything to them we need to. 
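The replacement logic above boils the old read/convert pipeline down to a filter over filesystem events: a removal drops the affected pages, and a rename only counts as a removal when the file is no longer on disk (some editors report a plain rename on save). A compact sketch of that decision, with a hypothetical classify helper standing in for the real handlers:

// Illustrative sketch; classifyContentEvent is hypothetical and only mirrors
// the decision made in processPartial above.
package hugolib

import (
	"github.com/fsnotify/fsnotify"
	"github.com/spf13/afero"
)

func classifyContentEvent(fs afero.Fs, ev fsnotify.Event) string {
	if ev.Op&fsnotify.Remove == fsnotify.Remove {
		return "remove" // a create event for any replacement file follows separately
	}
	if ev.Op&fsnotify.Rename == fsnotify.Rename {
		// If the renamed file is still present it was just rewritten in place;
		// if it is gone, treat it like a removal.
		if exists, err := afero.Exists(fs, ev.Name); !exists || err != nil {
			return "remove"
		}
	}
	return "reread"
}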
- close(pageChan) - close(fileConvChan) - - wg2.Wait() - close(convertResults) - - s.timerStep("read & convert pages from source") + if len(sourceReallyChanged) > 0 || len(contentFilesChanged) > 0 { + var filenamesChanged []string + for _, e := range sourceReallyChanged { + filenamesChanged = append(filenamesChanged, e.Name) + } + if len(contentFilesChanged) > 0 { + filenamesChanged = append(filenamesChanged, contentFilesChanged...) + } - for i := 0; i < 2; i++ { - err := <-errs - if err != nil { - s.Log.ERROR.Println(err) + if err := s.readAndProcessContent(filenamesChanged...); err != nil { + return whatChanged{}, err } } @@ -785,88 +700,111 @@ func (s *Site) reProcess(events []fsnotify.Event) (whatChanged, error) { } -func (s *Site) loadData(sources []source.Input) (err error) { - s.Log.DEBUG.Printf("Load Data from %d source(s)", len(sources)) +func (s *Site) loadData(sourceDirs []string) (err error) { + s.Log.DEBUG.Printf("Load Data from %d source(s)", len(sourceDirs)) s.Data = make(map[string]interface{}) - var current map[string]interface{} - for _, currentSource := range sources { - for _, r := range currentSource.Files() { - // Crawl in data tree to insert data - current = s.Data - for _, key := range strings.Split(r.Dir(), helpers.FilePathSeparator) { - if key != "" { - if _, ok := current[key]; !ok { - current[key] = make(map[string]interface{}) - } - current = current[key].(map[string]interface{}) - } + for _, sourceDir := range sourceDirs { + fs := s.SourceSpec.NewFilesystem(sourceDir) + for _, r := range fs.Files() { + if err := s.handleDataFile(r); err != nil { + return err } + } + } - data, err := s.readData(r) - if err != nil { - s.Log.WARN.Printf("Failed to read data from %s: %s", filepath.Join(r.Path(), r.LogicalName()), err) - continue - } + return +} - if data == nil { - continue - } +func (s *Site) handleDataFile(r source.ReadableFile) error { + var current map[string]interface{} - // Copy content from current to data when needed - if _, ok := current[r.BaseFileName()]; ok { - data := data.(map[string]interface{}) - - for key, value := range current[r.BaseFileName()].(map[string]interface{}) { - if _, override := data[key]; override { - // filepath.Walk walks the files in lexical order, '/' comes before '.' - // this warning could happen if - // 1. A theme uses the same key; the main data folder wins - // 2. 
A sub folder uses the same key: the sub folder wins - s.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden in subfolder", key, r.Path()) - } - data[key] = value - } + f, err := r.Open() + if err != nil { + return fmt.Errorf("Failed to open data file %q: %s", r.LogicalName(), err) + } + defer f.Close() + + // Crawl in data tree to insert data + current = s.Data + for _, key := range strings.Split(r.Dir(), helpers.FilePathSeparator) { + if key != "" { + if _, ok := current[key]; !ok { + current[key] = make(map[string]interface{}) } + current = current[key].(map[string]interface{}) + } + } - // Insert data - current[r.BaseFileName()] = data + data, err := s.readData(r) + if err != nil { + s.Log.WARN.Printf("Failed to read data from %s: %s", filepath.Join(r.Path(), r.LogicalName()), err) + return nil + } + + if data == nil { + return nil + } + + // Copy content from current to data when needed + if _, ok := current[r.BaseFileName()]; ok { + data := data.(map[string]interface{}) + + for key, value := range current[r.BaseFileName()].(map[string]interface{}) { + if _, override := data[key]; override { + // filepath.Walk walks the files in lexical order, '/' comes before '.' + // this warning could happen if + // 1. A theme uses the same key; the main data folder wins + // 2. A sub folder uses the same key: the sub folder wins + s.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden in subfolder", key, r.Path()) + } + data[key] = value } } - return + // Insert data + current[r.BaseFileName()] = data + + return nil } -func (s *Site) readData(f *source.File) (interface{}, error) { +func (s *Site) readData(f source.ReadableFile) (interface{}, error) { + file, err := f.Open() + if err != nil { + return nil, err + } + defer file.Close() + content := helpers.ReaderToBytes(file) + switch f.Extension() { case "yaml", "yml": - return parser.HandleYAMLMetaData(f.Bytes()) + return parser.HandleYAMLMetaData(content) case "json": - return parser.HandleJSONMetaData(f.Bytes()) + return parser.HandleJSONMetaData(content) case "toml": - return parser.HandleTOMLMetaData(f.Bytes()) + return parser.HandleTOMLMetaData(content) default: return nil, fmt.Errorf("Data not supported for extension '%s'", f.Extension()) } } func (s *Site) readDataFromSourceFS() error { - sp := source.NewSourceSpec(s.Cfg, s.Fs) - dataSources := make([]source.Input, 0, 2) - dataSources = append(dataSources, sp.NewFilesystem(s.absDataDir())) + var dataSourceDirs []string // have to be last - duplicate keys in earlier entries will win themeDataDir, err := s.PathSpec.GetThemeDataDirPath() if err == nil { - dataSources = append(dataSources, sp.NewFilesystem(themeDataDir)) + dataSourceDirs = []string{s.absDataDir(), themeDataDir} + } else { + dataSourceDirs = []string{s.absDataDir()} + } - err = s.loadData(dataSources) + err = s.loadData(dataSourceDirs) s.timerStep("load data") return err } func (s *Site) process(config BuildCfg) (err error) { - s.timerStep("Go initialization") if err = s.initialize(); err != nil { return } @@ -877,7 +815,13 @@ func (s *Site) process(config BuildCfg) (err error) { } s.timerStep("load i18n") - return s.createPages() + + if err := s.readAndProcessContent(); err != nil { + return err + } + s.timerStep("read and convert pages from source") + + return err } @@ -967,19 +911,10 @@ func (s *Site) initialize() (err error) { defer s.initializeSiteInfo() s.Menus = Menus{} - // May be supplied in tests. 
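handleDataFile above keeps the original merge semantics: the data file's directory path is split into nested map keys, the base file name becomes the final key, and when the same key appears twice the conflict is logged as a warning. The key-nesting step on its own, reduced to a sketch:

// Illustrative sketch of the tree crawl used by handleDataFile; nestData is
// a hypothetical helper, not part of the patch.
package hugolib

import "strings"

// nestData walks data[a][b]... for dir "a/b", creating maps as needed, and
// stores value under the given name in the innermost map.
func nestData(data map[string]interface{}, dir, separator, name string, value interface{}) {
	current := data
	for _, key := range strings.Split(dir, separator) {
		if key == "" {
			continue
		}
		if _, ok := current[key]; !ok {
			current[key] = make(map[string]interface{})
		}
		current = current[key].(map[string]interface{})
	}
	current[name] = value
}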
- if s.Source != nil && len(s.Source.Files()) > 0 { - s.Log.DEBUG.Println("initialize: Source is already set") - return - } - if err = s.checkDirectories(); err != nil { return err } - sp := source.NewSourceSpec(s.Cfg, s.Fs) - s.Source = sp.NewFilesystem(s.absContentDir()) - return } @@ -1053,7 +988,6 @@ func (s *Site) initializeSiteInfo() { uglyURLs: s.Cfg.GetBool("uglyURLs"), preserveTaxonomyNames: lang.GetBool("preserveTaxonomyNames"), PageCollections: s.PageCollections, - Files: &s.Files, Menus: &s.Menus, Params: params, Permalinks: permalinks, @@ -1144,7 +1078,7 @@ func (s *Site) getThemeLayoutDir(path string) string { } func (s *Site) absContentDir() string { - return s.PathSpec.AbsPathify(s.Cfg.GetString("contentDir")) + return s.PathSpec.AbsPathify(s.PathSpec.ContentDir()) } func (s *Site) isContentDirEvent(e fsnotify.Event) bool { @@ -1190,241 +1124,86 @@ func (s *Site) checkDirectories() (err error) { return } -// reReadFile resets file to be read from disk again -func (s *Site) reReadFile(absFilePath string) (*source.File, error) { - s.Log.INFO.Println("rereading", absFilePath) - var file *source.File - - reader, err := source.NewLazyFileReader(s.Fs.Source, absFilePath) - if err != nil { - return nil, err - } - - sp := source.NewSourceSpec(s.Cfg, s.Fs) - file, err = sp.NewFileFromAbs(s.getContentDir(absFilePath), absFilePath, reader) - - if err != nil { - return nil, err - } - - return file, nil +type contentCaptureResultHandler struct { + contentProcessors map[string]*siteContentProcessor } -func (s *Site) readPagesFromSource() chan error { - if s.Source == nil { - panic(fmt.Sprintf("s.Source not set %s", s.absContentDir())) - } - - s.Log.DEBUG.Printf("Read %d pages from source", len(s.Source.Files())) - - errs := make(chan error) - if len(s.Source.Files()) < 1 { - close(errs) - return errs - } - - files := s.Source.Files() - results := make(chan HandledResult) - filechan := make(chan *source.File) - wg := &sync.WaitGroup{} - numWorkers := getGoMaxProcs() * 4 - wg.Add(numWorkers) - for i := 0; i < numWorkers; i++ { - go sourceReader(s, filechan, results, wg) - } - - // we can only have exactly one result collator, since it makes changes that - // must be synchronized. 
- go readCollator(s, results, errs) +func (c *contentCaptureResultHandler) handleSingles(fis ...*fileInfo) { + for _, fi := range fis { + // May be connected to a language (content files) + proc, found := c.contentProcessors[fi.Lang()] + if !found { + panic("proc not found") + } + proc.fileSinglesChan <- fi - for _, file := range files { - filechan <- file } - - close(filechan) - wg.Wait() - close(results) - - return errs } +func (c *contentCaptureResultHandler) handleBundles(d *bundleDirs) { + for _, b := range d.bundles { + lang := b.fi.Lang() -func (s *Site) convertSource() chan error { - errs := make(chan error) - results := make(chan HandledResult) - pageChan := make(chan *Page) - fileConvChan := make(chan *source.File) - numWorkers := getGoMaxProcs() * 4 - wg := &sync.WaitGroup{} - - for i := 0; i < numWorkers; i++ { - wg.Add(2) - go fileConverter(s, fileConvChan, results, wg) - go pageConverter(pageChan, results, wg) - } - - go converterCollator(results, errs) - - for _, p := range s.rawAllPages { - if p.shouldBuild() { - pageChan <- p + proc, found := c.contentProcessors[lang] + if !found { + panic("proc not found") } - } + proc.fileBundlesChan <- b - for _, f := range s.Files { - fileConvChan <- f } - - close(pageChan) - close(fileConvChan) - wg.Wait() - close(results) - - return errs } -func (s *Site) createPages() error { - readErrs := <-s.readPagesFromSource() - s.timerStep("read pages from source") - - renderErrs := <-s.convertSource() - s.timerStep("convert source") - - if renderErrs == nil && readErrs == nil { - return nil - } - if renderErrs == nil { - return readErrs - } - if readErrs == nil { - return renderErrs +func (c *contentCaptureResultHandler) handleCopyFiles(filenames ...string) { + for _, proc := range c.contentProcessors { + proc.fileAssetsChan <- filenames } - - return fmt.Errorf("%s\n%s", readErrs, renderErrs) } -func sourceReader(s *Site, files <-chan *source.File, results chan<- HandledResult, wg *sync.WaitGroup) { - defer wg.Done() - for file := range files { - readSourceFile(s, file, results) - } -} +func (s *Site) readAndProcessContent(filenames ...string) error { -func readSourceFile(s *Site, file *source.File, results chan<- HandledResult) { - h := NewMetaHandler(file.Extension()) - if h != nil { - h.Read(file, s, results) - } else { - s.Log.ERROR.Println("Unsupported File Type", file.Path()) - } -} + ctx := context.Background() + g, ctx := errgroup.WithContext(ctx) -func pageConverter(pages <-chan *Page, results HandleResults, wg *sync.WaitGroup) { - defer wg.Done() - for page := range pages { - var h *MetaHandle - if page.Markup != "" { - h = NewMetaHandler(page.Markup) - } else { - h = NewMetaHandler(page.File.Extension()) - } - if h != nil { - // Note that we convert pages from the site's rawAllPages collection - // Which may contain pages from multiple sites, so we use the Page's site - // for the conversion. 
- h.Convert(page, page.s, results) - } - } -} + sourceSpec := source.NewSourceSpec(s.owner.Cfg, s.Fs) + baseDir := s.absContentDir() -func fileConverter(s *Site, files <-chan *source.File, results HandleResults, wg *sync.WaitGroup) { - defer wg.Done() - for file := range files { - h := NewMetaHandler(file.Extension()) - if h != nil { - h.Convert(file, s, results) - } - } -} + contentProcessors := make(map[string]*siteContentProcessor) + sites := s.owner.langSite() + for k, v := range sites { + proc := newSiteContentProcessor(baseDir, len(filenames) > 0, v) + contentProcessors[k] = proc -func converterCollator(results <-chan HandledResult, errs chan<- error) { - errMsgs := []string{} - for r := range results { - if r.err != nil { - errMsgs = append(errMsgs, r.err.Error()) - continue - } + g.Go(func() error { + return proc.process(ctx) + }) } - if len(errMsgs) == 0 { - errs <- nil - return - } - errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n")) -} -func (s *Site) replaceFile(sf *source.File) { - for i, f := range s.Files { - if f.Path() == sf.Path() { - s.Files[i] = sf - return - } - } + var ( + handler captureResultHandler + bundleMap *contentChangeMap + ) - // If a match isn't found, then append it - s.Files = append(s.Files, sf) -} + mainHandler := &contentCaptureResultHandler{contentProcessors: contentProcessors} -func incrementalReadCollator(s *Site, results <-chan HandledResult, pageChan chan *Page, fileConvChan chan *source.File, coordinator chan bool, errs chan<- error) { - errMsgs := []string{} - for r := range results { - if r.err != nil { - errMsgs = append(errMsgs, r.Error()) - continue - } + if s.running() { + // Need to track changes. + bundleMap = s.owner.ContentChanges + handler = &captureResultHandlerChain{handlers: []captureBundlesHandler{mainHandler, bundleMap}} - if r.page == nil { - s.replaceFile(r.file) - fileConvChan <- r.file - } else { - s.replacePage(r.page) - pageChan <- r.page - } + } else { + handler = mainHandler } - s.rawAllPages.Sort() - close(coordinator) - - if len(errMsgs) == 0 { - errs <- nil - return - } - errs <- fmt.Errorf("Errors reading pages: %s", strings.Join(errMsgs, "\n")) -} + c := newCapturer(s.Log, sourceSpec, handler, bundleMap, baseDir, filenames...) 
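readAndProcessContent, assembled across these hunks, starts one siteContentProcessor per language, runs each inside an errgroup so the first failure cancels the shared context, then captures the source tree, closes the processors' input channels and waits. The coordination pattern in isolation, with a hypothetical process function:

// Illustrative errgroup fan-out; runProcessors and process are hypothetical.
package main

import (
	"context"

	"golang.org/x/sync/errgroup"
)

func runProcessors(ctx context.Context, inputs map[string]chan string, process func(context.Context, <-chan string) error) error {
	g, ctx := errgroup.WithContext(ctx)
	for _, ch := range inputs {
		ch := ch // capture the loop variable for the goroutine below
		g.Go(func() error {
			return process(ctx, ch)
		})
	}
	// Wait blocks until every processor returns and reports the first error, if any.
	return g.Wait()
}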
-func readCollator(s *Site, results <-chan HandledResult, errs chan<- error) { - if s.PageCollections == nil { - panic("No page collections") + if err := c.capture(); err != nil { + return err } - errMsgs := []string{} - for r := range results { - if r.err != nil { - errMsgs = append(errMsgs, r.Error()) - continue - } - // !page == file - if r.page == nil { - s.Files = append(s.Files, r.file) - } else { - s.addPage(r.page) - } + for _, proc := range contentProcessors { + proc.closeInput() } - s.rawAllPages.Sort() - if len(errMsgs) == 0 { - errs <- nil - return - } - errs <- fmt.Errorf("Errors reading pages: %s", strings.Join(errMsgs, "\n")) + return g.Wait() } func (s *Site) buildSiteMeta() (err error) { @@ -1647,7 +1426,6 @@ func (s *Site) resetBuildState() { // TODO(bep) get rid of this double s.Info.PageCollections = s.PageCollections - s.Info.paginationPageCount = 0 s.draftCount = 0 s.futureCount = 0 @@ -1661,6 +1439,10 @@ func (s *Site) resetBuildState() { } func (s *Site) kindFromSections(sections []string) string { + if len(sections) == 0 { + return KindSection + } + if _, isTaxonomy := s.Taxonomies[sections[0]]; isTaxonomy { if len(sections) == 1 { return KindTaxonomyTerm @@ -1738,28 +1520,6 @@ func (s *Site) appendThemeTemplates(in []string) []string { } -// Stats prints Hugo builds stats to the console. -// This is what you see after a successful hugo build. -func (s *Site) Stats() { - s.Log.FEEDBACK.Printf("\nBuilt site for language %s:\n", s.Language.Lang) - s.Log.FEEDBACK.Println(s.draftStats()) - s.Log.FEEDBACK.Println(s.futureStats()) - s.Log.FEEDBACK.Println(s.expiredStats()) - s.Log.FEEDBACK.Printf("%d regular pages created\n", s.siteStats.pageCountRegular) - s.Log.FEEDBACK.Printf("%d other pages created\n", (s.siteStats.pageCount - s.siteStats.pageCountRegular)) - s.Log.FEEDBACK.Printf("%d non-page files copied\n", len(s.Files)) - s.Log.FEEDBACK.Printf("%d paginator pages created\n", s.Info.paginationPageCount) - - if s.isEnabled(KindTaxonomy) { - taxonomies := s.Language.GetStringMapString("taxonomies") - - for _, pl := range taxonomies { - s.Log.FEEDBACK.Printf("%d %s created\n", len(s.Taxonomies[pl]), pl) - } - } - -} - // GetPage looks up a page of a given type in the path given. 
// {{ with .Site.GetPage "section" "blog" }}{{ .Title }}{{ end }} // @@ -1783,23 +1543,15 @@ func (s *Site) permalinkForOutputFormat(link string, f output.Format) (string, e } else { baseURL = s.PathSpec.BaseURL.String() } - return s.permalinkForBaseURL(link, baseURL), nil + return s.PathSpec.PermalinkForBaseURL(link, baseURL), nil } func (s *Site) permalink(link string) string { - return s.permalinkForBaseURL(link, s.PathSpec.BaseURL.String()) - -} + return s.PathSpec.PermalinkForBaseURL(link, s.PathSpec.BaseURL.String()) -func (s *Site) permalinkForBaseURL(link, baseURL string) string { - link = strings.TrimPrefix(link, "/") - if !strings.HasSuffix(baseURL, "/") { - baseURL += "/" - } - return baseURL + link } -func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layouts ...string) error { +func (s *Site) renderAndWriteXML(statCounter *uint64, name string, dest string, d interface{}, layouts ...string) error { s.Log.DEBUG.Printf("Render XML for %q to %q", name, dest) renderBuffer := bp.GetBuffer() defer bp.PutBuffer(renderBuffer) @@ -1829,11 +1581,11 @@ func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layout return nil } - return s.publish(dest, outBuffer) + return s.publish(statCounter, dest, outBuffer) } -func (s *Site) renderAndWritePage(name string, dest string, p *PageOutput, layouts ...string) error { +func (s *Site) renderAndWritePage(statCounter *uint64, name string, dest string, p *PageOutput, layouts ...string) error { renderBuffer := bp.GetBuffer() defer bp.PutBuffer(renderBuffer) @@ -1888,7 +1640,7 @@ func (s *Site) renderAndWritePage(name string, dest string, p *PageOutput, layou return nil } - return s.publish(dest, outBuffer) + return s.publish(statCounter, dest, outBuffer) } func (s *Site) renderForLayouts(name string, d interface{}, w io.Writer, layouts ...string) (err error) { @@ -1936,8 +1688,11 @@ func (s *Site) findFirstTemplate(layouts ...string) tpl.Template { return nil } -func (s *Site) publish(path string, r io.Reader) (err error) { +func (s *Site) publish(statCounter *uint64, path string, r io.Reader) (err error) { + s.PathSpec.ProcessingStats.Incr(statCounter) + path = filepath.Join(s.absPublishDir(), path) + return helpers.WriteToDisk(path, r, s.Fs.Destination) } @@ -2012,6 +1767,7 @@ func (s *Site) newNodePage(typ string, sections ...string) *Page { language: s.Language, pageInit: &pageInit{}, Kind: typ, + Source: Source{File: &source.FileInfo{}}, Data: make(map[string]interface{}), Site: &s.Info, sections: sections, diff --git a/hugolib/siteJSONEncode_test.go b/hugolib/siteJSONEncode_test.go index 9c83899fde2..5bb6e52e822 100644 --- a/hugolib/siteJSONEncode_test.go +++ b/hugolib/siteJSONEncode_test.go @@ -29,11 +29,13 @@ func TestEncodePage(t *testing.T) { t.Parallel() cfg, fs := newTestCfg() - // borrowed from menu_test.go - for _, src := range menuPageSources { - writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) + writeSource(t, fs, filepath.Join("content", "page.md"), `--- +title: Simple +--- +Summary text - } + +`) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{}) diff --git a/hugolib/site_render.go b/hugolib/site_render.go index c14592c895f..497433ff661 100644 --- a/hugolib/site_render.go +++ b/hugolib/site_render.go @@ -98,6 +98,26 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa continue } + // We only need to re-publish the resources if the output format is different + // from all of the previous (e.g. the "amp" use case). 
+ shouldRender := i == 0 + if i > 0 { + for j := i; j >= 0; j-- { + if outFormat.Path != page.outputFormats[j].Path { + shouldRender = true + } else { + shouldRender = false + } + } + } + + if shouldRender { + if err := pageOutput.renderResources(); err != nil { + s.Log.ERROR.Printf("Failed to render resources for page %q: %s", page, err) + continue + } + } + var layouts []string if page.selfLayout != "" { @@ -125,7 +145,7 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa s.Log.DEBUG.Printf("Render %s to %q with layouts %q", pageOutput.Kind, targetPath, layouts) - if err := s.renderAndWritePage("page "+pageOutput.FullFilePath(), targetPath, pageOutput, layouts...); err != nil { + if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+pageOutput.FullFilePath(), targetPath, pageOutput, layouts...); err != nil { results <- err } @@ -191,6 +211,7 @@ func (s *Site) renderPaginator(p *PageOutput) error { } if err := s.renderAndWritePage( + &s.PathSpec.ProcessingStats.PaginatorPages, pagerNode.Title, targetPath, pagerNode, layouts...); err != nil { return err @@ -232,7 +253,7 @@ func (s *Site) renderRSS(p *PageOutput) error { return err } - return s.renderAndWriteXML(p.Title, + return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Pages, p.Title, targetPath, p, layouts...) } @@ -271,7 +292,7 @@ func (s *Site) render404() error { s.Log.ERROR.Printf("Failed to create target path for page %q: %s", p, err) } - return s.renderAndWritePage("404 page", targetPath, pageOutput, s.appendThemeTemplates(nfLayouts)...) + return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "404 page", targetPath, pageOutput, s.appendThemeTemplates(nfLayouts)...) } func (s *Site) renderSitemap() error { @@ -325,7 +346,7 @@ func (s *Site) renderSitemap() error { smLayouts := []string{"sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml"} addLanguagePrefix := n.Site.IsMultiLingual() - return s.renderAndWriteXML("sitemap", + return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemap", n.addLangPathPrefixIfFlagSet(page.Sitemap.Filename, addLanguagePrefix), n, s.appendThemeTemplates(smLayouts)...) } @@ -357,7 +378,7 @@ func (s *Site) renderRobotsTXT() error { return nil } - return s.publish("robots.txt", outBuffer) + return s.publish(&s.PathSpec.ProcessingStats.Pages, "robots.txt", outBuffer) } // renderAliases renders shell pages that simply have a redirect in the header. diff --git a/hugolib/site_sections_test.go b/hugolib/site_sections_test.go index 37752467b40..8b5b37fcce4 100644 --- a/hugolib/site_sections_test.go +++ b/hugolib/site_sections_test.go @@ -114,6 +114,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} cfg.Set("paginate", 2) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{}) + require.Len(t, s.RegularPages, 21) tests := []struct { @@ -264,6 +265,8 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} }}, } + home := s.getPage(KindHome) + for _, test := range tests { sections := strings.Split(test.sections, ",") p := s.getPage(KindSection, sections...) @@ -276,8 +279,6 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} test.verify(p) } - home := s.getPage(KindHome) - assert.NotNil(home) assert.Len(home.Sections(), 9) diff --git a/hugolib/site_stats_test.go b/hugolib/site_stats_test.go new file mode 100644 index 00000000000..522b5636bc4 --- /dev/null +++ b/hugolib/site_stats_test.go @@ -0,0 +1,101 @@ +// Copyright 2017 The Hugo Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package hugolib + +import ( + "bytes" + "fmt" + "io/ioutil" + "testing" + + "github.com/gohugoio/hugo/helpers" + "github.com/spf13/afero" + + "github.com/stretchr/testify/require" +) + +func TestSiteStats(t *testing.T) { + t.Parallel() + + assert := require.New(t) + + siteConfig := ` +baseURL = "http://example.com/blog" + +paginate = 1 +defaultContentLanguage = "nn" + +[languages] +[languages.nn] +languageName = "Nynorsk" +weight = 1 +title = "Hugo på norsk" + +[languages.en] +languageName = "English" +weight = 2 +title = "Hugo in English" + +` + + pageTemplate := `--- +title: "T%d" +tags: +%s +categories: +%s +aliases: [Ali%d] +--- +# Doc +` + + th, h := newTestSitesFromConfig(t, afero.NewMemMapFs(), siteConfig, + "layouts/_default/single.html", "Single|{{ .Title }}|{{ .Content }}", + "layouts/_default/list.html", `List|{{ .Title }}|Pages: {{ .Paginator.TotalPages }}|{{ .Content }}`, + "layouts/_default/terms.html", "Terms List|{{ .Title }}|{{ .Content }}", + ) + require.Len(t, h.Sites, 2) + + fs := th.Fs + + for i := 0; i < 2; i++ { + for j := 0; j < 2; j++ { + pageID := i + j + 1 + writeSource(t, fs, fmt.Sprintf("content/sect/p%d.md", pageID), + fmt.Sprintf(pageTemplate, pageID, fmt.Sprintf("- tag%d", j), fmt.Sprintf("- category%d", j), pageID)) + } + } + + for i := 0; i < 5; i++ { + writeSource(t, fs, fmt.Sprintf("content/assets/image%d.png", i+1), "image") + } + + err := h.Build(BuildCfg{}) + + assert.NoError(err) + + stats := []*helpers.ProcessingStats{ + h.Sites[0].PathSpec.ProcessingStats, + h.Sites[1].PathSpec.ProcessingStats} + + stats[0].Table(ioutil.Discard) + stats[1].Table(ioutil.Discard) + + var buff bytes.Buffer + + helpers.ProcessingStatsTable(&buff, stats...) 
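The assertions in this new test read counters that the render path now increments through s.PathSpec.ProcessingStats: renderAndWritePage, renderAndWriteXML and publish each take a pointer to the counter they should credit, and the totals are printed with Table or ProcessingStatsTable. A plausible sketch of such a counter type, limited to the fields this patch references and assuming atomic updates (the real helpers implementation may differ):

// Illustrative sketch only; the field set and update strategy are assumptions.
package helpers

import "sync/atomic"

type ProcessingStats struct {
	Pages          uint64
	PaginatorPages uint64
	Sitemaps       uint64
}

// Incr bumps one counter, chosen by the caller passing e.g. &stats.Pages,
// so concurrent render workers can update it safely.
func (s *ProcessingStats) Incr(counter *uint64) {
	atomic.AddUint64(counter, 1)
}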
+ + assert.Contains(buff.String(), "Pages | 19 | 6") + +} diff --git a/hugolib/site_test.go b/hugolib/site_test.go index 7c5af66718a..6c5c28c5416 100644 --- a/hugolib/site_test.go +++ b/hugolib/site_test.go @@ -23,7 +23,6 @@ import ( jww "github.com/spf13/jwalterweatherman" "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/hugofs" @@ -74,11 +73,11 @@ func TestRenderWithInvalidTemplate(t *testing.T) { func TestDraftAndFutureRender(t *testing.T) { t.Parallel() - sources := []source.ByteSource{ - {Name: filepath.FromSlash("sect/doc1.md"), Content: []byte("---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*")}, - {Name: filepath.FromSlash("sect/doc2.md"), Content: []byte("---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*")}, - {Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("---\ntitle: doc3\ndraft: false\npublishdate: \"2414-05-29\"\n---\n# doc3\n*some content*")}, - {Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*")}, + sources := [][2]string{ + {filepath.FromSlash("sect/doc1.md"), "---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*"}, + {filepath.FromSlash("sect/doc2.md"), "---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*"}, + {filepath.FromSlash("sect/doc3.md"), "---\ntitle: doc3\ndraft: false\npublishdate: \"2414-05-29\"\n---\n# doc3\n*some content*"}, + {filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*"}, } siteSetup := func(t *testing.T, configKeyValues ...interface{}) *Site { @@ -91,7 +90,7 @@ func TestDraftAndFutureRender(t *testing.T) { } for _, src := range sources { - writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) + writeSource(t, fs, filepath.Join("content", src[0]), src[1]) } @@ -132,9 +131,9 @@ func TestDraftAndFutureRender(t *testing.T) { func TestFutureExpirationRender(t *testing.T) { t.Parallel() - sources := []source.ByteSource{ - {Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("---\ntitle: doc1\nexpirydate: \"2400-05-29\"\n---\n# doc1\n*some content*")}, - {Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc2\nexpirydate: \"2000-05-29\"\n---\n# doc2\n*some content*")}, + sources := [][2]string{ + {filepath.FromSlash("sect/doc3.md"), "---\ntitle: doc1\nexpirydate: \"2400-05-29\"\n---\n# doc1\n*some content*"}, + {filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc2\nexpirydate: \"2000-05-29\"\n---\n# doc2\n*some content*"}, } siteSetup := func(t *testing.T) *Site { @@ -142,7 +141,7 @@ func TestFutureExpirationRender(t *testing.T) { cfg.Set("baseURL", "http://auth/bub") for _, src := range sources { - writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) + writeSource(t, fs, filepath.Join("content", src[0]), src[1]) } @@ -234,29 +233,29 @@ func doTestCrossrefs(t *testing.T, relative, uglyURLs bool) { doc3Slashed := filepath.FromSlash("/sect/doc3.md") - sources := []source.ByteSource{ + sources := [][2]string{ { - Name: filepath.FromSlash("sect/doc1.md"), - Content: []byte(fmt.Sprintf(`Ref 2: {{< %s "sect/doc2.md" >}}`, refShortcode)), + filepath.FromSlash("sect/doc1.md"), + fmt.Sprintf(`Ref 2: {{< %s "sect/doc2.md" >}}`, refShortcode), }, // Issue #1148: Make sure that no P-tags is 
added around shortcodes. { - Name: filepath.FromSlash("sect/doc2.md"), - Content: []byte(fmt.Sprintf(`**Ref 1:** + filepath.FromSlash("sect/doc2.md"), + fmt.Sprintf(`**Ref 1:** {{< %s "sect/doc1.md" >}} -THE END.`, refShortcode)), +THE END.`, refShortcode), }, // Issue #1753: Should not add a trailing newline after shortcode. { - Name: filepath.FromSlash("sect/doc3.md"), - Content: []byte(fmt.Sprintf(`**Ref 1:**{{< %s "sect/doc3.md" >}}.`, refShortcode)), + filepath.FromSlash("sect/doc3.md"), + fmt.Sprintf(`**Ref 1:**{{< %s "sect/doc3.md" >}}.`, refShortcode), }, // Issue #3703 { - Name: filepath.FromSlash("sect/doc4.md"), - Content: []byte(fmt.Sprintf(`**Ref 1:**{{< %s "%s" >}}.`, refShortcode, doc3Slashed)), + filepath.FromSlash("sect/doc4.md"), + fmt.Sprintf(`**Ref 1:**{{< %s "%s" >}}.`, refShortcode, doc3Slashed), }, } @@ -267,7 +266,7 @@ THE END.`, refShortcode)), cfg.Set("verbose", true) for _, src := range sources { - writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) + writeSource(t, fs, filepath.Join("content", src[0]), src[1]) } s := buildSingleSite( @@ -323,13 +322,13 @@ func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) { cfg.Set("uglyURLs", uglyURLs) - sources := []source.ByteSource{ - {Name: filepath.FromSlash("sect/doc1.md"), Content: []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")}, - {Name: filepath.FromSlash("sect/doc2.md"), Content: []byte("---\nurl: /ugly.html\nmarkup: markdown\n---\n# title\ndoc2 *content*")}, + sources := [][2]string{ + {filepath.FromSlash("sect/doc1.md"), "---\nmarkup: markdown\n---\n# title\nsome *content*"}, + {filepath.FromSlash("sect/doc2.md"), "---\nurl: /ugly.html\nmarkup: markdown\n---\n# title\ndoc2 *content*"}, } for _, src := range sources { - writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) + writeSource(t, fs, filepath.Join("content", src[0]), src[1]) } writeSource(t, fs, filepath.Join("layouts", "index.html"), "Home Sweet {{ if.IsHome }}Home{{ end }}.") @@ -402,7 +401,9 @@ func TestSectionNaming(t *testing.T) { for _, canonify := range []bool{true, false} { for _, uglify := range []bool{true, false} { for _, pluralize := range []bool{true, false} { - doTestSectionNaming(t, canonify, uglify, pluralize) + t.Run(fmt.Sprintf("canonify=%t,uglify=%t,pluralize=%t", canonify, uglify, pluralize), func(t *testing.T) { + doTestSectionNaming(t, canonify, uglify, pluralize) + }) } } } @@ -418,12 +419,12 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) { expectedPathSuffix = "/index.html" } - sources := []source.ByteSource{ - {Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("doc1")}, + sources := [][2]string{ + {filepath.FromSlash("sect/doc1.html"), "doc1"}, // Add one more page to sect to make sure sect is picked in mainSections - {Name: filepath.FromSlash("sect/sect.html"), Content: []byte("sect")}, - {Name: filepath.FromSlash("Fish and Chips/doc2.html"), Content: []byte("doc2")}, - {Name: filepath.FromSlash("ラーメン/doc3.html"), Content: []byte("doc3")}, + {filepath.FromSlash("sect/sect.html"), "sect"}, + {filepath.FromSlash("Fish and Chips/doc2.html"), "doc2"}, + {filepath.FromSlash("ラーメン/doc3.html"), "doc3"}, } cfg, fs := newTestCfg() @@ -433,8 +434,8 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) { cfg.Set("pluralizeListTitles", pluralize) cfg.Set("canonifyURLs", canonify) - for _, source := range sources { - writeSource(t, fs, filepath.Join("content", source.Name), string(source.Content)) + for _, src 
:= range sources { + writeSource(t, fs, filepath.Join("content", src[0]), src[1]) } writeSource(t, fs, filepath.Join("layouts", "_default/single.html"), "{{.Content}}") @@ -472,17 +473,17 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) { } func TestSkipRender(t *testing.T) { t.Parallel() - sources := []source.ByteSource{ - {Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")}, - {Name: filepath.FromSlash("sect/doc2.html"), Content: []byte("more content")}, - {Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("# doc3\n*some* content")}, - {Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc4\n---\n# doc4\n*some content*")}, - {Name: filepath.FromSlash("sect/doc5.html"), Content: []byte("{{ template \"head\" }}body5")}, - {Name: filepath.FromSlash("sect/doc6.html"), Content: []byte("{{ template \"head_abs\" }}body5")}, - {Name: filepath.FromSlash("doc7.html"), Content: []byte("doc7 content")}, - {Name: filepath.FromSlash("sect/doc8.html"), Content: []byte("---\nmarkup: md\n---\n# title\nsome *content*")}, + sources := [][2]string{ + {filepath.FromSlash("sect/doc1.html"), "---\nmarkup: markdown\n---\n# title\nsome *content*"}, + {filepath.FromSlash("sect/doc2.html"), "more content"}, + {filepath.FromSlash("sect/doc3.md"), "# doc3\n*some* content"}, + {filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc4\n---\n# doc4\n*some content*"}, + {filepath.FromSlash("sect/doc5.html"), "{{ template \"head\" }}body5"}, + {filepath.FromSlash("sect/doc6.html"), "{{ template \"head_abs\" }}body5"}, + {filepath.FromSlash("doc7.html"), "doc7 content"}, + {filepath.FromSlash("sect/doc8.html"), "---\nmarkup: md\n---\n# title\nsome *content*"}, // Issue #3021 - {Name: filepath.FromSlash("doc9.html"), Content: []byte("doc9: {{< myshortcode >}}")}, + {filepath.FromSlash("doc9.html"), "doc9: {{< myshortcode >}}"}, } cfg, fs := newTestCfg() @@ -493,7 +494,7 @@ func TestSkipRender(t *testing.T) { cfg.Set("baseURL", "http://auth/bub") for _, src := range sources { - writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) + writeSource(t, fs, filepath.Join("content", src[0]), src[1]) } @@ -535,9 +536,9 @@ func TestSkipRender(t *testing.T) { func TestAbsURLify(t *testing.T) { t.Parallel() - sources := []source.ByteSource{ - {Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("link")}, - {Name: filepath.FromSlash("blue/doc2.html"), Content: []byte("---\nf: t\n---\nmore content")}, + sources := [][2]string{ + {filepath.FromSlash("sect/doc1.html"), "link"}, + {filepath.FromSlash("blue/doc2.html"), "---\nf: t\n---\nmore content"}, } for _, baseURL := range []string{"http://auth/bub", "http://base", "//base"} { for _, canonify := range []bool{true, false} { @@ -549,7 +550,7 @@ func TestAbsURLify(t *testing.T) { cfg.Set("baseURL", baseURL) for _, src := range sources { - writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) + writeSource(t, fs, filepath.Join("content", src[0]), src[1]) } @@ -584,23 +585,23 @@ func TestAbsURLify(t *testing.T) { } } -var weightedPage1 = []byte(`+++ +var weightedPage1 = `+++ weight = "2" title = "One" my_param = "foo" my_date = 1979-05-27T07:32:00Z +++ -Front Matter with Ordered Pages`) +Front Matter with Ordered Pages` -var weightedPage2 = []byte(`+++ +var weightedPage2 = `+++ weight = "6" title = "Two" publishdate = "2012-03-05" my_param = "foo" +++ -Front Matter with Ordered Pages 2`) +Front Matter with Ordered Pages 2` 
-var weightedPage3 = []byte(`+++ +var weightedPage3 = `+++ weight = "4" title = "Three" date = "2012-04-06" @@ -609,9 +610,9 @@ my_param = "bar" only_one = "yes" my_date = 2010-05-27T07:32:00Z +++ -Front Matter with Ordered Pages 3`) +Front Matter with Ordered Pages 3` -var weightedPage4 = []byte(`+++ +var weightedPage4 = `+++ weight = "4" title = "Four" date = "2012-01-01" @@ -620,13 +621,13 @@ my_param = "baz" my_date = 2010-05-27T07:32:00Z categories = [ "hugo" ] +++ -Front Matter with Ordered Pages 4. This is longer content`) +Front Matter with Ordered Pages 4. This is longer content` -var weightedSources = []source.ByteSource{ - {Name: filepath.FromSlash("sect/doc1.md"), Content: weightedPage1}, - {Name: filepath.FromSlash("sect/doc2.md"), Content: weightedPage2}, - {Name: filepath.FromSlash("sect/doc3.md"), Content: weightedPage3}, - {Name: filepath.FromSlash("sect/doc4.md"), Content: weightedPage4}, +var weightedSources = [][2]string{ + {filepath.FromSlash("sect/doc1.md"), weightedPage1}, + {filepath.FromSlash("sect/doc2.md"), weightedPage2}, + {filepath.FromSlash("sect/doc3.md"), weightedPage3}, + {filepath.FromSlash("sect/doc4.md"), weightedPage4}, } func TestOrderedPages(t *testing.T) { @@ -635,7 +636,7 @@ func TestOrderedPages(t *testing.T) { cfg.Set("baseURL", "http://auth/bub") for _, src := range weightedSources { - writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content)) + writeSource(t, fs, filepath.Join("content", src[0]), src[1]) } @@ -678,11 +679,11 @@ func TestOrderedPages(t *testing.T) { } } -var groupedSources = []source.ByteSource{ - {Name: filepath.FromSlash("sect1/doc1.md"), Content: weightedPage1}, - {Name: filepath.FromSlash("sect1/doc2.md"), Content: weightedPage2}, - {Name: filepath.FromSlash("sect2/doc3.md"), Content: weightedPage3}, - {Name: filepath.FromSlash("sect3/doc4.md"), Content: weightedPage4}, +var groupedSources = [][2]string{ + {filepath.FromSlash("sect1/doc1.md"), weightedPage1}, + {filepath.FromSlash("sect1/doc2.md"), weightedPage2}, + {filepath.FromSlash("sect2/doc3.md"), weightedPage3}, + {filepath.FromSlash("sect3/doc4.md"), weightedPage4}, } func TestGroupedPages(t *testing.T) { @@ -822,16 +823,16 @@ func TestGroupedPages(t *testing.T) { } } -var pageWithWeightedTaxonomies1 = []byte(`+++ +var pageWithWeightedTaxonomies1 = `+++ tags = [ "a", "b", "c" ] tags_weight = 22 categories = ["d"] title = "foo" categories_weight = 44 +++ -Front Matter with weighted tags and categories`) +Front Matter with weighted tags and categories` -var pageWithWeightedTaxonomies2 = []byte(`+++ +var pageWithWeightedTaxonomies2 = `+++ tags = "a" tags_weight = 33 title = "bar" @@ -840,23 +841,23 @@ categories_weight = 11 alias = "spf13" date = 1979-05-27T07:32:00Z +++ -Front Matter with weighted tags and categories`) +Front Matter with weighted tags and categories` -var pageWithWeightedTaxonomies3 = []byte(`+++ +var pageWithWeightedTaxonomies3 = `+++ title = "bza" categories = [ "e" ] categories_weight = 11 alias = "spf13" date = 2010-05-27T07:32:00Z +++ -Front Matter with weighted tags and categories`) +Front Matter with weighted tags and categories` func TestWeightedTaxonomies(t *testing.T) { t.Parallel() - sources := []source.ByteSource{ - {Name: filepath.FromSlash("sect/doc1.md"), Content: pageWithWeightedTaxonomies2}, - {Name: filepath.FromSlash("sect/doc2.md"), Content: pageWithWeightedTaxonomies1}, - {Name: filepath.FromSlash("sect/doc3.md"), Content: pageWithWeightedTaxonomies3}, + sources := [][2]string{ + 
{filepath.FromSlash("sect/doc1.md"), pageWithWeightedTaxonomies2}, + {filepath.FromSlash("sect/doc2.md"), pageWithWeightedTaxonomies1}, + {filepath.FromSlash("sect/doc3.md"), pageWithWeightedTaxonomies3}, } taxonomies := make(map[string]string) @@ -884,39 +885,23 @@ func TestWeightedTaxonomies(t *testing.T) { } } -func findPage(site *Site, f string) *Page { - sp := source.NewSourceSpec(site.Cfg, site.Fs) - currentPath := sp.NewFile(filepath.FromSlash(f)) - //t.Logf("looking for currentPath: %s", currentPath.Path()) - - for _, page := range site.Pages { - //t.Logf("page: %s", page.Source.Path()) - if page.Source.Path() == currentPath.Path() { - return page - } - } - return nil -} - func setupLinkingMockSite(t *testing.T) *Site { - sources := []source.ByteSource{ - {Name: filepath.FromSlash("level2/unique.md"), Content: []byte("")}, - {Name: filepath.FromSlash("index.md"), Content: []byte("")}, - {Name: filepath.FromSlash("rootfile.md"), Content: []byte("")}, - {Name: filepath.FromSlash("root-image.png"), Content: []byte("")}, + sources := [][2]string{ + {filepath.FromSlash("level2/unique.md"), ""}, + {filepath.FromSlash("rootfile.md"), ""}, + {filepath.FromSlash("root-image.png"), ""}, - {Name: filepath.FromSlash("level2/2-root.md"), Content: []byte("")}, - {Name: filepath.FromSlash("level2/index.md"), Content: []byte("")}, - {Name: filepath.FromSlash("level2/common.md"), Content: []byte("")}, + {filepath.FromSlash("level2/2-root.md"), ""}, + {filepath.FromSlash("level2/common.md"), ""}, - {Name: filepath.FromSlash("level2/2-image.png"), Content: []byte("")}, - {Name: filepath.FromSlash("level2/common.png"), Content: []byte("")}, + {filepath.FromSlash("level2/2-image.png"), ""}, + {filepath.FromSlash("level2/common.png"), ""}, - {Name: filepath.FromSlash("level2/level3/3-root.md"), Content: []byte("")}, - {Name: filepath.FromSlash("level2/level3/index.md"), Content: []byte("")}, - {Name: filepath.FromSlash("level2/level3/common.md"), Content: []byte("")}, - {Name: filepath.FromSlash("level2/level3/3-image.png"), Content: []byte("")}, - {Name: filepath.FromSlash("level2/level3/common.png"), Content: []byte("")}, + {filepath.FromSlash("level2/level3/start.md"), ""}, + {filepath.FromSlash("level2/level3/3-root.md"), ""}, + {filepath.FromSlash("level2/level3/common.md"), ""}, + {filepath.FromSlash("level2/level3/3-image.png"), ""}, + {filepath.FromSlash("level2/level3/common.png"), ""}, } cfg, fs := newTestCfg() @@ -939,7 +924,7 @@ func TestRefLinking(t *testing.T) { t.Parallel() site := setupLinkingMockSite(t) - currentPage := findPage(site, "level2/level3/index.md") + currentPage := site.getPage(KindPage, "level2/level3/start.md") if currentPage == nil { t.Fatalf("failed to find current page in site") } @@ -953,8 +938,6 @@ func TestRefLinking(t *testing.T) { {"unique.md", "", true, "/level2/unique/"}, {"level2/common.md", "", true, "/level2/common/"}, {"3-root.md", "", true, "/level2/level3/3-root/"}, - {"level2/level3/index.md", "amp", true, "/amp/level2/level3/"}, - {"level2/index.md", "amp", false, "http://auth/amp/level2/"}, } { if out, err := site.Info.refLink(test.link, currentPage, test.relative, test.outputFormat); err != nil || out != test.expected { t.Errorf("[%d] Expected %s to resolve to (%s), got (%s) - error: %s", i, test.link, test.expected, out, err) diff --git a/hugolib/site_url_test.go b/hugolib/site_url_test.go index 272c78c7efa..479967673c7 100644 --- a/hugolib/site_url_test.go +++ b/hugolib/site_url_test.go @@ -20,7 +20,6 @@ import ( "html/template" 
"github.com/gohugoio/hugo/deps" - "github.com/gohugoio/hugo/source" "github.com/stretchr/testify/require" ) @@ -33,9 +32,9 @@ slug: slug-doc-2 slug doc 2 content ` -var urlFakeSource = []source.ByteSource{ - {Name: filepath.FromSlash("content/blue/doc1.md"), Content: []byte(slugDoc1)}, - {Name: filepath.FromSlash("content/blue/doc2.md"), Content: []byte(slugDoc2)}, +var urlFakeSource = [][2]string{ + {filepath.FromSlash("content/blue/doc1.md"), slugDoc1}, + {filepath.FromSlash("content/blue/doc2.md"), slugDoc2}, } // Issue #1105 diff --git a/hugolib/testhelpers_test.go b/hugolib/testhelpers_test.go index 3db2d9d5136..349c39ebc79 100644 --- a/hugolib/testhelpers_test.go +++ b/hugolib/testhelpers_test.go @@ -14,7 +14,6 @@ import ( "github.com/spf13/afero" "github.com/gohugoio/hugo/helpers" - "github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/tpl" "github.com/spf13/viper" @@ -169,6 +168,11 @@ func newDebugLogger() *jww.Notepad { func newErrorLogger() *jww.Notepad { return jww.NewNotepad(jww.LevelError, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime) } + +func newWarningLogger() *jww.Notepad { + return jww.NewNotepad(jww.LevelWarn, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime) +} + func createWithTemplateFromNameValues(additionalTemplates ...string) func(templ tpl.TemplateHandler) error { return func(templ tpl.TemplateHandler) error { @@ -203,9 +207,17 @@ func buildSingleSiteExpected(t testing.TB, expectBuildError bool, depsCfg deps.D return h.Sites[0] } -func writeSourcesToSource(t *testing.T, base string, fs *hugofs.Fs, sources ...source.ByteSource) { +func writeSourcesToSource(t *testing.T, base string, fs *hugofs.Fs, sources ...[2]string) { for _, src := range sources { - writeSource(t, fs, filepath.Join(base, src.Name), string(src.Content)) + writeSource(t, fs, filepath.Join(base, src[0]), src[1]) + } +} + +func dumpPages(pages ...*Page) { + for i, p := range pages { + fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s Len Sections(): %d\n", + i+1, + p.Kind, p.Title, p.RelPermalink(), p.Path(), p.sections, len(p.Sections())) } } diff --git a/i18n/translationProvider.go b/i18n/translationProvider.go index e0eb89134aa..52aada8bdf4 100644 --- a/i18n/translationProvider.go +++ b/i18n/translationProvider.go @@ -17,6 +17,8 @@ import ( "errors" "fmt" + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/source" "github.com/nicksnyder/go-i18n/i18n/bundle" @@ -73,9 +75,8 @@ func (tp *TranslationProvider) Update(d *deps.Deps) error { for _, currentSource := range sources { for _, r := range currentSource.Files() { - err := i18nBundle.ParseTranslationFileBytes(r.LogicalName(), r.Bytes()) - if err != nil { - return fmt.Errorf("Failed to load translations in file %q: %s", r.LogicalName(), err) + if err := addTranslationFile(i18nBundle, r); err != nil { + return err } } } @@ -88,6 +89,19 @@ func (tp *TranslationProvider) Update(d *deps.Deps) error { } +func addTranslationFile(bundle *bundle.Bundle, r source.ReadableFile) error { + f, err := r.Open() + if err != nil { + return fmt.Errorf("Failed to open translations file %q: %s", r.LogicalName(), err) + } + defer f.Close() + err = bundle.ParseTranslationFileBytes(r.LogicalName(), helpers.ReaderToBytes(f)) + if err != nil { + return fmt.Errorf("Failed to load translations in file %q: %s", r.LogicalName(), err) + } + return nil +} + // Clone sets the language func for the new language. 
func (tp *TranslationProvider) Clone(d *deps.Deps) error { d.Translate = tp.t.Func(d.Language.Lang) diff --git a/resource/image.go b/resource/image.go new file mode 100644 index 00000000000..97607ffa4bf --- /dev/null +++ b/resource/image.go @@ -0,0 +1,533 @@ +// Copyright 2017-present The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resource + +import ( + "errors" + "fmt" + "image/color" + "io" + "os" + "path/filepath" + "strconv" + "strings" + + "github.com/mitchellh/mapstructure" + + "github.com/gohugoio/hugo/helpers" + "github.com/spf13/afero" + + // Importing image codecs for image.DecodeConfig + "image" + _ "image/gif" + "image/jpeg" + _ "image/png" + + "github.com/disintegration/imaging" + + // Import webp codec + "sync" + + _ "golang.org/x/image/webp" +) + +var ( + _ Resource = (*Image)(nil) + _ Source = (*Image)(nil) + _ Cloner = (*Image)(nil) +) + +// Imaging contains default image processing configuration. This will be fetched +// from site (or language) config. +type Imaging struct { + // Default image quality setting (1-100). Only used for JPEG images. + Quality int + + // Resample filter used. See https://github.com/disintegration/imaging + ResampleFilter string +} + +const ( + defaultJPEGQuality = 75 + defaultResampleFilter = "box" +) + +var imageFormats = map[string]imaging.Format{ + ".jpg": imaging.JPEG, + ".jpeg": imaging.JPEG, + ".png": imaging.PNG, + ".tif": imaging.TIFF, + ".tiff": imaging.TIFF, + ".bmp": imaging.BMP, + ".gif": imaging.GIF, +} + +var anchorPositions = map[string]imaging.Anchor{ + strings.ToLower("Center"): imaging.Center, + strings.ToLower("TopLeft"): imaging.TopLeft, + strings.ToLower("Top"): imaging.Top, + strings.ToLower("TopRight"): imaging.TopRight, + strings.ToLower("Left"): imaging.Left, + strings.ToLower("Right"): imaging.Right, + strings.ToLower("BottomLeft"): imaging.BottomLeft, + strings.ToLower("Bottom"): imaging.Bottom, + strings.ToLower("BottomRight"): imaging.BottomRight, +} + +var imageFilters = map[string]imaging.ResampleFilter{ + strings.ToLower("NearestNeighbor"): imaging.NearestNeighbor, + strings.ToLower("Box"): imaging.Box, + strings.ToLower("Linear"): imaging.Linear, + strings.ToLower("Hermite"): imaging.Hermite, + strings.ToLower("MitchellNetravali"): imaging.MitchellNetravali, + strings.ToLower("CatmullRom"): imaging.CatmullRom, + strings.ToLower("BSpline"): imaging.BSpline, + strings.ToLower("Gaussian"): imaging.Gaussian, + strings.ToLower("Lanczos"): imaging.Lanczos, + strings.ToLower("Hann"): imaging.Hann, + strings.ToLower("Hamming"): imaging.Hamming, + strings.ToLower("Blackman"): imaging.Blackman, + strings.ToLower("Bartlett"): imaging.Bartlett, + strings.ToLower("Welch"): imaging.Welch, + strings.ToLower("Cosine"): imaging.Cosine, +} + +type Image struct { + config image.Config + configInit sync.Once + configLoaded bool + + imaging *Imaging + + *genericResource +} + +func (i *Image) Width() int { + i.initConfig() + return i.config.Width +} + +func (i *Image) Height() 
int { + i.initConfig() + return i.config.Height +} + +// Implement the Cloner interface. +func (i *Image) WithNewBase(base string) Resource { + return &Image{ + imaging: i.imaging, + genericResource: i.genericResource.WithNewBase(base).(*genericResource)} +} + +// Resize resizes the image to the specified width and height using the specified resampling +// filter and returns the transformed image. If one of width or height is 0, the image aspect +// ratio is preserved. +func (i *Image) Resize(spec string) (*Image, error) { + return i.doWithImageConfig("resize", spec, func(src image.Image, conf imageConfig) (image.Image, error) { + return imaging.Resize(src, conf.Width, conf.Height, conf.Filter), nil + }) +} + +// Fit scales down the image using the specified resample filter to fit the specified +// maximum width and height. +func (i *Image) Fit(spec string) (*Image, error) { + return i.doWithImageConfig("fit", spec, func(src image.Image, conf imageConfig) (image.Image, error) { + return imaging.Fit(src, conf.Width, conf.Height, conf.Filter), nil + }) +} + +// Fill scales the image to the smallest possible size that will cover the specified dimensions, +// crops the resized image to the specified dimensions using the given anchor point. +// Space delimited config: 200x300 TopLeft +func (i *Image) Fill(spec string) (*Image, error) { + return i.doWithImageConfig("fill", spec, func(src image.Image, conf imageConfig) (image.Image, error) { + return imaging.Fill(src, conf.Width, conf.Height, conf.Anchor, conf.Filter), nil + }) +} + +// Holds configuration to create a new image from an existing one, resize etc. +type imageConfig struct { + Action string + + // Quality ranges from 1 to 100 inclusive, higher is better. + // This is only relevant for JPEG images. + // Default is 75. + Quality int + + // Rotate rotates an image by the given angle counter-clockwise. + // The rotation will be performed first. + Rotate int + + Width int + Height int + + Filter imaging.ResampleFilter + FilterStr string + + Anchor imaging.Anchor + AnchorStr string +} + +func (i *Image) isJPEG() bool { + name := strings.ToLower(i.rel) + return strings.HasSuffix(name, ".jpg") || strings.HasSuffix(name, ".jpeg") +} + +func (i *Image) doWithImageConfig(action, spec string, f func(src image.Image, conf imageConfig) (image.Image, error)) (*Image, error) { + conf, err := parseImageConfig(spec) + if err != nil { + return nil, err + } + conf.Action = action + + if conf.Quality <= 0 && i.isJPEG() { + // We need a quality setting for all JPEGs + conf.Quality = i.imaging.Quality + } + + if conf.FilterStr == "" { + conf.FilterStr = i.imaging.ResampleFilter + conf.Filter = imageFilters[conf.FilterStr] + } + + key := i.relPermalinkForRel(i.filenameFromConfig(conf)) + + return i.spec.imageCache.getOrCreate(i.spec, key, func(resourceCacheFilename string) (*Image, error) { + ci := i.clone() + + ci.setBasePath(conf) + + src, err := i.decodeSource() + if err != nil { + return nil, err + } + + if conf.Rotate != 0 { + // Rotate it befor any scaling to get the dimensions correct. 
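+			// E.g. for the spec "x200 r90" (see TestImageTransform), the source is
+			// rotated 90° first and only then scaled, so the width/height in the spec
+			// always refer to the rotated image and the test image comes out 125x200.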
+ src = imaging.Rotate(src, float64(conf.Rotate), color.Transparent) + } + + converted, err := f(src, conf) + if err != nil { + return ci, err + } + + b := converted.Bounds() + ci.config = image.Config{Width: b.Max.X, Height: b.Max.Y} + ci.configLoaded = true + + return ci, i.encodeToDestinations(converted, conf, resourceCacheFilename, ci.RelPermalink()) + }) + +} + +func (i imageConfig) key() string { + k := strconv.Itoa(i.Width) + "x" + strconv.Itoa(i.Height) + if i.Action != "" { + k += "_" + i.Action + } + if i.Quality > 0 { + k += "_q" + strconv.Itoa(i.Quality) + } + if i.Rotate != 0 { + k += "_r" + strconv.Itoa(i.Rotate) + } + k += "_" + i.FilterStr + "_" + i.AnchorStr + return k +} + +var defaultImageConfig = imageConfig{ + Action: "", + Anchor: imaging.Center, + AnchorStr: strings.ToLower("Center"), +} + +func newImageConfig(width, height, quality, rotate int, filter, anchor string) imageConfig { + c := defaultImageConfig + + c.Width = width + c.Height = height + c.Quality = quality + c.Rotate = rotate + + if filter != "" { + filter = strings.ToLower(filter) + if v, ok := imageFilters[filter]; ok { + c.Filter = v + c.FilterStr = filter + } + } + + if anchor != "" { + anchor = strings.ToLower(anchor) + if v, ok := anchorPositions[anchor]; ok { + c.Anchor = v + c.AnchorStr = anchor + } + } + + return c +} + +func parseImageConfig(config string) (imageConfig, error) { + var ( + c = defaultImageConfig + err error + ) + + if config == "" { + return c, errors.New("image config cannot be empty") + } + + parts := strings.Fields(config) + for _, part := range parts { + part = strings.ToLower(part) + + if pos, ok := anchorPositions[part]; ok { + c.Anchor = pos + c.AnchorStr = part + } else if filter, ok := imageFilters[part]; ok { + c.Filter = filter + c.FilterStr = part + } else if part[0] == 'q' { + c.Quality, err = strconv.Atoi(part[1:]) + if err != nil { + return c, err + } + if c.Quality < 1 && c.Quality > 100 { + return c, errors.New("quality ranges from 1 to 100 inclusive") + } + } else if part[0] == 'r' { + c.Rotate, err = strconv.Atoi(part[1:]) + if err != nil { + return c, err + } + } else if strings.Contains(part, "x") { + widthHeight := strings.Split(part, "x") + if len(widthHeight) <= 2 { + first := widthHeight[0] + if first != "" { + c.Width, err = strconv.Atoi(first) + if err != nil { + return c, err + } + } + + if len(widthHeight) == 2 { + second := widthHeight[1] + if second != "" { + c.Height, err = strconv.Atoi(second) + if err != nil { + return c, err + } + } + } + } else { + return c, errors.New("invalid image dimensions") + } + + } + } + + if c.Width == 0 && c.Height == 0 { + return c, errors.New("must provide Width or Height") + } + + return c, nil +} + +func (i *Image) initConfig() error { + var err error + i.configInit.Do(func() { + if i.configLoaded { + return + } + + var ( + f afero.File + config image.Config + ) + + f, err = i.spec.Fs.Source.Open(i.AbsSourceFilename()) + if err != nil { + return + } + defer f.Close() + + config, _, err = image.DecodeConfig(f) + if err != nil { + return + } + i.config = config + }) + + return err +} + +func (i *Image) decodeSource() (image.Image, error) { + file, err := i.spec.Fs.Source.Open(i.AbsSourceFilename()) + if err != nil { + return nil, err + } + defer file.Close() + return imaging.Decode(file) +} + +func (i *Image) copyToDestination(src string) error { + target := filepath.Join(i.absPublishDir, i.RelPermalink()) + + in, err := i.spec.Fs.Source.Open(src) + if err != nil { + return err + } + defer in.Close() + + out, err := 
i.spec.Fs.Destination.Create(target) + if err != nil { + return err + } + defer out.Close() + + _, err = io.Copy(out, in) + if err != nil { + return err + } + return nil +} + +func (i *Image) encodeToDestinations(img image.Image, conf imageConfig, resourceCacheFilename, filename string) error { + ext := strings.ToLower(helpers.Ext(filename)) + + imgFormat, ok := imageFormats[ext] + if !ok { + return imaging.ErrUnsupportedFormat + } + + target := filepath.Join(i.absPublishDir, filename) + + file1, err := i.spec.Fs.Destination.Create(target) + if err != nil { + return err + } + defer file1.Close() + + var w io.Writer + + if resourceCacheFilename != "" { + // Also save it to the image resource cache for later reuse. + if err = i.spec.Fs.Source.MkdirAll(filepath.Dir(resourceCacheFilename), os.FileMode(0755)); err != nil { + return err + } + + file2, err := i.spec.Fs.Source.Create(resourceCacheFilename) + if err != nil { + return err + } + + w = io.MultiWriter(file1, file2) + defer file2.Close() + } else { + w = file1 + } + + switch imgFormat { + case imaging.JPEG: + + var rgba *image.RGBA + quality := conf.Quality + + if nrgba, ok := img.(*image.NRGBA); ok { + if nrgba.Opaque() { + rgba = &image.RGBA{ + Pix: nrgba.Pix, + Stride: nrgba.Stride, + Rect: nrgba.Rect, + } + } + } + if rgba != nil { + return jpeg.Encode(w, rgba, &jpeg.Options{Quality: quality}) + } else { + return jpeg.Encode(w, img, &jpeg.Options{Quality: quality}) + } + default: + return imaging.Encode(w, img, imgFormat) + } + +} + +func (i *Image) clone() *Image { + g := *i.genericResource + + return &Image{ + imaging: i.imaging, + genericResource: &g} +} + +func (i *Image) setBasePath(conf imageConfig) { + i.rel = i.filenameFromConfig(conf) +} + +// We need to set this to something static during tests. +var fiModTimeFunc = func(fi os.FileInfo) int64 { + return fi.ModTime().Unix() +} + +func (i *Image) filenameFromConfig(conf imageConfig) string { + p1, p2 := helpers.FileAndExt(i.rel) + sizeModeStr := fmt.Sprintf("_S%d_T%d", i.osFileInfo.Size(), fiModTimeFunc(i.osFileInfo)) + // On scaling an already scaled image, we get the file info from the original. + // Repeating the same info in the filename makes it stuttery for no good reason. + if strings.Contains(p1, sizeModeStr) { + sizeModeStr = "" + } + + const md5Threshold = 100 + + key := conf.key() + + // It is useful to have the key in clear text, but when nesting transforms, it + // can easily be too long to read, and maybe even too long + // for the different OSes to handle. + if len(p1)+len(sizeModeStr)+len(p2) > md5Threshold { + key = helpers.MD5String(p1 + key + p2) + p1 = p1[:strings.Index(p1, "_S")] + } + + return fmt.Sprintf("%s%s_%s%s", p1, sizeModeStr, key, p2) +} + +func decodeImaging(m map[string]interface{}) (Imaging, error) { + var i Imaging + if err := mapstructure.WeakDecode(m, &i); err != nil { + return i, err + } + + if i.Quality <= 0 || i.Quality > 100 { + i.Quality = defaultJPEGQuality + } + + if i.ResampleFilter == "" { + i.ResampleFilter = defaultResampleFilter + } else { + filter := strings.ToLower(i.ResampleFilter) + _, found := imageFilters[filter] + if !found { + return i, fmt.Errorf("%q is not a valid resample filter", filter) + } + i.ResampleFilter = filter + } + + return i, nil +} diff --git a/resource/image_cache.go b/resource/image_cache.go new file mode 100644 index 00000000000..8a0e17e31d3 --- /dev/null +++ b/resource/image_cache.go @@ -0,0 +1,109 @@ +// Copyright 2017-present The Hugo Authors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resource + +import ( + "fmt" + "os" + "path/filepath" + "sync" + "time" + + "github.com/gohugoio/hugo/helpers" +) + +type imageCache struct { + absPublishDir string + absCacheDir string + pathSpec *helpers.PathSpec + mu sync.RWMutex + store map[string]*Image +} + +func (c *imageCache) isInCache(key string) bool { + c.mu.RLock() + _, found := c.store[key] + c.mu.RUnlock() + return found +} + +func (c *imageCache) getOrCreate( + spec *Spec, key string, create func(resourceCacheFilename string) (*Image, error)) (*Image, error) { + // First check the in-memory store, then the disk. + c.mu.RLock() + img, found := c.store[key] + c.mu.RUnlock() + + if found { + return img, nil + } + + // Now look in the file cache. + cacheFilename := filepath.Join(c.absCacheDir, key) + + // The definition of this counter is not that we have processed that amount + // (e.g. resized etc.), it can be fetched from file cache, + // but the count of processed image variations for this site. + c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages) + + r, err := spec.NewResourceFromFilename(nil, c.absPublishDir, cacheFilename, key) + notFound := err != nil && os.IsNotExist(err) + if err != nil && !os.IsNotExist(err) { + return nil, err + } + + if notFound { + img, err = create(cacheFilename) + if err != nil { + return nil, err + } + } else { + img = r.(*Image) + } + + c.mu.Lock() + if img2, found := c.store[key]; found { + c.mu.Unlock() + return img2, nil + } + + c.store[key] = img + + c.mu.Unlock() + + if notFound { + // File already written to destination + return img, nil + } + + // Fast path: + // This is a processed version of the original. + // If it exists on destination with the same filename and file size, it is + // the same file, so no need to transfer it again. + if fi, err := spec.Fs.Destination.Stat(cacheFilename); err == nil && fi.Size() == img.osFileInfo.Size() { + return img, nil + } + + return img, img.copyToDestination(cacheFilename) + +} + +func newImageCache(ps *helpers.PathSpec, absCacheDir, absPublishDir string) *imageCache { + return &imageCache{pathSpec: ps, store: make(map[string]*Image), absCacheDir: absCacheDir, absPublishDir: absPublishDir} +} + +func timeTrack(start time.Time, name string) { + elapsed := time.Since(start) + fmt.Printf("%s took %s\n", name, elapsed) +} diff --git a/resource/image_test.go b/resource/image_test.go new file mode 100644 index 00000000000..3543abb37dd --- /dev/null +++ b/resource/image_test.go @@ -0,0 +1,134 @@ +// Copyright 2017-present The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resource + +import ( + "fmt" + "os" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestParseImageConfig(t *testing.T) { + for i, this := range []struct { + in string + expect interface{} + }{ + {"300x400", newImageConfig(300, 400, 0, 0, "", "")}, + {"100x200 bottomRight", newImageConfig(100, 200, 0, 0, "", "BottomRight")}, + {"10x20 topleft Lanczos", newImageConfig(10, 20, 0, 0, "Lanczos", "topleft")}, + {"linear left 10x r180", newImageConfig(10, 0, 0, 180, "linear", "left")}, + {"x20 riGht Cosine q95", newImageConfig(0, 20, 95, 0, "cosine", "right")}, + + {"", false}, + {"foo", false}, + } { + result, err := parseImageConfig(this.in) + if b, ok := this.expect.(bool); ok && !b { + if err == nil { + t.Errorf("[%d] parseImageConfig didn't return an expected error", i) + } + } else { + if err != nil { + t.Fatalf("[%d] err: %s", i, err) + } + if fmt.Sprint(result) != fmt.Sprint(this.expect) { + t.Fatalf("[%d] got\n%v\n but expected\n%v", i, result, this.expect) + } + } + } +} + +func TestImageTransform(t *testing.T) { + fiModTimeFunc = func(fi os.FileInfo) int64 { + return int64(10111213) + } + + assert := require.New(t) + + image := fetchSunset(assert) + + assert.Equal("/a/sunset.jpg", image.RelPermalink()) + assert.Equal("image", image.ResourceType()) + + resized, err := image.Resize("300x200") + assert.NoError(err) + assert.True(image != resized) + assert.True(image.genericResource != resized.genericResource) + + resized0x, err := image.Resize("x200") + assert.NoError(err) + assert.Equal(320, resized0x.Width()) + assert.Equal(200, resized0x.Height()) + assertFileCache(assert, image.spec.Fs, resized0x.RelPermalink(), 320, 200) + + resizedx0, err := image.Resize("200x") + assert.NoError(err) + assert.Equal(200, resizedx0.Width()) + assert.Equal(125, resizedx0.Height()) + assertFileCache(assert, image.spec.Fs, resizedx0.RelPermalink(), 200, 125) + + resizedAndRotated, err := image.Resize("x200 r90") + assert.NoError(err) + assert.Equal(125, resizedAndRotated.Width()) + assert.Equal(200, resizedAndRotated.Height()) + assertFileCache(assert, image.spec.Fs, resizedAndRotated.RelPermalink(), 125, 200) + + assert.Equal("/a/sunset_S90587_T10111213_300x200_resize_q75_box_center.jpg", resized.RelPermalink()) + assert.Equal(300, resized.Width()) + assert.Equal(200, resized.Height()) + + fitted, err := resized.Fit("50x50") + assert.NoError(err) + assert.Equal("/a/sunset_S90587_T10111213_300x200_resize_q75_box_center_50x50_fit_q75_box_center.jpg", fitted.RelPermalink()) + assert.Equal(50, fitted.Width()) + assert.Equal(31, fitted.Height()) + + // Check the MD5 key threshold + fittedAgain, _ := fitted.Fit("10x20") + fittedAgain, err = fittedAgain.Fit("10x20") + assert.NoError(err) + assert.Equal("/a/sunset_f1fb715a17c42d5d4602a1870424d590.jpg", fittedAgain.RelPermalink()) + assert.Equal(10, fittedAgain.Width()) + assert.Equal(6, fittedAgain.Height()) + + filled, err := image.Fill("200x100 bottomLeft") + assert.NoError(err) + assert.Equal("/a/sunset_S90587_T10111213_200x100_fill_q75_box_bottomleft.jpg", filled.RelPermalink()) + assert.Equal(200, filled.Width()) + 
assert.Equal(100, filled.Height()) + assertFileCache(assert, image.spec.Fs, filled.RelPermalink(), 200, 100) + + // Check cache + filledAgain, err := image.Fill("200x100 bottomLeft") + assert.NoError(err) + assert.True(filled == filledAgain) + assertFileCache(assert, image.spec.Fs, filledAgain.RelPermalink(), 200, 100) + +} + +func TestDecodeImaging(t *testing.T) { + assert := require.New(t) + m := map[string]interface{}{ + "quality": 42, + "resampleFilter": "NearestNeighbor", + } + + imaging, err := decodeImaging(m) + + assert.NoError(err) + assert.Equal(42, imaging.Quality) + assert.Equal("nearestneighbor", imaging.ResampleFilter) +} diff --git a/resource/resource.go b/resource/resource.go new file mode 100644 index 00000000000..c63c1e04fa9 --- /dev/null +++ b/resource/resource.go @@ -0,0 +1,252 @@ +// Copyright 2017-present The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resource + +import ( + "mime" + "os" + "path" + "path/filepath" + "strings" + + "github.com/gohugoio/hugo/media" + "github.com/gohugoio/hugo/source" + + "github.com/gohugoio/hugo/helpers" +) + +var ( + _ Resource = (*genericResource)(nil) + _ Source = (*genericResource)(nil) + _ Cloner = (*genericResource)(nil) +) + +const DefaultResourceType = "unknown" + +type Source interface { + AbsSourceFilename() string + Publish() error +} + +type Cloner interface { + WithNewBase(base string) Resource +} + +// Resource represents a linkable resource, i.e. a content page, image etc. +type Resource interface { + Permalink() string + RelPermalink() string + ResourceType() string +} + +// Resources represents a slice of resources, which can be a mix of different types. +// I.e. both pages and images etc. +type Resources []Resource + +func (r Resources) ByType(tp string) []Resource { + var filtered []Resource + + for _, resource := range r { + if resource.ResourceType() == tp { + filtered = append(filtered, resource) + } + } + return filtered +} + +// GetBySuffix gets the first resource matching the given filename prefix, e.g +// "logo" will match logo.png. It returns nil of none found. +// In potential ambiguous situations, combine it with ByType. +func (r Resources) GetByPrefix(prefix string) Resource { + for _, resource := range r { + _, name := filepath.Split(resource.RelPermalink()) + if strings.HasPrefix(name, prefix) { + return resource + } + } + return nil +} + +type Spec struct { + *helpers.PathSpec + mimeTypes media.Types + + // Holds default filter settings etc. 
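+	// Decoded from the "imaging" section of the site config, e.g. (values hypothetical):
+	//
+	//   [imaging]
+	//     quality = 85
+	//     resampleFilter = "CatmullRom"
+	//
+	// Missing values fall back to quality 75 and the "box" filter (see decodeImaging).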
+ imaging *Imaging + + imageCache *imageCache + + AbsGenImagePath string +} + +func NewSpec(s *helpers.PathSpec, mimeTypes media.Types) (*Spec, error) { + + imaging, err := decodeImaging(s.Cfg.GetStringMap("imaging")) + if err != nil { + return nil, err + } + s.GetLayoutDirPath() + + genImagePath := s.AbsPathify(filepath.Join(s.Cfg.GetString("resourceDir"), "_gen", "images")) + + return &Spec{AbsGenImagePath: genImagePath, PathSpec: s, imaging: &imaging, mimeTypes: mimeTypes, imageCache: newImageCache( + s, + // We're going to write a cache pruning routine later, so make it extremely + // unlikely that the user shoots him or herself in the foot + // and this is set to a value that represents data he/she + // cares about. This should be set in stone once released. + genImagePath, + s.AbsPathify(s.Cfg.GetString("publishDir")))}, nil +} + +func (r *Spec) NewResourceFromFile( + linker func(base string) string, + absPublishDir string, + file source.File, relTargetFilename string) (Resource, error) { + + return r.newResource(linker, absPublishDir, file.Filename(), file.FileInfo(), relTargetFilename) +} + +func (r *Spec) NewResourceFromFilename( + linker func(base string) string, + absPublishDir, + absSourceFilename, relTargetFilename string) (Resource, error) { + + fi, err := r.Fs.Source.Stat(absSourceFilename) + if err != nil { + return nil, err + } + return r.newResource(linker, absPublishDir, absSourceFilename, fi, relTargetFilename) +} + +func (r *Spec) newResource( + linker func(base string) string, + absPublishDir, + absSourceFilename string, fi os.FileInfo, relTargetFilename string) (Resource, error) { + + var mimeType string + ext := filepath.Ext(relTargetFilename) + m, found := r.mimeTypes.GetBySuffix(strings.TrimPrefix(ext, ".")) + if found { + mimeType = m.SubType + } else { + mimeType = mime.TypeByExtension(ext) + if mimeType == "" { + mimeType = DefaultResourceType + } else { + mimeType = mimeType[:strings.Index(mimeType, "/")] + } + } + + gr := r.newGenericResource(linker, fi, absPublishDir, absSourceFilename, filepath.ToSlash(relTargetFilename), mimeType) + + if mimeType == "image" { + return &Image{ + imaging: r.imaging, + genericResource: gr}, nil + } + return gr, nil +} + +func (r *Spec) IsInCache(key string) bool { + // This is used for cache pruning. We currently only have images, but we could + // imagine expanding on this. + return r.imageCache.isInCache(key) +} + +// genericResource represents a generic linkable resource. +type genericResource struct { + // The relative path to this resource. + rel string + + // Base is set when the output format's path has a offset, e.g. for AMP. + base string + + // Absolute filename to the source, including any content folder path. + absSourceFilename string + absPublishDir string + resourceType string + osFileInfo os.FileInfo + + spec *Spec + link func(rel string) string +} + +func (l *genericResource) Permalink() string { + return l.spec.PermalinkForBaseURL(l.RelPermalink(), l.spec.BaseURL.String()) +} + +func (l *genericResource) RelPermalink() string { + return l.relPermalinkForRel(l.rel) +} + +// Implement the Cloner interface. 
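+// WithNewBase returns a copy of this resource with its links rooted at base;
+// e.g. a resource whose RelPermalink is a/b/data.json cloned with base "amp"
+// resolves to /amp/a/b/data.json (base value illustrative; cf.
+// TestNewResourceFromFilename and the output-format note on genericResource.base).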
+func (l genericResource) WithNewBase(base string) Resource { + l.base = base + return &l +} + +func (l *genericResource) relPermalinkForRel(rel string) string { + if l.link != nil { + rel = l.link(rel) + } + + if l.base != "" { + rel = path.Join(l.base, rel) + if rel[0] != '/' { + rel = "/" + rel + } + } + + return l.spec.PathSpec.URLizeFilename(rel) +} + +func (l *genericResource) ResourceType() string { + return l.resourceType +} + +func (l *genericResource) AbsSourceFilename() string { + return l.absSourceFilename +} + +func (l *genericResource) Publish() error { + f, err := l.spec.Fs.Source.Open(l.AbsSourceFilename()) + if err != nil { + return err + } + defer f.Close() + + target := filepath.Join(l.absPublishDir, l.RelPermalink()) + + return helpers.WriteToDisk(target, f, l.spec.Fs.Destination) +} + +func (r *Spec) newGenericResource( + linker func(base string) string, + osFileInfo os.FileInfo, + absPublishDir, + absSourceFilename, + baseFilename, + resourceType string) *genericResource { + + return &genericResource{ + link: linker, + osFileInfo: osFileInfo, + absPublishDir: absPublishDir, + absSourceFilename: absSourceFilename, + rel: baseFilename, + resourceType: resourceType, + spec: r, + } +} diff --git a/resource/resource_test.go b/resource/resource_test.go new file mode 100644 index 00000000000..34d63cd604e --- /dev/null +++ b/resource/resource_test.go @@ -0,0 +1,108 @@ +// Copyright 2017-present The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package resource + +import ( + "path" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestGenericResource(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + + r := spec.newGenericResource(nil, nil, "/public", "/a/foo.css", "foo.css", "css") + + assert.Equal("https://example.com/foo.css", r.Permalink()) + assert.Equal("foo.css", r.RelPermalink()) + assert.Equal("css", r.ResourceType()) + +} + +func TestGenericResourceWithLinkFacory(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + + factory := func(s string) string { + return path.Join("/foo", s) + } + r := spec.newGenericResource(factory, nil, "/public", "/a/foo.css", "foo.css", "css") + + assert.Equal("https://example.com/foo/foo.css", r.Permalink()) + assert.Equal("/foo/foo.css", r.RelPermalink()) + assert.Equal("css", r.ResourceType()) +} + +func TestNewResourceFromFilename(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + + writeSource(t, spec.Fs, "/project/a/b/logo.png", "image") + writeSource(t, spec.Fs, "/root/a/b/data.json", "json") + + r, err := spec.NewResourceFromFilename(nil, "/public", + filepath.FromSlash("/project/a/b/logo.png"), filepath.FromSlash("a/b/logo.png")) + + assert.NoError(err) + assert.NotNil(r) + assert.Equal("image", r.ResourceType()) + assert.Equal("a/b/logo.png", r.RelPermalink()) + assert.Equal("https://example.com/a/b/logo.png", r.Permalink()) + + r, err = spec.NewResourceFromFilename(nil, "/public", "/root/a/b/data.json", "a/b/data.json") + + assert.NoError(err) + assert.NotNil(r) + assert.Equal("json", r.ResourceType()) + + cloned := r.(Cloner).WithNewBase("aceof") + assert.Equal(r.ResourceType(), cloned.ResourceType()) + assert.Equal("/aceof/a/b/data.json", cloned.RelPermalink()) +} + +func TestResourcesByType(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + resources := Resources{ + spec.newGenericResource(nil, nil, "/public", "/a/foo1.css", "foo1.css", "css"), + spec.newGenericResource(nil, nil, "/public", "/a/logo.png", "logo.css", "image"), + spec.newGenericResource(nil, nil, "/public", "/a/foo2.css", "foo2.css", "css"), + spec.newGenericResource(nil, nil, "/public", "/a/foo3.css", "foo3.css", "css")} + + assert.Len(resources.ByType("css"), 3) + assert.Len(resources.ByType("image"), 1) + +} + +func TestResourcesGetByPrefix(t *testing.T) { + assert := require.New(t) + spec := newTestResourceSpec(assert) + resources := Resources{ + spec.newGenericResource(nil, nil, "/public", "/a/foo1.css", "foo1.css", "css"), + spec.newGenericResource(nil, nil, "/public", "/a/logo1.png", "logo1.png", "image"), + spec.newGenericResource(nil, nil, "/public", "/b/logo2.png", "logo2.png", "image"), + spec.newGenericResource(nil, nil, "/public", "/b/foo2.css", "foo2.css", "css"), + spec.newGenericResource(nil, nil, "/public", "/b/foo3.css", "foo3.css", "css")} + + assert.Nil(resources.GetByPrefix("asdf")) + assert.Equal("logo1.png", resources.GetByPrefix("logo").RelPermalink()) + assert.Equal("foo2.css", resources.GetByPrefix("foo2").RelPermalink()) + assert.Equal("foo1.css", resources.GetByPrefix("foo1").RelPermalink()) + assert.Equal("foo1.css", resources.GetByPrefix("foo1").RelPermalink()) + assert.Nil(resources.GetByPrefix("asdfasdf")) + +} diff --git a/resource/testdata/sunset.jpg b/resource/testdata/sunset.jpg new file mode 100644 index 00000000000..7d7307bed36 Binary files /dev/null and b/resource/testdata/sunset.jpg differ diff --git 
a/resource/testhelpers_test.go b/resource/testhelpers_test.go new file mode 100644 index 00000000000..7ab2106889e --- /dev/null +++ b/resource/testhelpers_test.go @@ -0,0 +1,78 @@ +package resource + +import ( + "path/filepath" + "testing" + + "image" + "io" + "os" + "path" + + "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/hugofs" + "github.com/gohugoio/hugo/media" + "github.com/spf13/afero" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" +) + +func newTestResourceSpec(assert *require.Assertions) *Spec { + cfg := viper.New() + cfg.Set("baseURL", "https://example.com/") + cfg.Set("resourceDir", "/res") + fs := hugofs.NewMem(cfg) + + s, err := helpers.NewPathSpec(fs, cfg) + assert.NoError(err) + + spec, err := NewSpec(s, media.DefaultTypes) + assert.NoError(err) + return spec +} + +func fetchSunset(assert *require.Assertions) *Image { + src, err := os.Open("testdata/sunset.jpg") + assert.NoError(err) + + spec := newTestResourceSpec(assert) + + out, err := spec.Fs.Source.Create("/b/sunset.jpg") + assert.NoError(err) + _, err = io.Copy(out, src) + out.Close() + src.Close() + assert.NoError(err) + + factory := func(s string) string { + return path.Join("/a", s) + } + + r, err := spec.NewResourceFromFilename(factory, "/public", "/b/sunset.jpg", "sunset.jpg") + assert.NoError(err) + assert.IsType(&Image{}, r) + return r.(*Image) + +} + +func assertFileCache(assert *require.Assertions, fs *hugofs.Fs, filename string, width, height int) { + f, err := fs.Source.Open(filepath.Join("/res/_gen/images", filename)) + assert.NoError(err) + defer f.Close() + + config, _, err := image.DecodeConfig(f) + assert.NoError(err) + + assert.Equal(width, config.Width) + assert.Equal(height, config.Height) +} + +func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) { + writeToFs(t, fs.Source, filename, content) +} + +func writeToFs(t testing.TB, fs afero.Fs, filename, content string) { + if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil { + t.Fatalf("Failed to write file: %s", err) + } +} diff --git a/source/content_directory_test.go b/source/content_directory_test.go index 4ff12af8dd4..9874acec2bb 100644 --- a/source/content_directory_test.go +++ b/source/content_directory_test.go @@ -14,6 +14,7 @@ package source import ( + "path/filepath" "testing" "github.com/gohugoio/hugo/hugofs" @@ -41,21 +42,21 @@ func TestIgnoreDotFilesAndDirectories(t *testing.T) { {"foobar/bar~foo.md", false, nil}, {"foobar/foo.md", true, []string{"\\.md$", "\\.boo$"}}, {"foobar/foo.html", false, []string{"\\.md$", "\\.boo$"}}, - {"foobar/foo.md", true, []string{"^foo"}}, - {"foobar/foo.md", false, []string{"*", "\\.md$", "\\.boo$"}}, + {"foobar/foo.md", true, []string{"foo.md$"}}, + {"foobar/foo.md", true, []string{"*", "\\.md$", "\\.boo$"}}, {"foobar/.#content.md", true, []string{"/\\.#"}}, {".#foobar.md", true, []string{"^\\.#"}}, } - for _, test := range tests { + for i, test := range tests { v := viper.New() v.Set("ignoreFiles", test.ignoreFilesRegexpes) s := NewSourceSpec(v, hugofs.NewMem(v)) - if ignored := s.isNonProcessablePath(test.path); test.ignore != ignored { - t.Errorf("File not ignored. 
Expected: %t, got: %t", test.ignore, ignored) + if ignored := s.IgnoreFile(filepath.FromSlash(test.path)); test.ignore != ignored { + t.Errorf("[%d] File not ignored", i) } } } diff --git a/source/dirs.go b/source/dirs.go index 1e6850da7bd..2c784a98a73 100644 --- a/source/dirs.go +++ b/source/dirs.go @@ -39,6 +39,8 @@ type Dirs struct { AbsStaticDirs []string publishDir string + + Language *helpers.Language } // NewDirs creates a new dirs with the given configuration and filesystem. @@ -48,7 +50,12 @@ func NewDirs(fs *hugofs.Fs, cfg config.Provider, logger *jww.Notepad) (*Dirs, er return nil, err } - d := &Dirs{pathSpec: ps, logger: logger} + var l *helpers.Language + if language, ok := cfg.(*helpers.Language); ok { + l = language + } + + d := &Dirs{Language: l, pathSpec: ps, logger: logger} return d, d.init(cfg) diff --git a/source/file.go b/source/file.go deleted file mode 100644 index a630431c6a5..00000000000 --- a/source/file.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2015 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package source - -import ( - "io" - "path/filepath" - "strings" - - "github.com/gohugoio/hugo/hugofs" - - "github.com/gohugoio/hugo/config" - "github.com/gohugoio/hugo/helpers" -) - -// SourceSpec abstracts language-specific file creation. -type SourceSpec struct { - Cfg config.Provider - Fs *hugofs.Fs - - languages map[string]interface{} - defaultContentLanguage string -} - -// NewSourceSpec initializes SourceSpec using languages from a given configuration. -func NewSourceSpec(cfg config.Provider, fs *hugofs.Fs) SourceSpec { - defaultLang := cfg.GetString("defaultContentLanguage") - languages := cfg.GetStringMap("languages") - return SourceSpec{Cfg: cfg, Fs: fs, languages: languages, defaultContentLanguage: defaultLang} -} - -// File represents a source content file. -// All paths are relative from the source directory base -type File struct { - relpath string // Original relative path, e.g. section/foo.txt - logicalName string // foo.txt - baseName string // `post` for `post.md`, also `post.en` for `post.en.md` - Contents io.Reader - section string // The first directory - dir string // The relative directory Path (minus file name) - ext string // Just the ext (eg txt) - uniqueID string // MD5 of the file's path - - translationBaseName string // `post` for `post.es.md` (if `Multilingual` is enabled.) - lang string // The language code if `Multilingual` is enabled -} - -// UniqueID is the MD5 hash of the file's path and is for most practical applications, -// Hugo content files being one of them, considered to be unique. -func (f *File) UniqueID() string { - return f.uniqueID -} - -// String returns the file's content as a string. -func (f *File) String() string { - return helpers.ReaderToString(f.Contents) -} - -// Bytes returns the file's content as a byte slice. -func (f *File) Bytes() []byte { - return helpers.ReaderToBytes(f.Contents) -} - -// BaseFileName is a filename without extension. 
-func (f *File) BaseFileName() string { - return f.baseName -} - -// TranslationBaseName is a filename with no extension, -// not even the optional language extension part. -func (f *File) TranslationBaseName() string { - return f.translationBaseName -} - -// Lang for this page, if `Multilingual` is enabled on your site. -func (f *File) Lang() string { - return f.lang -} - -// Section is first directory below the content root. -func (f *File) Section() string { - return f.section -} - -// LogicalName is filename and extension of the file. -func (f *File) LogicalName() string { - return f.logicalName -} - -// SetDir sets the relative directory where this file lives. -// TODO(bep) Get rid of this. -func (f *File) SetDir(dir string) { - f.dir = dir -} - -// Dir gets the name of the directory that contains this file. -// The directory is relative to the content root. -func (f *File) Dir() string { - return f.dir -} - -// Extension gets the file extension, i.e "myblogpost.md" will return "md". -func (f *File) Extension() string { - return f.ext -} - -// Ext is an alias for Extension. -func (f *File) Ext() string { - return f.Extension() -} - -// Path gets the relative path including file name and extension. -// The directory is relative to the content root. -func (f *File) Path() string { - return f.relpath -} - -// NewFileWithContents creates a new File pointer with the given relative path and -// content. The language defaults to "en". -func (sp SourceSpec) NewFileWithContents(relpath string, content io.Reader) *File { - file := sp.NewFile(relpath) - file.Contents = content - file.lang = "en" - return file -} - -// NewFile creates a new File pointer with the given relative path. -func (sp SourceSpec) NewFile(relpath string) *File { - f := &File{ - relpath: relpath, - } - - f.dir, f.logicalName = filepath.Split(f.relpath) - f.ext = strings.TrimPrefix(filepath.Ext(f.LogicalName()), ".") - f.baseName = helpers.Filename(f.LogicalName()) - - lang := strings.TrimPrefix(filepath.Ext(f.baseName), ".") - if _, ok := sp.languages[lang]; lang == "" || !ok { - f.lang = sp.defaultContentLanguage - f.translationBaseName = f.baseName - } else { - f.lang = lang - f.translationBaseName = helpers.Filename(f.baseName) - } - - f.section = helpers.GuessSection(f.Dir()) - f.uniqueID = helpers.Md5String(filepath.ToSlash(f.relpath)) - - return f -} - -// NewFileFromAbs creates a new File pointer with the given full file path path and -// content. -func (sp SourceSpec) NewFileFromAbs(base, fullpath string, content io.Reader) (f *File, err error) { - var name string - if name, err = helpers.GetRelativePath(fullpath, base); err != nil { - return nil, err - } - - return sp.NewFileWithContents(name, content), nil -} diff --git a/source/fileInfo.go b/source/fileInfo.go new file mode 100644 index 00000000000..e4b4a80fbd0 --- /dev/null +++ b/source/fileInfo.go @@ -0,0 +1,213 @@ +// Copyright 2017-present The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package source + +import ( + "io" + "os" + "path/filepath" + "strings" + "sync" + + "github.com/gohugoio/hugo/helpers" +) + +// fileInfo implements the File interface. +var ( + _ File = (*FileInfo)(nil) + _ ReadableFile = (*FileInfo)(nil) +) + +type File interface { + + // Filename gets the full path and filename to the file. + Filename() string + + // Path gets the relative path including file name and extension. + // The directory is relative to the content root. + Path() string + + // Dir gets the name of the directory that contains this file. + // The directory is relative to the content root. + Dir() string + + // Extension gets the file extension, i.e "myblogpost.md" will return "md". + Extension() string + // Ext is an alias for Extension. + Ext() string // Hmm... Deprecate Extension + + // Lang for this page, if `Multilingual` is enabled on your site. + Lang() string + + // LogicalName is filename and extension of the file. + LogicalName() string + + // Section is first directory below the content root. + Section() string + + // BaseFileName is a filename without extension. + BaseFileName() string + + // TranslationBaseName is a filename with no extension, + // not even the optional language extension part. + TranslationBaseName() string + + // UniqueID is the MD5 hash of the file's path and is for most practical applications, + // Hugo content files being one of them, considered to be unique. + UniqueID() string + + FileInfo() os.FileInfo + + String() string + + // Deprecated + Bytes() []byte +} + +// A ReadableFile is a File that is readable. +type ReadableFile interface { + File + Open() (io.ReadCloser, error) +} + +type FileInfo struct { + + // Absolute filename to the file on disk. + filename string + fi os.FileInfo + + // Derived from filename + ext string // Extension without any "." + lang string + + name string + + dir string + relDir string + relPath string + baseName string + translationBaseName string + section string + + uniqueID string + + sp *SourceSpec + + lazyInit sync.Once +} + +func (fi *FileInfo) Filename() string { return fi.filename } +func (fi *FileInfo) Path() string { return fi.relPath } +func (fi *FileInfo) Dir() string { return fi.relDir } +func (fi *FileInfo) Extension() string { return fi.Ext() } +func (fi *FileInfo) Ext() string { return fi.ext } +func (fi *FileInfo) Lang() string { return fi.lang } +func (fi *FileInfo) LogicalName() string { return fi.name } +func (fi *FileInfo) BaseFileName() string { return fi.baseName } +func (fi *FileInfo) TranslationBaseName() string { return fi.translationBaseName } + +func (fi *FileInfo) Section() string { + fi.init() + return fi.section +} + +func (fi *FileInfo) UniqueID() string { + fi.init() + return fi.uniqueID +} +func (fi *FileInfo) FileInfo() os.FileInfo { + return fi.fi +} + +func (fi *FileInfo) Bytes() []byte { + // Remove in Hugo 0.34 + helpers.Deprecated("File", "Bytes", "", false) + return []byte("") +} + +func (fi *FileInfo) String() string { return fi.BaseFileName() } + +// We create a lot of these FileInfo objects, but there are parts of it used only +// in some cases that is slightly expensive to construct. 
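+// As an illustration (path hypothetical): for <contentDir>/posts/first.en.md,
+// NewFileInfo derives relDir "posts", relPath "posts/first.en.md", ext "md",
+// baseName "first.en" and, when "en" is a configured language, lang "en" with
+// translationBaseName "first". Only section ("posts") and uniqueID (an MD5 of
+// the slash-separated relPath) are computed lazily here.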
+func (fi *FileInfo) init() { + fi.lazyInit.Do(func() { + parts := strings.Split(fi.relDir, helpers.FilePathSeparator) + var section string + if len(parts) == 1 { + section = parts[0] + } else if len(parts) > 1 { + if parts[0] == "" { + section = parts[1] + } else { + section = parts[0] + } + } + + fi.section = section + + fi.uniqueID = helpers.MD5String(filepath.ToSlash(fi.relPath)) + + }) +} + +func (sp *SourceSpec) NewFileInfo(baseDir, filename string, fi os.FileInfo) *FileInfo { + dir, name := filepath.Split(filename) + + dir = strings.TrimSuffix(dir, helpers.FilePathSeparator) + baseDir = strings.TrimSuffix(baseDir, helpers.FilePathSeparator) + + relDir := "" + if dir != baseDir { + relDir = strings.TrimPrefix(dir, baseDir) + } + + relDir = strings.TrimPrefix(relDir, helpers.FilePathSeparator) + + relPath := filepath.Join(relDir, name) + + ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(name), ".")) + baseName := helpers.Filename(name) + + lang := strings.TrimPrefix(filepath.Ext(baseName), ".") + var translationBaseName string + + if _, ok := sp.Languages[lang]; lang == "" || !ok { + lang = sp.DefaultContentLanguage + translationBaseName = baseName + } else { + translationBaseName = helpers.Filename(baseName) + } + + f := &FileInfo{ + sp: sp, + filename: filename, + fi: fi, + lang: lang, + ext: ext, + dir: dir, + relDir: relDir, + relPath: relPath, + name: name, + baseName: baseName, + translationBaseName: translationBaseName, + } + + return f + +} + +// Open implements ReadableFile. +func (fi *FileInfo) Open() (io.ReadCloser, error) { + return fi.sp.Fs.Source.Open(fi.Filename()) +} diff --git a/source/inmemory.go b/source/fileInfo_test.go similarity index 65% rename from source/inmemory.go rename to source/fileInfo_test.go index 387bde3b890..3f99497ad36 100644 --- a/source/inmemory.go +++ b/source/fileInfo_test.go @@ -1,4 +1,4 @@ -// Copyright 2015 The Hugo Authors. All rights reserved. +// Copyright 2017-present The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -13,13 +13,10 @@ package source -// ByteSource represents a source's name and content. -// It's currently only used for testing purposes. -type ByteSource struct { - Name string - Content []byte -} +import ( + "testing" +) + +func TestFileInfo(t *testing.T) { -func (b *ByteSource) String() string { - return b.Name + " " + string(b.Content) } diff --git a/source/file_test.go b/source/file_test.go deleted file mode 100644 index 64ad6fb46fb..00000000000 --- a/source/file_test.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2015 The Hugo Authors. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package source - -import ( - "path/filepath" - "strings" - "testing" - - "github.com/gohugoio/hugo/hugofs" - "github.com/spf13/viper" - - "github.com/stretchr/testify/assert" -) - -func TestFileUniqueID(t *testing.T) { - ss := newTestSourceSpec() - - f1 := File{uniqueID: "123"} - f2 := ss.NewFile("a") - - assert.Equal(t, "123", f1.UniqueID()) - assert.Equal(t, "0cc175b9c0f1b6a831c399e269772661", f2.UniqueID()) - - f3 := ss.NewFile(filepath.FromSlash("test1/index.md")) - f4 := ss.NewFile(filepath.FromSlash("test2/index.md")) - - assert.NotEqual(t, f3.UniqueID(), f4.UniqueID()) - - f5l := ss.NewFile("test3/index.md") - f5w := ss.NewFile(filepath.FromSlash("test3/index.md")) - - assert.Equal(t, f5l.UniqueID(), f5w.UniqueID()) -} - -func TestFileString(t *testing.T) { - ss := newTestSourceSpec() - assert.Equal(t, "abc", ss.NewFileWithContents("a", strings.NewReader("abc")).String()) - assert.Equal(t, "", ss.NewFile("a").String()) -} - -func TestFileBytes(t *testing.T) { - ss := newTestSourceSpec() - assert.Equal(t, []byte("abc"), ss.NewFileWithContents("a", strings.NewReader("abc")).Bytes()) - assert.Equal(t, []byte(""), ss.NewFile("a").Bytes()) -} - -func newTestSourceSpec() SourceSpec { - v := viper.New() - return SourceSpec{Fs: hugofs.NewMem(v), Cfg: v} -} diff --git a/source/filesystem.go b/source/filesystem.go index e6e354e9971..a5f2988e906 100644 --- a/source/filesystem.go +++ b/source/filesystem.go @@ -14,73 +14,52 @@ package source import ( - "io" "os" "path/filepath" - "regexp" "runtime" - "strings" + "sync" "github.com/gohugoio/hugo/helpers" - "github.com/spf13/cast" jww "github.com/spf13/jwalterweatherman" "golang.org/x/text/unicode/norm" ) -type Input interface { - Files() []*File -} - type Filesystem struct { - files []*File - Base string - AvoidPaths []string + files []ReadableFile + filesInit sync.Once + + Base string SourceSpec } -func (sp SourceSpec) NewFilesystem(base string, avoidPaths ...string) *Filesystem { - return &Filesystem{SourceSpec: sp, Base: base, AvoidPaths: avoidPaths} +type Input interface { + Files() []ReadableFile } -func (f *Filesystem) FilesByExts(exts ...string) []*File { - var newFiles []*File - - if len(exts) == 0 { - return f.Files() - } - - for _, x := range f.Files() { - for _, e := range exts { - if x.Ext() == strings.TrimPrefix(e, ".") { - newFiles = append(newFiles, x) - } - } - } - return newFiles +func (sp SourceSpec) NewFilesystem(base string) *Filesystem { + return &Filesystem{SourceSpec: sp, Base: base} } -func (f *Filesystem) Files() []*File { - if len(f.files) < 1 { +func (f *Filesystem) Files() []ReadableFile { + f.filesInit.Do(func() { f.captureFiles() - } + }) return f.files } // add populates a file in the Filesystem.files -func (f *Filesystem) add(name string, reader io.Reader) (err error) { - var file *File +func (f *Filesystem) add(name string, fi os.FileInfo) (err error) { + var file ReadableFile if runtime.GOOS == "darwin" { // When a file system is HFS+, its filepath is in NFD form. 
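+		// E.g. "é" can arrive as the single code point U+00E9 (NFC) or as "e" plus a
+		// combining accent (NFD); converting to NFC keeps file names comparable
+		// across platforms (see TestUnicodeNorm).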
 		name = norm.NFC.String(name)
 	}
 
-	file, err = f.SourceSpec.NewFileFromAbs(f.Base, name, reader)
+	file = f.SourceSpec.NewFileInfo(f.Base, name, fi)
+	f.files = append(f.files, file)
 
-	if err == nil {
-		f.files = append(f.files, file)
-	}
 	return err
 }
 
@@ -90,16 +69,12 @@ func (f *Filesystem) captureFiles() {
 			return nil
 		}
 
-		b, err := f.ShouldRead(filePath, fi)
+		b, err := f.shouldRead(filePath, fi)
 		if err != nil {
 			return err
 		}
 		if b {
-			rd, err := NewLazyFileReader(f.Fs.Source, filePath)
-			if err != nil {
-				return err
-			}
-			f.add(filePath, rd)
+			f.add(filePath, fi)
 		}
 		return err
 	}
@@ -118,11 +93,11 @@ func (f *Filesystem) captureFiles() {
 
 }
 
-func (f *Filesystem) ShouldRead(filePath string, fi os.FileInfo) (bool, error) {
+func (f *Filesystem) shouldRead(filename string, fi os.FileInfo) (bool, error) {
 	if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
-		link, err := filepath.EvalSymlinks(filePath)
+		link, err := filepath.EvalSymlinks(filename)
 		if err != nil {
-			jww.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", filePath, err)
+			jww.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", filename, err)
 			return false, nil
 		}
 		linkfi, err := f.Fs.Source.Stat(link)
@@ -130,52 +105,25 @@ func (f *Filesystem) ShouldRead(filePath string, fi os.FileInfo) (bool, error) {
 			jww.ERROR.Printf("Cannot stat '%s', error was: %s", link, err)
 			return false, nil
 		}
+
 		if !linkfi.Mode().IsRegular() {
-			jww.ERROR.Printf("Symbolic links for directories not supported, skipping '%s'", filePath)
+			jww.ERROR.Printf("Symbolic links for directories not supported, skipping '%s'", filename)
 		}
 		return false, nil
 	}
 
+	ignore := f.SourceSpec.IgnoreFile(filename)
+
 	if fi.IsDir() {
-		if f.avoid(filePath) || f.isNonProcessablePath(filePath) {
+		if ignore {
 			return false, filepath.SkipDir
 		}
 		return false, nil
 	}
 
-	if f.isNonProcessablePath(filePath) {
+	if ignore {
 		return false, nil
 	}
-	return true, nil
-}
-
-func (f *Filesystem) avoid(filePath string) bool {
-	for _, avoid := range f.AvoidPaths {
-		if avoid == filePath {
-			return true
-		}
-	}
-	return false
-}
-
-func (sp SourceSpec) isNonProcessablePath(filePath string) bool {
-	base := filepath.Base(filePath)
-	if strings.HasPrefix(base, ".") ||
-		strings.HasPrefix(base, "#") ||
-		strings.HasSuffix(base, "~") {
-		return true
-	}
-	ignoreFiles := cast.ToStringSlice(sp.Cfg.Get("ignoreFiles"))
-	if len(ignoreFiles) > 0 {
-		for _, ignorePattern := range ignoreFiles {
-			match, err := regexp.MatchString(ignorePattern, filePath)
-			if err != nil {
-				helpers.DistinctErrorLog.Printf("Invalid regexp '%s' in ignoreFiles: %s", ignorePattern, err)
-				return false
-			} else if match {
-				return true
-			}
-		}
-	}
-	return false
+
+	return true, nil
 }
diff --git a/source/filesystem_test.go b/source/filesystem_test.go
index 90512ce3f1f..25ce0268f2c 100644
--- a/source/filesystem_test.go
+++ b/source/filesystem_test.go
@@ -14,11 +14,13 @@
 package source
 
 import (
-	"bytes"
-	"path/filepath"
+	"os"
 	"runtime"
-	"strings"
 	"testing"
+
+	"github.com/gohugoio/hugo/hugofs"
+
+	"github.com/spf13/viper"
 )
 
 func TestEmptySourceFilesystem(t *testing.T) {
@@ -37,54 +39,6 @@ type TestPath struct {
 	dir      string
 }
 
-func TestAddFile(t *testing.T) {
-	ss := newTestSourceSpec()
-	tests := platformPaths
-	for _, test := range tests {
-		base := platformBase
-		srcDefault := ss.NewFilesystem("")
-		srcWithBase := ss.NewFilesystem(base)
-
-		for _, src := range []*Filesystem{srcDefault, srcWithBase} {
-
-			p := test.filename
-			if !filepath.IsAbs(test.filename) {
-				p = filepath.Join(src.Base, test.filename)
-			}
-
-			if err := src.add(p, bytes.NewReader([]byte(test.content))); err != nil {
-				if err.Error() == "source: missing base directory" {
-					continue
-				}
-				t.Fatalf("%s add returned an error: %s", p, err)
-			}
-
-			if len(src.Files()) != 1 {
-				t.Fatalf("%s Files() should return 1 file", p)
-			}
-
-			f := src.Files()[0]
-			if f.LogicalName() != test.logical {
-				t.Errorf("Filename (Base: %q) expected: %q, got: %q", src.Base, test.logical, f.LogicalName())
-			}
-
-			b := new(bytes.Buffer)
-			b.ReadFrom(f.Contents)
-			if b.String() != test.content {
-				t.Errorf("File (Base: %q) contents should be %q, got: %q", src.Base, test.content, b.String())
-			}
-
-			if f.Section() != test.section {
-				t.Errorf("File section (Base: %q) expected: %q, got: %q", src.Base, test.section, f.Section())
-			}
-
-			if f.Dir() != test.dir {
-				t.Errorf("Dir path (Base: %q) expected: %q, got: %q", src.Base, test.dir, f.Dir())
-			}
-		}
-	}
-}
-
 func TestUnicodeNorm(t *testing.T) {
 	if runtime.GOOS != "darwin" {
 		// Normalization code is only for Mac OS, since it is not necessary for other OSes.
@@ -100,10 +54,11 @@ func TestUnicodeNorm(t *testing.T) {
 	}
 
 	ss := newTestSourceSpec()
+	var fi os.FileInfo
 
 	for _, path := range paths {
-		src := ss.NewFilesystem("")
-		_ = src.add(path.NFD, strings.NewReader(""))
+		src := ss.NewFilesystem("base")
+		_ = src.add(path.NFD, fi)
 		f := src.Files()[0]
 		if f.BaseFileName() != path.NFC {
 			t.Fatalf("file name in NFD form should be normalized (%s)", path.NFC)
@@ -111,3 +66,8 @@
 	}
 
 }
+
+func newTestSourceSpec() SourceSpec {
+	v := viper.New()
+	return SourceSpec{Fs: hugofs.NewMem(v), Cfg: v}
+}
diff --git a/source/lazy_file_reader.go b/source/lazy_file_reader.go
deleted file mode 100644
index 7cc484f0b5e..00000000000
--- a/source/lazy_file_reader.go
+++ /dev/null
@@ -1,170 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-// Portions Copyright 2009 The Go Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package source
-
-import (
-	"bytes"
-	"errors"
-	"fmt"
-	"io"
-
-	"github.com/spf13/afero"
-)
-
-// LazyFileReader is an io.Reader implementation to postpone reading the file
-// contents until it is really needed. It keeps filename and file contents once
-// it is read.
-type LazyFileReader struct {
-	fs       afero.Fs
-	filename string
-	contents *bytes.Reader
-	pos      int64
-}
-
-// NewLazyFileReader creates and initializes a new LazyFileReader of filename.
-// It checks whether the file can be opened. If it fails, it returns nil and an
-// error.
-func NewLazyFileReader(fs afero.Fs, filename string) (*LazyFileReader, error) {
-	f, err := fs.Open(filename)
-	if err != nil {
-		return nil, err
-	}
-	defer f.Close()
-	return &LazyFileReader{fs: fs, filename: filename, contents: nil, pos: 0}, nil
-}
-
-// Filename returns a file name which LazyFileReader keeps
-func (l *LazyFileReader) Filename() string {
-	return l.filename
-}
-
-// Read reads up to len(p) bytes from the LazyFileReader's file and copies them
-// into p. It returns the number of bytes read and any error encountered. If
-// the file is once read, it returns its contents from cache, doesn't re-read
-// the file.
-func (l *LazyFileReader) Read(p []byte) (n int, err error) {
-	if l.contents == nil {
-		b, err := afero.ReadFile(l.fs, l.filename)
-		if err != nil {
-			return 0, fmt.Errorf("failed to read content from %s: %s", l.filename, err.Error())
-		}
-		l.contents = bytes.NewReader(b)
-	}
-	if _, err = l.contents.Seek(l.pos, 0); err != nil {
-		return 0, errors.New("failed to set read position: " + err.Error())
-	}
-	n, err = l.contents.Read(p)
-	l.pos += int64(n)
-	return n, err
-}
-
-// Seek implements the io.Seeker interface. Once reader contents is consumed by
-// Read, WriteTo etc, to read it again, it must be rewinded by this function
-func (l *LazyFileReader) Seek(offset int64, whence int) (pos int64, err error) {
-	if l.contents == nil {
-		switch whence {
-		case 0:
-			pos = offset
-		case 1:
-			pos = l.pos + offset
-		case 2:
-			fi, err := l.fs.Stat(l.filename)
-			if err != nil {
-				return 0, fmt.Errorf("failed to get %q info: %s", l.filename, err.Error())
-			}
-			pos = fi.Size() + offset
-		default:
-			return 0, errors.New("invalid whence")
-		}
-		if pos < 0 {
-			return 0, errors.New("negative position")
-		}
-	} else {
-		pos, err = l.contents.Seek(offset, whence)
-		if err != nil {
-			return 0, err
-		}
-	}
-	l.pos = pos
-	return pos, nil
-}
-
-// WriteTo writes data to w until all the LazyFileReader's file contents is
-// drained or an error occurs. If the file is once read, it just writes its
-// read cache to w, doesn't re-read the file but this method itself doesn't try
-// to keep the contents in cache.
-func (l *LazyFileReader) WriteTo(w io.Writer) (n int64, err error) {
-	if l.contents != nil {
-		l.contents.Seek(l.pos, 0)
-		if err != nil {
-			return 0, errors.New("failed to set read position: " + err.Error())
-		}
-		n, err = l.contents.WriteTo(w)
-		l.pos += n
-		return n, err
-	}
-	f, err := l.fs.Open(l.filename)
-	if err != nil {
-		return 0, fmt.Errorf("failed to open %s to read content: %s", l.filename, err.Error())
-	}
-	defer f.Close()
-
-	fi, err := f.Stat()
-	if err != nil {
-		return 0, fmt.Errorf("failed to get %q info: %s", l.filename, err.Error())
-	}
-
-	if l.pos >= fi.Size() {
-		return 0, nil
-	}
-
-	return l.copyBuffer(w, f, nil)
-}
-
-// copyBuffer is the actual implementation of Copy and CopyBuffer.
-// If buf is nil, one is allocated.
-//
-// Most of this function is copied from the Go stdlib 'io/io.go'.
-func (l *LazyFileReader) copyBuffer(dst io.Writer, src io.Reader, buf []byte) (written int64, err error) {
-	if buf == nil {
-		buf = make([]byte, 32*1024)
-	}
-	for {
-		nr, er := src.Read(buf)
-		if nr > 0 {
-			nw, ew := dst.Write(buf[0:nr])
-			if nw > 0 {
-				l.pos += int64(nw)
-				written += int64(nw)
-			}
-			if ew != nil {
-				err = ew
-				break
-			}
-			if nr != nw {
-				err = io.ErrShortWrite
-				break
-			}
-		}
-		if er == io.EOF {
-			break
-		}
-		if er != nil {
-			err = er
-			break
-		}
-	}
-	return written, err
-}
diff --git a/source/lazy_file_reader_test.go b/source/lazy_file_reader_test.go
deleted file mode 100644
index 778a9513be9..00000000000
--- a/source/lazy_file_reader_test.go
+++ /dev/null
@@ -1,236 +0,0 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package source
-
-import (
-	"bytes"
-	"io"
-	"os"
-	"testing"
-
-	"github.com/spf13/afero"
-)
-
-func TestNewLazyFileReader(t *testing.T) {
-	fs := afero.NewOsFs()
-	filename := "itdoesnotexistfile"
-	_, err := NewLazyFileReader(fs, filename)
-	if err == nil {
-		t.Errorf("NewLazyFileReader %s: error expected but no error is returned", filename)
-	}
-
-	filename = "lazy_file_reader_test.go"
-	_, err = NewLazyFileReader(fs, filename)
-	if err != nil {
-		t.Errorf("NewLazyFileReader %s: %v", filename, err)
-	}
-}
-
-func TestFilename(t *testing.T) {
-	fs := afero.NewOsFs()
-	filename := "lazy_file_reader_test.go"
-	rd, err := NewLazyFileReader(fs, filename)
-	if err != nil {
-		t.Fatalf("NewLazyFileReader %s: %v", filename, err)
-	}
-	if rd.Filename() != filename {
-		t.Errorf("Filename: expected filename %q, got %q", filename, rd.Filename())
-	}
-}
-
-func TestRead(t *testing.T) {
-	fs := afero.NewOsFs()
-	filename := "lazy_file_reader_test.go"
-	fi, err := fs.Stat(filename)
-	if err != nil {
-		t.Fatalf("os.Stat: %v", err)
-	}
-
-	b, err := afero.ReadFile(fs, filename)
-	if err != nil {
-		t.Fatalf("afero.ReadFile: %v", err)
-	}
-
-	rd, err := NewLazyFileReader(fs, filename)
-	if err != nil {
-		t.Fatalf("NewLazyFileReader %s: %v", filename, err)
-	}
-
-	tst := func(testcase string) {
-		p := make([]byte, fi.Size())
-		n, err := rd.Read(p)
-		if err != nil {
-			t.Fatalf("Read %s case: %v", testcase, err)
-		}
-		if int64(n) != fi.Size() {
-			t.Errorf("Read %s case: read bytes length expected %d, got %d", testcase, fi.Size(), n)
-		}
-		if !bytes.Equal(b, p) {
-			t.Errorf("Read %s case: read bytes are different from expected", testcase)
-		}
-	}
-	tst("No cache")
-	_, err = rd.Seek(0, 0)
-	if err != nil {
-		t.Fatalf("Seek: %v", err)
-	}
-	tst("Cache")
-}
-
-func TestSeek(t *testing.T) {
-	type testcase struct {
-		seek     int
-		offset   int64
-		length   int
-		moveto   int64
-		expected []byte
-	}
-	fs := afero.NewOsFs()
-	filename := "lazy_file_reader_test.go"
-	b, err := afero.ReadFile(fs, filename)
-	if err != nil {
-		t.Fatalf("afero.ReadFile: %v", err)
-	}
-
-	// no cache case
-	for i, this := range []testcase{
-		{seek: os.SEEK_SET, offset: 0, length: 10, moveto: 0, expected: b[:10]},
-		{seek: os.SEEK_SET, offset: 5, length: 10, moveto: 5, expected: b[5:15]},
-		{seek: os.SEEK_CUR, offset: 5, length: 10, moveto: 5, expected: b[5:15]}, // current pos = 0
-		{seek: os.SEEK_END, offset: -1, length: 1, moveto: int64(len(b) - 1), expected: b[len(b)-1:]},
-		{seek: 3, expected: nil},
-		{seek: os.SEEK_SET, offset: -1, expected: nil},
-	} {
-		rd, err := NewLazyFileReader(fs, filename)
-		if err != nil {
-			t.Errorf("[%d] NewLazyFileReader %s: %v", i, filename, err)
-			continue
-		}
-
-		pos, err := rd.Seek(this.offset, this.seek)
-		if this.expected == nil {
-			if err == nil {
-				t.Errorf("[%d] Seek didn't return an expected error", i)
-			}
-		} else {
-			if err != nil {
-				t.Errorf("[%d] Seek failed unexpectedly: %v", i, err)
-				continue
-			}
-			if pos != this.moveto {
-				t.Errorf("[%d] Seek failed to move the pointer: got %d, expected: %d", i, pos, this.moveto)
-			}
-
-			buf := make([]byte, this.length)
-			n, err := rd.Read(buf)
-			if err != nil {
-				t.Errorf("[%d] Read failed unexpectedly: %v", i, err)
-			}
-			if !bytes.Equal(this.expected, buf[:n]) {
-				t.Errorf("[%d] Seek and Read got %q but expected %q", i, buf[:n], this.expected)
-			}
-		}
-	}
-
-	// cache case
-	rd, err := NewLazyFileReader(fs, filename)
-	if err != nil {
-		t.Fatalf("NewLazyFileReader %s: %v", filename, err)
-	}
-	dummy := make([]byte, len(b))
-	_, err = rd.Read(dummy)
-	if err != nil {
-		t.Fatalf("Read failed unexpectedly: %v", err)
-	}
-
-	for i, this := range []testcase{
-		{seek: os.SEEK_SET, offset: 0, length: 10, moveto: 0, expected: b[:10]},
-		{seek: os.SEEK_SET, offset: 5, length: 10, moveto: 5, expected: b[5:15]},
-		{seek: os.SEEK_CUR, offset: 1, length: 10, moveto: 16, expected: b[16:26]}, // current pos = 15
-		{seek: os.SEEK_END, offset: -1, length: 1, moveto: int64(len(b) - 1), expected: b[len(b)-1:]},
-		{seek: 3, expected: nil},
-		{seek: os.SEEK_SET, offset: -1, expected: nil},
-	} {
-		pos, err := rd.Seek(this.offset, this.seek)
-		if this.expected == nil {
-			if err == nil {
-				t.Errorf("[%d] Seek didn't return an expected error", i)
-			}
-		} else {
-			if err != nil {
-				t.Errorf("[%d] Seek failed unexpectedly: %v", i, err)
-				continue
-			}
-			if pos != this.moveto {
-				t.Errorf("[%d] Seek failed to move the pointer: got %d, expected: %d", i, pos, this.moveto)
-			}
-
-			buf := make([]byte, this.length)
-			n, err := rd.Read(buf)
-			if err != nil {
-				t.Errorf("[%d] Read failed unexpectedly: %v", i, err)
-			}
-			if !bytes.Equal(this.expected, buf[:n]) {
-				t.Errorf("[%d] Seek and Read got %q but expected %q", i, buf[:n], this.expected)
-			}
-		}
-	}
-}
-
-func TestWriteTo(t *testing.T) {
-	fs := afero.NewOsFs()
-	filename := "lazy_file_reader_test.go"
-	fi, err := fs.Stat(filename)
-	if err != nil {
-		t.Fatalf("os.Stat: %v", err)
-	}
-
-	b, err := afero.ReadFile(fs, filename)
-	if err != nil {
-		t.Fatalf("afero.ReadFile: %v", err)
-	}
-
-	rd, err := NewLazyFileReader(fs, filename)
-	if err != nil {
-		t.Fatalf("NewLazyFileReader %s: %v", filename, err)
-	}
-
-	tst := func(testcase string, expectedSize int64, checkEqual bool) {
-		buf := bytes.NewBuffer(make([]byte, 0, bytes.MinRead))
-		n, err := rd.WriteTo(buf)
-		if err != nil {
-			t.Fatalf("WriteTo %s case: %v", testcase, err)
-		}
-		if n != expectedSize {
-			t.Errorf("WriteTo %s case: written bytes length expected %d, got %d", testcase, expectedSize, n)
-		}
-		if checkEqual && !bytes.Equal(b, buf.Bytes()) {
-			t.Errorf("WriteTo %s case: written bytes are different from expected", testcase)
-		}
-	}
-	tst("No cache", fi.Size(), true)
-	tst("No cache 2nd", 0, false)
-
-	p := make([]byte, fi.Size())
-	_, err = rd.Read(p)
-	if err != nil && err != io.EOF {
-		t.Fatalf("Read: %v", err)
-	}
-	_, err = rd.Seek(0, 0)
-	if err != nil {
-		t.Fatalf("Seek: %v", err)
-	}
-
-	tst("Cache", fi.Size(), true)
-}
diff --git a/source/sourceSpec.go b/source/sourceSpec.go
new file mode 100644
index 00000000000..6d21a197b2f
--- /dev/null
+++ b/source/sourceSpec.go
@@ -0,0 +1,91 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package source
+
+import (
+	"path/filepath"
+	"regexp"
+
+	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/hugofs"
+	"github.com/spf13/cast"
+)
+
+// SourceSpec abstracts language-specific file creation.
+// TODO(bep) rename to Spec
+type SourceSpec struct {
+	Cfg config.Provider
+	Fs  *hugofs.Fs
+
+	// This is set if the ignoreFiles config is set.
+	ignoreFilesRe []*regexp.Regexp
+
+	Languages              map[string]interface{}
+	DefaultContentLanguage string
+}
+
+// NewSourceSpec initializes SourceSpec using languages from a given configuration.
+func NewSourceSpec(cfg config.Provider, fs *hugofs.Fs) *SourceSpec {
+	defaultLang := cfg.GetString("defaultContentLanguage")
+	languages := cfg.GetStringMap("languages")
+
+	if len(languages) == 0 {
+		l := helpers.NewDefaultLanguage(cfg)
+		languages[l.Lang] = l
+		defaultLang = l.Lang
+	}
+
+	ignoreFiles := cast.ToStringSlice(cfg.Get("ignoreFiles"))
+	var regexps []*regexp.Regexp
+	if len(ignoreFiles) > 0 {
+		for _, ignorePattern := range ignoreFiles {
+			re, err := regexp.Compile(ignorePattern)
+			if err != nil {
+				helpers.DistinctErrorLog.Printf("Invalid regexp %q in ignoreFiles: %s", ignorePattern, err)
+			} else {
+				regexps = append(regexps, re)
+			}
+
+		}
+	}
+
+	return &SourceSpec{ignoreFilesRe: regexps, Cfg: cfg, Fs: fs, Languages: languages, DefaultContentLanguage: defaultLang}
+}
+
+func (s *SourceSpec) IgnoreFile(filename string) bool {
+	base := filepath.Base(filename)
+
+	if len(base) > 0 {
+		first := base[0]
+		last := base[len(base)-1]
+		if first == '.' ||
+			first == '#' ||
+			last == '~' {
+			return true
+		}
+	}
+
+	if len(s.ignoreFilesRe) == 0 {
+		return false
+	}
+
+	for _, re := range s.ignoreFilesRe {
+		if re.MatchString(filename) {
+			return true
+		}
+	}
+
+	return false
+}
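Editor's illustrative sketch (not part of the patch): the snippet below shows how the SourceSpec added in source/sourceSpec.go above might be constructed and queried. It mirrors the newTestSourceSpec helper from source/filesystem_test.go; treating a bare viper.Viper as a sufficient config.Provider, and the example ignoreFiles pattern, are assumptions made only for illustration.

// Hypothetical usage of the new SourceSpec API; assumes a bare viper
// configuration is enough for NewSourceSpec, as the test helper suggests.
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/hugofs"
	"github.com/gohugoio/hugo/source"
	"github.com/spf13/viper"
)

func main() {
	v := viper.New()
	// ignoreFiles patterns are compiled once in NewSourceSpec; invalid
	// patterns are logged via DistinctErrorLog and skipped.
	v.Set("ignoreFiles", []string{`\.foo$`})

	ss := source.NewSourceSpec(v, hugofs.NewMem(v))

	fmt.Println(ss.IgnoreFile(".hidden"))  // true: leading dot
	fmt.Println(ss.IgnoreFile("draft~"))   // true: trailing tilde
	fmt.Println(ss.IgnoreFile("data.foo")) // true: matches the ignoreFiles regexp
	fmt.Println(ss.IgnoreFile("page.md"))  // false
}

Compared with the removed isNonProcessablePath, the regexps are now compiled once up front instead of on every file visit, which is the main behavioral difference worth noting when reading the filesystem.go hunks earlier in this patch.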