Mirror of https://github.com/gohugoio/hugo.git
Optimize the multilanguage build process
Work In Progress! This commit reworks the build and rebuild process to better suit a multi-site setup. It also includes a complete overhaul of the site tests. Previously these were a messy mix that tested only small parts of the build chain, some of them exercising code paths not even used in "real life". Now all tests that depend on a built site follow the same, real production code path.

See #2309
Closes #2211
Closes #477
Closes #1744
parent f023dfd763
commit 708bc78770
35 changed files with 1264 additions and 991 deletions
@@ -30,8 +30,7 @@ const robotTxtTemplate = `User-agent: Googlebot
 `
 
 func TestRobotsTXTOutput(t *testing.T) {
-	viper.Reset()
-	defer viper.Reset()
+	testCommonResetState()
 
 	hugofs.InitMemFs()
 
@@ -39,29 +38,15 @@ func TestRobotsTXTOutput(t *testing.T) {
 	viper.Set("enableRobotsTXT", true)
 
 	s := &Site{
-		Source: &source.InMemorySource{ByteSource: weightedSources},
-		Lang:   newDefaultLanguage(),
+		Source:   &source.InMemorySource{ByteSource: weightedSources},
+		Language: newDefaultLanguage(),
 	}
 
-	s.initializeSiteInfo()
-
-	s.prepTemplates("robots.txt", robotTxtTemplate)
-
-	createPagesAndMeta(t, s)
-
-	if err := s.renderHomePage(); err != nil {
-		t.Fatalf("Unable to RenderHomePage: %s", err)
+	if err := buildAndRenderSite(s, "robots.txt", robotTxtTemplate); err != nil {
+		t.Fatalf("Failed to build site: %s", err)
 	}
 
-	if err := s.renderSitemap(); err != nil {
-		t.Fatalf("Unable to RenderSitemap: %s", err)
-	}
-
-	if err := s.renderRobotsTXT(); err != nil {
-		t.Fatalf("Unable to RenderRobotsTXT :%s", err)
-	}
-
-	robotsFile, err := hugofs.Destination().Open("robots.txt")
+	robotsFile, err := hugofs.Destination().Open("public/robots.txt")
 
 	if err != nil {
 		t.Fatalf("Unable to locate: robots.txt")
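For context on the refactor above: the removed lines show the test driving each build step by hand (prepTemplates, createPagesAndMeta, and the individual render* calls), while the new code funnels everything through a single buildAndRenderSite helper. The sketch below is only an illustration of what such a helper might do, assuming it simply chains the same steps the old test performed; the helper actually added in this commit may differ in signature and error handling. buildAndRenderSiteSketch and createPagesAndMetaErr are hypothetical names, the latter standing in for the createPagesAndMeta(t, s) call seen in the removed lines, recast to return an error instead of taking a *testing.T.

// Hypothetical sketch only: what a buildAndRenderSite-style helper might do,
// assuming it bundles the steps the old test called one by one. The real
// helper introduced by this commit may differ.
func buildAndRenderSiteSketch(s *Site, additionalTemplates ...string) error {
	// Register extra templates as name/content pairs,
	// e.g. ("robots.txt", robotTxtTemplate).
	s.prepTemplates(additionalTemplates...)

	// Hypothetical error-returning stand-in for createPagesAndMeta(t, s).
	if err := createPagesAndMetaErr(s); err != nil {
		return err
	}

	// Render the same outputs the old test rendered explicitly.
	if err := s.renderHomePage(); err != nil {
		return err
	}
	if err := s.renderSitemap(); err != nil {
		return err
	}
	return s.renderRobotsTXT()
}

With a helper along these lines, individual tests only assert on the rendered output (as the robots.txt test above does), and every site test exercises the same build path as production code, which is the point of the test overhaul described in the commit message.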