chore: disallow all crawlers with robots.txt (#833)
Signed-off-by: Tim Birkett <tim.birkett@sainsburys.co.uk>
parent 1e0b3a2a8c
commit 7b1a0d3cd3
3 changed files with 13 additions and 0 deletions
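The payload served by the new endpoint is the standard "deny everything" robots policy. Under the Robots Exclusion Protocol, "User-agent: *" matches every crawler and "Disallow: /" excludes the entire URL space, so compliant crawlers will skip the web UI and REST API paths entirely:

User-agent: *
Disallow: /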
@@ -86,6 +86,7 @@ const (
 	providerEventsPath      = "/api/v2/events/provider"
 	sharesPath              = "/api/v2/shares"
 	healthzPath             = "/healthz"
+	robotsTxtPath           = "/robots.txt"
 	webRootPathDefault      = "/"
 	webBasePathDefault      = "/web"
 	webBasePathAdminDefault = "/web/admin"
@@ -118,6 +118,7 @@ const (
 	providerEventsPath = "/api/v2/events/provider"
 	sharesPath         = "/api/v2/shares"
 	healthzPath        = "/healthz"
+	robotsTxtPath      = "/robots.txt"
 	webBasePath        = "/web"
 	webBasePathAdmin   = "/web/admin"
 	webAdminSetupPath  = "/web/admin/setup"
@@ -8910,6 +8911,13 @@ func TestHealthCheck(t *testing.T) {
 	assert.Equal(t, "ok", rr.Body.String())
 }
 
+func TestRobotsTxtCheck(t *testing.T) {
+	req, _ := http.NewRequest(http.MethodGet, "/robots.txt", nil)
+	rr := executeRequest(req)
+	checkResponseCode(t, http.StatusOK, rr)
+	assert.Equal(t, "User-agent: *\nDisallow: /", rr.Body.String())
+}
+
 func TestGetWebRootMock(t *testing.T) {
 	req, _ := http.NewRequest(http.MethodGet, "/", nil)
 	rr := executeRequest(req)
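executeRequest and checkResponseCode are pre-existing helpers in this test suite, not part of this commit. As a rough sketch of how such helpers are commonly built on net/http/httptest (the testRouter variable and the use of testify's assert package are assumptions, not the project's actual wiring):

import (
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/stretchr/testify/assert"
)

// testRouter is a hypothetical http.Handler holding the router under test.
var testRouter http.Handler

func executeRequest(req *http.Request) *httptest.ResponseRecorder {
	// Run the request through the router in-process and record the response.
	rr := httptest.NewRecorder()
	testRouter.ServeHTTP(rr, req)
	return rr
}

func checkResponseCode(t *testing.T, expected int, rr *httptest.ResponseRecorder) {
	// Fail the test if the recorded status code differs from the expected one.
	assert.Equal(t, expected, rr.Code)
}

Built this way, the robots.txt test needs no network listener: the request is dispatched in-process and the assertions run against the recorded body and status.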
@@ -1137,6 +1137,10 @@ func (s *httpdServer) initializeRouter() {
 		render.PlainText(w, r, "ok")
 	})
 
+	s.router.Get(robotsTxtPath, func(w http.ResponseWriter, r *http.Request) {
+		render.PlainText(w, r, "User-agent: *\nDisallow: /")
+	})
+
 	// share API exposed to external users
 	s.router.Get(sharesPath+"/{id}", s.downloadFromShare)
 	s.router.Post(sharesPath+"/{id}", s.uploadFilesToShare)
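For readers outside the codebase: the handler above goes through the project's router and its render helper. A self-contained equivalent using only the standard library would look roughly like this (a sketch; the port and all names are placeholders, not from this commit):

package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Serve the same crawler policy the commit wires into initializeRouter.
	http.HandleFunc("/robots.txt", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/plain; charset=utf-8")
		fmt.Fprint(w, "User-agent: *\nDisallow: /")
	})
	log.Fatal(http.ListenAndServe(":8080", nil)) // placeholder port
}

Fetching it with curl http://localhost:8080/robots.txt should print the two-line policy shown above.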