
Commit

add robots.txt
Ariana Barzinpour authored and Ariana-B committed Jun 18, 2024
1 parent 5b93679 commit 2faa968
Showing 2 changed files with 14 additions and 1 deletion.
7 changes: 6 additions & 1 deletion cubedash/_pages.py
@@ -8,7 +8,7 @@
from datacube.model import DatasetType, Range
from datacube.scripts.dataset import build_dataset_info
from dateutil import tz
-from flask import abort, redirect, request, url_for
+from flask import abort, redirect, request, send_from_directory, url_for
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import HTTPException

@@ -674,6 +674,11 @@ def about_page():
)


@app.route("/robots.txt")
def robots_txt():
return send_from_directory("static", "robots.txt")


@app.route("/")
def default_redirect():
"""Redirect to default starting page."""
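The new route serves robots.txt from the Flask app's static folder at the site root, where crawlers look for it, rather than only at /static/robots.txt. The static/robots.txt file itself is not shown in this excerpt; below is a minimal sketch of content that would satisfy the new test. The "User-Agent: *" line is what the test asserts, while the Disallow rule is an assumption about what the file blocks.

# cubedash/static/robots.txt (illustrative sketch; actual rules may differ)
User-Agent: *
Disallow: /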
8 changes: 8 additions & 0 deletions integration_tests/test_page_loads.py
@@ -961,6 +961,14 @@ def check_doc_start_has_hint(hint: str, url: str):
)


def test_get_robots(client: FlaskClient):
"""
Check that robots.txt is correctly served from root
"""
text, rv = get_text_response(client, "/robots.txt")
assert "User-Agent: *" in text


def test_all_give_404s(client: FlaskClient):
"""
We should get 404 messages, not exceptions, for missing things.
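get_text_response is a helper defined elsewhere in this test module. As a rough illustration of what the new test exercises, an equivalent standalone check using only Flask's built-in test client might look like the sketch below; the client fixture is assumed to come from the existing integration-test conftest, as in the diff above.

# Standalone sketch, not part of the commit: fetch /robots.txt with
# Flask's test client and check the crawler directive the new test asserts.
from flask.testing import FlaskClient


def test_get_robots_sketch(client: FlaskClient):
    rv = client.get("/robots.txt")
    assert rv.status_code == 200
    assert "User-Agent: *" in rv.data.decode("utf-8")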
