Skip to content

Commit

Permalink
provide robots.txt via config instead of static file
Browse files Browse the repository at this point in the history
  • Loading branch information
Ariana Barzinpour authored and Ariana-B committed Jun 18, 2024
1 parent 0f7875e commit 1277ff2
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 6 deletions.
10 changes: 8 additions & 2 deletions cubedash/_pages.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from datacube.model import DatasetType, Range
from datacube.scripts.dataset import build_dataset_info
from dateutil import tz
from flask import abort, redirect, request, send_from_directory, url_for
from flask import abort, redirect, request, url_for
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import HTTPException

Expand Down Expand Up @@ -51,6 +51,10 @@

_DEFAULT_ARRIVALS_DAYS: int = app.config.get("CUBEDASH_DEFAULT_ARRIVALS_DAY_COUNT", 14)

_ROBOTS_TXT_DEFAULT = (
"User-Agent: *\nAllow: /\nDisallow: /products/*/*\nDisallow: /stac/**"
)

# Add server timings to http headers.
if app.config.get("CUBEDASH_SHOW_PERF_TIMES", False):
_monitoring.init_app_monitoring()
Expand Down Expand Up @@ -676,7 +680,9 @@ def about_page():

@app.route("/robots.txt")
def robots_txt():
    """Serve robots.txt from config (key ``ROBOTS_TXT``), with a built-in default.

    The body must go out as plain text: wrapping it with ``utils.as_json``
    would both set an ``application/json`` content type and JSON-quote the
    string (escaping the newlines), which crawlers would not parse as a
    valid robots.txt.
    """
    # app.config.get matches how the rest of this module reads config
    # (e.g. CUBEDASH_DEFAULT_ARRIVALS_DAY_COUNT above); it also avoids
    # referencing ``flask.current_app`` when only names from ``flask``
    # are imported via ``from flask import ...``.
    body = app.config.get("ROBOTS_TXT", _ROBOTS_TXT_DEFAULT)
    return app.response_class(body, mimetype="text/plain")


@app.route("/")
Expand Down
4 changes: 0 additions & 4 deletions cubedash/static/robots.txt

This file was deleted.

0 comments on commit 1277ff2

Please sign in to comment.