@@ -415,23 +415,19 @@ def setup_switchers(
                 ofile.write(line)
 
 
-def build_robots_txt(
-    versions: Iterable[Version],
-    languages: Iterable[Language],
+def copy_robots_txt(
     www_root: Path,
     group,
     skip_cache_invalidation,
     http: urllib3.PoolManager,
 ) -> None:
-    """Disallow crawl of EOL versions in robots.txt."""
+    """Copy robots.txt to www_root."""
     if not www_root.exists():
-        logging.info("Skipping robots.txt generation (www root does not even exist).")
+        logging.info("Skipping copying robots.txt (www root does not even exist).")
         return
     template_path = HERE / "templates" / "robots.txt"
-    template = jinja2.Template(template_path.read_text(encoding="UTF-8"))
-    rendered_template = template.render(languages=languages, versions=versions)
     robots_path = www_root / "robots.txt"
-    robots_path.write_text(rendered_template + "\n", encoding="UTF-8")
+    shutil.copyfile(template_path, robots_path)
     robots_path.chmod(0o775)
     run(["chgrp", group, robots_path])
     if not skip_cache_invalidation:
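For context, the deleted lines above are the whole of the old mechanism: the template at `templates/robots.txt` was rendered through Jinja2 with the build's `versions` and `languages` so that EOL versions could be disallowed. A rough standalone sketch of that removed step (the `versions`/`languages` values here are illustrative placeholders, not from the repository):

```python
import jinja2
from pathlib import Path

template_path = Path("templates/robots.txt")
template = jinja2.Template(template_path.read_text(encoding="UTF-8"))
# In the real script these values came from the build configuration.
rendered = template.render(versions=["3.12", "3.13"], languages=["en", "fr"])
print(rendered)
```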
@@ -1204,9 +1200,7 @@ def build_docs(args) -> bool:
 
     build_sitemap(versions, languages, args.www_root, args.group)
     build_404(args.www_root, args.group)
-    build_robots_txt(
-        versions,
-        languages,
+    copy_robots_txt(
         args.www_root,
         args.group,
         args.skip_cache_invalidation,
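Taken together, the two hunks replace per-build rendering with a verbatim copy of the static template. A minimal self-contained sketch of the new path, assuming the same `templates/robots.txt` layout; `subprocess.run` stands in here for the script's `run` helper, and the cache-invalidation branch that follows the hunk is elided:

```python
import logging
import shutil
import subprocess
from pathlib import Path

HERE = Path(__file__).resolve().parent  # mirrors the script's HERE constant


def copy_robots_txt(www_root: Path, group: str) -> None:
    """Copy the static robots.txt template into the web root."""
    if not www_root.exists():
        logging.info("Skipping copying robots.txt (www root does not even exist).")
        return
    template_path = HERE / "templates" / "robots.txt"
    robots_path = www_root / "robots.txt"
    # A plain file copy replaces the old Jinja2 render step.
    shutil.copyfile(template_path, robots_path)
    robots_path.chmod(0o775)
    subprocess.run(["chgrp", group, str(robots_path)], check=True)
```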