@@ -421,55 +421,6 @@ def setup_switchers(
                ofile.write(line)


-def copy_robots_txt(
-    www_root: Path,
-    group,
-    skip_cache_invalidation,
-    http: urllib3.PoolManager,
-) -> None:
-    """Copy robots.txt to www_root."""
-    if not www_root.exists():
-        logging.info("Skipping copying robots.txt (www root does not even exist).")
-        return
-    logging.info("Copying robots.txt...")
-    template_path = HERE / "templates" / "robots.txt"
-    robots_path = www_root / "robots.txt"
-    shutil.copyfile(template_path, robots_path)
-    robots_path.chmod(0o775)
-    run(["chgrp", group, robots_path])
-    if not skip_cache_invalidation:
-        purge(http, "robots.txt")
-
-
-def build_sitemap(
-    versions: Iterable[Version], languages: Iterable[Language], www_root: Path, group
-):
-    """Build a sitemap with all live versions and translations."""
-    if not www_root.exists():
-        logging.info("Skipping sitemap generation (www root does not even exist).")
-        return
-    logging.info("Starting sitemap generation...")
-    template_path = HERE / "templates" / "sitemap.xml"
-    template = jinja2.Template(template_path.read_text(encoding="UTF-8"))
-    rendered_template = template.render(languages=languages, versions=versions)
-    sitemap_path = www_root / "sitemap.xml"
-    sitemap_path.write_text(rendered_template + "\n", encoding="UTF-8")
-    sitemap_path.chmod(0o664)
-    run(["chgrp", group, sitemap_path])
-
-
-def build_404(www_root: Path, group):
-    """Build a nice 404 error page to display in case PDFs are not built yet."""
-    if not www_root.exists():
-        logging.info("Skipping 404 page generation (www root does not even exist).")
-        return
-    logging.info("Copying 404 page...")
-    not_found_file = www_root / "404.html"
-    shutil.copyfile(HERE / "templates" / "404.html", not_found_file)
-    not_found_file.chmod(0o664)
-    run(["chgrp", group, not_found_file])
-
-
def head(text, lines=10):
    """Return the first *lines* lines from the given text."""
    return "\n".join(text.split("\n")[:lines])
@@ -895,188 +846,6 @@ def save_state(self, build_start: dt, build_duration: float, trigger: str):
        logging.info("Saved new rebuild state for %s: %s", key, table.as_string())


-def symlink(
-    www_root: Path,
-    language: Language,
-    directory: str,
-    name: str,
-    group: str,
-    skip_cache_invalidation: bool,
-    http: urllib3.PoolManager,
-) -> None:
-    """Used by major_symlinks and dev_symlink to maintain symlinks."""
-    if language.tag == "en":  # English is rooted on /, no /en/
-        path = www_root
-    else:
-        path = www_root / language.tag
-    link = path / name
-    directory_path = path / directory
-    if not directory_path.exists():
-        return  # No touching link, dest doc not built yet.
-
-    if not link.exists() or readlink(link) != directory:
-        # Link does not exist or points to the wrong target.
-        link.unlink(missing_ok=True)
-        link.symlink_to(directory)
-        run(["chown", "-h", f":{group}", str(link)])
-    if not skip_cache_invalidation:
-        surrogate_key = f"{language.tag}/{name}"
-        purge_surrogate_key(http, surrogate_key)
-
-
-def major_symlinks(
-    www_root: Path,
-    group: str,
-    versions: Iterable[Version],
-    languages: Iterable[Language],
-    skip_cache_invalidation: bool,
-    http: urllib3.PoolManager,
-) -> None:
-    """Maintains the /2/ and /3/ symlinks for each language.
-
-    Like:
-    - /3/ → /3.9/
-    - /fr/3/ → /fr/3.9/
-    - /es/3/ → /es/3.9/
-    """
-    logging.info("Creating major version symlinks...")
-    current_stable = Version.current_stable(versions).name
-    for language in languages:
-        symlink(
-            www_root,
-            language,
-            current_stable,
-            "3",
-            group,
-            skip_cache_invalidation,
-            http,
-        )
-        symlink(www_root, language, "2.7", "2", group, skip_cache_invalidation, http)
-
-
-def dev_symlink(
-    www_root: Path,
-    group,
-    versions,
-    languages,
-    skip_cache_invalidation: bool,
-    http: urllib3.PoolManager,
-) -> None:
-    """Maintains the /dev/ symlinks for each language.
-
-    Like:
-    - /dev/ → /3.11/
-    - /fr/dev/ → /fr/3.11/
-    - /es/dev/ → /es/3.11/
-    """
-    logging.info("Creating development version symlinks...")
-    current_dev = Version.current_dev(versions).name
-    for language in languages:
-        symlink(
-            www_root,
-            language,
-            current_dev,
-            "dev",
-            group,
-            skip_cache_invalidation,
-            http,
-        )
-
-
-def purge(http: urllib3.PoolManager, *paths: Path | str) -> None:
-    """Remove one or many paths from docs.python.org's CDN.
-
-    To be used when a file changes, so the CDN fetches the new one.
-    """
-    base = "https://docs.python.org/"
-    for path in paths:
-        url = urljoin(base, str(path))
-        logging.debug("Purging %s from CDN", url)
-        http.request("PURGE", url, timeout=30)
-
-
-def purge_surrogate_key(http: urllib3.PoolManager, surrogate_key: str) -> None:
-    """Remove paths from docs.python.org's CDN.
-
-    All paths matching the given 'Surrogate-Key' will be removed.
-    This is set by the Nginx server for every language-version pair.
-    To be used when a directory changes, so the CDN fetches the new one.
-
-    https://www.fastly.com/documentation/reference/api/purging/#purge-tag
-    """
-    service_id = getenv("FASTLY_SERVICE_ID", "__UNSET__")
-    fastly_key = getenv("FASTLY_TOKEN", "__UNSET__")
-
-    logging.info("Purging Surrogate-Key '%s' from CDN", surrogate_key)
-    http.request(
-        "POST",
-        f"https://api.fastly.com/service/{service_id}/purge/{surrogate_key}",
-        headers={"Fastly-Key": fastly_key},
-        timeout=30,
-    )
-
-
-def proofread_canonicals(
-    www_root: Path, skip_cache_invalidation: bool, http: urllib3.PoolManager
-) -> None:
-    """In www_root we check that all canonical links point to existing contents.
-
-    It can happen that a canonical is "broken":
-
-    - /3.11/whatsnew/3.11.html typically would link to
-      /3/whatsnew/3.11.html, which may not exist yet.
-    """
-    logging.info("Checking canonical links...")
-    canonical_re = re.compile(
-        """<link rel="canonical" href="https://docs.python.org/([^"]*)" />"""
-    )
-    for file in www_root.glob("**/*.html"):
-        html = file.read_text(encoding="UTF-8", errors="surrogateescape")
-        canonical = canonical_re.search(html)
-        if not canonical:
-            continue
-        target = canonical.group(1)
-        if not (www_root / target).exists():
-            logging.info("Removing broken canonical from %s to %s", file, target)
-            html = html.replace(canonical.group(0), "")
-            file.write_text(html, encoding="UTF-8", errors="surrogateescape")
-            if not skip_cache_invalidation:
-                purge(http, str(file).replace("/srv/docs.python.org/", ""))
-
-
-def parse_versions_from_devguide(http: urllib3.PoolManager) -> list[Version]:
-    releases = http.request(
-        "GET",
-        "https://raw.githubusercontent.com/"
-        "python/devguide/main/include/release-cycle.json",
-        timeout=30,
-    ).json()
-    versions = [Version.from_json(name, release) for name, release in releases.items()]
-    versions.sort(key=Version.as_tuple)
-    return versions
-
-
-def parse_languages_from_config() -> list[Language]:
-    """Read config.toml to discover languages to build."""
-    config = tomlkit.parse((HERE / "config.toml").read_text(encoding="UTF-8"))
-    defaults = config["defaults"]
-    default_translated_name = defaults.get("translated_name", "")
-    default_in_prod = defaults.get("in_prod", True)
-    default_sphinxopts = defaults.get("sphinxopts", [])
-    default_html_only = defaults.get("html_only", False)
-    return [
-        Language(
-            iso639_tag=iso639_tag,
-            name=section["name"],
-            translated_name=section.get("translated_name", default_translated_name),
-            in_prod=section.get("in_prod", default_in_prod),
-            sphinxopts=section.get("sphinxopts", default_sphinxopts),
-            html_only=section.get("html_only", default_html_only),
-        )
-        for iso639_tag, section in config["languages"].items()
-    ]
-
-
def format_seconds(seconds: float) -> str:
    hours, remainder = divmod(seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
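Behind major_symlinks and dev_symlink, the core idiom in symlink() is: only touch the link when it is missing or points at the wrong target, so an already-correct link triggers neither a chown nor a CDN purge. A minimal standalone sketch of that idiom follows; the helper name ensure_symlink and the example layout are assumptions for illustration only.

import os
from pathlib import Path


def ensure_symlink(parent: Path, name: str, target: str) -> bool:
    """Make *parent*/*name* point at *target*; return True if the link changed."""
    link = parent / name
    if not (parent / target).exists():
        return False  # destination not built yet, leave any existing link alone
    if link.is_symlink() and os.readlink(link) == target:
        return False  # already correct, nothing to do
    link.unlink(missing_ok=True)
    link.symlink_to(target)
    return True


# Hypothetical layout: make /www/fr/3 point at /www/fr/3.13
# changed = ensure_symlink(Path("/www/fr"), "3", "3.13")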
@@ -1091,79 +860,6 @@ def format_seconds(seconds: float) -> str:
            return f"{h}h {m}m {s}s"


-def build_docs(args) -> bool:
-    """Build all docs (each language and each version)."""
-    logging.info("Full build start.")
-    start_time = perf_counter()
-    http = urllib3.PoolManager()
-    versions = parse_versions_from_devguide(http)
-    languages = parse_languages_from_config()
-    # Reverse languages but not versions, because we take version-language
-    # pairs from the end of the list, effectively reversing it.
-    # This runs languages in config.toml order and versions newest first.
-    todo = [
-        (version, language)
-        for version in Version.filter(versions, args.branch)
-        for language in reversed(Language.filter(languages, args.languages))
-    ]
-    del args.branch
-    del args.languages
-    all_built_successfully = True
-    cpython_repo = Repository(
-        "https://github.com/python/cpython.git",
-        args.build_root / _checkout_name(args.select_output),
-    )
-    while todo:
-        version, language = todo.pop()
-        logging.root.handlers[0].setFormatter(
-            logging.Formatter(
-                f"%(asctime)s %(levelname)s {language.tag}/{version.name}: %(message)s"
-            )
-        )
-        if sentry_sdk:
-            scope = sentry_sdk.get_isolation_scope()
-            scope.set_tag("version", version.name)
-            scope.set_tag("language", language.tag)
-        cpython_repo.update()
-        builder = DocBuilder(
-            version, versions, language, languages, cpython_repo, **vars(args)
-        )
-        all_built_successfully &= builder.run(http)
-        logging.root.handlers[0].setFormatter(
-            logging.Formatter("%(asctime)s %(levelname)s: %(message)s")
-        )
-
-    build_sitemap(versions, languages, args.www_root, args.group)
-    build_404(args.www_root, args.group)
-    copy_robots_txt(
-        args.www_root,
-        args.group,
-        args.skip_cache_invalidation,
-        http,
-    )
-    major_symlinks(
-        args.www_root,
-        args.group,
-        versions,
-        languages,
-        args.skip_cache_invalidation,
-        http,
-    )
-    dev_symlink(
-        args.www_root,
-        args.group,
-        versions,
-        languages,
-        args.skip_cache_invalidation,
-        http,
-    )
-    proofread_canonicals(args.www_root, args.skip_cache_invalidation, http)
-
-    logging.info("Full build done (%s).", format_seconds(perf_counter() - start_time))
-
-    return all_built_successfully
-
-
def _checkout_name(select_output: str | None) -> str:
    if select_output is not None:
        return f"cpython-{select_output}"
@@ -1301,5 +997,309 @@ def build_docs_with_lock(args: Namespace, lockfile_name: str) -> int:
        lock.close()


+def build_docs(args) -> bool:
+    """Build all docs (each language and each version)."""
+    logging.info("Full build start.")
+    start_time = perf_counter()
+    http = urllib3.PoolManager()
+    versions = parse_versions_from_devguide(http)
+    languages = parse_languages_from_config()
+    # Reverse languages but not versions, because we take version-language
+    # pairs from the end of the list, effectively reversing it.
+    # This runs languages in config.toml order and versions newest first.
+    todo = [
+        (version, language)
+        for version in Version.filter(versions, args.branch)
+        for language in reversed(Language.filter(languages, args.languages))
+    ]
+    del args.branch
+    del args.languages
+    all_built_successfully = True
+    cpython_repo = Repository(
+        "https://github.com/python/cpython.git",
+        args.build_root / _checkout_name(args.select_output),
+    )
+    while todo:
+        version, language = todo.pop()
+        logging.root.handlers[0].setFormatter(
+            logging.Formatter(
+                f"%(asctime)s %(levelname)s {language.tag}/{version.name}: %(message)s"
+            )
+        )
+        if sentry_sdk:
+            scope = sentry_sdk.get_isolation_scope()
+            scope.set_tag("version", version.name)
+            scope.set_tag("language", language.tag)
+        cpython_repo.update()
+        builder = DocBuilder(
+            version, versions, language, languages, cpython_repo, **vars(args)
+        )
+        all_built_successfully &= builder.run(http)
+        logging.root.handlers[0].setFormatter(
+            logging.Formatter("%(asctime)s %(levelname)s: %(message)s")
+        )
+
+    build_sitemap(versions, languages, args.www_root, args.group)
+    build_404(args.www_root, args.group)
+    copy_robots_txt(
+        args.www_root,
+        args.group,
+        args.skip_cache_invalidation,
+        http,
+    )
+    major_symlinks(
+        args.www_root,
+        args.group,
+        versions,
+        languages,
+        args.skip_cache_invalidation,
+        http,
+    )
+    dev_symlink(
+        args.www_root,
+        args.group,
+        versions,
+        languages,
+        args.skip_cache_invalidation,
+        http,
+    )
+    proofread_canonicals(args.www_root, args.skip_cache_invalidation, http)
+
+    logging.info("Full build done (%s).", format_seconds(perf_counter() - start_time))
+
+    return all_built_successfully
+
+
+def parse_versions_from_devguide(http: urllib3.PoolManager) -> list[Version]:
+    releases = http.request(
+        "GET",
+        "https://raw.githubusercontent.com/"
+        "python/devguide/main/include/release-cycle.json",
+        timeout=30,
+    ).json()
+    versions = [Version.from_json(name, release) for name, release in releases.items()]
+    versions.sort(key=Version.as_tuple)
+    return versions
+
+
+def parse_languages_from_config() -> list[Language]:
+    """Read config.toml to discover languages to build."""
+    config = tomlkit.parse((HERE / "config.toml").read_text(encoding="UTF-8"))
+    defaults = config["defaults"]
+    default_translated_name = defaults.get("translated_name", "")
+    default_in_prod = defaults.get("in_prod", True)
+    default_sphinxopts = defaults.get("sphinxopts", [])
+    default_html_only = defaults.get("html_only", False)
+    return [
+        Language(
+            iso639_tag=iso639_tag,
+            name=section["name"],
+            translated_name=section.get("translated_name", default_translated_name),
+            in_prod=section.get("in_prod", default_in_prod),
+            sphinxopts=section.get("sphinxopts", default_sphinxopts),
+            html_only=section.get("html_only", default_html_only),
+        )
+        for iso639_tag, section in config["languages"].items()
+    ]
+
+
+def build_sitemap(
+    versions: Iterable[Version], languages: Iterable[Language], www_root: Path, group
+):
+    """Build a sitemap with all live versions and translations."""
+    if not www_root.exists():
+        logging.info("Skipping sitemap generation (www root does not even exist).")
+        return
+    logging.info("Starting sitemap generation...")
+    template_path = HERE / "templates" / "sitemap.xml"
+    template = jinja2.Template(template_path.read_text(encoding="UTF-8"))
+    rendered_template = template.render(languages=languages, versions=versions)
+    sitemap_path = www_root / "sitemap.xml"
+    sitemap_path.write_text(rendered_template + "\n", encoding="UTF-8")
+    sitemap_path.chmod(0o664)
+    run(["chgrp", group, sitemap_path])
+
+
+def build_404(www_root: Path, group):
+    """Build a nice 404 error page to display in case PDFs are not built yet."""
+    if not www_root.exists():
+        logging.info("Skipping 404 page generation (www root does not even exist).")
+        return
+    logging.info("Copying 404 page...")
+    not_found_file = www_root / "404.html"
+    shutil.copyfile(HERE / "templates" / "404.html", not_found_file)
+    not_found_file.chmod(0o664)
+    run(["chgrp", group, not_found_file])
+
+
+def copy_robots_txt(
+    www_root: Path,
+    group,
+    skip_cache_invalidation,
+    http: urllib3.PoolManager,
+) -> None:
+    """Copy robots.txt to www_root."""
+    if not www_root.exists():
+        logging.info("Skipping copying robots.txt (www root does not even exist).")
+        return
+    logging.info("Copying robots.txt...")
+    template_path = HERE / "templates" / "robots.txt"
+    robots_path = www_root / "robots.txt"
+    shutil.copyfile(template_path, robots_path)
+    robots_path.chmod(0o775)
+    run(["chgrp", group, robots_path])
+    if not skip_cache_invalidation:
+        purge(http, "robots.txt")
+
+
+def major_symlinks(
+    www_root: Path,
+    group: str,
+    versions: Iterable[Version],
+    languages: Iterable[Language],
+    skip_cache_invalidation: bool,
+    http: urllib3.PoolManager,
+) -> None:
+    """Maintains the /2/ and /3/ symlinks for each language.
+
+    Like:
+    - /3/ → /3.9/
+    - /fr/3/ → /fr/3.9/
+    - /es/3/ → /es/3.9/
+    """
+    logging.info("Creating major version symlinks...")
+    current_stable = Version.current_stable(versions).name
+    for language in languages:
+        symlink(
+            www_root,
+            language,
+            current_stable,
+            "3",
+            group,
+            skip_cache_invalidation,
+            http,
+        )
+        symlink(www_root, language, "2.7", "2", group, skip_cache_invalidation, http)
+
+
+def dev_symlink(
+    www_root: Path,
+    group,
+    versions,
+    languages,
+    skip_cache_invalidation: bool,
+    http: urllib3.PoolManager,
+) -> None:
+    """Maintains the /dev/ symlinks for each language.
+
+    Like:
+    - /dev/ → /3.11/
+    - /fr/dev/ → /fr/3.11/
+    - /es/dev/ → /es/3.11/
+    """
+    logging.info("Creating development version symlinks...")
+    current_dev = Version.current_dev(versions).name
+    for language in languages:
+        symlink(
+            www_root,
+            language,
+            current_dev,
+            "dev",
+            group,
+            skip_cache_invalidation,
+            http,
+        )
+
+
+def symlink(
+    www_root: Path,
+    language: Language,
+    directory: str,
+    name: str,
+    group: str,
+    skip_cache_invalidation: bool,
+    http: urllib3.PoolManager,
+) -> None:
+    """Used by major_symlinks and dev_symlink to maintain symlinks."""
+    if language.tag == "en":  # English is rooted on /, no /en/
+        path = www_root
+    else:
+        path = www_root / language.tag
+    link = path / name
+    directory_path = path / directory
+    if not directory_path.exists():
+        return  # No touching link, dest doc not built yet.
+
+    if not link.exists() or readlink(link) != directory:
+        # Link does not exist or points to the wrong target.
+        link.unlink(missing_ok=True)
+        link.symlink_to(directory)
+        run(["chown", "-h", f":{group}", str(link)])
+    if not skip_cache_invalidation:
+        surrogate_key = f"{language.tag}/{name}"
+        purge_surrogate_key(http, surrogate_key)
+
+
+def proofread_canonicals(
+    www_root: Path, skip_cache_invalidation: bool, http: urllib3.PoolManager
+) -> None:
+    """In www_root we check that all canonical links point to existing contents.
+
+    It can happen that a canonical is "broken":
+
+    - /3.11/whatsnew/3.11.html typically would link to
+      /3/whatsnew/3.11.html, which may not exist yet.
+    """
+    logging.info("Checking canonical links...")
+    canonical_re = re.compile(
+        """<link rel="canonical" href="https://docs.python.org/([^"]*)" />"""
+    )
+    for file in www_root.glob("**/*.html"):
+        html = file.read_text(encoding="UTF-8", errors="surrogateescape")
+        canonical = canonical_re.search(html)
+        if not canonical:
+            continue
+        target = canonical.group(1)
+        if not (www_root / target).exists():
+            logging.info("Removing broken canonical from %s to %s", file, target)
+            html = html.replace(canonical.group(0), "")
+            file.write_text(html, encoding="UTF-8", errors="surrogateescape")
+            if not skip_cache_invalidation:
+                purge(http, str(file).replace("/srv/docs.python.org/", ""))
+
+
+def purge(http: urllib3.PoolManager, *paths: Path | str) -> None:
+    """Remove one or many paths from docs.python.org's CDN.
+
+    To be used when a file changes, so the CDN fetches the new one.
+    """
+    base = "https://docs.python.org/"
+    for path in paths:
+        url = urljoin(base, str(path))
+        logging.debug("Purging %s from CDN", url)
+        http.request("PURGE", url, timeout=30)
+
+
+def purge_surrogate_key(http: urllib3.PoolManager, surrogate_key: str) -> None:
+    """Remove paths from docs.python.org's CDN.
+
+    All paths matching the given 'Surrogate-Key' will be removed.
+    This is set by the Nginx server for every language-version pair.
+    To be used when a directory changes, so the CDN fetches the new one.
+
+    https://www.fastly.com/documentation/reference/api/purging/#purge-tag
+    """
+    service_id = getenv("FASTLY_SERVICE_ID", "__UNSET__")
+    fastly_key = getenv("FASTLY_TOKEN", "__UNSET__")
+
+    logging.info("Purging Surrogate-Key '%s' from CDN", surrogate_key)
+    http.request(
+        "POST",
+        f"https://api.fastly.com/service/{service_id}/purge/{surrogate_key}",
+        headers={"Fastly-Key": fastly_key},
+        timeout=30,
+    )
+
+
if __name__ == "__main__":
    sys.exit(main())
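parse_languages_from_config() reads per-language sections and falls back to a [defaults] table for any missing key. Below is a minimal sketch of that defaults-then-override lookup, using the standard-library tomllib rather than the tomlkit dependency the script uses; the config snippet and the merge() helper are invented for illustration.

import tomllib

CONFIG = """
[defaults]
in_prod = true
html_only = false

[languages.en]
name = "English"

[languages.pl]
name = "Polish"
in_prod = false
"""

config = tomllib.loads(CONFIG)
defaults = config["defaults"]


def merge(section: dict) -> dict:
    """Per-language values win; anything missing falls back to [defaults]."""
    return {**defaults, **section}


for tag, section in config["languages"].items():
    print(tag, merge(section))
# en {'in_prod': True, 'html_only': False, 'name': 'English'}
# pl {'in_prod': False, 'html_only': False, 'name': 'Polish'}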