
Commit

test: 5000 solr page size?
nickumia-reisys committed Jul 27, 2023
1 parent 87ac89d commit 234b4f2
Showing 1 changed file with 3 additions and 6 deletions.
ckanext/geodatagov/cli.py (9 changes: 3 additions & 6 deletions)
@@ -6,7 +6,6 @@
 import logging
 import sys
 import tempfile
-import time
 import warnings
 from typing import Optional

@@ -27,7 +26,7 @@
 # default constants
 # for sitemap_to_s3
 UPLOAD_TO_S3 = True
-PAGE_SIZE = 10000
+PAGE_SIZE = 5000
 MAX_PER_PAGE = 50000
 # for db_solr_sync
 _INDICES = {"package": PackageSearchIndex}
@@ -202,7 +201,6 @@ def upload_to_key(upload_str: str, filename_on_s3: str) -> None:
         log.error(f"File {filename_on_s3} upload failed. Error: {resp_metadata}")

     del temp_file
-    time.sleep(30)


 def upload_sitemap_file(sitemap: list) -> None:
@@ -231,7 +229,7 @@ def sitemap_to_s3(upload_to_s3: bool, page_size: int, max_per_page: int):
         log.info("Nothing to process, exiting.")
         return

-    start = 140000
+    start = 150000

     num_of_pages = (count // page_size) + 1

@@ -249,7 +247,7 @@ def sitemap_to_s3(upload_to_s3: bool, page_size: int, max_per_page: int):
             {S3_ENDPOINT_URL}/{BUCKET_NAME}/{sitemap_index.filename_s3}"
         )

-    for file_num in range(14, 38):
+    for file_num in range(16, 76):
         sitemap = SitemapData(str(file_num), start, page_size)
         sitemap.write_sitemap_header()
         sitemap.write_pkgs(package_query)
@@ -276,7 +274,6 @@ def sitemap_to_s3(upload_to_s3: bool, page_size: int, max_per_page: int):
             print(json.dumps(sitemap.to_json(), indent=4))

         del sitemap
-        time.sleep(30)


 def _normalize_type(_type):
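For reference, the wider file_num range is consistent with the smaller page size producing more sitemap files. Below is a minimal sketch of the pagination arithmetic shown in the diff context (num_of_pages = (count // page_size) + 1); the package count used here is an assumed figure for illustration, not a number taken from the commit.

# Minimal sketch of the pagination arithmetic in sitemap_to_s3.
# The package count below is an assumed figure for illustration only.
count = 300_000

old_page_size = 10000  # PAGE_SIZE before this commit
new_page_size = 5000   # PAGE_SIZE after this commit

# Same formula as in the diff context above.
old_pages = (count // old_page_size) + 1   # 31 sitemap files
new_pages = (count // new_page_size) + 1   # 61 sitemap files

print(old_pages, new_pages)  # halving the page size roughly doubles the file count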
