From c282c625c87e8bc68f47efe36593a1c47fed1e87 Mon Sep 17 00:00:00 2001
From: Tobias Wesper <37782951+TGWesper@users.noreply.github.com>
Date: Mon, 26 Mar 2018 20:04:38 +0200
Subject: [PATCH] Bugfix for missing comments

When scraping multiple posts in a row, any post that is scraped after a
post with paged comments had a malformed base_url containing multiple
"&after=" parameters. Because of this, it seems that only the last page
of comments was scraped for all subsequent posts. Resetting the "after"
variable at the start of the loop fixes this. In the for-loop for
subcomments this was already implemented correctly.
---
 get_fb_comments_from_fb.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/get_fb_comments_from_fb.py b/get_fb_comments_from_fb.py
index a970752..b70a858 100644
--- a/get_fb_comments_from_fb.py
+++ b/get_fb_comments_from_fb.py
@@ -143,6 +143,7 @@ def scrapeFacebookPageFeedComments(page_id, access_token):
 
     for status in reader:
         has_next_page = True
+        after = ''
 
         while has_next_page:
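
For context, here is a minimal sketch of the paging-loop pattern this patch targets, with the `after` cursor reset applied per post. The helper name `request_until_succeed`, the Graph API URL shape, and the surrounding function are illustrative assumptions, not the scraper's exact code:

```python
import json
import urllib.request


def request_until_succeed(url):
    # Hypothetical helper: fetch a URL and return the decoded JSON body.
    with urllib.request.urlopen(url) as response:
        return json.loads(response.read().decode('utf-8'))


def scrape_comments_for_statuses(status_ids, access_token):
    """Illustrative only: shows why `after` must be reset for each status.

    Without the reset, the paging cursor left over from the previous
    status's last comment page would be appended to every later
    status's URL, so only that stale page would be requested.
    """
    for status_id in status_ids:
        has_next_page = True
        after = ''  # reset the paging cursor for each new status (the fix)

        while has_next_page:
            base_url = (
                'https://graph.facebook.com/{}/comments'
                '?access_token={}'.format(status_id, access_token)
            )
            if after:
                base_url += '&after={}'.format(after)

            data = request_until_succeed(base_url)
            for comment in data.get('data', []):
                print(comment.get('message', ''))

            # Advance to the next page of comments, if any.
            paging = data.get('paging', {})
            if 'next' in paging:
                after = paging['cursors']['after']
            else:
                has_next_page = False
```

Without the `after = ''` line, the second and later posts would inherit the cursor from whichever post last had multiple comment pages, which matches the behavior described in the commit message.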