Merge pull request #27 from UPB-SS1/fix-remove-sleep
Fix remove sleep
JoseRZapata authored May 12, 2021
2 parents e57305b + e2709e3 commit f14aa3e
Showing 3 changed files with 8 additions and 4 deletions.
5 changes: 4 additions & 1 deletion README.md
@@ -41,7 +41,10 @@ def main():
     links_df = pd.read_csv('samples/sample_source_links.csv')
     # Init CrowdTangle with api key
     crowd_tangle = CrowdTangle("abc123def345")
-    ct_df = crowd_tangle.get_shares(urls=links_df, url_column='clean_url', date_column='date',clean_urls=True, platforms='facebook', max_calls = 2)
+    ct_df = crowd_tangle.get_shares(urls=links_df, url_column='clean_url',
+                                    date_column='date',clean_urls=True,
+                                    platforms='facebook', max_calls = 2)
 
     shared = Shared(ct_df)
     crowtangle_shares_df, shares_graph, q, coordination_interval = shared.coord_shares(clean_urls=True)

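The README change above only reflows the get_shares call onto multiple lines. Read in full, the README example amounts to roughly the following (a minimal sketch; the pycoornet import paths are assumptions based on the src/pycoornet layout in this commit, and the API key and CSV path are the README's own placeholders):

import pandas as pd

# Assumed import paths, inferred from the src/pycoornet package layout.
from pycoornet.crowdtangle import CrowdTangle
from pycoornet.shared import Shared

def main():
    # Sample links file and placeholder API key, as in the README example.
    links_df = pd.read_csv('samples/sample_source_links.csv')
    crowd_tangle = CrowdTangle("abc123def345")
    ct_df = crowd_tangle.get_shares(urls=links_df, url_column='clean_url',
                                    date_column='date', clean_urls=True,
                                    platforms='facebook', max_calls=2)

    # Build the coordinated-sharing network from the CrowdTangle shares.
    shared = Shared(ct_df)
    crowtangle_shares_df, shares_graph, q, coordination_interval = shared.coord_shares(clean_urls=True)

if __name__ == '__main__':
    main()
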
6 changes: 4 additions & 2 deletions setup.py
@@ -14,7 +14,7 @@
 
 setup(
     name="pycoornet",
-    version="0.5.0",
+    version="0.5.1",
     description="Using Python Given a set of URLs, this packages detects coordinated link sharing behavior on social media and outputs the network of entities that performed such behaviour.",
     long_description=long_description,
     long_description_content_type="text/markdown",
@@ -41,6 +41,8 @@
         'tqdm>=4.47.0',
         'networkx>=2.4',
         'python-louvain>=0.14',
-        'tldextract>=3.1.0'
+        'tldextract>=3.1.0',
+        'pyarrow>=4.0.0',
+        'ratelimiter>= 1.2.0'
     ],
 )
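
The setup.py change bumps the release to 0.5.1 and adds pyarrow and ratelimiter as install requirements. A quick way to confirm the new release and its dependencies are present after upgrading (a sketch; assumes the package is installed from PyPI as pycoornet):

# pip install "pycoornet==0.5.1"   # or: pip install --upgrade pycoornet
from importlib.metadata import version

for pkg in ("pycoornet", "pyarrow", "ratelimiter"):
    # Raises PackageNotFoundError if the package is not installed.
    print(pkg, version(pkg))
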
1 change: 0 additions & 1 deletion src/pycoornet/crowdtangle.py
@@ -135,7 +135,6 @@ def get_shares(self, urls, url_column='url', date_column='date', platforms=('fac
                 if not data['result']['posts']:
                     print(f"Empty response on url: {url}")
                     logger.debug(f"Empty response on url: {url}")
-                    time.sleep(sleep_time)
                     continue
 
                 # convert json response to dataframe
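The deleted time.sleep(sleep_time) on the empty-response branch pairs with the new ratelimiter>=1.2.0 requirement added in setup.py: throttling is handled by a rate limiter rather than a fixed sleep after every empty response. A minimal sketch of that pattern with the ratelimiter package (illustrative only; this is not necessarily how get_shares wires it internally, and the fetch_links helper, endpoint URL, and the 2-calls-per-minute budget are assumptions):

import requests
from ratelimiter import RateLimiter

# Throttle outgoing CrowdTangle API calls instead of sleeping after each one.
rate_limiter = RateLimiter(max_calls=2, period=60)  # assumed budget: 2 calls per 60 s

def fetch_links(api_url, params):
    """Hypothetical helper: one throttled GET against the CrowdTangle /links endpoint."""
    with rate_limiter:  # blocks until a call slot is available within the period
        response = requests.get(api_url, params=params)
    return response.json()

With this approach an empty result can simply `continue` to the next URL, as in the diff above, because pacing is enforced at request time rather than by ad-hoc sleeps.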
