-
Notifications
You must be signed in to change notification settings - Fork 17
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add docs notebook integration tests (#46)
* update notebooks for changed imports
* remove autogen from poetry
* add final_result variable in tests ipynb
* add launch ipynb integration tests
* add cache files for blog with images.ipynb
* add cache files for multi-step research agent.ipynb
* add cache files for math via python code with a single agent.ipynb
* update poetry
* update integration tests cache files
* update cache files for single_llama_index integration test
* update cache files for delegation_crewai integration test
* update cache files for blog_with_images_ipynb integration test
* update cache files for multi_step_research_agent_ipynb integration test
* update cache files for math_via_python_code_with_a_single_agent_ipynb integration test
* remove multi_step_research_agent_ipynb from integration tests
* update cache files for math_via_python_code_with_a_single_agent_ipynb integration test
* update cache files for multi_step_research_agent_ipynb integration test
* rename examples/data/agent_storage to examples/data/research_agent_storage
* fix autogen example
* add cache files for using_autogen_with_motleycrew_ipynb integration test
* update cache files for using_autogen_with_motleycrew_ipynb integration test
* update cache files for using_autogen_with_motleycrew_ipynb integration test
* remove using_autogen_with_motleycrew_ipynb from integration tests

---------

Co-authored-by: User <[email protected]>
Co-authored-by: whimo <[email protected]>
- Loading branch information
1 parent
25cd4a0
commit e2100ec
Showing
114 changed files
with
261 additions
and
450 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,44 @@ | ||
# filename: fetch_arxiv_gpt4.py | ||
import urllib.request | ||
import urllib.parse | ||
import xml.etree.ElementTree as ET | ||
|
||
def search_arxiv(query, max_results=5):
    """Query the arXiv Atom API and return the raw XML response body.

    Args:
        query: arXiv search expression, e.g. 'all:"GPT-4"'.
        max_results: maximum number of entries to request. Defaults to 5,
            matching the previously hard-coded value, so existing callers
            are unaffected.

    Returns:
        The raw response body (bytes) from the arXiv API, newest
        submissions first.

    Raises:
        urllib.error.URLError: if the request fails or times out.
    """
    url = 'http://export.arxiv.org/api/query?'
    params = {
        'search_query': query,
        'start': 0,
        'max_results': max_results,
        'sortBy': 'submittedDate',
        'sortOrder': 'descending'
    }
    # urlencode handles percent-quoting of the query expression.
    url += urllib.parse.urlencode(params)
    # Timeout so an unreachable arXiv server cannot hang the script forever.
    with urllib.request.urlopen(url, timeout=30) as response:
        return response.read()
|
||
def parse_response(response):
    """Parse an arXiv Atom XML response into a list of paper dicts.

    Args:
        response: XML document (bytes or str) as returned by the arXiv
            API query endpoint.

    Returns:
        A list of dicts with 'title', 'published' and 'summary' keys,
        one per <entry> element, in document order. A field missing from
        an entry yields None rather than raising.
    """
    ns = {'atom': 'http://www.w3.org/2005/Atom'}
    root = ET.fromstring(response)
    papers = []
    for entry in root.findall('atom:entry', ns):
        # findtext returns the default instead of raising AttributeError
        # when a child element is absent, so a malformed entry cannot
        # crash the parser.
        papers.append({
            'title': entry.findtext('atom:title', default=None, namespaces=ns),
            'published': entry.findtext('atom:published', default=None, namespaces=ns),
            'summary': entry.findtext('atom:summary', default=None, namespaces=ns),
        })
    return papers
|
||
def main():
    """Search arXiv for GPT-4 papers and print the most recent result."""
    results = parse_response(search_arxiv('all:"GPT-4"'))
    if not results:
        print("No papers found.")
        return
    latest = results[0]
    print("Most Recent Paper on GPT-4:")
    print("Title:", latest['title'])
    print("Published Date:", latest['published'])
    print("Summary:", latest['summary'])


if __name__ == '__main__':
    main()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,44 @@ | ||
# filename: fetch_latest_gpt4_paper.py | ||
import requests | ||
from datetime import datetime | ||
|
||
def fetch_latest_paper():
    """Fetch and print the newest arXiv paper matching 'GPT-4'.

    Queries the arXiv Atom API for the single most recently submitted
    paper whose metadata mentions GPT-4 and prints its title, published
    date and abstract. Prints a diagnostic message if the request fails
    or no entry is returned.
    """
    # Hoisted from mid-function in the original; kept function-local
    # because the module does not import ElementTree at file level.
    from xml.etree import ElementTree as ET

    # arXiv API endpoint and search parameters: single most recently
    # submitted paper mentioning GPT-4.
    url = "http://export.arxiv.org/api/query"
    params = {
        "search_query": "all:GPT-4",
        "sortBy": "submittedDate",
        "sortOrder": "descending",
        "max_results": 1,
    }

    # Timeout so an unreachable arXiv server cannot hang the script forever.
    response = requests.get(url, params=params, timeout=30)

    if response.status_code != 200:
        print("Failed to fetch data from arXiv. Status code:", response.status_code)
        return

    root = ET.fromstring(response.content)
    entry = root.find('{http://www.w3.org/2005/Atom}entry')
    if entry is None:
        print("No GPT-4 papers found.")
        return

    # Extract title, abstract and submission timestamp from the entry.
    title = entry.find('{http://www.w3.org/2005/Atom}title').text
    summary = entry.find('{http://www.w3.org/2005/Atom}summary').text
    published_date = entry.find('{http://www.w3.org/2005/Atom}published').text

    # arXiv timestamps look like 2024-01-01T00:00:00Z.
    published_datetime = datetime.strptime(published_date, '%Y-%m-%dT%H:%M:%SZ')

    print("Title:", title)
    print("Published Date:", published_datetime.strftime('%Y-%m-%d'))
    print("Abstract:", summary.strip())


fetch_latest_paper()
1 change: 1 addition & 0 deletions
1
examples/data/research_agent_storage/default__vector_store.json
Large diffs are not rendered by default.
Oops, something went wrong.
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
{"graph_dict": {}} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
{"embedding_dict": {}, "text_id_to_ref_doc_id": {}, "metadata_dict": {}} |
Oops, something went wrong.