# Copyright(C) 2024 Advanced Micro Devices, Inc. All rights reserved.
# SPDX-License-Identifier: MIT
from setuptools import setup
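
# Single turnkeyml release shared by the dml/npu/hybrid backend extras below.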
tkml_version = "5.0.4"
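
# Example installs (illustrative; the extras are defined in extras_require below):
#   pip install -e .              # core package only
#   pip install -e .[hybrid]      # core plus the hybrid turnkeyml backend
#   pip install -e .[maven,dev]   # Maven agent plus developer tooling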
setup(
    name="gaia",
    version="0.6.3",
    description="GAIA genAI sandbox",
    author="AMD",
    package_dir={"": "src"},
    packages=[
        "gaia",
        "gaia.llm",
        "gaia.agents",
        "gaia.agents.Llm",
        "gaia.agents.Chaty",
        "gaia.agents.Clip",
        "gaia.agents.Example",
        "gaia.agents.Joker",
        "gaia.agents.Maven",
        "gaia.agents.Neo",
        "gaia.agents.Picasso",
        "gaia.interface",
    ],
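    # Core runtime dependencies installed for every configuration.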
    install_requires=[
        "aiohttp",
        "fastapi",
        "pydantic==1.10.12",
        "uvicorn>=0.15.0",
        "transformers",
        "accelerate",
        "websockets",
        "websocket-client",
        "python-dotenv",
        "torch>=2.0.0,<2.4",
        "torchvision<0.19.0",
        "torchaudio",
        "pyside6",
        "ollama",
        "pyaudio",
        "openai-whisper",
        "numpy",
    ],
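    # Optional feature sets, grouped by inference backend and by agent.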
    extras_require={
        "dml": [
            f"turnkeyml[llm-oga-igpu]=={tkml_version}",
        ],
        "npu": [
            f"turnkeyml[llm-oga-npu]=={tkml_version}",
        ],
        "hybrid": [
            f"turnkeyml[llm-oga-hybrid]=={tkml_version}",
        ],
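        # LlamaIndex base stack; the agent extras below all build on it via gaia[llamaindex].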
"llamaindex": [
"llama_index",
"llama-index-embeddings-huggingface",
],
"joker": [
"gaia[llamaindex]",
],
"clip": [
"youtube_search",
"google-api-python-client",
"llama-index-readers-youtube-transcript",
"gaia[llamaindex]",
],
"maven": [
"openai",
"llama-index-tools-arxiv",
"llama-index-tools-duckduckgo",
"llama-index-readers-web",
"llama-index-readers-papers",
"llama-index-readers-wikipedia",
"llama-index-tools-wikipedia",
"gaia[llamaindex]",
],
"neo": [
"llama-index-readers-github",
"gaia[llamaindex]",
],
"notebooks": [
"jupyter",
"ipywidgets",
"openai",
"wordcloud",
"arize-phoenix[evals,llama-index]",
"llama-index-callbacks-arize-phoenix",
"gaia[clip,maven,neo,llamaindex]",
],
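        # CUDA 11.8 wheels pinned to Python 3.10 on Windows (cp310, win_amd64).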
"cuda": [
"torch @ https://download.pytorch.org/whl/cu118/torch-2.3.1%2Bcu118-cp310-cp310-win_amd64.whl",
"torchvision @ https://download.pytorch.org/whl/cu118/torchvision-0.18.1%2Bcu118-cp310-cp310-win_amd64.whl",
"torchaudio @ https://download.pytorch.org/whl/cu118/torchaudio-2.3.1%2Bcu118-cp310-cp310-win_amd64.whl",
],
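        # Testing, profiling, plotting, and formatting tools for development.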
"dev": [
"pytest",
"pytest-benchmark",
"pytest-mock",
"pytest-asyncio",
"memory_profiler",
"matplotlib",
"adjustText",
"plotly",
"black",
]
},
    classifiers=[],
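    # Console entry points: `gaia` launches the interface widget, `gaia-cli` the CLI.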
    entry_points={
        "console_scripts": [
            "gaia = gaia.interface.widget:main",
            "gaia-cli = gaia.cli:main",
        ]
    },
    python_requires=">=3.8, <3.12",
    long_description=open("README.md", "r", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    include_package_data=True,
)