diff --git a/client/html/index.html b/client/html/index.html
index 64263467..0a571ad9 100644
--- a/client/html/index.html
+++ b/client/html/index.html
@@ -12,9 +12,20 @@
       content="A conversational AI system that listens, learns, and challenges"
     />
-
-
-
+
+
+
-
-    Version: 0.0.10-Alpha
-
+    Version: 0.0.10-Alpha
@@ -76,11 +85,8 @@
diff --git a/g4f/Provider/Providers/Dfehub.py b/g4f/Provider/Providers/Zeabur.py
similarity index 86%
rename from g4f/Provider/Providers/Dfehub.py
rename to g4f/Provider/Providers/Zeabur.py
index 2f66f19b..e412720b 100644
--- a/g4f/Provider/Providers/Dfehub.py
+++ b/g4f/Provider/Providers/Zeabur.py
@@ -2,8 +2,9 @@
 import requests
 from ...typing import sha256, Dict, get_type_hints
 
-url = "https://chat.dfehub.com"
-model = ['gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-4']
+url = "https://gptleg.zeabur.app"
+model = ['gpt-3.5-turbo', 'gpt-3.5-turbo-0301',
+         'gpt-3.5-turbo-16k', 'gpt-4', 'gpt-4-0613']
 supports_stream = True
 needs_auth = False
 
@@ -18,8 +19,8 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
         'Accept': 'text/event-stream',
         'Accept-Language': 'pt-BR,pt;q=0.9,en-US;q=0.8,en;q=0.7,zh-CN;q=0.6,zh;q=0.5',
         'Content-Type': 'application/json',
-        'Origin': 'https://chat.dfehub.com',
-        'Referer': 'https://chat.dfehub.com/',
+        'Origin': 'https://gptleg.zeabur.app',
+        'Referer': 'https://gptleg.zeabur.app/',
         'Sec-Ch-Ua': '"Not.A/Brand";v="8", "Chromium";v="114", "Google Chrome";v="114"',
         'Sec-Ch-Ua-Mobile': '?0',
         'Sec-Ch-Ua-Platform': '"Windows"',
@@ -33,11 +34,11 @@ def _create_completion(model: str, messages: list, stream: bool, **kwargs):
     data = {
         'model': model,
         'temperature': 0.7,
-        'max_tokens': '8000',
+        'max_tokens': '16000',
         'presence_penalty': 0,
         'messages': messages,
     }
-
+
     response = requests.post(url + '/api/openai/v1/chat/completions',
                              headers=headers, json=data, stream=stream)
 
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 6eb0bc20..65f8cb1d 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -9,7 +9,6 @@
     ChatgptLogin,
     ChatgptLogin,
     DeepAi,
-    Dfehub,
     Easychat,
     Ezcht,
     Fakeopen,
@@ -28,6 +27,7 @@
     Xiaor,
     Yqcloud,
     You,
+    Zeabur
 )
 
 Palm = Bard
diff --git a/g4f/models.py b/g4f/models.py
index 0f52fff2..cb6d4c98 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -1,6 +1,7 @@
 from g4f import Provider
 import random
 
+
 class Model:
     class model:
         name: str
@@ -15,17 +16,22 @@ class gpt_35_turbo:
     class gpt_35_turbo_0613:
         name: str = 'gpt-3.5-turbo-0613'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.Gravityengine
+        best_provider: Provider.Provider = Provider.Zeabur
+
+    class gpt_35_turbo_0301:
+        name: str = 'gpt-3.5-turbo-0301'
+        base_provider: str = 'openai'
+        best_provider: Provider.Provider = Provider.Zeabur
 
     class gpt_35_turbo_16k_0613:
         name: str = 'gpt-3.5-turbo-16k-0613'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.Gravityengine
+        best_provider: Provider.Provider = Provider.Zeabur
 
     class gpt_35_turbo_16k:
         name: str = 'gpt-3.5-turbo-16k'
         base_provider: str = 'openai'
-        best_provider: Provider.Provider = Provider.Gravityengine
+        best_provider: Provider.Provider = Provider.Zeabur
 
     class gpt_4_dev:
         name: str = 'gpt-4-for-dev'
@@ -36,7 +42,7 @@ class gpt_4:
         name: str = 'gpt-4'
         base_provider: str = 'openai'
         best_provider: Provider.Provider = Provider.ChatgptAi
-
+
     class gpt_4_0613:
         name: str = 'gpt-4-0613'
         base_provider: str = 'openai'
@@ -152,79 +158,80 @@ class text_davinci_003:
         name: str = 'text-davinci-003'
         base_provider: str = 'openai'
         best_provider: Provider.Provider = Provider.Vercel
-
+
     class palm:
         name: str = 'palm2'
         base_provider: str = 'google'
         best_provider: Provider.Provider = Provider.Bard
-
-
+
+
     """    'falcon-40b': Model.falcon_40b,
     'falcon-7b': Model.falcon_7b,
     'llama-13b': Model.llama_13b,"""
-
+
     class falcon_40b:
         name: str = 'falcon-40b'
         base_provider: str = 'huggingface'
         best_provider: Provider.Provider = Provider.H2o
-
+
     class falcon_7b:
         name: str = 'falcon-7b'
         base_provider: str = 'huggingface'
         best_provider: Provider.Provider = Provider.H2o
-
+
     class llama_13b:
         name: str = 'llama-13b'
         base_provider: str = 'huggingface'
         best_provider: Provider.Provider = Provider.H2o
-
+
+
 class ModelUtils:
     convert: dict = {
         'gpt-3.5-turbo': Model.gpt_35_turbo,
         'gpt-3.5-turbo-0613': Model.gpt_35_turbo_0613,
+        'gpt-3.5-turbo-0301': Model.gpt_35_turbo_0301,
         'gpt-4': Model.gpt_4,
         'gpt-4-0613': Model.gpt_4_0613,
         'gpt-4-for-dev': Model.gpt_4_dev,
         'gpt-3.5-turbo-16k': Model.gpt_35_turbo_16k,
         'gpt-3.5-turbo-16k-0613': Model.gpt_35_turbo_16k_0613,
-
+
         'claude-instant-v1-100k': Model.claude_instant_v1_100k,
         'claude-v1-100k': Model.claude_v1_100k,
         'claude-instant-v1': Model.claude_instant_v1,
         'claude-v1': Model.claude_v1,
-
+
         'alpaca-7b': Model.alpaca_7b,
         'stablelm-tuned-alpha-7b': Model.stablelm_tuned_alpha_7b,
-
+
         'bloom': Model.bloom,
         'bloomz': Model.bloomz,
-
+
         'flan-t5-xxl': Model.flan_t5_xxl,
         'flan-ul2': Model.flan_ul2,
-
+
         'gpt-neox-20b': Model.gpt_neox_20b,
         'oasst-sft-4-pythia-12b-epoch-3.5': Model.oasst_sft_4_pythia_12b_epoch_35,
         'santacoder': Model.santacoder,
-
+
         'command-medium-nightly': Model.command_medium_nightly,
         'command-xlarge-nightly': Model.command_xlarge_nightly,
-
+
         'code-cushman-001': Model.code_cushman_001,
         'code-davinci-002': Model.code_davinci_002,
-
+
         'text-ada-001': Model.text_ada_001,
         'text-babbage-001': Model.text_babbage_001,
         'text-curie-001': Model.text_curie_001,
         'text-davinci-002': Model.text_davinci_002,
         'text-davinci-003': Model.text_davinci_003,
-
+
         'palm2': Model.palm,
         'palm': Model.palm,
         'google': Model.palm,
         'google-bard': Model.palm,
         'google-palm': Model.palm,
         'bard': Model.palm,
-
+
         'falcon-40b': Model.falcon_40b,
         'falcon-7b': Model.falcon_7b,
         'llama-13b': Model.llama_13b,
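
A minimal usage sketch (not part of the patch) of how the new 'gpt-3.5-turbo-0301' mapping and the renamed Zeabur provider could be exercised after these changes. It assumes the unchanged tail of Zeabur._create_completion still yields text chunks the way the old Dfehub implementation did, and that g4f.Provider re-exports Zeabur as a module, as the __init__.py hunk indicates:

    import g4f.models as models
    from g4f.Provider import Zeabur

    # Model names are resolved through ModelUtils.convert; the new entry routes to Zeabur.
    model = models.ModelUtils.convert['gpt-3.5-turbo-0301']
    print(model.name, model.base_provider)   # gpt-3.5-turbo-0301 openai
    print(model.best_provider is Zeabur)     # True

    # Streaming directly through the provider module's helper, whose signature
    # (model, messages, stream, **kwargs) is shown in the diff above; the
    # token-yielding behaviour of the unchanged remainder is assumed.
    for chunk in Zeabur._create_completion(
            model=model.name,
            messages=[{'role': 'user', 'content': 'Hello!'}],
            stream=True):
        print(chunk, end='', flush=True)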