Feature/save charts with User Defined Path #316

Merged · 10 commits · Jun 27, 2023
3 changes: 2 additions & 1 deletion .gitignore
@@ -37,4 +37,5 @@ exports/
*.log

# vscode
.vscode
.vscode

10 changes: 10 additions & 0 deletions CONTRIBUTING.md
@@ -44,6 +44,14 @@ ruff pandasai examples

Make sure that the linter does not report any errors or warnings before submitting a pull request.

### Code Format with `black`

We use `black` to format the code. Reformat the codebase by running the following command:

```bash
black pandasai
```

### 🧪 Testing

We use `pytest` to test our code. You can run the tests by running the following command:
@@ -54,6 +62,8 @@ poetry run pytest

Make sure that all tests pass before submitting a pull request.
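
For instance, the `save_charts_path` argument introduced in this pull request would typically be covered by a small unit test like the sketch below. This is illustrative only: the test name, the sample code string, and the assumption that the rewritten code embeds the user-defined directory are not taken from this diff.

```python
from pandasai.helpers.save_chart import add_save_chart


def test_add_save_chart_with_user_defined_path(tmp_path):
    """Illustrative sketch of a test for the new save_charts_path argument."""
    code = "import matplotlib.pyplot as plt\nplt.plot([1, 2, 3])\nplt.show()"
    result = add_save_chart(code, "prompt-id", str(tmp_path), print_save_dir=False)

    assert isinstance(result, str)
    # Assumption: the generated save path is built from the user-defined directory.
    assert str(tmp_path) in result
```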



## 🚀 Release Process

At the moment, the release process is manual. We try to make frequent releases. Usually, we release a new version when we have a new feature or bugfix. A developer with admin rights to the repository will create a new release on GitHub, and then publish the new version to PyPI.
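
Since the project is managed with Poetry (see the test command above), the manual publish step usually amounts to something like the commands below; the exact release checklist, version bumping, and credential setup are not described in this document, so treat this only as a sketch.

```bash
poetry build
poetry publish
```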
28 changes: 28 additions & 0 deletions docs/getting-started.md
@@ -131,6 +131,34 @@ print(response)
# Output: check out images/histogram-chart.png
```

#### Saving Plots with User Defined Path

Below is an example of saving charts to a user-defined location.

```python
import pandas as pd
import os
from data.sample_dataframe import dataframe

from pandasai import PandasAI
from pandasai.llm.openai import OpenAI

df = pd.DataFrame(dataframe)

llm = OpenAI()

user_defined_path = os.getcwd()
pandas_ai = PandasAI(llm, save_charts=True,
                     save_charts_path=user_defined_path,
                     verbose=True)
response = pandas_ai(
    df,
    "Plot the histogram of countries showing for each the gdp,"
    " using different colors for each bar",
)
# Output: check out $pwd/exports/charts/{hashid}/chart.png
```

### Working with multiple dataframes

Example of using PandasAI with multiple Pandas DataFrames
6 changes: 6 additions & 0 deletions examples/pai_version.py
@@ -0,0 +1,6 @@
"""Prints the version of pandasai."""


import pandasai as pai

print(pai.__version__)
23 changes: 23 additions & 0 deletions examples/save_chart.py
@@ -0,0 +1,23 @@
"""Example of using PandasAI to generate and save a chart from a Pandas DataFrame"""

import pandas as pd
import os
from data.sample_dataframe import dataframe

from pandasai import PandasAI
from pandasai.llm.openai import OpenAI

df = pd.DataFrame(dataframe)

llm = OpenAI()

user_defined_path = os.getcwd()
pandas_ai = PandasAI(llm, save_charts=True,
                     save_charts_path=user_defined_path,
                     verbose=True)
response = pandas_ai(
    df,
    "Plot the histogram of countries showing for each the gdp,"
    " using different colors for each bar",
)
# Output: check out $pwd/exports/charts/{hashid}/chart.png
10 changes: 8 additions & 2 deletions pandasai/__init__.py
@@ -33,6 +33,7 @@

```
"""

import ast
import io
import logging
@@ -42,7 +43,8 @@
import time
from contextlib import redirect_stdout
from typing import List, Optional, Union, Dict, Type

import importlib.metadata
__version__ = importlib.metadata.version(__package__ or __name__)
import astor
import pandas as pd
from .constants import (
@@ -160,6 +162,7 @@ def __init__(
verbose=False,
enforce_privacy=False,
save_charts=False,
save_charts_path=None,
enable_cache=True,
middlewares=None,
custom_whitelisted_dependencies=None,
@@ -217,6 +220,7 @@ def __init__(
self._verbose = verbose
self._enforce_privacy = enforce_privacy
self._save_charts = save_charts
self._save_charts_path = save_charts_path
self._process_id = str(uuid.uuid4())

self._non_default_prompts = (
@@ -627,7 +631,9 @@ def run_code(

# Add save chart code
if self._save_charts:
code = add_save_chart(code, self._prompt_id, not self._verbose)
code = add_save_chart(
code, self._prompt_id, self._save_charts_path, not self._verbose
)

# Get the code to run removing unsafe imports and df overwrites
code_to_run = self._clean_code(code)
18 changes: 14 additions & 4 deletions pandasai/helpers/save_chart.py
@@ -49,13 +49,19 @@ def compare_ast(
return node1 == node2


def add_save_chart(code: str, folder_name: str, print_save_dir: bool = True) -> str:
def add_save_chart(
code: str,
folder_name: str,
save_charts_path: str = None,
print_save_dir: bool = True,
) -> str:
"""
Add line to code that save charts to a file, if plt.show() is called.

Args:
code (str): Code to add line to.
folder_name (str): Name of folder to save charts to.
save_charts_path (str): User Defined Path to save Charts
print_save_dir (bool): Print the save directory to the console.
Defaults to True.

@@ -64,9 +70,13 @@ def add_save_chart(code: str, folder_name: str, print_save_dir: bool = True) ->

"""

# define chart save directory
project_root = dirname(dirname(dirname(__file__)))
chart_save_dir = os.path.join(project_root, "exports", "charts", folder_name)
if save_charts_path is not None:
charts_root_dir = save_charts_path
else:
# define chart save directory
charts_root_dir = dirname(dirname(dirname(__file__)))

chart_save_dir = os.path.join(charts_root_dir, "exports", "charts", folder_name)

tree = ast.parse(code)

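
For reference, the directory layout produced by the new logic can be reproduced in isolation. The values below are hypothetical and simply mirror the `os.path.join` call added above.

```python
import os

# Hypothetical inputs mirroring the arguments of add_save_chart
save_charts_path = "/home/user/my_project"  # user-defined root; falls back to the package root when None
folder_name = "4f9c2d1e"                    # prompt id used as the per-run folder name

chart_save_dir = os.path.join(save_charts_path, "exports", "charts", folder_name)
print(chart_save_dir)
# /home/user/my_project/exports/charts/4f9c2d1e
```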
14 changes: 7 additions & 7 deletions pandasai/llm/azure_openai.py
@@ -34,13 +34,13 @@ class AzureOpenAI(BaseOpenAI):
engine: str

def __init__(
self,
api_token: Optional[str] = None,
api_base: Optional[str] = None,
api_version: Optional[str] = None,
deployment_name: str = None,
is_chat_model: Optional[bool] = False,
**kwargs,
self,
api_token: Optional[str] = None,
api_base: Optional[str] = None,
api_version: Optional[str] = None,
deployment_name: str = None,
is_chat_model: Optional[bool] = False,
**kwargs,
):
"""
__init__ method of AzureOpenAI Class
2 changes: 0 additions & 2 deletions pandasai/llm/base.py
@@ -337,7 +337,6 @@ def _configure(self, api_key: str):
self.genai = genai

def _configurevertexai(self, project_id: str, location: str):

"""
Configure Google VertexAi
Args:
@@ -353,7 +352,6 @@ def _configurevertexai(self, project_id: str, location: str):
vertexai.init(project=project_id, location=location)
self.vertexai = vertexai


def _valid_params(self):
return ["temperature", "top_p", "top_k", "max_output_tokens"]

24 changes: 13 additions & 11 deletions pandasai/llm/google_palm.py
@@ -88,9 +88,9 @@ class GoogleVertexai(BaseGoogle):

"""

def __init__(self, project_id: str, location: str,
model: Optional[str] = None, **kwargs):

def __init__(
self, project_id: str, location: str, model: Optional[str] = None, **kwargs
):
"""
A init class to implement the Google Vertexai Models

@@ -138,20 +138,22 @@ def _generate_text(self, prompt: str) -> str:
"""
self._validate()

vertexai = self.vertexai # --fix
vertexai = self.vertexai # --fix
print(vertexai.__version__)
from vertexai.preview.language_models import (CodeGenerationModel,
TextGenerationModel)
from vertexai.preview.language_models import (
CodeGenerationModel,
TextGenerationModel,
)

if self.model == "code-bison@001":

code_generation = CodeGenerationModel.from_pretrained(self.model)

completion = code_generation.predict(prefix=prompt,
temperature=self.temperature,
max_output_tokens=self.max_output_tokens)
completion = code_generation.predict(
prefix=prompt,
temperature=self.temperature,
max_output_tokens=self.max_output_tokens,
)
else:

text_generation = TextGenerationModel.from_pretrained(self.model)

completion = text_generation.predict(