Commit: merge
sarahwooders committed Dec 11, 2024
2 parents 95ad1e0 + 470f95a, commit ed2da23
Showing 100 changed files with 3,293 additions and 8,699 deletions.
10 changes: 9 additions & 1 deletion .dockerignore
@@ -1 +1,9 @@
chatui
**/__pycache__
**/.pytest_cache
**/*.pyc
**/*.pyo
**/*.pyd
.git
.gitignore
.env
*.log
7 changes: 6 additions & 1 deletion .github/workflows/check_for_new_prints.yml
@@ -31,14 +31,19 @@ jobs:
          # Check each changed Python file
          while IFS= read -r file; do
            if [ "$file" == "letta/main.py" ]; then
              echo "Skipping $file for print statement checks."
              continue
            fi
            if [ -f "$file" ]; then
              echo "Checking $file for new print statements..."
              # Get diff and look for added lines containing print statements
              NEW_PRINTS=$(git diff ${{ github.event.pull_request.base.sha }} ${{ github.sha }} "$file" | \
                grep "^+" | \
                grep -v "^+++" | \
                grep "print(" || true)
                grep -E "(^|\s)print\(" || true)
              if [ ! -z "$NEW_PRINTS" ]; then
                echo "❌ Found new print statements in $file:"
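As a rough illustration of what the stricter pattern changes (GNU grep assumed; the sample lines below are made up, not from the commit): the old pattern matched any occurrence of "print(", including substrings such as pprint(, while the anchored pattern only matches a call at the start of a line or preceded by whitespace.

echo '+    pprint(value)' | grep "print("                    # old pattern: false positive on pprint(
echo '+    pprint(value)' | grep -E "(^|\s)print\(" || true  # new pattern: no match
echo '+    print(value)' | grep -E "(^|\s)print\("           # new pattern: still flags a real print call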
3 changes: 2 additions & 1 deletion .github/workflows/tests.yml
Expand Up @@ -31,6 +31,7 @@ jobs:
- "test_utils.py"
- "test_tool_schema_parsing.py"
- "test_v1_routes.py"
- "test_offline_memory_agent.py"
services:
qdrant:
image: qdrant/qdrant
@@ -133,4 +134,4 @@ jobs:
LETTA_SERVER_PASS: test_server_token
PYTHONPATH: ${{ github.workspace }}:${{ env.PYTHONPATH }}
run: |
poetry run pytest -s -vv -k "not test_v1_routes.py and not test_model_letta_perfomance.py and not test_utils.py and not test_client.py and not integration_test_tool_execution_sandbox.py and not integration_test_summarizer.py and not test_agent_tool_graph.py and not test_tool_rule_solver.py and not test_local_client.py and not test_o1_agent.py and not test_cli.py and not test_concurrent_connections.py and not test_quickstart and not test_model_letta_performance and not test_storage and not test_server and not test_openai_client and not test_providers and not test_client_legacy.py" tests
poetry run pytest -s -vv -k "not test_offline_memory_agent.py and not test_v1_routes.py and not test_model_letta_perfomance.py and not test_utils.py and not test_client.py and not integration_test_tool_execution_sandbox.py and not integration_test_summarizer.py and not test_agent_tool_graph.py and not test_tool_rule_solver.py and not test_local_client.py and not test_o1_agent.py and not test_cli.py and not test_concurrent_connections.py and not test_quickstart and not test_model_letta_performance and not test_storage and not test_server and not test_openai_client and not test_providers and not test_client_legacy.py" tests
63 changes: 63 additions & 0 deletions .github/workflows/warn_poetry_updates.yml
@@ -0,0 +1,63 @@
name: Check Poetry Dependencies Changes

on:
  pull_request:
    paths:
      - 'poetry.lock'
      - 'pyproject.toml'

jobs:
  check-poetry-changes:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check for poetry.lock changes
        id: check-poetry-lock
        run: |
          if git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }} | grep -q "poetry.lock"; then
            echo "poetry_lock_changed=true" >> $GITHUB_OUTPUT
          else
            echo "poetry_lock_changed=false" >> $GITHUB_OUTPUT
          fi
      - name: Check for pyproject.toml changes
        id: check-pyproject
        run: |
          if git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }} | grep -q "pyproject.toml"; then
            echo "pyproject_changed=true" >> $GITHUB_OUTPUT
          else
            echo "pyproject_changed=false" >> $GITHUB_OUTPUT
          fi
      - name: Create PR comment
        if: steps.check-poetry-lock.outputs.poetry_lock_changed == 'true' || steps.check-pyproject.outputs.pyproject_changed == 'true'
        uses: actions/github-script@v7
        with:
          script: |
            const poetryLockChanged = ${{ steps.check-poetry-lock.outputs.poetry_lock_changed }};
            const pyprojectChanged = ${{ steps.check-pyproject.outputs.pyproject_changed }};
            let message = '📦 Dependencies Alert:\n\n';
            if (poetryLockChanged && pyprojectChanged) {
              message += '- Both `poetry.lock` and `pyproject.toml` have been modified\n';
            } else if (poetryLockChanged) {
              message += '- `poetry.lock` has been modified\n';
            } else if (pyprojectChanged) {
              message += '- `pyproject.toml` has been modified\n';
            }
            message += '\nPlease review these changes carefully to ensure they are intended (cc @sarahwooders @cpacker).';
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: message
            });
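For reference, a minimal local approximation of the two checks this workflow runs (BASE_SHA and HEAD_SHA are placeholders for the pull request's base and head commits, not values from the commit):

git diff --name-only BASE_SHA HEAD_SHA | grep -q "poetry.lock" && echo "poetry_lock_changed=true"
git diff --name-only BASE_SHA HEAD_SHA | grep -q "pyproject.toml" && echo "pyproject_changed=true"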
3 changes: 3 additions & 0 deletions .gitignore
@@ -1022,3 +1022,6 @@ memgpy/pytest.ini

## ignore venvs
tests/test_tool_sandbox/restaurant_management_system/venv

## custom scripts
test
Empty file removed .persist/.gitkeep
Empty file.
93 changes: 53 additions & 40 deletions Dockerfile
@@ -1,56 +1,69 @@
# The builder image, used to build the virtual environment
FROM python:3.12.2-bookworm as builder
ARG LETTA_ENVIRONMENT=PRODUCTION
ENV LETTA_ENVIRONMENT=${LETTA_ENVIRONMENT}
RUN pip install poetry==1.8.2
# Start with pgvector base for builder
FROM ankane/pgvector:v0.5.1 AS builder

# Install Python and required packages
RUN apt-get update && apt-get install -y \
python3 \
python3-venv \
python3-pip \
python3-full \
build-essential \
libpq-dev \
python3-dev \
&& rm -rf /var/lib/apt/lists/*

ENV POETRY_NO_INTERACTION=1 \
ARG LETTA_ENVIRONMENT=PRODUCTION
ENV LETTA_ENVIRONMENT=${LETTA_ENVIRONMENT} \
POETRY_NO_INTERACTION=1 \
POETRY_VIRTUALENVS_IN_PROJECT=1 \
POETRY_VIRTUALENVS_CREATE=1 \
POETRY_CACHE_DIR=/tmp/poetry_cache

WORKDIR /app

# Create and activate virtual environment
RUN python3 -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

# Now install poetry in the virtual environment
RUN pip install --no-cache-dir poetry==1.8.2

# Copy dependency files first
COPY pyproject.toml poetry.lock ./
RUN poetry lock --no-update
RUN if [ "$LETTA_ENVIRONMENT" = "DEVELOPMENT" ] ; then \
poetry install --no-root -E "postgres server dev" ; \
else \
poetry install --no-root --all-extras && \
rm -rf $POETRY_CACHE_DIR ; \
fi
# Then copy the rest of the application code
COPY . .

RUN poetry lock --no-update && \
poetry install --all-extras && \
rm -rf $POETRY_CACHE_DIR

# Runtime stage
FROM ankane/pgvector:v0.5.1 AS runtime

# Install Python packages
RUN apt-get update && apt-get install -y \
python3 \
python3-venv \
&& rm -rf /var/lib/apt/lists/* \
&& mkdir -p /app

# The runtime image, used to just run the code provided its virtual environment
FROM python:3.12.2-slim-bookworm as runtime
ARG LETTA_ENVIRONMENT=PRODUCTION
ENV LETTA_ENVIRONMENT=${LETTA_ENVIRONMENT}
ENV VIRTUAL_ENV=/app/.venv \
PATH="/app/.venv/bin:$PATH"
ENV LETTA_ENVIRONMENT=${LETTA_ENVIRONMENT} \
VIRTUAL_ENV="/app/.venv" \
PATH="/app/.venv/bin:$PATH" \
POSTGRES_USER=letta \
POSTGRES_PASSWORD=letta \
POSTGRES_DB=letta

COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}
WORKDIR /app

COPY ./letta /letta
COPY ./alembic.ini /alembic.ini
COPY ./alembic /alembic
# Copy virtual environment and app from builder
COPY --from=builder /app .

EXPOSE 8283
# Copy initialization SQL if it exists
COPY init.sql /docker-entrypoint-initdb.d/

CMD ./letta/server/startup.sh
EXPOSE 8283 5432

# allow for in-container development and testing
FROM builder as development
ARG LETTA_ENVIRONMENT=PRODUCTION
ENV LETTA_ENVIRONMENT=${LETTA_ENVIRONMENT}
ENV VIRTUAL_ENV=/app/.venv \
PATH="/app/.venv/bin:$PATH"
ENV PYTHONPATH=/
WORKDIR /
COPY ./tests /tests
COPY ./letta /letta
COPY ./alembic.ini /alembic.ini
COPY ./alembic /alembic
#COPY ./configs/server_config.yaml /root/.letta/config
EXPOSE 8083

CMD ./letta/server/startup.sh
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
CMD ["./letta/server/startup.sh"]
63 changes: 63 additions & 0 deletions alembic/versions/95badb46fdf9_migrate_messages_to_the_orm.py
@@ -0,0 +1,63 @@
"""Migrate message to orm
Revision ID: 95badb46fdf9
Revises: 3c683a662c82
Create Date: 2024-12-05 14:02:04.163150
"""

from typing import Sequence, Union

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "95badb46fdf9"
down_revision: Union[str, None] = "08b2f8225812"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("messages", sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=True))
op.add_column("messages", sa.Column("is_deleted", sa.Boolean(), server_default=sa.text("FALSE"), nullable=False))
op.add_column("messages", sa.Column("_created_by_id", sa.String(), nullable=True))
op.add_column("messages", sa.Column("_last_updated_by_id", sa.String(), nullable=True))
op.add_column("messages", sa.Column("organization_id", sa.String(), nullable=True))
# Populate `organization_id` based on `user_id`
# Use a raw SQL query to update the organization_id
op.execute(
"""
UPDATE messages
SET organization_id = users.organization_id
FROM users
WHERE messages.user_id = users.id
"""
)
op.alter_column("messages", "organization_id", nullable=False)
op.alter_column("messages", "tool_calls", existing_type=postgresql.JSON(astext_type=sa.Text()), nullable=False)
op.alter_column("messages", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), nullable=False)
op.drop_index("message_idx_user", table_name="messages")
op.create_foreign_key(None, "messages", "agents", ["agent_id"], ["id"])
op.create_foreign_key(None, "messages", "organizations", ["organization_id"], ["id"])
op.drop_column("messages", "user_id")
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("messages", sa.Column("user_id", sa.VARCHAR(), autoincrement=False, nullable=False))
op.drop_constraint(None, "messages", type_="foreignkey")
op.drop_constraint(None, "messages", type_="foreignkey")
op.create_index("message_idx_user", "messages", ["user_id", "agent_id"], unique=False)
op.alter_column("messages", "created_at", existing_type=postgresql.TIMESTAMP(timezone=True), nullable=True)
op.alter_column("messages", "tool_calls", existing_type=postgresql.JSON(astext_type=sa.Text()), nullable=True)
op.drop_column("messages", "organization_id")
op.drop_column("messages", "_last_updated_by_id")
op.drop_column("messages", "_created_by_id")
op.drop_column("messages", "is_deleted")
op.drop_column("messages", "updated_at")
# ### end Alembic commands ###
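For reference, a sketch of applying or reverting this revision with the standard Alembic CLI, assuming the repository's alembic.ini is configured for the target database:

alembic upgrade 95badb46fdf9    # apply the message-to-ORM migration
alembic downgrade 08b2f8225812  # roll back to the prior revision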
@@ -97,7 +97,7 @@ def upgrade() -> None:
sa.Column("text", sa.String(), nullable=True),
sa.Column("model", sa.String(), nullable=True),
sa.Column("name", sa.String(), nullable=True),
sa.Column("tool_calls", letta.metadata.ToolCallColumn(), nullable=True),
sa.Column("tool_calls", letta.orm.message.ToolCallColumn(), nullable=True),
sa.Column("tool_call_id", sa.String(), nullable=True),
sa.Column("created_at", sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint("id"),
@@ -0,0 +1,39 @@
"""add column to tools table to contain function return limit return_char_limit
Revision ID: a91994b9752f
Revises: e1a625072dbf
Create Date: 2024-12-09 18:27:25.650079
"""

from typing import Sequence, Union

import sqlalchemy as sa

from alembic import op
from letta.constants import FUNCTION_RETURN_CHAR_LIMIT

# revision identifiers, used by Alembic.
revision: str = "a91994b9752f"
down_revision: Union[str, None] = "e1a625072dbf"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("tools", sa.Column("return_char_limit", sa.Integer(), nullable=True))

# Populate `return_char_limit` column
op.execute(
f"""
UPDATE tools
SET return_char_limit = {FUNCTION_RETURN_CHAR_LIMIT}
"""
)


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("tools", "return_char_limit")
# ### end Alembic commands ###