Commit

Merge branch 'dev' into ntindle/secrt-997-publish-a-new-security-policy
Torantulino authored Nov 13, 2024
2 parents 6aa5550 + 5c0f979 commit bb3d4b7
Showing 44 changed files with 1,702 additions and 966 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/platform-autgpt-deploy-prod.yml
@@ -47,7 +47,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Trigger deploy workflow
-        uses: peter-evans/repository-dispatch@v2
+        uses: peter-evans/repository-dispatch@v3
         with:
           token: ${{ secrets.DEPLOY_TOKEN }}
           repository: Significant-Gravitas/AutoGPT_cloud_infrastructure
2 changes: 1 addition & 1 deletion .github/workflows/platform-autogpt-deploy-dev.yaml
@@ -49,7 +49,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Trigger deploy workflow
-        uses: peter-evans/repository-dispatch@v2
+        uses: peter-evans/repository-dispatch@v3
         with:
           token: ${{ secrets.DEPLOY_TOKEN }}
           repository: Significant-Gravitas/AutoGPT_cloud_infrastructure
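Both deploy workflows bump peter-evans/repository-dispatch from v2 to v3. Under the hood, that action simply fires GitHub's repository_dispatch REST endpoint at the target repository; below is a minimal Python sketch of the equivalent call, with a hypothetical event type (the real event name is whatever the infrastructure repo's workflows listen for):

import requests

GITHUB_TOKEN = "<token with repo scope>"  # stands in for secrets.DEPLOY_TOKEN

# repository_dispatch is a single POST to the GitHub REST API;
# the action is a thin wrapper around this call.
resp = requests.post(
    "https://api.github.com/repos/Significant-Gravitas/AutoGPT_cloud_infrastructure/dispatches",
    headers={
        "Authorization": f"Bearer {GITHUB_TOKEN}",
        "Accept": "application/vnd.github+json",
    },
    json={"event_type": "deploy"},  # event type is hypothetical
)
resp.raise_for_status()  # GitHub returns 204 No Content on success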
@@ -46,21 +46,21 @@
 )
 openai_credentials = APIKeyCredentials(
     id="53c25cb8-e3ee-465c-a4d1-e75a4c899c2a",
-    provider="llm",
+    provider="openai",
     api_key=SecretStr(settings.secrets.openai_api_key),
     title="Use Credits for OpenAI",
     expires_at=None,
 )
 anthropic_credentials = APIKeyCredentials(
     id="24e5d942-d9e3-4798-8151-90143ee55629",
-    provider="llm",
+    provider="anthropic",
     api_key=SecretStr(settings.secrets.anthropic_api_key),
     title="Use Credits for Anthropic",
     expires_at=None,
 )
 groq_credentials = APIKeyCredentials(
     id="4ec22295-8f97-4dd1-b42b-2c6957a02545",
-    provider="llm",
+    provider="groq",
     api_key=SecretStr(settings.secrets.groq_api_key),
     title="Use Credits for Groq",
     expires_at=None,
@@ -79,6 +79,13 @@
     title="Use Credits for Jina",
     expires_at=None,
 )
+unreal_credentials = APIKeyCredentials(
+    id="66f20754-1b81-48e4-91d0-f4f0dd82145f",
+    provider="unreal",
+    api_key=SecretStr(settings.secrets.unreal_speech_api_key),
+    title="Use Credits for Unreal",
+    expires_at=None,
+)
 
 
 DEFAULT_CREDENTIALS = [
@@ -90,6 +97,7 @@
     groq_credentials,
     did_credentials,
     jina_credentials,
+    unreal_credentials,
 ]


@@ -135,6 +143,8 @@ def get_all_creds(self, user_id: str) -> list[Credentials]:
             all_credentials.append(did_credentials)
         if settings.secrets.jina_api_key:
             all_credentials.append(jina_credentials)
+        if settings.secrets.unreal_speech_api_key:
+            all_credentials.append(unreal_credentials)
         return all_credentials
 
     def get_creds_by_id(self, user_id: str, credentials_id: str) -> Credentials | None:
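The net effect of this file's changes: each built-in credential now reports its concrete provider name ("openai", "anthropic", "groq") instead of the generic "llm", and a platform-managed Unreal Speech credential joins the defaults whenever its secret is configured. A rough smoke check, as a sketch only — the module path and store class name are assumptions, not taken from this diff:

# Hypothetical smoke check (class name and import path are assumed):
# with an Unreal Speech key configured, get_all_creds should now include
# the shared "Use Credits for Unreal" credential.
from backend.integrations.credentials_store import (
    IntegrationCredentialsStore,
    openai_credentials,
    unreal_credentials,
)

store = IntegrationCredentialsStore()
creds = store.get_all_creds(user_id="test-user")

assert openai_credentials.provider == "openai"  # previously "llm"
assert any(c.id == unreal_credentials.id for c in creds)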
79 changes: 39 additions & 40 deletions autogpt_platform/autogpt_libs/poetry.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions autogpt_platform/autogpt_libs/pyproject.toml
@@ -15,11 +15,11 @@ pydantic-settings = "^2.6.1"
 pyjwt = "^2.8.0"
 python = ">=3.10,<4.0"
 python-dotenv = "^1.0.1"
-supabase = "^2.9.1"
+supabase = "^2.10.0"
 
 [tool.poetry.group.dev.dependencies]
 redis = "^5.2.0"
-ruff = "^0.7.2"
+ruff = "^0.7.3"
 
 [build-system]
 requires = ["poetry-core"]
4 changes: 2 additions & 2 deletions autogpt_platform/backend/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.11-slim-buster AS builder
+FROM python:3.11.10-slim-bookworm AS builder
 
 # Set environment variables
 ENV PYTHONDONTWRITEBYTECODE 1
@@ -35,7 +35,7 @@ COPY autogpt_platform/backend/schema.prisma ./
 RUN poetry config virtualenvs.create false \
     && poetry run prisma generate
 
-FROM python:3.11-slim-buster AS server_dependencies
+FROM python:3.11.10-slim-bookworm AS server_dependencies
 
 WORKDIR /app
 
100 changes: 100 additions & 0 deletions autogpt_platform/backend/backend/blocks/agent.py
@@ -0,0 +1,100 @@
import logging

from autogpt_libs.utils.cache import thread_cached

from backend.data.block import (
    Block,
    BlockCategory,
    BlockInput,
    BlockOutput,
    BlockSchema,
    BlockType,
    get_block,
)
from backend.data.execution import ExecutionStatus
from backend.data.model import SchemaField

logger = logging.getLogger(__name__)


@thread_cached
def get_executor_manager_client():
    from backend.executor import ExecutionManager
    from backend.util.service import get_service_client

    return get_service_client(ExecutionManager)


@thread_cached
def get_event_bus():
    from backend.data.queue import RedisExecutionEventBus

    return RedisExecutionEventBus()


class AgentExecutorBlock(Block):
    class Input(BlockSchema):
        user_id: str = SchemaField(description="User ID")
        graph_id: str = SchemaField(description="Graph ID")
        graph_version: int = SchemaField(description="Graph Version")

        data: BlockInput = SchemaField(description="Input data for the graph")
        input_schema: dict = SchemaField(description="Input schema for the graph")
        output_schema: dict = SchemaField(description="Output schema for the graph")

    class Output(BlockSchema):
        pass

    def __init__(self):
        super().__init__(
            id="e189baac-8c20-45a1-94a7-55177ea42565",
            description="Executes an existing agent inside your agent",
            input_schema=AgentExecutorBlock.Input,
            output_schema=AgentExecutorBlock.Output,
            block_type=BlockType.AGENT,
            categories={BlockCategory.AGENT},
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        executor_manager = get_executor_manager_client()
        event_bus = get_event_bus()

        graph_exec = executor_manager.add_execution(
            graph_id=input_data.graph_id,
            graph_version=input_data.graph_version,
            user_id=input_data.user_id,
            data=input_data.data,
        )
        log_id = f"Graph #{input_data.graph_id}-V{input_data.graph_version}, exec-id: {graph_exec.graph_exec_id}"
        logger.info(f"Starting execution of {log_id}")

        for event in event_bus.listen(
            graph_id=graph_exec.graph_id, graph_exec_id=graph_exec.graph_exec_id
        ):
            logger.info(
                f"Execution {log_id} produced input {event.input_data} output {event.output_data}"
            )

            if not event.node_id:
                if event.status in [ExecutionStatus.COMPLETED, ExecutionStatus.FAILED]:
                    logger.info(f"Execution {log_id} ended with status {event.status}")
                    break
                else:
                    continue

            if not event.block_id:
                logger.warning(f"{log_id} received event without block_id {event}")
                continue

            block = get_block(event.block_id)
            if not block or block.block_type != BlockType.OUTPUT:
                continue

            output_name = event.input_data.get("name")
            if not output_name:
                logger.warning(f"{log_id} produced an output with no name {event}")
                continue

            for output_data in event.output_data.get("output", []):
                logger.info(f"Execution {log_id} produced {output_name}: {output_data}")
                yield output_name, output_data
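The new agent.py resolves its executor client and event bus through @thread_cached, so each worker thread constructs them once and then reuses them on later calls. The real decorator lives in autogpt_libs.utils.cache and is not shown in this diff; a minimal sketch of what such a per-thread cache can look like:

import threading
from functools import wraps

def thread_cached(func):
    """Cache results per thread: each thread builds the value once
    and reuses it on subsequent calls with the same arguments."""
    local = threading.local()

    @wraps(func)
    def wrapper(*args, **kwargs):
        cache = getattr(local, "cache", None)
        if cache is None:
            cache = local.cache = {}
        key = (args, tuple(sorted(kwargs.items())))
        if key not in cache:
            cache[key] = func(*args, **kwargs)
        return cache[key]

    return wrapper

# Usage: the second call returns the same object without rebuilding it.
@thread_cached
def get_client() -> dict:
    return {"connected": True}

assert get_client() is get_client()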
