From fcbdde2a3493844bc8d0a00f9f30b8e18a0093c7 Mon Sep 17 00:00:00 2001
From: rafa-ruiz
Date: Mon, 9 Feb 2026 13:15:40 -0800
Subject: [PATCH] Tech stack

---
 Dockerfile          | 38 ++++++++++++++++++++++++
 docker-compose.yaml | 63 ++++++++++++++++++++++++++++++++++++++++
 protos/brunix.proto | 18 ++++++++++++
 src/server.py       | 70 +++++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 189 insertions(+)
 create mode 100644 Dockerfile
 create mode 100644 docker-compose.yaml
 create mode 100644 protos/brunix.proto
 create mode 100644 src/server.py

diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..440a121
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,38 @@
+FROM python:3.11-slim
+
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+
+WORKDIR /app
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    build-essential \
+    curl \
+    libpq-dev \
+    protobuf-compiler \
+    && rm -rf /var/lib/apt/lists/*
+
+RUN pip install --no-cache-dir --upgrade pip
+RUN pip install --no-cache-dir \
+    langchain==0.1.0 \
+    langfuse>=2.0.0 \
+    langgraph \
+    langchain-openai \
+    langchain-elasticsearch \
+    grpcio \
+    grpcio-tools \
+    psycopg2-binary \
+    pydantic
+
+COPY ./protos ./protos
+COPY . .
+
+RUN python -m grpc_tools.protoc \
+    --proto_path=./protos \
+    --python_out=./src \
+    --grpc_python_out=./src \
+    ./protos/brunix.proto
+
+EXPOSE 50051
+CMD ["tail", "-f", "/dev/null"]
+#CMD ["python", "src/server.py"]
diff --git a/docker-compose.yaml b/docker-compose.yaml
new file mode 100644
index 0000000..38112d0
--- /dev/null
+++ b/docker-compose.yaml
@@ -0,0 +1,63 @@
+version: '3.8'
+
+services:
+  brunix-engine:
+    build: .  # image defined by the Dockerfile added in this same patch
+    container_name: brunix-assistance-engine
+    ports:
+      - "50052:50051"  # host 50052 -> container gRPC port 50051 (EXPOSEd by the Dockerfile)
+    environment:
+      - ELASTICSEARCH_URL=http://elasticsearch:9200
+      - LANGFUSE_PUBLIC_KEY=${LANGFUSE_PUBLIC_KEY}
+      - LANGFUSE_SECRET_KEY=${LANGFUSE_SECRET_KEY}
+      - LANGFUSE_HOST=http://langfuse:3000
+      - OPENAI_API_KEY=${OPENAI_API_KEY}  # or whichever provider Ivar chooses
+    depends_on:
+      - elasticsearch
+      - langfuse
+    networks:
+      - avap-network
+
+  elasticsearch:
+    image: docker.elastic.co/elasticsearch/elasticsearch:8.12.0
+    container_name: brunix-vector-db
+    environment:
+      - discovery.type=single-node  # dev setup: no cluster
+      - xpack.security.enabled=false  # NOTE(review): auth disabled — dev only
+      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
+    ports:
+      - "9200:9200"
+    networks:
+      - avap-network
+
+  langfuse:
+    image: langfuse/langfuse:2.33.0
+    container_name: brunix-observability
+    ports:
+      - "3000:3000"
+    environment:
+      - DATABASE_URL=postgresql://postgres:brunix_pass@langfuse-db:5432/postgres
+      - NEXTAUTH_URL=http://localhost:3000
+      - NEXTAUTH_SECRET=my_ultra_secret  # NOTE(review): hard-coded secret — replace before any non-local deployment
+      - SALT=my_salt  # NOTE(review): hard-coded salt — same caveat
+    depends_on:
+      - langfuse-db
+    networks:
+      - avap-network
+
+  langfuse-db:
+    image: postgres:15
+    container_name: brunix-postgres
+    environment:
+      - POSTGRES_PASSWORD=brunix_pass  # NOTE(review): hard-coded password — dev only
+    volumes:
+      - postgres_data:/var/lib/postgresql/data  # persist Langfuse data across restarts
+    networks:
+      - avap-network
+
+networks:
+  avap-network:
+    driver: bridge
+
+volumes:
+  postgres_data:
diff --git a/protos/brunix.proto b/protos/brunix.proto
new file mode 100644
index 0000000..420662b
--- /dev/null
+++ b/protos/brunix.proto
@@ -0,0 +1,18 @@
+syntax = "proto3";
+
+package brunix;
+
+service AssistanceEngine {
+  rpc AskAgent (AgentRequest) returns (stream AgentResponse);  // server-streaming: many AgentResponse per request
+}
+
+message AgentRequest {
+  string query = 1;
+  string session_id = 2;
+}
+
+message AgentResponse {
+  string text = 1;
+  string avap_code = 2;
+  bool is_final = 3;  // server.py sets this true on the terminating stream message
+}
diff --git a/src/server.py b/src/server.py
new file mode 100644
index 0000000..c3437c4
--- /dev/null
+++ b/src/server.py
@@ -0,0 +1,70 @@
+import os
+import grpc
+from concurrent import futures
+import logging
+
import brunix_pb2
import brunix_pb2_grpc
from langfuse.callback import CallbackHandler
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("BrunixServer")


class BrunixService(brunix_pb2_grpc.AssistanceEngineServicer):
    """Placeholder implementation of the AssistanceEngine gRPC service.

    Streams AgentResponse messages back for each AgentRequest. The
    Langfuse observability handler and the LLM are not wired up yet;
    both attributes are kept defined (as None) so the handlers can
    reference them safely until they are configured.
    """

    def __init__(self):
        # BUG FIX: self.langfuse_handler was referenced in AskAgent but never
        # assigned (the assignment below is still commented out), so every RPC
        # died with AttributeError. Define it as None until configured.
        # TODO: enable once Langfuse credentials are available:
        # self.langfuse_handler = CallbackHandler(
        #     public_key=os.getenv("LANGFUSE_PUBLIC_KEY"),
        #     secret_key=os.getenv("LANGFUSE_SECRET_KEY"),
        #     host=os.getenv("LANGFUSE_HOST"),
        # )
        self.langfuse_handler = None
        # TODO(Ivar): plug in the chosen model here:
        # self.llm = ChatOpenAI(
        #     model="gpt-4-turbo-preview",
        #     temperature=0.2,
        #     streaming=True,
        # )
        self.llm = None
        logger.info("Brunix Engine initializing.")

    def AskAgent(self, request, context):  # PLACEHOLDER implementation
        """Stream AgentResponse chunks for ``request.query``.

        Yields zero or more non-final chunks followed by one message with
        ``is_final=True``. On any failure, sets gRPC status INTERNAL with
        the exception text.
        """
        logger.info("Request received: %s", request.query)
        try:
            # Only attach the Langfuse callback when it is configured.
            callbacks = [self.langfuse_handler] if self.langfuse_handler else []
            config = {"callbacks": callbacks, "run_name": "Brunix_Query"}

            # Placeholder chunk source until the real agent graph is wired in.
            chunks = [
                {"text": ""},
            ]

            for chunk in chunks:
                # BUG FIX: the previous code passed node_id=chunk["node"] —
                # AgentResponse (protos/brunix.proto) has no node_id field and
                # the chunk dicts have no "node" key, so this raised on the
                # very first chunk.
                yield brunix_pb2.AgentResponse(
                    text=chunk["text"],
                    avap_code="",
                    is_final=False,
                )

            # Terminating message so clients know the stream is complete.
            yield brunix_pb2.AgentResponse(text="", is_final=True)

        except Exception as e:
            # logger.exception records the traceback, unlike logger.error.
            logger.exception("AGENT ERROR: %s", e)
            context.set_details(str(e))
            context.set_code(grpc.StatusCode.INTERNAL)


def serve():
    """Start the gRPC server on port 50051 and block until termination."""
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    brunix_pb2_grpc.add_AssistanceEngineServicer_to_server(BrunixService(), server)

    server.add_insecure_port('[::]:50051')
    logger.info("Brunix gRPC Server listen on port 50051")

    server.start()
    server.wait_for_termination()


if __name__ == '__main__':
    serve()