refactor: optimize Docker build context, remove redundant /workspace and enforce production-ready standards

This commit is contained in:
rafa-ruiz 2026-02-16 20:45:01 -08:00
parent 5eb7d7c939
commit 0415a3c271
5 changed files with 385 additions and 0 deletions

31
Docker/Dockerfile Normal file
View File

@ -0,0 +1,31 @@
# syntax=docker/dockerfile:1
FROM python:3.11-slim

# Don't write .pyc files; force unbuffered stdout/stderr so container logs
# appear immediately.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

WORKDIR /app

# OS-level build deps first: this layer changes least often, so keep it ahead
# of any COPY to maximize cache reuse. Clean apt lists in the same layer so
# the cache never persists into the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        curl \
        protobuf-compiler \
    && rm -rf /var/lib/apt/lists/*

# Copy only the dependency manifest so source-code changes don't invalidate
# the (expensive) pip layer.
COPY ./requirements.txt .
RUN pip install --no-cache-dir --upgrade pip \
    && pip install --no-cache-dir -r requirements.txt

COPY ./protos ./protos
COPY ./src ./src

# Generate the gRPC Python stubs into ./src so server.py can import them
# as top-level modules (brunix_pb2, brunix_pb2_grpc).
RUN python -m grpc_tools.protoc \
    --proto_path=./protos \
    --python_out=./src \
    --grpc_python_out=./src \
    ./protos/brunix.proto

# Run as a dedicated non-root user; stable numeric UID for runtimes that
# enforce runAsNonRoot.
RUN useradd --system --create-home --uid 10001 appuser \
    && chown -R appuser:appuser /app
USER appuser

# Documentation only: the gRPC server listens on 50051 (see src/server.py).
EXPOSE 50051

CMD ["python", "src/server.py"]

View File

@ -0,0 +1,23 @@
# NOTE: the top-level `version:` key is obsolete and ignored by Compose v2,
# so it is intentionally omitted.
services:
  brunix-engine:
    build: .
    container_name: brunix-assistance-engine
    env_file: .env
    ports:
      # Host 50052 -> container 50051 (the gRPC port the image EXPOSEs).
      - "50052:50051"
    environment:
      - ELASTICSEARCH_URL=http://host.docker.internal:9200
      # NOTE(review): the default password targets a local dev Postgres only;
      # set POSTGRES_PASSWORD in .env for any non-local deployment.
      - DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD:-brunix_pass}@host.docker.internal:5432/postgres
      - LANGFUSE_HOST=http://45.77.119.180
      - LANGFUSE_PUBLIC_KEY=${LANGFUSE_PUBLIC_KEY}
      - LANGFUSE_SECRET_KEY=${LANGFUSE_SECRET_KEY}
      - LLM_BASE_URL=http://host.docker.internal:11434
    extra_hosts:
      # Make host services (Elasticsearch, Postgres, Ollama) reachable from
      # inside the container on Linux hosts.
      - "host.docker.internal:host-gateway"

View File

@ -0,0 +1,18 @@
syntax = "proto3";
package brunix;

// Assistance engine exposed by src/server.py on port 50051.
service AssistanceEngine {
  // Server-streaming RPC: the answer is yielded in chunks as the LLM
  // generates it; the final message carries is_final = true.
  rpc AskAgent (AgentRequest) returns (stream AgentResponse);
}

message AgentRequest {
  string query = 1;       // user question forwarded to the LLM
  string session_id = 2;  // conversation/session correlation id
}

message AgentResponse {
  string text = 1;       // one streamed chunk of the LLM answer
  string avap_code = 2;  // AVAP code tag attached to the chunk
  bool is_final = 3;     // true only on the terminating message of the stream
}

226
Docker/requirements.txt Normal file
View File

@ -0,0 +1,226 @@
# This file was autogenerated by uv via the following command:
# uv export --format requirements-txt --no-hashes --no-dev -o requirements.txt
aiohappyeyeballs==2.6.1
# via aiohttp
aiohttp==3.13.3
# via langchain-community
aiosignal==1.4.0
# via aiohttp
annotated-types==0.7.0
# via pydantic
anyio==4.12.1
# via httpx
attrs==25.4.0
# via aiohttp
certifi==2026.1.4
# via
# elastic-transport
# httpcore
# httpx
# requests
charset-normalizer==3.4.4
# via requests
colorama==0.4.6 ; sys_platform == 'win32'
# via
# loguru
# tqdm
dataclasses-json==0.6.7
# via langchain-community
elastic-transport==8.17.1
# via elasticsearch
elasticsearch==8.19.3
# via langchain-elasticsearch
frozenlist==1.8.0
# via
# aiohttp
# aiosignal
greenlet==3.3.1 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'
# via sqlalchemy
grpcio==1.78.0
# via
# assistance-engine
# grpcio-reflection
# grpcio-tools
grpcio-reflection==1.78.0
# via assistance-engine
grpcio-tools==1.78.0
# via assistance-engine
h11==0.16.0
# via httpcore
httpcore==1.0.9
# via httpx
httpx==0.28.1
# via
# langgraph-sdk
# langsmith
httpx-sse==0.4.3
# via langchain-community
idna==3.11
# via
# anyio
# httpx
# requests
# yarl
jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
# via jsonpatch
langchain==1.2.10
# via assistance-engine
langchain-classic==1.0.1
# via langchain-community
langchain-community==0.4.1
# via assistance-engine
langchain-core==1.2.11
# via
# langchain
# langchain-classic
# langchain-community
# langchain-elasticsearch
# langchain-text-splitters
# langgraph
# langgraph-checkpoint
# langgraph-prebuilt
langchain-elasticsearch==1.0.0
# via assistance-engine
langchain-text-splitters==1.1.0
# via langchain-classic
langgraph==1.0.8
# via langchain
langgraph-checkpoint==4.0.0
# via
# langgraph
# langgraph-prebuilt
langgraph-prebuilt==1.0.7
# via langgraph
langgraph-sdk==0.3.5
# via langgraph
langsmith==0.7.1
# via
# langchain-classic
# langchain-community
# langchain-core
loguru==0.7.3
# via assistance-engine
marshmallow==3.26.2
# via dataclasses-json
multidict==6.7.1
# via
# aiohttp
# yarl
mypy-extensions==1.1.0
# via typing-inspect
numpy==2.4.2
# via
# assistance-engine
# elasticsearch
# langchain-community
# pandas
orjson==3.11.7
# via
# langgraph-sdk
# langsmith
ormsgpack==1.12.2
# via langgraph-checkpoint
packaging==26.0
# via
# langchain-core
# langsmith
# marshmallow
pandas==3.0.0
# via assistance-engine
propcache==0.4.1
# via
# aiohttp
# yarl
protobuf==6.33.5
# via
# grpcio-reflection
# grpcio-tools
pydantic==2.12.5
# via
# langchain
# langchain-classic
# langchain-core
# langgraph
# langsmith
# pydantic-settings
pydantic-core==2.41.5
# via pydantic
pydantic-settings==2.12.0
# via langchain-community
python-dateutil==2.9.0.post0
# via
# elasticsearch
# pandas
python-dotenv==1.2.1
# via
# assistance-engine
# pydantic-settings
pyyaml==6.0.3
# via
# langchain-classic
# langchain-community
# langchain-core
requests==2.32.5
# via
# langchain-classic
# langchain-community
# langsmith
# requests-toolbelt
requests-toolbelt==1.0.0
# via langsmith
setuptools==82.0.0
# via grpcio-tools
simsimd==6.5.12
# via elasticsearch
six==1.17.0
# via python-dateutil
sqlalchemy==2.0.46
# via
# langchain-classic
# langchain-community
tenacity==9.1.4
# via
# langchain-community
# langchain-core
tqdm==4.67.3
# via assistance-engine
typing-extensions==4.15.0
# via
# aiosignal
# anyio
# elasticsearch
# grpcio
# langchain-core
# pydantic
# pydantic-core
# sqlalchemy
# typing-inspect
# typing-inspection
typing-inspect==0.9.0
# via dataclasses-json
typing-inspection==0.4.2
# via
# pydantic
# pydantic-settings
tzdata==2025.3 ; sys_platform == 'emscripten' or sys_platform == 'win32'
# via pandas
urllib3==2.6.3
# via
# elastic-transport
# requests
uuid-utils==0.14.0
# via
# langchain-core
# langsmith
win32-setctime==1.2.0 ; sys_platform == 'win32'
# via loguru
xxhash==3.6.0
# via
# langgraph
# langsmith
yarl==1.22.0
# via aiohttp
zstandard==0.25.0
# via langsmith

87
Docker/src/server.py Normal file
View File

@ -0,0 +1,87 @@
import os
import grpc
import logging
from concurrent import futures
from grpc_reflection.v1alpha import reflection
import brunix_pb2
import brunix_pb2_grpc
from langchain_community.llms import Ollama
from langchain_community.embeddings import OllamaEmbeddings
from langchain_elasticsearch import ElasticsearchStore
from langchain_core.prompts import ChatPromptTemplate
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("brunix-engine")
class BrunixEngine(brunix_pb2_grpc.AssistanceEngineServicer):
    """gRPC servicer that streams LLM-generated answers for Brunix.

    Wires an Ollama LLM, Ollama embeddings and an Elasticsearch vector store
    (index ``avap_manuals``); endpoints are configurable via environment
    variables (``LLM_BASE_URL``, ``LLM_MODEL``, ``ELASTICSEARCH_URL``).
    """

    def __init__(self):
        # LLM endpoint/model come from the environment; defaults target the
        # in-cluster Ollama service.
        self.base_url = os.getenv("LLM_BASE_URL", "http://ollama-light-service:11434")
        self.model_name = os.getenv("LLM_MODEL", "qwen2.5:1.5b")
        # Lazy %-style args: no interpolation cost when the level is disabled.
        logger.info("Starting server (model=%s, llm=%s)", self.model_name, self.base_url)
        self.llm = Ollama(base_url=self.base_url, model=self.model_name)
        self.embeddings = OllamaEmbeddings(base_url=self.base_url, model="nomic-embed-text")
        es_url = os.getenv("ELASTICSEARCH_URL", "http://elasticsearch:9200")
        logger.info("ElasticSearch on: %s", es_url)
        self.vector_store = ElasticsearchStore(
            es_url=es_url,
            index_name="avap_manuals",
            embedding=self.embeddings
        )

    def AskAgent(self, request, context):
        """Stream AgentResponse chunks answering ``request.query``.

        Yields one AgentResponse per LLM chunk (``is_final=False``), then a
        terminating empty message with ``is_final=True``. On any exception an
        error message is streamed as the final chunk instead of raising.
        """
        logger.info("request (%s): %s", request.session_id, request.query[:50])
        try:
            # TODO(review): self.vector_store is built in __init__ but never
            # queried here — context is currently a hard-coded placeholder.
            # Wire up retrieval (e.g. similarity_search) when the index is ready.
            context_text = "AVAP is a virtual programming language for API development."
            # Prompt engineering: system persona + retrieved context + question.
            prompt = ChatPromptTemplate.from_template("""
            You are Brunix, the 101OBEX artificial intelligence for the AVAP Sphere platform. Respond in a professional manner.
            CONTEXT:
            {context}
            QUESTION:
            {question}
            """)
            chain = prompt | self.llm
            for chunk in chain.stream({"context": context_text, "question": request.query}):
                yield brunix_pb2.AgentResponse(
                    text=str(chunk),
                    avap_code="AVAP-2026",
                    is_final=False
                )
            # Explicit end-of-stream marker for the client.
            yield brunix_pb2.AgentResponse(text="", avap_code="", is_final=True)
        except Exception as e:
            # Best-effort error reporting: surface the failure to the client
            # as the final streamed message rather than aborting the RPC.
            logger.error("Error in AskAgent: %s", e)
            yield brunix_pb2.AgentResponse(text=f"[Error Motor]: {str(e)}", is_final=True)
def serve():
    """Start the Brunix gRPC server and block until termination.

    The listen port is configurable via the ``GRPC_PORT`` environment
    variable (default 50051, matching the Dockerfile's EXPOSE).
    """
    port = int(os.getenv("GRPC_PORT", "50051"))
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    brunix_pb2_grpc.add_AssistanceEngineServicer_to_server(BrunixEngine(), server)
    # Enable server reflection so tools like grpcurl can discover the API
    # without needing the .proto file.
    service_names = (
        brunix_pb2.DESCRIPTOR.services_by_name['AssistanceEngine'].full_name,
        reflection.SERVICE_NAME,
    )
    reflection.enable_server_reflection(service_names, server)
    # NOTE(review): insecure port — acceptable behind an internal network /
    # sidecar TLS; confirm before exposing publicly.
    server.add_insecure_port(f'[::]:{port}')
    logger.info("Brunix Engine on port %d", port)
    server.start()
    server.wait_for_termination()


if __name__ == '__main__':
    serve()