Merge pull request #6 from BRUNIX-AI/mrh-online-proposal

Refactor project structure and enhance configuration
This commit is contained in:
Rafael Ruiz 2026-02-16 19:44:07 -08:00 committed by GitHub
commit 5eb7d7c939
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
17 changed files with 2726 additions and 63 deletions

View File

@ -0,0 +1,26 @@
{
"name": "brunix-assistance-engine",
"dockerComposeFile": "../docker-compose.yaml",
"service": "brunix-engine",
"workspaceFolder": "/workspace",
"remoteUser": "root",
"runArgs": [
"--add-host",
"host.docker.internal:host-gateway"
],
"customizations": {
"vscode": {
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"ms-python.debugpy",
"charliermarsh.ruff",
"ms-python.black-formatter",
"njpwerner.autodocstring"
],
"settings": {
"python.defaultInterpreterPath": "/usr/local/bin/python"
}
}
}
}

53
.dockerignore Normal file
View File

@ -0,0 +1,53 @@
# Documentation
*.md
documentation/
# Build and dependency files
Makefile
*.pyc
__pycache__/
*.egg-info/
dist/
build/
# Development and testing
.venv/
venv/
env/
.pytest_cache/
.coverage
# Git and version control
.git/
.gitignore
.gitattributes
# IDE and editor files
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store
# Environment files
.env
.env.local
.env.*.local
# Docker files (no copy Docker files into the image)
Dockerfile
docker-compose.yaml
# CI/CD
.github/
.gitlab-ci.yml
# Temporary files
*.tmp
*.log
scratches/
# Node modules (if any)
node_modules/
npm-debug.log

View File

@ -1,2 +0,0 @@
LANGFUSE_PUBLIC_KEY=pk-lf-...
LANGFUSE_SECRET_KEY=sk-lf-...

141
.gitignore vendored
View File

@ -1,6 +1,6 @@
# Byte-compiled / optimized / DLL files # Byte-compiled / optimized / DLL files
__pycache__/ __pycache__/
*.py[codz] *.py[cod]
*$py.class *$py.class
# C extensions # C extensions
@ -46,7 +46,7 @@ htmlcov/
nosetests.xml nosetests.xml
coverage.xml coverage.xml
*.cover *.cover
*.py.cover *.py,cover
.hypothesis/ .hypothesis/
.pytest_cache/ .pytest_cache/
cover/ cover/
@ -94,35 +94,20 @@ ipython_config.py
# install all needed dependencies. # install all needed dependencies.
#Pipfile.lock #Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
# poetry # poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more # This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries. # commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock #poetry.lock
#poetry.toml
# pdm # pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
#pdm.lock #pdm.lock
#pdm.toml # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
.pdm-python # in version control.
.pdm-build/ # https://pdm.fming.dev/#use-with-ide
.pdm.toml
# pixi
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
#pixi.lock
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
# in the .venv directory. It is recommended not to include this directory in version control.
.pixi
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/ __pypackages__/
@ -136,7 +121,6 @@ celerybeat.pid
# Environments # Environments
.env .env
.envrc
.venv .venv
env/ env/
venv/ venv/
@ -173,35 +157,96 @@ cython_debug/
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear # and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder. # option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/ # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
.idea
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# Abstra # AWS User-specific
# Abstra is an AI-powered process automation framework. .idea/**/aws.xml
# Ignore directories containing user credentials, local state, and settings.
# Learn more at https://abstra.io/docs
.abstra/
# Visual Studio Code # Generated files
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore .idea/**/contentModel.xml
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
# and can be added to the global gitignore or merged into this file. However, if you prefer,
# you could uncomment the following to ignore the entire vscode folder
# .vscode/
# Ruff stuff: # Sensitive or high-churn files
.ruff_cache/ .idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# PyPI configuration file # Gradle
.pypirc .idea/**/gradle.xml
.idea/**/libraries
# Cursor # Gradle and Maven with auto-import
# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to # When using Gradle or Maven with auto-import, you should exclude module files,
# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data # since they will be recreated, and may cause churn. Uncomment if using
# refer to https://docs.cursor.com/context/ignore-files # auto-import.
.cursorignore # .idea/artifacts
.cursorindexingignore # .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# Marimo # CMake
marimo/_static/ cmake-build-*/
marimo/_lsp/
__marimo__/ # Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# SonarLint plugin
.idea/sonarlint/
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Kubernetes
/kubernetes
#documentation
/documentation
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
/kubernetes
/data
/data_tmp
#logging.yml
.python-version
src/mrh_saltoki_common/py.typed
*.history

22
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,22 @@
{
"makefile.configureOnOpen": false,
"python-envs.pythonProjects": [],
"python.terminal.useEnvFile": true,
"python.envFile": "${workspaceFolder}/.env",
"jupyter.logging.level": "info",
"terminal.integrated.env.linux": {
"PYTHONPATH": "${workspaceFolder}:${env:PYTHONPATH}"
},
"python.analysis.ignore": [
"*"
], // Disables Pylance's native linting
"python.analysis.typeCheckingMode": "basic", // Keeps Pylance's type validation
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll.ruff": "explicit",
"source.organizeImports.ruff": "explicit"
}
}
}

View File

@ -3,8 +3,11 @@ FROM python:3.11-slim
ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1 ENV PYTHONUNBUFFERED=1
WORKDIR /app WORKDIR /app
COPY ./requirements.txt .
RUN apt-get update && apt-get install -y --no-install-recommends \ RUN apt-get update && apt-get install -y --no-install-recommends \
build-essential \ build-essential \
curl \ curl \
@ -12,18 +15,10 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
RUN pip install --no-cache-dir --upgrade pip RUN pip install --no-cache-dir --upgrade pip
RUN pip install --no-cache-dir -r requirements.txt
RUN pip install --no-cache-dir \
langchain==0.1.0 \
langchain-community==0.0.10 \
langchain-elasticsearch \
grpcio \
grpcio-tools \
grpcio-reflection \
pydantic
COPY ./protos ./protos COPY ./protos ./protos
COPY . . COPY ./src ./src
RUN python -m grpc_tools.protoc \ RUN python -m grpc_tools.protoc \
--proto_path=./protos \ --proto_path=./protos \

22
Makefile Normal file
View File

@ -0,0 +1,22 @@
# Developer entry points for the Brunix assistance engine.
# NOTE: recipe lines must be indented with a real tab character.

# .PHONY must list the targets actually defined in this file — the previous
# list named targets (requirements, docker-build, clean, …) that do not exist.
.PHONY: help sync_requirements tunnels_up compose_up

help:
	@echo "Available commands:"
	@echo "  make sync_requirements - Export dependencies from pyproject.toml to requirements.txt"
	@echo "  make tunnels_up - Start tunnels"
	@echo "  make compose_up - Run tunnels script and start Docker Compose"

# Regenerate requirements.txt from pyproject.toml (uv is the lockfile source of truth).
sync_requirements:
	@echo "Exporting dependencies from pyproject.toml to requirements.txt..."
	uv export --format requirements-txt --no-hashes --no-dev -o requirements.txt
	@echo "✓ requirements.txt updated successfully"

# Launch the kubectl port-forward tunnels in the background.
# stdin is redirected from /dev/null so the backgrounded script never blocks on a tty.
tunnels_up:
	bash ./scripts/start-tunnels.sh < /dev/null &
	@echo "✓ Tunnels started!"

# Bring the stack up: reuse tunnels_up as a prerequisite instead of duplicating
# its recipe, wait briefly for the tunnels to bind, then build and start compose.
compose_up: tunnels_up
	sleep 2
	docker compose up -d --build
	@echo "✓ Done!"

View File

@ -115,15 +115,17 @@ Open a terminal and establish the connection to the Devaron Cluster:
```bash ```bash
# 1. AI Model Tunnel (Ollama) # 1. AI Model Tunnel (Ollama)
kubectl port-forward svc/ollama-light-service 11434:11434 -n brunix --kubeconfig ./ivar.yaml & kubectl port-forward --address 0.0.0.0 svc/ollama-light-service 11434:11434 -n brunix --kubeconfig ./kubernetes/ivar.yaml &
# 2. Knowledge Base Tunnel (Elasticsearch) # 2. Knowledge Base Tunnel (Elasticsearch)
kubectl port-forward svc/brunix-vector-db 9200:9200 -n brunix --kubeconfig ./ivar.yaml & kubectl port-forward --address 0.0.0.0 svc/brunix-vector-db 9200:9200 -n brunix --kubeconfig ./kubernetes/ivar.yaml &
# 3. Observability DB Tunnel (PostgreSQL) # 3. Observability DB Tunnel (PostgreSQL)
kubectl port-forward svc/brunix-postgres 5432:5432 -n brunix --kubeconfig ./ivar.yaml & kubectl port-forward --address 0.0.0.0 svc/brunix-postgres 5432:5432 -n brunix --kubeconfig ./kubernetes/ivar.yaml &
``` ```
### 4. Launch the Engine ### 4. Launch the Engine
```bash ```bash
docker-compose up -d --build docker-compose up -d --build

View File

@ -4,6 +4,9 @@ services:
brunix-engine: brunix-engine:
build: . build: .
container_name: brunix-assistance-engine container_name: brunix-assistance-engine
volumes:
- .:/workspace
env_file: .env
ports: ports:
- "50052:50051" - "50052:50051"
environment: environment:

0
notebooks/.gitkeep Normal file
View File

19
pyproject.toml Normal file
View File

@ -0,0 +1,19 @@
[project]
name = "assistance-engine"
version = "0.1.0"
description = "Brunix assistance engine: gRPC RAG service built on LangChain, Elasticsearch, and Ollama"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
"grpcio>=1.78.0",
"grpcio-reflection>=1.78.0",
"grpcio-tools>=1.78.0",
"langchain>=1.2.10",
"langchain-community>=0.4.1",
"langchain-elasticsearch>=1.0.0",
"loguru>=0.7.3",
"numpy>=2.4.2",
"pandas>=3.0.0",
"python-dotenv>=1.2.1",
"tqdm>=4.67.3",
]

226
requirements.txt Normal file
View File

@ -0,0 +1,226 @@
# This file was autogenerated by uv via the following command:
# uv export --format requirements-txt --no-hashes --no-dev -o requirements.txt
aiohappyeyeballs==2.6.1
# via aiohttp
aiohttp==3.13.3
# via langchain-community
aiosignal==1.4.0
# via aiohttp
annotated-types==0.7.0
# via pydantic
anyio==4.12.1
# via httpx
attrs==25.4.0
# via aiohttp
certifi==2026.1.4
# via
# elastic-transport
# httpcore
# httpx
# requests
charset-normalizer==3.4.4
# via requests
colorama==0.4.6 ; sys_platform == 'win32'
# via
# loguru
# tqdm
dataclasses-json==0.6.7
# via langchain-community
elastic-transport==8.17.1
# via elasticsearch
elasticsearch==8.19.3
# via langchain-elasticsearch
frozenlist==1.8.0
# via
# aiohttp
# aiosignal
greenlet==3.3.1 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'
# via sqlalchemy
grpcio==1.78.0
# via
# assistance-engine
# grpcio-reflection
# grpcio-tools
grpcio-reflection==1.78.0
# via assistance-engine
grpcio-tools==1.78.0
# via assistance-engine
h11==0.16.0
# via httpcore
httpcore==1.0.9
# via httpx
httpx==0.28.1
# via
# langgraph-sdk
# langsmith
httpx-sse==0.4.3
# via langchain-community
idna==3.11
# via
# anyio
# httpx
# requests
# yarl
jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
# via jsonpatch
langchain==1.2.10
# via assistance-engine
langchain-classic==1.0.1
# via langchain-community
langchain-community==0.4.1
# via assistance-engine
langchain-core==1.2.11
# via
# langchain
# langchain-classic
# langchain-community
# langchain-elasticsearch
# langchain-text-splitters
# langgraph
# langgraph-checkpoint
# langgraph-prebuilt
langchain-elasticsearch==1.0.0
# via assistance-engine
langchain-text-splitters==1.1.0
# via langchain-classic
langgraph==1.0.8
# via langchain
langgraph-checkpoint==4.0.0
# via
# langgraph
# langgraph-prebuilt
langgraph-prebuilt==1.0.7
# via langgraph
langgraph-sdk==0.3.5
# via langgraph
langsmith==0.7.1
# via
# langchain-classic
# langchain-community
# langchain-core
loguru==0.7.3
# via assistance-engine
marshmallow==3.26.2
# via dataclasses-json
multidict==6.7.1
# via
# aiohttp
# yarl
mypy-extensions==1.1.0
# via typing-inspect
numpy==2.4.2
# via
# assistance-engine
# elasticsearch
# langchain-community
# pandas
orjson==3.11.7
# via
# langgraph-sdk
# langsmith
ormsgpack==1.12.2
# via langgraph-checkpoint
packaging==26.0
# via
# langchain-core
# langsmith
# marshmallow
pandas==3.0.0
# via assistance-engine
propcache==0.4.1
# via
# aiohttp
# yarl
protobuf==6.33.5
# via
# grpcio-reflection
# grpcio-tools
pydantic==2.12.5
# via
# langchain
# langchain-classic
# langchain-core
# langgraph
# langsmith
# pydantic-settings
pydantic-core==2.41.5
# via pydantic
pydantic-settings==2.12.0
# via langchain-community
python-dateutil==2.9.0.post0
# via
# elasticsearch
# pandas
python-dotenv==1.2.1
# via
# assistance-engine
# pydantic-settings
pyyaml==6.0.3
# via
# langchain-classic
# langchain-community
# langchain-core
requests==2.32.5
# via
# langchain-classic
# langchain-community
# langsmith
# requests-toolbelt
requests-toolbelt==1.0.0
# via langsmith
setuptools==82.0.0
# via grpcio-tools
simsimd==6.5.12
# via elasticsearch
six==1.17.0
# via python-dateutil
sqlalchemy==2.0.46
# via
# langchain-classic
# langchain-community
tenacity==9.1.4
# via
# langchain-community
# langchain-core
tqdm==4.67.3
# via assistance-engine
typing-extensions==4.15.0
# via
# aiosignal
# anyio
# elasticsearch
# grpcio
# langchain-core
# pydantic
# pydantic-core
# sqlalchemy
# typing-inspect
# typing-inspection
typing-inspect==0.9.0
# via dataclasses-json
typing-inspection==0.4.2
# via
# pydantic
# pydantic-settings
tzdata==2025.3 ; sys_platform == 'emscripten' or sys_platform == 'win32'
# via pandas
urllib3==2.6.3
# via
# elastic-transport
# requests
uuid-utils==0.14.0
# via
# langchain-core
# langsmith
win32-setctime==1.2.0 ; sys_platform == 'win32'
# via loguru
xxhash==3.6.0
# via
# langgraph
# langsmith
yarl==1.22.0
# via aiohttp
zstandard==0.25.0
# via langsmith

View File

@ -0,0 +1,19 @@
"""Bootstrap the RAG components: an Ollama LLM + embeddings and an
Elasticsearch-backed vector store over the ``avap_manuals`` index."""

import os

from langchain_elasticsearch import ElasticsearchStore
from langchain_community.llms import Ollama
from langchain_community.embeddings import OllamaEmbeddings

# Connection settings. Environment variables override the defaults so the same
# script works both inside Docker (cluster service DNS) and through the local
# port-forward tunnels; the fallback values are identical to the previous
# hard-coded ones, so behavior is unchanged when nothing is set.
es_url = os.getenv("ES_URL", "http://localhost:9200")
base_url = os.getenv("OLLAMA_BASE_URL", "http://ollama-light-service:11434")
model_name = os.getenv("OLLAMA_MODEL", "qwen2.5:1.5b")

print("Starting server")  # was f"Starting server" — f-prefix with no placeholders

# NOTE(review): Ollama / OllamaEmbeddings in langchain_community are deprecated
# in favor of the langchain-ollama package — confirm before bumping langchain.
llm = Ollama(base_url=base_url, model=model_name)
embeddings = OllamaEmbeddings(base_url=base_url, model="nomic-embed-text")

vector_store = ElasticsearchStore(
    es_url=es_url, index_name="avap_manuals", embedding=embeddings
)
print(vector_store)

View File

@ -0,0 +1,5 @@
# Placeholder script — the unused import presumably serves as a smoke test that
# langchain_elasticsearch is installed in the image; verify intent with the author.
from langchain_elasticsearch import ElasticsearchStore
print("Hello world")

0
scripts/.gitkeep Normal file
View File

49
scripts/start-tunnels.sh Executable file
View File

@ -0,0 +1,49 @@
#!/usr/bin/env bash
# Start Infrastructure Tunnels for Brunix Assistance Engine
# Connects to the Devaron Cluster in Vultr Cloud
#
# Usage: ./scripts/start-tunnels.sh
# Requires: kubectl on PATH, and a project-root .env containing
#           KUBECONFIG_PATH=<path relative to the project root>

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
readonly SCRIPT_DIR PROJECT_ROOT

# Colors for output
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly NC='\033[0m' # No Color

die() { printf 'Error: %s\n' "$*" >&2; exit 1; }

command -v kubectl >/dev/null || die "kubectl not found on PATH"
[ -f "$PROJECT_ROOT/.env" ] || die ".env not found at $PROJECT_ROOT/.env"

# Read KUBECONFIG_PATH from .env. Anchor the match to the start of the line and
# take only the first hit (the old unanchored grep matched ANY line mentioning
# KUBECONFIG_PATH); -f2- keeps values that themselves contain '='; tr strips a
# trailing CR in case the .env file has Windows line endings.
kubeconfig_rel="$(grep -m1 '^KUBECONFIG_PATH=' "$PROJECT_ROOT/.env" | cut -d '=' -f2- | tr -d '\r')"
readonly KUBECONFIG_PATH="$PROJECT_ROOT/$kubeconfig_rel"

[ -f "$KUBECONFIG_PATH" ] || die "Kubeconfig not found at $KUBECONFIG_PATH"

echo -e "${GREEN}Starting Brunix Infrastructure Tunnels...${NC}"
echo ""

# 1. AI Model Tunnel (Ollama)
echo -e "${YELLOW}[1/3]${NC} Starting Ollama Light Service tunnel (localhost:11434)..."
kubectl port-forward --address 0.0.0.0 svc/ollama-light-service 11434:11434 -n brunix --kubeconfig "$KUBECONFIG_PATH" &
OLLAMA_PID=$!

# 2. Knowledge Base Tunnel (Elasticsearch)
echo -e "${YELLOW}[2/3]${NC} Starting Elasticsearch Vector DB tunnel (localhost:9200)..."
kubectl port-forward --address 0.0.0.0 svc/brunix-vector-db 9200:9200 -n brunix --kubeconfig "$KUBECONFIG_PATH" &
ES_PID=$!

# 3. Observability DB Tunnel (PostgreSQL)
echo -e "${YELLOW}[3/3]${NC} Starting PostgreSQL tunnel (localhost:5432)..."
kubectl port-forward --address 0.0.0.0 svc/brunix-postgres 5432:5432 -n brunix --kubeconfig "$KUBECONFIG_PATH" &
PG_PID=$!

echo ""
echo -e "${GREEN}✓ All tunnels started successfully${NC}"
echo ""
echo "Process IDs:"
echo "  Ollama:        $OLLAMA_PID"
echo "  Elasticsearch: $ES_PID"
echo "  PostgreSQL:    $PG_PID"
echo ""
echo "To stop all tunnels, run:"
echo "  kill $OLLAMA_PID $ES_PID $PG_PID"
echo ""

2179
uv.lock Normal file

File diff suppressed because it is too large Load Diff