Merge branch 'mrh-online-proposal' of github.com:BRUNIX-AI/assistance-engine into mrh-online-proposal
This commit is contained in:
commit
e23a33cfac
|
|
@ -7,6 +7,9 @@ requires-python = ">=3.11"
|
||||||
dependencies = [
    "grpcio>=1.78.0",
    "grpcio-tools>=1.78.0",
    "langchain>=1.2.10",
    "langchain-community>=0.4.1",
    "langchain-elasticsearch>=1.0.0",
    "loguru>=0.7.3",
    "numpy>=2.4.2",
    "pandas>=3.0.0",
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
"""Wire a local Ollama LLM and embedding model to an Elasticsearch vector store.

Smoke-test script: constructs the LLM, the embedding client, and an
``ElasticsearchStore`` over the ``avap_manuals`` index, then prints the store
so the wiring can be checked before real server logic is added.
"""

from langchain_elasticsearch import ElasticsearchStore
from langchain_community.llms import Ollama
from langchain_community.embeddings import OllamaEmbeddings

# Service endpoints.
# NOTE(review): Elasticsearch is assumed on localhost, while Ollama is
# addressed by an in-cluster service name — confirm both for deployment.
es_url = "http://localhost:9200"
base_url = "http://ollama-light-service:11434"
model_name = "qwen2.5:1.5b"

# Fixed: was print(f"Starting server") — an f-string with no placeholders
# (ruff F541); output is byte-identical without the prefix.
print("Starting server")

# Completion model (currently unused below, kept for the in-progress server
# logic) and the embedding model used to vectorize indexed documents.
llm = Ollama(base_url=base_url, model=model_name)
embeddings = OllamaEmbeddings(base_url=base_url, model="nomic-embed-text")

# Vector store over the "avap_manuals" index; embeddings are produced by the
# Ollama embedding client above.
vector_store = ElasticsearchStore(
    es_url=es_url, index_name="avap_manuals", embedding=embeddings
)
print(vector_store)
|
||||||
Loading…
Reference in New Issue