File tree Expand file tree Collapse file tree 11 files changed +140
-69
lines changed
integrations/hugging-face Expand file tree Collapse file tree 11 files changed +140
-69
# CI workflow: run the testable notebooks against a matrix of
# self-hosted Elasticsearch versions started as a service container.
name: tests
on:
  push:
    branches:
      - main
    paths:
      - notebooks/**
  pull_request:
    branches:
      - main
    paths:
      - notebooks/**
jobs:
  notebook-tests:
    strategy:
      matrix:
        es_stack:
          - 8.11.4
          - 8.12.0
    runs-on: ubuntu-latest
    services:
      elasticsearch:
        image: docker.elastic.co/elasticsearch/elasticsearch:${{ matrix.es_stack }}
        env:
          # single node, no TLS/auth, trial license so ML features work
          discovery.type: single-node
          xpack.security.enabled: false
          xpack.security.http.ssl.enabled: false
          xpack.license.self_generated.type: trial
        ports:
          - 9200:9200
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup python
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'
      - name: Setup nbtest
        run: make nbtest
      # give Elasticsearch time to come up; failures here are tolerated
      # because this step only warms up the stack before the real run
      - name: Warm up
        continue-on-error: true
        run: sleep 30 && PATCH_ES=1 ELASTIC_CLOUD_ID=foo ELASTIC_API_KEY=bar bin/nbtest notebooks/search/00-quick-start.ipynb
      - name: Run tests
        run: PATCH_ES=1 FORCE_COLOR=1 make -s test
# this is the list of notebooks that are integrated with the testing framework
# (lazy `=` on purpose: the discovery script only runs when a target that
# expands $(NOTEBOOKS) is actually invoked)
NOTEBOOKS = $(shell bin/find-notebooks-to-test.sh)

.PHONY: install pre-commit nbtest test notebooks

# default goal: make sure nbtest is installed, then run every notebook
test: nbtest notebooks

# run all non-exempt notebooks through nbtest
notebooks:
	bin/nbtest $(NOTEBOOKS)

# developer setup: pre-commit hooks plus the nbtest runner
install: pre-commit nbtest

pre-commit:
	python3 -m venv .venv
	.venv/bin/pip install -qqq -r requirements-dev.txt
	.venv/bin/pre-commit install

nbtest:
	python3 -m venv .venv
	.venv/bin/pip install -qqq elastic-nbtest
#!/bin/bash
# Print the notebooks that should be run by the test framework: every
# *.ipynb under notebooks/, minus generated/checkpoint files and the
# exempt list below.

# add any notebooks that are currently not testable to the exempt list
EXEMPT_NOTEBOOKS=(
    "notebooks/search/07-inference.ipynb"
    "notebooks/search/08-learning-to-rank.ipynb"
    "notebooks/langchain/langchain-vector-store.ipynb"
    "notebooks/langchain/self-query-retriever-examples/chatbot-example.ipynb"
    "notebooks/langchain/self-query-retriever-examples/chatbot-with-bm25-only-example.ipynb"
    "notebooks/langchain/self-query-retriever-examples/langchain-self-query-retriever.ipynb"
    "notebooks/langchain/multi-query-retriever-examples/chatbot-with-multi-query-retriever.ipynb"
    "notebooks/langchain/multi-query-retriever-examples/langchain-multi-query-retriever.ipynb"
    "notebooks/generative-ai/question-answering.ipynb"
    "notebooks/generative-ai/chatbot.ipynb"
    "notebooks/integrations/amazon-bedrock/langchain-qa-example.ipynb"
    "notebooks/integrations/llama-index/intro.ipynb"
    "notebooks/integrations/gemini/vector-search-gemini-elastic.ipynb"
    "notebooks/integrations/gemini/qa-langchain-gemini-elasticsearch.ipynb"
    "notebooks/integrations/openai/openai-KNN-RAG.ipynb"
)

ALL_NOTEBOOKS=$(find notebooks -name "*.ipynb" | grep -v "_nbtest" | grep -v ".ipynb_checkpoints" | sort)
for notebook in $ALL_NOTEBOOKS; do
    # compare against each exempt entry exactly: the previous unanchored
    # `=~` test was a substring match, which would also silently skip any
    # notebook whose path happens to be contained inside an exempt path
    exempt=no
    for candidate in "${EXEMPT_NOTEBOOKS[@]}"; do
        if [[ "$notebook" == "$candidate" ]]; then
            exempt=yes
            break
        fi
    done
    if [[ "$exempt" == no ]]; then
        echo "$notebook"
    fi
done
import os
import sys


def patch_elasticsearch():
    """Replace this stub 'elasticsearch' module with the real package, then
    monkey-patch ``Elasticsearch.__init__`` so that any cloud connection
    arguments are ignored and a locally hosted instance is used instead.

    This module shadows the real ``elasticsearch`` package via PYTHONPATH;
    it is imported in its place, un-shadows itself, and patches the real
    client class. Intended only for the notebook test harness.
    """
    # preserve the original import path so it can be restored afterwards
    saved_path = sys.path.copy()

    # remove the sys.path entry that made Python find this stub module
    for entry in sys.path:
        # absolutize relative entries for the comparison, but remember the
        # original string: the previous version reassigned the loop variable
        # and then called sys.path.remove() with the absolutized path, which
        # raised ValueError whenever the matching entry was relative
        candidate = entry
        if not candidate.startswith('/'):
            candidate = os.path.join(os.getcwd(), candidate)
        if __file__ == os.path.join(candidate, 'elasticsearch.py'):
            sys.path.remove(entry)
            break

    # forget this stub module, and import the real package instead
    del sys.modules['elasticsearch']
    import elasticsearch

    # restore the import path
    sys.path = saved_path

    # preserve the original Elasticsearch.__init__ method
    orig_es_init = elasticsearch.Elasticsearch.__init__

    # patched version of Elasticsearch.__init__ that connects to self-hosted
    # regardless of connection arguments given
    def patched_es_init(self, *args, **kwargs):
        # sanity-check the placeholder credentials exported by the harness
        if 'cloud_id' in kwargs:
            assert kwargs['cloud_id'] == 'foo'
        if 'api_key' in kwargs:
            assert kwargs['api_key'] == 'bar'
        return orig_es_init(self, 'http://localhost:9200')

    # patch Elasticsearch.__init__
    elasticsearch.Elasticsearch.__init__ = patched_es_init


patch_elasticsearch()
del patch_elasticsearch
44if [[ ! -f $SCRIPT_DIR /../.venv/bin/nbtest ]]; then
55 make nbtest
66fi
7+
8+ if [[ " $PATCH_ES " != " " ]]; then
9+ # here we do some Python dark magic to patch the elasticsearch package to
10+ # connect to a locally hosted instance in spite of connection arguments
11+ # given
12+ export ELASTIC_CLOUD_ID=foo
13+ export ELASTIC_API_KEY=bar
14+ export PYTHONPATH=$SCRIPT_DIR /mocks
15+
16+ # ensure elasticsearch is installed so that it can be patched
17+ $SCRIPT_DIR /../.venv/bin/pip install -qqq elasticsearch
18+ fi
19+
720source $SCRIPT_DIR /../.venv/bin/activate
821$SCRIPT_DIR /../.venv/bin/nbtest $*
Load Diff This file was deleted.
Original file line number Diff line number Diff line change 1+ masks :
2+ - ' Score: [0-9]+\.[0-9][0-9]*'
Original file line number Diff line number Diff line change 4747 },
4848 "outputs" : [],
4949 "source" : [
50- " !python3 -m pip -qU install sentence-transformers eland elasticsearch transformers"
50+ " !python3 -m pip install sentence-transformers eland elasticsearch transformers"
5151 ]
5252 },
5353 {
6060 " from elasticsearch import Elasticsearch\n " ,
6161 " from getpass import getpass\n " ,
6262 " from urllib.request import urlopen\n " ,
63- " import json"
63+ " import json\n " ,
64+ " from time import sleep"
6465 ]
6566 },
6667 {
111112 "metadata" : {},
112113 "outputs" : [],
113114 "source" : [
114- " !eland_import_hub_model --cloud-id $ELASTIC_CLOUD_ID --hub-model-id sentence-transformers/all-MiniLM-L6-v2 --task-type text_embedding --es-api-key $ELASTIC_API_KEY --start"
115+ " !eland_import_hub_model --cloud-id $ELASTIC_CLOUD_ID --hub-model-id sentence-transformers/all-MiniLM-L6-v2 --task-type text_embedding --es-api-key $ELASTIC_API_KEY --start --clear-previous "
115116 ]
116117 },
117118 {
301302 " for title in titles:\n " ,
302303 " actions.append({\" index\" : {\" _index\" : \" blogs\" }})\n " ,
303304 " actions.append(title)\n " ,
304- " es.bulk(index=\" blogs\" , operations=actions)"
305+ " es.bulk(index=\" blogs\" , operations=actions)\n " ,
306+ " sleep(5)"
305307 ]
306308 },
307309 {
423425 "name" : " python" ,
424426 "nbconvert_exporter" : " python" ,
425427 "pygments_lexer" : " ipython3" ,
426- "version" : " 3.11.6 "
428+ "version" : " 3.10.13 "
427429 },
428430 "vscode" : {
429431 "interpreter" : {
Load Diff This file was deleted.
Load Diff This file was deleted.
You can’t perform that action at this time.
0 commit comments