-
Notifications
You must be signed in to change notification settings - Fork 1
/
Makefile
232 lines (189 loc) · 6.89 KB
/
Makefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
# Recipes rely on bash features (e.g. [[ ]] in `release`), so pin the shell.
SHELL := /bin/bash
# Delete a half-written target when its recipe fails, so a broken file is
# never mistaken for an up-to-date one on the next run.
.DELETE_ON_ERROR:
.PHONY: all format lint test tests test_watch integration_tests docker_tests help extended_tests
# Poetry extras/groups used by `init` and the poetry.lock rule; override on
# the command line, e.g. `make init POETRY_EXTRA=`.
POETRY_EXTRA ?= --all-extras
POETRY_WITH ?= dev,lint,test,codespell
# Default target executed when no arguments are given to make.
all: help
# Define a variable for the test file path (override: `make test TEST_FILE=...`).
TEST_FILE ?= tests/unit_tests/
# Stamp rule: execute the RAG notebook only when it has changed since the
# last run; `integration_tests` depends on this stamp.
.make-rag_vectorstore.ipynb: docs/integrations/vectorstores/rag_vectorstore.ipynb
	@poetry run jupyter execute $<
	@touch $@
# Run the integration test suite; the stamp prerequisite ensures the RAG
# notebook has been executed first.
integration_tests:.make-rag_vectorstore.ipynb
	poetry run pytest tests/integration_tests
# Run the unit tests (`test` and `tests` are aliases; path from TEST_FILE).
test tests:
	poetry run pytest -v $(TEST_FILE)
# Re-run the unit tests automatically whenever a file changes (pytest-watch).
test_watch:
	poetry run ptw --now . -- tests/unit_tests
######################
# LINTING AND FORMATTING
######################
# Define a variable for Python and notebook files.
PYTHON_FILES=.
# `lint`/`format` operate on the whole tree; the *_diff variants restrict the
# file set to .py/.ipynb files changed relative to master (paths relative to
# libs/experimental).
lint format: PYTHON_FILES=.
lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/experimental --name-only --diff-filter=d master | grep -E '\.py$$|\.ipynb$$')
# Static checks: mypy (types), black --check (formatting), ruff (lint rules).
# NOTE(review): `ruff .` ignores PYTHON_FILES, so lint_diff still ruff-checks
# the whole tree — confirm whether that is intended.
lint lint_diff:
	poetry run mypy $(PYTHON_FILES)
	poetry run black $(PYTHON_FILES) --check
	poetry run ruff .
# Apply formatting and fix import ordering (ruff rule group I = isort).
format format_diff:
	poetry run black $(PYTHON_FILES)
	poetry run ruff --select I --fix $(PYTHON_FILES)
# Spell-check the project; codespell configuration lives in pyproject.toml.
spell_check:
	poetry run codespell --toml pyproject.toml
# Spell-check and write the corrections in place (-w).
spell_fix:
	poetry run codespell --toml pyproject.toml -w
######################
# DOCUMENTATION
######################
# Remove documentation build artifacts, the distribution dir, stamp files
# and tool caches. `-prune` stops find from descending into a directory it
# is about to delete (the unpatched form errored and needed `|| true`).
clean: docs_clean api_docs_clean
	@find . -type d -name ".ipynb_checkpoints" -prune -exec rm -rf {} +
	@rm -Rf dist/ .make-* .mypy_cache .pytest_cache .ruff_cache
# Build the documentation via the local build script.
docs_build:
	docs/.local_build.sh
# Remove the generated documentation tree.
docs_clean:
	rm -rf docs/_dist
# Check the built docs for dead links (node_modules excluded).
docs_linkcheck:
	poetry run linkchecker docs/_dist/docs_skeleton/ --ignore-url node_modules
# API-reference build/clean are currently disabled; the recipe lines are kept
# as shell comments for reference.
api_docs_build:
	# poetry run python docs/api_reference/create_api_rst.py
	# cd docs/api_reference && poetry run make html
api_docs_clean:
	# rm -f docs/api_reference/api_reference.rst
	# cd docs/api_reference && poetry run make clean
# Check the built API reference for dead links.
api_docs_linkcheck:
	poetry run linkchecker docs/api_reference/_build/html/index.html
######################
# HELP
######################
# Print a short description of every user-facing target.
help:
	@echo '----'
	@echo 'format                       - run code formatters'
	@echo 'lint                         - run linters'
	@echo 'test                         - run unit tests'
	@echo 'tests                        - run unit tests'
	@echo 'test TEST_FILE=<test_file>   - run all tests in file'
	@echo 'test_watch                   - run unit tests in watch mode'
	@echo 'integration_tests            - run integration tests (runs the RAG notebook first)'
	@echo 'clean                        - run docs_clean and api_docs_clean'
	@echo 'docs_build                   - build the documentation'
	@echo 'docs_clean                   - clean the documentation build artifacts'
	@echo 'docs_linkcheck               - run linkchecker on the documentation'
	@echo 'api_docs_build               - build the API Reference documentation'
	@echo 'api_docs_clean               - clean the API Reference documentation build artifacts'
	@echo 'api_docs_linkcheck           - run linkchecker on the API Reference documentation'
	@echo 'spell_check                  - run codespell on the project'
	@echo 'spell_fix                    - run codespell on the project and fix the errors'
	@echo 'dist                         - build the distribution (poetry build)'
	@echo 'test-twine                   - publish the distribution on test.pypi.org'
	@echo 'release                      - validate, test and publish on pypi.org'
	@echo 'lock                         - refresh poetry.lock from pyproject.toml'
	@echo 'jupyter                      - start jupyter lab'
	@echo 'validate                     - run format, lint, spell_check and test'
	@echo 'init                         - one-time developer environment setup'
# Build the source/wheel distribution with poetry.
.PHONY: dist
dist:
	poetry build
# ---------------------------------------------------------------------------------------
# Snippet to test publishing a distribution on test.pypi.org.
# The ifeq/else/endif is evaluated at make parse time: when OFFLINE=True the
# rule only prints an error instead of uploading.
.PHONY: test-twine
## Publish distribution on test.pypi.org
# NOTE(review): $(VALIDATE_VENV), $(red) and $(normal) are not defined in this
# file — presumably set in an included fragment or empty; verify.
# NOTE(review): `twine upload --sign` is deprecated/removed in recent twine
# releases — confirm the pinned twine version still supports it.
test-twine: dist
ifeq ($(OFFLINE),True)
	@echo -e "$(red)Can not test-twine in offline mode$(normal)"
else
	@$(VALIDATE_VENV)
	rm -f dist/*.asc
	twine upload --sign --repository-url https://test.pypi.org/legacy/ \
		$(shell find dist -type f \( -name "*.whl" -or -name '*.gz' \) -and ! -iname "*dev*" )
endif
# ---------------------------------------------------------------------------------------
# Snippet to publish the release on pypi.org.
.PHONY: release
## Publish distribution on pypi.org
# Runs the full validation and integration suite, rebuilds from a clean tree,
# then refuses to upload if any dev-versioned artifact is present (i.e. the
# release tag is missing). Skipped entirely when OFFLINE=True.
release: validate integration_tests clean dist
ifeq ($(OFFLINE),True)
	@echo -e "$(red)Can not release in offline mode$(normal)"
else
	@$(VALIDATE_VENV)
	# Guard: a *.dev* file in dist/ means no release tag was set in git.
	[[ $$( find dist -name "*.dev*" | wc -l ) == 0 ]] || \
		( echo -e "$(red)Add a tag version in GIT before release$(normal)" \
		; exit 1 )
	rm -f dist/*.asc
	echo "Enter Pypi password"
	twine upload \
		$(shell find dist -type f \( -name "*.whl" -or -name '*.gz' \) -and ! -iname "*dev*" )
endif
# Location of the langchain checkout to sync into, and source/destination
# package names used by the copy-and-rename below.
LANGCHAIN_HOME=../langchain
TARGET:=core
SRC_PACKAGE=langchain_rag
DST_PACKAGE=langchain_core
SRC_MODULE:=langchain-rag
DST_MODULE:=core
# Copy this project's package, tests and docs into a scratch dir, rewrite
# every occurrence of the source package/module name to the destination
# name, then install the result into $(LANGCHAIN_HOME).
define _push_sync
@$(eval TARGET=$(TARGET))
@$(eval SRC_PACKAGE=$(SRC_PACKAGE))
@$(eval DST_PACKAGE=$(DST_PACKAGE))
@$(eval WORK_DIR=$(shell mktemp -d --suffix ".rsync"))
@mkdir -p "${WORK_DIR}/libs/${TARGET}"
@mkdir -p "${WORK_DIR}/docs/docs"
@echo Copy and patch $(SRC_PACKAGE) to $(DST_PACKAGE) in $(LANGCHAIN_HOME)
@( \
  cd $(SRC_PACKAGE)/ ; \
  rsync -a \
    --exclude ".*" \
    --exclude __pycache__ \
    --exclude __init__.py \
    . "${WORK_DIR}/libs/${TARGET}/$(DST_PACKAGE)" ; \
)
@( \
  cd tests/ ; \
  rsync -a \
    --exclude ".*" \
    --exclude __pycache__ \
    --exclude __init__.py \
    . "${WORK_DIR}/libs/${TARGET}/tests" ; \
)
@( \
  cd docs/ ; \
  rsync -a \
    --exclude ".*" \
    . "${WORK_DIR}/docs/docs" ; \
)
@find '${WORK_DIR}' -type f -a \
  -exec sed -i "s/${SRC_PACKAGE}/${DST_PACKAGE}/g" {} ';' \
  -exec sed -i "s/pip install -q '$(SRC_MODULE)'/pip install -q '$(DST_MODULE)'/g" {} ';'
#@echo "${WORK_DIR}/libs"
@cp -R "${WORK_DIR}/libs" "${WORK_DIR}/docs" $(LANGCHAIN_HOME)/
@rm -Rf '${WORK_DIR}'
endef
# Sync this package into the langchain checkout (see _push_sync above).
push-sync:
	$(call _push_sync)
#pull-sync:
# cp -rf $(TARGET)/langchain_experimental/chains/qa_with_references/ \
# langchain_qa_with_references/chains/
# cp -f $(TARGET)/langchain_experimental/chains/__init__.py \
# langchain_qa_with_references/chains/
# cp -rf $(TARGET)/langchain_experimental/chains/qa_with_references_and_verbatims/ \
# langchain_qa_with_references/chains/
# cp -rf $(TARGET)/tests/unit_tests/chains/ \
# tests/unit_tests/
# cp $(TARGET)/docs/qa_with_reference*.ipynb .
# find . -type f \( -name '*.py' -or -name '*.ipynb' \) | xargs sed -i 's/langchain_experimental/langchain_qa_with_references/g'
# find . -type f -name '*.ipynb' | xargs sed -i 's/langchain\([_-]\)experimental/langchain\1qa_with_references/g'
# Regenerate the lock file whenever pyproject.toml changes, stage it for
# commit, and reinstall the environment with the configured extras/groups.
poetry.lock: pyproject.toml
	poetry lock
	git add poetry.lock
	poetry install $(POETRY_EXTRA) --with $(POETRY_WITH)
# `lock` and `jupyter` are commands, not files — declare them phony so a
# stray file with either name cannot break them.
.PHONY: lock jupyter
## Refresh lock
lock: poetry.lock
## Start jupyter
jupyter:
	poetry run jupyter lab
# Export the demo notebook to a Python script. $(CURDIR) is make's own
# absolute cwd; $(PWD) is an environment variable that may be stale or unset.
demo.py: docs/integrations/vectorstores/rag_vectorstore.ipynb
	poetry run jupyter nbconvert --to python $< --output $(CURDIR)/$@
# `validate` and `init` are commands, not files — declare them phony so a
# file with either name cannot shadow them.
.PHONY: validate init
## Validate the code
# Full pre-commit gate: fresh lock, formatting, linting, spelling, unit tests.
validate: poetry.lock format lint spell_check test
# One-time developer setup: poetry plugins and config, a synced environment
# install, pre-commit hooks and git-lfs.
init: poetry.lock
	@poetry self update
	@poetry self add poetry-dotenv-plugin
	@poetry self add poetry-plugin-export
	@poetry self add poetry-git-version-plugin
	@poetry config warnings.export false
	@poetry config virtualenvs.in-project true
	@poetry install --sync $(POETRY_EXTRA) --with $(POETRY_WITH)
	@pre-commit install
	@git lfs install