chore(infra): cleanup bashscript and respect .envrc [skip ci]

Signed-off-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com>
This commit is contained in:
Aaron Pham
2024-06-08 01:08:43 -04:00
parent 82449f42d3
commit 3c7362289a
6 changed files with 30 additions and 14 deletions

1
.envrc.template Normal file
View File

@@ -0,0 +1 @@
PAPERSPACE_API_KEY=$(bw get notes paperspace-api)

1
.gitignore vendored
View File

@@ -156,3 +156,4 @@ private/
.vscode/*
node_modules
.eslintcache
.envrc

7
all.sh
View File

@@ -1,5 +1,12 @@
#!/usr/bin/env bash
GIT_ROOT=$(git rev-parse --show-toplevel)
cd "$GIT_ROOT" || exit 1
if command -v direnv >/dev/null 2>&1 && [ -f "$GIT_ROOT/.envrc" ]; then
direnv allow
fi
printf "Running update-mypy.py\n"
python ./tools/update-mypy.py
printf "Running update-config-stubs.py\n"

View File

@@ -17,10 +17,15 @@ if [ ! -f "$GIT_ROOT/.python-version" ]; then
ln -s "$GIT_ROOT/.python-version-default" "$GIT_ROOT/.python-version"
fi
if [ ! -f "$GIT_ROOT/.envrc" ]; then
echo "copy .envrc.template to .envrc"
cp "$GIT_ROOT/.envrc.template" "$GIT_ROOT/.envrc"
fi
# check if there is a $GIT_ROOT/.venv directory, if not, create it
if [ ! -d "$GIT_ROOT/.venv" ]; then
# get the python version from $GIT_ROOT/.python-version-default
uv venv -p $(cat "$GIT_ROOT/.python-version-default") "$GIT_ROOT/.venv"
uv venv -p "$(cat "$GIT_ROOT/.python-version-default")" "$GIT_ROOT/.venv"
fi
. "$GIT_ROOT/.venv/bin/activate"
@@ -32,14 +37,14 @@ print_usage() {
# Parse command line arguments
while [[ "$#" -gt 0 ]]; do
case $1 in
--help | -h)
print_usage
exit 0
;;
*)
print_usage
exit 1
;;
--help | -h)
print_usage
exit 0
;;
*)
print_usage
exit 1
;;
esac
shift
done
@@ -49,7 +54,7 @@ PRERELEASE=${PRERELEASE:-false}
ARGS=()
[[ "${PRERELEASE}" == "true" ]] && ARGS+=("--prerelease=allow")
uv pip install "${ARGS[@]}" --editable "$GIT_ROOT/openllm-python"
uv pip install "${ARGS[@]}" --editable "$GIT_ROOT/openllm-python" || true
uv pip install "${ARGS[@]}" --editable "$GIT_ROOT/openllm-client"
uv pip install "${ARGS[@]}" --editable "$GIT_ROOT/openllm-core"

View File

@@ -773,8 +773,7 @@ OpenLLM is not just a standalone product; it's a building block designed to
integrate with other powerful tools easily. We currently offer integration with
[OpenAI's Compatible Endpoints](https://platform.openai.com/docs/api-reference/completions/object),
[LlamaIndex](https://www.llamaindex.ai/),
[LangChain](https://github.com/hwchase17/langchain), and
[Transformers Agents](https://huggingface.co/docs/transformers/transformers_agents).
[LangChain](https://github.com/hwchase17/langchain).
### OpenAI Compatible Endpoints
@@ -791,7 +790,7 @@ completions = client.chat.completions.create(
)
```
The compatible endpoints supports `/completions`, `/chat/completions`, and `/models`
The compatible endpoints support `/chat/completions` and `/models`
> [!NOTE]
> You can find out OpenAI example clients under the
@@ -806,7 +805,8 @@ from llama_index.llms.openllm import OpenLLMAPI
```
> [!NOTE]
> All synchronous and asynchronous API from `llama_index.llms.LLM` are supported.
> All synchronous and asynchronous APIs from `llama_index.llms.OpenLLMAPI` are supported.
> Make sure to install `llama-index-llms-openllm` to use the supported class.
### [LangChain](https://python.langchain.com/docs/integrations/llms/openllm/)

View File

@@ -1,2 +1,4 @@
ipython
jupyter
tomlkit
ghapi