From bce273ad47a604b1afcd923b904edcb676a1ec96 Mon Sep 17 00:00:00 2001
From: Aaron Pham <29749331+aarnphm@users.noreply.github.com>
Date: Fri, 17 Nov 2023 09:51:17 -0500
Subject: [PATCH] fix(env): correct format environment on docker (#680)

* fix(env): correct format environment on docker

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>

* docs: changelog

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>

---------

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>
---
 changelog.d/680.fix.md                        | 1 +
 openllm-python/src/openllm/bundle/_package.py | 3 +++
 openllm-python/src/openllm_cli/entrypoint.py  | 2 +-
 3 files changed, 5 insertions(+), 1 deletion(-)
 create mode 100644 changelog.d/680.fix.md

diff --git a/changelog.d/680.fix.md b/changelog.d/680.fix.md
new file mode 100644
index 00000000..b18ad22a
--- /dev/null
+++ b/changelog.d/680.fix.md
@@ -0,0 +1 @@
+Fixes an environment generation bug that caused the CONFIG envvar to be invalid JSON
diff --git a/openllm-python/src/openllm/bundle/_package.py b/openllm-python/src/openllm/bundle/_package.py
index df5e7d51..bed265b8 100644
--- a/openllm-python/src/openllm/bundle/_package.py
+++ b/openllm-python/src/openllm/bundle/_package.py
@@ -95,6 +95,9 @@ def construct_docker_options(
     llm._prompt_template,
     use_current_env=False,
   )
+  # XXX: We need to quote this so that the envvar in the container is recognized as valid JSON
+  environ['OPENLLM_CONFIG'] = f"'{environ['OPENLLM_CONFIG']}'"
+  environ.pop('BENTOML_HOME', None)  # NOTE: irrelevant in container
   return DockerOptions(
     base_image=oci.RefResolver.construct_base_image(container_registry, container_version_strategy),
     env=environ,
diff --git a/openllm-python/src/openllm_cli/entrypoint.py b/openllm-python/src/openllm_cli/entrypoint.py
index f30f25e3..91991b92 100644
--- a/openllm-python/src/openllm_cli/entrypoint.py
+++ b/openllm-python/src/openllm_cli/entrypoint.py
@@ -646,7 +646,7 @@ def process_environ(
     'OPENLLM_ADAPTER_MAP': orjson.dumps(adapter_map).decode(),
     'OPENLLM_SERIALIZATION': serialisation,
     'OPENLLM_BACKEND': llm.__llm_backend__,
-    'OPENLLM_CONFIG': f'"""{config.model_dump_json(flatten=True).decode()}"""',
+    'OPENLLM_CONFIG': config.model_dump_json(flatten=True).decode(),
     'TORCH_DTYPE': str(llm._torch_dtype).split('.')[-1],
     'TRUST_REMOTE_CODE': str(llm.trust_remote_code),
   }
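
For context beyond the diff itself: the root cause is that process_environ() wrapped the serialized config in literal triple quotes, which is no longer valid JSON, while the Dockerfile ENV instruction separately needs the value shell-quoted to survive word-splitting. Below is a minimal, self-contained Python sketch of both points. It is illustrative only: the config dict is made up, shlex.quote is shown as an equivalent way to produce the quoted form (the patch itself wraps the value in single quotes via an f-string), and none of these names are OpenLLM APIs.

import json
import shlex

# Illustrative stand-in for config.model_dump_json(flatten=True).decode();
# the dict contents here are made up for the demo.
raw = json.dumps({'temperature': 0.9, 'max_new_tokens': 256})

# Before the patch: the value was wrapped in literal triple quotes, so the
# container received something that no longer parses as JSON.
old_value = f'"""{raw}"""'
try:
    json.loads(old_value)
except json.JSONDecodeError as exc:
    print(f'old format is rejected: {exc}')

# After the patch: the raw JSON is kept in the environment map, and only the
# Docker layer adds single quotes so the rendered ENV instruction survives
# shell word-splitting, e.g. ENV OPENLLM_CONFIG='{"temperature": 0.9, ...}'.
print(f'ENV OPENLLM_CONFIG={shlex.quote(raw)}')
print(json.loads(raw))  # round-trips cleanly inside the container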