chore: add annotations for attrs and eval correct annotation type

eval stays here until I find a different way to parse type strings back into
Python types

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>
Author: Aaron
Date:   2023-05-31 17:11:20 -07:00
parent e910b6d3bd
commit c33a90a0cc

11 changed files with 646 additions and 17 deletions


@@ -61,8 +61,6 @@ if t.TYPE_CHECKING:
import transformers
from transformers.generation.beam_constraints import Constraint
from ._types import Attribute
P = t.ParamSpec("P")
F = t.Callable[P, t.Any]
@@ -513,12 +511,10 @@ class LLMConfig:
         annotation = anns.get(key, None)
         if annotation is not None:
-            try:
-                annotation = orjson.loads(annotation)
-            except orjson.JSONDecodeError as err:
-                raise ValueError(f"Failed to load from environment variables: {err}")
+            # NOTE: eval is dangerous, but we don't provide any specific imports here.
+            annotation = eval(annotation, {}, {})
-        attribute: attr.Attribute[t.Any] = t.cast(Attribute, attr.Attribute).from_counting_attr(
+        attribute: attr.Attribute[t.Any] = attr.Attribute.from_counting_attr(
             key, dantic.Field(default, env=env_key, alias=key), type=annotation
         )
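
For context, a minimal sketch (not the project's code; the variable name is
hypothetical) of what the new eval path does: the annotation comes back from
the environment as a plain string such as "int", and eval with empty globals
and locals turns it back into the corresponding Python type object.

    # Sketch of the eval-based annotation parsing; env_value is hypothetical.
    env_value = "int"  # e.g. what anns.get(key) could return for an int field

    # Empty globals/locals keep the namespace bare, but eval is still unsafe
    # on untrusted input -- hence the NOTE in the diff above.
    annotation = eval(env_value, {}, {})
    assert annotation is int

Note that this only resolves builtins (eval injects __builtins__ even into an
empty globals dict), so something like "t.Dict[str, int]" would raise a
NameError here, which lines up with the commit message: anything fancier needs
a real type parser.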


@@ -20,9 +20,6 @@ from __future__ import annotations
import typing as t
from attr import Attribute as _Attribute
from attr._make import _CountingAttr
if not t.TYPE_CHECKING:
raise RuntimeError(f"{__name__} should not be imported during runtime")
@@ -34,10 +31,3 @@ LLMModel = transformers.PreTrainedModel | transformers.TFPreTrainedModel | trans
LLMTokenizer = (
transformers.PreTrainedTokenizer | transformers.PreTrainedTokenizerFast | transformers.PreTrainedTokenizerBase
)
# NOTE: We are using attr.Attribute as a internal API for attrs
class Attribute(_Attribute):
@classmethod
def from_counting_attr(cls, name: str, ca: _CountingAttr, type: type[t.Any] = None):
...
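
With the stub gone, the config code calls attrs' internal classmethod
directly. A minimal sketch of that call under the same assumptions the diff
makes (Attribute.from_counting_attr is an internal attrs API whose signature
can change between releases; the field name below is made up):

    import attr

    # attr.ib() returns attrs' internal _CountingAttr, the same kind of
    # object dantic.Field(...) presumably produces in the diff above.
    ca = attr.ib(default=42)

    # Internal attrs API: builds a full Attribute from the counting attr,
    # attaching the evaluated annotation as its type.
    field = attr.Attribute.from_counting_attr("temperature", ca, type=int)

    print(field.name, field.type, field.default)  # temperature <class 'int'> 42

Dropping the t.cast presumably trades the deleted stub's precise signature for
attrs' own typing of this internal API, which is looser but no longer needs an
import from a module that refuses to load at runtime.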