Mirror of https://github.com/rendercv/rendercv.git (synced 2026-02-07 03:51:26 -05:00)

Commit: refactor and clean the code
@@ -24,9 +24,9 @@ There are two ways of developing RenderCV: locally or with GitHub Codespaces.
5. RenderCV uses three virtual environments:
- `default`: For the development and testing. It contains packages like [Ruff](https://github.com/astral-sh/ruff), [Black](https://github.com/psf/black), [pytest](https://github.com/pytest-dev/pytest) etc.
- `docs`: For building the documentation.


Create the virtual environments with the following commands.


```bash
hatch env create default
hatch env create docs
@@ -38,11 +38,11 @@ There are two ways of developing RenderCV: locally or with GitHub Codespaces.
```bash
hatch shell default
```


```bash
hatch shell docs
```


- Select one of the virtual environments in your Integrated Development Environment (IDE).

=== "Visual Studio Code"
@@ -69,7 +69,7 @@ This is done with [Development containers](https://containers.dev/), and the env

These commands are defined in the [`pyproject.toml`](https://github.com/sinaatalay/rendercv/blob/main/pyproject.toml) file.

- Format the code with [Black](https://github.com/psf/black)
- Format the code with [Black](https://github.com/psf/black) and [Ruff](https://github.com/astral-sh/ruff)
```bash
hatch run format
```
@@ -77,10 +77,6 @@ These commands are defined in the [`pyproject.toml`](https://github.com/sinaatal
```bash
hatch run lint
```
- Sort the imports with [isort](https://github.com/timothycrosley/isort/)
```bash
hatch run sort-imports
```
- Check the types with [Pyright](https://github.com/RobertCraigie/pyright-python)
```bash
hatch run check-types
@@ -45,8 +45,7 @@ def dictionary_to_yaml(dictionary: dict):
yaml_object.indent(mapping=2, sequence=4, offset=2)
with io.StringIO() as string_stream:
yaml_object.dump(dictionary, string_stream)
yaml_string = string_stream.getvalue()
return yaml_string
return string_stream.getvalue()


def define_env(env):
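For context, the hunk above has `dictionary_to_yaml` return `string_stream.getvalue()` directly instead of going through an intermediate `yaml_string` variable. A minimal standalone sketch of the same pattern (the indentation settings mirror the hunk; the sample dictionary is made up for illustration):

```python
import io

import ruamel.yaml


def dictionary_to_yaml(dictionary: dict) -> str:
    # Dump the dictionary into an in-memory text stream and return its contents:
    yaml_object = ruamel.yaml.YAML()
    yaml_object.indent(mapping=2, sequence=4, offset=2)
    with io.StringIO() as string_stream:
        yaml_object.dump(dictionary, string_stream)
        return string_stream.getvalue()


print(dictionary_to_yaml({"cv": {"name": "John Doe", "sections": {"skills": ["Python"]}}}))
```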
@@ -57,7 +56,7 @@ def define_env(env):
# validate the parsed dictionary by creating an instance of SampleEntries:
SampleEntries(**sample_entries)

entries_showcase = dict()
entries_showcase = {}
for entry_name, entry in sample_entries.items():
proper_entry_name = entry_name.replace("_", " ").title().replace(" ", "")
entries_showcase[proper_entry_name] = {
@@ -76,9 +75,9 @@ def define_env(env):

# For theme templates reference docs
themes_path = rendercv_path / "themes"
theme_templates = dict()
theme_templates = {}
for theme in data.available_themes:
theme_templates[theme] = dict()
theme_templates[theme] = {}
for theme_file in themes_path.glob(f"{theme}/*.tex"):
theme_templates[theme][theme_file.stem] = theme_file.read_text()
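Many hunks in this commit replace `dict()` calls with `{}` literals. It is a pure style change, likely motivated by Ruff's flake8-comprehensions rule (C408, "unnecessary dict call"); the literal is also marginally faster because it avoids a global name lookup. A small before/after sketch:

```python
# Before: builds an empty dict through a name lookup and a call.
entries_showcase = dict()
theme_templates = dict()

# After: the literal form is equivalent and what the linter suggests.
entries_showcase = {}
theme_templates = {}

# The same applies when the mapping is filled right away:
theme_templates["classic"] = {}
```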
@@ -155,12 +154,10 @@ def generate_entry_figures():
for entry_type in entry_types:
# Create data model with only one section and one entry
data_model = data.RenderCVDataModel(
**{
"cv": data.CurriculumVitae(
sections={entry_type: [getattr(entries, entry_type)]}
),
"design": design_dictionary,
}
cv=data.CurriculumVitae(
sections={entry_type: [getattr(entries, entry_type)]}
),
design=design_dictionary,
)

# Render
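The `generate_entry_figures` hunk swaps a `**{...}` unpacking for plain keyword arguments when constructing the data model. The two forms are equivalent at runtime; the keyword form is shorter and lets type checkers see the field names. A hedged sketch with stand-in Pydantic models (the real `RenderCVDataModel` and `CurriculumVitae` have more fields than shown here):

```python
import pydantic


class CurriculumVitae(pydantic.BaseModel):
    sections: dict[str, list[str]] = {}


class RenderCVDataModel(pydantic.BaseModel):
    cv: CurriculumVitae
    design: dict = {}


design_dictionary = {"theme": "classic"}

# Before: keyword arguments hidden inside a dict that is immediately unpacked.
data_model = RenderCVDataModel(
    **{
        "cv": CurriculumVitae(sections={"education_entry": ["..."]}),
        "design": design_dictionary,
    }
)

# After: the same call with explicit keyword arguments.
data_model = RenderCVDataModel(
    cv=CurriculumVitae(sections={"education_entry": ["..."]}),
    design=design_dictionary,
)
```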
@@ -221,4 +218,4 @@ def update_index():

if __name__ == "__main__":
generate_entry_figures()
print("Entry figures generated successfully.")
print("Entry figures generated successfully.") # NOQA: T201

@@ -80,4 +80,4 @@ def generate_examples():

if __name__ == "__main__":
generate_examples()
print("Examples generated successfully.")
print("Examples generated successfully.") # NOQA: T201

@@ -15,4 +15,4 @@ def generate_schema():

if __name__ == "__main__":
generate_schema()
print("Schema generated successfully.")
print("Schema generated successfully.") # NOQA: T201
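The three script hunks above keep their `print` calls but tag them with `# NOQA: T201`. T201 is Ruff's flake8-print rule that flags `print` statements in library code; the suppression marks these prints as intentional in command-line scripts. A minimal illustration (the function body is a placeholder):

```python
def generate_schema() -> None:
    # Placeholder for the real work done by the script.
    ...


if __name__ == "__main__":
    generate_schema()
    # The print is intentional in a script entry point, so the linter warning
    # (Ruff rule T201, "print found") is silenced explicitly:
    print("Schema generated successfully.")  # NOQA: T201
```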
@@ -15,8 +15,8 @@ from .commands import (

__all__ = [
"app",
"cli_command_render",
"cli_command_new",
"cli_command_create_theme",
"cli_command_new",
"cli_command_no_args",
"cli_command_render",
]
@@ -4,7 +4,8 @@ to print nice-looking messages to the terminal.
"""

import functools
from typing import Callable, Optional
from collections.abc import Callable
from typing import Optional

import jinja2
import pydantic
@@ -125,8 +126,7 @@ def warn_if_new_version_is_available() -> bool:
f" and the latest version is v{latest_version}."
)
return True
else:
return False
return False


def welcome():
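Both `printer.py` here and `utilities.py` further down move `Callable` from `typing` to `collections.abc`, the replacement Ruff's UP035 rule suggests on modern Python, where `typing.Callable` is a deprecated alias. The same hunk also drops a redundant `else` after `return`. A small sketch of the updated import style in a decorator signature (the decorator and function names below are illustrative, not the module's actual API):

```python
import functools
from collections.abc import Callable
from typing import Optional


def handle_and_print_exceptions(function: Callable) -> Callable:
    """Return a wrapper that catches exceptions and prints them instead of crashing."""

    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        try:
            return function(*args, **kwargs)
        except ValueError as error:
            print(f"Error: {error}")  # stand-in for the real Rich-based printer
            return None

    return wrapper


@handle_and_print_exceptions
def parse_number(text: Optional[str]) -> int:
    return int(text or "0")


print(parse_number("42"))
```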
@@ -257,9 +257,9 @@ def print_validation_errors(exception: pydantic.ValidationError):
location = error_object["loc"]
ctx_object = error_object["ctx"]
if "error" in ctx_object:
error_object = ctx_object["error"]
if hasattr(error_object, "__cause__"):
cause_object = error_object.__cause__
inner_error_object = ctx_object["error"]
if hasattr(inner_error_object, "__cause__"):
cause_object = inner_error_object.__cause__
cause_object_errors = cause_object.errors()
for cause_error_object in cause_object_errors:
# we use [1:] to avoid `entries` location. It is a location for
@@ -274,10 +273,9 @@ def print_validation_errors(exception: pydantic.ValidationError):
# (e.g. avoid stuff like .end_date.literal['present'])
unwanted_locations = ["tagged-union", "list", "literal", "int", "constrained-str"]
for error_object in errors:
location = error_object["loc"]
location = [str(location_element) for location_element in error_object["loc"]]
new_location = [str(location_element) for location_element in location]
for location_element in location:
location_element = str(location_element)
for unwanted_location in unwanted_locations:
if unwanted_location in location_element:
new_location.remove(location_element)
@@ -322,7 +321,7 @@ def print_validation_errors(exception: pydantic.ValidationError):

# If the input is a dictionary or a list (the model itself fails to validate),
# then don't show the input. It looks confusing and it is not helpful.
if isinstance(input, (dict, list)):
if isinstance(input, dict | list):
input = ""

new_error = {
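The `isinstance(input, (dict, list))` → `isinstance(input, dict | list)` change uses PEP 604 union syntax, which `isinstance` accepts on Python 3.10 and later (Ruff's UP038 rule suggests this rewrite). Both spellings behave identically; a self-contained comparison:

```python
def hide_confusing_input(input_value):
    # On Python 3.10+, a union type works directly as the second argument:
    if isinstance(input_value, dict | list):
        return ""
    return input_value


def hide_confusing_input_legacy(input_value):
    # Equivalent pre-3.10 spelling, kept here only for comparison:
    if isinstance(input_value, (dict, list)):
        return ""
    return input_value


assert hide_confusing_input({"a": 1}) == ""
assert hide_confusing_input("keep me") == "keep me"
```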
@@ -10,14 +10,15 @@ import re
import shutil
import time
import urllib.request
from typing import Callable, Optional
from collections.abc import Callable
from typing import Optional

import typer
import watchdog.events
import watchdog.observers

from .. import data, renderer
from . import printer, utilities
from . import printer

def set_or_update_a_value(
@@ -40,10 +41,7 @@ def set_or_update_a_value(

keys = key.split(".")

if sub_dictionary is not None:
updated_dict = sub_dictionary
else:
updated_dict = dictionary
updated_dict = sub_dictionary if sub_dictionary is not None else dictionary

if len(keys) == 1:
# Set the value:
@@ -72,7 +70,7 @@ def set_or_update_a_value(
sub_dictionary = updated_dict[first_key] # type: ignore
else:
# Key does not exist, create a new sub dictionary:
sub_dictionary = dict()
sub_dictionary = {}

updated_sub_dict = set_or_update_a_value(dictionary, key, value, sub_dictionary)
updated_dict[first_key] = updated_sub_dict # type: ignore
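The `set_or_update_a_value` hunk collapses an `if/else` assignment into a conditional expression and creates missing sub-dictionaries with `{}`. A simplified, self-contained sketch of the dotted-key update idea (this illustrates the pattern only; the real function also handles list indices, and the name below is hypothetical):

```python
def set_nested_value(dictionary: dict, key: str, value) -> dict:
    """Set dictionary["a"]["b"] = value when key is "a.b", creating dicts as needed."""
    first_key, _, rest = key.partition(".")
    if not rest:
        dictionary[first_key] = value
        return dictionary

    # Reuse the existing sub-dictionary if there is one, otherwise start a new one:
    sub_dictionary = dictionary.get(first_key)
    sub_dictionary = sub_dictionary if isinstance(sub_dictionary, dict) else {}

    dictionary[first_key] = set_nested_value(sub_dictionary, rest, value)
    return dictionary


settings = {}
set_nested_value(settings, "rendercv_settings.render_command.pdf_path", "CV.pdf")
assert settings == {"rendercv_settings": {"render_command": {"pdf_path": "CV.pdf"}}}
```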
@@ -173,8 +171,7 @@ def get_error_message_and_location_and_value_from_a_custom_error(
match = re.search(pattern, error_string)
if match:
return match.group(1), match.group(2), match.group(3)
else:
return None, None, None
return None, None, None


def copy_templates(
@@ -200,15 +197,14 @@ def copy_templates(

if destination.exists():
return None
else:
# copy the folder but don't include __init__.py:
shutil.copytree(
template_directory,
destination,
ignore=shutil.ignore_patterns("__init__.py", "__pycache__"),
)
# copy the folder but don't include __init__.py:
shutil.copytree(
template_directory,
destination,
ignore=shutil.ignore_patterns("__init__.py", "__pycache__"),
)

return destination
return destination

def parse_render_command_override_arguments(
@@ -223,7 +219,7 @@ def parse_render_command_override_arguments(
Returns:
The key and value pairs.
"""
key_and_values: dict["str", "str"] = dict()
key_and_values: dict[str, str] = {}

# `extra_arguments.args` is a list of arbitrary arguments that haven't been
# specified in `cli_render_command` function's definition. They are used to allow
@@ -234,16 +230,18 @@ def parse_render_command_override_arguments(
# below parses `ctx.args` accordingly.

if len(extra_arguments.args) % 2 != 0:
raise ValueError(
"There is a problem with the extra arguments! Each key should have"
" a corresponding value."
message = (
"There is a problem with the extra arguments! Each key should have a"
" corresponding value."
)
raise ValueError(message)

for i in range(0, len(extra_arguments.args), 2):
key = extra_arguments.args[i]
value = extra_arguments.args[i + 1]
if not key.startswith("--"):
raise ValueError(f"The key ({key}) should start with double dashes!")
message = f"The key ({key}) should start with double dashes!"
raise ValueError(message)

key = key.replace("--", "")

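Several hunks in this file follow the same pattern: build the error text in a `message` variable and pass only that variable to the exception, which satisfies Ruff's EM101/EM102 rules and keeps tracebacks free of long string literals. A condensed sketch of the override-argument parsing shown above using that pattern (simplified to a plain list of strings instead of the Typer context object, so the signature here is an assumption):

```python
def parse_override_arguments(arguments: list[str]) -> dict[str, str]:
    """Parse ["--cv.name", "John Doe", ...] into {"cv.name": "John Doe", ...}."""
    if len(arguments) % 2 != 0:
        message = (
            "There is a problem with the extra arguments! Each key should have a"
            " corresponding value."
        )
        raise ValueError(message)

    key_and_values: dict[str, str] = {}
    for i in range(0, len(arguments), 2):
        key, value = arguments[i], arguments[i + 1]
        if not key.startswith("--"):
            message = f"The key ({key}) should start with double dashes!"
            raise ValueError(message)

        key = key.replace("--", "")
        key_and_values[key] = value

    return key_and_values


assert parse_override_arguments(["--cv.name", "Jane Doe"]) == {"cv.name": "Jane Doe"}
```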
@@ -261,14 +259,12 @@ def get_default_render_command_cli_arguments() -> dict:
|
||||
from .commands import cli_command_render
|
||||
|
||||
sig = inspect.signature(cli_command_render)
|
||||
default_render_command_cli_arguments = {
|
||||
return {
|
||||
k: v.default
|
||||
for k, v in sig.parameters.items()
|
||||
if v.default is not inspect.Parameter.empty
|
||||
}
|
||||
|
||||
return default_render_command_cli_arguments
|
||||
|
||||
|
||||
def update_render_command_settings_of_the_input_file(
|
||||
input_file_as_a_dict: dict,
|
||||
@@ -292,16 +288,17 @@ def update_render_command_settings_of_the_input_file(
|
||||
# field. If the field is the default value, check if it exists in the input file.
|
||||
# If it doesn't exist, add it to the input file. If it exists, don't do anything.
|
||||
if "rendercv_settings" not in input_file_as_a_dict:
|
||||
input_file_as_a_dict["rendercv_settings"] = dict()
|
||||
input_file_as_a_dict["rendercv_settings"] = {}
|
||||
|
||||
if "render_command" not in input_file_as_a_dict["rendercv_settings"]:
|
||||
input_file_as_a_dict["rendercv_settings"]["render_command"] = dict()
|
||||
input_file_as_a_dict["rendercv_settings"]["render_command"] = {}
|
||||
|
||||
render_command_field = input_file_as_a_dict["rendercv_settings"]["render_command"]
|
||||
for key, value in render_command_cli_arguments.items():
|
||||
if value != default_render_command_cli_arguments[key]:
|
||||
render_command_field[key] = value
|
||||
elif key not in render_command_field:
|
||||
if (
|
||||
value != default_render_command_cli_arguments[key]
|
||||
or key not in render_command_field
|
||||
):
|
||||
render_command_field[key] = value
|
||||
|
||||
input_file_as_a_dict["rendercv_settings"]["render_command"] = render_command_field
|
||||
@@ -373,7 +370,7 @@ def run_rendercv_with_printer(
|
||||
)
|
||||
)
|
||||
if render_command_settings.latex_path:
|
||||
utilities.copy_files(
|
||||
copy_files(
|
||||
latex_file_path_in_output_folder,
|
||||
render_command_settings.latex_path,
|
||||
)
|
||||
@@ -387,7 +384,7 @@ def run_rendercv_with_printer(
|
||||
render_command_settings.use_local_latex_command,
|
||||
)
|
||||
if render_command_settings.pdf_path:
|
||||
utilities.copy_files(
|
||||
copy_files(
|
||||
pdf_file_path_in_output_folder,
|
||||
render_command_settings.pdf_path,
|
||||
)
|
||||
@@ -401,7 +398,7 @@ def run_rendercv_with_printer(
|
||||
pdf_file_path_in_output_folder
|
||||
)
|
||||
if render_command_settings.png_path:
|
||||
utilities.copy_files(
|
||||
copy_files(
|
||||
png_file_paths_in_output_folder,
|
||||
render_command_settings.png_path,
|
||||
)
|
||||
@@ -415,7 +412,7 @@ def run_rendercv_with_printer(
|
||||
data_model, output_directory
|
||||
)
|
||||
if render_command_settings.markdown_path:
|
||||
utilities.copy_files(
|
||||
copy_files(
|
||||
markdown_file_path_in_output_folder,
|
||||
render_command_settings.markdown_path,
|
||||
)
|
||||
@@ -431,7 +428,7 @@ def run_rendercv_with_printer(
|
||||
markdown_file_path_in_output_folder
|
||||
)
|
||||
if render_command_settings.html_path:
|
||||
utilities.copy_files(
|
||||
copy_files(
|
||||
html_file_path_in_output_folder,
|
||||
render_command_settings.html_path,
|
||||
)
|
||||
@@ -457,7 +454,7 @@ def run_a_function_if_a_file_changes(file_path: pathlib.Path, function: Callable
super().__init__()
self.function_to_call = function

def on_modified(self, event: watchdog.events.FileModifiedEvent) -> None:
def on_modified(self, _: watchdog.events.FileModifiedEvent) -> None:
printer.information(
"\n\nThe input file has been updated. Re-running RenderCV..."
)
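The watchdog hunk renames the unused `event` parameter to `_`, the conventional name for an argument that the handler signature requires but the body ignores. For readers unfamiliar with the library, a minimal, self-contained sketch of the same watch-and-rerun idea (the handler class name, message, and polling loop are illustrative, not the project's exact code):

```python
import pathlib
import time
from collections.abc import Callable

import watchdog.events
import watchdog.observers


def run_a_function_if_a_file_changes(file_path: pathlib.Path, function: Callable) -> None:
    class EventHandler(watchdog.events.FileSystemEventHandler):
        def on_modified(self, _: watchdog.events.FileModifiedEvent) -> None:
            # The event object is unused, hence the `_` parameter name:
            print("The input file has been updated. Re-running...")
            function()

    observer = watchdog.observers.Observer()
    observer.schedule(EventHandler(), str(file_path.parent), recursive=False)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
```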
@@ -46,31 +46,31 @@ from .reader import (
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"OneLineEntry",
|
||||
"BulletEntry",
|
||||
"EducationEntry",
|
||||
"ExperienceEntry",
|
||||
"PublicationEntry",
|
||||
"NormalEntry",
|
||||
"SocialNetwork",
|
||||
"CurriculumVitae",
|
||||
"EducationEntry",
|
||||
"Entry",
|
||||
"ExperienceEntry",
|
||||
"LocaleCatalog",
|
||||
"NormalEntry",
|
||||
"OneLineEntry",
|
||||
"PublicationEntry",
|
||||
"RenderCVDataModel",
|
||||
"RenderCVSettings",
|
||||
"RenderCommandSettings",
|
||||
"SectionContents",
|
||||
"SocialNetwork",
|
||||
"available_entry_models",
|
||||
"available_entry_type_names",
|
||||
"available_social_networks",
|
||||
"available_theme_options",
|
||||
"available_themes",
|
||||
"create_a_sample_data_model",
|
||||
"create_a_sample_yaml_input_file",
|
||||
"generate_json_schema_file",
|
||||
"generate_json_schema",
|
||||
"read_input_file",
|
||||
"format_date",
|
||||
"Entry",
|
||||
"available_social_networks",
|
||||
"SectionContents",
|
||||
"available_entry_type_names",
|
||||
"available_entry_models",
|
||||
"generate_json_schema",
|
||||
"generate_json_schema_file",
|
||||
"read_a_yaml_file",
|
||||
"read_input_file",
|
||||
"validate_input_dictionary_and_return_the_data_model",
|
||||
"RenderCVSettings",
|
||||
"RenderCommandSettings",
|
||||
]
|
||||
|
||||
@@ -29,9 +29,7 @@ def dictionary_to_yaml(dictionary: dict) -> str:
|
||||
yaml_object.indent(mapping=2, sequence=4, offset=2)
|
||||
with io.StringIO() as string_stream:
|
||||
yaml_object.dump(dictionary, string_stream)
|
||||
yaml_string = string_stream.getvalue()
|
||||
|
||||
return yaml_string
|
||||
return string_stream.getvalue()
|
||||
|
||||
|
||||
def create_a_sample_data_model(
|
||||
@@ -48,10 +46,11 @@ def create_a_sample_data_model(
|
||||
# Check if the theme is valid:
|
||||
if theme not in models.available_theme_options:
|
||||
available_themes_string = ", ".join(models.available_theme_options.keys())
|
||||
raise ValueError(
|
||||
message = (
|
||||
f"The theme should be one of the following: {available_themes_string}!"
|
||||
f' The provided theme is "{theme}".'
|
||||
)
|
||||
raise ValueError(message)
|
||||
|
||||
# read the sample_content.yaml file
|
||||
sample_content = pathlib.Path(__file__).parent / "sample_content.yaml"
|
||||
@@ -135,7 +134,7 @@ def generate_json_schema() -> dict:
|
||||
|
||||
# Loop through $defs and remove docstring descriptions and fix optional
|
||||
# fields
|
||||
for object_name, value in json_schema["$defs"].items():
|
||||
for _, value in json_schema["$defs"].items():
|
||||
# Don't allow additional properties
|
||||
value["additionalProperties"] = False
|
||||
|
||||
@@ -147,7 +146,7 @@ def generate_json_schema() -> dict:
|
||||
null_type_dict = {
|
||||
"type": "null",
|
||||
}
|
||||
for field_name, field in value["properties"].items():
|
||||
for _, field in value["properties"].items():
|
||||
if "anyOf" in field:
|
||||
if null_type_dict in field["anyOf"]:
|
||||
field["anyOf"].remove(null_type_dict)
|
||||
@@ -172,12 +171,10 @@ def generate_json_schema() -> dict:
|
||||
|
||||
return json_schema
|
||||
|
||||
schema = models.RenderCVDataModel.model_json_schema(
|
||||
return models.RenderCVDataModel.model_json_schema(
|
||||
schema_generator=RenderCVSchemaGenerator
|
||||
)
|
||||
|
||||
return schema
|
||||
|
||||
|
||||
def generate_json_schema_file(json_schema_path: pathlib.Path):
|
||||
"""Generate the JSON schema of RenderCV and save it to a file.
|
||||
|
||||
@@ -46,24 +46,24 @@ from .rendercv_data_model import RenderCVDataModel
|
||||
from .rendercv_settings import RenderCommandSettings, RenderCVSettings
|
||||
|
||||
__all__ = [
|
||||
"OneLineEntry",
|
||||
"BulletEntry",
|
||||
"EducationEntry",
|
||||
"ExperienceEntry",
|
||||
"PublicationEntry",
|
||||
"NormalEntry",
|
||||
"SocialNetwork",
|
||||
"CurriculumVitae",
|
||||
"LocaleCatalog",
|
||||
"RenderCVDataModel",
|
||||
"available_theme_options",
|
||||
"format_date",
|
||||
"EducationEntry",
|
||||
"Entry",
|
||||
"available_social_networks",
|
||||
"SectionContents",
|
||||
"available_themes",
|
||||
"available_entry_type_names",
|
||||
"ExperienceEntry",
|
||||
"LocaleCatalog",
|
||||
"NormalEntry",
|
||||
"OneLineEntry",
|
||||
"PublicationEntry",
|
||||
"RenderCVDataModel",
|
||||
"RenderCVSettings",
|
||||
"RenderCommandSettings",
|
||||
"SectionContents",
|
||||
"SocialNetwork",
|
||||
"available_entry_models",
|
||||
"available_entry_type_names",
|
||||
"available_social_networks",
|
||||
"available_theme_options",
|
||||
"available_themes",
|
||||
"format_date",
|
||||
]
|
||||
|
||||
@@ -37,10 +37,9 @@ def format_phone_number(phone_number: str) -> str:
format = LOCALE_CATALOG["phone_number_format"].upper() # type: ignore

parsed_number = phonenumbers.parse(phone_number, None)
formatted_number = phonenumbers.format_number(
return phonenumbers.format_number(
parsed_number, getattr(phonenumbers.PhoneNumberFormat, format)
)
return formatted_number


def format_date(date: Date, date_style: Optional[str] = None) -> str:
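The `format_phone_number` hunk returns the `phonenumbers.format_number(...)` call directly instead of going through an intermediate variable. A standalone sketch of the same lookup-by-name formatting; in the real code the format name comes from the locale catalog, while here it is a hard-coded default for illustration:

```python
import phonenumbers


def format_phone_number(phone_number: str, format: str = "INTERNATIONAL") -> str:
    # `format` must name one of the PhoneNumberFormat constants:
    # E164, INTERNATIONAL, NATIONAL, or RFC3966.
    parsed_number = phonenumbers.parse(phone_number, None)
    return phonenumbers.format_number(
        parsed_number, getattr(phonenumbers.PhoneNumberFormat, format)
    )


print(format_phone_number("+905419999999"))  # e.g. "+90 541 999 99 99"
```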
@@ -81,12 +80,12 @@ def format_date(date: Date, date_style: Optional[str] = None) -> str:
|
||||
if date_style is None:
|
||||
date_style = LOCALE_CATALOG["date_style"] # type: ignore
|
||||
|
||||
assert isinstance(date_style, str)
|
||||
|
||||
for placeholder, value in placeholders.items():
|
||||
date_style = date_style.replace(placeholder, value) # type: ignore
|
||||
|
||||
date_string = date_style
|
||||
|
||||
return date_string # type: ignore
|
||||
return date_style
|
||||
|
||||
|
||||
def replace_placeholders(value: str) -> str:
|
||||
@@ -165,11 +164,11 @@ def compute_time_span_string(
|
||||
# empty string.
|
||||
return ""
|
||||
|
||||
elif not start_date_is_provided and not end_date_is_provided:
|
||||
if not start_date_is_provided and not end_date_is_provided:
|
||||
# If neither start_date nor end_date is provided, return an empty string.
|
||||
return ""
|
||||
|
||||
elif isinstance(start_date, int) or isinstance(end_date, int):
|
||||
if isinstance(start_date, int) or isinstance(end_date, int):
|
||||
# Then it means one of the dates is year, so time span cannot be more
|
||||
# specific than years.
|
||||
start_year = get_date_object(start_date).year # type: ignore
|
||||
@@ -184,49 +183,49 @@ def compute_time_span_string(
|
||||
|
||||
return time_span_string
|
||||
|
||||
# Then it means both start_date and end_date are in YYYY-MM-DD or YYYY-MM
|
||||
# format.
|
||||
end_date = get_date_object(end_date) # type: ignore
|
||||
start_date = get_date_object(start_date) # type: ignore
|
||||
|
||||
# Calculate the number of days between start_date and end_date:
|
||||
timespan_in_days = (end_date - start_date).days # type: ignore
|
||||
|
||||
# Calculate the number of years and months between start_date and end_date:
|
||||
how_many_years = timespan_in_days // 365
|
||||
how_many_months = (timespan_in_days % 365) // 30 + 1
|
||||
# Deal with overflow (prevent rounding to 1 year 12 months, etc.)
|
||||
how_many_years += how_many_months // 12
|
||||
how_many_months %= 12
|
||||
|
||||
# Format the number of years and months between start_date and end_date:
|
||||
if how_many_years == 0:
|
||||
how_many_years_string = None
|
||||
elif how_many_years == 1:
|
||||
how_many_years_string = f"1 {LOCALE_CATALOG['year']}"
|
||||
else:
|
||||
# Then it means both start_date and end_date are in YYYY-MM-DD or YYYY-MM
|
||||
# format.
|
||||
end_date = get_date_object(end_date) # type: ignore
|
||||
start_date = get_date_object(start_date) # type: ignore
|
||||
how_many_years_string = f"{how_many_years} {LOCALE_CATALOG['years']}"
|
||||
|
||||
# Calculate the number of days between start_date and end_date:
|
||||
timespan_in_days = (end_date - start_date).days # type: ignore
|
||||
# Format the number of months between start_date and end_date:
|
||||
if how_many_months == 1 or (how_many_years_string is None and how_many_months == 0):
|
||||
how_many_months_string = f"1 {LOCALE_CATALOG['month']}"
|
||||
elif how_many_months == 0:
|
||||
how_many_months_string = None
|
||||
else:
|
||||
how_many_months_string = f"{how_many_months} {LOCALE_CATALOG['months']}"
|
||||
|
||||
# Calculate the number of years and months between start_date and end_date:
|
||||
how_many_years = timespan_in_days // 365
|
||||
how_many_months = (timespan_in_days % 365) // 30 + 1
|
||||
# Deal with overflow (prevent rounding to 1 year 12 months, etc.)
|
||||
how_many_years += how_many_months // 12
|
||||
how_many_months %= 12
|
||||
# Combine howManyYearsString and howManyMonthsString:
|
||||
if how_many_years_string is None and how_many_months_string is not None:
|
||||
time_span_string = how_many_months_string
|
||||
elif how_many_months_string is None and how_many_years_string is not None:
|
||||
time_span_string = how_many_years_string
|
||||
elif how_many_years_string is not None and how_many_months_string is not None:
|
||||
time_span_string = f"{how_many_years_string} {how_many_months_string}"
|
||||
else:
|
||||
message = "The time span is not valid!"
|
||||
raise ValueError(message)
|
||||
|
||||
# Format the number of years and months between start_date and end_date:
|
||||
if how_many_years == 0:
|
||||
how_many_years_string = None
|
||||
elif how_many_years == 1:
|
||||
how_many_years_string = f"1 {LOCALE_CATALOG['year']}"
|
||||
else:
|
||||
how_many_years_string = f"{how_many_years} {LOCALE_CATALOG['years']}"
|
||||
|
||||
# Format the number of months between start_date and end_date:
|
||||
if how_many_months == 1 or (
|
||||
how_many_years_string is None and how_many_months == 0
|
||||
):
|
||||
how_many_months_string = f"1 {LOCALE_CATALOG['month']}"
|
||||
elif how_many_months == 0:
|
||||
how_many_months_string = None
|
||||
else:
|
||||
how_many_months_string = f"{how_many_months} {LOCALE_CATALOG['months']}"
|
||||
|
||||
# Combine howManyYearsString and howManyMonthsString:
|
||||
if how_many_years_string is None:
|
||||
time_span_string = how_many_months_string
|
||||
elif how_many_months_string is None:
|
||||
time_span_string = how_many_years_string
|
||||
else:
|
||||
time_span_string = f"{how_many_years_string} {how_many_months_string}"
|
||||
|
||||
return time_span_string.strip()
|
||||
return time_span_string.strip()
|
||||
|
||||
|
||||
def compute_date_string(
|
||||
@@ -294,10 +293,7 @@ def compute_date_string(
|
||||
else:
|
||||
# Then it means end_date is either in YYYY-MM-DD or YYYY-MM format
|
||||
date_object = get_date_object(end_date)
|
||||
if show_only_years:
|
||||
end_date = date_object.year
|
||||
else:
|
||||
end_date = format_date(date_object)
|
||||
end_date = date_object.year if show_only_years else format_date(date_object)
|
||||
|
||||
date_string = f"{start_date} {LOCALE_CATALOG['to']} {end_date}"
|
||||
|
||||
@@ -357,10 +353,11 @@ def get_date_object(date: str | int) -> Date:
|
||||
elif date == "present":
|
||||
date_object = Date.today()
|
||||
else:
|
||||
raise ValueError(
|
||||
message = (
|
||||
"This is not a valid date! Please use either YYYY-MM-DD, YYYY-MM, or"
|
||||
" YYYY format."
|
||||
)
|
||||
raise ValueError(message)
|
||||
|
||||
return date_object
|
||||
|
||||
@@ -418,7 +415,7 @@ def dictionary_key_to_proper_section_title(key: str) -> str:
|
||||
# loop through the words and if the word doesn't contain any uppercase letters,
|
||||
# capitalize the first letter of the word. If the word contains uppercase letters,
|
||||
# don't change the word.
|
||||
proper_title = " ".join(
|
||||
return " ".join(
|
||||
(
|
||||
word.capitalize()
|
||||
if (word.islower() and word not in words_not_capitalized_in_a_title)
|
||||
@@ -426,5 +423,3 @@ def dictionary_key_to_proper_section_title(key: str) -> str:
|
||||
)
|
||||
for word in words
|
||||
)
|
||||
|
||||
return proper_title
|
||||
|
||||
@@ -5,7 +5,7 @@ field of the input file.
|
||||
|
||||
import functools
|
||||
import re
|
||||
from typing import Annotated, Any, Literal, Optional, Type, get_args
|
||||
from typing import Annotated, Any, Literal, Optional, get_args
|
||||
|
||||
import pydantic
|
||||
import pydantic_extra_types.phone_numbers as pydantic_phone_numbers
|
||||
@@ -47,7 +47,7 @@ def validate_url(url: str) -> str:
|
||||
return url
|
||||
|
||||
|
||||
def create_a_section_validator(entry_type: Type) -> Type[SectionBase]:
|
||||
def create_a_section_validator(entry_type: type) -> type[SectionBase]:
|
||||
"""Create a section model based on the entry type. See [Pydantic's documentation
|
||||
about dynamic model
|
||||
creation](https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation)
|
||||
@@ -69,19 +69,17 @@ def create_a_section_validator(entry_type: Type) -> Type[SectionBase]:
|
||||
model_name = "SectionWith" + entry_type.__name__.replace("Entry", "Entries")
|
||||
entry_type_name = entry_type.__name__
|
||||
|
||||
SectionModel = pydantic.create_model(
|
||||
return pydantic.create_model(
|
||||
model_name,
|
||||
entry_type=(Literal[entry_type_name], ...), # type: ignore
|
||||
entries=(list[entry_type], ...),
|
||||
__base__=SectionBase,
|
||||
)
|
||||
|
||||
return SectionModel
|
||||
|
||||
|
||||
def get_characteristic_entry_attributes(
|
||||
entry_types: list[Type],
|
||||
) -> dict[Type, set[str]]:
|
||||
entry_types: list[type],
|
||||
) -> dict[type, set[str]]:
|
||||
"""Get the characteristic attributes of the entry types.
|
||||
|
||||
Args:
|
||||
@@ -98,9 +96,9 @@ def get_characteristic_entry_attributes(
|
||||
for EntryType in entry_types:
|
||||
all_attributes.extend(EntryType.model_fields.keys())
|
||||
|
||||
common_attributes = set(
|
||||
common_attributes = {
|
||||
attribute for attribute in all_attributes if all_attributes.count(attribute) > 1
|
||||
)
|
||||
}
|
||||
|
||||
# Store each entry type's characteristic attributes in a dictionary:
|
||||
characteristic_entry_attributes = {}
|
||||
@@ -113,8 +111,8 @@ def get_characteristic_entry_attributes(
|
||||
|
||||
|
||||
def get_entry_type_name_and_section_validator(
|
||||
entry: dict[str, str | list[str]] | str | Type, entry_types: list[Type]
|
||||
) -> tuple[str, Type[SectionBase]]:
|
||||
entry: dict[str, str | list[str]] | str | type, entry_types: list[type]
|
||||
) -> tuple[str, type[SectionBase]]:
|
||||
"""Get the entry type name and the section validator based on the entry.
|
||||
|
||||
It takes an entry (as a dictionary or a string) and a list of entry types. Then
|
||||
@@ -149,7 +147,8 @@ def get_entry_type_name_and_section_validator(
|
||||
break
|
||||
|
||||
if entry_type_name is None:
|
||||
raise ValueError("The entry is not provided correctly.")
|
||||
message = "The entry is not provided correctly."
|
||||
raise ValueError(message)
|
||||
|
||||
elif isinstance(entry, str):
|
||||
# Then it is a TextEntry
|
||||
@@ -165,7 +164,7 @@ def get_entry_type_name_and_section_validator(
|
||||
|
||||
|
||||
def validate_a_section(
|
||||
sections_input: list[Any], entry_types: list[Type]
|
||||
sections_input: list[Any], entry_types: list[type]
|
||||
) -> list[entry_types.Entry]:
|
||||
"""Validate a list of entries (a section) based on the entry types.
|
||||
|
||||
@@ -199,9 +198,12 @@ def validate_a_section(
|
||||
pass
|
||||
|
||||
if entry_type_name is None or section_type is None:
|
||||
raise ValueError(
|
||||
message = (
|
||||
"RenderCV couldn't match this section with any entry types! Please"
|
||||
" check the entries and make sure they are provided correctly.",
|
||||
" check the entries and make sure they are provided correctly."
|
||||
)
|
||||
raise ValueError(
|
||||
message,
|
||||
"", # This is the location of the error
|
||||
"", # This is value of the error
|
||||
)
|
||||
@@ -228,10 +230,11 @@ def validate_a_section(
|
||||
raise new_error from e
|
||||
|
||||
else:
|
||||
raise ValueError(
|
||||
message = (
|
||||
"Each section should be a list of entries! Please see the documentation for"
|
||||
" more information about the sections.",
|
||||
" more information about the sections."
|
||||
)
|
||||
raise ValueError(message)
|
||||
return sections_input
|
||||
|
||||
|
||||
@@ -247,21 +250,22 @@ def validate_a_social_network_username(username: str, network: str) -> str:
|
||||
if network == "Mastodon":
|
||||
mastodon_username_pattern = r"@[^@]+@[^@]+"
|
||||
if not re.fullmatch(mastodon_username_pattern, username):
|
||||
raise ValueError(
|
||||
'Mastodon username should be in the format "@username@domain"!'
|
||||
)
|
||||
if network == "StackOverflow":
|
||||
message = 'Mastodon username should be in the format "@username@domain"!'
|
||||
raise ValueError(message)
|
||||
elif network == "StackOverflow":
|
||||
stackoverflow_username_pattern = r"\d+\/[^\/]+"
|
||||
if not re.fullmatch(stackoverflow_username_pattern, username):
|
||||
raise ValueError(
|
||||
message = (
|
||||
'StackOverflow username should be in the format "user_id/username"!'
|
||||
)
|
||||
if network == "YouTube":
|
||||
raise ValueError(message)
|
||||
elif network == "YouTube":
|
||||
if username.startswith("@"):
|
||||
raise ValueError(
|
||||
message = (
|
||||
'YouTube username should not start with "@"! Remove "@" from the'
|
||||
" beginning of the username."
|
||||
)
|
||||
raise ValueError(message)
|
||||
|
||||
return username
|
||||
|
||||
@@ -333,9 +337,7 @@ class SocialNetwork(RenderCVBaseModelWithoutExtraKeys):
|
||||
|
||||
network = info.data["network"]
|
||||
|
||||
username = validate_a_social_network_username(username, network)
|
||||
|
||||
return username
|
||||
return validate_a_social_network_username(username, network)
|
||||
|
||||
@pydantic.model_validator(mode="after") # type: ignore
|
||||
def check_url(self) -> "SocialNetwork":
|
||||
@@ -528,7 +530,9 @@ class CurriculumVitae(RenderCVBaseModelWithExtraKeys):
|
||||
|
||||
if self.sections_input is not None:
|
||||
for title, entries in self.sections_input.items():
|
||||
title = computers.dictionary_key_to_proper_section_title(title)
|
||||
formatted_title = computers.dictionary_key_to_proper_section_title(
|
||||
title
|
||||
)
|
||||
|
||||
# The first entry can be used because all the entries in the section are
|
||||
# already validated with the `validate_a_section` function:
|
||||
@@ -539,7 +543,7 @@ class CurriculumVitae(RenderCVBaseModelWithExtraKeys):
|
||||
|
||||
# SectionBase is used so that entries are not validated again:
|
||||
section = SectionBase(
|
||||
title=title,
|
||||
title=formatted_title,
|
||||
entry_type=entry_type_name,
|
||||
entries=entries,
|
||||
)
|
||||
@@ -550,4 +554,4 @@ class CurriculumVitae(RenderCVBaseModelWithExtraKeys):
|
||||
|
||||
# The dictionary below will be overwritten by CurriculumVitae class, which will contain
|
||||
# some important data for the CV.
|
||||
curriculum_vitae: dict[str, str] = dict()
|
||||
curriculum_vitae: dict[str, str] = {}
|
||||
|
||||
@@ -6,7 +6,7 @@ of the input file.
|
||||
import importlib
|
||||
import importlib.util
|
||||
import pathlib
|
||||
from typing import Annotated, Any, Type
|
||||
from typing import Annotated, Any
|
||||
|
||||
import pydantic
|
||||
|
||||
@@ -26,7 +26,7 @@ from .base import RenderCVBaseModelWithoutExtraKeys
|
||||
|
||||
def validate_design_options(
|
||||
design: Any,
|
||||
available_theme_options: dict[str, Type],
|
||||
available_theme_options: dict[str, type],
|
||||
available_entry_type_names: list[str],
|
||||
) -> Any:
|
||||
"""Chech if the design options are for a built-in theme or a custom theme. If it is
|
||||
@@ -47,96 +47,110 @@ def validate_design_options(
|
||||
if isinstance(design, tuple(available_theme_options.values())):
|
||||
# Then it means it is an already validated built-in theme. Return it as it is:
|
||||
return design
|
||||
elif design["theme"] in available_theme_options:
|
||||
if design["theme"] in available_theme_options:
|
||||
# Then it is a built-in theme, but it is not validated yet. Validate it and
|
||||
# return it:
|
||||
ThemeDataModel = available_theme_options[design["theme"]]
|
||||
return ThemeDataModel(**design)
|
||||
else:
|
||||
# It is a custom theme. Validate it:
|
||||
theme_name: str = str(design["theme"])
|
||||
# It is a custom theme. Validate it:
|
||||
theme_name: str = str(design["theme"])
|
||||
|
||||
# Custom theme should only contain letters and digits:
|
||||
if not theme_name.isalnum():
|
||||
raise ValueError(
|
||||
"The custom theme name should only contain letters and digits.",
|
||||
"theme", # this is the location of the error
|
||||
theme_name, # this is value of the error
|
||||
)
|
||||
# Custom theme should only contain letters and digits:
|
||||
if not theme_name.isalnum():
|
||||
message = "The custom theme name should only contain letters and digits."
|
||||
raise ValueError(
|
||||
message,
|
||||
"theme", # this is the location of the error
|
||||
theme_name, # this is value of the error
|
||||
)
|
||||
|
||||
custom_theme_folder = pathlib.Path(theme_name)
|
||||
custom_theme_folder = pathlib.Path(theme_name)
|
||||
|
||||
# Check if the custom theme folder exists:
|
||||
if not custom_theme_folder.exists():
|
||||
raise ValueError(
|
||||
# Check if the custom theme folder exists:
|
||||
if not custom_theme_folder.exists():
|
||||
message = (
|
||||
(
|
||||
f"The custom theme folder `{custom_theme_folder}` does not exist."
|
||||
" It should be in the working directory as the input file.",
|
||||
"", # this is the location of the error
|
||||
theme_name, # this is value of the error
|
||||
" It should be in the working directory as the input file."
|
||||
),
|
||||
)
|
||||
raise ValueError(
|
||||
message,
|
||||
"", # this is the location of the error
|
||||
theme_name, # this is value of the error
|
||||
)
|
||||
|
||||
# check if all the necessary files are provided in the custom theme folder:
|
||||
required_entry_files = [
|
||||
entry_type_name + ".j2.tex" for entry_type_name in available_entry_type_names
|
||||
]
|
||||
required_files = [
|
||||
"SectionBeginning.j2.tex", # section beginning template
|
||||
"SectionEnding.j2.tex", # section ending template
|
||||
"Preamble.j2.tex", # preamble template
|
||||
"Header.j2.tex", # header template
|
||||
*required_entry_files,
|
||||
]
|
||||
|
||||
for file in required_files:
|
||||
file_path = custom_theme_folder / file
|
||||
if not file_path.exists():
|
||||
message = (
|
||||
f"You provided a custom theme, but the file `{file}` is not"
|
||||
f" found in the folder `{custom_theme_folder}`."
|
||||
)
|
||||
raise ValueError(
|
||||
message,
|
||||
"", # This is the location of the error
|
||||
theme_name, # This is value of the error
|
||||
)
|
||||
|
||||
# check if all the necessary files are provided in the custom theme folder:
|
||||
required_entry_files = [
|
||||
entry_type_name + ".j2.tex"
|
||||
for entry_type_name in available_entry_type_names
|
||||
]
|
||||
required_files = [
|
||||
"SectionBeginning.j2.tex", # section beginning template
|
||||
"SectionEnding.j2.tex", # section ending template
|
||||
"Preamble.j2.tex", # preamble template
|
||||
"Header.j2.tex", # header template
|
||||
] + required_entry_files
|
||||
# Import __init__.py file from the custom theme folder if it exists:
|
||||
path_to_init_file = pathlib.Path(f"{theme_name}/__init__.py")
|
||||
|
||||
for file in required_files:
|
||||
file_path = custom_theme_folder / file
|
||||
if not file_path.exists():
|
||||
raise ValueError(
|
||||
f"You provided a custom theme, but the file `{file}` is not"
|
||||
f" found in the folder `{custom_theme_folder}`.",
|
||||
"", # This is the location of the error
|
||||
theme_name, # This is value of the error
|
||||
)
|
||||
if path_to_init_file.exists():
|
||||
spec = importlib.util.spec_from_file_location(
|
||||
"theme",
|
||||
path_to_init_file,
|
||||
)
|
||||
|
||||
# Import __init__.py file from the custom theme folder if it exists:
|
||||
path_to_init_file = pathlib.Path(f"{theme_name}/__init__.py")
|
||||
|
||||
if path_to_init_file.exists():
|
||||
spec = importlib.util.spec_from_file_location(
|
||||
"theme",
|
||||
path_to_init_file,
|
||||
theme_module = importlib.util.module_from_spec(spec) # type: ignore
|
||||
try:
|
||||
spec.loader.exec_module(theme_module) # type: ignore
|
||||
except SyntaxError as e:
|
||||
message = (
|
||||
f"The custom theme {theme_name}'s __init__.py file has a syntax"
|
||||
" error. Please fix it."
|
||||
)
|
||||
|
||||
theme_module = importlib.util.module_from_spec(spec) # type: ignore
|
||||
try:
|
||||
spec.loader.exec_module(theme_module) # type: ignore
|
||||
except SyntaxError:
|
||||
raise ValueError(
|
||||
f"The custom theme {theme_name}'s __init__.py file has a syntax"
|
||||
" error. Please fix it.",
|
||||
)
|
||||
except ImportError:
|
||||
raise ValueError(
|
||||
raise ValueError(message) from e
|
||||
except ImportError as e:
|
||||
message = (
|
||||
(
|
||||
f"The custom theme {theme_name}'s __init__.py file has an"
|
||||
" import error. If you have copy-pasted RenderCV's built-in"
|
||||
" themes, make sure to update the import statements (e.g.,"
|
||||
' "from . import" to "from rendercv.themes import").',
|
||||
)
|
||||
|
||||
ThemeDataModel = getattr(
|
||||
theme_module, f"{theme_name.capitalize()}ThemeOptions" # type: ignore
|
||||
' "from . import" to "from rendercv.themes import").'
|
||||
),
|
||||
)
|
||||
|
||||
# Initialize and validate the custom theme data model:
|
||||
theme_data_model = ThemeDataModel(**design)
|
||||
else:
|
||||
# Then it means there is no __init__.py file in the custom theme folder.
|
||||
# Create a dummy data model and use that instead.
|
||||
class ThemeOptionsAreNotProvided(RenderCVBaseModelWithoutExtraKeys):
|
||||
theme: str = theme_name
|
||||
raise ValueError(message) from e
|
||||
|
||||
theme_data_model = ThemeOptionsAreNotProvided(theme=theme_name)
|
||||
ThemeDataModel = getattr(
|
||||
theme_module,
|
||||
f"{theme_name.capitalize()}ThemeOptions", # type: ignore
|
||||
)
|
||||
|
||||
return theme_data_model
|
||||
# Initialize and validate the custom theme data model:
|
||||
theme_data_model = ThemeDataModel(**design)
|
||||
else:
|
||||
# Then it means there is no __init__.py file in the custom theme folder.
|
||||
# Create a dummy data model and use that instead.
|
||||
class ThemeOptionsAreNotProvided(RenderCVBaseModelWithoutExtraKeys):
|
||||
theme: str = theme_name
|
||||
|
||||
theme_data_model = ThemeOptionsAreNotProvided(theme=theme_name)
|
||||
|
||||
return theme_data_model
|
||||
|
||||
|
||||
# ======================================================================================
|
||||
|
||||
@@ -119,8 +119,10 @@ def validate_and_adjust_dates_for_an_entry(
|
||||
end_date_object = computers.get_date_object(end_date)
|
||||
|
||||
if start_date_object > end_date_object:
|
||||
message = '"start_date" can not be after "end_date"!'
|
||||
|
||||
raise ValueError(
|
||||
'"start_date" can not be after "end_date"!',
|
||||
message,
|
||||
"start_date", # This is the location of the error
|
||||
str(start_date), # This is value of the error
|
||||
)
|
||||
@@ -267,8 +269,7 @@ class PublicationEntryBase(RenderCVBaseModelWithExtraKeys):
|
||||
|
||||
if doi_is_provided:
|
||||
return f"https://doi.org/{self.doi}"
|
||||
else:
|
||||
return ""
|
||||
return ""
|
||||
|
||||
@functools.cached_property
|
||||
def clean_url(self) -> str:
|
||||
@@ -279,8 +280,7 @@ class PublicationEntryBase(RenderCVBaseModelWithExtraKeys):
|
||||
|
||||
if url_is_provided:
|
||||
return computers.make_a_url_clean(str(self.url)) # type: ignore
|
||||
else:
|
||||
return ""
|
||||
return ""
|
||||
|
||||
|
||||
# The following class is to ensure PublicationEntryBase keys come first,
|
||||
@@ -292,8 +292,6 @@ class PublicationEntry(EntryWithDate, PublicationEntryBase):
|
||||
the fields in the correct order.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class EntryBase(EntryWithDate):
|
||||
"""This class is the parent class of some of the entry types. It is being used
|
||||
@@ -368,7 +366,9 @@ class EntryBase(EntryWithDate):
|
||||
|
||||
Example:
|
||||
```python
|
||||
entry = dm.EntryBase(start_date="2020-10-11", end_date="2021-04-04").date_string_only_years
|
||||
entry = dm.EntryBase(
|
||||
start_date="2020-10-11", end_date="2021-04-04"
|
||||
).date_string_only_years
|
||||
```
|
||||
returns
|
||||
`"2020 to 2021"`
|
||||
@@ -405,8 +405,6 @@ class NormalEntry(EntryBase, NormalEntryBase):
|
||||
correct order.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class ExperienceEntryBase(RenderCVBaseModelWithExtraKeys):
|
||||
"""This class is the parent class of the `ExperienceEntry` class."""
|
||||
@@ -427,8 +425,6 @@ class ExperienceEntry(EntryBase, ExperienceEntryBase):
|
||||
fields in the correct order.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class EducationEntryBase(RenderCVBaseModelWithExtraKeys):
|
||||
"""This class is the parent class of the `EducationEntry` class."""
|
||||
@@ -455,8 +451,6 @@ class EducationEntry(EntryBase, EducationEntryBase):
|
||||
fields in the correct order.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
# ======================================================================================
|
||||
# Create custom types based on the entry models: =======================================
|
||||
|
||||
@@ -65,7 +65,7 @@ class LocaleCatalog(RenderCVBaseModelWithoutExtraKeys):
|
||||
description='Translation of the word "present" in the locale.',
|
||||
)
|
||||
to: Optional[str] = pydantic.Field(
|
||||
default="–", # en dash
|
||||
default="–", # NOQA: RUF001
|
||||
title='Translation of "To"',
|
||||
description=(
|
||||
"The word or character used to indicate a range in the locale (e.g.,"
|
||||
@@ -138,7 +138,7 @@ class LocaleCatalog(RenderCVBaseModelWithoutExtraKeys):
|
||||
|
||||
# The dictionary below will be overwritten by LocaleCatalog class, which will contain
|
||||
# month names, month abbreviations, and other locale-specific strings.
|
||||
LOCALE_CATALOG: dict[str, str | list[str]] = dict()
|
||||
LOCALE_CATALOG: dict[str, str | list[str]] = {}
|
||||
|
||||
# Initialize even if the RenderCVDataModel is not called (to make `format_date` function
|
||||
# work on its own):
|
||||
|
||||
@@ -25,10 +25,8 @@ def read_a_yaml_file(file_path_or_contents: pathlib.Path | str) -> dict:
|
||||
if isinstance(file_path_or_contents, pathlib.Path):
|
||||
# Check if the file exists:
|
||||
if not file_path_or_contents.exists():
|
||||
raise FileNotFoundError(
|
||||
f"The input file [magenta]{file_path_or_contents}[/magenta] doesn't"
|
||||
" exist!"
|
||||
)
|
||||
message = f"The input file {file_path_or_contents} doesn't exist!"
|
||||
raise FileNotFoundError(message)
|
||||
|
||||
# Check the file extension:
|
||||
accepted_extensions = [".yaml", ".yml", ".json", ".json5"]
|
||||
@@ -39,11 +37,12 @@ def read_a_yaml_file(file_path_or_contents: pathlib.Path | str) -> dict:
|
||||
user_friendly_accepted_extensions = ", ".join(
|
||||
user_friendly_accepted_extensions
|
||||
)
|
||||
raise ValueError(
|
||||
message = (
|
||||
"The input file should have one of the following extensions:"
|
||||
f" {user_friendly_accepted_extensions}. The input file is"
|
||||
f" [magenta]{file_path_or_contents}[/magenta]."
|
||||
f" {file_path_or_contents}."
|
||||
)
|
||||
raise ValueError(message)
|
||||
|
||||
file_content = file_path_or_contents.read_text(encoding="utf-8")
|
||||
else:
|
||||
@@ -52,7 +51,8 @@ def read_a_yaml_file(file_path_or_contents: pathlib.Path | str) -> dict:
|
||||
yaml_as_a_dictionary: dict = ruamel.yaml.YAML().load(file_content)
|
||||
|
||||
if yaml_as_a_dictionary is None:
|
||||
raise ValueError("The input file is empty!")
|
||||
message = "The input file is empty!"
|
||||
raise ValueError(message)
|
||||
|
||||
return yaml_as_a_dictionary
|
||||
|
||||
@@ -71,9 +71,7 @@ def validate_input_dictionary_and_return_the_data_model(
|
||||
"""
|
||||
|
||||
# Validate the parsed dictionary by creating an instance of RenderCVDataModel:
|
||||
rendercv_data_model = models.RenderCVDataModel(**input_dictionary)
|
||||
|
||||
return rendercv_data_model
|
||||
return models.RenderCVDataModel(**input_dictionary)
|
||||
|
||||
|
||||
def read_input_file(
|
||||
@@ -91,8 +89,4 @@ def read_input_file(
|
||||
"""
|
||||
input_as_dictionary = read_a_yaml_file(file_path_or_contents)
|
||||
|
||||
rendercv_data_model = validate_input_dictionary_and_return_the_data_model(
|
||||
input_as_dictionary
|
||||
)
|
||||
|
||||
return rendercv_data_model
|
||||
return validate_input_dictionary_and_return_the_data_model(input_as_dictionary)
|
||||
|
||||
@@ -21,9 +21,9 @@ from .renderer import (
|
||||
|
||||
__all__ = [
|
||||
"create_a_latex_file",
|
||||
"create_a_markdown_file",
|
||||
"create_a_latex_file_and_copy_theme_files",
|
||||
"create_a_markdown_file",
|
||||
"render_a_pdf_from_latex",
|
||||
"render_pngs_from_pdf",
|
||||
"render_an_html_from_markdown",
|
||||
"render_pngs_from_pdf",
|
||||
]
|
||||
|
||||
@@ -41,10 +41,11 @@ def copy_theme_files_to_output_directory(
|
||||
theme_directory_path = pathlib.Path.cwd() / theme_name
|
||||
|
||||
if not theme_directory_path.is_dir():
|
||||
raise FileNotFoundError(
|
||||
f"The theme {theme_name} doesn't exist in the current working"
|
||||
" directory!"
|
||||
message = (
|
||||
f"The theme {theme_name} doesn't exist in the available themes and"
|
||||
" the current working directory!"
|
||||
)
|
||||
raise FileNotFoundError(message)
|
||||
|
||||
for theme_file in theme_directory_path.iterdir():
|
||||
dont_copy_files_with_these_extensions = [".j2.tex", ".py"]
|
||||
@@ -161,7 +162,8 @@ def render_a_pdf_from_latex(
|
||||
"""
|
||||
# check if the file exists:
|
||||
if not latex_file_path.is_file():
|
||||
raise FileNotFoundError(f"The file {latex_file_path} doesn't exist!")
|
||||
message = f"The file {latex_file_path} doesn't exist!"
|
||||
raise FileNotFoundError(message)
|
||||
|
||||
if local_latex_command:
|
||||
executable = local_latex_command
|
||||
@@ -172,13 +174,15 @@ def render_a_pdf_from_latex(
|
||||
[executable, "--version"],
|
||||
stdout=subprocess.DEVNULL, # don't capture the output
|
||||
stderr=subprocess.DEVNULL, # don't capture the error
|
||||
check=True,
|
||||
)
|
||||
except FileNotFoundError:
|
||||
raise FileNotFoundError(
|
||||
f"[blue]{executable}[/blue] isn't installed! Please install LaTeX and"
|
||||
" try again (or don't use the"
|
||||
" [bright_black]--use-local-latex-command[/bright_black] option)."
|
||||
except FileNotFoundError as e:
|
||||
message = (
|
||||
f"{executable} isn't installed! Please install LaTeX and try again (or"
|
||||
" don't use the [bright_black]--use-local-latex-command[/bright_black]"
|
||||
" option)."
|
||||
)
|
||||
raise FileNotFoundError(message) from e
|
||||
else:
|
||||
tinytex_binaries_directory = (
|
||||
pathlib.Path(__file__).parent / "tinytex-release" / "TinyTeX" / "bin"
|
||||
@@ -191,17 +195,19 @@ def render_a_pdf_from_latex(
|
||||
}
|
||||
|
||||
if sys.platform not in executables:
|
||||
raise OSError(f"TinyTeX doesn't support the platform {sys.platform}!")
|
||||
message = f"TinyTeX doesn't support the platform {sys.platform}!"
|
||||
raise OSError(message)
|
||||
|
||||
executable = executables[sys.platform]
|
||||
|
||||
# check if the executable exists:
|
||||
if not executable.is_file():
|
||||
raise FileNotFoundError(
|
||||
message = (
|
||||
f"The TinyTeX executable ({executable}) doesn't exist! If you are"
|
||||
" cloning the repository, make sure to clone it recursively to get the"
|
||||
" TinyTeX binaries. See the developer guide for more information."
|
||||
)
|
||||
raise FileNotFoundError(message)
|
||||
|
||||
# Before running LaTeX, make sure the PDF file is not open in another program,
|
||||
# that wouldn't allow LaTeX to write to it. Remove the PDF file if it exists,
|
||||
@@ -210,11 +216,12 @@ def render_a_pdf_from_latex(
|
||||
if pdf_file_path.is_file():
|
||||
try:
|
||||
pdf_file_path.unlink()
|
||||
except PermissionError:
|
||||
raise RuntimeError(
|
||||
except PermissionError as e:
|
||||
message = (
|
||||
f"The PDF file {pdf_file_path} is open in another program and doesn't"
|
||||
" allow RenderCV to rewrite it. Please close the PDF file."
|
||||
)
|
||||
raise RuntimeError(message) from e
|
||||
|
||||
# Run LaTeX to render the PDF:
|
||||
command = [
|
||||
@@ -230,51 +237,60 @@ def render_a_pdf_from_latex(
|
||||
) as latex_process:
|
||||
output = latex_process.communicate() # wait for the process to finish
|
||||
if latex_process.returncode != 0:
|
||||
latex_file_path_log = latex_file_path.with_suffix(".log").read_text()
|
||||
|
||||
if local_latex_command:
|
||||
raise RuntimeError(
|
||||
message = (
|
||||
f"The local LaTeX command {local_latex_command} couldn't render"
|
||||
" this LaTeX file into a PDF. Check out the log file"
|
||||
f" {latex_file_path.with_suffix('.log')} in the output directory"
|
||||
" for more information."
|
||||
" for more information. It is printed below:\n\n"
|
||||
)
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"RenderCV's built-in TinyTeX binaries couldn't render this LaTeX"
|
||||
" file into a PDF. This could be caused by one of two"
|
||||
" reasons:\n\n1- The theme templates might have been updated in a"
|
||||
" way RenderCV's TinyTeX cannot render. RenderCV's TinyTeX is"
|
||||
" minified to keep the package size small. As a result, it doesn't"
|
||||
" function like a general-purpose LaTeX distribution.\n2- Special"
|
||||
" characters, like Greek or Chinese letters, that are not"
|
||||
" compatible with the fonts used or RenderCV's TinyTeX might have"
|
||||
" been used.\n\nHowever, this issue can be resolved by using your"
|
||||
" own LaTeX distribution instead of the built-in TinyTeX. This can"
|
||||
" be done with the '--use-local-latex-command' option, as shown"
|
||||
" below:\n\nrendercv render --use-local-latex-command lualatex"
|
||||
" John_Doe_CV.yaml\n\nIf you ensure that the generated LaTeX file"
|
||||
" can be compiled by your local LaTeX distribution, RenderCV will"
|
||||
" work successfully. You can debug the generated LaTeX file in"
|
||||
" your LaTeX editor to resolve any bugs. Then, you can start using"
|
||||
" RenderCV with your local LaTeX distribution.\n\nIf you can't"
|
||||
" solve the problem, please open an issue on GitHub. Also, to see"
|
||||
" the error, check out the log file"
|
||||
f" {latex_file_path.with_suffix('.log')} in the output directory."
|
||||
)
|
||||
else:
|
||||
try:
|
||||
output = output[0].decode("utf-8")
|
||||
except UnicodeDecodeError:
|
||||
output = output[0].decode("latin-1")
|
||||
|
||||
if "Rerun to get" in output:
|
||||
# Run TinyTeX again to get the references right:
|
||||
subprocess.run(
|
||||
command,
|
||||
cwd=latex_file_path.parent,
|
||||
stdout=subprocess.DEVNULL, # don't capture the output
|
||||
stderr=subprocess.DEVNULL, # don't capture the error
|
||||
stdin=subprocess.DEVNULL, # don't allow TinyTeX to ask for user input
|
||||
)
|
||||
message = message + latex_file_path_log
|
||||
raise RuntimeError(message)
|
||||
|
||||
message = (
|
||||
"RenderCV's built-in TinyTeX binaries couldn't render this LaTeX"
|
||||
" file into a PDF. This could be caused by one of two"
|
||||
" reasons:\n\n1- The theme templates might have been updated in a"
|
||||
" way RenderCV's TinyTeX cannot render. RenderCV's TinyTeX is"
|
||||
" minified to keep the package size small. As a result, it doesn't"
|
||||
" function like a general-purpose LaTeX distribution.\n2- Special"
|
||||
" characters, like Greek or Chinese letters, that are not"
|
||||
" compatible with the fonts used or RenderCV's TinyTeX might have"
|
||||
" been used.\n\nHowever, this issue can be resolved by using your"
|
||||
" own LaTeX distribution instead of the built-in TinyTeX. This can"
|
||||
" be done with the '--use-local-latex-command' option, as shown"
|
||||
" below:\n\nrendercv render --use-local-latex-command lualatex"
|
||||
" John_Doe_CV.yaml\n\nIf you ensure that the generated LaTeX file"
|
||||
" can be compiled by your local LaTeX distribution, RenderCV will"
|
||||
" work successfully. You can debug the generated LaTeX file in"
|
||||
" your LaTeX editor to resolve any bugs. Then, you can start using"
|
||||
" RenderCV with your local LaTeX distribution.\n\nIf you can't"
|
||||
" solve the problem, please open an issue on GitHub. Also, to see"
|
||||
" the error, check out the log file"
|
||||
f" {latex_file_path.with_suffix('.log')} in the output directory."
|
||||
" It is printed below:\n\n"
|
||||
)
|
||||
message = message + latex_file_path_log
|
||||
raise RuntimeError(message)
|
||||
|
||||
try:
|
||||
output = output[0].decode("utf-8")
|
||||
except UnicodeDecodeError:
|
||||
output = output[0].decode("latin-1")
|
||||
|
||||
if "Rerun to get" in output:
|
||||
# Run TinyTeX again to get the references right:
|
||||
subprocess.run(
|
||||
command,
|
||||
cwd=latex_file_path.parent,
|
||||
stdout=subprocess.DEVNULL, # don't capture the output
|
||||
stderr=subprocess.DEVNULL, # don't capture the error
|
||||
stdin=subprocess.DEVNULL, # don't allow TinyTeX to ask for user input
|
||||
check=True,
|
||||
)
|
||||
|
||||
return pdf_file_path
|
||||
|
||||
@@ -290,7 +306,8 @@ def render_pngs_from_pdf(pdf_file_path: pathlib.Path) -> list[pathlib.Path]:
"""
# check if the file exists:
if not pdf_file_path.is_file():
raise FileNotFoundError(f"The file {pdf_file_path} doesn't exist!")
message = f"The file {pdf_file_path} doesn't exist!"
raise FileNotFoundError(message)

# convert the PDF to PNG:
png_directory = pdf_file_path.parent
@@ -299,7 +316,7 @@ def render_pngs_from_pdf(pdf_file_path: pathlib.Path) -> list[pathlib.Path]:
pdf = fitz.open(pdf_file_path) # open the PDF file
for page in pdf: # iterate the pages
image = page.get_pixmap(dpi=300) # type: ignore
png_file_path = png_directory / f"{png_file_name}_{page.number+1}.png" # type: ignore
png_file_path = png_directory / f"{png_file_name}_{page.number + 1}.png" # type: ignore
image.save(png_file_path)
png_files.append(png_file_path)

@@ -318,7 +335,8 @@ def render_an_html_from_markdown(markdown_file_path: pathlib.Path) -> pathlib.Pa
"""
# check if the file exists:
if not markdown_file_path.is_file():
raise FileNotFoundError(f"The file {markdown_file_path} doesn't exist!")
message = f"The file {markdown_file_path} doesn't exist!"
raise FileNotFoundError(message)

# Convert the markdown file to HTML:
markdown_text = markdown_file_path.read_text(encoding="utf-8")
@@ -326,10 +344,7 @@ def render_an_html_from_markdown(markdown_file_path: pathlib.Path) -> pathlib.Pa

# Get the title of the markdown content:
title = re.search(r"# (.*)\n", markdown_text)
if title is None:
title = ""
else:
title = title.group(1)
title = title.group(1) if title else None

jinja2_environment = templater.setup_jinja2_environment()
html_template = jinja2_environment.get_template("main.j2.html")
@@ -71,7 +71,7 @@ class TemplatedFile:
entry.__setattr__(key, "")

# The arguments of the template can be used in the template file:
result = template.render(
return template.render(
cv=self.cv,
design=self.design,
entry=entry,
@@ -79,15 +79,12 @@ class TemplatedFile:
**kwargs,
)

return result

def get_full_code(self, main_template_name: str, **kwargs) -> str:
"""Combine all the templates to get the full code of the file."""
main_template = self.environment.get_template(main_template_name)
latex_code = main_template.render(
return main_template.render(
**kwargs,
)
return latex_code


class LaTeXFile(TemplatedFile):
@@ -171,9 +168,7 @@ class LaTeXFile(TemplatedFile):
**kwargs,
)

result = revert_nested_latex_style_commands(result)

return result
return revert_nested_latex_style_commands(result)

def get_full_code(self) -> str:
"""Get the $\\LaTeX$ code of the file.
@@ -219,10 +214,7 @@ class MarkdownFile(TemplatedFile):
)
entries: list[str] = []
for i, entry in enumerate(section.entries):
if i == 0:
is_first_entry = True
else:
is_first_entry = False
is_first_entry = bool(i == 0)
entries.append(
self.template(
section.entry_type,
@@ -252,14 +244,13 @@ class MarkdownFile(TemplatedFile):
Returns:
The templated file.
"""
result = super().template(
return super().template(
"markdown",
template_name,
"md",
entry,
**kwargs,
)
return result

def get_full_code(self) -> str:
"""Get the Markdown code of the file.
@@ -410,7 +401,9 @@ def markdown_to_latex(markdown_string: str) -> str:

Example:
```python
markdown_to_latex("This is a **bold** text with an [*italic link*](https://google.com).")
markdown_to_latex(
"This is a **bold** text with an [*italic link*](https://google.com)."
)
```

returns
@@ -463,9 +456,7 @@ def markdown_to_latex(markdown_string: str) -> str:

# markdown_string = markdown_string.replace(old_code_text, new_code_text)

latex_string = markdown_string

return latex_string
return markdown_string


def transform_markdown_sections_to_latex_sections(
@@ -495,19 +486,19 @@ def transform_markdown_sections_to_latex_sections(
# Then it means it's one of the other entries.
fields_to_skip = ["doi"]
entry_as_dict = entry.model_dump()
for entry_key, value in entry_as_dict.items():
for entry_key, inner_value in entry_as_dict.items():
if entry_key in fields_to_skip:
continue
if isinstance(value, str):
result = markdown_to_latex(escape_latex_characters(value))
if isinstance(inner_value, str):
result = markdown_to_latex(escape_latex_characters(inner_value))
setattr(entry, entry_key, result)
elif isinstance(value, list):
for j, item in enumerate(value):
elif isinstance(inner_value, list):
for j, item in enumerate(inner_value):
if isinstance(item, str):
value[j] = markdown_to_latex(
inner_value[j] = markdown_to_latex(
escape_latex_characters(item)
)
setattr(entry, entry_key, value)
setattr(entry, entry_key, inner_value)
transformed_list.append(entry)

sections[key] = transformed_list
@@ -698,9 +689,8 @@ def abbreviate_name(name: Optional[str]) -> str:
first_names = name.split(" ")[:-1]
first_names_initials = [first_name[0] + "." for first_name in first_names]
last_name = name.split(" ")[-1]
abbreviated_name = " ".join(first_names_initials) + " " + last_name

return abbreviated_name
return " ".join(first_names_initials) + " " + last_name


def divide_length_by(length: str, divider: float) -> str:
@@ -729,12 +719,14 @@ def divide_length_by(length: str, divider: float) -> str:
value = re.search(r"\d+\.?\d*", length)

if value is None:
raise ValueError(f"Invalid length {length}!")
else:
value = value.group()
message = f"Invalid length {length}!"
raise ValueError(message)

value = value.group()

if divider <= 0:
raise ValueError(f"The divider must be greater than 0, but got {divider}!")
message = f"The divider must be greater than 0, but got {divider}!"
raise ValueError(message)

unit = re.findall(r"[^\d\.\s]+", length)[0]

@@ -749,9 +741,9 @@ def get_an_item_with_a_specific_attribute_value(
Example:
```python
get_an_item_with_a_specific_attribute_value(
[item1, item2], # where item1.name = "John" and item2.name = "Jane"
[item1, item2], # where item1.name = "John" and item2.name = "Jane"
"name",
"Jane"
"Jane",
)
```
returns
@@ -770,14 +762,13 @@ def get_an_item_with_a_specific_attribute_value(
if items is not None:
for item in items:
if not hasattr(item, attribute):
raise AttributeError(
f"The attribute {attribute} doesn't exist in the item {item}!"
)
else:
if getattr(item, attribute) == value:
return item
else:
return None
message = f"The attribute {attribute} doesn't exist in the item {item}!"
raise AttributeError(message)

if getattr(item, attribute) == value:
return item

return None


# Only one Jinja2 environment is needed for all the templates:
@@ -790,7 +781,7 @@ def setup_jinja2_environment() -> jinja2.Environment:
Returns:
The theme environment.
"""
global jinja2_environment
global jinja2_environment # noqa: PLW0603
themes_directory = pathlib.Path(__file__).parent.parent / "themes"

if jinja2_environment is None:
@@ -833,7 +824,10 @@ def setup_jinja2_environment() -> jinja2.Environment:
else:
# update the loader in case the current working directory has changed:
jinja2_environment.loader = jinja2.FileSystemLoader(
[pathlib.Path.cwd(), themes_directory]
[
pathlib.Path.cwd(),
themes_directory,
]
)

return jinja2_environment
@@ -3,11 +3,10 @@
import copy
import filecmp
import itertools
import os
import pathlib
import shutil
import typing
from typing import Optional, Type
from typing import Optional

import jinja2
import pydantic
@@ -226,20 +225,20 @@ def return_a_value_for_a_field_type(

if field in field_dictionary:
return field_dictionary[field]
elif type(None) in typing.get_args(field_type):
if type(None) in typing.get_args(field_type):
return return_a_value_for_a_field_type(field, field_type.__args__[0])
elif typing.get_origin(field_type) == typing.Literal:
if typing.get_origin(field_type) == typing.Literal:
return field_type.__args__[0]
elif typing.get_origin(field_type) == typing.Union:
if typing.get_origin(field_type) == typing.Union:
return return_a_value_for_a_field_type(field, field_type.__args__[0])
elif field_type in field_type_dictionary:
if field_type in field_type_dictionary:
return field_type_dictionary[field_type]

return "A string"


def create_combinations_of_a_model(
model: Type[data.Entry],
model: type[data.Entry],
) -> list[data.Entry]:
"""Look at the required fields and optional fields of a model and create all
possible combinations of them.
@@ -252,8 +251,8 @@ def create_combinations_of_a_model(
"""
fields = typing.get_type_hints(model)

required_fields = dict()
optional_fields = dict()
required_fields = {}
optional_fields = {}

for field, field_type in fields.items():
value = return_a_value_for_a_field_type(field, field_type)
@@ -408,8 +407,7 @@ def are_these_two_files_the_same(file1: pathlib.Path, file2: pathlib.Path) -> bo
return False

return True
else:
return filecmp.cmp(file1, file2)
return filecmp.cmp(file1, file2)


@pytest.fixture
@@ -457,18 +455,18 @@ def run_a_function_and_check_if_output_is_the_same_as_reference(
shutil.move(output_file_path, reference_file_or_directory_path) # type: ignore
else:
shutil.move(tmp_path, reference_file_or_directory_path)
os.mkdir(tmp_path)
pathlib.Path.mkdir(tmp_path)

function(tmp_path, reference_file_or_directory_path, **kwargs)

if output_is_a_single_file:
return are_these_two_files_the_same(
output_file_path, reference_file_or_directory_path # type: ignore
)
else:
return are_these_two_directories_the_same(
tmp_path, reference_file_or_directory_path
output_file_path, # type: ignore
reference_file_or_directory_path, # type: ignore
)
return are_these_two_directories_the_same(
tmp_path, reference_file_or_directory_path
)

return function
@@ -19,17 +19,17 @@ import rendercv.data.reader as reader
from rendercv import __version__


def run_render_command(input_file_path, working_path, extra_arguments=[]):
def run_render_command(input_file_path, working_path, extra_arguments=None):
if extra_arguments is None:
extra_arguments = []
# copy input file to the temporary directory to create the output directory there:
if not input_file_path == working_path / input_file_path.name:
if input_file_path != working_path / input_file_path.name:
shutil.copy(input_file_path, working_path)

# change the current working directory to the temporary directory:
os.chdir(working_path)

result = runner.invoke(cli.app, ["render", "John_Doe_CV.yaml"] + extra_arguments)

return result
return runner.invoke(cli.app, ["render", "John_Doe_CV.yaml", *extra_arguments])


def test_welcome():
@@ -77,7 +77,7 @@ def test_get_error_message_and_location_and_value_from_a_custom_error():


@pytest.mark.parametrize(
"data_model_class, invalid_model",
("data_model_class", "invalid_model"),
[
(
data.EducationEntry,
@@ -209,7 +209,7 @@ def test_live_progress_reporter_class():

@pytest.mark.parametrize(
"folder_name",
["markdown"] + data.available_themes,
["markdown", *data.available_themes],
)
def test_copy_templates(tmp_path, folder_name):
copied_path = utilities.copy_templates(
@@ -236,7 +236,7 @@ def test_copy_templates_with_new_folder_name(tmp_path):

@pytest.mark.parametrize(
"folder_name",
["markdown"] + data.available_themes,
["markdown", *data.available_themes],
)
def test_copy_templates_destinations_exist(tmp_path, folder_name):
(tmp_path / folder_name).mkdir()
@@ -423,7 +423,6 @@ def test_render_command_with_invalid_arguments(
def test_render_command_with_overriding_values(
tmp_path, input_file_path, yaml_location, new_value
):

result = run_render_command(
input_file_path,
tmp_path,
@@ -618,7 +617,7 @@ def test_create_theme_command(tmp_path, input_file_path, based_on):
assert "Your CV is rendered!" in result.stdout


def test_create_theme_command_invalid_based_on_theme(tmp_path):
def test_create_theme_command_invalid_based_on_theme():
result = runner.invoke(
cli.app, ["create-theme", "newtheme", "--based-on", "invalid_theme"]
)
@@ -693,7 +692,7 @@ def test_warn_if_new_version_is_available(monkeypatch):


@pytest.mark.parametrize(
"key, value",
("key", "value"),
[
("cv.email", "test@example.com"),
("cv.sections.education.0.degree", "PhD"),
@@ -716,9 +715,9 @@ def test_set_or_update_a_value(rendercv_data_model, key, value):
key = re.sub(r"sections\.([^\.]*)", 'sections_input["\\1"]', key)
key = re.sub(r"\.(\d+)", "[\\1]", key)

if value.startswith("{") and value.endswith("}"):
value = eval(value)
elif value.startswith("[") and value.endswith("]"):
if (value.startswith("{") and value.endswith("}")) or (
value.startswith("[") and value.endswith("]")
):
value = eval(value)

if key == "cv.sections":
@@ -728,7 +727,7 @@ def test_set_or_update_a_value(rendercv_data_model, key, value):


@pytest.mark.parametrize(
"key, value",
("key", "value"),
[
("cv.phone", "+9999995555555555"),
("cv.email", "notanemail***"),
@@ -737,10 +736,10 @@ def test_set_or_update_a_value(rendercv_data_model, key, value):
],
)
def test_set_or_update_a_value_invalid_values(rendercv_data_model, key, value):
new_dict = utilities.set_or_update_a_value(
rendercv_data_model.model_dump(by_alias=True), key, value
)
with pytest.raises(pydantic.ValidationError):
new_dict = utilities.set_or_update_a_value(
rendercv_data_model.model_dump(by_alias=True), key, value
)
data.validate_input_dictionary_and_return_the_data_model(new_dict)
@@ -796,8 +795,6 @@ def test_render_command_with_input_file_settings(tmp_path, input_file_path):
],
)

print(result.stdout)

assert (tmp_path / "test.pdf").exists()
assert (tmp_path / "test.tex").exists()
assert (tmp_path / "test.md").exists()
@@ -827,7 +824,7 @@ def test_render_command_with_input_file_settings_2(tmp_path, input_file_path):
yaml_content = generator.dictionary_to_yaml(input_dictionary)
new_input_file_path.write_text(yaml_content, encoding="utf-8")

result = runner.invoke(
runner.invoke(
cli.app,
[
"render",
@@ -835,8 +832,6 @@ def test_render_command_with_input_file_settings_2(tmp_path, input_file_path):
],
)

print(result.stdout)

assert (tmp_path / "rendercv_output" / "John_Doe_CV.pdf").exists()
assert not (tmp_path / "rendercv_output" / "John_Doe_CV.md").exists()
assert not (tmp_path / "rendercv_output" / "John_Doe_CV.html").exists()
@@ -888,8 +883,6 @@ def test_render_command_overriding_input_file_settings(
],
)

print(result.stdout)

assert (tmp_path / new_value).exists()
assert "Your CV is rendered!" in result.stdout
@@ -9,7 +9,7 @@ import pytest
import ruamel.yaml
import time_machine

from rendercv import data as data
from rendercv import data
from rendercv.data import generator
from rendercv.data.models import (
computers,
@@ -22,7 +22,7 @@ from .conftest import update_testdata


@pytest.mark.parametrize(
"date, expected_date_object, expected_error",
("date", "expected_date_object", "expected_error"),
[
("2020-01-01", Date(2020, 1, 1), None),
("2020-01", Date(2020, 1, 1), None),
@@ -46,7 +46,7 @@ def test_get_date_object(date, expected_date_object, expected_error):


@pytest.mark.parametrize(
"date, expected_date_string",
("date", "expected_date_string"),
[
(Date(2020, 1, 1), "Jan 2020"),
(Date(2020, 2, 1), "Feb 2020"),
@@ -92,7 +92,7 @@ def test_read_input_file(input_file_path):
assert isinstance(data_model, data.RenderCVDataModel)


def test_read_input_file_directly_with_contents(input_file_path):
def test_read_input_file_directly_with_contents():
input_dictionary = {
"cv": {
"name": "John Doe",
@@ -118,7 +118,7 @@ def test_read_input_file_directly_with_contents(input_file_path):
def test_read_input_file_invalid_file(tmp_path):
invalid_file_path = tmp_path / "invalid.extension"
invalid_file_path.write_text("dummy content", encoding="utf-8")
with pytest.raises(ValueError):
with pytest.raises(ValueError): # NOQA: PT011
data.read_input_file(invalid_file_path)


@@ -138,7 +138,7 @@ def test_create_a_sample_data_model(theme):


def test_create_a_sample_data_model_invalid_theme():
with pytest.raises(ValueError):
with pytest.raises(ValueError): # NOQA: PT011
data.create_a_sample_data_model("John Doe", "invalid")
@@ -173,94 +173,121 @@ def test_if_the_schema_is_the_latest(root_directory_path):


@pytest.mark.parametrize(
"start_date, end_date, date, expected_date_string, expected_date_string_only_years,"
" expected_time_span",
(
"start_date",
"end_date",
"date",
"expected_date_string",
"expected_date_string_only_years",
"expected_time_span",
),
[
(
"2020-01-01",
"2021-01-01",
None,
"Jan 2020 – Jan 2021",
"2020 – 2021",
"Jan 2020 – Jan 2021", # NOQA: RUF001
"2020 – 2021", # NOQA: RUF001
"1 year 1 month",
),
(
"2020-01-01",
"2022-01-01",
None,
"Jan 2020 – Jan 2022",
"2020 – 2022",
"Jan 2020 – Jan 2022", # NOQA: RUF001
"2020 – 2022", # NOQA: RUF001
"2 years 1 month",
),
(
"2020-01-01",
"2021-12-10",
None,
"Jan 2020 – Dec 2021",
"2020 – 2021",
"Jan 2020 – Dec 2021", # NOQA: RUF001
"2020 – 2021", # NOQA: RUF001
"2 years",
),
(
Date(2020, 1, 1),
Date(2021, 1, 1),
None,
"Jan 2020 – Jan 2021",
"2020 – 2021",
"Jan 2020 – Jan 2021", # NOQA: RUF001
"2020 – 2021", # NOQA: RUF001
"1 year 1 month",
),
(
"2020-01",
"2021-01",
None,
"Jan 2020 – Jan 2021",
"2020 – 2021",
"Jan 2020 – Jan 2021", # NOQA: RUF001
"2020 – 2021", # NOQA: RUF001
"1 year 1 month",
),
(
"2020-01",
"2021-01-01",
None,
"Jan 2020 – Jan 2021",
"2020 – 2021",
"Jan 2020 – Jan 2021", # NOQA: RUF001
"2020 – 2021", # NOQA: RUF001
"1 year 1 month",
),
(
"2020-01-01",
"2021-01",
None,
"Jan 2020 – Jan 2021",
"2020 – 2021",
"Jan 2020 – Jan 2021", # NOQA: RUF001
"2020 – 2021", # NOQA: RUF001
"1 year 1 month",
),
(
"2020-01-01",
None,
None,
"Jan 2020 – present",
"2020 – present",
"Jan 2020 – present", # NOQA: RUF001
"2020 – present", # NOQA: RUF001
"4 years 1 month",
),
(
"2020-02-01",
"present",
None,
"Feb 2020 – present",
"2020 – present",
"Feb 2020 – present", # NOQA: RUF001
"2020 – present", # NOQA: RUF001
"4 years",
),
("2020-01-01", "2021-01-01", "2023-02-01", "Feb 2023", "2023", ""),
("2020", "2021", None, "2020 – 2021", "2020 – 2021", "1 year"),
("2020", None, None, "2020 – present", "2020 – present", "4 years"),
("2020-10-10", "2022", None, "Oct 2020 – 2022", "2020 – 2022", "2 years"),
("2020", "2021", None, "2020 – 2021", "2020 – 2021", "1 year"), # NOQA: RUF001
(
"2020",
None,
None,
"2020 – present", # NOQA: RUF001
"2020 – present", # NOQA: RUF001
"4 years",
),
(
"2020-10-10",
"2022",
None,
"Oct 2020 – 2022", # NOQA: RUF001
"2020 – 2022", # NOQA: RUF001
"2 years",
),
(
"2020-10-10",
"2020-11-05",
None,
"Oct 2020 – Nov 2020",
"2020 – 2020",
"Oct 2020 – Nov 2020", # NOQA: RUF001
"2020 – 2020", # NOQA: RUF001
"1 month",
),
("2022", "2023-10-10", None, "2022 – Oct 2023", "2022 – 2023", "1 year"),
(
"2022",
"2023-10-10",
None,
"2022 – Oct 2023", # NOQA: RUF001
"2022 – 2023", # NOQA: RUF001
"1 year",
),
(
"2020-01-01",
"present",
@@ -321,11 +348,11 @@ def test_dates(


def test_dates_style():
assert "TEST" == data.format_date(Date(2020, 1, 1), "TEST")
assert data.format_date(Date(2020, 1, 1), "TEST") == "TEST"


@pytest.mark.parametrize(
"date, expected_date_string",
("date", "expected_date_string"),
[
("2020-01-01", "Jan 2020"),
("2020-01", "Jan 2020"),
@@ -340,13 +367,13 @@ def test_publication_dates(publication_entry, date, expected_date_string):

@pytest.mark.parametrize("date", ["2025-23-23"])
def test_invalid_publication_dates(publication_entry, date):
publication_entry["date"] = date
with pytest.raises(pydantic.ValidationError):
publication_entry["date"] = date
data.PublicationEntry(**publication_entry)


@pytest.mark.parametrize(
"start_date, end_date, date",
("start_date", "end_date", "date"),
[
("aaa", "2021-01-01", None),
("2020-01-01", "aaa", None),
@@ -366,7 +393,7 @@ def test_invalid_dates(start_date, end_date, date):


@pytest.mark.parametrize(
"doi, expected_doi_url",
("doi", "expected_doi_url"),
[
("10.1109/TASC.2023.3340648", "https://doi.org/10.1109/TASC.2023.3340648"),
],
@@ -378,7 +405,7 @@ def test_doi_url(publication_entry, doi, expected_doi_url):


@pytest.mark.parametrize(
"network, username",
("network", "username"),
[
("Mastodon", "invalidmastodon"),
("Mastodon", "@inva@l@id"),
@@ -395,7 +422,7 @@ def test_invalid_social_networks(network, username):


@pytest.mark.parametrize(
"network, username, expected_url",
("network", "username", "expected_url"),
[
("LinkedIn", "myusername", "https://linkedin.com/in/myusername"),
("GitHub", "myusername", "https://github.com/myusername"),
@@ -440,7 +467,7 @@ def test_social_network_url(network, username, expected_url):


@pytest.mark.parametrize(
"entry, expected_entry_type, expected_section_type",
("entry", "expected_entry_type", "expected_section_type"),
[
(
"publication_entry",
@@ -559,7 +586,7 @@ def test_sections(


def test_sections_with_invalid_entries():
input = {"name": "John Doe", "sections": dict()}
input = {"name": "John Doe", "sections": {}}
input["sections"]["section_title"] = [
{
"this": "is",
@@ -572,7 +599,7 @@ def test_sections_with_invalid_entries():


def test_sections_without_list():
input = {"name": "John Doe", "sections": dict()}
input = {"name": "John Doe", "sections": {}}
input["sections"]["section_title"] = {
"this section": "does not have a list of entries but a single entry."
}
@@ -590,23 +617,19 @@ def test_sections_without_list():
def test_invalid_custom_theme(invalid_custom_theme_name):
with pytest.raises(pydantic.ValidationError):
data.RenderCVDataModel(
**{
"cv": {"name": "John Doe"},
"design": {"theme": invalid_custom_theme_name},
}
cv={"name": "John Doe"}, # type: ignore
design={"theme": invalid_custom_theme_name},
)


def test_custom_theme_with_missing_files(tmp_path):
custom_theme_path = tmp_path / "customtheme"
custom_theme_path.mkdir()
os.chdir(tmp_path)
with pytest.raises(pydantic.ValidationError):
os.chdir(tmp_path)
data.RenderCVDataModel(
**{ # type: ignore
"cv": {"name": "John Doe"},
"design": {"theme": "customtheme"},
}
cv={"name": "John Doe"},
design={"theme": "customtheme"}, # type: ignore
)
@@ -616,10 +639,8 @@ def test_custom_theme(testdata_directory_path):
/ "test_copy_theme_files_to_output_directory_custom_theme"
)
data_model = data.RenderCVDataModel(
**{ # type: ignore
"cv": {"name": "John Doe"},
"design": {"theme": "dummytheme"},
}
cv={"name": "John Doe"},
design={"theme": "dummytheme"}, # type: ignore
)

assert data_model.design.theme == "dummytheme"
@@ -642,10 +663,8 @@ def test_custom_theme_without_init_file(tmp_path, testdata_directory_path):

os.chdir(tmp_path)
data_model = data.RenderCVDataModel(
**{ # type: ignore
"cv": {"name": "John Doe"},
"design": {"theme": "dummytheme"},
}
cv={"name": "John Doe"},
design={"theme": "dummytheme"}, # type: ignore
)

assert data_model.design.theme == "dummytheme"
@@ -669,10 +688,8 @@ def test_custom_theme_with_broken_init_file(tmp_path, testdata_directory_path):
os.chdir(tmp_path)
with pytest.raises(pydantic.ValidationError):
data.RenderCVDataModel(
**{ # type: ignore
"cv": {"name": "John Doe"},
"design": {"theme": "dummytheme"},
}
cv={"name": "John Doe"},
design={"theme": "dummytheme"}, # type: ignore
)

# overwrite the __init__.py file (import error)
@@ -682,10 +699,8 @@ def test_custom_theme_with_broken_init_file(tmp_path, testdata_directory_path):
os.chdir(tmp_path)
with pytest.raises(pydantic.ValidationError):
data.RenderCVDataModel(
**{ # type: ignore
"cv": {"name": "John Doe"},
"design": {"theme": "dummytheme"},
}
cv={"name": "John Doe"},
design={"theme": "dummytheme"}, # type: ignore
)
@@ -729,7 +744,7 @@ def test_locale_catalog():
phone_number_format="international",
)

assert locale_catalog.LOCALE_CATALOG == data_model.locale_catalog.model_dump()
assert data_model.locale_catalog.model_dump() == locale_catalog.LOCALE_CATALOG


def test_if_local_catalog_resets():
@@ -797,7 +812,7 @@ def test_default_input_file_doesnt_have_local_catalog():


@pytest.mark.parametrize(
"key, expected_section_title",
("key", "expected_section_title"),
[
("this_is_a_test", "This Is a Test"),
("welcome_to_RenderCV!", "Welcome to RenderCV!"),
@@ -816,7 +831,7 @@ def test_dictionary_key_to_proper_section_title(key, expected_section_title):


@pytest.mark.parametrize(
"url, expected_clean_url",
("url", "expected_clean_url"),
[
("https://example.com", "example.com"),
("https://example.com/", "example.com"),
@@ -834,7 +849,7 @@ def test_make_a_url_clean(url, expected_clean_url):


@pytest.mark.parametrize(
"path_name, expected_value",
("path_name", "expected_value"),
[
("NAME_IN_SNAKE_CASE", "John_Doe"),
("NAME_IN_LOWER_SNAKE_CASE", "john_doe"),
@@ -21,4 +21,4 @@ import pytest
],
)
def test_default_format(script_name):
subprocess.run([sys.executable, "-m", "hatch", "run", script_name])
subprocess.run([sys.executable, "-m", "hatch", "run", script_name], check=False)

@@ -25,7 +25,7 @@ def test_latex_file_class(tmp_path, rendercv_data_model, jinja2_environment):


@pytest.mark.parametrize(
"string, expected_string",
("string", "expected_string"),
[
(
"\\textit{This is a \\textit{nested} italic text.}",
@@ -71,7 +71,7 @@ def test_latex_file_revert_nested_latex_style_commands_method(string, expected_s
),
)
@pytest.mark.parametrize(
"string, expected_string",
("string", "expected_string"),
[
(
"\\textbf{This is a \\textbf{nested} bold \\textbf{text}.}",
@@ -102,7 +102,7 @@ def test_markdown_file_class(tmp_path, rendercv_data_model, jinja2_environment):


@pytest.mark.parametrize(
"string, expected_string",
("string", "expected_string"),
[
("My Text", "My Text"),
("My # Text", "My \\# Text"),
@@ -139,7 +139,7 @@ def test_escape_latex_characters(string, expected_string):


@pytest.mark.parametrize(
"markdown_string, expected_latex_string",
("markdown_string", "expected_latex_string"),
[
("My Text", "My Text"),
("**My** Text", "\\textbf{My} Text"),
@@ -178,7 +178,7 @@ def test_transform_markdown_sections_to_latex_sections(rendercv_data_model):


@pytest.mark.parametrize(
"string, placeholders, expected_string",
("string", "placeholders", "expected_string"),
[
("Hello, {name}!", {"{name}": None}, "Hello, None!"),
(
@@ -201,7 +201,7 @@ def test_replace_placeholders_with_actual_values(string, placeholders, expected_


@pytest.mark.parametrize(
"value, something, match_str, expected",
("value", "something", "match_str", "expected"),
[
("Hello World", "textbf", None, "\\textbf{Hello World}"),
("Hello World", "textbf", "World", "Hello \\textbf{World}"),
@@ -216,7 +216,7 @@ def test_make_matched_part_something(value, something, match_str, expected):


@pytest.mark.parametrize(
"value, match_str, expected",
("value", "match_str", "expected"),
[
("Hello World", None, "\\textbf{Hello World}"),
("Hello World", "World", "Hello \\textbf{World}"),
@@ -231,7 +231,7 @@ def test_make_matched_part_bold(value, match_str, expected):


@pytest.mark.parametrize(
"value, match_str, expected",
("value", "match_str", "expected"),
[
("Hello World", None, "\\underline{Hello World}"),
("Hello World", "World", "Hello \\underline{World}"),
@@ -246,7 +246,7 @@ def test_make_matched_part_underlined(value, match_str, expected):


@pytest.mark.parametrize(
"value, match_str, expected",
("value", "match_str", "expected"),
[
("Hello World", None, "\\textit{Hello World}"),
("Hello World", "World", "Hello \\textit{World}"),
@@ -261,7 +261,7 @@ def test_make_matched_part_italic(value, match_str, expected):


@pytest.mark.parametrize(
"value, match_str, expected",
("value", "match_str", "expected"),
[
("Hello World", None, "\\mbox{Hello World}"),
("Hello World", "World", "Hello \\mbox{World}"),
@@ -276,7 +276,7 @@ def test_make_matched_part_non_line_breakable(value, match_str, expected):


@pytest.mark.parametrize(
"name, expected",
("name", "expected"),
[
("John Doe", "J. Doe"),
("John Jacob Jingleheimer Schmidt", "J. J. J. Schmidt"),
@@ -291,7 +291,7 @@ def test_abbreviate_name(name, expected):


@pytest.mark.parametrize(
"length, divider, expected",
("length", "divider", "expected"),
[
("10pt", 2, "5.0pt"),
("15cm", 3, "5.0cm"),
@@ -311,11 +311,11 @@ def test_divide_length_by(length, divider, expected):


@pytest.mark.parametrize(
"length, divider",
("length", "divider"),
[("10pt", 0), ("10pt", -1), ("invalid", 4)],
)
def test_invalid_divide_length_by(length, divider):
with pytest.raises(ValueError):
with pytest.raises(ValueError): # NOQA: PT011
templater.divide_length_by(length, divider)
@@ -399,7 +399,7 @@ def test_create_a_latex_file(
f"{theme_name}_{folder_name_dictionary[curriculum_vitae_data_model]}.tex"
)

def create_a_latex_file(output_directory_path, reference_file_or_directory_path):
def create_a_latex_file(output_directory_path, _):
renderer.create_a_latex_file(data_model, output_directory_path)

assert run_a_function_and_check_if_output_is_the_same_as_reference(
@@ -448,7 +448,7 @@ def test_create_a_markdown_file(
f"{theme_name}_{folder_name_dictionary[curriculum_vitae_data_model]}.md"
)

def create_a_markdown_file(output_directory_path, reference_file_or_directory_path):
def create_a_markdown_file(output_directory_path, _):
renderer.create_a_markdown_file(data_model, output_directory_path)

assert run_a_function_and_check_if_output_is_the_same_as_reference(
@@ -479,9 +479,7 @@ def test_copy_theme_files_to_output_directory(
):
reference_directory_name = theme_name

def copy_theme_files_to_output_directory(
output_directory_path, reference_file_or_directory_path
):
def copy_theme_files_to_output_directory(output_directory_path, _):
renderer_module.copy_theme_files_to_output_directory(
theme_name, output_directory_path
)
@@ -557,7 +555,7 @@ def test_copy_theme_files_to_output_directory_custom_theme(
def test_copy_theme_files_to_output_directory_nonexistent_theme():
with pytest.raises(FileNotFoundError):
renderer_module.copy_theme_files_to_output_directory(
"nonexistent_theme", pathlib.Path(".")
"nonexistent_theme", pathlib.Path()
)


@@ -588,9 +586,7 @@ def test_create_a_latex_file_and_copy_theme_files(
design={"theme": theme_name},
)

def create_a_latex_file_and_copy_theme_files(
output_directory_path, reference_file_or_directory_path
):
def create_a_latex_file_and_copy_theme_files(output_directory_path, _):
renderer.create_a_latex_file_and_copy_theme_files(
data_model, output_directory_path
)
@@ -649,8 +645,8 @@ def test_render_a_pdf_from_latex(


def test_render_pdf_from_latex_nonexistent_latex_file():
file_path = pathlib.Path("file_doesnt_exist.tex")
with pytest.raises(FileNotFoundError):
file_path = pathlib.Path("file_doesnt_exist.tex")
renderer.render_a_pdf_from_latex(file_path)


@@ -704,8 +700,8 @@ def test_render_an_html_from_markdown(


def test_render_html_from_markdown_nonexistent_markdown_file():
file_path = pathlib.Path("file_doesnt_exist.md")
with pytest.raises(FileNotFoundError):
file_path = pathlib.Path("file_doesnt_exist.md")
renderer.render_an_html_from_markdown(file_path)


@@ -767,6 +763,6 @@ def test_render_pngs_from_pdf(


def test_render_pngs_from_pdf_nonexistent_pdf_file():
file_path = pathlib.Path("file_doesnt_exist.pdf")
with pytest.raises(FileNotFoundError):
file_path = pathlib.Path("file_doesnt_exist.pdf")
renderer.render_pngs_from_pdf(file_path)