Merge pull request #224 from Dictionarry-Hub/dev

fix(backend): perms env, mm import refactor, deserialize error
Samuel Chau
2025-08-26 22:11:39 +09:30
committed by GitHub
16 changed files with 332 additions and 186 deletions

.gitignore

@@ -20,6 +20,7 @@ backend/app/static/
 # Config data
 config/
+config-test/
 radarr-config/
 sonarr-config/
 test-data/


@@ -1,17 +1,21 @@
 # Dockerfile
 FROM python:3.9-slim
 WORKDIR /app
-# Install git (since we're still using slim)
-RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*
+# Install git and gosu for user switching
+RUN apt-get update && apt-get install -y git gosu && rm -rf /var/lib/apt/lists/*
 # Copy pre-built files from dist directory
 COPY dist/backend/app ./app
 COPY dist/static ./app/static
 COPY dist/requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
+# Copy and setup entrypoint script
+COPY entrypoint.sh /entrypoint.sh
+RUN chmod +x /entrypoint.sh
 LABEL org.opencontainers.image.authors="Dictionarry dictionarry@pm.me"
 LABEL org.opencontainers.image.description="Profilarr - Profile manager for *arr apps"
 LABEL org.opencontainers.image.source="https://github.com/Dictionarry-Hub/profilarr"
 LABEL org.opencontainers.image.title="Profilarr"
 LABEL org.opencontainers.image.version="beta"
 EXPOSE 6868
+ENTRYPOINT ["/entrypoint.sh"]
 CMD ["gunicorn", "--bind", "0.0.0.0:6868", "--timeout", "600", "app.main:create_app()"]


@@ -0,0 +1,33 @@
+# backend/app/db/migrations/versions/004_update_language_score_default.py
+from ...connection import get_db
+
+version = 4
+name = "update_language_score_default"
+
+
+def up():
+    """Update default language import score to -999999."""
+    with get_db() as conn:
+        # Update existing record to new default value
+        conn.execute('''
+            UPDATE language_import_config
+            SET score = -999999,
+                updated_at = CURRENT_TIMESTAMP
+            WHERE id = 1
+        ''')
+        conn.commit()
+
+
+def down():
+    """Revert language import score to previous default."""
+    with get_db() as conn:
+        # Revert to previous default value
+        conn.execute('''
+            UPDATE language_import_config
+            SET score = -99999,
+                updated_at = CURRENT_TIMESTAMP
+            WHERE id = 1
+        ''')
+        conn.commit()
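Note: a migration module here only has to expose version, name, up() and down(). A minimal sketch of the runner convention this implies — hypothetical, since the actual runner in the migrations package is not part of this diff:

# Hypothetical runner sketch; Profilarr's real migration runner is not shown here.
import importlib

VERSION_MODULES = [
    "app.db.migrations.versions.004_update_language_score_default",  # assumed package path
]

def apply_pending(current_version: int) -> int:
    """Run up() for every module whose version is newer than the stored version."""
    for dotted_path in VERSION_MODULES:
        module = importlib.import_module(dotted_path)
        if module.version > current_version:
            module.up()
            current_version = module.version
    return current_version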


@@ -38,8 +38,8 @@ class ProfileStrategy(ImportStrategy):
         # Load profile YAML
         profile_yaml = load_yaml(f"profile/{filename}.yml")
 
-        # Extract referenced custom formats
-        format_names = extract_format_names(profile_yaml)
+        # Extract referenced custom formats (only for the target arr type)
+        format_names = extract_format_names(profile_yaml, self.arr_type)
 
         for format_name in format_names:
             # Skip if already processed


@@ -46,12 +46,14 @@ def load_yaml(file_path: str) -> Dict[str, Any]:
         return yaml.safe_load(f)
 
 
-def extract_format_names(profile_data: Dict[str, Any]) -> Set[str]:
+def extract_format_names(profile_data: Dict[str, Any], arr_type: str = None) -> Set[str]:
     """
    Extract all custom format names referenced in a profile.
 
     Args:
         profile_data: Profile YAML data
+        arr_type: Target arr type ('radarr' or 'sonarr'). If provided, only extracts
+                  formats for that specific arr type.
 
     Returns:
         Set of unique format names
@@ -64,10 +66,18 @@ def extract_format_names(profile_data: Dict[str, Any]) -> Set[str]:
         if isinstance(cf, dict) and 'name' in cf:
             format_names.add(cf['name'])
 
     # Extract from app-specific custom_formats
-    for key in ['custom_formats_radarr', 'custom_formats_sonarr']:
-        for cf in profile_data.get(key, []):
-            if isinstance(cf, dict) and 'name' in cf:
-                format_names.add(cf['name'])
+    if arr_type:
+        # Only extract formats for the specific arr type
+        app_key = f'custom_formats_{arr_type.lower()}'
+        for cf in profile_data.get(app_key, []):
+            if isinstance(cf, dict) and 'name' in cf:
+                format_names.add(cf['name'])
+    else:
+        # Extract from all app-specific sections (backwards compatibility)
+        for key in ['custom_formats_radarr', 'custom_formats_sonarr']:
+            for cf in profile_data.get(key, []):
+                if isinstance(cf, dict) and 'name' in cf:
+                    format_names.add(cf['name'])
 
     return format_names
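The effect of the new arr_type parameter, sketched with made-up profile data (this assumes the function also reads a generic custom_formats section, as the surrounding context suggests):

profile = {
    'custom_formats': [{'name': 'Remux Tier 01'}],
    'custom_formats_radarr': [{'name': 'x265 (HD)'}],
    'custom_formats_sonarr': [{'name': 'Anime Web Tier 01'}],
}

extract_format_names(profile, 'radarr')   # {'Remux Tier 01', 'x265 (HD)'} -- sonarr-only formats skipped
extract_format_names(profile)             # all three names (backwards-compatible path)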


@@ -124,11 +124,14 @@ def setup_logging():
 
 def init_git_user():
-    """Initialize Git user configuration globally and update PAT status."""
+    """Initialize Git user configuration for the repository and update PAT status."""
     logger = logging.getLogger(__name__)
     logger.info("Starting Git user configuration")
 
     try:
+        from .config import config
+        repo_path = config.DB_DIR
+
         git_name = os.environ.get('GIT_USER_NAME', 'Profilarr')
         git_email = os.environ.get('GIT_USER_EMAIL',
                                    'profilarr@dictionarry.com')
@@ -139,30 +142,38 @@
         if git_name == 'Profilarr' or git_email == 'profilarr@dictionarry.com':
             logger.info("Using default Git user configuration")
 
-        # Set global Git configuration
-        subprocess.run(['git', 'config', '--global', 'user.name', git_name],
-                       check=True)
-        subprocess.run(['git', 'config', '--global', 'user.email', git_email],
-                       check=True)
+        # Set repository-level Git configuration if repo exists
+        if os.path.exists(os.path.join(repo_path, '.git')):
+            logger.info(f"Setting git config for repository at {repo_path}")
+            subprocess.run(['git', '-C', repo_path, 'config', '--local', 'user.name', git_name],
+                           check=True)
+            subprocess.run(['git', '-C', repo_path, 'config', '--local', 'user.email', git_email],
+                           check=True)
+            # Add safe.directory to prevent ownership issues
+            subprocess.run(['git', '-C', repo_path, 'config', '--local', '--add', 'safe.directory', repo_path],
+                           check=True)
+        else:
+            logger.warning(f"No git repository found at {repo_path}, skipping git config")
 
         # Update PAT status in database
         update_pat_status()
 
-        # Verify configuration
-        configured_name = subprocess.run(
-            ['git', 'config', '--global', 'user.name'],
-            capture_output=True,
-            text=True,
-            check=True).stdout.strip()
-        configured_email = subprocess.run(
-            ['git', 'config', '--global', 'user.email'],
-            capture_output=True,
-            text=True,
-            check=True).stdout.strip()
-
-        if configured_name != git_name or configured_email != git_email:
-            logger.error("Git configuration verification failed")
-            return False, "Git configuration verification failed"
+        # Verify configuration if repository exists
+        if os.path.exists(os.path.join(repo_path, '.git')):
+            configured_name = subprocess.run(
+                ['git', '-C', repo_path, 'config', '--local', 'user.name'],
+                capture_output=True,
+                text=True,
+                check=True).stdout.strip()
+            configured_email = subprocess.run(
+                ['git', '-C', repo_path, 'config', '--local', 'user.email'],
+                capture_output=True,
+                text=True,
+                check=True).stdout.strip()
+
+            if configured_name != git_name or configured_email != git_email:
+                logger.error("Git configuration verification failed")
+                return False, "Git configuration verification failed"
 
         logger.info("Git user configuration completed successfully")
         return True, "Git configuration successful"


@@ -101,13 +101,12 @@ def sync_media_management():
         try:
             # Get the current media management data for this category
             category_data = get_media_management_data(category)
-            logger.info(f"Raw category_data for {category}: {category_data}")
-            arr_type_data = category_data.get(arr_type, {})
-            logger.info(f"Extracted arr_type_data for {arr_type}: {arr_type_data}")
 
             if category == 'naming':
+                arr_type_data = category_data.get(arr_type, {})
                 success, message = sync_naming_config(base_url, api_key, arr_type, arr_type_data)
             elif category == 'misc':
+                arr_type_data = category_data.get(arr_type, {})
                 success, message = sync_media_management_config(base_url, api_key, arr_type, arr_type_data)
             elif category == 'quality_definitions':
                 # Quality definitions has a nested structure: qualityDefinitions -> arr_type -> qualities


@@ -1,6 +1,6 @@
 import logging
-import requests
 from typing import Dict, Any, Tuple
+from ..importer.arr_handler import ArrHandler, ArrApiError
 
 logger = logging.getLogger(__name__)
@@ -18,22 +18,14 @@ def sync_naming_config(base_url: str, api_key: str, arr_type: str, naming_data: Dict[str, Any]) -> Tuple[bool, str]:
     Returns:
         Tuple of (success, message)
     """
+    arr = None
     try:
-        # Construct the endpoint URL
-        endpoint = f"{base_url}/api/v3/config/naming"
-        headers = {
-            "X-Api-Key": api_key,
-            "Content-Type": "application/json"
-        }
+        # Initialize ArrHandler
+        arr = ArrHandler(base_url, api_key)
+        logger.info(f"Syncing naming config to {arr_type}")
 
-        # GET current naming config
-        logger.info(f"Fetching current naming config from {arr_type} at {base_url}")
-        response = requests.get(endpoint, headers=headers, timeout=10)
-        response.raise_for_status()
-        current_config = response.json()
-        logger.info(f"Current naming config for {arr_type}:")
-        logger.info(current_config)
+        # GET current naming config using ArrHandler
+        current_config = arr.get("/api/v3/config/naming")
 
         # Update current_config with fields from naming_data
         if arr_type == 'radarr':
@@ -73,24 +65,22 @@
         if 'specialsFolderFormat' in naming_data:
             current_config['specialsFolderFormat'] = naming_data['specialsFolderFormat']
 
-        # PUT the updated config back
-        logger.info(f"Updating naming config for {arr_type}")
-        logger.info(f"Request body for naming sync:")
-        logger.info(current_config)
-        put_response = requests.put(endpoint, json=current_config, headers=headers, timeout=10)
-        put_response.raise_for_status()
-        logger.info(f"Successfully synced naming config for {arr_type}")
+        # PUT the updated config back using ArrHandler
+        arr.put("/api/v3/config/naming", current_config)
+        logger.info(f"Successfully synced naming config to {arr_type}")
 
         return True, "Naming config sync successful"
 
-    except requests.exceptions.RequestException as e:
+    except ArrApiError as e:
         error_msg = f"Failed to sync naming config: {str(e)}"
         logger.error(error_msg)
         return False, error_msg
     except Exception as e:
-        error_msg = f"Unexpected error syncing naming config: {str(e)}"
+        error_msg = f"Failed to sync naming config: {str(e)}"
         logger.error(error_msg)
         return False, error_msg
+    finally:
+        if arr:
+            arr.close()
def sync_media_management_config(base_url: str, api_key: str, arr_type: str, misc_data: Dict[str, Any]) -> Tuple[bool, str]:
@@ -107,48 +97,37 @@ def sync_media_management_config(base_url: str, api_key: str, arr_type: str, misc_data: Dict[str, Any]) -> Tuple[bool, str]:
     Returns:
         Tuple of (success, message)
     """
+    arr = None
     try:
-        # Construct the endpoint URL
-        endpoint = f"{base_url}/api/v3/config/mediamanagement"
-        headers = {
-            "X-Api-Key": api_key,
-            "Content-Type": "application/json"
-        }
+        # Initialize ArrHandler
+        arr = ArrHandler(base_url, api_key)
+        logger.info(f"Syncing media management config to {arr_type}")
 
-        # GET current media management config
-        logger.info(f"Fetching current media management config from {arr_type} at {base_url}")
-        response = requests.get(endpoint, headers=headers, timeout=10)
-        response.raise_for_status()
-        current_config = response.json()
-        logger.info(f"Current media management config for {arr_type}:")
-        logger.info(current_config)
+        # GET current media management config using ArrHandler
+        current_config = arr.get("/api/v3/config/mediamanagement")
 
         # Update current_config with fields from misc_data
         # We only manage two fields: propersRepacks and enableMediaInfo
         if 'propersRepacks' in misc_data:
             current_config['downloadPropersAndRepacks'] = misc_data['propersRepacks']
         if 'enableMediaInfo' in misc_data:
             current_config['enableMediaInfo'] = misc_data['enableMediaInfo']
 
-        # PUT the updated config back
-        logger.info(f"Updating media management config for {arr_type}")
-        logger.info(f"Request body for media management sync:")
-        logger.info(current_config)
-        put_response = requests.put(endpoint, json=current_config, headers=headers, timeout=10)
-        put_response.raise_for_status()
-        logger.info(f"Successfully synced media management config for {arr_type}")
+        # PUT the updated config back using ArrHandler
+        arr.put("/api/v3/config/mediamanagement", current_config)
+        logger.info(f"Successfully synced media management config to {arr_type}")
 
         return True, "Media management config sync successful"
 
-    except requests.exceptions.RequestException as e:
+    except ArrApiError as e:
         error_msg = f"Failed to sync media management config: {str(e)}"
         logger.error(error_msg)
         return False, error_msg
     except Exception as e:
-        error_msg = f"Unexpected error syncing media management config: {str(e)}"
+        error_msg = f"Failed to sync media management config: {str(e)}"
         logger.error(error_msg)
         return False, error_msg
+    finally:
+        if arr:
+            arr.close()
def sync_quality_definitions(base_url: str, api_key: str, arr_type: str, quality_data: Dict[str, Any]) -> Tuple[bool, str]:
@@ -165,94 +144,43 @@ def sync_quality_definitions(base_url: str, api_key: str, arr_type: str, quality_data: Dict[str, Any]) -> Tuple[bool, str]:
     Returns:
         Tuple of (success, message)
     """
+    arr = None
     try:
-        # Construct the endpoint URL
-        endpoint = f"{base_url}/api/v3/qualitydefinition"
-        headers = {
-            "X-Api-Key": api_key,
-            "Content-Type": "application/json"
-        }
+        # Initialize ArrHandler
+        arr = ArrHandler(base_url, api_key)
+        logger.info(f"Syncing quality definitions to {arr_type}")
 
-        # GET current quality definitions (for logging/comparison)
-        logger.info(f"Fetching current quality definitions from {arr_type} at {base_url}")
-        response = requests.get(endpoint, headers=headers, timeout=10)
-        response.raise_for_status()
-        current_definitions = response.json()
-        logger.info(f"Current quality definitions for {arr_type}:")
-        logger.info(current_definitions)
+        # GET current quality definitions using ArrHandler
+        current_definitions = arr.get("/api/v3/qualitydefinition")
 
+        # Create a mapping of quality names to current definitions for easier lookup
+        quality_map = {def_['quality']['name']: def_ for def_ in current_definitions}
 
-        if arr_type == 'sonarr':
-            # Log the quality data we received from YML
-            logger.info(f"Quality data from YML:")
-            logger.info(quality_data)
-            # Create a mapping of quality names to current definitions for easier lookup
-            quality_map = {def_['quality']['name']: def_ for def_ in current_definitions}
-            # Update each quality definition with our values
-            for quality_name, settings in quality_data.items():
-                if quality_name in quality_map:
-                    definition = quality_map[quality_name]
-                    # Update size limits from our YML data
-                    if 'min' in settings:
-                        definition['minSize'] = settings['min']
-                    if 'preferred' in settings:
-                        definition['preferredSize'] = settings['preferred']
-                    if 'max' in settings:
-                        definition['maxSize'] = settings['max']
-            # PUT the updated definitions back
-            logger.info(f"Updating quality definitions for {arr_type}")
-            logger.info(f"Request body for quality definitions sync:")
-            logger.info(current_definitions)
-            # Sonarr expects the full array of definitions at the update endpoint
-            update_endpoint = f"{base_url}/api/v3/qualitydefinition/update"
-            put_response = requests.put(update_endpoint, json=current_definitions, headers=headers, timeout=10)
-            put_response.raise_for_status()
-            logger.info(f"Successfully synced quality definitions for {arr_type}")
-            return True, "Quality definitions sync successful"
+        # Update each quality definition with our values
+        for quality_name, settings in quality_data.items():
+            if quality_name in quality_map:
+                definition = quality_map[quality_name]
+                # Update size limits from our YML data
+                if 'min' in settings:
+                    definition['minSize'] = settings['min']
+                if 'preferred' in settings:
+                    definition['preferredSize'] = settings['preferred']
+                if 'max' in settings:
+                    definition['maxSize'] = settings['max']
 
-        else: # radarr
-            # Log the quality data we received from YML
-            logger.info(f"Quality data from YML:")
-            logger.info(quality_data)
-            # Create a mapping of quality names to current definitions for easier lookup
-            quality_map = {def_['quality']['name']: def_ for def_ in current_definitions}
-            # Update each quality definition with our values
-            for quality_name, settings in quality_data.items():
-                if quality_name in quality_map:
-                    definition = quality_map[quality_name]
-                    # Update size limits from our YML data
-                    if 'min' in settings:
-                        definition['minSize'] = settings['min']
-                    if 'preferred' in settings:
-                        definition['preferredSize'] = settings['preferred']
-                    if 'max' in settings:
-                        definition['maxSize'] = settings['max']
-            # PUT the updated definitions back
-            logger.info(f"Updating quality definitions for {arr_type}")
-            logger.info(f"Request body for quality definitions sync:")
-            logger.info(current_definitions)
-            # Radarr expects the full array of definitions at the update endpoint
-            update_endpoint = f"{base_url}/api/v3/qualitydefinition/update"
-            put_response = requests.put(update_endpoint, json=current_definitions, headers=headers, timeout=10)
-            put_response.raise_for_status()
-            logger.info(f"Successfully synced quality definitions for {arr_type}")
-            return True, "Quality definitions sync successful"
+        # PUT the updated definitions back using ArrHandler
+        arr.put("/api/v3/qualitydefinition/update", current_definitions)
+        logger.info(f"Successfully synced quality definitions to {arr_type}")
 
+        return True, "Quality definitions sync successful"
 
-    except requests.exceptions.RequestException as e:
+    except ArrApiError as e:
         error_msg = f"Failed to sync quality definitions: {str(e)}"
         logger.error(error_msg)
         return False, error_msg
     except Exception as e:
-        error_msg = f"Unexpected error syncing quality definitions: {str(e)}"
+        error_msg = f"Failed to sync quality definitions: {str(e)}"
         logger.error(error_msg)
         return False, error_msg
+    finally:
+        if arr:
+            arr.close()
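All three sync functions now lean on the same wrapper. Its implementation lives in the importer package and is not part of this diff; roughly, it is assumed to look something like this (a sketch, not the shipped code):

import requests

class ArrApiError(Exception):
    """Raised when a Radarr/Sonarr API call fails."""

class ArrHandler:
    """Sketch: shared session with auth header, JSON in/out, ArrApiError on failure."""

    def __init__(self, base_url, api_key):
        self.base_url = base_url.rstrip('/')
        self.session = requests.Session()
        self.session.headers.update({"X-Api-Key": api_key,
                                     "Content-Type": "application/json"})

    def get(self, path):
        try:
            resp = self.session.get(f"{self.base_url}{path}", timeout=10)
            resp.raise_for_status()
            return resp.json()
        except requests.RequestException as e:
            raise ArrApiError(str(e)) from e

    def put(self, path, payload):
        try:
            resp = self.session.put(f"{self.base_url}{path}", json=payload, timeout=10)
            resp.raise_for_status()
            return resp
        except requests.RequestException as e:
            raise ArrApiError(str(e)) from e

    def close(self):
        self.session.close()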


@@ -1,5 +1,5 @@
 # app/task/__init__.py
-from flask import Blueprint, jsonify
+from flask import Blueprint, jsonify, request
 import logging
 from ..db import get_db
 from .tasks import TaskScheduler
@@ -78,6 +78,63 @@ def get_task(task_id):
         return jsonify({"error": "An unexpected error occurred"}), 500
 
+
+@bp.route('/<int:task_id>', methods=['PUT'])
+def update_task(task_id):
+    try:
+        data = request.get_json()
+        if not data:
+            return jsonify({"error": "No data provided"}), 400
+
+        interval_minutes = data.get('interval_minutes')
+        if interval_minutes is None:
+            return jsonify({"error": "interval_minutes is required"}), 400
+        if not isinstance(interval_minutes, int) or interval_minutes < 1:
+            return jsonify({"error": "interval_minutes must be a positive integer"}), 400
+
+        with get_db() as conn:
+            # Check if task exists
+            task = conn.execute('SELECT * FROM scheduled_tasks WHERE id = ?',
+                                (task_id, )).fetchone()
+            if not task:
+                return jsonify({"error": "Task not found"}), 404
+
+            # Update the interval in database
+            conn.execute(
+                'UPDATE scheduled_tasks SET interval_minutes = ? WHERE id = ?',
+                (interval_minutes, task_id)
+            )
+            conn.commit()
+
+        # Update the scheduler
+        scheduler_instance = TaskScheduler.get_instance()
+        if scheduler_instance and interval_minutes > 0:
+            # Remove old job
+            scheduler_instance.scheduler.remove_job(str(task_id))
+
+            # Create new task instance with updated interval
+            task_class = TaskScheduler.get_task_class(task['type'])
+            if task_class:
+                new_task = task_class(
+                    id=task_id,
+                    name=task['name'],
+                    interval_minutes=interval_minutes
+                )
+                scheduler_instance.schedule_task(new_task)
+
+        logger.info(f"Updated task {task_id} interval to {interval_minutes} minutes")
+        return jsonify({
+            "success": True,
+            "message": f"Task interval updated to {interval_minutes} minutes"
+        }), 200
+
+    except Exception as e:
+        logger.exception(f"Failed to update task {task_id}")
+        return jsonify({"error": f"Failed to update task: {str(e)}"}), 500
+
+
 @bp.route('/<int:task_id>/run', methods=['POST'])
 def trigger_task(task_id):
     try:
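Exercising the new PUT endpoint from Python, assuming a local instance on the default port:

import requests

# Hypothetical local instance and task id; adjust for your deployment.
resp = requests.put("http://localhost:6868/api/tasks/1",
                    json={"interval_minutes": 120},
                    timeout=10)
print(resp.status_code)  # 200 on success; 400 for a bad payload, 404 for an unknown task
print(resp.json())       # {"success": true, "message": "Task interval updated to 120 minutes"}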


@@ -1,19 +1,16 @@
 # docker-compose.yml
-version: '3.8'
-
 services:
   profilarr:
-    image: santiagosayshey/profilarr:beta
+    build:
+      context: .
+      dockerfile: Dockerfile
     container_name: profilarr
     ports:
-      - 6868:6868
+      - 6870:6868
     volumes:
-      - profilarr_data:/config
+      - ./config-test:/config
-    environment:
-      - PUID=1000
-      - PGID=1000
-      - UMASK=002
-      - TZ=Australia/Adelaide
+    env_file:
+      - .env
     restart: unless-stopped
-
-volumes:
-  profilarr_data:
-    name: profilarr_data


@@ -17,5 +17,7 @@ services:
       - ./backend:/app
       - ./config:/config
     environment:
+      - PUID=1000
+      - PGID=1000
       - TZ=Australia/Adelaide
     restart: always

entrypoint.sh

@@ -0,0 +1,34 @@
+#!/bin/bash
+set -e
+
+# Default to UID/GID 1000 if not provided
+PUID=${PUID:-1000}
+PGID=${PGID:-1000}
+
+# Default umask to 022 if not provided
+UMASK=${UMASK:-022}
+
+echo "Starting with UID: $PUID, GID: $PGID, UMASK: $UMASK"
+umask "$UMASK"
+
+# Create group with specified GID
+groupadd -g "$PGID" appgroup 2>/dev/null || true
+
+# Create user with specified UID and GID
+useradd -u "$PUID" -g "$PGID" -d /home/appuser -s /bin/bash appuser 2>/dev/null || true
+
+# Create home directory if it doesn't exist
+mkdir -p /home/appuser
+chown "$PUID:$PGID" /home/appuser
+
+# Fix permissions on /config if it exists
+if [ -d "/config" ]; then
+    echo "Setting up /config directory permissions"
+    # Change ownership of /config and all its contents to PUID:PGID
+    # This ensures files created by different UIDs are accessible
+    chown -R "$PUID:$PGID" /config
+fi
+
+# Execute the main command as the specified user
+echo "Starting application as user $PUID:$PGID"
+exec gosu "$PUID:$PGID" "$@"
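For readers unfamiliar with the gosu pattern: the script stays root just long enough to fix ownership, then drops privileges before exec'ing the CMD. The same idea sketched in Python (illustration only, not part of the image):

import os

puid = int(os.environ.get("PUID", "1000"))
pgid = int(os.environ.get("PGID", "1000"))

os.umask(int(os.environ.get("UMASK", "022"), 8))  # octal string -> creation mask

# Drop privileges; the group must be set while we are still root.
os.setgid(pgid)
os.setuid(puid)
os.execvp("gunicorn", ["gunicorn", "--bind", "0.0.0.0:6868", "app.main:create_app()"])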


@@ -1,4 +1,5 @@
 import axios from 'axios';
+import Alert from '@ui/Alert';
 
 export const getAllTasks = async () => {
     try {
@@ -37,3 +38,23 @@ export const triggerTask = async taskId => {
         };
     }
 };
+
+export const updateTaskInterval = async (taskId, intervalMinutes) => {
+    try {
+        const response = await axios.put(`/api/tasks/${taskId}`, {
+            interval_minutes: intervalMinutes
+        });
+        Alert.success(response.data.message || 'Task interval updated successfully');
+        return {
+            success: true,
+            data: response.data
+        };
+    } catch (error) {
+        const errorMessage = error.response?.data?.error || 'Failed to update task interval';
+        Alert.error(errorMessage);
+        return {
+            success: false,
+            error: errorMessage
+        };
+    }
+};


@@ -1,8 +1,15 @@
 // components/settings/TaskCard.jsx
-import React from 'react';
-import {Play, Loader} from 'lucide-react';
+import React, {useState, useEffect} from 'react';
+import {Play, Loader, Edit2, Check, X} from 'lucide-react';
+import NumberInput from '@ui/NumberInput';
+import {updateTaskInterval} from '@/api/task';
 
-const TaskCard = ({task, onTrigger, isTriggering}) => {
+const TaskCard = ({task, onTrigger, isTriggering, isLast, onIntervalUpdate}) => {
+    const [intervalValue, setIntervalValue] = useState(task.interval_minutes);
+    const [originalValue, setOriginalValue] = useState(task.interval_minutes);
+
+    // Only allow editing for Repository Sync and Backup tasks
+    const isEditable = task.type === 'Sync' || task.type === 'Backup';
     const formatDateTime = dateString => {
         if (!dateString) return 'Never';
         return new Date(dateString).toLocaleString();
@@ -13,8 +20,32 @@ const TaskCard = ({task, onTrigger, isTriggering}) => {
         return `${duration}s`;
     };
 
+    useEffect(() => {
+        setIntervalValue(task.interval_minutes);
+        setOriginalValue(task.interval_minutes);
+    }, [task.interval_minutes]);
+
+    useEffect(() => {
+        if (intervalValue !== originalValue && intervalValue > 0) {
+            const updateInterval = async () => {
+                const result = await updateTaskInterval(task.id, intervalValue);
+                if (result.success) {
+                    setOriginalValue(intervalValue);
+                    // Refresh task data to get new next_run time
+                    if (onIntervalUpdate) {
+                        onIntervalUpdate();
+                    }
+                } else {
+                    // Reset to original value if update failed
+                    setIntervalValue(originalValue);
+                }
+            };
+            updateInterval();
+        }
+    }, [intervalValue]);
+
     return (
-        <tr className='bg-gray-900 border-b border-gray-700'>
+        <tr className={`bg-gray-900 ${!isLast ? 'border-b border-gray-700' : ''}`}>
             <td className='py-4 px-4'>
                 <div className='flex items-center space-x-3'>
                     <span className='font-medium text-gray-100'>
@@ -23,7 +54,21 @@ const TaskCard = ({task, onTrigger, isTriggering}) => {
                 </div>
             </td>
             <td className='py-4 px-4 text-gray-300'>
-                {task.interval_minutes} minutes
+                {isEditable ? (
+                    <div className='flex items-center space-x-2'>
+                        <NumberInput
+                            value={intervalValue}
+                            onChange={setIntervalValue}
+                            min={1}
+                            max={43200}
+                            step={1}
+                            className='w-24'
+                        />
+                        <span className='text-gray-400 text-sm'>minutes</span>
+                    </div>
+                ) : (
+                    <span>{task.interval_minutes} minutes</span>
+                )}
             </td>
             <td className='py-4 px-4 text-gray-300'>
                 {formatDateTime(task.last_run)}


@@ -77,12 +77,14 @@ const TaskContainer = () => {
                     </tr>
                 </thead>
                 <tbody>
-                    {tasks.map(task => (
+                    {tasks.map((task, index) => (
                         <TaskCard
                             key={task.id}
                             task={task}
                             onTrigger={handleTriggerTask}
                             isTriggering={triggeringTask === task.id}
+                            isLast={index === tasks.length - 1}
+                            onIntervalUpdate={fetchTasks}
                         />
                     ))}
                 </tbody>


@@ -5,6 +5,8 @@ import {ChevronUp, ChevronDown} from 'lucide-react';
 const NumberInput = ({
     value,
     onChange,
+    onBlur = () => {},
+    onFocus = () => {},
     className = '',
     step = 1,
     disabled = false,
@@ -24,26 +26,26 @@
         }
     };
 
-    const handleBlur = () => {
+    const handleBlur = (e) => {
         setIsFocused(false);
         const numValue =
             localValue === '' || localValue === '-' ? 0 : parseInt(localValue);
 
         if (min !== undefined && numValue < min) {
             onChange(min);
-            return;
-        }
-        if (max !== undefined && numValue > max) {
+        } else if (max !== undefined && numValue > max) {
             onChange(max);
-            return;
+        } else {
+            onChange(numValue);
         }
-        onChange(numValue);
+        onBlur(e);
     };
 
-    const handleFocus = () => {
+    const handleFocus = (e) => {
         setIsFocused(true);
         setLocalValue(value.toString());
+        onFocus(e);
     };
 
     const increment = () => {