diff --git a/.github/workflows/adventurelog-bot.yml b/.github/workflows/adventurelog-bot.yml
index d03a9c31..567720eb 100644
--- a/.github/workflows/adventurelog-bot.yml
+++ b/.github/workflows/adventurelog-bot.yml
@@ -61,9 +61,9 @@ jobs:
await safeClosePr();
}
- // Ignore specific user
- if (context.actor === "seanmorley15") {
- console.log("Skipping maintainer PR");
+ // Ignore PRs created by the maintainer to avoid blocking their work, as well as dependabot
+ if (context.actor === "seanmorley15" || context.actor === "dependabot[bot]") {
+ console.log("Skipping maintainer or dependabot PR");
return;
}
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 806ec340..ec00bf11 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -163,7 +163,7 @@ If your changes affect:
please update the documentation in the:
```
-/documentation
+/docs
```
folder accordingly.
diff --git a/backend/server/adventures/geocoding.py b/backend/server/adventures/geocoding.py
index fb80c32f..baa03730 100644
--- a/backend/server/adventures/geocoding.py
+++ b/backend/server/adventures/geocoding.py
@@ -3,6 +3,7 @@ import time
import socket
import re
import unicodedata
+from urllib.parse import quote
from worldtravel.models import Region, City, VisitedRegion, VisitedCity
from django.conf import settings
@@ -20,7 +21,12 @@ def search_google(query):
headers = {
'Content-Type': 'application/json',
'X-Goog-Api-Key': api_key,
- 'X-Goog-FieldMask': 'places.displayName.text,places.formattedAddress,places.location,places.types,places.rating,places.userRatingCount'
+ 'X-Goog-FieldMask': (
+ 'places.id,places.displayName.text,places.formattedAddress,places.location,'
+ 'places.types,places.rating,places.userRatingCount,places.websiteUri,'
+ 'places.nationalPhoneNumber,places.internationalPhoneNumber,'
+ 'places.editorialSummary.text,places.googleMapsUri,places.photos.name'
+ )
}
payload = {
@@ -52,6 +58,14 @@ def search_google(query):
if rating is not None and ratings_total:
importance = round(float(rating) * ratings_total / 100, 2)
+ photos = []
+ for photo in place.get('photos', [])[:5]:
+ photo_name = photo.get('name')
+ if photo_name:
+ photos.append(
+ f"https://places.googleapis.com/v1/{photo_name}/media?key={api_key}&maxHeightPx=800&maxWidthPx=800"
+ )
+
# Extract display name from the new API structure
display_name_obj = place.get("displayName", {})
name = display_name_obj.get("text") if display_name_obj else None
@@ -61,9 +75,18 @@ def search_google(query):
"lon": location.get("longitude"),
"name": name,
"display_name": place.get("formattedAddress"),
+ "place_id": place.get("id"),
"type": primary_type,
+ "types": types,
"category": category,
+ "description": (place.get('editorialSummary') or {}).get('text'),
+ "website": place.get('websiteUri'),
+ "phone_number": place.get('internationalPhoneNumber') or place.get('nationalPhoneNumber'),
+ "google_maps_url": place.get('googleMapsUri'),
"importance": importance,
+ "rating": rating,
+ "review_count": ratings_total,
+ "photos": photos,
"addresstype": addresstype,
"powered_by": "google",
})
@@ -172,6 +195,359 @@ def search(query):
# If Google fails, fallback to OSM
return search_osm(query)
+
+def _fetch_wikipedia_summary(query, language='en'):
+ normalized_query = (query or '').strip()
+ if not normalized_query:
+ return None
+
+ candidates = [normalized_query]
+ if ',' in normalized_query:
+ head = normalized_query.split(',')[0].strip()
+ if head and head not in candidates:
+ candidates.append(head)
+
+ for candidate in candidates:
+ try:
+ encoded_query = quote(candidate, safe='')
+ url = f"https://{language}.wikipedia.org/api/rest_v1/page/summary/{encoded_query}"
+ response = requests.get(
+ url,
+ headers={'User-Agent': 'AdventureLog Server'},
+ timeout=(2, 5),
+ )
+ if response.status_code != 200:
+ continue
+
+ data = response.json()
+ if data.get('type') == 'disambiguation':
+ continue
+
+ extract = (data.get('extract') or '').strip()
+ if len(extract) >= 120:
+ return extract
+ except requests.exceptions.RequestException:
+ continue
+
+ return None
+
+
+def _compose_place_description(
+ editorial_summary,
+ review_snippets,
+):
+ parts = []
+
+ summary = (editorial_summary or '').strip()
+ if summary:
+ parts.append(f"### About\n\n{summary}")
+
+ cleaned_reviews = []
+ for snippet in review_snippets:
+ text = (snippet or '').strip()
+ if len(text) >= 40:
+ cleaned_reviews.append(text)
+ if len(cleaned_reviews) >= 2:
+ break
+
+ if cleaned_reviews:
+ review_block = '### Visitor Highlights\n\n' + '\n'.join(
+ f"- {text}" for text in cleaned_reviews
+ )
+ parts.append(review_block)
+
+ return '\n\n'.join(parts).strip() or None
+
+
+def get_place_details(place_id, fallback_query=None, language='en'):
+ if not place_id:
+ return {'error': 'place_id is required'}
+
+ details = {
+ 'description': None,
+ 'name': None,
+ 'formatted_address': None,
+ 'types': [],
+ 'rating': None,
+ 'review_count': None,
+ 'website': None,
+ 'phone_number': None,
+ 'google_maps_url': None,
+ 'source': None,
+ }
+
+ api_key = settings.GOOGLE_MAPS_API_KEY
+ if api_key:
+ try:
+ url = f"https://places.googleapis.com/v1/places/{place_id}"
+ headers = {
+ 'X-Goog-Api-Key': api_key,
+ 'X-Goog-FieldMask': (
+ 'id,displayName.text,formattedAddress,editorialSummary.text,types,'
+ 'rating,userRatingCount,websiteUri,nationalPhoneNumber,'
+ 'internationalPhoneNumber,googleMapsUri,reviews.text.text'
+ ),
+ }
+ response = requests.get(url, headers=headers, timeout=(2, 6))
+ response.raise_for_status()
+
+ place = response.json()
+ details['name'] = (place.get('displayName') or {}).get('text')
+ details['formatted_address'] = place.get('formattedAddress')
+ details['types'] = place.get('types') or []
+ details['rating'] = place.get('rating')
+ details['review_count'] = place.get('userRatingCount')
+ details['website'] = place.get('websiteUri')
+ details['phone_number'] = (
+ place.get('internationalPhoneNumber') or place.get('nationalPhoneNumber')
+ )
+ details['google_maps_url'] = place.get('googleMapsUri')
+
+ editorial_summary = (place.get('editorialSummary') or {}).get('text')
+ reviews = place.get('reviews') or []
+ review_snippets = [((review.get('text') or {}).get('text')) for review in reviews]
+ details['description'] = _compose_place_description(
+ editorial_summary,
+ review_snippets,
+ )
+ if details['description']:
+ details['source'] = 'google'
+ except requests.exceptions.RequestException:
+ pass
+
+ # Google summaries are often short; fallback to Wikipedia for richer context.
+ description_text = (details.get('description') or '').strip()
+ if len(description_text) < 220:
+ wikipedia_summary = _fetch_wikipedia_summary(
+ fallback_query or details.get('name') or '',
+ language=language,
+ )
+ if wikipedia_summary:
+ if description_text:
+ details['description'] = f"{description_text}\n\n### Background\n\n{wikipedia_summary}"
+ details['source'] = 'google+wikipedia'
+ else:
+ details['description'] = f"### Background\n\n{wikipedia_summary}"
+ details['source'] = 'wikipedia'
+
+ if not details.get('description'):
+ return {'error': 'Unable to enrich place description'}
+
+ return details
+
+
+def _clean_location_candidate(value):
+ if value is None:
+ return None
+ cleaned = str(value).strip()
+ return cleaned or None
+
+
+def _looks_like_street_address(value):
+ candidate = _clean_location_candidate(value)
+ if not candidate:
+ return False
+
+ lowered = candidate.lower()
+ if not re.search(r"\d", lowered):
+ return False
+
+ if lowered.count(",") >= 2:
+ return True
+
+ if not re.match(r"^\d{1,6}\s+\S+", lowered):
+ return False
+
+ street_tokens = (
+ "st",
+ "street",
+ "rd",
+ "road",
+ "ave",
+ "avenue",
+ "blvd",
+ "boulevard",
+ "dr",
+ "drive",
+ "ln",
+ "lane",
+ "ct",
+ "court",
+ "pl",
+ "place",
+ "pkwy",
+ "parkway",
+ "hwy",
+ "highway",
+ "trl",
+ "trail",
+ )
+ return any(re.search(rf"\b{token}\b", lowered) for token in street_tokens)
+
+
+def _first_preferred_location_name(candidates, allow_address_fallback=False):
+ address_fallback = None
+ for candidate in candidates:
+ cleaned = _clean_location_candidate(candidate)
+ if not cleaned:
+ continue
+ if not _looks_like_street_address(cleaned):
+ return cleaned
+ if address_fallback is None:
+ address_fallback = cleaned
+ return address_fallback if allow_address_fallback else None
+
+
+def _extract_google_component_name(address_components):
+ preferred_types = (
+ "premise",
+ "point_of_interest",
+ "establishment",
+ "subpremise",
+ "natural_feature",
+ "airport",
+ "park",
+ "tourist_attraction",
+ "shopping_mall",
+ "university",
+ "school",
+ "hospital",
+ )
+
+ for preferred_type in preferred_types:
+ for component in address_components or []:
+ types = component.get("types", [])
+ if preferred_type in types:
+ return component.get("long_name") or component.get("short_name")
+ return None
+
+
+def _score_google_result_types(types):
+ priority = (
+ "point_of_interest",
+ "establishment",
+ "premise",
+ "subpremise",
+ "tourist_attraction",
+ "park",
+ "airport",
+ "shopping_mall",
+ "university",
+ "school",
+ "hospital",
+ "street_address",
+ "route",
+ )
+ for idx, type_name in enumerate(priority):
+ if type_name in types:
+ return len(priority) - idx
+ return 0
+
+
+def _fetch_google_nearby_place_name(lat, lon, api_key):
+ url = "https://places.googleapis.com/v1/places:searchNearby"
+ headers = {
+ 'Content-Type': 'application/json',
+ 'X-Goog-Api-Key': api_key,
+ 'X-Goog-FieldMask': 'places.displayName.text,places.formattedAddress,places.types',
+ }
+ payload = {
+ "maxResultCount": 6,
+ "rankPreference": "DISTANCE",
+ "locationRestriction": {
+ "circle": {
+ "center": {
+ "latitude": float(lat),
+ "longitude": float(lon),
+ },
+ "radius": 45.0,
+ }
+ },
+ }
+
+ try:
+ response = requests.post(url, headers=headers, json=payload, timeout=(2, 5))
+ response.raise_for_status()
+ places = (response.json() or {}).get("places", [])
+ except requests.exceptions.RequestException:
+ return None
+
+ candidates = [((place.get("displayName") or {}).get("text")) for place in places]
+ return _first_preferred_location_name(candidates, allow_address_fallback=False)
+
+
+def _extract_google_location_name(results, nearby_place_name=None):
+ preferred_nearby = _first_preferred_location_name([nearby_place_name], allow_address_fallback=False)
+ if preferred_nearby:
+ return preferred_nearby
+
+ scored_candidates = []
+ for result in results or []:
+ score = _score_google_result_types(result.get("types", []))
+ if score <= 0:
+ continue
+ component_name = _extract_google_component_name(result.get("address_components", []))
+ name_candidate = _first_preferred_location_name([component_name], allow_address_fallback=False)
+ if name_candidate:
+ scored_candidates.append((score, name_candidate))
+
+ if scored_candidates:
+ scored_candidates.sort(key=lambda item: item[0], reverse=True)
+ return scored_candidates[0][1]
+
+ component_candidates = [
+ _extract_google_component_name(result.get("address_components", []))
+ for result in (results or [])
+ ]
+ component_pick = _first_preferred_location_name(component_candidates, allow_address_fallback=False)
+ if component_pick:
+ return component_pick
+
+ formatted_candidates = [result.get("formatted_address") for result in (results or [])]
+ return _first_preferred_location_name(formatted_candidates, allow_address_fallback=True)
+
+
+def _extract_osm_location_name(data):
+ address = data.get("address", {}) or {}
+ namedetails = data.get("namedetails", {}) or {}
+ extratags = data.get("extratags", {}) or {}
+
+ candidates = [
+ data.get("name"),
+ namedetails.get("name"),
+ namedetails.get("official_name"),
+ namedetails.get("short_name"),
+ namedetails.get("brand"),
+ namedetails.get("loc_name"),
+ address.get("amenity"),
+ address.get("tourism"),
+ address.get("attraction"),
+ address.get("building"),
+ address.get("shop"),
+ address.get("leisure"),
+ address.get("historic"),
+ address.get("man_made"),
+ address.get("office"),
+ address.get("aeroway"),
+ address.get("railway"),
+ address.get("public_transport"),
+ address.get("craft"),
+ address.get("house_name"),
+ extratags.get("name"),
+ extratags.get("official_name"),
+ extratags.get("brand"),
+ extratags.get("operator"),
+ ]
+
+ preferred = _first_preferred_location_name(candidates, allow_address_fallback=False)
+ if preferred:
+ return preferred
+
+ return _first_preferred_location_name(
+ [data.get("name"), data.get("display_name")],
+ allow_address_fallback=True,
+ )
+
# -----------------
# REVERSE GEOCODING
# -----------------
@@ -186,10 +562,7 @@ def extractIsoCode(user, data):
country_code = None
city = None
visited_city = None
- location_name = None
-
- if 'name' in data.keys():
- location_name = data['name']
+ location_name = _clean_location_candidate(data.get('location_name') or data.get('name'))
address = data.get('address', {}) or {}
@@ -369,7 +742,10 @@ def reverse_geocode(lat, lon, user):
return reverse_geocode_osm(lat, lon, user)
def reverse_geocode_osm(lat, lon, user):
- url = f"https://nominatim.openstreetmap.org/reverse?format=jsonv2&lat={lat}&lon={lon}"
+ url = (
+ "https://nominatim.openstreetmap.org/reverse"
+ f"?format=jsonv2&addressdetails=1&namedetails=1&extratags=1&zoom=18&lat={lat}&lon={lon}"
+ )
headers = {'User-Agent': 'AdventureLog Server'}
connect_timeout = 1
read_timeout = 5
@@ -381,6 +757,7 @@ def reverse_geocode_osm(lat, lon, user):
response = requests.get(url, headers=headers, timeout=(connect_timeout, read_timeout))
response.raise_for_status()
data = response.json()
+ data["location_name"] = _extract_osm_location_name(data)
return extractIsoCode(user, data)
except requests.exceptions.Timeout:
return {"error": "Request timed out while contacting OpenStreetMap. Please try again."}
@@ -424,11 +801,23 @@ def reverse_geocode_google(lat, lon, user):
else:
return {"error": "Geocoding failed. Please try again."}
+ results = data.get("results", [])
+ if not results:
+ return {"error": "No location found for the given coordinates."}
+
+ nearby_place_name = _fetch_google_nearby_place_name(lat, lon, api_key)
+ location_name = _extract_google_location_name(results, nearby_place_name=nearby_place_name)
+
# Convert Google schema to Nominatim-style for extractIsoCode
- first_result = data.get("results", [])[0]
+ first_result = results[0]
+ address_result = next(
+ (result for result in results if "plus_code" not in result.get("types", [])),
+ first_result,
+ )
result_data = {
"name": first_result.get("formatted_address"),
- "address": _parse_google_address_components(first_result.get("address_components", []))
+ "location_name": location_name,
+ "address": _parse_google_address_components(address_result.get("address_components", [])),
}
return extractIsoCode(user, result_data)
except requests.exceptions.Timeout:
diff --git a/backend/server/adventures/serializers.py b/backend/server/adventures/serializers.py
index 5b0115e8..3d0ac204 100644
--- a/backend/server/adventures/serializers.py
+++ b/backend/server/adventures/serializers.py
@@ -1060,6 +1060,7 @@ class CollectionItineraryDaySerializer(CustomModelSerializer):
return super().update(instance, validated_data)
class CollectionItineraryItemSerializer(CustomModelSerializer):
+ date = serializers.DateField(required=False, allow_null=True, default=None)
item = serializers.SerializerMethodField()
start_datetime = serializers.ReadOnlyField()
end_datetime = serializers.ReadOnlyField()
@@ -1069,6 +1070,33 @@ class CollectionItineraryItemSerializer(CustomModelSerializer):
model = CollectionItineraryItem
fields = ['id', 'collection', 'content_type', 'object_id', 'item', 'date', 'is_global', 'order', 'start_datetime', 'end_datetime', 'created_at', 'object_name']
read_only_fields = ['id', 'created_at', 'start_datetime', 'end_datetime', 'item', 'object_name']
+
+ def validate(self, attrs):
+ data = super().validate(attrs)
+
+ is_global = data.get('is_global')
+ if is_global is None and self.instance is not None:
+ is_global = self.instance.is_global
+
+ if 'date' in data:
+ date = data.get('date')
+ elif self.instance is not None:
+ date = self.instance.date
+ else:
+ date = None
+
+ if is_global and date is not None:
+ raise serializers.ValidationError({
+ 'date': 'Global items must not have a date.',
+ 'is_global': 'Provide either a date or set is_global, not both.',
+ })
+
+ if not is_global and date is None and self.instance is None:
+ raise serializers.ValidationError({
+ 'date': 'Dated items must include a date. To create a trip-wide item, set is_global=true.',
+ })
+
+ return data
def update(self, instance, validated_data):
# Security: Prevent changing collection, content_type, or object_id after creation
diff --git a/backend/server/adventures/tests.py b/backend/server/adventures/tests.py
index 7ce503c2..3041c9d2 100644
--- a/backend/server/adventures/tests.py
+++ b/backend/server/adventures/tests.py
@@ -1,3 +1,57 @@
-from django.test import TestCase
+from rest_framework.test import APITestCase
-# Create your tests here.
+from adventures.models import Collection, CollectionItineraryItem, Location
+from users.models import CustomUser
+
+
+class ItineraryAPITestCase(APITestCase):
+ def setUp(self):
+ self.user = CustomUser.objects.create_user(
+ username='itinerary-user',
+ email='itinerary-user@example.com',
+ password='testpassword123',
+ )
+ self.collection = Collection.objects.create(user=self.user, name='Test Trip')
+ self.location = Location.objects.create(user=self.user, name='Test Location', is_public=True)
+ self.client.force_authenticate(user=self.user)
+
+ def test_create_global_itinerary_item_without_date(self):
+ response = self.client.post(
+ '/api/itineraries/',
+ {
+ 'collection': str(self.collection.id),
+ 'content_type': 'location',
+ 'object_id': str(self.location.id),
+ 'is_global': True,
+ 'order': 0,
+ },
+ format='json',
+ )
+
+ self.assertEqual(response.status_code, 201)
+ self.assertEqual(CollectionItineraryItem.objects.count(), 1)
+
+ item = CollectionItineraryItem.objects.get()
+ self.assertTrue(item.is_global)
+ self.assertIsNone(item.date)
+ self.assertEqual(item.collection, self.collection)
+
+ payload = response.json()
+ self.assertTrue(payload['is_global'])
+ self.assertIsNone(payload['date'])
+
+ def test_create_dated_itinerary_item_without_date_is_rejected(self):
+ response = self.client.post(
+ '/api/itineraries/',
+ {
+ 'collection': str(self.collection.id),
+ 'content_type': 'location',
+ 'object_id': str(self.location.id),
+ 'is_global': False,
+ 'order': 0,
+ },
+ format='json',
+ )
+
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()['date'][0], 'Dated items must include a date. To create a trip-wide item, set is_global=true.')
diff --git a/backend/server/adventures/views/import_export_view.py b/backend/server/adventures/views/import_export_view.py
index 39dc303c..6270d018 100644
--- a/backend/server/adventures/views/import_export_view.py
+++ b/backend/server/adventures/views/import_export_view.py
@@ -100,6 +100,103 @@ class BackupViewSet(viewsets.ViewSet):
normalized_currency = default_currency
return amount, normalized_currency
+
+ def _serialize_images(self, images_qs):
+ """Serialize ContentImage queryset into backup-safe dicts."""
+ serialized = []
+ for image in images_qs.all():
+ entry = {
+ 'immich_id': image.immich_id,
+ 'is_primary': image.is_primary,
+ 'filename': None,
+ }
+ if image.image:
+ entry['filename'] = image.image.name.split('/')[-1]
+ serialized.append(entry)
+ return serialized
+
+ def _serialize_attachments(self, attachments_qs):
+ """Serialize ContentAttachment queryset into backup-safe dicts."""
+ serialized = []
+ for attachment in attachments_qs.all():
+ entry = {
+ 'name': attachment.name,
+ 'filename': None,
+ }
+ if attachment.file:
+ entry['filename'] = attachment.file.name.split('/')[-1]
+ serialized.append(entry)
+ return serialized
+
+ def _add_storage_file_to_zip(self, zip_file, storage_name, arcname, files_added):
+ """Read a Django storage file and add it to the zip once."""
+ if not storage_name or storage_name in files_added:
+ return
+
+ with default_storage.open(storage_name) as storage_file:
+ zip_file.writestr(arcname, storage_file.read())
+ files_added.add(storage_name)
+
+ def _import_images(self, images_data, zip_file, user, content_type, object_id, summary):
+ created = []
+ for img_data in images_data or []:
+ immich_id = (img_data or {}).get('immich_id')
+ if immich_id:
+ created.append(
+ ContentImage.objects.create(
+ user=user,
+ immich_id=immich_id,
+ is_primary=(img_data or {}).get('is_primary', False),
+ content_type=content_type,
+ object_id=object_id,
+ )
+ )
+ summary['images'] += 1
+ continue
+
+ filename = (img_data or {}).get('filename')
+ if not filename:
+ continue
+
+ try:
+ img_content = zip_file.read(f'images/{filename}')
+ except KeyError:
+ continue
+
+ img_file = ContentFile(img_content, name=filename)
+ created.append(
+ ContentImage.objects.create(
+ user=user,
+ image=img_file,
+ is_primary=(img_data or {}).get('is_primary', False),
+ content_type=content_type,
+ object_id=object_id,
+ )
+ )
+ summary['images'] += 1
+
+ return created
+
+ def _import_attachments(self, attachments_data, zip_file, user, content_type, object_id, summary):
+ for att_data in attachments_data or []:
+ filename = (att_data or {}).get('filename')
+ if not filename:
+ continue
+
+ try:
+ att_content = zip_file.read(f'attachments/{filename}')
+ except KeyError:
+ continue
+
+ att_file = ContentFile(att_content, name=filename)
+ ContentAttachment.objects.create(
+ user=user,
+ file=att_file,
+ name=(att_data or {}).get('name'),
+ content_type=content_type,
+ object_id=object_id,
+ )
+ summary['attachments'] += 1
@action(detail=False, methods=['get'])
def export(self, request):
@@ -148,9 +245,11 @@ class BackupViewSet(viewsets.ViewSet):
# Track images so we can reference them for collection primary images
image_export_map = {}
+ collection_id_to_export_id = {}
# Export Collections
for idx, collection in enumerate(user.collection_set.all()):
+ collection_id_to_export_id[collection.id] = idx
export_data['collections'].append({
'export_id': idx, # Add unique identifier for this export
'name': collection.name,
@@ -200,7 +299,9 @@ class BackupViewSet(viewsets.ViewSet):
'end_date': visit.end_date.isoformat() if visit.end_date else None,
'timezone': visit.timezone,
'notes': visit.notes,
- 'activities': []
+ 'activities': [],
+ 'images': [],
+ 'attachments': [],
}
# Add activities for this visit
@@ -239,6 +340,20 @@ class BackupViewSet(viewsets.ViewSet):
visit_data['activities'].append(activity_data)
location_data['visits'].append(visit_data)
+
+ # Add visit images/attachments (generic)
+ visit_data['images'] = self._serialize_images(visit.images)
+ visit_data['attachments'] = self._serialize_attachments(visit.attachments)
+
+ for image_index, image in enumerate(visit.images.all()):
+ image_export_map[image.id] = {
+ 'content_type': 'visit',
+ 'location_export_id': idx,
+ 'visit_export_id': visit_idx,
+ 'image_index': image_index,
+ 'immich_id': image.immich_id,
+ 'filename': image.image.name.split('/')[-1] if image.image else None,
+ }
# Add trails for this location
for trail in location.trails.all():
@@ -251,48 +366,28 @@ class BackupViewSet(viewsets.ViewSet):
location_data['trails'].append(trail_data)
# Add images
+ location_data['images'] = self._serialize_images(location.images)
for image_index, image in enumerate(location.images.all()):
- image_data = {
- 'immich_id': image.immich_id,
- 'is_primary': image.is_primary,
- 'filename': None,
- }
- if image.image:
- image_data['filename'] = image.image.name.split('/')[-1]
- location_data['images'].append(image_data)
-
image_export_map[image.id] = {
+ 'content_type': 'location',
'location_export_id': idx,
'image_index': image_index,
'immich_id': image.immich_id,
- 'filename': image_data['filename'],
+ 'filename': image.image.name.split('/')[-1] if image.image else None,
}
# Add attachments
- for attachment in location.attachments.all():
- attachment_data = {
- 'name': attachment.name,
- 'filename': None
- }
- if attachment.file:
- attachment_data['filename'] = attachment.file.name.split('/')[-1]
- location_data['attachments'].append(attachment_data)
+ location_data['attachments'] = self._serialize_attachments(location.attachments)
export_data['locations'].append(location_data)
- # Attach collection primary image references (if any)
- for idx, collection in enumerate(user.collection_set.all()):
- primary = collection.primary_image
- if primary and primary.id in image_export_map:
- export_data['collections'][idx]['primary_image'] = image_export_map[primary.id]
-
# Export Transportation
for idx, transport in enumerate(user.transportation_set.all()):
collection_export_id = None
if transport.collection:
collection_export_id = collection_name_to_id.get(transport.collection.name)
- export_data['transportation'].append({
+ transport_data = {
'export_id': idx,
'type': transport.type,
'name': transport.name,
@@ -313,8 +408,20 @@ class BackupViewSet(viewsets.ViewSet):
'destination_longitude': str(transport.destination_longitude) if transport.destination_longitude else None,
'to_location': transport.to_location,
'is_public': transport.is_public,
- 'collection_export_id': collection_export_id
- })
+ 'collection_export_id': collection_export_id,
+ 'images': self._serialize_images(transport.images),
+ 'attachments': self._serialize_attachments(transport.attachments),
+ }
+ export_data['transportation'].append(transport_data)
+
+ for image_index, image in enumerate(transport.images.all()):
+ image_export_map[image.id] = {
+ 'content_type': 'transportation',
+ 'object_export_id': idx,
+ 'image_index': image_index,
+ 'immich_id': image.immich_id,
+ 'filename': image.image.name.split('/')[-1] if image.image else None,
+ }
# Export Notes
for idx, note in enumerate(user.note_set.all()):
@@ -322,15 +429,27 @@ class BackupViewSet(viewsets.ViewSet):
if note.collection:
collection_export_id = collection_name_to_id.get(note.collection.name)
- export_data['notes'].append({
+ note_data = {
'export_id': idx,
'name': note.name,
'content': note.content,
'links': note.links,
'date': note.date.isoformat() if note.date else None,
'is_public': note.is_public,
- 'collection_export_id': collection_export_id
- })
+ 'collection_export_id': collection_export_id,
+ 'images': self._serialize_images(note.images),
+ 'attachments': self._serialize_attachments(note.attachments),
+ }
+ export_data['notes'].append(note_data)
+
+ for image_index, image in enumerate(note.images.all()):
+ image_export_map[image.id] = {
+ 'content_type': 'note',
+ 'object_export_id': idx,
+ 'image_index': image_index,
+ 'immich_id': image.immich_id,
+ 'filename': image.image.name.split('/')[-1] if image.image else None,
+ }
# Export Checklists
for idx, checklist in enumerate(user.checklist_set.all()):
@@ -362,7 +481,7 @@ class BackupViewSet(viewsets.ViewSet):
if lodging.collection:
collection_export_id = collection_name_to_id.get(lodging.collection.name)
- export_data['lodging'].append({
+ lodging_data = {
'export_id': idx,
'name': lodging.name,
'type': lodging.type,
@@ -379,8 +498,30 @@ class BackupViewSet(viewsets.ViewSet):
'longitude': str(lodging.longitude) if lodging.longitude else None,
'location': lodging.location,
'is_public': lodging.is_public,
- 'collection_export_id': collection_export_id
- })
+ 'collection_export_id': collection_export_id,
+ 'images': self._serialize_images(lodging.images),
+ 'attachments': self._serialize_attachments(lodging.attachments),
+ }
+ export_data['lodging'].append(lodging_data)
+
+ for image_index, image in enumerate(lodging.images.all()):
+ image_export_map[image.id] = {
+ 'content_type': 'lodging',
+ 'object_export_id': idx,
+ 'image_index': image_index,
+ 'immich_id': image.immich_id,
+ 'filename': image.image.name.split('/')[-1] if image.image else None,
+ }
+
+ # Attach collection primary image references (if any)
+ for collection in user.collection_set.all():
+ export_id = collection_id_to_export_id.get(collection.id)
+ if export_id is None:
+ continue
+
+ primary = collection.primary_image
+ if primary and primary.id in image_export_map:
+ export_data['collections'][export_id]['primary_image'] = image_export_map[primary.id]
# Export Itinerary Items
# Create export_id mappings for all content types
@@ -431,35 +572,153 @@ class BackupViewSet(viewsets.ViewSet):
for image in location.images.all():
if image.image and image.image.name not in files_added:
try:
- image_content = default_storage.open(image.image.name).read()
filename = image.image.name.split('/')[-1]
- zip_file.writestr(f'images/{filename}', image_content)
- files_added.add(image.image.name)
+ self._add_storage_file_to_zip(
+ zip_file,
+ image.image.name,
+ f'images/{filename}',
+ files_added,
+ )
except Exception as e:
print(f"Error adding image {image.image.name}: {e}")
+
+ # Add visit images
+ for visit in location.visits.all():
+ for image in visit.images.all():
+ if image.image and image.image.name not in files_added:
+ try:
+ filename = image.image.name.split('/')[-1]
+ self._add_storage_file_to_zip(
+ zip_file,
+ image.image.name,
+ f'images/{filename}',
+ files_added,
+ )
+ except Exception as e:
+ print(f"Error adding visit image {image.image.name}: {e}")
# Add attachments
for attachment in location.attachments.all():
if attachment.file and attachment.file.name not in files_added:
try:
- file_content = default_storage.open(attachment.file.name).read()
filename = attachment.file.name.split('/')[-1]
- zip_file.writestr(f'attachments/{filename}', file_content)
- files_added.add(attachment.file.name)
+ self._add_storage_file_to_zip(
+ zip_file,
+ attachment.file.name,
+ f'attachments/{filename}',
+ files_added,
+ )
except Exception as e:
print(f"Error adding attachment {attachment.file.name}: {e}")
+
+ # Add visit attachments
+ for visit in location.visits.all():
+ for attachment in visit.attachments.all():
+ if attachment.file and attachment.file.name not in files_added:
+ try:
+ filename = attachment.file.name.split('/')[-1]
+ self._add_storage_file_to_zip(
+ zip_file,
+ attachment.file.name,
+ f'attachments/{filename}',
+ files_added,
+ )
+ except Exception as e:
+ print(f"Error adding visit attachment {attachment.file.name}: {e}")
# Add GPX files from activities
for visit in location.visits.all():
for activity in visit.activities.all():
if activity.gpx_file and activity.gpx_file.name not in files_added:
try:
- gpx_content = default_storage.open(activity.gpx_file.name).read()
filename = activity.gpx_file.name.split('/')[-1]
- zip_file.writestr(f'gpx/{filename}', gpx_content)
- files_added.add(activity.gpx_file.name)
+ self._add_storage_file_to_zip(
+ zip_file,
+ activity.gpx_file.name,
+ f'gpx/{filename}',
+ files_added,
+ )
except Exception as e:
print(f"Error adding GPX file {activity.gpx_file.name}: {e}")
+
+ # Add non-location content images/attachments
+ for transport in user.transportation_set.all():
+ for image in transport.images.all():
+ if image.image and image.image.name not in files_added:
+ try:
+ filename = image.image.name.split('/')[-1]
+ self._add_storage_file_to_zip(
+ zip_file,
+ image.image.name,
+ f'images/{filename}',
+ files_added,
+ )
+ except Exception as e:
+ print(f"Error adding transportation image {image.image.name}: {e}")
+ for attachment in transport.attachments.all():
+ if attachment.file and attachment.file.name not in files_added:
+ try:
+ filename = attachment.file.name.split('/')[-1]
+ self._add_storage_file_to_zip(
+ zip_file,
+ attachment.file.name,
+ f'attachments/{filename}',
+ files_added,
+ )
+ except Exception as e:
+ print(f"Error adding transportation attachment {attachment.file.name}: {e}")
+
+ for note in user.note_set.all():
+ for image in note.images.all():
+ if image.image and image.image.name not in files_added:
+ try:
+ filename = image.image.name.split('/')[-1]
+ self._add_storage_file_to_zip(
+ zip_file,
+ image.image.name,
+ f'images/{filename}',
+ files_added,
+ )
+ except Exception as e:
+ print(f"Error adding note image {image.image.name}: {e}")
+ for attachment in note.attachments.all():
+ if attachment.file and attachment.file.name not in files_added:
+ try:
+ filename = attachment.file.name.split('/')[-1]
+ self._add_storage_file_to_zip(
+ zip_file,
+ attachment.file.name,
+ f'attachments/{filename}',
+ files_added,
+ )
+ except Exception as e:
+ print(f"Error adding note attachment {attachment.file.name}: {e}")
+
+ for lodging in user.lodging_set.all():
+ for image in lodging.images.all():
+ if image.image and image.image.name not in files_added:
+ try:
+ filename = image.image.name.split('/')[-1]
+ self._add_storage_file_to_zip(
+ zip_file,
+ image.image.name,
+ f'images/{filename}',
+ files_added,
+ )
+ except Exception as e:
+ print(f"Error adding lodging image {image.image.name}: {e}")
+ for attachment in lodging.attachments.all():
+ if attachment.file and attachment.file.name not in files_added:
+ try:
+ filename = attachment.file.name.split('/')[-1]
+ self._add_storage_file_to_zip(
+ zip_file,
+ attachment.file.name,
+ f'attachments/{filename}',
+ files_added,
+ )
+ except Exception as e:
+ print(f"Error adding lodging attachment {attachment.file.name}: {e}")
# Return ZIP file as response
with open(tmp_file.name, 'rb') as zip_file:
@@ -611,6 +870,16 @@ class BackupViewSet(viewsets.ViewSet):
pending_primary_images = []
location_images_map = {}
+ visit_images_map = {}
+ transportation_images_map = {}
+ note_images_map = {}
+ lodging_images_map = {}
+
+ content_type_location = ContentType.objects.get(model='location')
+ content_type_visit = ContentType.objects.get(model='visit')
+ content_type_transportation = ContentType.objects.get(model='transportation')
+ content_type_note = ContentType.objects.get(model='note')
+ content_type_lodging = ContentType.objects.get(model='lodging')
# Import Collections
for col_data in backup_data.get('collections', []):
@@ -721,6 +990,10 @@ class BackupViewSet(viewsets.ViewSet):
timezone=visit_data.get('timezone'),
notes=visit_data.get('notes')
)
+
+ visit_export_id = visit_data.get('export_id')
+ if visit_export_id is not None:
+ visit_images_map.setdefault((adv_data['export_id'], visit_export_id), [])
# Import activities for this visit
for activity_data in visit_data.get('activities', []):
@@ -783,77 +1056,50 @@ class BackupViewSet(viewsets.ViewSet):
activity.save()
summary['activities'] += 1
+
+ # Import visit images/attachments (if present)
+ created_visit_images = self._import_images(
+ visit_data.get('images', []),
+ zip_file,
+ user,
+ content_type_visit,
+ visit.id,
+ summary,
+ )
+ if visit_export_id is not None:
+ visit_images_map[(adv_data['export_id'], visit_export_id)].extend(created_visit_images)
+
+ self._import_attachments(
+ visit_data.get('attachments', []),
+ zip_file,
+ user,
+ content_type_visit,
+ visit.id,
+ summary,
+ )
# Import images
- content_type = ContentType.objects.get(model='location')
+ created_location_images = self._import_images(
+ adv_data.get('images', []),
+ zip_file,
+ user,
+ content_type_location,
+ location.id,
+ summary,
+ )
+ location_images_map[adv_data['export_id']].extend(created_location_images)
- for img_data in adv_data.get('images', []):
- immich_id = img_data.get('immich_id')
- if immich_id:
- new_img = ContentImage.objects.create(
- user=user,
- immich_id=immich_id,
- is_primary=img_data.get('is_primary', False),
- content_type=content_type,
- object_id=location.id
- )
- location_images_map[adv_data['export_id']].append(new_img)
- summary['images'] += 1
- else:
- filename = img_data.get('filename')
- if filename:
- try:
- img_content = zip_file.read(f'images/{filename}')
- img_file = ContentFile(img_content, name=filename)
- new_img = ContentImage.objects.create(
- user=user,
- image=img_file,
- is_primary=img_data.get('is_primary', False),
- content_type=content_type,
- object_id=location.id
- )
- location_images_map[adv_data['export_id']].append(new_img)
- summary['images'] += 1
- except KeyError:
- pass
-
- # Import attachments
- for att_data in adv_data.get('attachments', []):
- filename = att_data.get('filename')
- if filename:
- try:
- att_content = zip_file.read(f'attachments/{filename}')
- att_file = ContentFile(att_content, name=filename)
- ContentAttachment.objects.create(
- user=user,
- file=att_file,
- name=att_data.get('name'),
- content_type=content_type,
- object_id=location.id
- )
- summary['attachments'] += 1
- except KeyError:
- pass
+ self._import_attachments(
+ adv_data.get('attachments', []),
+ zip_file,
+ user,
+ content_type_location,
+ location.id,
+ summary,
+ )
summary['locations'] += 1
- # Apply primary image selections now that images exist
- for entry in pending_primary_images:
- collection = collection_map.get(entry['collection_export_id'])
- data = entry.get('data', {}) or {}
- if not collection:
- continue
-
- loc_export_id = data.get('location_export_id')
- img_index = data.get('image_index')
- if loc_export_id is None or img_index is None:
- continue
-
- images_for_location = location_images_map.get(loc_export_id, [])
- if 0 <= img_index < len(images_for_location):
- collection.primary_image = images_for_location[img_index]
- collection.save(update_fields=['primary_image'])
-
# Import Transportation
transportation_map = {} # Map export_id to actual transportation object
for trans_data in backup_data.get('transportation', []):
@@ -889,6 +1135,28 @@ class BackupViewSet(viewsets.ViewSet):
is_public=trans_data.get('is_public', False),
collection=collection
)
+
+ export_id = trans_data.get('export_id')
+ if export_id is not None:
+ transportation_images_map.setdefault(export_id, [])
+ transportation_images_map[export_id].extend(
+ self._import_images(
+ trans_data.get('images', []),
+ zip_file,
+ user,
+ content_type_transportation,
+ transportation.id,
+ summary,
+ )
+ )
+ self._import_attachments(
+ trans_data.get('attachments', []),
+ zip_file,
+ user,
+ content_type_transportation,
+ transportation.id,
+ summary,
+ )
# Only add to map if export_id exists (for backward compatibility with old backups)
if 'export_id' in trans_data:
transportation_map[trans_data['export_id']] = transportation
@@ -910,6 +1178,28 @@ class BackupViewSet(viewsets.ViewSet):
is_public=note_data.get('is_public', False),
collection=collection
)
+
+ export_id = note_data.get('export_id')
+ if export_id is not None:
+ note_images_map.setdefault(export_id, [])
+ note_images_map[export_id].extend(
+ self._import_images(
+ note_data.get('images', []),
+ zip_file,
+ user,
+ content_type_note,
+ note.id,
+ summary,
+ )
+ )
+ self._import_attachments(
+ note_data.get('attachments', []),
+ zip_file,
+ user,
+ content_type_note,
+ note.id,
+ summary,
+ )
# Only add to map if export_id exists (for backward compatibility with old backups)
if 'export_id' in note_data:
note_map[note_data['export_id']] = note
@@ -976,10 +1266,77 @@ class BackupViewSet(viewsets.ViewSet):
is_public=lodg_data.get('is_public', False),
collection=collection
)
+
+ export_id = lodg_data.get('export_id')
+ if export_id is not None:
+ lodging_images_map.setdefault(export_id, [])
+ lodging_images_map[export_id].extend(
+ self._import_images(
+ lodg_data.get('images', []),
+ zip_file,
+ user,
+ content_type_lodging,
+ lodging.id,
+ summary,
+ )
+ )
+ self._import_attachments(
+ lodg_data.get('attachments', []),
+ zip_file,
+ user,
+ content_type_lodging,
+ lodging.id,
+ summary,
+ )
# Only add to map if export_id exists (for backward compatibility with old backups)
if 'export_id' in lodg_data:
lodging_map[lodg_data['export_id']] = lodging
summary['lodging'] += 1
+
+ # Apply primary image selections now that images exist
+ for entry in pending_primary_images:
+ collection = collection_map.get(entry['collection_export_id'])
+ data = entry.get('data', {}) or {}
+ if not collection:
+ continue
+
+ content_type_str = data.get('content_type') or 'location'
+ img_index = data.get('image_index')
+ if img_index is None:
+ continue
+
+ if content_type_str == 'location':
+ loc_export_id = data.get('location_export_id')
+ if loc_export_id is None:
+ continue
+ images_for_object = location_images_map.get(loc_export_id, [])
+ elif content_type_str == 'visit':
+ loc_export_id = data.get('location_export_id')
+ visit_export_id = data.get('visit_export_id')
+ if loc_export_id is None or visit_export_id is None:
+ continue
+ images_for_object = visit_images_map.get((loc_export_id, visit_export_id), [])
+ elif content_type_str == 'transportation':
+ obj_export_id = data.get('object_export_id')
+ if obj_export_id is None:
+ continue
+ images_for_object = transportation_images_map.get(obj_export_id, [])
+ elif content_type_str == 'note':
+ obj_export_id = data.get('object_export_id')
+ if obj_export_id is None:
+ continue
+ images_for_object = note_images_map.get(obj_export_id, [])
+ elif content_type_str == 'lodging':
+ obj_export_id = data.get('object_export_id')
+ if obj_export_id is None:
+ continue
+ images_for_object = lodging_images_map.get(obj_export_id, [])
+ else:
+ continue
+
+ if 0 <= img_index < len(images_for_object):
+ collection.primary_image = images_for_object[img_index]
+ collection.save(update_fields=['primary_image'])
# Import Itinerary Items
# Maps already created during import of each content type
diff --git a/backend/server/adventures/views/itinerary_view.py b/backend/server/adventures/views/itinerary_view.py
index f6ddae34..318d88b3 100644
--- a/backend/server/adventures/views/itinerary_view.py
+++ b/backend/server/adventures/views/itinerary_view.py
@@ -54,6 +54,8 @@ class ItineraryViewSet(viewsets.ModelViewSet):
if isinstance(is_global, str):
is_global = is_global.lower() in ['1', 'true', 'yes']
data['is_global'] = is_global
+ if is_global and not target_date:
+ data['date'] = None
# Support legacy field 'location' -> treat as content_type='location'
if not content_type_val and data.get('location'):
diff --git a/backend/server/adventures/views/location_image_view.py b/backend/server/adventures/views/location_image_view.py
index d1a9c4b0..27a5d177 100644
--- a/backend/server/adventures/views/location_image_view.py
+++ b/backend/server/adventures/views/location_image_view.py
@@ -4,8 +4,11 @@ from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework.throttling import UserRateThrottle
from django.http import HttpResponse
+from concurrent.futures import ThreadPoolExecutor, as_completed
import ipaddress
+import mimetypes
import socket
+from urllib.parse import urljoin
from urllib.parse import urlparse
from django.db.models import Q
from django.core.files.base import ContentFile
@@ -17,6 +20,7 @@ from adventures.permissions import IsOwnerOrSharedWithFullAccess # Your existin
import requests
from adventures.permissions import ContentImagePermission
import logging
+import uuid
logger = logging.getLogger(__name__)
@@ -25,6 +29,17 @@ class ImageProxyThrottle(UserRateThrottle):
scope = 'image_proxy'
+def _public_import_error_message(exc):
+ """Return a safe, user-facing import error without exposing internal details."""
+ if isinstance(exc, ValueError):
+ return "Invalid image URL"
+ if isinstance(exc, requests.exceptions.Timeout):
+ return "Download timeout"
+ if isinstance(exc, requests.exceptions.RequestException):
+ return "Failed to fetch image from the remote server"
+ return "Image import failed"
+
+
def _is_safe_url(image_url):
"""
Validate a URL for safe proxy use.
@@ -67,6 +82,149 @@ def _is_safe_url(image_url):
return True, parsed
+def download_remote_image(image_url):
+ safe, result = _is_safe_url(image_url)
+ if not safe:
+ raise ValueError(result)
+
+ headers = {'User-Agent': 'AdventureLog/1.0 (Image Import)'}
+ max_redirects = 3
+ current_url = image_url
+
+ response = None
+ for _ in range(max_redirects + 1):
+ response = requests.get(
+ current_url,
+ timeout=10,
+ headers=headers,
+ stream=True,
+ allow_redirects=False,
+ )
+
+ if not response.is_redirect:
+ break
+
+ redirect_url = response.headers.get('Location', '')
+ if not redirect_url:
+ raise ValueError('Redirect with missing Location header')
+
+ # Handle relative redirects safely.
+ redirect_url = urljoin(current_url, redirect_url)
+
+ safe, result = _is_safe_url(redirect_url)
+ if not safe:
+ raise ValueError(f'Redirect blocked: {result}')
+
+ current_url = redirect_url
+ else:
+ raise ValueError('Too many redirects')
+
+ if response is None:
+ raise ValueError('Failed to fetch image')
+
+ response.raise_for_status()
+
+ content_type = response.headers.get('Content-Type', '').split(';')[0].strip().lower()
+ if not content_type.startswith('image/'):
+ raise ValueError('URL does not point to an image')
+
+ content_length = response.headers.get('Content-Length')
+ if content_length and int(content_length) > 20 * 1024 * 1024:
+ raise ValueError('Image too large (max 20MB)')
+
+ ext = mimetypes.guess_extension(content_type) or '.jpg'
+ if ext == '.jpe':
+ ext = '.jpg'
+
+ return {
+ 'filename': f"remote_{uuid.uuid4().hex}{ext}",
+ 'content': response.content,
+ 'content_type': content_type,
+ 'source_url': image_url,
+ }
+
+
+def import_remote_images_for_object(content_object, urls, owner=None, max_workers=5):
+ """Download remote URLs and attach them as ContentImage records for a content object."""
+ content_type = ContentType.objects.get_for_model(content_object.__class__)
+ object_id = str(content_object.id)
+ image_owner = owner or getattr(content_object, 'user', None)
+
+ downloaded_results = []
+ worker_count = max(1, min(max_workers, len(urls)))
+
+ with ThreadPoolExecutor(max_workers=worker_count) as executor:
+ futures = {
+ executor.submit(download_remote_image, image_url): (index, image_url)
+ for index, image_url in enumerate(urls)
+ }
+
+ for future in as_completed(futures):
+ index, image_url = futures[future]
+ try:
+ file_data = future.result()
+ downloaded_results.append((index, image_url, file_data, None))
+ except Exception as exc:
+ logger.warning(
+ "Image import failed for URL %s",
+ image_url,
+ exc_info=True,
+ )
+ downloaded_results.append((index, image_url, None, _public_import_error_message(exc)))
+
+ downloaded_results.sort(key=lambda item: item[0])
+
+ existing_image_count = ContentImage.objects.filter(
+ content_type=content_type,
+ object_id=object_id,
+ ).count()
+ set_primary_next = existing_image_count == 0
+
+ created_images = []
+ results = []
+ failed = []
+
+ for _, image_url, file_data, error_message in downloaded_results:
+ if error_message:
+ failure = {
+ 'url': image_url,
+ 'error': error_message,
+ }
+ results.append({
+ **failure,
+ 'status': 'failed',
+ })
+ failed.append(failure)
+ continue
+
+ image_file = ContentFile(file_data['content'], name=file_data['filename'])
+ image = ContentImage.objects.create(
+ user=image_owner,
+ image=image_file,
+ content_type=content_type,
+ object_id=object_id,
+ is_primary=set_primary_next,
+ )
+ if set_primary_next:
+ set_primary_next = False
+
+ created_images.append(image)
+ results.append({
+ 'url': image_url,
+ 'status': 'created',
+ 'id': str(image.id),
+ })
+
+ return {
+ 'created_images': created_images,
+ 'results': results,
+ 'created_count': len(created_images),
+ 'requested_count': len(urls),
+ 'failed_count': len(failed),
+ 'failed': failed,
+ }
+
+
class ContentImageViewSet(viewsets.ModelViewSet):
serializer_class = ContentImageSerializer
permission_classes = [ContentImagePermission]
@@ -192,69 +350,12 @@ class ContentImageViewSet(viewsets.ModelViewSet):
status=status.HTTP_400_BAD_REQUEST
)
- # Validate the initial URL (scheme, port, SSRF check on all resolved IPs)
- safe, result = _is_safe_url(image_url)
- if not safe:
- return Response({"error": result}, status=status.HTTP_400_BAD_REQUEST)
-
try:
- headers = {'User-Agent': 'AdventureLog/1.0 (Image Proxy)'}
- max_redirects = 3
- current_url = image_url
+ image_data = download_remote_image(str(image_url).strip())
+ return HttpResponse(image_data['content'], content_type=image_data['content_type'], status=200)
- for _ in range(max_redirects + 1):
- response = requests.get(
- current_url,
- timeout=10,
- headers=headers,
- stream=True,
- allow_redirects=False,
- )
-
- if not response.is_redirect:
- break
-
- # Re-validate every redirect destination before following
- redirect_url = response.headers.get('Location', '')
- if not redirect_url:
- return Response(
- {"error": "Redirect with missing Location header"},
- status=status.HTTP_502_BAD_GATEWAY,
- )
-
- safe, result = _is_safe_url(redirect_url)
- if not safe:
- return Response(
- {"error": f"Redirect blocked: {result}"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- current_url = redirect_url
- else:
- return Response(
- {"error": "Too many redirects"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- response.raise_for_status()
-
- content_type = response.headers.get('Content-Type', '')
- if not content_type.startswith('image/'):
- return Response(
- {"error": "URL does not point to an image"},
- status=status.HTTP_400_BAD_REQUEST
- )
-
- content_length = response.headers.get('Content-Length')
- if content_length and int(content_length) > 20 * 1024 * 1024:
- return Response(
- {"error": "Image too large (max 20MB)"},
- status=status.HTTP_400_BAD_REQUEST
- )
-
- image_data = response.content
-
- return HttpResponse(image_data, content_type=content_type, status=200)
+ except ValueError:
+ return Response({"error": "Invalid image URL"}, status=status.HTTP_400_BAD_REQUEST)
except requests.exceptions.Timeout:
logger.error("Timeout fetching image from URL %s", image_url)
@@ -269,6 +370,64 @@ class ContentImageViewSet(viewsets.ModelViewSet):
status=status.HTTP_502_BAD_GATEWAY
)
+ @action(detail=False, methods=['post'], permission_classes=[IsAuthenticated])
+ def import_from_urls(self, request):
+ content_type_name = request.data.get('content_type')
+ object_id = request.data.get('object_id')
+ urls = request.data.get('urls')
+
+ if not isinstance(urls, list) or not urls:
+ return Response({"error": "urls must be a non-empty array"}, status=status.HTTP_400_BAD_REQUEST)
+
+ urls = [str(url).strip() for url in urls if str(url).strip()]
+ if not urls:
+ return Response({"error": "No valid URLs provided"}, status=status.HTTP_400_BAD_REQUEST)
+
+ if len(urls) > 10:
+ return Response({"error": "Maximum 10 URLs per request"}, status=status.HTTP_400_BAD_REQUEST)
+
+ content_object = self._get_and_validate_content_object(content_type_name, object_id)
+ if isinstance(content_object, Response):
+ return content_object
+
+ owner = getattr(content_object, 'user', request.user)
+
+ import_summary = import_remote_images_for_object(
+ content_object,
+ urls,
+ owner=owner,
+ max_workers=min(5, len(urls)),
+ )
+
+ created_images = import_summary['created_images']
+ results = import_summary['results']
+
+ if not created_images:
+ return Response(
+ {
+ 'error': 'No images could be imported',
+ 'results': results,
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ serialized = ContentImageSerializer(created_images, many=True, context={'request': request})
+ response_status = (
+ status.HTTP_201_CREATED
+ if import_summary['created_count'] == import_summary['requested_count']
+ else status.HTTP_200_OK
+ )
+
+ return Response(
+ {
+ 'created': serialized.data,
+ 'results': results,
+ 'created_count': import_summary['created_count'],
+ 'requested_count': import_summary['requested_count'],
+ },
+ status=response_status,
+ )
+
def create(self, request, *args, **kwargs):
# Get content type and object ID from request
content_type_name = request.data.get('content_type')
diff --git a/backend/server/adventures/views/location_view.py b/backend/server/adventures/views/location_view.py
index c9630e18..2f9ae8a6 100644
--- a/backend/server/adventures/views/location_view.py
+++ b/backend/server/adventures/views/location_view.py
@@ -12,8 +12,31 @@ import requests
from adventures.models import Location, Category, Collection, CollectionItineraryItem, ContentImage, Visit
from django.contrib.contenttypes.models import ContentType
from adventures.permissions import IsOwnerOrSharedWithFullAccess
-from adventures.serializers import LocationSerializer, MapPinSerializer, CalendarLocationSerializer
+from adventures.serializers import (
+ CalendarLocationSerializer,
+ CollectionItineraryItemSerializer,
+ LocationSerializer,
+ MapPinSerializer,
+)
from adventures.utils import pagination
+from adventures.geocoding import reverse_geocode
+from worldtravel.models import City, Country, Region
+from .location_image_view import import_remote_images_for_object
+from .quick_add_utils import (
+ build_quick_add_description,
+ clean_url,
+ coerce_bool,
+ coerce_coordinate,
+ coerce_float,
+ coerce_int,
+ create_quick_add_itinerary_item,
+ extract_google_place_details,
+ parse_itinerary_date,
+ preferred_link,
+ resolve_quick_add_collection,
+ sanitize_photo_urls,
+ sanitize_tags,
+)
logger = logging.getLogger(__name__)
@@ -158,6 +181,122 @@ class LocationViewSet(viewsets.ModelViewSet):
# ==================== CUSTOM ACTIONS ====================
+ @action(detail=False, methods=['post'], url_path='quick-add')
+ @transaction.atomic
+ def quick_add(self, request):
+ """Create a location from lightweight map/place input in one server-side call."""
+ payload = request.data if isinstance(request.data, dict) else {}
+
+ name = str(payload.get('name') or '').strip()
+ if not name:
+ return Response({"error": "name is required"}, status=status.HTTP_400_BAD_REQUEST)
+
+ latitude = coerce_coordinate(payload.get('latitude'), -90, 90)
+ longitude = coerce_coordinate(payload.get('longitude'), -180, 180)
+ if latitude is None or longitude is None:
+ return Response(
+ {"error": "Valid latitude and longitude are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ collection = self._resolve_quick_add_collection(payload.get('collection_id'))
+ if isinstance(collection, Response):
+ return collection
+
+ reverse_data = {}
+ _, details = extract_google_place_details(payload, fallback_query=name)
+
+ try:
+ reverse_result = reverse_geocode(latitude, longitude, request.user)
+ if isinstance(reverse_result, dict) and 'error' not in reverse_result:
+ reverse_data = reverse_result
+ except Exception:
+ reverse_data = {}
+
+ rating = coerce_float(payload.get('rating'))
+ if rating is None:
+ rating = coerce_float(details.get('rating'))
+
+ review_count = coerce_int(payload.get('review_count'))
+ if review_count is None:
+ review_count = coerce_int(details.get('review_count'))
+
+ link = preferred_link(payload, details)
+
+ phone_number = str(details.get('phone_number') or payload.get('phone_number') or '').strip() or None
+
+ location_label = (
+ str(payload.get('location') or '').strip()
+ or str(reverse_data.get('display_name') or '').strip()
+ or str(details.get('formatted_address') or '').strip()
+ or None
+ )
+
+ description = build_quick_add_description(
+ base_description=payload.get('description'),
+ detailed_description=details.get('description'),
+ )
+
+ category_payload = self._normalize_quick_add_category(payload.get('category'))
+ if isinstance(category_payload, Response):
+ return category_payload
+
+ serializer_payload = {
+ 'name': name,
+ 'location': location_label,
+ 'latitude': latitude,
+ 'longitude': longitude,
+ 'rating': rating,
+ 'description': description,
+ 'link': link,
+ 'tags': sanitize_tags(payload.get('types') or payload.get('tags')),
+ 'is_public': coerce_bool(payload.get('is_public'), default=False),
+ }
+
+ if category_payload:
+ serializer_payload['category'] = category_payload
+
+ if collection:
+ serializer_payload['collections'] = [str(collection.id)]
+
+ serializer = self.get_serializer(data=serializer_payload)
+ serializer.is_valid(raise_exception=True)
+ self.perform_create(serializer)
+
+ location = serializer.instance
+ self._apply_reverse_geocode_metadata(location, reverse_data, location_label)
+
+ itinerary_date = parse_itinerary_date(payload.get('itinerary_date'))
+ itinerary_item = None
+ if collection and itinerary_date:
+ itinerary_item = create_quick_add_itinerary_item(collection, location, itinerary_date)
+ if isinstance(itinerary_item, Response):
+ return itinerary_item
+
+ photo_urls = sanitize_photo_urls(payload.get('photos'))
+ image_import_summary = None
+ if photo_urls:
+ image_import_summary = import_remote_images_for_object(
+ location,
+ photo_urls,
+ owner=location.user,
+ max_workers=min(5, len(photo_urls)),
+ )
+
+ response_data = self.get_serializer(location).data
+ if itinerary_item:
+ response_data['quick_add_itinerary_item'] = CollectionItineraryItemSerializer(
+ itinerary_item
+ ).data
+ if image_import_summary and image_import_summary.get('failed'):
+ response_data['quick_add_image_import'] = {
+ 'created_count': image_import_summary['created_count'],
+ 'failed_count': image_import_summary['failed_count'],
+ 'failed': image_import_summary['failed'],
+ }
+
+ return Response(response_data, status=status.HTTP_201_CREATED)
+
@action(detail=False, methods=['get'])
def filtered(self, request):
"""Filter locations by category types and visit status."""
@@ -460,6 +599,122 @@ class LocationViewSet(viewsets.ModelViewSet):
f"You don't have permission to add location to collection '{collection.name}'"
)
+ def _resolve_quick_add_collection(self, collection_id):
+ return resolve_quick_add_collection(
+ collection_id,
+ validate_permissions=self._validate_collection_permissions,
+ permission_error_message=(
+ "You do not have permission to add this location to the selected collection."
+ ),
+ )
+
+ def _coerce_coordinate(self, value, min_value, max_value):
+ return coerce_coordinate(value, min_value, max_value)
+
+ def _coerce_float(self, value):
+ return coerce_float(value)
+
+ def _coerce_int(self, value):
+ return coerce_int(value)
+
+ def _coerce_bool(self, value, default=False):
+ return coerce_bool(value, default=default)
+
+ def _clean_url(self, value):
+ return clean_url(value)
+
+ def _sanitize_tags(self, raw_tags):
+ return sanitize_tags(raw_tags)
+
+ def _sanitize_photo_urls(self, raw_urls):
+ return sanitize_photo_urls(raw_urls)
+
+ def _normalize_quick_add_category(self, raw_category):
+ if not raw_category:
+ return None
+
+ if isinstance(raw_category, dict):
+ category_id = raw_category.get('id')
+ name = str(raw_category.get('name') or '').strip().lower()
+ display_name = str(raw_category.get('display_name') or '').strip()
+ icon = str(raw_category.get('icon') or '').strip() or '🌍'
+ elif isinstance(raw_category, str):
+ category_id = raw_category.strip()
+ name = ''
+ display_name = ''
+ icon = '🌍'
+ else:
+ return Response(
+ {"error": "category must be an object or string"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ category = None
+ if category_id:
+ category = Category.objects.filter(id=category_id, user=self.request.user).first()
+ if not category:
+ return Response(
+ {"error": "Category not found or inaccessible"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ if category:
+ return {
+ 'name': category.name,
+ 'display_name': category.display_name,
+ 'icon': category.icon,
+ }
+
+ if not name:
+ return None
+
+ return {
+ 'name': name,
+ 'display_name': display_name or name,
+ 'icon': icon,
+ }
+
+ def _build_quick_add_description(
+ self,
+ base_description,
+ detailed_description,
+ ):
+ return build_quick_add_description(base_description, detailed_description)
+
+ def _apply_reverse_geocode_metadata(self, location, reverse_data, fallback_location):
+ if not isinstance(reverse_data, dict):
+ reverse_data = {}
+
+ updated_fields = []
+
+ region_id = reverse_data.get('region_id')
+ if region_id:
+ region = Region.objects.filter(id=region_id).first()
+ if region and location.region_id != region.id:
+ location.region = region
+ updated_fields.append('region')
+
+ city_id = reverse_data.get('city_id')
+ if city_id:
+ city = City.objects.filter(id=city_id).first()
+ if city and location.city_id != city.id:
+ location.city = city
+ updated_fields.append('city')
+
+ country_id = reverse_data.get('country_id')
+ if country_id:
+ country = Country.objects.filter(country_code=country_id).first()
+ if country and location.country_id != country.id:
+ location.country = country
+ updated_fields.append('country')
+
+ if fallback_location and not location.location:
+ location.location = fallback_location
+ updated_fields.append('location')
+
+ if updated_fields:
+ location.save(update_fields=updated_fields, _skip_geocode=True)
+
def _apply_visit_filtering(self, queryset, request):
"""Apply visit status filtering to queryset."""
is_visited_param = request.query_params.get('is_visited')
diff --git a/backend/server/adventures/views/lodging_view.py b/backend/server/adventures/views/lodging_view.py
index 159c127a..d530fa1b 100644
--- a/backend/server/adventures/views/lodging_view.py
+++ b/backend/server/adventures/views/lodging_view.py
@@ -1,12 +1,27 @@
from rest_framework import viewsets, status
from rest_framework.decorators import action
from rest_framework.response import Response
+from django.db import transaction
from django.db.models import Q
from adventures.models import Lodging
-from adventures.serializers import LodgingSerializer
+from adventures.serializers import CollectionItineraryItemSerializer, LodgingSerializer
from rest_framework.exceptions import PermissionDenied
from adventures.permissions import IsOwnerOrSharedWithFullAccess
-from rest_framework.permissions import IsAuthenticated
+from adventures.geocoding import reverse_geocode
+from .location_image_view import import_remote_images_for_object
+from .quick_add_utils import (
+ build_quick_add_description,
+ coerce_bool,
+ coerce_coordinate,
+ coerce_float,
+ create_quick_add_itinerary_item,
+ extract_google_place_details,
+ infer_lodging_type,
+ parse_itinerary_date,
+ preferred_link,
+ resolve_quick_add_collection,
+ sanitize_photo_urls,
+)
class LodgingViewSet(viewsets.ModelViewSet):
queryset = Lodging.objects.all()
@@ -63,6 +78,114 @@ class LodgingViewSet(viewsets.ModelViewSet):
def perform_update(self, serializer):
serializer.save()
+
+ @action(detail=False, methods=['post'], url_path='quick-add')
+ @transaction.atomic
+ def quick_add(self, request):
+ """Create a lodging from lightweight map/place input in one server-side call."""
+ payload = request.data if isinstance(request.data, dict) else {}
+
+ name = str(payload.get('name') or '').strip()
+ if not name:
+ return Response({"error": "name is required"}, status=status.HTTP_400_BAD_REQUEST)
+
+ latitude = coerce_coordinate(payload.get('latitude'), -90, 90)
+ longitude = coerce_coordinate(payload.get('longitude'), -180, 180)
+ if latitude is None or longitude is None:
+ return Response(
+ {"error": "Valid latitude and longitude are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ collection = resolve_quick_add_collection(
+ payload.get('collection_id'),
+ validate_permissions=self._validate_collection_permissions,
+ permission_error_message=(
+ "You do not have permission to add this lodging to the selected collection."
+ ),
+ )
+ if isinstance(collection, Response):
+ return collection
+
+ reverse_data = {}
+ try:
+ reverse_result = reverse_geocode(latitude, longitude, request.user)
+ if isinstance(reverse_result, dict) and 'error' not in reverse_result:
+ reverse_data = reverse_result
+ except Exception:
+ reverse_data = {}
+
+ _, details = extract_google_place_details(payload, fallback_query=name)
+
+ rating = coerce_float(payload.get('rating'))
+ if rating is None:
+ rating = coerce_float(details.get('rating'))
+
+ location_label = (
+ str(payload.get('location') or '').strip()
+ or str(reverse_data.get('display_name') or '').strip()
+ or str(details.get('formatted_address') or '').strip()
+ or None
+ )
+
+ place_types = payload.get('types')
+ if not isinstance(place_types, list) or not place_types:
+ place_types = details.get('types') if isinstance(details.get('types'), list) else []
+
+ serializer_payload = {
+ 'name': name,
+ 'type': infer_lodging_type(payload.get('type'), place_types),
+ 'location': location_label,
+ 'latitude': latitude,
+ 'longitude': longitude,
+ 'rating': rating,
+ 'description': build_quick_add_description(
+ base_description=payload.get('description'),
+ detailed_description=details.get('description'),
+ ),
+ 'link': preferred_link(payload, details),
+ 'is_public': coerce_bool(payload.get('is_public'), default=False),
+ }
+
+ if collection:
+ serializer_payload['collection'] = str(collection.id)
+
+ serializer = self.get_serializer(data=serializer_payload)
+ serializer.is_valid(raise_exception=True)
+ self.perform_create(serializer)
+
+ lodging = serializer.instance
+
+ itinerary_date = parse_itinerary_date(payload.get('itinerary_date'))
+ itinerary_item = None
+ if collection and itinerary_date:
+ itinerary_item = create_quick_add_itinerary_item(collection, lodging, itinerary_date)
+ if isinstance(itinerary_item, Response):
+ return itinerary_item
+
+ photo_urls = sanitize_photo_urls(payload.get('photos'))
+ image_import_summary = None
+ if photo_urls:
+ image_import_summary = import_remote_images_for_object(
+ lodging,
+ photo_urls,
+ owner=lodging.user,
+ max_workers=min(5, len(photo_urls)),
+ )
+
+ response_data = self.get_serializer(lodging).data
+ if itinerary_item:
+ response_data['quick_add_itinerary_item'] = CollectionItineraryItemSerializer(
+ itinerary_item
+ ).data
+ if image_import_summary and image_import_summary.get('failed'):
+ response_data['quick_add_image_import'] = {
+ 'created_count': image_import_summary['created_count'],
+ 'failed_count': image_import_summary['failed_count'],
+ 'failed': image_import_summary['failed'],
+ }
+
+ return Response(response_data, status=status.HTTP_201_CREATED)
# when creating an adventure, make sure the user is the owner of the collection or shared with the collection
def perform_create(self, serializer):
@@ -81,4 +204,13 @@ class LodgingViewSet(viewsets.ModelViewSet):
return
# Save the adventure with the current user as the owner
- serializer.save(user=self.request.user)
\ No newline at end of file
+ serializer.save(user=self.request.user)
+
+ def _validate_collection_permissions(self, collections):
+ """Validate permissions for all collections (used by quick add)."""
+ for collection in collections:
+ if collection.user != self.request.user:
+ if not collection.shared_with.filter(id=self.request.user.id).exists():
+ raise PermissionDenied(
+ f"You don't have permission to add lodging to collection '{collection.name}'"
+ )
\ No newline at end of file
diff --git a/backend/server/adventures/views/quick_add_utils.py b/backend/server/adventures/views/quick_add_utils.py
new file mode 100644
index 00000000..37478a75
--- /dev/null
+++ b/backend/server/adventures/views/quick_add_utils.py
@@ -0,0 +1,325 @@
+import datetime
+from urllib.parse import urlparse
+
+from django.core.exceptions import PermissionDenied as DjangoPermissionDenied
+from django.db import models
+from django.utils.dateparse import parse_date, parse_datetime
+from rest_framework import status
+from rest_framework.exceptions import PermissionDenied as DRFPermissionDenied
+from rest_framework.response import Response
+
+from django.contrib.contenttypes.models import ContentType
+
+from adventures.geocoding import get_place_details
+from adventures.models import Collection, CollectionItineraryItem, Visit
+
+
+def coerce_coordinate(value, min_value, max_value):
+ try:
+ number = round(float(value), 6)
+ except (TypeError, ValueError):
+ return None
+
+ if number < min_value or number > max_value:
+ return None
+
+ return number
+
+
+def coerce_float(value):
+ try:
+ return float(value)
+ except (TypeError, ValueError):
+ return None
+
+
+def coerce_int(value):
+ try:
+ return int(value)
+ except (TypeError, ValueError):
+ return None
+
+
+def coerce_bool(value, default=False):
+ if isinstance(value, bool):
+ return value
+
+ if isinstance(value, str):
+ normalized = value.strip().lower()
+ if normalized in {"true", "1", "yes", "on"}:
+ return True
+ if normalized in {"false", "0", "no", "off"}:
+ return False
+
+ return default
+
+
+def clean_url(value):
+ if not isinstance(value, str):
+ return None
+
+ normalized = value.strip()
+ if not normalized:
+ return None
+
+ parsed = urlparse(normalized)
+ if parsed.scheme in {"http", "https"} and parsed.netloc:
+ return normalized
+
+ return None
+
+
+def sanitize_tags(raw_tags, max_tags=8):
+ if not isinstance(raw_tags, list):
+ return []
+
+ tags = []
+ for item in raw_tags:
+ if not isinstance(item, str):
+ continue
+
+ value = item.strip()
+ if not value or value in tags:
+ continue
+
+ tags.append(value)
+ if len(tags) >= max_tags:
+ break
+
+ return tags
+
+
+def sanitize_photo_urls(raw_urls, max_urls=5):
+ if not isinstance(raw_urls, list):
+ return []
+
+ cleaned = []
+ for value in raw_urls:
+ url = clean_url(value)
+ if not url or url in cleaned:
+ continue
+
+ cleaned.append(url)
+ if len(cleaned) >= max_urls:
+ break
+
+ return cleaned
+
+
+def build_quick_add_description(base_description, detailed_description):
+ description = str(detailed_description or "").strip() or str(base_description or "").strip()
+ return description or None
+
+
+def resolve_quick_add_collection(collection_id, validate_permissions, permission_error_message):
+ if not collection_id:
+ return None
+
+ try:
+ collection = Collection.objects.get(id=collection_id)
+ except Collection.DoesNotExist:
+ return Response(
+ {"error": "Collection not found."},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+
+ try:
+ validate_permissions([collection])
+ except (DjangoPermissionDenied, DRFPermissionDenied):
+ return Response(
+ {"error": permission_error_message},
+ status=status.HTTP_403_FORBIDDEN,
+ )
+
+ return collection
+
+
+def extract_google_place_details(payload, fallback_query=""):
+ place_id = str(payload.get("place_id") or "").strip() or None
+ details = {}
+
+ if not place_id:
+ return place_id, details
+
+ details_result = get_place_details(place_id, fallback_query=fallback_query)
+ if isinstance(details_result, dict):
+ if "error" not in details_result or details_result.get("description"):
+ details = details_result
+
+ return place_id, details
+
+
+def preferred_link(payload, details):
+ website = clean_url(details.get("website")) or clean_url(payload.get("website"))
+ maps_url = clean_url(details.get("google_maps_url")) or clean_url(payload.get("google_maps_url"))
+ return clean_url(payload.get("link")) or website or maps_url
+
+
+def infer_lodging_type(primary_type, place_types):
+ valid_types = {
+ "hotel",
+ "hostel",
+ "resort",
+ "bnb",
+ "campground",
+ "cabin",
+ "apartment",
+ "house",
+ "villa",
+ "motel",
+ "other",
+ }
+
+ if isinstance(primary_type, str):
+ normalized = primary_type.strip().lower()
+ if normalized in valid_types:
+ return normalized
+
+ normalized_types = [
+ str(type_name).strip().lower()
+ for type_name in (place_types or [])
+ if str(type_name).strip()
+ ]
+
+ mapping = {
+ "hotel": "hotel",
+ "resort_hotel": "resort",
+ "motel": "motel",
+ "hostel": "hostel",
+ "bed_and_breakfast": "bnb",
+ "guest_house": "bnb",
+ "campground": "campground",
+ "rv_park": "campground",
+ "camping_cabin": "cabin",
+ "apartment_building": "apartment",
+ "lodging": "hotel",
+ "villa": "villa",
+ }
+
+ for type_name in normalized_types:
+ if type_name in mapping:
+ return mapping[type_name]
+
+ for type_name in normalized_types:
+ if type_name in valid_types:
+ return type_name
+
+ return "other"
+
+
+def parse_itinerary_date(value):
+ if not value:
+ return None
+
+ raw_value = str(value).strip()
+ if not raw_value:
+ return None
+
+ parsed_date = parse_date(raw_value)
+ if parsed_date:
+ return parsed_date
+
+ parsed_datetime = parse_datetime(raw_value)
+ if parsed_datetime:
+ return parsed_datetime.date()
+
+ return None
+
+
+def validate_itinerary_date(collection, date_value):
+ if not collection or not date_value:
+ return None
+
+ if collection.start_date and date_value < collection.start_date:
+ return Response(
+ {"error": "Itinerary item date is before the collection start_date"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ if collection.end_date and date_value > collection.end_date:
+ return Response(
+ {"error": "Itinerary item date is after the collection end_date"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ return None
+
+
+def apply_quick_add_itinerary_date(content_object, date_value):
+ if not content_object or not date_value:
+ return
+
+ model_name = content_object._meta.model_name
+
+ if model_name == "location":
+ start_dt = datetime.datetime.combine(date_value, datetime.time.min)
+ end_dt = datetime.datetime.combine(date_value, datetime.time.max)
+
+ exact_match = Visit.objects.filter(
+ location=content_object, start_date=start_dt, end_date=end_dt
+ ).first()
+ if exact_match:
+ return
+
+ overlap_q = models.Q(start_date__lte=end_dt) & models.Q(end_date__gte=start_dt)
+ existing = Visit.objects.filter(location=content_object).filter(overlap_q).first()
+ if existing:
+ existing.start_date = start_dt
+ existing.end_date = end_dt
+ existing.save(update_fields=["start_date", "end_date"])
+ return
+
+ Visit.objects.create(
+ location=content_object,
+ start_date=start_dt,
+ end_date=end_dt,
+ notes="Created from quick add",
+ )
+ return
+
+ if model_name == "lodging":
+ if content_object.check_in and content_object.check_out:
+ return
+
+ check_in = datetime.datetime.combine(date_value, datetime.time.min)
+ check_out = check_in + datetime.timedelta(days=1)
+ content_object.check_in = check_in
+ content_object.check_out = check_out
+ content_object.save(update_fields=["check_in", "check_out"])
+
+
+def create_quick_add_itinerary_item(collection, content_object, date_value):
+ if not collection or not content_object or not date_value:
+ return None
+
+ existing_error = validate_itinerary_date(collection, date_value)
+ if isinstance(existing_error, Response):
+ return existing_error
+
+ content_type = ContentType.objects.get_for_model(content_object.__class__)
+ existing_item = CollectionItineraryItem.objects.filter(
+ collection=collection,
+ content_type=content_type,
+ object_id=content_object.id,
+ date=date_value,
+ is_global=False,
+ ).first()
+ if existing_item:
+ return existing_item
+
+ max_order = (
+ CollectionItineraryItem.objects.filter(
+ collection=collection, date=date_value, is_global=False
+ ).aggregate(max_order=models.Max("order"))["max_order"]
+ or -1
+ )
+
+ apply_quick_add_itinerary_date(content_object, date_value)
+
+ return CollectionItineraryItem.objects.create(
+ collection=collection,
+ content_type=content_type,
+ object_id=content_object.id,
+ date=date_value,
+ is_global=False,
+ order=max_order + 1,
+ )
diff --git a/backend/server/adventures/views/reverse_geocode_view.py b/backend/server/adventures/views/reverse_geocode_view.py
index b0635300..d9fa55d1 100644
--- a/backend/server/adventures/views/reverse_geocode_view.py
+++ b/backend/server/adventures/views/reverse_geocode_view.py
@@ -7,7 +7,7 @@ from adventures.models import Location
from adventures.serializers import LocationSerializer
from adventures.geocoding import reverse_geocode
from django.conf import settings
-from adventures.geocoding import search_google, search_osm
+from adventures.geocoding import search_google, search_osm, get_place_details
class ReverseGeocodeViewSet(viewsets.ViewSet):
permission_classes = [IsAuthenticated]
@@ -131,4 +131,18 @@ class ReverseGeocodeViewSet(viewsets.ViewSet):
"regions": new_regions,
"new_cities": new_city_count,
"cities": new_cities
- })
\ No newline at end of file
+ })
+
+ @action(detail=False, methods=['get'])
+ def place_details(self, request):
+ place_id = request.query_params.get('place_id', '').strip()
+ if not place_id:
+ return Response({"error": "place_id parameter is required"}, status=400)
+
+ name = request.query_params.get('name', '')
+ language = request.query_params.get('language', 'en')
+
+ details = get_place_details(place_id, fallback_query=name, language=language)
+ if 'error' in details and not details.get('description'):
+ return Response(details, status=502)
+ return Response(details)
\ No newline at end of file
diff --git a/backend/server/requirements.txt b/backend/server/requirements.txt
index 69668bd1..ef6659a4 100644
--- a/backend/server/requirements.txt
+++ b/backend/server/requirements.txt
@@ -1,4 +1,4 @@
-Django==5.2.12
+Django==5.2.13
djangorestframework>=3.15.2,<3.16
django-allauth==0.63.3
django-money==3.5.4
@@ -8,7 +8,7 @@ django-cors-headers==4.4.0
coreapi==2.3.3
python-dotenv==1.1.0
psycopg2-binary==2.9.10
-pillow==12.1.1
+pillow==12.2.0
whitenoise==6.9.0
django-resized==1.0.3
django-geojson==4.2.0
diff --git a/backend/server/templates/base.html b/backend/server/templates/base.html
index 205445ee..24234e17 100644
--- a/backend/server/templates/base.html
+++ b/backend/server/templates/base.html
@@ -175,7 +175,7 @@
`API Response: ${data.status} ${data.statusText}
- {$t('adventures.click_map') || 'Click on the map to select a location'}
- {selectedLocation.name}
- {selectedMarker.lat.toFixed(6)}, {selectedMarker.lng.toFixed(6)}
-
- {selectedLocation.category} • {selectedLocation.type || 'location'}
-
- {locationData.display_name}
- {quickAddedLocation.name}
+ {#if mode === 'lodging'}
+ Click on the map to select a lodging
+ {:else}
+ {$t('adventures.click_map') || 'Click on the map to select a location'}
+ {/if}
+ {selectedLocation.name} {selectedLocation.location}
+ {selectedMarker.lat.toFixed(6)}, {selectedMarker.lng.toFixed(6)}
+
+ Optional. If not selected, backend defaults to General.
+
-
Content: ${data.responseText}`
);
};
- const susccess_response = (data) => {
+ const success_response = (data) => {
$(".api-response").html(
`API Response: OK
Content: ${JSON.stringify(
data,
@@ -190,7 +190,7 @@
const form = $("form.ajax-post");
$.post(form.attr("action"), form.serialize())
.fail(error_response)
- .done(susccess_response);
+ .done(success_response);
return false;
});
});
diff --git a/backend/server/users/models.py b/backend/server/users/models.py
index 27fd8a1e..a224c335 100644
--- a/backend/server/users/models.py
+++ b/backend/server/users/models.py
@@ -1,6 +1,7 @@
import hashlib
import secrets
import uuid
+from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.db import models
from django_resized import ResizedImageField
@@ -49,12 +50,15 @@ class APIKey(models.Model):
Security design:
- A 32-byte cryptographically random token is generated with the prefix ``al_``.
- - Only a SHA-256 hash of the full token is persisted; the plaintext is returned
- exactly once at creation time and never stored.
+ - Only a PBKDF2-HMAC-SHA256 derived hash of the full token is persisted;
+ the plaintext is returned exactly once at creation time and never stored.
- The first 12 characters of the token are kept as ``key_prefix`` so users can
- identify their keys without revealing the secret.
+ identify their keys without revealing the secret.
"""
+ _KEY_HASH_ITERATIONS = 600000
+ _KEY_HASH_SALT_NAMESPACE = "users.APIKey"
+
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
user = models.ForeignKey(
CustomUser, on_delete=models.CASCADE, related_name='api_keys'
@@ -71,6 +75,17 @@ class APIKey(models.Model):
def __str__(self):
return f"{self.user.username} – {self.name} ({self.key_prefix}…)"
+ @classmethod
+ def _hash_raw_key(cls, raw_key: str) -> str:
+ """Derive a computationally expensive hash for API key persistence."""
+ salt = f"{cls._KEY_HASH_SALT_NAMESPACE}:{settings.SECRET_KEY}".encode("utf-8")
+ return hashlib.pbkdf2_hmac(
+ "sha256",
+ raw_key.encode("utf-8"),
+ salt,
+ cls._KEY_HASH_ITERATIONS,
+ ).hex()
+
@classmethod
def generate(cls, user, name: str) -> tuple['APIKey', str]:
"""
@@ -80,7 +95,7 @@ class APIKey(models.Model):
user once and must never be stored anywhere after that.
"""
raw_key = f"al_{secrets.token_urlsafe(32)}"
- key_hash = hashlib.sha256(raw_key.encode()).hexdigest()
+ key_hash = cls._hash_raw_key(raw_key)
key_prefix = raw_key[:12]
instance = cls.objects.create(
user=user,
@@ -98,7 +113,7 @@ class APIKey(models.Model):
Returns the matching ``APIKey`` instance (updating ``last_used_at``) or
``None`` if not found.
"""
- key_hash = hashlib.sha256(raw_key.encode()).hexdigest()
+ key_hash = cls._hash_raw_key(raw_key)
try:
api_key = cls.objects.select_related('user').get(key_hash=key_hash)
except cls.DoesNotExist:
diff --git a/documentation/docs/install/docker.md b/documentation/docs/install/docker.md
index d574287c..3e5d4208 100644
--- a/documentation/docs/install/docker.md
+++ b/documentation/docs/install/docker.md
@@ -62,7 +62,7 @@ The `.env` file contains all the configuration settings for your AdventureLog in
| `FRONTEND_URL` | Yes | URL to the **frontend**, used for email generation. | `http://localhost:8015` |
| `BACKEND_PORT` | Yes | Port that the backend will run on inside Docker. | `8016` |
| `DEBUG` | No | Should be `False` in production. | `False` |
-| `ENABLE_RATE_LIMITS` | No | Enable rate limits on the backend. Should be `True` in production. | `True` |
+| `ENABLE_RATE_LIMITS` | No | Enable rate limits on the backend. Defaults to `False`; should be set to `True` in production. | `False` |
## Optional Configuration
diff --git a/frontend/package.json b/frontend/package.json
index a54c7b95..537caf02 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -20,7 +20,7 @@
"@sveltejs/adapter-node": "^5.2.12",
"@sveltejs/adapter-vercel": "^5.7.0",
"@sveltejs/kit": "^2.49.5",
- "@sveltejs/vite-plugin-svelte": "^3.1.2",
+ "@sveltejs/vite-plugin-svelte": "3.1.2",
"@tailwindcss/typography": "^0.5.19",
"@types/node": "^22.15.2",
"@types/qrcode": "^1.5.5",
@@ -30,7 +30,7 @@
"postcss": "^8.5.3",
"prettier": "^3.5.3",
"prettier-plugin-svelte": "^3.3.3",
- "svelte": "^4.2.19",
+ "svelte": "4.2.19",
"svelte-check": "^3.8.6",
"tailwindcss": "^3.4.17",
"tslib": "^2.8.1",
diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml
index 2ba80d43..624a073d 100644
--- a/frontend/pnpm-lock.yaml
+++ b/frontend/pnpm-lock.yaml
@@ -11,8 +11,6 @@ overrides:
brace-expansion@>=4.0.0 <5.0.5: '>=5.0.5'
picomatch@<2.3.2: '>=2.3.2'
picomatch@>=4.0.0 <4.0.4: '>=4.0.4'
- svelte@<=5.51.4: '>=5.51.5'
- svelte@<=5.53.4: '>=5.53.5'
importers:
@@ -20,7 +18,7 @@ importers:
dependencies:
'@lukulent/svelte-umami':
specifier: ^0.0.3
- version: 0.0.3(svelte@5.55.1)
+ version: 0.0.3(svelte@4.2.19)
dompurify:
specifier: ^3.2.5
version: 3.3.3
@@ -44,13 +42,13 @@ importers:
version: 1.5.4
svelte-dnd-action:
specifier: ^0.9.68
- version: 0.9.69(svelte@5.55.1)
+ version: 0.9.69(svelte@4.2.19)
svelte-i18n:
specifier: ^4.0.1
- version: 4.0.1(svelte@5.55.1)
+ version: 4.0.1(svelte@4.2.19)
svelte-maplibre:
specifier: ^0.9.14
- version: 0.9.14(svelte@5.55.1)
+ version: 0.9.14(svelte@4.2.19)
devDependencies:
'@event-calendar/core':
specifier: ^3.12.0
@@ -69,16 +67,16 @@ importers:
version: 1.2.3
'@sveltejs/adapter-node':
specifier: ^5.2.12
- version: 5.5.4(@sveltejs/kit@2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15)))(svelte@5.55.1)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15)))
+ version: 5.5.4(@sveltejs/kit@2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15)))(svelte@4.2.19)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15)))
'@sveltejs/adapter-vercel':
specifier: '>=6.3.2'
- version: 6.3.3(@sveltejs/kit@2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15)))(svelte@5.55.1)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15)))(rollup@4.59.0)
+ version: 6.3.3(@sveltejs/kit@2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15)))(svelte@4.2.19)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15)))(rollup@4.59.0)
'@sveltejs/kit':
specifier: ^2.49.5
- version: 2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15)))(svelte@5.55.1)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15))
+ version: 2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15)))(svelte@4.2.19)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15))
'@sveltejs/vite-plugin-svelte':
- specifier: ^3.1.2
- version: 3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15))
+ specifier: 3.1.2
+ version: 3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15))
'@tailwindcss/typography':
specifier: ^0.5.19
version: 0.5.19(tailwindcss@3.4.19)
@@ -105,13 +103,13 @@ importers:
version: 3.8.1
prettier-plugin-svelte:
specifier: ^3.3.3
- version: 3.5.1(prettier@3.8.1)(svelte@5.55.1)
+ version: 3.5.1(prettier@3.8.1)(svelte@4.2.19)
svelte:
- specifier: '>=5.53.5'
- version: 5.55.1
+ specifier: 4.2.19
+ version: 4.2.19
svelte-check:
specifier: ^3.8.6
- version: 3.8.6(postcss@8.5.8)(svelte@5.55.1)
+ version: 3.8.6(postcss@8.5.8)(svelte@4.2.19)
tailwindcss:
specifier: ^3.4.17
version: 3.4.19
@@ -134,6 +132,10 @@ packages:
resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==}
engines: {node: '>=10'}
+ '@ampproject/remapping@2.3.0':
+ resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==}
+ engines: {node: '>=6.0.0'}
+
'@antfu/install-pkg@0.4.1':
resolution: {integrity: sha512-T7yB5QNG29afhWVkVq7XeIMBa5U/vs9mX69YqayXypPRmYzUmzwnYltplHmPtZ4HPCn+sQKeXW8I47wCbuBOjw==}
@@ -345,9 +347,6 @@ packages:
'@jridgewell/gen-mapping@0.3.13':
resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==}
- '@jridgewell/remapping@2.3.5':
- resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==}
-
'@jridgewell/resolve-uri@3.1.2':
resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==}
engines: {node: '>=6.0.0'}
@@ -361,7 +360,7 @@ packages:
'@lukulent/svelte-umami@0.0.3':
resolution: {integrity: sha512-4pL0sJapfy14yDj6CyZgewbRDadRoBJtk/dLqCJh7/tQuX7HO4hviBzhrVa4Osxaq2kcGEKdpkhAKAoaNdlNSA==}
peerDependencies:
- svelte: '>=5.53.5'
+ svelte: ^4.0.0
'@mapbox/geojson-rewind@0.5.2':
resolution: {integrity: sha512-tJaT+RbYGJYStt7wI3cq4Nl4SXxG8W7JDG5DMJu97V25RnbNg3QtQtf+KD+VLjNpWKYsRvXDNmNrBgEETr1ifA==}
@@ -611,7 +610,7 @@ packages:
peerDependencies:
'@opentelemetry/api': ^1.0.0
'@sveltejs/vite-plugin-svelte': ^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0 || ^7.0.0
- svelte: '>=5.53.5'
+ svelte: ^4.0.0 || ^5.0.0-next.0
typescript: ^5.3.3
vite: ^5.0.3 || ^6.0.0 || ^7.0.0-beta.0 || ^8.0.0
peerDependenciesMeta:
@@ -625,14 +624,14 @@ packages:
engines: {node: ^18.0.0 || >=20}
peerDependencies:
'@sveltejs/vite-plugin-svelte': ^3.0.0
- svelte: '>=5.53.5'
+ svelte: ^4.0.0 || ^5.0.0-next.0
vite: ^5.0.0
'@sveltejs/vite-plugin-svelte@3.1.2':
resolution: {integrity: sha512-Txsm1tJvtiYeLUVRNqxZGKR/mI+CzuIQuc2gn+YCs9rMTowpNZ2Nqt53JdL8KF9bLhAf2ruR/dr9eZCwdTriRA==}
engines: {node: ^18.0.0 || >=20}
peerDependencies:
- svelte: '>=5.53.5'
+ svelte: ^4.0.0 || ^5.0.0-next.0
vite: ^5.0.0
'@tailwindcss/typography@0.5.19':
@@ -682,10 +681,6 @@ packages:
'@types/trusted-types@2.0.7':
resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==}
- '@typescript-eslint/types@8.58.0':
- resolution: {integrity: sha512-O9CjxypDT89fbHxRfETNoAnHj/i6IpRK0CvbVN3qibxlLdo5p5hcLmUuCCrHMpxiWSwKyI8mCP7qRNYuOJ0Uww==}
- engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
-
'@vercel/nft@1.5.0':
resolution: {integrity: sha512-IWTDeIoWhQ7ZtRO/JRKH+jhmeQvZYhtGPmzw/QGDY+wDCQqfm25P9yIdoAFagu4fWsK4IwZXDFIjrmp5rRm/sA==}
engines: {node: '>=20'}
@@ -810,9 +805,8 @@ packages:
cliui@6.0.0:
resolution: {integrity: sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==}
- clsx@2.1.1:
- resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==}
- engines: {node: '>=6'}
+ code-red@1.0.4:
+ resolution: {integrity: sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==}
color-convert@2.0.1:
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
@@ -848,6 +842,10 @@ packages:
css-selector-tokenizer@0.8.0:
resolution: {integrity: sha512-Jd6Ig3/pe62/qe5SBPTN8h8LeUg/pT4lLgtavPf7updwwHpvFzxvOQBHYj2LZDMjUnBzgvIUSjRcf6oT5HzHFg==}
+ css-tree@2.3.1:
+ resolution: {integrity: sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==}
+ engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0}
+
cssesc@3.0.0:
resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==}
engines: {node: '>=4'}
@@ -965,12 +963,12 @@ packages:
resolution: {integrity: sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==}
engines: {node: '>=0.10'}
- esrap@2.2.4:
- resolution: {integrity: sha512-suICpxAmZ9A8bzJjEl/+rLJiDKC0X4gYWUxT6URAWBLvlXmtbZd5ySMu/N2ZGEtMCAmflUDPSehrP9BQcsGcSg==}
-
estree-walker@2.0.2:
resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==}
+ estree-walker@3.0.3:
+ resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==}
+
event-emitter@0.3.5:
resolution: {integrity: sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==}
@@ -1218,6 +1216,9 @@ packages:
engines: {node: '>= 18'}
hasBin: true
+ mdn-data@2.0.30:
+ resolution: {integrity: sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==}
+
memoizee@0.4.17:
resolution: {integrity: sha512-DGqD7Hjpi/1or4F/aYAspXKNm5Yili0QDAFAY4QYvpqpgiY6+1jOfqpmByzjxbWd/T9mChbCArXAbDAsTm5oXA==}
engines: {node: '>=0.12'}
@@ -1360,6 +1361,9 @@ packages:
resolution: {integrity: sha512-XDF38WCH3z5OV/OVa8GKUNtLAyneuzbCisx7QUCF8Q6Nutx0WnJrQe5O+kOtBlLfRNUws98Y58Lblp+NJG5T4Q==}
hasBin: true
+ periscopic@3.1.0:
+ resolution: {integrity: sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==}
+
picocolors@1.1.1:
resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
@@ -1450,7 +1454,7 @@ packages:
resolution: {integrity: sha512-65+fr5+cgIKWKiqM1Doum4uX6bY8iFCdztvvp2RcF+AJoieaw9kJOFMNcJo/bkmKYsxFaM9OsVZK/gWauG/5mg==}
peerDependencies:
prettier: ^3.0.0
- svelte: '>=5.53.5'
+ svelte: ^3.2.0 || ^4.0.0-next.0 || ^5.0.0-next.0
prettier@3.8.1:
resolution: {integrity: sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==}
@@ -1588,25 +1592,25 @@ packages:
resolution: {integrity: sha512-ij0u4Lw/sOTREP13BdWZjiXD/BlHE6/e2e34XzmVmsp5IN4kVa3PWP65NM32JAgwjZlwBg/+JtiNV1MM8khu0Q==}
hasBin: true
peerDependencies:
- svelte: '>=5.53.5'
+ svelte: ^3.55.0 || ^4.0.0-next.0 || ^4.0.0 || ^5.0.0-next.0
svelte-dnd-action@0.9.69:
resolution: {integrity: sha512-NAmSOH7htJoYraTQvr+q5whlIuVoq88vEuHr4NcFgscDRUxfWPPxgie2OoxepBCQCikrXZV4pqV86aun60wVyw==}
peerDependencies:
- svelte: '>=5.53.5'
+ svelte: '>=3.23.0 || ^5.0.0-next.0'
svelte-hmr@0.16.0:
resolution: {integrity: sha512-Gyc7cOS3VJzLlfj7wKS0ZnzDVdv3Pn2IuVeJPk9m2skfhcu5bq3wtIZyQGggr7/Iim5rH5cncyQft/kRLupcnA==}
engines: {node: ^12.20 || ^14.13.1 || >= 16}
peerDependencies:
- svelte: '>=5.53.5'
+ svelte: ^3.19.0 || ^4.0.0
svelte-i18n@4.0.1:
resolution: {integrity: sha512-jaykGlGT5PUaaq04JWbJREvivlCnALtT+m87Kbm0fxyYHynkQaxQMnIKHLm2WeIuBRoljzwgyvz0Z6/CMwfdmQ==}
engines: {node: '>= 16'}
hasBin: true
peerDependencies:
- svelte: '>=5.53.5'
+ svelte: ^3 || ^4 || ^5
svelte-maplibre@0.9.14:
resolution: {integrity: sha512-5HBvibzU/Uf3g8eEz4Hty5XAwoBhW9Tp7NQEvb80U/glR/M1IHyzUKss6XMq8Zbci2wtsASeoPc6dA5R4+0e0w==}
@@ -1614,7 +1618,7 @@ packages:
'@deck.gl/core': ^8.8.0
'@deck.gl/layers': ^8.8.0
'@deck.gl/mapbox': ^8.8.0
- svelte: '>=5.53.5'
+ svelte: ^3.54.0 || ^4.0.0 || ^5.0.0
peerDependenciesMeta:
'@deck.gl/core':
optional: true
@@ -1636,7 +1640,7 @@ packages:
sass: ^1.26.8
stylus: ^0.55.0
sugarss: ^2.0.0 || ^3.0.0 || ^4.0.0
- svelte: '>=5.53.5'
+ svelte: ^3.23.0 || ^4.0.0-next.0 || ^4.0.0 || ^5.0.0-next.0
typescript: '>=3.9.5 || ^4.0.0 || ^5.0.0'
peerDependenciesMeta:
'@babel/core':
@@ -1660,9 +1664,9 @@ packages:
typescript:
optional: true
- svelte@5.55.1:
- resolution: {integrity: sha512-QjvU7EFemf6mRzdMGlAFttMWtAAVXrax61SZYHdkD6yoVGQ89VeyKfZD4H1JrV1WLmJBxWhFch9H6ig/87VGjw==}
- engines: {node: '>=18'}
+ svelte@4.2.19:
+ resolution: {integrity: sha512-IY1rnGr6izd10B0A8LqsBfmlT5OILVuZ7XsI0vdGPEvuonFV7NYEUK4dAkm9Zg2q0Um92kYjTpS1CAP3Nh/KWw==}
+ engines: {node: '>=16'}
tailwindcss@3.4.19:
resolution: {integrity: sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==}
@@ -1846,13 +1850,15 @@ packages:
resolution: {integrity: sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==}
engines: {node: '>=8'}
- zimmerframe@1.1.4:
- resolution: {integrity: sha512-B58NGBEoc8Y9MWWCQGl/gq9xBCe4IiKM0a2x7GZdQKOW5Exr8S1W24J6OgM1njK8xCRGvAJIL/MxXHf6SkmQKQ==}
-
snapshots:
'@alloc/quick-lru@5.2.0': {}
+ '@ampproject/remapping@2.3.0':
+ dependencies:
+ '@jridgewell/gen-mapping': 0.3.13
+ '@jridgewell/trace-mapping': 0.3.31
+
'@antfu/install-pkg@0.4.1':
dependencies:
package-manager-detector: 0.2.11
@@ -1947,22 +1953,22 @@ snapshots:
'@event-calendar/core@3.12.0':
dependencies:
- svelte: 5.55.1
+ svelte: 4.2.19
'@event-calendar/day-grid@3.12.0':
dependencies:
'@event-calendar/core': 3.12.0
- svelte: 5.55.1
+ svelte: 4.2.19
'@event-calendar/interaction@3.12.0':
dependencies:
'@event-calendar/core': 3.12.0
- svelte: 5.55.1
+ svelte: 4.2.19
'@event-calendar/time-grid@3.12.0':
dependencies:
'@event-calendar/core': 3.12.0
- svelte: 5.55.1
+ svelte: 4.2.19
'@formatjs/ecma402-abstract@2.3.6':
dependencies:
@@ -2018,11 +2024,6 @@ snapshots:
'@jridgewell/sourcemap-codec': 1.5.5
'@jridgewell/trace-mapping': 0.3.31
- '@jridgewell/remapping@2.3.5':
- dependencies:
- '@jridgewell/gen-mapping': 0.3.13
- '@jridgewell/trace-mapping': 0.3.31
-
'@jridgewell/resolve-uri@3.1.2': {}
'@jridgewell/sourcemap-codec@1.5.5': {}
@@ -2032,9 +2033,9 @@ snapshots:
'@jridgewell/resolve-uri': 3.1.2
'@jridgewell/sourcemap-codec': 1.5.5
- '@lukulent/svelte-umami@0.0.3(svelte@5.55.1)':
+ '@lukulent/svelte-umami@0.0.3(svelte@4.2.19)':
dependencies:
- svelte: 5.55.1
+ svelte: 4.2.19
'@mapbox/geojson-rewind@0.5.2':
dependencies:
@@ -2209,17 +2210,17 @@ snapshots:
dependencies:
acorn: 8.16.0
- '@sveltejs/adapter-node@5.5.4(@sveltejs/kit@2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15)))(svelte@5.55.1)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15)))':
+ '@sveltejs/adapter-node@5.5.4(@sveltejs/kit@2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15)))(svelte@4.2.19)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15)))':
dependencies:
'@rollup/plugin-commonjs': 29.0.2(rollup@4.59.0)
'@rollup/plugin-json': 6.1.0(rollup@4.59.0)
'@rollup/plugin-node-resolve': 16.0.3(rollup@4.59.0)
- '@sveltejs/kit': 2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15)))(svelte@5.55.1)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15))
+ '@sveltejs/kit': 2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15)))(svelte@4.2.19)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15))
rollup: 4.59.0
- '@sveltejs/adapter-vercel@6.3.3(@sveltejs/kit@2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15)))(svelte@5.55.1)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15)))(rollup@4.59.0)':
+ '@sveltejs/adapter-vercel@6.3.3(@sveltejs/kit@2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15)))(svelte@4.2.19)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15)))(rollup@4.59.0)':
dependencies:
- '@sveltejs/kit': 2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15)))(svelte@5.55.1)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15))
+ '@sveltejs/kit': 2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15)))(svelte@4.2.19)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15))
'@vercel/nft': 1.5.0(rollup@4.59.0)
esbuild: 0.25.12
transitivePeerDependencies:
@@ -2227,11 +2228,11 @@ snapshots:
- rollup
- supports-color
- '@sveltejs/kit@2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15)))(svelte@5.55.1)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15))':
+ '@sveltejs/kit@2.55.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15)))(svelte@4.2.19)(typescript@5.9.3)(vite@5.4.21(@types/node@22.19.15))':
dependencies:
'@standard-schema/spec': 1.1.0
'@sveltejs/acorn-typescript': 1.0.9(acorn@8.16.0)
- '@sveltejs/vite-plugin-svelte': 3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15))
+ '@sveltejs/vite-plugin-svelte': 3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15))
'@types/cookie': 0.6.0
acorn: 8.16.0
cookie: 0.6.0
@@ -2242,29 +2243,29 @@ snapshots:
mrmime: 2.0.1
set-cookie-parser: 3.0.1
sirv: 3.0.2
- svelte: 5.55.1
+ svelte: 4.2.19
vite: 5.4.21(@types/node@22.19.15)
optionalDependencies:
typescript: 5.9.3
- '@sveltejs/vite-plugin-svelte-inspector@2.1.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15)))(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15))':
+ '@sveltejs/vite-plugin-svelte-inspector@2.1.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15)))(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15))':
dependencies:
- '@sveltejs/vite-plugin-svelte': 3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15))
+ '@sveltejs/vite-plugin-svelte': 3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15))
debug: 4.4.3
- svelte: 5.55.1
+ svelte: 4.2.19
vite: 5.4.21(@types/node@22.19.15)
transitivePeerDependencies:
- supports-color
- '@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15))':
+ '@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15))':
dependencies:
- '@sveltejs/vite-plugin-svelte-inspector': 2.1.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15)))(svelte@5.55.1)(vite@5.4.21(@types/node@22.19.15))
+ '@sveltejs/vite-plugin-svelte-inspector': 2.1.0(@sveltejs/vite-plugin-svelte@3.1.2(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15)))(svelte@4.2.19)(vite@5.4.21(@types/node@22.19.15))
debug: 4.4.3
deepmerge: 4.3.1
kleur: 4.1.5
magic-string: 0.30.21
- svelte: 5.55.1
- svelte-hmr: 0.16.0(svelte@5.55.1)
+ svelte: 4.2.19
+ svelte-hmr: 0.16.0(svelte@4.2.19)
vite: 5.4.21(@types/node@22.19.15)
vitefu: 0.2.5(vite@5.4.21(@types/node@22.19.15))
transitivePeerDependencies:
@@ -2315,9 +2316,8 @@ snapshots:
dependencies:
'@types/geojson': 7946.0.16
- '@types/trusted-types@2.0.7': {}
-
- '@typescript-eslint/types@8.58.0': {}
+ '@types/trusted-types@2.0.7':
+ optional: true
'@vercel/nft@1.5.0(rollup@4.59.0)':
dependencies:
@@ -2447,7 +2447,13 @@ snapshots:
strip-ansi: 6.0.1
wrap-ansi: 6.2.0
- clsx@2.1.1: {}
+ code-red@1.0.4:
+ dependencies:
+ '@jridgewell/sourcemap-codec': 1.5.5
+ '@types/estree': 1.0.8
+ acorn: 8.16.0
+ estree-walker: 3.0.3
+ periscopic: 3.1.0
color-convert@2.0.1:
dependencies:
@@ -2474,6 +2480,11 @@ snapshots:
cssesc: 3.0.0
fastparse: 1.1.2
+ css-tree@2.3.1:
+ dependencies:
+ mdn-data: 2.0.30
+ source-map-js: 1.2.1
+
cssesc@3.0.0: {}
culori@3.3.0: {}
@@ -2603,13 +2614,12 @@ snapshots:
event-emitter: 0.3.5
type: 2.7.3
- esrap@2.2.4:
- dependencies:
- '@jridgewell/sourcemap-codec': 1.5.5
- '@typescript-eslint/types': 8.58.0
-
estree-walker@2.0.2: {}
+ estree-walker@3.0.3:
+ dependencies:
+ '@types/estree': 1.0.8
+
event-emitter@0.3.5:
dependencies:
d: 1.0.2
@@ -2851,6 +2861,8 @@ snapshots:
marked@15.0.12: {}
+ mdn-data@2.0.30: {}
+
memoizee@0.4.17:
dependencies:
d: 1.0.2
@@ -2972,6 +2984,12 @@ snapshots:
ieee754: 1.2.1
resolve-protobuf-schema: 2.1.0
+ periscopic@3.1.0:
+ dependencies:
+ '@types/estree': 1.0.8
+ estree-walker: 3.0.3
+ is-reference: 3.0.3
+
picocolors@1.1.1: {}
picomatch@4.0.3: {}
@@ -3045,10 +3063,10 @@ snapshots:
potpack@2.1.0: {}
- prettier-plugin-svelte@3.5.1(prettier@3.8.1)(svelte@5.55.1):
+ prettier-plugin-svelte@3.5.1(prettier@3.8.1)(svelte@4.2.19):
dependencies:
prettier: 3.8.1
- svelte: 5.55.1
+ svelte: 4.2.19
prettier@3.8.1: {}
@@ -3203,14 +3221,14 @@ snapshots:
supports-preserve-symlinks-flag@1.0.0: {}
- svelte-check@3.8.6(postcss@8.5.8)(svelte@5.55.1):
+ svelte-check@3.8.6(postcss@8.5.8)(svelte@4.2.19):
dependencies:
'@jridgewell/trace-mapping': 0.3.31
chokidar: 3.6.0
picocolors: 1.1.1
sade: 1.8.1
- svelte: 5.55.1
- svelte-preprocess: 5.1.4(postcss@8.5.8)(svelte@5.55.1)(typescript@5.9.3)
+ svelte: 4.2.19
+ svelte-preprocess: 5.1.4(postcss@8.5.8)(svelte@4.2.19)(typescript@5.9.3)
typescript: 5.9.3
transitivePeerDependencies:
- '@babel/core'
@@ -3223,15 +3241,15 @@ snapshots:
- stylus
- sugarss
- svelte-dnd-action@0.9.69(svelte@5.55.1):
+ svelte-dnd-action@0.9.69(svelte@4.2.19):
dependencies:
- svelte: 5.55.1
+ svelte: 4.2.19
- svelte-hmr@0.16.0(svelte@5.55.1):
+ svelte-hmr@0.16.0(svelte@4.2.19):
dependencies:
- svelte: 5.55.1
+ svelte: 4.2.19
- svelte-i18n@4.0.1(svelte@5.55.1):
+ svelte-i18n@4.0.1(svelte@4.2.19):
dependencies:
cli-color: 2.0.4
deepmerge: 4.3.1
@@ -3239,10 +3257,10 @@ snapshots:
estree-walker: 2.0.2
intl-messageformat: 10.7.18
sade: 1.8.1
- svelte: 5.55.1
+ svelte: 4.2.19
tiny-glob: 0.2.9
- svelte-maplibre@0.9.14(svelte@5.55.1):
+ svelte-maplibre@0.9.14(svelte@4.2.19):
dependencies:
d3-geo: 3.1.1
dequal: 2.0.3
@@ -3250,38 +3268,36 @@ snapshots:
just-flush: 2.3.0
maplibre-gl: 4.7.1
pmtiles: 3.2.1
- svelte: 5.55.1
+ svelte: 4.2.19
- svelte-preprocess@5.1.4(postcss@8.5.8)(svelte@5.55.1)(typescript@5.9.3):
+ svelte-preprocess@5.1.4(postcss@8.5.8)(svelte@4.2.19)(typescript@5.9.3):
dependencies:
'@types/pug': 2.0.10
detect-indent: 6.1.0
magic-string: 0.30.21
sorcery: 0.11.1
strip-indent: 3.0.0
- svelte: 5.55.1
+ svelte: 4.2.19
optionalDependencies:
postcss: 8.5.8
typescript: 5.9.3
- svelte@5.55.1:
+ svelte@4.2.19:
dependencies:
- '@jridgewell/remapping': 2.3.5
+ '@ampproject/remapping': 2.3.0
'@jridgewell/sourcemap-codec': 1.5.5
- '@sveltejs/acorn-typescript': 1.0.9(acorn@8.16.0)
+ '@jridgewell/trace-mapping': 0.3.31
'@types/estree': 1.0.8
- '@types/trusted-types': 2.0.7
acorn: 8.16.0
aria-query: 5.3.1
axobject-query: 4.1.0
- clsx: 2.1.1
- devalue: 5.6.4
- esm-env: 1.2.2
- esrap: 2.2.4
+ code-red: 1.0.4
+ css-tree: 2.3.1
+ estree-walker: 3.0.3
is-reference: 3.0.3
locate-character: 3.0.0
magic-string: 0.30.21
- zimmerframe: 1.1.4
+ periscopic: 3.1.0
tailwindcss@3.4.19:
dependencies:
@@ -3457,5 +3473,3 @@ snapshots:
which-module: 2.0.1
y18n: 4.0.3
yargs-parser: 18.1.3
-
- zimmerframe@1.1.4: {}
diff --git a/frontend/pnpm-workspace.yaml b/frontend/pnpm-workspace.yaml
index d66e1150..c3322a78 100644
--- a/frontend/pnpm-workspace.yaml
+++ b/frontend/pnpm-workspace.yaml
@@ -5,5 +5,3 @@ overrides:
brace-expansion@>=4.0.0 <5.0.5: '>=5.0.5'
picomatch@<2.3.2: '>=2.3.2'
picomatch@>=4.0.0 <4.0.4: '>=4.0.4'
- svelte@<=5.51.4: '>=5.51.5'
- svelte@<=5.53.4: '>=5.53.5'
diff --git a/frontend/src/lib/components/collections/CollectionItineraryPlanner.svelte b/frontend/src/lib/components/collections/CollectionItineraryPlanner.svelte
index 04dfcd00..d96ea437 100644
--- a/frontend/src/lib/components/collections/CollectionItineraryPlanner.svelte
+++ b/frontend/src/lib/components/collections/CollectionItineraryPlanner.svelte
@@ -396,6 +396,79 @@
return value.includes('T') ? value.split('T')[0] : value;
}
+ function upsertItineraryItem(newItem: CollectionItineraryItem) {
+ if (!newItem) return;
+ const itinerary = collection.itinerary ? [...collection.itinerary] : [];
+ const idMatchIndex = itinerary.findIndex((it) => String(it.id) === String(newItem.id));
+ if (idMatchIndex >= 0) {
+ itinerary[idMatchIndex] = newItem;
+ collection = { ...collection, itinerary };
+ return;
+ }
+ const duplicate = itinerary.some(
+ (it) =>
+ String(it.object_id) === String(newItem.object_id) &&
+ String(it.date || '') === String(newItem.date || '') &&
+ Boolean(it.is_global) === Boolean(newItem.is_global)
+ );
+ if (!duplicate) {
+ collection = { ...collection, itinerary: [...itinerary, newItem] };
+ }
+ }
+
+ function handleQuickAddCreated(
+ objectType: 'location' | 'lodging',
+ event: CustomEvent<{
+ location: any;
+ itineraryItem?: CollectionItineraryItem | null;
+ itineraryDate?: string | null;
+ }>
+ ) {
+ const createdItem = event.detail?.location;
+ if (!createdItem) return;
+
+ if (objectType === 'location') {
+ const locs = collection.locations ? [...collection.locations] : [];
+ const idx = locs.findIndex((loc) => String(loc.id) === String(createdItem.id));
+ if (idx >= 0) {
+ locs[idx] = {
+ ...locs[idx],
+ ...createdItem,
+ visits: createdItem.visits || locs[idx].visits || []
+ };
+ } else {
+ locs.unshift({ ...createdItem });
+ }
+ collection = { ...collection, locations: locs };
+ } else {
+ const lodgings = collection.lodging ? [...collection.lodging] : [];
+ const idx = lodgings.findIndex((l) => String(l.id) === String(createdItem.id));
+ if (idx >= 0) {
+ lodgings[idx] = { ...lodgings[idx], ...createdItem };
+ } else {
+ lodgings.unshift({ ...createdItem });
+ }
+ collection = { ...collection, lodging: lodgings };
+ }
+
+ const itineraryItem = event.detail?.itineraryItem || null;
+ if (itineraryItem) {
+ upsertItineraryItem(itineraryItem);
+ addedToItinerary.add(String(createdItem.id));
+ addedToItinerary = addedToItinerary;
+ } else if (event.detail?.itineraryDate) {
+ void addItineraryItemForObject(
+ objectType,
+ String(createdItem.id),
+ String(event.detail.itineraryDate)
+ );
+ addedToItinerary.add(String(createdItem.id));
+ addedToItinerary = addedToItinerary;
+ }
+
+ pendingAddDate = null;
+ }
+
function upsertNote(note: Note) {
const notes = collection.notes ? [...collection.notes] : [];
const idx = notes.findIndex((n) => n.id === note.id);
@@ -543,11 +616,11 @@
$: if (
locationBeingUpdated?.id &&
pendingAddDate &&
- !addedToItinerary.has(locationBeingUpdated.id)
+ !addedToItinerary.has(String(locationBeingUpdated.id))
) {
addItineraryItemForObject('location', locationBeingUpdated.id, pendingAddDate);
// Mark this location as added to prevent duplicates
- addedToItinerary.add(locationBeingUpdated.id);
+ addedToItinerary.add(String(locationBeingUpdated.id));
addedToItinerary = addedToItinerary; // trigger reactivity
}
@@ -578,7 +651,7 @@
$: if (
lodgingBeingUpdated?.id &&
pendingAddDate &&
- !addedToItinerary.has(lodgingBeingUpdated.id)
+ !addedToItinerary.has(String(lodgingBeingUpdated.id))
) {
// Normalize check_in to date-only (YYYY-MM-DD) if present
const lodgingCheckInDate = lodgingBeingUpdated.check_in
@@ -588,7 +661,7 @@
addItineraryItemForObject('lodging', lodgingBeingUpdated.id, targetDate);
// Mark this lodging as added to prevent duplicates
- addedToItinerary.add(lodgingBeingUpdated.id);
+ addedToItinerary.add(String(lodgingBeingUpdated.id));
addedToItinerary = addedToItinerary; // trigger reactivity
}
@@ -619,11 +692,11 @@
$: if (
transportationBeingUpdated?.id &&
pendingAddDate &&
- !addedToItinerary.has(transportationBeingUpdated.id)
+ !addedToItinerary.has(String(transportationBeingUpdated.id))
) {
addItineraryItemForObject('transportation', transportationBeingUpdated.id, pendingAddDate);
// Mark this transportation as added to prevent duplicates
- addedToItinerary.add(transportationBeingUpdated.id);
+ addedToItinerary.add(String(transportationBeingUpdated.id));
addedToItinerary = addedToItinerary; // trigger reactivity
}
@@ -1301,6 +1374,15 @@
dateISO: string,
updateItemDate: boolean = false
) {
+ const alreadyScheduled = (collection.itinerary || []).some(
+ (it) =>
+ String(it.object_id) === String(objectId) &&
+ String(it.date || '') === String(dateISO) &&
+ !it.is_global
+ );
+ if (alreadyScheduled) {
+ return;
+ }
const tempId = `temp-${Date.now()}`;
const day = days.find((d) => d.date === dateISO);
const order = day ? day.items.length : 0;
@@ -1520,6 +1602,7 @@
addedToItinerary.clear();
addedToItinerary = addedToItinerary;
}}
+ on:quickAddCreated={(e) => handleQuickAddCreated('location', e)}
{user}
{locationToEdit}
bind:location={locationBeingUpdated}
@@ -1538,6 +1621,7 @@
addedToItinerary.clear();
addedToItinerary = addedToItinerary;
}}
+ on:quickAddCreated={(e) => handleQuickAddCreated('lodging', e)}
{user}
{lodgingToEdit}
bind:lodging={lodgingBeingUpdated}
diff --git a/frontend/src/lib/components/locations/LocationDetails.svelte b/frontend/src/lib/components/locations/LocationDetails.svelte
index bb1d5895..1f4c55f4 100755
--- a/frontend/src/lib/components/locations/LocationDetails.svelte
+++ b/frontend/src/lib/components/locations/LocationDetails.svelte
@@ -6,7 +6,8 @@
import MoneyInput from '../shared/MoneyInput.svelte';
import MarkdownEditor from '../MarkdownEditor.svelte';
import TagComplete from '../TagComplete.svelte';
- import { DEFAULT_CURRENCY, normalizeMoneyPayload, toMoneyValue } from '$lib/money';
+ import { DEFAULT_CURRENCY, toMoneyValue } from '$lib/money';
+ import { saveLocation } from '$lib/location-save';
import { addToast } from '$lib/toasts';
import type { Category, Collection, Location, MoneyValue, User } from '$lib/types';
import MapIcon from '~icons/mdi/map';
@@ -67,6 +68,14 @@
let isGeneratingDesc = false;
let ownerUser: User | null = null;
+ function toFiniteNumber(value: unknown): number | null {
+ if (value === null || value === undefined) {
+ return null;
+ }
+ const parsed = Number(value);
+ return Number.isFinite(parsed) ? parsed : null;
+ }
+
export let initialLocation: any = null;
export let currentUser: any = null;
export let editingLocation: any = null;
@@ -84,21 +93,25 @@
location.price_currency = defaultCurrency;
}
}
- $: initialSelection =
- initialLocation && initialLocation.latitude && initialLocation.longitude
- ? {
- name: initialLocation.name || '',
- lat: Number(initialLocation.latitude),
- lng: Number(initialLocation.longitude),
- location: initialLocation.location || ''
- }
- : null;
+ $: {
+ const lat = toFiniteNumber(initialLocation?.latitude);
+ const lng = toFiniteNumber(initialLocation?.longitude);
+ initialSelection =
+ initialLocation && lat !== null && lng !== null
+ ? {
+ name: initialLocation.name || '',
+ lat,
+ lng,
+ location: initialLocation.location || ''
+ }
+ : null;
+ }
function handleLocationUpdate(
event: CustomEvent<{ name?: string; lat: number; lng: number; location: string }>
) {
const { name, lat, lng, location: displayName } = event.detail;
- if (!location.name && name) location.name = name;
+ if (name) location.name = name;
location.latitude = lat;
location.longitude = lng;
location.location = displayName;
@@ -139,83 +152,31 @@
return;
}
- if (location.latitude !== null && typeof location.latitude === 'number') {
- location.latitude = parseFloat(location.latitude.toFixed(6));
- }
- if (location.longitude !== null && typeof location.longitude === 'number') {
- location.longitude = parseFloat(location.longitude.toFixed(6));
- }
- if (collection && collection.id) {
- location.collections = [collection.id];
- }
-
- let payload: any = { ...location };
-
- // Clean up link: empty/whitespace → null, invalid URL → null
- if (!payload.link || !payload.link.trim()) {
- payload.link = null;
- } else {
- try {
- new URL(payload.link);
- } catch {
- // Not a valid URL — clear it so Django doesn't reject it
- payload.link = null;
- }
- }
- if (!payload.description || !payload.description.trim()) {
- payload.description = null;
- }
-
- if (location.price === null) {
- payload.price = null;
- payload.price_currency = null;
- } else {
- payload = normalizeMoneyPayload(payload, 'price', 'price_currency', defaultCurrency);
- }
-
- let res: Response;
- if (locationToEdit && locationToEdit.id) {
- // Only include collections if explicitly set via a collection context;
- // otherwise remove them from the PATCH payload to avoid triggering the
- // m2m_changed signal which can override is_public.
- if (!collection || !collection.id) {
- delete payload.collections;
- }
-
- res = await fetch(`/api/locations/${locationToEdit.id}`, {
- method: 'PATCH',
- headers: {
- 'Content-Type': 'application/json'
- },
- body: JSON.stringify(payload)
+ try {
+ const savedLocation = await saveLocation({
+ location,
+ locationToEdit,
+ collectionId: collection?.id || null,
+ defaultCurrency
});
- } else {
- res = await fetch(`/api/locations`, {
- method: 'POST',
- headers: {
- 'Content-Type': 'application/json'
- },
- body: JSON.stringify(payload)
- });
- }
-
- if (!res.ok) {
- const errorData = await res.json().catch(() => ({}));
- // Extract error message from Django field errors (e.g. {"link": ["Enter a valid URL."]})
- let errorMsg = errorData?.detail || errorData?.name?.[0] || '';
- if (!errorMsg) {
- const fieldErrors = Object.entries(errorData)
- .filter(([_, v]) => Array.isArray(v))
- .map(([k, v]) => `${k}: ${(v as string[]).join(', ')}`)
- .join('; ');
- errorMsg = fieldErrors || 'Failed to save location';
- }
- addToast('error', String(errorMsg));
+ location = {
+ ...location,
+ ...savedLocation,
+ rating:
+ typeof savedLocation.rating === 'number' && !Number.isNaN(savedLocation.rating)
+ ? savedLocation.rating
+ : location.rating,
+ link: savedLocation.link || location.link || '',
+ description: savedLocation.description || location.description || '',
+ location: savedLocation.location || location.location || '',
+ tags: savedLocation.tags || location.tags || [],
+ collections: savedLocation.collections || location.collections || []
+ };
+ } catch (error) {
+ addToast('error', error instanceof Error ? error.message : 'Failed to save location');
return;
}
- location = await res.json();
-
dispatch('save', {
...location
});
@@ -226,9 +187,11 @@
}
onMount(() => {
- if (initialLocation && initialLocation.latitude && initialLocation.longitude) {
- location.latitude = initialLocation.latitude;
- location.longitude = initialLocation.longitude;
+ const lat = toFiniteNumber(initialLocation?.latitude);
+ const lng = toFiniteNumber(initialLocation?.longitude);
+ if (initialLocation && lat !== null && lng !== null) {
+ location.latitude = lat;
+ location.longitude = lng;
if (!location.name) location.name = initialLocation.name || '';
if (initialLocation.location) location.location = initialLocation.location;
}
diff --git a/frontend/src/lib/components/locations/LocationModal.svelte b/frontend/src/lib/components/locations/LocationModal.svelte
index 4694bc97..3ef81775 100644
--- a/frontend/src/lib/components/locations/LocationModal.svelte
+++ b/frontend/src/lib/components/locations/LocationModal.svelte
@@ -12,6 +12,7 @@
export let collection: Collection | null = null;
export let initialLatLng: { lat: number; lng: number } | null = null; // Used to pass the location from the map selection to the modal
export let initialVisitDate: string | null = null; // Used to pre-fill visit date when adding from itinerary planner
+ export let itineraryDayLabel: string | null = null;
const dispatch = createEventDispatcher();
@@ -19,6 +20,10 @@
let storedInitialVisitDate: string | null = initialVisitDate;
let modal: HTMLDialogElement;
+ let googleMapsEnabled = false;
+ let isEditMode = false;
+ let pendingGooglePhotoUrls: string[] = [];
+ let importingGooglePhotos = false;
// Whether a save/create occurred during this modal session
let didSave = false;
@@ -46,6 +51,105 @@
}
];
+ function setStep(stepIndex: number) {
+ steps = steps.map((step, index) => ({
+ ...step,
+ selected: index === stepIndex
+ }));
+ }
+
+ function handleStepSelect(stepIndex: number) {
+ if (stepIndex === 0 && isEditMode) {
+ return;
+ }
+ if (steps[stepIndex]?.requires_id && !location.id) {
+ return;
+ }
+ setStep(stepIndex);
+ }
+
+ function handleDetailsBack() {
+ if (isEditMode) {
+ close();
+ return;
+ }
+ setStep(0);
+ }
+
+ function applyQuickStartPrefill(prefill: any) {
+ if (!prefill) return;
+
+ if (prefill.name) location.name = prefill.name;
+ if (prefill.location) location.location = prefill.location;
+ if (typeof prefill.latitude === 'number') location.latitude = prefill.latitude;
+ if (typeof prefill.longitude === 'number') location.longitude = prefill.longitude;
+ if (typeof prefill.rating === 'number') location.rating = prefill.rating;
+ if (!location.link && (prefill.website || prefill.google_maps_url)) {
+ location.link = prefill.website || prefill.google_maps_url;
+ }
+ if (!location.description && prefill.description) {
+ location.description = prefill.description;
+ }
+ if ((!location.tags || location.tags.length === 0) && Array.isArray(prefill.types)) {
+ location.tags = prefill.types.slice(0, 8);
+ }
+ if (prefill.selected_category && typeof prefill.selected_category === 'object') {
+ location.category = prefill.selected_category;
+ }
+ pendingGooglePhotoUrls = Array.isArray(prefill.photos)
+ ? prefill.photos.filter((url: unknown) => typeof url === 'string' && url.trim()).slice(0, 5)
+ : [];
+ }
+
+ async function importPendingGoogleImages(locationId: string) {
+ if (!locationId || pendingGooglePhotoUrls.length === 0) return;
+ importingGooglePhotos = true;
+
+ try {
+ const res = await fetch('/api/images/import_from_urls/', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({
+ content_type: 'location',
+ object_id: locationId,
+ urls: pendingGooglePhotoUrls
+ })
+ });
+
+ if (!res.ok) {
+ addToast('warning', 'Location saved, but Google photos could not be imported');
+ return;
+ }
+
+ const data = await res.json();
+ if (Array.isArray(data.created) && data.created.length > 0) {
+ const existingImages = Array.isArray(location.images) ? location.images : [];
+ const existingIds = new Set(existingImages.map((img: any) => img.id));
+ const imported = data.created.filter((img: any) => !existingIds.has(img.id));
+ location.images = [...existingImages, ...imported];
+ }
+
+ pendingGooglePhotoUrls = [];
+ } catch {
+ addToast('warning', 'Location saved, but Google photos import failed');
+ } finally {
+ importingGooglePhotos = false;
+ }
+ }
+
+ async function loadIntegrations() {
+ try {
+ const res = await fetch('/api/integrations/');
+ if (!res.ok) return;
+ const integrations = await res.json();
+ googleMapsEnabled = Boolean(integrations?.google_maps);
+ } catch {
+ googleMapsEnabled = false;
+ }
+ }
+
export let location: Location = {
id: '',
name: '',
@@ -81,17 +185,17 @@
link: locationToEdit?.link || null,
description: locationToEdit?.description || null,
tags: locationToEdit?.tags || [],
- rating: locationToEdit?.rating || NaN,
+ rating: locationToEdit?.rating ?? NaN,
price: locationToEdit?.price ?? null,
price_currency: locationToEdit?.price_currency ?? null,
- is_public: locationToEdit?.is_public || false,
- latitude: locationToEdit?.latitude || NaN,
- longitude: locationToEdit?.longitude || NaN,
+ is_public: locationToEdit?.is_public ?? false,
+ latitude: locationToEdit?.latitude ?? NaN,
+ longitude: locationToEdit?.longitude ?? NaN,
location: locationToEdit?.location || null,
images: locationToEdit?.images || [],
user: locationToEdit?.user || null,
visits: locationToEdit?.visits || [],
- is_visited: locationToEdit?.is_visited || false,
+ is_visited: locationToEdit?.is_visited ?? false,
collections: locationToEdit?.collections || [],
category: locationToEdit?.category || {
id: '',
@@ -104,23 +208,25 @@
attachments: locationToEdit?.attachments || []
};
- onMount(async () => {
+ onMount(() => {
modal = document.getElementById('my_modal_1') as HTMLDialogElement;
modal.showModal();
+ isEditMode = Boolean(locationToEdit?.id);
+
// Skip the quick start step if editing an existing location
- if (!locationToEdit) {
- steps[0].selected = true;
- steps[1].selected = false;
+ if (!isEditMode) {
+ setStep(0);
} else {
- steps[0].selected = false;
- steps[1].selected = true;
+ setStep(1);
}
+
if (initialLatLng) {
location.latitude = initialLatLng.lat;
location.longitude = initialLatLng.lng;
- steps[1].selected = true;
- steps[0].selected = false;
+ setStep(1);
}
+
+ void loadIntegrations();
});
function close() {
@@ -206,7 +312,7 @@
>
-
- {#if selectedMarker}
-
- {/if}
- {$t('adventures.location_selected')}
-
+ {mode === 'lodging' ? 'Lodging added' : 'Location added'}
+
+
+
+ {#if selectedMarker}
+
+ {/if}
+
+ {/if}
+
+ {mode === 'lodging'
+ ? $t('lodging.new_lodging') || 'Lodging selected'
+ : $t('adventures.location_selected')}
+
+