diff --git a/backend/server/adventures/views/generate_description_view.py b/backend/server/adventures/views/generate_description_view.py index c4f16eff..b54e3a03 100644 --- a/backend/server/adventures/views/generate_description_view.py +++ b/backend/server/adventures/views/generate_description_view.py @@ -1,21 +1,31 @@ +import logging +import re +import urllib.parse +from difflib import SequenceMatcher + +import requests +from django.conf import settings from rest_framework import viewsets from rest_framework.decorators import action from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response -import requests -from django.conf import settings -import urllib.parse -import logging logger = logging.getLogger(__name__) class GenerateDescription(viewsets.ViewSet): permission_classes = [IsAuthenticated] - - # User-Agent header required by Wikipedia API - HEADERS = { + + # User-Agent header required by Wikipedia API, Accept-Language patched in per request + BASE_HEADERS = { 'User-Agent': f'AdventureLog/{getattr(settings, "ADVENTURELOG_RELEASE_VERSION", "unknown")}' } + DEFAULT_LANGUAGE = "en" + LANGUAGE_PATTERN = re.compile(r"^[a-z0-9-]{2,12}$", re.IGNORECASE) + MAX_CANDIDATES = 10 # Increased to find better matches + + # Accepted image formats (no SVG) + ACCEPTED_IMAGE_FORMATS = {'.jpg', '.jpeg', '.png', '.webp', '.gif'} + MIN_DESCRIPTION_LENGTH = 50 # Minimum characters for a valid description @action(detail=False, methods=['get']) def desc(self, request): @@ -23,42 +33,48 @@ class GenerateDescription(viewsets.ViewSet): if not name: return Response({"error": "Name parameter is required"}, status=400) - # Properly URL decode the name - name = urllib.parse.unquote(name) - search_term = self.get_search_term(name) - - if not search_term: - return Response({"error": "No matching Wikipedia article found"}, status=404) - - # Properly URL encode the search term for the API - encoded_term = urllib.parse.quote(search_term) - url = f'https://en.wikipedia.org/w/api.php?origin=*&action=query&prop=extracts&exintro&explaintext&format=json&titles={encoded_term}' - + name = urllib.parse.unquote(name).strip() + if not name: + return Response({"error": "Name parameter is required"}, status=400) + + lang = self.get_language(request) + try: - response = requests.get(url, headers=self.HEADERS, timeout=10) - response.raise_for_status() - data = response.json() + candidates = self.get_candidate_pages(name, lang) - pages = data.get("query", {}).get("pages", {}) - if not pages: - return Response({"error": "No page data found"}, status=404) - - page_id = next(iter(pages)) - page_data = pages[page_id] - - # Check if page exists (page_id of -1 means page doesn't exist) - if page_id == "-1": - return Response({"error": "Wikipedia page not found"}, status=404) - - if not page_data.get('extract'): - return Response({"error": "No description found"}, status=404) - - return Response(page_data) - - except requests.exceptions.RequestException as e: + for candidate in candidates: + page_data = self.fetch_page( + lang=lang, + candidate=candidate, + props='extracts|categories', + extra_params={'exintro': 1, 'explaintext': 1} + ) + if not page_data or page_data.get('missing'): + continue + + # Check if this is a disambiguation page + if self.is_disambiguation_page(page_data): + continue + + extract = (page_data.get('extract') or '').strip() + + # Filter out pages with very short descriptions + if len(extract) < self.MIN_DESCRIPTION_LENGTH: + continue + + # Filter out list/index pages + if 
self.is_list_or_index_page(page_data): + continue + + page_data['lang'] = lang + return Response(page_data) + + return Response({"error": "No description found"}, status=404) + + except requests.exceptions.RequestException: logger.exception("Failed to fetch data from Wikipedia") return Response({"error": "Failed to fetch data from Wikipedia."}, status=500) - except ValueError as e: # JSON decode error + except ValueError: return Response({"error": "Invalid response from Wikipedia API"}, status=500) @action(detail=False, methods=['get']) @@ -67,73 +83,270 @@ class GenerateDescription(viewsets.ViewSet): if not name: return Response({"error": "Name parameter is required"}, status=400) - # Properly URL decode the name - name = urllib.parse.unquote(name) - search_term = self.get_search_term(name) - - if not search_term: - return Response({"error": "No matching Wikipedia article found"}, status=404) - - # Properly URL encode the search term for the API - encoded_term = urllib.parse.quote(search_term) - url = f'https://en.wikipedia.org/w/api.php?origin=*&action=query&prop=pageimages&format=json&piprop=original&titles={encoded_term}' - + name = urllib.parse.unquote(name).strip() + if not name: + return Response({"error": "Name parameter is required"}, status=400) + + lang = self.get_language(request) + try: - response = requests.get(url, headers=self.HEADERS, timeout=10) - response.raise_for_status() - data = response.json() + candidates = self.get_candidate_pages(name, lang) - pages = data.get("query", {}).get("pages", {}) - if not pages: - return Response({"error": "No page data found"}, status=404) - - page_id = next(iter(pages)) - page_data = pages[page_id] - - # Check if page exists - if page_id == "-1": - return Response({"error": "Wikipedia page not found"}, status=404) - - original_image = page_data.get('original') - if not original_image: - return Response({"error": "No image found"}, status=404) - - return Response(original_image) - - except requests.exceptions.RequestException as e: + for candidate in candidates: + page_data = self.fetch_page( + lang=lang, + candidate=candidate, + props='pageimages|categories', + extra_params={'piprop': 'original|thumbnail', 'pithumbsize': 640} + ) + if not page_data or page_data.get('missing'): + continue + + # Skip disambiguation pages + if self.is_disambiguation_page(page_data): + continue + + # Skip list/index pages + if self.is_list_or_index_page(page_data): + continue + + # Try original image first + original_image = page_data.get('original') + if original_image and self.is_valid_image(original_image.get('source')): + return Response(original_image) + + # Fall back to thumbnail + thumbnail_image = page_data.get('thumbnail') + if thumbnail_image and self.is_valid_image(thumbnail_image.get('source')): + return Response(thumbnail_image) + + return Response({"error": "No image found"}, status=404) + + except requests.exceptions.RequestException: logger.exception("Failed to fetch data from Wikipedia") return Response({"error": "Failed to fetch data from Wikipedia."}, status=500) - except ValueError as e: # JSON decode error + except ValueError: return Response({"error": "Invalid response from Wikipedia API"}, status=500) - - def get_search_term(self, term): + + def is_valid_image(self, image_url): + """Check if image URL is valid and not an SVG""" + if not image_url: + return False + + url_lower = image_url.lower() + + # Reject SVG images + if '.svg' in url_lower: + return False + + # Accept only specific image formats + return 
any(url_lower.endswith(fmt) or fmt in url_lower for fmt in self.ACCEPTED_IMAGE_FORMATS) + + def is_disambiguation_page(self, page_data): + """Check if page is a disambiguation page""" + categories = page_data.get('categories', []) + for cat in categories: + cat_title = cat.get('title', '').lower() + if 'disambiguation' in cat_title or 'disambig' in cat_title: + return True + + # Check title for disambiguation indicators + title = page_data.get('title', '').lower() + if '(disambiguation)' in title: + return True + + return False + + def is_list_or_index_page(self, page_data): + """Check if page is a list or index page""" + title = page_data.get('title', '').lower() + + # Common patterns for list/index pages + list_patterns = [ + 'list of', + 'index of', + 'timeline of', + 'glossary of', + 'outline of' + ] + + return any(pattern in title for pattern in list_patterns) + + def get_candidate_pages(self, term, lang): + """Get and rank candidate pages from Wikipedia search""" if not term: - return None - - # Properly URL encode the search term - encoded_term = urllib.parse.quote(term) - url = f'https://en.wikipedia.org/w/api.php?action=opensearch&search={encoded_term}&limit=10&namespace=0&format=json' - + return [] + + url = self.build_api_url(lang) + params = { + 'origin': '*', + 'action': 'query', + 'format': 'json', + 'list': 'search', + 'srsearch': term, + 'srlimit': self.MAX_CANDIDATES, + 'srwhat': 'text', + 'utf8': 1, + } + + response = requests.get(url, headers=self.get_headers(lang), params=params, timeout=10) + response.raise_for_status() + try: - response = requests.get(url, headers=self.HEADERS, timeout=10) - response.raise_for_status() - - # Check if response is empty - if not response.text.strip(): - return None - data = response.json() + except ValueError: + logger.warning("Invalid response while searching Wikipedia for '%s'", term) + return [{'title': term, 'pageid': None}] + + search_results = data.get('query', {}).get('search', []) + if not search_results: + return [{'title': term, 'pageid': None}] + + normalized = term.lower() + ranked_results = [] + + for result in search_results: + title = (result.get('title') or '').strip() + if not title: + continue - # OpenSearch API returns an array with 4 elements: - # [search_term, [titles], [descriptions], [urls]] - if len(data) >= 2 and data[1] and len(data[1]) > 0: - return data[1][0] # Return the first title match + title_lower = title.lower() + # Calculate multiple similarity metrics + similarity = SequenceMatcher(None, normalized, title_lower).ratio() + + # Boost score for exact matches + exact_match = int(title_lower == normalized) + + # Boost score for titles that start with the search term + starts_with = int(title_lower.startswith(normalized)) + + # Penalize disambiguation pages + is_disambig = int('disambiguation' in title_lower or '(disambig' in title_lower) + + # Penalize list/index pages + is_list = int(any(p in title_lower for p in ['list of', 'index of', 'timeline of'])) + + score = result.get('score') or 0 + + ranked_results.append({ + 'title': title, + 'pageid': result.get('pageid'), + 'exact': exact_match, + 'starts_with': starts_with, + 'similarity': similarity, + 'score': score, + 'is_disambig': is_disambig, + 'is_list': is_list + }) + + if not ranked_results: + return [{'title': term, 'pageid': None}] + + # Sort by: exact match > starts with > not disambiguation > not list > similarity > search score + ranked_results.sort( + key=lambda e: ( + e['exact'], + e['starts_with'], + -e['is_disambig'], + -e['is_list'], + 
e['similarity'], + e['score'] + ), + reverse=True + ) + + candidates = [] + seen_titles = set() + + for entry in ranked_results: + title_key = entry['title'].lower() + if title_key in seen_titles: + continue + seen_titles.add(title_key) + candidates.append({'title': entry['title'], 'pageid': entry['pageid']}) + if len(candidates) >= self.MAX_CANDIDATES: + break + + # Add original term as fallback if not already included + if normalized not in seen_titles: + candidates.append({'title': term, 'pageid': None}) + + return candidates + + def fetch_page(self, *, lang, candidate, props, extra_params=None): + """Fetch page data from Wikipedia API""" + if not candidate or not candidate.get('title'): return None - - except requests.exceptions.RequestException: - # If search fails, return the original term as fallback - return term - except ValueError: # JSON decode error - # If JSON parsing fails, return the original term as fallback - return term \ No newline at end of file + + params = { + 'origin': '*', + 'action': 'query', + 'format': 'json', + 'prop': props, + } + + page_id = candidate.get('pageid') + if page_id: + params['pageids'] = page_id + else: + params['titles'] = candidate['title'] + + if extra_params: + params.update(extra_params) + + response = requests.get( + self.build_api_url(lang), + headers=self.get_headers(lang), + params=params, + timeout=10 + ) + response.raise_for_status() + + try: + data = response.json() + except ValueError: + logger.warning("Invalid response while fetching Wikipedia page '%s'", candidate['title']) + return None + + pages = data.get('query', {}).get('pages', {}) + if not pages: + return None + + if page_id is not None: + page_data = pages.get(str(page_id)) + if page_data: + page_data.setdefault('title', candidate['title']) + return page_data + + page_data = next(iter(pages.values())) + if page_data: + page_data.setdefault('title', candidate['title']) + return page_data + + def get_language(self, request): + """Extract and validate language parameter""" + candidate = request.query_params.get('lang') + if not candidate: + candidate = self.DEFAULT_LANGUAGE + + if not candidate: + candidate = 'en' + + normalized = candidate.replace('_', '-').lower() + if self.LANGUAGE_PATTERN.match(normalized): + return normalized + + return 'en' + + def get_headers(self, lang): + """Build headers for Wikipedia API request""" + headers = dict(self.BASE_HEADERS) + headers['Accept-Language'] = lang + headers['Accept'] = 'application/json' + return headers + + def build_api_url(self, lang): + """Build Wikipedia API URL for given language""" + subdomain = lang.split('-', 1)[0] + return f'https://{subdomain}.wikipedia.org/w/api.php' \ No newline at end of file diff --git a/backend/server/worldtravel/views.py b/backend/server/worldtravel/views.py index 53012d02..239ddd3b 100644 --- a/backend/server/worldtravel/views.py +++ b/backend/server/worldtravel/views.py @@ -14,6 +14,26 @@ from adventures.models import Location # Cache TTL CACHE_TTL = 60 * 60 * 24 # 1 day + +def invalidate_visit_caches_for_region_and_user(region, user): + """Invalidate cached visit lists for a given region and user. + + Removes both the per-region and per-country per-user cache keys so + UI calls will refetch updated visited lists. 
+ """ + try: + if region is None or user is None: + return + # per-region cache + cache.delete(f"visits_by_region_{region.id}_{user.id}") + # per-country cache (region -> country -> country_code) + country_code = getattr(region.country, 'country_code', None) + if country_code: + cache.delete(f"visits_by_country_{country_code}_{user.id}") + except Exception: + # Avoid raising cache-related exceptions; best-effort invalidation + pass + @cache_page(CACHE_TTL) @api_view(['GET']) @permission_classes([IsAuthenticated]) @@ -138,13 +158,22 @@ class VisitedRegionViewSet(viewsets.ModelViewSet): serializer.is_valid(raise_exception=True) self.perform_create(serializer) headers = self.get_success_headers(serializer.data) + # Invalidate caches for this region and its country for the user + try: + region = serializer.validated_data.get('region') + invalidate_visit_caches_for_region_and_user(region, request.user) + except Exception: + pass return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers) def destroy(self, request, **kwargs): region = get_object_or_404(Region, id=kwargs['pk']) visited_region = VisitedRegion.objects.filter(user=request.user.id, region=region) if visited_region.exists(): + # capture region before deleting so we can invalidate caches + affected_region = visited_region.first().region visited_region.delete() + invalidate_visit_caches_for_region_and_user(affected_region, request.user) return Response(status=status.HTTP_204_NO_CONTENT) else: return Response({"error": "Visited region not found."}, status=status.HTTP_404_NOT_FOUND) @@ -164,9 +193,14 @@ class VisitedCityViewSet(viewsets.ModelViewSet): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.perform_create(serializer) + # Ensure a VisitedRegion exists for the city and invalidate caches region = serializer.validated_data['city'].region if not VisitedRegion.objects.filter(user=request.user.id, region=region).exists(): VisitedRegion.objects.create(user=request.user, region=region) + try: + invalidate_visit_caches_for_region_and_user(region, request.user) + except Exception: + pass headers = self.get_success_headers(serializer.data) return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers) @@ -174,7 +208,9 @@ class VisitedCityViewSet(viewsets.ModelViewSet): city = get_object_or_404(City, id=kwargs['pk']) visited_city = VisitedCity.objects.filter(user=request.user.id, city=city) if visited_city.exists(): + region = city.region visited_city.delete() + invalidate_visit_caches_for_region_and_user(region, request.user) return Response(status=status.HTTP_204_NO_CONTENT) else: return Response({"error": "Visited city not found."}, status=status.HTTP_404_NOT_FOUND) diff --git a/documentation/docs/install/unraid.md b/documentation/docs/install/unraid.md index c8726f6d..9697f786 100644 --- a/documentation/docs/install/unraid.md +++ b/documentation/docs/install/unraid.md @@ -20,52 +20,58 @@ docker network create example - Network type should be set to your **custom network**. - There is **no** AdventureLog---Database app, to find the database application search for `PostGIS` on the Unraid App Store then add and fill out the fields as shown below - Change the repository version to `postgis/postgis:15-3.3` -- Ensure that the variables ```POSTGRES_DB```, ```POSTGRES_USER```, and ```POSTGRES_PASSWORD``` are set in the ```PostGIS``` container. If not, then add them as custom variables. 
The template should have ```POSTGRES_PASSWORD``` already and you will simply have to add ```POSTGRES_DB``` and ```POSTGRES_USER```. -- The forwarded port of ```5012``` is not needed unless you plan to access the database outside of the container's network. +- Ensure that the variables `POSTGRES_DB`, `POSTGRES_USER`, and `POSTGRES_PASSWORD` are set in the `PostGIS` container. If not, then add them as custom variables. The template should have `POSTGRES_PASSWORD` already and you will simply have to add `POSTGRES_DB` and `POSTGRES_USER`. +- The forwarded port of `5012` is not needed unless you plan to access the database outside of the container's network. -| Name | Required | Description | Default Value | -| ------------------- | -------- | -------------------------------------------------------------------------------- | --------------- | -| `POSTGRES_DB` | Yes | The name of the database in PostGIS. | `N/A` | -| `POSTGRES_USER` | Yes | Name of the user generated on first start that will have access to the database. | `N/A` | -| `POSTGRES_PASSWORD` | Yes | Password of the user that will be generated on first start. | `N/A` | +| Name | Required | Description | Default Value | +| ------------------- | -------- | -------------------------------------------------------------------------------- | ------------- | +| `POSTGRES_DB` | Yes | The name of the database in PostGIS. | `N/A` | +| `POSTGRES_USER` | Yes | Name of the user generated on first start that will have access to the database. | `N/A` | +| `POSTGRES_PASSWORD` | Yes | Password of the user that will be generated on first start. | `N/A` | - Here's some visual instructions of how to configure the database template, click the image to open larger version in new tab.\ -[](/unraid-config-2.png) + [](/unraid-config-2.png) ## Backend - Network type should be set to your **custom network**. - **Note:** If you're running the server in a docker network that is other than "host" (for example "bridge"), then you need to add the IP of the host machine in the CSRF Trusted Origins variable instead of using localhost. This is only necessary when accessing locally, otherwise you will use the domain name. -| Name | Required | Description | Default Value | -| ----------------------- | -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------- | -| `API Port` | Yes | This is the port of the backend. This is a port, not a variable. | `8016` | -| `PGHOST` | Yes | This is how the backend will access the database. Use the database container's name. | `N/A` | -| `PGDATABASE` | Yes | Name of the database in PostGIS to access. | `N/A` | -| `PGUSER` | Yes | Name of the user to access with. This is the same as the variable in the database. | `N/A` | -| `PGPASSWORD` | Yes | Password of the user it's accessing with. This is the same as the variable in the database. | `N/A` | -| `SECRET_KEY` | Yes | Secret Backend Key. Change to anything. | `N/A` | -| `DJANGO_ADMIN_USERNAME` | Yes | Default username for admin access. | `admin` | -| `DJANGO_ADMIN_EMAIL` | Yes | Default admin user's email. **Note:** You cannot make more than one user with each email. | `N/A` | -| `DJANGO_ADMIN_PASSWORD` | Yes | Default password for admin access. Change after initial login. 
| `N/A` | -| `PUBLIC_URL` | Yes | This needs to match how you will connect to the backend, so either local ip with matching port or domain. It is used for the creation of image URLs. | `http://IP_ADDRESS:8016` | -| `FRONTEND_URL` | Yes | This needs to match how you will connect to the frontend, so either local ip with matching port or domain. This link should be available for all users. Used for email generation. | `http://IP_ADDRESS:8015` | -| `CSRF_TRUSTED_ORIGINS` | Yes | This needs to be changed to the URLs of how you connect to your backend server and frontend. These values are comma-separated and usually the same as the 2 above values. | `http://IP_ADDRESS:8016,http://IP_ADDRESS:8015` | +| Name | Required | Description | Default Value | +| ----------------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------- | +| `API Port` | Yes | This is the port of the backend. This is a port, not a variable. | `8016` | +| `PGHOST` | Yes | This is how the backend will access the database. Use the database container's name. | `N/A` | +| `PGDATABASE` | Yes | Name of the database in PostGIS to access. | `N/A` | +| `PGUSER` | Yes | Name of the user to access with. This is the same as the variable in the database. | `N/A` | +| `PGPASSWORD` | Yes | Password of the user it's accessing with. This is the same as the variable in the database. | `N/A` | +| `SECRET_KEY` | Yes | Secret Backend Key. Change to anything. | `N/A` | +| `DJANGO_ADMIN_USERNAME` | Yes | Default username for admin access. | `admin` | +| `DJANGO_ADMIN_EMAIL` | Yes | Default admin user's email. **Note:** You cannot make more than one user with each email. | `N/A` | +| `DJANGO_ADMIN_PASSWORD` | Yes | Default password for admin access. Change after initial login. | `N/A` | +| `PUBLIC_URL` | Yes | This needs to match how you will connect to the backend, so either local ip with matching port or domain. It is used for the creation of image URLs. | `http://IP_ADDRESS:8016` | +| `FRONTEND_URL` | Yes | This needs to match how you will connect to the frontend, so either local ip with matching port or domain. This link should be available for all users. Used for email generation. | `http://IP_ADDRESS:8015` | +| `CSRF_TRUSTED_ORIGINS` | Yes | This needs to be changed to the URLs of how you connect to your backend server and frontend. These values are comma-separated and usually the same as the 2 above values. | `http://IP_ADDRESS:8016,http://IP_ADDRESS:8015` | - Here's some visual instructions of how to configure the backend template, click the image to open larger version in new tab.\ -[](/unraid-config-1.png) + [](/unraid-config-1.png) ## Frontend - Network type should be set to your **custom network**. -- **Note:** The default value for ```PUBLIC_SERVER_URL``` is ```http://IP_ADDRESS:8000```, however ```IP_ADDRESS``` **should be changed** to the name of the backend container for simplicity. +- **Note:** The default value for `PUBLIC_SERVER_URL` is `http://IP_ADDRESS:8000`, however `IP_ADDRESS` **should be changed** to the name of the backend container for simplicity. 
-| Name | Required | Description | Default Value | -| ------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------ | -| `WEB UI Port` | Yes | The port of the frontend. This is not a variable. | `8015` | -| `PUBLIC_SERVER_URL` | Yes | What the frontend SSR server uses to connect to the backend. Change `IP_ADDRESS` to the name of the backend container. | `http://IP_ADDRESS:8000` | -| `ORIGIN` | Sometimes| Set to the URL you will access the frontend from, such as localhost with correct port, or set it to the domain of what you will access the app from. | `http://IP_ADDRESS:8015` | -| `BODY_SIZE_LIMIT` | Yes | Used to set the maximum upload size to the server. Should be changed to prevent someone from uploading too much! Custom values must be set in **bytes**. | `Infinity` | +| Name | Required | Description | Default Value | +| ------------------- | --------- | -------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------ | +| `WEB UI Port` | Yes | The port of the frontend. This is not a variable. | `8015` | +| `PUBLIC_SERVER_URL` | Yes | What the frontend SSR server uses to connect to the backend. Change `IP_ADDRESS` to the name of the backend container. | `http://IP_ADDRESS:8000` | +| `ORIGIN` | Sometimes | Set to the URL you will access the frontend from, such as localhost with correct port, or set it to the domain of what you will access the app from. | `http://IP_ADDRESS:8015` | +| `BODY_SIZE_LIMIT` | Yes | Used to set the maximum upload size to the server. Should be changed to prevent someone from uploading too much! Custom values must be set in **bytes**. | `Infinity` | - Here's some visual instructions of how to configure the frontend template, click the image to open larger version in new tab.\ -[](/unraid-config-3.png) + [](/unraid-config-3.png) + +## Additional Resources + +Youtuber AlienTech42 has created a helpful video walking through the installation of AdventureLog on Unraid: + + diff --git a/frontend/src/app.html b/frontend/src/app.html index abea469a..0b2e937c 100644 --- a/frontend/src/app.html +++ b/frontend/src/app.html @@ -5,6 +5,24 @@ + + + + + + + + + + %sveltekit.head%
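Stepping back to the backend half of this diff: the ranking inside `get_candidate_pages` is the core of the new Wikipedia lookup, and the tie-breaking order is easier to see in isolation. The sketch below restates just the sort key as a standalone function; the name `rank_key` and the sample titles are illustrative stand-ins, not part of the PR, which sorts dicts built from the live search results.

```python
from difflib import SequenceMatcher

# Standalone restatement of the sort key used by get_candidate_pages.
# rank_key and the sample titles are illustrative only.
def rank_key(term: str, title: str, search_score: float = 0.0):
    q, t = term.lower(), title.lower()
    return (
        int(t == q),                                       # exact title match wins outright
        int(t.startswith(q)),                              # then titles starting with the query
        -int('disambiguation' in t or '(disambig' in t),   # push disambiguation pages down
        -int(any(p in t for p in ('list of', 'index of', 'timeline of'))),  # and list/index pages
        SequenceMatcher(None, q, t).ratio(),               # then fuzzy similarity to the query
        search_score,                                      # finally Wikipedia's own relevance score
    )

titles = ['Paris, Texas', 'Paris (disambiguation)', 'List of mayors of Paris', 'Paris']
print(sorted(titles, key=lambda title: rank_key('Paris', title), reverse=True))
# ['Paris', 'Paris, Texas', 'Paris (disambiguation)', 'List of mayors of Paris']
```

Because the prefix check comes before the disambiguation penalty, a disambiguation page whose title starts with the query can still outrank looser matches; the `desc` and `img` actions compensate by skipping those candidates later via `is_disambiguation_page`.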
diff --git a/frontend/src/lib/assets/apple-touch-icon-120.png b/frontend/src/lib/assets/apple-touch-icon-120.png
new file mode 100644
index 00000000..a5e9aeda
Binary files /dev/null and b/frontend/src/lib/assets/apple-touch-icon-120.png differ
diff --git a/frontend/src/lib/assets/apple-touch-icon-152.png b/frontend/src/lib/assets/apple-touch-icon-152.png
new file mode 100644
index 00000000..a9427ec2
Binary files /dev/null and b/frontend/src/lib/assets/apple-touch-icon-152.png differ
diff --git a/frontend/src/lib/assets/apple-touch-icon.png b/frontend/src/lib/assets/apple-touch-icon.png
new file mode 100644
index 00000000..1ae7248e
Binary files /dev/null and b/frontend/src/lib/assets/apple-touch-icon.png differ
diff --git a/frontend/src/lib/components/ClusterMap.svelte b/frontend/src/lib/components/ClusterMap.svelte
new file mode 100644
index 00000000..da222241
--- /dev/null
+++ b/frontend/src/lib/components/ClusterMap.svelte
@@ -0,0 +1,171 @@
+
+
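Closing with one more backend detail worth calling out: the new language handling in `GenerateDescription`. The sketch below restates the behaviour of `get_language` and `build_api_url` outside the ViewSet to show how a `lang` query parameter is normalised, validated and mapped to a Wikipedia subdomain; the function names and the plain dict standing in for `request.query_params` are assumptions made purely for illustration.

```python
import re

# Mirrors GenerateDescription.LANGUAGE_PATTERN from the diff above.
LANGUAGE_PATTERN = re.compile(r"^[a-z0-9-]{2,12}$", re.IGNORECASE)

def resolve_language(query_params: dict) -> str:
    # Equivalent of get_language: normalise, validate, fall back to English.
    candidate = query_params.get('lang') or 'en'
    normalized = candidate.replace('_', '-').lower()
    return normalized if LANGUAGE_PATTERN.match(normalized) else 'en'

def build_api_url(lang: str) -> str:
    # Only the primary subtag selects the Wikipedia subdomain.
    return f'https://{lang.split("-", 1)[0]}.wikipedia.org/w/api.php'

lang = resolve_language({'lang': 'pt-BR'})
print(lang)                                    # pt-br (also sent as Accept-Language)
print(build_api_url(lang))                     # https://pt.wikipedia.org/w/api.php
print(resolve_language({'lang': '<script>'}))  # en, fails validation and falls back
```

The Accept-Language header added in `get_headers` keeps the full tag while the URL uses only the primary subtag, so a `pt-BR` request is served by `pt.wikipedia.org` with the regional preference still expressed in the header. One side effect to be aware of: the few wikis whose subdomains themselves contain a hyphen (`nds-nl`, for example) would be collapsed to their primary subtag as well.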