Mirror of https://github.com/Marketscrape/marketscrape-web.git (synced 2026-02-20 07:34:03 -05:00)
Setup database to store report metrics in search history
db.sqlite3 (BIN)
Binary file not shown.
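db.sqlite3 is the SQLite database file that Django's generated project settings point at by default, so committing it carries the stored search-history rows along with the code. The settings themselves are not part of this diff; assuming the project kept Django's stock configuration, the relevant fragment looks roughly like this:

# settings.py (fragment) -- assumed default Django configuration, not shown in this commit
from pathlib import Path

BASE_DIR = Path(__file__).resolve().parent.parent

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',  # the binary file changed by this commit
    }
}

Because SQLite keeps everything in that single file, every row created through Item.objects.create ends up inside it, which is presumably why the binary changes here.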
@@ -22,17 +22,19 @@
     <table class="table table-striped mx-auto">
         <thead>
             <tr>
                 <th></th>
                 <th>Item</th>
                 <th>Score</th>
                 <th>Link</th>
                 <th>Rating</th>
                 <th>Report</th>
             </tr>
         </thead>
         <tbody>
-            {% for result in results %}
+            {% for item in latest_items %}
             <tr>
-                <td>{{ result.item }}</td>
-                <td>{{ result.price }}</td>
-                <td><a href="{{ result.link }}" target="_blank">{{ result.link }}</a></td>
+                <td style="vertical-align: middle;"><img src="{{ item.image }}" class="figure-img img-fluid rounded" style="width: 30%; object-fit: fill;"></td>
+                <td style="vertical-align: middle;"><a href="{{ item.url }}" target="_blank">{{ item.title }}</a></td>
+                <td style="vertical-align: middle;">{{ item.rating }}/5.0</td>
+                <td style="vertical-align: middle;"></td>
             </tr>
             {% empty %}
             <tr>
@@ -5,58 +5,67 @@ from .utils import *
 from .scraper_class import FacebookScraper
 import re
 import statistics
+from .models import Item
 
 class Index(View):
     def get(self, request):
         form = MarketForm()
-        return render(request, 'scraper/index.html', {'form': form})
+        latest_items = Item.objects.all().order_by('-id')[:10]
+        context = {'form': form, 'latest_items': latest_items}
+        return render(request, 'scraper/index.html', context)
 
     def post(self, request):
         form = MarketForm(request.POST)
         if form.is_valid():
             url = form.cleaned_data['url']
+            shortened_url = re.search(r".*[0-9]", url).group(0)
+            mobile_url = shortened_url.replace("www", "m")
+            market_id = re.search(r"\/item\/([0-9]*)", url).group(1)
+            mobile_soup = create_soup(mobile_url, headers=None)
+            base_soup = create_soup(url, headers=None)
+            scraper_instance = FacebookScraper(mobile_soup, base_soup)
 
-            shortened_url = re.search(r".*[0-9]", url).group(0)
-            mobile_url = shortened_url.replace("www", "m")
+            listing_image = scraper_instance.get_listing_image()
+            listing_days, listing_hours = scraper_instance.get_listing_date()
+            listing_description = scraper_instance.get_listing_description()
+            title = scraper_instance.get_listing_title()
+            list_price = scraper_instance.get_listing_price()
 
-            market_id = re.search(r"\/item\/([0-9]*)", url).group(1)
+            sentiment_rating = sentiment_analysis(listing_description)
 
-            mobile_soup = create_soup(mobile_url, headers=None)
-            base_soup = create_soup(url, headers=None)
-            scraper_instance = FacebookScraper(mobile_soup, base_soup)
+            list_price = re.sub("[\$,]", "", list_price)
+            initial_price = int(re.sub("[\$,]", "", list_price))
 
-            listing_image = scraper_instance.get_listing_image()
-            listing_days, listing_hours = scraper_instance.get_listing_date()
-            listing_description = scraper_instance.get_listing_description()
-            title = scraper_instance.get_listing_title()
-            list_price = scraper_instance.get_listing_price()
+            lower_bound, upper_bound, median = find_viable_product(title, ramp_down=0.0)
+            price_rating = price_difference_rating(initial_price, median)
+            average_rating = statistics.mean([sentiment_rating, price_rating])
 
-            sentiment_rating = sentiment_analysis(listing_description)
+            # Create a new Item object
+            average_rating = round(average_rating, 1)
+            item = Item.objects.create(image=listing_image[0], title=title, rating=average_rating, url=shortened_url)
 
-            list_price = re.sub("[\$,]", "", list_price)
-            initial_price = int(re.sub("[\$,]", "", list_price))
+            context = {
+                'shortened_url': shortened_url,
+                'mobile_url': mobile_url,
+                'market_id': market_id,
+                'sentiment_rating': round(sentiment_rating, 1),
+                'title': title,
+                'list_price': "{0:,.2f}".format(float(list_price)),
+                'initial_price': initial_price,
+                'lower_bound': "{0:,.2f}".format(lower_bound),
+                'upper_bound': "{0:,.2f}".format(upper_bound),
+                'median': "{0:,.2f}".format(median),
+                'price_rating': round(price_rating, 1),
+                'average_rating': average_rating,
+                'days': listing_days,
+                'hours': listing_hours,
+                'image': listing_image[0],
+                'id': market_id
+            }
 
-            lower_bound, upper_bound, median = find_viable_product(title, ramp_down=0.0)
-            price_rating = price_difference_rating(initial_price, median)
-            average_rating = statistics.mean([sentiment_rating, price_rating])
+            return render(request, 'scraper/result.html', context)
 
-            context = {
-                'shortened_url': shortened_url,
-                'mobile_url': mobile_url,
-                'market_id': market_id,
-                'sentiment_rating': round(sentiment_rating, 1),
-                'title': title,
-                'list_price': "{0:,.2f}".format(float(list_price)),
-                'initial_price': initial_price,
-                'lower_bound': "{0:,.2f}".format(lower_bound),
-                'upper_bound': "{0:,.2f}".format(upper_bound),
-                'median': "{0:,.2f}".format(median),
-                'price_rating': round(price_rating, 1),
-                'average_rating': round(average_rating, 1),
-                'days': listing_days,
-                'hours': listing_hours,
-                'image': listing_image[0],
-                'id': market_id
-            }
 
-            return render(request, 'scraper/result.html', context)
         else:
+            latest_items = Item.objects.all().order_by('-id')[:10]
+            context = {'form': form, 'latest_items': latest_items}
+            return render(request, 'scraper/index.html', context)
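The Item model that these views create and query (presumably scraper/models.py) is not part of this diff. As a minimal sketch only, assuming the four fields the view and template actually touch (image, title, rating, url); the field types and max_length values below are guesses, not the project's real definitions:

# scraper/models.py -- hypothetical sketch; the real model is not shown in this commit
from django.db import models


class Item(models.Model):
    # Fields inferred from Item.objects.create(image=..., title=..., rating=..., url=...)
    # and the template's item.image / item.title / item.rating / item.url lookups.
    image = models.URLField(max_length=500)   # scraped listing image URL
    title = models.CharField(max_length=255)  # listing title
    rating = models.FloatField()              # rounded mean of sentiment and price ratings
    url = models.URLField(max_length=500)     # shortened Marketplace listing URL

    def __str__(self):
        return self.title

Ordering by -id, as both views do, gives a newest-first history without an explicit timestamp; a created_at = models.DateTimeField(auto_now_add=True) field would be the more conventional way to express the same thing.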