Fixed food import bug

This commit is contained in:
michigg 2017-12-04 05:17:46 +01:00
parent 6ce4f8e79b
commit b253e9cd09
5 changed files with 59 additions and 10 deletions

View File

@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-04 03:54
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc

class Migration(migrations.Migration):
    dependencies = [
        ('donar', '0004_auto_20171204_0447'),
    ]
    operations = [
        migrations.AlterField(
            model_name='lecture_terms',
            name='starttime',
            field=models.TimeField(default=datetime.datetime(2017, 12, 4, 3, 54, 33, 503002, tzinfo=utc)),
        ),
    ]
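If the frozen timestamp above was only the one-off default typed in at the makemigrations prompt, it is harmless for existing rows; as a permanent default, though, every new row would get 03:54 as its start time. A minimal sketch of a callable default instead, purely illustrative (model name and helper are hypothetical stand-ins for the donar model):

from django.db import models
from django.utils import timezone

def current_time():
    # Evaluated on each save, not once when makemigrations runs.
    return timezone.now().time()

class LectureTerms(models.Model):  # hypothetical name, for illustration only
    starttime = models.TimeField(default=current_time)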

View File

@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-04 03:54
from __future__ import unicode_literals
from django.db import migrations, models

class Migration(migrations.Migration):
    dependencies = [
        ('food', '0002_auto_20171204_0303'),
    ]
    operations = [
        migrations.AlterField(
            model_name='singlefood',
            name='image',
            field=models.ManyToManyField(blank=True, null=True, to='food.UserFoodImage'),
        ),
    ]

View File

@@ -27,7 +27,7 @@ class SingleFood(models.Model):
    price_student = models.CharField(max_length=10, blank=True, null=True)
    price_employee = models.CharField(max_length=10, blank=True, null=True)
    price_guest = models.CharField(max_length=10, blank=True, null=True)
    image = models.ManyToManyField("UserFoodImage", related_name='user_images')
    image = models.ManyToManyField("UserFoodImage", blank=True, null=True)
    rating = models.FloatField(default=0)
    allergens = models.ManyToManyField("Allergene", blank=True)
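One note on the new field: on a ManyToManyField, null=True has no effect, because the relation lives in a separate join table, and Django flags it with the fields.W340 system-check warning; blank=True alone is what makes the relation optional in forms and the admin. A trimmed sketch without the redundant flag (surrounding fields omitted):

from django.db import models

class SingleFood(models.Model):
    # blank=True is what makes the relation optional; null=True is ignored on
    # ManyToManyField (Django warns with fields.W340), so it can be dropped.
    image = models.ManyToManyField("UserFoodImage", blank=True)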

View File

@@ -45,9 +45,12 @@ def writeStudentenwerkDataInDB(data):
except IntegrityError:
    db_single_food = SingleFood.objects.get(name=single_food['title'])
if 'prices' in single_food:
    db_single_food.price_student = single_food['prices']['price_student'],
    db_single_food.price_employee = single_food['prices']['price_employee'],
    db_single_food.price_guest = single_food['prices']['price_guest']
    if 'price_student' in single_food['prices']:
        db_single_food.price_student = single_food['prices']['price_student']
    if 'price_employee' in single_food['prices']:
        db_single_food.price_employee = single_food['prices']['price_employee']
    if 'price_guest' in single_food['prices']:
        db_single_food.price_guest = single_food['prices']['price_guest']
if 'allergens' in locals():
    db_single_food.allergens = allergens
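Besides guarding against missing keys, the new lines also drop the trailing commas of the old assignments, which had been storing price_student and price_employee as one-element tuples. The same guard can be written more compactly with dict.get; a standalone sketch with sample data, not the committed code (note that .get() overwrites an existing value with None when a key is missing, unlike the per-key checks above):

# Sketch only: dict.get with plain data instead of the ORM objects.
single_food = {'title': 'Linsensuppe', 'prices': {'price_student': '2,50'}}

prices = single_food.get('prices', {})
price_student = prices.get('price_student')    # '2,50'
price_employee = prices.get('price_employee')  # None – key absent, no KeyError
price_guest = prices.get('price_guest')        # None
print(price_student, price_employee, price_guest)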

View File

@@ -3,7 +3,8 @@ from bs4 import BeautifulSoup
import json
import datetime
FEKI_URL = "https://www.studentenwerk-wuerzburg.de/bamberg/essen-trinken/speiseplaene.html?tx_thmensamenu_pi2%5Bmensen%5D=3&tx_thmensamenu_pi2%5Baction%5D=show&tx_thmensamenu_pi2%5Bcontroller%5D=Speiseplan&cHash=c3fe5ebb35e5fba3794f01878e798b7c"
# FEKI_URL = "https://www.studentenwerk-wuerzburg.de/bamberg/essen-trinken/speiseplaene.html?tx_thmensamenu_pi2%5Bmensen%5D=3&tx_thmensamenu_pi2%5Baction%5D=show&tx_thmensamenu_pi2%5Bcontroller%5D=Speiseplan&cHash=c3fe5ebb35e5fba3794f01878e798b7c"
def loadPage(url: str):
@@ -27,9 +28,13 @@ def getFoodPerDay(soup):
title = singleFoodSoup.find('div', {'class': 'title'}).getText()
allergens = [e.getText() for e in singleFoodSoup.select('.left .additnr .toggler ul li')]
prices = {}
prices['price_student'] = singleFoodSoup.select('.price')[0]['data-default']
prices['price_employee'] = singleFoodSoup.select('.price')[0]['data-bed']
prices['price_guest'] = singleFoodSoup.select('.price')[0]['data-guest']
print(singleFoodSoup.select('.price'))
if singleFoodSoup.select('.price'):
    prices['price_student'] = singleFoodSoup.select('.price')[0]['data-default']
if singleFoodSoup.select('.price'):
    prices['price_employee'] = singleFoodSoup.select('.price')[0]['data-bed']
if singleFoodSoup.select('.price'):
    prices['price_guest'] = singleFoodSoup.select('.price')[0]['data-guest']
singleFoodObj['title'] = title
singleFoodObj['allergens'] = allergens
singleFoodObj['prices'] = prices
@@ -64,5 +69,4 @@ def getFoodplanName(soup):
foodplan_name = soup.select('.mensamenu h2')[0].getText()
return foodplan_name
parsePage(FEKI_URL)
# parsePage(FEKI_URL)
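For the price parsing in getFoodPerDay, the three identical checks re-run the same select('.price') query several times per dish; caching the result once behaves the same. A self-contained sketch against made-up markup (the attribute names data-default, data-bed, data-guest come from the diff above; the HTML snippet itself is invented for the example):

from bs4 import BeautifulSoup

html = '<div class="price" data-default="2,50" data-bed="3,50" data-guest="4,50"></div>'
singleFoodSoup = BeautifulSoup(html, 'html.parser')

prices = {}
price_nodes = singleFoodSoup.select('.price')  # query the DOM once
if price_nodes:
    price = price_nodes[0]
    # Tag.get returns None instead of raising when an attribute is missing.
    prices['price_student'] = price.get('data-default')
    prices['price_employee'] = price.get('data-bed')
    prices['price_guest'] = price.get('data-guest')
print(prices)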