Try to fix UnivIS non-filtering error for events
parent 81db0b2381
commit 3404b88c8e

ofu_app/.idea/workspace.xml (generated)
File diff suppressed because it is too large
ofu_app/apps/donar/migrations/0003_auto_20171004_0211.py (new file)
@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-10-04 00:11
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('donar', '0002_auto_20171004_0016'),
    ]

    operations = [
        migrations.AddField(
            model_name='room',
            name='description',
            field=models.CharField(default='', max_length=200),
        ),
        migrations.AddField(
            model_name='room',
            name='size',
            field=models.CharField(default='', max_length=60),
        ),
        migrations.AlterField(
            model_name='room',
            name='address',
            field=models.CharField(default='', max_length=60),
        ),
        migrations.AlterField(
            model_name='room',
            name='building_key',
            field=models.CharField(default='', max_length=60),
        ),
        migrations.AlterField(
            model_name='room',
            name='floor',
            field=models.CharField(default='', max_length=60),
        ),
        migrations.AlterField(
            model_name='room',
            name='key',
            field=models.CharField(default='', max_length=60),
        ),
        migrations.AlterField(
            model_name='room',
            name='name',
            field=models.CharField(default='', max_length=60),
        ),
        migrations.AlterField(
            model_name='room',
            name='orgname',
            field=models.CharField(default='', max_length=60),
        ),
    ]
@@ -6,10 +6,14 @@ MAX_LENGTH = 60

 # Create your models here.
 class Room(models.Model):
     id = models.AutoField(primary_key=True)
-    key = models.CharField(max_length=MAX_LENGTH)
-    address = models.CharField(max_length=MAX_LENGTH)
-    building_key = models.CharField(max_length=MAX_LENGTH)
-    floor = models.CharField(max_length=MAX_LENGTH)
-    name = models.CharField(max_length=MAX_LENGTH)
-    orgname = models.CharField(max_length=MAX_LENGTH)
+    key = models.CharField(max_length=MAX_LENGTH, default="")
+    address = models.CharField(max_length=MAX_LENGTH, default="")
+    building_key = models.CharField(max_length=MAX_LENGTH, default="")
+    floor = models.CharField(max_length=MAX_LENGTH, default="")
+    name = models.CharField(max_length=MAX_LENGTH, default="")
+    orgname = models.CharField(max_length=MAX_LENGTH, default="")
     short = models.CharField(unique=True, max_length=MAX_LENGTH)
+    size = models.CharField(max_length=MAX_LENGTH, default="")
+    description = models.CharField(max_length=200, default="")
@@ -18,7 +18,6 @@ def getJsonFromFile(path):


 def writeFekideDataInDB(data):
     for room in data:
         try:
             key = ""
@@ -28,12 +27,12 @@ def writeFekideDataInDB(data):
             name = ""
             orgname = ""
             short = ""
+            size = ""
+            description = ""
             if '@key' in room:
                 key = room['@key']

             if 'address' in room:
                 address = room['address']

             if 'buildingkey' in room:
                 building_key = room['buildingkey']
             if 'floor' in room:
@@ -42,9 +41,13 @@ def writeFekideDataInDB(data):
                 name = room['name']
             if 'short' in room:
                 short = room['short']
+            if 'size' in room:
+                size = room['size']
+            if 'description' in room:
+                description = room['description']

             Room.objects.create(key=key, address=address, building_key=building_key, floor=floor, name=name,
-                                orgname=orgname, short=short)
+                                orgname=orgname, short=short, size=size, description=description)
         except IntegrityError:
             # ignored
             break
@@ -4,12 +4,6 @@ import xmltodict
 import json
 from pprint import pprint

-# CONFIG
-UNIVIS_RPG_GuK = "http://univis.uni-bamberg.de/prg?search=rooms&department=Fakult%E4t%20Geistes-%20und%20Kulturwissenschaften&show=xml"
-UNIVIS_RPG_SoWi = "http://univis.uni-bamberg.de/prg?search=rooms&department=Fakult%E4t%20Sozial-%20und%20Wirtschaftswissenschaften&show=xml"
-UNIVIS_RPG_HuWi = "http://www.config.de/cgi-bin/prg-wizard.pl"
-UNIVIS_RPG_WIAI = "http://univis.uni-bamberg.de/prg?search=rooms&department=Fakult%E4t%20Wirtschaftsinformatik&show=xml"
-

 def loadPage(url: str):
     return requests.get(url).content
@@ -11,6 +11,6 @@ class Command(BaseCommand):
         pass

     def handle(self, *args, **options):
-        controller_json_events.main("apps/events/utils/json_generator/jsons/")
-        migrate_data.main("apps/events/utils/json_generator/jsons/")
+        # controller_json_events.main("apps/events/utils/json_generator/jsons/")
+        migrate_data.main()
         self.stdout.write(self.style.SUCCESS('Successfully migrate data'))
ofu_app/apps/events/migrations/0002_auto_20171005_1655.py (new file)
@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-10-05 14:55
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('events', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Location',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('key', models.CharField(max_length=60)),
                ('name', models.CharField(max_length=60)),
            ],
        ),
        migrations.AlterUniqueTogether(
            name='event',
            unique_together=set([]),
        ),
        migrations.RemoveField(
            model_name='event',
            name='location',
        ),
        migrations.AddField(
            model_name='event',
            name='location',
            field=models.ManyToManyField(to='events.Location'),
        ),
    ]
ofu_app/apps/events/migrations/0003_auto_20171005_1807.py (new file)
@@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-10-05 16:07
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('events', '0002_auto_20171005_1655'),
    ]

    operations = [
        migrations.AddField(
            model_name='event',
            name='orgname',
            field=models.CharField(blank=True, max_length=60),
        ),
        migrations.AddField(
            model_name='event',
            name='presenter',
            field=models.CharField(blank=True, max_length=60),
        ),
        migrations.AlterField(
            model_name='event',
            name='link',
            field=models.CharField(blank=True, max_length=60),
        ),
        migrations.AlterField(
            model_name='location',
            name='key',
            field=models.CharField(blank=True, max_length=60),
        ),
        migrations.AlterUniqueTogether(
            name='event',
            unique_together=set([('date', 'time', 'title')]),
        ),
    ]
ofu_app/apps/events/migrations/0004_auto_20171005_1923.py (new file)
@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-10-05 17:23
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('events', '0003_auto_20171005_1807'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='name',
            field=models.CharField(max_length=60, unique=True),
        ),
    ]
ofu_app/apps/events/migrations/0005_auto_20171005_2004.py (new file)
@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-10-05 18:04
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('events', '0004_auto_20171005_1923'),
    ]

    operations = [
        migrations.RenameField(
            model_name='event',
            old_name='location',
            new_name='locations',
        ),
    ]
@@ -8,12 +8,26 @@ MAX_LENGTH = 60

 # Create your models here.
 class Event(models.Model):
     id = models.AutoField(primary_key=True)
-    title = models.CharField(max_length=MAX_LENGTH)
-    category = models.CharField(max_length=MAX_LENGTH)
-    link = models.CharField(max_length=MAX_LENGTH)
-    location = models.CharField(max_length=MAX_LENGTH)
-    time = models.TimeField(default=timezone.now)
-    date = models.DateField(default=timezone.now)
+    title = models.CharField(blank=False, max_length=MAX_LENGTH)
+    category = models.CharField(blank=False, max_length=MAX_LENGTH)
+    link = models.CharField(blank=True, max_length=MAX_LENGTH)
+    locations = models.ManyToManyField('Location', blank=False)
+    date = models.DateField(blank=False, default=timezone.now)
+    time = models.TimeField(blank=False, default=timezone.now)
+    presenter = models.CharField(blank=True, max_length=MAX_LENGTH)
+    orgname = models.CharField(blank=True, max_length=MAX_LENGTH)

     def __str__(self):
         return "Date: %s, Titel: %s" % (self.date.strftime("%Y.%m.%d"), self.title)

     class Meta:
-        unique_together = ('date', 'location')
+        unique_together = ('date', 'time', 'title')


+class Location(models.Model):
+    id = models.AutoField(primary_key=True)
+    key = models.CharField(blank=True, max_length=MAX_LENGTH)
+    name = models.CharField(blank=False, unique=True, max_length=MAX_LENGTH)
+
+    def __str__(self):
+        return str(self.name)
File diff suppressed because one or more lines are too long
@@ -1,36 +1,127 @@
 import json
 from datetime import datetime
+from datetime import timedelta
 from pprint import pprint
 from django.db.utils import IntegrityError
+from apps.events.utils.parser import univis_eventpage_parser
+from apps.events.utils.parser import fekide_eventpage_parser

-from apps.events.models import Event
+from apps.events.models import Event, Location

-# JSON_FILES_PATH_EVENTS = "json_generator/jsons/"
-JSON_FILES_PATH_EVENTS = "events/json_generator/jsons/"
+UNIVIS_CATEGORY = 'Univis'


-def getJsonFromFile(path):
-    with open(path, "r") as file:
-        return json.load(file)
+# CONFIG
+UNIVIS_RPG_GuK = "http://univis.uni-bamberg.de/prg?search=events&department=Fakult%E4t%20Geistes-%20und%20Kulturwissenschaften&show=xml"
+UNIVIS_RPG_SoWi = "http://univis.uni-bamberg.de/prg?search=events&department=Fakult%E4t%20Sozial-%20und%20Wirtschaftswissenschaften&show=xml"
+UNIVIS_RPG_HuWi = "http://univis.uni-bamberg.de/prg?search=events&department=Fakult%E4t%20Humanwissenschaften&show=xml"
+UNIVIS_RPG_WIAI = "http://univis.uni-bamberg.de/prg?search=events&department=Fakult%E4t%20Wirtschaftsinformatik&show=xml"


 def writeFekideDataInDB(data):
     for date in data['dates']:
         for event in date['events']:
             try:
-                time = datetime.strptime(str(event['time']).split()[1], "%H:%M")
-                Event.objects.create(date=datetime.strptime(date['date'], "%d.%m.%Y"), category=event['category'],
-                                     link=event['link'], location=event['location'], title=event['title'], time=time)
+                Location.objects.create(name=event['location'])
+                event_obj = Event.objects.create()
+                event_obj.save()
+                event_obj.date = datetime.strptime(date['date'], "%d.%m.%Y")
+                event_obj.category = event['category']
+                event_obj.link = event['link']
+                event_obj.time = datetime.strptime(str(event['time']).split()[1], "%H:%M")
+                event_obj.title = event['title']
+                event_obj.locations.add(Location.objects.get(name=event['location']))
+                event_obj.save()
+                Event.objects.filter(title="").delete()
             except IntegrityError:
                 # ignored
                 break


-def main(path=JSON_FILES_PATH_EVENTS):
-    # get food jsons
-    writeFekideDataInDB(getJsonFromFile(path + "events-fekide.json"))
+def deleteUnivisObjects():
+    Event.objects.filter(category=UNIVIS_CATEGORY).delete()
+    Location.objects.all().delete()
+
+
+def writeUnivisDataInDB(events, rooms, persons):
+    writeUnivisLocationsInDB(rooms)
+    writeUnivisEventsInDB(events)
+
+
+def writeUnivisLocationsInDB(rooms):
+    for room in rooms:
+        if '@key' in room and 'short' in room:
+            try:
+                Location.objects.create(key=room['@key'], name=room['short'])
+            except IntegrityError:
+                print("Possible Duplicate! Start DB refresh")
+                try:
+                    Location.objects.get(name=room['short']).key = room['@key']
+                except Exception as harderr:
+                    print("Failed to refresh object" + harderr.args)
+            except Exception as err:
+                print(err.args)
+
+
+def getLocationIDs(event):
+    rooms = []
+    if not isinstance(event['rooms']['room'], list):
+        rooms.append(event['rooms']['room']['UnivISRef']['@key'])
+    else:
+        for room_item in event['rooms']['room']:
+            rooms.append(room_item['UnivISRef']['@key'])
+    return rooms
+
+
+def writeUnivisEventsInDB(events: list):
+    for event in events:
+        if 'calendar' in event and not event['calendar'] == 'nein' and 'internal' in event and event[
+            'internal'] == 'nein' and 'startdate' in event and 'enddate' in event:
+            startdate = datetime.strptime(event['startdate'], "%Y-%m-%d")
+            enddate = datetime.strptime(event['enddate'], "%Y-%m-%d")
+            if (startdate + timedelta(1)) >= enddate:
+
+                if 'title' in event and 'rooms' in event and 'starttime' in event:
+                    # TODO: Add Description in
+                    # TODO: Is there a better way to add Objects in DB?
+                    event_obj = Event()
+                    event_obj.save()
+                    event_obj.title = event['title']
+                    locations = getLocationIDs(event)
+                    for location in locations:
+                        event_obj.locations.add(Location.objects.get(key=location))
+                    event_obj.time = event['starttime']
+                    event_obj.date = event['startdate']
+                    # TODO: Better Category handling
+                    event_obj.category = UNIVIS_CATEGORY
+                    if 'presenter' in event:
+                        event_obj.presenter = event['presenter']
+                    if 'orgname' in event:
+                        event_obj.orgname = event['orgname']
+                    try:
+                        event_obj.save()
+                    except IntegrityError:
+                        # TODO: Update DB Object if duplicate detected
+                        print("Found Duplicate!")
+                    except Exception as err:
+                        print(err.args)
+    Event.objects.filter(title="").delete()
+
+
+def main():
+    deleteUnivisObjects()
+    events, rooms, persons = univis_eventpage_parser.parsePage(UNIVIS_RPG_HuWi)
+    writeUnivisDataInDB(events, rooms, persons)
+    events, rooms, persons = univis_eventpage_parser.parsePage(UNIVIS_RPG_SoWi)
+    writeUnivisDataInDB(events, rooms, persons)
+    events, rooms, persons = univis_eventpage_parser.parsePage(UNIVIS_RPG_GuK)
+    writeUnivisDataInDB(events, rooms, persons)
+    events, rooms, persons = univis_eventpage_parser.parsePage(UNIVIS_RPG_WIAI)
+    writeUnivisDataInDB(events, rooms, persons)
+
+    writeFekideDataInDB(fekide_eventpage_parser.parsePage())
+
+    pprint("Event: " + str(Event.objects.count()))
+    pprint("Location: " + str(Location.objects.count()))


 if __name__ == '__main__':
@@ -64,7 +64,6 @@ def getAllDates(url: str):
 def parsePage():
     pagecontent = {}
     pagecontent['dates'] = getAllDates(LINK_FEKIDE_Events)
-    jsondata = json.dumps(pagecontent)
-    return jsondata
+    return pagecontent

 # parsePage()
@@ -1,10 +1,14 @@
 import requests
 import datetime
 import xmltodict
 from pprint import pprint
 import json

 # CONFIG
 UNIVIS_RPG_URL = "http://univis.uni-bamberg.de/prg?search=events&show=xml"
+UNIVIS_RPG_GuK = "http://univis.uni-bamberg.de/prg?search=events&department=Fakult%E4t%20Geistes-%20und%20Kulturwissenschaften&show=xml"
+UNIVIS_RPG_SoWi = "http://univis.uni-bamberg.de/prg?search=events&department=Fakult%E4t%20Sozial-%20und%20Wirtschaftswissenschaften&show=xml"
+UNIVIS_RPG_HuWi = "http://univis.uni-bamberg.de/prg?search=events&department=Fakult%E4t%20Humanwissenschaften&show=xml"
+UNIVIS_RPG_WIAI = "http://univis.uni-bamberg.de/prg?search=events&department=Fakult%E4t%20Wirtschaftsinformatik&show=xml"


 def loadPage(url: str):
@@ -39,15 +43,30 @@ def resolveDates(json):
             event['enddate'] = end_datetime


-def parsePage():
-    # {Univis: {'Event':[{,,,,},,,,]}}
-    page = loadPage(UNIVIS_RPG_URL)
-    dict = xmltodict.parse(page)
-    # resolveUnivisRefs(dict)
-    json_data = json.dumps(dict)
-    json_data = json.loads(json_data)
-    resolveDates(json_data)
-    json_data['last_execute'] = getDay()
-    return json.dumps(json_data)
-
-# parsePage()
+def getEvents(dict):
+    events = []
+    for event in dict['UnivIS']['Event']:
+        events.append(event)
+    return events
+
+
+def getRooms(dict: dict):
+    rooms = []
+    for room in dict['UnivIS']['Room']:
+        rooms.append(room)
+    return rooms
+
+
+def getPersons(dict: dict):
+    persons = []
+    for person in dict['UnivIS']['Person']:
+        persons.append(person)
+    return persons
+
+
+def parsePage(url):
+    page = loadPage(url)
+    dict = xmltodict.parse(page)
+    return getEvents(dict), getRooms(dict), getPersons(dict)
+
+# parsePage(UNIVIS_RPG_GuK)
@@ -13,7 +13,7 @@ def events_main_page(request):
 def all_events(request):
     today = datetime.datetime.now()
     all_future_events = Event.objects.filter(date__gte=today)
-    lastdate = all_future_events.last().date
+    lastdate = Event.objects.latest('date').date
     return render(request, "events/all_events.jinja", {
         'startdate': today,
         'events': all_future_events,
@@ -16,16 +16,16 @@ class Menu(models.Model):
     class Meta:
         unique_together = ('date', 'location')

-    # def __str__(self):
-    #     return self.date.strftime("%d.%m.%Y")
+    def __str__(self):
+        return "Date: %s, Location: %s" % (self.date.strftime("%d.%m.%Y"), self.location)


 class SingleFood(models.Model):
     id = models.AutoField(primary_key=True)
     name = models.CharField(unique=True, max_length=MAX_LENGTH)

-    # def __str__(self):
-    #     return self.name
+    def __str__(self):
+        return self.name


 class HappyHour(models.Model):
@@ -39,5 +39,5 @@ class HappyHour(models.Model):
     class Meta:
         unique_together = ('date', 'location')

-    # def __str__(self):
-    #     return "Date: %s, Location: %s" % (self.date.strftime("%Y.%m.%d"), self.location)
+    def __str__(self):
+        return "Date: %s, Location: %s" % (self.date.strftime("%Y.%m.%d"), self.location)
@@ -1 +0,0 @@
-{"execution_time": "Tuesday, 03.10.2017", "weekmenu": [{"date": "24.07.", "menu": ["Rinderhacksteak an Cognacso\u00dfe und Sp\u00e4tzle"]}, {"date": "25.07.", "menu": ["Gem\u00fcseschnitzel mit Kr\u00e4utercreme und Kartoffeln"]}, {"date": "26.07.", "menu": ["Penne-Nudeln mit Hackfleischso\u00dfe"]}, {"date": "27.07.", "menu": ["Kaiserschmarrn mit Apfelmus"]}], "name": "Cafeteria Erba-Insel"}
@@ -1 +1 @@
-{"execution_time": "Tuesday, 03.10.2017", "weekmenu": [{"date": "24.07.", "menu": ["Vegetarisches Chili sin Carne"]}, {"date": "25.07.", "menu": ["Gem\u00fcseschnitzel mit Kr\u00e4utercreme und Kartoffeln"]}, {"date": "26.07.", "menu": ["\u00dcberbackene Zucchini an Paprikaso\u00dfe mit P\u00fcree"]}, {"date": "27.07.", "menu": ["Kaiserschmarrn mit Apfelmus"]}], "name": "Cafeteria Markusplatz"}
+{"execution_time": "Thursday, 05.10.2017", "name": "Cafeteria Markusplatz", "weekmenu": [{"menu": ["Vegetarisches Chili sin Carne"], "date": "24.07."}, {"menu": ["Gem\u00fcseschnitzel mit Kr\u00e4utercreme und Kartoffeln"], "date": "25.07."}, {"menu": ["\u00dcberbackene Zucchini an Paprikaso\u00dfe mit P\u00fcree"], "date": "26.07."}, {"menu": ["Kaiserschmarrn mit Apfelmus"], "date": "27.07."}]}
@@ -1 +1 @@
-{"execution_time": "Tuesday, 03.10.2017", "day": "Tuesday, 03.10.2017", "happyhours": []}
+{"happyhours": [{"time": "11:00 - 18:00", "description": "10% Rabatt auf Beefburger & belg. Fritten", "location": "Fritz Heaven"}, {"time": "12:00 - 22:00", "description": "Burger, Wraps und Baguettes f\u00fcr 5,20 \u20ac", "location": "Caf\u00e9 Esspress"}, {"time": "16:00 - 20:00", "description": "Aperitivo Italiano incl. Antipasti", "location": "Caff\u00e8bar Kranen"}, {"time": "16:00 - 23:59", "description": "Burger-Tag 6\u20ac", "location": "Bowlinghaus Bamberg"}, {"time": "17:00 - 21:00", "description": "Burger f\u00fcr Studenten 5,50\u20ac", "location": "Cafe Arte"}, {"time": "17:30 - 21:00", "description": "Alle vegetarischen Hauptgerichte kosten f\u00fcr Studierende nur 8,90 \u20ac", "location": "Restaurant Kornblume"}, {"time": "18:00 - 21:00", "description": "Burger mit Pommes oder Westernkartoffeln + 0,5 Mahr's Bier - 7,99\u20ac", "location": "Brasserie"}, {"time": "18:00 - 21:00", "description": "Spare-Ribs AYCE mit Pommes 9,90 \u20ac oder mit Weissbrot 7,70 \u20ac", "location": "Bamberger Weissbierhaus"}, {"time": "19:00 - 22:00", "description": "Brotzeitplatten 7,50 \u20ac", "location": "Ah\u00f6rnla im Sand"}, {"time": "19:00 - 23:00", "description": "Jeder Burger f\u00fcr 4,90 \u20ac", "location": "Mainfranken Bowling"}], "execution_time": "Thursday, 05.10.2017", "day": "Thursday, 05.10.2017"}
@@ -1 +1 @@
-{"execution_time": "Tuesday, 03.10.2017", "weekmenu": [{"date": "02.10.", "menu": ["Pennenudeln mit Lachs, Spinat und Kirschtomaten", "Rinderhacksteak mit Cognac - Pfefferso\u00dfe", "Schneller Teller: H\u00e4hnchenspie\u00df mit Gefl\u00fcgelso\u00dfe, Bratkartoffeln und Blumenkohl", "Kartoffelgnocchi - Gem\u00fcsepfanne", "Dampfkartoffeln mit hausgemachtem Schnittlauchquark"]}, {"date": "03.10.", "menu": []}, {"date": "04.10.", "menu": ["Siebenschwabenplatte mit Bratenso\u00dfe", "Putenbrustgeschnetzeltes in Pilzrahm", "Schneller Teller: Veganes Soja - Gem\u00fcsegeschnetzeltes mit Penine Rigate, Apfel", "\u00dcberbackene Zucchini \"mediterrane Art\" an Paprikaso\u00dfe"]}, {"date": "05.10.", "menu": ["Chicken Burger mit Ananas Currydip", "Schneller Teller: Paprikagulsch vom Schwein mit Spiralnudeln und Rosenkohl", "Nudelgem\u00fcseauflauf", "Pizza \"Spinat & Hirtenk\u00e4se\""]}, {"date": "06.10.", "menu": ["Schneller Teller: Seelachsfilet an \"S\u00fc\u00df - Scharfer\" Tomatenso\u00dfe, Langkornreis und Brokkoli", "Gyros mit hausgemachtem Tsatsiki", "Green Tacos mit Walnuss - Chili - P\u00e2t\u00e9", "Kartoffel Broccoli Gratin"]}], "name": "Speiseplan f\u00fcr Austra\u00dfe Bamberg"}
+{"name": "Speiseplan f\u00fcr Austra\u00dfe Bamberg", "execution_time": "Thursday, 05.10.2017", "weekmenu": [{"menu": ["Pennenudeln mit Lachs, Spinat und Kirschtomaten", "Rinderhacksteak mit Cognac - Pfefferso\u00dfe", "Schneller Teller: H\u00e4hnchenspie\u00df mit Gefl\u00fcgelso\u00dfe, Bratkartoffeln und Blumenkohl", "Kartoffelgnocchi - Gem\u00fcsepfanne", "Dampfkartoffeln mit hausgemachtem Schnittlauchquark"], "date": "02.10."}, {"menu": [], "date": "03.10."}, {"menu": ["Siebenschwabenplatte mit Bratenso\u00dfe", "Moussaka vom Rind", "Putenbrustgeschnetzeltes in Pilzrahm", "Schneller Teller: Veganes Soja - Gem\u00fcsegeschnetzeltes mit Penine Rigate, Apfel", "\u00dcberbackene Zucchini \"mediterrane Art\" an Paprikaso\u00dfe"], "date": "04.10."}, {"menu": ["Chicken Burger mit Ananas Currydip", "Kasseler mit gr\u00fcnem Bohnengem\u00fcse", "Schneller Teller: Paprikagulsch vom Schwein mit Spiralnudeln und Rosenkohl", "Putenbrustgeschnetzeltes in Pilzrahm", "Schneller Teller: Veganes Soja - Gem\u00fcsegeschnetzeltes mit Penine Rigate, Apfel", "Nudelgem\u00fcseauflauf", "Pizza \"Spinat & Hirtenk\u00e4se\""], "date": "05.10."}, {"menu": ["Schneller Teller: Seelachsfilet an \"S\u00fc\u00df - Scharfer\" Tomatenso\u00dfe, Langkornreis und Brokkoli", "Gyros mit hausgemachtem Tsatsiki", "Green Tacos mit Walnuss - Chili - P\u00e2t\u00e9"], "date": "06.10."}]}
@@ -1 +1 @@
-{"execution_time": "Tuesday, 03.10.2017", "weekmenu": [{"date": "02.10.", "menu": ["Rinderhacksteak mit Cognac - Pfefferso\u00dfe", "Schneller Teller: H\u00e4hnchenspie\u00df mit Gefl\u00fcgelso\u00dfe, Bratkartoffeln und Blumenkohl", "Kartoffelgnocchi - Gem\u00fcsepfanne", "Dampfkartoffeln mit hausgemachtem Schnittlauchquark"]}, {"date": "03.10.", "menu": []}, {"date": "04.10.", "menu": ["Siebenschwabenplatte mit Bratenso\u00dfe", "Putenbrustgeschnetzeltes in Pilzrahm", "Schneller Teller: Veganes Soja - Gem\u00fcsegeschnetzeltes mit Penine Rigate, Apfel", "\u00dcberbackene Zucchini \"mediterrane Art\" an Paprikaso\u00dfe"]}, {"date": "05.10.", "menu": ["Chicken Burger mit Ananas Currydip", "Schneller Teller: Paprikagulsch vom Schwein mit Spiralnudeln und Rosenkohl", "Nudelgem\u00fcseauflauf", "Pizza \"Spinat & Hirtenk\u00e4se\""]}, {"date": "06.10.", "menu": ["Schneller Teller: Seelachsfilet an \"S\u00fc\u00df - Scharfer\" Tomatenso\u00dfe, Langkornreis und Brokkoli", "Gyros mit hausgemachtem Tsatsiki", "Green Tacos mit Walnuss - Chili - P\u00e2t\u00e9", "Kartoffel Broccoli Gratin"]}], "name": "Speiseplan f\u00fcr Feldkirchenstra\u00dfe Bamberg"}
+{"name": "Speiseplan f\u00fcr Feldkirchenstra\u00dfe Bamberg", "execution_time": "Thursday, 05.10.2017", "weekmenu": [{"menu": ["Rinderhacksteak mit Cognac - Pfefferso\u00dfe", "Schneller Teller: H\u00e4hnchenspie\u00df mit Gefl\u00fcgelso\u00dfe, Bratkartoffeln und Blumenkohl", "Kartoffelgnocchi - Gem\u00fcsepfanne", "Dampfkartoffeln mit hausgemachtem Schnittlauchquark"], "date": "02.10."}, {"menu": [], "date": "03.10."}, {"menu": ["Siebenschwabenplatte mit Bratenso\u00dfe", "Putenbrustgeschnetzeltes in Pilzrahm", "Schneller Teller: Veganes Soja - Gem\u00fcsegeschnetzeltes mit Penine Rigate, Apfel", "\u00dcberbackene Zucchini \"mediterrane Art\" an Paprikaso\u00dfe"], "date": "04.10."}, {"menu": ["Chicken Burger mit Ananas Currydip", "Schneller Teller: Paprikagulsch vom Schwein mit Spiralnudeln und Rosenkohl", "\u00dcberbackene Zucchini \"mediterrane Art\" an Paprikaso\u00dfe", "Nudelgem\u00fcseauflauf", "Pizza \"Spinat & Hirtenk\u00e4se\""], "date": "05.10."}, {"menu": ["Schneller Teller: Seelachsfilet an \"S\u00fc\u00df - Scharfer\" Tomatenso\u00dfe, Langkornreis und Brokkoli", "Gyros mit hausgemachtem Tsatsiki", "Green Tacos mit Walnuss - Chili - P\u00e2t\u00e9"], "date": "06.10."}]}
Binary file not shown.
@@ -40,15 +40,15 @@ INSTALLED_APPS = [
     'apps.donar',
 ]

-MIDDLEWARE = [
-    'django.middleware.security.SecurityMiddleware',
-    'django.contrib.sessions.middleware.SessionMiddleware',
-    'django.middleware.common.CommonMiddleware',
-    'django.middleware.csrf.CsrfViewMiddleware',
-    'django.contrib.auth.middleware.AuthenticationMiddleware',
-    'django.contrib.messages.middleware.MessageMiddleware',
-    'django.middleware.clickjacking.XFrameOptionsMiddleware',
-]
+# MIDDLEWARE = [
+#     'django.middleware.security.SecurityMiddleware',
+#     'django.contrib.sessions.middleware.SessionMiddleware',
+#     'django.middleware.common.CommonMiddleware',
+#     'django.middleware.csrf.CsrfViewMiddleware',
+#     'django.contrib.auth.middleware.AuthenticationMiddleware',
+#     'django.contrib.messages.middleware.MessageMiddleware',
+#     'django.middleware.clickjacking.XFrameOptionsMiddleware',
+# ]

 ROOT_URLCONF = 'ofu_app.urls'
@@ -9,7 +9,14 @@
         <div class="col-6 p-3">
             <div class="p-3 border border-dark roundep-3 border border-dark rounded bg-light text-dark">
                 <h3>{{ room.name }}</h3>
-                <p>Short: {{ room.short }}</p>
+                <p><strong>Key:</strong> {{ room.key }}</p>
+                <p><strong>Address:</strong> {{ room.address }}</p>
+                <p><strong>Building Key:</strong> {{ room.building_key }}</p>
+                <p><strong>Floor:</strong> {{ room.floor }}</p>
+                <p><strong>Orgname:</strong> {{ room.orgname }}</p>
+                <p><strong>Short:</strong> {{ room.short }}</p>
+                <p><strong>Size:</strong> {{ room.size }}</p>
+                <p><strong>Description:</strong> {{ room.description }}</p>
             </div>
         </div>
     {% endfor %}
@@ -5,18 +5,29 @@
 {% endblock %}
 {% block content %}
     <div class="container">
         <div class="row">
             {% for event in events %}
                 <div class="col-6 p-3">
                     <div class="p-3 border border-dark roundep-3 border border-dark rounded bg-light text-dark">
                         <h3>{{ event.title }}</h3>
                         <p>Wann: {{ event.date|date:"D, d.M" }} ab {{ event.time }}</p>
-                        <p>Wo: {{ event.location }}</p>
+                        <p>Wo:</p>
+                        <ul>
+                            {% for location in event.locations.all %}
+                                <li>{{ location.name }}</li>
+                            {% endfor %}
+                        </ul>
+                        {% if event.orgname %}
+                            <p>Organisator: {{ event.orgname }}</p>
+                        {% endif %}
                         {% if event.link %}
                             <a href="{{ event.link }}">Details</a>
                         {% endif %}

                         <p>Kategorie: {{ event.category }}</p>
                     </div>
                 </div>
             {% endfor %}
         </div>
     </div>
 {% endblock %}
ofu_app/with_calendar.txt (new file)
File diff suppressed because it is too large
ofu_app/with_calendar_and_internal.txt (new file)
@@ -0,0 +1,22 @@
'ForMaD: Forum Mathematik-Didaktik'
'ForMaD: Forum Mathematik-Didaktik'
'ForMaD: Forum Mathematik-Didaktik'
'ForMaD: Forum Mathematik-Didaktik'
'Liederabend mit Martin Fösel (Gesang) und Beate Roux (Klavier)'
('Internationale Konferenz zum Ganztag "WERA-IRN Konferenz Ganztägige Bildung '
 'aus einer international vergleichenden Perspektive", 30.11.- 02.12.17')
'11. Bamberger Neuropsychologietag'
('Vortrag Dr. Jessica Röhner: Die Untersuchung von fälschungs- und '
 'konstruktbezogener Varianz im IAT mit Hilfe von Diffusionsmodellanalysen')
('Vortrag Dr. Anna Dechant: (Nicht-)Intentional Partnerlos. Wer ist '
 '(un-)freiwillig Single und wie verändert sich das mit der Zeit?')
('Vortrag Dr. Oliver Arnold: Verhalten als kompensatorische Funktion von '
 'Einstellung und Verhaltenskosten: Die Person-Situation-Interaktion im Rahmen '
 'des Campell-Paradigmas')
('Vortrag Prof. Dr. Christine Syrek: Mikropause bis Urlaub: Förderung von '
 'Gesundheit und Leistungsverhalten durch Erholung von arbeitsbezogenem Stress')
('Vortrag PD Dr. Miriam Kunz: Wenn die Sprache versiegt: Affekterkennung bei '
 'Demenz')
'Hochschulöffentliches Gespräch mit dem Mittelbau'
'Hochschulöffentliches Gespräch mit Studierenden'
Successfully migrate data
ofu_app/with_calendar_no.txt (new file)
@@ -0,0 +1,99 @@
'ForMaD: Forum Mathematik-Didaktik'
'ForMaD: Forum Mathematik-Didaktik'
'ForMaD: Forum Mathematik-Didaktik'
'ForMaD: Forum Mathematik-Didaktik'
'Abschlussworkshop Innovationslabor'
'Auftaktworkshop Innovationslabor'
'Bigband Aufbau'
'Cajónbau Workshop'
'Cajónbau Workshop'
'Cajón Workshop'
'Ensembleleitung Übung'
'Fortbildungstag "Musik lebendig unterrichten"'
'Fortbildungstag "Musik lebendig unterrichten"'
'Fortbildungstag "Musik lebendig unterrichten"'
'Fortbildungstag "Musik lebendig unterrichten"'
'Kammerorchesterprobe'
'Kammerorchesterprobe'
'Kammerorchesterprobe'
'Kammerorchesterprobe'
'Kammerorchesterprobe'
'Liederabend mit Martin Fösel (Gesang) und Beate Roux (Klavier)'
'Probe für Liederabend mit Martin Fösel (Gesang) Beate Roux (Klavier) und'
'Semester-Ouvertüre'
'Staatsexamen Ensemblearbeit'
'Aktivierende Methoden'
'Aktivierende Methoden'
'Andragogentag'
'Visualisieren - Präsentieren'
'Doktoranden-Kolloquium'
('Internationale Konferenz zum Ganztag "WERA-IRN Konferenz Ganztägige Bildung '
 'aus einer international vergleichenden Perspektive", 30.11.- 02.12.17')
'Tagung - WERA-IRN Extended Education'
'Bamberger Peer-Beratungstraining'
'Bamberger Peer-Beratungstraining'
'Bamberger Peer-Beratungstraining'
'Bamberger Peer-Beratungstraining'
'Bamberger Peer-Beratungstraining'
'Bamberger Peer-Beratungstraining'
'Bamberger Peer-Beratungstraining'
'Bamberger Peer-Beratungstraining'
'Bamberger Peer-Beratungstraining'
'Bamberger Peer-Beratungstraining'
'Beratung im schulischen Kontext. Das Bamberger Peer-Beratungstraining'
'Beratung im schulischen Kontext. Das Bamberger Peer-Beratungstraining'
'Beratung im schulischen Kontext. Das Bamberger Peer-Beratungstraining'
('Jahrestagung 2018 der Konferenz für Grundschulpädagogik und -didaktik an '
 'bayerischen Universitäten')
'Lernwerkstattfortbildung für Schulleitungen'
'Lernwerkstattfortbildung für Schulleitungen'
'Lernwerkstattfortbildung für Schulleitungen'
'Lernwerkstattfortbildung für Schulleitungen'
'Lehrstuhlkolloquium'
'Der Masterstudiengang Gerontologie in Erlangen'
'Gruppentraining Sozialer Kompetenzen'
'Infoabend Auslandssemester/-praktikum'
'Nebenfachabend'
'Pädagogik in einem (sozial-) psychiatrischen Arbeitsfeld'
'Prüfungsangstbewältigung'
'Psychische Erkrankungen bei Studierenden'
'Schriftl. Prüfung "Masterstudiengang Educational Quality"'
'Schulleitersymposium (SLS)'
'Schulleitersymposium (SLS)'
'Schulleitersymposium (SLS)'
'Schulleitersymposium (SLS)'
'Schulleitersymposium (SLS)'
'Schwerpunktabend Sozialpädagogik'
'Schwerpunktabend EFP'
'Schwerpunktabend Erwachsenenbildung/Weiterbildung'
'Forschertreffen'
'Forschertreffen'
'Forschertreffen'
'Frau Penczek'
'Jour fixe'
'Jour fixe'
'Praxis lehren'
'Praxis lehren'
'Workshop Bildung'
'Workshop Bildung'
'Workshop Bildung'
'11. Bamberger Neuropsychologietag'
'Disputation'
'Training zur psychischen ersten Hilfe für Laien'
('Vortrag Dr. Jessica Röhner: Die Untersuchung von fälschungs- und '
 'konstruktbezogener Varianz im IAT mit Hilfe von Diffusionsmodellanalysen')
('Vortrag Dr. Anna Dechant: (Nicht-)Intentional Partnerlos. Wer ist '
 '(un-)freiwillig Single und wie verändert sich das mit der Zeit?')
('Vortrag Dr. Oliver Arnold: Verhalten als kompensatorische Funktion von '
 'Einstellung und Verhaltenskosten: Die Person-Situation-Interaktion im Rahmen '
 'des Campell-Paradigmas')
('Vortrag Prof. Dr. Christine Syrek: Mikropause bis Urlaub: Förderung von '
 'Gesundheit und Leistungsverhalten durch Erholung von arbeitsbezogenem Stress')
('Vortrag PD Dr. Miriam Kunz: Wenn die Sprache versiegt: Affekterkennung bei '
 'Demenz')
'Abschiedsvorlesung Prof. Dr. Rahm (noch unter Vorbehalt)'
'Disputation Nusser'
'Fakultätsweihnacht'
'Hochschulöffentliches Gespräch mit dem Mittelbau'
'Hochschulöffentliches Gespräch mit Studierenden'
Successfully migrate data
ofu_app/with_calendar_yes.txt (new file)
@@ -0,0 +1,22 @@
'ForMaD: Forum Mathematik-Didaktik'
'ForMaD: Forum Mathematik-Didaktik'
'ForMaD: Forum Mathematik-Didaktik'
'ForMaD: Forum Mathematik-Didaktik'
'Liederabend mit Martin Fösel (Gesang) und Beate Roux (Klavier)'
('Internationale Konferenz zum Ganztag "WERA-IRN Konferenz Ganztägige Bildung '
 'aus einer international vergleichenden Perspektive", 30.11.- 02.12.17')
'11. Bamberger Neuropsychologietag'
('Vortrag Dr. Jessica Röhner: Die Untersuchung von fälschungs- und '
 'konstruktbezogener Varianz im IAT mit Hilfe von Diffusionsmodellanalysen')
('Vortrag Dr. Anna Dechant: (Nicht-)Intentional Partnerlos. Wer ist '
 '(un-)freiwillig Single und wie verändert sich das mit der Zeit?')
('Vortrag Dr. Oliver Arnold: Verhalten als kompensatorische Funktion von '
 'Einstellung und Verhaltenskosten: Die Person-Situation-Interaktion im Rahmen '
 'des Campell-Paradigmas')
('Vortrag Prof. Dr. Christine Syrek: Mikropause bis Urlaub: Förderung von '
 'Gesundheit und Leistungsverhalten durch Erholung von arbeitsbezogenem Stress')
('Vortrag PD Dr. Miriam Kunz: Wenn die Sprache versiegt: Affekterkennung bei '
 'Demenz')
'Hochschulöffentliches Gespräch mit dem Mittelbau'
'Hochschulöffentliches Gespräch mit Studierenden'
Successfully migrate data