import json
import os
from copy import deepcopy
import aiohttp
from bs4 import BeautifulSoup
import disnake
from disnake import ApplicationCommandInteraction
from disnake.ext import commands, tasks
from cogs.help import help
"""
Environment Variablen:
DISCORD_JOBOFFERS_FILE - json file mit allen aktuellen
DISCORD_JOBOFFERS_CHANNEL - Channel-ID für Stellenangebote
DISCORD_JOBOFFERS_URL - URL von der die Stellenangebote geholt werde
DISCORD_JOBOFFERS_STD_FAK - Fakultät deren Stellenangebote standardmäßig gepostet werden
Struktur der json:
{fak:{id:{title:..., info:..., link:..., deadline:...}}
mit fak = [mi|rewi|wiwi|ksw|psy|other]
"""
JOBS_URL = os.getenv("DISCORD_JOBOFFERS_URL")
STD_FAK = os.getenv("DISCORD_JOBOFFERS_STD_FAK")
class Joboffers(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.joboffers = {}
self.joboffers_channel_id = int(os.getenv("DISCORD_JOBOFFERS_CHANNEL"))
self.joboffers_file = os.getenv("DISCORD_JOBOFFERS_FILE")
self.load_joboffers()
self.update_loop.start()
@tasks.loop(hours=24)
async def update_loop(self):
await self.fetch_joboffers()
@update_loop.before_loop
async def before_update_loop(self):
await self.bot.wait_until_ready()
    def save_joboffers(self):
        # Context manager ensures the file handle is closed even on errors.
        with open(self.joboffers_file, mode='w') as joboffers_file:
            json.dump(self.joboffers, joboffers_file)

    def load_joboffers(self):
        try:
            with open(self.joboffers_file, mode='r') as joboffers_file:
                self.joboffers = json.load(joboffers_file)
        except FileNotFoundError:
            self.joboffers = {}
    @help(
        syntax="/jobs <faculty?>",
        parameters={
            "faculty": "Fakultät, für die die Stellenangebote für studentische Hilfskräfte "
                       "ausgegeben werden sollen (mi, rewi, wiwi, ksw, psy)"
        },
        brief="Ruft Jobangebote für Studierende der Fernuni Hagen auf."
    )
    @commands.slash_command(name="jobs",
                            description="Liste Jobangebote der Uni auf",
                            options=[disnake.Option(name="faculty",
                                                    description="Fakultät",
                                                    choices=[disnake.OptionChoice('mi', 'mi'),
                                                             disnake.OptionChoice('rewi', 'rewi'),
                                                             disnake.OptionChoice('wiwi', 'wiwi'),
                                                             disnake.OptionChoice('ksw', 'ksw'),
                                                             disnake.OptionChoice('psy', 'psy'),
                                                             disnake.OptionChoice('other', 'other'),
                                                             disnake.OptionChoice('all', 'all')])])
    async def cmd_jobs(self, interaction: ApplicationCommandInteraction, faculty: str = STD_FAK):
        await self.fetch_joboffers()
        fak_text = "aller Fakultäten" if faculty == 'all' else f"der Fakultät {faculty}"
        embed = disnake.Embed(title="Stellenangebote der Uni",
                              description=f"Ich habe folgende Stellenangebote {fak_text} gefunden:")
        for fak, fak_offers in self.joboffers.items():
            # Filter by the requested faculty (not by STD_FAK, which only
            # controls the automatic announcements).
            if faculty != 'all' and fak != faculty:
                continue
            for offer_id, offer_data in fak_offers.items():
                descr = f"{offer_data['info']}\nDeadline: {offer_data['deadline']}\n{offer_data['link']}"
                embed.add_field(name=offer_data['title'], value=descr, inline=False)
        await interaction.response.send_message(embed=embed, ephemeral=True)
async def post_new_jobs(self, jobs):
fak_text = "aller Fakultäten" if STD_FAK == 'all' else f"der Fakultät {STD_FAK}"
embed = disnake.Embed(title="Neue Stellenangebote der Uni",
description=f"Ich habe folgende neue Stellenangebote {fak_text} gefunden:")
for job in jobs:
descr = f"{job['info']}\nDeadline: {job['deadline']}\n{job['link']}"
embed.add_field(name=job['title'], value=descr, inline=False)
joboffers_channel = await self.bot.fetch_channel(self.joboffers_channel_id)
await joboffers_channel.send(embed=embed)
    async def fetch_joboffers(self):
        # Fetch the page inside context managers so the session is closed
        # even if the request fails.
        async with aiohttp.ClientSession() as sess:
            async with sess.get(JOBS_URL) as resp:
                text = await resp.read()
        soup = BeautifulSoup(text, "html.parser")
        jobs = soup.find_all("li")
        # Keep a copy of the old list to diff against afterwards.
        old_joboffers = deepcopy(self.joboffers)
        for job in jobs:
            detail_string = job.text.strip()
            if "Studentische Hilfskraft" in detail_string:
                # Listings appear to be of the form
                # "... (Kennziffer <id>) <info> [<deadline>]"; skipping
                # "(Kennziffer " (12 characters) leaves just the id.
                offer_id = detail_string[detail_string.index('(')+12:detail_string.index(')')]
                title = detail_string[:detail_string.index(')')+1]
                info = detail_string[detail_string.index(')')+1:detail_string.index('[')]
                deadline = detail_string[detail_string.index('[')+1:detail_string.index(']')]
                link = job.find('a')['href']
                # Fix mojibake umlauts in the scraped text.
                info = info.replace("ä", "ä")
                info = info.replace("ü", "ü")
                faks = ["other", "wiwi", "mi", "ksw", "psy", "rewi"]
                # The first digit of the id encodes the faculty:
                # 1=wiwi, 2=mi, 3=ksw, 4=psy, 5=rewi, anything else=other.
                fak_id = int(offer_id[0])
                fak = faks[fak_id] if 1 <= fak_id <= 5 else faks[0]
                if not self.joboffers.get(fak):
                    self.joboffers[fak] = {}
                self.joboffers[fak][offer_id] = {'title': title, 'info': info, 'deadline': deadline, 'link': link}
        self.save_joboffers()
        await self.check_for_new_jobs(old_joboffers)
    async def check_for_new_jobs(self, old_joboffers):
        new_jobs = []
        for fak, fak_offers in self.joboffers.items():
            if STD_FAK != 'all' and fak != STD_FAK:
                continue
            if fak_old := old_joboffers.get(fak):
                for offer_id, offer_data in fak_offers.items():
                    if old_offer := fak_old.get(offer_id):
                        # An offer whose details changed is announced again.
                        if offer_data != old_offer:
                            new_jobs.append(offer_data)
                    else:
                        new_jobs.append(offer_data)
            else:
                # The whole faculty is new, so every offer in it is new.
                new_jobs.extend(fak_offers.values())
        if new_jobs:
            await self.post_new_jobs(new_jobs)
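
# Extension entry point in the standard disnake pattern, assuming this cog is
# loaded via bot.load_extension(); if the project registers cogs elsewhere,
# this sketch is redundant.
def setup(bot):
    bot.add_cog(Joboffers(bot))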