mirror of https://github.com/sascha-hemi/hacs_waste_collection_schedule.git (synced 2026-03-21 04:06:03 +01:00)
Merge pull request #530 from mampfes/improve_test_sources
Improve test sources
@@ -349,7 +349,10 @@ If you'd like to help with any of these, please raise an [issue](https://github.

The following waste service providers return errors when running the test_source script:

- `banyule_vic_gov_au`: JSONDecodeError, causes by Captcha
- `banyule_vic_gov_au`: JSONDecodeError, caused by an unsupported Captcha wall
- `republicservices_com`: JSONDecodeError
- `newcastle_gov_uk`: all tests return 0 entries
- `awn_de`: all tests return 0 entries

If you can fix any of these, please raise a Pull Request with the updates.
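
A JSONDecodeError in this list usually means the endpoint answered with an HTML page (for example a Captcha wall) instead of JSON. A minimal sketch of how that failure mode can be told apart from a plain HTTP error; the endpoint URL is hypothetical:

import requests

r = requests.get("https://example.com/api/collections")  # hypothetical endpoint
r.raise_for_status()  # catches plain HTTP errors (4xx/5xx) first
try:
    data = r.json()
except ValueError:
    # body was not JSON - typically an HTML Captcha or error page
    raise Exception(f"expected JSON, got: {r.text[:100]}")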

@@ -18,12 +18,7 @@ class ICS_v1:
def convert(self, ics_data):
# parse ics file
try:
calendar = icalendar.Calendar.from_ical(ics_data)
except Exception as err:
_LOGGER.error(f"Parsing ics data failed:{str(err)}")
_LOGGER.debug(ics_data)
return []
calendar = icalendar.Calendar.from_ical(ics_data)

# calculate start- and end-date for recurring events
start_date = datetime.datetime.now().replace(
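
The hunk above wraps the icalendar parse in a try/except so one malformed calendar no longer crashes the whole source. A self-contained sketch of the same pattern (names taken from the hunk; the helper function is illustrative):

import logging
import icalendar

_LOGGER = logging.getLogger(__name__)

def parse_calendar(ics_data: str):
    try:
        return icalendar.Calendar.from_ical(ics_data)
    except Exception as err:
        # log the reason and the raw payload, then degrade gracefully
        _LOGGER.error(f"Parsing ics data failed:{str(err)}")
        _LOGGER.debug(ics_data)
        return None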

@@ -1,17 +1,17 @@
import logging

import requests
from waste_collection_schedule import Collection
from waste_collection_schedule import Collection  # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "Neunkirchen Siegerland"
DESCRIPTION = " Source for 'Abfallkalender Neunkirchen Siegerland'."
URL = "https://www.neunkirchen-siegerland.de"
TEST_CASES = {
"Waldstraße":{ "strasse":"Waldstr"}
}
TEST_CASES = {"Waldstraße": {"strasse": "Waldstr"}}

_LOGGER = logging.getLogger(__name__)

class Source:
def __init__(self, strasse):
self._strasse = strasse
@@ -19,13 +19,20 @@ class Source:

def fetch(self):

args = {"out":"json", "type": "abto", "select":"2", "refid": "3362.1", "term": self._strasse }
args = {
"out": "json",
"type": "abto",
"select": "2",
"refid": "3362.1",
"term": self._strasse,
}
header = {"referer": "https://www.neunkirchen-siegerland.de"}
r = requests.get("https://www.neunkirchen-siegerland.de/output/autocomplete.php", params=args,headers=header)

if r.status_code != 200:
_LOGGER.error("Error querying calender data")
return []
r = requests.get(
"https://www.neunkirchen-siegerland.de/output/autocomplete.php",
params=args,
headers=header,
)
r.raise_for_status()

ids = r.json()

@@ -33,19 +40,22 @@ class Source:
raise Exception("no address found")

if len(ids) > 1:
raise Exception (" to many addresses found, specify more detailed street name")
raise Exception(
" to many addresses found, specify more detailed street name"
)

args = {"ModID":48, "call": "ical", "pois": ids[0][0], "kat": 1, "alarm":0}
r = requests.get("https://www.neunkirchen-siegerland.de/output/options.php", params=args,headers=header)

if r.status_code != 200:
_LOGGER.error("Error querying calender data")
return []
args = {"ModID": 48, "call": "ical", "pois": ids[0][0], "kat": 1, "alarm": 0}
r = requests.get(
"https://www.neunkirchen-siegerland.de/output/options.php",
params=args,
headers=header,
)
r.raise_for_status()

dates = self._ics.convert(r.text)

entries = []
for d in dates:
entries.append(Collection(d[0],d[1]))
entries.append(Collection(d[0], d[1]))

return entries
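
The recurring edit in this commit replaces hand-rolled status-code checks with requests' built-in one: Response.raise_for_status() throws requests.exceptions.HTTPError on any 4xx/5xx answer, so the test runner sees a loud failure instead of a silent empty schedule. A minimal sketch (URL hypothetical):

import requests

r = requests.get("https://example.com/output/autocomplete.php")  # hypothetical
r.raise_for_status()  # raises requests.exceptions.HTTPError on 4xx/5xx
ids = r.json()  # only reached for a 2xx response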

@@ -16,7 +16,9 @@ TEST_CASES = {
},
}

API_URL = "https://www.landkreis-harburg.de/bauen-umwelt/abfallwirtschaft/abfallkalender/"
API_URL = (
"https://www.landkreis-harburg.de/bauen-umwelt/abfallwirtschaft/abfallkalender/"
)
HEADERS = {
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64)",
}
@@ -35,10 +37,10 @@ class Source:
# Double loading is on purpose because sometimes the webpage has an overlay
# which is gone on the second try in a session
r = session.get(API_URL, headers=HEADERS)
r.raise_for_status()
if "Zur aufgerufenen Seite" in r.text:
r = session.get(API_URL, headers=HEADERS)
if r.status_code != 200:
raise Exception(f"Error: failed to fetch first url: {API_URL}")
r.raise_for_status()

# Get the IDs of the districts on the first level
id = self.parse_level(r.text, 1)
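
The double request above is deliberate: the site sometimes serves an overlay page first, and a second GET on the same requests.Session (which carries the cookies set by the first response) returns the real page. A sketch of the idea; the URL and overlay marker string are made up:

import requests

session = requests.Session()  # keeps cookies across requests
r = session.get("https://example.com/calendar")  # hypothetical URL
r.raise_for_status()
if "overlay marker" in r.text:  # hypothetical overlay-detection string
    # retry in the same session; the overlay is gone once cookies are set
    r = session.get("https://example.com/calendar")
    r.raise_for_status()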
@@ -54,8 +56,7 @@ class Source:
"selected_ebene": 0,
}
r = session.get(url, params=params, headers=HEADERS)
if r.status_code != 200:
raise Exception(f"Error: failed to fetch second url: {url}")
r.raise_for_status()

# Get the IDs of the districts on the second level
id = self.parse_level(r.text, 2)
@@ -70,8 +71,7 @@ class Source:
"selected_ebene": 0,
}
r = session.get(url, params=params, headers=HEADERS)
if r.status_code != 200:
raise Exception(f"Error: failed to fetch third url: {url}")
r.raise_for_status()

# Get the IDs of the districts on the third level
id = self.parse_level(r.text, 3)
@@ -83,6 +83,7 @@ class Source:
"owner": 20100,
}
r = session.get(url, params=params, headers=HEADERS)
r.raise_for_status()

# Sometimes there is no garbage calendar available
if "Es sind keine Abfuhrbezirke hinterlegt." in r.text:
@@ -111,7 +112,7 @@ class Source:
for d in dates:
entries.append(Collection(d[0], d[1]))
except ValueError:
pass # during year transition the ical for the next year may be empty
pass  # during year transition the ical for the next year may be empty
return entries

def parse_level(self, response, level):

@@ -1,5 +1,4 @@
import datetime
import json
import logging

import requests
@@ -44,14 +43,14 @@ class Source:
r = requests.get(
f"https://awido.cubefour.de/WebServices/Awido.Service.svc/secure/getPlaces/client={self._customer}"
)
places = json.loads(r.text)
r.raise_for_status()
places = r.json()

# create city to key map from retrieved places
city_to_oid = {place["value"].strip(): place["key"] for (place) in places}

if self._city not in city_to_oid:
_LOGGER.error(f"city not found: {self._city}")
return []
raise Exception(f"city not found: {self._city}")

oid = city_to_oid[self._city]

@@ -62,7 +61,8 @@ class Source:
f"https://awido.cubefour.de/WebServices/Awido.Service.svc/secure/getGroupedStreets/{oid}",
params={"client": self._customer},
)
streets = json.loads(r.text)
r.raise_for_status()
streets = r.json()

# create street to key map from retrieved places
street_to_oid = {
@@ -78,7 +78,8 @@ class Source:
f"https://awido.cubefour.de/WebServices/Awido.Service.svc/secure/getGroupedStreets/{oid}",
params={"client": self._customer},
)
streets = json.loads(r.text)
r.raise_for_status()
streets = r.json()

# create street to key map from retrieved places
street_to_oid = {
@@ -86,8 +87,7 @@ class Source:
}

if self._street not in street_to_oid:
_LOGGER.error(f"street not found: {self._street}")
return []
raise Exception(f"street not found: {self._street}")

oid = street_to_oid[self._street]

@@ -96,7 +96,8 @@ class Source:
f"https://awido.cubefour.de/WebServices/Awido.Service.svc/secure/getStreetAddons/{oid}",
params={"client": self._customer},
)
hsnbrs = json.loads(r.text)
r.raise_for_status()
hsnbrs = r.json()

# create housenumber to key map from retrieved places
hsnbr_to_oid = {
@@ -104,8 +105,7 @@ class Source:
}

if self._housenumber not in hsnbr_to_oid:
_LOGGER.error(f"housenumber not found: {self._housenumber}")
return []
raise Exception(f"housenumber not found: {self._housenumber}")

oid = hsnbr_to_oid[self._housenumber]

@@ -114,7 +114,8 @@ class Source:
f"https://awido.cubefour.de/WebServices/Awido.Service.svc/secure/getData/{oid}",
params={"fractions": "", "client": self._customer},
)
cal_json = json.loads(r.text)
r.raise_for_status()
cal_json = r.json()

# map fraction code to fraction name
fractions = {fract["snm"]: fract["nm"] for (fract) in cal_json["fracts"]}
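
Swapping json.loads(r.text) for r.json() is mostly equivalent, but r.json() lets requests handle the response encoding, and paired with raise_for_status() it keeps the happy path to two lines. Sketch with a hypothetical URL:

import json
import requests

r = requests.get("https://example.com/api/places")  # hypothetical

# before: decodes manually, even for a 500 response with an HTML body
places = json.loads(r.text)

# after: fail fast on HTTP errors, then let requests decode the body
r.raise_for_status()
places = r.json()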

@@ -1,4 +1,3 @@
import json
import logging

import requests
@@ -24,7 +23,8 @@ class Source:
def fetch(self):
# retrieve list of cities
r = requests.get("https://www.awr.de/api_v2/collection_dates/1/orte")
cities = json.loads(r.text)
r.raise_for_status()
cities = r.json()

# create city to id map from retrieved cities
city_to_id = {
@@ -32,8 +32,7 @@ class Source:
}

if self._city not in city_to_id:
_LOGGER.error(f"city not found: {self._city}")
return []
raise Exception(f"city not found: {self._city}")

cityId = city_to_id[self._city]

@@ -41,7 +40,8 @@ class Source:
r = requests.get(
f"https://www.awr.de/api_v2/collection_dates/1/ort/{cityId}/strassen"
)
streets = json.loads(r.text)
r.raise_for_status()
streets = r.json()

# create street to id map from retrieved cities
street_to_id = {
@@ -50,8 +50,7 @@ class Source:
}

if self._street not in street_to_id:
_LOGGER.error(f"street not found: {self._street}")
return []
raise Exception(f"street not found: {self._street}")

streetId = street_to_id[self._street]

@@ -59,7 +58,8 @@ class Source:
r = requests.get(
f"https://www.awr.de/api_v2/collection_dates/1/ort/{cityId}/abfallarten"
)
waste_types = json.loads(r.text)
r.raise_for_status()
waste_types = r.json()
wt = "-".join([t["id"] for t in waste_types["abfallarten"]])

# get ics file
@@ -73,4 +73,3 @@ class Source:
for d in dates:
entries.append(Collection(d[0], d[1]))
return entries
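
Raising instead of logging-and-returning-[] is what makes the test_source script useful: a wrong city name now fails the test instead of passing with zero entries. A sketch of the lookup-or-raise pattern that repeats through this commit (helper name and data made up):

def resolve_id(city_to_id: dict, city: str) -> str:
    # a failed lookup should abort the fetch, not yield an empty schedule
    if city not in city_to_id:
        raise Exception(f"city not found: {city}")
    return city_to_id[city]

# usage sketch with made-up data
city_id = resolve_id({"Rendsburg": "42"}, "Rendsburg")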

@@ -1,4 +1,3 @@
import json
import logging

import requests
@@ -24,7 +23,8 @@ class Source:
def fetch(self):
# retrieve list of cities
r = requests.get("https://www.awsh.de/api_v2/collection_dates/1/orte")
cities = json.loads(r.text)
r.raise_for_status()
cities = r.json()

# create city to id map from retrieved cities
city_to_id = {
@@ -32,8 +32,7 @@ class Source:
}

if self._city not in city_to_id:
_LOGGER.error(f"city not found: {self._city}")
return []
raise Exception(f"city not found: {self._city}")

cityId = city_to_id[self._city]

@@ -41,7 +40,8 @@ class Source:
r = requests.get(
f"https://www.awsh.de/api_v2/collection_dates/1/ort/{cityId}/strassen"
)
streets = json.loads(r.text)
r.raise_for_status()
streets = r.json()

# create street to id map from retrieved cities
street_to_id = {
@@ -50,8 +50,7 @@ class Source:
}

if self._street not in street_to_id:
_LOGGER.error(f"street not found: {self._street}")
return []
raise Exception(f"street not found: {self._street}")

streetId = street_to_id[self._street]

@@ -59,13 +58,15 @@ class Source:
r = requests.get(
f"https://www.awsh.de/api_v2/collection_dates/1/ort/{cityId}/abfallarten"
)
waste_types = json.loads(r.text)
r.raise_for_status()
waste_types = r.json()
wt = "-".join([t["id"] for t in waste_types["abfallarten"]])

# get ics file
r = requests.get(
f"https://www.awsh.de/api_v2/collection_dates/1/ort/{cityId}/strasse/{streetId}/hausnummern/0/abfallarten/{wt}/kalender.ics"
)
r.raise_for_status()

dates = self._ics.convert(r.text)

@@ -1,7 +1,8 @@
import logging
import requests
from datetime import datetime
from xml.dom.minidom import parseString

import requests
from waste_collection_schedule import Collection  # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

@@ -9,12 +10,17 @@ TITLE = "Umweltprofis"
DESCRIPTION = "Source for Umweltprofis"
URL = "https://www.umweltprofis.at"
TEST_CASES = {
"Ebensee": {"url": "https://data.umweltprofis.at/OpenData/AppointmentService/AppointmentService.asmx/GetIcalWastePickupCalendar?key=KXX_K0bIXDdk0NrTkk3xWqLM9-bsNgIVBE6FMXDObTqxmp9S39nIqwhf9LTIAX9shrlpfCYU7TG_8pS9NjkAJnM_ruQ1SYm3V9YXVRfLRws1"},
"Rohrbach": {"xmlurl": "https://data.umweltprofis.at/opendata/AppointmentService/AppointmentService.asmx/GetTermineForLocationSecured?Key=TEMPKeyabvvMKVCic0cMcmsTEMPKey&StreetNr=118213&HouseNr=Alle&intervall=Alle"},
"Ebensee": {
"url": "https://data.umweltprofis.at/OpenData/AppointmentService/AppointmentService.asmx/GetIcalWastePickupCalendar?key=KXX_K0bIXDdk0NrTkk3xWqLM9-bsNgIVBE6FMXDObTqxmp9S39nIqwhf9LTIAX9shrlpfCYU7TG_8pS9NjkAJnM_ruQ1SYm3V9YXVRfLRws1"
},
"Rohrbach": {
"xmlurl": "https://data.umweltprofis.at/opendata/AppointmentService/AppointmentService.asmx/GetTermineForLocationSecured?Key=TEMPKeyabvvMKVCic0cMcmsTEMPKey&StreetNr=118213&HouseNr=Alle&intervall=Alle"
},
}

_LOGGER = logging.getLogger(__name__)

def getText(element):
s = ""
for e in element.childNodes:
@@ -22,6 +28,7 @@ def getText(element):
s += e.nodeValue
return s

class Source:
def __init__(self, url=None, xmlurl=None):
self._url = url
@@ -38,11 +45,11 @@ class Source:

def fetch_ics(self):
r = requests.get(self._url)
if r.status_code != 200:
_LOGGER.error("Error querying calendar data")
return []
r.raise_for_status()

fixed_text = r.text.replace("REFRESH - INTERVAL; VALUE = ", "REFRESH-INTERVAL;VALUE=")
fixed_text = r.text.replace(
"REFRESH - INTERVAL; VALUE = ", "REFRESH-INTERVAL;VALUE="
)

dates = self._ics.convert(fixed_text)

@@ -37,7 +37,7 @@ class Source:
self._post_code = post_code
self._house_number = house_number
if not any([self._premises_id, self._post_code and self._house_number]):
_LOGGER.error(
raise Exception(
"premises_id or post_code and house number must be provided in config"
)
self._session = requests.Session()

@@ -68,9 +68,14 @@ class Source:
data = r.json()

if data.get("error"):
for type, errormsg in data["errors"].items():
_LOGGER.error(f"{type} - {errormsg}")
return []
raise Exception(
"\n".join(
[
f"{type} - {errormsg}"
for type, errormsg in data["errors"].items()
]
)
)

entries = []
for year, months in data["waste_discharge"].items():
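
Where the API can report several problems at once, the new code folds them all into a single exception message instead of logging each one and returning nothing. A compressed sketch with a made-up error payload shaped like the response above:

# hypothetical payload shaped like the API's error response
data = {"error": True, "errors": {"postcode": "unknown", "house": "missing"}}

if data.get("error"):
    # one exception carrying every reported problem, one per line
    raise Exception(
        "\n".join(f"{kind} - {msg}" for kind, msg in data["errors"].items())
    )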

@@ -7,8 +7,14 @@ from waste_collection_schedule import Collection  # type: ignore[attr-defined]
TITLE = None
DESCRIPTION = "Source for the Dutch HVCGroep waste management."
URL = "https://www.hvcgroep.nl"

def EXTRA_INFO():
return [ { "title": s["title"], "url": get_main_url(s["api_url"])} for s in SERVICE_MAP ]
return [
{"title": s["title"], "url": get_main_url(s["api_url"])} for s in SERVICE_MAP
]

TEST_CASES = {
"Tollebeek": {"postal_code": "8309AV", "house_number": "1"},
"Hvgroep: Tollebeek": {
@@ -27,89 +33,115 @@ TEST_CASES = {
_LOGGER = logging.getLogger(__name__)

SERVICE_MAP = [
{ "title": "Alpen an den Rijn",
"api_url": "https://afvalkalender.alphenaandenrijn.nl",
{
"title": "Alpen an den Rijn",
"api_url": "https://afvalkalender.alphenaandenrijn.nl",
},
{ "title": "Gemeente Cranendonck",
"api_url": "https://afvalkalender.cranendonck.nl",
{
"title": "Gemeente Cranendonck",
"api_url": "https://afvalkalender.cranendonck.nl",
},
{ "title": "Cyclus NV",
"api_url": "https://afvalkalender.cyclusnv.nl",
{
"title": "Cyclus NV",
"api_url": "https://afvalkalender.cyclusnv.nl",
},
{ "title": "Dar",
"api_url": "https://afvalkalender.dar.nl",
{
"title": "Dar",
"api_url": "https://afvalkalender.dar.nl",
},
{ "title": "Den Haag",
"api_url": "https://huisvuilkalender.denhaag.nl",
{
"title": "Den Haag",
"api_url": "https://huisvuilkalender.denhaag.nl",
},
{ "title": "GAD",
"api_url": "https://inzamelkalender.gad.nl",
{
"title": "GAD",
"api_url": "https://inzamelkalender.gad.nl",
},
{ "title": "Gemeente Berkelland",
"api_url": "https://afvalkalender.gemeenteberkelland.nl",
{
"title": "Gemeente Berkelland",
"api_url": "https://afvalkalender.gemeenteberkelland.nl",
},
{ "title": "HVC Groep",
"api_url": "https://inzamelkalender.hvcgroep.nl",
{
"title": "HVC Groep",
"api_url": "https://inzamelkalender.hvcgroep.nl",
},
{ "title": "Gemeente Lingewaard",
"api_url": "https://afvalwijzer.lingewaard.nl",
{
"title": "Gemeente Lingewaard",
"api_url": "https://afvalwijzer.lingewaard.nl",
},
{ "title": "Gemeente Middelburg + Vlissingen",
"api_url": "https://afvalwijzer.middelburgvlissingen.nl",
{
"title": "Gemeente Middelburg + Vlissingen",
"api_url": "https://afvalwijzer.middelburgvlissingen.nl",
},
{ "title": "Mijn Blink",
"api_url": "https://mijnblink.nl",
{
"title": "Mijn Blink",
"api_url": "https://mijnblink.nl",
},
{ "title": "Gemeente Peel en Maas",
"api_url": "https://afvalkalender.peelenmaas.nl",
{
"title": "Gemeente Peel en Maas",
"api_url": "https://afvalkalender.peelenmaas.nl",
},
{ "title": "PreZero",
"api_url": "https://inzamelwijzer.prezero.nl",
{
"title": "PreZero",
"api_url": "https://inzamelwijzer.prezero.nl",
},
{ "title": "Purmerend",
"api_url": "https://afvalkalender.purmerend.nl",
{
"title": "Purmerend",
"api_url": "https://afvalkalender.purmerend.nl",
},
{ "title": "Reinigingsbedrijf Midden Nederland",
"api_url": "https://inzamelschema.rmn.nl",
{
"title": "Reinigingsbedrijf Midden Nederland",
"api_url": "https://inzamelschema.rmn.nl",
},
{ "title": "Gemeente Schouwen-Duiveland",
"api_url": "https://afvalkalender.schouwen-duiveland.nl",
{
"title": "Gemeente Schouwen-Duiveland",
"api_url": "https://afvalkalender.schouwen-duiveland.nl",
},
{ "title": "Spaarne Landen",
"api_url": "https://afvalwijzer.spaarnelanden.nl",
{
"title": "Spaarne Landen",
"api_url": "https://afvalwijzer.spaarnelanden.nl",
},
{ "title": "Stadswerk 072",
"api_url": "https://www.stadswerk072.nl",
{
"title": "Stadswerk 072",
"api_url": "https://www.stadswerk072.nl",
},
{ "title": "Gemeente Sudwest-Fryslan",
"api_url": "https://afvalkalender.sudwestfryslan.nl",
{
"title": "Gemeente Sudwest-Fryslan",
"api_url": "https://afvalkalender.sudwestfryslan.nl",
},
{ "title": "Gemeente Venray",
"api_url": "https://afvalkalender.venray.nl",
{
"title": "Gemeente Venray",
"api_url": "https://afvalkalender.venray.nl",
},
{ "title": "Gemeente Voorschoten",
"api_url": "https://afvalkalender.voorschoten.nl",
{
"title": "Gemeente Voorschoten",
"api_url": "https://afvalkalender.voorschoten.nl",
},
{ "title": "Gemeente Wallre",
"api_url": "https://afvalkalender.waalre.nl",
{
"title": "Gemeente Wallre",
"api_url": "https://afvalkalender.waalre.nl",
},
{ "title": "ZRD",
"api_url": "https://afvalkalender.zrd.nl",
{
"title": "ZRD",
"api_url": "https://afvalkalender.zrd.nl",
},
]

def get_service_name_map():
def extract_service_name(api_url):
name = api_url.split(".")[-2]
name = name.split("/")[-1]
return name

return { extract_service_name(s["api_url"]):s["api_url"] for s in SERVICE_MAP }
return {extract_service_name(s["api_url"]): s["api_url"] for s in SERVICE_MAP}

def get_main_url(url):
x = url.split(".")[-2:]
x[0] = x[0].removeprefix("https://")
return "https://" + ".".join(x)
x = url.split(".")[-2:]
x[0] = x[0].removeprefix("https://")
return "https://" + ".".join(x)
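
get_main_url() reduces a per-municipality API host to the provider's main site by keeping only the last two dot-separated parts (note that str.removeprefix needs Python 3.9+). A quick check of what it returns for two entries from SERVICE_MAP above:

def get_main_url(url):
    x = url.split(".")[-2:]
    x[0] = x[0].removeprefix("https://")
    return "https://" + ".".join(x)

assert get_main_url("https://afvalkalender.zrd.nl") == "https://zrd.nl"
assert get_main_url("https://www.stadswerk072.nl") == "https://stadswerk072.nl"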

ICON_MAP = {
"plastic-blik-drinkpak": "mdi:recycle",
@@ -134,8 +166,7 @@ class Source:

# Something must be wrong, maybe the address isn't valid? No need to do the extra requests so just return here.
if len(data) == 0:
_LOGGER.error("no data found for this address")
return []
raise Exception("no data found for this address")

bag_id = data[0]["bagid"]

@@ -39,9 +39,7 @@ class Source:
response = requests.get(
"https://www.hygea.be/displaycalws.html", params=params
)

if not response.ok:
return []
response.raise_for_status()
data = json.loads(response.text)

entries = []

@@ -94,7 +94,7 @@ TEST_CASES = {
"year_field": "year",
},
"EAW Rheingau Taunus": {
"url": "https://www.eaw-rheingau-taunus.de/abfallkalender/calendar.ics?streetid=1429",
"url": "https://www.eaw-rheingau-taunus.de/abfallsammlung/abfuhrtermine/feed.ics?tx_vierwdeaw_garbagecalendarics%5Baction%5D=ics&tx_vierwdeaw_garbagecalendarics%5Bcontroller%5D=GarbageCalendar&tx_vierwdeaw_garbagecalendarics%5Bstreet%5D=38",
"split_at": ",",
},
"Recollect, Ottawa": {
@@ -196,16 +196,9 @@ class Source:
raise RuntimeError(
"Error: unknown method to fetch URL, use GET or POST; got {self._method}"
)
r.raise_for_status()

r.encoding = "utf-8"  # requests doesn't guess the encoding correctly

# check the return code
if not r.ok:
_LOGGER.error(
"Error: the response is not ok; need code 200, but got code %s"
% r.status_code
)
return []

return self._convert(r.text)
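
Forcing r.encoding = "utf-8" before touching r.text matters because requests guesses the charset from the HTTP headers and falls back to ISO-8859-1 for text/* bodies without one, which mangles umlauts in ICS feeds. Sketch (URL hypothetical):

import requests

r = requests.get("https://example.com/feed.ics")  # hypothetical ICS endpoint
r.raise_for_status()
r.encoding = "utf-8"  # override before r.text decodes the body
ics_text = r.text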

def fetch_file(self, file):

@@ -1,5 +1,5 @@
import json
import logging

import requests
from waste_collection_schedule import Collection  # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS
@@ -11,9 +11,9 @@ DESCRIPTION = "Source for INFEO waste collection."
URL = "https://www.infeo.at/"
EXTRA_INFO = [
{
"title": "Bogenschütz Entsorgung",
"url": "https://bogenschuetz-entsorgung.de",
"country": "de",
"title": "Bogenschütz Entsorgung",
"url": "https://bogenschuetz-entsorgung.de",
"country": "de",
},
]
TEST_CASES = {"Bogenschütz": {"customer": "bogenschütz", "zone": "Dettenhausen"}}
@@ -27,25 +27,25 @@ class Source:

def fetch(self):
baseUrl = f"https://services.infeo.at/awm/api/{self._customer}/wastecalendar"
issueUrl = "https://github.com/mampfes/hacs_waste_collection_schedule/issues/new"

issueUrl = (
"https://github.com/mampfes/hacs_waste_collection_schedule/issues/new"
)

params = {
"showUnpublishedCalendars": "false",
}

# get the available published calendar years
url = f"{baseUrl}/calendars"
response = requests.get(url, params=params)
response.raise_for_status()

# data validation
if(response.status_code != 200):
_LOGGER.error(f"problems during api calendar year access, please file an issue at {issueUrl} and mention @dm82m and add this: {response.text}")
return []

response = response.json()
if len(response) <= 0:
_LOGGER.error(f"no calendars found, please file an issue at {issueUrl} and mention @dm82m")
return []
raise Exception(
f"no calendars found, please file an issue at {issueUrl} and mention @dm82m"
)

entries = []

@@ -61,15 +61,14 @@ class Source:
# get available zones for calendar year
url = f"{baseUrl}/zones"
response = requests.get(url, params=params)
response.raise_for_status()

# data validation
if(response.status_code != 200):
_LOGGER.error(f"problems during api zones for calendar year access, please file an issue at {issueUrl} and mention @dm82m and add this: {response.text}")
return []

response = response.json()
if len(response) <= 0:
_LOGGER.warning(f"no zones found for calendar year {calendarYearName}, continuing with next calendar year ...")
_LOGGER.warning(
f"no zones found for calendar year {calendarYearName}, continuing with next calendar year ..."
)
continue

zoneId = 0
@@ -80,7 +79,9 @@ class Source:
zoneId = zone["id"]

if zoneId == 0:
_LOGGER.warning(f"zone '{self._zone}' not found in calendar year {calendarYearName}, continuing with next calendar year ...")
_LOGGER.warning(
f"zone '{self._zone}' not found in calendar year {calendarYearName}, continuing with next calendar year ..."
)
continue

params = {
@@ -92,19 +93,17 @@ class Source:
# get ical data for year and zone
url = f"{baseUrl}/v2/export"
response = requests.get(url, params=params)

# data validation
if(response.status_code != 200):
_LOGGER.error(f"problems during api ical data for zone in calendar year, please file an issue at {issueUrl} and mention @dm82m and add this: {response.text}")
return []
response.raise_for_status()

dates = self._ics.convert(response.text)

for d in dates:
entries.append(Collection(d[0], d[1]))

# validate that we processed some data and show an error if not
if len(entries) <= 0:
_LOGGER.error(f"we were not able to get any waste entries for you! please file an issue at {issueUrl} and mention @dm82m and add this zone: '{self._zone}'")

_LOGGER.warning(
f"we were not able to get any waste entries for you! please file an issue at {issueUrl} and mention @dm82m and add this zone: '{self._zone}'"
)

return entries
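
Note the severity split the hunk introduces: a missing zone in one calendar year is only a warning followed by continue, because a later year may still produce entries; only an empty final result is worth reporting. A compressed sketch of that control flow (data made up):

import logging

_LOGGER = logging.getLogger(__name__)

entries = []
for year in ("2022", "2023"):  # made-up calendar years
    zones = [] if year == "2022" else ["Dettenhausen"]
    if not zones:
        _LOGGER.warning(f"no zones found for calendar year {year}, continuing ...")
        continue
    entries.extend(zones)

if len(entries) <= 0:
    _LOGGER.warning("no waste entries found at all")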

@@ -4,23 +4,27 @@ from urllib.parse import urljoin
import requests
from bs4 import BeautifulSoup
from waste_collection_schedule import Collection  # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS  # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS  # type: ignore[attr-defined]

TITLE = 'Stadtservice Korneuburg'
DESCRIPTION = 'Source for Stadtservice Korneuburg'
URL = 'https://www.korneuburg.gv.at'
TITLE = "Stadtservice Korneuburg"
DESCRIPTION = "Source for Stadtservice Korneuburg"
URL = "https://www.korneuburg.gv.at"
TEST_CASES = {
"Rathaus": {"street_name": "Hauptplatz", "street_number": 39},  # Teilgebiet 4
"Rathaus using Teilgebiet": {"street_name": "SomeStreet", "street_number": "1A", "teilgebiet": "4"},  # Teilgebiet 4
"Werft": {"street_name": "Am Hafen", "street_number": 6}  # Teilgebiet 2
"Rathaus": {"street_name": "Hauptplatz", "street_number": 39},  # Teilgebiet 4
"Rathaus using Teilgebiet": {
"street_name": "SomeStreet",
"street_number": "1A",
"teilgebiet": "4",
},  # Teilgebiet 4
"Werft": {"street_name": "Am Hafen", "street_number": 6},  # Teilgebiet 2
}

# Mapping of teilgebiete to calendar urls
WASTE_TYPE_URLS = {
'1': ('Biomuell_3', 'Restmuell_3', 'Papier_2', 'Gelber_Sack_4'),
'2': ('Biomuell_4', 'Restmuell_2', 'Papier_3', 'Gelber_Sack_1'),
'3': ('Biomuell_1', 'Restmuell_1', 'Papier_1', 'Gelber_Sack_2'),
'4': ('Biomuell_2', 'Restmuell', 'Papier', 'Gelber_Sack_3')
"1": ("Biomuell_3", "Restmuell_3", "Papier_2", "Gelber_Sack_4"),
"2": ("Biomuell_4", "Restmuell_2", "Papier_3", "Gelber_Sack_1"),
"3": ("Biomuell_1", "Restmuell_1", "Papier_1", "Gelber_Sack_2"),
"4": ("Biomuell_2", "Restmuell", "Papier", "Gelber_Sack_3"),
}

@@ -28,18 +32,15 @@ class Source:
def __init__(self, street_name, street_number, teilgebiet=-1):
self.street_name = street_name
self.street_number = street_number
self.teilgebiet = teilgebiet

self._region = None
self._street_name_id = -1
self._street_number_id = -1
self._headers = {'User-Agent': 'Mozilla/5.0'}
self._cookies = {'ris_cookie_setting': 'g7750'}  # Accept Cookie Consent
self._headers = {"User-Agent": "Mozilla/5.0"}
self._cookies = {"ris_cookie_setting": "g7750"}  # Accept Cookie Consent
self._ics = ICS()

if 0 < int(teilgebiet) <= 4:
self.region = str(teilgebiet)
else:
self.region = self.determine_region()

@staticmethod
def extract_street_numbers(soup):

@@ -52,19 +53,31 @@ class Source:
street_number_idx += 1

possible_numbers = json.loads(
scripts[street_number_idx].string[19:].replace('\r\n', '').replace(', ]', ']').replace('\'', '"'))
scripts[street_number_idx]
.string[19:]
.replace("\r\n", "")
.replace(", ]", "]")
.replace("'", '"')
)

number_dict = dict()

for idx, street_id in enumerate(possible_numbers):
number_dict[street_id[0]] = {e[1]: (e[0], e[2]) for _idx, e in enumerate(possible_numbers[idx][1])}
number_dict[street_id[0]] = {
e[1]: (e[0], e[2]) for _idx, e in enumerate(possible_numbers[idx][1])
}

return number_dict

@staticmethod
def extract_street_names(soup):
street_selector = soup.find("select", {"id": "225991280_boxmuellkalenderstrassedd"}).findAll("option")
available_streets = {street.string: int(street["value"]) for _idx, street in enumerate(street_selector)}
street_selector = soup.find(
"select", {"id": "225991280_boxmuellkalenderstrassedd"}
).findAll("option")
available_streets = {
street.string: int(street["value"])
for _idx, street in enumerate(street_selector)
}

return available_streets

@@ -73,8 +86,8 @@ class Source:
region = -1

for span in soup.findAll("span"):
if span.parent.name == 'td' and "teilgebiet" in span.string.lower():
region = span.string.split(' ')[1]
if span.parent.name == "td" and "teilgebiet" in span.string.lower():
region = span.string.split(" ")[1]
break

return region
@@ -82,6 +95,9 @@ class Source:
def determine_region(self):
"""finds the target region for the street and street number"""

if 0 < int(self.teilgebiet) <= 4:
return str(self.teilgebiet)

# request address selection form
url = urljoin(URL, "Rathaus/Buergerservice/Muellabfuhr")
page = requests.get(url=url, headers=self._headers, cookies=self._cookies)
@@ -94,44 +110,68 @@ class Source:
street_found = self.street_name in available_streets.keys()

if not street_found:
raise Exception(f"{self.street_name} not found. Please check back spelling with the official site: {url}")
raise Exception(
f"{self.street_name} not found. Please check back spelling with the official site: {url}"
)

self._street_name_id = available_streets.get(self.street_name)

self._street_number_id, street_number_link = number_dict.get(
available_streets.get(self.street_name)).get(str(self.street_number), (-1, 'not found'))
available_streets.get(self.street_name)
).get(str(self.street_number), (-1, "not found"))

if street_number_link == 'not found':
raise Exception(f"{self.street_number} not found. Available numbers for {self.street_name} are\
{list(number_dict.get(available_streets['Am Hafen']).keys())}")
if street_number_link == "not found":
raise Exception(
f"{self.street_number} not found. Available numbers for {self.street_name} are\
{list(number_dict.get(available_streets['Am Hafen']).keys())}"
)

# add selection cookie
self._cookies['riscms_muellkalender'] = str(f"{self._street_name_id}_{self._street_number_id}")
self._cookies["riscms_muellkalender"] = str(
f"{self._street_name_id}_{self._street_number_id}"
)

# request overview with address selection to get the region
url = urljoin(URL, "system/web/kalender.aspx")
page = requests.get(url=url, headers=self._headers, cookies=self._cookies,
params={"sprache": "1", "menuonr": "225991280", "typids": street_number_link})
page = requests.get(
url=url,
headers=self._headers,
cookies=self._cookies,
params={
"sprache": "1",
"menuonr": "225991280",
"typids": street_number_link,
},
)
soup = BeautifulSoup(page.content, "html.parser")

region = self.extract_region(soup)

if region == -1:
raise Exception(f"Region could not be found")
raise Exception("Region could not be found")

return str(region)

def get_region_links(self):
"""traverses the pages for different waste types and collects download links for the iCals"""

if self._region is None:
self._region = self.determine_region()

# create waste type urls
ical_urls = []
urls = [urljoin(URL, u) for u in WASTE_TYPE_URLS.get(self.region)]
urls = [urljoin(URL, u) for u in WASTE_TYPE_URLS.get(self._region)]

for u in urls:
r = requests.get(url=u, headers=self._headers, cookies=self._cookies)
soup = BeautifulSoup(r.content, "html.parser")
download_link = soup.findAll("a", {"class": "piwik_download_tracker", "data-trackingtyp": "iCal/Kalender"})
download_link = soup.findAll(
"a",
{
"class": "piwik_download_tracker",
"data-trackingtyp": "iCal/Kalender",
},
)
if len(download_link):
ical_urls.append(urljoin(URL, download_link[0].get("href")))
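
This source drives the RIS CMS purely through cookies: one cookie pre-accepts the consent banner, a second stores the selected street/number so the calendar page renders for that address. A sketch of the request shape (cookie names, URL, and params taken from the code above; the values are made up):

import requests

cookies = {
    "ris_cookie_setting": "g7750",      # pre-accepted cookie consent
    "riscms_muellkalender": "123_456",  # made-up street/number selection
}
page = requests.get(
    "https://www.korneuburg.gv.at/system/web/kalender.aspx",
    headers={"User-Agent": "Mozilla/5.0"},
    cookies=cookies,
    params={"sprache": "1", "menuonr": "225991280", "typids": "789"},  # typids made up
)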

@@ -1,6 +1,7 @@
from datetime import date

import requests
from bs4 import BeautifulSoup
from datetime import date
from waste_collection_schedule import Collection  # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

@@ -9,16 +10,16 @@ DESCRIPTION = "Source for KWU Entsorgung, Germany"
URL = "https://www.kwu-entsorgung.de/"
TEST_CASES = {
"Erkner": {"city": "Erkner", "street": "Heinrich-Heine-Straße", "number": "11"},
"Bad Saarow": {"city": "Bad Saarow", "street": "Ahornallee", "number": "1"}
"Bad Saarow": {"city": "Bad Saarow", "street": "Ahornallee", "number": "1"},
}

HEADERS = {"user-agent": "Mozilla/5.0 (xxxx Windows NT 10.0; Win64; x64)"}
ICON_MAP = {
ICON_MAP = {
"Restabfall": "mdi:trash-can-outline",
"Gelber Sack" : "mdi:recycle",
"Papiertonne" : "mdi:package-variant",
"Gelber Sack": "mdi:recycle",
"Papiertonne": "mdi:package-variant",
"Biotonne": "mdi:food-apple-outline",
}
}

class Source:
@@ -31,51 +32,64 @@ class Source:
def fetch(self):
session = requests.Session()

params = {
"city": self._city,
"street": self._street,
"number": self._number,
"direct": "true",
}

r = requests.get("https://www.kwu-entsorgung.de/inc/wordpress/kal_objauswahl.php", headers=HEADERS)
r = requests.get(
"https://www.kwu-entsorgung.de/inc/wordpress/kal_objauswahl.php",
headers=HEADERS,
)
parsed_html = BeautifulSoup(r.text, "html.parser")
Orte = parsed_html.find_all('option')
Orte = parsed_html.find_all("option")

for Ort in Orte:
if self._city in Ort.text:
OrtValue = Ort['value']
OrtValue = Ort["value"]
break

r = requests.get("https://www.kwu-entsorgung.de/inc/wordpress/kal_str2ort.php", params={"ort": OrtValue}, headers=HEADERS)
r = requests.get(
"https://www.kwu-entsorgung.de/inc/wordpress/kal_str2ort.php",
params={"ort": OrtValue},
headers=HEADERS,
)
parsed_html = BeautifulSoup(r.text, "html.parser")
Strassen = parsed_html.find_all('option')
Strassen = parsed_html.find_all("option")

for Strasse in Strassen:
if self._street in Strasse.text:
StrasseValue = Strasse['value']
StrasseValue = Strasse["value"]
break

r = requests.get("https://www.kwu-entsorgung.de/inc/wordpress/kal_str2ort.php", params={"ort": OrtValue, "strasse": StrasseValue}, headers=HEADERS)
r = requests.get(
"https://www.kwu-entsorgung.de/inc/wordpress/kal_str2ort.php",
params={"ort": OrtValue, "strasse": StrasseValue},
headers=HEADERS,
)
parsed_html = BeautifulSoup(r.text, "html.parser")
Objekte = parsed_html.find_all('option')
objects = parsed_html.find_all("option")

for Objekt in Objekte:
if self._number in Objekt.text:
ObjektValue = Objekt['value']
for obj in objects:
if self._number in obj.text:
ObjektValue = obj["value"]
break

r = requests.post("https://www.kwu-entsorgung.de/inc/wordpress/kal_uebersicht-2020.php", data={"ort": OrtValue, "strasse": StrasseValue, "objekt": ObjektValue, "jahr": date.today().year}, headers=HEADERS)
r = requests.post(
"https://www.kwu-entsorgung.de/inc/wordpress/kal_uebersicht-2020.php",
data={
"ort": OrtValue,
"strasse": StrasseValue,
"objekt": ObjektValue,
"jahr": date.today().year,
},
headers=HEADERS,
)

parsed_html = BeautifulSoup(r.text, "html.parser")
Links = parsed_html.find_all('a')
Links = parsed_html.find_all("a")

for Link in Links:
if 'ICal herunterladen' in Link.text:
ics_url = Link['href']
if "ICal herunterladen" in Link.text:
ics_url = Link["href"]

if ics_url is None:
raise Exception(f"ics url not found")
raise Exception("ics url not found")

# get ics file
r = session.get(ics_url, headers=HEADERS)
@@ -85,15 +99,19 @@ class Source:
dates = self._ics.convert(r.text)

entries = []
#for d in dates:
# for d in dates:
#     entries.append(Collection(d[0], d[1]))
#return entries
# return entries
for d in dates:
# _LOGGER.error(d)
waste_type = d[1].strip()
next_pickup_date = d[0]

entries.append(Collection(date=next_pickup_date, t=waste_type, icon=ICON_MAP.get(waste_type,"mdi:trash-can")))

entries.append(
Collection(
date=next_pickup_date,
t=waste_type,
icon=ICON_MAP.get(waste_type, "mdi:trash-can"),
)
)

return entries

@@ -1,17 +1,15 @@
import logging
from datetime import datetime

import requests
from waste_collection_schedule import Collection  # type: ignore[attr-defined]

from bs4 import BeautifulSoup
from urllib.parse import urlsplit, parse_qs
import logging
from waste_collection_schedule import Collection  # type: ignore[attr-defined]

TITLE = "Manchester City Council"
DESCRIPTION = "Source for bin collection services for Manchester City Council, UK."
URL = "https://www.manchester.gov.uk"
TEST_CASES = {
"domestic": {'uprn': '000077065560'},
"domestic": {"uprn": "000077065560"},
}

API_URL = "https://www.manchester.gov.uk/bincollections/"
@@ -26,25 +24,15 @@ _LOGGER = logging.getLogger(__name__)

class Source:
def __init__(
self, uprn: int = None
):
def __init__(self, uprn: int):
self._uprn = uprn
if not self._uprn:
_LOGGER.error(
"uprn must be provided in config"
)
self._session = requests.Session()

def fetch(self):
entries = []

r = requests.post(
API_URL,
data={
"mcc_bin_dates_uprn": self._uprn,
"mcc_bin_dates_submit": "Go"
},
data={"mcc_bin_dates_uprn": self._uprn, "mcc_bin_dates_submit": "Go"},
)

soup = BeautifulSoup(r.text, features="html.parser")
@@ -54,21 +42,18 @@ class Source:
date = result.find("p", {"class": "caption"})
dates = []
dates.append(str(date.text).replace("Next collection ", "", 1))
for date in result.find_all('li'):
for date in result.find_all("li"):
dates.append(date.text)
img_tag = result.find("img")
collection_type = img_tag["alt"]
for current_date in dates:
try:
date = datetime.strptime(current_date, "%A %d %b %Y").date()
entries.append(
Collection(
date=date,
t=collection_type,
icon=ICON_MAP[collection_type],
)
date = datetime.strptime(current_date, "%A %d %b %Y").date()
entries.append(
Collection(
date=date,
t=collection_type,
icon=ICON_MAP[collection_type],
)
except ValueError:
_LOGGER.error(f"Skipped {current_date} as it does not match time format")
)

return entries
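
The date strings scraped from the council page look like "Thursday 15 Dec 2022"; datetime.strptime with "%A %d %b %Y" parses that shape, and the except ValueError above skips any caption that does not match instead of aborting the whole fetch. Minimal check (sample strings made up):

from datetime import datetime

for text in ("Thursday 15 Dec 2022", "no date here"):  # made-up samples
    try:
        print(datetime.strptime(text, "%A %d %b %Y").date())
    except ValueError:
        # non-matching captions are skipped, mirroring the source above
        pass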
@@ -45,10 +45,9 @@ class Source:
addressSearchApiResults["Items"] is None
or len(addressSearchApiResults["Items"]) < 1
):
_LOGGER.error(
raise Exception(
f"Address search for '{self._street_address}' returned no results. Check your address on https://www.melton.vic.gov.au/My-Area"
)
return []

addressSearchTopHit = addressSearchApiResults["Items"][0]
_LOGGER.debug("Address search top hit: %s", addressSearchTopHit)

@@ -1,6 +1,6 @@
import logging
from datetime import datetime
import re
from datetime import datetime

import requests
from bs4 import BeautifulSoup
@@ -45,10 +45,9 @@ class Source:
addressSearchApiResults["Items"] is None
or len(addressSearchApiResults["Items"]) < 1
):
_LOGGER.error(
raise Exception(
f"Address search for '{self._street_address}' returned no results. Check your address on https://www.mrsc.vic.gov.au/Live-Work/Bins-Rubbish-Recycling/Bins-and-collection-days/Bin-collection-days"
)
return []

addressSearchTopHit = addressSearchApiResults["Items"][0]
_LOGGER.debug("Address search top hit: %s", addressSearchTopHit)
@@ -72,10 +71,12 @@ class Source:
waste_type = article.h3.string
icon = ICON_MAP.get(waste_type, "mdi:trash-can")
next_pickup = article.find(class_="next-service").string.strip()
if re.match("[^\s]* \d{1,2}\/\d{1,2}\/\d{4}", next_pickup):
if re.match(r"[^\s]* \d{1,2}\/\d{1,2}\/\d{4}", next_pickup):
next_pickup_date = datetime.strptime(
next_pickup.split(sep=" ")[1], "%d/%m/%Y"
).date()
entries.append(Collection(date=next_pickup_date, t=waste_type, icon=icon))
entries.append(
Collection(date=next_pickup_date, t=waste_type, icon=icon)
)

return entries
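
Prefixing the pattern with r is the other small fix that repeats in this commit: in a normal string, escapes like \s or \d are invalid string-escape sequences (a DeprecationWarning today, an error in future Python versions), while a raw string hands them to re untouched. Example with a date string of the shape the scraper expects (sample made up):

import re

pattern = r"[^\s]* \d{1,2}\/\d{1,2}\/\d{4}"  # raw string: \s, \d reach re intact
print(bool(re.match(pattern, "Tuesday 12/4/2022")))  # True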
@@ -1,32 +1,31 @@
import logging
from datetime import datetime
import re
from datetime import datetime
import requests
from waste_collection_schedule import Collection  # type: ignore[attr-defined]

# These lines are needed to suppress the InsecureRequestWarning resulting from the POST verify=False option
# With verify=True the POST fails due to a SSLCertVerificationError.
import urllib3
from waste_collection_schedule import Collection

urllib3.disable_warnings()
# The following links may provide a better way of dealing with this, as using verify=False is not ideal:
# https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html#ssl-warnings
# https://urllib3.readthedocs.io/en/1.26.x/user-guide.html#ssl
import urllib3

urllib3.disable_warnings()

_LOGGER = logging.getLogger(__name__)

TITLE = "Newcastle City Council"
DESCRIPTION = "Source for waste collection services for Newcastle City Council"
URL = "https://community.newcastle.gov.uk"
TEST_CASES = {
"Test_001": {"uprn": "004510053797"},
"Test_002": {"uprn": 4510053797}
}
TEST_CASES = {"Test_001": {"uprn": "004510053797"}, "Test_002": {"uprn": 4510053797}}

API_URL = "https://community.newcastle.gov.uk/my-neighbourhood/ajax/getBinsNew.php"
REGEX = "<strong>(Green|Blue|Brown) [bB]in \\((Domestic|Recycling|Garden)( Waste)?\\) details: <\\/strong><br\\/>" \
"collection day : [a-zA-Z]*day<br\\/>" \
"Next collection : ([0-9]{2}-[A-Za-z]+-[0-9]{4})"
REGEX = (
"<strong>(Green|Blue|Brown) [bB]in \\((Domestic|Recycling|Garden)( Waste)?\\) details: <\\/strong><br\\/>"
"collection day : [a-zA-Z]*day<br\\/>"
"Next collection : ([0-9]{2}-[A-Za-z]+-[0-9]{4})"
)
ICON_MAP = {
"DOMESTIC": "mdi:trash-can",
"RECYCLING": "mdi:recycle",
@@ -35,13 +34,8 @@ ICON_MAP = {

class Source:
def __init__(self, uprn=None):
def __init__(self, uprn):
self._uprn = str(uprn).zfill(12)
if not self._uprn:
_LOGGER.error(
"uprn must be provided in config"
)
self._uprn = self._uprn.zfill(12)
self._session = requests.Session()

def fetch(self):
@@ -54,7 +48,7 @@ class Source:
collection_date = collection[3]
entries.append(
Collection(
date=datetime.strptime(collection_date, '%d-%b-%Y').date(),
date=datetime.strptime(collection_date, "%d-%b-%Y").date(),
t=collection_type,
icon=ICON_MAP.get(collection_type.upper()),
)
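
REGEX above captures four groups per bin (colour, waste stream, an optional " Waste" suffix, and the date), so each re.findall hit is a 4-tuple where index 1 keys into ICON_MAP and index 3 feeds strptime("%d-%b-%Y"). A self-contained check against a made-up response fragment:

import re

REGEX = (
    "<strong>(Green|Blue|Brown) [bB]in \\((Domestic|Recycling|Garden)( Waste)?\\) details: <\\/strong><br\\/>"
    "collection day : [a-zA-Z]*day<br\\/>"
    "Next collection : ([0-9]{2}-[A-Za-z]+-[0-9]{4})"
)

html = (  # made-up fragment shaped like the getBinsNew.php response
    "<strong>Green bin (Domestic) details: </strong><br/>"
    "collection day : Monday<br/>"
    "Next collection : 05-Dec-2022"
)
print(re.findall(REGEX, html))  # [('Green', 'Domestic', '', '05-Dec-2022')]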
@@ -9,9 +9,7 @@ from waste_collection_schedule import Collection  # type: ignore[attr-defined]
TITLE = "Nillumbik Shire Council"
DESCRIPTION = "Source for Nillumbik Shire Council rubbish collection."
URL = "https://www.nillumbik.vic.gov.au"
TEST_CASES = {
"Test": {"street_address": "11 Sunnyside Crescent, WATTLE GLEN, 3096"}
}
TEST_CASES = {"Test": {"street_address": "11 Sunnyside Crescent, WATTLE GLEN, 3096"}}

_LOGGER = logging.getLogger(__name__)

@@ -29,7 +27,9 @@ class Source:
def fetch(self):
session = requests.Session()

response = session.get("https://www.nillumbik.vic.gov.au/Residents/Waste-and-recycling/Bin-collection/Check-my-bin-day")
response = session.get(
"https://www.nillumbik.vic.gov.au/Residents/Waste-and-recycling/Bin-collection/Check-my-bin-day"
)
response.raise_for_status()

response = session.get(
@@ -42,10 +42,9 @@ class Source:
addressSearchApiResults["Items"] is None
or len(addressSearchApiResults["Items"]) < 1
):
_LOGGER.error(
raise Exception(
f"Address search for '{self._street_address}' returned no results. Check your address on https://www.nillumbik.vic.gov.au/Residents/Waste-and-recycling/Bin-collection/Check-my-bin-day"
)
return []

addressSearchTopHit = addressSearchApiResults["Items"][0]
_LOGGER.debug("Address search top hit: %s", addressSearchTopHit)

@@ -51,20 +51,17 @@ class Source:
"Authorization": "",
}
r = requests.get(f"{url}/access-token", headers=headers)
r.raise_for_status()
headers["Authorization"] = r.json()["accessToken"]

params = {"q": self._postcode}
r = requests.get(f"{url}/zipcodes", params=params, headers=headers)
if r.status_code != 200:
_LOGGER.error("Get zip code failed")
return []
r.raise_for_status()
zipcodeId = r.json()["items"][0]["id"]

params = {"q": self._street, "zipcodes": zipcodeId}
r = requests.post(f"{url}/streets", params=params, headers=headers)
if r.status_code != 200:
_LOGGER.error("Get street id failed")
return []
r.raise_for_status()

streetId = None
for item in r.json()["items"]:
@@ -85,9 +82,7 @@ class Source:
# "size":100,
}
r = requests.get(f"{url}/collections", params=params, headers=headers)
if r.status_code != 200:
_LOGGER.error("Get data failed")
return []
r.raise_for_status()

entries = []
for item in r.json()["items"]:

@@ -1,5 +1,4 @@
from datetime import datetime
import logging

import requests
from waste_collection_schedule import Collection  # type: ignore[attr-defined]
@@ -9,20 +8,16 @@ TITLE = "Südbrandenburgischer Abfallzweckverband"
DESCRIPTION = "SBAZV Brandenburg, Deutschland"
URL = "https://www.sbazv.de"
TEST_CASES = {
"Wildau": {
"city": "wildau",
"district": "Wildau",
"street": "Miersdorfer Str."
}
"Wildau": {"city": "wildau", "district": "Wildau", "street": "Miersdorfer Str."}
}

ICON_MAP = {
"Restmülltonnen": "mdi:trash-can",
"Laubsäcke" : "mdi:leaf",
"Gelbe Säcke" : "mdi:sack",
"Papiertonnen" : "mdi:package-variant",
"Laubsäcke": "mdi:leaf",
"Gelbe Säcke": "mdi:sack",
"Papiertonnen": "mdi:package-variant",
"Weihnachtsbäume": "mdi:pine-tree",
}
}

# _LOGGER = logging.getLogger(__name__)

@@ -59,17 +54,24 @@ class Source:

# get ics file
# https://www.sbazv.de/entsorgungstermine/klein.ics?city=Wildau&district=Wildau&street=Miersdorfer+Str.
r = requests.get("https://www.sbazv.de/entsorgungstermine/klein.ics", params=args)
r = requests.get(
"https://www.sbazv.de/entsorgungstermine/klein.ics", params=args
)

# parse ics file
dates = self._ics.convert(r.text)

entries = []
for d in dates:
# _LOGGER.error(d)
waste_type = d[1].strip()
next_pickup_date = d[0]

entries.append(Collection(date=next_pickup_date, t=waste_type, icon=ICON_MAP.get(waste_type,"mdi:trash-can")))

entries.append(
Collection(
date=next_pickup_date,
t=waste_type,
icon=ICON_MAP.get(waste_type, "mdi:trash-can"),
)
)

return entries

@@ -53,8 +53,7 @@ class Source:
def fetch(self):
city = CITIES.get(self._city)
if city is None:
_LOGGER.error(f"city not found {self._city}")
return []
raise Exception(f"city not found {self._city}")

args = city
args["searchFor"] = self._street
@@ -64,13 +63,13 @@ class Source:
params=args,
headers=HEADERS,
)
r.raise_for_status()
streets = {
e["name"].strip(): e["id"] for (e) in json.loads(extractJson(r.text))
}

if self._street not in streets:
_LOGGER.error(f"street not found {self._street}")
return []
raise Exception(f"street not found {self._street}")

args = {
"licenseKey": city["licenseKey"],
@@ -89,6 +88,7 @@ class Source:
params=args,
headers=HEADERS,
)
r.raise_for_status()
data = json.loads(extractJson(r.text))

for ts, pickups in data["pickups"].items():

@@ -2,7 +2,7 @@ import logging
from datetime import datetime

import requests
from waste_collection_schedule import Collection
from waste_collection_schedule import Collection  # type: ignore[attr-defined]

TITLE = "SRV Återvinning"
DESCRIPTION = "Source for SRV återvinning AB, Sweden"
@@ -27,12 +27,10 @@ class Source:
"query": self._address,
"city": "",
}
url = "https://www.srvatervinning.se/rest-api/srv-slamsok-rest-new/search"
r = requests.get(url, params)

if r.status_code != 200:
_LOGGER.error("Error querying calendar data")
return []
r = requests.get(
"https://www.srvatervinning.se/rest-api/srv-slamsok-rest-new/search", params
)
r.raise_for_status()

data = r.json()

@@ -31,17 +31,14 @@ class Source:

data = json.loads(r.text)
if len(data["results"]) == 0:
_LOGGER.error(f"street not found: {self._street}")
return []
raise Exception(f"street not found: {self._street}")
street_entry = data["results"].get(self._street)
if street_entry is None:
_LOGGER.error(f"street not found: {self._street}")
return []
raise Exception(f"street not found: {self._street}")

id = street_entry.get(str(self._house_number))
if id is None:
_LOGGER.error(f"house_number not found: {self._house_number}")
return []
raise Exception(f"house_number not found: {self._house_number}")

# get ics file
params = {

@@ -3,7 +3,7 @@ import logging

import requests
from bs4 import BeautifulSoup
from waste_collection_schedule import Collection
from waste_collection_schedule import Collection  # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

TITLE = "StadtService Brühl"
@@ -34,12 +34,8 @@ class Source:
r = requests.post(
"https://services.stadtservice-bruehl.de/abfallkalender/", data=data
)
r.raise_for_status()

if r.status_code != 200:
_LOGGER.error("Error querying calender data")
return []

# print(r.text)
soup = BeautifulSoup(r.text, "html.parser")

for tag in soup.find_all("input", type="hidden"):
@@ -49,8 +45,7 @@ class Source:
post_district = tag["value"]

if post_district == "":
_LOGGER.error("Unable to get district")
return []
raise Exception("Unable to get district")

# print(post_district);
# Get ICAL
@@ -74,10 +69,7 @@ class Source:
"https://services.stadtservice-bruehl.de/abfallkalender/individuellen-abfuhrkalender-herunterladen/",
data=data,
)

if r.status_code != 200:
_LOGGER.error("Error querying calendar data")
return []
r.raise_for_status()

dates = self._ics.convert(r.text)
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
 import logging
-from datetime import datetime
+import re
+from datetime import datetime

 import requests
 from bs4 import BeautifulSoup
@@ -45,10 +45,9 @@ class Source:
             addressSearchApiResults["Items"] is None
             or len(addressSearchApiResults["Items"]) < 1
         ):
-            _LOGGER.error(
+            raise Exception(
                 f"Address search for '{self._street_address}' returned no results. Check your address on https://www.stonnington.vic.gov.au/Services/Waste-and-recycling"
             )
-            return []

         addressSearchTopHit = addressSearchApiResults["Items"][0]
         _LOGGER.debug("Address search top hit: %s", addressSearchTopHit)
@@ -72,10 +71,12 @@ class Source:
             waste_type = article.h3.string
             icon = ICON_MAP.get(waste_type, "mdi:trash-can")
             next_pickup = article.find(class_="next-service").string.strip()
-            if re.match("[^\s]* \d{1,2}\/\d{1,2}\/\d{4}", next_pickup):
+            if re.match(r"[^\s]* \d{1,2}\/\d{1,2}\/\d{4}", next_pickup):
                 next_pickup_date = datetime.strptime(
                     next_pickup.split(sep=" ")[1], "%d/%m/%Y"
                 ).date()
-                entries.append(Collection(date=next_pickup_date, t=waste_type, icon=icon))
+                entries.append(
+                    Collection(date=next_pickup_date, t=waste_type, icon=icon)
+                )

         return entries
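
The only non-mechanical change here is prefixing the pattern with `r`. In a normal Python string, `\s`, `\d` and `\/` are invalid escape sequences; they happen to pass through unchanged today, but newer interpreters warn about them (a `DeprecationWarning`, and a `SyntaxWarning` from Python 3.12). A quick demonstration (the `\/` escapes in the original are harmless but unnecessary, since `/` is not special in Python regexes):

```python
import re

date_str = "Tuesday 14/06/2022"

# Raw string: backslashes reach the regex engine untouched.
pattern = r"[^\s]* \d{1,2}/\d{1,2}/\d{4}"
print(bool(re.match(pattern, date_str)))  # True
```
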
@@ -37,8 +37,7 @@ class Source:

         # check if suburb exists
         if self._suburb not in suburbs:
-            _LOGGER.error(f"suburb not found: {self._suburb}")
-            return []
+            raise Exception(f"suburb not found: {self._suburb}")
         suburbKey = suburbs[self._suburb]

         # get list of streets for selected suburb
@@ -51,14 +50,14 @@ class Source:

         # check if street exists
         if self._street not in streets:
-            _LOGGER.error(f"street not found: {self._street}")
-            return []
+            raise Exception(f"street not found: {self._street}")
         streetKey = streets[self._street]

         # get list of house numbers for selected street
         params = {"streetkey": streetKey, "suburbKey": suburbKey}
         r = requests.get(
-            f"{self._url}/properties/GetPropertiesByStreetAndSuburbKey", params=params,
+            f"{self._url}/properties/GetPropertiesByStreetAndSuburbKey",
+            params=params,
         )
         data = json.loads(r.text)

@@ -70,8 +69,7 @@ class Source:

         # check if house number exists
         if self._houseNo not in houseNos:
-            _LOGGER.error(f"house number not found: {self._houseNo}")
-            return []
+            raise Exception(f"house number not found: {self._houseNo}")
         propertyKey = houseNos[self._houseNo]

         # get collection schedule

@@ -1,18 +1,24 @@
 import logging
+from datetime import datetime

 import requests
 from bs4 import BeautifulSoup
 from waste_collection_schedule import Collection  # type: ignore[attr-defined]
-from datetime import datetime

 TITLE = "Wyndham City Council, Melbourne"
 DESCRIPTION = "Source for Wyndham City Council rubbish collection."
 URL = "https://wyndham.vic.gov.au"
 TEST_CASES = {
-    "Truganina South Primary School": {"street_address": "3-19 Parkvista Drive TRUGANINA 3029"},
+    "Truganina South Primary School": {
+        "street_address": "3-19 Parkvista Drive TRUGANINA 3029"
+    },
     "Westbourne Grammar School": {"street_address": "300 Sayers Road TRUGANINA 3029"},
-    "Werribee Mercy Hospital": {"street_address": "300-310 Princes Highway WERRIBEE 3030"},
-    "Wyndham Park Primary School": {"street_address": "59-77 Kookaburra Avenue WERRIBEE 3030"},
+    "Werribee Mercy Hospital": {
+        "street_address": "300-310 Princes Highway WERRIBEE 3030"
+    },
+    "Wyndham Park Primary School": {
+        "street_address": "59-77 Kookaburra Avenue WERRIBEE 3030"
+    },
 }

 API_URL = "https://digital.wyndham.vic.gov.au/myWyndham/"
@@ -33,47 +39,52 @@ class Source:
         session = requests.Session()
         response = session.get(API_URL)
         response.raise_for_status()
-        response = session.get("https://digital.wyndham.vic.gov.au/myWyndham/ajax/address-search-suggestions.asp?",
-            params=dict(ASEARCH=self._street_address),
-        )
+        response = session.get(
+            "https://digital.wyndham.vic.gov.au/myWyndham/ajax/address-search-suggestions.asp?",
+            params=dict(ASEARCH=self._street_address),
+        )
         response.raise_for_status()
         html = response.content
-        property_address = BeautifulSoup(html, 'html.parser').find("li").get_text()
+        property_address = BeautifulSoup(html, "html.parser").find("li").get_text()
         _LOGGER.debug("Fetched Property Address: %s", property_address)
-        if property_address == 'No match found.':
-            _LOGGER.error(
-                f"Address search for '{self._street_address}' returned no results. Check your address on "
-                f"https://digital.wyndham.vic.gov.au/myWyndham/ "
-            )
-        if property_address.upper() == self._street_address.upper():
-            property_number = BeautifulSoup(html, 'html.parser').find('span').get_text()
-            _LOGGER.debug("Fetched Property Number: %s", property_number)
-            response = session.get(
-                "https://digital.wyndham.vic.gov.au/myWyndham/init-map-data.asp",
-                params=dict(propnum=property_number, radius="1000", mapfeatures="23,37,22,33,35"),
-            )
-            response.raise_for_status()
-            wasteApiResult = response.content
-            soup = BeautifulSoup(wasteApiResult, 'html.parser')
-            entries = []
-
-            for article in soup.findAll("div", {"class": "waste"}):
-                if article.get_text().startswith('Next'):
-                    waste_type = article.get_text().strip().split(':')[0][5:].replace(' Collection', '')
-                    _LOGGER.debug("Waste Type: %s", waste_type)
-                    icon = ICON_MAP.get(waste_type, 'mdi:trash-can')
-                    _LOGGER.debug("Icon: %s", icon)
-                    next_pickup_date = datetime.strptime(article.get_text().split(':')[1].strip(), "%A, %d %B %Y").date()
-                    _LOGGER.debug("Next Pickup Date: %s", next_pickup_date)
-                    entries.append(
-                        Collection(date=next_pickup_date, t=waste_type, icon=icon)
-                    )
-            return entries
-        else:
-            _LOGGER.error(
+        if (
+            property_address == "No match found."
+            or property_address.upper() != self._street_address.upper()
+        ):
+            raise Exception(
                 f"Address search for '{self._street_address}' returned no results. Check your address on "
                 f"https://digital.wyndham.vic.gov.au/myWyndham/ "
             )
+
+        property_number = BeautifulSoup(html, "html.parser").find("span").get_text()
+        _LOGGER.debug("Fetched Property Number: %s", property_number)
+        response = session.get(
+            "https://digital.wyndham.vic.gov.au/myWyndham/init-map-data.asp",
+            params=dict(
+                propnum=property_number, radius="1000", mapfeatures="23,37,22,33,35"
+            ),
+        )
+        response.raise_for_status()
+        wasteApiResult = response.content
+        soup = BeautifulSoup(wasteApiResult, "html.parser")
+        entries = []
+
+        for article in soup.findAll("div", {"class": "waste"}):
+            if article.get_text().startswith("Next"):
+                waste_type = (
+                    article.get_text()
+                    .strip()
+                    .split(":")[0][5:]
+                    .replace(" Collection", "")
+                )
+                _LOGGER.debug("Waste Type: %s", waste_type)
+                icon = ICON_MAP.get(waste_type, "mdi:trash-can")
+                _LOGGER.debug("Icon: %s", icon)
+                next_pickup_date = datetime.strptime(
+                    article.get_text().split(":")[1].strip(), "%A, %d %B %Y"
+                ).date()
+                _LOGGER.debug("Next Pickup Date: %s", next_pickup_date)
+                entries.append(
+                    Collection(date=next_pickup_date, t=waste_type, icon=icon)
+                )
+        return entries
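
Beyond the quote and formatting cleanup, the Wyndham source is restructured from a nested `if/else` into a guard clause: validate the address first, raise on failure, and let the happy path continue at the top indentation level. Schematically, with the helpers reduced to hypothetical stubs so the sketch runs:

```python
def fetch(street_address):
    # Guard clause: reject bad input up front and raise, instead of
    # wrapping all of the real work in an `if ... else` block.
    property_address = look_up_address(street_address)  # hypothetical helper
    if (
        property_address == "No match found."
        or property_address.upper() != street_address.upper()
    ):
        raise Exception(f"Address search for '{street_address}' returned no results.")

    # Happy path runs un-nested from here on.
    return collect_schedule(property_address)  # hypothetical helper


def look_up_address(street_address):
    return street_address  # stub


def collect_schedule(property_address):
    return []  # stub
```
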
@@ -25,6 +25,12 @@ def main():
     parser.add_argument(
         "-i", "--icon", action="store_true", help="Show waste type icon"
     )
+    parser.add_argument(
+        "-t",
+        "--traceback",
+        action="store_true",
+        help="Print exception information and stack trace",
+    )
     args = parser.parse_args()

     # read secrets.yaml
@@ -73,10 +79,18 @@ def main():
             replace_secret(secrets, tc)

             # create source
-            source = module.Source(**tc)
             try:
+                source = module.Source(**tc)
                 result = source.fetch()
-                print(f" found {len(result)} entries for {name}")
+                count = len(result)
+                if count > 0:
+                    print(
+                        f" found {bcolors.OKGREEN}{count}{bcolors.ENDC} entries for {name}"
+                    )
+                else:
+                    print(
+                        f" found {bcolors.WARNING}0{bcolors.ENDC} entries for {name}"
+                    )

                 # test if source is returning the correct date format
                 if (
@@ -88,7 +102,7 @@ def main():
                     > 0
                 ):
                     print(
-                        " ERROR: source returns invalid date format (datetime.datetime instead of datetime.date?)"
+                        f"{bcolors.FAIL} ERROR: source returns invalid date format (datetime.datetime instead of datetime.date?){bcolors.ENDC}"
                     )

                 if args.list:
@@ -97,8 +111,10 @@ def main():
                         print(f" {x.date.isoformat()}: {x.type}{icon_str}")
         except KeyboardInterrupt:
             exit()
-        except Exception:
-            print(traceback.format_exc())
+        except Exception as exc:
+            print(f" {name} {bcolors.FAIL}failed{bcolors.ENDC}: {exc}")
+            if args.traceback:
+                print(indent(traceback.format_exc(), 4))


 def replace_secret(secrets, d):
@@ -116,5 +132,22 @@ def replace_secret(secrets, d):
             print(f"identifier '{id}' not found in {SECRET_FILENAME}")


+def indent(s, count):
+    indent = " " * count
+    return "\n".join([indent + line for line in s.split("\n")])
+
+
+class bcolors:
+    HEADER = "\033[95m"
+    OKBLUE = "\033[94m"
+    OKCYAN = "\033[96m"
+    OKGREEN = "\033[92m"
+    WARNING = "\033[93m"
+    FAIL = "\033[91m"
+    ENDC = "\033[0m"
+    BOLD = "\033[1m"
+    UNDERLINE = "\033[4m"
+
+
 if __name__ == "__main__":
     main()

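
The "invalid date format" check exists because `datetime.datetime` is a subclass of `datetime.date`, so an `isinstance` test cannot tell them apart; a source that returns full datetimes would otherwise slip through and break date comparisons in the integration. A sketch of the distinction (the exact check in test_sources.py may differ):

```python
import datetime

d = datetime.date(2022, 6, 14)
dt = datetime.datetime(2022, 6, 14, 8, 0)

print(isinstance(dt, datetime.date))  # True - datetime subclasses date
print(type(d) is datetime.date)       # True
print(type(dt) is datetime.date)      # False - this is what the script flags
```
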
@@ -218,6 +218,7 @@ The script supports the following options:
 | `-s` | SOURCE | [Source name](https://github.com/mampfes/hacs_waste_collection_schedule#source-configuration-variables) (source file name without ending `.py`) |
 | `-l` | - | List all found dates. |
 | `-i` | - | Add icon name to output. Only effective together with `-l`. |
+| `-t` | - | Show extended exception info and stack trace. |

 For debugging a single source, it is recommended to use the `-s SOURCE` option. If run without any arguments, the script tests every source in the `custom_components/waste_collection_schedule/waste_collection_schedule/source` folder and prints the number of entries found for every test case.

@@ -421,12 +421,15 @@ waste_collection_schedule:

 ### EAW Rheingau Taunus

 1. Find your ICS link via the <eaw-rheingau-taunus.de> web page
 2. Remove the cHash attribute

 ```yaml
 waste_collection_schedule:
   sources:
     - name: ics
       args:
-        url: "https://www.eaw-rheingau-taunus.de/abfallkalender/calendar.ics?streetid=1429"
+        url: "https://www.eaw-rheingau-taunus.de/abfallsammlung/abfuhrtermine/feed.ics?tx_vierwdeaw_garbagecalendarics%5Baction%5D=ics&tx_vierwdeaw_garbagecalendarics%5Bcontroller%5D=GarbageCalendar&tx_vierwdeaw_garbagecalendarics%5Bstreet%5D=38"
+        split_at: ","
 ```
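
The new feed URL is accompanied by `split_at: ","`. In the generic ICS source this option splits a single calendar event whose summary lists several waste types into one collection entry per type, using the given separator. A rough sketch of the idea, not the actual ICS source implementation:

```python
import re


def split_summary(summary, split_at=","):
    # One ICS event titled "Restabfall, Bioabfall" becomes two entries.
    return [t.strip() for t in re.split(split_at, summary)]


print(split_summary("Restabfall, Bioabfall"))  # ['Restabfall', 'Bioabfall']
```
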