mirror of
https://github.com/sascha-hemi/hacs_waste_collection_schedule.git
synced 2026-03-21 04:06:03 +01:00
fix highland_gov_uk (breaks existing configurations)
Breaks existing configurations, as the new implementation needs a UPRN while the old one needed a different internal ID.
This commit is contained in:
@@ -1,86 +1,111 @@
|
||||
import datetime
|
||||
import time
|
||||
|
||||
import requests
|
||||
from waste_collection_schedule import Collection # type: ignore[attr-defined]
|
||||
from bs4 import BeautifulSoup, Tag
|
||||
import datetime
|
||||
|
||||
# Metadata describing this waste-collection source.
TITLE = "Highland"
DESCRIPTION = "Source for Highland."
URL = "https://www.highland.gov.uk/"

# Test fixtures: human-readable address -> source arguments.
# UPRNs may be given as int or str; ``predict`` opts into
# frequency-based forecasting of future collection dates.
TEST_CASES = {
    "Allangrange Mains Road, Black Isle": {"uprn": 130108578, "predict": True},
    "Kishorn, Wester Ross": {"uprn": "130066519", "predict": True},
    "Quarry Lane, Tain": {"uprn": "130007199"},
}

# Map of (lower-cased) bin-type names to Material Design icons.
ICON_MAP = {
    "refuse": "mdi:trash-can",
    "recycling": "mdi:recycle",
    "garden waste": "mdi:leaf",
    "recycle": "mdi:recycle",
    "garden": "mdi:leaf",
    "food": "mdi:food",
    "containers": "mdi:package",
}

# Endpoint that establishes an (anonymous) session; its JSON response
# contains the "auth-session" id required by the API broker lookup.
SESSION_URL = "https://highland-self.achieveservice.com/authapi/isauthenticated?uri=https%3A%2F%2Fhighland-self.achieveservice.com%2Fen%2Fservice%2FCheck_your_household_bin_collection_days&hostname=highland-self.achieveservice.com&withCredentials=true"

# API broker lookup endpoint that returns the collection schedule.
API_URL = "https://highland-self.achieveservice.com/apibroker/runLookup"
||||
class Source:
|
||||
def __init__(self, record_id: str | int):
|
||||
self._record_id: str = str(record_id)
|
||||
def __init__(self, uprn: str | int, predict: bool = False):
|
||||
self._uprn: str = str(uprn)
|
||||
self._predict: bool = predict
|
||||
|
||||
def fetch(self):
|
||||
today = datetime.datetime.now().date()
|
||||
|
||||
r = requests.get(f"{API_URL}/{self._record_id}/")
|
||||
def fetch(self) -> list[Collection]:
|
||||
data = {
|
||||
"formValues": {"Your address": {"propertyuprn": {"value": self._uprn}}},
|
||||
}
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
"Accept": "application/json",
|
||||
"User-Agent": "Mozilla/5.0",
|
||||
"X-Requested-With": "XMLHttpRequest",
|
||||
"Referer": "https://highland-self.achieveservice.com/fillform/?iframe_id=fillform-frame-1&db_id=",
|
||||
}
|
||||
s = requests.session()
|
||||
r = s.get(SESSION_URL)
|
||||
r.raise_for_status()
|
||||
session_data = r.json()
|
||||
sid = session_data["auth-session"]
|
||||
params = {
|
||||
"id": "660d44a698632",
|
||||
"repeat_against": "",
|
||||
"noRetry": "false",
|
||||
"getOnlyTokens": "undefined",
|
||||
"log_id": "",
|
||||
"app_name": "AF-Renderer::Self",
|
||||
# unix_timestamp
|
||||
"_": str(int(time.time() * 1000)),
|
||||
"sid": sid,
|
||||
}
|
||||
|
||||
r = s.post(API_URL, json=data, headers=headers, params=params)
|
||||
r.raise_for_status()
|
||||
|
||||
soup = BeautifulSoup(r.text, "html.parser")
|
||||
table = soup.find("ul", {"class": "data-table"})
|
||||
if table is None or isinstance(table, str):
|
||||
raise Exception(f"Content of the webpage seems to be invalid check {API_URL}/{self._record_id}/")
|
||||
rows:list[Tag] = table.find_all("li")
|
||||
data = r.json()
|
||||
rows_data = data["integration"]["transformed"]["rows_data"]["0"]
|
||||
if not isinstance(rows_data, dict):
|
||||
raise ValueError("Invalid data returned from API")
|
||||
|
||||
entries = []
|
||||
for row in rows:
|
||||
if not "bin days" in row.text.lower():
|
||||
continue
|
||||
bin_type_heading = row.find("h2")
|
||||
if bin_type_heading is None:
|
||||
continue
|
||||
bin_type_text = bin_type_heading.text.lower().strip()
|
||||
|
||||
if "bin days" == bin_type_text:
|
||||
bin_type = "Refuse"
|
||||
else:
|
||||
bin_type = bin_type_text.replace(
|
||||
"bin days", "").strip().capitalize()
|
||||
|
||||
dates = row.find("div")
|
||||
if dates is None:
|
||||
for key, value in rows_data.items():
|
||||
if not (key.endswith("NextDate") or key.endswith("NextDateNew")):
|
||||
continue
|
||||
|
||||
for date in dates.text.split(","):
|
||||
# Remove suffixes from date string
|
||||
date = date.replace("th", "").replace("st", "").replace(
|
||||
"nd", "").replace("rd", "").strip()
|
||||
bin_type = key.split("NextDate")[0]
|
||||
try:
|
||||
date = datetime.datetime.fromisoformat(value).date()
|
||||
except ValueError:
|
||||
continue
|
||||
entries.append(
|
||||
Collection(
|
||||
date=date,
|
||||
t=bin_type,
|
||||
icon=ICON_MAP.get(bin_type.lower()),
|
||||
)
|
||||
)
|
||||
freq_key = key.replace("NextDate", "Frequency")
|
||||
if not self._predict or freq_key not in rows_data:
|
||||
continue
|
||||
week_freq = rows_data[freq_key]
|
||||
if not week_freq or not isinstance(week_freq, str):
|
||||
continue
|
||||
week_freq = week_freq.lower().replace("every week", "every 1 weeks")
|
||||
week_freq = week_freq.replace("every ", "").replace(" weeks", "")
|
||||
# if week_freq is integer string
|
||||
if not week_freq.isdigit():
|
||||
continue
|
||||
week_freq_int = int(week_freq)
|
||||
|
||||
# Convert date string to datetime object
|
||||
try:
|
||||
date_obj = datetime.datetime.strptime(date, "%A %d %B").date()
|
||||
date_obj = date_obj.replace(year=today.year)
|
||||
if date_obj < today:
|
||||
date_obj = date_obj.replace(year=today.year + 1)
|
||||
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
# Create Collection object and append to entries list
|
||||
icon = ICON_MAP.get(bin_type.lower())
|
||||
# add 10 weeks of entries
|
||||
for i in range(int(10 * (1 / week_freq_int))):
|
||||
entries.append(
|
||||
Collection(
|
||||
date=date_obj,
|
||||
date=date + datetime.timedelta(weeks=i * week_freq_int),
|
||||
t=bin_type,
|
||||
icon=icon
|
||||
icon=ICON_MAP.get(bin_type.lower()),
|
||||
)
|
||||
)
|
||||
|
||||
return entries
|
||||
|
||||
@@ -9,15 +9,20 @@ waste_collection_schedule:
|
||||
sources:
|
||||
- name: highland_gov_uk
|
||||
args:
|
||||
uprn: UPRN
|
||||
predict: PREDICT_MULTIPLE_DATES
|
||||
|
||||
```
|
||||
|
||||
### Configuration Variables
|
||||
|
||||
**uprn**
|
||||
*(String | Integer) (required)*
|
||||
|
||||
**predict**
|
||||
*(Boolean) (optional|default=False)*
|
||||
Tries to predict the next collections based on the nextCollection date, and the frequency of the collection. Only returns one date per waste type if set to False. If set to True, it will try to predict the next 10 weeks of collections.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
@@ -25,10 +30,31 @@ waste_collection_schedule:
|
||||
sources:
|
||||
- name: highland_gov_uk
|
||||
args:
|
||||
uprn: 130108578
|
||||
# Implicit
|
||||
# predict: False
|
||||
```
|
||||
|
||||
```yaml
|
||||
waste_collection_schedule:
|
||||
sources:
|
||||
- name: highland_gov_uk
|
||||
args:
|
||||
uprn: 130007199
|
||||
predict: False
|
||||
```
|
||||
|
||||
```yaml
|
||||
waste_collection_schedule:
|
||||
sources:
|
||||
- name: highland_gov_uk
|
||||
args:
|
||||
uprn: 130066519
|
||||
predict: True
|
||||
```
|
||||
|
||||
## How to get the source argument
|
||||
|
||||
An easy way to discover your Unique Property Reference Number (UPRN) is by going to <https://www.findmyaddress.co.uk/> and entering your address details.
|
||||
|
||||
Or by going to uprn.uk and entering your postcode.
|
||||
|
||||
Reference in New Issue
Block a user