make regex strings raw strings
Made these regex strings raw strings because mypy complained about them; a short illustration of the escape-sequence issue follows the change summary below.
5ila5 committed Jun 23, 2024
1 parent d84d3e7 commit a107f0e
Showing 3 changed files with 33 additions and 32 deletions.
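
For context only, not part of the committed diff: a minimal sketch, using an illustrative pattern rather than one taken from the changed sources, of why raw strings are preferred for regex literals in Python. Sequences such as \d are not valid string escapes, so a plain literal still works but draws warnings from CPython and from static-analysis tools, while a raw literal passes the backslash through to the regex engine untouched.

import re

# Plain (non-raw) literal: "\d" is not a recognized Python escape sequence,
# so the backslash is kept literally, but CPython emits a SyntaxWarning
# (DeprecationWarning on older versions) and linters/type checkers flag
# the invalid escape. It still happens to match:
plain = "<span>(\d+)</span>"

# Raw literal: the backslash reaches the regex engine untouched, with no
# warning from the interpreter or from static analysis.
raw = r"<span>(\d+)</span>"

sample = "<span>42</span>"
assert re.findall(plain, sample) == re.findall(raw, sample) == ["42"]

# The distinction matters for sequences that ARE valid string escapes:
# in a plain literal "\b" is a backspace character, whereas the regex
# word-boundary assertion needs a real backslash followed by "b", i.e. r"\b".
assert "\b" != r"\b"

The three diffs below apply exactly this change, adding an r prefix to literals containing sequences such as \Export, \d, and \(, alongside a few unrelated formatting tweaks (trailing commas, reflowed signatures).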
@@ -1,8 +1,9 @@
import logging
import requests
import urllib
from bs4 import BeautifulSoup
from datetime import date

import requests
from bs4 import BeautifulSoup
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS

@@ -17,14 +18,14 @@

_LOGGER = logging.getLogger(__name__)


class Source:
def __init__(self, street, house_number):
self._street = street
self._house_number = house_number
self._ics = ICS(regex=r"(.*)\:\s*\!")

def fetch(self):

street_idx = self.get_street_idx(self._street)
if street_idx == -1:
_LOGGER.error("Error: Street not found..")
@@ -33,10 +34,10 @@ def fetch(self):

args = {
"tx_collectioncalendar_abfuhrkalender[action]": "exportIcs",
"tx_collectioncalendar_abfuhrkalender[controller]": "Frontend\Export",
"tx_collectioncalendar_abfuhrkalender[houseNumber]": str(self._house_number).encode(
"utf-8"
),
"tx_collectioncalendar_abfuhrkalender[controller]": r"Frontend\Export",
"tx_collectioncalendar_abfuhrkalender[houseNumber]": str(
self._house_number
).encode("utf-8"),
"tx_collectioncalendar_abfuhrkalender[street]": str(street_idx).encode(
"utf-8"
),
@@ -45,7 +46,7 @@ def fetch(self):
"tx_collectioncalendar_abfuhrkalender[wasteTypes][3]": 3,
"tx_collectioncalendar_abfuhrkalender[wasteTypes][4]": 4,
"tx_collectioncalendar_abfuhrkalender[wasteTypes][5]": 5,
"tx_collectioncalendar_abfuhrkalender[year]": year
"tx_collectioncalendar_abfuhrkalender[year]": year,
}

# use '%20' instead of '+' in API_URL
@@ -62,20 +63,22 @@ def fetch(self):
entries.append(Collection(d[0], d[1]))
return entries

def get_street_mapping(self): # thanks @dt215git (https://github.com/mampfes/hacs_waste_collection_schedule/issues/539#issuecomment-1371413297)
def get_street_mapping(
self,
): # thanks @dt215git (https://github.com/mampfes/hacs_waste_collection_schedule/issues/539#issuecomment-1371413297)
s = requests.Session()
r = s.get(API_URL)

soup = BeautifulSoup(r.text, "html.parser")
items = soup.find_all("option")
items = items[2:] # first two values are not street addresses so remove them
items = items[2:] # first two values are not street addresses so remove them

streets = []
ids = []
for item in items:
streets.append(item.text) # street name
ids.append(item.attrs["value"]) # dropdown value
mapping = {k:v for (k,v) in zip(streets, ids)}
mapping = {k: v for (k, v) in zip(streets, ids)}

return mapping

@@ -1,4 +1,5 @@
import re

import requests
from waste_collection_schedule import Collection # type: ignore[attr-defined]
from waste_collection_schedule.service.ICS import ICS
@@ -37,9 +38,7 @@


class Source:
def __init__(
self, village: str, street: str, house_number: str
):
def __init__(self, village: str, street: str, house_number: str):
self._village = village
self._street = street
self._house_number = house_number
@@ -77,31 +76,32 @@ def get_from_proxy(self, village: int = 0, street: int = 0, input: str = ""):
"str_id": street,
"hidden_kalenderart": "privat",
"url": 0 if village == 0 else 2 if street == 0 else 3,
"server": 0
"server": 0,
}
data = requests.post(DATA_URL, data=post_data).text
data = re.findall("<li id = '.*?_\d+'onClick='get_value\(\".*?\",\d+,\d+\)'>" +
"<span style = 'display:none;'>(\d+)</span>" +
"<span style = 'display:none;'>(\d+)</span>" +
"<span>(.*?)</span>" +
"</li>", data)
data_text = requests.post(DATA_URL, data=post_data).text
data = re.findall(
r"<li id = '.*?_\d+'onClick='get_value\(\".*?\",\d+,\d+\)'>"
+ r"<span style = 'display:none;'>(\d+)</span>"
+ r"<span style = 'display:none;'>(\d+)</span>"
+ r"<span>(.*?)</span>"
+ "</li>",
data_text,
)
return [data[0][0], data[0][1]]

def get_ids(self):
[village_id, _] = self.get_from_proxy(input=self._village)
[street_id, _] = self.get_from_proxy(
village=village_id, input=self._street)
[street_id, _] = self.get_from_proxy(village=village_id, input=self._street)
[house_number_id, area_id] = self.get_from_proxy(
village=village_id, street=street_id, input=self._house_number)
village=village_id, street=street_id, input=self._house_number
)
return [village_id, street_id, house_number_id, area_id]

def fetch(self):
[village_id, street_id, house_number_id, area_id] = self.get_ids()
dates = self.get_calendar(village_id, street_id,
house_number_id, area_id)
dates = self.get_calendar(village_id, street_id, house_number_id, area_id)

entries = []
for d in dates:
entries.append(Collection(
date=d[0], t=d[1], icon=ICON_MAP.get(d[1])))
entries.append(Collection(date=d[0], t=d[1], icon=ICON_MAP.get(d[1])))
return entries
@@ -1,10 +1,10 @@
import logging
import re
from datetime import datetime

import requests
from waste_collection_schedule import Collection # type: ignore[attr-defined]


_LOGGER = logging.getLogger(__name__)

TITLE = "Newcastle City Council"
@@ -14,9 +14,7 @@


API_URL = "https://community.newcastle.gov.uk/my-neighbourhood/ajax/getBinsNew.php"
REGEX = (
"[Green|Blue|Brown] [Bb]in \(([A-Za-z]+)( Waste)?\) .*? ([0-9]{2}-[A-Za-z]+-[0-9]{4})"
)
REGEX = r"[Green|Blue|Brown] [Bb]in \(([A-Za-z]+)( Waste)?\) .*? ([0-9]{2}-[A-Za-z]+-[0-9]{4})"
ICON_MAP = {
"DOMESTIC": "mdi:trash-can",
"RECYCLING": "mdi:recycle",
