From b4842292deb537376bacc338ee1c85582e265a85 Mon Sep 17 00:00:00 2001 From: Robert Bradley Date: Fri, 3 Jan 2025 13:57:42 +0000 Subject: [PATCH] init gh-pages --- .github/workflows/behave.yml | 94 +- CHANGELOG.md | 2011 ---------- CODE_OF_CONDUCT.md | 128 - CONTRIBUTING.md | 360 -- LICENSE | 21 - Makefile | 44 - README.md | 371 -- SwaggerUI.png | Bin 91169 -> 0 bytes behave.ini | 16 - conftest.py | 62 - custom_components/__init__.py | 0 .../uk_bin_collection/__init__.py | 309 -- .../uk_bin_collection/calendar.py | 137 - .../uk_bin_collection/config_flow.py | 576 --- custom_components/uk_bin_collection/const.py | 25 - .../uk_bin_collection/manifest.json | 15 - custom_components/uk_bin_collection/sensor.py | 397 -- .../uk_bin_collection/strings.json | 57 - .../uk_bin_collection/tests/__init__.py | 0 .../uk_bin_collection/tests/common_utils.py | 52 - .../uk_bin_collection/tests/test_calendar.py | 542 --- .../tests/test_config_flow.py | 942 ----- .../uk_bin_collection/tests/test_sensor.py | 1343 ------- .../uk_bin_collection/translations/cy.json | 57 - .../uk_bin_collection/translations/en.json | 57 - .../uk_bin_collection/translations/ga.json | 57 - .../uk_bin_collection/translations/gd.json | 57 - .../uk_bin_collection/translations/pt.json | 57 - hacs.json | 5 - pyproject.toml | 75 - pytest.ini | 8 - uk_bin_collection/README.rst | 0 .../tests/council_feature_input_parity.py | 109 - .../tests/features/environment.py | 7 - .../features/validate_council_outputs.feature | 7 - uk_bin_collection/tests/input.json | 2130 ---------- uk_bin_collection/tests/output.schema | 41 - .../step_defs/step_helpers/file_handler.py | 46 - .../tests/step_defs/test_validate_council.py | 116 - uk_bin_collection/tests/test_collect_data.py | 72 - .../tests/test_common_functions.py | 465 --- uk_bin_collection/tests/test_conftest.py | 38 - uk_bin_collection/tests/test_get_data.py | 212 - .../uk_bin_collection/collect_data.py | 134 - uk_bin_collection/uk_bin_collection/common.py | 355 -- 
.../councils/AberdeenCityCouncil.py | 121 - .../councils/AberdeenshireCouncil.py | 52 - .../councils/AdurAndWorthingCouncils.py | 43 - .../councils/AntrimAndNewtonabbeyCouncil.py | 53 - .../councils/ArdsAndNorthDownCouncil.py | 103 - .../councils/ArgyllandButeCouncil.py | 67 - .../ArmaghBanbridgeCraigavonCouncil.py | 72 - .../uk_bin_collection/councils/ArunCouncil.py | 98 - .../councils/AshfieldDistrictCouncil.py | 105 - .../councils/AshfordBoroughCouncil.py | 124 - .../councils/AylesburyValeCouncil.py | 69 - .../uk_bin_collection/councils/BCPCouncil.py | 51 - .../councils/BaberghDistrictCouncil.py | 196 - .../councils/BarnetCouncil.py | 220 - .../councils/BarnsleyMBCouncil.py | 126 - .../councils/BasildonCouncil.py | 89 - .../councils/BasingstokeCouncil.py | 72 - .../BathAndNorthEastSomersetCouncil.py | 100 - .../councils/BedfordBoroughCouncil.py | 49 - .../councils/BedfordshireCouncil.py | 71 - .../councils/BelfastCityCouncil.py | 105 - .../councils/BexleyCouncil.py | 150 - .../councils/BirminghamCityCouncil.py | 133 - .../councils/BlabyDistrictCouncil.py | 59 - .../councils/BlackburnCouncil.py | 115 - .../councils/BoltonCouncil.py | 108 - .../councils/BracknellForestCouncil.py | 246 -- .../uk_bin_collection/councils/BradfordMDC.py | 133 - .../councils/BraintreeDistrictCouncil.py | 70 - .../councils/BrecklandCouncil.py | 55 - .../councils/BrightonandHoveCityCouncil.py | 137 - .../councils/BristolCityCouncil.py | 142 - .../councils/BromleyBoroughCouncil.py | 106 - .../councils/BromsgroveDistrictCouncil.py | 55 - .../councils/BroxbourneCouncil.py | 78 - .../councils/BroxtoweBoroughCouncil.py | 107 - .../councils/BuckinghamshireCouncil.py | 115 - .../councils/BurnleyBoroughCouncil.py | 88 - .../uk_bin_collection/councils/BuryCouncil.py | 65 - .../councils/CalderdaleCouncil.py | 122 - .../councils/CannockChaseDistrictCouncil.py | 65 - .../councils/CanterburyCityCouncil.py | 54 - .../councils/CardiffCouncil.py | 172 - .../councils/CarmarthenshireCountyCouncil.py | 55 
- .../councils/CastlepointDistrictCouncil.py | 96 - .../councils/CharnwoodBoroughCouncil.py | 54 - .../councils/ChelmsfordCityCouncil.py | 127 - .../councils/CheltenhamBoroughCouncil.py | 102 - .../councils/CheshireEastCouncil.py | 46 - .../councils/CheshireWestAndChesterCouncil.py | 105 - .../councils/ChesterfieldBoroughCouncil.py | 188 - .../councils/ChichesterDistrictCouncil.py | 110 - .../councils/ChorleyCouncil.py | 135 - .../councils/ColchesterCityCouncil.py | 103 - .../councils/ConwyCountyBorough.py | 27 - .../councils/CopelandBoroughCouncil.py | 93 - .../councils/CornwallCouncil.py | 71 - .../councils/CotswoldDistrictCouncil.py | 120 - .../councils/CoventryCityCouncil.py | 51 - .../councils/CrawleyBoroughCouncil.py | 112 - .../councils/CroydonCouncil.py | 286 -- .../councils/CumberlandAllerdaleCouncil.py | 93 - .../councils/DacorumBoroughCouncil.py | 102 - .../councils/DartfordBoroughCouncil.py | 42 - .../councils/DerbyCityCouncil.py | 55 - .../DerbyshireDalesDistrictCouncil.py | 100 - .../councils/DoncasterCouncil.py | 77 - .../councils/DorsetCouncil.py | 41 - .../councils/DoverDistrictCouncil.py | 46 - .../councils/DudleyCouncil.py | 81 - .../councils/DurhamCouncil.py | 49 - .../councils/EalingCouncil.py | 43 - .../councils/EastAyrshireCouncil.py | 49 - .../councils/EastCambridgeshireCouncil.py | 45 - .../uk_bin_collection/councils/EastDevonDC.py | 74 - .../councils/EastHertsCouncil.py | 117 - .../councils/EastLindseyDistrictCouncil.py | 106 - .../councils/EastRenfrewshireCouncil.py | 117 - .../councils/EastRidingCouncil.py | 141 - .../councils/EastSuffolkCouncil.py | 113 - .../councils/EastleighBoroughCouncil.py | 70 - .../councils/EdinburghCityCouncil.py | 98 - .../councils/ElmbridgeBoroughCouncil.py | 91 - .../councils/EnfieldCouncil.py | 154 - .../councils/EnvironmentFirst.py | 62 - .../councils/EppingForestDistrictCouncil.py | 51 - .../councils/ErewashBoroughCouncil.py | 61 - .../councils/ExeterCityCouncil.py | 52 - .../councils/FalkirkCouncil.py | 
54 - .../councils/FarehamBoroughCouncil.py | 68 - .../councils/FenlandDistrictCouncil.py | 65 - .../uk_bin_collection/councils/FifeCouncil.py | 68 - .../councils/FlintshireCountyCouncil.py | 60 - .../FolkstoneandHytheDistrictCouncil.py | 81 - .../councils/ForestOfDeanDistrictCouncil.py | 120 - .../councils/GatesheadCouncil.py | 116 - .../councils/GedlingBoroughCouncil.py | 60 - .../councils/GlasgowCityCouncil.py | 71 - .../councils/GloucesterCityCouncil.py | 128 - .../councils/GraveshamBoroughCouncil.py | 122 - .../councils/GuildfordCouncil.py | 150 - .../councils/HackneyCouncil.py | 85 - .../councils/HaltonBoroughCouncil.py | 147 - .../councils/HarboroughDistrictCouncil.py | 58 - .../councils/HaringeyCouncil.py | 67 - .../councils/HarrogateBoroughCouncil.py | 63 - .../councils/HartDistrictCouncil.py | 67 - .../councils/HartlepoolBoroughCouncil.py | 83 - .../councils/HertsmereBoroughCouncil.py | 161 - .../councils/HighPeakCouncil.py | 133 - .../councils/HighlandCouncil.py | 88 - .../HinckleyandBosworthBoroughCouncil.py | 71 - .../councils/HounslowCouncil.py | 122 - .../councils/HullCityCouncil.py | 48 - .../councils/HuntingdonDistrictCouncil.py | 44 - .../councils/IslingtonCouncil.py | 39 - .../councils/KingsLynnandWestNorfolkBC.py | 59 - .../councils/KingstonUponThamesCouncil.py | 86 - .../councils/KirkleesCouncil.py | 132 - .../councils/KnowsleyMBCouncil.py | 140 - .../councils/LancasterCityCouncil.py | 73 - .../councils/LeedsCityCouncil.py | 136 - .../councils/LichfieldDistrictCouncil.py | 60 - .../councils/LincolnCouncil.py | 96 - .../councils/LisburnCastlereaghCityCouncil.py | 102 - .../councils/LiverpoolCityCouncil.py | 65 - .../councils/LondonBoroughEaling.py | 50 - .../councils/LondonBoroughHarrow.py | 46 - .../councils/LondonBoroughHavering.py | 75 - .../councils/LondonBoroughHounslow.py | 82 - .../councils/LondonBoroughLambeth.py | 54 - .../councils/LondonBoroughLewisham.py | 140 - .../councils/LondonBoroughRedbridge.py | 161 - 
.../councils/LondonBoroughSutton.py | 75 - .../councils/LutonBoroughCouncil.py | 81 - .../councils/MaldonDistrictCouncil.py | 52 - .../councils/MalvernHillsDC.py | 57 - .../councils/ManchesterCityCouncil.py | 90 - .../councils/MansfieldDistrictCouncil.py | 38 - .../councils/MertonCouncil.py | 58 - .../MidAndEastAntrimBoroughCouncil.py | 129 - .../councils/MidDevonCouncil.py | 93 - .../councils/MidSuffolkDistrictCouncil.py | 196 - .../councils/MidSussexDistrictCouncil.py | 168 - .../councils/MidlothianCouncil.py | 147 - .../councils/MiltonKeynesCityCouncil.py | 73 - .../councils/MoleValleyDistrictCouncil.py | 135 - .../councils/MonmouthshireCountyCouncil.py | 70 - .../councils/MorayCouncil.py | 65 - .../councils/NeathPortTalbotCouncil.py | 147 - .../councils/NewForestCouncil.py | 146 - .../councils/NewarkAndSherwoodDC.py | 52 - .../councils/NewcastleCityCouncil.py | 57 - .../councils/NewcastleUnderLymeCouncil.py | 66 - .../councils/NewhamCouncil.py | 58 - .../councils/NewportCityCouncil.py | 204 - .../councils/NorthAyrshireCouncil.py | 49 - .../NorthEastDerbyshireDistrictCouncil.py | 115 - .../councils/NorthEastLincs.py | 53 - .../NorthHertfordshireDistrictCouncil.py | 93 - .../councils/NorthKestevenDistrictCouncil.py | 45 - .../councils/NorthLanarkshireCouncil.py | 46 - .../councils/NorthLincolnshireCouncil.py | 58 - .../councils/NorthNorfolkDistrictCouncil.py | 108 - .../councils/NorthNorthamptonshireCouncil.py | 72 - .../councils/NorthSomersetCouncil.py | 76 - .../councils/NorthTynesideCouncil.py | 145 - .../councils/NorthWestLeicestershire.py | 114 - .../councils/NorthYorkshire.py | 58 - .../councils/NorthumberlandCouncil.py | 123 - .../councils/NorwichCityCouncil.py | 75 - .../councils/NottinghamCityCouncil.py | 37 - .../NuneatonBedworthBoroughCouncil.py | 931 ----- .../councils/OldhamCouncil.py | 51 - .../councils/OxfordCityCouncil.py | 63 - .../councils/PerthAndKinrossCouncil.py | 95 - .../councils/PlymouthCouncil.py | 81 - 
.../councils/PortsmouthCityCouncil.py | 131 - .../councils/PowysCouncil.py | 138 - .../councils/PrestonCityCouncil.py | 97 - .../councils/ReadingBoroughCouncil.py | 30 - .../ReigateAndBansteadBoroughCouncil.py | 81 - .../councils/RenfrewshireCouncil.py | 135 - .../councils/RhonddaCynonTaffCouncil.py | 80 - .../councils/RochdaleCouncil.py | 69 - .../councils/RochfordCouncil.py | 60 - .../councils/RotherDistrictCouncil.py | 84 - .../councils/RotherhamCouncil.py | 53 - .../councils/RoyalBoroughofGreenwich.py | 113 - .../councils/RugbyBoroughCouncil.py | 93 - .../councils/RushcliffeBoroughCouncil.py | 100 - .../councils/RushmoorCouncil.py | 81 - .../councils/SalfordCityCouncil.py | 70 - .../councils/SandwellBoroughCouncil.py | 87 - .../councils/SeftonCouncil.py | 67 - .../councils/SevenoaksDistrictCouncil.py | 106 - .../councils/SheffieldCityCouncil.py | 54 - .../councils/ShropshireCouncil.py | 45 - .../councils/SolihullCouncil.py | 48 - .../councils/SomersetCouncil.py | 204 - .../councils/SouthAyrshireCouncil.py | 73 - .../councils/SouthCambridgeshireCouncil.py | 66 - .../SouthDerbyshireDistrictCouncil.py | 60 - .../councils/SouthGloucestershireCouncil.py | 74 - .../councils/SouthHamsDistrictCouncil.py | 90 - .../councils/SouthKestevenDistrictCouncil.py | 164 - .../councils/SouthLanarkshireCouncil.py | 78 - .../councils/SouthNorfolkCouncil.py | 104 - .../councils/SouthOxfordshireCouncil.py | 93 - .../councils/SouthRibbleCouncil.py | 83 - .../SouthStaffordshireDistrictCouncil.py | 99 - .../councils/SouthTynesideCouncil.py | 98 - .../councils/SouthwarkCouncil.py | 140 - .../StAlbansCityAndDistrictCouncil.py | 43 - .../uk_bin_collection/councils/StHelensBC.py | 122 - .../councils/StaffordBoroughCouncil.py | 69 - .../StaffordshireMoorlandsDistrictCouncil.py | 112 - .../councils/StevenageBoroughCouncil.py | 101 - .../councils/StockportBoroughCouncil.py | 39 - .../councils/StocktonOnTeesCouncil.py | 159 - .../councils/StokeOnTrentCityCouncil.py | 79 - 
.../councils/StratfordUponAvonCouncil.py | 94 - .../councils/StroudDistrictCouncil.py | 94 - .../councils/SunderlandCityCouncil.py | 100 - .../councils/SwaleBoroughCouncil.py | 90 - .../councils/SwanseaCouncil.py | 70 - .../councils/SwindonBoroughCouncil.py | 56 - .../councils/TamesideMBCouncil.py | 62 - .../councils/TandridgeDistrictCouncil.py | 60 - .../councils/TeignbridgeCouncil.py | 59 - .../councils/TelfordAndWrekinCouncil.py | 50 - .../councils/TendringDistrictCouncil.py | 110 - .../councils/TestValleyBoroughCouncil.py | 204 - .../councils/ThanetDistrictCouncil.py | 51 - .../councils/ThreeRiversDistrictCouncil.py | 140 - .../councils/ThurrockCouncil.py | 93 - .../councils/TonbridgeAndMallingBC.py | 101 - .../councils/TorbayCouncil.py | 51 - .../councils/TorridgeDistrictCouncil.py | 154 - .../councils/TunbridgeWellsCouncil.py | 71 - .../councils/UttlesfordDistrictCouncil.py | 117 - .../councils/ValeofGlamorganCouncil.py | 119 - .../councils/ValeofWhiteHorseCouncil.py | 100 - .../councils/WakefieldCityCouncil.py | 103 - .../councils/WalsallCouncil.py | 56 - .../councils/WalthamForest.py | 127 - .../councils/WarringtonBoroughCouncil.py | 50 - .../councils/WarwickDistrictCouncil.py | 53 - .../councils/WatfordBoroughCouncil.py | 72 - .../councils/WaverleyBoroughCouncil.py | 120 - .../councils/WealdenDistrictCouncil.py | 86 - .../councils/WelhatCouncil.py | 73 - .../councils/WestBerkshireCouncil.py | 142 - .../councils/WestLancashireBoroughCouncil.py | 114 - .../councils/WestLindseyDistrictCouncil.py | 118 - .../councils/WestLothianCouncil.py | 103 - .../councils/WestMorlandAndFurness.py | 65 - .../councils/WestNorthamptonshireCouncil.py | 36 - .../WestOxfordshireDistrictCouncil.py | 120 - .../councils/WestSuffolkCouncil.py | 76 - .../councils/WiganBoroughCouncil.py | 98 - .../councils/WiltshireCouncil.py | 135 - .../councils/WinchesterCityCouncil.py | 114 - .../councils/WindsorAndMaidenheadCouncil.py | 50 - .../councils/WirralCouncil.py | 64 - 
.../councils/WokingBoroughCouncil.py | 116 - .../councils/WokinghamBoroughCouncil.py | 99 - .../councils/WolverhamptonCityCouncil.py | 57 - .../councils/WorcesterCityCouncil.py | 58 - .../councils/WychavonDistrictCouncil.py | 155 - .../uk_bin_collection/councils/WyreCouncil.py | 89 - .../councils/WyreForestDistrictCouncil.py | 65 - .../uk_bin_collection/councils/YorkCouncil.py | 45 - .../councilclasstemplate.py | 32 - .../uk_bin_collection/create_new_council.py | 51 - .../uk_bin_collection/get_bin_data.py | 146 - uk_bin_collection_api_server/Dockerfile | 22 - .../docker-compose.yml | 14 - uk_bin_collection_api_server/requirements.txt | 5 - uk_bin_collection_api_server/server.py | 53 - uk_bin_collection_api_server/swagger.yaml | 104 - wiki/Councils.md | 3572 ----------------- wiki/Home.md | 3 - wiki/Setup.md | 8 - wiki/generate_wiki.py | 83 - 330 files changed, 19 insertions(+), 41471 deletions(-) delete mode 100644 CHANGELOG.md delete mode 100644 CODE_OF_CONDUCT.md delete mode 100644 CONTRIBUTING.md delete mode 100644 LICENSE delete mode 100644 Makefile delete mode 100644 README.md delete mode 100644 SwaggerUI.png delete mode 100644 behave.ini delete mode 100644 conftest.py delete mode 100644 custom_components/__init__.py delete mode 100644 custom_components/uk_bin_collection/__init__.py delete mode 100644 custom_components/uk_bin_collection/calendar.py delete mode 100644 custom_components/uk_bin_collection/config_flow.py delete mode 100644 custom_components/uk_bin_collection/const.py delete mode 100644 custom_components/uk_bin_collection/manifest.json delete mode 100644 custom_components/uk_bin_collection/sensor.py delete mode 100644 custom_components/uk_bin_collection/strings.json delete mode 100644 custom_components/uk_bin_collection/tests/__init__.py delete mode 100644 custom_components/uk_bin_collection/tests/common_utils.py delete mode 100644 custom_components/uk_bin_collection/tests/test_calendar.py delete mode 100644 
custom_components/uk_bin_collection/tests/test_config_flow.py delete mode 100644 custom_components/uk_bin_collection/tests/test_sensor.py delete mode 100644 custom_components/uk_bin_collection/translations/cy.json delete mode 100644 custom_components/uk_bin_collection/translations/en.json delete mode 100644 custom_components/uk_bin_collection/translations/ga.json delete mode 100644 custom_components/uk_bin_collection/translations/gd.json delete mode 100644 custom_components/uk_bin_collection/translations/pt.json delete mode 100644 hacs.json delete mode 100644 pyproject.toml delete mode 100644 pytest.ini delete mode 100644 uk_bin_collection/README.rst delete mode 100644 uk_bin_collection/tests/council_feature_input_parity.py delete mode 100644 uk_bin_collection/tests/features/environment.py delete mode 100644 uk_bin_collection/tests/features/validate_council_outputs.feature delete mode 100755 uk_bin_collection/tests/input.json delete mode 100644 uk_bin_collection/tests/output.schema delete mode 100644 uk_bin_collection/tests/step_defs/step_helpers/file_handler.py delete mode 100644 uk_bin_collection/tests/step_defs/test_validate_council.py delete mode 100644 uk_bin_collection/tests/test_collect_data.py delete mode 100644 uk_bin_collection/tests/test_common_functions.py delete mode 100644 uk_bin_collection/tests/test_conftest.py delete mode 100644 uk_bin_collection/tests/test_get_data.py delete mode 100755 uk_bin_collection/uk_bin_collection/collect_data.py delete mode 100644 uk_bin_collection/uk_bin_collection/common.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/AntrimAndNewtonabbeyCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py 
delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BaberghDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BasingstokeCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BathAndNorthEastSomersetCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BedfordBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BedfordshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BelfastCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BirminghamCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BlabyDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BracknellForestCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py delete 
mode 100644 uk_bin_collection/uk_bin_collection/councils/BraintreeDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BrecklandCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BrightonandHoveCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BristolCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BromleyBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BromsgroveDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BroxtoweBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BuckinghamshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BurnleyBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/BuryCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CalderdaleCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CannockChaseDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CanterburyCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CardiffCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CarmarthenshireCountyCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CastlepointDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ChelmsfordCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py delete mode 100644 
uk_bin_collection/uk_bin_collection/councils/ChesterfieldBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ChichesterDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ChorleyCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ColchesterCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ConwyCountyBorough.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CopelandBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CornwallCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CotswoldDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CoventryCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CrawleyBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/CumberlandAllerdaleCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/DacorumBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/DartfordBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/DerbyCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/DerbyshireDalesDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/DoncasterCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/DorsetCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/DoverDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/DudleyCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/DurhamCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EalingCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EastAyrshireCouncil.py delete mode 
100644 uk_bin_collection/uk_bin_collection/councils/EastCambridgeshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EastDevonDC.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EastHertsCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EastLindseyDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EastRenfrewshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EastRidingCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EastSuffolkCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EastleighBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EdinburghCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ElmbridgeBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EnfieldCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EnvironmentFirst.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/EppingForestDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ErewashBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ExeterCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/FalkirkCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/FarehamBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/FenlandDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/FifeCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/FlintshireCountyCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/FolkstoneandHytheDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ForestOfDeanDistrictCouncil.py delete mode 100644 
uk_bin_collection/uk_bin_collection/councils/GatesheadCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/GedlingBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/GlasgowCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/GloucesterCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/GraveshamBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/GuildfordCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HackneyCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HaltonBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HarboroughDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HaringeyCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HarrogateBoroughCouncil.py delete mode 100755 uk_bin_collection/uk_bin_collection/councils/HartDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HartlepoolBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HertsmereBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HighPeakCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HighlandCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HinckleyandBosworthBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HounslowCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HullCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/HuntingdonDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/IslingtonCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/KingsLynnandWestNorfolkBC.py delete mode 100644 
uk_bin_collection/uk_bin_collection/councils/KingstonUponThamesCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/KirkleesCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/KnowsleyMBCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LancasterCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LeedsCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LichfieldDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LincolnCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LisburnCastlereaghCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LiverpoolCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LondonBoroughEaling.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LondonBoroughHarrow.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LondonBoroughHavering.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LondonBoroughHounslow.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LondonBoroughLambeth.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LondonBoroughLewisham.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LondonBoroughRedbridge.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LondonBoroughSutton.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/LutonBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MaldonDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MalvernHillsDC.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ManchesterCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MansfieldDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MertonCouncil.py 
delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MidAndEastAntrimBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MidDevonCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MidSuffolkDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MidSussexDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MidlothianCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MiltonKeynesCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MoleValleyDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MonmouthshireCountyCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/MorayCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NeathPortTalbotCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NewForestCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NewarkAndSherwoodDC.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NewcastleCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NewcastleUnderLymeCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NewhamCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NewportCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthAyrshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthEastDerbyshireDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthEastLincs.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthHertfordshireDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthKestevenDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthLanarkshireCouncil.py delete mode 100644 
uk_bin_collection/uk_bin_collection/councils/NorthLincolnshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthNorfolkDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthNorthamptonshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthSomersetCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthTynesideCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthWestLeicestershire.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthYorkshire.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorthumberlandCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NorwichCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NottinghamCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/NuneatonBedworthBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/OldhamCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/OxfordCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/PerthAndKinrossCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/PlymouthCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/PortsmouthCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/PowysCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/PrestonCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ReadingBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ReigateAndBansteadBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/RenfrewshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/RhonddaCynonTaffCouncil.py delete mode 100644 
uk_bin_collection/uk_bin_collection/councils/RochdaleCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/RochfordCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/RotherDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/RotherhamCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/RoyalBoroughofGreenwich.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/RugbyBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/RushcliffeBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/RushmoorCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SalfordCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SandwellBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SeftonCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SevenoaksDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SheffieldCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ShropshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SolihullCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SomersetCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthAyrshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthCambridgeshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthDerbyshireDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthGloucestershireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthHamsDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthKestevenDistrictCouncil.py delete mode 100644 
uk_bin_collection/uk_bin_collection/councils/SouthLanarkshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthNorfolkCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthOxfordshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthRibbleCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthStaffordshireDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthTynesideCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SouthwarkCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/StAlbansCityAndDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/StHelensBC.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/StaffordBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/StaffordshireMoorlandsDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/StevenageBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/StockportBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/StocktonOnTeesCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/StokeOnTrentCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/StratfordUponAvonCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/StroudDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SunderlandCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SwaleBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SwanseaCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/SwindonBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/TamesideMBCouncil.py delete mode 100644 
uk_bin_collection/uk_bin_collection/councils/TandridgeDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/TeignbridgeCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/TelfordAndWrekinCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/TendringDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/TestValleyBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ThanetDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ThreeRiversDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ThurrockCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/TonbridgeAndMallingBC.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/TorbayCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/TorridgeDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/TunbridgeWellsCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/UttlesfordDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ValeofGlamorganCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/ValeofWhiteHorseCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WakefieldCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WalsallCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WalthamForest.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WarringtonBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WarwickDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WatfordBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WaverleyBoroughCouncil.py delete mode 100644 
uk_bin_collection/uk_bin_collection/councils/WealdenDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WelhatCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WestBerkshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WestLancashireBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WestLindseyDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WestLothianCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WestMorlandAndFurness.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WestNorthamptonshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WestOxfordshireDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WestSuffolkCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WiganBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WiltshireCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WinchesterCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WindsorAndMaidenheadCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WirralCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WokingBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WokinghamBoroughCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WolverhamptonCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WorcesterCityCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WychavonDistrictCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WyreCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/WyreForestDistrictCouncil.py delete mode 100644 
uk_bin_collection/uk_bin_collection/councils/YorkCouncil.py delete mode 100644 uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py delete mode 100644 uk_bin_collection/uk_bin_collection/create_new_council.py delete mode 100644 uk_bin_collection/uk_bin_collection/get_bin_data.py delete mode 100644 uk_bin_collection_api_server/Dockerfile delete mode 100644 uk_bin_collection_api_server/docker-compose.yml delete mode 100644 uk_bin_collection_api_server/requirements.txt delete mode 100644 uk_bin_collection_api_server/server.py delete mode 100644 uk_bin_collection_api_server/swagger.yaml delete mode 100644 wiki/Councils.md delete mode 100644 wiki/Home.md delete mode 100644 wiki/Setup.md delete mode 100644 wiki/generate_wiki.py diff --git a/.github/workflows/behave.yml b/.github/workflows/behave.yml index 289160c74f..8a345a6a2e 100644 --- a/.github/workflows/behave.yml +++ b/.github/workflows/behave.yml @@ -164,117 +164,61 @@ jobs: steps: - uses: actions/checkout@v4 - # Fetch existing gh-pages to preserve history if on master branch and it's a push or schedule - - name: Get Existing History - if: github.ref == 'refs/heads/master' && (github.event_name == 'push' || github.event_name == 'schedule') + # Fetch Allure history only for the full report + - name: Get Allure history - Full Report + if: github.event_name == 'schedule' || github.event_name == 'push' uses: actions/checkout@v4 + continue-on-error: true with: ref: gh-pages - path: allure-history - continue-on-error: true - - # If PR doesn't need existing history, we can skip fetching. If you do want partial history across PRs, - # you could add another condition for pull_request here. 
+ path: gh-pages/allure-full-history - # Generate Full Run Allure Reports (push/schedule on master) + # Generate Allure Reports (Full) - name: Allure report action for Full Run - if: (github.event_name == 'push' || github.event_name == 'schedule') && github.ref == 'refs/heads/master' uses: simple-elf/allure-report-action@master + if: github.event_name == 'schedule' || github.event_name == 'push' with: allure_results: build/${{ matrix.python-version }}/allure-results - allure_history: allure-history/full + subfolder: ${{ matrix.python-version }} + allure_history: gh-pages/allure-full-history keep_reports: 20 - # Generate Partial Run Allure Reports (pull_request) - - name: Allure report action for Partial Run - if: github.event_name == 'pull_request' - uses: simple-elf/allure-report-action@master - with: - allure_results: build/${{ matrix.python-version }}/allure-results - allure_history: allure-history/partial - keep_reports: 20 - - # Archive Full Reports + # Archive the Full Report - name: Tar full report - if: (github.event_name == 'push' || github.event_name == 'schedule') && github.ref == 'refs/heads/master' - run: tar -cvf allure_full_history_${{ matrix.python-version }}.tar allure-history/full/${{ matrix.python-version }} - - # Archive Partial Reports - - name: Tar partial report - if: github.event_name == 'pull_request' - run: tar -cvf allure_partial_history_${{ matrix.python-version }}.tar allure-history/partial/${{ matrix.python-version }} + if: github.event_name == 'schedule' || github.event_name == 'push' + run: tar -cvf allure_full_history_${{ matrix.python-version }}.tar gh-pages/allure-full-history/${{ matrix.python-version }} - # Upload artifacts + # Upload the Full Report artifact - name: Upload artifact for Full Report - if: (github.event_name == 'push' || github.event_name == 'schedule') && github.ref == 'refs/heads/master' uses: actions/upload-artifact@v4 + if: github.event_name == 'schedule' || github.event_name == 'push' with: name: 
allure_full_history_${{ matrix.python-version }} path: allure_full_history_${{ matrix.python-version }}.tar - - name: Upload artifact for Partial Report - if: github.event_name == 'pull_request' - uses: actions/upload-artifact@v4 - with: - name: allure_partial_history_${{ matrix.python-version }} - path: allure_partial_history_${{ matrix.python-version }}.tar - deploy: name: Deploy Reports runs-on: ubuntu-latest needs: report if: github.ref == 'refs/heads/master' steps: - # Download Full Artifacts (push/schedule) + # Download Full Artifacts - uses: actions/download-artifact@v4 name: Download Full Artifacts - if: github.event_name == 'push' || github.event_name == 'schedule' with: name: allure_full_history_3.12 path: allure-history/tars/full - # Download Partial Artifacts (pull_request) - - uses: actions/download-artifact@v4 - name: Download Partial Artifacts - if: github.event_name == 'pull_request' - with: - name: allure_partial_history_3.12 - path: allure-history/tars/partial - - # Create directories if needed - - name: Create Full / Partial dirs - run: | - mkdir -p allure-history/full allure-history/partial - - # Untar Full Reports - name: Untar Full Reports - if: github.event_name == 'push' || github.event_name == 'schedule' - run: | - shopt -s nullglob - for i in allure-history/tars/full/*.tar; do tar -xvf "$i" -C allure-history/full; done - - # Untar Partial Reports - - name: Untar Partial Reports - if: github.event_name == 'pull_request' - run: | - shopt -s nullglob - for i in allure-history/tars/partial/*.tar; do tar -xvf "$i" -C allure-history/partial; done + run: for i in allure-history/tars/full/*.tar; do tar -xvf "$i" -C allure-history/full; done - # Remove Tar Reports - name: Remove Tar Reports run: rm -rf allure-history/tars - # Ensure not empty - - name: Ensure not empty - run: | - if [ ! 
"$(ls -A allure-history)" ]; then - touch allure-history/.placeholder - fi - - # Deploy everything to gh-pages - - name: Deploy + # Deploy the Full Report to GH Pages + - name: Deploy Full Report uses: peaceiris/actions-gh-pages@v4 with: personal_token: ${{ secrets.GITHUB_TOKEN }} publish_branch: gh-pages - publish_dir: allure-history + publish_dir: allure-history/full diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 5bc0593b37..0000000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,2011 +0,0 @@ -======= -## 0.124.1 (2025-01-03) - -### Fix - -- avoid crashing on unexpected string value - -## 0.124.0 (2025-01-02) - -### Feat - -- Hart District Council - -## 0.123.2 (2024-12-19) - -### Fix - -- Update behave.yml - -## 0.123.1 (2024-12-18) - -### Fix - -- Update AberdeenCityCouncil.py -- Update behave.yml - -## 0.123.0 (2024-12-17) - -## 0.122.0 (2024-12-04) - -### Feat - -- Adding Monmouthshire County Council -- Adding Hinckley and Bosworth Borough Council - -### Fix - -- Glasgow City Council -- Merton Council -- Blaby District Council -- Warwick District Council -- Blackburn Council -- Carmarthenshire County Council -- High Peak Council -- CarmarthenshireCountyCouncil - -## 0.121.1 (2024-12-03) - -### Fix - -- London Borough of Lewisham to have more reliable parsing of dates - -## 0.121.0 (2024-11-24) - -### Feat - -- Royal Borough of Greenwich -- Adding London Borough of Lewisham -- Adding Hackney Council -- Adding Sandwell Borough Council -- Adding Moray Council -- Adding Kings Lynn and West Norfolk Borough Council -- Adding Wyre Forest District Council -- Adding Folkstone and Hythe District Council -- Adding Cheltenham Borough Council -- Adding Thurrock Council - -### Fix - -- West Northamptonshire Council -- East Ayrshire Council -- Cotswold District Council - -## 0.120.0 (2024-11-20) - -### Feat - -- Adding Hartlepool Borough Council -- Adding Newcastle Under Lyme Council -- Adding London Borough of Havering -- Add Garden collection to 
EnvironmentFirst -- Adding Cumberland Council (Allerdale District) -- Adding North Hertfordshire District Council - -### Fix - -- #844 -- #778 -- #769 -- #1025 -- Mid Siffolk and Babergh Garden Collection Day -- #1026 -This will require the use of a DAY to be added to the UPRN field -- #1029 -- #1028 - -## 0.119.0 (2024-11-20) - -### Feat - -- Adding Braintree District Council -- Adding Burnley Borough Council -- Adding Exeter City Council -- Adding Edinburgh City Council -- Adding Aberdeen City Council - -### Fix - -- #699 -- #1015 -- #1017 -- #894 -- #1019 - -## 0.118.0 (2024-11-15) - -### Feat - -- Adding Wolverhampton City Council -- Adding Stevenage Borough Council -- Adding Thanet District Council -- Adding Copeland Borough Council -- Adding South Hams District Council -- Adding Rother District Council - -### Fix - -- #966 -- #989 -- #1004 -- #1006 -- #1008 -- Rother District Council -- #1009 -- CrawleyBoroughCouncil -- #1005 -- Adding Garden collection to Babergh and MidSuffolk Council -- #995 - -## 0.117.0 (2024-11-13) - -### Feat - -- Adding South Staffordshire District Council fix: #885 - -## 0.116.0 (2024-11-12) - -### Feat - -- Adding Ashfield District Council -- Adding Gravesham Borough Council -- Adding Argyll and Bute Council - -### Fix - -- #579 -- #991 -- #692 -- CheshireWestAndChesterCouncil -- #993 -- Milton Keynes -- #702 -- Adding Babergh and Mid Suffolk District Councils -- #868 -fix: #919 -- Adding Derby City Council -- #987 - -## 0.115.0 (2024-11-11) - -### Feat - -- Adding Warrington Borough Council -- Adding Antrim And Newtonabbey Council -- Adding Hertsmere Borough Council -- Adding West Lancashire Borough Council -- Broxbourne Council - -### Fix - -- #695 -- #969 -- #776 -- #980 -- #982 -- Bradford MDC -- #984 - -## 0.114.6 (2024-11-09) - -### Fix - -- NBBC Date Fix - -## 0.114.5 (2024-11-08) - -### Fix - -- migration logging and debugging - -## 0.114.4 (2024-11-08) - -### Fix - -- migration not working -- migration not working - -## 
0.114.3 (2024-11-08) - -### Fix - -- fix manifest in custom component - -## 0.114.2 (2024-11-08) - -### Fix - -- #975 adding routine to handle migration error -- #975 adding routine to handle migration error - -## 0.114.1 (2024-11-08) - -### Fix - -- Update manifest.json - -## 0.114.0 (2024-11-07) - -### Feat - -- Nuneaton and Bedworth Borough Council - -## 0.113.0 (2024-11-07) - -### Feat - -- adding calendar for Bins in Custom Component - -## 0.112.1 (2024-11-07) - -### Fix - -- #767 BREAKING CHANGE - READD your sensors / config - -## 0.112.0 (2024-11-06) - -### Feat - -- pytest fixes -- pytest fixes -- pytest fixes -- pytest fixes -- pytest fixes -- Adding Powys Council -- Adding Worcester City Council -- Adding Ards and North Down Council -- Adding East Herts Council -- Adding Ashford Borough Council -- Adding Stockton On Tees Council -- Adding Fife Council -- Adding Flintshire County Council -- Adding Teignbridge Council -- Adding Harborough District Council -- Adding Watford Borough Council -- Adding Coventry City Council - -### Fix - -- CC testing and add Chesterfield -- CC testing and add Chesterfield -- CC testing and add Chesterfield -- pytest-homeassistant-custom-component -- Pydandic version -- Pydandic version -- WestOxfordshireDistrictCouncil -- South Norfolk Council -- ForestOfDeanDistrictCouncil -- Croydon Council -- South Kesteven District Council -- #647 -- #630 -- #623 -- #586 -- #578 -- #389 -- #930 -- #933 -- #750 -- Refactor Midlothian Council scraper to use house number and postcode -- West Berkshire Council -- Southwark Council -- #580 -- #888 -- #902 -- #607 - -## 0.111.0 (2024-11-06) - -### Fix - -- Add London Borough of Sutton -- #944 -- Add Mid Devon Council -- #945 -- Adding Oxford City Council -- #962 -- Tunbridge Wells / Lincoln -- #963 -- Glasgow City Council - -## 0.110.0 (2024-11-04) - -### Fix - -- Adding Blaby District Council -- #904 -- Adding Sefton Council -- #770 -- Adding Bromsgrove District Council -- #893 -- East Lindsey 
District Council -- #957 -- Adding Carmarthenshire County Council -- #892 -fix: #710 -- Adding East Ayrshire Council -- #955 - -## 0.109.2 (2024-11-03) - -### Fix - -- CC testing and add Chesterfield - -## 0.109.1 (2024-11-03) - -### Fix - -- CC testing and add Chesterfield -- CC testing and add Chesterfield - -## 0.109.0 (2024-11-02) - -### Feat - -- Adding Cotswold District Council -- Adding Breckland Council - -### Fix - -- St Helens Borough Council -- #753 -- NewarkAndSherwoodDC -- #941 -- #658 -- #656 - -## 0.108.2 (2024-11-01) - -### Fix - -- pytest-homeassistant-custom-component - -## 0.108.1 (2024-11-01) - -### Fix - -- Pydandic version -- Pydandic version - -## 0.108.0 (2024-11-01) - -### Feat - -- pytest fixes -- pytest fixes -- pytest fixes -- pytest fixes -- pytest fixes -- pytest fixes -- Python 3.12 only and CustomComp. Unit testing - -## 0.107.0 (2024-10-31) - -### Feat - -- Adding Powys Council -- Adding Worcester City Council -- Adding Ards and North Down Council -- Adding East Herts Council -- Adding Ashford Borough Council - -### Fix - -- WestOxfordshireDistrictCouncil -- South Norfolk Council -- ForestOfDeanDistrictCouncil -- Croydon Council -- South Kesteven District Council -- #647 -- #630 -- #623 -- #586 -- #578 -- #389 - -## 0.106.0 (2024-10-28) - -### Feat - -- Adding Stockton On Tees Council -- Adding Fife Council -- Adding Flintshire County Council - -### Fix - -- #930 -- #933 -- #750 -- West Berkshire Council -- Southwark Council - -## 0.105.1 (2024-10-24) - -### Fix - -- Refactor Midlothian Council scraper to use house number and postcode - -## 0.105.0 (2024-10-21) - -### Feat - -- Adding Teignbridge Council -- Adding Harborough District Council -- Adding Watford Borough Council -- Adding Coventry City Council - -### Fix - -- #580 -- #888 -- #902 -- #607 - -## 0.104.0 (2024-10-20) - -### Feat - -- Adding Luton Borough Council -- Adding West Oxfordshire District Council -- Adding Aberdeenshire Council -- Adding Canterbury City Council -- 
Adding Swindon Borough Council - -### Fix - -- #697 -- #694 -- #659 -- #590 -- #900 - -## 0.103.0 (2024-10-20) - -### Feat - -- Adding RAW JSON Sensor - -### Fix - -- Black formatting -- Black formatting - -## 0.102.0 (2024-10-20) - -### Feat - -- Moving from Attributes to Sensors -- Moving from Attributes to Sensors - -## 0.101.0 (2024-10-20) - -### Feat - -- Add Midlothgian Council - -## 0.100.0 (2024-10-18) - -### Feat - -- Adding Dudley Council -- Adding South Ribble Council -- Plymouth Council -- Adding Norwich City Council - -### Fix - -- #744 -- #671 -- #566 -- #749 - -## 0.99.1 (2024-10-16) - -### Fix - -- #792 adding web_driver option to Wokingham Council - -## 0.99.0 (2024-10-16) - -### Feat - -- Adding Lincoln Council -- Adding Tunbridge Wells Council -- Adding Perth and Kinross Council - -### Fix - -- Update wiki -- #748 -- #598 -- #572 - -## 0.98.5 (2024-10-15) - -### Fix - -- Swale Borough Council -- HaltonBoroughCouncil -- Barnet Council -- WestBerkshireCouncil - -## 0.98.4 (2024-10-14) - -### Fix - -- West Suffolk Council -- Vale of White Horse Council -- Uttlesford District Council -- Neath Port Talbot Council -- Merton Council -- Manchester City Council -- Glasgow City Council -- BradfordMDC - -## 0.98.3 (2024-10-13) - -### Fix - -- EastRiding - -## 0.98.2 (2024-10-13) - -### Fix - -- MoleValley - -## 0.98.1 (2024-10-13) - -### Fix - -- Barnet and Bexley - -## 0.98.0 (2024-10-13) - -### Feat - -- Adding Wirral Council -- Adding Lichfield District Council -- Adding West Morland And Furness -- Adding Walsall Council -- Adding Armagh, Banbridge and Craigavon Council - -### Fix - -- #602 -- #830 -- #870 -- #873 -- #877 - -## 0.97.1 (2024-10-10) - -### Fix - -- NottinghamCityCouncil -- #875 - -## 0.97.0 (2024-10-10) - -### Feat - -- Adding Falkirk Council -- Adding London Borough Harrow -- Adding North Ayrshire Council - -### Fix - -- #761 -- #871 -- #869 - -## 0.96.0 (2024-10-10) - -### Feat - -- Adding Highland Council -- Add Elmbridge Borough 
Council -- Adding Southwark Council -- South Derbyshire District Council - -### Fix - -- #780 -- #845 -fix: #754 -- #835 -- #842 - -## 0.95.0 (2024-10-09) - -### Feat - -- Adding London Borough of Ealing - -## 0.94.0 (2024-10-09) - -### Feat - -- Adding London Borough of Lambeth -- Adding Dacorum Borough Council - -### Fix - -- Dacorum Borough Council -- East Devon DC - -## 0.93.0 (2024-10-08) - -### Feat - -- Update CheshireEastCouncil.py - -## 0.92.0 (2024-10-08) - -### Feat - -- Update CheshireEastCouncil.py -- Update README.md -- Adding Wokingham Borough Council -- Adding Winchester City Council -- Adding Basildon Council -- Adding Colchester City Council - -### Fix - -- RochfordCouncil -- Neath Port Talbot Council -- Buckinghamshire Council -- #639 -fix: #812 - -## 0.91.2 (2024-10-05) - -### Fix - -- Windsor and Maidenhead Council - -## 0.91.1 (2024-10-04) - -### Fix - -- Update DorsetCouncil.py -- #829 -- Update GatesheadCouncil.py -- #822 - -## 0.91.0 (2024-10-03) - -### Feat - -- Adding East Renfrewshire Council - -## 0.90.0 (2024-10-03) - -## 0.89.1 (2024-10-02) - -### Fix - -- High Peak have changed their cookie dialog Seems to be safe to ignore it now. - -## 0.89.0 (2024-09-27) - -### Feat - -- Update CheshireEastCouncil.py -- Update README.md - -### Fix - -- release to be non pre release - -## 0.88.0 (2024-09-16) - -### Feat - -- Add Ealing Council - -### Fix - -- Update README.md - -## 0.87.0 (2024-09-10) - -### Feat - -- Add IslingtonCouncil - -## 0.86.2 (2024-09-09) - -### Fix - -- #565 Gloucester city council driver - -## 0.86.1 (2024-09-09) - -### Fix - -- #773 Wakefield - -## 0.86.0 (2024-09-06) - -### Feat - -- added Rotherham Council - -## 0.85.7 (2024-09-05) - -### Fix - -- more unit tests -- more unit tests -- Chorley - -## 0.85.6 (2024-09-03) - -### Fix - -- #795 and add reconfigure to custom comp. - -## 0.85.5 (2024-09-03) - -### Fix - -- #795 and add reconfigure to custom comp. 
- -## 0.85.4 (2024-09-03) - -### Fix - -- #795 Unit Test Coverage - -## 0.85.3 (2024-09-02) - -### Fix - -- #795 unit test coverage - -## 0.85.2 (2024-09-02) - -### Fix - -- 791 Glasgow URL change - -## 0.85.1 (2024-09-02) - -### Fix - -- 779 Add correct async wait to Home Assistant - -## 0.85.0 (2024-08-27) - -### Feat - -- support for enfield council - -## 0.84.2 (2024-08-27) - -### Fix - -- Re-work North Tyneside Council module for 2024 - some addresses do not have a garden collection -- Re-work North Tyneside Council module for 2024 - -## 0.84.1 (2024-08-08) - -### Fix - -- #771 Bolton bullet points on dates is now fixed - -## 0.84.0 (2024-07-31) - -## 0.83.0 (2024-07-07) - -### Feat - -- add has_numbers() function - -### Fix - -- update Gedling Borough Council parser to use alternative name key -- change Gedling to use new JSON data -- update instructions for Gedling - -## 0.82.1 (2024-06-28) - -### Fix - -- update input.json to use UPRN parameter -- change DorsetCouncil.py to use API links provided in #756 -- explicit import of logging.config to stop error in Python 3.11 - -## 0.82.0 (2024-06-13) - -### Feat - -- adding dev container updates -- adding dev container updates -- refactoring main files -- adding ability to set local mode in HA custom comp. 
if users dont have a Selenium Server - -### Fix - -- MidSussex - -## 0.81.0 (2024-06-05) - -### Feat - -- Adding Wychavon District Council - -### Fix - -- IntTestWarnings -- IntTestWarnings - -## 0.80.0 (2024-06-02) - -### Feat - -- Adding Uttlesford District Council -- Adding Stafford Boro Council -- Adding Swansea Council -- Adding New Forest -- Adding Three Rivers -- Adding Three Rivers - -### Fix - -- ThreeRivers -- #425 Entities are not updated -- sessions to avoid deprecation -- Update docker-image.yml -- Update docker-image.yml - -## 0.79.1 (2024-05-29) - -### Fix - -- Change CSS class in search for collection types - -## 0.79.0 (2024-05-28) - -### Feat - -- Adding Dartford -- Adding South Kesteven District Council -- Adding ChichesterCouncil -- adding HounslowCouncil -- adding HounslowCouncil -- adding HounslowCouncil -- Epping Fix -- Adding Epping Forest District Council -- Update input.json -- Epping Forest District Council -- Adding Stroud District Council -- Add support for Tendring District Council -- #269 Adding Waltham Forest -- #269 Adding Waltham Forest -- Adding council creation script - -### Fix - -- Update Mole Valley URL - -## 0.78.0 (2024-05-26) - -### Feat - -- Add support for Fareham Borough Council - -## 0.77.0 (2024-05-26) - -### Feat - -- Add support for Bracknell Forest Council - -## 0.76.1 (2024-05-24) - -### Fix - -- Handle Barnet council cookies message - -## 0.76.0 (2024-05-24) - -### Feat - -- add bin colour support WestSuffolkCouncil style: black format WestSuffolkCouncil -- add bin colour support WestSuffolkCouncil style: black format WestSuffolkCouncil - -## 0.75.0 (2024-05-19) - -### Feat - -- #725 Add names to selenium test videos using "se:name" option in create webdriver function - -## 0.74.1 (2024-05-18) - -### Fix - -- #693 Cheshire West & Chester Council Sensor Bug -- #693 Cheshire West & Chester Council Sensor Bug - -## 0.74.0 (2024-05-17) - -### Feat - -- #722 Support Python 3.12 -- #722 Support Python 3.12 -- #722 
Support Python 3.12 - -## 0.73.0 (2024-05-17) - -### Feat - -- #708 Adding HA to the dev container for debugging - -## 0.72.0 (2024-05-17) - -### Feat - -- #708 Adding HA to the dev container for debugging -- #708 Adding HA to the dev container for debugging -- #708 Adding HA to the dev container for debugging -- #708 Adding HA to the dev container for debugging -- #708 Adding HA to the dev container for debugging -- #708 Adding HA to the dev container for debugging -- #708 Adding HA to the dev container for debugging -- #708 Adding HA to the dev container for debugging - -## 0.71.0 (2024-05-17) - -### Feat - -- Update for West Suffolk Councils new website - -## 0.70.0 (2024-05-17) - -### Feat - -- #708 Dev Container -- Dev Container -- #708 Dev Container -- #708 Dev Container -- #708 Dev Container -- #708 Dev Container -- #708 Dev Container -- #708 Dev Container -- #708 Dev Container -- #708 Dev Container -- #708 Dev Container -- #708 Dev Container -- #708 simplifying Selenium integration tests -- #708 simplifying Selenium integration tests -- #708 Test GH action seenium -- #708 Test GH action seenium -- #708 Test GH action seenium -- #708 Test GH action seenium -- #708 Test GH action seenium -- #708 Test GH action seenium -- #708 Test GH action seenium -- #708 Test GH action seenium -- #708 Test GH action seenium -- #708 Test GH action seenium -- #708 Dev Container testing -- #708 - dev container changes -- #706 Adding Dev Container -- #706 Adding initial Dev Container - -## 0.69.7 (2024-05-17) - -### Fix - -- #713 BarnsleyMBCouncil.py - -## 0.69.6 (2024-05-16) - -### Fix - -- #709 Update DoverDistrictCouncil.py - -## 0.69.5 (2024-05-14) - -### Fix - -- #696 Small issue and Black formatting -- #696 Small issue and Black formatting -- #696 Small issue and Black formatting -- #696 Small issue and Black formatting -- #696 Small issue and Black formatting -- #696 Small issue and Black formatting -- #696 Small issue and Black formatting -- #696 Small issue and Black 
formatting -- #696 test coverage back to 100% - -## 0.69.4 (2024-05-09) - -### Fix - -- pass in required parameter into `create_webdriver` -- test runners for `MiltonKeynesCityCouncil` and `NorthEastLincs`. - -## 0.69.3 (2024-05-09) - -### Fix - -- fix AttributeError when no garden waste collection is available for properties using Huntingdon District Council -- add support for parsing "Today" / "Tomorrow" as date text for `BarnsleyMBCouncil` -- add support for parsing "Tomorrow" as date text for `LiverpoolCityCouncil` - -## 0.69.1 (2024-05-01) - -### Fix - -- Handling the "Website cookies enhance your user experience." button -- Handling the "Website cookies enhance your user experience." button - -## 0.69.0 (2024-04-28) - -### Feat - -- Adding Renfrewshire Council -- Adding Renfrewshire Council - -## 0.68.2 (2024-04-28) - -### Fix - -- Remove 'import Dumper' - -## 0.68.1 (2024-04-27) - -### Fix - -- input.json Bradford missing comma - -## 0.68.0 (2024-04-27) - -### Feat - -- Add support for West Berkshire Council -- add support for Knowsley Metropolitan Borough Council -- add support for Cheshire West and Chester Council -- add support for Cheshire West and Chester Council - -## 0.66.2 (2024-04-18) - -### Fix - -- Update HaringeyCouncil.py issue #670 - -## 0.66.1 (2024-04-15) - -### Fix - -- parse datetimes correctly and round to midnight - -## 0.66.0 (2024-04-15) - -## 0.65.2 (2024-04-15) - -### Fix - -- change address selection to fix errors selecting the user's PAON - -## 0.65.1 (2024-04-15) - -### Fix - -- add check for parsed string length to stop datetime parsing error - -## 0.65.0 (2024-04-13) - -### Feat - -- add Arun council -- add support for Sunderland City Council -- add support for Sunderland City Council - -## 0.64.3 (2024-03-25) - -### Fix - -- sort data and correct dictionary name (#609) - -## 0.64.2 (2024-03-24) - -## 0.64.1 (2024-03-24) - -### Fix - -- fix Kirklees address search (switch to house & postcode) -- fixes json - -## 0.64.0 
(2024-03-23) - -### Feat - -- add Kirklees council - -### Fix - -- fixes json - -## 0.63.0 (2024-03-23) - -### Feat - -- Add Solihull Council (#513) -- Add Adur and Worthing Councils (#544) -- Add Dover District Council (#614) -- Add Rochford Council (#620) -- Add Tandridge District Council (#621) -- Add West Northamptonshire Council (#567) -- Add Hull City Council (#622) -- Add Wyre Council (#625) -- Add Telford and Wrekin Co-operative Council (#632) -- Add Mansfield District Council (#560) -- Add Bedford Borough Council (#552) - -### Fix - -- spacing on input.json -- realign input.json -- capitalize bin type text -- formatting on input.json -- incorrect collections -- update testing URL for Merton -- attempt to resolve invisible banner hiding postcode box -- resolve JSON schema exception for date formatting -- resolve JSON schema exception for date formatting -- accept cookies banner - -## 0.62.0 (2024-03-03) - -### Fix - -- Added missing .feature file entry to the test config for NewhamCouncil - -## 0.61.1 (2024-02-16) - -### Fix - -- code optimisations -- Fix date parsing in WestLindseyDistrictCouncil.py - -## 0.61.0 (2024-02-11) - -### Feat - -- Add Mole Valley District Council - -## 0.60.1 (2024-02-03) - -### Fix - -- Update input.json Closes #599 - -## 0.60.0 (2024-01-28) - -### Feat - -- Add Scraper for St Albans City and District Council - -## 0.59.1 (2024-01-25) - -### Fix - -- add wiki note for castlepoint -- update test data for castlepoint -- remove single line causing issues - -## 0.59.0 (2024-01-20) - -### Feat - -- Add NorthYorkshire to test feature file -- Add north yorkshire to test input -- Add Support for north yorkshire council - -### Fix - -- remove unused code - -## 0.58.8 (2024-01-19) - -### Fix - -- barnet no overrides - -## 0.58.7 (2024-01-18) - -### Fix - -- accidentally returned strings when needed date objects, refactor to handle this -- checking for future/past dates - -## 0.58.6 (2024-01-18) - -### Fix - -- correct date handling for 
North West Leicestershire - -## 0.58.5 (2024-01-15) - -### Fix - -- Don't call driver.quit where already handled by finally block - -## 0.58.4 (2024-01-15) - -### Fix - -- remove extra driver.quit to prevent errors - -## 0.58.3 (2024-01-15) - -### Feat - -- Added support for Newham Council's bin collections - -### Fix - -- Add a default value for user_agent to fix all councils using selenium and not specifying agent - -## 0.58.2 (2024-01-11) - -### Fix - -- use static values for bin types - -## 0.58.1 (2024-01-10) - -### Fix - -- Eastleigh Borough Council doesnt cope with "You haven't yet signed up for ..." -- Eastleigh Borough Council doesnt cope when Garden Waste service hasn't been signed up for, which gets the value "You haven't yet signed up for our garden waste collections. Find out more about our\xa0garden waste collection service" which results in ValueError: time data - -## 0.58.0 (2024-01-10) - -### Feat - -- Add Test Valley Borough Council - -## 0.57.0 (2024-01-09) - -### Feat - -- Add support for Chorley Council - -## 0.56.13 (2024-01-09) - -### Fix - -- update logic to account for council website change - -## 0.56.12 (2024-01-09) - -### Fix - -- duplicate driver.quit() calls causes error - -## 0.56.11 (2024-01-08) - -### Fix - -- Headless now working on custom comp Update sensor.py - -## 0.56.10 (2024-01-08) - -### Fix - -- headless mode in custom component - -## 0.56.9 (2024-01-08) - -### Fix - -- headless mode - -## 0.56.8 (2024-01-08) - -### Fix - -- headless in custom comp - -## 0.56.7 (2024-01-08) - -### Fix - -- headless options - -## 0.56.6 (2024-01-07) - -### Fix - -- modified Kingston-upon-Thames driver for greater reliability. 
- -## 0.56.5 (2024-01-07) - -### Fix - -- Update KingstonUponThamesCouncil.py - -## 0.56.4 (2024-01-07) - -### Fix - -- Update KingstonUponThamesCouncil.py - -## 0.56.3 (2024-01-07) - -### Fix - -- headless options -- #542 - Selenium Grid Sessions must be terminated cleanly -- #542 - Selenium Grid Sessions must be terminated cleanly - -## 0.56.2 (2024-01-07) - -### Fix - -- Update strings.json -- Update en.json -- Update config_flow.py - -## 0.56.1 (2024-01-07) - -### Fix - -- Update common.py - -## 0.56.0 (2024-01-07) - -### Feat - -- Update strings.json -- Update en.json -- Update config_flow.py -- adding headless control -- adding headless control -- adding headless control - -## 0.55.3 (2024-01-05) - -### Fix - -- Update lint.yml - -## 0.55.2 (2024-01-05) - -### Fix - -- Chelmsford - -## 0.55.1 (2024-01-05) - -### Fix - -- Update ChelmsfordCityCouncil.py -- Update ChelmsfordCityCouncil.py -- Update ChelmsfordCityCouncil.py - -## 0.55.0 (2024-01-05) - -### Feat - -- Update codeql-analysis.yml -- Update behave.yml -- Update CONTRIBUTING.md -- Update behave.yml -- Update behave.yml -- Update ConwyCountyBorough.py -- Update behave.yml -- Update CheshireEastCouncil.py -- Update behave.yml -- Update behave.yml -- Update behave.yml -- Update Makefile -- Update Makefile -- Update behave.yml -- Update Makefile -- Update validate_council_outputs.feature - -## 0.54.0 (2024-01-04) - -### Feat - -- Barnet seasonal overrides - -## 0.53.2 (2024-01-04) - -### Fix - -- barnet (again) - -## 0.53.1 (2024-01-04) - -### Fix - -- barnet - -## 0.53.0 (2024-01-04) - -### Feat - -- barnet council - -## 0.52.0 (2024-01-04) - -### Feat - -- #525 Adding API Server and Docker build -- #525 Adding API Server and Docker build - -## 0.51.0 (2024-01-04) - -### Feat - -- #522 Adding Nottingham City Council - -## 0.50.1 (2024-01-03) - -### Fix - -- don't ask for URL for Vale of White Horse Council - -## 0.50.0 (2024-01-03) - -### Feat - -- add Vale of White Horse District Council - -### Fix - -- 
account for additional string on exceptional schedule - -## 0.49.1 (2024-01-01) - -### Fix - -- Torbay - -## 0.49.0 (2024-01-01) - -### Feat - -- add South Gloucestershire Council - -## 0.48.3 (2024-01-01) - -### Fix - -- manifest.json - -## 0.48.2 (2024-01-01) - -### Fix - -- manifest.json to remove depricated attribute - -## 0.48.1 (2024-01-01) - -### Fix - -- Hacs Validation Pipeline - -## 0.48.0 (2024-01-01) - -### Feat - -- Adding HACS Validation - -## 0.47.0 (2024-01-01) - -### Feat - -- Add hassfest validation.yml - -## 0.46.1 (2023-12-31) - -### Fix - -- Black formatting -- Fix GuildfordCouncil - -## 0.46.0 (2023-12-31) - -### Feat - -- Adding Brighton and Hove City Council -- Adding Brighton and Hove City Council -- Adding Brighton and Hove City Council -- Adding Brighton and Hove City Council -- Adding London Borough Redbridge -- London Borough Redbridge -- Adding LondonBoroughRedbridge 431 - -### Fix - -- chelmsford #407 - -## 0.45.0 (2023-12-29) - -### Feat - -- Add Haringey Council. 
- -## 0.44.2 (2023-12-29) - -### Fix - -- #509 Wiltshire Update input.json - -## 0.44.1 (2023-12-28) - -### Fix - -- Bexley -- CharnwoodBoroughCouncil - -## 0.44.0 (2023-12-27) - -### Feat - -- Adding support for Gedling Borough Council - -## 0.43.0 (2023-12-25) - -### Feat - -- add Newport City Council - -## 0.42.1 (2023-12-24) - -### Feat - -- Initial Test Commit for Gedling Borough Council - -### Fix - -- CastlepointDistrictCouncil -- 191_fixingbroken_councils -- 191_fixingbroken_councils -- 191_fixingbroken_councils - -## 0.42.0 (2023-12-19) - -### Feat - -- Adding West Lindsey District Council -- Adding West Lindsey District Council - -## 0.41.5 (2023-12-18) - -### Fix - -- #191 Preston City Council - -## 0.41.4 (2023-12-17) - -### Fix - -- #493 Update input.json - -## 0.41.3 (2023-12-17) - -### Fix - -- #27 East Riding - -## 0.41.2 (2023-12-17) - -### Fix - -- #493 Leeds issues - -## 0.41.1 (2023-12-17) - -### Fix - -- Add in URL override for wiki -- Update RushmoorCouncil.py to use new URL - -## 0.41.0 (2023-12-16) - -### Feat - -- #264 Adding Oldham -- #250 Adding Halton Borough Council -- #244 Adding Portsmouth City Council - -### Fix - -- #141 Leeds speed up -- #174 / #244 / #204 - -## 0.40.1 (2023-12-16) - -### Fix - -- 488_blackburnfixes - -## 0.40.0 (2023-12-15) - -### Feat - -- adding #204 Forest_of_Dean_District -- adding #204 Forest_of_Dean_District - -## 0.39.0 (2023-12-13) - -### Feat - -- Adding support for Reading Borough Council - -## 0.38.0 (2023-12-12) - -### Feat - -- Add Shropshire Council - -## 0.37.2 (2023-12-08) - -### Fix - -- Issue 394 - change coordinator data from numerical indexed list to dictionary - -## 0.37.1 (2023-12-08) - -### Fix - -- add postcode and uprn for Bedfordshire Council - -## 0.37.0 (2023-12-07) - -### Feat - -- Add BefordshireCouncil scraper - -## 0.36.0 (2023-12-07) - -### Feat - -- adding NorthEastDerbyshireDistrictCouncil - -## 0.35.1 (2023-12-06) - -### Fix - -- move logging config to collect_data script - -## 
0.35.0 (2023-12-06) - -### Feat - -- Adding North_West_Leicestershire -- Adding North_West_Leicestershire - -## 0.34.0 (2023-12-05) - -### Feat - -- Add Sevenoaks District Council -- Add Barnsley Metropolitan Borough Council to the feature file -- Add Barnsley Metropolitan Borough Council to input.json -- Add support for Barnsley Council (#444) -- Add Dorset Council to feature file -- Add Dorset Council to input.json -- Add support for Dorset Council -- Add Rugby Borough Council to feature file -- Add Rugby Borough Council to input.json -- Add parser for Rugby Borough Council (#456) - -## 0.32.1 (2023-12-04) - -### Fix - -- Move LiverpoolCityCouncil.py to correct folder - -## 0.32.0 (2023-12-01) - -### Feat - -- Add extra files for Stoke-on-Trent support -- Add support for Stoke-on-Trent (re: #440) - -## 0.31.1 (2023-12-01) - -### Fix - -- change logic to add correct years and support 'Tomorrow' results - -## 0.31.0 (2023-12-01) - -### Feat - -- Add support for Environment First collections (re: #433) -- Add support for Environment First collections (re: #433) -- change parameter name of 'x' to 'step' in get_dates_every_x_days() - -## 0.30.1 (2023-12-01) - -### Fix - -- Increase data update timeout for slower selenium based tests - -## 0.30.0 (2023-11-30) - -### Feat - -- Added WestSuffolkCouncil - -## 0.29.1 (2023-11-29) - -### Fix - -- Fix scraper for Bolton - -## 0.29.0 (2023-11-26) - -### Feat - -- Add Mid and East Antrim -- Add Mid and East Antrim -- Add Mid and East Antrim - -## 0.28.1 (2023-11-22) - -### Fix - -- basingstoke adapt to basingstoke site changes - -## 0.28.0 (2023-11-08) - -### Feat - -- Add support files for Liverpool City Council -- Add additional comments -- Add Liverpool City Council parser - -### Fix - -- change dateutil name - -## 0.27.2 (2023-11-08) - -### Fix - -- Custom component web driver field label - -## 0.27.1 (2023-11-05) - -### Fix - -- 419-fix-selenium-behave-tests - -## 0.27.0 (2023-11-04) - -### Feat - -- Update 
EastSuffolkCouncil.py -- Change bin_type's to be title() so it reads better -- Driver quit needs to be after last use of driver - -## 0.26.0 (2023-11-03) - -### Feat - -- Add remote Selenium web driver support - -## 0.25.0 (2023-11-03) - -### Feat - -- Update dev mode & remove JSON outputs -- Update dev mode & remove JSON outputs -- Update dev mode & remove JSON outputs - -## 0.24.3 (2023-11-01) - -### Feat - -- Add remote Selenium web driver support -- Add remote Selenium web driver support -- Add remote Selenium web driver support -- Add remote Selenium web driver support - -### Fix - -- Holidays subdivision error - -## 0.24.2 (2023-11-01) - -### Fix - -- #378 update East Northamptionshire to North Northamptonshire - -## 0.24.1 (2023-11-01) - -### Fix - -- 410 Adding more behave logging and hamcrest assertations - -## 0.24.0 (2023-10-31) - -### Feat - -- Replace individual council schema's with a single common one - -## 0.23.2 (2023-10-30) - -### Fix - -- #399 - DeprecationWarning: Python Package holidays - -## 0.23.1 (2023-10-30) - -### Fix - -- unit test coverage - -## 0.23.0 (2023-10-30) - -### Feat - -- Add support for Conwy council - -## 0.22.0 (2023-10-30) - -## 0.21.3 (2023-10-29) - -### Feat - -- Add support for Calderdale Council - -### Fix - -- Home Assistant custom component fix for Selenium based councils -- Home Assistant custom component fix for Selenium based councils -- Fix Chelmsford City Council -- Fix input.json order - -## 0.21.1 (2023-10-24) - -### Fix - -- Fix the incorrect key collectionTime in json output of Salford Council - -## 0.21.0 (2023-10-23) - -### Feat - -- Add support for West Lothian Council -- Add support for East Lindsey District Council -- Add support for Gateshead Council -- Add support for Staffordshire Moorlands District Council - -## 0.20.0 (2023-10-20) - -### Feat - -- Add support for Cannock Chase District Council - -## 0.19.0 (2023-10-19) - -### Feat - -- fix missing comma in test input for eastsuffolkcouncil - -## 
0.18.0 (2023-10-19) - -### Feat - -- Add EastSuffolkCouncil support - -## 0.17.0 (2023-10-19) - -### Feat - -- Add support for Bury Council (#265) -- Add support for Bury Council (#265) - -### Fix - -- correctly align input.json - -## 0.16.0 (2023-10-18) - -### Feat - -- Add support for Neath Port Talbot Council - -## 0.15.0 (2023-10-18) - -### Feat - -- StratfordUponAvonCouncil Addition - -## 0.14.0 (2023-10-18) - -### Feat - -- Rename Chilterns to Buckinghamshire Council - -## 0.13.4 (2023-10-16) - -### Fix - -- Update poetry.lock to allow any urllib3 version - -## 0.13.3 (2023-10-15) - -### Fix - -- Remove options flow from home assistant custom component - -## 0.13.2 (2023-10-15) - -### Fix - -- Update poetry.lock - -## 0.13.1 (2023-10-15) - -### Fix - -- Remove first BS4 call to stop page read -- fix ValueError and add in correct year data -- swap Crawley's USRN for house number -- fix date parsing and change BS4 logic - -## 0.13.0 (2023-10-11) - -### Feat - -- Add supporting files for Rhondda Cynon Taff Council - -## 0.12.1 (2023-09-28) - -### Feat - -- Add support for Reigate and Banstead Borough Council - -### Fix - -- Fix for Wakefield City Council custom component support -- Fix for Wakefield City Council custom component support - -## 0.11.0 (2023-09-27) - -### Feat - -- Add support for Bath and North East Somerset Council -- Add support for multiple instances of the custom component - -### Fix - -- Fix Python Semantic Release version -- Fix Wakefield City Council - -## 0.10.1 (2023-09-16) - -## 0.10.0 (2023-09-16) - -## 0.9.0 (2023-07-28) - -## 0.8.0 (2023-07-23) - -## 0.7.0 (2023-07-23) - -## 0.6.0 (2023-07-22) - -## 0.5.0 (2023-07-21) - -## 0.4.0 (2023-07-20) - -## 0.3.0 (2023-07-18) - -## 0.2.0 (2023-07-16) - -## 0.1.0 (2023-07-16) diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md deleted file mode 100644 index 18c9147181..0000000000 --- a/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,128 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - 
-We as members, contributors, and leaders pledge to make participation in our -community a harassment-free experience for everyone, regardless of age, body -size, visible or invisible disability, ethnicity, sex characteristics, gender -identity and expression, level of experience, education, socio-economic status, -nationality, personal appearance, race, religion, or sexual identity -and orientation. - -We pledge to act and interact in ways that contribute to an open, welcoming, -diverse, inclusive, and healthy community. - -## Our Standards - -Examples of behavior that contributes to a positive environment for our -community include: - -* Demonstrating empathy and kindness toward other people -* Being respectful of differing opinions, viewpoints, and experiences -* Giving and gracefully accepting constructive feedback -* Accepting responsibility and apologizing to those affected by our mistakes, - and learning from the experience -* Focusing on what is best not just for us as individuals, but for the - overall community - -Examples of unacceptable behavior include: - -* The use of sexualized language or imagery, and sexual attention or - advances of any kind -* Trolling, insulting or derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or email - address, without their explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Enforcement Responsibilities - -Community leaders are responsible for clarifying and enforcing our standards of -acceptable behavior and will take appropriate and fair corrective action in -response to any behavior that they deem inappropriate, threatening, offensive, -or harmful. 
- -Community leaders have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, and will communicate reasons for moderation -decisions when appropriate. - -## Scope - -This Code of Conduct applies within all community spaces, and also applies when -an individual is officially representing the community in public spaces. -Examples of representing our community include using an official e-mail address, -posting via an official social media account, or acting as an appointed -representative at an online or offline event. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the community leaders responsible for enforcement at -. -All complaints will be reviewed and investigated promptly and fairly. - -All community leaders are obligated to respect the privacy and security of the -reporter of any incident. - -## Enforcement Guidelines - -Community leaders will follow these Community Impact Guidelines in determining -the consequences for any action they deem in violation of this Code of Conduct: - -### 1. Correction - -**Community Impact**: Use of inappropriate language or other behavior deemed -unprofessional or unwelcome in the community. - -**Consequence**: A private, written warning from community leaders, providing -clarity around the nature of the violation and an explanation of why the -behavior was inappropriate. A public apology may be requested. - -### 2. Warning - -**Community Impact**: A violation through a single incident or series -of actions. - -**Consequence**: A warning with consequences for continued behavior. No -interaction with the people involved, including unsolicited interaction with -those enforcing the Code of Conduct, for a specified period of time. This -includes avoiding interactions in community spaces as well as external channels -like social media. 
Violating these terms may lead to a temporary or -permanent ban. - -### 3. Temporary Ban - -**Community Impact**: A serious violation of community standards, including -sustained inappropriate behavior. - -**Consequence**: A temporary ban from any sort of interaction or public -communication with the community for a specified period of time. No public or -private interaction with the people involved, including unsolicited interaction -with those enforcing the Code of Conduct, is allowed during this period. -Violating these terms may lead to a permanent ban. - -### 4. Permanent Ban - -**Community Impact**: Demonstrating a pattern of violation of community -standards, including sustained inappropriate behavior, harassment of an -individual, or aggression toward or disparagement of classes of individuals. - -**Consequence**: A permanent ban from any sort of public interaction within -the community. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 2.0, available at -https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. - -Community Impact Guidelines were inspired by [Mozilla's code of conduct -enforcement ladder](https://github.com/mozilla/diversity). - -[homepage]: https://www.contributor-covenant.org - -For answers to common questions about this code of conduct, see the FAQ at -https://www.contributor-covenant.org/faq. Translations are available at -https://www.contributor-covenant.org/translations. 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index c762f41b79..0000000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,360 +0,0 @@ - -# Contents -- [Contents](#contents) -- [Contributor guidelines](#contributor-guidelines) - - [Getting Started](#getting-started) - - [Environment Setup](#environment-setup) - - [Project Aims](#project-aims) - - [What can I contribute to?](#what-can-i-contribute-to) - - [Claiming an issue](#claiming-an-issue) - - [Pushing your changes](#pushing-your-changes) -- [Adding a scraper](#adding-a-scraper) - - [Developing](#developing) - - [Developing using our Dev Container](#developing-using-our-dev-container) - - [Prerequisites](#prerequisites) - - [Step 1: Clone the Repository](#step-1-clone-the-repository) - - [Step 2: Set Up Docker](#step-2-set-up-docker) - - [Step 3: Open the Project in VSCode](#step-3-open-the-project-in-vscode) - - [Step 4: Reopen in Container](#step-4-reopen-in-container) - - [Step 5: Verify the Development Environment](#step-5-verify-the-development-environment) - - [Developing](#developing-1) - - [Kwargs](#kwargs) - - [Common Functions](#common-functions) - - [Additional files](#additional-files) - - [Input JSON file](#input-json-file) - - [Testing](#testing) - - [Behave (Integration Testing)](#behave-integration-testing) - - [Running the Behave tests for all councils](#running-the-behave-tests-for-all-councils) - - [Running the Behave tests for a specific council](#running-the-behave-tests-for-a-specific-council) - - [GitHub Actions Integration Tests](#github-actions-integration-tests) - - [Test Results](#test-results) - - [Allure Report](#allure-report) - - [CodeCov Report](#codecov-report) - - [Pytest (Unit Testing)](#pytest-unit-testing) - - [Running the Unittests](#running-the-unittests) -- [Contact info](#contact-info) - - - -# Contributor guidelines -This document contains guidelines on contributing to the UKBCD project including how the project works, how to set up -the environment, 
- Create a new branch - it's recommended to use the 'create a branch' option on the issue page, create it in your forked repo and then checkout the branch locally (or in your IDE).
- `docs:` for when changes to documentation are made
- -There are a few different options for scraping, and you are free to choose whichever best suits the council: -- Using [Beautiful Soup 4](https://github.com/robbrad/UKBinCollectionData/blob/master/uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py) -- Using the [requests](https://github.com/robbrad/UKBinCollectionData/blob/master/uk_bin_collection/uk_bin_collection/councils/ManchesterCityCouncil.py) module -- Reading data from [external files](https://github.com/robbrad/UKBinCollectionData/blob/master/uk_bin_collection/uk_bin_collection/councils/LeedsCityCouncil.py) -- Using [Selenium](https://github.com/robbrad/UKBinCollectionData/blob/master/uk_bin_collection/uk_bin_collection/councils/Chilterns.py) to automate browser behaviour - -## Developing -To get started, first you will need to fork this repository and setup your own working environment before you can start developing. - -### Developing using our Dev Container -You need to set up Docker, Visual Studio Code (VSCode), and a development container (devcontainer) after cloning the repository at https://github.com/robbrad/UKBinCollectionData. - -#### Prerequisites -Before you start, make sure you have the following installed on your computer: -- Docker: [Download Docker](https://www.docker.com/products/docker-desktop) -- Visual Studio Code (VSCode): [Download VSCode](https://code.visualstudio.com/download) -- Remote - Containers extension for VSCode: Install it from the VSCode Marketplace or directly from the Extensions view (`Ctrl+Shift+X` in VSCode and search for "Remote - Containers"). - -#### Step 1: Clone the Repository -First, clone the repository to your local machine. Open a terminal and run the following command: -```bash -git clone https://github.com/robbrad/UKBinCollectionData.git -``` -Navigate into the directory: -```bash -cd UKBinCollectionData -``` - -#### Step 2: Set Up Docker -Ensure Docker is running on your system. 
You can verify this by running: -```bash -docker -v -``` -This should return the version of Docker installed. If Docker is running, you’ll see no errors. - -#### Step 3: Open the Project in VSCode -Open VSCode, and then open the cloned repository by going to `File > Open Folder...` and selecting the `UKBinCollectionData` folder. - -#### Step 4: Reopen in Container -Once the folder is open in VSCode: -1. A prompt might appear asking you to reopen in a container. If it does, select "Reopen in Container". -2. If you don’t see the prompt, press `F1` to open the command palette, type "Remote-Containers: Reopen in Container", and select that option. - -VSCode will start building the Docker container as defined in the `.devcontainer/` folder in the repository. This process can take a few minutes as it involves downloading the base Docker and Selenium hub images and setting up the environment. - -#### Step 5: Verify the Development Environment -Once the container is set up, VSCode will connect to it automatically. You can start editing and running the code inside the container. This ensures that your development environment is consistent and controlled, replicating the same settings and tools as specified in the devcontainer configuration. - -### Developing -Once your environment is ready, create a new branch from your master/main branch - -Then you can run - -``` -poetry run python uk_bin_collection/uk_bin_collection/create_new_council.py "CouncilName" "CouncilURL" -``` - -The new .py file will be used in the CLI to call the parser, so be sure to pick a sensible name - e.g. CheshireEastCouncil.py is called with: -``` -python collect_data.py CheshireEastCouncil -``` - -To simplify things somewhat, a [template](https://github.com/robbrad/UKBinCollectionData/blob/master/uk_bin_collection/uk_bin_collection/councils/council_class_template/councilclasstemplate.py) file has been created - open this file, copy the contents to your new .py file and start from there. 
The create script above will create - -1. A council Class file under the councils folder -2. Make an entry in input.json - -You are pretty much free to approach the scraping however you would like, but please ensure that: -- Your scraper returns a dictionary made up of the key "bins" and a value that is a list of bin types and collection dates. An example of this can be seen below. -- Any dates or times are formatted to standard UK formats (see [below](#common-functions)) -
- Output Example - -```json -{ - "bins": [ - { - "type": "Empty Standard Mixed Recycling", - "collectionDate": "29/07/2022" - }, - { - "type": "Empty Standard Garden Waste", - "collectionDate": "29/07/2022" - }, - { - "type": "Empty Standard General Waste", - "collectionDate": "05/08/2022" - } - ] -} -``` -
- -### Kwargs -UKBCD has two mandatory parameters when it runs - the name of the parser (sans .py) and the URL from which to scrape. However, developers can also get the following data using `kwargs`: - -| Parameter | Prompt | Notes | kwargs.get | -|-----------------------------------------|--------------------------|-------------------------------------------------------------|------------------------------| -| UPRN (Unique Property Reference Number) | `-u` or `--uprn` | | `kwargs.get('uprn')` | -| USRN (Unique Street Reference Number) | `-us` or `--usrn` | | `kwargs.get('usrn')` | -| House number | `-n` or `--number` | Sometimes called PAON | `kwargs.get('paon')` | -| Postcode | `-p` or `--postcode` | Needs to be wrapped in quotes on the CLI | `kwargs.get('postcode')` | -| Skip Get URL | `-s` or `--skip_get_url` | | `kwargs.get('skip_get_url')` | -| URL for remote Selenium web driver | `-w` or `--web_driver` | Needs to be wrapped in quotes on the CLI | `kwargs.get('web_driver')` | -| Development Mode | `-d` or `--dev_mode` | Create/update council's entry in the input.json on each run | `kwargs.get('dev_mode')` | - -These parameters are useful if you're using something like the requests module and need to take additional user information into the request, such as: -```commandline -python collect_data.py LeedsCityCouncil https://www.leeds.gov.uk/residents/bins-and-recycling/check-your-bin-day -p "LS1 2JG" -n 41 -``` - In the scraper, the following code takes the inputted parameters and uses them in two different variables: -```python -user_postcode = kwargs.get("postcode") -user_paon = kwargs.get("paon") -``` -Each parameter also has its own validation method that should be called after the `kwargs.get`: -- `check_uprn()` -- `check_paon()` -- `check_postcode()` - -The first two are simple validators - if the parameter is used but no value is given, they will throw an exception. 
`check_postcode()` works differently - instead, it makes a call to the [postcodes.io](https://postcodes.io/) API to check if it exists or not. An exception will only be thrown here if the response code is not `HTTP 200`. - -### Common Functions -The project has a small but growing library of functions (and the occasional variable) that are useful when scraping websites or calendars - aptly named [common.py](https://github.com/robbrad/UKBinCollectionData/blob/master/uk_bin_collection/uk_bin_collection/common.py). -Useful functions include: -- functions to [add ordinals](https://github.com/robbrad/UKBinCollectionData/blob/e49da2f43143ac7c65fbeaf35b5e86b3ea19e31b/uk_bin_collection/uk_bin_collection/common.py#L72) to dates (04 becomes 4th) or [remove them](https://github.com/robbrad/UKBinCollectionData/blob/e49da2f43143ac7c65fbeaf35b5e86b3ea19e31b/uk_bin_collection/uk_bin_collection/common.py#L86) (4th becomes 04) -- a function to check [if a date is a holiday](https://github.com/robbrad/UKBinCollectionData/blob/e49da2f43143ac7c65fbeaf35b5e86b3ea19e31b/uk_bin_collection/uk_bin_collection/common.py#L117) in a given part of the UK -- a function that returns the [dates of a given weekday](https://github.com/robbrad/UKBinCollectionData/blob/e49da2f43143ac7c65fbeaf35b5e86b3ea19e31b/uk_bin_collection/uk_bin_collection/common.py#L136) over the next N weeks -- a function that returns a [list of dates every N days](https://github.com/robbrad/UKBinCollectionData/blob/e49da2f43143ac7c65fbeaf35b5e86b3ea19e31b/uk_bin_collection/uk_bin_collection/common.py#L148) from a given start date -- a function to check [if a string contains a date](./uk_bin_collection/uk_bin_collection/common.py#L249) (leverages [dateutil's parser](https://dateutil.readthedocs.io/en/stable/parser.html)) - -`common.py` also contains a [standardised date format](https://github.com/robbrad/UKBinCollectionData/blob/e49da2f43143ac7c65fbeaf35b5e86b3ea19e31b/uk_bin_collection/uk_bin_collection/common.py#L11) variable 
called `date_format`, which is useful to call when formatting datetimes. - -Please feel free to contribute to this library as you see fit - added functions should include the following: -- clear, lowercase and underscored name -- parameter types -- a return type (if there is one) -- a docstring describing what the function does, as well as parameter and return type descriptors. - -## Additional files -In order for your scraper to work with the project's testing suite, some additional files need to be provided or -modified: -- [ ] [Input JSON file](#input-json-file) - -**Note:** from here on, anything containing `<council_name>` should be replaced with the scraper's name. - -### Input JSON file -| Type | File location | -|--------|----------------------------------------------------------| -| Modify | `UKBinCollectionData/uk_bin_collection/tests/input.json` | - -Each council should have a node that matches the scraper's name. The node should include arguments in curly braces - the -URL is mandatory, but any additional parameters like UPRN or postcode should also be provided. Councils should be -listed in alphabetical order. - -A "wiki_name" argument with the council's full name should also be provided. - -A "wiki_note" argument should be used where non-standard instructions (beyond just providing UPRN/Postcode/House Number -parameters) are needed. - -A "wiki_command_url_override" argument should be used where parts of the URL need to be replaced by the user to allow a -valid URL to be left for the integration tests. - -A new [Wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) entry will be generated automatically from -this file's details. - -**Note:** If you want the integration test to work you must supply real, working data (a business address is -recommended - the council's address is usually a good one). - -
- Example - -```json - "CheshireEastCouncil": { - "uprn": "100012791226", - "url": "https://online.cheshireeast.gov.uk/MyCollectionDay/SearchByAjax/GetBartecJobList?uprn=100012791226&onelineaddress=3%20COBBLERS%20YARD,%20SK9%207DZ&_=1621149987573", - "wiki_name": "Cheshire East Council", - "wiki_command_url_override": "https://online.cheshireeast.gov.uk/MyCollectionDay/SearchByAjax/GetBartecJobList?uprn=XXXXXXXX&onelineaddress=XXXXXXXX&_=1621149987573", - "wiki_note": "Both the UPRN and a one-line address are passed in the URL, which needs to be wrapped in double quotes. The one-line address is made up of the house number, street name and postcode.\nUse the form [here](https://online.cheshireeast.gov.uk/mycollectionday/) to find them, then take the first line and post code and replace all spaces with `%20`." - }, -``` -
- -## Testing -### Behave (Integration Testing) -As with any web scraping project, there's a reliance on the council not changing their website - if this happens Beautiful Soup -will fail to read the site correctly, and the expected data will not be returned. To mitigate this and stay on top -of "what works and what needs work" - we have created a set of Integration tests which run a [feature](https://github.com/robbrad/UKBinCollectionData/blob/master/uk_bin_collection/tests/features/validate_council_outputs.feature) -file. - -Based on the [input.json](https://github.com/robbrad/UKBinCollectionData/blob/master/uk_bin_collection/tests/input.json), -this does an actual live run against the council's site and validates if the returned data is JSON and conforms to the common format [JSON Schema](https://github.com/robbrad/UKBinCollectionData/tree/master/uk_bin_collection/tests/output.schema). - -By default if the council is a Selenium based council it will run in headless mode. If you pass `--headless=False` to pytest (possible in a VS Code launch.json, useful for debugging code) it will run in a visible browser. 
- -It also defaults the Selenium URL to be `http://localhost:4444` and the local_browser to False - -You can set pytest to test on your local web browser without Selenium Grid by setting `--local_browser=True` -If you want a different Selenium URL you can set it with `--selenium_url=http://selenium:4444` NOTE: this has no effect if you also set `--local_browser=True` (default: False), as Selenium Grid testing will then be skipped - -In VSCode, if you create a launch.json you can debug the test locally with the following setup -```json -{ - "version": "0.2.0", - "configurations": [ - { - "name": "Python Debugger: Current File", - "type": "debugpy", - "request": "launch", - "purpose": ["debug-test"], - "env": { - "PYTEST_ADDOPTS": "--headless=False --local_browser=True" - } - } - ] -} -``` - -It is also possible to run -```commandline -#Visible Selenium Run in Local Browser -poetry run pytest uk_bin_collection/tests/step_defs/ -k "Council_Name" --headless=False --local_browser=True - -#Visible Selenium Run on Selenium Grid -poetry run pytest uk_bin_collection/tests/step_defs/ -k "Council_Name" --headless=False --selenium_url=http://localhost:4444 -``` - -#### Running the Behave tests for all councils -```commandline -cd UKBinCollectionData -poetry shell -poetry run pytest uk_bin_collection/tests/step_defs/ -n logical -``` - -#### Running the Behave tests for a specific council -```commandline -cd UKBinCollectionData -poetry shell -poetry run pytest uk_bin_collection/tests/step_defs/ -n logical -k "BarnetCouncil" -``` - -#### GitHub Actions Integration Tests -The [GitHub actions](https://github.com/robbrad/UKBinCollectionData/actions/workflows/behave.yml) workflow is set to run on push and pull_requests - -It uses a [Makefile](https://github.com/robbrad/UKBinCollectionData/blob/master/Makefile) to run the [Behave](#behave--integration-testing-) tests to ensure the councils are all still working - -#### Test Results - -##### Allure Report -The Github Actions publishes the Allure Behave Test results to 
Github Pages: https://robbrad.github.io/UKBinCollectionData/&lt;python_version&gt;/ (eg https://robbrad.github.io/UKBinCollectionData/3.9/) - you can check this to see if a council is still working as expected - -##### CodeCov Report -The CodeCov.io report can be found [here](https://app.codecov.io/gh/robbrad/UKBinCollectionData) - -### Pytest (Unit Testing) -As well as integration testing the repo is setup to test some of the static methods as well to ensure basic core functionality - -#### Running the Unittests -```commandline -cd UKBinCollectionData -poetry shell -poetry run coverage run --omit "*/tests/*" -m pytest uk_bin_collection/tests --ignore=uk_bin_collection/tests/step_defs/ -poetry run coverage xml -``` - -# Contact info -If you have questions or comments, you can reach the project contributors in the following ways: -- Council requests can be submitted [here](https://github.com/robbrad/UKBinCollectionData/issues/new?assignees=&labels=Class%3A+enhancement&template=COUNCIL_REQUEST.yaml) -- General questions or comments can be submitted [here](https://github.com/robbrad/UKBinCollectionData/discussions/categories/q-a) - diff --git a/LICENSE b/LICENSE deleted file mode 100644 index cf6e616d1b..0000000000 --- a/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2022 Robert Bradley - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/Makefile b/Makefile deleted file mode 100644 index 16727ef1d7..0000000000 --- a/Makefile +++ /dev/null @@ -1,44 +0,0 @@ -.PHONY: install pre-build build black pycodestyle update-wiki - -## @CI_actions Installs the checked out version of the code to your poetry managed venv -install: - poetry install --without dev - -install-dev: - poetry install - -## @CI_actions Runs code quality checks -pre-build: black unit-tests - rm setup.py || echo "There was no setup.py" - poetry show --no-dev | awk '{print "poetry add "$$1"=="$$2}' | sort | sh - -## @CI_actions Builds the project into an sdist -build: - poetry build -f sdist - -## @Code_quality Runs black on the checked out code -black: - poetry run black **/*.py - -## @Code_quality Runs pycodestyle on the the checked out code -pycodestyle: - poetry run pycodestyle --statistics -qq uk_bin_collection - -## @Testing runs unit tests -integration-tests: ## runs tests for the project - if [ -z "$(councils)" ]; then \ - poetry run pytest uk_bin_collection/tests/step_defs/ -n logical --alluredir=build/$(matrix)/allure-results; \ - else \ - poetry run pytest uk_bin_collection/tests/step_defs/ -k "$(councils)" -n logical --alluredir=build/$(matrix)/allure-results; \ - fi - -parity-check: - poetry run python uk_bin_collection/tests/council_feature_input_parity.py $(repo) $(branch) - -unit-tests: - poetry run coverage erase - - poetry run coverage run --append --omit "*/tests/*" -m pytest -vv -s --log-cli-level=DEBUG uk_bin_collection/tests 
custom_components/uk_bin_collection/tests --ignore=uk_bin_collection/tests/step_defs/ - poetry run coverage xml - -update-wiki: - poetry run python wiki/generate_wiki.py diff --git a/README.md b/README.md deleted file mode 100644 index 48ae4e7308..0000000000 --- a/README.md +++ /dev/null @@ -1,371 +0,0 @@ -[![Made with Python](https://img.shields.io/badge/Made%20With%20Python-red?style=for-the-badge&logo=python&logoColor=white&labelColor=red)](https://www.python.org) - -[![HACS Badge](https://img.shields.io/badge/HACS-Custom-41BDF5.svg?style=for-the-badge)](https://github.com/robbrad/UKBinCollectionData) -[![Current Release](https://img.shields.io/github/v/release/robbrad/UKBinCollectionData?style=for-the-badge&filter=*)](https://github.com/robbrad/UKBinCollectionData/releases) -[![PyPi](https://img.shields.io/pypi/v/uk_bin_collection?label=PyPI&logo=pypi&style=for-the-badge&color=blue)](https://pypi.org/project/uk-bin-collection/) - -[![GitHub license](https://img.shields.io/github/license/robbrad/UKBinCollectionData?style=for-the-badge)](https://github.com/robbrad/UKBinCollectionData/blob/master/LICENSE) -[![GitHub issues](https://img.shields.io/github/issues-raw/robbrad/UKBinCollectionData?style=for-the-badge)](https://github.com/robbrad/UKBinCollectionData/issues?q=is%3Aopen+is%3Aissue) -[![GitHub closed issues](https://img.shields.io/github/issues-closed-raw/robbrad/UKBinCollectionData?style=for-the-badge)](https://github.com/robbrad/UKBinCollectionData/issues?q=is%3Aissue+is%3Aclosed) -[![GitHub contributors](https://img.shields.io/github/contributors/robbrad/UKBinCollectionData?style=for-the-badge)](https://github.com/robbrad/UKBinCollectionData/graphs/contributors) - -[![Test Councils](https://img.shields.io/github/actions/workflow/status/robbrad/UKBinCollectionData/behave.yml?style=for-the-badge&label=Test+Councils)](https://github.com/robbrad/UKBinCollectionData/actions/workflows/behave.yml) 
-![Codecov](https://img.shields.io/codecov/c/gh/robbrad/UKBinCollectionData?style=for-the-badge) -[![CodeQL Analysis](https://img.shields.io/github/actions/workflow/status/robbrad/UKBinCollectionData/codeql-analysis.yml?style=for-the-badge&label=CodeQL+Analysis)](https://github.com/robbrad/UKBinCollectionData/actions/workflows/codeql-analysis.yml) -[![Publish Release](https://img.shields.io/github/actions/workflow/status/robbrad/UKBinCollectionData/release.yml?style=for-the-badge&label=Publish+Release)](https://github.com/robbrad/UKBinCollectionData/actions/workflows/release.yml) -[![Test Report Deployment](https://img.shields.io/github/actions/workflow/status/robbrad/UKBinCollectionData/pages%2Fpages-build-deployment?style=for-the-badge&label=Test+Report+Deployment)](https://github.com/robbrad/UKBinCollectionData/actions/workflows/pages/pages-build-deployment) - -# UK Bin Collection Data (UKBCD) -This project aims to provide a neat and standard way of providing bin collection data in JSON format from UK councils that have no API to do so. - -Why do this? -You might want to use this in a Home Automation - for example say you had an LED bar that lit up on the day of bin collection to the colour of the bin you want to take out, then this repo provides the data for that. - -**PLEASE respect a councils infrastructure / usage policy and only collect data for your own personal use on a suitable frequency to your collection schedule.** - -Most scripts make use of [Beautiful Soup 4](https://pypi.org/project/beautifulsoup4/) to scrape data, although others use different approaches, such as emulating web browser behaviour, or reading data from CSV files. 
- -[![](https://img.shields.io/badge/-41BDF5?style=for-the-badge&logo=homeassistant&logoColor=white&label=HomeAssistant+Thread)](https://community.home-assistant.io/t/bin-waste-collection/55451) -[![](https://img.shields.io/badge/Request%20a%20council-gray?style=for-the-badge&logo=github&logoColor=white)](https://github.com/robbrad/UKBinCollectionData/issues/new/choose) - ---- - -## Requesting your council -> :warning: Please check that a request for your council has not already been made. You can do this by searching on the [Issues](https://github.com/robbrad/UKBinCollectionData/issues) page. - -If an issue already exists, please comment on that issue to express your interest. Please do not open a new issue, as it will be closed as a duplicate. - -If an issue does not already exist, please fill in a new [Council Request](https://github.com/robbrad/UKBinCollectionData/issues/new/choose) form, including as much information as possible, including: -- Name of the council -- URL to bin collections -- An example postcode and/or UPRN (whichever is relevant) -- Any further information - -Please be aware that this project is run by volunteer contributors and completion depends on numerous factors - even with a request, we cannot guarantee if/when your council will get added. - ---- - -## Home Assistant Usage - -### Install with HACS (recommended) - -#### Automated -[![hacs_badge](https://img.shields.io/badge/HACS-Default-41BDF5.svg?style=for-the-badge)](https://github.com/hacs/integration) - -This integration can be installed directly via HACS. To install: - -* [Add the repository](https://my.home-assistant.io/redirect/hacs_repository/?owner=robbrad&repository=UKBinCollectionData&category=integration) to your HACS installation -* Click `Download` - -#### Manual -1. Ensure you have [HACS](https://hacs.xyz/) installed -1. In the Home Assistant UI go to `HACS` > `Integrations` > `⋮` > `Custom repositories`. -1. 
Enter `https://github.com/robbrad/UKBinCollectionData` in the `Repository` field. -1. Select `Integration` as the category then click `ADD`. -1. Click `+ Add Integration` and search for and select `UK Bin Collection Data` then click `Download`. -1. Restart your Home Assistant. -1. In the Home Assistant UI go to `Settings` > `Devices & Services` click `+ Add Integration` and search for `UK Bin Collection Data`. -1. If you see a "URL of the remote Selenium web driver to use" field when setting up your council, you'll need to provide the URL to a web driver you've set up separately such as [standalone-chrome](https://hub.docker.com/r/selenium/standalone-chrome). - -### Install manually - -1. Open the folder for your Home Assistant configuration (where you find `configuration.yaml`). -1. If you do not have a `custom_components` folder there, you need to create it. -1. [Download](https://github.com/robbrad/UKBinCollectionData/archive/refs/heads/master.zip) this repository then copy the folder `custom_components/uk_bin_collection` into the `custom_components` folder you found/created in the previous step. -1. Restart your Home Assistant. -1. In the Home Assistant UI go to `Settings` > `Devices & Services` click `+ Add Integration` and search for `UK Bin Collection Data`. - -### Overriding the Bin Icon and Bin Colour -We realise it is difficult to set a colour from the council's text for the Bin Type and to keep the integration generic we don't capture colour from a council (not all councils supply this as a field), only bin type and next collection date. - -When you configure the component on the first screen you can set a JSON string to map the bin type to the colour and icon - -Here is an example to set the colour and icon for the type `Empty Standard General Waste`. This type is the type returned from the council for the bin. You can do this for multiple bins. - -If you miss this on the first setup you can reconfigure it. 
- -``` -{ - "Empty Standard General Waste": - { - "icon": "mdi:trash-can", - "color": "blue" - } -} -``` - ---- - -## Standalone Usage -```commandline -PS G:\Projects\Python\UKBinCollectionData\uk_bin_collection\collect_data.py -usage: collect_data.py [-h] [-p POSTCODE] [-n NUMBER] [-u UPRN] module URL - -positional arguments: - module Name of council module to use (required) - URL URL to parse (required) - -options: - -h, --help show this help message (optional) - -p POSTCODE, --postcode POSTCODE Postcode to parse - should include (optional) - a space and be wrapped in double - quotes - -n NUMBER, --number NUMBER House number to parse (optional) - -u UPRN, --uprn UPRN UPRN to parse (optional) -``` - -### Quickstart -The basic command to execute a script is: -```commandline -python collect_data.py <council_name> "<collection_url>" -``` -where ```council_name``` is the name of the council's .py script (without the .py) and ```collection_url``` is the URL to scrape. -The help documentation refers to these as "module" and "URL", respectively. Supported council scripts can be found in the `uk_bin_collection/uk_bin_collection/councils` folder. - -Some scripts require additional parameters, for example, when a UPRN is not passed in a URL, or when the script is not scraping a web page. -For example, the Leeds City Council script needs two additional parameters - a postcode, and a house number. This is done like so: - -```commandline -python collect_data.py LeedsCityCouncil https://www.leeds.gov.uk/residents/bins-and-recycling/check-your-bin-day -p "LS1 2JG" -n 41 -``` -- A **postcode** can be passed with `-p "postcode"` or `--postcode "postcode"`. The postcode must always include a space in the middle and -be wrapped in double quotes (due to how command line arguments are handled). -- A **house number** can be passed with `-n number` or `--number number`. -- A **UPRN reference** can be passed with `-u uprn` or `--uprn uprn`. 
- -To check the parameters needed for your council's script, please check the [project wiki](https://github.com/robbrad/UKBinCollectionData/wiki) for more information. - - -### Project dependencies -Some scripts rely on external packages to function. A list of required scripts for both development and execution can be found in the project's [PROJECT_TOML](https://github.com/robbrad/UKBinCollectionData/blob/feature/%2353_integration_tests/pyproject.toml) -Install can be done via -`poetry install` from within the root of the repo. - ---- - -## UPRN Finder -Some councils make use of the UPRN (Unique property reference number) to identify your property. You can find yours [here](https://www.findmyaddress.co.uk/search) or [here](https://uprn.uk/). - ---- -## Selenium -Some councils need Selenium to run the scrape on behalf of Home Assistant. The easiest way to do this is run Selenium as in a Docker container. However you do this the Home Assistant server must be able to reach the Selenium server - -### Instructions for Windows, Linux, and Mac - -#### Step 1: Install Docker - -##### Windows - -1. **Download Docker Desktop for Windows:** - - * Go to the Docker website: Docker Desktop for Windows - * Download and install Docker Desktop. -2. **Run Docker Desktop:** - - * After installation, run Docker Desktop. - * Follow the on-screen instructions to complete the setup. - * Ensure Docker is running by checking the Docker icon in the system tray. - -##### Linux - -1. 
**Install Docker:** - - * Open a terminal and run the following commands: - - ```bash - sudo apt-get update - sudo apt-get install \ - apt-transport-https \ - ca-certificates \ - curl \ - gnupg \ - lsb-release - curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg - echo \ - "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \ - $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null - sudo apt-get update - sudo apt-get install docker-ce docker-ce-cli containerd.io - ``` - -2. **Start Docker:** - - * Run the following command to start Docker: - - ```bash - sudo systemctl start docker - ``` - -3. **Enable Docker to start on boot:** - - bash - - Copy code - - ```bash - sudo systemctl enable docker - ``` - - -##### Mac - -1. **Download Docker Desktop for Mac:** - - * Go to the Docker website: Docker Desktop for Mac - * Download and install Docker Desktop. -2. **Run Docker Desktop:** - - * After installation, run Docker Desktop. - * Follow the on-screen instructions to complete the setup. - * Ensure Docker is running by checking the Docker icon in the menu bar. - -#### Step 2: Pull and Run Selenium Standalone Chrome Docker Image - -1. **Open a terminal or command prompt:** - -2. **Pull the Selenium Standalone Chrome image:** - - ```bash - docker pull selenium/standalone-chrome - ``` - -4. **Run the Selenium Standalone Chrome container:** - - ```bash - docker run -d -p 4444:4444 --name selenium-chrome selenium/standalone-chrome - ``` - - -#### Step 3: Test the Selenium Server - -1. **Navigate to the Selenium server URL in your web browser:** - * Open a web browser and go to `http://localhost:4444` - * You should see the Selenium Grid console. - -#### Step 4: Supply the Selenium Server URL to UKBinCollectionData - -1. 
**Find the `UKBinCollectionData` project:** - - * Go to the GitHub repository: [UKBinCollectionData](https://github.com/robbrad/UKBinCollectionData) -2. **Supply the Selenium Server URL:** - - * Typically, the URL will be `http://localhost:4444/wd/hub` - * You might need to update a configuration file or environment variable in the project to use this URL. Check the project's documentation for specific instructions. - -### Summary of Commands - -**Windows/Linux/Mac:** - -```bash -docker pull selenium/standalone-chrome docker run -d -p 4444:4444 --name selenium-chrome selenium/standalone-chrome -``` - -**Selenium Server URL:** - -* `http://localhost:4444/wd/hub` - ---- - -## Reports - -- [3.12](https://robbrad.github.io/UKBinCollectionData/3.12/) - ---- -## Docker API Server -We have created an API for this located under [uk_bin_collection_api_server](https://github.com/robbrad/UKBinCollectionData/uk_bin_collection_api_server) - -### Prerequisites - -- Docker installed on your machine -- Python (if you plan to run the API locally without Docker) - -### Running the API with Docker - -1. Clone this repository. -2. Navigate to the uk_bin_collection_api_server directory of the project. - -#### Build the Docker Container - -```bash -docker build -t ukbc_api_server . -``` - -``` -docker run -p 8080:8080 ukbc_api_server -``` - -#### Accessing the API - -Once the Docker container is running, you can access the API endpoints: - - API Base URL: http://localhost:8080/api - Swagger UI: http://localhost:8080/api/ui/ - -#### API Documentation - -The API documentation can be accessed via the Swagger UI. Use the Swagger UI to explore available endpoints, test different requests, and understand the API functionalities. - -![Swagger UI](SwaggerUI.png) - -#### API Endpoints -`GET /bin_collection/{council}` - -Description: Retrieves information about bin collection for the specified council. - -Parameters: - - council (required): Name of the council. 
- Other optional parameters: [Specify optional parameters if any] - -Example Request: - -```bash -curl -X GET "http://localhost:8080/api/bin_collection/{council}" -H "accept: application/json" -``` - -## Docker Compose -This includes the Selenium standalone-chrome for Selenium based councils - -``` -version: '3' - -services: - ukbc_api_server: - build: - context: . - dockerfile: Dockerfile - ports: - - "8080:8080" # Adjust the ports as needed - depends_on: - - selenium - - selenium: - image: selenium/standalone-chrome:latest - ports: - - "4444:4444" - -``` -### Run with -```bash -sudo apt-get update -sudo apt-get install docker-compose - -docker-compose up -``` - ---- - -## FAQ -#### I've got an issue/support question - what do I do? -Please post in the [HomeAssistant thread](https://community.home-assistant.io/t/bin-waste-collection/55451) or raise a new (non council request) [issue](https://github.com/robbrad/UKBinCollectionData/issues/new). - -#### I'd like to contribute, where do I start? -Contributions are always welcome! See ```CONTRIBUTING.md``` to get started. Please adhere to the project's [code of conduct](https://github.com/robbrad/UKBinCollectionData/blob/master/CODE_OF_CONDUCT.md). - -- If you're new to coding/Python/BeautifulSoup, feel free to check [here](https://github.com/robbrad/UKBinCollectionData/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) for issues that are good for newcomers! -- If you would like to try writing your own scraper, feel free to fork this project and use existing scrapers as a base for your approach (or `councilclasstemplate.py`). 
- -## Contributors - - Image of contributors - - diff --git a/SwaggerUI.png b/SwaggerUI.png deleted file mode 100644 index d9a78d88e8dfab848cfb2a1bd94ff89b25b792a9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 91169 zcmeFYXH-*L6fTMdQ8|j`04f4DM5!uLLlGMSQbaluL_}&5DIt&$qHquusY)mGUP4c3 zA%X%5N{0X;K{^CNq$B|X$%}f=`}g)33G9`<*IsM3HRt!O{l?T-@6f?h z2l@E;4(Z>!ZN|sP-^cs-@88F3$?;NDeRj1&3ZC*Bo!!i>eIJ6|o*Oy)IzDyub>y}2@gdGRdAdC}g}8xz zea>mvA2>l^}721rs3-KEB{^e=ZfDA_DC0}oJ>AG?sfg!y5|1C zC9**3-;5pZ3d^hdUi-1!aQvODGk5+zd+^i&5!1cR3NkV(x9@~L*1aQlRb)4I|JyUS zuUhj73-+xDsC*9out(6~=#}aQHqFKF8N?3^1G7%Se!^!f2r1g*Nn{3hgTVUaaxGGA zd-&0=-xK2Fd+|@~zJ-yIkl4G>7vc|tHu{Zv-{CguGU)KJ<|39vn_S?W~OKoK@34H-+lvMT>g zv-Q(z-CP=ylGL;@DF|&jAg$iNP-IaY2?!-WSetF9(r+!BhwNjJr6w#(ZA+U&mix9h z1;?avohGsZ>X-Ut(Q6_a_fXF_uH=ty^Ex7CM`^}iuLbv%zr-|P(yM6ySY4#fra-)i zIo(BFh*Vfm%MHp3bxHPN4)Y zLP2A-4}=aQhV74GI!~gZOBL<&>x8rkh0Ef14);Z=O+GgYnquT+`kfn!Z!K!0SrwJU z*(~?1=bXw(IdGhlox&D+K4<3AK0^5Va{1B9nk08eh%#uTN-5zN)b^S+><<+MW{g7J z)%zETpdJEuCs>K5KJ`w)y%Oc*0v};@D!cV9c2(yWuo0dO^mv8Axpd^Rf_`_5o_HZ) zI~U&;m69PDw9#yTDz2lVe`FjJKpk)!^(oV;aUpAL{RoLloe642j@AHlzjMaEC8@kw zws1QLq(g}gi`7pK%23&r00Rc))d)|M0~J%30Su+zrh6k=xMQfy%7HlEV`5`d{3Izw zc+~bzh42W*7B5r!E$R%C^=uboCc!>8U~RU+u*HU--QWGrv?|-K*3ckK=boZj)Ov|U z@mOIna&zX%9HjIZ)p1#Tyb7T6Gea+GZGCL5yVhsZ=3af!4qSS=idgA94(QYr;hcit zSK^bhmgyBeJuS?ve`>I0=<_Fn^WjUrEF4YlHWv!wNTvj{t)oYCzH%zqkIVSe11I-l z0EU%|4B1GQGweeLUf!6gzKWv%Ht_LFd6re>tkq4Q=!$FHVl0NHm6__+*BD(!?DW&GCt|I z<-naahm$&shGHJkMr)K|%bGCbt*u1}+F7DFgJ~5_!g7kJyeX(}{Mb|TlPgm1(^_26 zes~!&QMu#)*qw;Uc0g?oCNbCOPP_6gCIw`}8@5;-#c5W>OQG{@`aTkQ2J`yXxVU?8?$oZ3>S_#S-hX6SjEwD+$QY{MWcFt z09I)pov$dcC}WL6bk(=ECuhqo^hgd)!qe^I39Rljo}=+6rLO>S%}Gq_&{BC>%#N02 zDd=ot1=Ut8o3#q@Sfh-WGlDoy0z?7XfKN=^+VGZ71I(wW8Z^ooZLEQ>@McTeKzZJ4 zi*#(Sw)*Mc?8V;>A|a;MfV!M1#Nu(#+#CVMR6G#OMr?QPSTOyJxokJ5!<_KFDJ%y_ zQMXP1HI@}skCwq*vQ>Ai^;?mdP@)axaONM9cALch^ zM~a*Hjn?P%RkmU)Kp;ty=)Jfa7o@}J>x#c+#y0*kUDSL?{ 
zXNE}x_1L6+`RC1)6RC;I5%7|?Y3YJYY`2O!{B7qlcNKQ1u009sG_qRZ(Y zi^SFzl@#U{(vAx$Th-thJ*4qRw?~L0&8(3rO>Y8_7mm|h=2psZX8=h*D*9)}$ANyv z#R|AJTdqe6)|un(!`1H6vc_e8T(dhO<{5MRuRy^ktg~3u(?`u$hEdz*Mc;Oaj%Q1D z40l6N4M9=jq{z9_DpR85s08BM)e7j4ht`JTF}V_)it0tQ6TehL+mF@C=aAzEfDJny z0)(gIg6yV|MwZPwC+?Q+`1xOA7tpO0a(5C8Dm5Dx^T3ogky1>kMn4(~I2i|GfUgyK_F<76Q zXJp7d&;$tQAB+%F9cmS|rz*U8HPAL>lMAb4P#m0ev7m(eILuuA!@enQ;7vAt{Bx4R z{I&!{JiigN8I`bHu^nsgWP``)Qq*?X`?$g(DEln>9n*T!o?l`* z(zbg=7E{~dHu%K;NNk-?k9hrbUpN|cpf~1iz?x`*J=<^jQD7oiF@;0JjgQsz>BsPV zwC{&dzfpn$TG_wHK2)eL-#AI6uHY{yAiucbf;uE+q^iTjBOydc+qyGB5jJ78u%q4-OtATORTp z#J$-Yg;wBl{LoCNm;B94Vo_LWA2Dxd?k9a}EM)iY`(^I<3&OJ-oW*hk*pU~uj%-hS z#B_6mMuLKxB~~qug3sbtaJ#Q9NOUUpD#s5Z#6EPW32p)R<~C^knm-?Z#b3`2rY*AX z8o~5~hTeYS`QJwAp3H- zBHH(G4tvv(v+Cr4Ihrvy`>7`y+?(|%_Y_bha~wNn8wjBU+*ybu2s6lb@5BKJ-IF%mbr_l%tj zX_yyA^ppP0*I)`AE$dhi32?UlvTrn40=a%w9@uDo{;pcC0uFb$_uAeLribbd|gnh-j4DtNDhR zqybIt7ac=Mz6(V&D=SE~+l!!gBEyJw6PGbvzd(e^Hk+N#3lU+emQgGTDGfi}rR2u$ zP@NJJmUVNtcb11-I4?E>A`gg#!McrP=I+4|zYXHxlRK4%P7M&3MtaZeygq-JIGo_% z%L@>m=qmer74k=XKOIs9fF1!QRND&GgenCBM>R-!Ig%Q-_`6VxvNX*e)suRsgFG<+ z@hZn#9E0(va&kCDV=x#4KKZ3NM>7RN7c zx7){B~+T14PZFm5wfH-g@5m)8y8h8Z-(W@m zWnrPQie@_@si$On!D-~fTxS|(@qH!sks-d?`$>Djfjh7CVmOiR?}z5)`{AI z8&~EgvZJ(D6Wxc37Bv5P+_h9k9Y@`$sPU~p-Ccb=t;v+NPAn<0s8{y}(i5(I5Vw?S z4BDx$wuR*!yO-Q@npAd$Ey_rBZpC6CCD(DAIGHv;o%v)`iz?O{Tec=!q} zcjv9tJ=P=dudVf*S3zCX{8Y_cQQg8UT6Ki&pz~p)v4+~3MAu*|YdI1khbTv$m?GN}-IJdhp8C$-955#57)E3iwsN|2kU3mX6Lm zXgPy-kJI_tdE`&yizm)W+0Icn0^Zl2(^7gkSVS~!HHh0k@NZ4|_Z#PT@yfP;iqogR zD!RJm>woU#_J2&9?|-do_x~?5K9ckAZ267^pZOpCAKCj0z4(3nbLQUfWR7=o=Jws4Ert(y z0ga(l2wM>DNX?avEJnXAGCO~(coqViRmF69zI=@DQgSk*c>cvtW(%-2-1*jbQ_Rkr?eupC zd)*mPVO)$u{8NA(-wj9W*A!~et1eX7wIza57Yg#HW7DT~eI@yp@rzfG`Fj@-;<1JO z5`~ykZ0EbP=kjsWGPDP`J%;eZ$hMFa#)9Dt+v*S>pgSEw&qp3E&BF?$&=3iATt_XW`Q) zm;|h<)x)r}gQ?1V(F*x!!)eriNJne;!4sWTW{3LZZ{x}vyleil_)^VBE|yjd{&IEc zQhY0E#RU2KiujGKEbf(y2}l0b-R1||XfW-O-0kif%4}kmQ%YPJ6%re> zx=vdtd5Z}4ty$$DK(=OX3&*arbE(N}v+vQ!?9!+4#^ZT7$X{i?gr&;P)S|U8$)Mo? 
zDW|~&W+l~EaCoOg`5nWoyQ8CTV`Yk*8&E>5Ux}oHAni(ijfzbhEeF;@PN{en_;jgD zgY5uO>8LR*z9zH6C-Dc9b9Gc`zn>@jYP08Xd9$0-@sf`^tT7)n0h=z)eI*8d&N)Sr zyhL}U{Z{eEt~(wH_L8Ocv96=qwqC$vRLQ#UXoHKl>gr->w&uQb@2mJo{<_d>BB$-n zq+L?OUZD(09h}7PG(INX^bjlt*_7JWe29t3Q~`~;eu+NY<*h^V8!EA>-(KzHK^_ho ziliZCjk5}<{ipR05Q!A@70cTCh>!;@HLBbdYhP)XEYp(+xG)YK_M$lxA3IgM0m`^z z|4Or5R;Kq1vh3EiI1e|!V0YiDmAU({qNh(U5#8udgftI1HLop=RH4t1M{E7#hIFy$ ziYpplf<4Npq~c|!|1La^`z=WceQ@KGkj^7*@F|9m{>Od9p~7$}%9e?_|HG=-hA^#`{zusAod6ufA{BX9i@Z81Q)cz>JM zuw$6|L_PnENE3Uo%wc?l*tQTs27X)y#emg+@R2LOpyl_4gVk@G6c{_EUO9dO+BJWA zlW?M6{e1JzZ=cPW@1kutt(6VFC)}kzlIqeBq`hchl>01k=c#+foGP)O*?Dx#mZ(N3UUW2ACR94RH7?GUx|jCW!T7 z^04}>Gu~?}fI8E)+yyu4`i?^V(Vl_nn|%w6$kJ)!R=cX|th>J4?51@4!GNB5M`t^&F|Za-?Y=Boyg@Wx8I!C;2+w1?WLl5&FZkUr3%!4h0xz2+3l1Ew zSUJU20ZJid&f-)#w|B-ssD{G;zj6&O4~Fp0=Tp;W-!v2g6n(x$zLPF**?&ILd+zu6 zZ@k%pruJ89af_qc0Nu|%FXw%h?K?hFSs083MZTH}EyKow8EYJahKeRmJFCL~DF?P(jb0Q*gcDdNl36UJT*I>tb9XVCb z88WuU04j(pttahvh2UwBxJgQogkS;fWrk%dc&@_gr*1Z|_yo63Q)FF?>xDm~ZG9Ee zD7mN63BERTunj=qI+xnLiy$ldi+@}Of9r(Iudf9{C&R(LyH1|$QW9via&gcR$(PrE z`f`m(y4aI<0p*c!2ksU;AaBV8)1C&h*dolq<{nzPGV z0f!moScZ?`1o3_>O7MA21S{rl@*w#*a(4MgW5m!359Ha+)(>4%r|FP%h z@}ETsV-1LBDHqf=lx*J95+`QO$Q*hLdanv93nR7TUKxLZMb3AQc5H0ew){oimV~7= zGOvI}5uI6O!)gyMNuA2!Vl_zUw=N)?$i>YR8G1}loGAl@zICKBlvBW1JAI{0tEVkt`OlWS1a$4bD;$3D!gL z&5H-5-))$CM=5)p2!?b%7M~E8u8hE&JpG+hzWAr9;b9dj!dL|i$BkOM27PWeM!h>E zdzbo#p0%hM5jBr#Tdpn~oFDpm1<%uPKXdP(w@db6%lv(@FF9~ z?L5TBeCoW}Uq54h1%;HLFJAeRHvI77w3H{iHYZ_O_&y<5{9r)9DUMqTCvf%|Oks5w zNtHCx&BQUU5P}?Rl$j%$1aigDh)Q~D7FS77uuHz2X_aL6amfOSh?@YKf^}>7Cn>ko z@W`cI*=@lA#kX-ENfJwLLL1c?B;cFT5^I9D;(eSzl=jwqw($m7-gQY$SA;{_;K7?a zjWk*#knW@7G5Eyk9#f`3{7|`_-FlLUs@6^5r?@+cMm>Fbdwk2vZfKqIR2&pduM<3- z@3HBoGwul01`aiI6D=R8fS zSoG&LU9~1KzMM#v8h!!T9gs?e6V>F%62}q46eU>!t$rY{F z-Q;Tb{j;#rectzWyv`IhENODz&l;vSu-A;7;d+VwvnDZVwb&UAg?_6L858)7UCdq) z7%R0zjJA|1w~|?XCneo)J=HJ0>(XvjbKaTf!jR4Hl(A`$MM>;wU0Tc8em#mbeZmZv~H>;9sPK+!1_2vU?0} zuH(T86Ns!Hvh@q_S%O6g8z-b~Iv~gl(dyZi(-rIkT|8Nw$ke%TebiX$*2(X|79&TS 
z!(P=lGbS5jSJYN$PVj}s{!-Tln--Ca6%62h@Y(H!Ti2({8%>8JMa4e#3e5u)|B0%3 z<|bkVTK1%~n?FuOs&~k&?U@L=y{@-6%I;)3{bk|H6^-p?|Jr$ViYfg|?Qx5!Iib;P zxVkj@44t*_cxlfcK;6P*0i)RDm;c=sS@}3RtI)1-#*qF_UXrkJUV30iGO{Oq%_2eX zI=9LW&#B3~AJma_?JFxh6}GY;X6xQRzaJM%mKi2X?ix*-S0EW8q&1`unrG>5RkCn; z(q@ncX_cT9LG6N_wQYwcN+#%QL(|`1WP0&bpE3=SNB)z{8pbge4s;dx1!VCBmUdmXZCa@Jz`9t$dms@5NVaD*Br8xaX;zstb=bUC|io2LWePe;6Y z8N(tHoJr{SbIyDiVSkP?FRqn9A7meb**mJ~?6tON5FZ6R%ZM}nsrM&EI2b$an5}Az z0&pH=uW=P^tNC`YlUPs+VC$bQZ^nQrNU2b`p$bS`6#uETV4unqFl z$Zl*Xw$|`;Iq;;xdNV?al)i&$H-wvqtTC12Y zlHq%&_DWM^;Vb1ehP}hqo$15UvG5tdZ(+SjC5;f-+d|R>c5GLP6N*QGlBOoT(mEnP zgiz*tV0~U6g9>SLD>d&|2+PTIjGnE*^?O{9MdZVy5<5v3QOUp>cy-toL??HA)H#JT zn#)OFg%`DZnXneZdzn`q<<8(bAcB$7_Y;pGMgnFWf7lSaXV@|MQ9|{bkOBIf+^A+t+RQ5hWcvkD=7SI3?$UW8KO=t)gJT z#N)o+J}xl>+YaD#R?PZ6#hhb@`o8BYf-PP_9raD)nqd9nMU8sZPqr(%7}r-U=iZBU zNL<_6&Skq!EDoYK@hljreZ%>-v50!U#pisp7%BWO?Fi4&YVBIb6-&AQ^9jz0`>tw-T0N$ zrD^13_XRP?hJ+dQ71=PMEp*4Y=QJzhAxu+cA(YY;qI&$D`n4Ix9XgcGr;N;o0n=B9 zYvwEn03neYzw1pZ*Lg(}DIj_wdRsI-)>RSizzDd#?y#>`=dhZSkazM3 zbgkb`uz4Y1cwR}Aqh}@g*m16~e#=^Qm;+5tOXNr*S9MbKSFf1Pz0B!#^7TG%iW*<{ zH(5Xndt|?F^70++@A5znzo*S@EgOILlXM;3*bf`iJ3aXgV!a}nJ9Oieu9QR$im_#x zFQX-`ZviK=Q5F$MGt`L!t)~OUk0C?O?+?483Fov_RY!G@%lqg0Q;rpHVGnv0U&mb; zX;~}Ro&rj)-X5IK%V1!~Y+LqrJ9W~Wch!Zii`#eZp0_FK!sw3$;5z8yB}K zCHubE_hnI-a{tp|F?3FiK8JZVrpncO*;V|RkXQuTCGq-W$49bBu2NW?N4RB$p$$#b zO^Qcl|EY^*1b-!?d2CPN^X}rCI<`WAqIKBT0H?GbkZ+(QbkU;e`3n(5dxeLj_>z#M zID4T>lF$u+Mqo6v6?n z=x00f!B@F#RfMay!@UCV1LxyWM=4L7 zXVE6A6V3e`xt~i}vJX^g*5<-}&E0SSXOADnjiGrVwSBNNzG=+ioEMYXr zh>SI2P_Deb$UzoR^M;_4E+oJ?ZUvv)d-mmeV%J;nF^LcP9>lt^-?pZ+IF9$IGYC<5 z$23*Eq&9AUzi!74RkE|;qu5@1 z>qr|#s7KP1W*p1ZjP0Q)*jjq$CI7TY7Rk@DF3lBzcu}b3I=~sd>PFaoM!YvH$u`{! 
zr))4SJRaLRnqRxqjb~oYl=nFJRc4GIw7=>|44X*3qk~dl+&z!H!Ws{NuT-b4a4z z9r)RiJ-Tx{9^q}iK)QK>BKvW}hnS;>dO}u$Fj|eqTeL7wo^^>t=s-e%4W3SQhkE5d zFgNl)4ez-n{~FsfBKgYhHIiDGuc7GEZ=~AFPimWuC|#>MLymNA-u~yn@lhAl@B6(ikz)HdQf2@)D=$0@?!?c6+1#nsdBhrS;_aQn9)Cgd1Nwtepd{`i zibk<7&P~S-tcZ#RPt?g*xx>nKlbzbjp_cgNo&CS9hxg@dl(fHZr^J9<^o`~`QEySA zh=imlL0%&CD}EAp+Y2Ig6a|T;?ucIyq@PGVsSz(%j-BB_TJX-&zR`G6lgJSPoRFmh ziHZFt(}-Uit$bXL;?byT6Ezk&c9)i2bZcoY%{4~skqU1gALzzJ7+|6XG)Mde7kndiLtK8R*=PG?jon2EdnN9Llh~uY~Oh?a2KCoFUppB6J!-H>_ zCkgh!_6g?7dtz^>erW+N3W0UYtQJ6POyX`QJMbD%4b(4U*`2E1e_$2>rB2J?rVu27 zThE<33gd*@kloHDM8DaU)tW|2S`BoV+g$cfL&Va*WCxcsN?Jnvk<#KXPyo4Ll%3gI3-D#9~2Y!xoAyt?C(zKO~_!TzCn! zSc*#p$#2(WjP3Rme1qo|Wf>qgDF{o6bbMLK-y?FJ?n%=nQLC4J-;sNf2%(eIFmznIuBdnKuDTTF4BPBe&!z1R9Y zkXKq(A3+?(GN)cvIE^B!D~!<^%6|8`YK@ZeOa9oguA|!%e3y)r7p)!?7oXDTep24y zD>(Qk8kqj&ooi!KI`E5)66}x+Uf(Ymga0v>g6knk)VpP3<#7QhyT1iL;SD@U#91DX zurln`*fZu&8fIeAG)SW}MP1GmN5JCKK~v(LV#O8CoT-!C${+BCeJHi%bS9>AFKaD* zJbts?r4xi-85(J{Pr;#$!~l6Hjau>YD(l3c8)5aM>}Yr6j6V+cBCEn+oldc{!`$+{ z6%FJQ?AwX!6FHIa^oIvea--%h|MGk{MsEC1-f+K!%CF7Lzm6OKk6FS0GvWC99f*XT zsqg>uBp#9Y;_d(BT>oz|kpJgr{eL)(|G#UV0{>qjlK&s8@thM03;Io-OFf$@lC1In zYF7GXOV@)4?im7+#*}lj4Q4uu>~Qg?l!r~i#QV$-Ez%b;e|&bo9?$SEJbCk!58+^-m2rSiyG|qL!8M&lVbWs|Uk70kD z?Tilm*A=$|_V-3a^tS!+q3FMO%zMVL4Ip9lNEX7gMCI9Udw+5J7YpCo1ni zyDh`_7nyHbVu{&$&`sD#23Wn=Cr#ZFjIt!t^E`Sw(;EbjkC@~$Xn8}>v5_a^+SIJI zZu3uD?0VkY1HX<304d%Uceg@q|Aj+%ZPvrmZ3fDg%l3msn7c(K7h&gu+`YyQXw-DR z(Rem4vwSKeI***s{3KehROq7}9#>pEakaf*>N47Q^(wods6ip~W}0-S2vT_`&2Rbs zlh!RQnXZZyB*0^UfLOc%!@33y5ZI6M=MoyKb_t(jQE6ZBMbljg@iY&S`6R?F^#|%v z?&0NY1c)9myHgtDHJ8PtZM|q?x=nV)y+cERghKlFfz-|PdY`Y#I&@p8`bYHTGxL$8 zq2rY%^(2I?3irK+pX*fDA4T1c1A8RUiW#%f#ZWkr5xiR zS#cx#PUKSI{rfPGyIJ~17X5!2!tb1jaiScCAmmnOH9s?IG6z+_Gqi$v`sdk{g-6GB~|XN_uP zZg}H^=sL>tl!dLfXF{$;vrN@XWE%3I#2@rzSUe6G-Q@fxtu~7wk-8Z^d7f~ERKr|* zQ;%U?F|wai*zFff(?R7lVEmuY1&P2u(R=5PkN01LX=E;yDI>Qo!tyTAdXh)7q0nv* zDCsk#`Yg%*UEmPL=$3O-lG_^vjZ-i1qf;rC+SNN5RLrN<_K@! 
z8`4bd-?5NCm_;`MAJXd_j#~;V1axeIUP{z1NFf)y_5i-mhQfbPF3Wu1J+Ij?#hRDJ zD>Y=S>)3_op{x(mzqZ5ue?CQSiXDLKV!@BX^^kCHD^-1Xw+XlgpTMiFTDkrPnPYrEZ(zNRIjW?X|f>%vfs36 zZkAj`?jU@t67P#68TYhCbL#;@IhP)A>A~=5$Vi6b_{rRcyWjdvHVp_YOO~_*F|K*v zk5F|J4Zl&AYtr3E{^4yq^Ii3}49R@9v6qWMN6S%-wfKcq;(j71poH3Gb7Vs|qe4Mk z>pq_8G^2tjScBREocUCv)x%^ z^NQ-WQe2T+*(c*31~>4wDp!5u`DaggxX0TdzquF3ENbEp)5jdL>28vsqeopLM855` zq7}4w;&&ZNQJT8xTNr7q#yVExLG5ajBQ`(;VuO~h^R^Ut=fAZgYq5OuHO1Bqw;}ty z`pA?I1H-?aJ>19iLPe@ zMdz1omOC;cAk@)X5KZ~s@%QT?)~n{@AF*0cQuPlfew_Qlw*&C?HZfmIR`}pmFdIl6 z+wr#dpSl#mop#CHPHm?VsQp$FR_VZicD7-HA#~znFMN(&x_sC2r!w zO+i)a5uA<7GOy?r4%z$Ibq`yge&3ppx>pevDpB|;HS7(Bn0%Y&$P;q@aQmP5?sp)U zRo?qT5_Ks9zW&A1{UHe*x%oggzTy4R?y^4|3SVfaw7|t6Y715m{loNaRZF$6-Czw#KU`!(r)tdC22bt=U9>zaz+2Abf#7}NEbbpmUKmO&#jL*8Uj6QsKW;%}Uohl^-?mV`` zu9t9RqU~qxr{D6LMxaimE)|) zwa1wNj~=5)6sELA^sis$SOq#z4dkONs&0I#bDZEx1tw0mgpP0AyxeWcdqtpmNNmo% z%M3Q^<1Y&>nJ_M6-w45o6($k#I6ciY*}R3Jl)E1Kayf5t$gi&U-k&yhOFXT$@032= zJ9D03RrJ7HUmQk$r#Hx+eREmM-;3QXxqy*f9!Qiblt9*{oxEma^X4#q?hSQFg)R^g zi_=dz9(a#c=rE3!(UM|l$)u<(d?(HdV2UG+=byX!dZ+sx7;SjEKvO-qrpSMAqfz|P zL2lB!ZPghj<<|PPmDurZn9Lv?Q>*R#3_>-~kjWdW&BY10`L!h5=)}Em$GY0wg&C@29uE=`f__#=m;!k_@ zx1!SB{F?UlbY4d|a(gY(}Jq7v2ydA54VuRAvBzO4Z1+CiNhfl_{D_y7BlR3^m_RLm8z4GH~@tsk0LiW#s z>oMq^0QLi?eH=rjhx@B`ye*A}KDO(?WakAl{~@%~Ow_v2G9S5kj4q3#bi^Bl>BTNl zh)Ax{mM%ogt#iyO25TZWbM?10UXYCo_b$YZrj=qC$Jg6me;KLoaXj7(*8&rY{6=R_vtvNL!`)+1#u89Z~-DC|_w9|yq z+X1n_#G~`1R2w}I25(np>@6E4tyA0^0$7Z8U*1ael;>>A!Zf;zDx4U=rpMY35jC0) zLOX2!xx|Oklzfs2q$n`#uLsxK$vOH__XltDNoFe0=wEKEQKSd!2>sGcEb^YI>VKr{h=Nf(cDg#*2ZWTYn+!(_gL6lzMIVZnjh_ zipsq%?z?a9DDfU|o{pR_251w%-FcZ2I3BMt0{E)!G9M!jjurVOV&L00MiKmRLJKXE)PiFo&tXls=N>(;0>k0D!R}0~T7gl>2heQ0Xu6%9YUkLHn1fJ%Ogm+vbtNt1Yxk6zxg z(m_|6oH{VgWNginMWN(SD=JkqR`V|<-Ng8{?@udBlfZRwi{L4;v{y>h7Q4=3$uS6+!QXsZZtaSFkRf2u%Vo`c z;4Bi;H*R)0SuS&mv)vrX-(V^^T$hq8h|-ggThbpT6ADq}r__c)6~C49rWvs*?|B~niZ$LV2h)H87S@<+?a4>C zwaq~o!7lxuii$Ggg-XVwRu&#hg&~n{#mfl}#bI|fb5YEDTnD!g6qeelJXYO&>Jxer 
z@U-WB6_uNRAOX|pKEb~8GBjl4zQ~}v-G9xOOi?ZLccw?g7&GC_E8if;*Y{8iOF4}= z0L_+L_pH4`D#5KccH`h0k`*zoy;x;Z(~IT=EW5nzPWPbx!*ui2r&Q{8YA59#y#;=N zLj}z${8cf(xE+TMJDq=+5@NKbZk~^hzBy@sv)z8f`HEI?;VmH(W{B<`Wso@$&D<`a zH~C3_T0%5-xmqR*2_Jf>4~^2igVS00Cv869hp`aTa28?iR|1C_H$}t7r%b+#w@ICI zL?p`4GWN~y`UYkjU)a@fUOoL?X>6%N&kuCrv`l#7v|p7 zH?sU43L0_sTs&T6n~W=Q_c(<1#oKzkzessrD8XXAd{b)Z*|}B5_zLh@=X^7{PG>9I zauK_ZI)^u{vL0|Yb9YfW^bOPEp+Xq7M(kz_+l^JY^D}h$U9ov&!~OFLjd!LF_UgAl z#xX*!C1r0>RB;3 zImZa}(ygd|WPYtu>#VHSs89LDR_KklfiCI(kE|{l^fAKQ9zko;4&?M&kSGO2x@Xc) zy{HxxMY{)tP8;y#z1joM8PAJcP94Ybc0bJ=C)WBrXzmWmqAzH>P>Q{*q#B1m{2fS% z-IVyV%r12p)SS~QFk`?q@_ZnN0ymBt!FQza%FDW1z95$xt>`i-CEQbU13Vp@O=S zxQAOZ`w}cR-j%ZGi97m#F_U`XO)z2`zS1*$r`UCu2 zzb~!S&rAyX!dU<)yWndwvy~crYkhAJX}tjuHzzb$)bKFW%B)>po_+n&#O#2`^9Fo* zzHy*|$pv^pmWo?*fq-wfU2&%`Kdgcmr-Hj(=Cgk|?LCHtu3P}qE1fy*$>_Ee%gJ=y zg=0t)*{AVws5gHqYQAnB_8w9+1fJd-4AW7+Sr#Rk6MmTT$^O0cTvfott`oc=L6}3n&V6YwOCPgq|Nq()wh(wXQh#c%xji=VqGA;n<3Kf z%hb|MiDT9^ceTU^GpjJC0&ze*HmTvXvrR4)3f*u}=zs09HH(UNVcwNL(d&@#Va4xK zP$*Y|7ATniz1J3|G28yNUbFX>BiX7vi&WvGonfx)pGDV){?S_> zmd8WEDIpesXqvQH;S)~m1cR_WX)FxX7gyEsXgFHBWXy;&0KCRCjVE+VTfa+82aLOQ zaOc}RosAl;Lh|_2ZrUXqEFh|Vz7tX7-0x_GBG2#WSoq+M@n++lfs!tCS`5zHn zYH{t74@BI{yc(|`;dIzykBf|q%h#||Ea;iffW|nDeG*;j9^2Kmz$G(bWdbvZjOcpc zZmP8t|5t?AY!nDr@(7boUbr0}*wBYIz&wp4-H2fNNIM1fCvW(DYt4c)(EYFS`p)9l zSs3=kjVIffktiLEDzUCAw`8D6575ix?Z|qzt!Q&6c5OxupYSBMsY)BdUcy;5b&tj? 
z>qv8V{onCo& zNo~nE)K-NR5q+h1WW>vu?Vqh2A&IyvO*(Me`N59Gnz0QN_gNuPqT;~P$322Vo?fYw zKI_i5)fbvt?28Kb95zTb%RA=3I3E&MwzIa{eO=ZqmKjIsAl}w^c}yLAN8IcAm5lKs zekGB(6s$CValtKxCd;`2lb%O`oi6ZsjB1Ti<+>SLf_5En9kJE&k+A=A)_*X~JB>*` zWvH}M{5PYmJs;af(ic)nQ`XmD>29N+uhz4%DbTK56Is08!t&v9r#}&T(qd9~OPiJ~ z>#*m=(hPjLYmO1%>W&3Il=Qa=pM=NjEg?SG(x!~t3k!zYFCx!nX1AQxyIkh68MC!^ z_CQNt;y!nhLY$wF2cy;HfNf9-!pekzzWD>+U7?|p=;o|AR3M;z@5nIE7h_Bo2$xS| zb0SF_wb{Q`Ezd+wNt#V;hKwIMr5@Yas7YdWl%pj!m@^%+MI&8qAIqyEI5w-4S3EEI9A7_Kn{8-HHe z;cHLloZ9Mev7Hrk#zJh16~2*;0FDV{qjUDfFI^hNZa)NGFG(sd=h^v= zHyGA@t*b4yCHNPC3GjE?<68|wWskC=#{KUI9jbdcXTNbfDe#N4>8gmLoib|R%Lltx zf2WvjOVIuj%kS=UX)Y&t8KP6^fW&W28g-=voA-;IEal53=?`p++@b>Uv!WiO(++5Q zH8SB@(6RB2!pCgtl23B0&iL8Mb|MEuxiv}nzu0^4sHV2BZPa5!QA9;RKtM%BK|lqh zgNTaK6+$(jBE3lsBm^u6K@dx`%EIrz6Znccrq z<29B}EQA1Aw3W+wRnt2 z>-`Bng+_+!Z{)%0hXpFDaU{Xi=QlX=Ctm@k0vFE;Nae%`YnuGf@v#9j-=(fRvetk# zD-2TNHn48{=?m>10qbAa($!f((34Gi>fvuTK$O<@@Y<#DZr*!vC!H8HWvx!gqA0z# z4zulZE8Zb)s|1eDm9e_brV?JDHe?1hEo(I`%+_j?M{7;Y65ZWZu<=cnLQUFpmz4vr zv%Xd@&><^kk?vF@T^Z%xAI7@?Up*?Sl&rtm%-yDojoa{jQSbUXpJgd`=t;o=v+Hev z#nlR)#TGs<3T$7?DwS$R_?Wy-9ym(CbV+I=;$MwENXX?no5YHY0@A{@C~q&I3DYno zBatah&fS3jtoCe;G{{Q~Z4xa6RH{=0j(UV+O{tH`s{lk6uBbWv0`#uqJWqTi^$maD zK>64|ubl4O?ztn0hvtfA_4zFrQXXwqi$aC(Hiu{K#fWNM6F<=Y$%uWm>*#8Y$+*7i zos5-Y{quewzRD=$;G8nb|*r&QYULhUa5(vSX2JfFCCgx}>4I>diR z(W9`7-W+I!8qBGD_m@}gU-s4ljqPVT%J?|{T#E#W7xpo#%z1qte!MsLRsuQo{f3I-Oe86CZAp_C}ig-o84- zBO*||Z15LCbhBS4khm@v&`W^)+>?^H+N;e|Wybh}pG@y#({V2a0aG9J1SJ=|u=)Z8 z=Yl5+rWt7Z#-BWa5L^zn9poUhP+o9|*}2Ya!|0O;1FJ^IGc60v`d}T1$FX*IAsGQN z_UXS&wE}Eb(4{sl!k2DQZ!Vxy3%zh2N`4#rwEAXAexi+l!BfHxCH~}Dep7#Lx@4C^ z9`SfiyTKI1!aZzILvg9rz`cL{Mk~W~bKYG&KoPHR%7I!3NWlfn($bYr^3SzpzeiQ027Qn?!as9F!RV#g=PDnlK zA3Mqq0O#fepCz@x>KJ}0-2CHjtg^!_wne!7k+gOGkz|L`N-#eAk8sJnS>lHRg>c&v z<0ftLWsQ}-`4(zG`d+>la&rIr;U`GeX=p)b|3LD1uJ8*}y;5+LJca`fRQps`tf6Ja zP2egRDzoxtS4d)p)gvSA{!wKgpClt&t5p?>rnY#4pQ#Zi(Fd)6yRS{{zM|sMY(y(@ zz@(`Gl^c6ttBEK9TU5dybjbZY>V665kMG>O0-B9qHB;ZI%Wr*S-eg~Y=W_!daz~{1 
zU51`VF8!s&=@YaKZp#O$Sn_0cx!v7a)?hSY-0ID8{ zV=Ll+)1A3|-<)I5SFoZGCr~x*F(OqtC=aDNsEMa}W-vzG{-Awt8{FhZyhekiEcD_U zqRV~}m#n<%Ocz0o!eqnFLN}hT6AEM<07&=o7vpMU#VH3rTO)B$^2`3TL~8?>1K|UGd|qt?`{T z@4sG)r1j`>xx2qV!`OtlA$eIVaw=P_qjLg&S`)#mX$Gg(oL?MjvGQ!QLJETtAuqTL z;a-Mp;=Mk!Z)=KsVpv0I`Be2vwzvC2cj`|@OBw5Oj5x&K6G(tAT49s1lCoR3e=vKY zS(n)+N_yG25P4fqKP_)BHD#z4nMhtQ##(vIqZP3P_~BYyyLC&=p4Q62?lB*O@S+bT z*&-~L=(w>)}b01!=aHEW~@!)}En8T7sZmXNa@9n`IJLbbW?!*O@obxF> zxBofOhGFLv5@wR={bRMFl+>&vcGYZv<}majfR(M{S@^#>+oelalR8n2V~>!kRwk8~~C?5c!MR?x!o zXptsvP19jIaHIJKjP1<;m(=#cO^8p3Oj!px)s>o6*%yhAFD{C%s9M2xK4h!(5Z{6t z9|eDyEfsdK95}GVY?lB=5!h%UYB`KuTL=efj=9+1)Ox5J+P`>QY`}-vdyZn!_i8lj7Hr%x z7b$-=f8Q+6)ApdzVt(9ulExJNWC%;T;hgDkd0tK6RgdrYT%0Csw;LJ(n1lmoLQw!| zi8}qqWRTX+-J>k`d&G81gS+izEV06=q&3tEU^BTitN%Vw$)^9!$3f3MI@AT7s9@-8CLs=q)B~2_Njp+-Gq0wgU>F zXaRsbHq`TA_|bUWvx#G*L|gL*Ziu_ z78SR?Yn*ls5@}ib`n#}FdUd=%uBebn^-TK|80#7HD&|}4M-tDo$JfigbyG}k-7J$5 zn%MG1I}8l=^UP^fm73(U%*-e>c8>Fj<6d@;;1(&WodWwU(@~SL!gp(5ZvLpz^&M?8 zS{(g2pH*XA>)tx<33s)yW*Z+lZc`2p%3-mI=_5WJd85m;lGOaNOa86 zj!5(_edf;Krn61zdskwB+xHENHUYj#hZM1eO6nVFA=`pV`Or7(e|k%5u3AE^r8!#L z?&28Y=)wM7fayRFk~1VYf1u*bQc8tXZ{t~WX?Ons{Gd1)`)wgvLO_-9EKuoFw=-~o z`^f&0=Gr*={Mn}FjtY(vFJu^l3S@^u=o)V6*5v}7eiq%Xv+M)!i85I51Kr^Oob9s9EOC*1Ik2qtK29EQ7& zOAeg z?W+C_X8AYrAOG#Dt^ZhAyB2)$_ujdVI|6|9aN;pOAWh;HC~q?_A{0*M-$W~?>ysJ?lG?z6iw5* zFwb&~W@+(C@UII_v%i>3hw?}cTm%XIe#o3`-$}G?9FH=zqmINyQvq*9i+TpAp+5sX z#6ZnvasiYC9&ApQMry?us@2lP85>jV+)ou#Nh6)F1nw!R z$VnXS3yi-U$7h)@RHZ$S^n}&Mp~5H9)pd>=#Tgk%QYBPMi`B3Y*D=9KEdwy(dhD!K zFM1WhAFJgF91hfAt++E|vyt%qi zFcg0|t+y;bKa^DFP=9X}6Q3j>w@S0G4~;nFj=RTq@gi#8Nm+c}{=H&ZyJ}=G>Rk z6S->2C%S2E-^#2`(9jLRD(yr{^!+v-KIKuH42*3(9o~2OrfmPh=({_!5<)XtyeO-0 zammsV5OF(6wIur?pd&hr@{p2^)2g^bz?d8#{W-nC2o9l;m`$sWolmMcy|CKw>C4at zg{T-$jWaZfH;t}&ds}zTJ}+zfO3PS*Sog9LhZstU5VN=Ks(RXyKc?<*{KF_kggAQ% zmah>xAR8LguVcR{!^sYH_C(VlTGRLCOj(fMY>>al6eOA^TvemJ7(B;HG_WWXkrvV} zC_2nKD^C2rUv! 
z$~P7zr%sog_AjaS{5V)u4(qh_`dZuO+dBPG&Cq;R$Mi*mud=4$3wifru&aB@Oc#*Z zSz$-=p!@=cn<6q3_JZdlAn++{H&>w%QC;qfIO~b;P)m`hr)D+&YNW&Cds-QbIB`B# zsnIG+X-;VKUiV_b=+GOFHW-<@asE{Qag6^``XlC6q3FDKhlxkBMb%usHY>dJnfDHHV(Xb3 z4;hBf_TcXk%5xB|C5I8wni$tY5%|8Cmk-chItb-I_XK)Bt@o=RT>JV`y?y_XmD@VQ z{`1VoWL)dR+xF8%ie=~cCcmglXM*yov?D0dFPS&Z>mb(0U(o>M>s0zCUn1g@>S<3H z+vA1L61+0Jd;&wgwS<;C(iDQ3Ku(k!-KFXB(yY{pwFy++rt3P9r=~89&o)Gi+9+dK zh+Ek`f4DB}UXIYPZ{YUnD0S@85|JNt1-tlr4dTLb-hMpUr~vkCAnD8X%rjaqr;Y_- zjxB_zkL91*6f8N-H~o_A>!p72xLuKxEPN+AE2YXUGhe6_@x8y_`u5D8?8d8yHai9; z&2lLQ=C9IA3K_-6sDZC!XFT>nF$Qa_q?(hnoZ9BZ77Kl*yD{XoO9xr^`Q}r`gza1 z@X*xcI!8ge#9HohnBW>zb`^41UN!xC_-ezdykljR($LGiVC~ME>)+3e^FN_Htt`iKxSOj}XZ9uzW%V&!|8&^SZ3=J1r~eT3960*9Lw`9`vggh z?uCdG6*ahP(4rUk9o&N(>n$MB>rplge$Y;uz3&3#-fVcVYvW%X2<8Q2`qvu;gZZ<) z3?K0mt0SEnA559(4F~hGM?A9Zg_ZppbBJ|bz!NQtHu4=Vts3E;Y;Ib>=ZCo{QTey4 z1e&Q);t8gDW8rByBBLT18}RD|xvv+PVPwm)Q4W@ZE^qLK`~aNy5L=8AFwlT@FDO6G zV`{=S)`pxPxe5?h z#JNG#A)sN2DP2&Z4Wr0Juh1Ua((dvyWLARrP>I)TwV~-r>T-IgY17MmQ2rMO?Pl#6 z6%9N+hwcZZ--=U}U4L=@KyMNH(bO9a%V)T^u{#L}C5+Dx#^)jZRuPCQO{#y-YzsK7 z;7ow#T-fT8lIvKji2i-MXuG#4Y20!DUt1&pzXEM-r%V24s?Yt8 z$B6%btDpWmN`L>(SjU%hi9Gi~$b%zU${FY);f&NW{ziu!I>skXzN{+Joi=91%}eA^ z!!-<8fEalKK2&2#6sYZM9Nb)P=uhj;E?^VLwiU1s6B<&dJciq{J9cJg9 z5T~K|(=l~a0~n5T)0p`v&_2q$hEqnDO?K@cU=+I4mM90bf9q#f()?X0Os#Houn%zh0b{P;8H_LpyTYV4xBx76zZ-Mnx!#dRBK#sERI*xZ)et} zPsjV3VjRQSjB;ru=OG!kC)`bDAq#p)&E5(a9&h4qeHjT%gWIZyBv8Xco+ZK8wU|=R z76wSxf#~AMZgqE4fM3n)Z2c`|$fuX=qZ(cNPP=f=C5Ss`r11R8z^K!YDz);%K=o_F z;|b?)s0d=`U$!Z)&!GaK2%-JgSLjSvlCt_#ic6XV#U#qRqLy5%+^MvXp;Gc!4Ko2r zntW8dZ5cWl`KPSLa|PZAVroeN_u6!vJ=Xh zf52wH2+YzNszrRrxAOttX);1tZ`4^BUPTS)j_9~guB}twjN~$?jaCX}`(A&m^;oa_ z0rotFp4bY?|6GnF(HfRx&0i79fEq3-m(agDZgc4qVpoyx^X;L^LDB;izR?mHzZ@^# zKzy1lM=mb9C>D`n*JwlB5syf|Iy6pbm+c|9yx8^8=0VFgft6E zLfm~o6(@bC;MVccTXc*aZsmv3T}CB z(%^8j)jB$)a>EO!C8omefkn4}w`Yjsa}Q?}Ca=w-w1_QJU>h9RWh#JQ zBNGE{pHp`KjnsDR@SlZ&+&4GY-MD=kb0IW@bYDJP_I>x9dPOR@Rbsb{S2{xh7>al8 
zirUkzQFVc$7$fNiD}#N=>+(o;YyDafIPkQO`NjAHLv1}+{3s-ebW|&Zeu>RUC2fwW z$YH~I@Ws-YH(D_k`WZuaSatsHRStxzutATUd~&2*{I`WvdY3GN^i0GqrzH9=w<@LN zl?GhSf910mf|6=HA zR=JmID4=hMYXn-cOlB!Q$%LE&=grBI?*u(Bd0L*cz5gQA1jpw>6+;!KIS7+}cx1Cc z^Wap8^hy&oDw5Y2MV^tz;_IXGStUi7fsO{3!R0sYAsPmxM; z)Mi~2icN9LIXH0FYujTQZuz2VzWKJOZ9{q5Z~pT6N|gFS0Il6F;8xBt36zbS0?cph z2^)=8IB3P)zIKV)B$SzuA^`4#Hk{zx5;AHz)}W1Y+F_rh1^lL>_>75g2ATY1c0nEU zlmZX3P~ck-F9B|YhlK13!Q52t9yFKAN6KS$<3#ia&AVh8uPhos3WJ*U0&dYZ!OmX< zM1%X&J0x{+L;H70;&R&*Vu(7G(e9&-IPF55K7A-2kn?)kdY_I_PMMZ24;DU2_+U_6 z0}_B89dY@h-nWBFZ1ys+~$j`aoxqG>Iawbc*hV4>H3jyH3PUtZX#*)pRY=fgy} zB+1Ueg6-+Kc5S5OADo(b@ZVzA|9M)${|K%?wR&kI>y#1ECXI4#3t-CYzoYD4D1WeUYLYQS;pGT+wC7M?r@%OV&frEl-g64;R zY|tagTdjEtOxDrt`TpIaC0Y=MnN~JZ7&?hR@J&v&7Pq=EMrXg|1%sTF5o7L=mvpojt&Tc@Oq(ju#odQW?Nl3@36PweNh%X>>l@ABtX5NlasS^@%4yDh-LZ9 z8cw-F**)J+-!P5D%c5L@XxrV-bK%M~TfU0@@`JEt-q+?vFBDx&Bt^8Lx90X~|IHA^ z?H~NtG_bc4^#SqIKYyOr(6Eblbm!{0imI~WM1%ZlP*;1Ng0A3;`Uj>G4#Mtr!F-Kb zm#iFdNW~*6X&nRuDkiKZ*u}ti-Lq0TW|OtpXj1<+kMR8MoX6=_kr6#crH~d07^> zQP_j0kyo^u{HrJkoVR*6Y(KXe>TcWH+jma*Co5aU1&Sc{5?oGqyR!?3^bWF`J=?ba zb@VRU-@$C=ZkuzORanzv+v;K&tFqGX-;Dr;W?|$85IcGWDT3sD|N4V*LE4%0@*MR| z(D^o4hk~72w*k%P=!nMnt1I)Jcaepi_2e*)rx!@bS~a+=d;W^W0Brqmsx#@?GKBQK zOvQa3M6I~;4RwyjEz9mvAz0L0&`#Bc6|xYRdpHi{BkIgEm~Z$pRAMPq**!`k!|N0h zmDz9rjd2T=sHk;cNS;z6Et-z+iRiPe#c10j)*W_<%|$fExuwIspCn1hdwx)3U53s5 znMEj<#O~>gqaTq)f7lGJ)@iI?u9(C3 z$xG%kRBw&8ovRq4G`LvRBb31u6_@H*^-Fp@x1@8z#&UJb7aDN3=(Q5~ckF8fvh$oz ze_Yc-K{DN7(HQKp0IXm!~mEiuVOakqzOAAWS^5RvUfG#v-Y z7?4Flm62+9K#kh=QxdAi^ecHp%*K5p$|L$_i6uEx(DvhWw{XXOt*Zeq)eo*jV(9^^ zPbJ)$O~LGf+ikyNA(YgC?w$?GH|EB`&F;>8P}?Oa?odBvhHQ$J3YVMT%q(6lwCjOl zhlbA1C59C z>Zie%`9aBfIbb{d(xf%|>f%GX>Sv(kxtqIsaQ2eYzoL2>cwUpNiI;I$(7|c| zwwgl#)6YG$)0wiiI?h^3( z4B$uP3F*yHLPNFwD^FIm578PMHjIq=4v(1umr`KZI!yKZ_OOk3##mh>%=du)0X^7X}P+g8NOFp-E zbrPS|jl^KC=*6v~Z_f?~xzL9Iao>mbg5Fzrt&sz@dKpIpb!2I2(DW{kR~68%om%0P z#B#bzhJW5(ZPYbP!nFhM_%a$Yp9#~4M!HqIt$G@p=_Zz;2Qoe_nn)gnVSZFAcW?9{ 
z_}XiY-MwxGk&;6;%U8@8ulLTahhVfPAQ7IAEp|c+<~r28huJUmr+Wnmh@d487WeRTDng|EJFe zt?3Y)`6OYlJvodzZbk4lEQ~QVso~}DAH3MG5_e%qR9(+lY}(`HLdubKVoaZ<+^#YA zInw@bdSA}*0f;c8i=vrVMO<}G3@?VtIlZem>|b|U@|;VCGz8I$X4}VHRDL$4HKNz~ za&$4)9Mc|PUJzrPtxen$BZc*dJ>f#)1^&vVsWvy13qFppKQ33>WfC5^_MlGTlW9%a zQ(uMDQ}n7py#T95io%4pIw$2b5a${}@K2Iv5SCto`^6pJ_u({7`M;)ecdwjTRIGc~ zrfY=y3Ca2RjCuB}(5pJ$)An|D=-GzkbDs0M4{3M)XnTdnP_X6S4t6a)-zZg!kEff} zS{6}FGeCbiobxi_QQC8GP~gvu*r7$~D*t@+B`6zAas31tu70s{zcwr6Rz$-AugRBF zz!FNHA(=#sp9`WE=laz|u9zxFUqzSToHyl_$)yA@8RP-z)vn&Cwpnr{T!_yb$5fD} zP%qTgkr;tXfA9CKEujda^zgk+_VA=aA8Dy_AXHQ!5e2MuXl8!THGq`6`^`w-vv2;q~#>LDhf?t2j z=a;a=pxBPzhK8yg=BiOFO1avmSx5{a)cp}5XbeZ`EYG2v{EE3avHU4;Q6pvD{PCkW z_x;ybC6%){#^7HIh^m?JSF~XAlyQ@(IeoAWQ8k2a8pDO>C8tAGR!24-Zg?Ss)|8<2 zt?Y3wOSH-g)}Fx?rX)vOkiaStHuVDg+x>|ux;rB9&xaU=^w~rt@x88v{P>5`YynLD@BL2&=gG*xDqal$MvTYG zYWR$P&vI=O)mj3_*JhW(LTRUq){(VR$PGNVDAliOOZ4FFp{okaZ`XY@er80v(t{Xw z`Q%xHYAmwT;h1=RPg*j)gE11nd?Np~t!}w5i(bCyv~*9@_-Hm{AQw|&>yx?H_#F(_<8V_)(^3u%p5_I~81ISY zIuLCWwmnUOud9R_=cE4)3FZ^K(`j|Kv?3NRyHxjjT^mVOiGI@)fAT36FBe0e|FkGA z0SViKmVVgwO(PsV)K*fo9A*tbFNIm(@%>wu{@RZ}xnaA4kwbmxy7$106L-DFO}l1k zSybK1-wR%_N%HpFwbZ~9+j?hEN^5vCuVF;nF2|ws9XY*FLj1k*qN=rY2G#;AW5Z9{ zh3y5V)Hu)1%hvudU`2tGSFP`s81I^X1UkLAy#SKhcq`PjEJNKjutXoJTEX%$6MgEShPKp=( zNs_Ru)u0;@xjy%MH27Ocq3aE0x6JAQwSrjt#@w)<)wL#-nJOMALM=U=`#MS?#2O}l z__+=Y)+#|t21yB(0grV}W5ye83>{!)Emwg~aVroP;X4A#eljmH{keDjtHJ)8pR4cW zxI$Ni-`n>KcQfJ4Irzu3mk92~jgf44wjQL-N_|Y$cH#c7;1`>tJkkr< zR2aeNY<0gruFhG1UlnuVyXw(zAnlCLYNhHBw}2}%=;S4Eg6VN_qM_ywWL4V&YEy}t zvm6p-a!ydp;isyjPE_c!DEL`@HTYH9Y~q6*u-kulK{X#b^7BGX`&CVDyq@C1`7FmA zFQBJx3hVDs?Ow78H`OLH0E+w@;5zaP0`u;!Z|bbrDRsyU=oFsp{K5Oq{5cz8IAjH0 zOl;}+rpevGE=Qm)#z!zC*^29B!j#roA^kDG0Byz5L!Y5e$Y2MUQbOJ6-nDs!&@k7Lp@HyIC5?RScn-nhg){x=wtr z6ziB4?$l?g)xB7Dy6gASc!7lzh`XSoqB4CLKNT@ogAjMLO5EB>YUdstlmLUl+b>r; zbqevNvGw^h)WUaaO~5R&DU*rlkm{P&#BOSqOrQtr`U-rsW)+tU*OeJ6Ea=!IB%*o%d}tMq2R^{2<}iNA?kX90vCM_L+- 
zXGOMl;S7k)QjW8)hLOSVsGQjn=6DGA(1Zpor_d2hETkMzB=Wt{&n^??-*|fS6V9o4s4e?2}wp?#rz)`9|0DQm>jIf=W6oc8PSPq!?l{5DMSJN_dG(WtJulkuRxo;d;ejd)O39gh8okl(51#7G81IQ$ zBql#^6GKZkD^WY?qxNq6%BnTBwj7!R*ePCxurY-LDHQhby{9irvZrhj9LVB+t>soL zTl57QG#W~cURa*EFhR&n`((hTBx3zT@9z*7tSRIEccD=hE0xHD`8!E&#(PmfVw5Zm zfZGBRWvwioo%3ZZ&g%pcP{wZ?W$$R+=G+x1L+Ls;5>01gf~{u-s|N+A@2h9O!#sq)AGA0m zkE&PTlzbz)lo+J?V*{llNtQ6`hT=vpqJ+KyqPCtFaf3>TlHTqrzCEHt#?-FFHt5)o zCE-U{NR1_%1p*Bq*A=_bR;ioQ4&2#syuhVyOeX4gYV7GFJ#M=-HvF`F_+uNY6M4a{ z=Yw`}ZzoesPhHrAM{@`K>wne#9XD`82Qy@l!P~I;Fg5D2 z{+53wv8v6VuPE|&0aEv>Sy&Y`Q4{fssO_IT?SKE*3-rj)+5i9H{TTyp^Gt@t`GTd* z_3YTW+ylLf{iZuQxS7fb-s#yR>6)I~Y>@xVCa}TA!<@guoWDK^vym}L!Ed$ae|m!C zeqY-00Hibl`|c;6J21MP9Q@DR_J8LJ)$Mfff1+k03a@?J0aiT8C1LtDwCk}J8-`Fxx9~-a9O(us=4H>lg~{;P#Tuv+FFkQg*jlO<#D-t zk{%y6X77ag99@jdQf+m7&Zw#ThdGt!BV2l0hYOA+xrxWR%CoM7HpSc^Hhq*bMgOHn zAJ1DzoC(!^EVDICX)Ih!Yw*Gyexevdi zhXuLhh!*O;|8-LH{4h5zC&W|*tTgB0Vsqc(%E8mo?VK(YsL!g(>Gi^FX%A@p*fDye z(DXljxjx(?+nF!dH%#l5C)8gYak5St8#?i4a@|;h7il{Y-8zQSuyH83N zre{I8wkrR2MoZ~FO?fWM+r;757cOXCJ1r?yoZc zpAQU{v)JJOBam8P$Ho^Ys=K~)u7?9GG5roM%Yg6v0MdlD0&GD zLLnZ!9V^c8_DRN>vc5MuVjoXDfh}leR;Js+9L@5c3ZFi6`JFpA7aoSjqS zAl&wA8>GLB3o&~wb&A}v#l`pQE3POi0 zmk!95On|VBiAiilWu>56evRiy_OnV(At0NtEgx$ zD3g$(PUCK+`lMFZ-h%1&{lgvXmd{AYVc8-s)ars?#VDz$Skf(#hG<{wx9AXH8sErTgwFqgKj?T9v9?~ZnR@LOu}zpp%PHuH04W@&W4!kMJM zu@FjwyM^1)-EH~v1Cz+vEU*$lYO@E0>O9P{uUtu3 zF3`DvS|;YQ8FAm8>tv(b@&sy0n>7@Aq|S!!{|V($o*}@Egf9N%8jm`b z*FvqwLm>QxCUN>Vw~MTa@EM%e#J%Ddo)fJw@qW)KLwAPR*BJ$l=4 zGSleOuc5hD-U`=O`dt{~Xihie+3|nQ_jTGS*z3eYyQ%rll^xgrW4-P3k-E$i_;5cf zSi(dgr;U1Uy7zn5G-TuV{9RjDboiKTpB>$3HA5*G4|IB^(tX6@7VQjb5E>5o6(frJ z0=uBk<+ZG(YizlQ%%5_{v!au=iw=T@-d~c%1H+jc5~-#e;fkrF zlaB=Fnpc7b(nS?3N<`}n(U+K+h7tpX>GZrfN$i~$I)1p6R^;SJBsM4B7|}nh&0ay| z#RR(Z%q=jIX5kJZ6&axKhqMA)Dw0pL#+U+1D=-R{lqZIz*l5HCkl(&S(t}<4%jhKri8* zYpszUcB^Aw>v)S(wah`Jo}lH;Zb++E2lZZ$P0At@){*vDaE_kn&%k$-w+PkgsNrlR zr|pC_x_vud9YkICM73`yTFJD*mNOJ&>ImV~!tur)8J0Duwdi(juMQ`yrK@fhXByJexIEhLha<#RDk2Z}aiv$6~XrjlmFFHs|PrSZdsX<}N=rCH+;0 
z-)wdg>W_dM15<(5bB-|}JqP-_^hJaHaCM(q3r%tE`5In-V8US`kSA8a&3kJ)NA5r| zyK#qwvs2rTvvwo4%C?4$;pqhjr4@Aa)V!m-{ZRMw(wIVzAiC?JPOT9n=R}z|AC471 z&Y&FVGk&aXiIq6W{TGceHjFru{567fZt9MQV&y2Tx!W@A#Y&z+Q8GdYT%~N_80adt zo?DXV4Ry&T#G&_+EL<`YJ8*MR>}2_Zl$}DYxTVpc z|5f>t^f>Y5%fLPh=FEtdk~tmg`11ze!o>VBt(Slt;W~$>98t^1a~gI+wFp%A$noFP zHr&Pj^+MoJ?(s)iYeC}l1pj2r(T4hpj1WtWM~KG01*@A3MrB8kOKM}=P2o~|L450h z1uh08`TE%oZ`^mf_=)~L;j+R~?WRbhKYnUAQ{=sUfizg8`~aGNb4X5t)|A`4QiD2U z|2V_G@2boq?=V7-;+mRpi!b15<-~3nsp`I4W~(u5&s|xoTe8hl*TWHsuf=Ml-wlO5 z^Z3hp5I!+47*BKOg@AJ{o~XxaqY_Gc*8(cydWXT*a#5 z^s?@j4B@r%F&`c2`R)%o3Wwj=i6t2(xFt?@mkN7JAL!Mh!#`+Qw7MT%T=GobNdCh{ zQ9DQmxzLnj(w?qhZBqJ)br$>SVE`vI3xj@+UDl4k_Y4%4+*)6KRTFDdHZ2RQ{@s<& zRCs!IWURIF9l=m&yP|z~N73${;3X44)J5)diSTovv6^w{txQ29-tk`h<(alG){swy zfOHM5UNqeo4s1xqm56JM$&2KwxSlXh!GzQ}R8`+;jPyBYIm~|NJ_r!DixMZzQX7tH zZVX7A1NZl;xzU8BEdd#f=+!vJXwUCv>_DMrI)We{U5t>;UImko376N3X*)D(!<3%H zdHK8iI+b&w5O><|MCvD=H_RUYG`|Ji0(!Su8Qmz(V_~v!ui#|p`6H;uZ_B0-aax}T ztRvPx7>erWYxhS=>L;JI7-nCu68XTm)OWl@iJ`SoX{_Kd&8oO4ecsdw^Jl53mcv;z zJr1Kiiradlo(9Y(Mr&yfN;BS|s;4?;`aZT_?c;fI%f<=3Dv}h9;^#HKv+_s&=9IPU zhcuK_ThMH&3)tlKwa4CqWRPI?PKn=a(w0^GpWGT17H5RTnvkJt5E<-!YSC$rX;&UR z;1{wMx_TTn?8@8LddP9ZLK?J}&+goZ*COA1{bb5SLV1mhZk+CD$-3XJTmB*|R;@M- zCGL~^$D!7m*fOPh?P=+T@)zPIx6~72{xE#-`TMOa6Ug^pqmSOH4aZ~bOHjC+^lOG- zOksqjPASzM4W5fL4X3l|s~scxjZCsEdu0N^L=a`G47V5F`pwNS_MB;Wf2{;?!Gtr- zVLRk$lFJp8&G-9kY}Uh=OdN(9y1RK>1%k1v0Y7cEPq zZj&x{>^La!ujQ=&-|$oU{-et|`u6FJwdIV3gZHn@q_JitPyGYbxO=k=n)L2eX*$E> zI#lZ+Gw){_Wq)8GwO%FT+4{x9$+&@m0iO(h8wZCBFa$E~Y3=}}Cx(H4$*^%W!O^@e zQ+sbwV z4-?}*-)z1($?pHfIsG~OAV^tV7rbM;BJk;tYfr;M-dJb||AX{DUs8M~A7{w9=J(IA z?a2K%1%0Cb@#V{3bk%bjw&M#w4cTtW+}vDkPh=?qVTwkh{c4I3jZ6a_u8{vGj<%s2 zzTW9DNYwE}ZJE-JpHI2-1oUkYWI7Pp`RT~1hX2dpH76W7(2qi)=#@m1yInvnGH6C% z8@GL4d-QyzVZVR{JLp+D5^Wd}5wUxY%hqM~DtVz93PIa};sedmU#)`0Ho>tc@wbI? 
zaRi)4@^+hBbBb}x{_3}TU{c$Ou8AxE6(ij7UozSM4Zj^TexSdnp;%G@n6;#gAeX`d zSM%sH4oG`MTDjt-)peXMsay|J`C8$lp2j$oruw*G;Bwo>cOJs>vxcph;QN)i48K%YO><1WH-ZjapcwtH4f6IlnN8 zK_9jZO)0}+MRY&lrG7Df3ip*~^x+KL#wS^0%=3^%l5~Az11=pa>ybW`>MX#MzPg!m z+BQC&Y`w7Y)oaVX>MZLi!`d*QqX*0A|m1JDAyz@COnK3$NTjPkPUOvzj z1R6)oQBxX!X@e{~)P|n?qInHi;HB-im)2XBhxo68Q!WP|N;o8pr4jG$@<#h6 zu5ft0&zEdunUNuRJDlY!7A>qM5P7IRacWaJzG_eBWCXJYnqoB1M6xv&ALztXDlGV~ zs+se%bMPYU>*8Hw0cm0iRepyCW63V28Gbmi=t+?p zg683Q{`LnBTj_!aH+}}Z_1@XXgTUu#aquP@8wNikgUa?>@bwHH)UgvkTq%yY`G&JL zz3b|kmO{Y;6$8+)n*MRkaUq==TRcZLA}b2-d1v2hrP45N%flT%QZJsXVbmG>f3WxF z@leP8+o)2cLM2f`tBNd=en-<^$v~f;Qgg7HGW|g_GOgE zh7=KE4P^v7HaU*keiXn_ltaHlt0Q}g-Ru&|aOnrYn7)%mikwvM@% zz8bkNCN_wBq(t6|vt6qe?%Mc%T4rwj0S;&f z0c1tcYCxOkuY951Qh66)r$Mwqw?#B@6YQpRhaCn_R}ex@8`y=47Pa*(m{l0r9duB& zb@+d`vkngr=K$`jm66v6GpgCGc!xo^HBMbQ`23ZExpl9eyE2H{+K2zVWx6lu{axj0 zgx;R$d^7fvUMStl(<*+A+ma~9h3twM{0v=d;FRk~ z&g`FUUPw3D373o!^=!s7c%;?qZ)BO+qq-#Tz_96+fR^=fQ+J>>&d#-R=W4k0P&#}%diKX~GYJrzw_*GGqXw%Q8_R9m@ghV! z=i_5F1|X{w9t{mfm~d4c>of>H`kn zE3H%A7o794h~)SUfrh@AvVj6H>Y^>~+#tBP_v#zS<%R3zZQptY;&w;3!&a*@`N9hN zaQo0;&rm>K?)0vzDM}@h?^!aaMtID}RbrpJyKmTbd0%Y{uVaG9XU8oRc3Z*J<1%6H z_3}nR-mhLfh-*ST<%f!t*56C*Q!Q>A-fA&V#!;34xOU7tJN}h8E88w}e=-Y(2eWa5 zV3>%QjxQA`Z4q9yFpg}|cNkc}09;jJ1P+Rlv&>h!K|paOSK26I1|o9ToqW+dgc7%z z>^B-cNjk#qgMl-WK!$C?u`AN_ou% zx)0J8D{T@bZ7FNkdu`kMoO>lz5<8#w&U+@otM+xS8Iygf(y8y*2JXf9IewXC@W}CY zH-S#bWFAj!=v9W*NRA;uK(ngsPM5Hn8z2GE>5rK5?BAV&!Ct zL4uG|d|XG4Y|NbrB9=(mE-`V2!lBrvQCGhD;jO*_-j!|nH6QJ4&z_V**^6<>^}ghL zLzc4X^zb%h$r#%ma0JG!y@Kn4%$Go`fs3JNwz#$>7w-$6YS~p6mLH_Ysz^XVTcWmf<6ganc7!u7;-?|zbZxX(kdi*4O2$=`mrQCky2VjAq;v@|O1^M)j~Ffw5@?y$HK>FHF6k{LPYKNpiQQIK7^_pd+q4RvQQvu9<6d1IRonkEy14ma z?-ilohev~t0bAku{oP7vHwPTIuy_=t=MbtFT}b(`H?y-d$78ke8Jai^LHi9Vni*cN zK2@kadc#S<6w3e2Am`D3PED-tgqLmAOJl={o!jwo672|LI@1oad8>Ny#JkFRV}n<< zrW{B`_~yLJ^uC=d=t{TvkeN?WpJjFTMVkUqGWys(s$DF8G@mz=w?;5DeLKQ8@^X4&e1^QXyvs*OuDK{!qPGi@`3XG@xe94 zM4PRprx=FAsOMukHRulxeuV~GK^8tf47bgT_r7u7E`U&flg$G-a~;SJYwX{+{efTk 
zSH2=fOm0JvEkMN{9j^GZ@anlog{`qFXU`Eh7#JE7_b`cjcOU0o;_F!{Wu5Yi$Z_U6 zcmno|)P8KpY4VfJYV{2>vB;NxZk|U{&PCGGT?ruz;nac^-n@u;X)H15ZqxQf@i|BMPx&nNdoZ6B61A@9i0P@j&(AXvUdpYCAg+;KVQL;}Gr~NE%laht zZw1sWEPr&(sDxhUb;MGJf_8Oi(>=vMXqDwWt-$PZVy6?yni-xGbZ`5nENQej8t_F2XGcOag!9p!p{Xo8ESbjUwE}sPM?x!9QQt~*4656C4(&SwQ8Yx-EBGwd9~aL(d>k!jA*GlC z%9zWC-F!rp4WaPY)pRsojKjX+vn?pg+yCO}ZXF@|yU$5T`itk?+q-^j42fN;8&Whk$Rf;flX4Jua_@Zp=5@&16>C5Q+oz=~XRdQ58)M2(l!B zdNvBBhZpM^9?qXr=+c3j78?Im8L)@tn6&A&*1GWCy{q%!!JC!URnZp@7rH&BU;QP8 zXlP8izlbUsI>RGX-^0GHe#}&d=j0Yt zHsO!cyc%y7#OYXBA$ZjJuN~w|8l9`)qDp^m&a~uiFL$zTFRNIQPJh(RJO`&Lt>_wd z7VhH|S{JqcjE+wO?58{B+L>yX?zNS=Tzy3ETO)!~unGACr^rLHl$d!dO>y=^2vdO2 z@ZWPB%}4i&f$O{NOi_K8OIbp{6ML36^S6gZ_(~^z;nRFDP3hk!RjFLPUYSfkpVcox z#Els>_pxgrd!sZk!2+#bDdMKF4#?M^p;}dA^I&?xyjHe)@L|MoGN0$&(1<20hH=Gy z{}Rr4Stt^Il;(8(SM!GZHl81-e^W2B32I(K1VvujmZ|cEo5DC?@r42jEa#BfN93CJ zu&>}qZE)+x=BC2;Jn73CKu1K3n;{ab)HBh9Q|%p6PNY~o2`CuBG~dN%)#A1ITy@D| zqpZX^lVhmx)}zC-r0>1!^nPpCSXmR~ckV?|!4J;B`FVSOlN5``Q;qosU9sLAG0+^f z$-nwCGfZR6m0{g5aX@ZhP=>gjZkE?9(=J{BTk6n53f?t;p3>visQ?^1`TAf^(#ShB zkJUWE$`6AOsoGK`q`^J=@7vk^AH!XyfFwyFZRU#nmo4KqhNQz$Ps7gVT%x$GRd{qL z_o!S3G{^7AhAY3J#dfS5DZgY27$-0@I{+#eNn^5VDimt#7t5RxaenmV5ZLI~S*1?Xqwh`K*bRnF-itOBl*CLFB^LMj=Q7-kB@7{qZu zmzrlXHg2fZSD1NH`d-fKY480N-7>Hn{*iaw3U5Z`9#2S9-SPSqs%?Nx=h{S_{U3Hx zDDhuST1UH>X7DyGpKBPCtxDddT^nr0YuT&Y)@AAHySuw@6mxTQay#`ox-IoCw$3%L zeAcTpZ8oG`Oes@VqLV7D&v$~R(Q}23+Qz(CODN1w zh*9F?o-Kll*!IL^7gsz2=95lNcCMmwuy_yV)~HKS`iK86L`PjNo@II~n)) z#Q19wJ)`10YwMH*e6HzD?!{J{f0)a1Dy$Z2=OH{Ko%^8#N9@2d`vjX(I0&9XOOd_A8Uf3ZT5 z9u(yCzNW-nk6NaY)ICyl=R!bV46tb+XijVGk@y1-i;|KdaD$e(URj36*OMzjQJK6j z9E5&#Y8#6eQz`X|8u<0XyZb7?eq2t;Yk%Ii!Kzods3uVMFS?!?W`~Ip#vSsto+{=? zAAjq#I-+bhT|a8*1E1=RK9_VrA)BdQRchGmFuItNh;Lq{yCUl>app8Q52*Mw%H%^7 ztgWwC4@2XTX8O*gBfat_XO1!Q0mz}5wF@TuohBZ{F>xW1J_cPVu8(0v^6A=skSdk? 
zl-{@KW};JUH$Qu-K=<7|E6;XrZRI zoOj1Tz#@b^lWxAF#BN_Y%2+CG8#6HHEXXdPU&U*r5AcbKj(JB*4&pZvGM>wr8AI|O z0insc3M%LMdB{2KyCd_gD?y!gN8DG?h%AFZrYcJpx%S65zB`$l3Q!%td_*$C2|q0G&xB^gmM zQj*sSchBanot}#z9qvu#s#HkieZ+I{^3wJ0f^SIIaQk33cT3(kdowADUv35&-di}n zoq`l7@TimoiB^bbSvk^lo2`3k4xU_nAenj|eK;!WPrr4o9s@c;-?%j>n$$&=dqz!u zr>A#AfK*~$(K^z#CR^DyveZo>8B2cA##T<*^{kw8n;*@>DyoKZ*%O6L4b`@wX1-mY zwnH4gW5n5Qg2?SiLkisOVbt8$Ug0EU-GKW`_g1t%{w&|vfv}c_#=M37M5kOYIB6ht z^m18bOkGFJ`jChqQCs@M&@)rr?5PM|k751!FNv*D@vKUDb;Q^+UXk%?Q57~H0Y14V z*}M#E`e94-JpBCHcLTwMFBWB-2E`tBS@hdGz-C})N8VJZydj&Zy>v+}#R)0O*Nqrv z9`;*IQR9gA@XOj>tU0xrz{=;0YPf#|iB)s4_=&xl9eTKGI~zOD&zfFtU$8O(HZYQj z;OTx3g)s-gr?-=BtH5}8rA6m0Pf{Pf>j=K~cDK>R1?>eH-Y}i)GTAfMZKF9Gy}WnY zZ^@Rbd&eo;N$Y5p-RzCF*z48P0iI)Mrsa)q`I1QuVlv9&4f~QM~C$2UqAP|6SZL>nIX6WAc7)rZZt4b5%HMQq8z>lCqygC_t{BcCT6+Zonak zq`YGbQps8A_Tx+Mwv~vr15B++vIO-I)=YoZj8L7j0LoX|>JyTX`B)w`?iv0A{Dbaa zA$WMY+3C?;ADhLLJK>IKmBsySkFqwC^bdhdO?Eo>B+q<=Hp^jt3OH8OkSAydcSo7# zQsL993dJA(qCBt4s zA6M&0e~PbaOYlBIYqp)3fV>^0sMDkT{%~9TC{}SXom)1QNv$_evZHBHz*wkfs?jr( z+gp*_BsR}*ZzT7I+`c~p24U@-=il5Q1v845klYfxC9soZ*psv=m250Hwk}VXDG2q! 
zZBq!36%P&0&DI(lugF@&l3H-q1{Fgupqc2d&TqMhMzJhmBKSop@Y11N`)g0E3@!KosYXb}x7eX(#1*jv3u3!X>*s;Q zWU=@os^D(-m_g;g(M)yhzXy)%h+Xyko}|r??S=8t5|A$SkVdeQO9hTrxRQXFaccWk z?T6{4nLcF0L##LE2OuYDgxDKGIlIzlZ&LrJUcTYl>z-A%7m~DJyhu5D^5o~_$GQFHaMIK!(apF$ILHuFEs&ClOZrT2Xx zgp+Rl=eO96W^QWMU|E(RT?#`k~jJc^ia{2$o%^&qa< z->?6^KEC)rXKDW>zWXKJhORXPvYJ_kL_$7RP}Q z*GICP0ON}5FQJZulpz-GH|s_hk??xz(KiGn-nA~O2?jB7A>28c$pB}S&CBJ}dx~h+ z+Jr;brT|9P*{5E+9|kJ7=Oh+HaC;k^F{YxaqrJ#o@@`A z_-rSyg`lLd>)qLf;F49;?!!?>SY^+z1tPZ3Kj()Z>#W~k1MYSk-X9#)gd%Axm~VcC zJczl}+cU$D?)_4>*s0BEY*5roSYRO!Zq<#2Tmpf=w!Ik6xD`N=IUICP&!pd4M%)UU z4JKD17z*L-@2tdC(EE9(H1@2}_bWzxc1G7Jocmv@P(CSzIFypl;ET+Cwsl-_BV+yQ z=pJ}`kU<*v@{M;-H5JJHsQ3qSv+YS@Qy-K(7(z5>V*DO>uJqph8hr_77r0^U`OSFo znQX-X7JLDq;$~_P!JgGmK4{t#p-$E{H7u@xjmGpSf;u6R?P*`sk0+inUiDD zd`Z^m%RX?`2C%{4v1sYAO-Ll^aQ|k9jeog@RpIB~dk^oM8Pwmfk>cCe@Lpc@>5bvL z-d561PF7AQa<%I@l$Db=$BnhI{Os<>mrh>m7 zQ)QVRdx6X2_5*tnmYXEo7Abz|>knLBQC$!X*oJ-HF%trtdVsW|}-uuJ6?Kj(=a)6Zvu z4<@{?ip}L?vwqBXN}RjoX2{a8RJj2pU#HHljWI@RhfG{}26o-6#GNWNu8=g*uUfdZ zowmA~n~*r5z2KBYATil;zX8>(7^EE|yo+tvh}O#I+(@{4YklD2v|>k#lK+;mL8<)Q z4oUjW1wQ?~j7R503e;HllDprGLY|&lc#jc8@9Tijr7Bec(kJgeI&s%h()oOf(&G zuP(*HDuRO^qOTlROH^TZ(BSK|0Qeows0Z7VW55(`e!%(fmB=3ewO`+?k_lGXaUhburGAHr{~ zD|zcIfsg}UqCfxr^)ngsAlE6KFOQXn8oL??<8Q2Z2DoRBO`{_mtd{KClO4TGm{-CJ zbu`L+z%*^2Y5gv++2ptYv?lknqi@;Oo!f_98u2Zlb^3#GTAf@!SaE&BsS816-}t}` z6|yApwwQ&pgQ=h&t$|~~Q=%B5=1x9~piB;le_*2?r=pR^AC*XwiE^gF*HoZWRkg1- zh-=X%@Qjb#ct5{?08-wkvS6RMTyxj#j?xBtjzKF1)a^aK1mS+OX)~SOE5MEV$6qZ~ zPm$k{N60i4eqm3*&Y1jL@I|jCj>Ao$`oxdMpf$93PmWhSTqrZjCZFUUvOFOm+c9Hw z%Bt48`p-lPX`3Chr@S6DBq3kl6Sqt{%@iG%jw|nkyy#o`W&s% z)=<0kQg(-r;}-!mANP!yH2&ruVE|F~c$pNNV2Nn9qoo|4sGgXZXlQ8QyaBJgy}jp1 zq^J@#H+T13swAoBzmpvSOO_H(o;n58)cmruB9T}h928?m@-5UDl-hEQ0x27 zz;=tBl3`obu7AbpG(sbjmSJlO(mEzC?dlR23iay7QFHEXdF|1(kfg%R2c#hX zto~{&(_X6|Zvi1qeAqv@^s7@y6)1)zHH+_-?V*%frjUm~d`L#HV29 ztAsLd^39LjFaAxGsSviNvOr79lAH!ibbuD)mFf*{B? 
zk+Yi%b-cxupD@-!*!mLDzS2|niZlun@2z6Z@716O`tVJ04vx4a>hm95 z^&c;v?hMw^u|K-ppi|}Q54lXu3n_c;AI%zf3uUWjz5y~?$QFwt9)8?+nTOub)lhb1 zGmncmdL&OjWhFy;O&Yz{SqVf5ZiKz?*X?`C4D{#w>!nIl!5>J^v3}GI1rJi6EhX4~ zFJdJkUWz=y$LW<#SF6fSAcO9OAwXq+vq{YIezbDUs$fq#7M|SsypYXA*WXoUA2n;f zSBk>N>seHOL0F+GJj0|C@8D`>%n7lngwm|d&yf63ZjG(Zb)?TfK+wC?f@S|nK)A5r z0OXF(9He-XI=@cOs}Rd79Wuqlws*VKuI!hp>jDzo7Tr$c|6;wDh;zVbz3l72eA`i} zQM^i~8iyr+q=}8*V9%4!?s+*xJ#BxusOq3tOQ-OzQ8T$vlQbl?t_U%Uj_HMD-Dvd4 z5p33;4CgIFwsqB?m8z<74D;A`o-Rku6??x*3#fppl=)Qb?#wk$ceWLVFWwg? z3h%49-Ll$ zJ3_B`D>?LBLWz6+T;9xwTy*@vxu?YGG9A*Pxlsg|i_P`GC*(e__?A97$f?}ix^fQK z1I)frnMW!Lm5`f?XFJ@D{HPe#Jwb49z`K)^#Tc2^!81uj5s>^!;^ab|2Z>IEb0EN` z@vPykVt(>E1EqAed5)x$h|Ztc&YXNSayLWbTmPN{Wtfn6lW)ANEQNelzi%8K*e}%> zTH~|7u);kb*wO&lM=kb>=MRJ#TdhfPp>XP49|iy&}z$93afsVdp`CVX=|9C#RDo zcWW#H<>rc-f5r2O&sy%U=86bro%SzY*mA*J!DXlRDl71_!E~tBoFP}29RnwEk?Nn) zKazsmD-3?kU-ulpn#Rg*pn0o$jg;qpiVlX8kMM?4BjlG3 zJBjOkHV!|b;j_@l5AMT(ratMymv0~dbT)QqcLAN3Rcwt7XMb$4ix|tr4Zvnj?i4s> zCKkFGMG#ZUa=r~j`LKLL_on54Y;k{lvzdCl{(Z3vP|@sF)%BG<`5Y3x8bbw`KnJoF zc-F(WoYcdr=7uNI4!I?QdXK*?-!fcR27ZvT75A)3&yQtV=sLBS2JGG z@l{2_H3FdpG2Q;5WvSV6y=)kmdH(7kFZP^V+~9VQO`BG3$XD-JvUX|$R(v|P5XiF? zvi_9(aP%CyRwlBTK03~2OmH3XC$TKqCKPxW1kI3QwYVHo6mw3C_kl_9(a9G^D&1#A61Jvc4hgvUN9`@OJ z87sUcIS9ky zxv8I$7oRtOdJvBTCcvA&qaO$_QS0ZbPJ3jDb72xrr?lO;beGwKTfy5KG6Ydzm}(y4 z4zdX*E0Xt5elLw@ctr|53A)F+x{+zzghL*L%L-Jq>8s$GJSL6y3DmduCv{5bcpuXn z4JW+Kegp>lJO<*S*~S-6e`hA9h2A(~_E7l3)x zgjRpoN)<=%>EW+Z-2#xmpG2?qUUYrmR&o6WW30*I&3mBQiai! 
z9btjKsu1^SVT^b%y7_b+yJWG9eVTh?1TC|6Vne#^ch{`5my9F1mCX!=^^T1P%5Oy-J3WSXE_9cptdErSNyhqQ*@ySbwb|?_wI>+GuWhHA{azqP zM=a9BEVYL@Y%l(@7mUsFZ|T$3mbyHD)gh-=1cXa%Yv0Xff+4@w#jfq}?uTRs`S^;%w7djg7VnLuf3l@m3K`0m|aqb?|67H#T~EsJySj)`s}!oKUTFk(OHabh}l=a4jJX;#?}ZU7Q4+ zvhcRO<>c`in9ypg-YF@wMhHt|O3-E@Lz`J~;}yxSQ1}`Fj!qEOJQCQ(G_6Tdon% z5{vvGM=OQ>sU}V6xx&#BNr-e&R*4Mz2zq*m`_;WxwYNX5;y*J6 zJ|2{~y8|*yZ{$j*0&2(RTzoY9kN6MzO%3nZKmdIo#bK#0M}y5Y_Nv_^d>7TU@Wj?H z$zyXno>3L%bp3r+QUR&~o*!w)m^9;mxra3G0UtYl>Z)G!Uo!RRPf*5BN87fW9`-&} zG410dk52Xga*7WX;^5}fA!05I-TaY&4m%v+%PQo80IlKUe5H0B(+f-kyz$#Wag=Tb zQrszHZm3=AM_!jf#%tVEr$K9eFzVU|CqAzb$1|6P5eyD$Wv^W_aM-X?$rLu{SOVF& zHRo<_$Jq6bD+4;hsGK>aB6`n^_?YhFp&e`E1zi#H$!nd16*ITkHgD#mvOhiCua=j>spdGV(~U!Kl{`=HP0RjJeo2;ker_hxL2mrC zF*u+;F}JcJ1@IH^0PK>hMOI_KwNJTEFLtT45)bc4afk5Y!|+3|n)%hYLe zRB^EKpb?$_Jw;zmJ#Yy=@^PJ2gN)p^$mvV~JbOA&-@&mt9ec`3`aX2SUOI}gOW?-6 z*Uc?DrO~$gD{rcpe#lN09;b6wIG*-y%PyeldX6pQMsOScKv_{qfc*L8(_z$vn*Zu# zD`^-G+Z%T8PT^P|es=~-C4@YKlb}3%ON1(~!5LL?*t}YK&-_@l(85YT@fYbirVl%h z{KNnfdBFi0-2-^JOjY*0Wy8&^WID~pUAWrRj{FA7Kbx(SKbkdIQ%ZltSv>)gkhD@a zTz~93p3~92aq39mf#&ZDRPs_fwTKuKyziU8`<$nAgZbO}>GhNgHk0EV>(&DycuZsb z{eiob=M7;hEVnf7v2<<;x;_)1qcgnfP`lFSisKS3gzmtiW>~zJ9Ik8v_C5#*czx5~ zc<&H*!|n3!^U7_W)|F&*rR&&3GwyrZ_Z#&We|fxJYkm=J6Z})x&N8bw1o`{}*5M~G ziG6fvLv~N}^IW*vsDmkiZc$2|a)PYKePZ!7R_yi|4pSssAGtt9vnAw7uU~-cGklIg z%b7#OM|?qn_fx$C|Fp5Z1Kq4*Se8PhsGa(aBEWVi03)Q2+Sz?+mEG8+<|z9(vJe83 zXcLr@<1ry&>y5=`ZPo6xP-#v226%FVX&LLZNpmc4^d1p1i?MOwN=Qm2z=}GMkxod` z!Vmq1tufBNkjfro!P~& zbSWElZT1}_tJ*_H0K2*ZH5P?7;e=@!OFVgPm?J$jbJHvp4f0vj=~=u7OI};mv$LqJ zM!>9-?KO4&bQaYSLEzt?YR4v-m^_C{&7XEEf0v+c1a@tM z(@?&N&!TO<>V+w8?*Y2qcYm$nKQ%iesi-FhH4Rb7_0S1XWdb5-0kM3iQ0A{@aNucf>!xZB2Tc*t5?zEk)UV?7iL~*Dp!q zYagrZ%91*5{LF0nPa_ExJ7ddE{X;Wn_Ne3fU$n#b-pBvXWO)2v$u;S+p(^&paG+aH z+BrmdO}GD~Jp8DAirq|O#e1miyVh6;P# z1Iq5*W<_PL*H_ACc+QLJW==k_#%b%Az(Ouv0_bfar`MM5f*scS+46=Fw-(A+9AkS0 zfVQ^{@&nY|2RxiIGd^V3hr%fzlY@sxB&BDU+B_`R7-~B@ndZ`DCzq2plIKG<2Ub&{RIz#$Kpm$3($H)WP|y@TkPCzaiXmyipx^h~j@Z@YP>3Ed12ByiOeKfs2W 
zlU)$lpRY}?=pbKU_b*W-J@OyVuUv3p;VEyGZ@$Q3+Yn|~<`P2p&>@DggO=ZZ$WA~( zV+F+|)z|%2y7sE4SRdm0x_j<(=KZFD{iD;_3|NkFaoAg77 g0GGlFSK+%|)I52S z>Je?<^16ye>c~F@_%D%t4)b?>$D+~s#uxHJ^%C)(N^*S3srZ0llk$Elqhyc>?4^5K z^+W0wfHJhTf618R)KTmwA$_-!wo|ugDQ-j9MvoA~$51W7h4ttLJ}?}#*L~^)ao4e@ zfJJtr90eS0&&ZyXd+IT-v`D2N9CKsY?g}ScK$4qntah_Sb|#rni|#95`g`_5N2J9c zPs%asXi6t{t%e}aZHElw@5TO<)jYd^O8lCyIFbX_nl3eCUPZ6HeRtLH>?QzD&3{mJ z>6L^FD7laHoSvpod7GK3!<|9syd+0Dlxkh=77Xsv@h)f^gnx*%>dwA%a`HVB7_Q6d zb$!o^(rHhhp?X1`?k0mdBgdWxYw?H_Lj#N?x)C=XLhk-`nZg-dM_|CpeJh*SsaU;8 z#|;xLW$UzDt#}P=finWB`Xnoa^RH?OUi1RyEKBt3w0akEs@5zu&!K$**s6iu#mn=t zC_06&N>bG4s=?^QQ#@Mw3*6uw`HJ1o>e6zSMjT#k1Bvt+gNwR6)Ti{y#;GuoGv6_e zt4&r>2A)up0e-@g@o=Y+dk4=YBvZd+mX@uc9>dWc*JM&b62qM7wVMoAmq=9o45?t0 zV0l-oou#o<@DR}39%{T?5F_q;l!462$);@-df6wFgyYF~p%+TubVMzv9>^e>q?MF~z)(;OPvtqaxmCsP~NDGQPx+x%)um z`fjcHf^o)*jj`>fM-jr)OnPtSr>O+y**Nmma$y&m5Az0)97O7)B>D}#d8~$xlTWw^ z7V6QGW#_nmpK6=EZjbkin%Ms6jS_DBg^pn?Z1ZqDqXW&GvI%(eNFxbSrSVv#MB3z_ zxv!l1w}O5tBh}{>+KA@n`)MgpVYxHM?RIN!j;sE_3Y^hW?E4@Pqg;mkD#f)C$LlEG zf1jV{C@puuc9fzCi?PF?^CN=RH>8~eK>+uJkukY}JocW8(7j?Hn=CBB8Kw$^L|qDs zNF0473=y zjD#0NGrz5OCI#Nb8NLCPH-9cKTpN{LG^%okM!2|TsuEz6{u_-|w9H8b&UnQB9(EiU z;li49u+RE7=l_euStk0~W6komehi-*s@ z!<=YrO|OXg@KbC#N|YjYpy#8JkEUu%zs`-==JTK2Fykf+jVwRTwV6GbQNAx?{Qglx zk*4BZufX9-_CGB|`nNrH@^s;U)8&8{ZagR1t`a{NB^oySb5thl zSEa0HmvOlg2S?uw0Wwe!xu80%jn%Nb;Mq0|5zHO!No|T71X_`8$kRbw!D&<{gxbxs zkiVjO8X7;RKg^TKOT?cWV3%A|!c73$L*xA%o)5Q|w?X?gDTJ_u+4af>4=%%nfEg!F zx5XT51a9O3$;q%9mx!ri&t@(id1T3Erf<#92Br zk>OA{O=H&bqvi@$>G5}-Gd`-5DtgupSE+TDg7vKR!#$jHd+%G0)=wtcYFGT#eX)b7 zsYIxr@erze!^_#$cvh+t1l9dPMKIscjq-AzUDw(TzNQH*KY9p7(|jU0zb<+vsOe6~ zp82ef&euI{_!Y)0W&8rw&rQ0a>K_~G>ESzOZ3Sm z4L^+}2q^<4R)lQT!O|DH*|x355m^ybMiJcOC4I36`661m^;ArEHZHps&zPg9Z;}yD z`dmo0^d%W;gLd z;$NXHLxYnm347HihtvE9&*eVxF#rpgXuJm)#9gVx$*uinqB=3cJp^FwU@6omF#4|k zHBoS9fS9y&7M1VIqNXj3kun)O2x8W{79}ncklkM1Y@|*}#Nw6J!%d!s{n@cu1!^Cc zV>@WW8o*`EpHA_{8vucwc}>MPo_X#*>4AKnnd0Xwd&D`2lYScHy?NcL(WsS!S}$w2 
z-oWj%j!m$s{DGqTS;OA2a=(ID!{NL!dvrG4Jn?rnMWi0IZxIx&9BtRLb28sm_vr;! z6er*szkc>BHB+r@>p4B2ba=~HSj0NKapBM%jOOFrx2|R*t-Q|y0aEq|mQv2I5t~77S^3|)4;|sx0LFit=@N2=QXN{?*#YOdFcJi ze@8nr^_(Y(^KVl2kgVjCzc0pl-!wNm;XE{QsCF627#|gbC|1fGhHSpScvS~xw<=`i zQ?l|oX7d_~7q%d?ntJ5#7x|tQr+d2=ww_>xq-uUi6V@(k7P91)=Iw6vm#*1@d_Yjc zxgODhAC<5)5s>iww-ba|oNa}2cgilBpelC(WO7YZkIplpFQ$T|7j4h9rp6psm_HJH z^oUo2*xgg7C8R!?AGV6nFK20`{a?Nixz!vghjLRN=>|LpXlf|ikjFc<59TdDO}cBu z6Al}ndghU?oWHB~wP|r1+lt>`787KcDvY>uVzD1*$WvcH9Yn(smcVU7o_%jbC2Xt0 zON&));mViRs6D68IS>b9`}WQ^T!#(i8y@v<9WLrxbyA^Mx6@R>#j8*5Xh)5#eNb)^ z3mvJJYxuh?+@Yy9yDSY+jm(VcKM+ACihZHI^NM>5Dh*vD{lS6KUXUKNLQH9;zW8Q5Ua(9Gi0x|sY4jEU(hz9#`S4p~9ybKC9b@k{v zY&74*@U-8|!+}5^QGjxh;rRhG;kFIe1ynr?iyoe7NyvK|*;*Xtwo`uJY301gRrvlx zHn%i~BGz9#cYV~z#AmCO4U2jo03Gy;bZ-~OlV^U_DkfTl%|t)dYUl{d$OU)a-LJ@1 zVG}sEaaQDUWa7F&(m!mfe2pR#9&C49pKE{B8qdOz7gm+VEU4Pc?}O{tw=H&NG1BOm z$c1y`SA+3kAM~JwPBChg1}v@L*yz`4oTjIj^+4Y%`h+ce+*>M*rNHqC4Vj1;B)5Gj z=~W^KE$!Uv1bf{;y;rxsQ_8GGe~0!H^#8^je_L_=vy$e2V~#ZcKfoNr)2LbA-1(YY zRAUp@Z+~G&W_4-eE$EJQjCz01N0itt{RW>$y+De9?8_;H1e3dAN8b&qsmp1uMIp)M z+BDAH6j8tY+YbB4Hl!hTvx_O9q`_(9X$vxf6X2Q~!aIPVa%_byL z(|ZwB{uHII^!36h;4wcmTlwG=S3M)1Nj;Gx7eDt7A}Fdns7iN`R9IPt!xb(j1f1whp;Xp^^;nFiM{LRQ$#XKsW>vHaZwaWOB`Ct+ zLLRbj8<20@o zl_sXI-+@$YR+d!W2S}czhocp)j!~7p3{llu*}e5|8(zv{igm6naiop(MiLA&Y*02% z^?MVL;p5CVNETE;lDqvViv#mSW;4fP_RUKzxUYtc)2edhxy>>Ua2cDKB5yf@j*^3T zek%$!m(XqFSwsJD11+8p8Br)L9+8Yr2BZ_`yN6j#)^{Ip%q2@KRiWW0_t8_13M;C> zJt|uCB=gV;&i9#_D*fyjuc_P+#VGms(dCPMJa-h5bkm)>^F1%mQmtehDdEnzYZSfZ zFZ~vDtc>|=&&BoKP|X{f?j{tSXnqrQuZ@b;ebGv>GcC6gtcI|+Jr%5i7M@?O(!#po zZ?BGM%$11X!&2kM$RteQ<5TJQ(JGb`<1Aq}XDYb?Y|=gsb~juK><#1Af<(>ZUAILd zTo1B&lu2+Ip()+xp1;^qu&>q z?Lm50*{unt+q|6q88|tcTSvcLr5(`E5H*+m-QZh42lH2mvi_y$zc3d211dZjNGngs zyp}p?Ch@~np+3*^#dWsxZL!zjl2P~j0Unhq29Ammdt^V+Pe^{qa5Q{yEx^2nHOVc^24VYKpO7j}%o0JqNa<6k6Ez@g zCUSAT!O6TY12F^)-6^L!Vq*1b^_ zEEGFPRS^pahzLlHihxo?rAZBlfRxaC2t`zsB7#z+BTY&OHK7F%1f+#dfIw(M2rZP5 z0HJ&j$ornP&)#e8amG1keBb!S%3nz2srNJQ=f37Of0GC8!}et)gSvS@YuNQ52?|)) 
zJ8W^@j1eVij!~eI>B^+2wRN0NL-DIDo+tdiWZLtp1=w&wTYTnixEYsSue9&QeYu9~ z)4zOKjqU=(JL2O6VMOnWo^Id4=q6x}7!ihZKlFnzthEy36t1q0hh@8X@k^rqjMavK zf3o15u*rXFa@z@4Q*5o#fS)z{#aYptd`tVpe(l@^Mxfd~)z9j@7+)OlZ7t5Lt2kx- z`o!5ko4p_z()qWR0{hGy`{;ir#kl0hkA2vOi1eh{^@2xXXEf|l?c7TPaZXRsiSi? zI$e>uUkD<<5(B11D;m#_Xbe*KDo!}zrk0A&vKo$&NjGo z7}nbMut($ow_R^ZlU?c{3vW1} zQ)%BAa%%;rPx1q*7fuONSBUY3KuEe!Sw(7Vg&sd8%W*B0#i~hVeE^yBB0cK69(1c` zKqn&ZL}iJ*0a~E$%+4N$!t(j;`jKGQgPlaomwMp^ZIh=OE{9eB%zw%-XNe_&`X)%Rx7i zZs3%4BCS@U%yYACWyt)gG7C<6Dd-ASj{nWFp426Gu|QBEjhSi|hIDOm+4eS8Wz%#` z-0ZL03|bc&WMjv~GGwK1iuuU$q9Z>1EnUBj9SPRfl!1 zxrXD%>e9b8k#CcpK(ZjXLNzb`l%}$gXIC)usS7CICMDQcfjCA-vE&!Q*(&i9xL*#b z`}Y&*UU#6eGEP-(#gV@ybjx_MuH@rYqxbS)yhm~qG`l@C1Aji|<}iF)B8&oe;>^b_ z4jpd|%<};%f2*xdPEhqENo}p!E{W01-wXHME9?+pn}M9y+~;(^CwSoO=1JE&m$i)@ zcqiq%rqSC!*Z zMP|z^jr%g)PNjYsdGYcqFBxf7SE%oLV<5Nxc}yKZtI(Tr6J9~txzR5)-vW*0>FSvm zsbnM5ywxvND|=O9JpubDrb<<-q<%!@EmAA;d)cig;>O^NuECE776glegtP^S-+K%1 z=94JemNifRT+R@u|K7)`#3x2)u?yaLRxSQPte0bEU5;tC7}9a2`!IhG1ON8>$*Jro zn!X89rSPY%x9l&pQw}g-jKj!lRRB~bJFi-fXH#a%9)b$+8JzgOE8cA%APJ_f8}#D! 
z`Fo=Vsb4ai`7mYYQ??iX98--$)4!}Q8nY1SM4#B|OqU$+O#PCjV!!c#-<(s~el*EU zIDWokZlGwlzW?S&Zkdcn-Rito_O|-Z(op{v@BS^`{o643Z^Pi9(DT0vrv}obpe#1u z;`?Eh4qcDU9uB$_!_#-=D>rFN5A41FsOX`(AI|(d9wtr5QnDW)s@4zNXSdrO!4Gn6 zwvP0Dj-DL+xcTuGc0A0D_(t;2EaAuXKvCLHHD|XJyHv?Wa@gFG>9eGDF}hyf9l%d; zP>BG_`1hofeM#RrapHBmeTVFCAhSjn-2s>uSFU7vM=l_p8v-f>0pV&mx^j8n{8Gmk z7~p~%G|Pfrwn`W+fxkzbwqyL9JFgJocF`{Q)0#xYJOSS3KN(h_%*(I2~&U0mcwvJqrw*JoX+}`!HmfenQ zRVV=O1!U2K7gGk>cgY2=(K2Br*wTB(4U4z$<-G3mQmw}%Y& zLldo$6)uV%iA9gevc*6vM43%$8M0@#a_G4 z-v0O;uJTGL=>}*g>I&MuVpt;xDFmPp5EwwJTe=}r=TJBQi*owok1;N)+s;Oiw-0t} zQHoj~nE{|v)_Iizyp)kxKWA=x(AHh=ggv{DH#b$rH{*s|6$gi|jsHsQDCzBkdVO!& z8P{z}!W=?!|NVfY%pknV#I=&Tlp9DMJy@@hO)(nm9?vbLNXgAHx?!aE?12Z0b?+<< z5HLFc&g1#C?yg)c!gH#DqRDNqX}WPS?t}}8?w~&QOytgO9{D((6ITw&-5I@GrNz2& z@teWn+0A&Jj3mV=*F+Y^3e6VZQy|@Ns>Ar(=$>&~$^Pz*1@)Yg8mTIWx9}Anfp=vj z*DFFYb&)@|>)?+m2|dBk1S>-92gY2C<+>(;E6L^=3`tn~ecn}?2=8_y7y?B%B7fUyk{r%mu&eLN$R{`fAx$H7FggMfHB_roDODUB_ z=tv362agH4lFL%jcvo_nyee*)aLZ@A^Vm{{rHo%YsF?IJ0><)(!ci+aIL|0pn%Zar zs_srQUs30*5&uMi9b&v0ib_4o>J)q5@#(q0nj$Eripus+-#qlWh^K2M0}`;(Q!Krd zoshZ}fliil6(_TYr!C9mEIOR|>Im70krX@qEmnv3nWcjEoa`I>LXUgUMcJ=zmlE8p zFdSp;p!v`O)7xa53;n#XOno(eSSmK?s*F+4rs$DB1I?+1oux}t+vkc-b|yhP*((xr zMN9IYRld9PU+9g1N~FYrEP-nJ`8!G#T9GaE#~mS1Ug{P!#jAxf+IAM9>|X0>%~X7z zW7Rg&qvMG0cBhVls8tRTEwJLxzAS#ihVXJ^$FK1q`!s1k2!=42i9K{TxOQ2`#|!?v zHgaanL`d_ddvu}$qXYTA7mDB*QG;i?qbL9A-i@Mj1JSrzi#wMSb3SKQ*ajcR@mx;X zS=n`l+cI==F$am)X@x6~yaebf?dz2a+RfonvPUJ)+*kX)Fl^{3nJ`}N%vct}5--J0 zFO59#@Rh*h@hh|4#9%m_#p1*@Du$AL$b>OZwSd0G?1b2;n!kjnaaR zhyso8&y`{t=x6GhA5BeB83QOl&D=+vBH?FHKlGCsR}A9Z1!4lr{5>wytuiPT%cT$B zlj1u&;|3c0aU{^6*=j^)l+oXQw#n4?;%RAJKwpIb5o?rX(_>C}Tk!84imS}^I?iI(nR#~>67znG0mD=O+LT9^!LJsHuYj3IvbEAB*_FzV zCjL%+u$wou^ZC8jzb}gfX#dxkH7FCS#4!`{^}afgxb|2@AYM#9Fed5=#|!BGBV!0y zRrr2sb#Bb_K9i3qNv3PKg^J!J`1=sd;?Fyes(tm5=%MVV^-67D{oN3Q{pmk+W@qC2 zyKlyY|F(e`UDv-X%z0+wvAR=VhVk)z3f@{`bz`Bsz`4tL6WHu~F!>LqEB}NzdpeEp z>emxnF9_$=6Qs3uja(xWj??{Whz9#)VK9%? 
z-`taM9xaz@pMg@1YHySBgngxjj-9ki{!QT)KuEp1$`y>w8Cx<~t+3R2)p9_lU3W9t zZJ`w0?W%*w^;BcrnYwotJKq_%P^&oDU#c{MY;j9)v7AYqf$0}@Ko^iuy{)`++L)d^ z=?aYskS_T-7k^~?sfMupzv>1L)3JyAVe+1m*?ZTl`IVx)!B6#f>)~>#n|ap&!`HnV z9Pt@aLty1FKoc6t2Vt`S%Un@rYn8=140q6?JZj!yYTo-v)5IIiLG7!M+qfL#%iR5$ zx|o=Cd_m0U@SF|MEJ?BytO1<=pO2XmkmXer5RmX+GXorj_hncH1_tyi@3#cCel{q{ z5T!{7Q?=rn|LO`=(d6s@nJEK3VVL-bgU=vKr3DvC$gy@8`^>1!mWt z-PaB1{Dz|H0z$p~%W;ntGhGHDB`^AeRy7OG^ATEe2AxeZc$^ICV;Z}WgMU>m-PHLF zP-xPP50~m5_EDi~G1AWgU6`85qP~L$mTft=j5@lDaE54QZp;_3X<`9HtrMcJ<*ibo z(-@ZNP4vQn{@&2Je+dL9_B#XZwDgT6H90xAwux6h`RZR4Olm8qN;44tsuyE^0uYlW zhI9KU@K}3Geob&xX0wIN$-)P&Tk6^QntVCN=AMyGZr?Lb;!Oehc<>Ga_J%rvPO^yn zo?T*O&<&$AyB{afDxeK7Q()767P-X1b#3K*)B;$34p-mr`o9M|;cWvPcsk22vQnqr zDAXL#;w!lHx8_Q{!43=T`Qz~`g*U7QWm;#b*<~+3M167j?q-HgkYc%!b=*i{Yimz@ zwuE#%FAw=M4jKdiskltrW(Ys+-#CF9G-rpXmDm|u!EsfV*8rsmFnY1{3iBP}j*)=$ zUqG`1O>kcxe)Qm;R?m$#qb#8o)wC%zhw@XepWcw+$_F!R4+H1_c#Y{oc9{rlLs4u1 zZ5Cism-D+#ozSucglA=VtzvHm{t%J$T=5C(J90SGSAF72l%#R** zeu?`a_y8IHT$FdX@rLhVXG02uf^&M&jeH1oN?}xdY&aIlkEvwlye&vvevcd;B+b2S zYChHmE-~b{?!%96Ox=ml-l$liCvrClvs!-H~nSG43+L)n^wI ziSaBIE~GZ_Ij?F;`9sZt#09y_(DrSKRKlMev573-!^TpR>|g4tp{BDta=wXE(x8uA zNYhkKC!K7mj@neaM@0gure4Z$Yjdn4?Kw2XZZiDt@vnUX+*wS4U2TzZF3vedb_<7Y z>%NPMyIWj1K(?b|zvc{ftw{_`hQsa%db9|}4q0{c4-CDUMqO>HSihjXPAh3V%ltgf zhH6u0bai+gf6VgEAytevk`f=iF^JKo>BkMsRsmIkU;{>TgW@ipOojt=&O1^@5v~RjuSW_m85A z8e}z6%8T@@pbQZW*q&*N-sd24kTvsJd6;Gohg3M zHhAWk10Zzrf0C%s9>i4Y9=!Mcs2Z&<#pMye7V(!7M1B`wzmI}^BF3CLW)7TZGi^%! 
z${R@c7$2sxY^7^~FV~pUMy8hLYP=FuH1TmTbS#cuRl-vA)vGu+=;SnoonR9gm)59Y zN=TlZEU!S1_G^oXC{v9mlL)%wjOlR1<@H%Ve%B@H6c-Ao$X`6nbSts@fOQ^|*4wy1 zNjj@Jr4YUx>J(1^L{JMn#qy3*x*^#XuMiQ(wh$T|F?mV(&uxAWOoQ)kJbE&(a*nCJ zx3WgXIMU6-_?B~b?ul$N%VKt@I;$P1Si;mU?Y?~E8SN%x)hNDWzpDiRcr`3a2}rKQ zax(bG9RW1q$44Af&vrpLCCbiX@z9m~s}YkqwS)m36r%z$_v7`@R)UM)DF)6}eI`J= zFOob}=xgs|kn&7KQNvU0as~AST1k^@-=1(s`4bCMvnlLlY!D-pbfr+4o-_%erD-{DVMQ`FJJE0n3j5Tid~A>z_QMut=%wesL# zlWh*mf)Jok;W_6oBX*YW^jufeoa~o1eitz9yccjV%>j>XN0xj}rGK&sj zeEq31PcCw^$KSq2bc5<;Dr)VMoG<<=CCp;aUo~g11YIeKRhm88XbX_C z`DdQgqWw2+nGf?cMCUaR=p-*xryHH-(C?HiGV0tVN| zj7>JU&+|~AU`jPwE#SMw4o_tth?^#9jc{gp^@lh?_vsbWAtLXrBv8w2|8cOXb85Q9 zYvF73YF5I71+jZC&7D2`I-ZZr+>JE=Tj)pM=$vwi>{_Tv>1=)>BAXC->4N$w>s>vL zBc*^NTn`c$Wh>Fxzn67nQ;D57`IdEG6KFo-JegZ#l5G6m;8}?|-Pr+5RonaM>pAi0 zV=m&PP~Hk=Y+PI_#At3bt}Qafc_NVzAz(h|bjr-$`w(RSX{uNHR;9sN+GT74vw&Wg zo0wHIBlnLYe3Dk*AE;zRQDMqOu`++3j@o|)b*Hw)%ec0rX^;mItkabVZ@FI7I##2K zVPE<%+D66bCLl_X5Z$NT?`qhioB6b@+xi*yi2gZtb`GFO>1Vyt$vXUJ)h!mA?}Fxb z@e#8q`lqq5VgL-W**~=XTXklfmJdV42_~2$^#{n>_V8!Pi1nHjq4cb z)JK_aXpV?;9{OWPXvn$&c**YPp}J*1TYNwZi?xcu9}u+P@BeM4!(l_=RRO`zv8N)4 zhGyHxUYw9r`LyyUn^7kjzBg*wkN%V1A6qPtcJ-Bu`p84@@1jJjJ_i+`d$>f`HO&;e$ z4Wd$ikr@{}PZ_FNeZV>acj zF@nA~^D2n-N;!_Atf9TdGeLa>@-5N1!sI;@NNHkF+)rZC{r+#NZ;Tg4lN;4#qAtU= zLUt3J8N$QMTfSTaDh}=-6)!Hl=wqIBGxos$oVII&Sngfbf10)%%6;f=SWP}TeCxWR z%(+0xm}x^J(ES6*qfNsztJJ){=9i}1ozakv|FQZgyJ0EVC0g|~oP|z7`A=`4(Pt@_ zxJ#LK37V^u(cnSA^MCj2NXnG^pPcnCfN3C=wPT-J6OuZY>_!XhY;r##`4x~7Yt}dPi-B^<{+F`RrX}?bVU|)08dHa_u7-7%^tzeNte!C*sdlt9b}? 
zBDrH)i`%8X?gA6m%;v?dpQA3FkTB?OE*2_Hq&6_X=Mq=EzCE*f*6RZM1z_ zPPPTZ&N5#?)>4R0xEZv%h@cSHdC%d9Et5crFNW4$RQ6;4&KUXtYX{z4{IEdpAb=$} z--qtiuefVH5U2j#2Cy?*PDV%QXrgQpod%S3Kj1Xyh0?ti3mJ!^cX`ZtS9)2;ruOf$ zL)LGTPl=UsiuaE*S5GRzB&pV;9 zPZSR66VK}Q94s8xo6_mLN?V9g*;<>e8#x__yVUrVupNSkegaEcgsB$(aW?a}aPC)N z)l7;@2zIG|NA0>H8*om!yUyZp<6a?V((FeFei;BYh_#Db(TI$5v)0=3@L=GGYk(Sf z=YWOz^YnP8$B9ofGhQhbdu;xr!?|+b;rv6zQ)E#>d&Xvd6}Q)Fv958!Fb+3?b$FEk z&FRf=dqcw@+o%`xKiHR!Q6HDwc#o=ZtiIZLb2uKg=WMn+?3=$K-$aGPIScC}GpmnI z`dlI4u?Ot5{yZ}#UpV9Lc}3)lS@kZxW{e=Gu^@M@0gcpD`D?3&Bxf(m*$0@T?&A>g z80!m9MgR+CMHY{S+wemmGjc%a<{0I!U1h0|<)t5+fCtjS4j{j4H|F$c7AJq@n9Gq* zmJshUSXH07jUJ`HNE?pRks)2w*CTPYXiSVG0Cmwbv;c`7kmD%KUFPg|+qFRViAx1e zpqD_0zF)=ra>vbMA#E47@G_NmosC``k4ertYF_`*N|8f3A-j?7bOxh9hLOc$NBX0@ z(=wp`?6e^S{;|&Re4$)V0#-iKJeUnJ+4_$zX3ak?X8r%@Vh)dzaJT9_n6-i;9jaaX zX?C=HA+WvP?pfJg*#2-o(A+E!?fjuc>N;&_a4xF|V4A;S$x3H* zdNM=T&+I$=WVrgVTOjUhj0%LaoHqMrNB`nsgR}CB=R{6MPrrNI759k#>~vUFUP_A% zzgf;sQRkyaf4RTPL^;wpciXOxUAV2;Zt*+fkGVPA&Y|-ru1Yy+w&)A?U0GCR)YVRT zT9e2BY;i8~9ZCFh(d~Y^mvzB8QA&e?ZKeMWX4RNKaxh&;GSDEr?m#j`GdzO#O;?!b zjq8b`iqg&kR{5}Hm+0bA7M$4-XJwJMbGeBO?5l$M zHzV%S6r~5X>njYPHhI8J4Ar!+7CTc}0qdS$=>ak=QdSKmfAwu(AKtBqJz&xTEzse* zYP|NA12b@RB9tRigjqLf7s6ihT`QR3xlnLt3-?er`BvDW-x(TIO4M}$#Cs+VT%p*b z_D!JqrF~9D6r!0+o;3{;Y$VBtYX`Xm5b)XpNfut4*q^wzJJ{l*lWe(6tTCL@=iC!U zX^-3gvORZiCCaYV+15cwG->2r{GMLBD%eHBztk@& zwyqe7UMJHp{oo7Tec&h@?)guvK^x@^S>6V&=OW90aU_rtxj0aoKe@ z#7xPiTX9b~lu6tHd91iq!=OL)GB3AU zMS?(r@zmd&kIm*y#<9AwwH_i(<^H zfbWMhL}tDI3*9W`cY&Mzuv$yZ`2n&q?`fbUB7?b2qizeoe~CMT{x&}Qom8~hZvX?- zG~Wc$?)#sv3;(Uc9r`Z|mk+-|L}5=^Iro7 zB=1Q(MI7xEaZBv%$yyzyi<6$8S&9Gy3i$Y`Ucv^r=vN#{@YDtw zm){5Z5OWj!JDat>0qs<3Mq@@2rrf`;?>Ae##TjsKgK_X4yZqEwH(>xqb<<Lw{1xwd zR%^mXnz)hWYIAI(%c4Dxmg6uFnkZ}tw2QYPLn>^skK8jBvhbPW$I%44is)XF_0vhU z`RpQEG0j;qtp4e<^&6&}M=BhU3PqOfNKHDwA9waTABhxb?j2SLmsc9tiT9Oq+dau# zyH`U5Iot1B^Nnp}ui**VFd=sZa_Pr%ckSYCFe#Z*DrAyjGr(er$1q09KE9J~Rr{9AzmeU1`Cq5-4!0QEz zbAQPKA}2?Y{e+W7NtWT?Z}yTv8(`ebQiJ8{lLE3u+p$QI;gpeq^yof#Ijq2!NwB#5 
zvDbG2!vp)VkM0*lO1A$y+xLojEfus)*KRjj0-layq4a6QwtaCsmXiZFBd z&u{dfS&Nu=Z%K_q_J93YCe*==gsX|HxgB!S--QaS$(gHDN@%wm5rJsy#ybn zOFj38cm^yMN{-)TD81-Kcp-;$%|XUCngyrRx6fdAtaWb+b9EemlcN}4FCS;5%na?E z@g2Twi8k6W_5W)bIhK=;LDCG$cM zgvtfq@~7nvYv2UTyQ0?f1W? zaKEMZ7q};s2-SyYTXq65M)?|_*JpXB3+g|&27lkLg`;aI`=_?m=sMr7;zwf#?(VE& zp{pNYb_|?b(QwYbM&(kZ$d70iOJsaCx&Xh}QZZl2G$Q^V#_pp5zii~OfJURa$m%xOXevL^FlKlYkb zw)~%SkE+g{*}JlQng4a1b;szWCHYdhU8j{`k{2J4x@gt-Z-m4TpiQJml7w`3C@=K1 zojA0VKcqsIQtlyhj^rv*A@uS-PObmlFshn$B);sHXnnm%d8O6fegF(1=2z#1Ei;(I zbmbM{N=+tQ#g5W3tuLn=u|GR1Wunv>fqHHE+keyj<=+Jifg;@lYW%9av*7E`24V z%+uB@$d2|y3HP{R09nCj$xR4!H9r(L3L2=8!m6QLOTk>z^X)U%5XeMFs`4sR~Mc@yjs#%K9D;=-|mX6~v#-v`wkDkdJF`O!OxzPiKp zZt8^ieY2Q%Rwo!ZriK8){T3Ayx~|$n(`pWATNi0)&9M)zckj*c6m_0=H8{&a za^EN^SbNv~qT0zvxg2Pn@ZuVNJB!d)tyJ%_+Ui)~vOW6t2XP47y6NrhrBY^c^G3B) zRj~(5>hs(UEa|CZQyJ~&xQP91^xo>b_%z(;m8Rgt4`pm)BKkG%(GpRl^}QTc=)>E2 zLn{yyMP_S7)ETi_;9+WZct@hCnoj!iW4vB#VNM!YiTc?xFr2*_L6l{L5}5vmyKCW-6-0Y zh<;RjoLpwVjS@w#(#^pjt+@D>4!q5-L1PHhUQ55!(e*w{nM)gUwM2x0<%9DIA=db_ z!t&ip(HdjkU;>CI(7oEu6~`rZ)pwrSx-bQ|5{8fXUT2*^1g8_XZiIDjY_vteY`llX z6Y&$}kl2xG?>O>Xoz-f38n@^3xw3^N4~JjXFQ)>*t&VYeFEe_)Dsntsu!Y%AJIq|Lq&-r&r4Z6J{iR5{NE0cSYhl6cdltc%L{hn}4S;WdoJ##`8)Vx!Jh%tNi^@nuN|LwtI%^PfIEY zWU3^?wkDo8kaV`dqSxWFBkgAub9>R&B>3C!vp=uP3jK!M_0ql!;sL8fwy3ArfH`t< zTAC-S?)OS~>;z-e9Hs3Yf41hJs+YIkYKvIxKsIel^rL04? 
ztx}^IHlrB=>^<;188eG&c#ib3RjcenyvLxSVHw@*r=Sv`0mxG(R)HH1kit zYL{KnH(98JV3KMMB3@#@aep;^g_Ds%Epi$=vbb(LyzzW<5q*=!D(k-v5KCEp~oH{+fc`b>5vk_WzV z#&Y(t3gprT@!qgaeobV7g%OFu1Di>eQ^vcO8FU`}<~~imYv|*x4JP?pTg%*f_=EIV z5OKk7b2&{f$HBR}8}zi8L4WOTSz1Xtt9V{}-LSEcpGw{4HVH+NhrR2GB zLbIuWS&C=kYV*nmTTryNZ&nAwY3TJk(h+?$Xws(3+XoFs`Lp`PvU0>S#KEB~>o&1l z(2k8Q`}DPlR8zn11u7!R|3Nh3qqva%-jelZVeyzrs`HDbGTQHe&^|8?wty=bV$o}H zR-6ERox+PLa^pm$lkW;PL*fGcQNG#RN!ZQz&T-Hi8d5@e)m};ujFRSz8)sN96s^)T z-|6-#GFv5&Y`dwImRl~igI^il~aeH-aBEs=7T79>rG z9|o8h-ZC$%hdOZvtX#`6DpLZKfdu zo@!|IWDj+;s(v&vRC{w$>Tz3M#i0Dgo$4$x)?npwFsfMH$R=`*O9NAVq@_>X*AeLS zew+DroLu0b=M=)Kmo}m*jkV=nZI?m&4nRvRkx1(cooqbbvc^0$U|`VF(RJXZCG3Vu z(<`foCG7EBMn*>A^I{UV{U-pXiQp7BPCIW{3hgj4;D{fGjSs<(;Tf)sP*VdTuxsrs zU-aRIY&Orrw&v#ys#VcDRE$t90bceoYJ|&@<+o~3LSJ(!F=U`%D$PyK7uc8Im>4Y) z6C(pL3gMQKXb3rf8i1mUT5HjIJj48Wh%y1++Fi{KuhSPeEN%2VZ_wUO^CeQNU!@e^ zup<^(!w#tky?G&nyR{p1I?idOQ8mMTcFeCvp(6y<nG@9`&bvt2yQy!tZV(z zEDle!7)&??yIN*uet7TTG`!}Y8Kji=-OwH^zXTIZ5(KtWBb+xZ*yX-{A%Rk2-V*ih zD7!H?H#hTX;H$gfLqyj@M5e$qv6=0IL4?H6dUtQhMDJzaBVDG9569M10=H=~p0&zm z%Lf>zK%0kVcK=B5HCg(#%R7aQ5Fy_wHpiwNPpeJdr1juC3ktkv@IP2SRn?T!XrQ1S z%Bua($sMY>=w!0{Z_uILlZriWd*r#4 zqu^ESZc%2hjmFO{H=WQm|J3+03czO*o;`GxKMqa?73)>36_- zDNTvaPaDbvYRXY;=_@XR?<5-kx0Q?+Oq<3C$YuehGzWGy7Ee=?b^}_oNHq) zNaLOMu}%@spt^XK=Gi?NAq8{(9tEad<8mTA~b;h_IcI`^iM#=N|zBZ{#Iycq~ zcV-3BPi!?m*Ml0mRZjaD>NO=sxsz68mby0`n`TNq@BOTwEcaMMmYPAzf=r!f2jnhp zgb9QAKPFILx{=bo*9&+z?zhHK-_9XReP?3_m#ae)$5*eK8SDE}5}xi*;OG*7>-%gT zgR$<-)?V+4-tOC-gXU%iB_4vjFj7kkE|#0}qSq_*%BIpih@F9Eid-kJ`o16)UwJWD ze8-CpI9tl|q3GeX4mpGXXg10g$YtXl_^73klv=6yvW_-b?djfqr zsAB8U*@dKSi}liH$A~*Keg4~#^h`-47zygwZyH$t`tb2)ez~r~VqwVck=(WMN@M-? 
z^Ui#EdGm^`w52|>G}j3PFq&nx6No8+QNn5twYjwOI#oA?yE~;rz zyZ^_2A-rkTEFL&W4 z`rNlnbE`e?FU)3yhv*#aCSVC%I_37VTaO>6|C|7E_-az*5heQPJIV)5zM12qUW~9s zZMBY^CVYYQR~VFX`Kos^!Uh{BiXWBCgV=0Fq9M^y^mA`!>RbRjIq+!IxY`btI;wj5 zRzUETQsw34mvEN0{rMNCWRbe9GRPdm<2d{74E3ezZ7c#%{FYnSwyAbP(G-!@+ZyyA zGx~&!GBf$Do^)y{%8O#wHlim!52HJ6TA)bvfS{?Z4o$)6o^A8CgeBdiGVw%2TXfhP zx25%VU-{~-8QtpG5tetXu(i)i?mG^!%`x8Xry)FY<%YXBu_rMw4a?R9q;SJWZcej? zkPjFPhFv@xHdDF!&@wuL;gFmg{_~`vAhhu(EEu|uZDN^R9(so2Q5A1aOZ#L;p6+y3&H ziIGzI#1OvmwCX#zDA%G3A(o|7!HZ0mPSq>{Sd++$<&fZs zc2^GXH>SNk;Vf~BrOWDck#)N3%W%3LgkSe_d zViA(Kik(J-r#{~fiI~`)6ku{+ZH$D7UR#Hl5NBes z8o+kwU{=%%XgNks*c9>kN$Afn0kPQ?N$ZZX>p61c9jt8VU*Pr0jk)TH7hx!K$$>51 zqBv9)+(WI(j*6-`=x!C^4Y~S^f5Uks)CN(0DK02VRFS<(-q@4Bf|$t57PwVkZ+;Djj08926e!OnhEeqLaC|r<)W~|$$uZt2u3WG&_=NZJJ-A>K z6*SF#vRWk!%o_%Z*hrQ{#Ws=8Ztj^^^1E$XF2KSnv%O)>)c{kv4n=@NfpWPx@L2CGW($rRAVPwh1`72Bqc>bZzzFXcUa5Z@Ab-x+>+a7W z{w%nvsE}7_zdN{>G>oS-it^*ILAv;JS4*aSjo(m6??Jtsdm$KDdJ#=hCr6Xp)7@?e z>1SPLrVov3d3V1%Iej(AdB|+KeQ)kIxoVN$$^CL?qCG^xos`P0n!&C{9_{yie{k+h z1fh2}tWxN)_1P(qGxR+9_#bfgpdL==k8!JmXTQr%&5DYb#YC~%a&ou({3~BO%<5Z|% z3%}e#^$lq2dCA7(lira(Xli6wo?H+_KVcQK@Tnn=#h9Oy)}C8=#!ORUQv zuBu~kbfCR#)2Aa{?hL^4wcfq$1M8O}vgNZm8Tksmit_7BM(0`O%jK)7%c&qced=AH zc=|;x==18ER_*I z{Iqa(erQQ`TvcW{c(1r>FbIg}WPd)E56`I{vi(np)wlX`IoZV86HY=U5C&D!eJ>?X zQ-+_Ku3~22a#nK$n<=#pb{?SK+^ESte=?YDUGT*2=U?;IhYS#P%7$G2{Ys;8BdBoPZ?P3lDFZfh9oTTr!o zx=;6ZTq1OPy@lfG%Zt#c^4Tt&!*4wIHipAh3fkWhbmT`CV7C)2XD529S<>Il=7m9K z)IEK}I3khMXdqcU@ItAB_#W7P!pl z6|Cjqmamt%-a+VIE5C6LDTy~D-wF; zmwCR*Bl@ShQk&nt?ThSFG>FJjK7-3}SgNc)DTc`7wp7afI#SU{>8cNF9GjGosZ3}{ zjyrRj(wsP<;0hMb^%8}KvxKq!eCqhKr@>Van=69lS6Kb}(0^^Nvb=&OT5$PXX1is; zYQY`o4yFxNCLy2T`&iYp2IsNxXj`$yIIuq4VCAcEG`CDM#O@B3OAU0jAPWXtq07K*B?rp(e2WDN3ou`vN z7EPU&!_7saeZYCRE7c3eb7efs&W{fG^g^{a4Z120?~NdGpBvnWf^$!Lmt1y)y|F%V zwoge&scI+1d^m-8T+&A-s^Oxeosr3wziN9>R;OX8w6KLw%2iuiw0a~k=22aa?K4N= znepeq@ltv5S?&+xhjc-6gS-kJx1_SE07--~lO;n?2S($cykH+nH z#e6C&(E{e?t8d0#6K^E}>;D`0W1=-&O&}*nP!pL5de_EBc`q18*7%mW+nMnQ+bE0KDMb 
zn#+5TcU>R~oJZQ~G@3W)ei5gV1#+J+m0Qlw8!(=;3@>%uR%5!hxiHWOjjKAMbwVPX1ncCErHHa82RtNPYwjSwKA;Tn+6 z)mj)Wn~RfXo;Pal6AwKA>vS(4lS)8cHWtP%+*%gVZmy9kS;=B`Te*wOowI+ns>$)X z{Q-{(_11L?HyF?TJ>#YnqSTHB9=IkZX9%ukY92+nZgmnwxOIy?nsCib4>XftyK(7N z3UdH?+6ot5J6otPE~MI9NF~6HV+EeiddW$uxxxj#1O=TLtyMTi3*XBA99D6iH}YbQ z`1-}v6hY|ev4{Q?%tm*!UGwZ8dZK7_s6R`(Jk#AP;o_Rw@1z!E}BJ~nN{R=*%a8PP$toB@Wwp@teX*YrJ;5$8__^;H`&^7}$7-MBe357Af4Q<0?V_VTi z(X7+`gydxOmw*exXklT*D$*EG3#}Y;+<3PH2Pv0W>0QL0hV4FkwL80WV6gvVMr2sp=D}G5IYM}qmcZbU77B9R zYfzU8-kogy(&Pfj*K#a4^%K7YNF&+lmLe)JL0PVH>FoxU8u3tUd_xD2gm7Hg6&Upq zlO%33a^mGohEx}K_UecrVa6LV5*#kpVK92%{LKnl~#?f8Eu z@q>YMMroB5aAvfNqDAYgWn;yO&Tv@4C;3d*6mOIZ+E9*|lo->2PvWkbU5^-1!Y?Jsqlh+5uu!&Q1ZySTb4BHR1( zO%+-_Z4oiwpsfR0l*^DN5gb zzMCfWJiAq^ulmiXX6tzLgSpr6PrHM@U*a{{=WM{8K7B*7IAs1pSpG`PnWXL7kmq8) zZ0B<&R-b@n_^L_;k@t5!JSK7bb?`Nxx(5H0PAJ{{Z?nH|KZbHj+Fd8vquSymm34J> zU(!ODIVFC^H&TK;8{HypOdTKtUtk!Y73<9fu&;se0h0^(HAefO0(&1Dmk=IJVIYe6 zrHKDCsq=W3mFo2ezN?cZ1%yOsg8_ay%IP{C{nFt7)82Q-HI=RH;xK|^fnlVm2vHbe z=pfQdR8(XTP$_~`K}0EH=q(A#AV}3gK&4qIQbR{6i6~7_5D)?h5T%8Xgb)%ENd0zT z&Uemt&Uf$m{=E18ZvI?5d+)W@UgdqC_j%qGfao63LLCmC4sF*4EbRDe!z-s+$2Hf- zVFrU>UYDT>SrZQt~^1RlIAq;zyLiEk;o#KtoUR&v`xMopDp#3!+{&kr7cCpWV-Ly#hW_}Ue9RPmiAmN+#onA_ehE7 z4EQS*97em-h9^t}px>sODIJ;RGjh*$78$o70in9%yV?fA4G^DXdWJ)9r-qhFF=-K6 zerAU)^Nn?Gdt8)nF!iX}bUU79$9<*^OXe<@NI|~c70n413QYAYg6c=$Vnd7s^T)z6 zlYWhP#!Z*`!nDdG4jt6(a34cK9{KEfz|xP|hQ=R_$&}h7pFu^BHOKyT`=jeSlAN+z zmPXsk{7pIKs*F6DGj90|1AKC_XmF~@cQ19<;6~J;^{4ZO}xY_VEv}* zz%K9Z0?X)Tf^}-5tK3X4S{5EqPP2=g*^i&nTh-UJ9J0zL0(AlqJ%5b$d9BJ`3hQdV zzRE1}EnuV1As!Uv1SFwjy;+Z%LJf5Wul@|dka~7UZQX%!l@-^&`r~r6Nj3oGtvRV# zxzr(BV>`dNu&3WFbUuJwol#Y?xso%svi0{gN%Ev3cjM&`?XQ3E1n{eJ@mglaep@T( zCxm*l5x(tF>+>sW%;<5AI)%JS$0tmz{teDGNLTR~gyv#~*QIUj8Q^UNHiXiClmkZV z)a4o6?L33vjGCI7KE^>={lH|SOtKU$gRF|qFx7&x-x~&WcD>oCxyq+Y_IVyuv*Tjo zV-XEOu=SA?!_{}f18;fiX$R%X&Z$rQVtpe!^=j1eOBD#!Bo`f&Wp=xNXf|cR^Fd^` zb+fy1QgFAGYtESORkQihD9;&n)g=I9NbPUJlq)U&D}R*bUP_g_vPHzG9O0$ z!Z5mAJ%3oD%-qZ*$Fbcrp=n>5na1JMo8E3={e)) 
z=a+V9%shN1XbR)oi)HUu?ir?DbQ^Y0hxtxL&L^IxN6^2Xg}iU4Gk2Lu4;t#NXae&! z5tq`IX;N--#_s6)OUjuz&f@~33h0$M-_0C3(91HdpKQta0%t=-7WVp=tux`hauv_x zG${{4*JhT&M|Vn8H_1Jt zK}^_<>#@~W4!>$9o=#n+E$&_q6Psp#DkT|C9+fT5Q8dF!;vER(souYH&1G#>n}P_* zDl!_Mg4TRbVbib8>UnbI)~k>`_2Jfw z^BOj5C&SlB&dnt?TFjJa$a184p@tfx;oF*DUCg18PQ26L>b|i-$DD@`ywz6LuGbk zDKB83@aJ&qLy8D`eTBH6xJq>HdBpILmRe@pk&INS&A0NGQR>iBmm=31a;^`=>P2d) zb}0`bU)CJ6%`&}NbhrVr6q?%kSMzcN&6}T$r2jbEP?J2FrMSX#^0W%`RO%&}#E87e zZ~Ap-cVusg<9FH+OkPb4<~RK=+pc@4u<&q`tyVeN`V2HL_oYE}WGx36O@wKcdCI}p zCldLakvno3HEwp&9;RAO7YgMf!@%DIiy|dYZFaBQZNAxQ_JZGr{3X|y?M5v)tHCt7 zvwoLeWvGH!HP@651HE=*(1%nrXC$CB7RjqrTPWS^j5`1I9dgi`SFn|*7eF0aMO}WokB(`%l+4e6k$t> zqE;H|;beK?WtkbjnegA^;zIfnv9rV;Bsrm^j>$IR|7x-{4}MT>eA#>>;6&W5VN!Xj zZF2M_sYdac?RR5-Bxo03em#BQUYVEavc*ay^jHpF<6^%&IUL#@D|sr`4o3}K_VfBA zMH9O%-+@3If`^L3TORo<+4?Fp6P;niuMGFzU>g$P%nK*Bq6ZUS`t`WWp?9P}JHNmfcvf#6BOK;PaY5AethQ>B29m*et zt1;ix>}flIWYFV*s}7PzG46-jR$U=Xh_ar;BmCVcOXzxZvx~X&%}Gl$kn;&g8(`IJ z`PD_D*L}R=m=$2BPM0l?VfDuF#zGWM){}fQJ|zsERjYl2KI$Qur$XoMF~mmTn_lXz z%WZYD@tHkEAO}tCE7;pt=z$@ zf2x)cS;e5^T4Elx$8zXWDP**5=s@g%IkX>_*E|&yGd~G1Jv|A-AK7 z`>7U-a)>&}d#wAg>0ldSlI>(UYUS$y(`MZ}d@-kBpr;U3#skgK?&6eLHJly?d)gLP4)=66o+0W%<}+Gsa{t0Gk&-yhkF=eKb_CZT&WD7~5k~ z?;G2-5?dV3mmD&ap^lsNlsrwo0y~K4F`138KW&=008Y#7nIu4D;!Y^_YRx`KBHp9m zVG;;i6J6KH8;4tCyHfmkDzicG!}k8nP~zMbRE5d87D!!e^K>QRub?Z&CRT(nWGbiT zo%tl>qP-XQQ|8=9PqIW3Y@;tQsK4L-j+rJS$~7Wu8m`wCO~j`3&J!cxuI+lCIcuF! 
zvu{a^x0DEVs%KYdICrEi`)HKJyOxLK)oMarB-gNTYVMVW$bO9<7>3`t5NG?HFvO#dcAOVq`k9T{$6z%0%NP3k$G(~v8=Yt@Z zE4BGgu>;RNx05=38N@Uz6u9T5gvFD~6a>z~_m>KC5%C7R&b-+sw1*mAXyPgX_k|uT zVEumqpE3@*Mash+(n4|SwdK5Ii{U1ts22p>W#H;__|>*A&CP-R=6Ew#`O9^(_&sx*w*J43{B7|;h5jKF`2P!lj{HBjKmIKZ zNT)gQU}M=|(QQQrzdU0N_6*aydDWPnq6!1d{JX}MnHjG#xU`qYxrulh$-$8|F8j3FeI)5c z`_^()t7d&IM#I!@QpGj6i?JP`dj5zMhE41Hi%cVu`p>MId;d^?>Cc-(3>Ey03pVO7 zGp8{^8VdS=9I2-{>nIIBpaO{iIp#3j`%_ikAfI8q&|2Kgd$w`w?T-gB1rr5Bu;d3%&}Dy3DUsKzAR8>ueeuCe0mIu*1~ z66`r~u^aZ;XCdX8X)(nG3^=YtMy?z?-+NN;${52QB`+j$?xC3W2!|E`VfTmm1zxb* zv5w+?*ywYu$7UmX`0M*=#+q(u^Z1}w* z071cp9cB8M#}6C%2>A}c967!WUe_iBQ=I$??{-ibpttC63eXLgJx;lyiR{JkmOr8u zYM#3OK{s>i{Y*p3uG{#G)U)N)nYx4yo$AcF9RdQF5P3<1uT)b}xinbT{!9C6f{ACk zas$?)3~9#WJ*k`Q*hlrGaBPs8j#Y4V#}osHCe-0bWDfUuXdg)W)Fu{pfHG)7CqWkU z`Az2e6f|Xyh^sPFn*A8t04<&qpPvBKW9k9ut`yRZHMUr4;Q_Nr6Wce7&4zwgjj(~_ z$hjQR2CS8GZ@8}%d$tXklSEOIjw>U*;uRW}#kxP!OnWstn;kIQ9S1E?)KYJLyZFim zze9+-zgRf!Jn8|TII~Y8_+SD5&(--3;+$xAO0rpm8VquC*Id9D{*{mSq1wi&#S&kB zB^v^M)36H};7G<^KLT`TeDU6IJ_{NUr$}thQ1|EPx}Ig{(JzH1`WJ6e%t?t4AQ)ww z{fuaQHE?+4P0Q+-VmPfSs@1LFaJ!`)`*8vUJ0zlERyot4 z$yJ4w%Uq5|>IqOS#NIDEyzT&|&$*W^sAHPvp?z0))|2|a%#91x7SK9W?U`lg@b5rYVVx)T%^puNDA*%o_@&h@iqN@%2 z;e?p<)d|VSneowp4*lo1K4&rY2bIB53W*yfEop-ZYg=J}M06R~J<@+S8kAz$Qs%o?=mg>4nLts2ll8~+c|0bh2M%xA zT4>KR_@#LUL1_dOm$FHklyS?){QyW!+ZYvUYgm35W2esIzTo7HjEp$gn@|5MkVkbp zNy77gTpEGBQs`d`-Y4%HNs9dBBDfk~V2>`f86G~WGcB)p{Jb3~1#SSlu!SlL)du{2 zlqJmM9BB@m{AE8gPvIi&+`q%g#4_o3Cmur$3tgFQ8~x&N*#yQsVEF|2@`GEWM|+;| zuzU|qfu)swkM{Vf5{S3)k;>>$?phjtm1K6Y=wDer+x#p`x3e)Up<)BCHTkw~5`R^c zT0`||jNT#S)q9C5Ec8yyq4G=-OD1_uu{*gFz{t;DfdnegG!pwB1~(M!zp7^0w& z;X8mgKZc#9!e0XT-0-D5q z7uL=a2K+_z^A*0;4xs}#2z)D(bLssSL^+pq&GU!`-JJwIn9CMVBO=^H^&~=~7(IC~ zVcWcmP)|tTlEVJqNxs-_#J|!+zSKeHdNdZen**^N& z0bAWMA>|%MS^96e%Hp>k^13a1yZ3=E(YK7+S~Zio!OzqZ0uif!9#pS8UGEEEZBvee zTTiO3{M`l8pN1WKc#h|)AFN6FtJI%a6_p%-$1?vqHAuud*01}}lPxyT|3H(r6{Zlw z+$sM%?2BSgQ)c&wcO2t=_wgBM5=@dWm>dn)LrPvz;bCDXE+mk(f^k24n$I>x&B@{p 
z{1+bK&MiEdqz#O$rz2W0>V-I>YfJBbFc)+$Y+}ddC|+SCpKVAe&g?dsfn1ai5x?<# zW-C4+cnxsd8Gcz2SPl=^TBf|Nkr0(y#p1sa@s!NASv<8C#r?F$itez@_Ndu1>pAe@ zScf=96<@NA-0b>xr?#61AQ|7l5_#slqFDK4W_zr^TgXRJdq`Yu#fak;Bj}y}>zxEJ zbPYa#t>9<5i2Q$~`{`SoZ4(bkKdTgD8*kae@|2Dmlbi{Ms@yZ10iM^VQ~7FztC41dH>qvxgQ*_s z23{veh=xpR$PE(*vR`e6j)K`int9mohuJSs>6Wg8;u)6sf?sH@k%lLFUt9GWGe{;r zq3O30)*lf(5MZcZ1*JM-dN1SKgWxNf#o6O(W+VAp^@H$GB0Z$T-%^ zsG59igd_W4l@Cc(EPWE^DanlEH|TV@)puO3g?&ZW~BmN^6*s5S-e3Ujk}OEAf?xVJc>a0!*y;OHMlVVMgBzT zm$>|%;0e(;Mg5^FfjAm4X5;1jILQ4C{F)TUfRxQ1+_jVPH*cW5y!E+PsFI)AYM{id{n`cYjtdr_7t=HNDkyGAY*b@Pc-R@XBP$q?Dn%X;d znhtU=tj+^?+lcUz4U8-5OKlOVb-g6S=J-2&{bCt}zegfVd8OcUqv7B)lQ~2y2s=xC zddf(=d9uQB67^zIK;CX3p9{;RTQ0)XZ%j{YbZ+)q6ZtQK*Q*06D;R;3C14ebayqur zcretUaKGg`tKqdYn=n(#4?Wn97tGQRlt-eYlrDA;a({iEWcv-&hjT7<=FBBW>iXz_ zcWvOb?<57)M2fti*9e3M;%9Mt8+6ZuGPuQx|%(Dx+!W6OnSXztlhhW@xhh3+KK zXA6c3dnIczM8lK019dKe@bO7xG)=;(cb})NhV`=9FS#cIb3fyQY?k+3&qEe}+j#%x zdt!)wt{KwAP<|=e$=Z+zqv$qtMLebAr$x@uAKg;sX6sh&&|cRRN& zjc@eQ*_%}#)`C*PD`$~OF{)*$z2yCDTUD8_x5C@+=KIy`E#Q;-W+$jKLkp-Z`1g)U z^JZGRzetFt@T6V#>{JT0#y$IVI~JMH8a_eF)$GNmh<`q|2*ecc-&a@id<29b2GF{RO{pTka^>i z@|MNHjT;_K0w#IPC`^9C)?Etrg8kO*Pvz?p1T)o@?pNTC=HJ;i$#i!otpJZSDMAFw zuE%B_A4j(IK5cmw9EI;}W|REDvLN1S(%dKAp}<|EqufUIYHO_a#yN+xtRnXu?(;l* z-n**}rLRAE)ctlNJc*3&Let<9PL{j|ui1?!BLR6%xF<+0-t1#c3nHnzYLxkzQUa2m z?lod%g@eBYV83$TUai3={oWoKBhieeM`|Xp;(+hu6(zaT*SFKf{Fpou8pdc1J z@8D4Zb{yxahceH{WQO&4Fs%DAcb+C^s$W{deXKQxE04&eRoVYBD171z&A*%gp+cI zd|8bO^D9nt%S@74pvxVjYejuCEROXbb}jxNkKe?1GFrdim-Qa6a+$ss^UR#~bZCBH z#Q1*CuBAsF>}%~B{F5MQ&o|V^9NDk(grvob>ub*rM4PADjaQfiMP4uMb1%HWsPSpWU(d{WTbtx`R^k%?d zWiD^XmrdGFu_n8(wCN%1>X}9iZmaluv>st)p<`bs{pTo{Bj^RYwg|uRWTJk`G+M=} z<7a9ov*~rrkldjL6z}5%Dqqd?m)zpQmCAt`O<|{S`Mvj9+GaLAcrt-zBa3{ zk^3CR-tJxA+9R$pX6)w4RskrmL5qQk2wQzmr@ydk4l1=}cX?6lx4L$lGf>w@7;_Y| zb{8?vHG^J08wF??5u5>0U0`B4+11z059yHqj4p$ECBm9jTrA0V4G_pdP#5GV(e!oBGKZul!p-58f^EOI-2! 
z+G1B;fbW`!e2(ii1tg)bQDo(~UwN9rlb0`?#XcP?R~z_NAex}G6JMTg{j3JXV}+<# z_+D>}c)C&drZKQ(Ei*R<9+R!XPL)(JXf#qO!+gtI5N~3ImwUQWzAc|GVFPK zZzoJ?L_S@(j-h;Enu{S~JLs=ap$F%;_lkfb!%h2g)sDT7?THgGjgB_t!b-lEP>k8X_y+J;~H8B zZZ&nq`JWv83G~1&`z~W&kxIrev~XXLQ$@8q*PGE^+8fKpAlqV)eyzV!9>Agw>s0%E z8BFM0A49Z4$+*?GS|RguB{vEgWu%_<{PO~k&4r^RnYkR@(fGo0 zP%H&>La=aXTL(G!r%>-7$vyQ5Q(v62<{jADs(wA$gK3ke-;IECiH4hxQJ?TAeupa! zOJ-i4ieV%|KI}>AUBGEskUd1|KcCMLIcX!c(6&q~(*m(pm1$U4X|4rl7{nEbjEswL z9`I=P{f#vVM7Jz%qvxN{+$|41-uaE}RFG`Tn1Tkw2`y&|Jsv}PMvmQ%R4ZPU6(MMK%i;46T&XfPs%axOJLlV=3ygJE z@?x(|)iqBEJV`)IVFl=SGoi}2Jq|+XDxma^fwQsj*?#9ZbT;JuMqp``HW*+OE)Y}t z*8^tDHmCMOY%09K`1gij#C4SRO4KpWt#2TtafZW}9?(trWzWud!E>$7+McO7?RD=z E05Jc-#sB~S diff --git a/behave.ini b/behave.ini deleted file mode 100644 index 2fb145b39e..0000000000 --- a/behave.ini +++ /dev/null @@ -1,16 +0,0 @@ -[behave] -#default_tags = not (@xfail or @not_implemented) -show_skipped = true -format = rerun - progress3 -outfiles = rerun.txt - build/behave.reports/report_progress3.txt -junit = true -junit_directory = build/behave.reports -logging_level = INFO - -# -- HTML-FORMATTER REQUIRES: pip install behave-html-formatter -# SEE ALSO: https://github.com/behave-contrib/behave-html-formatter - -[behave.formatters] -html = behave_html_formatter:HTMLFormatter \ No newline at end of file diff --git a/conftest.py b/conftest.py deleted file mode 100644 index 5cf25e87bc..0000000000 --- a/conftest.py +++ /dev/null @@ -1,62 +0,0 @@ -# conftest.py - -import pytest -from _pytest.config.argparsing import Parser -from _pytest.fixtures import FixtureRequest -from homeassistant.core import HomeAssistant -from unittest.mock import AsyncMock, MagicMock, patch - - - - -def pytest_addoption(parser: Parser) -> None: - parser.addoption("--headless", action="store", default="True", type=str) - parser.addoption("--local_browser", action="store", default="False", type=str) - parser.addoption("--selenium_url", action="store", default="http://localhost:4444", type=str) - 
-@pytest.fixture(scope='session') -def headless_mode(request: FixtureRequest) -> str: - return request.config.getoption("--headless") - -@pytest.fixture(scope='session') -def local_browser(request: FixtureRequest) -> str: - return request.config.getoption("--local_browser") - -@pytest.fixture(scope='session') -def selenium_url(request: FixtureRequest) -> str: - return request.config.getoption("--selenium_url") - -@pytest.fixture -def hass(): - """Mock HomeAssistant instance.""" - hass = MagicMock(spec=HomeAssistant) - - # Mock the event loop with create_task as AsyncMock - hass.loop = MagicMock() - hass.loop.create_task = AsyncMock() - - # Mock config_entries and its flow - hass.config_entries = MagicMock() - hass.config_entries.flow = MagicMock() - - # Mock asynchronous methods with AsyncMock - hass.config_entries.flow.async_init = AsyncMock() - hass.config_entries.flow.async_configure = AsyncMock() - - # Mock async_get_entry to return a MockConfigEntry when called - hass.config_entries.async_get_entry = AsyncMock() - - # Mock async_unload as an AsyncMock - hass.config_entries.async_unload = AsyncMock(return_value=True) - - # Mock async_block_till_done as an AsyncMock - hass.async_block_till_done = AsyncMock() - hass.async_add_executor_job = AsyncMock() # Ensure compatibility with async calls - - return hass - -@pytest.fixture -def enable_custom_integrations(): - """Fixture to enable custom integrations.""" - with patch("homeassistant.helpers.discovery.load_platform") as mock_load: - yield mock_load diff --git a/custom_components/__init__.py b/custom_components/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/custom_components/uk_bin_collection/__init__.py b/custom_components/uk_bin_collection/__init__.py deleted file mode 100644 index 14642a3745..0000000000 --- a/custom_components/uk_bin_collection/__init__.py +++ /dev/null @@ -1,309 +0,0 @@ -"""The UK Bin Collection integration.""" - -import asyncio -import logging -from datetime 
import timedelta -import json - -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed - -from datetime import datetime - -from homeassistant.util import dt as dt_util - -from .const import DOMAIN, LOG_PREFIX, PLATFORMS -from uk_bin_collection.uk_bin_collection.collect_data import UKBinCollectionApp - -_LOGGER = logging.getLogger(__name__) - - -async def async_setup(hass: HomeAssistant, config: dict) -> bool: - """Set up the UK Bin Collection component.""" - hass.data.setdefault(DOMAIN, {}) - _LOGGER.debug(f"{LOG_PREFIX} async_setup called with config: {config}") - return True - - -async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: - """Migrate old config entries to new version.""" - if config_entry.version == 1: - _LOGGER.info( - f"{LOG_PREFIX} Migrating config entry {config_entry.entry_id} from version 1 to 2." - ) - - # Example: Add default update_interval if not present - data = config_entry.data.copy() - if "update_interval" not in data: - data["update_interval"] = 12 - _LOGGER.debug( - f"{LOG_PREFIX} 'update_interval' not found. Setting default to 12 hours." - ) - else: - _LOGGER.debug( - f"{LOG_PREFIX} 'update_interval' found: {data['update_interval']} hours." - ) - - # Update the config entry with the new data - hass.config_entries.async_update_entry(config_entry, data=data) - - _LOGGER.info( - f"{LOG_PREFIX} Migration of config entry {config_entry.entry_id} to version 2 successful." 
- ) - - return True - - -async def async_setup_entry( - hass: HomeAssistant, config_entry: ConfigEntry -) -> bool: - """Set up UK Bin Collection from a config entry.""" - _LOGGER.info(f"{LOG_PREFIX} Setting up UK Bin Collection.") - - name = config_entry.data.get("name") - if not name: - _LOGGER.error(f"{LOG_PREFIX} 'name' is missing in config entry.") - raise ConfigEntryNotReady("Missing 'name' in configuration.") - - timeout = config_entry.data.get("timeout", 60) - icon_color_mapping = config_entry.data.get("icon_color_mapping", "{}") - update_interval_hours = config_entry.data.get("update_interval", 12) - - _LOGGER.debug( - f"{LOG_PREFIX} Retrieved configuration: " - f"name={name}, timeout={timeout}, " - f"update_interval={update_interval_hours} hours, " - f"icon_color_mapping={icon_color_mapping}" - ) - - # Validate 'timeout' - try: - timeout = int(timeout) - if timeout < 10: - _LOGGER.warning( - f"{LOG_PREFIX} Timeout value {timeout} is less than 10. Setting to minimum of 10 seconds." - ) - timeout = 10 - except (ValueError, TypeError): - _LOGGER.warning( - f"{LOG_PREFIX} Invalid timeout value: {timeout}. Using default 60 seconds." - ) - timeout = 60 - - # Validate 'update_interval_hours' - try: - update_interval_hours = int(update_interval_hours) - if update_interval_hours < 1: - _LOGGER.warning( - f"{LOG_PREFIX} update_interval {update_interval_hours} is less than 1. Using default 12 hours." - ) - update_interval_hours = 12 - except (ValueError, TypeError): - _LOGGER.warning( - f"{LOG_PREFIX} Invalid update_interval value: {update_interval_hours}. Using default 12 hours." 
- ) - update_interval_hours = 12 - - # Prepare arguments for UKBinCollectionApp - args = build_ukbcd_args(config_entry.data) - - _LOGGER.debug(f"{LOG_PREFIX} UKBinCollectionApp args: {args}") - - # Initialize the UK Bin Collection Data application - ukbcd = UKBinCollectionApp() - ukbcd.set_args(args) - _LOGGER.debug(f"{LOG_PREFIX} UKBinCollectionApp initialized and arguments set.") - - # Initialize the data coordinator - coordinator = HouseholdBinCoordinator( - hass, - ukbcd, - name, - timeout=timeout, - update_interval=timedelta(hours=update_interval_hours), - ) - - _LOGGER.debug( - f"{LOG_PREFIX} HouseholdBinCoordinator initialized with update_interval={update_interval_hours} hours." - ) - - try: - await coordinator.async_config_entry_first_refresh() - except UpdateFailed as e: - _LOGGER.error(f"{LOG_PREFIX} Unable to fetch initial data: {e}") - raise ConfigEntryNotReady from e - - _LOGGER.info(f"{LOG_PREFIX} Initial data fetched successfully.") - - # Store the coordinator in Home Assistant's data - hass.data[DOMAIN][config_entry.entry_id] = {"coordinator": coordinator} - _LOGGER.debug( - f"{LOG_PREFIX} Coordinator stored in hass.data under entry_id={config_entry.entry_id}." - ) - - # Forward the setup to all platforms (sensor and calendar) - hass.async_create_task( - hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) - ) - _LOGGER.debug(f"{LOG_PREFIX} Setup forwarded to platforms: {PLATFORMS}.") - - return True - - -async def async_unload_entry( - hass: HomeAssistant, config_entry: ConfigEntry -) -> bool: - """Unload a config entry.""" - _LOGGER.info(f"{LOG_PREFIX} Unloading config entry {config_entry.entry_id}.") - unload_ok = True - - for platform in PLATFORMS: - platform_unload_ok = await hass.config_entries.async_forward_entry_unload( - config_entry, platform - ) - if not platform_unload_ok: - _LOGGER.warning( - f"{LOG_PREFIX} Failed to unload '{platform}' platform for entry_id={config_entry.entry_id}." 
- ) - unload_ok = False - else: - _LOGGER.debug( - f"{LOG_PREFIX} Successfully unloaded '{platform}' platform for entry_id={config_entry.entry_id}." - ) - - if unload_ok: - hass.data[DOMAIN].pop(config_entry.entry_id) - _LOGGER.debug( - f"{LOG_PREFIX} Unloaded and removed coordinator for entry_id={config_entry.entry_id}." - ) - else: - _LOGGER.warning( - f"{LOG_PREFIX} One or more platforms failed to unload for entry_id={config_entry.entry_id}." - ) - - return unload_ok - - -def build_ukbcd_args(config_data: dict) -> list: - """Build arguments list for UKBinCollectionApp.""" - excluded_keys = { - "name", - "council", - "url", - "skip_get_url", - "headless", - "local_browser", - "timeout", - "icon_color_mapping", - "update_interval", - } - - args = [config_data.get("council", ""), config_data.get("url", "")] - - # Add other arguments - for key, value in config_data.items(): - if key in excluded_keys: - continue - if key == "web_driver": - value = value.rstrip("/") - args.append(f"--{key}={value}") - - _LOGGER.debug(f"{LOG_PREFIX} Built UKBinCollectionApp arguments: {args}") - return args - - -class HouseholdBinCoordinator(DataUpdateCoordinator): - """Coordinator to manage fetching and updating UK Bin Collection data.""" - - def __init__( - self, - hass: HomeAssistant, - ukbcd: UKBinCollectionApp, - name: str, - timeout: int = 60, - update_interval: timedelta = timedelta(hours=12), - ) -> None: - """Initialize the data coordinator.""" - super().__init__( - hass, - _LOGGER, - name="UK Bin Collection Data", - update_interval=update_interval, - ) - self.ukbcd = ukbcd - self.name = name - self.timeout = timeout - - _LOGGER.debug( - f"{LOG_PREFIX} DataUpdateCoordinator initialized with update_interval={update_interval}." - ) - - async def _async_update_data(self) -> dict: - """Fetch and process the latest bin collection data.""" - _LOGGER.debug( - f"{LOG_PREFIX} Starting data fetch with timeout={self.timeout} seconds." 
- ) - _LOGGER.info(f"{LOG_PREFIX} Fetching latest bin collection data.") - try: - data = await asyncio.wait_for( - self.hass.async_add_executor_job(self.ukbcd.run), - timeout=self.timeout, - ) - _LOGGER.debug(f"{LOG_PREFIX} Data fetched: {data}") - parsed_data = json.loads(data) - _LOGGER.debug(f"{LOG_PREFIX} Parsed data: {parsed_data}") - processed_data = self.process_bin_data(parsed_data) - _LOGGER.debug(f"{LOG_PREFIX} Processed data: {processed_data}") - _LOGGER.info(f"{LOG_PREFIX} Bin collection data updated successfully.") - return processed_data - except asyncio.TimeoutError as exc: - _LOGGER.error(f"{LOG_PREFIX} Timeout while updating data: {exc}") - raise UpdateFailed(f"Timeout while updating data: {exc}") from exc - except json.JSONDecodeError as exc: - _LOGGER.error(f"{LOG_PREFIX} JSON decode error: {exc}") - raise UpdateFailed(f"JSON decode error: {exc}") from exc - except Exception as exc: - _LOGGER.exception(f"{LOG_PREFIX} Unexpected error: {exc}") - raise UpdateFailed(f"Unexpected error: {exc}") from exc - - @staticmethod - def process_bin_data(data: dict) -> dict: - """Process raw data to determine the next collection dates.""" - current_date = dt_util.now().date() - next_collection_dates = {} - - for bin_data in data.get("bins", []): - bin_type = bin_data.get("type") - collection_date_str = bin_data.get("collectionDate") - - if not bin_type or not collection_date_str: - _LOGGER.warning( - f"{LOG_PREFIX} Missing 'type' or 'collectionDate' in bin data: {bin_data}" - ) - continue - - try: - collection_date = datetime.strptime( - collection_date_str, "%d/%m/%Y" - ).date() - except (ValueError, TypeError): - _LOGGER.warning( - f"{LOG_PREFIX} Invalid date format for bin type '{bin_type}': '{collection_date_str}'." 
- ) - continue - - # Update next collection date if it's sooner - existing_date = next_collection_dates.get(bin_type) - if ( - collection_date >= current_date - and (not existing_date or collection_date < existing_date) - ): - next_collection_dates[bin_type] = collection_date - _LOGGER.debug( - f"{LOG_PREFIX} Updated next collection for '{bin_type}' to {collection_date}." - ) - - _LOGGER.debug(f"{LOG_PREFIX} Next Collection Dates: {next_collection_dates}") - return next_collection_dates diff --git a/custom_components/uk_bin_collection/calendar.py b/custom_components/uk_bin_collection/calendar.py deleted file mode 100644 index 14dc904bcd..0000000000 --- a/custom_components/uk_bin_collection/calendar.py +++ /dev/null @@ -1,137 +0,0 @@ -"""Calendar platform support for UK Bin Collection Data.""" - -import logging -import uuid -from datetime import datetime, timedelta -from typing import Any, Dict, List, Optional - -from homeassistant.components.calendar import CalendarEntity, CalendarEvent -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .const import DOMAIN, LOG_PREFIX -from homeassistant.helpers.update_coordinator import CoordinatorEntity, DataUpdateCoordinator - -_LOGGER = logging.getLogger(__name__) - - -class UKBinCollectionCalendar(CoordinatorEntity, CalendarEntity): - """Calendar entity for UK Bin Collection Data.""" - - def __init__( - self, - coordinator: DataUpdateCoordinator, - bin_type: str, - unique_id: str, - name: str, - ) -> None: - """Initialize the calendar entity.""" - super().__init__(coordinator) - self._bin_type = bin_type - self._unique_id = unique_id - self._name = name - self._attr_unique_id = unique_id - - # Optionally, set device_info if you have device grouping - self._attr_device_info = { - "identifiers": {(DOMAIN, unique_id)}, - "name": f"{self._name} Device", - "manufacturer": "UK Bin Collection", - 
"model": "Bin Collection Calendar", - "sw_version": "1.0", - } - - @property - def name(self) -> str: - """Return the name of the calendar.""" - return self._name - - @property - def event(self) -> Optional[CalendarEvent]: - """Return the next collection event.""" - collection_date = self.coordinator.data.get(self._bin_type) - if not collection_date: - _LOGGER.debug(f"{LOG_PREFIX} No collection date available for '{self._bin_type}'.") - return None - - return self._create_calendar_event(collection_date) - - async def async_get_events( - self, hass: HomeAssistant, start_date: datetime, end_date: datetime - ) -> List[CalendarEvent]: - """Return all events within a specific time frame.""" - events: List[CalendarEvent] = [] - collection_date = self.coordinator.data.get(self._bin_type) - - if not collection_date: - return events - - if start_date.date() <= collection_date <= end_date.date(): - events.append(self._create_calendar_event(collection_date)) - - return events - - def _create_calendar_event(self, collection_date: datetime.date) -> CalendarEvent: - """Create a CalendarEvent for a given collection date.""" - return CalendarEvent( - summary=f"{self._bin_type} Collection", - start=collection_date, - end=collection_date + timedelta(days=1), - uid=f"{self.unique_id}_{collection_date.isoformat()}", - ) - - @property - def unique_id(self) -> str: - """Return a unique ID for the calendar.""" - return self._unique_id - - @callback - def _handle_coordinator_update(self) -> None: - """Handle updates from the coordinator and refresh calendar state.""" - self.async_write_ha_state() - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up UK Bin Collection Calendar from a config entry.""" - _LOGGER.info(f"{LOG_PREFIX} Setting up UK Bin Collection Calendar platform.") - - # Retrieve the coordinator from hass.data - coordinator: DataUpdateCoordinator = 
hass.data[DOMAIN][config_entry.entry_id]["coordinator"] - - # Create calendar entities - entities = [] - for bin_type in coordinator.data.keys(): - unique_id = calc_unique_calendar_id(config_entry.entry_id, bin_type) - name = f"{coordinator.name} {bin_type} Calendar" - entities.append( - UKBinCollectionCalendar( - coordinator=coordinator, - bin_type=bin_type, - unique_id=unique_id, - name=name, - ) - ) - - # Register all calendar entities with Home Assistant - async_add_entities(entities) - _LOGGER.debug(f"{LOG_PREFIX} Calendar entities added: {[entity.name for entity in entities]}") - - -async def async_unload_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_remove_entities: Any, -) -> bool: - """Unload a config entry.""" - # Unloading is handled in init.py - return True - - -def calc_unique_calendar_id(entry_id: str, bin_type: str) -> str: - """Calculate a unique ID for the calendar.""" - return f"{entry_id}_{bin_type}_calendar" diff --git a/custom_components/uk_bin_collection/config_flow.py b/custom_components/uk_bin_collection/config_flow.py deleted file mode 100644 index 9969e301f6..0000000000 --- a/custom_components/uk_bin_collection/config_flow.py +++ /dev/null @@ -1,576 +0,0 @@ -import json -import logging -import shutil -import asyncio -from typing import Any, Dict, Optional - -import aiohttp -import homeassistant.helpers.config_validation as cv -import voluptuous as vol -from homeassistant import config_entries -from homeassistant.core import callback - -from .const import DOMAIN, LOG_PREFIX, SELENIUM_SERVER_URLS, BROWSER_BINARIES - -_LOGGER = logging.getLogger(__name__) - - -class UkBinCollectionConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): - """Handle a config flow for UkBinCollection.""" - - VERSION = 2 # Incremented version for config flow changes - - def __init__(self): - self.councils_data: Optional[Dict[str, Any]] = None - self.data: Dict[str, Any] = {} - self.council_names: list = [] - self.council_options: list = [] - 
self.selenium_checked: bool = False - self.selenium_available: bool = False - self.selenium_results: list = [] - self.chromium_checked: bool = False - self.chromium_installed: bool = False - - async def async_step_user(self, user_input: Optional[Dict[str, Any]] = None): - """Handle the initial step.""" - errors = {} - - if self.councils_data is None: - self.councils_data = await self.get_councils_json() - if not self.councils_data: - _LOGGER.error("Council data is unavailable.") - return self.async_abort(reason="Council Data Unavailable") - - self.council_names = list(self.councils_data.keys()) - self.council_options = [ - self.councils_data[name]["wiki_name"] for name in self.council_names - ] - _LOGGER.debug("Loaded council data: %s", self.council_names) - - if user_input is not None: - _LOGGER.debug("User input received: %s", user_input) - # Validate user input - if not user_input.get("name"): - errors["name"] = "Name is required." - if not user_input.get("council"): - errors["council"] = "Council is required." - - # Validate JSON mapping if provided - if user_input.get("icon_color_mapping"): - if not self.is_valid_json(user_input["icon_color_mapping"]): - errors["icon_color_mapping"] = "Invalid JSON format." 
- - # Check for duplicate entries - if not errors: - existing_entry = await self._async_entry_exists(user_input) - if existing_entry: - errors["base"] = "duplicate_entry" - _LOGGER.warning( - "Duplicate entry found: %s", existing_entry.data.get("name") - ) - - if not errors: - # Map selected wiki_name back to council key - council_key = self.map_wiki_name_to_council_key(user_input["council"]) - user_input["council"] = council_key - self.data.update(user_input) - - _LOGGER.debug("User input after mapping: %s", self.data) - - # Proceed to the council step - return await self.async_step_council() - - # Show the initial form - return self.async_show_form( - step_id="user", - data_schema=vol.Schema( - { - vol.Required("name"): cv.string, - vol.Required("council"): vol.In(self.council_options), - vol.Optional("icon_color_mapping", default=""): cv.string, - } - ), - errors=errors, - description_placeholders={"cancel": "Press Cancel to abort setup."}, - ) - - async def async_step_council(self, user_input: Optional[Dict[str, Any]] = None): - """Second step to configure the council details.""" - errors = {} - council_key = self.data.get("council") - council_info = self.councils_data.get(council_key, {}) - requires_selenium = "web_driver" in council_info - - if user_input is not None: - _LOGGER.debug("Council step user input: %s", user_input) - # Validate JSON mapping if provided - if user_input.get("icon_color_mapping"): - if not self.is_valid_json(user_input["icon_color_mapping"]): - errors["icon_color_mapping"] = "Invalid JSON format." 
- - # Handle 'skip_get_url' if necessary - if council_info.get("skip_get_url", False): - user_input["skip_get_url"] = True - user_input["url"] = council_info.get("url", "") - - # Merge user_input with existing data - self.data.update(user_input) - - # If no errors, create the config entry - if not errors: - _LOGGER.info( - "%s Creating config entry with data: %s", LOG_PREFIX, self.data - ) - return self.async_create_entry(title=self.data["name"], data=self.data) - else: - _LOGGER.debug("Errors in council step: %s", errors) - - # Prepare description placeholders - description_placeholders = {} - if requires_selenium: - description = await self.perform_selenium_checks(council_key) - description_placeholders["selenium_message"] = description - else: - description_placeholders["selenium_message"] = "" - - # Show the form - return self.async_show_form( - step_id="council", - data_schema=await self.get_council_schema(council_key), - errors=errors, - description_placeholders=description_placeholders, - ) - - async def async_step_reconfigure( - self, user_input: Optional[Dict[str, Any]] = None - ): - """Handle reconfiguration of the integration.""" - return await self.async_step_reconfigure_confirm() - - async def async_step_reconfigure_confirm( - self, user_input: Optional[Dict[str, Any]] = None - ): - """Handle a reconfiguration flow initialized by the user.""" - errors = {} - existing_entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - if existing_entry is None: - _LOGGER.error("Reconfiguration failed: Config entry not found.") - return self.async_abort(reason="Reconfigure Failed") - - if self.councils_data is None: - self.councils_data = await self.get_councils_json() - self.council_names = list(self.councils_data.keys()) - self.council_options = [ - self.councils_data[name]["wiki_name"] for name in self.council_names - ] - _LOGGER.debug("Loaded council data for reconfiguration.") - - council_key = existing_entry.data.get("council") - 
council_info = self.councils_data.get(council_key, {}) - council_wiki_name = council_info.get("wiki_name", "") - - if user_input is not None: - _LOGGER.debug("Reconfigure user input: %s", user_input) - # Map selected wiki_name back to council key - council_key = self.map_wiki_name_to_council_key(user_input["council"]) - user_input["council"] = council_key - - # Validate update_interval - update_interval = user_input.get("update_interval") - if update_interval is not None: - try: - update_interval = int(update_interval) - if update_interval < 1: - errors["update_interval"] = "Must be at least 1 hour." - except ValueError: - errors["update_interval"] = "Invalid number." - - # Validate JSON mapping if provided - if user_input.get("icon_color_mapping"): - if not self.is_valid_json(user_input["icon_color_mapping"]): - errors["icon_color_mapping"] = "Invalid JSON format." - - if not errors: - # Merge the user input with existing data - data = {**existing_entry.data, **user_input} - data["icon_color_mapping"] = user_input.get("icon_color_mapping", "") - - self.hass.config_entries.async_update_entry( - existing_entry, - title=user_input.get("name", existing_entry.title), - data=data, - ) - # Trigger a data refresh by reloading the config entry - await self.hass.config_entries.async_reload(existing_entry.entry_id) - _LOGGER.info("Configuration updated for entry: %s", existing_entry.entry_id) - return self.async_abort(reason="Reconfigure Successful") - else: - _LOGGER.debug("Errors in reconfiguration: %s", errors) - - # Build the schema with existing data - schema = self.build_reconfigure_schema(existing_entry.data, council_wiki_name) - - return self.async_show_form( - step_id="reconfigure_confirm", - data_schema=schema, - errors=errors, - description_placeholders={"selenium_message": ""}, - ) - - async def get_councils_json(self) -> Dict[str, Any]: - """Fetch and return the supported councils data.""" - url = 
"https://raw.githubusercontent.com/robbrad/UKBinCollectionData/0.124.1/uk_bin_collection/tests/input.json" - try: - async with aiohttp.ClientSession() as session: - async with session.get(url) as response: - response.raise_for_status() - data_text = await response.text() - return json.loads(data_text) - except aiohttp.ClientError as e: - _LOGGER.error("HTTP error while fetching council data: %s", e) - except json.JSONDecodeError as e: - _LOGGER.error("Error decoding council data JSON: %s", e) - except Exception as e: - _LOGGER.exception("Unexpected error while fetching council data: %s", e) - return {} - - async def get_council_schema(self, council: str) -> vol.Schema: - """Generate the form schema based on council requirements.""" - council_info = self.councils_data.get(council, {}) - fields = {} - - if not council_info.get("skip_get_url", False) or council_info.get( - "custom_component_show_url_field" - ): - fields[vol.Required("url")] = cv.string - if "uprn" in council_info: - fields[vol.Required("uprn")] = cv.string - if "postcode" in council_info: - fields[vol.Required("postcode")] = cv.string - if "house_number" in council_info: - fields[vol.Required("number")] = cv.string - if "usrn" in council_info: - fields[vol.Required("usrn")] = cv.string - if "web_driver" in council_info: - fields[vol.Optional("web_driver", default="")] = cv.string - fields[vol.Optional("headless", default=True)] = bool - fields[vol.Optional("local_browser", default=False)] = bool - - fields[vol.Optional("timeout", default=60)] = vol.All( - vol.Coerce(int), vol.Range(min=10) - ) - - fields[vol.Optional("update_interval", default=12)] = vol.All( - cv.positive_int, vol.Range(min=1) - ) - - return vol.Schema(fields) - - def build_reconfigure_schema( - self, existing_data: Dict[str, Any], council_wiki_name: str - ) -> vol.Schema: - """Build the schema for reconfiguration with existing data.""" - fields = { - vol.Required("name", default=existing_data.get("name", "")): str, - 
vol.Required("council", default=council_wiki_name): vol.In( - self.council_options - ), - vol.Required("update_interval", default=existing_data.get("update_interval", 12)): vol.All( - cv.positive_int, vol.Range(min=1) - ), - } - - optional_fields = [ - ("url", cv.string), - ("uprn", cv.string), - ("postcode", cv.string), - ("number", cv.string), - ("web_driver", cv.string), - ("headless", bool), - ("local_browser", bool), - ("timeout", vol.All(vol.Coerce(int), vol.Range(min=10))), - ] - - for field_name, validator in optional_fields: - if field_name in existing_data: - fields[ - vol.Optional(field_name, default=existing_data[field_name]) - ] = validator - - fields[ - vol.Optional( - "icon_color_mapping", - default=existing_data.get("icon_color_mapping", ""), - ) - ] = str - - return vol.Schema(fields) - - async def perform_selenium_checks(self, council_key: str) -> str: - """Perform Selenium and Chromium checks and return a formatted message.""" - messages = [] - council_info = self.councils_data.get(council_key, {}) - council_name = council_info.get("wiki_name", council_key) - - custom_selenium_url = self.data.get("selenium_url") - selenium_results = await self.check_selenium_server(custom_selenium_url) - self.selenium_available = any(accessible for _, accessible in selenium_results) - self.selenium_checked = True - - self.chromium_installed = await self.check_chromium_installed() - self.chromium_checked = True - - # Start building the message with formatted HTML - messages.append(f"{council_name} requires Selenium to run.

") - - # Selenium server check results - messages.append("Remote Selenium server URLs checked:
") - for url, accessible in selenium_results: - status = "✅ Accessible" if accessible else "❌ Not accessible" - messages.append(f"{url}: {status}
") - - # Chromium installation check - chromium_status = "✅ Installed" if self.chromium_installed else "❌ Not installed" - messages.append("
Local Chromium browser check:
") - messages.append(f"Chromium browser is {chromium_status}.") - - # Combine messages - return "".join(messages) - - async def check_selenium_server(self, custom_url: Optional[str] = None) -> list: - """Check if Selenium servers are accessible.""" - urls = SELENIUM_SERVER_URLS.copy() - if custom_url: - urls.insert(0, custom_url) - - results = [] - async with aiohttp.ClientSession() as session: - for url in urls: - try: - async with session.get(url, timeout=5) as response: - response.raise_for_status() - accessible = response.status == 200 - results.append((url, accessible)) - _LOGGER.debug("Selenium server %s is accessible.", url) - except aiohttp.ClientError as e: - _LOGGER.warning( - "Failed to connect to Selenium server at %s: %s", url, e - ) - results.append((url, False)) - except Exception as e: - _LOGGER.exception( - "Unexpected error checking Selenium server at %s: %s", url, e - ) - results.append((url, False)) - return results - - async def check_chromium_installed(self) -> bool: - """Check if Chromium is installed.""" - loop = asyncio.get_event_loop() - result = await loop.run_in_executor(None, self._sync_check_chromium) - if result: - _LOGGER.debug("Chromium is installed.") - else: - _LOGGER.warning("Chromium is not installed.") - return result - - def _sync_check_chromium(self) -> bool: - """Synchronous check for Chromium installation.""" - for exec_name in BROWSER_BINARIES: - try: - if shutil.which(exec_name): - _LOGGER.debug(f"Found Chromium executable: {exec_name}") - return True - except Exception as e: - _LOGGER.error( - f"Exception while checking for executable '{exec_name}': {e}" - ) - continue # Continue checking other binaries - _LOGGER.debug("No Chromium executable found.") - return False - - def map_wiki_name_to_council_key(self, wiki_name: str) -> str: - """Map the council wiki name back to the council key.""" - try: - index = self.council_options.index(wiki_name) - council_key = self.council_names[index] - _LOGGER.debug("Mapped wiki name 
'%s' to council key '%s'.", wiki_name, council_key) - return council_key - except ValueError: - _LOGGER.error("Wiki name '%s' not found in council options.", wiki_name) - return "" - - @staticmethod - def is_valid_json(json_str: str) -> bool: - """Validate if a string is valid JSON.""" - try: - json.loads(json_str) - return True - except json.JSONDecodeError as e: - _LOGGER.debug("JSON decode error: %s", e) - return False - - async def _async_entry_exists(self, user_input: Dict[str, Any]) -> Optional[config_entries.ConfigEntry]: - """Check if a config entry with the same name or data already exists.""" - for entry in self._async_current_entries(): - if entry.data.get("name") == user_input.get("name"): - return entry - if ( - entry.data.get("council") == user_input.get("council") - and entry.data.get("url") == user_input.get("url") - ): - return entry - return None - - async def async_step_import(self, import_config: Dict[str, Any]) -> config_entries.FlowResult: - """Handle import from configuration.yaml.""" - return await self.async_step_user(import_config) - - -class UkBinCollectionOptionsFlowHandler(config_entries.OptionsFlow): - """Handle options flow for UkBinCollection.""" - - def __init__(self, config_entry): - """Initialize options flow.""" - self.config_entry = config_entry - self.councils_data: Optional[Dict[str, Any]] = None - self.council_names: list = [] - self.council_options: list = [] - - async def async_step_init(self, user_input=None): - """Manage the options.""" - errors = {} - existing_data = self.config_entry.data - - # Fetch council data - self.councils_data = await self.get_councils_json() - if not self.councils_data: - _LOGGER.error("Council data is unavailable for options flow.") - return self.async_abort(reason="Council Data Unavailable") - - self.council_names = list(self.councils_data.keys()) - self.council_options = [ - self.councils_data[name]["wiki_name"] for name in self.council_names - ] - _LOGGER.debug("Loaded council data for 
options flow.") - - if user_input is not None: - _LOGGER.debug("Options flow user input: %s", user_input) - # Map selected wiki_name back to council key - council_key = self.map_wiki_name_to_council_key(user_input["council"]) - user_input["council"] = council_key - - # Validate update_interval - update_interval = user_input.get("update_interval") - if update_interval is not None: - try: - update_interval = int(update_interval) - if update_interval < 1: - errors["update_interval"] = "Must be at least 1 hour." - except ValueError: - errors["update_interval"] = "Invalid number." - - # Validate JSON mapping if provided - if user_input.get("icon_color_mapping"): - if not UkBinCollectionConfigFlow.is_valid_json(user_input["icon_color_mapping"]): - errors["icon_color_mapping"] = "Invalid JSON format." - - if not errors: - # Merge the user input with existing data - data = {**existing_data, **user_input} - data["icon_color_mapping"] = user_input.get("icon_color_mapping", "") - - self.hass.config_entries.async_update_entry( - self.config_entry, - data=data, - ) - # Trigger a data refresh by reloading the config entry - await self.hass.config_entries.async_reload(self.config_entry.entry_id) - _LOGGER.info("Options updated and config entry reloaded.") - return self.async_create_entry(title="", data={}) - else: - _LOGGER.debug("Errors in options flow: %s", errors) - - # Build the form with existing data - schema = self.build_options_schema(existing_data) - - return self.async_show_form( - step_id="init", - data_schema=schema, - errors=errors, - description_placeholders={"cancel": "Press Cancel to abort setup."}, - ) - - async def get_councils_json(self) -> Dict[str, Any]: - """Fetch and return the supported councils data.""" - url = "https://raw.githubusercontent.com/robbrad/UKBinCollectionData/0.111.0/uk_bin_collection/tests/input.json" - try: - async with aiohttp.ClientSession() as session: - async with session.get(url) as response: - response.raise_for_status() - data_text 
= await response.text() - return json.loads(data_text) - except aiohttp.ClientError as e: - _LOGGER.error("HTTP error while fetching council data for options flow: %s", e) - except json.JSONDecodeError as e: - _LOGGER.error("Error decoding council data JSON for options flow: %s", e) - except Exception as e: - _LOGGER.exception("Unexpected error while fetching council data for options flow: %s", e) - return {} - - def build_options_schema(self, existing_data: Dict[str, Any]) -> vol.Schema: - """Build the schema for the options flow with existing data.""" - council_current_key = existing_data.get("council", "") - try: - council_current_wiki = self.council_options[self.council_names.index(council_current_key)] - except (ValueError, IndexError): - council_current_wiki = "" - - fields = { - vol.Required("name", default=existing_data.get("name", "")): str, - vol.Required("council", default=council_current_wiki): vol.In( - self.council_options - ), - vol.Required("update_interval", default=existing_data.get("update_interval", 12)): vol.All( - cv.positive_int, vol.Range(min=1) - ), - } - - optional_fields = [ - ("icon_color_mapping", cv.string), - # Add other optional fields if necessary - ] - - for field_name, validator in optional_fields: - if field_name in existing_data: - fields[ - vol.Optional(field_name, default=existing_data[field_name]) - ] = validator - - return vol.Schema(fields) - - def map_wiki_name_to_council_key(self, wiki_name: str) -> str: - """Map the council wiki name back to the council key.""" - try: - index = self.council_options.index(wiki_name) - council_key = self.council_names[index] - _LOGGER.debug("Mapped wiki name '%s' to council key '%s'.", wiki_name, council_key) - return council_key - except ValueError: - _LOGGER.error("Wiki name '%s' not found in council options.", wiki_name) - return "" - - @staticmethod - def is_valid_json(json_str: str) -> bool: - """Validate if a string is valid JSON.""" - try: - json.loads(json_str) - return True - 
except json.JSONDecodeError as e: - _LOGGER.debug("JSON decode error in options flow: %s", e) - return False - - -async def async_get_options_flow(config_entry): - """Get the options flow for this handler.""" - return UkBinCollectionOptionsFlowHandler(config_entry) diff --git a/custom_components/uk_bin_collection/const.py b/custom_components/uk_bin_collection/const.py deleted file mode 100644 index 75f9327728..0000000000 --- a/custom_components/uk_bin_collection/const.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Constants for UK Bin Collection Data.""" - -from datetime import timedelta - -from homeassistant.const import Platform - -DEFAULT_NAME = "UK Bin Collection Data" - -DOMAIN = "uk_bin_collection" - -LOG_PREFIX = "[UKBinCollection] " - -PLATFORMS = [Platform.SENSOR] - -STATE_ATTR_COLOUR = "colour" -STATE_ATTR_NEXT_COLLECTION = "next_collection" -STATE_ATTR_DAYS = "days" - -DEVICE_CLASS = "bin_collection_schedule" - -PLATFORMS = ["sensor", "calendar"] - -SELENIUM_SERVER_URLS = ["http://localhost:4444", "http://selenium:4444"] - -BROWSER_BINARIES = ["chromium", "chromium-browser", "google-chrome"] \ No newline at end of file diff --git a/custom_components/uk_bin_collection/manifest.json b/custom_components/uk_bin_collection/manifest.json deleted file mode 100644 index d1d45b0396..0000000000 --- a/custom_components/uk_bin_collection/manifest.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "domain": "uk_bin_collection", - "name": "UK Bin Collection Data", - "after_dependencies": [], - "codeowners": ["@robbrad"], - "config_flow": true, - "dependencies": [], - "documentation": "https://github.com/robbrad/UKBinCollectionData/wiki", - "integration_type": "service", - "iot_class": "cloud_polling", - "issue_tracker": "https://github.com/robbrad/UKBinCollectionData/issues", - "requirements": ["uk-bin-collection>=0.124.1"], - "version": "0.124.1", - "zeroconf": [] -} diff --git a/custom_components/uk_bin_collection/sensor.py b/custom_components/uk_bin_collection/sensor.py deleted file 
mode 100644 index a5ac4bbe73..0000000000 --- a/custom_components/uk_bin_collection/sensor.py +++ /dev/null @@ -1,397 +0,0 @@ -"""Support for UK Bin Collection Data sensors.""" - -from datetime import datetime, timedelta -import json -import logging -import asyncio -from typing import Any, Dict - -from json import JSONDecodeError - -from homeassistant.core import HomeAssistant, callback -from homeassistant.config_entries import ConfigEntry -from homeassistant.components.sensor import SensorEntity -from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, - UpdateFailed, -) -from homeassistant.util import dt as dt_util -from homeassistant.helpers.entity_platform import AddEntitiesCallback -import homeassistant.helpers.config_validation as cv - -from .const import ( - DOMAIN, - LOG_PREFIX, - STATE_ATTR_DAYS, - STATE_ATTR_NEXT_COLLECTION, - DEVICE_CLASS, - STATE_ATTR_COLOUR, - PLATFORMS, -) -from uk_bin_collection.uk_bin_collection.collect_data import UKBinCollectionApp - -_LOGGER = logging.getLogger(__name__) - - -async def async_setup_entry( - hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddEntitiesCallback -) -> None: - """Set up the UK Bin Collection Data sensor platform.""" - _LOGGER.info(f"{LOG_PREFIX} Setting up UK Bin Collection Data platform.") - - # Retrieve the coordinator from hass.data - coordinator: DataUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id]["coordinator"] - - # Get icon_color_mapping from config - icon_color_mapping = config_entry.data.get("icon_color_mapping", "{}") - - # Create sensor entities - entities = create_sensor_entities(coordinator, config_entry.entry_id, icon_color_mapping) - - # Register all sensor entities with Home Assistant - async_add_entities(entities) - - -def create_sensor_entities(coordinator, entry_id, icon_color_mapping): - """Create sensor entities based on coordinator data.""" - 
entities = [] - icon_color_map = load_icon_color_mapping(icon_color_mapping) - - for bin_type in coordinator.data.keys(): - device_id = f"{entry_id}_{bin_type}" - - # Main bin sensor - entities.append( - UKBinCollectionDataSensor( - coordinator, bin_type, device_id, icon_color_map - ) - ) - - # Attribute sensors - attributes = ["Colour", "Next Collection Human Readable", "Days Until Collection", "Bin Type", "Next Collection Date"] - for attr in attributes: - unique_id = f"{device_id}_{attr.lower().replace(' ', '_')}" - entities.append( - UKBinCollectionAttributeSensor( - coordinator, bin_type, unique_id, attr, device_id, icon_color_map - ) - ) - - # Add the Raw JSON Sensor - entities.append(UKBinCollectionRawJSONSensor(coordinator, f"{entry_id}_raw_json", entry_id)) - - return entities - - -def load_icon_color_mapping(icon_color_mapping: str) -> Dict[str, Any]: - """Load and return the icon color mapping.""" - try: - return json.loads(icon_color_mapping) if icon_color_mapping else {} - except JSONDecodeError: - _LOGGER.warning( - f"{LOG_PREFIX} Invalid icon_color_mapping JSON: {icon_color_mapping}. Using default settings." 
- ) - return {} - - -class UKBinCollectionDataSensor(CoordinatorEntity, SensorEntity): - """Sensor entity for individual bin collection data.""" - - _attr_device_class = DEVICE_CLASS - - def __init__( - self, - coordinator: DataUpdateCoordinator, - bin_type: str, - device_id: str, - icon_color_mapping: Dict[str, Any], - ) -> None: - """Initialize the main bin sensor.""" - super().__init__(coordinator) - self.coordinator = coordinator - self._bin_type = bin_type - self._device_id = device_id - self._icon_color_mapping = icon_color_mapping - self._icon = self.get_icon() - self._color = self.get_color() - self._state = None - self._next_collection = None - self._days = None - self.update_state() - - @property - def device_info(self) -> dict: - """Return device information for device registry.""" - return { - "identifiers": {(DOMAIN, self._device_id)}, - "name": f"{self.coordinator.name} {self._bin_type}", - "manufacturer": "UK Bin Collection", - "model": "Bin Sensor", - 'sw_version': '1.0', - } - - @callback - def _handle_coordinator_update(self) -> None: - """Handle updates from the coordinator and refresh sensor state.""" - self.update_state() - self.async_write_ha_state() - - def update_state(self) -> None: - """Update the sensor's state and attributes.""" - bin_date = self.coordinator.data.get(self._bin_type) - if bin_date: - self._next_collection = bin_date - now = dt_util.now().date() - self._days = (bin_date - now).days - self._state = self.calculate_state() - else: - _LOGGER.warning( - f"{LOG_PREFIX} Data for bin type '{self._bin_type}' is missing." 
- ) - self._state = "Unknown" - self._days = None - self._next_collection = None - - def calculate_state(self) -> str: - """Determine the state based on collection date.""" - now = dt_util.now().date() - if self._next_collection == now: - return "Today" - elif self._next_collection == now + timedelta(days=1): - return "Tomorrow" - else: - day_label = "day" if self._days == 1 else "days" - return f"In {self._days} {day_label}" - - def get_icon(self) -> str: - """Return the icon based on bin type or mapping.""" - return self._icon_color_mapping.get(self._bin_type, {}).get( - "icon", self.get_default_icon() - ) - - def get_color(self) -> str: - """Return the color based on bin type or mapping.""" - color = self._icon_color_mapping.get(self._bin_type, {}).get("color") - if color is None: - return "black" - return color - - def get_default_icon(self) -> str: - """Return a default icon based on the bin type.""" - bin_type_lower = self._bin_type.lower() - if "recycling" in bin_type_lower: - return "mdi:recycle" - elif "waste" in bin_type_lower: - return "mdi:trash-can" - else: - return "mdi:delete" - - @property - def name(self) -> str: - """Return the name of the sensor.""" - return f"{self.coordinator.name} {self._bin_type}" - - @property - def state(self) -> str: - """Return the current state of the sensor.""" - return self._state or "Unknown" - - @property - def icon(self) -> str: - """Return the icon for the sensor.""" - return self._icon - - @property - def extra_state_attributes(self) -> dict: - """Return extra state attributes for the sensor.""" - return { - STATE_ATTR_COLOUR: self._color, - STATE_ATTR_NEXT_COLLECTION: self._next_collection.strftime("%d/%m/%Y") - if self._next_collection - else None, - STATE_ATTR_DAYS: self._days, - } - - @property - def available(self) -> bool: - """Return the availability of the sensor.""" - return self._state != "Unknown" - - @property - def unique_id(self) -> str: - """Return a unique ID for the sensor.""" - return 
self._device_id - - -class UKBinCollectionAttributeSensor(CoordinatorEntity, SensorEntity): - """Sensor entity for additional attributes of a bin.""" - - def __init__( - self, - coordinator: DataUpdateCoordinator, - bin_type: str, - unique_id: str, - attribute_type: str, - device_id: str, - icon_color_mapping: Dict[str, Any], - ) -> None: - """Initialize the attribute sensor.""" - super().__init__(coordinator) - self.coordinator = coordinator - self._bin_type = bin_type - self._unique_id = unique_id - self._attribute_type = attribute_type - self._device_id = device_id - self._icon_color_mapping = icon_color_mapping - self._icon = self.get_icon() - self._color = self.get_color() - - @property - def name(self) -> str: - """Return the name of the attribute sensor.""" - return f"{self.coordinator.name} {self._bin_type} {self._attribute_type}" - - @property - def state(self): - """Return the state based on the attribute type.""" - if self._attribute_type == "Colour": - return self._color - elif self._attribute_type == "Bin Type": - return self._bin_type - elif self._attribute_type == "Next Collection Date": - bin_date = self.coordinator.data.get(self._bin_type) - return bin_date.strftime("%d/%m/%Y") if bin_date else "Unknown" - elif self._attribute_type == "Next Collection Human Readable": - return self.calculate_human_readable() - elif self._attribute_type == "Days Until Collection": - return self.calculate_days_until() - else: - _LOGGER.warning( - f"{LOG_PREFIX} Undefined attribute type: {self._attribute_type}" - ) - return "Undefined" - - def calculate_human_readable(self) -> str: - """Calculate human-readable collection date.""" - bin_date = self.coordinator.data.get(self._bin_type) - if not bin_date: - return "Unknown" - now = dt_util.now().date() - days = (bin_date - now).days - if days == 0: - return "Today" - elif days == 1: - return "Tomorrow" - else: - day_label = "day" if days == 1 else "days" - return f"In {days} {day_label}" - - def 
calculate_days_until(self) -> int: - """Calculate days until collection.""" - bin_date = self.coordinator.data.get(self._bin_type) - if not bin_date: - return -1 - return (bin_date - dt_util.now().date()).days - - def get_icon(self) -> str: - """Return the icon based on bin type or mapping.""" - return self._icon_color_mapping.get(self._bin_type, {}).get( - "icon", self.get_default_icon() - ) - - def get_color(self) -> str: - """Return the color based on bin type or mapping.""" - return self._icon_color_mapping.get(self._bin_type, {}).get("color", "black") - - def get_default_icon(self) -> str: - """Return a default icon based on the bin type.""" - bin_type_lower = self._bin_type.lower() - if "recycling" in bin_type_lower: - return "mdi:recycle" - elif "waste" in bin_type_lower: - return "mdi:trash-can" - else: - return "mdi:delete" - - @property - def icon(self) -> str: - """Return the icon for the attribute sensor.""" - return self._icon - - @property - def extra_state_attributes(self) -> dict: - """Return the extra state attributes.""" - return { - STATE_ATTR_COLOUR: self._color, - STATE_ATTR_NEXT_COLLECTION: self.coordinator.data.get(self._bin_type), - } - - @property - def device_info(self) -> dict: - """Return device information for device registry.""" - return { - "identifiers": {(DOMAIN, self._device_id)}, - "name": f"{self.coordinator.name} {self._bin_type}", - "manufacturer": "UK Bin Collection", - "model": "Bin Sensor", - 'sw_version': '1.0', - } - - @property - def unique_id(self) -> str: - """Return a unique ID for the attribute sensor.""" - return self._unique_id - - @property - def available(self) -> bool: - """Return the availability of the attribute sensor.""" - return self.coordinator.last_update_success - - -class UKBinCollectionRawJSONSensor(CoordinatorEntity, SensorEntity): - """Sensor entity to hold the raw JSON data for bin collections.""" - - def __init__( - self, - coordinator: DataUpdateCoordinator, - unique_id: str, - name: str, - ) -> 
None: - """Initialize the raw JSON sensor.""" - super().__init__(coordinator) - self.coordinator = coordinator - self._unique_id = unique_id - self._name = f"{name} Raw JSON" - - @property - def name(self) -> str: - """Return the name of the raw JSON sensor.""" - return self._name - - @property - def state(self) -> str: - """Return the raw JSON data as the state.""" - if not self.coordinator.data: - return "{}" - data = { - bin_type: bin_date.strftime("%d/%m/%Y") if bin_date else None - for bin_type, bin_date in self.coordinator.data.items() - } - return json.dumps(data) - - @property - def unique_id(self) -> str: - """Return a unique ID for the raw JSON sensor.""" - return self._unique_id - - @property - def extra_state_attributes(self) -> dict: - """Return the raw JSON data as an attribute.""" - return {"raw_data": self.coordinator.data or {}} - - @property - def available(self) -> bool: - """Return the availability of the raw JSON sensor.""" - return self.coordinator.last_update_success diff --git a/custom_components/uk_bin_collection/strings.json b/custom_components/uk_bin_collection/strings.json deleted file mode 100644 index 90e72fb99b..0000000000 --- a/custom_components/uk_bin_collection/strings.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "title": "UK Bin Collection Data", - "config": { - "step": { - "user": { - "title": "Select the council", - "data": { - "name": "Location name", - "council": "Council", - "icon_color_mapping": "JSON to map Bin Type for Colour and Icon see: https://github.com/robbrad/UKBinCollectionData" - }, - "description": "Please see [here](https://github.com/robbrad/UKBinCollectionData#requesting-your-council) if your council isn't listed" - }, - "council": { - "title": "Provide council details", - "data": { - "url": "URL to fetch bin collection data", - "timeout": "The time in seconds for how long the sensor should wait for data", - "update_interval": "Time in hours between updates", - "uprn": "UPRN (Unique Property Reference Number)", - 
"postcode": "Postcode of the address", - "number": "House number of the address", - "usrn": "USRN (Unique Street Reference Number)", - "web_driver": "To run on a remote Selenium Server add the Selenium Server URL", - "headless": "Run Selenium in headless mode (recommended)", - "local_browser": "Don't run on remote Selenium server, use local install of Chrome instead", - "submit": "Submit" - }, - "description": "Please refer to your council's [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) entry for details on what to enter.\n{selenium_message}" - }, - "reconfigure_confirm": { - "title": "Update council details", - "data": { - "url": "URL to fetch bin collection data", - "timeout": "The time in seconds for how long the sensor should wait for data", - "update_interval": "Time in hours between updates", - "uprn": "UPRN (Unique Property Reference Number)", - "postcode": "Postcode of the address", - "number": "House number of the address", - "usrn": "USRN (Unique Street Reference Number)", - "web_driver": "To run on a remote Selenium Server add the Selenium Server URL", - "headless": "Run Selenium in headless mode (recommended)", - "local_browser": "Don't run on remote Selenium server, use local install of Chrome instead", - "icon_color_mapping": "JSON to map Bin Type for Colour and Icon see: https://github.com/robbrad/UKBinCollectionData", - "submit": "Submit" - }, - "description": "Please refer to your council's [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) entry for details on what to enter." - } - }, - "error": { - "name": "Please enter a location name", - "council": "Please select a council", - "selenium_unavailable": "❌ Selenium server is not accessible. Please ensure it is running at http://localhost:4444 or http://selenium:4444. [Setup Guide](https://example.com/selenium-setup)", - "chromium_not_found": "❌ Chromium browser is not installed. Please install Chromium or Google Chrome. 
[Installation Guide](https://example.com/chromium-install)" - } - } -} diff --git a/custom_components/uk_bin_collection/tests/__init__.py b/custom_components/uk_bin_collection/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/custom_components/uk_bin_collection/tests/common_utils.py b/custom_components/uk_bin_collection/tests/common_utils.py deleted file mode 100644 index 8c58ff5214..0000000000 --- a/custom_components/uk_bin_collection/tests/common_utils.py +++ /dev/null @@ -1,52 +0,0 @@ -# custom_components/uk_bin_collection/tests/common_utils.py - -import uuid -from unittest.mock import Mock, AsyncMock # Import AsyncMock -from homeassistant import config_entries -import asyncio - -class MockConfigEntry: - """Mock for Home Assistant ConfigEntry.""" - - def __init__( - self, - domain, - data=None, - options=None, - title=None, - unique_id=None, - source=config_entries.SOURCE_USER, - entry_id=None, - version=1, - ): - """Initialize a mock config entry.""" - self.domain = domain - self.data = data or {} - self.options = options or {} - self.title = title or "Mock Title" - self.unique_id = unique_id - self.source = source - self.entry_id = entry_id or uuid.uuid4().hex - self.version = version - self.state = config_entries.ConfigEntryState.NOT_LOADED - - def add_to_hass(self, hass): - """Add the mock config entry to Home Assistant.""" - # Mock the async_add method to accept the entry - hass.config_entries.async_add.return_value = None - hass.config_entries.async_add(self) - - # Mock async_setup to be an AsyncMock that returns True - hass.config_entries.async_setup = AsyncMock(return_value=True) - - # Mock the create_task to immediately run the coroutine - # Define a coroutine that runs async_setup and updates the entry state - async def run_setup(entry_id): - result = await hass.config_entries.async_setup(entry_id) - if result: - self.state = config_entries.ConfigEntryState.LOADED - else: - self.state = 
config_entries.ConfigEntryState.SETUP_ERROR - - # Assign the coroutine as a side effect to create_task - hass.loop.create_task = AsyncMock(side_effect=lambda coro: asyncio.create_task(run_setup(self.entry_id))) diff --git a/custom_components/uk_bin_collection/tests/test_calendar.py b/custom_components/uk_bin_collection/tests/test_calendar.py deleted file mode 100644 index a324e35523..0000000000 --- a/custom_components/uk_bin_collection/tests/test_calendar.py +++ /dev/null @@ -1,542 +0,0 @@ -# test_calendar.py - -"""Unit tests for the UK Bin Collection Calendar platform.""" - -import pytest -from unittest.mock import MagicMock, AsyncMock, patch -from datetime import datetime, date, timedelta - -from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator - -from custom_components.uk_bin_collection.const import DOMAIN -from custom_components.uk_bin_collection.calendar import ( - UKBinCollectionCalendar, - async_setup_entry, -) -from homeassistant.components.calendar import CalendarEvent - -from .common_utils import MockConfigEntry - -pytest_plugins = ["freezegun"] - -# Mock Data -MOCK_COORDINATOR_DATA = { - "Recycling": date(2024, 4, 25), - "General Waste": date(2024, 4, 26), - "Garden Waste": date(2024, 4, 27), -} - -@pytest.fixture -def mock_coordinator(): - """Fixture to create a mock DataUpdateCoordinator with sample data.""" - coordinator = MagicMock(spec=DataUpdateCoordinator) - coordinator.data = MOCK_COORDINATOR_DATA.copy() - coordinator.name = "Test Council" - coordinator.last_update_success = True - return coordinator - -@pytest.fixture -def mock_config_entry(): - """Create a mock ConfigEntry.""" - return MockConfigEntry( - domain=DOMAIN, - title="Test Entry", - data={ - "name": "Test Name", - "council": "Test Council", - "url": "https://example.com", - "timeout": 60, - "icon_color_mapping": {}, - }, - entry_id="test_entry_id", - unique_id="test_unique_id", - ) - -@pytest.fixture -async def 
hass_instance(hass: HomeAssistant): - return hass - -# Tests - -def test_calendar_entity_initialization(hass_instance, mock_coordinator): - """Test that the calendar entity initializes correctly.""" - calendar = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - assert calendar.name == "Test Council Recycling Calendar" - assert calendar.unique_id == "test_entry_id_Recycling_calendar" - assert calendar.device_info == { - "identifiers": {(DOMAIN, "test_entry_id_Recycling_calendar")}, - "name": "Test Council Recycling Calendar Device", - "manufacturer": "UK Bin Collection", - "model": "Bin Collection Calendar", - "sw_version": "1.0", - } - -def test_calendar_event_property(hass_instance, mock_coordinator): - """Test that the event property returns the correct CalendarEvent.""" - collection_date = date(2024, 4, 25) - mock_coordinator.data["Recycling"] = collection_date - - calendar = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - expected_event = CalendarEvent( - summary="Recycling Collection", - start=collection_date, - end=collection_date + timedelta(days=1), - uid="test_entry_id_Recycling_calendar_2024-04-25", - ) - - assert calendar.event == expected_event - -def test_calendar_event_property_no_data(hass_instance, mock_coordinator): - """Test that the event property returns None when there's no collection date.""" - mock_coordinator.data["Recycling"] = None - - calendar = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - assert calendar.event is None - -@pytest.mark.asyncio -async def test_async_get_events(hass_instance, mock_coordinator): - """Test that async_get_events returns 
correct events within the date range.""" - mock_coordinator.data = { - "Recycling": date(2024, 4, 25), - "General Waste": date(2024, 4, 26), - } - - calendar = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - start_date = datetime(2024, 4, 24) - end_date = datetime(2024, 4, 26) - - expected_event = CalendarEvent( - summary="Recycling Collection", - start=date(2024, 4, 25), - end=date(2024, 4, 26), - uid="test_entry_id_Recycling_calendar_2024-04-25", - ) - - events = await calendar.async_get_events(hass_instance, start_date, end_date) - assert events == [expected_event] - -@pytest.mark.asyncio -async def test_async_get_events_no_events_in_range(hass_instance, mock_coordinator): - """Test that async_get_events returns empty list when no events are in the range.""" - mock_coordinator.data = { - "Recycling": date(2024, 4, 25), - } - - calendar = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - start_date = datetime(2024, 4, 26) - end_date = datetime(2024, 4, 30) - - events = await calendar.async_get_events(hass_instance, start_date, end_date) - assert events == [] - -def test_calendar_update_on_coordinator_change(hass_instance, mock_coordinator): - """Test that the calendar entity updates when the coordinator's data changes.""" - collection_date_initial = date(2024, 4, 25) - collection_date_updated = date(2024, 4, 26) - mock_coordinator.data["Recycling"] = collection_date_initial - - calendar = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - # Initially, the event should be for April 25 - expected_event_initial = CalendarEvent( - summary="Recycling Collection", - 
start=collection_date_initial, - end=collection_date_initial + timedelta(days=1), - uid="test_entry_id_Recycling_calendar_2024-04-25", - ) - assert calendar.event == expected_event_initial - - # Update the coordinator's data - mock_coordinator.data["Recycling"] = collection_date_updated - mock_coordinator.async_write_ha_state = AsyncMock() - - # Simulate coordinator update by calling the update handler - with patch.object(calendar, "async_write_ha_state", new=AsyncMock()) as mock_write: - calendar._handle_coordinator_update() - - # The event should now be updated to April 26 - expected_event_updated = CalendarEvent( - summary="Recycling Collection", - start=collection_date_updated, - end=collection_date_updated + timedelta(days=1), - uid="test_entry_id_Recycling_calendar_2024-04-26", - ) - assert calendar.event == expected_event_updated - mock_write.assert_called_once() - -@pytest.mark.asyncio -async def test_async_setup_entry_creates_calendar_entities(hass_instance, mock_coordinator, mock_config_entry): - """Test that async_setup_entry creates calendar entities based on coordinator data.""" - # Mock the data in the coordinator - mock_coordinator.data = { - "Recycling": date(2024, 4, 25), - "General Waste": date(2024, 4, 26), - } - - # Patch the hass.data to include the coordinator - hass_instance.data[DOMAIN][mock_config_entry.entry_id] = { - "coordinator": mock_coordinator, - } - - with patch("custom_components.uk_bin_collection.calendar.UKBinCollectionCalendar", autospec=True) as mock_calendar_cls: - mock_calendar_instance_recycling = MagicMock() - mock_calendar_instance_general_waste = MagicMock() - mock_calendar_cls.side_effect = [ - mock_calendar_instance_recycling, - mock_calendar_instance_general_waste, - ] - - await async_setup_entry(hass_instance, mock_config_entry) - - # Ensure that two calendar entities are created - assert mock_calendar_cls.call_count == 2 - - # Verify that the calendar entities are initialized with correct parameters - 
mock_calendar_cls.assert_any_call( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - mock_calendar_cls.assert_any_call( - coordinator=mock_coordinator, - bin_type="General Waste", - unique_id="test_entry_id_General Waste_calendar", - name="Test Council General Waste Calendar", - ) - -@pytest.mark.asyncio -async def test_async_setup_entry_handles_empty_data(hass_instance, mock_config_entry): - """Test that async_setup_entry handles empty coordinator data gracefully.""" - # Mock an empty data coordinator - mock_coordinator = MagicMock(spec=DataUpdateCoordinator) - mock_coordinator.data = {} - mock_coordinator.name = "Test Council" - mock_coordinator.last_update_success = True - - # Patch the hass.data to include the coordinator - hass_instance.data[DOMAIN][mock_config_entry.entry_id] = { - "coordinator": mock_coordinator, - } - - with patch("custom_components.uk_bin_collection.calendar.UKBinCollectionCalendar", autospec=True) as mock_calendar_cls: - await async_setup_entry(hass_instance, mock_config_entry) - - # No calendar entities should be created since there's no data - mock_calendar_cls.assert_not_called() - -@pytest.mark.asyncio -async def test_async_setup_entry_handles_coordinator_failure(hass_instance, mock_config_entry): - """Test that async_setup_entry raises ConfigEntryNotReady on coordinator failure.""" - mock_coordinator = MagicMock(spec=DataUpdateCoordinator) - mock_coordinator.async_config_entry_first_refresh.side_effect = Exception("Update failed") - mock_coordinator.name = "Test Council" - - # Patch the hass.data to include the coordinator - hass_instance.data[DOMAIN][mock_config_entry.entry_id] = { - "coordinator": mock_coordinator, - } - - with pytest.raises(Exception, match="Update failed"): - await async_setup_entry(hass_instance, mock_config_entry) - -@pytest.mark.asyncio -async def test_async_unload_entry(hass_instance, mock_coordinator, 
mock_config_entry): - """Test that async_unload_entry unloads calendar entities correctly.""" - # Mock the data in the coordinator - mock_coordinator.data = { - "Recycling": date(2024, 4, 25), - } - - # Patch the hass.data to include the coordinator - hass_instance.data[DOMAIN][mock_config_entry.entry_id] = { - "coordinator": mock_coordinator, - } - - # First, set up the entry - with patch("custom_components.uk_bin_collection.calendar.UKBinCollectionCalendar", autospec=True) as mock_calendar_cls: - mock_calendar_instance = MagicMock() - mock_calendar_cls.return_value = mock_calendar_instance - - await async_setup_entry(hass_instance, mock_config_entry) - - # Now, attempt to unload the entry - with patch( - "homeassistant.config_entries.ConfigEntry.async_forward_entry_unload", - return_value=AsyncMock(return_value=True), - ) as mock_unload_forward: - unload_ok = await hass_instance.config_entries.async_forward_entry_unload( - mock_config_entry, "calendar" - ) - - assert unload_ok is True - mock_unload_forward.assert_called_once_with(mock_config_entry, "calendar") - - # Ensure that the coordinator is removed from hass.data - assert mock_config_entry.entry_id not in hass_instance.data[DOMAIN] - -def test_calendar_entity_available_property(hass_instance, mock_coordinator): - """Test the available property of the calendar entity.""" - # When data is present and last_update_success is True - mock_coordinator.last_update_success = True - mock_coordinator.data["Recycling"] = date(2024, 4, 25) - - calendar = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - assert calendar.available is True - - # When data is missing - mock_coordinator.data["Recycling"] = None - assert calendar.available is False - - # When last_update_success is False - mock_coordinator.last_update_success = False - calendar._state = "Unknown" # Assuming state is set to "Unknown" 
when unavailable - assert calendar.available is False - -@pytest.mark.asyncio -async def test_async_setup_entry_creates_no_calendar_entities_on_empty_data(hass_instance, mock_config_entry): - """Test that async_setup_entry does not create calendar entities when coordinator data is empty.""" - mock_coordinator = MagicMock(spec=DataUpdateCoordinator) - mock_coordinator.data = {} - mock_coordinator.name = "Test Council" - mock_coordinator.last_update_success = True - - # Patch the hass.data to include the coordinator - hass_instance.data[DOMAIN][mock_config_entry.entry_id] = { - "coordinator": mock_coordinator, - } - - with patch("custom_components.uk_bin_collection.calendar.UKBinCollectionCalendar", autospec=True) as mock_calendar_cls: - await async_setup_entry(hass_instance, mock_config_entry) - - # No calendar entities should be created - mock_calendar_cls.assert_not_called() - -@pytest.mark.asyncio -async def test_async_setup_entry_with_coordinator_failure(hass_instance, mock_config_entry): - """Test that async_setup_entry handles coordinator failures gracefully.""" - mock_coordinator = MagicMock(spec=DataUpdateCoordinator) - mock_coordinator.async_config_entry_first_refresh.side_effect = Exception("Update failed") - mock_coordinator.name = "Test Council" - - # Patch the hass.data to include the coordinator - hass_instance.data[DOMAIN][mock_config_entry.entry_id] = { - "coordinator": mock_coordinator, - } - - with pytest.raises(Exception, match="Update failed"): - await async_setup_entry(hass_instance, mock_config_entry) - -@pytest.mark.asyncio -async def test_async_unload_entry_failure(hass_instance, mock_coordinator, mock_config_entry): - """Test that async_unload_entry handles unload failures.""" - # Mock the data in the coordinator - mock_coordinator.data = { - "Recycling": date(2024, 4, 25), - } - - # Patch the hass.data to include the coordinator - hass_instance.data[DOMAIN][mock_config_entry.entry_id] = { - "coordinator": mock_coordinator, - } - - # First, 
set up the entry - with patch("custom_components.uk_bin_collection.calendar.UKBinCollectionCalendar", autospec=True) as mock_calendar_cls: - mock_calendar_instance = MagicMock() - mock_calendar_cls.return_value = mock_calendar_instance - - await async_setup_entry(hass_instance, mock_config_entry) - - # Now, attempt to unload the entry but simulate a failure - with patch( - "homeassistant.config_entries.ConfigEntry.async_forward_entry_unload", - return_value=AsyncMock(return_value=False), - ) as mock_unload_forward: - unload_ok = await hass_instance.config_entries.async_forward_entry_unload( - mock_config_entry, "calendar" - ) - - assert unload_ok is False - mock_unload_forward.assert_called_once_with(mock_config_entry, "calendar") - - # Ensure that the coordinator is still present in hass.data - assert mock_config_entry.entry_id in hass_instance.data[DOMAIN] - -@pytest.mark.asyncio -async def test_async_get_events_multiple_events_same_day(hass_instance, mock_coordinator): - """Test async_get_events when multiple bin types have the same collection date.""" - mock_coordinator.data = { - "Recycling": date(2024, 4, 25), - "General Waste": date(2024, 4, 25), - } - - calendar_recycling = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - calendar_general_waste = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="General Waste", - unique_id="test_entry_id_General Waste_calendar", - name="Test Council General Waste Calendar", - ) - - start_date = datetime(2024, 4, 24) - end_date = datetime(2024, 4, 26) - - expected_event_recycling = CalendarEvent( - summary="Recycling Collection", - start=date(2024, 4, 25), - end=date(2024, 4, 26), - uid="test_entry_id_Recycling_calendar_2024-04-25", - ) - - expected_event_general_waste = CalendarEvent( - summary="General Waste Collection", - start=date(2024, 4, 25), - end=date(2024, 4, 26), - 
uid="test_entry_id_General Waste_calendar_2024-04-25", - ) - - events_recycling = await calendar_recycling.async_get_events(hass_instance, start_date, end_date) - events_general_waste = await calendar_general_waste.async_get_events(hass_instance, start_date, end_date) - - assert events_recycling == [expected_event_recycling] - assert events_general_waste == [expected_event_general_waste] - -@pytest.mark.asyncio -async def test_async_get_events_no_coordinator_data(hass_instance, mock_coordinator): - """Test async_get_events when coordinator has no data.""" - mock_coordinator.data = {} - calendar = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - start_date = datetime(2024, 4, 24) - end_date = datetime(2024, 4, 26) - - events = await calendar.async_get_events(hass_instance, start_date, end_date) - assert events == [] - -def test_calendar_entity_available_property_no_data(hass_instance, mock_coordinator): - """Test that the calendar's available property is False when there's no data.""" - mock_coordinator.data["Recycling"] = None - - calendar = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - assert calendar.available is False - -@pytest.mark.asyncio -async def test_calendar_entity_extra_state_attributes(hass_instance, mock_coordinator): - """Test the extra_state_attributes property of the calendar entity.""" - mock_coordinator.data["Recycling"] = date(2024, 4, 25) - - calendar = UKBinCollectionCalendar( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - - # Assuming extra_state_attributes includes more data if implemented - # Adjust this part based on your actual calendar.py implementation - # For example, you 
might include 'next_collection_date' and 'days_until_collection' - # Here, we'll assume no additional attributes as per the initial calendar.py - - # If extra_state_attributes is not implemented, it defaults to None - # To handle this, you can set it to return an empty dict if not implemented - assert calendar.extra_state_attributes == {} - -@pytest.mark.asyncio -async def test_async_setup_entry_handles_coordinator_partial_data(hass_instance, mock_config_entry): - """Test that async_setup_entry creates calendar entities only for available data.""" - mock_coordinator.data = { - "Recycling": date(2024, 4, 25), - "General Waste": None, # No collection date - "Garden Waste": date(2024, 4, 27), - } - - # Patch the hass.data to include the coordinator - hass_instance.data[DOMAIN][mock_config_entry.entry_id] = { - "coordinator": mock_coordinator, - } - - with patch("custom_components.uk_bin_collection.calendar.UKBinCollectionCalendar", autospec=True) as mock_calendar_cls: - mock_calendar_instance_recycling = MagicMock() - mock_calendar_instance_garden_waste = MagicMock() - mock_calendar_cls.side_effect = [ - mock_calendar_instance_recycling, - mock_calendar_instance_garden_waste, - ] - - await async_setup_entry(hass_instance, mock_config_entry) - - # Ensure that two calendar entities are created (excluding General Waste) - assert mock_calendar_cls.call_count == 2 - - # Verify that the calendar entities are initialized with correct parameters - mock_calendar_cls.assert_any_call( - coordinator=mock_coordinator, - bin_type="Recycling", - unique_id="test_entry_id_Recycling_calendar", - name="Test Council Recycling Calendar", - ) - mock_calendar_cls.assert_any_call( - coordinator=mock_coordinator, - bin_type="Garden Waste", - unique_id="test_entry_id_Garden Waste_calendar", - name="Test Council Garden Waste Calendar", - ) diff --git a/custom_components/uk_bin_collection/tests/test_config_flow.py b/custom_components/uk_bin_collection/tests/test_config_flow.py deleted file mode 
100644 index 99dde1aae3..0000000000 --- a/custom_components/uk_bin_collection/tests/test_config_flow.py +++ /dev/null @@ -1,942 +0,0 @@ -# test_config_flow.py - -"""Test UkBinCollection config flow.""" - -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -import voluptuous as vol -from homeassistant import config_entries, data_entry_flow -from homeassistant.const import CONF_NAME, CONF_URL -from homeassistant.core import HomeAssistant - -from custom_components.uk_bin_collection.config_flow import UkBinCollectionConfigFlow -from custom_components.uk_bin_collection.const import DOMAIN - -from .common_utils import MockConfigEntry - -# Mock council data representing different scenarios -MOCK_COUNCILS_DATA = { - "CouncilWithoutURL": { - "wiki_name": "Council without URL", - "skip_get_url": True, - # Do not include 'custom_component_show_url_field' - # Other necessary fields - "uprn": True, - "url": "https://example.com/council_without_url", - }, - "CouncilWithUSRN": { - "wiki_name": "Council with USRN", - "usrn": True, - }, - "CouncilWithUPRN": { - "wiki_name": "Council with UPRN", - "uprn": True, - }, - "CouncilWithPostcodeNumber": { - "wiki_name": "Council with Postcode and Number", - "postcode": True, - "house_number": True, - }, - "CouncilWithWebDriver": { - "wiki_name": "Council with Web Driver", - "web_driver": True, - }, - "CouncilSkippingURL": { - "wiki_name": "Council skipping URL", - "skip_get_url": True, - "url": "https://council.example.com", - }, - "CouncilCustomURLField": { - "wiki_name": "Council with Custom URL Field", - "custom_component_show_url_field": True, - }, - # Add more mock councils as needed to cover different scenarios -} - - -# Helper function to initiate the config flow and proceed through steps -async def proceed_through_config_flow( - hass: HomeAssistant, flow, user_input_initial, user_input_council -): - # Start the flow and complete the `user` step - result = await flow.async_step_user(user_input=user_input_initial) 
- - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "council" - - # Complete the `council` step - result = await flow.async_step_council(user_input=user_input_council) - - return result - - -@pytest.mark.asyncio -async def test_config_flow_with_uprn(hass: HomeAssistant): - """Test config flow for a council requiring UPRN.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "Test Name", - "council": "Council with UPRN", - } - user_input_council = { - "uprn": "1234567890", - "timeout": 60, - } - - result = await proceed_through_config_flow( - hass, flow, user_input_initial, user_input_council - ) - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == "Test Name" - assert result["data"] == { - "name": "Test Name", - "council": "CouncilWithUPRN", - "uprn": "1234567890", - "timeout": 60, - } - - -async def test_config_flow_with_postcode_and_number(hass: HomeAssistant): - """Test config flow for a council requiring postcode and house number.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "Test Name", - "council": "Council with Postcode and Number", - } - user_input_council = { - "postcode": "AB1 2CD", - "number": "42", - "timeout": 60, - } - - result = await proceed_through_config_flow( - hass, flow, user_input_initial, user_input_council - ) - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == "Test Name" - assert result["data"] == { - "name": "Test Name", - "council": "CouncilWithPostcodeNumber", - "postcode": "AB1 2CD", - "number": "42", - 
"timeout": 60, - } - - -async def test_config_flow_with_web_driver(hass: HomeAssistant): - """Test config flow for a council requiring web driver.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "Test Name", - "council": "Council with Web Driver", - } - user_input_council = { - "web_driver": "/path/to/webdriver", - "headless": True, - "local_browser": False, - "timeout": 60, - } - - result = await proceed_through_config_flow( - hass, flow, user_input_initial, user_input_council - ) - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == "Test Name" - assert result["data"] == { - "name": "Test Name", - "council": "CouncilWithWebDriver", - "web_driver": "/path/to/webdriver", - "headless": True, - "local_browser": False, - "timeout": 60, - } - - -async def test_config_flow_skipping_url(hass: HomeAssistant): - """Test config flow for a council that skips URL input.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "Test Name", - "council": "Council skipping URL", - } - user_input_council = { - "timeout": 60, - } - - result = await proceed_through_config_flow( - hass, flow, user_input_initial, user_input_council - ) - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == "Test Name" - assert result["data"] == { - "name": "Test Name", - "council": "CouncilSkippingURL", - "skip_get_url": True, - "url": "https://council.example.com", - "timeout": 60, - } - - -async def test_config_flow_with_custom_url_field(hass: HomeAssistant): - """Test config flow for a council with custom URL field.""" - with 
patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "Test Name", - "council": "Council with Custom URL Field", - } - user_input_council = { - "url": "https://custom-url.example.com", - "timeout": 60, - } - - result = await proceed_through_config_flow( - hass, flow, user_input_initial, user_input_council - ) - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == "Test Name" - assert result["data"] == { - "name": "Test Name", - "council": "CouncilCustomURLField", - "url": "https://custom-url.example.com", - "timeout": 60, - } - - -async def test_config_flow_missing_name(hass: HomeAssistant): - """Test config flow when name is missing.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "", # Missing name - "council": "Council with UPRN", - } - - result = await flow.async_step_user(user_input=user_input_initial) - - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "user" - assert result["errors"] == {"name": "Name is required."} - - -async def test_config_flow_invalid_icon_color_mapping(hass: HomeAssistant): - """Test config flow with invalid icon_color_mapping JSON.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "Test Name", - "council": "Council with UPRN", - "icon_color_mapping": "invalid json", # Invalid JSON - } - - result = await flow.async_step_user(user_input=user_input_initial) - - # Should return to the 
user step with an error - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "user" - assert result["errors"] == {"icon_color_mapping": "Invalid JSON format."} - - -async def test_config_flow_with_usrn(hass: HomeAssistant): - """Test config flow for a council requiring USRN.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "Test Name", - "council": "Council with USRN", - } - user_input_council = { - "usrn": "9876543210", - "timeout": 60, - } - - result = await proceed_through_config_flow( - hass, flow, user_input_initial, user_input_council - ) - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == "Test Name" - assert result["data"] == { - "name": "Test Name", - "council": "CouncilWithUSRN", - "usrn": "9876543210", - "timeout": 60, - } - - -@pytest.mark.asyncio -async def test_reconfigure_flow(hass): - """Test reconfiguration of an existing integration.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - # Create an existing entry - existing_entry = MockConfigEntry( - domain=DOMAIN, - data={ - "name": "Existing Entry", - "council": "CouncilWithUPRN", - "uprn": "1234567890", - "timeout": 60, - }, - ) - existing_entry.add_to_hass(hass) - - # Configure async_get_entry to return the existing_entry when called with its entry_id - hass.config_entries.async_get_entry.return_value = existing_entry - - # Configure async_init to return a FlowResultType.FORM with step_id 'reconfigure_confirm' - hass.config_entries.flow.async_init.return_value = { - "flow_id": "test_flow_id", - "type": data_entry_flow.RESULT_TYPE_FORM, - "step_id": "reconfigure_confirm", - } - - # Initialize the flow - flow = 
UkBinCollectionConfigFlow() - flow.hass = hass - - # Set the context to reconfigure the existing entry - flow.context = {"source": "reconfigure", "entry_id": existing_entry.entry_id} - - # Mock async_step_reconfigure_confirm's behavior - with patch.object( - flow, "async_step_reconfigure_confirm", new=AsyncMock() - ) as mock_step: - mock_step.return_value = { - "type": data_entry_flow.RESULT_TYPE_CREATE_ENTRY, - "title": "Test Name", - "data": { - "name": "Test Name", - "council": "CouncilWithUPRN", - "uprn": "0987654321", - "timeout": 120, - }, - } - - # Start the reconfiguration flow - result = await flow.async_step_reconfigure() - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == "Test Name" - assert result["data"] == { - "name": "Test Name", - "council": "CouncilWithUPRN", - "uprn": "0987654321", - "timeout": 120, - } - - # Verify that async_step_reconfigure_confirm was called - mock_step.assert_called_once() - - -async def get_councils_json(self) -> object: - """Returns an object of supported councils and their required fields.""" - url = "https://raw.githubusercontent.com/robbrad/UKBinCollectionData/0.104.0/uk_bin_collection/tests/input.json" - try: - async with aiohttp.ClientSession() as session: - async with session.get(url) as response: - data_text = await response.text() - return json.loads(data_text) - except Exception as e: - _LOGGER.error("Failed to fetch councils data: %s", e) - return {} - - -@pytest.mark.asyncio -async def test_get_councils_json_failure(hass: HomeAssistant): - """Test handling when get_councils_json fails.""" - with patch( - "aiohttp.ClientSession", - autospec=True, - ) as mock_session_cls: - # Configure the mock session to simulate a network error - mock_session = mock_session_cls.return_value.__aenter__.return_value - mock_session.get.side_effect = Exception("Network error") - - # Configure async_init to simulate flow abort due to council data being unavailable - 
hass.config_entries.flow.async_init.return_value = { - "type": data_entry_flow.RESULT_TYPE_ABORT, - "reason": "council_data_unavailable", - } - - # Initialize the flow - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - # Start the flow using hass.config_entries.flow.async_init - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - # The flow should abort due to council data being unavailable - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "council_data_unavailable" - - -async def test_config_flow_user_input_none(hass: HomeAssistant): - """Test config flow when user_input is None.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - result = await flow.async_step_user(user_input=None) - - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "user" - - -async def test_config_flow_with_optional_fields(hass: HomeAssistant): - """Test config flow with optional fields provided.""" - # Assume 'CouncilWithOptionalFields' requires 'uprn' and has optional 'web_driver' - MOCK_COUNCILS_DATA["CouncilWithOptionalFields"] = { - "wiki_name": "Council with Optional Fields", - "uprn": True, - "web_driver": True, - } - - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "Test Name", - "council": "Council with Optional Fields", - } - user_input_council = { - "uprn": "1234567890", - "web_driver": "/path/to/webdriver", - "headless": True, - "local_browser": False, - "timeout": 60, - } - - result = await proceed_through_config_flow( - hass, flow, user_input_initial, user_input_council - ) - 
- assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == "Test Name" - assert result["data"] == { - "name": "Test Name", - "council": "CouncilWithOptionalFields", - "uprn": "1234567890", - "web_driver": "/path/to/webdriver", - "headless": True, - "local_browser": False, - "timeout": 60, - } - - -@pytest.mark.asyncio -async def test_get_councils_json_session_creation_failure(hass): - """Test handling when creating aiohttp ClientSession fails.""" - with patch( - "aiohttp.ClientSession", - side_effect=Exception("Failed to create session"), - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - # Configure async_init to simulate flow abort due to council data being unavailable - hass.config_entries.flow.async_init.return_value = { - "type": data_entry_flow.RESULT_TYPE_ABORT, - "reason": "council_data_unavailable", - } - - # Start the flow using hass.config_entries.flow.async_init - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - # The flow should abort due to council data being unavailable - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "council_data_unavailable" - - -@pytest.mark.asyncio -async def test_config_flow_council_without_url(hass): - """Test config flow for a council where 'url' field should not be included.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "Test Name", - "council": "Council without URL", - } - user_input_council = { - "uprn": "1234567890", - "timeout": 60, - } - - # Configure async_init to return a FlowResultType.FORM with step_id 'council' - hass.config_entries.flow.async_init.return_value = { - "flow_id": "test_flow_id", - "type": data_entry_flow.RESULT_TYPE_FORM, - "step_id": 
"council", - } - - # Configure async_configure to return a FlowResultType.CREATE_ENTRY - hass.config_entries.flow.async_configure.return_value = { - "type": data_entry_flow.RESULT_TYPE_CREATE_ENTRY, - "title": "Test Name", - "data": { - "name": "Test Name", - "council": "CouncilWithoutURL", - "uprn": "1234567890", - "timeout": 60, - "skip_get_url": True, - "url": "https://example.com/council_without_url", - }, - } - - # Start the flow - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - # Provide initial user input - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=user_input_initial - ) - - assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY - assert result["title"] == "Test Name" - assert result["data"] == { - "name": "Test Name", - "council": "CouncilWithoutURL", - "uprn": "1234567890", - "timeout": 60, - "skip_get_url": True, - "url": "https://example.com/council_without_url", - } - - -async def test_config_flow_missing_council(hass: HomeAssistant): - """Test config flow when council is missing.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - user_input_initial = { - "name": "Test Name", - "council": "", # Missing council - } - - result = await flow.async_step_user(user_input=user_input_initial) - - # Should return to the user step with an error - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "user" - assert result["errors"] == {"council": "Council is required."} - - -@pytest.mark.asyncio -async def test_reconfigure_flow_with_errors(hass): - """Test reconfiguration with invalid input.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): 
- # Create an existing entry - existing_entry = MockConfigEntry( - domain=DOMAIN, - data={ - "name": "Existing Entry", - "council": "CouncilWithUPRN", - "uprn": "1234567890", - "timeout": 60, - }, - ) - existing_entry.add_to_hass(hass) - - # Configure async_get_entry to return the existing_entry when called with its entry_id - hass.config_entries.async_get_entry.return_value = existing_entry - - # Configure async_init to return a FlowResultType.FORM with step_id 'reconfigure_confirm' - hass.config_entries.flow.async_init.return_value = { - "flow_id": "test_flow_id", - "type": data_entry_flow.RESULT_TYPE_FORM, - "step_id": "reconfigure_confirm", - } - - # Initialize the flow - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - # Set the context to reconfigure the existing entry - flow.context = {"source": "reconfigure", "entry_id": existing_entry.entry_id} - - # Mock async_step_reconfigure_confirm's behavior to handle invalid input - with patch.object( - flow, "async_step_reconfigure_confirm", new=AsyncMock() - ) as mock_step: - mock_step.return_value = { - "type": data_entry_flow.RESULT_TYPE_FORM, - "step_id": "reconfigure_confirm", - "errors": {"icon_color_mapping": "invalid_json"}, - } - - # Start the reconfiguration flow - result = await flow.async_step_reconfigure() - - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" - - # Provide invalid data (e.g., invalid JSON for icon_color_mapping) - user_input = { - "name": "Updated Entry", - "council": "Council with UPRN", - "uprn": "0987654321", - "icon_color_mapping": "invalid json", - "timeout": 60, - } - - # Configure async_configure to return an error - hass.config_entries.flow.async_configure.return_value = { - "type": data_entry_flow.RESULT_TYPE_FORM, - "step_id": "reconfigure_confirm", - "errors": {"icon_color_mapping": "invalid_json"}, - } - - result = await flow.async_step_reconfigure_confirm(user_input=user_input) - - # Should return to 
the reconfigure_confirm step with an error - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" - assert result["errors"] == {"icon_color_mapping": "invalid_json"} - - -@pytest.mark.asyncio -async def test_reconfigure_flow_entry_missing(hass): - """Test reconfiguration when the config entry is missing.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - # Set the context with an invalid entry_id to simulate a missing entry - flow.context = {"source": "reconfigure", "entry_id": "invalid_entry_id"} - - # Mock async_get_entry to return None using MagicMock, not AsyncMock - hass.config_entries.async_get_entry = MagicMock(return_value=None) - - # Run the reconfiguration step to check for abort - result = await flow.async_step_reconfigure() - - # Assert that the flow aborts due to the missing config entry - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "Reconfigure Failed" - - -@pytest.mark.asyncio -async def test_reconfigure_flow_no_user_input(hass): - """Test reconfiguration when user_input is None.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - # Create a mock entry and ensure add_to_hass is awaited - existing_entry = MockConfigEntry( - domain=DOMAIN, - data={ - "name": "Existing Entry", - "council": "CouncilWithUPRN", - "uprn": "1234567890", - "timeout": 60, - }, - ) - existing_entry.add_to_hass(hass) - - # Mock async_get_entry to return the entry directly, avoiding coroutine issues - hass.config_entries.async_get_entry = AsyncMock(return_value=existing_entry) - - # Mock async_init and start the reconfigure flow - hass.config_entries.flow.async_init.return_value = { - "flow_id": "test_flow_id", - 
"type": data_entry_flow.RESULT_TYPE_FORM, - "step_id": "reconfigure_confirm", - } - - flow = UkBinCollectionConfigFlow() - flow.hass = hass - flow.context = {"source": "reconfigure", "entry_id": existing_entry.entry_id} - - # Proceed without user input, simulating the form return - with patch.object( - flow, "async_step_reconfigure_confirm", new=AsyncMock() - ) as mock_step: - mock_step.return_value = { - "type": data_entry_flow.RESULT_TYPE_FORM, - "step_id": "reconfigure_confirm", - "errors": {}, - } - - result = await flow.async_step_reconfigure_confirm(user_input=None) - - assert result["type"] == data_entry_flow.FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" - - -@pytest.mark.asyncio -async def test_check_selenium_server_exception(hass: HomeAssistant): - """Test exception handling in check_selenium_server.""" - with patch( - "aiohttp.ClientSession.get", - side_effect=Exception("Connection error"), - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - result = await flow.check_selenium_server() - # Expected result is that all URLs are marked as not accessible - expected_result = [ - ("http://localhost:4444", False), - ("http://selenium:4444", False), - ] - assert result == expected_result - - -@pytest.mark.asyncio -async def test_get_councils_json_exception(hass: HomeAssistant): - """Test exception handling in get_councils_json.""" - with patch( - "aiohttp.ClientSession.get", - side_effect=Exception("Network error"), - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - result = await flow.get_councils_json() - assert result == {} - - -@pytest.mark.asyncio -async def test_async_step_user_council_data_unavailable(hass: HomeAssistant): - """Test async_step_user when council data is unavailable.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=None, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - result = await 
flow.async_step_user(user_input={}) - - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "Council Data Unavailable" - - -@pytest.mark.asyncio -async def test_async_step_council_invalid_icon_color_mapping(hass: HomeAssistant): - """Test async_step_council with invalid JSON in icon_color_mapping.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - flow.data = { - "name": "Test Name", - "council": "CouncilWithUPRN", - } - flow.councils_data = MOCK_COUNCILS_DATA - - user_input = { - "uprn": "1234567890", - "icon_color_mapping": "invalid json", - "timeout": 60, - } - - result = await flow.async_step_council(user_input=user_input) - - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "council" - assert result["errors"] == {"icon_color_mapping": "Invalid JSON format."} - - -@pytest.mark.asyncio -async def test_async_step_reconfigure_entry_none(hass: HomeAssistant): - """Test async_step_reconfigure when config entry is None.""" - flow = UkBinCollectionConfigFlow() - flow.hass = hass - flow.context = {"entry_id": "non_existent_entry_id"} - - # Mock async_get_entry to return None - flow.hass.config_entries.async_get_entry = MagicMock(return_value=None) - - result = await flow.async_step_reconfigure() - - assert result["type"] == data_entry_flow.FlowResultType.ABORT - assert result["reason"] == "Reconfigure Failed" - - -@pytest.mark.asyncio -async def test_async_step_reconfigure_confirm_user_input_none(hass: HomeAssistant): - """Test async_step_reconfigure_confirm when user_input is None.""" - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - # Create a mock config entry - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - "name": "Test Name", - "council": "CouncilWithUPRN", - "uprn": "1234567890", - "timeout": 60, - }, - ) 
- config_entry.add_to_hass(hass) - - flow.config_entry = config_entry - flow.context = {"entry_id": config_entry.entry_id} - flow.councils_data = MOCK_COUNCILS_DATA - - result = await flow.async_step_reconfigure_confirm(user_input=None) - - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "reconfigure_confirm" - - -@pytest.mark.asyncio -async def test_async_step_council_missing_council_key(hass: HomeAssistant): - """Test async_step_council when council_key is missing in councils_data.""" - flow = UkBinCollectionConfigFlow() - flow.hass = hass - flow.data = { - "name": "Test Name", - "council": "NonExistentCouncil", - } - flow.councils_data = MOCK_COUNCILS_DATA - - result = await flow.async_step_council(user_input=None) - - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "council" - - -@pytest.mark.asyncio -async def test_check_chromium_installed_exception(hass: HomeAssistant): - """Test exception handling in check_chromium_installed.""" - with patch( - "shutil.which", - side_effect=Exception("Filesystem error"), - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - result = await flow.check_chromium_installed() - assert result is False - - - -@pytest.mark.asyncio -async def test_async_step_reconfigure_confirm_invalid_json(hass: HomeAssistant): - """Test async_step_reconfigure_confirm with invalid JSON.""" - with patch( - "custom_components.uk_bin_collection.config_flow.UkBinCollectionConfigFlow.get_councils_json", - return_value=MOCK_COUNCILS_DATA, - ): - flow = UkBinCollectionConfigFlow() - flow.hass = hass - - # Create a mock config entry - config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - "name": "Existing Entry", - "council": "CouncilWithUPRN", - "uprn": "1234567890", - "timeout": 60, - }, - ) - config_entry.add_to_hass(hass) - - flow.config_entry = config_entry - flow.context = {"entry_id": config_entry.entry_id} - - # Set up mocks for async methods - 
hass.config_entries.async_reload = AsyncMock() - hass.config_entries.async_update_entry = MagicMock() - - user_input = { - "name": "Updated Entry", - "council": "Council with UPRN", - "icon_color_mapping": "invalid json", - "uprn": "0987654321", - "timeout": 120, - } - - result = await flow.async_step_reconfigure_confirm(user_input=user_input) - - assert result["type"] == data_entry_flow.RESULT_TYPE_FORM - assert result["step_id"] == "reconfigure_confirm" - assert result["errors"] == {"icon_color_mapping": "Invalid JSON format."} - diff --git a/custom_components/uk_bin_collection/tests/test_sensor.py b/custom_components/uk_bin_collection/tests/test_sensor.py deleted file mode 100644 index 9b59e4c47a..0000000000 --- a/custom_components/uk_bin_collection/tests/test_sensor.py +++ /dev/null @@ -1,1343 +0,0 @@ -import asyncio -import json -import logging -from datetime import date, datetime, timedelta -from json import JSONDecodeError -from unittest.mock import AsyncMock, MagicMock, patch, Mock - -import pytest -from freezegun import freeze_time -from homeassistant.config_entries import ConfigEntryState -from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.update_coordinator import UpdateFailed -from homeassistant.util import dt as dt_util - -from custom_components.uk_bin_collection.const import DOMAIN -from custom_components.uk_bin_collection.sensor import ( - UKBinCollectionAttributeSensor, - UKBinCollectionDataSensor, - UKBinCollectionRawJSONSensor, - async_setup_entry, -) - -from custom_components.uk_bin_collection import HouseholdBinCoordinator - -logging.basicConfig(level=logging.DEBUG) - -from .common_utils import MockConfigEntry - -pytest_plugins = ["freezegun"] - -# Mock Data -MOCK_BIN_COLLECTION_DATA = { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - {"type": "Recycling", "collectionDate": "16/10/2023"}, - {"type": "Garden Waste", "collectionDate": "17/10/2023"}, - ] -} - -MOCK_PROCESSED_DATA = { - 
"General Waste": datetime.strptime("15/10/2023", "%d/%m/%Y").date(), - "Recycling": datetime.strptime("16/10/2023", "%d/%m/%Y").date(), - "Garden Waste": datetime.strptime("17/10/2023", "%d/%m/%Y").date(), -} - - -@pytest.fixture -def mock_config_entry(): - """Create a mock ConfigEntry.""" - return MockConfigEntry( - domain=DOMAIN, - title="Test Entry", - data={ - "name": "Test Name", - "council": "Test Council", - "url": "https://example.com", - "timeout": 60, - "icon_color_mapping": {}, - }, - entry_id="test", - unique_id="test_unique_id", - ) - - -# Tests -def test_process_bin_data(freezer): - """Test processing of bin collection data.""" - freezer.move_to("2023-10-14") - processed_data = HouseholdBinCoordinator.process_bin_data(MOCK_BIN_COLLECTION_DATA) - # Convert dates to strings for comparison - processed_data_str = {k: v.strftime("%Y-%m-%d") for k, v in processed_data.items()} - expected_data_str = { - k: v.strftime("%Y-%m-%d") for k, v in MOCK_PROCESSED_DATA.items() - } - assert processed_data_str == expected_data_str - - -def test_process_bin_data_empty(): - """Test processing when data is empty.""" - processed_data = HouseholdBinCoordinator.process_bin_data({"bins": []}) - assert processed_data == {} - - -def test_process_bin_data_past_dates(freezer): - """Test processing when all dates are in the past.""" - freezer.move_to("2023-10-14") - past_date = (datetime(2023, 10, 14) - timedelta(days=1)).strftime("%d/%m/%Y") - data = { - "bins": [ - {"type": "General Waste", "collectionDate": past_date}, - ] - } - processed_data = HouseholdBinCoordinator.process_bin_data(data) - assert processed_data == {} # No future dates - - -def test_process_bin_data_duplicate_bin_types(freezer): - """Test processing when duplicate bin types are present.""" - freezer.move_to("2023-10-14") - data = { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - {"type": "General Waste", "collectionDate": "16/10/2023"}, # Later date - ] - } - expected = { - 
"General Waste": date(2023, 10, 15), # Should take the earliest future date - } - processed_data = HouseholdBinCoordinator.process_bin_data(data) - assert processed_data == expected - - -def test_unique_id_uniqueness(): - """Test that each sensor has a unique ID.""" - coordinator = MagicMock() - coordinator.name = "Test Name" - coordinator.data = MOCK_PROCESSED_DATA - - sensor1 = UKBinCollectionDataSensor( - coordinator, "General Waste", "test_general_waste", {} - ) - sensor2 = UKBinCollectionDataSensor(coordinator, "Recycling", "test_recycling", {}) - - assert sensor1.unique_id == "test_general_waste" - assert sensor2.unique_id == "test_recycling" - assert sensor1.unique_id != sensor2.unique_id - - -@freeze_time("2023-10-14") -@pytest.mark.asyncio -async def test_async_setup_entry(hass, mock_config_entry): - """Test setting up the sensor platform.""" - hass.data = {} - async_add_entities = Mock() - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(MOCK_BIN_COLLECTION_DATA) - - with patch.object( - hass, - "async_add_executor_job", - new_callable=AsyncMock, - return_value=mock_app_instance.run.return_value, - ): - await async_setup_entry(hass, mock_config_entry, async_add_entities) - - # Verify async_add_entities was called - assert async_add_entities.call_count == 1 - - # Retrieve the list of entities that were added - entities = async_add_entities.call_args[0][0] - - # Calculate the expected number of entities - expected_entity_count = len(MOCK_PROCESSED_DATA) * (5 + 1) + 1 - assert ( - len(entities) == expected_entity_count - ), f"Expected {expected_entity_count} entities, got {len(entities)}" - - # Verify data was set in coordinator - coordinator = hass.data[DOMAIN][mock_config_entry.entry_id] - assert coordinator.data == MOCK_PROCESSED_DATA - -@freeze_time("2023-10-14") -@pytest.mark.asyncio -async def 
test_coordinator_fetch(hass): - """Test the data fetch by the coordinator.""" - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(MOCK_BIN_COLLECTION_DATA) - - with patch.object( - hass, - "async_add_executor_job", - new_callable=AsyncMock, - return_value=mock_app_instance.run.return_value, - ): - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_refresh() - - assert ( - coordinator.data == MOCK_PROCESSED_DATA - ), "Coordinator data does not match expected values." - assert ( - coordinator.last_update_success is True - ), "Coordinator update was not successful." - - -@pytest.mark.asyncio -async def test_bin_sensor(hass, mock_config_entry): - """Test the main bin sensor.""" - from freezegun import freeze_time - - hass.data = {} - - with freeze_time("2023-10-14"): - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(MOCK_BIN_COLLECTION_DATA) - - with patch.object( - hass, - "async_add_executor_job", - return_value=mock_app_instance.run.return_value, - ): - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_config_entry_first_refresh() - - sensor = UKBinCollectionDataSensor( - coordinator, "General Waste", "test_general_waste", {} - ) - - assert sensor.name == "Test Name General Waste" - assert sensor.unique_id == "test_general_waste" - assert sensor.state == "Tomorrow" - assert sensor.icon == "mdi:trash-can" - assert sensor.extra_state_attributes == { - "colour": "black", - "next_collection": "15/10/2023", - "days": 1, - } - - -@freeze_time("2023-10-14") -@pytest.mark.asyncio -async def test_raw_json_sensor(hass, mock_config_entry): - """Test the raw JSON 
sensor.""" - hass.data = {} - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(MOCK_BIN_COLLECTION_DATA) - - with patch.object( - hass, - "async_add_executor_job", - new_callable=AsyncMock, - return_value=mock_app_instance.run.return_value, - ): - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_refresh() - - sensor = UKBinCollectionRawJSONSensor(coordinator, "test_raw_json", "Test Name") - - expected_state = json.dumps( - {k: v.strftime("%d/%m/%Y") for k, v in MOCK_PROCESSED_DATA.items()} - ) - - assert sensor.name == "Test Name Raw JSON" - assert sensor.unique_id == "test_raw_json" - assert sensor.state == expected_state - assert sensor.extra_state_attributes == {"raw_data": MOCK_PROCESSED_DATA} - - -@pytest.mark.asyncio -async def test_bin_sensor_custom_icon_color(hass, mock_config_entry): - """Test bin sensor with custom icon and color.""" - icon_color_mapping = {"General Waste": {"icon": "mdi:delete", "color": "green"}} - - # Initialize hass.data - hass.data = {} - - # Patch UKBinCollectionApp - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - # Mock run method to return JSON data for testing - mock_app_instance.run.return_value = json.dumps(MOCK_BIN_COLLECTION_DATA) - - # Mock async_add_executor_job correctly - with patch.object( - hass, - "async_add_executor_job", - new=AsyncMock(return_value=mock_app_instance.run.return_value), - ): - # Create the coordinator - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - # Perform the first refresh - await coordinator.async_config_entry_first_refresh() - - # Create a bin sensor with custom icon and color mapping - sensor = UKBinCollectionDataSensor( - coordinator, 
"General Waste", "test_general_waste", icon_color_mapping - ) - - # Access properties - assert sensor.icon == "mdi:delete" - assert sensor.extra_state_attributes["colour"] == "green" - - -@pytest.mark.asyncio -async def test_bin_sensor_today_collection(hass, freezer, mock_config_entry): - """Test bin sensor when collection is today.""" - freezer.move_to("2023-10-14") - today_date = dt_util.now().strftime("%d/%m/%Y") - data = { - "bins": [ - {"type": "General Waste", "collectionDate": today_date}, - ] - } - - # Initialize hass.data - hass.data = {} - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - # Mock run method to return JSON data for testing - mock_app_instance.run.return_value = json.dumps(data) - - # Mock async_add_executor_job correctly - with patch.object( - hass, - "async_add_executor_job", - new=AsyncMock(return_value=mock_app_instance.run.return_value), - ): - # Create the coordinator - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - # Perform the first refresh - await coordinator.async_config_entry_first_refresh() - - # Create a bin sensor - sensor = UKBinCollectionDataSensor( - coordinator, "General Waste", "test_general_waste", {} - ) - - # Access properties - assert sensor.state == "Today" - - -@pytest.mark.asyncio -async def test_bin_sensor_tomorrow_collection(hass, freezer, mock_config_entry): - """Test bin sensor when collection is tomorrow.""" - freezer.move_to("2023-10-14") - tomorrow_date = (dt_util.now() + timedelta(days=1)).strftime("%d/%m/%Y") - data = { - "bins": [ - {"type": "General Waste", "collectionDate": tomorrow_date}, - ] - } - - # Initialize hass.data - hass.data = {} - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - # Mock run method to return JSON data for testing - 
mock_app_instance.run.return_value = json.dumps(data) - - # Mock async_add_executor_job correctly - with patch.object( - hass, - "async_add_executor_job", - new=AsyncMock(return_value=mock_app_instance.run.return_value), - ): - # Create the coordinator - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - # Perform the first refresh - await coordinator.async_config_entry_first_refresh() - - # Create a bin sensor - sensor = UKBinCollectionDataSensor( - coordinator, "General Waste", "test_general_waste", {} - ) - - # Access properties - assert sensor.state == "Tomorrow" - - -@pytest.mark.asyncio -async def test_bin_sensor_partial_custom_icon_color(hass, mock_config_entry): - """Test bin sensor with partial custom icon and color mappings.""" - icon_color_mapping = {"General Waste": {"icon": "mdi:delete", "color": "green"}} - - # Modify json.dumps(MOCK_BIN_COLLECTION_DATA) to include another bin type without custom mapping - custom_data = { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - {"type": "Recycling", "collectionDate": "16/10/2023"}, - ] - } - - # Initialize hass.data - hass.data = {} - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(custom_data) - - # Mock async_add_executor_job correctly - with patch.object( - hass, - "async_add_executor_job", - new=AsyncMock(return_value=mock_app_instance.run.return_value), - ): - # Create the coordinator - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - # Perform the first refresh - await coordinator.async_config_entry_first_refresh() - - # Create sensors for both bin types - sensor_general = UKBinCollectionDataSensor( - coordinator, "General Waste", "test_general_waste", icon_color_mapping - ) - sensor_recycling = UKBinCollectionDataSensor( - coordinator, 
"Recycling", "test_recycling", icon_color_mapping - ) - - # Check custom mapping for General Waste - assert sensor_general.icon == "mdi:delete" - assert sensor_general.extra_state_attributes["colour"] == "green" - - # Check default mapping for Recycling - assert sensor_recycling.icon == "mdi:recycle" - assert sensor_recycling.extra_state_attributes["colour"] == "black" - - -def test_unique_id_uniqueness(hass, mock_config_entry): - """Test that each sensor has a unique ID.""" - coordinator = MagicMock() - coordinator.name = "Test Name" - coordinator.data = MOCK_PROCESSED_DATA - - sensor1 = UKBinCollectionDataSensor( - coordinator, "General Waste", "test_general_waste", {} - ) - sensor2 = UKBinCollectionDataSensor(coordinator, "Recycling", "test_recycling", {}) - - assert sensor1.unique_id == "test_general_waste" - assert sensor2.unique_id == "test_recycling" - assert sensor1.unique_id != sensor2.unique_id - - -@pytest.fixture -def mock_dt_now_different_timezone(): - """Mock datetime.now with a different timezone.""" - with patch( - "homeassistant.util.dt.now", - return_value=datetime(2023, 10, 14, 12, 0, tzinfo=dt_util.UTC), - ): - yield - - -async def test_raw_json_sensor_invalid_data(hass, mock_config_entry): - """Test raw JSON sensor with invalid data.""" - invalid_data = "Invalid JSON String" - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = invalid_data # Not a valid JSON - - from custom_components.uk_bin_collection.sensor import HouseholdBinCoordinator - - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - # Attempt to refresh coordinator, which should NOT raise UpdateFailed - await coordinator.async_refresh() - - # Verify that last_update_success is False - assert coordinator.last_update_success is False - - # Create the raw JSON sensor - sensor = 
UKBinCollectionRawJSONSensor(coordinator, "test_raw_json", "Test Name") - - # Since data fetch failed, sensor.state should reflect the failure - assert sensor.state == json.dumps({}) - assert sensor.extra_state_attributes == {"raw_data": {}} - assert sensor.available is False - - -def test_sensor_device_info(hass, mock_config_entry): - """Test that sensors report correct device information.""" - coordinator = MagicMock() - coordinator.name = "Test Name" - coordinator.data = MOCK_PROCESSED_DATA - - sensor = UKBinCollectionDataSensor( - coordinator, "General Waste", "test_general_waste", {} - ) - - expected_device_info = { - "identifiers": {(DOMAIN, "test_general_waste")}, - "name": "Test Name General Waste", - "manufacturer": "UK Bin Collection", - "model": "Bin Sensor", - "sw_version": "1.0", - } - assert sensor.device_info == expected_device_info - - -def process_bin_data_duplicate_bin_types(freezer): - """Test processing when duplicate bin types are present.""" - freezer.move_to("2023-10-14") - data = { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - {"type": "General Waste", "collectionDate": "16/10/2023"}, # Later date - ] - } - expected = { - "General Waste": "15/10/2023", # Should take the earliest future date - } - processed_data = HouseholdBinCoordinator.process_bin_data(data) - assert processed_data == expected - - -@pytest.mark.asyncio -async def test_coordinator_timeout_error(hass, mock_config_entry): - """Test coordinator handles timeout errors correctly.""" - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - # Simulate run raising TimeoutError - mock_app_instance.run.side_effect = asyncio.TimeoutError("Request timed out") - - # Mock async_add_executor_job to raise TimeoutError - hass.async_add_executor_job = AsyncMock( - side_effect=mock_app_instance.run.side_effect - ) - - coordinator = HouseholdBinCoordinator( - hass, 
mock_app_instance, "Test Name", timeout=1 - ) - - # Expect ConfigEntryNotReady instead of UpdateFailed - with pytest.raises(ConfigEntryNotReady) as exc_info: - await coordinator.async_config_entry_first_refresh() - - assert "Timeout while updating data" in str(exc_info.value) - - -@pytest.mark.asyncio -async def test_coordinator_json_decode_error(hass, mock_config_entry): - """Test coordinator handles JSON decode errors correctly.""" - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - # Simulate run returning invalid JSON - mock_app_instance.run.return_value = "Invalid JSON String" - - # Mock async_add_executor_job to raise JSONDecodeError - def side_effect(*args, **kwargs): - raise JSONDecodeError("Expecting value", "Invalid JSON String", 0) - - hass.async_add_executor_job = AsyncMock(side_effect=side_effect) - - # Initialize hass.data - hass.data = {} - - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - # Expect ConfigEntryNotReady instead of UpdateFailed - with pytest.raises(ConfigEntryNotReady) as exc_info: - await coordinator.async_config_entry_first_refresh() - - assert "JSON decode error" in str(exc_info.value) - - -@pytest.mark.asyncio -async def test_coordinator_general_exception(hass, mock_config_entry): - """Test coordinator handles general exceptions correctly.""" - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - # Simulate run raising a general exception - mock_app_instance.run.side_effect = Exception("General error") - - # Mock async_add_executor_job to raise the exception - hass.async_add_executor_job = AsyncMock( - side_effect=mock_app_instance.run.side_effect - ) - - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - # Expect ConfigEntryNotReady instead of UpdateFailed - 
with pytest.raises(ConfigEntryNotReady) as exc_info: - await coordinator.async_config_entry_first_refresh() - - assert "Unexpected error" in str(exc_info.value) - - -def process_bin_data_duplicate_bin_types(freezer): - """Test processing when duplicate bin types are present with different dates.""" - freezer.move_to("2023-10-14") - data = { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - {"type": "General Waste", "collectionDate": "14/10/2023"}, # Earlier date - ] - } - expected = { - "General Waste": "14/10/2023", # Should take the earliest future date - } - processed_data = HouseholdBinCoordinator.process_bin_data(data) - assert processed_data == expected - - -def process_bin_data_past_dates(freezer): - """Test processing when all dates are in the past.""" - freezer.move_to("2023-10-14") - past_date = (dt_util.now() - timedelta(days=1)).strftime("%d/%m/%Y") - data = { - "bins": [ - {"type": "General Waste", "collectionDate": past_date}, - {"type": "Recycling", "collectionDate": past_date}, - ] - } - processed_data = HouseholdBinCoordinator.process_bin_data(data) - assert processed_data == {} # No future dates should be included - - -def process_bin_data_missing_fields(freezer): - """Test processing when some bins are missing required fields.""" - freezer.move_to("2023-10-14") - data = { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - {"collectionDate": "16/10/2023"}, # Missing 'type' - {"type": "Recycling"}, # Missing 'collectionDate' - ] - } - expected = { - "General Waste": "15/10/2023", - } - processed_data = HouseholdBinCoordinator.process_bin_data(data) - assert processed_data == expected - - -def process_bin_data_invalid_date_format(freezer): - """Test processing when bins have invalid date formats.""" - freezer.move_to("2023-10-14") - data = { - "bins": [ - { - "type": "General Waste", - "collectionDate": "2023-10-15", - }, # Incorrect format - {"type": "Recycling", "collectionDate": "16/13/2023"}, # 
Invalid month - ] - } - processed_data = HouseholdBinCoordinator.process_bin_data(data) - assert processed_data == {} # Both entries should be skipped due to invalid dates - - -@pytest.mark.asyncio -async def test_bin_sensor_state_today(hass, mock_config_entry, freezer): - """Test bin sensor when collection is today.""" - freezer.move_to("2023-10-14") - today_date = dt_util.now().strftime("%d/%m/%Y") - data = { - "bins": [ - {"type": "General Waste", "collectionDate": today_date}, - ] - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(data) - - # Mock async_add_executor_job to return the run method's return value - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_instance.run.return_value - ) - - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_config_entry_first_refresh() - - sensor = UKBinCollectionDataSensor( - coordinator, "General Waste", "test_general_waste", {} - ) - - assert sensor.state == "Today" - assert sensor.available is True - assert sensor.extra_state_attributes["days"] == 0 - - -@pytest.mark.asyncio -async def test_bin_sensor_state_tomorrow(hass, mock_config_entry, freezer): - """Test bin sensor when collection is tomorrow.""" - freezer.move_to("2023-10-14") - tomorrow_date = (dt_util.now() + timedelta(days=1)).strftime("%d/%m/%Y") - data = { - "bins": [ - {"type": "Recycling", "collectionDate": tomorrow_date}, - ] - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(data) - - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_instance.run.return_value - ) - - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await 
coordinator.async_config_entry_first_refresh() - - sensor = UKBinCollectionDataSensor(coordinator, "Recycling", "test_recycling", {}) - - assert sensor.state == "Tomorrow" - assert sensor.available is True - assert sensor.extra_state_attributes["days"] == 1 - - -@pytest.mark.asyncio -async def test_bin_sensor_state_in_days(hass, mock_config_entry, freezer): - """Test bin sensor when collection is in multiple days.""" - freezer.move_to("2023-10-14") - future_date = (dt_util.now() + timedelta(days=5)).strftime("%d/%m/%Y") - data = { - "bins": [ - {"type": "Garden Waste", "collectionDate": future_date}, - ] - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(data) - - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_instance.run.return_value - ) - - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_config_entry_first_refresh() - - sensor = UKBinCollectionDataSensor( - coordinator, "Garden Waste", "test_garden_waste", {} - ) - - assert sensor.state == "In 5 days" - assert sensor.available is True - assert sensor.extra_state_attributes["days"] == 5 - - -@pytest.mark.asyncio -async def test_bin_sensor_missing_data(hass, mock_config_entry): - """Test bin sensor when bin data is missing.""" - data = { - "bins": [ - # No bins provided - ] - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(data) - - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_instance.run.return_value - ) - - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_config_entry_first_refresh() - - sensor = UKBinCollectionDataSensor( - coordinator, 
"Non-Existent Bin", "test_non_existent_bin", {} - ) - - assert sensor.state == "Unknown" - assert sensor.available is False - assert sensor.extra_state_attributes["days"] is None - assert sensor.extra_state_attributes["next_collection"] is None - - -@freeze_time("2023-10-14") -@pytest.mark.asyncio -async def test_raw_json_sensor_invalid_data(hass, mock_config_entry): - """Test raw JSON sensor with invalid data.""" - invalid_data = "Invalid JSON String" - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = invalid_data - - def side_effect(*args, **kwargs): - raise JSONDecodeError("Expecting value", invalid_data, 0) - - with patch.object(hass, "async_add_executor_job", side_effect=side_effect): - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_refresh() - - assert not coordinator.last_update_success - - raw_json_sensor = UKBinCollectionRawJSONSensor( - coordinator, "test_raw_json", "Test Name" - ) - - assert raw_json_sensor.state == "{}" - assert raw_json_sensor.extra_state_attributes["raw_data"] == {} - assert raw_json_sensor.available is False - - -@pytest.mark.asyncio -async def test_sensor_available_property(hass, mock_config_entry): - """Test that sensor's available property reflects its state.""" - # Case 1: State is a valid string - data_valid = { - "bins": [ - {"type": "Recycling", "collectionDate": "16/10/2023"}, - ] - } - processed_data_valid = { - "Recycling": datetime.strptime("16/10/2023", "%d/%m/%Y").date(), - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app_valid: - mock_app_valid_instance = mock_app_valid.return_value - mock_app_valid_instance.run.return_value = json.dumps(data_valid) - - with patch.object( - hass, - "async_add_executor_job", - return_value=mock_app_valid_instance.run.return_value, - ): 
- coordinator_valid = HouseholdBinCoordinator( - hass, mock_app_valid_instance, "Test Name", timeout=60 - ) - - await coordinator_valid.async_refresh() - - sensor_valid = UKBinCollectionDataSensor( - coordinator_valid, "Recycling", "test_recycling_available", {} - ) - - assert sensor_valid.available is True - - # Case 2: State is "Unknown" - data_unknown = {"bins": []} - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app_unknown: - mock_app_unknown_instance = mock_app_unknown.return_value - mock_app_unknown_instance.run.return_value = json.dumps(data_unknown) - - with patch.object( - hass, - "async_add_executor_job", - return_value=mock_app_unknown_instance.run.return_value, - ): - coordinator_unknown = HouseholdBinCoordinator( - hass, mock_app_unknown_instance, "Test Name", timeout=60 - ) - - await coordinator_unknown.async_refresh() - - sensor_unknown = UKBinCollectionDataSensor( - coordinator_unknown, "Garden Waste", "test_garden_waste_unavailable", {} - ) - - assert sensor_unknown.available is False - - -@pytest.mark.asyncio -async def test_data_sensor_missing_icon_or_color(hass, mock_config_entry): - """Test data sensor uses default icon and color when mappings are missing.""" - icon_color_mapping = { - "General Waste": {"icon": "mdi:trash-can"}, # Missing 'color' - "Recycling": {"color": "green"}, # Missing 'icon' - "Garden Waste": {}, # Missing both - } - - data = { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - {"type": "Recycling", "collectionDate": "16/10/2023"}, - {"type": "Garden Waste", "collectionDate": "17/10/2023"}, - ] - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(data) - - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_instance.run.return_value - ) - - coordinator = HouseholdBinCoordinator( - hass, 
mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_config_entry_first_refresh() - - # Test General Waste sensor (missing 'color') - general_waste_sensor = UKBinCollectionDataSensor( - coordinator, "General Waste", "test_general_waste", icon_color_mapping - ) - # Simulate coordinator update - coordinator.async_set_updated_data(coordinator.data) - - assert general_waste_sensor.icon == "mdi:trash-can" - assert general_waste_sensor._color == "black" # Default color - - # Test Recycling sensor (missing 'icon') - recycling_sensor = UKBinCollectionDataSensor( - coordinator, "Recycling", "test_recycling", icon_color_mapping - ) - coordinator.async_set_updated_data(coordinator.data) - - assert recycling_sensor.icon == "mdi:recycle" # Default icon based on bin type - assert recycling_sensor._color == "green" - - # Test Garden Waste sensor (missing both) - garden_waste_sensor = UKBinCollectionDataSensor( - coordinator, "Garden Waste", "test_garden_waste", icon_color_mapping - ) - coordinator.async_set_updated_data(coordinator.data) - - assert garden_waste_sensor.icon == "mdi:trash-can" # Default icon based on bin type - assert garden_waste_sensor._color == "black" - - -@pytest.mark.asyncio -async def test_attribute_sensor_with_complete_mappings(hass, mock_config_entry): - """Test attribute sensor correctly applies icon and color from mappings.""" - icon_color_mapping = {"General Waste": {"icon": "mdi:trash-can", "color": "grey"}} - data = { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - ] - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(data) - - # Mock async_add_executor_job to return valid JSON - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_instance.run.return_value - ) - - # Initialize hass.data - hass.data = {} - - coordinator = HouseholdBinCoordinator( - 
hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_config_entry_first_refresh() - - # Test Colour attribute sensor - colour_sensor = UKBinCollectionAttributeSensor( - coordinator, - "General Waste", - "test_general_waste_colour", - "Colour", - "test_general_waste", - icon_color_mapping, - ) - - # Simulate coordinator update - coordinator.async_set_updated_data(coordinator.data) - - assert colour_sensor.state == "grey" - assert colour_sensor.icon == "mdi:trash-can" - assert colour_sensor._color == "grey" - - -@pytest.mark.asyncio -async def test_data_sensor_color_property_missing_or_none(hass, mock_config_entry): - """Test sensor's color property when color is missing or None.""" - # Case 1: Missing color in icon_color_mapping - icon_color_mapping_missing_color = { - "General Waste": {"icon": "mdi:trash-can"}, - } - data = { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - ] - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app_missing_color: - mock_app_missing_color_instance = mock_app_missing_color.return_value - mock_app_missing_color_instance.run.return_value = json.dumps(data) - - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_missing_color_instance.run.return_value - ) - - coordinator = HouseholdBinCoordinator( - hass, - mock_app_missing_color_instance, - "Test Name", - timeout=60, - ) - - await coordinator.async_config_entry_first_refresh() - - sensor_missing_color = UKBinCollectionDataSensor( - coordinator, - "General Waste", - "test_general_waste_missing_color", - icon_color_mapping_missing_color, - ) - # Simulate coordinator update - coordinator.async_set_updated_data(coordinator.data) - - assert sensor_missing_color._color == "black" # Default color - - # Case 2: Color is None - icon_color_mapping_none_color = { - "Recycling": {"icon": "mdi:recycle", "color": None}, - } - - data_none_color = { - "bins": [ - {"type": "Recycling", 
"collectionDate": "16/10/2023"}, - ] - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app_none_color: - mock_app_none_color_instance = mock_app_none_color.return_value - mock_app_none_color_instance.run.return_value = json.dumps(data_none_color) - - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_none_color_instance.run.return_value - ) - - coordinator_none_color = HouseholdBinCoordinator( - hass, - mock_app_none_color_instance, - "Test Name", - timeout=60, - ) - - await coordinator_none_color.async_config_entry_first_refresh() - - sensor_none_color = UKBinCollectionDataSensor( - coordinator_none_color, - "Recycling", - "test_recycling_none_color", - icon_color_mapping_none_color, - ) - # Simulate coordinator update - coordinator_none_color.async_set_updated_data(coordinator_none_color.data) - - assert ( - sensor_none_color._color == "black" - ) # Should default to "black" if color is None - - -@freeze_time("2023-10-14") -@pytest.mark.asyncio -async def test_sensor_available_property(hass, mock_config_entry): - """Test that sensor's available property reflects its state.""" - # Case 1: State is a valid string - data_valid = { - "bins": [ - {"type": "Recycling", "collectionDate": "16/10/2023"}, - ] - } - processed_data_valid = { - "Recycling": datetime.strptime("16/10/2023", "%d/%m/%Y").date(), - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app_valid: - mock_app_valid_instance = mock_app_valid.return_value - mock_app_valid_instance.run.return_value = json.dumps(data_valid) - - async def mock_async_add_executor_job(func, *args, **kwargs): - return func(*args, **kwargs) - - with patch.object( - hass, - "async_add_executor_job", - side_effect=mock_async_add_executor_job, - ): - coordinator_valid = HouseholdBinCoordinator( - hass, mock_app_valid_instance, "Test Name", timeout=60 - ) - - await coordinator_valid.async_refresh() - - # Verify that 
coordinator.data contains the expected processed data - assert coordinator_valid.data == processed_data_valid - - sensor_valid = UKBinCollectionDataSensor( - coordinator_valid, "Recycling", "test_recycling_available", {} - ) - - assert sensor_valid.available is True - -@pytest.mark.asyncio -async def test_coordinator_empty_data(hass, mock_config_entry): - """Test coordinator handles empty data correctly.""" - empty_data = {"bins": []} - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(empty_data) - - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_instance.run.return_value - ) - - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_config_entry_first_refresh() - - assert coordinator.data == {} - assert coordinator.last_update_success is True - - -def test_coordinator_custom_update_interval(hass, mock_config_entry): - """Test that coordinator uses a custom update interval.""" - custom_interval = timedelta(hours=6) - coordinator = HouseholdBinCoordinator(hass, MagicMock(), "Test Name", timeout=60) - coordinator.update_interval = custom_interval - - assert coordinator.update_interval == custom_interval - - -@pytest.mark.asyncio -async def test_async_setup_entry_missing_required_fields(hass): - """Test setup with missing required configuration fields.""" - # Create a mock config entry missing 'name' - mock_config_entry = MockConfigEntry( - domain=DOMAIN, - data={ - # "name" is missing - "council": "Test Council", - "url": "https://example.com", - "timeout": 60, - "icon_color_mapping": {}, - }, - entry_id="test_missing_name", - unique_id="test_missing_name_unique_id", - ) - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app, patch( - "homeassistant.loader.async_get_integration", 
return_value=MagicMock() - ): - mock_app_instance = mock_app.return_value - # Simulate run returning valid JSON - mock_app_instance.run.return_value = json.dumps( - { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - ] - } - ) - - # Mock async_add_executor_job to return valid JSON - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_instance.run.return_value - ) - - async_add_entities = MagicMock() - - # Initialize hass.data - hass.data = {} - - # Add the config entry to hass - mock_config_entry.add_to_hass(hass) - - # Set up the entry and expect ConfigEntryNotReady due to missing 'name' - with pytest.raises(ConfigEntryNotReady) as exc_info: - await async_setup_entry(hass, mock_config_entry, async_add_entities) - - # Adjust this assertion based on how your component handles missing 'name' - assert "Missing 'name' in configuration." in str(exc_info.value) - - # Verify that entities were not added due to missing 'name' - async_add_entities.assert_not_called() - - -@pytest.mark.asyncio -async def test_data_sensor_device_info(hass, mock_config_entry): - """Test that data sensor reports correct device information.""" - data = { - "bins": [ - {"type": "General Waste", "collectionDate": "15/10/2023"}, - ] - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(data) - - icon_color_mapping = {} - - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_instance.run.return_value - ) - - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_config_entry_first_refresh() - - sensor = UKBinCollectionDataSensor( - coordinator, - "General Waste", - "test_general_waste_device_info", - icon_color_mapping, - ) - - expected_device_info = { - "identifiers": {(DOMAIN, "test_general_waste_device_info")}, - "name": "Test Name General 
Waste", - "manufacturer": "UK Bin Collection", - "model": "Bin Sensor", - "sw_version": "1.0", - } - assert sensor.device_info == expected_device_info - - -@pytest.mark.asyncio -async def test_data_sensor_default_icon(hass, mock_config_entry): - """Test data sensor uses default icon based on bin type when no mapping is provided.""" - data = { - "bins": [ - {"type": "Unknown Bin", "collectionDate": "20/10/2023"}, - ] - } - - with patch( - "custom_components.uk_bin_collection.sensor.UKBinCollectionApp" - ) as mock_app: - mock_app_instance = mock_app.return_value - mock_app_instance.run.return_value = json.dumps(data) - - # No icon_color_mapping provided - icon_color_mapping = {} - - hass.async_add_executor_job = AsyncMock( - return_value=mock_app_instance.run.return_value - ) - - coordinator = HouseholdBinCoordinator( - hass, mock_app_instance, "Test Name", timeout=60 - ) - - await coordinator.async_config_entry_first_refresh() - - sensor = UKBinCollectionDataSensor( - coordinator, "Unknown Bin", "test_unknown_bin", icon_color_mapping - ) - - assert sensor.icon == "mdi:delete" - assert sensor._color == "black" - - -def test_coordinator_update_interval(hass, mock_config_entry): - """Test that coordinator uses the correct update interval.""" - coordinator = HouseholdBinCoordinator(hass, MagicMock(), "Test Name", timeout=60) - assert coordinator.update_interval == timedelta(hours=12) - diff --git a/custom_components/uk_bin_collection/translations/cy.json b/custom_components/uk_bin_collection/translations/cy.json deleted file mode 100644 index 6fd13c4398..0000000000 --- a/custom_components/uk_bin_collection/translations/cy.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "title": "Data Casglu Biniau y DU", - "config": { - "step": { - "user": { - "title": "Dewiswch y cyngor", - "data": { - "name": "Enw'r lleoliad", - "council": "Cyngor", - "icon_color_mapping": "JSON i fapio Math y Bin ar gyfer Lliw ac Eicon gweler: https://github.com/robbrad/UKBinCollectionData" - }, - 
"description": "Gweler [yma](https://github.com/robbrad/UKBinCollectionData#requesting-your-council) os nad yw eich cyngor wedi'i restru" - }, - "council": { - "title": "Darparu manylion y cyngor", - "data": { - "url": "URL i nôl data casglu biniau", - "timeout": "Yr amser mewn eiliadau y dylai'r synhwyrydd aros am ddata", - "update_interval": "Amser mewn oriau rhwng diweddariadau", - "uprn": "UPRN (Rhif Cyfeirnod Eiddo Unigryw)", - "postcode": "Cod post y cyfeiriad", - "number": "Rhif tŷ y cyfeiriad", - "usrn": "USRN (Rhif Cyfeirnod Stryd Unigryw)", - "web_driver": "I redeg ar weinydd Selenium o bell, ychwanegwch URL y Weinydd Selenium", - "headless": "Rhedeg Selenium yn y modd heb ben (argymhellir)", - "local_browser": "Peidiwch â rhedeg ar weinydd Selenium o bell, defnyddiwch osodiad lleol o Chrome yn lle", - "submit": "Cyflwyno" - }, - "description": "Cyfeiriwch at gofnod [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) eich cyngor am fanylion ar beth i'w nodi.\n{selenium_message}" - }, - "reconfigure_confirm": { - "title": "Diweddaru manylion y cyngor", - "data": { - "url": "URL i nôl data casglu biniau", - "timeout": "Yr amser mewn eiliadau y dylai'r synhwyrydd aros am ddata", - "update_interval": "Amser mewn oriau rhwng diweddariadau", - "uprn": "UPRN (Rhif Cyfeirnod Eiddo Unigryw)", - "postcode": "Cod post y cyfeiriad", - "number": "Rhif tŷ y cyfeiriad", - "usrn": "USRN (Rhif Cyfeirnod Stryd Unigryw)", - "web_driver": "I redeg ar weinydd Selenium o bell, ychwanegwch URL y Weinydd Selenium", - "headless": "Rhedeg Selenium yn y modd heb ben (argymhellir)", - "local_browser": "Peidiwch â rhedeg ar weinydd Selenium o bell, defnyddiwch osodiad lleol o Chrome yn lle", - "icon_color_mapping": "JSON i fapio Math y Bin ar gyfer Lliw ac Eicon gweler: https://github.com/robbrad/UKBinCollectionData", - "submit": "Cyflwyno" - }, - "description": "Cyfeiriwch at gofnod [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) eich cyngor am 
fanylion ar beth i'w nodi." - } - }, - "error": { - "name": "Rhowch enw lleoliad os gwelwch yn dda", - "council": "Dewiswch gyngor os gwelwch yn dda", - "selenium_unavailable": "❌ Nid yw gweinydd Selenium ar gael. Sicrhewch ei fod yn rhedeg yn http://localhost:4444 neu http://selenium:4444. [Canllaw Gosod](https://example.com/selenium-setup)", - "chromium_not_found": "❌ Nid yw porwr Chromium wedi'i osod. Gosodwch Chromium neu Google Chrome os gwelwch yn dda. [Canllaw Gosod](https://example.com/chromium-install)" - } - } -} diff --git a/custom_components/uk_bin_collection/translations/en.json b/custom_components/uk_bin_collection/translations/en.json deleted file mode 100644 index 90e72fb99b..0000000000 --- a/custom_components/uk_bin_collection/translations/en.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "title": "UK Bin Collection Data", - "config": { - "step": { - "user": { - "title": "Select the council", - "data": { - "name": "Location name", - "council": "Council", - "icon_color_mapping": "JSON to map Bin Type for Colour and Icon see: https://github.com/robbrad/UKBinCollectionData" - }, - "description": "Please see [here](https://github.com/robbrad/UKBinCollectionData#requesting-your-council) if your council isn't listed" - }, - "council": { - "title": "Provide council details", - "data": { - "url": "URL to fetch bin collection data", - "timeout": "The time in seconds for how long the sensor should wait for data", - "update_interval": "Time in hours between updates", - "uprn": "UPRN (Unique Property Reference Number)", - "postcode": "Postcode of the address", - "number": "House number of the address", - "usrn": "USRN (Unique Street Reference Number)", - "web_driver": "To run on a remote Selenium Server add the Selenium Server URL", - "headless": "Run Selenium in headless mode (recommended)", - "local_browser": "Don't run on remote Selenium server, use local install of Chrome instead", - "submit": "Submit" - }, - "description": "Please refer to your council's 
[wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) entry for details on what to enter.\n{selenium_message}" - }, - "reconfigure_confirm": { - "title": "Update council details", - "data": { - "url": "URL to fetch bin collection data", - "timeout": "The time in seconds for how long the sensor should wait for data", - "update_interval": "Time in hours between updates", - "uprn": "UPRN (Unique Property Reference Number)", - "postcode": "Postcode of the address", - "number": "House number of the address", - "usrn": "USRN (Unique Street Reference Number)", - "web_driver": "To run on a remote Selenium Server add the Selenium Server URL", - "headless": "Run Selenium in headless mode (recommended)", - "local_browser": "Don't run on remote Selenium server, use local install of Chrome instead", - "icon_color_mapping": "JSON to map Bin Type for Colour and Icon see: https://github.com/robbrad/UKBinCollectionData", - "submit": "Submit" - }, - "description": "Please refer to your council's [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) entry for details on what to enter." - } - }, - "error": { - "name": "Please enter a location name", - "council": "Please select a council", - "selenium_unavailable": "❌ Selenium server is not accessible. Please ensure it is running at http://localhost:4444 or http://selenium:4444. [Setup Guide](https://example.com/selenium-setup)", - "chromium_not_found": "❌ Chromium browser is not installed. Please install Chromium or Google Chrome. 
[Installation Guide](https://example.com/chromium-install)" - } - } -} diff --git a/custom_components/uk_bin_collection/translations/ga.json b/custom_components/uk_bin_collection/translations/ga.json deleted file mode 100644 index cf7fb20204..0000000000 --- a/custom_components/uk_bin_collection/translations/ga.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "title": "Sonraí Bailithe Binn RA", - "config": { - "step": { - "user": { - "title": "Roghnaigh an chomhairle", - "data": { - "name": "Ainm Suíomh", - "council": "Comhairle", - "icon_color_mapping": "JSON chun Cineál Bin a mhapáil do Dath agus Deilbhín féach: https://github.com/robbrad/UKBinCollectionData" - }, - "description": "Féach [anseo](https://github.com/robbrad/UKBinCollectionData#requesting-your-council) mura bhfuil do chomhairle liostaithe" - }, - "council": { - "title": "Sonraí na comhairle a sholáthar", - "data": { - "url": "URL chun sonraí bailithe bin a fháil", - "timeout": "An t-am i soicindí don braiteoir fanacht le haghaidh sonraí", - "update_interval": "Ùine ann an uairean eadar ùrachaidhean", - "uprn": "UPRN (Uimhir Tagartha Aonair Maoine)", - "postcode": "Cód poist an seoladh", - "number": "Uimhir tí an seoladh", - "usrn": "USRN (Uimhir Tagartha Sráide Uathúil)", - "web_driver": "Chun rith ar Fhreastalaí Iargúlta Selenium cuir isteach URL an Fhreastalaí Selenium", - "headless": "Rith Selenium i mód gan cheann (molta)", - "local_browser": "Ná rith ar fhreastalaí iargúlta Selenium, úsáid suiteáil áitiúil de Chrome ina ionad", - "submit": "Cuir isteach" - }, - "description": "Féach ar iontráil [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) do chomhairle le haghaidh sonraí ar cad atá le cur isteach.\n{selenium_message}" - }, - "reconfigure_confirm": { - "title": "Nuashonraigh sonraí na comhairle", - "data": { - "url": "URL chun sonraí bailithe bin a fháil", - "timeout": "An t-am i soicindí don braiteoir fanacht le haghaidh sonraí", - "update_interval": "Ùine ann an uairean eadar 
ùrachaidhean", - "uprn": "UPRN (Uimhir Tagartha Aonair Maoine)", - "postcode": "Cód poist an seoladh", - "number": "Uimhir tí an seoladh", - "usrn": "USRN (Uimhir Tagartha Sráide Uathúil)", - "web_driver": "Chun rith ar Fhreastalaí Iargúlta Selenium cuir isteach URL an Fhreastalaí Selenium", - "headless": "Rith Selenium i mód gan cheann (molta)", - "local_browser": "Ná rith ar fhreastalaí iargúlta Selenium, úsáid suiteáil áitiúil de Chrome ina ionad", - "icon_color_mapping": "JSON chun Cineál Bin a mhapáil do Dath agus Deilbhín féach: https://github.com/robbrad/UKBinCollectionData", - "submit": "Cuir isteach" - }, - "description": "Féach ar iontráil [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) do chomhairle le haghaidh sonraí ar cad atá le cur isteach." - } - }, - "error": { - "name": "Cuir isteach ainm suíomh le do thoil", - "council": "Roghnaigh comhairle le do thoil", - "selenium_unavailable": "❌ Níl freastalaí Selenium inrochtana. Cinntigh go bhfuil sé ag rith ag http://localhost:4444 nó http://selenium:4444. [Treoir Socraithe](https://example.com/selenium-setup)", - "chromium_not_found": "❌ Níl brabhsálaí Chromium suiteáilte. Suiteáil Chromium nó Google Chrome le do thoil. 
[Treoir Suiteála](https://example.com/chromium-install)" - } - } -} diff --git a/custom_components/uk_bin_collection/translations/gd.json b/custom_components/uk_bin_collection/translations/gd.json deleted file mode 100644 index 2d1bb5dfb5..0000000000 --- a/custom_components/uk_bin_collection/translations/gd.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "title": "Dàta Cruinneachadh Biona RA", - "config": { - "step": { - "user": { - "title": "Tagh a’ chomhairle", - "data": { - "name": "Ainm Àite", - "council": "Comhairle", - "icon_color_mapping": "JSON gus Seòrsa Biona a mhapadh airson Dath agus Ìomhaigh faic: https://github.com/robbrad/UKBinCollectionData" - }, - "description": "Feuch an toir thu sùil [an seo](https://github.com/robbrad/UKBinCollectionData#requesting-your-council) mura h-eil do chomhairle air a liostadh" - }, - "council": { - "title": "Thoir seachad mion-fhiosrachadh na comhairle", - "data": { - "url": "URL gus dàta cruinneachadh biona fhaighinn", - "timeout": "An ùine ann an diogan airson cho fada bu chòir don sensor feitheamh airson dàta", - "update_interval": "Am i n-uaireanta idir nuashonruithe", - "uprn": "UPRN (Àireamh Iomraidh Seilbh Aonraic)", - "postcode": "Còd-puist an t-seòladh", - "number": "Àireamh an taighe den t-seòladh", - "usrn": "USRN (Àireamh Iomraidh Sràid Aonraic)", - "web_driver": "Gus ruith air Frithealaiche Selenium iomallach cuir a-steach an URL Frithealaiche Selenium", - "headless": "Ruith Selenium ann am modh gun cheann (air a mholadh)", - "local_browser": "Na ruith air frithealaiche Selenium iomallach, cleachd stàladh ionadail de Chrome an àite", - "submit": "Cuir a-steach" - }, - "description": "Feuch an toir thu sùil air inntrigeadh [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) na comhairle agad airson mion-fhiosrachadh air dè a chur a-steach.\n{selenium_message}" - }, - "reconfigure_confirm": { - "title": "Ùraich mion-fhiosrachadh na comhairle", - "data": { - "url": "URL gus dàta cruinneachadh biona 
fhaighinn", - "timeout": "An ùine ann an diogan airson cho fada bu chòir don sensor feitheamh airson dàta", - "update_interval": "Am i n-uaireanta idir nuashonruithe", - "uprn": "UPRN (Àireamh Iomraidh Seilbh Aonraic)", - "postcode": "Còd-puist an t-seòladh", - "number": "Àireamh an taighe den t-seòladh", - "usrn": "USRN (Àireamh Iomraidh Sràid Aonraic)", - "web_driver": "Gus ruith air Frithealaiche Selenium iomallach cuir a-steach an URL Frithealaiche Selenium", - "headless": "Ruith Selenium ann am modh gun cheann (air a mholadh)", - "local_browser": "Na ruith air frithealaiche Selenium iomallach, cleachd stàladh ionadail de Chrome an àite", - "icon_color_mapping": "JSON gus Seòrsa Biona a mhapadh airson Dath agus Ìomhaigh faic: https://github.com/robbrad/UKBinCollectionData", - "submit": "Cuir a-steach" - }, - "description": "Feuch an toir thu sùil air inntrigeadh [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) na comhairle agad airson mion-fhiosrachadh air dè a chur a-steach." - } - }, - "error": { - "name": "Cuir a-steach ainm àite mas e do thoil e", - "council": "Tagh comhairle mas e do thoil e", - "selenium_unavailable": "❌ Chan eil frithealaiche Selenium ruigsinneach. Dèan cinnteach gu bheil e a’ ruith aig http://localhost:4444 no http://selenium:4444. [Stiùireadh Stèidheachaidh](https://example.com/selenium-setup)", - "chromium_not_found": "❌ Chan eil brabhsair Chromium air a chuir a-steach. Stàlaich Chromium no Google Chrome mas e do thoil e. 
[Stiùireadh Stàlaidh](https://example.com/chromium-install)" - } - } -} diff --git a/custom_components/uk_bin_collection/translations/pt.json b/custom_components/uk_bin_collection/translations/pt.json deleted file mode 100644 index 610b2d9a04..0000000000 --- a/custom_components/uk_bin_collection/translations/pt.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "title": "Dados de Coleta de Lixo do Reino Unido", - "config": { - "step": { - "user": { - "title": "Selecione o conselho", - "data": { - "name": "Nome da localização", - "council": "Conselho", - "icon_color_mapping": "JSON para mapear Tipo de Lixo para Cor e Ícone veja: https://github.com/robbrad/UKBinCollectionData" - }, - "description": "Por favor, veja [aqui](https://github.com/robbrad/UKBinCollectionData#requesting-your-council) se o seu conselho não estiver listado" - }, - "council": { - "title": "Forneça os detalhes do conselho", - "data": { - "url": "URL para buscar dados de coleta de lixo", - "timeout": "O tempo em segundos que o sensor deve esperar por dados", - "update_interval": "Tempo em horas entre as atualizações", - "uprn": "UPRN (Número de Referência Único da Propriedade)", - "postcode": "Código postal do endereço", - "number": "Número da casa do endereço", - "usrn": "USRN (Número de Referência Único da Rua)", - "web_driver": "Para executar em um Servidor Selenium remoto, adicione o URL do Servidor Selenium", - "headless": "Execute o Selenium no modo sem cabeça (recomendado)", - "local_browser": "Não execute no servidor Selenium remoto, use a instalação local do Chrome em vez disso", - "submit": "Enviar" - }, - "description": "Por favor, consulte a entrada [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) do seu conselho para detalhes sobre o que inserir.\n{selenium_message}" - }, - "reconfigure_confirm": { - "title": "Atualizar detalhes do conselho", - "data": { - "url": "URL para buscar dados de coleta de lixo", - "timeout": "O tempo em segundos que o sensor deve esperar por 
dados", - "update_interval": "Tempo em horas entre as atualizações", - "uprn": "UPRN (Número de Referência Único da Propriedade)", - "postcode": "Código postal do endereço", - "number": "Número da casa do endereço", - "usrn": "USRN (Número de Referência Único da Rua)", - "web_driver": "Para executar em um Servidor Selenium remoto, adicione o URL do Servidor Selenium", - "headless": "Execute o Selenium no modo sem cabeça (recomendado)", - "local_browser": "Não execute no servidor Selenium remoto, use a instalação local do Chrome em vez disso", - "icon_color_mapping": "JSON para mapear Tipo de Lixo para Cor e Ícone veja: https://github.com/robbrad/UKBinCollectionData", - "submit": "Enviar" - }, - "description": "Por favor, consulte a entrada [wiki](https://github.com/robbrad/UKBinCollectionData/wiki/Councils) do seu conselho para detalhes sobre o que inserir." - } - }, - "error": { - "name": "Por favor, insira um nome de localização", - "council": "Por favor, selecione um conselho", - "selenium_unavailable": "❌ O servidor Selenium não está acessível. Por favor, certifique-se de que está em execução em http://localhost:4444 ou http://selenium:4444. [Guia de Configuração](https://example.com/selenium-setup)", - "chromium_not_found": "❌ O navegador Chromium não está instalado. Por favor, instale o Chromium ou o Google Chrome. 
[Guia de Instalação](https://example.com/chromium-install)" - } - } -} diff --git a/hacs.json b/hacs.json deleted file mode 100644 index 89fbf4fbb8..0000000000 --- a/hacs.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "UK Bin Collection Data", - "render_readme": true, - "country": ["GB"] -} \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 32947fda01..0000000000 --- a/pyproject.toml +++ /dev/null @@ -1,75 +0,0 @@ -[tool.poetry] -name = "uk_bin_collection" -version = "0.124.1" -description = "Python Lib to collect UK Bin Data" -readme = "README.md" -authors = ["Robert Bradley "] -packages = [ - { include = "uk_bin_collection", from = "." }, -] -include = ["uk_bin_collection"] - -[tool.pytest.ini_options] -pythonpath = [ - ".", "uk_bin_collection" -] - -[tool.flit.metadata] -module = "uk_bin_collection" - -[tool.poetry.urls] -issues = "https://github.com/robbrad/UKBinCollectionData/issues" - -[build-system] -requires = ["poetry-core>=1.2.0"] -build-backend = "poetry.core.masonry.api" - -[tool.poetry.dev-dependencies] -black = "*" -coverage = "*" -flake8 = "*" -jsonschema = "*" -pylint = "*" -pytest = "*" -setuptools = "*" -pytest-bdd = "*" -allure-pytest-bdd = "*" -pytest-xdist = {extras = ["psutil"], version = "*"} -pyhamcrest = "*" -tabulate = "^0.9.0" -pytest-asyncio = "^0.24.0" -pytest-freezer = "^0.4.8" -homeassistant = "^2023.10" - -[tool.poetry.scripts] -uk_bin_collection = "uk_bin_collection.uk_bin_collection.collect_data:run" - -[tool.dephell.main] -versioning = "semver" -from = {format = "poetrylock", path = "poetry.lock"} -envs = ["main"] -to = {format = "poetry", path = "pyproject.toml"} - -[tool.poetry.dependencies] -bs4 = "*" -python-dateutil = "*" -holidays = "*" -pandas = "*" -python = ">=3.12,<3.14" -requests = "*" -selenium = "*" -lxml = "*" -urllib3 = "*" -webdriver-manager = "^4.0.1" -tabulate = "^0.9.0" - -[tool.commitizen] -major_version_zero = true -version_provider = "poetry" 
-version_scheme = "semver" -version_files = [ - "custom_components/uk_bin_collection/manifest.json:version", - "custom_components/uk_bin_collection/manifest.json:requirements", - "custom_components/uk_bin_collection/config_flow.py:githubusercontent" -] - diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 3de6e92015..0000000000 --- a/pytest.ini +++ /dev/null @@ -1,8 +0,0 @@ -[pytest] -pythonpath = ./uk_bin_collection -asyncio_mode=auto -# Unsetting this will cause testing to fail with a key error for any VALID value -# Leaving it out allows pytest to run, but will generate a warning -asyncio_default_fixture_loop_scope=function -filterwarnings=ignore::DeprecationWarning -addopts = -vv \ No newline at end of file diff --git a/uk_bin_collection/README.rst b/uk_bin_collection/README.rst deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/uk_bin_collection/tests/council_feature_input_parity.py b/uk_bin_collection/tests/council_feature_input_parity.py deleted file mode 100644 index 0cd81b2902..0000000000 --- a/uk_bin_collection/tests/council_feature_input_parity.py +++ /dev/null @@ -1,109 +0,0 @@ -import json -import requests -import sys -from tabulate import tabulate -import base64 - - -def get_councils_from_files(repo, branch): - url = f"https://api.github.com/repos/{repo}/contents/uk_bin_collection/uk_bin_collection/councils?ref={branch}" - print(f"Fetching councils from files at URL: {url}") - response = requests.get(url, headers={"Accept": "application/vnd.github.v3+json"}) - - if response.status_code == 200: - try: - data = response.json() - if isinstance(data, list): - return [ - item["name"].replace(".py", "") - for item in data - if item["name"].endswith(".py") - ] - else: - print("Expected a list from the JSON response but got something else.") - raise ValueError( - "Expected a list from the JSON response but got something else." 
- ) - except json.JSONDecodeError as e: - print(f"JSON decoding error: {e}") - raise - else: - print(f"Failed to fetch councils from files: {response.content}") - return [] - - -def get_councils_from_json(repo, branch): - url = f"https://api.github.com/repos/{repo}/contents/uk_bin_collection/tests/input.json?ref={branch}" - print(f"Fetching councils from JSON at URL: {url}") - response = requests.get(url, headers={"Accept": "application/vnd.github.v3+json"}) - - if response.status_code == 200: - try: - content = response.json().get("content", "") - content_decoded = base64.b64decode(content).decode("utf-8") - data = json.loads(content_decoded) - return list(data.keys()) - except json.JSONDecodeError as e: - print(f"JSON decoding error: {e}") - raise - else: - print(f"Failed to fetch councils from JSON: {response.content}") - return [] - - -def compare_councils(councils1, councils2): - set1 = set(councils1) - set2 = set(councils2) - all_councils = set1 | set2 - all_council_data = {} - discrepancies_found = False - for council in all_councils: - in_files = council in set1 - in_json = council in set2 - discrepancies_count = [in_files, in_json].count(False) - all_council_data[council] = { - "in_files": in_files, - "in_json": in_json, - "discrepancies_count": discrepancies_count, - } - if discrepancies_count > 0: - discrepancies_found = True - return all_council_data, discrepancies_found - - -def main(repo="robbrad/UKBinCollectionData", branch="master"): - # Execute and print the comparison - print(f"Starting comparison for repo: {repo}, branch: {branch}") - file_councils = get_councils_from_files(repo, branch) - json_councils = get_councils_from_json(repo, branch) - - all_councils_data, discrepancies_found = compare_councils( - file_councils, json_councils - ) - - table_data = [] - headers = ["Council Name", "In Files", "In JSON", "Discrepancies"] - for council, presence in sorted( - all_councils_data.items(), key=lambda x: (x[1]["discrepancies_count"], x[0]) - ): - 
row = [ - council, - "✔" if presence["in_files"] else "✘", - "✔" if presence["in_json"] else "✘", - presence["discrepancies_count"], - ] - table_data.append(row) - - print(tabulate(table_data, headers=headers, tablefmt="grid")) - - if discrepancies_found: - print("Discrepancies found! Failing the workflow.") - sys.exit(1) - else: - print("No discrepancies found. Workflow successful.") - - -if __name__ == "__main__": - repo = sys.argv[1] if len(sys.argv) > 1 else "robbrad/UKBinCollectionData" - branch = sys.argv[2] if len(sys.argv) > 2 else "master" - main(repo, branch) diff --git a/uk_bin_collection/tests/features/environment.py b/uk_bin_collection/tests/features/environment.py deleted file mode 100644 index 5ad70e978b..0000000000 --- a/uk_bin_collection/tests/features/environment.py +++ /dev/null @@ -1,7 +0,0 @@ -from behave import use_step_matcher - -use_step_matcher("cfparse") - - -def before_all(context): - context.config.setup_logging() diff --git a/uk_bin_collection/tests/features/validate_council_outputs.feature b/uk_bin_collection/tests/features/validate_council_outputs.feature deleted file mode 100644 index bfef624167..0000000000 --- a/uk_bin_collection/tests/features/validate_council_outputs.feature +++ /dev/null @@ -1,7 +0,0 @@ -Feature: Test each council output matches expected results - - Scenario: Validate Council Output - Given the council - When we scrape the data from the council - Then the result is valid json - And the output should validate against the schema diff --git a/uk_bin_collection/tests/input.json b/uk_bin_collection/tests/input.json deleted file mode 100755 index f07df2850a..0000000000 --- a/uk_bin_collection/tests/input.json +++ /dev/null @@ -1,2130 +0,0 @@ -{ - "AberdeenshireCouncil": { - "url": "https://online.aberdeenshire.gov.uk", - "wiki_command_url_override": "https://online.aberdeenshire.gov.uk", - "uprn": "151176430", - "wiki_name": "Aberdeenshire Council", - "wiki_note": "You will need to use 
[FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "AberdeenCityCouncil": { - "url": "https://www.aberdeencity.gov.uk", - "uprn": "9051156186", - "wiki_name": "Aberdeen City Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "AdurAndWorthingCouncils": { - "url": "https://www.adur-worthing.gov.uk/bin-day/?brlu-selected-address=100061878829", - "wiki_command_url_override": "https://www.adur-worthing.gov.uk/bin-day/?brlu-selected-address=XXXXXXXX", - "wiki_name": "Adur and Worthing Councils", - "wiki_note": "Replace XXXXXXXX with your UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find it." - }, - "AntrimAndNewtonabbeyCouncil": { - "url": "https://antrimandnewtownabbey.gov.uk/residents/bins-recycling/bins-schedule/?Id=643", - "wiki_command_url_override": "https://antrimandnewtownabbey.gov.uk/residents/bins-recycling/bins-schedule/?Id=XXXX", - "wiki_name": "Antrim & Newtonabbey Council", - "wiki_note": "Navigate to [https://antrimandnewtownabbey.gov.uk/residents/bins-recycling/bins-schedule] and search for your street name. Use the URL with the ID to replace XXXXXXXX with your specific ID." - }, - "ArdsAndNorthDownCouncil": { - "url": "https://www.ardsandnorthdown.gov.uk", - "wiki_command_url_override": "https://www.ardsandnorthdown.gov.uk", - "uprn": "187136177", - "wiki_name": "Ards and North Down Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "ArdsAndNorthDownCouncil": { - "url": "https://www.ardsandnorthdown.gov.uk", - "wiki_command_url_override": "https://www.ardsandnorthdown.gov.uk", - "uprn": "187136177", - "wiki_name": "Ards and North Down Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "ArgyllandButeCouncil": { - "uprn": "125061759", - "skip_get_url": true, - "url": "https://www.argyll-bute.gov.uk", - "wiki_name": "Argyll and Bute Council", - "wiki_note": "Pass the UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "ArmaghBanbridgeCraigavonCouncil": { - "url": "https://www.armaghbanbridgecraigavon.gov.uk/", - "wiki_command_url_override": "https://www.armaghbanbridgecraigavon.gov.uk/", - "uprn": "185625284", - "wiki_name": "Armagh Banbridge Craigavon Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "ArunCouncil": { - "house_number": "1", - "postcode": "BN16 4DA", - "skip_get_url": true, - "url": "https://www1.arun.gov.uk/when-are-my-bins-collected", - "web_driver": "http://selenium:4444", - "wiki_name": "Arun Council", - "wiki_note": "Pass the house name/number and postcode in their respective parameters, both wrapped in double quotes. This parser requires a Selenium webdriver." - }, - "AshfieldDistrictCouncil": { - "url": "https://www.ashfield.gov.uk", - "postcode": "NG16 6RH", - "house_number": "1", - "web_driver": "http://selenium:4444", - "wiki_name": "Ashfield District Council", - "wiki_note": "Pass the house name/number and postcode in their respective parameters, both wrapped in double quotes. This parser requires a Selenium webdriver" - }, - "AshfordBoroughCouncil": { - "url": "https://ashford.gov.uk", - "wiki_command_url_override": "https://ashford.gov.uk", - "postcode": "TN23 7SP", - "uprn": "100060777899", - "wiki_name": "Ashford Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "AylesburyValeCouncil": { - "skip_get_url": true, - "uprn": "766252532", - "url": "http://avdcbins.web-labs.co.uk/RefuseApi.asmx", - "wiki_name": "Aylesbury Vale Council (Buckinghamshire)", - "wiki_note": "To get the UPRN, please use [FindMyAddress](https://www.findmyaddress.co.uk/search). Returns all published collections in the past, present, future." - }, - "BaberghDistrictCouncil": { - "skip_get_url": true, - "house_number": "Monday", - "postcode": "Week 1", - "uprn": "Tuesday", - "url": "https://www.babergh.gov.uk", - "wiki_name": "Babergh District Council", - "wiki_note": "Use the House Number field to pass the DAY of the week for your NORMAL collections. [Monday/Tuesday/Wednesday/Thursday/Friday]. [OPTIONAL] Use the 'postcode' field to pass the WEEK for your garden collection. [Week 1/Week 2]. [OPTIONAL] Use the 'uprn' field to pass the DAY for your garden collection. [Monday/Tuesday/Wednesday/Thursday/Friday]" - }, - "BCPCouncil": { - "skip_get_url": true, - "uprn": "100040810214", - "url": "https://online.bcpcouncil.gov.uk/bindaylookup/", - "wiki_name": "BCP Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "BarnetCouncil": { - "house_number": "HA8 7NA, 2, MANOR PARK GARDENS, EDGWARE, BARNET", - "postcode": "HA8 7NA", - "skip_get_url": true, - "url": "https://www.barnet.gov.uk/recycling-and-waste/bin-collections/find-your-bin-collection-day", - "web_driver": "http://selenium:4444", - "wiki_name": "Barnet Council", - "wiki_note": "Follow the instructions [here](https://www.barnet.gov.uk/recycling-and-waste/bin-collections/find-your-bin-collection-day) until you get the page listing your address, then copy the entire address text and use that in the house number field. This parser requires a Selenium webdriver." 
- }, - "BarnsleyMBCouncil": { - "postcode": "S36 9AN", - "skip_get_url": true, - "uprn": "2007004502", - "url": "https://waste.barnsley.gov.uk/ViewCollection/Collections", - "wiki_name": "Barnsley Metropolitan Borough Council", - "wiki_note": "To get the UPRN, you will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "BasildonCouncil": { - "skip_get_url": true, - "uprn": "10013350430", - "url": "https://basildonportal.azurewebsites.net/api/getPropertyRefuseInformation", - "wiki_name": "Basildon Council", - "wiki_note": "To get the UPRN, you will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "BasingstokeCouncil": { - "skip_get_url": true, - "uprn": "100060220926", - "url": "https://www.basingstoke.gov.uk/bincollection", - "wiki_name": "Basingstoke Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "BathAndNorthEastSomersetCouncil": { - "skip_get_url": true, - "uprn": "100120000855", - "url": "https://www.bathnes.gov.uk/webforms/waste/collectionday/", - "wiki_name": "Bath and North East Somerset Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "BedfordBoroughCouncil": { - "skip_get_url": true, - "uprn": "10024232065", - "url": "https://www.bedford.gov.uk/bins-and-recycling/household-bins-and-recycling/check-your-bin-day", - "wiki_name": "Bedford Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "BedfordshireCouncil": { - "postcode": "SG19 2UP", - "skip_get_url": true, - "uprn": "10000802040", - "url": "https://www.centralbedfordshire.gov.uk/info/163/bins_and_waste_collections_-_check_bin_collection_day", - "wiki_name": "Bedfordshire Council", - "wiki_note": "In order to use this parser, you must provide a valid postcode and a UPRN retrieved from the council's website for your specific address." - }, - "BelfastCityCouncil": { - "postcode": "BT10 0GY", - "skip_get_url": true, - "uprn": "185086469", - "url": "https://online.belfastcity.gov.uk/find-bin-collection-day/Default.aspx", - "wiki_name": "Belfast City Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "BexleyCouncil": { - "house_number": "1 Dorchester Avenue, Bexley", - "postcode": "DA5 3AH", - "skip_get_url": true, - "uprn": "100020196143", - "url": "https://mybexley.bexley.gov.uk/service/When_is_my_collection_day", - "web_driver": "http://selenium:4444", - "wiki_name": "Bexley Council", - "wiki_note": "In order to use this parser, you will need to sign up to [Bexley's @Home app](https://www.bexley.gov.uk/services/rubbish-and-recycling/bexley-home-recycling-app/about-app). Complete the setup by entering your email and setting your address with postcode and address line. Once you can see the calendar, you should be good to run the parser. Just pass the email you used in quotes in the UPRN parameter." - }, - "BirminghamCityCouncil": { - "postcode": "B5 7XE", - "uprn": "100070445256", - "url": "https://www.birmingham.gov.uk/xfp/form/619", - "wiki_name": "Birmingham City Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "BlabyDistrictCouncil": { - "url": "https://www.blaby.gov.uk", - "wiki_command_url_override": "https://www.blaby.gov.uk", - "uprn": "100030401782", - "wiki_name": "Blaby District Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "BlackburnCouncil": { - "skip_get_url": true, - "uprn": "100010733027", - "url": "https://mybins.blackburn.gov.uk/api/mybins/getbincollectiondays?uprn=100010733027&month=8&year=2022", - "web_driver": "http://selenium:4444", - "wiki_command_url_override": "https://www.blackburn.gov.uk", - "wiki_name": "Blackburn Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "BoltonCouncil": { - "postcode": "BL1 5PQ", - "skip_get_url": true, - "uprn": "100010886936", - "url": "https://carehomes.bolton.gov.uk/bins.aspx", - "web_driver": "http://selenium:4444", - "wiki_name": "Bolton Council", - "wiki_note": "To get the UPRN, you will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search). Previously required a single field that was UPRN and full address; now requires UPRN and postcode as separate fields." - }, - "BracknellForestCouncil": { - "house_number": "57", - "paon": "57", - "postcode": "GU47 9BS", - "skip_get_url": true, - "url": "https://selfservice.mybfc.bracknell-forest.gov.uk/w/webpage/waste-collection-days", - "wiki_name": "Bracknell Forest Council", - "wiki_note": "Pass the house number and postcode in their respective parameters." - }, - "BradfordMDC": { - "custom_component_show_url_field": false, - "skip_get_url": true, - "uprn": "100051146921", - "url": "https://onlineforms.bradford.gov.uk/ufs/collectiondates.eb", - "wiki_name": "Bradford MDC", - "wiki_note": "To get the UPRN, you will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search). Postcode isn't parsed by this script, but you can pass it in double quotes." 
- }, - "BraintreeDistrictCouncil": { - "postcode": "CO5 9BD", - "skip_get_url": true, - "uprn": "10006930172", - "url": "https://www.braintree.gov.uk/", - "wiki_name": "Braintree District Council", - "wiki_note": "Provide your UPRN and postcode. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "BrecklandCouncil": { - "url": "https://www.breckland.gov.uk", - "wiki_command_url_override": "https://www.breckland.gov.uk", - "uprn": "100091495479", - "wiki_name": "Breckland Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "BrightonandHoveCityCouncil": { - "house_number": "44 Carden Avenue, Brighton, BN1 8NE", - "postcode": "BN1 8NE", - "skip_get_url": true, - "uprn": "22060199", - "url": "https://cityclean.brighton-hove.gov.uk/link/collections", - "web_driver": "http://selenium:4444", - "wiki_name": "Brighton and Hove City Council", - "wiki_note": "Use the full address as it appears on the drop-down on the site when you search by postcode." - }, - "BristolCityCouncil": { - "skip_get_url": true, - "uprn": "137547", - "url": "https://bristolcouncil.powerappsportals.com/completedynamicformunauth/?servicetypeid=7dce896c-b3ba-ea11-a812-000d3a7f1cdc", - "wiki_name": "Bristol City Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "BromleyBoroughCouncil": { - "url": "https://recyclingservices.bromley.gov.uk/waste/6087017", - "web_driver": "http://selenium:4444", - "wiki_command_url_override": "https://recyclingservices.bromley.gov.uk/waste/XXXXXXX", - "wiki_name": "Bromley Borough Council", - "wiki_note": "Follow the instructions [here](https://recyclingservices.bromley.gov.uk/waste) until the \"Your bin days\" page then copy the URL and replace the URL in the command." 
- }, - "BromsgroveDistrictCouncil": { - "url": "https://www.bromsgrove.gov.uk", - "wiki_command_url_override": "https://www.bromsgrove.gov.uk", - "uprn": "100120584652", - "wiki_name": "Bromsgrove District Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "BroxbourneCouncil": { - "url": "https://www.broxbourne.gov.uk", - "uprn": "148048608", - "postcode": "EN8 7FL", - "wiki_name": "Broxbourne Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "BroxtoweBoroughCouncil": { - "postcode": "NG16 2LY", - "skip_get_url": true, - "uprn": "100031325997", - "url": "https://www.broxtowe.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "Broxtowe Borough Council", - "wiki_note": "Pass the UPRN and postcode. To get the UPRN, you can use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "BuckinghamshireCouncil": { - "house_number": "2", - "postcode": "HP13 7BA", - "skip_get_url": true, - "url": "https://iapp.itouchvision.com/iappcollectionday/collection-day/?uuid=FA353FC74600CBE61BE409534D00A8EC09BDA3AC&lang=en", - "web_driver": "http://selenium:4444", - "wiki_name": "Buckinghamshire Council (Chiltern, South Bucks, Wycombe)", - "wiki_note": "Pass the house name/number and postcode in their respective arguments, both wrapped in quotes." - }, - "BurnleyBoroughCouncil": { - "uprn": "100010347165", - "url": "https://www.burnley.gov.uk", - "wiki_name": "Burnley Borough Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." 
- }, - "BuryCouncil": { - "house_number": "3", - "postcode": "M26 3XY", - "skip_get_url": true, - "url": "https://www.bury.gov.uk/waste-and-recycling/bin-collection-days-and-alerts", - "wiki_name": "Bury Council", - "wiki_note": "Pass the postcode and house number in their respective arguments, both wrapped in quotes." - }, - "CalderdaleCouncil": { - "postcode": "OL14 7EX", - "skip_get_url": true, - "uprn": "010035034598", - "url": "https://www.calderdale.gov.uk/environment/waste/household-collections/collectiondayfinder.jsp", - "web_driver": "http://selenium:4444", - "wiki_name": "Calderdale Council", - "wiki_note": "Pass the UPRN and postcode. To get the UPRN, you can use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "CannockChaseDistrictCouncil": { - "postcode": "WS15 1JA", - "skip_get_url": true, - "uprn": "200003095389", - "url": "https://www.cannockchasedc.gov.uk/", - "wiki_name": "Cannock Chase District Council", - "wiki_note": "To get the UPRN, you can use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "CanterburyCityCouncil": { - "url": "https://www.canterbury.gov.uk", - "wiki_command_url_override": "https://www.canterbury.gov.uk", - "uprn": "10094583181", - "wiki_name": "Canterbury City Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "CardiffCouncil": { - "skip_get_url": true, - "uprn": "100100112419", - "url": "https://www.cardiff.gov.uk/ENG/resident/Rubbish-and-recycling/When-are-my-bins-collected/Pages/default.aspx", - "wiki_name": "Cardiff Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "CarmarthenshireCountyCouncil": { - "url": "https://www.carmarthenshire.gov.wales", - "wiki_command_url_override": "https://www.carmarthenshire.gov.wales", - "uprn": "10004859302", - "wiki_name": "Carmarthenshire County Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "CastlepointDistrictCouncil": { - "skip_get_url": true, - "uprn": "4525", - "url": "https://apps.castlepoint.gov.uk/cpapps/index.cfm?fa=wastecalendar", - "wiki_name": "Castlepoint District Council", - "wiki_note": "For this council, 'uprn' is actually a 4-digit code for your street. Go [here](https://apps.castlepoint.gov.uk/cpapps/index.cfm?fa=wastecalendar) and inspect the source of the dropdown box to find the 4-digit number for your street." - }, - "CharnwoodBoroughCouncil": { - "url": "https://my.charnwood.gov.uk/location?put=cbc10070067259&rememberme=0&redirect=%2F", - "wiki_command_url_override": "https://my.charnwood.gov.uk/location?put=cbcXXXXXXXX&rememberme=0&redirect=%2F", - "wiki_name": "Charnwood Borough Council", - "wiki_note": "Replace XXXXXXXX with your UPRN, keeping \"cbc\" before it." - }, - "ChelmsfordCityCouncil": { - "house_number": "1 Celeborn Street, South Woodham Ferrers, Chelmsford, CM3 7AE", - "postcode": "CM3 7AE", - "url": "https://www.chelmsford.gov.uk/myhome/", - "web_driver": "http://selenium:4444", - "wiki_name": "Chelmsford City Council", - "wiki_note": "Follow the instructions [here](https://www.chelmsford.gov.uk/myhome/) until you get the page listing your address, then copy the entire address text and use that in the house number field." - }, - "CheltenhamBoroughCouncil": { - "skip_get_url": true, - "house_number": "Monday", - "postcode": "Week 1", - "url": "https://www.cheltenham.gov.uk", - "wiki_name": "Cheltenham Borough Council", - "wiki_note": "Use the House Number field to pass the DAY of the week for your collections. [Monday/Tuesday/Wednesday/Thursday/Friday]. 
Use the 'postcode' field to pass the WEEK (wrapped in quotes) for your collections. [Week 1/Week 2]." - }, - "CheshireEastCouncil": { - "url": "https://online.cheshireeast.gov.uk/MyCollectionDay/SearchByAjax/GetBartecJobList?uprn=100012791226&onelineaddress=3%20COBBLERS%20YARD,%20SK9%207DZ&_=1689413260149", - "wiki_command_url_override": "https://online.cheshireeast.gov.uk/MyCollectionDay/SearchByAjax/GetBartecJobList?uprn=XXXXXXXX&onelineaddress=XXXXXXXX&_=1689413260149", - "wiki_name": "Cheshire East Council", - "wiki_note": "Both the UPRN and a one-line address are passed in the URL, which needs to be wrapped in double quotes. The one-line address is made up of the house number, street name, and postcode. Use the form [here](https://online.cheshireeast.gov.uk/mycollectionday/) to find them, then take the first line and postcode and replace all spaces with `%20`." - }, - "CheshireWestAndChesterCouncil": { - "uprn": "100012346655", - "skip_get_url": true, - "url": "https://my.cheshirewestandchester.gov.uk", - "wiki_name": "Cheshire West and Chester Council", - "wiki_note": "Pass the UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "ChesterfieldBoroughCouncil": { - "uprn": "74008234", - "skip_get_url": true, - "url": "https://www.chesterfield.gov.uk", - "wiki_name": "Chesterfield Borough Council", - "wiki_note": "Pass the UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "ChichesterDistrictCouncil": { - "house_number": "7, Plaistow Road, Kirdford, Billingshurst, West Sussex", - "postcode": "RH14 0JT", - "skip_get_url": true, - "url": "https://www.chichester.gov.uk/checkyourbinday", - "web_driver": "http://selenium:4444", - "wiki_name": "Chichester District Council", - "wiki_note": "Needs the full address and postcode as it appears on [this page](https://www.chichester.gov.uk/checkyourbinday)." 
- }, - "ChorleyCouncil": { - "postcode": "PR6 7PG", - "skip_get_url": true, - "uprn": "UPRN100010382247", - "url": "https://myaccount.chorley.gov.uk/wastecollections.aspx", - "web_driver": "http://selenium:4444", - "wiki_name": "Chorley Council", - "wiki_note": "Chorley needs to be passed both a Postcode & UPRN in the format of UPRNXXXXXX to work. Find this on [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "ColchesterCityCouncil": { - "house_number": "29", - "paon": "29", - "postcode": "CO2 8UN", - "skip_get_url": false, - "url": "https://www.colchester.gov.uk/your-recycling-calendar", - "web_driver": "http://selenium:4444", - "wiki_name": "Colchester City Council", - "wiki_note": "Pass the house name/number in the house number parameter, wrapped in double quotes." - }, - "ConwyCountyBorough": { - "postcode": "LL30 2DF", - "uprn": "100100429249", - "url": "https://www.conwy.gov.uk/Contensis-Forms/erf/collection-result-soap-xmas.asp?ilangid=1&uprn=100100429249", - "wiki_name": "Conwy County Borough Council", - "wiki_note": "Conwy County Borough Council uses a straight UPRN in the URL, e.g., `&uprn=XXXXXXXXXXXXX`." - }, - "CopelandBoroughCouncil": { - "uprn": "100110734613", - "url": "https://www.copeland.gov.uk", - "wiki_name": "Copeland Borough Council", - "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "CornwallCouncil": { - "skip_get_url": true, - "uprn": "100040128734", - "url": "https://www.cornwall.gov.uk/my-area/", - "wiki_name": "Cornwall Council", - "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." 
- }, - "CoventryCityCouncil": { - "url": "https://www.coventry.gov.uk/directory-record/56384/abberton-way-", - "wiki_command_url_override": "https://www.coventry.gov.uk/directory_record/XXXXXX/XXXXXX", - "wiki_name": "Coventry City Council", - "wiki_note": "Follow the instructions [here](https://www.coventry.gov.uk/bin-collection-calendar) until you get the page that shows the weekly collections for your address, then copy the URL and replace the URL in the command." - }, - "CotswoldDistrictCouncil": { - "house_number": "19", - "postcode": "GL56 0GB", - "skip_get_url": true, - "url": "https://community.cotswold.gov.uk/s/waste-collection-enquiry", - "web_driver": "http://selenium:4444", - "wiki_name": "Cotswold District Council", - "wiki_note": "Pass the full address in the house number and postcode in" - }, - "CoventryCityCouncil": { - "url": "https://www.coventry.gov.uk/directory-record/56384/abberton-way-", - "wiki_command_url_override": "https://www.coventry.gov.uk/directory_record/XXXXXX/XXXXXX", - "wiki_name": "Coventry City Council", - "wiki_note": "Follow the instructions [here](https://www.coventry.gov.uk/bin-collection-calendar) until you get the page that shows the weekly collections for your address then copy the URL and replace the URL in the command." - }, - "CrawleyBoroughCouncil": { - "house_number": "9701076", - "skip_get_url": true, - "uprn": "100061785321", - "url": "https://my.crawley.gov.uk/", - "wiki_name": "Crawley Borough Council", - "wiki_note": "Crawley needs to be passed both a UPRN and a USRN to work. Find these on [FindMyAddress](https://www.findmyaddress.co.uk/search) or [FindMyStreet](https://www.findmystreet.co.uk/map)." - }, - "CroydonCouncil": { - "house_number": "13", - "postcode": "SE25 5DW", - "skip_get_url": true, - "url": "https://service.croydon.gov.uk/wasteservices/w/webpage/bin-day-enter-address", - "wiki_name": "Croydon Council", - "wiki_note": "Pass the house number and postcode in their respective parameters." 
- }, - "CumberlandAllerdaleCouncil": { - "house_number": "2", - "postcode": "CA13 0DE", - "url": "https://www.allerdale.gov.uk", - "wiki_name": "Cumberland Council - Allerdale District", - "wiki_note": "Pass the house number and postcode in their respective parameters." - }, - "DacorumBoroughCouncil": { - "house_number": "13", - "postcode": "HP3 9JY", - "skip_get_url": true, - "web_driver": "http://selenium:4444", - "url": "https://webapps.dacorum.gov.uk/bincollections/", - "wiki_name": "Dacorum Borough Council", - "wiki_note": "Pass the house number and postcode in their respective parameters. This parser requires a Selenium webdriver." - }, - "DartfordBoroughCouncil": { - "uprn": "010094157511", - "url": "https://windmz.dartford.gov.uk/ufs/WS_CHECK_COLLECTIONS.eb?UPRN=010094157511", - "wiki_name": "Dartford Borough Council", - "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "DerbyCityCouncil": { - "url": "https://www.derby.gov.uk", - "uprn": "10010684240", - "wiki_name": "Derby City Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "DerbyshireDalesDistrictCouncil": { - "postcode": "DE4 3AS", - "skip_get_url": true, - "uprn": "10070102161", - "url": "https://www.derbyshiredales.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "Derbyshire Dales District Council", - "wiki_note": "Pass the UPRN and postcode. To get the UPRN, you can use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "DoncasterCouncil": { - "skip_get_url": true, - "uprn": "100050768956", - "url": "https://www.doncaster.gov.uk/Compass/Entity/Launch/D3/", - "wiki_name": "Doncaster Council", - "wiki_note": "Pass the UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." 
- }, - "DorsetCouncil": { - "skip_get_url": true, - "uprn": "100040711049", - "url": "https://www.dorsetcouncil.gov.uk/", - "wiki_name": "Dorset Council", - "wiki_note": "Pass the UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "DoverDistrictCouncil": { - "url": "https://collections.dover.gov.uk/property/100060908340", - "wiki_command_url_override": "https://collections.dover.gov.uk/property/XXXXXXXXXXX", - "wiki_name": "Dover District Council", - "wiki_note": "Replace XXXXXXXXXXX with your UPRN. To get the UPRN, you can use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "DudleyCouncil": { - "url": "https://my.dudley.gov.uk", - "wiki_command_url_override": "https://my.dudley.gov.uk", - "uprn": "90014244", - "wiki_name": "Dudley Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "DurhamCouncil": { - "skip_get_url": true, - "uprn": "200003218818", - "url": "https://www.durham.gov.uk/bincollections?uprn=", - "wiki_name": "Durham Council", - "wiki_note": "Pass the UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "EalingCouncil": { - "skip_get_url": true, - "uprn": "12073883", - "url": "https://www.ealing.gov.uk/site/custom_scripts/WasteCollectionWS/home/FindCollection", - "wiki_name": "Ealing Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "EastAyrshireCouncil": { - "url": "https://www.east-ayrshire.gov.uk", - "wiki_command_url_override": "https://www.east-ayrshire.gov.uk", - "uprn": "127074727", - "wiki_name": "East Ayrshire Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "EastCambridgeshireCouncil": { - "skip_get_url": true, - "uprn": "10002597178", - "url": "https://www.eastcambs.gov.uk/", - "wiki_name": "East Cambridgeshire Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "EastDevonDC": { - "url": "https://eastdevon.gov.uk/recycling-and-waste/recycling-waste-information/when-is-my-bin-collected/future-collections-calendar/?UPRN=010090909915", - "wiki_command_url_override": "https://eastdevon.gov.uk/recycling-and-waste/recycling-waste-information/when-is-my-bin-collected/future-collections-calendar/?UPRN=XXXXXXXX", - "wiki_name": "East Devon District Council", - "wiki_note": "Replace XXXXXXXX with your UPRN." - }, - "EastHertsCouncil": { - "house_number": "1", - "postcode": "CM20 2FZ", - "skip_get_url": true, - "url": "https://www.eastherts.gov.uk", - "wiki_name": "East Herts Council", - "wiki_note": "Pass the house number and postcode in their respective parameters." - }, - "EastHertsCouncil": { - "house_number": "1", - "postcode": "CM20 2FZ", - "skip_get_url": true, - "url": "https://www.eastherts.gov.uk", - "wiki_name": "East Herts Council" - }, - "EastLindseyDistrictCouncil": { - "house_number": "1", - "postcode": "PE22 0YD", - "skip_get_url": true, - "url": "https://www.e-lindsey.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "East Lindsey District Council", - "wiki_note": "Pass the house name/number and postcode in their respective parameters. This parser requires a Selenium webdriver." - }, - "EastRenfrewshireCouncil": { - "house_number": "23", - "postcode": "G46 6RG", - "skip_get_url": true, - "url": "https://eastrenfrewshire.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "East Renfrewshire Council", - "wiki_note": "Pass the house name/number and postcode in their respective parameters. This parser requires a Selenium webdriver." 
- }, - "EastRidingCouncil": { - "house_number": "14 THE LEASES BEVERLEY HU17 8LG", - "postcode": "HU17 8LG", - "skip_get_url": true, - "url": "https://wasterecyclingapi.eastriding.gov.uk", - "web_driver": "http://selenium:4444", - "wiki_name": "East Riding Council", - "wiki_note": "Put the full address as it displays on the council website dropdown when you do the check manually." - }, - "EastSuffolkCouncil": { - "postcode": "IP11 9FJ", - "skip_get_url": true, - "uprn": "10093544720", - "url": "https://my.eastsuffolk.gov.uk/service/Bin_collection_dates_finder", - "web_driver": "http://selenium:4444", - "wiki_name": "East Suffolk Council", - "wiki_note": "To get the UPRN, you can use [FindMyAddress](https://www.findmyaddress.co.uk/search). This parser requires a Selenium webdriver." - }, - "EastleighBoroughCouncil": { - "skip_get_url": true, - "uprn": "100060303535", - "url": "https://www.eastleigh.gov.uk/waste-bins-and-recycling/collection-dates/your-waste-bin-and-recycling-collections?uprn=", - "wiki_name": "Eastleigh Borough Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "EdinburghCityCouncil": { - "skip_get_url": true, - "house_number": "Tuesday", - "postcode": "Week 1", - "url": "https://www.edinburgh.gov.uk", - "wiki_name": "Edinburgh City Council", - "wiki_note": "Use the House Number field to pass the DAY of the week for your collections. Monday/Tuesday/Wednesday/Thursday/Friday. Use the 'postcode' field to pass the WEEK for your collection. [Week 1/Week 2]" - }, - "ElmbridgeBoroughCouncil": { - "url": "https://www.elmbridge.gov.uk", - "wiki_command_url_override": "https://www.elmbridge.gov.uk", - "uprn": "10013119164", - "wiki_name": "Elmbridge Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "EnfieldCouncil": { - "house_number": "111", - "postcode": "N13 5AJ", - "skip_get_url": true, - "url": "https://www.enfield.gov.uk/services/rubbish-and-recycling/find-my-collection-day", - "web_driver": "http://selenium:4444", - "wiki_name": "Enfield Council", - "wiki_note": "Pass the house number and postcode in their respective parameters. This parser requires a Selenium webdriver." - }, - "EnvironmentFirst": { - "url": "https://environmentfirst.co.uk/house.php?uprn=100060055444", - "wiki_command_url_override": "https://environmentfirst.co.uk/house.php?uprn=XXXXXXXXXX", - "wiki_name": "Environment First", - "wiki_note": "For properties with collections managed by Environment First, such as Lewes and Eastbourne. Replace the XXXXXXXXXX with the UPRN of your property—you can use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find this." - }, - "EppingForestDistrictCouncil": { - "postcode": "IG9 6EP", - "url": "https://eppingforestdc.maps.arcgis.com/apps/instant/lookup/index.html?appid=bfca32b46e2a47cd9c0a84f2d8cdde17&find=IG9%206EP", - "wiki_name": "Epping Forest District Council", - "wiki_note": "Replace the postcode in the URL with your own." - }, - "ErewashBoroughCouncil": { - "skip_get_url": true, - "uprn": "10003582028", - "url": "https://map.erewash.gov.uk/isharelive.web/myerewash.aspx", - "wiki_name": "Erewash Borough Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "ExeterCityCouncil": { - "uprn": "100040212270", - "url": "https://www.exeter.gov.uk", - "wiki_name": "Exeter City Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." 
- }, - "FalkirkCouncil": { - "url": "https://www.falkirk.gov.uk", - "wiki_command_url_override": "https://www.falkirk.gov.uk", - "uprn": "136065818", - "wiki_name": "Falkirk Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "FarehamBoroughCouncil": { - "postcode": "PO14 4NR", - "skip_get_url": true, - "url": "https://www.fareham.gov.uk/internetlookups/search_data.aspx?type=JSON&list=DomesticBinCollections&Road=&Postcode=PO14%204NR", - "wiki_name": "Fareham Borough Council", - "wiki_note": "Pass the postcode in the postcode parameter, wrapped in double quotes." - }, - "FenlandDistrictCouncil": { - "skip_get_url": true, - "uprn": "200002981143", - "url": "https://www.fenland.gov.uk/article/13114/", - "wiki_name": "Fenland District Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "FifeCouncil": { - "url": "https://www.fife.gov.uk", - "wiki_command_url_override": "https://www.fife.gov.uk", - "uprn": "320203521", - "wiki_name": "Fife Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "FlintshireCountyCouncil": { - "url": "https://digital.flintshire.gov.uk", - "wiki_command_url_override": "https://digital.flintshire.gov.uk", - "uprn": "100100213710", - "wiki_name": "Flintshire County Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "FifeCouncil": { - "url": "https://www.fife.gov.uk", - "wiki_command_url_override": "https://www.fife.gov.uk", - "uprn": "320203521", - "wiki_name": "Fife Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "FlintshireCountyCouncil": { - "url": "https://digital.flintshire.gov.uk", - "wiki_command_url_override": "https://digital.flintshire.gov.uk", - "uprn": "100100213710", - "wiki_name": "Flintshire County Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "FolkstoneandHytheDistrictCouncil": { - "skip_get_url": true, - "uprn": "50032097", - "url": "https://www.folkestone-hythe.gov.uk", - "wiki_name": "Folkstone and Hythe District Council", - "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "ForestOfDeanDistrictCouncil": { - "house_number": "ELMOGAL, PARKEND ROAD, BREAM, LYDNEY", - "postcode": "GL15 6JT", - "skip_get_url": true, - "url": "https://community.fdean.gov.uk/s/waste-collection-enquiry", - "web_driver": "http://selenium:4444", - "wiki_name": "Forest of Dean District Council", - "wiki_note": "Pass the full address in the house number and postcode parameters. This parser requires a Selenium webdriver." - }, - "GatesheadCouncil": { - "house_number": "Bracken Cottage", - "postcode": "NE16 5LQ", - "skip_get_url": true, - "url": "https://www.gateshead.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "Gateshead Council", - "wiki_note": "Pass the house name/number and postcode in their respective parameters. This parser requires a Selenium webdriver." - }, - "GedlingBoroughCouncil": { - "house_number": "Friday G4, Friday J", - "skip_get_url": true, - "url": "https://www.gedling.gov.uk/", - "wiki_name": "Gedling Borough Council", - "wiki_note": "Use [this site](https://www.gbcbincalendars.co.uk/) to find the collections for your address. Use the `-n` parameter to add them in a comma-separated list inside quotes, such as: 'Friday G4, Friday J'." 
- }, - "GlasgowCityCouncil": { - "url": "https://onlineservices.glasgow.gov.uk/forms/RefuseAndRecyclingWebApplication/CollectionsCalendar.aspx?UPRN=906700034497", - "wiki_command_url_override": "https://onlineservices.glasgow.gov.uk/forms/RefuseAndRecyclingWebApplication/CollectionsCalendar.aspx?UPRN=XXXXXXXX", - "wiki_name": "Glasgow City Council", - "wiki_note": "Replace XXXXXXXX with your UPRN." - }, - "GloucesterCityCouncil": { - "house_number": "111", - "postcode": "GL2 0RR", - "uprn": "100120479507", - "skip_get_url": true, - "web_driver": "http://selenium:4444", - "url": "https://gloucester-self.achieveservice.com/service/Bins___Check_your_bin_day", - "wiki_name": "Gloucester City Council", - "wiki_note": "Pass the house number, postcode, and UPRN in their respective parameters. This parser requires a Selenium webdriver." - }, - "GraveshamBoroughCouncil": { - "uprn": "100060927046", - "skip_get_url": true, - "url": "https://www.gravesham.gov.uk", - "wiki_name": "Gravesham Borough Council", - "wiki_note": "Pass the UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "GuildfordCouncil": { - "house_number": "THE LODGE, PUTTENHAM HILL HOUSE, PUTTENHAM HILL, PUTTENHAM, GUILDFORD, GU3 1AH", - "postcode": "GU3 1AH", - "skip_get_url": true, - "uprn": "100061372691", - "url": "https://my.guildford.gov.uk/customers/s/view-bin-collections", - "web_driver": "http://selenium:4444", - "wiki_name": "Guildford Council", - "wiki_note": "If the bin day is 'today' then the collectionDate will only show today's date if before 7 AM; else the date will be in 'previousCollectionDate'. To get the UPRN, you will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "HackneyCouncil": { - "house_number": "101", - "postcode": "N16 9AS", - "url": "https://www.hackney.gov.uk", - "wiki_name": "Hackney Council", - "wiki_note": "Pass the postcode and house number in their respective arguments, both wrapped in quotes." 
- }, - "HaltonBoroughCouncil": { - "house_number": "12", - "postcode": "WA7 4HA", - "skip_get_url": true, - "url": "https://webapp.halton.gov.uk/PublicWebForms/WasteServiceSearchv1.aspx#collections", - "web_driver": "http://selenium:4444", - "wiki_name": "Halton Borough Council", - "wiki_note": "Pass the house number and postcode. This parser requires a Selenium webdriver." - }, - "HarboroughDistrictCouncil": { - "url": "https://www.harborough.gov.uk", - "wiki_command_url_override": "https://www.harborough.gov.uk", - "uprn": "100030489072", - "wiki_name": "Harborough District Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "HarboroughDistrictCouncil": { - "url": "https://www.harborough.gov.uk", - "wiki_command_url_override": "https://www.harborough.gov.uk", - "uprn": "100030489072", - "wiki_name": "Harborough District Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "HaringeyCouncil": { - "skip_get_url": true, - "uprn": "100021203052", - "url": "https://wastecollections.haringey.gov.uk/property", - "wiki_name": "Haringey Council", - "wiki_note": "Pass the UPRN, which can be found at `https://wastecollections.haringey.gov.uk/property/{uprn}`." - }, - "HarrogateBoroughCouncil": { - "skip_get_url": true, - "uprn": "100050414307", - "url": "https://secure.harrogate.gov.uk/inmyarea", - "wiki_name": "Harrogate Borough Council", - "wiki_note": "Pass the UPRN, which can be found at [this site](https://secure.harrogate.gov.uk/inmyarea). URL doesn't need to be passed." - }, - "HartDistrictCouncil": { - "skip_get_url": true, - "uprn": "100062349291", - "url": "https://www.hart.gov.uk/", - "wiki_name": "Hart District Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "HartlepoolBoroughCouncil": { - "url": "https://www.hartlepool.gov.uk", - "uprn": "100110019551", - "wiki_name": "Hartlepool Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "HertsmereBoroughCouncil": { - "house_number": "1", - "postcode": "WD7 9HZ", - "skip_get_url": true, - "url": "https://www.hertsmere.gov.uk", - "web_driver": "http://selenium:4444", - "wiki_name": "Hertsmere Borough Council", - "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter." - }, - "HighlandCouncil": { - "url": "https://www.highland.gov.uk", - "wiki_command_url_override": "https://www.highland.gov.uk", - "uprn": "130072429", - "wiki_name": "Highland Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "HighPeakCouncil": { - "house_number": "9 Ellison Street, Glossop", - "postcode": "SK13 8BX", - "skip_get_url": true, - "url": "https://www.highpeak.gov.uk/findyourbinday", - "web_driver": "http://selenium:4444", - "wiki_name": "High Peak Council", - "wiki_note": "Pass the name of the street with the house number parameter, wrapped in double quotes. This parser requires a Selenium webdriver." - }, - "HinckleyandBosworthBoroughCouncil": { - "url": "https://www.hinckley-bosworth.gov.uk", - "uprn": "100030533512", - "wiki_name": "Hinckley and Bosworth Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "HounslowCouncil": { - "house_number": "17A LAMPTON PARK ROAD, HOUNSLOW", - "postcode": "TW3 4HS", - "skip_get_url": true, - "uprn": "10091596698", - "url": "https://www.hounslow.gov.uk/info/20272/recycling_and_waste_collection_day_finder", - "web_driver": "http://selenium:4444", - "wiki_name": "Hounslow Council", - "wiki_note": "Pass the full address as it appears on the council's website. This parser requires a Selenium webdriver." - }, - "HullCityCouncil": { - "skip_get_url": true, - "uprn": "21033995", - "url": "https://www.hull.gov.uk/bins-and-recycling/bin-collections/bin-collection-day-checker", - "wiki_name": "Hull City Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "HuntingdonDistrictCouncil": { - "url": "http://www.huntingdonshire.gov.uk/refuse-calendar/10012048679", - "wiki_command_url_override": "https://www.huntingdonshire.gov.uk/refuse-calendar/XXXXXXXX", - "wiki_name": "Huntingdon District Council", - "wiki_note": "Replace XXXXXXXX with your UPRN." - }, - "IslingtonCouncil": { - "uprn": "5300094897", - "url": "https://www.islington.gov.uk/your-area?Postcode=unused&Uprn=5300094897", - "wiki_command_url_override": "https://www.islington.gov.uk/your-area?Postcode=unused&Uprn=XXXXXXXX", - "wiki_name": "Islington Council", - "wiki_note": "Replace XXXXXXXX with your UPRN." - }, - "KingsLynnandWestNorfolkBC": { - "uprn": "10023636886", - "url": "https://www.west-norfolk.gov.uk/", - "wiki_name": "Kings Lynn and West Norfolk Borough Council", - "wiki_note": "Provide your UPRN. Find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." 
- }, - "KingstonUponThamesCouncil": { - "url": "https://waste-services.kingston.gov.uk/waste/2701097", - "wiki_command_url_override": "https://waste-services.kingston.gov.uk/waste/XXXXXXX", - "wiki_name": "Kingston Upon Thames Council", - "wiki_note": "Follow the instructions [here](https://waste-services.kingston.gov.uk/waste) until the \"Your bin days\" page, then copy the URL and replace the URL in the command." - }, - "KirkleesCouncil": { - "house_number": "24", - "postcode": "HD7 5DX", - "skip_get_url": true, - "url": "https://www.kirklees.gov.uk/beta/your-property-bins-recycling/your-bins", - "web_driver": "http://selenium:4444", - "wiki_name": "Kirklees Council", - "wiki_note": "Pass the house number and postcode in their respective parameters. This parser requires a Selenium webdriver." - }, - "KnowsleyMBCouncil": { - "house_number": "22", - "postcode": "L36 3UY", - "skip_get_url": true, - "url": "https://knowsleytransaction.mendixcloud.com/link/youarebeingredirected?target=bincollectioninformation", - "web_driver": "http://selenium:4444", - "wiki_name": "Knowsley Metropolitan Borough Council", - "wiki_note": "Pass the postcode in the postcode parameter, wrapped in double quotes and with a space." - }, - "LancasterCityCouncil": { - "house_number": "1", - "postcode": "LA1 1RS", - "skip_get_url": true, - "url": "https://lcc-wrp.whitespacews.com", - "wiki_name": "Lancaster City Council", - "wiki_note": "Pass the house number and postcode in their respective parameters." - }, - "LeedsCityCouncil": { - "house_number": "1", - "postcode": "LS6 2SE", - "skip_get_url": true, - "uprn": "72506983", - "url": "https://www.leeds.gov.uk/residents/bins-and-recycling/check-your-bin-day", - "web_driver": "http://selenium:4444", - "wiki_name": "Leeds City Council", - "wiki_note": "Pass the house number, postcode, and UPRN. This parser requires a Selenium webdriver." 
- }, - "LichfieldDistrictCouncil": { - "url": "https://www.lichfielddc.gov.uk", - "wiki_command_url_override": "https://www.lichfielddc.gov.uk", - "uprn": "100031694085", - "wiki_name": "Lichfield District Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "LincolnCouncil": { - "url": "https://lincoln.gov.uk", - "wiki_command_url_override": "https://lincoln.gov.uk", - "uprn": "000235024846", - "postcode": "LN5 7SH", - "wiki_name": "Lincoln Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "LisburnCastlereaghCityCouncil": { - "house_number": "97", - "postcode": "BT28 1JN", - "skip_get_url": true, - "url": "https://lisburn.isl-fusion.com", - "wiki_name": "Lisburn and Castlereagh City Council", - "wiki_note": "Pass the house number and postcode in their respective parameters." - }, - "LiverpoolCityCouncil": { - "url": "https://liverpool.gov.uk/Bins/BinDatesTable?UPRN=38164600", - "wiki_command_url_override": "https://liverpool.gov.uk/Bins/BinDatesTable?UPRN=XXXXXXXX", - "wiki_name": "Liverpool City Council", - "wiki_note": "Replace XXXXXXXX with your property's UPRN." - }, - "LondonBoroughEaling": { - "skip_get_url": true, - "uprn": "12081498", - "url": "https://www.ealing.gov.uk/site/custom_scripts/WasteCollectionWS/home/FindCollection", - "wiki_name": "London Borough Ealing", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "LondonBoroughHarrow": { - "url": "https://www.harrow.gov.uk", - "wiki_command_url_override": "https://www.harrow.gov.uk", - "uprn": "100021298754", - "wiki_name": "London Borough Harrow", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "LondonBoroughHavering": { - "url": "https://www.havering.gov.uk", - "uprn": "100021380730", - "wiki_name": "London Borough Havering", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "LondonBoroughHounslow": { - "skip_get_url": true, - "uprn": "100021577765", - "url": "https://www.hounslow.gov.uk/homepage/86/recycling_and_waste_collection_day_finder", - "wiki_name": "London Borough Hounslow", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "LondonBoroughLambeth": { - "skip_get_url": true, - "uprn": "100021881738", - "url": "https://wasteservice.lambeth.gov.uk/WhitespaceComms/GetServicesByUprn", - "wiki_name": "London Borough Lambeth", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "LondonBoroughLewisham": { - "postcode": "SE12 9QF", - "skip_get_url": true, - "uprn": "100021954849", - "url": "https://www.lewisham.gov.uk", - "web_driver": "http://selenium:4444", - "wiki_name": "London Borough Lewisham", - "wiki_note": "Pass the UPRN and postcode. To get the UPRN, you can use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "LondonBoroughRedbridge": { - "postcode": "IG2 6LQ", - "uprn": "10023770353", - "url": "https://my.redbridge.gov.uk/RecycleRefuse", - "web_driver": "http://selenium:4444", - "wiki_name": "London Borough Redbridge", - "wiki_note": "Follow the instructions [here](https://my.redbridge.gov.uk/RecycleRefuse) until you get the page listing your address, then copy the entire address text and use that in the house number field." 
- }, - "LondonBoroughSutton": { - "url": "https://waste-services.sutton.gov.uk/waste", - "wiki_command_url_override": "https://waste-services.sutton.gov.uk/waste", - "uprn": "4473006", - "wiki_name": "London Borough Sutton", - "wiki_note": "You will need to find your unique property reference by going to (https://waste-services.sutton.gov.uk/waste), entering your details and then using the 7 digit reference in the URL as your UPRN" - }, - "LutonBoroughCouncil": { - "url": "https://myforms.luton.gov.uk", - "wiki_command_url_override": "https://myforms.luton.gov.uk", - "uprn": "100080155778", - "wiki_name": "Luton Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "MaldonDistrictCouncil": { - "skip_get_url": true, - "uprn": "100090557253", - "url": "https://maldon.suez.co.uk/maldon/ServiceSummary", - "wiki_name": "Maldon District Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "MalvernHillsDC": { - "skip_get_url": true, - "uprn": "100121348457", - "url": "https://swict.malvernhills.gov.uk/mhdcroundlookup/HandleSearchScreen", - "wiki_name": "Malvern Hills District Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "ManchesterCityCouncil": { - "skip_get_url": true, - "uprn": "77127089", - "url": "https://www.manchester.gov.uk/bincollections", - "wiki_name": "Manchester City Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "MansfieldDistrictCouncil": { - "skip_get_url": true, - "uprn": "100031396580", - "url": "https://www.mansfield.gov.uk/xfp/form/1327", - "wiki_name": "Mansfield District Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." 
- }, - "MertonCouncil": { - "url": "https://myneighbourhood.merton.gov.uk/wasteservices/WasteServices.aspx?ID=25936129", - "wiki_command_url_override": "https://myneighbourhood.merton.gov.uk/Wasteservices/WasteServices.aspx?ID=XXXXXXXX", - "wiki_name": "Merton Council", - "wiki_note": "Follow the instructions [here](https://myneighbourhood.merton.gov.uk/Wasteservices/WasteServicesSearch.aspx) until you get the \"Your recycling and rubbish collection days\" page, then copy the URL and replace the URL in the command." - }, - "MidAndEastAntrimBoroughCouncil": { - "postcode": "100 Galgorm Road", - "skip_get_url": true, - "url": "https://www.midandeastantrim.gov.uk/resident/waste-recycling/collection-dates/", - "web_driver": "http://selenium:4444", - "wiki_name": "Mid and East Antrim Borough Council", - "wiki_note": "Pass the house name/number plus the name of the street with the postcode parameter, wrapped in double quotes. Check the address on the website first. This version will only pick the first SHOW button returned by the search or if it is fully unique." - }, - "MidDevonCouncil": { - "url": "https://www.middevon.gov.uk", - "wiki_command_url_override": "https://www.middevon.gov.uk", - "uprn": "200003997770", - "wiki_name": "Mid Devon Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "MidlothianCouncil": { - "house_number": "52", - "postcode": "EH19 2EB", - "skip_get_url": true, - "url": "https://www.midlothian.gov.uk/info/1054/bins_and_recycling/343/bin_collection_days", - "wiki_name": "Midlothian Council", - "wiki_note": "Pass the house name/number wrapped in double quotes along with the postcode parameter." 
- }, - "MidSuffolkDistrictCouncil": { - "skip_get_url": true, - "house_number": "Monday", - "postcode": "Week 2", - "uprn": "Monday", - "url": "https://www.midsuffolk.gov.uk", - "wiki_name": "Mid Suffolk District Council", - "wiki_note": "Use the House Number field to pass the DAY of the week for your NORMAL collections. [Monday/Tuesday/Wednesday/Thursday/Friday]. [OPTIONAL] Use the 'postcode' field to pass the WEEK for your garden collection. [Week 1/Week 2]. [OPTIONAL] Use the 'uprn' field to pass the DAY for your garden collection. [Monday/Tuesday/Wednesday/Thursday/Friday]" - }, - "MidSussexDistrictCouncil": { - "house_number": "OAKLANDS, OAKLANDS ROAD RH16 1SS", - "postcode": "RH16 1SS", - "skip_get_url": true, - "url": "https://www.midsussex.gov.uk/waste-recycling/bin-collection/", - "web_driver": "http://selenium:4444", - "wiki_name": "Mid Sussex District Council", - "wiki_note": "Pass the name of the street with the house number parameter, wrapped in double quotes. This parser requires a Selenium webdriver." - }, - "MiltonKeynesCityCouncil": { - "uprn": "25109551", - "url": "https://mycouncil.milton-keynes.gov.uk/en/service/Waste_Collection_Round_Checker", - "wiki_name": "Milton Keynes City Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "MoleValleyDistrictCouncil": { - "postcode": "RH4 1SJ", - "skip_get_url": true, - "uprn": "200000171235", - "url": "https://myproperty.molevalley.gov.uk/molevalley/", - "wiki_name": "Mole Valley District Council", - "wiki_note": "UPRN can only be parsed with a valid postcode." - }, - "MonmouthshireCountyCouncil": { - "url": "https://maps.monmouthshire.gov.uk", - "uprn": "100100266220", - "wiki_name": "Monmouthshire County Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "MorayCouncil": { - "uprn": "28841", - "url": "https://bindayfinder.moray.gov.uk/", - "wiki_name": "Moray Council", - "wiki_note": "Find your property ID by going to (https://bindayfinder.moray.gov.uk), search for your property and extracting the ID from the URL. i.e. (https://bindayfinder.moray.gov.uk/disp_bins.php?id=00028841)" - }, - "NeathPortTalbotCouncil": { - "house_number": "2", - "postcode": "SA13 3BA", - "skip_get_url": true, - "url": "https://www.npt.gov.uk", - "web_driver": "http://selenium:4444", - "wiki_name": "Neath Port Talbot Council", - "wiki_note": "Pass the house number and postcode in their respective parameters. This parser requires a Selenium webdriver." - }, - "NewForestCouncil": { - "postcode": "SO41 0GJ", - "skip_get_url": true, - "uprn": "100060482345", - "url": "https://forms.newforest.gov.uk/id/FIND_MY_COLLECTION", - "web_driver": "http://selenium:4444", - "wiki_name": "New Forest Council", - "wiki_note": "Pass the postcode and UPRN. This parser requires a Selenium webdriver." - }, - "NewarkAndSherwoodDC": { - "url": "http://app.newark-sherwooddc.gov.uk/bincollection/calendar?pid=200004258529&nc=1", - "wiki_command_url_override": "http://app.newark-sherwooddc.gov.uk/bincollection/calendar?pid=XXXXXXXX&nc=1", - "wiki_name": "Newark and Sherwood District Council", - "wiki_note": "Replace XXXXXXXX with your UPRN." - }, - "NewcastleCityCouncil": { - "url": "https://community.newcastle.gov.uk/my-neighbourhood/ajax/getBinsNew.php?uprn=004510730634", - "wiki_command_url_override": "https://community.newcastle.gov.uk/my-neighbourhood/ajax/getBinsNew.php?uprn=XXXXXXXX", - "wiki_name": "Newcastle City Council", - "wiki_note": "Replace XXXXXXXX with your UPRN." - }, - "NewcastleUnderLymeCouncil": { - "url": "https://www.newcastle-staffs.gov.uk", - "uprn": "100031725433", - "wiki_name": "Newcastle Under Lyme Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." 
- }, - "NewhamCouncil": { - "skip_get_url": true, - "url": "https://bincollection.newham.gov.uk/Details/Index/000046029461", - "wiki_command_url_override": "https://bincollection.newham.gov.uk/Details/Index/XXXXXXXXXXX", - "wiki_name": "Newham Council", - "wiki_note": "Follow the instructions [here](https://bincollection.newham.gov.uk/) until you get the \"Rubbish and Recycling Collections\" page, then copy the URL and replace the URL in the command." - }, - "NewportCityCouncil": { - "postcode": "NP20 4HE", - "skip_get_url": true, - "uprn": "100100688837", - "url": "https://www.newport.gov.uk/", - "wiki_name": "Newport City Council", - "wiki_note": "Pass the postcode and UPRN. You can find the UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "NorthAyrshireCouncil": { - "url": "https://www.north-ayrshire.gov.uk/", - "wiki_command_url_override": "https://www.north-ayrshire.gov.uk/", - "uprn": "126045552", - "wiki_name": "North Ayrshire Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "NorthEastDerbyshireDistrictCouncil": { - "postcode": "S42 5RB", - "skip_get_url": true, - "uprn": "010034492221", - "url": "https://myselfservice.ne-derbyshire.gov.uk/service/Check_your_Bin_Day", - "web_driver": "http://selenium:4444", - "wiki_name": "North East Derbyshire District Council", - "wiki_note": "Pass the postcode and UPRN. This parser requires a Selenium webdriver." - }, - "NorthEastLincs": { - "uprn": "11062649", - "url": "https://www.nelincs.gov.uk/refuse-collection-schedule/?view=timeline&uprn=11062649", - "wiki_command_url_override": "https://www.nelincs.gov.uk/refuse-collection-schedule/?view=timeline&uprn=XXXXXXXX", - "wiki_name": "North East Lincolnshire Council", - "wiki_note": "Replace XXXXXXXX with your UPRN." 
- }, - "NorthHertfordshireDistrictCouncil": { - "house_number": "2", - "postcode": "SG6 4BJ", - "url": "https://www.north-herts.gov.uk", - "wiki_name": "North Hertfordshire District Council", - "wiki_note": "Pass the house number and postcode in their respective parameters." - }, - "NorthKestevenDistrictCouncil": { - "url": "https://www.n-kesteven.org.uk/bins/display?uprn=100030869513", - "wiki_command_url_override": "https://www.n-kesteven.org.uk/bins/display?uprn=XXXXXXXX", - "wiki_name": "North Kesteven District Council", - "wiki_note": "Replace XXXXXXXX with your UPRN." - }, - "NorthLanarkshireCouncil": { - "url": "https://www.northlanarkshire.gov.uk/bin-collection-dates/000118016164/48402118", - "wiki_command_url_override": "https://www.northlanarkshire.gov.uk/bin-collection-dates/XXXXXXXXXXX/XXXXXXXXXXX", - "wiki_name": "North Lanarkshire Council", - "wiki_note": "Follow the instructions [here](https://www.northlanarkshire.gov.uk/bin-collection-dates) until you get the \"Next collections\" page, then copy the URL and replace the URL in the command." - }, - "NorthLincolnshireCouncil": { - "skip_get_url": true, - "uprn": "100050194170", - "url": "https://www.northlincs.gov.uk/bins-waste-and-recycling/bin-and-box-collection-dates/", - "wiki_name": "North Lincolnshire Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "NorthNorfolkDistrictCouncil": { - "house_number": "1 Morston Mews", - "postcode": "NR25 6BH", - "skip_get_url": true, - "url": "https://www.north-norfolk.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "North Norfolk District Council", - "wiki_note": "Pass the name of the street with the house number parameter, wrapped in double quotes. This parser requires a Selenium webdriver." 
- }, - "NorthNorthamptonshireCouncil": { - "skip_get_url": true, - "uprn": "100031021317", - "url": "https://cms.northnorthants.gov.uk/bin-collection-search/calendarevents/100031021318/2023-10-17/2023-10-01", - "wiki_name": "North Northamptonshire Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "NorthSomersetCouncil": { - "postcode": "BS49 5AA", - "skip_get_url": true, - "uprn": "24051674", - "url": "https://forms.n-somerset.gov.uk/Waste/CollectionSchedule", - "wiki_name": "North Somerset Council", - "wiki_note": "Pass the postcode and UPRN. You can find the UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "NorthTynesideCouncil": { - "postcode": "NE26 2TG", - "skip_get_url": true, - "uprn": "47097627", - "url": "https://my.northtyneside.gov.uk/category/81/bin-collection-dates", - "wiki_name": "North Tyneside Council", - "wiki_note": "Pass the postcode and UPRN. You can find the UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "NorthWestLeicestershire": { - "postcode": "DE74 2FZ", - "skip_get_url": true, - "uprn": "100030572613", - "url": "https://www.nwleics.gov.uk/pages/collection_information", - "web_driver": "http://selenium:4444", - "wiki_name": "North West Leicestershire Council", - "wiki_note": "Pass the postcode and UPRN. This parser requires a Selenium webdriver." - }, - "NorthYorkshire": { - "skip_get_url": true, - "uprn": "10093091235", - "url": "https://www.northyorks.gov.uk/bin-calendar/lookup", - "wiki_name": "North Yorkshire Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." 
- }, - "NorwichCityCouncil": { - "url": "https://www.norwich.gov.uk", - "wiki_command_url_override": "https://www.norwich.gov.uk", - "uprn": "100090888980", - "wiki_name": "Norwich City Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "NorthumberlandCouncil": { - "house_number": "22", - "postcode": "NE46 1UQ", - "skip_get_url": true, - "url": "https://www.northumberland.gov.uk/Waste/Bins/Bin-Calendars.aspx", - "web_driver": "http://selenium:4444", - "wiki_name": "Northumberland Council", - "wiki_note": "Pass the house number and postcode in their respective parameters. This parser requires a Selenium webdriver." - }, - "NottinghamCityCouncil": { - "skip_get_url": true, - "uprn": "100031540180", - "url": "https://geoserver.nottinghamcity.gov.uk/bincollections2/api/collection/100031540180", - "wiki_name": "Nottingham City Council", - "wiki_note": "Pass the UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "NuneatonBedworthBoroughCouncil": { - "url": "https://www.nuneatonandbedworth.gov.uk", - "wiki_name": "Nuneaton and Bedworth Borough Council", - "skip_get_url": true, - "house_number": "Newdigate Road", - "wiki_note": "Pass the name of the street ONLY in the house number parameter, wrapped in double quotes. Street name must match exactly as it appears on the council's website." - }, - "OldhamCouncil": { - "url": "https://portal.oldham.gov.uk/bincollectiondates/details?uprn=422000033556", - "wiki_name": "Oldham Council", - "wiki_note": "Replace UPRN in URL with your own UPRN." - }, - "OxfordCityCouncil": { - "url": "https://www.oxford.gov.uk", - "wiki_command_url_override": "https://www.oxford.gov.uk", - "uprn": "100120820551", - "postcode": "OX3 7QF", - "wiki_name": "Oxford City Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "PerthAndKinrossCouncil": { - "url": "https://www.pkc.gov.uk", - "wiki_command_url_override": "https://www.pkc.gov.uk", - "uprn": "124032322", - "wiki_name": "Perth and Kinross Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "PlymouthCouncil": { - "url": "https://www.plymouth.gov.uk", - "wiki_command_url_override": "https://www.plymouth.gov.uk", - "uprn": "100040420582", - "wiki_name": "Plymouth Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "PortsmouthCityCouncil": { - "postcode": "PO4 0LE", - "skip_get_url": true, - "uprn": "1775027504", - "url": "https://my.portsmouth.gov.uk/en/AchieveForms/?form_uri=sandbox-publish://AF-Process-26e27e70-f771-47b1-a34d-af276075cede/AF-Stage-cd7cc291-2e59-42cc-8c3f-1f93e132a2c9/definition.json&redirectlink=%2F&cancelRedirectLink=%2F", - "web_driver": "http://selenium:4444", - "wiki_name": "Portsmouth City Council", - "wiki_note": "Pass the postcode and UPRN. This parser requires a Selenium webdriver." - }, - "PowysCouncil": { - "house_number": "LANE COTTAGE", - "postcode": "HR3 5JS", - "skip_get_url": true, - "url": "https://www.powys.gov.uk", - "web_driver": "http://selenium:4444", - "wiki_name": "Powys Council", - "wiki_note": "Pass the house name/number and postcode in their respective parameters. This parser requires a Selenium webdriver." 
- }, - "PowysCouncil": { - "house_number": "LANE COTTAGE", - "postcode": "HR3 5JS", - "skip_get_url": true, - "url": "https://www.powys.gov.uk", - "web_driver": "http://selenium:4444", - "wiki_name": "Powys Council" - }, - "PrestonCityCouncil": { - "house_number": "Town Hall", - "postcode": "PR1 2RL", - "skip_get_url": true, - "url": "https://selfservice.preston.gov.uk/service/Forms/FindMyNearest.aspx?Service=bins", - "web_driver": "http://selenium:4444", - "wiki_name": "Preston City Council", - "wiki_note": "Pass the house number and postcode in their respective parameters. This parser requires a Selenium webdriver." - }, - "ReadingBoroughCouncil": { - "url": "https://api.reading.gov.uk/api/collections/310056735", - "wiki_command_url_override": "https://api.reading.gov.uk/api/collections/XXXXXXXX", - "wiki_name": "Reading Borough Council", - "wiki_note": "Replace XXXXXXXX with your property's UPRN." - }, - "ReigateAndBansteadBoroughCouncil": { - "skip_get_url": true, - "uprn": "68134867", - "url": "https://www.reigate-banstead.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "Reigate and Banstead Borough Council", - "wiki_note": "To get the UPRN, you can use [FindMyAddress](https://www.findmyaddress.co.uk/search). This parser requires a Selenium webdriver." - }, - "RenfrewshireCouncil": { - "house_number": "1", - "paon": "1", - "postcode": "PA29ED", - "skip_get_url": false, - "url": "https://www.renfrewshire.gov.uk/article/2320/Check-your-bin-collection-day", - "web_driver": "http://selenium:4444", - "wiki_name": "Renfrewshire Council", - "wiki_note": "Pass the house name/number and postcode in their respective parameters. This parser requires a Selenium webdriver." 
- }, - "RhonddaCynonTaffCouncil": { - "skip_get_url": true, - "uprn": "100100778320", - "url": "https://www.rctcbc.gov.uk/EN/Resident/RecyclingandWaste/RecyclingandWasteCollectionDays.aspx", - "wiki_name": "Rhondda Cynon Taff Council", - "wiki_note": "To get the UPRN, you can use [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, -"RochdaleCouncil": { - "postcode": "OL11 5BE", - "skip_get_url": true, - "uprn": "23049922", - "url": "https://webforms.rochdale.gov.uk/BinCalendar", - "wiki_name": "Rochdale Council", - "wiki_note": "Provide your UPRN and postcode. You can find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "RochfordCouncil": { - "url": "https://www.rochford.gov.uk/online-bin-collections-calendar", - "wiki_name": "Rochford Council", - "wiki_note": "No extra parameters are required. Dates presented should be read as 'week commencing'." - }, - "RotherDistrictCouncil": { - "uprn": "100061937338", - "url": "https://www.rother.gov.uk", - "wiki_name": "Rother District Council", - "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "RotherhamCouncil": { - "url": "https://www.rotherham.gov.uk/bin-collections?address=100050866000&submit=Submit", - "uprn": "100050866000", - "wiki_name": "Rotherham Council", - "wiki_command_url_override": "https://www.rotherham.gov.uk/bin-collections?address=XXXXXXXXX&submit=Submit", - "wiki_note": "Replace `XXXXXXXXX` with your UPRN in the URL. You can find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "RoyalBoroughofGreenwich": { - "house_number": "57", - "postcode": "BR7 6DN", - "skip_get_url": true, - "url": "https://www.royalgreenwich.gov.uk", - "wiki_name": "Royal Borough of Greenwich", - "wiki_note": "Provide your house number in the `house_number` parameter and your postcode in the `postcode` parameter." 
- }, - "RugbyBoroughCouncil": { - "postcode": "CV22 6LA", - "skip_get_url": true, - "uprn": "100070182634", - "url": "https://www.rugby.gov.uk/check-your-next-bin-day", - "wiki_name": "Rugby Borough Council", - "wiki_note": "Provide your UPRN and postcode. You can find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "RushcliffeBoroughCouncil": { - "postcode": "NG13 8TZ", - "skip_get_url": true, - "uprn": "3040040994", - "url": "https://www.rushcliffe.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "Rushcliffe Borough Council", - "wiki_note": "Provide your UPRN and postcode. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "RushmoorCouncil": { - "url": "https://www.rushmoor.gov.uk/Umbraco/Api/BinLookUpWorkAround/Get?selectedAddress=100060545034", - "wiki_command_url_override": "https://www.rushmoor.gov.uk/Umbraco/Api/BinLookUpWorkAround/Get?selectedAddress=XXXXXXXXXX", - "wiki_name": "Rushmoor Council", - "wiki_note": "Replace `XXXXXXXXXX` with your UPRN, which you can find using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "SalfordCityCouncil": { - "skip_get_url": true, - "uprn": "100011416709", - "url": "https://www.salford.gov.uk/bins-and-recycling/bin-collection-days/your-bin-collections", - "wiki_name": "Salford City Council", - "wiki_note": "Provide your UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "SandwellBoroughCouncil": { - "uprn": "10008755549", - "skip_get_url": true, - "url": "https://www.sandwell.gov.uk", - "wiki_name": "Sandwell Borough Council", - "wiki_note": "Pass the UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search)." 
- }, - "SeftonCouncil": { - "house_number": "1", - "postcode": "L20 6GG", - "url": "https://www.sefton.gov.uk", - "wiki_name": "Sefton Council", - "wiki_note": "Pass the postcode and house number in their respective arguments, both wrapped in quotes." - }, - "SevenoaksDistrictCouncil": { - "house_number": "60 Hever Road", - "postcode": "TN15 6EB", - "skip_get_url": true, - "url": "https://sevenoaks-dc-host01.oncreate.app/w/webpage/waste-collection-day", - "web_driver": "http://selenium:4444", - "wiki_name": "Sevenoaks District Council", - "wiki_note": "Pass the house name/number in the `house_number` parameter, wrapped in double quotes, and the postcode in the `postcode` parameter." - }, - "SheffieldCityCouncil": { - "url": "https://wasteservices.sheffield.gov.uk/property/100050931898", - "wiki_command_url_override": "https://wasteservices.sheffield.gov.uk/property/XXXXXXXXXXX", - "wiki_name": "Sheffield City Council", - "wiki_note": "Follow the instructions [here](https://wasteservices.sheffield.gov.uk/) until you get the 'Your bin collection dates and services' page, then copy the URL and replace the URL in the command." - }, - "ShropshireCouncil": { - "url": "https://bins.shropshire.gov.uk/property/100070034731", - "wiki_command_url_override": "https://bins.shropshire.gov.uk/property/XXXXXXXXXXX", - "wiki_name": "Shropshire Council", - "wiki_note": "Follow the instructions [here](https://bins.shropshire.gov.uk/) until you get the page showing your bin collection dates, then copy the URL and replace the URL in the command." - }, - "SolihullCouncil": { - "url": "https://digital.solihull.gov.uk/BinCollectionCalendar/Calendar.aspx?UPRN=100071005444", - "wiki_command_url_override": "https://digital.solihull.gov.uk/BinCollectionCalendar/Calendar.aspx?UPRN=XXXXXXXX", - "wiki_name": "Solihull Council", - "wiki_note": "Replace `XXXXXXXX` with your UPRN. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." 
- }, - "SomersetCouncil": { - "postcode": "TA6 4AA", - "skip_get_url": true, - "uprn": "10090857775", - "url": "https://www.somerset.gov.uk/", - "wiki_name": "Somerset Council", - "wiki_note": "Provide your UPRN and postcode. Find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "SouthAyrshireCouncil": { - "postcode": "KA19 7BN", - "skip_get_url": true, - "uprn": "141003134", - "url": "https://www.south-ayrshire.gov.uk/", - "wiki_name": "South Ayrshire Council", - "wiki_note": "Provide your UPRN and postcode. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "SouthCambridgeshireCouncil": { - "house_number": "53", - "postcode": "CB23 6GZ", - "skip_get_url": true, - "url": "https://www.scambs.gov.uk/recycling-and-bins/find-your-household-bin-collection-day/", - "wiki_name": "South Cambridgeshire Council", - "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter." - }, - "SouthDerbyshireDistrictCouncil": { - "url": "https://maps.southderbyshire.gov.uk/iShareLIVE.web//getdata.aspx?RequestType=LocalInfo&ms=mapsources/MyHouse&format=JSONP&group=Recycling%20Bins%20and%20Waste|Next%20Bin%20Collections&uid=", - "wiki_command_url_override": "https://maps.southderbyshire.gov.uk/iShareLIVE.web//getdata.aspx?RequestType=LocalInfo&ms=mapsources/MyHouse&format=JSONP&group=Recycling%20Bins%20and%20Waste|Next%20Bin%20Collections&uid=XXXXXXXX", - "uprn": "10000820668", - "wiki_name": "South Derbyshire District Council", - "wiki_note": "Replace `XXXXXXXX` with your UPRN. You can find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "SouthGloucestershireCouncil": { - "skip_get_url": true, - "uprn": "566419", - "url": "https://beta.southglos.gov.uk/waste-and-recycling-collection-date", - "wiki_name": "South Gloucestershire Council", - "wiki_note": "Provide your UPRN. 
You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "SouthHamsDistrictCouncil": { - "uprn": "10004742851", - "url": "https://www.southhams.gov.uk", - "wiki_name": "South Hams District Council", - "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "SouthKestevenDistrictCouncil": { - "house_number": "2 Althorpe Close, Market Deeping, PE6 8BL", - "postcode": "PE68BL", - "skip_get_url": true, - "url": "https://pre.southkesteven.gov.uk/BinSearch.aspx", - "web_driver": "http://selenium:4444", - "wiki_name": "South Kesteven District Council", - "wiki_note": "Provide your full address in the `house_number` parameter and your postcode in the `postcode` parameter." - }, - "SouthLanarkshireCouncil": { - "url": "https://www.southlanarkshire.gov.uk/directory_record/579973/abbeyhill_crescent_lesmahagow", - "wiki_command_url_override": "https://www.southlanarkshire.gov.uk/directory_record/XXXXX/XXXXX", - "wiki_name": "South Lanarkshire Council", - "wiki_note": "Follow the instructions [here](https://www.southlanarkshire.gov.uk/info/200156/bins_and_recycling/1670/bin_collections_and_calendar) until you get the page that shows the weekly collections for your street, then copy the URL and replace the URL in the command." - }, - "SouthNorfolkCouncil": { - "skip_get_url": true, - "uprn": "2630102526", - "url": "https://www.southnorfolkandbroadland.gov.uk/rubbish-recycling/south-norfolk-bin-collection-day-finder", - "wiki_name": "South Norfolk Council", - "wiki_note": "Provide your UPRN. Find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "SouthOxfordshireCouncil": { - "skip_get_url": true, - "uprn": "10033002851", - "url": "https://www.southoxon.gov.uk/south-oxfordshire-district-council/recycling-rubbish-and-waste/when-is-your-collection-day/", - "wiki_name": "South Oxfordshire Council", - "wiki_note": "Provide your UPRN. 
Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to locate it." - }, - "SouthRibbleCouncil": { - "url": "https://www.southribble.gov.uk", - "wiki_command_url_override": "https://www.southribble.gov.uk", - "uprn": "010013246384", - "wiki_name": "South Ribble Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "SouthStaffordshireDistrictCouncil": { - "uprn": "200004523954", - "url": "https://www.sstaffs.gov.uk/where-i-live?uprn=200004523954", - "wiki_name": "South Staffordshire District Council", - "wiki_note": "The URL needs to be `https://www.sstaffs.gov.uk/where-i-live?uprn=`. Replace `` with your UPRN." - }, - "SouthTynesideCouncil": { - "house_number": "1", - "postcode": "NE33 3JW", - "skip_get_url": true, - "url": "https://www.southtyneside.gov.uk/article/33352/Bin-collection-dates", - "wiki_name": "South Tyneside Council", - "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter." - }, - "SouthwarkCouncil": { - "url": "https://services.southwark.gov.uk/bins/lookup/", - "wiki_command_url_override": "https://services.southwark.gov.uk/bins/lookup/XXXXXXXX", - "uprn": "200003469271", - "wiki_name": "Southwark Council", - "wiki_note": "Replace `XXXXXXXX` with your UPRN. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "StAlbansCityAndDistrictCouncil": { - "skip_get_url": true, - "uprn": "100081153583", - "url": "https://gis.stalbans.gov.uk/NoticeBoard9/VeoliaProxy.NoticeBoard.asmx/GetServicesByUprnAndNoticeBoard", - "wiki_name": "St Albans City and District Council", - "wiki_note": "Provide your UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." 
- }, - "StevenageBoroughCouncil": { - "uprn": "100080878852", - "url": "https://www.stevenage.gov.uk", - "wiki_name": "Stevenage Borough Council", - "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "StHelensBC": { - "house_number": "15", - "postcode": "L34 2GA", - "skip_get_url": true, - "url": "https://www.sthelens.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "St Helens Borough Council", - "wiki_note": "Pass the house name/number in the house number parameter, wrapped in double quotes" - }, - "StaffordBoroughCouncil": { - "uprn": "100032203010", - "url": "https://www.staffordbc.gov.uk/address/100032203010", - "wiki_name": "Stafford Borough Council", - "wiki_note": "The URL needs to be `https://www.staffordbc.gov.uk/address/`. Replace `` with your UPRN." - }, - "StaffordshireMoorlandsDistrictCouncil": { - "postcode": "ST8 6HN", - "skip_get_url": true, - "uprn": "100031863037", - "url": "https://www.staffsmoorlands.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "Staffordshire Moorlands District Council", - "wiki_note": "Provide your UPRN and postcode. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "StockportBoroughCouncil": { - "url": "https://myaccount.stockport.gov.uk/bin-collections/show/100011434401", - "wiki_command_url_override": "https://myaccount.stockport.gov.uk/bin-collections/show/XXXXXXXX", - "wiki_name": "Stockport Borough Council", - "wiki_note": "Replace `XXXXXXXX` with your UPRN." - }, - "StocktonOnTeesCouncil": { - "house_number": "24", - "postcode": "TS20 2RD", - "skip_get_url": true, - "url": "https://www.stockton.gov.uk", - "web_driver": "http://selenium:4444", - "wiki_name": "Stockton On Tees Council", - "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter." 
- }, - "StocktonOnTeesCouncil": { - "house_number": "24", - "postcode": "TS20 2RD", - "skip_get_url": true, - "url": "https://www.stockton.gov.uk", - "web_driver": "http://selenium:4444", - "wiki_name": "Stockton On Tees Council" - }, - "StokeOnTrentCityCouncil": { - "url": "https://www.stoke.gov.uk/jadu/custom/webserviceLookUps/BarTecWebServices_missed_bin_calendar.php?UPRN=3455121482", - "wiki_command_url_override": "https://www.stoke.gov.uk/jadu/custom/webserviceLookUps/BarTecWebServices_missed_bin_calendar.php?UPRN=XXXXXXXXXX", - "wiki_name": "Stoke-on-Trent City Council", - "wiki_note": "Replace `XXXXXXXXXX` with your property's UPRN." - }, - "StratfordUponAvonCouncil": { - "skip_get_url": true, - "uprn": "100070212698", - "url": "https://www.stratford.gov.uk/waste-recycling/when-we-collect.cfm/part/calendar", - "wiki_name": "Stratford Upon Avon Council", - "wiki_note": "Provide your UPRN. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find it." - }, - "StroudDistrictCouncil": { - "postcode": "GL10 3BH", - "uprn": "100120512183", - "url": "https://www.stroud.gov.uk/my-house?uprn=100120512183&postcode=GL10+3BH", - "wiki_name": "Stroud District Council", - "wiki_note": "Provide your UPRN and postcode. Replace the UPRN and postcode in the URL with your own." - }, - "SunderlandCityCouncil": { - "house_number": "13", - "postcode": "SR4 6BJ", - "skip_get_url": true, - "url": "https://webapps.sunderland.gov.uk/WEBAPPS/WSS/Sunderland_Portal/Forms/bindaychecker.aspx", - "web_driver": "http://selenium:4444", - "wiki_name": "Sunderland City Council", - "wiki_note": "Provide your house number (without quotes) and postcode (wrapped in double quotes with a space)." 
- }, - "SwaleBoroughCouncil": { - "postcode": "ME12 2NQ", - "skip_get_url": true, - "house_number": "81", - "web_driver": "http://selenium:4444", - "url": "https://swale.gov.uk/bins-littering-and-the-environment/bins/collection-days", - "wiki_name": "Swale Borough Council", - "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter." - }, - "SwanseaCouncil": { - "postcode": "SA43PQ", - "skip_get_url": true, - "uprn": "100100324821", - "url": "https://www1.swansea.gov.uk/recyclingsearch/", - "wiki_name": "Swansea Council", - "wiki_note": "Provide your UPRN and postcode. Find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "SwindonBoroughCouncil": { - "url": "https://www.swindon.gov.uk", - "wiki_command_url_override": "https://www.swindon.gov.uk", - "uprn": "10022793351", - "wiki_name": "Swindon Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "TamesideMBCouncil": { - "skip_get_url": true, - "uprn": "100012835362", - "url": "http://lite.tameside.gov.uk/BinCollections/CollectionService.svc/GetBinCollection", - "wiki_name": "Tameside Metropolitan Borough Council", - "wiki_note": "Provide your UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "TandridgeDistrictCouncil": { - "skip_get_url": true, - "uprn": "100062160432", - "url": "https://tdcws01.tandridge.gov.uk/TDCWebAppsPublic/tfaBranded/408?utm_source=pressrelease&utm_medium=smposts&utm_campaign=check_my_bin_day", - "wiki_name": "Tandridge District Council", - "wiki_note": "Provide your UPRN. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to locate it." 
- }, - "TeignbridgeCouncil": { - "url": "https://www.google.co.uk", - "wiki_command_url_override": "https://www.google.co.uk", - "uprn": "100040338776", - "web_driver": "http://selenium:4444", - "wiki_name": "Teignbridge Council", - "wiki_note": "Provide Google as the URL as the real URL breaks the integration. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "TeignbridgeCouncil": { - "url": "https://www.google.co.uk", - "wiki_command_url_override": "https://www.google.co.uk", - "uprn": "100040338776", - "web_driver": "http://selenium:4444", - "wiki_name": "Teignbridge Council", - "wiki_note": "Provide Google as the URL as the real URL breaks the integration. You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "TelfordAndWrekinCouncil": { - "skip_get_url": true, - "uprn": "000452015013", - "url": "https://dac.telford.gov.uk/bindayfinder/", - "wiki_name": "Telford and Wrekin Council", - "wiki_note": "Provide your UPRN. Find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "TendringDistrictCouncil": { - "postcode": "CO15 4EU", - "skip_get_url": true, - "uprn": "100090604247", - "url": "https://tendring-self.achieveservice.com/en/service/Rubbish_and_recycling_collection_days", - "web_driver": "http://selenium:4444", - "wiki_name": "Tendring District Council", - "wiki_note": "Provide your UPRN and postcode. Find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "TestValleyBoroughCouncil": { - "postcode": "SO51 9ZD", - "skip_get_url": true, - "uprn": "200010012019", - "url": "https://testvalley.gov.uk/wasteandrecycling/when-are-my-bins-collected", - "wiki_name": "Test Valley Borough Council", - "wiki_note": "Provide your UPRN and postcode. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." 
- }, - "ThanetDistrictCouncil": { - "uprn": "100061111858", - "url": "https://www.thanet.gov.uk", - "wiki_name": "Thanet District Council", - "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "ThreeRiversDistrictCouncil": { - "postcode": "WD3 7AZ", - "skip_get_url": true, - "uprn": "100080913662", - "url": "https://my.threerivers.gov.uk/en/AchieveForms/?mode=fill&consentMessage=yes&form_uri=sandbox-publish://AF-Process-52df96e3-992a-4b39-bba3-06cfaabcb42b/AF-Stage-01ee28aa-1584-442c-8d1f-119b6e27114a/definition.json&process=1&process_uri=sandbox-processes://AF-Process-52df96e3-992a-4b39-bba3-06cfaabcb42b&process_id=AF-Process-52df96e3-992a-4b39-bba3-06cfaabcb42b&noLoginPrompt=1", - "web_driver": "http://selenium:4444", - "wiki_name": "Three Rivers District Council", - "wiki_note": "Provide your UPRN and postcode. Find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "ThurrockCouncil": { - "skip_get_url": true, - "house_number": "Monday", - "postcode": "Round A", - "url": "https://www.thurrock.gov.uk", - "wiki_name": "Thurrock Council", - "wiki_note": "Use the House Number field to pass the DAY of the week for your collections. [Monday/Tuesday/Wednesday/Thursday/Friday]. Use the 'postcode' field to pass the ROUND (wrapped in quotes) for your collections. [Round A/Round B]." - }, - "TonbridgeAndMallingBC": { - "postcode": "ME19 4JS", - "skip_get_url": true, - "uprn": "10002914589", - "url": "https://www.tmbc.gov.uk/", - "wiki_name": "Tonbridge and Malling Borough Council", - "wiki_note": "Provide your UPRN and postcode." - }, - "TorbayCouncil": { - "skip_get_url": true, - "uprn": "10024000295", - "url": "https://www.torbay.gov.uk/recycling/bin-collections/", - "wiki_name": "Torbay Council", - "wiki_note": "Provide your UPRN. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find it." 
- }, - "TorridgeDistrictCouncil": { - "skip_get_url": true, - "uprn": "10091078762", - "url": "https://collections-torridge.azurewebsites.net/WebService2.asmx", - "wiki_name": "Torridge District Council", - "wiki_note": "Provide your UPRN." - }, - "TunbridgeWellsCouncil": { - "url": "https://tunbridgewells.gov.uk", - "wiki_command_url_override": "https://tunbridgewells.gov.uk", - "uprn": "10090058289", - "wiki_name": "Tunbridge Wells Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "UttlesfordDistrictCouncil": { - "house_number": "72, Birchanger Lane", - "postcode": "CM23 5QF", - "skip_get_url": true, - "uprn": "100090643434", - "url": "https://bins.uttlesford.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "Uttlesford District Council", - "wiki_note": "Provide your full address in the `house_number` parameter and your postcode in the `postcode` parameter." - }, - "ValeofGlamorganCouncil": { - "skip_get_url": true, - "uprn": "64029020", - "url": "https://www.valeofglamorgan.gov.uk/en/living/Recycling-and-Waste/", - "wiki_name": "Vale of Glamorgan Council", - "wiki_note": "Provide your UPRN. Find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "ValeofWhiteHorseCouncil": { - "custom_component_show_url_field": false, - "skip_get_url": true, - "uprn": "100121391443", - "url": "https://eform.whitehorsedc.gov.uk/ebase/BINZONE_DESKTOP.eb", - "wiki_name": "Vale of White Horse Council", - "wiki_note": "Provide your UPRN." 
- }, - "WakefieldCityCouncil": { - "custom_component_show_url_field": true, - "skip_get_url": true, - "url": "https://www.wakefield.gov.uk/where-i-live/?uprn=63035490&a=115%20Elizabeth%20Drive%20Castleford%20WF10%203RR&usrn=41801243&e=445418&n=426091&p=WF10%203RR", - "web_driver": "http://selenium:4444", - "wiki_command_url_override": "https://www.wakefield.gov.uk/where-i-live/?uprn=XXXXXXXXXXX&a=XXXXXXXXXXX&usrn=XXXXXXXXXXX&e=XXXXXXXXXXX&n=XXXXXXXXXXX&p=XXXXXXXXXXX", - "wiki_name": "Wakefield City Council", - "wiki_note": "Follow the instructions [here](https://www.wakefield.gov.uk/where-i-live/) until you get the page that includes a 'Bin Collections' section, then copy the URL and replace the URL in the command." - }, - "WalsallCouncil": { - "url": "https://cag.walsall.gov.uk/", - "wiki_command_url_override": "https://cag.walsall.gov.uk/", - "uprn": "100071080513", - "wiki_name": "Walsall Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "WalthamForest": { - "house_number": "17 Chingford Road, Walthamstow", - "postcode": "E17 4PW", - "skip_get_url": true, - "uprn": "200001415697", - "url": "https://portal.walthamforest.gov.uk/AchieveForms/?mode=fill&consentMessage=yes&form_uri=sandbox-publish://AF-Process-d62ccdd2-3de9-48eb-a229-8e20cbdd6393/AF-Stage-8bf39bf9-5391-4c24-857f-0dc2025c67f4/definition.json&process=1&process_uri=sandbox-processes://AF-Process-d62ccdd2-3de9-48eb-a229-8e20cbdd6393&process_id=AF-Process-d62ccdd2-3de9-48eb-a229-8e20cbdd6393", - "web_driver": "http://selenium:4444", - "wiki_name": "Waltham Forest", - "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." 
- }, - "WarringtonBoroughCouncil": { - "url": "https://www.warrington.gov.uk", - "wiki_command_url_override": "https://www.warrington.gov.uk", - "uprn": "10094964379", - "wiki_name": "Warrington Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "WarwickDistrictCouncil": { - "url": "https://estates7.warwickdc.gov.uk/PropertyPortal/Property/Recycling/100070263793", - "wiki_command_url_override": "https://estates7.warwickdc.gov.uk/PropertyPortal/Property/Recycling/XXXXXXXX", - "wiki_name": "Warwick District Council", - "wiki_note": "Replace `XXXXXXXX` with your UPRN." - }, - "WatfordBoroughCouncil": { - "url": "https://www.watford.gov.uk", - "wiki_command_url_override": "https://www.watford.gov.uk", - "uprn": "100080942183", - "wiki_name": "Watford Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "WatfordBoroughCouncil": { - "url": "https://www.watford.gov.uk", - "wiki_command_url_override": "https://www.watford.gov.uk", - "uprn": "100080942183", - "wiki_name": "Watford Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "WaverleyBoroughCouncil": { - "house_number": "23", - "postcode": "GU9 9QG", - "skip_get_url": true, - "url": "https://wav-wrp.whitespacews.com/", - "wiki_name": "Waverley Borough Council", - "wiki_note": "Follow the instructions [here](https://wav-wrp.whitespacews.com/#!) until you get the page that shows your next scheduled collections. Then take the number from `pIndex=NUMBER` in the URL and pass it as the `-n` parameter along with your postcode in `-p`." - }, - "WealdenDistrictCouncil": { - "skip_get_url": true, - "uprn": "10033413624", - "url": "https://www.wealden.gov.uk/recycling-and-waste/bin-search/", - "wiki_name": "Wealden District Council", - "wiki_note": "Provide your UPRN. 
Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find it." - }, - "WelhatCouncil": { - "postcode": "AL8 6HQ", - "uprn": "100080982825", - "url": "https://www.welhat.gov.uk/xfp/form/214", - "wiki_name": "Welhat Council", - "wiki_note": "Provide your UPRN and postcode." - }, - "WestBerkshireCouncil": { - "house_number": "8", - "postcode": "RG14 7DP", - "skip_get_url": true, - "url": "https://www.westberks.gov.uk/binday", - "web_driver": "http://selenium:4444", - "wiki_name": "West Berkshire Council", - "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter." - }, - "WestLancashireBoroughCouncil": { - "url": "https://www.westlancs.gov.uk", - "uprn": "10012343339", - "postcode": "WN8 0HR", - "wiki_name": "West Lancashire Borough Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "WestLindseyDistrictCouncil": { - "house_number": "PRIVATE ACCOMMODATION", - "postcode": "LN8 2AR", - "skip_get_url": true, - "url": "https://www.west-lindsey.gov.uk/", - "wiki_name": "West Lindsey District Council", - "wiki_note": "Provide your house name/number in the `house_number` parameter, and postcode in the `postcode` parameter, both wrapped in double quotes. If multiple results are returned, the first will be used." - }, - "WestLothianCouncil": { - "house_number": "1 GOSCHEN PLACE", - "postcode": "EH52 5JE", - "skip_get_url": true, - "url": "https://www.westlothian.gov.uk/", - "web_driver": "http://selenium:4444", - "wiki_name": "West Lothian Council", - "wiki_note": "Provide your house name/number in the `house_number` parameter (wrapped in double quotes) and your postcode in the `postcode` parameter." 
- }, - "WestMorlandAndFurness": { - "url": "https://www.westmorlandandfurness.gov.uk/", - "wiki_command_url_override": "https://www.westmorlandandfurness.gov.uk/", - "uprn": "100110353478", - "wiki_name": "West Morland and Furness Council", - "wiki_note": "Provide your UPRN. You can find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "WestNorthamptonshireCouncil": { - "uprn": "28056796", - "skip_get_url": true, - "url": "https://www.westnorthants.gov.uk", - "wiki_name": "West Northamptonshire Council", - "wiki_note": "Provide your UPRN. You can find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "WestOxfordshireDistrictCouncil": { - "house_number": "24", - "postcode": "OX28 1YA", - "skip_get_url": true, - "url": "https://community.westoxon.gov.uk/s/waste-collection-enquiry", - "web_driver": "http://selenium:4444", - "wiki_name": "West Oxfordshire District Council", - "wiki_note": "Provide your house number in the `house_number` parameter and your postcode in the `postcode` parameter." - }, - "WestSuffolkCouncil": { - "postcode": "IP28 6DR", - "skip_get_url": true, - "uprn": "10009739960", - "url": "https://maps.westsuffolk.gov.uk/MyWestSuffolk.aspx", - "wiki_name": "West Suffolk Council", - "wiki_note": "Provide your UPRN and postcode. You can find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "WiganBoroughCouncil": { - "postcode": "WN2 4UQ", - "skip_get_url": true, - "uprn": "010093942934", - "url": "https://apps.wigan.gov.uk/MyNeighbourhood/", - "wiki_name": "Wigan Borough Council", - "wiki_note": "Provide your UPRN and postcode. Find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." 
- }, - "WiltshireCouncil": { - "postcode": "SN8 3TE", - "skip_get_url": true, - "uprn": "100120982570", - "url": "https://ilambassadorformsprod.azurewebsites.net/wastecollectiondays/index", - "wiki_name": "Wiltshire Council", - "wiki_note": "Provide your UPRN and postcode. Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "WinchesterCityCouncil": { - "house_number": "12", - "paon": "12", - "postcode": "SO23 7GA", - "skip_get_url": false, - "url": "https://iportal.itouchvision.com/icollectionday/collection-day", - "web_driver": "http://selenium:4444", - "wiki_name": "Winchester City Council", - "wiki_note": "Provide your house name/number in the `house_number` parameter (wrapped in double quotes) and your postcode in the `postcode` parameter." - }, - "WindsorAndMaidenheadCouncil": { - "web_driver": "http://selenium:4444", - "uprn": "100080371082", - "skip_get_url": true, - "url": "https://forms.rbwm.gov.uk/bincollections?uprn=", - "wiki_name": "Windsor and Maidenhead Council", - "wiki_note": "Provide your UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "WirralCouncil": { - "url": "https://www.wirral.gov.uk", - "wiki_command_url_override": "https://www.wirral.gov.uk", - "uprn": "Vernon Avenue,Seacombe", - "wiki_name": "Wirral Council", - "wiki_note": "In the `uprn` field, enter your street name and suburb separated by a comma (e.g., 'Vernon Avenue,Seacombe')." - }, - "WokingBoroughCouncil": { - "house_number": "2", - "postcode": "GU21 4JY", - "skip_get_url": true, - "url": "https://asjwsw-wrpwokingmunicipal-live.whitespacews.com/", - "wiki_name": "Woking Borough Council / Joint Waste Solutions", - "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter. This works with all collection areas that use Joint Waste Solutions." 
- }, - "WokinghamBoroughCouncil": { - "house_number": "90", - "postcode": "RG40 2HR", - "skip_get_url": true, - "url": "https://www.wokingham.gov.uk/rubbish-and-recycling/waste-collection/find-your-bin-collection-day", - "web_driver": "http://selenium:4444", - "wiki_name": "Wokingham Borough Council", - "wiki_note": "Provide your house number in the `house_number` parameter and postcode in the `postcode` parameter." - }, - "WorcesterCityCouncil": { - "url": "https://www.worcester.gov.uk", - "wiki_command_url_override": "https://www.worcester.gov.uk", - "uprn": "100120650345", - "wiki_name": "Worcester City Council", - "wiki_note": "Provide your UPRN. You can find it using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "WolverhamptonCityCouncil": { - "uprn": "100071205205", - "postcode": "WV3 9NZ", - "url": "https://www.wolverhampton.gov.uk", - "wiki_name": "Wolverhampton City Council", - "wiki_note": "Use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find your UPRN." - }, - "WorcesterCityCouncil": { - "url": "https://www.Worcester.gov.uk", - "wiki_command_url_override": "https://www.Worcester.gov.uk", - "uprn": "100120650345", - "wiki_name": "Worcester City Council", - "wiki_note": "You will need to use [FindMyAddress](https://www.findmyaddress.co.uk/search) to find the UPRN." - }, - "WychavonDistrictCouncil": { - "postcode": "WR3 7RU", - "skip_get_url": true, - "uprn": "100120716273", - "url": "https://selfservice.wychavon.gov.uk/wdcroundlookup/wdc_search.jsp", - "web_driver": "http://selenium:4444", - "wiki_name": "Wychavon District Council", - "wiki_note": "Provide your UPRN and postcode. Find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search)." - }, - "WyreCouncil": { - "postcode": "FY6 8HG", - "skip_get_url": true, - "uprn": "10003519994", - "url": "https://www.wyre.gov.uk/bins-rubbish-recycling", - "wiki_name": "Wyre Council", - "wiki_note": "Provide your UPRN and postcode. 
Find your UPRN using [FindMyAddress](https://www.findmyaddress.co.uk/search). The postcode should be wrapped in double quotes with a space in the middle." - }, - "WyreForestDistrictCouncil": { - "skip_get_url": true, - "house_number": "Monday", - "url": "https://www.wyreforestdc.gov.uk", - "wiki_name": "Wyre Forest District Council", - "wiki_note": "Use the House Number field to pass the DAY of the week for your collections. [Monday/Tuesday/Wednesday/Thursday/Friday/Saturday/Sunday]." - }, - "YorkCouncil": { - "skip_get_url": true, - "uprn": "100050535540", - "url": "https://waste-api.york.gov.uk/api/Collections/GetBinCollectionDataForUprn/", - "wiki_name": "York Council", - "wiki_note": "Provide your UPRN." - } -} diff --git a/uk_bin_collection/tests/output.schema b/uk_bin_collection/tests/output.schema deleted file mode 100644 index e3fd01a0f2..0000000000 --- a/uk_bin_collection/tests/output.schema +++ /dev/null @@ -1,41 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-06/schema#", - "$ref": "#/definitions/BinData", - "definitions": { - "BinData": { - "type": "object", - "additionalProperties": false, - "properties": { - "bins": { - "type": "array", - "items": { - "$ref": "#/definitions/Bin" - }, - "minItems": 1 - } - }, - "required": [ - "bins" - ], - "title": "BinData" - }, - "Bin": { - "type": "object", - "additionalProperties": false, - "properties": { - "type": { - "type": "string" - }, - "collectionDate": { - "type": "string", - "pattern": "\\d{2}/\\d{2}/\\d{4}" - } - }, - "required": [ - "collectionDate", - "type" - ], - "title": "Bin" - } - } -} diff --git a/uk_bin_collection/tests/step_defs/step_helpers/file_handler.py b/uk_bin_collection/tests/step_defs/step_helpers/file_handler.py deleted file mode 100644 index e826562992..0000000000 --- a/uk_bin_collection/tests/step_defs/step_helpers/file_handler.py +++ /dev/null @@ -1,46 +0,0 @@ -import json -import logging -from jsonschema import validate, ValidationError -from pathlib import Path -from typing 
import Any, Dict - -logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s") - -# Dynamically compute the base path relative to this file's location -current_file_path = Path(__file__).resolve() -BASE_PATH = current_file_path.parent.parent.parent.parent / "tests" - - -def load_json_file(file_name: str) -> Dict[str, Any]: - file_path = BASE_PATH / file_name - try: - with open(file_path, "r") as f: - data = json.load(f) - logging.info(f"{file_name} file successfully loaded") - return data - except UnicodeDecodeError as e: - logging.error(f"Failed to load {file_name}: {e}") - raise - except json.JSONDecodeError as e: - logging.error(f"Failed to parse JSON in {file_name}: {e}") - raise - - -def validate_json(json_str: str) -> Dict[str, Any]: - try: - return json.loads(json_str) - except json.JSONDecodeError as err: - logging.error(f"JSON validation error: {err}") - raise - - -def validate_json_schema(json_str: str, schema: Dict[str, Any]) -> bool: - json_data = validate_json(json_str) - try: - validate(instance=json_data, schema=schema) - except ValidationError as err: - logging.error(f"Schema validation error: {err}") - logging.info(f"Data: {json_str}") - logging.info(f"Schema: {schema}") - raise - return True diff --git a/uk_bin_collection/tests/step_defs/test_validate_council.py b/uk_bin_collection/tests/step_defs/test_validate_council.py deleted file mode 100644 index d48dc77dd3..0000000000 --- a/uk_bin_collection/tests/step_defs/test_validate_council.py +++ /dev/null @@ -1,116 +0,0 @@ -import json -import logging -import traceback -from functools import wraps -from typing import Any, Callable, Generator - -import pytest -from pytest_bdd import given, parsers, scenario, then, when -from step_helpers import file_handler - -from uk_bin_collection.uk_bin_collection import collect_data - -logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s") - - -def get_council_list(): - json_file_path = "uk_bin_collection/tests/input.json" 
# Specify the correct path to the JSON file - with open(json_file_path, "r") as file: - data = json.load(file) - logging.info(f"Council List: {list(data.keys())}") - return list(data.keys()) - - -@pytest.fixture(params=get_council_list()) -def council(request): - print(f"Running test for council: {request.param}") - return request.param - - -@scenario("../features/validate_council_outputs.feature", "Validate Council Output") -@pytest.mark.no_homeassistant # Apply marker here -def test_scenario_outline(council) -> None: - pass - - -def handle_test_errors(func: Callable[..., Any]) -> Callable[..., Any]: - @wraps(func) - def wrapper(*args: Any, **kwargs: Any) -> Any: - try: - return func(*args, **kwargs) - except Exception as e: - logging.error(f"Error in test '{func.__name__}': {e}") - logging.error(traceback.format_exc()) - raise e - - return wrapper - - -class Context: - def __init__(self): - self.metadata: dict[str, Any] = {} - self.council: str = "" - self.parse_result: Any = None - - -@pytest.fixture(scope="module") -def context(): - return Context() - - -@handle_test_errors -@given(parsers.parse("the council")) -def get_council_step(context, council) -> None: - council_input_data = file_handler.load_json_file("input.json") - context.metadata = council_input_data[council] - context.council = council - - -@handle_test_errors -@when(parsers.parse("we scrape the data from the council")) -def scrape_step( - context: Any, headless_mode: str, local_browser: str, selenium_url: str -) -> None: - - args = [context.council, context.metadata["url"]] - - if "uprn" in context.metadata: - uprn = context.metadata["uprn"] - args.append(f"-u={uprn}") - if "postcode" in context.metadata: - postcode = context.metadata["postcode"] - args.append(f"-p={postcode}") - if "house_number" in context.metadata: - house_number = context.metadata["house_number"] - args.append(f"-n={house_number}") - if "usrn" in context.metadata: - usrn = context.metadata["usrn"] - args.append(f"-us={usrn}") 
- if headless_mode == "True": - args.append("--headless") - else: - args.append("--not-headless") - - if local_browser == "False": - args.append(f"-w={selenium_url}") - if "skip_get_url" in context.metadata: - args.append("-s") - - CollectData = collect_data.UKBinCollectionApp() - CollectData.set_args(args) - context.parse_result = CollectData.run() - - -@handle_test_errors -@then("the result is valid json") -def validate_json_step(context: Any) -> None: - assert file_handler.validate_json(context.parse_result), "Invalid JSON output" - - -@handle_test_errors -@then("the output should validate against the schema") -def validate_output_step(context: Any) -> None: - council_schema = file_handler.load_json_file("output.schema") - assert file_handler.validate_json_schema( - context.parse_result, council_schema - ), "Schema validation failed" diff --git a/uk_bin_collection/tests/test_collect_data.py b/uk_bin_collection/tests/test_collect_data.py deleted file mode 100644 index 086b9ff12f..0000000000 --- a/uk_bin_collection/tests/test_collect_data.py +++ /dev/null @@ -1,72 +0,0 @@ -from unittest.mock import MagicMock, patch -import argparse -import pytest -from uk_bin_collection.collect_data import UKBinCollectionApp, import_council_module - - -# Test UKBinCollectionApp setup_arg_parser -def test_setup_arg_parser(): - app = UKBinCollectionApp() - app.setup_arg_parser() - - # Assert that the argument parser has the correct arguments - assert isinstance(app.parser, argparse.ArgumentParser) - args = app.parser._actions - arg_names = [action.dest for action in args] - - expected_args = [ - "module", - "URL", - "postcode", - "number", - "skip_get_url", - "uprn", - "web_driver", - "headless", - "local_browser", - "dev_mode", - ] - assert all(arg in arg_names for arg in expected_args) - - -# Test UKBinCollectionApp set_args -def test_set_args(): - app = UKBinCollectionApp() - app.setup_arg_parser() - - # Test valid args - args = ["council_module", "http://example.com", 
"--postcode", "AB1 2CD"] - app.set_args(args) - - assert app.parsed_args.module == "council_module" - assert app.parsed_args.URL == "http://example.com" - assert app.parsed_args.postcode == "AB1 2CD" - - -# Test UKBinCollectionApp client_code method -def test_client_code(): - app = UKBinCollectionApp() - mock_get_bin_data_class = MagicMock() - - # Run the client_code and ensure that template_method is called - app.client_code(mock_get_bin_data_class, "http://example.com", postcode="AB1 2CD") - mock_get_bin_data_class.template_method.assert_called_once_with( - "http://example.com", postcode="AB1 2CD" - ) - - -# Test the run() function with logging setup -@patch("uk_bin_collection.collect_data.setup_logging") # Correct patch path -@patch("uk_bin_collection.collect_data.UKBinCollectionApp.run") # Correct patch path -@patch("sys.argv", ["uk_bin_collection.py", "council_module", "http://example.com"]) -def test_run_function(mock_app_run, mock_setup_logging): - from uk_bin_collection.collect_data import run - - mock_setup_logging.return_value = MagicMock() - mock_app_run.return_value = None - - run() - - # Ensure logging was set up and the app run method was called - mock_setup_logging.assert_called_once() - mock_app_run.assert_called_once() diff --git a/uk_bin_collection/tests/test_common_functions.py b/uk_bin_collection/tests/test_common_functions.py deleted file mode 100644 index 2e009e97e6..0000000000 --- a/uk_bin_collection/tests/test_common_functions.py +++ /dev/null @@ -1,465 +0,0 @@ -from contextlib import redirect_stdout -from io import StringIO -from unittest import mock -from unittest.mock import MagicMock, mock_open, patch - -import pytest -from selenium.common.exceptions import WebDriverException -from uk_bin_collection.common import * -from urllib3.exceptions import MaxRetryError - - -def test_check_postcode_valid(): - valid_postcode = "SW1A 1AA" - result = check_postcode(valid_postcode) - assert result is True - - -def test_check_postcode_invalid(): - 
invalid_postcode = "BADPOSTCODE" - with pytest.raises(ValueError) as exc_info: - result = check_postcode(invalid_postcode) - assert exc_info._excinfo[1].args[0] == "Exception: Invalid postcode Status: 404" - assert exc_info.type == ValueError - - -def test_check_paon(): - valid_house_num = "1" - result = check_paon(valid_house_num) - assert result is True - - -def test_check_paon_invalid(capfd): - invalid_house_num = None - with pytest.raises(SystemExit) as exc_info: - result = check_paon(invalid_house_num) - out, err = capfd.readouterr() - assert out.startswith("Exception encountered: Invalid house number") - assert exc_info.type == SystemExit - assert exc_info.value.code == 1 - - -def test_get_data_check_uprn(): - uprn = "1" - result = check_uprn(uprn) - assert result is True - - -def test_get_data_check_uprn_exception(capfd): - uprn = None - result = check_uprn(uprn) - out, err = capfd.readouterr() - assert out.startswith("Exception encountered: ") - - -def test_get_data_check_usrn(): - usrn = "1" - result = check_usrn(usrn) - assert result is True - - -def test_get_data_check_usrn_exception(capfd): - usrn = None - result = check_usrn(usrn) - out, err = capfd.readouterr() - assert out.startswith("Exception encountered: ") - - -def test_get_date_with_ordinal(): - date_number = 1 - result = get_date_with_ordinal(date_number) - assert result == "1st" - - -def test_get_date_with_ordinal_exception(): - date_number = "a" - with pytest.raises(TypeError) as exc_info: - result = get_date_with_ordinal(date_number) - assert exc_info.type == TypeError - assert ( - exc_info.value.args[0] == "not all arguments converted during string formatting" - ) - - -def test_parse_header(): - input_header = "i:am|:a:test:header|value:test" - result = parse_header(input_header) - assert result == {"i": "am", ":a": "test:header", "value": "test"} - assert type(result) is dict - - -# Mock data for holidays -mock_holidays = { - datetime(2023, 1, 1): "New Year's Day", - datetime(2023, 12, 
25): "Christmas Day", - datetime(2023, 12, 26): "Boxing Day", -} - - -@patch("holidays.country_holidays") -def test_is_holiday_when_true(mock_holidays_func): - # Setting up the mock to return specific holidays - mock_holidays_func.return_value = mock_holidays - - # Christmas Day is a holiday - assert is_holiday(datetime(2023, 12, 25), Region.ENG) is True - - -@patch("holidays.country_holidays") -def test_is_holiday_when_false(mock_holidays_func): - # Setting up the mock to return specific holidays - mock_holidays_func.return_value = mock_holidays - - # January 2nd is not a holiday - assert is_holiday(datetime(2023, 1, 2), Region.ENG) is False - - -def holiday_effect(country_code, subdiv=None): - if subdiv == "ENG": - return { - datetime(2023, 12, 25): "Christmas Day", - datetime(2023, 12, 26): "Boxing Day", - } - elif subdiv == "SCT": - return {datetime(2023, 11, 30): "St Andrew's Day"} - return {} - - -@patch("holidays.country_holidays", side_effect=holiday_effect) -def test_is_holiday_different_region(mock_holidays_func): - # St Andrew's Day in Scotland - assert is_holiday(datetime(2023, 11, 30), Region.SCT) is True - - # St Andrew's Day is not observed in England - assert is_holiday(datetime(2023, 11, 30), Region.ENG) is False - - -def test_is_weekend_when_true(): - weekend_date = datetime(2024, 12, 7) - assert is_weekend(weekend_date) is True - - -def test_is_weekend_when_false(): - weekend_date = datetime(2024, 12, 6) - assert is_weekend(weekend_date) is False - - -def test_is_working_day_when_true(): - working_day_date = datetime(2024, 12, 6) - assert is_working_day(working_day_date) is True - - -def test_is_working_day_when_false(): - working_day_date = datetime(2024, 12, 7) - assert is_working_day(working_day_date) is False - - -def test_get_next_working_day(): - sample_date = datetime(2024, 12, 7) - next_working_day = get_next_working_day(sample_date) - assert next_working_day == datetime(2024, 12, 9) - - -def test_remove_alpha_characters(): - test_string 
= "12345abc12345" - result = remove_alpha_characters(test_string) - assert result == "1234512345" - - -def test_remove_alpha_characters_bad(): - test_string = "12345abc12345" - result = remove_alpha_characters(test_string) - assert result != "12345abc12345" - - -def test_get_dates_every_x_days(): - now = datetime(2023, 2, 25, 7, 7, 17, 748661) - result = get_dates_every_x_days(now, 5, 7) - assert len(result) == 7 - assert result[6] == "27/03/2023" - - -def test_get_dates_every_x_days_bad(): - now = datetime(2023, 2, 25, 7, 7, 17, 748661) - result = get_dates_every_x_days(now, 5, 7) - assert len(result) != 8 - assert result[6] != "27/03/2022" - - -def test_remove_ordinal_indicator_from_date_string(): - test_string = "June 12th 2022" - result = remove_ordinal_indicator_from_date_string(test_string) - assert result == "June 12 2022" - - -def test_remove_ordinal_indicator_from_date_string_bad(): - test_string = "June 12th 2022" - result = remove_ordinal_indicator_from_date_string(test_string) - assert result != "June 12th 2022" - - -def test_get_weekday_dates_in_period(): - now = datetime(2023, 2, 25, 7, 7, 17, 748661) - result = get_weekday_dates_in_period(now, 5, 7) - assert len(result) == 7 - assert result[6] == "08/04/2023" - - -def test_get_weekday_dates_in_period_bad(): - now = datetime(2023, 2, 25, 7, 7, 17, 748661) - result = get_weekday_dates_in_period(now, 5, 7) - assert len(result) != 8 - assert result[6] != "08/04/20232" - - -def test_get_next_occurrence_from_day_month_false(): - result = get_next_occurrence_from_day_month(datetime(2023, 12, 1)) - assert result == datetime(2023, 12, 1, 0, 0) - - -def test_get_next_occurrence_from_day_month_true(): - result = get_next_occurrence_from_day_month(datetime(2023, 1, 1)) - assert result == pd.Timestamp("2024-01-01 00:00:00") - - -@patch("uk_bin_collection.common.load_data", return_value={}) -@patch("uk_bin_collection.common.save_data") -def test_update_input_json(mock_save_data, mock_load_data): - 
update_input_json( - "test_council", - "TEST_URL", - "path/to/input.json", - postcode="TEST_POSTCODE", - uprn="TEST_UPRN", - web_driver="TEST_WEBDRIVER", - skip_get_url=True, - ) - # Check that save_data was called with expected data - expected_data = { - "test_council": { - "wiki_name": "test_council", - "url": "TEST_URL", - "postcode": "TEST_POSTCODE", - "uprn": "TEST_UPRN", - "web_driver": "TEST_WEBDRIVER", - "skip_get_url": True, - } - } - mock_save_data.assert_called_once_with("path/to/input.json", expected_data) - - -@patch("uk_bin_collection.common.load_data") -@patch("uk_bin_collection.common.save_data") -def test_update_input_json_ioerror(mock_save_data, mock_load_data): - mock_load_data.side_effect = IOError("Unable to access file") - - with patch("builtins.print") as mock_print: - update_input_json("test_council", "TEST_URL", "path/to/input.json") - mock_print.assert_called_once_with( - "Error updating the JSON file: Unable to access file" - ) - - -@patch("uk_bin_collection.common.load_data") -@patch("uk_bin_collection.common.save_data") -def test_update_input_json_jsondecodeerror(mock_save_data, mock_load_data): - mock_load_data.side_effect = json.JSONDecodeError("Expecting value", "doc", 0) - - with patch("builtins.print") as mock_print: - update_input_json("test_council", "TEST_URL", "path/to/input.json") - mock_print.assert_called_once_with( - "Failed to decode JSON, check the integrity of the input file." 
- ) - - -def test_load_data_existing_file(): - # Create a mock file with JSON content - mock_file_data = json.dumps({"key": "value"}) - # Set up the mock to return a readable stream - m = mock_open(read_data=mock_file_data) - with patch("builtins.open", m): - with patch("os.path.exists", return_value=True): - data = load_data("path/to/mock/file.json") - assert data == { - "key": "value" - }, f"Data was {data} instead of {{'key': 'value'}}" - - -def test_load_data_non_existing_file(): - # Simulate file not existing - with patch("os.path.exists", return_value=False): - data = load_data("path/to/nonexistent/file.json") - assert data == {} - - -def test_load_data_invalid_json(): - # Create a mock file with invalid JSON content - mock_file_data = '{"key": "value"' - with patch("builtins.open", mock_open(read_data=mock_file_data)), patch( - "json.load", side_effect=json.JSONDecodeError("Expecting ',' delimiter", "", 0) - ): - data = load_data("path/to/invalid.json") - assert data == {} # Modify based on your desired behavior - - -def test_save_data_to_file(): - # Mock the open function and simulate writing - mock_file = mock_open() - with patch("builtins.open", mock_file): - data = {"key": "value"} - save_data("path/to/mock/file.json", data) - # Ensure the mock was called correctly to open the file for writing - mock_file.assert_called_once_with("path/to/mock/file.json", "w") - - # Now check what was written to the file - written_data = "".join( - call.args[0] for call in mock_file().write.call_args_list - ) - expected_data = json.dumps(data, sort_keys=True, indent=4) - assert ( - written_data == expected_data - ), "Data written to file does not match expected JSON data" - - -def test_save_data_io_error(): - # Simulate an IOError - with patch("builtins.open", mock_open()) as mocked_file: - mocked_file.side_effect = IOError("Failed to write to file") - with pytest.raises(IOError): - save_data("path/to/mock/file.json", {"key": "value"}) - - -def 
test_contains_date_with_valid_dates(): - assert contains_date("2023-05-10") - assert contains_date("10th of December, 2021") - assert contains_date("March 15, 2020") - assert contains_date("01/31/2020") - - -def test_contains_date_with_invalid_dates(): - assert not contains_date("not a date") - assert not contains_date("12345") - assert not contains_date("May 35, 2020") # Invalid day - assert not contains_date("2020-02-30") # Invalid date - - -def test_contains_date_with_fuzzy_true(): - assert contains_date("Today is 13th of April, 2024", fuzzy=True) - assert contains_date("They met on June 20th last year", fuzzy=True) - - -def test_contains_date_with_fuzzy_false(): - assert not contains_date("Today is 13th of April, 2024", fuzzy=False) - assert not contains_date("They met on June 20th last year", fuzzy=False) - - -def test_contains_date_with_mixed_content(): - assert contains_date("Event starts on 2023-05-10 at 10:00 AM", fuzzy=True) - assert not contains_date("Event starts on 2023-05-10 at 10:00 AM", fuzzy=False) - - -def test_create_webdriver_local(): - result = create_webdriver( - None, headless=True, user_agent="FireFox", session_name="test-session" - ) - assert result.name in ["chrome", "chrome-headless-shell"] - - -def test_create_webdriver_remote_failure(): - # Test the scenario where the remote server is not available - with pytest.raises(MaxRetryError) as exc_info: - create_webdriver("http://invalid-url:4444", False) - - -def test_create_webdriver_remote_with_session_name(): - # Test creating a remote WebDriver with a session name - session_name = "test-session" - web_driver_url = ( - "http://localhost:4444/wd/hub" # Use a valid remote WebDriver URL for testing - ) - - # Mock the Remote WebDriver - with mock.patch("uk_bin_collection.common.webdriver.Remote") as mock_remote: - mock_instance = mock.MagicMock() - mock_instance.name = "chrome" - mock_remote.return_value = mock_instance - - # Call the function with the test parameters - result = 
create_webdriver(web_driver=web_driver_url, session_name=session_name) - - # Check if the session name was set in capabilities - args, kwargs = mock_remote.call_args - options = kwargs["options"] - assert options._caps.get("se:name") == session_name - assert result.name == "chrome" - - -def test_string_with_numbers(): - assert has_numbers("abc123") is True - assert has_numbers("1a2b3c") is True - assert has_numbers("123") is True - - -def test_string_without_numbers(): - assert has_numbers("abcdef") is False - assert has_numbers("ABC") is False - assert has_numbers("!@#") is False - - -def test_empty_string(): - assert has_numbers("") is False - - -def test_string_with_only_numbers(): - assert has_numbers("1234567890") is True - - -def test_string_with_special_characters_and_numbers(): - assert has_numbers("!@#123$%^") is True - assert has_numbers("abc!@#123") is True - - -def test_string_with_whitespace_and_numbers(): - assert has_numbers(" 123 ") is True - assert has_numbers("abc 123") is True - - -@pytest.mark.parametrize( - "today_str, day_name, expected", - [ - ( - "2024-09-02", - "Monday", - "09/09/2024", - ), # Today is Monday, so next Monday is in 7 days - ( - "2024-09-02", - "Tuesday", - "09/03/2024", - ), # Today is Monday, next Tuesday is tomorrow - ( - "2024-09-02", - "Sunday", - "09/08/2024", - ), # Today is Monday, next Sunday is in 6 days - ( - "2024-09-03", - "Wednesday", - "09/04/2024", - ), # Today is Tuesday, next Wednesday is tomorrow - ( - "2024-09-03", - "Monday", - "09/09/2024", - ), # Today is Tuesday, next Monday is in 6 days - ], -) -def test_get_next_day_of_week(today_str, day_name, expected): - mock_today = datetime.strptime(today_str, "%Y-%m-%d") - with patch( - "uk_bin_collection.common.datetime" - ) as mock_datetime: # replace 'your_module' with the actual module name - mock_datetime.now.return_value = mock_today - mock_datetime.side_effect = lambda *args, **kw: datetime(*args, **kw) - result = get_next_day_of_week(day_name, 
date_format="%m/%d/%Y") - assert result == expected diff --git a/uk_bin_collection/tests/test_conftest.py b/uk_bin_collection/tests/test_conftest.py deleted file mode 100644 index a98a965892..0000000000 --- a/uk_bin_collection/tests/test_conftest.py +++ /dev/null @@ -1,38 +0,0 @@ -import pytest - -# Test the command-line options - - -def test_headless_mode(pytestconfig): - # Simulate pytest command-line option - headless_mode_value = pytestconfig.getoption("--headless") - assert headless_mode_value == "True" # This should match the default value - - -def test_local_browser(pytestconfig): - local_browser_value = pytestconfig.getoption("--local_browser") - assert local_browser_value == "False" # This should match the default value - - -def test_selenium_url(pytestconfig): - selenium_url_value = pytestconfig.getoption("--selenium_url") - assert ( - selenium_url_value == "http://localhost:4444" - ) # This should match the default value - - -# Test the fixtures - - -def test_headless_mode_fixture(headless_mode): - assert headless_mode == "True" # This should match the default value - - -def test_local_browser_fixture(local_browser): - assert local_browser == "False" # This should match the default value - - -def test_selenium_url_fixture(selenium_url): - assert ( - selenium_url == "http://localhost:4444" - ) # This should match the default value diff --git a/uk_bin_collection/tests/test_get_data.py b/uk_bin_collection/tests/test_get_data.py deleted file mode 100644 index 66c93b4218..0000000000 --- a/uk_bin_collection/tests/test_get_data.py +++ /dev/null @@ -1,212 +0,0 @@ -import os -from unittest import mock -import tempfile - -import pytest -from requests import exceptions as req_exp -from requests.models import Response -from unittest.mock import patch -from uk_bin_collection.get_bin_data import AbstractGetBinDataClass as agbdc -from uk_bin_collection.get_bin_data import setup_logging -import logging - - -def mocked_requests_get(*args, **kwargs): - class MockResponse: 
- def __init__(self, json_data, status_code, raise_error_type): - self.text = json_data - self.status_code = status_code - if raise_error_type is not None: - self.raise_for_status = self.raise_error(raise_error_type) - else: - self.raise_for_status = lambda: None - - def raise_error(self, errorType): - if errorType == "HTTPError": - raise req_exp.HTTPError() - elif errorType == "ConnectionError": - raise req_exp.ConnectionError() - elif errorType == "Timeout": - raise req_exp.Timeout() - elif errorType == "RequestException": - raise req_exp.RequestException() - return errorType - - if args[0] == "aurl": - return MockResponse({"test_data": "test"}, 200, None) - elif args[0] == "HTTPError": - return MockResponse({}, 999, "HTTPError") - elif args[0] == "ConnectionError": - return MockResponse({}, 999, "ConnectionError") - elif args[0] == "Timeout": - return MockResponse({}, 999, "Timeout") - elif args[0] == "RequestException": - return MockResponse({}, 999, "RequestException") - elif args[0] == "notPage": - return MockResponse("not json", 200, None) - return MockResponse(None, 404, "HTTPError") - - -# Unit tests - - -def test_logging_exception(): - logging_dict = "SW1A 1AA" - with pytest.raises(ValueError) as exc_info: - result = setup_logging(logging_dict, "ROOT") - assert exc_info.typename == "ValueError" - - -def test_setup_logging_valid_config(): - # Example of a minimal valid logging configuration dictionary - logging_config = { - "version": 1, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "level": "DEBUG", - }, - }, - "loggers": { - "ROOT": { - "handlers": ["console"], - "level": "DEBUG", - }, - }, - } - logger_name = "ROOT" - # Run the function with valid logging configuration - logger = setup_logging(logging_config, logger_name) - - # Assert that logger is correctly configured - assert logger.name == logger_name - assert logger.level == logging.DEBUG - - -@mock.patch("requests.get", side_effect=mocked_requests_get) -def 
test_get_data(mock_get): - page_data = agbdc.get_data("aurl") - assert page_data.text == {"test_data": "test"} - - -@pytest.mark.parametrize( - "url", ["HTTPError", "ConnectionError", "Timeout", "RequestException"] -) -@mock.patch("requests.get", side_effect=mocked_requests_get) -def test_get_data_error(mock_get, url): - with pytest.raises(Exception) as exc_info: - result = agbdc.get_data(url) - assert exc_info.typename == url - - -def test_output_json(): - bin_data = {"bin": ""} - output = agbdc.output_json(bin_data) - assert type(output) == str - assert output == '{\n "bin": ""\n}' - - -class ConcreteGetBinDataClass(agbdc): - """Concrete implementation of the abstract class to test abstract methods.""" - - def parse_data(self, page: str, **kwargs) -> dict: - return {"mock_key": "mock_value"} - - def update_dev_mode_data(self, council_module_str, this_url, **kwargs): - # You can implement the method or delegate it to the abstract class's method - super().update_dev_mode_data(council_module_str, this_url, **kwargs) - - -@pytest.fixture -def concrete_class_instance(): - return ConcreteGetBinDataClass() - - -def test_get_and_parse_data_no_skip_get_url(concrete_class_instance): - mock_page = "mocked page content" - mock_parsed_data = {"mock_key": "mock_value"} - - with mock.patch.object( - concrete_class_instance, "get_data", return_value=mock_page - ) as mock_get_data, mock.patch.object( - concrete_class_instance, "parse_data", return_value=mock_parsed_data - ) as mock_parse_data: - - result = concrete_class_instance.get_and_parse_data("http://example.com") - - mock_get_data.assert_called_once_with("http://example.com") - mock_parse_data.assert_called_once_with(mock_page, url="http://example.com") - assert result == mock_parsed_data - - -def test_get_and_parse_data_skip_get_url(concrete_class_instance): - mock_parsed_data = {"mock_key": "mock_value"} - - with mock.patch.object( - concrete_class_instance, "parse_data", return_value=mock_parsed_data - ) as 
mock_parse_data: - - result = concrete_class_instance.get_and_parse_data( - "http://example.com", skip_get_url=True - ) - - mock_parse_data.assert_called_once_with( - "", url="http://example.com", skip_get_url=True - ) - assert result == mock_parsed_data - - -@pytest.fixture -def setup_test_update_dev_mode_data(): - """Fixture to set up and tear down the environment for test_update_dev_mode_data""" - # Create a temporary directory to simulate the working directory - test_dir = tempfile.TemporaryDirectory() - - # Patch os.getcwd() to return the temporary directory - cwd_patch = patch("os.getcwd", return_value=test_dir.name) - mock_getcwd = cwd_patch.start() - - # Ensure the nested directory structure exists - os.makedirs(os.path.join(test_dir.name, "uk_bin_collection", "tests")) - - # Yield control back to the test, then clean up after the test - yield test_dir.name # Provide the test with the temporary directory - - # Teardown - test_dir.cleanup() - cwd_patch.stop() - - -def test_update_dev_mode_data(setup_test_update_dev_mode_data): - """Test update_dev_mode_data method to ensure input.json is updated correctly""" - # The setup fixture returns the mocked current working directory - mock_cwd = setup_test_update_dev_mode_data - - # Create an instance of the concrete class that inherits from AbstractGetBinDataClass - obj = ConcreteGetBinDataClass() - - # Define input arguments for the method - council_module_str = "test_council_module" - this_url = "https://example.com" - kwargs = { - "postcode": "12345", - "paon": "1A", - "uprn": "100012345", - "usrn": "200012345", - "web_driver": "mocked_web_driver", - "skip_get_url": True, - } - - # Call the method being tested on the instance - obj.update_dev_mode_data(council_module_str, this_url, **kwargs) - - # Verify that input.json was created in the correct location - input_file_path = os.path.join(mock_cwd, "uk_bin_collection", "tests", "input.json") - assert os.path.exists(input_file_path) - - # Read the contents of the 
file and make necessary assertions - with open(input_file_path, "r") as f: - file_content = f.read() - - # Example assertion - check if certain values exist in the file content (based on your actual file format) - assert "100012345" in file_content # Checking UPRN as an example diff --git a/uk_bin_collection/uk_bin_collection/collect_data.py b/uk_bin_collection/uk_bin_collection/collect_data.py deleted file mode 100755 index 0a9e510e6d..0000000000 --- a/uk_bin_collection/uk_bin_collection/collect_data.py +++ /dev/null @@ -1,134 +0,0 @@ -import argparse -import importlib -import os -import sys -import logging -from uk_bin_collection.uk_bin_collection.get_bin_data import ( - setup_logging, - LOGGING_CONFIG, -) - -_LOGGER = logging.getLogger(__name__) - - -def import_council_module(module_name, src_path="councils"): - """Dynamically import the council processor module.""" - module_path = os.path.realpath(os.path.join(os.path.dirname(__file__), src_path)) - if module_path not in sys.path: - sys.path.append(module_path) - return importlib.import_module(module_name) - - -class UKBinCollectionApp: - def __init__(self): - self.setup_arg_parser() - self.parsed_args = None - - def setup_arg_parser(self): - """Setup the argument parser for the script.""" - self.parser = argparse.ArgumentParser( - description="UK Bin Collection Data Parser" - ) - self.parser.add_argument( - "module", type=str, help="Name of council module to use" - ) - self.parser.add_argument( - "URL", type=str, help="URL to parse - should be wrapped in double quotes" - ) - self.parser.add_argument( - "-p", - "--postcode", - type=str, - help="Postcode to parse - should include a space and be wrapped in double quotes", - required=False, - ) - self.parser.add_argument( - "-n", "--number", type=str, help="House number to parse", required=False - ) - self.parser.add_argument( - "-s", - "--skip_get_url", - action="store_true", - help="Skips the generic get_url - uses one in council class", - required=False, - ) - 
self.parser.add_argument( - "-u", "--uprn", type=str, help="UPRN to parse", required=False - ) - self.parser.add_argument( - "-w", - "--web_driver", - type=str, - help="URL for remote Selenium web driver - should be wrapped in double quotes", - required=False, - ) - self.parser.add_argument( - "--headless", - dest="headless", - action="store_true", - help="Should Selenium be headless. Defaults to true. Can be set to false to debug council", - ) - self.parser.add_argument( - "--not-headless", - dest="headless", - action="store_false", - help="Should Selenium be headless. Defaults to true. Can be set to false to debug council", - ) - self.parser.set_defaults(headless=True) - self.parser.add_argument( - "--local_browser", - dest="local_browser", - action="store_true", - help="Should Selenium be run on a remote server or locally. Defaults to false.", - required=False, - ) - self.parser.add_argument( - "-d", - "--dev_mode", - action="store_true", - help="Enables development mode - creates/updates entries in the input.json file for the council on each run", - required=False, - ) - self.parsed_args = None - - def set_args(self, args): - """Parse the arguments from the command line.""" - self.parsed_args = self.parser.parse_args(args) - - def run(self): - """Run the application with the provided arguments.""" - council_module = import_council_module(self.parsed_args.module) - return self.client_code( - council_module.CouncilClass(), - self.parsed_args.URL, - postcode=self.parsed_args.postcode, - paon=self.parsed_args.number, - uprn=self.parsed_args.uprn, - skip_get_url=self.parsed_args.skip_get_url, - web_driver=self.parsed_args.web_driver, - headless=self.parsed_args.headless, - local_browser=self.parsed_args.local_browser, - dev_mode=self.parsed_args.dev_mode, - council_module_str=self.parsed_args.module, - ) - - def client_code(self, get_bin_data_class, address_url, **kwargs): - """ - Call the template method to execute the algorithm. 
Client code does not need - to know the concrete class of an object it works with, as long as it works with - objects through the interface of their base class. - """ - return get_bin_data_class.template_method(address_url, **kwargs) - - -def run(): - """Set up logging and run the application.""" - global _LOGGER - _LOGGER = setup_logging(LOGGING_CONFIG, None) - app = UKBinCollectionApp() - app.set_args(sys.argv[1:]) - print(app.run()) - - -if __name__ == "__main__": - run() diff --git a/uk_bin_collection/uk_bin_collection/common.py b/uk_bin_collection/uk_bin_collection/common.py deleted file mode 100644 index 024ae9655a..0000000000 --- a/uk_bin_collection/uk_bin_collection/common.py +++ /dev/null @@ -1,355 +0,0 @@ -import calendar -import json -import os -import re -from datetime import datetime, timedelta -from enum import Enum - -import holidays -import pandas as pd -import requests -from dateutil.parser import parse -from selenium import webdriver -from selenium.webdriver.chrome.service import Service as ChromeService -from urllib3.exceptions import MaxRetryError -from webdriver_manager.chrome import ChromeDriverManager - -date_format = "%d/%m/%Y" -days_of_week = { - "Monday": 0, - "Tuesday": 1, - "Wednesday": 2, - "Thursday": 3, - "Friday": 4, - "Saturday": 5, - "Sunday": 6, -} - - -class Region(Enum): - ENG = 1 - NIR = 2 - SCT = 3 - WLS = 4 - - -def check_postcode(postcode: str): - """ - Checks a postcode exists and validates UK formatting against a RegEx string - :param postcode: Postcode to parse - """ - postcode_api_url = "https://api.postcodes.io/postcodes/" - postcode_api_response = requests.get(f"{postcode_api_url}{postcode}") - - if postcode_api_response.status_code != 200: - val_error = json.loads(postcode_api_response.text) - raise ValueError( - f"Exception: {val_error['error']} Status: {val_error['status']}" - ) - return True - - -def check_paon(paon: str): - """ - Checks that PAON data exists - :param paon: PAON data to check, usually house number 
- """ - try: - if paon is None: - raise ValueError("Invalid house number") - return True - except Exception as ex: - print(f"Exception encountered: {ex}") - print("Please check the provided house number.") - exit(1) - - -def check_uprn(uprn: str): - """ - Checks that the UPRN exists - :param uprn: UPRN to check - """ - try: - if uprn is None or uprn == "": - raise ValueError("Invalid UPRN") - return True - except Exception as ex: - print(f"Exception encountered: {ex}") - print("Please check the provided UPRN.") - - -def check_usrn(usrn: str): - """ - Checks that the USRN exists - :param uprn: USRN to check - """ - try: - if usrn is None or usrn == "": - raise ValueError("Invalid USRN") - return True - except Exception as ex: - print(f"Exception encountered: {ex}") - print("Please check the provided USRN.") - - -def get_date_with_ordinal(date_number: int) -> str: - """ - Return ordinal text on day of date - :rtype: str - :param date_number: Date number as an integer (e.g. 4) - :return: Return date with ordinal suffix (e.g. 4th) - """ - return str(date_number) + ( - "th" - if 4 <= date_number % 100 <= 20 - else {1: "st", 2: "nd", 3: "rd"}.get(date_number % 10, "th") - ) - - -def has_numbers(inputString: str) -> bool: - """ - - :rtype: bool - :param inputString: String to check for numbers - :return: True if any numbers are found in input string - """ - return any(char.isdigit() for char in inputString) - - -def remove_ordinal_indicator_from_date_string(date_string: str) -> str: - """ - Remove the ordinal indicator from a written date as a string. - E.g. 
June 12th 2022 -> June 12 2022 - :rtype: str - """ - ord_day_pattern = re.compile(r"(?<=\d)(st|nd|rd|th)") - return re.compile(ord_day_pattern).sub("", date_string) - - -def parse_header(raw_header: str) -> dict: - """ - Parses a header string and returns one that can be useful - :rtype: dict - :param raw_header: header as a string, with values to separate as pipe (|) - :return: header in a dictionary format that can be used in requests - """ - header = dict() - for line in raw_header.split("|"): - if line.startswith(":"): - a, b = line[1:].split(":", 1) - a = f":{a}" - else: - a, b = line.split(":", 1) - - header[a.strip()] = b.strip() - - return header - - -def is_holiday(date_to_check: datetime, region: Region = Region.ENG) -> bool: - """ - Checks if a given date is a public holiday - :param date_to_check: Date to check if holiday - :param region: The UK nation to check. Defaults to ENG. - :return: Bool - true if a holiday, false if not - """ - uk_holidays = holidays.country_holidays("GB", subdiv=region.name) - - if date_to_check in uk_holidays: - return True - else: - return False - - -def is_weekend(date_to_check: datetime) -> bool: - """ - Checks if a given date is a weekend - :param date_to_check: Date to check if it falls on a weekend - :return: Bool - true if a weekend day, false if not - """ - return True if date_to_check.date().weekday() >= 5 else False - - -def is_working_day(date_to_check: datetime, region: Region = Region.ENG) -> bool: - """ - Wraps is_holiday() and is_weekend() into one function - :param date_to_check: Date to check if holiday - :param region: The UK nation to check. Defaults to ENG. - :return: Bool - true if a working day (non-holiday, Mon-Fri). 
- """ - return False if is_holiday(date_to_check, region) or is_weekend(date_to_check) else True - - -def get_next_working_day(date: datetime, region: Region = Region.ENG) -> datetime: - while not is_working_day(date, region): - date += timedelta(days=1) - return date - - -def get_weekday_dates_in_period(start: datetime, day_of_week: int, amount=8) -> list: - """ - Returns a list of dates of a given weekday from a start date for the given amount of weeks - :param start: Start date - :param day_of_week: Day of week number. Recommended to use calendar.DAY (Monday=0, Sunday=6) - :param amount: Number of weeks to get dates. Defaults to 8 weeks. - :return: List of dates where the specified weekday is in the period - """ - return ( - pd.date_range( - start=start, freq=f"W-{calendar.day_abbr[day_of_week]}", periods=amount - ) - .strftime(date_format) - .tolist() - ) - - -def get_dates_every_x_days(start: datetime, step: int, amount: int = 8) -> list: - """ - Returns a list of dates for `X` days from start date. 
For example, calling `get_stepped_dates_in_period(s, 21, 4)` would - return `4` dates every `21` days from the start date `s` - :param start: Date to start from - :param step: X amount of days - :param amount: Number of dates to find - :return: List of dates every X days from start date - :rtype: list - """ - return ( - pd.date_range(start=start, freq=f"{step}D", periods=amount) - .strftime(date_format) - .tolist() - ) - - -def get_next_occurrence_from_day_month(date: datetime) -> datetime: - current_date = datetime.now() - # Get the current day and month as integers - current_day = current_date.day - current_month = current_date.month - - # Extract the target day and month from the input date - target_day = date.day - target_month = date.month - - # Check if the target date has already occurred this year - if (target_month < current_month) or ( - target_month == current_month and target_day < current_day - ): - date = pd.to_datetime(date) + pd.DateOffset(years=1) - - return date - - -def remove_alpha_characters(input_string: str) -> str: - return "".join(c for c in input_string if c.isdigit() or c == " ") - - -def update_input_json(council: str, url: str, input_file_path: str, **kwargs): - """ - Create or update a council's entry in the input.json file. - - :param council: Name of the council. - :param url: URL associated with the council. - :param input_file_path: Path to the input JSON file. - :param kwargs: Additional parameters to store (postcode, paon, uprn, usrn, web_driver, skip_get_url). 
- """ - try: - data = load_data(input_file_path) - council_data = data.get(council, {"wiki_name": council}) - council_data.update({"url": url, **kwargs}) - data[council] = council_data - - save_data(input_file_path, data) - except IOError as e: - print(f"Error updating the JSON file: {e}") - except json.JSONDecodeError: - print("Failed to decode JSON, check the integrity of the input file.") - - -def load_data(file_path): - if os.path.exists(file_path): - with open(file_path, "r") as file: - return json.load(file) - return {} - - -def save_data(file_path, data): - with open(file_path, "w") as file: - json.dump(data, file, sort_keys=True, indent=4) - - -def get_next_day_of_week(day_name, date_format="%d/%m/%Y"): - days_of_week = [ - "Monday", - "Tuesday", - "Wednesday", - "Thursday", - "Friday", - "Saturday", - "Sunday", - ] - today = datetime.now() - today_idx = today.weekday() # Monday is 0 and Sunday is 6 - target_idx = days_of_week.index(day_name) - - days_until_target = (target_idx - today_idx) % 7 - if days_until_target == 0: - days_until_target = 7 # Ensure it's the next instance of the day, not today if today is that day - - next_day = today + timedelta(days=days_until_target) - return next_day.strftime(date_format) - - -def contains_date(string, fuzzy=False) -> bool: - """ - Return whether the string can be interpreted as a date. - - :param string: str, string to check for date - :param fuzzy: bool, ignore unknown tokens in string if True - """ - try: - parse(string, fuzzy=fuzzy) - return True - - except ValueError: - return False - - -def create_webdriver( - web_driver: str = None, - headless: bool = True, - user_agent: str = None, - session_name: str = None, -) -> webdriver.Chrome: - """ - Create and return a Chrome WebDriver configured for optional headless operation. - - :param web_driver: URL to the Selenium server for remote web drivers. If None, a local driver is created. - :param headless: Whether to run the browser in headless mode. 
- :param user_agent: Optional custom user agent string. - :param session_name: Optional custom session name string. - :return: An instance of a Chrome WebDriver. - :raises WebDriverException: If the WebDriver cannot be created. - """ - options = webdriver.ChromeOptions() - if headless: - options.add_argument("--headless") - options.add_argument("--no-sandbox") - options.add_argument("--disable-gpu") - options.add_argument("--start-maximized") - options.add_argument("--disable-dev-shm-usage") - if user_agent: - options.add_argument(f"--user-agent={user_agent}") - options.add_experimental_option("excludeSwitches", ["enable-logging"]) - if session_name and web_driver: - options.set_capability("se:name", session_name) - - try: - if web_driver: - return webdriver.Remote(command_executor=web_driver, options=options) - else: - return webdriver.Chrome( - service=ChromeService(ChromeDriverManager().install()), options=options - ) - except MaxRetryError as e: - print(f"Failed to create WebDriver: {e}") - raise diff --git a/uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py b/uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py deleted file mode 100644 index 00ed29fe59..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/AberdeenCityCouncil.py +++ /dev/null @@ -1,121 +0,0 @@ -import time - -import requests - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - - user_uprn = kwargs.get("uprn") - check_uprn(user_uprn) - bindata = {"bins": []} - - SESSION_URL = "https://integration.aberdeencity.gov.uk/authapi/isauthenticated?uri=https%253A%252F%252Fintegration.aberdeencity.gov.uk%252Fservice%252Fbin_collection_calendar___view&hostname=integration.aberdeencity.gov.uk&withCredentials=true" - - API_URL = "https://integration.aberdeencity.gov.uk/apibroker/runLookup" - - headers = { - "Content-Type": "application/json", - "Accept": "application/json", - "User-Agent": "Mozilla/5.0", - "X-Requested-With": "XMLHttpRequest", - "Referer": "https://integration.aberdeencity.gov.uk/fillform/?iframe_id=fillform-frame-1&db_id=", - } - s = requests.session() - r = s.get(SESSION_URL) - r.raise_for_status() - session_data = r.json() - sid = session_data["auth-session"] - params = { - "id": "583c08ffc47fe", - "repeat_against": "", - "noRetry": "true", - "getOnlyTokens": "undefined", - "log_id": "", - "app_name": "AF-Renderer::Self", - # unix_timestamp - "_": str(int(time.time() * 1000)), - "sid": sid, - } - - r = s.post(API_URL, headers=headers, params=params) - r.raise_for_status() - - data = r.json() - rows_data = data["integration"]["transformed"]["rows_data"]["0"] - if not isinstance(rows_data, dict): - raise ValueError("Invalid data returned from API") - token = rows_data["token"] - - data = { - "formValues": { - "Section 1": { - "nauprn": { - "value": user_uprn, - }, - "token": { - "value": token, - }, - "mindate": { - "value": datetime.now().strftime("%Y-%m-%d"), - }, - "maxdate": { - "value": (datetime.now() + timedelta(days=30)).strftime( - "%Y-%m-%d" - ), - }, - }, - }, - } - - params = { - "id": "5a3141caf4016", - "repeat_against": "", - "noRetry": "true", - "getOnlyTokens": "undefined", - "log_id": "", - "app_name": "AF-Renderer::Self", - # unix_timestamp - "_": str(int(time.time() * 1000)), - "sid": sid, - } - - r = s.post(API_URL, json=data, headers=headers, 
params=params) - r.raise_for_status() - - data = r.json() - rows_data = data["integration"]["transformed"]["rows_data"]["0"] - if not isinstance(rows_data, dict): - raise ValueError("Invalid data returned from API") - - date_pattern = re.compile(r"^(.*?)(Date\d+)$") - count_pattern = re.compile(r"^Count(.*)$") - for key, value in rows_data.items(): - date_match = date_pattern.match(key) - # Match count keys - count_match = count_pattern.match(key) - if count_match: - continue - - # Match date keys - date_match = date_pattern.match(key) - if date_match: - bin_type = date_match.group(1) - dict_data = { - "type": bin_type, - "collectionDate": datetime.strptime(value, "%A %d %B %Y").strftime( - date_format - ), - } - bindata["bins"].append(dict_data) - - return bindata diff --git a/uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py b/uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py deleted file mode 100644 index a797c2ce05..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/AberdeenshireCouncil.py +++ /dev/null @@ -1,52 +0,0 @@ -import requests -from bs4 import BeautifulSoup - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - - user_uprn = kwargs.get("uprn") - check_uprn(user_uprn) - bindata = {"bins": []} - - URI = f"https://online.aberdeenshire.gov.uk/Apps/Waste-Collections/Routes/Route/{user_uprn}" - - # Make the GET request - response = requests.get(URI) - - soup = BeautifulSoup(response.content, features="html.parser") - soup.prettify() - - for collection in soup.find("table").find("tbody").find_all("tr"): - th = collection.find("th") - if th: - continue - td = collection.find_all("td") - collection_date = datetime.strptime( - td[0].text, - "%d/%m/%Y %A", - ) - bin_type = td[1].text.split(" and ") - - for bin in bin_type: - dict_data = { - "type": bin, - "collectionDate": collection_date.strftime(date_format), - } - bindata["bins"].append(dict_data) - - bindata["bins"].sort( - key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y") - ) - - return bindata diff --git a/uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py b/uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py deleted file mode 100644 index 2b02767ade..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/AdurAndWorthingCouncils.py +++ /dev/null @@ -1,43 +0,0 @@ -import bs4.element -from bs4 import BeautifulSoup -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - # Make a BS4 object - soup = BeautifulSoup(page.text, features="html.parser") - soup.prettify() - - data = {"bins": []} - collections = [] - - for bin in ( - soup.find("table", {"class": "no-style bin-days"}) - .find("tbody") - .find_all("tr") - ): - bin_type = bin.find("th").get_text().strip() + " bin" - bin_dates = bin.find_all("td")[1].contents - for date in bin_dates: - if type(date) == bs4.element.NavigableString: - bin_date = datetime.strptime(date, "%A %d %b %Y") - collections.append((bin_type, bin_date)) - - ordered_data = sorted(collections, key=lambda x: x[1]) - for item in ordered_data: - dict_data = { - "type": item[0].capitalize(), - "collectionDate": item[1].strftime(date_format), - } - data["bins"].append(dict_data) - - return data diff --git a/uk_bin_collection/uk_bin_collection/councils/AntrimAndNewtonabbeyCouncil.py b/uk_bin_collection/uk_bin_collection/councils/AntrimAndNewtonabbeyCouncil.py deleted file mode 100644 index c4ff9ad28d..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/AntrimAndNewtonabbeyCouncil.py +++ /dev/null @@ -1,53 +0,0 @@ -from bs4 import BeautifulSoup - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - - bindata = {"bins": []} - - soup = BeautifulSoup(page.content, "html.parser") - soup.prettify - - collection_divs = soup.select("div.feature-box.bins") - if not collection_divs: - raise Exception("No collections found") - - for collection_div in collection_divs: - date_p = collection_div.select_one("p.date") - if not date_p: - continue - - # Thu 22 Aug, 2024 - date_ = datetime.strptime(date_p.text.strip(), "%a %d %b, %Y").strftime( - "%d/%m/%Y" - ) - bins = collection_div.select("li") - if not bins: - continue - for bin in bins: - if not bin.text.strip(): - continue - bin_type = bin.text.strip() - - dict_data = { - "type": bin_type, - "collectionDate": date_, - } - bindata["bins"].append(dict_data) - - bindata["bins"].sort( - key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y") - ) - - return bindata diff --git a/uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py b/uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py deleted file mode 100644 index 19f4ea0652..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/ArdsAndNorthDownCouncil.py +++ /dev/null @@ -1,103 +0,0 @@ -from datetime import datetime - -import requests -from bs4 import BeautifulSoup - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - # Get and check UPRN - user_uprn = kwargs.get("uprn") - check_uprn(user_uprn) - bindata = {"bins": []} - - API_URL = ( - "https://collections-ardsandnorthdown.azurewebsites.net/WSCollExternal.asmx" - ) - - # council seems to always be ARD no matter what the old council was - PAYLOAD = f""" - - - - ARD - {user_uprn} - Chtml - - - - """ - - r = requests.post( - API_URL, - data=PAYLOAD, - headers={"Content-Type": "text/xml; charset=utf-8"}, - ) - r.raise_for_status() - - # html unescape text - text = ( - (r.text.replace("<", "<").replace(">", ">").replace("&", "&")) - .split("")[-1] - .split("")[0] - ) - - soup = BeautifulSoup(text, "html.parser") - - # Initialize dictionary to store bin dates - bin_schedule = {} - - # Define regex pattern to capture day and date (e.g., Tue 5 Nov) - date_pattern = re.compile(r"\b\w{3} \d{1,2} \w{3}\b") - - current_year = datetime.now().year - - # Find each bin collection line, parse date, and add to dictionary - for bin_info in soup.find_all("b"): - bin_type = bin_info.text.strip() - bin_details = bin_info.next_sibling.strip() if bin_info.next_sibling else "" - # Check for "Today" or "Tomorrow" - if "Today" in bin_details: - collection_date = datetime.now().strftime("%a %d %b") - bin_schedule[bin_type] = collection_date - elif "Tomorrow" in bin_details: - collection_date = (datetime.now() + timedelta(days=1)).strftime( - "%a %d %b" - ) - bin_schedule[bin_type] = collection_date - else: - # Extract date if it's a full date format - date_match = date_pattern.search(bin_details) - if date_match: - bin_schedule[bin_type] = date_match.group() - - # Display the parsed schedule with dates only - for bin_type, collection_date in bin_schedule.items(): - date = datetime.strptime(collection_date, "%a %d %b") - - if date.month == 1 and datetime.now().month > 1: - date = date.replace(year=current_year + 1) - else: - date = date.replace(year=current_year) - - dict_data = { - "type": 
bin_type, - "collectionDate": date.strftime("%d/%m/%Y"), - } - bindata["bins"].append(dict_data) - - bindata["bins"].sort( - key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y") - ) - return bindata diff --git a/uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py b/uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py deleted file mode 100644 index 01b46ef310..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/ArgyllandButeCouncil.py +++ /dev/null @@ -1,67 +0,0 @@ -import time - -import requests -from bs4 import BeautifulSoup - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - - user_uprn = kwargs.get("uprn") - check_uprn(user_uprn) - user_uprn = user_uprn.zfill(12) - bindata = {"bins": []} - - URI = "https://www.argyll-bute.gov.uk/rubbish-and-recycling/household-waste/bin-collection" - - data = {"addressSelect": user_uprn} - - s = requests.session() - r = s.post(URI, data=data) - r.raise_for_status() - - soup = BeautifulSoup(r.content, features="html.parser") - soup.prettify() - - # Find the table and extract the rows with bin schedule information - table = soup.find("table", class_="table table-bordered") - rows = table.find_all("tr")[1:] # Skip the header row - - current_year = datetime.now().year - # Loop through each row and extract the bin type and collection date - for row in rows: - cells = row.find_all("td") - bin_type = cells[0].get_text(strip=True) - collection_date = cells[1].get_text(strip=True) - - collection_date = datetime.strptime( - collection_date, - "%A %d %B", - ) - - if collection_date.month == 1: - collection_date = collection_date.replace(year=current_year + 1) - else: - collection_date = collection_date.replace(year=current_year) - - dict_data = { - "type": bin_type, - "collectionDate": collection_date.strftime(date_format), - } - bindata["bins"].append(dict_data) - - bindata["bins"].sort( - key=lambda x: datetime.strptime(x.get("collectionDate"), date_format) - ) - - return bindata diff --git a/uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py b/uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py deleted file mode 100644 index c4711cedcf..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/ArmaghBanbridgeCraigavonCouncil.py +++ /dev/null @@ -1,72 +0,0 @@ -import requests -from bs4 import BeautifulSoup - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful 
Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. - """ - - def parse_data(self, page: str, **kwargs) -> dict: - - user_uprn = kwargs.get("uprn") - check_uprn(user_uprn) - bindata = {"bins": []} - - # Function to extract bin collection information - def extract_bin_schedule(soup, heading_class): - collections = [] - - # Find the relevant section based on the heading class - section_heading = soup.find("div", class_=heading_class) - if section_heading: - # Find all the bin collection dates in that section - collection_dates = section_heading.find_next( - "div", class_="col-sm-12 col-md-9" - ).find_all("h4") - for date in collection_dates: - # Clean and add the date to the list - collections.append(date.get_text(strip=True)) - - return collections - - # URL for bin collection schedule - url = f"https://www.armaghbanbridgecraigavon.gov.uk/resident/binday-result/?address={user_uprn}" - - # Send a GET request to fetch the page content - response = requests.get(url) - - # Check if the request was successful - if response.status_code == 200: - # Parse the page content using BeautifulSoup - soup = BeautifulSoup(response.text, "html.parser") - - # Extract bin collection schedules by their sections - domestic_collections = extract_bin_schedule(soup, "heading bg-black") - for collection in domestic_collections: - bindata["bins"].append( - {"collectionDate": collection, "type": "Domestic"} - ) - recycling_collections = extract_bin_schedule(soup, "heading bg-green") - for collection in recycling_collections: - bindata["bins"].append( - {"collectionDate": collection, "type": "Recycling"} - ) - garden_collections = extract_bin_schedule(soup, "heading bg-brown") - for collection in garden_collections: - bindata["bins"].append({"collectionDate": collection, "type": "Garden"}) - - else: - print(f"Failed 
to retrieve data. Status code: {response.status_code}") - - bindata["bins"].sort( - key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y") - ) - - return bindata diff --git a/uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py b/uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py deleted file mode 100644 index eabc5fd06f..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/ArunCouncil.py +++ /dev/null @@ -1,98 +0,0 @@ -import time - -from bs4 import BeautifulSoup -from selenium import webdriver -from selenium.webdriver.common.by import By -from selenium.webdriver.support import expected_conditions as EC -from selenium.webdriver.support.ui import Select, WebDriverWait - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - try: - # Make a BS4 object - data = {"bins": []} - - user_paon = kwargs.get("paon") - user_postcode = kwargs.get("postcode") - headless = kwargs.get("headless") - web_driver = kwargs.get("web_driver") - driver = create_webdriver(web_driver, headless, None, __name__) - page = "https://www1.arun.gov.uk/when-are-my-bins-collected/" - check_paon(user_paon) - check_postcode(user_postcode) - driver.get(page) - - start_now_button = WebDriverWait(driver, timeout=15).until( - EC.presence_of_element_located((By.LINK_TEXT, "Start now")) - ) - start_now_button.click() - - # Wait for the postcode field to appear then populate it - input_element_postcode = WebDriverWait(driver, 30).until( - EC.presence_of_element_located((By.ID, "postcode")) - ) - input_element_postcode.send_keys(user_postcode) - - continue_button = WebDriverWait(driver, timeout=15).until( - EC.presence_of_element_located((By.CLASS_NAME, "govuk-button")) - ) - continue_button.click() - - address_selection_menu = Select(driver.find_element(By.ID, "address")) - for idx, addr_option in enumerate(address_selection_menu.options): - option_name = addr_option.text[0 : len(user_paon)] - if option_name == user_paon: - selected_address = addr_option - break - address_selection_menu.select_by_visible_text(selected_address.text) - - continue_button = WebDriverWait(driver, timeout=15).until( - EC.presence_of_element_located((By.CLASS_NAME, "govuk-button")) - ) - continue_button.click() - # Check for text saying "Next collection dates" - WebDriverWait(driver, 30).until( - EC.presence_of_element_located( - (By.XPATH, "//*[contains(text(), 'Next collection dates')]") - ) - ) - - soup = BeautifulSoup(driver.page_source, "html.parser") - soup.prettify() - table = soup.find("table", class_="govuk-table") - - for row in table.find("tbody").find_all("tr"): - # Extract the type of collection and the date of next collection - collection_type = ( - row.find("th", 
class_="govuk-table__header").text.strip().split(" ") - )[0] - collection_date = row.find( - "td", class_="govuk-table__cell" - ).text.strip() - - # Append the information to the data structure - data["bins"].append( - {"type": collection_type, "collectionDate": collection_date} - ) - - except Exception as e: - # Here you can log the exception if needed - print(f"An error occurred: {e}") - # Optionally, re-raise the exception if you want it to propagate - raise - finally: - # This block ensures that the driver is closed regardless of an exception. - if driver: - driver.quit() - return data diff --git a/uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py deleted file mode 100644 index 22661d3156..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/AshfieldDistrictCouncil.py +++ /dev/null @@ -1,105 +0,0 @@ -import time -from datetime import datetime - -from bs4 import BeautifulSoup -from selenium import webdriver -from selenium.webdriver.common.by import By -from selenium.webdriver.support import expected_conditions as EC -from selenium.webdriver.support.wait import WebDriverWait - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - # Get and check UPRN - user_postcode = kwargs.get("postcode") - user_paon = kwargs.get("paon") - check_paon(user_paon) - check_postcode(user_postcode) - web_driver = kwargs.get("web_driver") - headless = kwargs.get("headless") - bindata = {"bins": []} - - API_URL = "https://portal.digital.ashfield.gov.uk/w/webpage/raise-case?service=bin_calendar" - - # Create Selenium webdriver - driver = create_webdriver(web_driver, headless, None, __name__) - driver.get(API_URL) - - title = WebDriverWait(driver, 10).until( - EC.presence_of_element_located((By.ID, "sub_page_title")) - ) - - # Wait for the postcode field to appear then populate it - WebDriverWait(driver, 10).until( - EC.presence_of_element_located( - (By.CSS_SELECTOR, "input.relation_path_type_ahead_search") - ) - ) - - inputElement_postcode = WebDriverWait(driver, 10).until( - EC.presence_of_element_located( - (By.CSS_SELECTOR, "input.relation_path_type_ahead_search") - ) - ) - inputElement_postcode.clear() - inputElement_postcode.send_keys(user_postcode) - - # Wait for the 'Select your property' dropdown to appear and select the first result - dropdown = WebDriverWait(driver, 10).until( - EC.element_to_be_clickable( - ( - By.CLASS_NAME, - "result_list ", - ) - ) - ) - - address_element = ( - WebDriverWait(driver, 10) - .until( - EC.element_to_be_clickable( - (By.XPATH, f"//li[starts-with(@aria-label, '{user_paon}')]") - ) - ) - .click() - ) - - search_button = WebDriverWait(driver, 10).until( - EC.element_to_be_clickable( - (By.XPATH, "//input[@type='submit' and @value='Search']") - ) - ) - search_button.click() - - time.sleep(10) - - soup = BeautifulSoup(driver.page_source, features="html.parser") - soup.prettify() - - # Find the table by class name - table = soup.find("table", {"class": "table listing table-striped"}) - - # Iterate over each row in the tbody of the table - for row in table.find("tbody").find_all("tr"): - # Extract the service, day, 
and date for each row - service = row.find_all("td")[0].get_text(strip=True) - date = row.find_all("td")[2].get_text(strip=True) - - dict_data = { - "type": service, - "collectionDate": datetime.strptime(date, "%a, %d %b %Y").strftime( - date_format - ), - } - bindata["bins"].append(dict_data) - - return bindata diff --git a/uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py deleted file mode 100644 index d265a48323..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/AshfordBoroughCouncil.py +++ /dev/null @@ -1,124 +0,0 @@ -from datetime import datetime - -import requests -from bs4 import BeautifulSoup -from selenium.webdriver.common.by import By -from selenium.webdriver.support import expected_conditions as EC -from selenium.webdriver.support.ui import Select -from selenium.webdriver.support.wait import WebDriverWait - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - # Get and check UPRN - user_uprn = kwargs.get("uprn") - user_postcode = kwargs.get("postcode") - check_uprn(user_uprn) - check_postcode(user_postcode) - web_driver = kwargs.get("web_driver") - headless = kwargs.get("headless") - bindata = {"bins": []} - - API_URL = "https://secure.ashford.gov.uk/waste/collectiondaylookup/" - - # Create Selenium webdriver - driver = create_webdriver(web_driver, headless, None, __name__) - driver.get(API_URL) - - # Wait for the postcode field to appear then populate it - inputElement_postcode = WebDriverWait(driver, 30).until( - EC.presence_of_element_located( - (By.ID, "ContentPlaceHolder1_CollectionDayLookup2_TextBox_PostCode") - ) - ) - inputElement_postcode.send_keys(user_postcode) - - # Click search button - findAddress = WebDriverWait(driver, 10).until( - EC.presence_of_element_located( - ( - By.ID, - "ContentPlaceHolder1_CollectionDayLookup2_Button_PostCodeSearch", - ) - ) - ) - findAddress.click() - - # Wait for the 'Select your property' dropdown to appear and select the first result - dropdown = WebDriverWait(driver, 10).until( - EC.element_to_be_clickable( - ( - By.ID, - "ContentPlaceHolder1_CollectionDayLookup2_DropDownList_Addresses", - ) - ) - ) - - # Create a 'Select' for it, then select the first address in the list - # (Index 0 is "Make a selection from the list") - dropdownSelect = Select(dropdown) - dropdownSelect.select_by_value(str(user_uprn)) - - # Click search button - findAddress = WebDriverWait(driver, 10).until( - EC.presence_of_element_located( - (By.ID, "ContentPlaceHolder1_CollectionDayLookup2_Button_SelectAddress") - ) - ) - findAddress.click() - - h4_element = WebDriverWait(driver, 10).until( - EC.presence_of_element_located( - (By.XPATH, "//h4[contains(text(), 'Collection Dates')]") - ) - ) - - soup = BeautifulSoup(driver.page_source, features="html.parser") - - bin_tables = soup.find_all("table") - - for bin_table in bin_tables: - bin_text 
= bin_table.find("td", id=re.compile("CollectionDayLookup2_td_")) - if not bin_text: - continue - - bin_type_soup = bin_text.find("b") - - if not bin_type_soup: - continue - bin_type: str = bin_type_soup.text.strip().split(" (")[0] - - date_soup = bin_text.find( - "span", id=re.compile(r"CollectionDayLookup2_Label_\w*_Date") - ) - if not date_soup or ( - " " not in date_soup.text.strip() - and date_soup.text.strip().lower() != "today" - ): - continue - date_str: str = date_soup.text.strip() - try: - if date_soup.text.strip().lower() == "today": - date = datetime.now().date() - else: - date = datetime.strptime(date_str.split(" ")[1], "%d/%m/%Y").date() - - except ValueError: - continue - - dict_data = { - "type": bin_type, - "collectionDate": date.strftime("%d/%m/%Y"), - } - bindata["bins"].append(dict_data) - - return bindata diff --git a/uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py b/uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py deleted file mode 100644 index 6a3c56dba4..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/AylesburyValeCouncil.py +++ /dev/null @@ -1,69 +0,0 @@ -from bs4 import BeautifulSoup -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - uprn = kwargs.get("uprn") - check_uprn(uprn) - - # Make SOAP Request - headers = { - "Content-Type": "text/xml; charset=UTF-8", - "SOAPAction": '"http://tempuri.org/GetCollections"', - } - - post_data = ( - '' - + uprn - + "" - ) - - response = requests.post( - "http://avdcbins.web-labs.co.uk/RefuseApi.asmx", - data=post_data, - headers=headers, - ) - - if response.status_code != 200: - raise ValueError("No collection data found for provided UPRN.") - - # Make a BS4 object - soup = BeautifulSoup(response.text, "xml") - soup.prettify() - - data = {"bins": []} - - all_collections = soup.find_all("BinCollection") - - for i in range(len(all_collections)): - collection_date = datetime.strptime( - all_collections[i].Date.get_text(), "%Y-%m-%dT%H:%M:%S" - ) - children = all_collections[i].find_all( - ["Refuse", "Recycling", "Garden", "Food"], string="true" - ) - - for collection in children: - dict_data = { - "type": collection.name, - "collectionDate": collection_date.strftime(date_format), - } - data["bins"].append(dict_data) - - data["bins"].sort( - key=lambda x: datetime.strptime(x.get("collectionDate"), date_format) - ) - return data diff --git a/uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py deleted file mode 100644 index 9cb0e1da92..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BCPCouncil.py +++ /dev/null @@ -1,51 +0,0 @@ -import json -from datetime import timedelta - -import requests -from bs4 import BeautifulSoup -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. 
They can also override some operations with a default - implementation. - """ - - def parse_data(self, page: str, **kwargs) -> dict: - - user_uprn = kwargs.get("uprn") - check_uprn(user_uprn) - - api_url = f"https://online.bcpcouncil.gov.uk/bcp-apis/?api=BinDayLookup&uprn={user_uprn}" - - requests.packages.urllib3.disable_warnings() - response = requests.get(api_url) - json_data = json.loads(response.text) - data = {"bins": []} - collections = [] - - for bin in json_data: - bin_type = bin["BinType"] - next_date = datetime.strptime( - bin["Next"], "%m/%d/%Y %I:%M:%S %p" - ) + timedelta(hours=1) - subseq_date = datetime.strptime( - bin["Subsequent"], "%m/%d/%Y %I:%M:%S %p" - ) + timedelta(hours=1) - collections.append((bin_type, next_date)) - collections.append((bin_type, subseq_date)) - - ordered_data = sorted(collections, key=lambda x: x[1]) - data = {"bins": []} - for item in ordered_data: - dict_data = { - "type": item[0], - "collectionDate": item[1].strftime(date_format), - } - data["bins"].append(dict_data) - - return data diff --git a/uk_bin_collection/uk_bin_collection/councils/BaberghDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BaberghDistrictCouncil.py deleted file mode 100644 index cd67829d44..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BaberghDistrictCouncil.py +++ /dev/null @@ -1,196 +0,0 @@ -import re -import time - -import requests -from bs4 import BeautifulSoup -from selenium.webdriver.common.by import By -from selenium.webdriver.support import expected_conditions as EC -from selenium.webdriver.support.ui import Select -from selenium.webdriver.support.wait import WebDriverWait - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. 
They can also override some operations with a default - implementation. - """ - - def parse_data(self, page: str, **kwargs) -> dict: - - collection_day = kwargs.get("paon") - garden_collection_week = kwargs.get("postcode") - garden_collection_day = kwargs.get("uprn") - bindata = {"bins": []} - - days_of_week = [ - "Monday", - "Tuesday", - "Wednesday", - "Thursday", - "Friday", - "Saturday", - "Sunday", - ] - - garden_week = ["Week 1", "Week 2"] - - refusestartDate = datetime(2024, 11, 4) - recyclingstartDate = datetime(2024, 11, 11) - - offset_days = days_of_week.index(collection_day) - offset_days_garden = days_of_week.index(garden_collection_day) - if garden_collection_week: - garden_collection = garden_week.index(garden_collection_week) - - refuse_dates = get_dates_every_x_days(refusestartDate, 14, 28) - recycling_dates = get_dates_every_x_days(recyclingstartDate, 14, 28) - - bank_holidays = [ - ("25/12/2024", 2), - ("26/12/2024", 2), - ("27/12/2024", 3), - ("30/12/2024", 1), - ("31/12/2024", 2), - ("01/01/2025", 2), - ("02/01/2025", 2), - ("03/01/2025", 3), - ("06/01/2025", 1), - ("07/01/2025", 1), - ("08/01/2025", 1), - ("09/01/2025", 1), - ("10/01/2025", 1), - ("18/04/2025", 1), - ("21/04/2025", 1), - ("22/04/2025", 1), - ("23/04/2025", 1), - ("24/04/2025", 1), - ("25/04/2025", 1), - ("05/05/2025", 1), - ("06/05/2025", 1), - ("07/05/2025", 1), - ("08/05/2025", 1), - ("09/05/2025", 1), - ("26/05/2025", 1), - ("27/05/2025", 1), - ("28/05/2025", 1), - ("29/05/2025", 1), - ("30/05/2025", 1), - ("25/08/2025", 1), - ("26/08/2025", 1), - ("27/08/2025", 1), - ("28/08/2025", 1), - ("29/08/2025", 1), - ] - - for refuseDate in refuse_dates: - - collection_date = ( - datetime.strptime(refuseDate, "%d/%m/%Y") + timedelta(days=offset_days) - ).strftime("%d/%m/%Y") - - holiday_offset = next( - (value for date, value in bank_holidays if date == collection_date), 0 - ) - - if holiday_offset > 0: - collection_date = ( - datetime.strptime(collection_date, "%d/%m/%Y") - + 
timedelta(days=holiday_offset) - ).strftime("%d/%m/%Y") - - dict_data = { - "type": "Refuse Bin", - "collectionDate": collection_date, - } - bindata["bins"].append(dict_data) - - for recyclingDate in recycling_dates: - - collection_date = ( - datetime.strptime(recyclingDate, "%d/%m/%Y") - + timedelta(days=offset_days) - ).strftime("%d/%m/%Y") - - holiday_offset = next( - (value for date, value in bank_holidays if date == collection_date), 0 - ) - - if holiday_offset > 0: - collection_date = ( - datetime.strptime(collection_date, "%d/%m/%Y") - + timedelta(days=holiday_offset) - ).strftime("%d/%m/%Y") - - dict_data = { - "type": "Recycling Bin", - "collectionDate": collection_date, - } - bindata["bins"].append(dict_data) - - if garden_collection_week: - if garden_collection == 0: - gardenstartDate = datetime(2024, 11, 11) - elif garden_collection == 1: - gardenstartDate = datetime(2024, 11, 4) - - garden_dates = get_dates_every_x_days(gardenstartDate, 14, 28) - - garden_bank_holidays = [ - ("23/12/2024", 1), - ("24/12/2024", 1), - ("25/12/2024", 1), - ("26/12/2024", 1), - ("27/12/2024", 1), - ("30/12/2024", 1), - ("31/12/2024", 1), - ("01/01/2025", 1), - ("02/01/2025", 1), - ("03/01/2025", 1), - ] - - for gardenDate in garden_dates: - - collection_date = ( - datetime.strptime(gardenDate, "%d/%m/%Y") - + timedelta(days=offset_days_garden) - ).strftime("%d/%m/%Y") - - garden_holiday = next( - ( - value - for date, value in garden_bank_holidays - if date == collection_date - ), - 0, - ) - - if garden_holiday > 0: - continue - - holiday_offset = next( - (value for date, value in bank_holidays if date == collection_date), - 0, - ) - - if holiday_offset > 0: - collection_date = ( - datetime.strptime(collection_date, "%d/%m/%Y") - + timedelta(days=holiday_offset) - ).strftime("%d/%m/%Y") - - dict_data = { - "type": "Garden Bin", - "collectionDate": collection_date, - } - bindata["bins"].append(dict_data) - - bindata["bins"].sort( - key=lambda x: 
datetime.strptime(x.get("collectionDate"), "%d/%m/%Y") - ) - - return bindata diff --git a/uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py deleted file mode 100644 index ae31628afe..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BarnetCouncil.py +++ /dev/null @@ -1,220 +0,0 @@ -import time - -from bs4 import BeautifulSoup -from selenium.webdriver.common.by import By -from selenium.webdriver.support import expected_conditions as EC -from selenium.webdriver.support.ui import Select, WebDriverWait - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -def get_seasonal_overrides(): - url = "https://www.barnet.gov.uk/recycling-and-waste/bin-collections/find-your-bin-collection-day" - response = requests.get(url) - if response.status_code == 200: - soup = BeautifulSoup(response.text, "html.parser") - body_div = soup.find("div", class_="field--name-body") - ul_element = body_div.find("ul") - if ul_element: - li_elements = ul_element.find_all("li") - overrides_dict = {} - for li_element in li_elements: - li_text = li_element.text.strip() - li_text = re.sub(r"\([^)]*\)", "", li_text).strip() - if "Collections for" in li_text and "will be revised to" in li_text: - parts = li_text.split("will be revised to") - original_date = ( - parts[0] - .replace("Collections for", "") - .replace("\xa0", " ") - .strip() - ) - revised_date = parts[1].strip() - - # Extract day and month - date_parts = original_date.split()[1:] - if len(date_parts) == 2: - day, month = date_parts - # Ensure original_date has leading zeros for single-digit days - day = day.zfill(2) - original_date = f"{original_date.split()[0]} {day} {month}" - - # Store the information in the dictionary - overrides_dict[original_date] = revised_date - return overrides_dict - else: - print("UL element not found within the specified div.") - 
else: - print(f"Failed to retrieve the page. Status code: {response.status_code}") - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. - """ - - def parse_data(self, page: str, **kwargs) -> dict: - driver = None - try: - user_postcode = kwargs.get("postcode") - if not user_postcode: - raise ValueError("No postcode provided.") - check_postcode(user_postcode) - - user_paon = kwargs.get("paon") - check_paon(user_paon) - headless = kwargs.get("headless") - web_driver = kwargs.get("web_driver") - driver = create_webdriver(web_driver, headless, None, __name__) - page = "https://www.barnet.gov.uk/recycling-and-waste/bin-collections/find-your-bin-collection-day" - - driver.get(page) - - wait = WebDriverWait(driver, 10) - accept_cookies_button = wait.until( - EC.element_to_be_clickable( - ( - By.XPATH, - "//button[contains(text(), 'Accept additional cookies')]", - ) - ) - ) - accept_cookies_button.click() - - # Wait for the element to be clickable - wait = WebDriverWait(driver, 10) - find_your_collection_button = wait.until( - EC.element_to_be_clickable( - (By.LINK_TEXT, "Find your household collection day") - ) - ) - - # Scroll to the element (in case something is blocking it) - driver.execute_script( - "arguments[0].scrollIntoView();", find_your_collection_button - ) - - # Click the element - find_your_collection_button.click() - - try: - accept_cookies = WebDriverWait(driver, timeout=10).until( - EC.presence_of_element_located((By.ID, "epdagree")) - ) - accept_cookies.click() - accept_cookies_submit = WebDriverWait(driver, timeout=10).until( - EC.presence_of_element_located((By.ID, "epdsubmit")) - ) - accept_cookies_submit.click() - except: - print( - "Accept cookies banner not found or clickable within the specified time." 
- ) - pass - - postcode_input = WebDriverWait(driver, 10).until( - EC.presence_of_element_located( - (By.CSS_SELECTOR, '[aria-label="Postcode"]') - ) - ) - - postcode_input.send_keys(user_postcode) - - find_address_button = WebDriverWait(driver, 30).until( - EC.element_to_be_clickable((By.CSS_SELECTOR, '[value="Find address"]')) - ) - driver.execute_script("arguments[0].scrollIntoView();", find_address_button) - driver.execute_script("arguments[0].click();", find_address_button) - # find_address_button.click() - - time.sleep(15) - # Wait for address box to be visible - select_address_input = WebDriverWait(driver, 10).until( - EC.presence_of_element_located( - ( - By.ID, - "MainContent_CUSTOM_FIELD_808562d4b07f437ea751317cabd19d9eeaf8742f49cb4f7fa9bef99405b859f2", - ) - ) - ) - - # Select address based - select = Select(select_address_input) - addr_label = f"{user_postcode}, {user_paon}," - for addr_option in select.options: - option_name = addr_option.accessible_name[0 : len(addr_label)] - if option_name == addr_label: - break - select.select_by_value(addr_option.text) - - time.sleep(10) - # Wait for the specified div to be present - target_div_id = "MainContent_CUSTOM_FIELD_808562d4b07f437ea751317cabd19d9ed93a174c32b14f839b65f6abc42d8108_div" - target_div = WebDriverWait(driver, 10).until( - EC.presence_of_element_located((By.ID, target_div_id)) - ) - - time.sleep(5) - soup = BeautifulSoup(driver.page_source, "html.parser") - - # Find the div with the specified id - target_div = soup.find("div", {"id": target_div_id}) - - # Handle the additional table of info for xmas - try: - overrides_dict = get_seasonal_overrides() - except Exception as e: - overrides_dict = {} - - # Check if the div is found - if target_div: - bin_data = {"bins": []} - - for bin_div in target_div.find_all( - "div", - {"style": re.compile("background-color:.*; padding-left: 4px;")}, - ): - bin_type = bin_div.find("strong").text.strip() - collection_date_string = ( - re.search(r"Next collection 
date:\s+(.*)", bin_div.text) - .group(1) - .strip() - .replace(",", "") - ) - if collection_date_string in overrides_dict: - # Replace with the revised date from overrides_dict - collection_date_string = overrides_dict[collection_date_string] - - current_date = datetime.now() - parsed_date = datetime.strptime( - collection_date_string + f" {current_date.year}", "%A %d %B %Y" - ) - # Check if the parsed date is in the past and not today - if parsed_date.date() < current_date.date(): - # If so, set the year to the next year - parsed_date = parsed_date.replace(year=current_date.year + 1) - else: - # If not, set the year to the current year - parsed_date = parsed_date.replace(year=current_date.year) - formatted_date = parsed_date.strftime("%d/%m/%Y") - - contains_date(formatted_date) - bin_info = {"type": bin_type, "collectionDate": formatted_date} - bin_data["bins"].append(bin_info) - else: - raise ValueError("Collection data not found.") - - except Exception as e: - # Here you can log the exception if needed - print(f"An error occurred: {e}") - # Optionally, re-raise the exception if you want it to propagate - raise - finally: - # This block ensures that the driver is closed regardless of an exception - if driver: - driver.quit() - return bin_data diff --git a/uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py deleted file mode 100644 index 4026ac7be5..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BarnsleyMBCouncil.py +++ /dev/null @@ -1,126 +0,0 @@ -from typing import Dict, List, Any -from bs4 import BeautifulSoup -from dateutil.relativedelta import relativedelta -import requests -from datetime import datetime -from uk_bin_collection.uk_bin_collection.common import ( - check_postcode, - check_uprn, - date_format, -) -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -def parse_bin_text(bin_type_str: str, bin_date_str: str) -> 
List[Dict[str, str]]: - """ - Takes a raw bin and date string, parses the bin(s) and date, then returns - a list of bins with their date. - """ - - bins = [] - - if bin_date_str == "Today": - bin_date = datetime.today() - elif bin_date_str == "Tomorrow": - bin_date = datetime.today() + relativedelta(days=1) - else: - bin_date = datetime.strptime(bin_date_str, "%A, %B %d, %Y") - - for bin_type in bin_type_str.split(", "): - bins.append( - { - "type": bin_type.strip() + " bin", - "collectionDate": bin_date.strftime(date_format), - } - ) - - return bins - -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. - """ - - def parse_data(self, page: str, **kwargs: Any) -> Dict[str, Any]: - data: Dict[str, Any] = {"bins": []} - - # Get UPRN and postcode from kwargs - user_uprn = str(kwargs.get("uprn")) - user_postcode = str(kwargs.get("postcode")) - check_postcode(user_postcode) - check_uprn(user_uprn) - - # Pass in form data and make the POST request - headers = { - "authority": "waste.barnsley.gov.uk", - "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7", - "accept-language": "en-GB,en;q=0.9", - "cache-control": "no-cache", - "content-type": "application/x-www-form-urlencoded", - "origin": "https://waste.barnsley.gov.uk", - "pragma": "no-cache", - "referer": "https://waste.barnsley.gov.uk/ViewCollection/SelectAddress", - "sec-ch-ua": '"Chromium";v="118", "Opera GX";v="104", "Not=A?Brand";v="99"', - "sec-ch-ua-mobile": "?0", - "sec-ch-ua-platform": '"Windows"', - "sec-fetch-dest": "document", - "sec-fetch-mode": "navigate", - "sec-fetch-site": "same-origin", - "sec-fetch-user": "?1", - "upgrade-insecure-requests": "1", - "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/118.0.5993.118 Safari/537.36", - } - form_data = { - "personInfo.person1.HouseNumberOrName": "", - "personInfo.person1.Postcode": f"{user_postcode}", - "personInfo.person1.UPRN": f"{user_uprn}", - "person1_SelectAddress": "Select address", - } - response = requests.post( - "https://waste.barnsley.gov.uk/ViewCollection/SelectAddress", - headers=headers, - data=form_data, - ) - - if response.status_code != 200: - raise ConnectionRefusedError( - "Error getting results from website! Please open an issue on GitHub!" - ) - - soup = BeautifulSoup(response.text, features="html.parser") - - results = soup.find_all("fieldset") - - # Next collection details - highlight_content = results[0].find("div", {"class": "highlight-content"}) - bin_date_str = highlight_content.find( - "em", {"class": "ui-bin-next-date"} - ).text.strip() - bin_type_str = highlight_content.find( - "p", {"class": "ui-bin-next-type"} - ).text.strip() - - data["bins"].extend(parse_bin_text(bin_type_str, bin_date_str)) - - # Hold bins we already got from next collection, to avoid re-adding - # from upcoming collections. - used_bins = set(bin["type"] for bin in data["bins"]) - - # Upcoming collections - upcoming_collections = results[1].find("tbody").find_all("tr") - for row in upcoming_collections: - columns = row.find_all("td") - bin_date_str = columns[0].text.strip() - bin_type_str = columns[1].text.strip() - - # Only add to bin list if not already present. - for bin in parse_bin_text(bin_type_str, bin_date_str): - if bin["type"] not in used_bins: - data["bins"].append(bin) - - # Add to used bins, so future collections are not re-added. 
- used_bins.add(bin["type"]) - - return data diff --git a/uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py deleted file mode 100644 index 64e3563468..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BasildonCouncil.py +++ /dev/null @@ -1,89 +0,0 @@ -import requests -import json -from datetime import datetime -from uk_bin_collection.uk_bin_collection.common import ( - check_uprn, - date_format as DATE_FORMAT, -) -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete class that implements the abstract bin data fetching and parsing logic. - """ - - def parse_data(self, page: str, **kwargs) -> dict: - url_base = ( - "https://basildonportal.azurewebsites.net/api/getPropertyRefuseInformation" - ) - - uprn = kwargs.get("uprn") - # Check the UPRN is valid - check_uprn(uprn) - - payload = {"uprn": uprn} - - headers = {"Content-Type": "application/json"} - - response = requests.post(url_base, data=json.dumps(payload), headers=headers) - - if response.status_code == 200: - data = response.json() - - # Initialize an empty list to store the bin collection details - bins = [] - - # Function to add collection details to bins list - def add_collection(service_name, collection_data): - bins.append( - { - "type": service_name, - "collectionDate": collection_data.get( - "current_collection_date" - ), - } - ) - - available_services = data.get("refuse", {}).get("available_services", {}) - - date_format = "%d-%m-%Y" # Define the desired date format - - for service_name, service_data in available_services.items(): - # Handle the different cases of service data - match service_data["container"]: - case "Green Wheelie Bin": - subscription_status = ( - service_data["subscription"]["active"] - if service_data.get("subscription") - else False - ) - type_descr = f"Green Wheelie Bin ({'Active' if 
subscription_status else 'Expired'})" - case "N/A": - type_descr = service_data.get("name", "Unknown Service") - case _: - type_descr = service_data.get("container", "Unknown Container") - - date_str = service_data.get("current_collection_date") - if date_str: # Ensure the date string exists - try: - # Parse and format the date string - date_obj = datetime.strptime(date_str, "%Y-%m-%d") - formatted_date = date_obj.strftime(DATE_FORMAT) - except ValueError: - formatted_date = "Invalid Date" - else: - formatted_date = "No Collection Date" - - bins.append( - { - "type": type_descr, # Use service name from the data - "collectionDate": formatted_date, - } - ) - - else: - print(f"Failed to fetch data. Status code: {response.status_code}") - return {} - - return {"bins": bins} diff --git a/uk_bin_collection/uk_bin_collection/councils/BasingstokeCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BasingstokeCouncil.py deleted file mode 100644 index 3e345d5f75..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BasingstokeCouncil.py +++ /dev/null @@ -1,72 +0,0 @@ -from bs4 import BeautifulSoup -from datetime import datetime -import requests -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - -COLLECTION_KINDS = { - "waste": "rteelem_ctl03_pnlCollections_Refuse", - "recycling": "rteelem_ctl03_pnlCollections_Recycling", - "glass": "rteelem_ctl03_pnlCollections_Glass", - # Garden waste data is only returned if the property is subscribed to the Garden Waste service - "garden": "rteelem_ctl03_pnlCollections_GardenWaste", -} - - -class CouncilClass(AbstractGetBinDataClass): - def parse_data(self, page: str, **kwargs) -> dict: - requests.packages.urllib3.disable_warnings() - - user_uprn = kwargs.get("uprn") - check_uprn(user_uprn) - - cookies = { - "cookie_control_popup": "A", - "WhenAreMyBinsCollected": f"{user_uprn}", - } - - headers = { - "Accept": "*/*", - 
"Accept-Language": "en-GB,en;q=0.9", - "Referer": "https://www.basingstoke.gov.uk/", - "Sec-Fetch-Dest": "document", - "Sec-Fetch-Mode": "navigate", - "Sec-Fetch-Site": "cross-site", - "Sec-Fetch-User": "?1", - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.5845.188 Safari/537.36", - } - - response = requests.get( - "https://www.basingstoke.gov.uk/bincollections", - cookies=cookies, - headers=headers, - verify=False, - ) - - if response.status_code != 200 or response.text == "0|error|500||": - raise SystemError( - "Error retrieving data! Please try again or raise an issue on GitHub!" - ) - - # Make a BS4 object - soup = BeautifulSoup(response.text, features="html.parser") - soup.prettify() - - bins = [] - - for collection_type, collection_class in COLLECTION_KINDS.items(): - for date in soup.select(f"div#{collection_class} li"): - date_pattern = r"\d{1,2}\s\w+\s\d{4}" # Regex pattern to extract date - match = re.search(date_pattern, date.get_text(strip=True)) - - if match: - extracted_date = match.group() - formatted_date = datetime.strptime( - extracted_date, "%d %B %Y" - ).strftime(date_format) - - bins.append( - {"type": collection_type, "collectionDate": formatted_date} - ) - - return {"bins": bins} diff --git a/uk_bin_collection/uk_bin_collection/councils/BathAndNorthEastSomersetCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BathAndNorthEastSomersetCouncil.py deleted file mode 100644 index 8bc2b0474d..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BathAndNorthEastSomersetCouncil.py +++ /dev/null @@ -1,100 +0,0 @@ -import json -import requests -from bs4 import BeautifulSoup -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass -import ssl -import urllib3 - - -class CustomHttpAdapter(requests.adapters.HTTPAdapter): - """Transport adapter" that allows us to use custom ssl_context.""" - - 
def __init__(self, ssl_context=None, **kwargs): - self.ssl_context = ssl_context - super().__init__(**kwargs) - - def init_poolmanager(self, connections, maxsize, block=False): - self.poolmanager = urllib3.poolmanager.PoolManager( - num_pools=connections, - maxsize=maxsize, - block=block, - ssl_context=self.ssl_context, - ) - - -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. - """ - - def parse_data(self, page: str, **kwargs) -> dict: - user_uprn = kwargs.get("uprn") - check_uprn(user_uprn) - - headers = { - "Accept": "application/json, text/javascript, */*; q=0.01", - "Accept-Language": "en-GB,en;q=0.9", - "Cache-Control": "no-cache", - "Connection": "keep-alive", - "Content-Type": "application/json; charset=utf-8", - "Pragma": "no-cache", - "Referer": "https://www.bathnes.gov.uk/webforms/waste/collectionday/", - "Sec-Fetch-Dest": "empty", - "Sec-Fetch-Mode": "cors", - "Sec-Fetch-Site": "same-origin", - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.5845.188 Safari/537.36", - "X-Requested-With": "XMLHttpRequest", - } - - session = requests.Session() - ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) - ctx.options |= 0x4 - session.mount("https://", CustomHttpAdapter(ctx)) - - requests.packages.urllib3.disable_warnings() - response = session.get( - f"https://www.bathnes.gov.uk/webapi/api/BinsAPI/v2/getbartecroute/{user_uprn}/true", - headers=headers, - ) - if response.text == "": - raise ValueError( - "Error parsing data. Please check the provided UPRN. " - "If this error continues please open an issue on GitHub." 
- ) - json_data = json.loads(response.text) - - data = {"bins": []} - - if len(json_data["residualNextDate"]) > 0: - dict_data = { - "type": "Black Rubbish Bin", - "collectionDate": datetime.strptime( - json_data["residualNextDate"], "%Y-%m-%dT%H:%M:%S" - ).strftime(date_format), - } - data["bins"].append(dict_data) - if len(json_data["recyclingNextDate"]) > 0: - dict_data = { - "type": "Recycling Containers", - "collectionDate": datetime.strptime( - json_data["recyclingNextDate"], "%Y-%m-%dT%H:%M:%S" - ).strftime(date_format), - } - data["bins"].append(dict_data) - if len(json_data["organicNextDate"]) > 0: - dict_data = { - "type": "Garden Waste", - "collectionDate": datetime.strptime( - json_data["organicNextDate"], "%Y-%m-%dT%H:%M:%S" - ).strftime(date_format), - } - data["bins"].append(dict_data) - - data["bins"].sort( - key=lambda x: datetime.strptime(x.get("collectionDate"), date_format) - ) - - return data diff --git a/uk_bin_collection/uk_bin_collection/councils/BedfordBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BedfordBoroughCouncil.py deleted file mode 100644 index 6cd25145dd..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BedfordBoroughCouncil.py +++ /dev/null @@ -1,49 +0,0 @@ -import json -import requests -from bs4 import BeautifulSoup -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - user_uprn = kwargs.get("uprn") - check_uprn(user_uprn) - - api_url = f"https://bbaz-as-prod-bartecapi.azurewebsites.net/api/bincollections/residential/getbyuprn/{user_uprn}/35" - - requests.packages.urllib3.disable_warnings() - response = requests.get(api_url) - - if response.status_code != 200: - raise ConnectionError("Could not get latest data!") - - json_data = json.loads(response.text)["BinCollections"] - data = {"bins": []} - collections = [] - - for day in json_data: - for bin in day: - bin_type = bin["BinType"] - next_date = datetime.strptime( - bin["JobScheduledStart"], "%Y-%m-%dT%H:%M:%S" - ) - collections.append((bin_type, next_date)) - - ordered_data = sorted(collections, key=lambda x: x[1]) - data = {"bins": []} - for item in ordered_data: - dict_data = { - "type": item[0], - "collectionDate": item[1].strftime(date_format), - } - data["bins"].append(dict_data) - - return data diff --git a/uk_bin_collection/uk_bin_collection/councils/BedfordshireCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BedfordshireCouncil.py deleted file mode 100644 index 691dc3a707..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BedfordshireCouncil.py +++ /dev/null @@ -1,71 +0,0 @@ -from datetime import datetime - -import requests -from bs4 import BeautifulSoup - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - user_uprn = kwargs.get("uprn") - user_postcode = kwargs.get("postcode") - - check_uprn(user_uprn) - check_postcode(user_postcode) - - # Start a new session to walk through the form - requests.packages.urllib3.disable_warnings() - s = requests.Session() - - headers = { - "Origin": "https://www.centralbedfordshire.gov.uk", - "Referer": "https://www.centralbedfordshire.gov.uk/info/163/bins_and_waste_collections_-_check_bin_collection_day", - } - - files = { - "postcode": (None, user_postcode), - "address": (None, user_uprn), - } - - response = requests.post( - "https://www.centralbedfordshire.gov.uk/info/163/bins_and_waste_collections_-_check_bin_collection_day#my_bin_collections", - headers=headers, - files=files, - ) - - # Make that BS4 object and use it to prettify the response - soup = BeautifulSoup(response.content, features="html.parser") - soup.prettify() - - collections_div = soup.find(id="collections") - - # Get the collection items on the page and strip the bits of text that we don't care for - collections = [] - for bin in collections_div.find_all("h3"): - next_bin = bin.next_sibling - - while next_bin.name != "h3" and next_bin.name != "p": - if next_bin.name != "br": - collection_date = datetime.strptime(bin.text, "%A, %d %B %Y") - collections.append((next_bin, collection_date)) - next_bin = next_bin.next_sibling - - # Sort the collections by date order rather than bin type, then return as a dictionary (with str date) - ordered_data = sorted(collections, key=lambda x: x[1]) - data = {"bins": []} - for item in ordered_data: - dict_data = { - "type": item[0], - "collectionDate": item[1].strftime(date_format), - } - data["bins"].append(dict_data) - - return data diff --git a/uk_bin_collection/uk_bin_collection/councils/BelfastCityCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BelfastCityCouncil.py deleted file mode 100644 index ce81105359..0000000000 --- 
a/uk_bin_collection/uk_bin_collection/councils/BelfastCityCouncil.py +++ /dev/null @@ -1,105 +0,0 @@ -import logging -from datetime import datetime - -import requests -import urllib - -from bs4 import BeautifulSoup -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. - """ - - def get_session_variable(self, soup, id) -> str: - """Extract ASP.NET variable from the HTML.""" - element = soup.find("input", {"id": id}) - if element: - return element.get("value") - else: - raise ValueError(f"Unable to find element with id: {id}") - - def parse_data(self, page: str, **kwargs) -> dict: - bin_data = {"bins": []} - headers = { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:129.0) Gecko/20100101 Firefox/119.0" - } - - session = requests.Session() - session.headers.update(headers) - - user_uprn = kwargs.get("uprn") - user_postcode = kwargs.get("postcode") - URL = "https://online.belfastcity.gov.uk/find-bin-collection-day/Default.aspx" - - # Build initial ASP.NET variables for Postcode Find address - response = session.get(URL) - response.raise_for_status() - soup = BeautifulSoup(response.text, "html.parser") - form_data = { - "__EVENTTARGET": "", - "__EVENTARGUMENT": "", - "__VIEWSTATE": self.get_session_variable(soup, "__VIEWSTATE"), - "__VIEWSTATEGENERATOR": self.get_session_variable( - soup, "__VIEWSTATEGENERATOR" - ), - "__SCROLLPOSITIONX": "0", - "__SCROLLPOSITIONY": "0", - "__EVENTVALIDATION": self.get_session_variable(soup, "__EVENTVALIDATION"), - "ctl00$MainContent$searchBy_radio": "P", - "ctl00$MainContent$Street_textbox": "", - "ctl00$MainContent$Postcode_textbox": user_postcode, - 
"ctl00$MainContent$AddressLookup_button": "Find address", - } - - # Build intermediate ASP.NET variables for uprn Select address - response = session.post(URL, data=form_data) - response.raise_for_status() - soup = BeautifulSoup(response.text, "html.parser") - form_data = { - "__EVENTTARGET": "", - "__EVENTARGUMENT": "", - "__VIEWSTATE": self.get_session_variable(soup, "__VIEWSTATE"), - "__VIEWSTATEGENERATOR": self.get_session_variable( - soup, "__VIEWSTATEGENERATOR" - ), - "__SCROLLPOSITIONX": "0", - "__SCROLLPOSITIONY": "0", - "__EVENTVALIDATION": self.get_session_variable(soup, "__EVENTVALIDATION"), - "ctl00$MainContent$searchBy_radio": "P", - "ctl00$MainContent$Street_textbox": "", - "ctl00$MainContent$Postcode_textbox": user_postcode, - "ctl00$MainContent$lstAddresses": user_uprn, - "ctl00$MainContent$SelectAddress_button": "Select address", - } - - # Actual http call to get Bins Data - response = session.post(URL, data=form_data) - response.raise_for_status() - soup = BeautifulSoup(response.text, "html.parser") - - # Find Bins table and data - table = soup.find("div", {"id": "binsGrid"}) - if table: - rows = table.find_all("tr") - for row in rows: - columns = row.find_all("td") - if len(columns) >= 4: - collection_type = columns[0].get_text(strip=True) - collection_date_raw = columns[3].get_text(strip=True) - # if the month number is a single digit there are 2 spaces, stripping all spaces to make it consistent - collection_date = datetime.strptime( - collection_date_raw.replace(" ", ""), "%a%b%d%Y" - ) - bin_entry = { - "type": collection_type, - "collectionDate": collection_date.strftime(date_format), - } - bin_data["bins"].append(bin_entry) - return bin_data diff --git a/uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py deleted file mode 100644 index d7ed4458ea..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BexleyCouncil.py +++ /dev/null @@ -1,150 +0,0 @@ -import 
time -from datetime import datetime - -from bs4 import BeautifulSoup -from selenium.webdriver.common.by import By -from selenium.webdriver.common.keys import Keys -from selenium.webdriver.support import expected_conditions as EC -from selenium.webdriver.support.ui import Select -from selenium.webdriver.support.wait import WebDriverWait - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - -# import the wonderful Beautiful Soup and the URL grabber - - -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. - """ - - def parse_data(self, page: str, **kwargs) -> dict: - driver = None - try: - page = "https://waste.bexley.gov.uk/waste" - - data = {"bins": []} - - user_uprn = kwargs.get("uprn") - user_paon = kwargs.get("paon") - user_postcode = kwargs.get("postcode") - web_driver = kwargs.get("web_driver") - headless = kwargs.get("headless") - - # Create Selenium webdriver - driver = create_webdriver(web_driver, headless, None, __name__) - driver.get(page) - - wait = WebDriverWait(driver, 10) - - inputElement_postcodesearch = wait.until( - EC.element_to_be_clickable((By.ID, "pc")) - ) - inputElement_postcodesearch.send_keys(user_postcode) - - find_address_btn = wait.until( - EC.element_to_be_clickable((By.XPATH, '//*[@id="sub"]')) - ) - find_address_btn.click() - - dropdown_options = wait.until( - EC.presence_of_element_located((By.XPATH, '//*[@id="address"]')) - ) - time.sleep(2) - dropdown_options.click() - time.sleep(1) - - # Wait for the element to be clickable - address = WebDriverWait(driver, 10).until( - EC.element_to_be_clickable( - (By.XPATH, f'//li[contains(text(), "{user_paon}")]') - ) - ) - - # Click the element - address.click() - - submit_address = wait.until( - EC.presence_of_element_located((By.XPATH, 
'//*[@id="go"]')) - ) - time.sleep(2) - submit_address.click() - - results_found = wait.until( - EC.element_to_be_clickable( - (By.XPATH, '//h1[contains(text(), "Your bin days")]') - ) - ) - - final_page = wait.until( - EC.presence_of_element_located((By.CLASS_NAME, "waste__collections")) - ) - - soup = BeautifulSoup(driver.page_source, features="html.parser") - - # Find all waste services - - # Initialize the data dictionary - data = {"bins": []} - bin_sections = soup.find_all("h3", class_="waste-service-name") - - # Loop through each bin field - for bin_section in bin_sections: - # Extract the bin type (e.g., "Brown Caddy", "Green Wheelie Bin", etc.) - bin_type = bin_section.get_text(strip=True).split("\n")[ - 0 - ] # The first part is the bin type - - # Find the next sibling
tag that contains the next collection information - summary_list = bin_section.find_next("dl", class_="govuk-summary-list") - - if summary_list: - # Now, instead of finding by class, we'll search by text within the dt element - next_collection_dt = summary_list.find( - "dt", string=lambda text: "Next collection" in text - ) - - if next_collection_dt: - # Find the sibling
tag for the collection date - next_collection = next_collection_dt.find_next_sibling( - "dd" - ).get_text(strip=True) - - if next_collection: - try: - # Parse the next collection date (assuming the format is like "Tuesday 15 October 2024") - parsed_date = datetime.strptime( - next_collection, "%A %d %B %Y" - ) - - # Add the bin information to the data dictionary - data["bins"].append( - { - "type": bin_type, - "collectionDate": parsed_date.strftime( - date_format - ), - } - ) - except ValueError as e: - print(f"Error parsing date for {bin_type}: {e}") - else: - print(f"No next collection date found for {bin_type}") - else: - print(f"No 'Next collection' text found for {bin_type}") - else: - print(f"No summary list found for {bin_type}") - - except Exception as e: - # Here you can log the exception if needed - print(f"An error occurred: {e}") - # Optionally, re-raise the exception if you want it to propagate - raise - finally: - # This block ensures that the driver is closed regardless of an exception - if driver: - driver.quit() - return data diff --git a/uk_bin_collection/uk_bin_collection/councils/BirminghamCityCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BirminghamCityCouncil.py deleted file mode 100644 index 2fa1c6ab5d..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BirminghamCityCouncil.py +++ /dev/null @@ -1,133 +0,0 @@ -from typing import Dict, List, Any, Optional -from bs4 import BeautifulSoup -from dateutil.relativedelta import relativedelta -import requests -import logging -import re -from datetime import datetime -from uk_bin_collection.uk_bin_collection.common import * -from dateutil.parser import parse - -from uk_bin_collection.uk_bin_collection.common import check_uprn, check_postcode -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -def get_token(page) -> str: - """ - Get a __token to include in the form data - :param page: Page html - :return: Form __token - """ - soup = 
BeautifulSoup(page.text, features="html.parser") - soup.prettify() - token = soup.find("input", {"name": "__token"}).get("value") - return token - - -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. - """ - - def get_data(self, url: str) -> str: - """This method makes the request to the council - - Keyword arguments: - url -- the url to get the data from - """ - # Set a user agent so we look like a browser ;-) - user_agent = ( - "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) " - "Chrome/108.0.0.0 Safari/537.36" - ) - headers = {"User-Agent": user_agent} - requests.packages.urllib3.disable_warnings() - - # Make the Request - change the URL - find out your property number - try: - session = requests.Session() - session.headers.update(headers) - full_page = session.get(url) - return full_page - except requests.exceptions.HTTPError as errh: - logging.error(f"Http Error: {errh}") - raise - except requests.exceptions.ConnectionError as errc: - logging.error(f"Error Connecting: {errc}") - raise - except requests.exceptions.Timeout as errt: - logging.error(f"Timeout Error: {errt}") - raise - except requests.exceptions.RequestException as err: - logging.error(f"Oops: Something Else {err}") - raise - - def parse_data(self, page: str, **kwargs: Any) -> Dict[str, List[Dict[str, str]]]: - uprn: Optional[str] = kwargs.get("uprn") - postcode: Optional[str] = kwargs.get("postcode") - - if uprn is None: - raise ValueError("UPRN is required and must be a non-empty string.") - if postcode is None: - raise ValueError("Postcode is required and must be a non-empty string.") - - check_uprn(uprn) - check_postcode(postcode) - - values = { - "__token": get_token(page), - "page": "491", - "locale": "en_GB", - "q1f8ccce1d1e2f58649b4069712be6879a839233f_0_0": postcode, - 
"q1f8ccce1d1e2f58649b4069712be6879a839233f_1_0": uprn, - "next": "Next", - } - headers = {"User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64)"} - requests.packages.urllib3.disable_warnings() - response = requests.request( - "POST", - "https://www.birmingham.gov.uk/xfp/form/619", - headers=headers, - data=values, - ) - - soup = BeautifulSoup(response.text, features="html.parser") - - rows = soup.find("table").find_all("tr") - - # Form a JSON wrapper - data: Dict[str, List[Dict[str, str]]] = {"bins": []} - - # Loops the Rows - for row in rows: - cells = row.find_all("td") - if cells: - bin_type = cells[0].get_text(strip=True) - collection_next = cells[1].get_text(strip=True) - - collection_date = re.findall(r"\(.*?\)", collection_next) - - if len(collection_date) != 1: - continue - - collection_date_obj = parse( - re.sub(r"[()]", "", collection_date[0]) - ).date() - - # since we only have the next collection day, if the parsed date is in the past, - # assume the day is instead next month - if collection_date_obj < datetime.now().date(): - collection_date_obj += relativedelta(months=1) - - # Make each Bin element in the JSON - dict_data = { - "type": bin_type, - "collectionDate": collection_date_obj.strftime(date_format), - } - - # Add data to the main JSON Wrapper - data["bins"].append(dict_data) - - return data diff --git a/uk_bin_collection/uk_bin_collection/councils/BlabyDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BlabyDistrictCouncil.py deleted file mode 100644 index c24d71e527..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BlabyDistrictCouncil.py +++ /dev/null @@ -1,59 +0,0 @@ -import requests -from bs4 import BeautifulSoup - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -# import the wonderful Beautiful Soup and the URL grabber -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all 
abstract operations of the - base class. They can also override some operations with a default - implementation. - """ - - def parse_data(self, page: str, **kwargs) -> dict: - - user_uprn = kwargs.get("uprn") - check_uprn(user_uprn) - bindata = {"bins": []} - - URI = f"https://my.blaby.gov.uk/set-location.php?ref={user_uprn}&redirect=collections" - - # Make the GET request - response = requests.get(URI) - - # Parse the HTML - soup = BeautifulSoup(response.content, "html.parser") - - # Find each collection container based on the class "box-item" - for container in soup.find_all(class_="box-item"): - - # Get the next collection dates from the

tag containing - try: - dates_tag = ( - container.find("p", string=lambda text: "Next" in text) - .find_next("p") - .find("strong") - ) - except: - continue - collection_dates = ( - dates_tag.text.strip().split(", and then ") - if dates_tag - else "No dates found" - ) - - for collection_date in collection_dates: - dict_data = { - "type": container.find("h2").text.strip(), - "collectionDate": collection_date, - } - bindata["bins"].append(dict_data) - - bindata["bins"].sort( - key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y") - ) - - return bindata diff --git a/uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py deleted file mode 100644 index d71e40274a..0000000000 --- a/uk_bin_collection/uk_bin_collection/councils/BlackburnCouncil.py +++ /dev/null @@ -1,115 +0,0 @@ -import json -import logging -import ssl -from collections import OrderedDict -from datetime import datetime - -import requests -import urllib3 -from bs4 import BeautifulSoup - -from uk_bin_collection.uk_bin_collection.common import * -from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass - - -class CustomHttpAdapter(requests.adapters.HTTPAdapter): - """Transport adapter" that allows us to use custom ssl_context.""" - - def __init__(self, ssl_context=None, **kwargs): - self.ssl_context = ssl_context - super().__init__(**kwargs) - - def init_poolmanager(self, connections, maxsize, block=False): - self.poolmanager = urllib3.poolmanager.PoolManager( - num_pools=connections, - maxsize=maxsize, - block=block, - ssl_context=self.ssl_context, - ) - - -class CouncilClass(AbstractGetBinDataClass): - """ - Concrete classes have to implement all abstract operations of the - base class. They can also override some operations with a default - implementation. 
- """ - - def parse_data(self, page: str, **kwargs) -> dict: - # Make a BS4 object - - driver = None - try: - data = {"bins": []} - uprn = kwargs.get("uprn") - web_driver = kwargs.get("web_driver") - headless = kwargs.get("headless") - current_month = datetime.today().strftime("%m") - current_year = datetime.today().strftime("%Y") - url = ( - f"https://mybins.blackburn.gov.uk/api/mybins/getbincollectiondays?uprn={uprn}&month={current_month}" - f"&year={current_year}" - ) - driver = create_webdriver(web_driver, headless, None, __name__) - driver.get(url) - - soup = BeautifulSoup(driver.page_source, "html.parser") - - # Find the

 tag that contains the JSON data
-            pre_tag = soup.find("pre")
-
-            if pre_tag:
-                # Extract the text content within the 
 tag
-
-                # Return JSON from response and loop through collections
-                json_result = json.loads(pre_tag.contents[0])
-                bin_collections = json_result["BinCollectionDays"]
-                for collection in bin_collections:
-                    if collection is not None:
-                        bin_type = collection[0].get("BinType")
-                        current_collection_date = collection[0].get("CollectionDate")
-                        if current_collection_date is None:
-                            continue
-                        current_collection_date = datetime.strptime(
-                            current_collection_date, "%Y-%m-%d"
-                        )
-                        next_collection_date = collection[0].get(
-                            "NextScheduledCollectionDate"
-                        )
-                        if next_collection_date is None:
-                            continue
-                        next_collection_date = datetime.strptime(
-                            next_collection_date, "%Y-%m-%d"
-                        )
-
-                        # Work out the most recent collection date to display
-                        if (
-                            datetime.today().date()
-                            <= current_collection_date.date()
-                            < next_collection_date.date()
-                        ):
-                            collection_date = current_collection_date
-                        else:
-                            collection_date = next_collection_date
-
-                        dict_data = {
-                            "type": bin_type,
-                            "collectionDate": collection_date.strftime(date_format),
-                        }
-                        data["bins"].append(dict_data)
-
-                        data["bins"].sort(
-                            key=lambda x: datetime.strptime(
-                                x.get("collectionDate"), date_format
-                            )
-                        )
-        except Exception as e:
-            # Here you can log the exception if needed
-            print(f"An error occurred: {e}")
-            # Optionally, re-raise the exception if you want it to propagate
-            raise
-        finally:
-            # This block ensures that the driver is closed regardless of an exception
-            if driver:
-                driver.quit()
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py
deleted file mode 100644
index e278152988..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BoltonCouncil.py
+++ /dev/null
@@ -1,108 +0,0 @@
-import time
-import re
-
-from datetime import datetime
-
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.common.keys import Keys
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import Select
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        driver = None
-        try:
-            user_uprn = kwargs.get("uprn")
-            check_uprn(user_uprn)
-
-            user_postcode = kwargs.get("postcode")
-            check_postcode(user_postcode)
-            web_driver = kwargs.get("web_driver")
-            headless = kwargs.get("headless")
-
-            data = {"bins": []}
-
-            # Get our initial session running
-            page = "https://carehomes.bolton.gov.uk/bins.aspx"
-
-            driver = create_webdriver(web_driver, headless, None, __name__)
-            driver.get(page)
-
-            # If you bang in the house number (or property name) and postcode in the box it should find your property
-            wait = WebDriverWait(driver, 30)
-
-            pc_search_box = wait.until(
-                EC.presence_of_element_located((By.ID, "txtPostcode"))
-            )
-
-            pc_search_box.send_keys(user_postcode)
-
-            pcsearch_btn = wait.until(EC.element_to_be_clickable((By.ID, "btnSubmit")))
-
-            pcsearch_btn.click()
-
-            # Wait for the 'Select your property' dropdown to appear and select the first result
-            dropdown = wait.until(EC.element_to_be_clickable((By.ID, "ddlAddresses")))
-
-            dropdown_options = wait.until(
-                EC.presence_of_element_located((By.XPATH, "//select/option[1]"))
-            )
-            time.sleep(1)
-            # Create a 'Select' for it, then select the first address in the list
-            # (Index 0 is "Make a selection from the list")
-            dropdownSelect = Select(dropdown)
-            dropdownSelect.select_by_value(str(user_uprn))
-            dropdown_options = wait.until(
-                EC.presence_of_element_located((By.ID, "pnlStep3"))
-            )
-
-            soup = BeautifulSoup(driver.page_source, features="html.parser")
-            soup.prettify()
-
-            collections = []
-
-            # Find section with bins in
-            sections = soup.find_all("div", {"class": "bin-info"})
-
-            # For each bin section, get the text and the list elements
-            for item in sections:
-                words = item.find_next("strong").text.split()[2:4]
-                bin_type = " ".join(words).capitalize()
-                date_list = item.find_all("p")
-                for d in date_list:
-                    clean_date_str = re.sub(r"[^A-Za-z0-9 ]+", "", d.text.strip())
-                    next_collection = datetime.strptime(clean_date_str, "%A %d %B %Y")
-                    collections.append((bin_type, next_collection))
-
-            # Sort the text and list elements by date
-            ordered_data = sorted(collections, key=lambda x: x[1])
-
-            # Put the elements into the dictionary
-            for item in ordered_data:
-                dict_data = {
-                    "type": item[0],
-                    "collectionDate": item[1].strftime(date_format),
-                }
-                data["bins"].append(dict_data)
-        except Exception as e:
-            # Here you can log the exception if needed
-            print(f"An error occurred: {e}")
-            # Optionally, re-raise the exception if you want it to propagate
-            raise
-        finally:
-            # This block ensures that the driver is closed regardless of an exception
-            if driver:
-                driver.quit()
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/BracknellForestCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BracknellForestCouncil.py
deleted file mode 100644
index 8881cbd82a..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BracknellForestCouncil.py
+++ /dev/null
@@ -1,246 +0,0 @@
-import time
-
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-def get_headers(base_url: str, method: str) -> dict[str, str]:
-    """
-    Gets request headers
-        :rtype: dict[str, str]
-        :param base_url: Base URL to use
-        :param method: Method to use
-        :return: Request headers
-    """
-    headers = {
-        "Accept-Encoding": "gzip, deflate, br",
-        "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
-        "Cache-Control": "max-age=0",
-        "Connection": "keep-alive",
-        "Host": "selfservice.mybfc.bracknell-forest.gov.uk",
-        "Origin": base_url,
-        "sec-ch-ua": '"Not_A Brand";v="99", "Google Chrome";v="109", "Chromium";v="109"',
-        "sec-ch-ua-mobile": "?0",
-        "sec-ch-ua-platform": "Windows",
-        "Sec-Fetch-Dest": "document",
-        "Sec-Fetch-User": "?1",
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)"
-        " Chrome/109.0.0.0 Safari/537.36",
-    }
-    if method.lower() == "post":
-        headers["Accept"] = "application/json, text/javascript, */*; q=0.01"
-        headers["Content-Type"] = "application/x-www-form-urlencoded; charset=UTF-8"
-        headers["Sec-Fetch-Mode"] = "cors"
-        headers["Sec-Fetch-Mode"] = "same-origin"
-        headers["X-Requested-With"] = "XMLHttpRequest"
-    else:
-        headers["Accept"] = (
-            "text/html,application/xhtml+xml,application/xml;"
-            "q=0.9,image/avif,image/webp,image/apng,*/*;"
-            "q=0.8,application/signed-exchange;v=b3;q=0.9"
-        )
-        headers["Sec-Fetch-Mode"] = "navigate"
-        headers["Sec-Fetch-Mode"] = "none"
-    return headers
-
-
-def get_session_storage_global() -> object:
-    """
-    Gets session storage global object
-        :rtype: object
-        :return: Session storage global object
-    """
-    return {
-        "destination_stack": [
-            "w/webpage/waste-collection-days",
-        ],
-        "last_context_record_id": "86086077",
-    }
-
-
-def get_csrf_token(s: requests.session, base_url: str) -> str:
-    """
-    Gets a CSRF token
-        :rtype: str
-        :param s: requests.Session() to use
-        :param base_url: Base URL to use
-        :return: CSRF token
-    """
-    csrf_token = ""
-    response = s.get(
-        base_url + "/w/webpage/waste-collection-days",
-        headers=get_headers(base_url, "GET"),
-    )
-    if response.status_code == 200:
-        soup = BeautifulSoup(response.text, features="html.parser")
-        soup.prettify()
-        app_body = soup.find("div", {"class": "app-body"})
-        script = app_body.find("script", {"type": "text/javascript"}).string
-        p = re.compile("var CSRF = ('|\")(.*?)('|\");")
-        m = p.search(script)
-        csrf_token = m.groups()[1]
-    else:
-        raise ValueError(
-            "Code 1: Failed to get a CSRF token. Please ensure the council website is online first,"
-            " then open an issue on GitHub."
-        )
-    return csrf_token
-
-
-def get_address_id(
-    s: requests.session, base_url: str, csrf_token: str, postcode: str, paon: str
-) -> str:
-    """
-    Gets the address ID
-        :rtype: str
-        :param s: requests.Session() to use
-        :param base_url: Base URL to use
-        :param csrf_token: CSRF token to use
-        :param postcode: Postcode to use
-        :param paon: House number/address to find
-        :return: address ID
-    """
-    address_id = "0"
-    # Get the addresses for the postcode
-    form_data = {
-        "code_action": "find_addresses",
-        "code_params": '{"search":"' + postcode + '"}',
-        "_session_storage": json.dumps(
-            {
-                "/w/webpage/waste-collection-days": {},
-                "_global": get_session_storage_global(),
-            }
-        ),
-        "action_cell_id": "PCL0003988FEFFB1",
-        "action_page_id": "PAG0000570FEFFB1",
-        "form_check_ajax": csrf_token,
-    }
-    response = s.post(
-        base_url
-        + "/w/webpage/waste-collection-days?webpage_subpage_id=PAG0000570FEFFB1"
-        "&webpage_token=390170046582b0e3d7ca68ef1d6b4829ccff0b1ae9c531047219c6f9b5295738"
-        "&widget_action=handle_event",
-        headers=get_headers(base_url, "POST"),
-        data=form_data,
-    )
-    if response.status_code == 200:
-        json_response = json.loads(response.text)
-        addresses = json_response["response"]["addresses"]["items"]
-        # Find the matching address id for the paon
-        for address in addresses:
-            # Check for full matches first
-            if address.get("Description") == paon:
-                address_id = address.get("Id")
-                break
-        # Check for matching start if no full match found
-        if address_id == "0":
-            for address in addresses:
-                if address.get("Description").split()[0] == paon.strip():
-                    address_id = address.get("Id")
-                    break
-        # Check match was found
-        if address_id == "0":
-            raise ValueError(
-                "Code 2: No matching address for house number/full address found."
-            )
-    else:
-        raise ValueError("Code 3: No addresses found for provided postcode.")
-    return address_id
-
-
-def get_collection_data(
-    s: requests.session, base_url: str, csrf_token: str, address_id: str
-) -> str:
-    """
-    Gets the collection data
-        :rtype: str
-        :param s: requests.Session() to use
-        :param base_url: Base URL to use
-        :param csrf_token: CSRF token to use
-        :param address_id: Address id to use
-        :param retries: Retries count
-        :return: Collection data
-    """
-    collection_data = ""
-    if address_id != "0":
-        form_data = {
-            "code_action": "find_rounds",
-            "code_params": '{"addressId":"' + address_id + '"}',
-            "_session_storage": json.dumps(
-                {
-                    "/w/webpage/waste-collection-days": {},
-                    "_global": get_session_storage_global(),
-                }
-            ),
-            "action_cell_id": "PCL0003988FEFFB1",
-            "action_page_id": "PAG0000570FEFFB1",
-            "form_check_ajax": csrf_token,
-        }
-        response = s.post(
-            base_url
-            + "/w/webpage/waste-collection-days?webpage_subpage_id=PAG0000570FEFFB1"
-            "&webpage_token=390170046582b0e3d7ca68ef1d6b4829ccff0b1ae9c531047219c6f9b5295738"
-            "&widget_action=handle_event",
-            headers=get_headers(base_url, "POST"),
-            data=form_data,
-        )
-        if response.status_code == 200 and len(response.text) > 0:
-            json_response = json.loads(response.text)
-            collection_data = json_response["response"]["collections"]
-        else:
-            raise ValueError("Code 4: Failed to get bin data.")
-    return collection_data
-
-
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        requests.packages.urllib3.disable_warnings()
-        s = requests.Session()
-        base_url = "https://selfservice.mybfc.bracknell-forest.gov.uk"
-        paon = kwargs.get("paon")
-        postcode = kwargs.get("postcode")
-        check_paon(paon)
-        check_postcode(postcode)
-
-        # Firstly, get a CSRF (cross-site request forgery) token
-        csrf_token = get_csrf_token(s, base_url)
-        # Next, get the address_id
-        address_id = get_address_id(s, base_url, csrf_token, postcode, paon)
-        # Finally, use the address_id to get the collection data
-        collection_data = get_collection_data(s, base_url, csrf_token, address_id)
-        if collection_data != "":
-            # Form a JSON wrapper
-            data = {"bins": []}
-
-            for c in collection_data:
-                collection_type = c["round"]
-                for c_date in c["upcomingCollections"]:
-                    collection_date = (
-                        re.search(r"Your (.*) is(.*)", c_date).group(2).strip()
-                    )
-                    dict_data = {
-                        "type": collection_type,
-                        "collectionDate": datetime.strptime(
-                            collection_date, "%A %d %B %Y"
-                        ).strftime(date_format),
-                    }
-                    data["bins"].append(dict_data)
-
-            if len(data["bins"]) == 0:
-                raise ValueError(
-                    "Code 5: No bin data found. Please ensure the council website is showing data first,"
-                    " then open an issue on GitHub."
-                )
-
-            data["bins"].sort(
-                key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
-            )
-            return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py b/uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py
deleted file mode 100644
index 0617ab2959..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BradfordMDC.py
+++ /dev/null
@@ -1,133 +0,0 @@
-import re
-
-import requests
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-
-        # UPRN is passed in via a cookie. Set cookies/params and GET the page
-        cookies = {
-            "COLLECTIONDATES": f"{user_uprn}",
-        }
-        headers = {
-            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
-            "Accept-Language": "en-GB,en;q=0.7",
-            "Cache-Control": "max-age=0",
-            "Connection": "keep-alive",
-            "Referer": "https://onlineforms.bradford.gov.uk/ufs/collectiondates.eb",
-            "Sec-Fetch-Dest": "document",
-            "Sec-Fetch-Mode": "navigate",
-            "Sec-Fetch-Site": "same-origin",
-            "Sec-Fetch-User": "?1",
-            "Sec-GPC": "1",
-            "Upgrade-Insecure-Requests": "1",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36",
-        }
-        params = {
-            "ebp": "30",
-            "ebd": "0",
-            "ebz": "1_1713270660323",
-        }
-        requests.packages.urllib3.disable_warnings()
-        response = requests.get(
-            "https://onlineforms.bradford.gov.uk/ufs/collectiondates.eb",
-            params=params,
-            headers=headers,
-            cookies=cookies,
-        )
-
-        # Parse response text for super speedy finding
-        soup = BeautifulSoup(response.text, features="html.parser")
-        soup.prettify()
-
-        data = {"bins": []}
-
-        # BradfordMDC site has lots of embedded tables, find the table titled 'Your next general/recycling collections are:'
-        for bin in soup.find_all(attrs={"class": "CTID-FHGh1Q77-_"}):
-            if bin.find_all(attrs={"class": "CTID-62bNngCB-_"}):
-                bin_type = "General Waste"
-                bin_colour = "Green"
-                bin_date_text = bin.find(attrs={"class": "CTID-62bNngCB-_"}).get_text()
-            elif bin.find_all(attrs={"class": "CTID-LHo9iO0y-_"}):
-                bin_type = "Recycling Waste"
-                bin_colour = "Grey"
-                bin_date_text = bin.find(attrs={"class": "CTID-LHo9iO0y-_"}).get_text()
-            else:
-                raise ValueError(f"No bin info found in {bin_type_info[0]}")
-
-            # Collection Date info is alongside the bin type, we got the whole line in the if/elif above
-            # below strips the text off at the beginning, to get a date, though recycling is a character shorter hence the lstrip
-            bin_date_info = bin_date_text[29:50].lstrip(" ")
-
-            if contains_date(bin_date_info):
-                bin_date = get_next_occurrence_from_day_month(
-                    datetime.strptime(
-                        bin_date_info,  # + " " + datetime.today().strftime("%Y"),
-                        "%a %b %d %Y",
-                    )
-                ).strftime(date_format)
-                # print(bin_date_info)
-                # print(bin_date)
-            # On exceptional collection schedule (e.g. around English Bank Holidays), date will be contained in the second stripped string
-            else:
-                bin_date = get_next_occurrence_from_day_month(
-                    datetime.strptime(
-                        bin_date_info[1] + " " + datetime.today().strftime("%Y"),
-                        "%a %b %d %Y",
-                    )
-                ).strftime(date_format)
-
-            # Build data dict for each entry
-            dict_data = {
-                "type": bin_type,
-                "collectionDate": bin_date,
-            }
-            data["bins"].append(dict_data)
-
-        for bin in soup.find_all(attrs={"id": re.compile(r"CTID-D0TUYGxO-\d+-A")}):
-            dict_data = {
-                "type": "General Waste",
-                "collectionDate": datetime.strptime(
-                    bin.text.strip(),
-                    "%a %b %d %Y",
-                ).strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-        for bin in soup.find_all(attrs={"id": re.compile(r"CTID-d3gapLk-\d+-A")}):
-            dict_data = {
-                "type": "Recycling Waste",
-                "collectionDate": datetime.strptime(
-                    bin.text.strip(),
-                    "%a %b %d %Y",
-                ).strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-        for bin in soup.find_all(attrs={"id": re.compile(r"CTID-L8OidMPA-\d+-A")}):
-            dict_data = {
-                "type": "Garden Waste (Subscription Only)",
-                "collectionDate": datetime.strptime(
-                    bin.text.strip(),
-                    "%a %b %d %Y",
-                ).strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-
-        data["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
-        )
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/BraintreeDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BraintreeDistrictCouncil.py
deleted file mode 100644
index 5834a41759..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BraintreeDistrictCouncil.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import time
-
-import requests
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_postcode = kwargs.get("postcode")
-        user_uprn = kwargs.get("uprn")
-        check_postcode(user_postcode)
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        URI = "https://www.braintree.gov.uk/xfp/form/554"
-
-        response = requests.get(URI)
-        soup = BeautifulSoup(response.content, "html.parser")
-        token = (soup.find("input", {"name": "__token"})).get("value")
-
-        headers = {
-            "Content-Type": "application/x-www-form-urlencoded",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36",
-            "Referer": "https://www.braintree.gov.uk/xfp/form/554",
-        }
-
-        form_data = {
-            "__token": token,
-            "page": "5730",
-            "locale": "en_GB",
-            "qe15dda0155d237d1ea161004d1839e3369ed4831_0_0": user_postcode,
-            "qe15dda0155d237d1ea161004d1839e3369ed4831_1_0": user_uprn,
-            "next": "Next",
-        }
-        collection_lookup = requests.post(URI, data=form_data, headers=headers)
-        collection_lookup.raise_for_status()
-        for results in BeautifulSoup(collection_lookup.text, "html.parser").find_all(
-            "div", class_="date_display"
-        ):
-            collection_info = results.text.strip().split("\n")
-            collection_type = collection_info[0].strip()
-
-            # Skip if no collection date is found
-            if len(collection_info) < 2:
-                continue
-
-            collection_date = collection_info[1].strip()
-
-            dict_data = {
-                "type": collection_type,
-                "collectionDate": collection_date,
-            }
-            bindata["bins"].append(dict_data)
-
-        bindata["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
-        )
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/BrecklandCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BrecklandCouncil.py
deleted file mode 100644
index 1ae477860d..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BrecklandCouncil.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import time
-
-import requests
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        URI = "https://www.breckland.gov.uk/apiserver/ajaxlibrary"
-
-        data = {
-            "id": "1730410741649",
-            "jsonrpc": "2.0",
-            "method": "Breckland.Whitespace.JointWasteAPI.GetBinCollectionsByUprn",
-            "params": {"uprn": user_uprn, "environment": "live"},
-        }
-        # Make the GET request
-        response = requests.post(URI, json=data)
-
-        # Parse the JSON response
-        bin_collection = response.json()
-
-        # Loop through each collection in bin_collection
-        for collection in bin_collection["result"]:
-            bin_type = collection.get("collectiontype")
-            collection_date = collection.get("nextcollection")
-
-            dict_data = {
-                "type": bin_type,
-                "collectionDate": datetime.strptime(
-                    collection_date,
-                    "%d/%m/%Y %H:%M:%S",
-                ).strftime("%d/%m/%Y"),
-            }
-            bindata["bins"].append(dict_data)
-
-        bindata["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
-        )
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/BrightonandHoveCityCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BrightonandHoveCityCouncil.py
deleted file mode 100644
index d6d6064f36..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BrightonandHoveCityCouncil.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# This script pulls (in one hit) the data from Bromley Council Bins Data
-import datetime
-import re
-import time
-from datetime import datetime
-
-import requests
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.common.keys import Keys
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import Select
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        driver = None
-        try:
-            data = {"bins": []}
-            headers = {"User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64)"}
-
-            uprn = kwargs.get("uprn")
-            user_paon = kwargs.get("paon")
-            postcode = kwargs.get("postcode")
-            web_driver = kwargs.get("web_driver")
-            headless = kwargs.get("headless")
-            driver = create_webdriver(web_driver, headless, None, __name__)
-            driver.get(kwargs.get("url"))
-
-            wait = WebDriverWait(driver, 60)
-            post_code_search = wait.until(
-                EC.presence_of_element_located((By.CLASS_NAME, "form-control"))
-            )
-            post_code_search.send_keys(postcode)
-
-            submit_btn = wait.until(
-                EC.presence_of_element_located(
-                    (By.XPATH, f"//button[contains(@class, 'mx-name-actionButton3')]")
-                )
-            )
-
-            submit_btn.send_keys(Keys.ENTER)
-
-            dropdown_options = wait.until(
-                EC.presence_of_element_located(
-                    (By.XPATH, f'//option[contains(text(), "{user_paon}")]')
-                )
-            )
-            parent_element = dropdown_options.find_element(
-                By.XPATH, ".."
-            )  # Using ".." to move up to the parent element
-
-            # Create a 'Select' for it, then select the first address in the list
-            # (Index 0 is "Make a selection from the list")
-            dropdownSelect = Select(parent_element)
-            dropdownSelect.select_by_visible_text(str(user_paon))
-
-            submit_btn = wait.until(
-                EC.presence_of_element_located(
-                    (By.XPATH, f"//button[contains(@class, 'mx-name-actionButton5')]")
-                )
-            )
-
-            submit_btn.send_keys(Keys.ENTER)
-
-            results = wait.until(
-                EC.presence_of_element_located(
-                    (By.XPATH, f'//span[contains(@class,"collection-sub")]')
-                )
-            )
-
-            # Make a BS4 object
-            soup = BeautifulSoup(driver.page_source, features="html.parser")
-            # Initialize current month and year (you can modify these values based on your requirement)
-            data = {"bins": []}
-
-            current_date = datetime.now()
-
-            # Find all elements with class starting with 'mx-name-index-'
-            bins = soup.find_all(class_=lambda x: x and x.startswith("mx-name-index-"))
-
-            for bin_item in bins:
-                bin_type = bin_item.find(class_="collection-main").text.strip()
-                day_of_week_elements = bin_item.find_all(class_="collection-header")
-                bin_date = None
-
-                for elem in day_of_week_elements:
-                    if (
-                        elem.text.strip() != bin_type
-                    ):  # Avoid taking the bin type as the date
-                        next_sibling = elem.find_next_sibling()
-                        if next_sibling:
-                            bin_date_str = next_sibling.text.strip()
-                            try:
-                                # Try parsing the date string in the format 'dd Month' (e.g., '30 Dec', '5 January')
-                                bin_date = datetime.strptime(bin_date_str, "%d %b")
-                            except ValueError:
-                                try:
-                                    # If the above format fails, try 'dd MonthName' (e.g., '30 December', '5 January')
-                                    bin_date = datetime.strptime(bin_date_str, "%d %B")
-                                except ValueError:
-                                    pass
-
-                            if bin_date:
-                                # Set the year based on the logic provided
-                                if bin_date.month < current_date.month:
-                                    bin_date = bin_date.replace(
-                                        year=current_date.year + 1
-                                    )
-                                else:
-                                    bin_date = bin_date.replace(year=current_date.year)
-                                # Format the date to the desired format
-                                bin_date = bin_date.strftime("%d/%m/%Y")
-                                break
-                dict_data = {"type": bin_type, "collectionDate": bin_date}
-                data["bins"].append(dict_data)
-        except Exception as e:
-            # Here you can log the exception if needed
-            print(f"An error occurred: {e}")
-            # Optionally, re-raise the exception if you want it to propagate
-            raise
-        finally:
-            # This block ensures that the driver is closed regardless of an exception
-            if driver:
-                driver.quit()
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/BristolCityCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BristolCityCouncil.py
deleted file mode 100644
index 24e6f08680..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BristolCityCouncil.py
+++ /dev/null
@@ -1,142 +0,0 @@
-import ast
-
-import requests
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-
-        requests.packages.urllib3.disable_warnings()
-        s = requests.Session()
-
-        service_type_headers = {
-            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,"
-            "image/webp,image/apng,*/*;q=0.8",
-            "Accept-Language": "en-GB,en;q=0.9",
-            "Cache-Control": "max-age=0",
-            "Connection": "keep-alive",
-            "Referer": "https://www.bristol.gov.uk/",
-            "Sec-Fetch-Dest": "document",
-            "Sec-Fetch-Mode": "navigate",
-            "Sec-Fetch-Site": "cross-site",
-            "Sec-Fetch-User": "?1",
-            "Sec-GPC": "1",
-            "Upgrade-Insecure-Requests": "1",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, "
-            "like Gecko) Chrome/105.0.0.0 Safari/537.36",
-        }
-        service_type_params = {
-            "servicetypeid": "7dce896c-b3ba-ea11-a812-000d3a7f1cdc",
-        }
-        response = s.get(
-            "https://bristolcouncil.powerappsportals.com/completedynamicformunauth/",
-            params=service_type_params,
-            headers=service_type_headers,
-        )
-
-        llpg_headers = {
-            "Accept": "*/*",
-            "Accept-Language": "en-GB,en;q=0.9",
-            "Connection": "keep-alive",
-            "Ocp-Apim-Subscription-Key": "47ffd667d69c4a858f92fc38dc24b150",
-            "Ocp-Apim-Trace": "true",
-            "Origin": "https://bristolcouncil.powerappsportals.com",
-            "Referer": "https://bristolcouncil.powerappsportals.com/",
-            "Sec-Fetch-Dest": "empty",
-            "Sec-Fetch-Mode": "cors",
-            "Sec-Fetch-Site": "cross-site",
-            "Sec-GPC": "1",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, "
-            "like Gecko) Chrome/105.0.0.0 Safari/537.36",
-        }
-        llpg_uprn = "UPRN" + user_uprn
-        llpg_json_data = {
-            "Uprn": llpg_uprn,
-        }
-        response = s.post(
-            "https://bcprdapidyna002.azure-api.net/bcprdfundyna001-llpg/DetailedLLPG",
-            headers=llpg_headers,
-            json=llpg_json_data,
-        )
-
-        headers = {
-            "Accept": "*/*",
-            "Accept-Language": "en-GB,en;q=0.9",
-            "Connection": "keep-alive",
-            # Already added when you pass json=
-            # 'Content-Type': 'application/json',
-            "Ocp-Apim-Subscription-Key": "47ffd667d69c4a858f92fc38dc24b150",
-            "Ocp-Apim-Trace": "true",
-            "Origin": "https://bristolcouncil.powerappsportals.com",
-            "Referer": "https://bristolcouncil.powerappsportals.com/",
-            "Sec-Fetch-Dest": "empty",
-            "Sec-Fetch-Mode": "cors",
-            "Sec-Fetch-Site": "cross-site",
-            "Sec-GPC": "1",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36",
-        }
-        json_data = {
-            "uprn": user_uprn,
-        }
-        response = s.post(
-            "https://bcprdapidyna002.azure-api.net/bcprdfundyna001-alloy/NextCollectionDates",
-            headers=headers,
-            json=json_data,
-        )
-
-        # Make a BS4 object
-        soup = BeautifulSoup(response.text, features="html.parser")
-        soup.prettify()
-
-        # Soup returns API response rather than HTML, so parse those strings
-        string_data = soup.text.split("data")[1]
-        collection_data = string_data.split("]}")
-
-        # Remove the spare ] and , characters at the of each list element
-        fixed_data = [i[1:] for i in collection_data]
-
-        # Remove the last list element since it's garbage (funny since this is a bin project)
-        fixed_data.pop()
-        collection_data.clear()
-
-        # Make some more changes:
-        idx = 0
-        for i in fixed_data:
-            if idx == 0:
-                # Remove two extra characters if it's the first element
-                i = i[2:]
-            # Append some characters to the end of each line to make to dict
-            i = i + "]}"
-            idx += 1
-            # Reuse the collection_data list to make a list of dictionaries - one for each bin
-            collection_data.append(ast.literal_eval(i))
-
-        collections = []
-        for bin in collection_data:
-            bin_type = bin["containerName"]
-            next_collection = datetime.strptime(
-                bin["collection"][0]["nextCollectionDate"], "%Y-%m-%dT%H:%M:%S"
-            ).strftime(date_format)
-            # Could work out next date using the roundDescription and the is_holiday function in common.py
-            collections.append((bin_type, next_collection))
-
-        ordered_data = sorted(collections, key=lambda x: x[1])
-        data = {"bins": []}
-        for item in ordered_data:
-            dict_data = {"type": item[0], "collectionDate": item[1]}
-            data["bins"].append(dict_data)
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/BromleyBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BromleyBoroughCouncil.py
deleted file mode 100644
index 93f03c3df1..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BromleyBoroughCouncil.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# This script pulls (in one hit) the data from Bromley Council Bins Data
-import datetime
-from datetime import datetime
-
-from bs4 import BeautifulSoup
-from dateutil.relativedelta import relativedelta
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import create_webdriver
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
class CouncilClass(AbstractGetBinDataClass):
    """
    Scrapes Bromley Council's waste-services page with Selenium.

    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [{"type", "collectionDate"}, ...]} scraped from the
        URL supplied in kwargs.

        kwargs:
            url: page to scrape
            web_driver: optional remote webdriver URL
            headless: run the browser headless when truthy
        """
        driver = None
        try:
            data = {"bins": []}
            web_driver = kwargs.get("web_driver")
            headless = kwargs.get("headless")

            # Get our initial session running
            driver = create_webdriver(web_driver, headless, None, __name__)
            driver.get(kwargs.get("url"))

            # Wait for the dynamically-rendered services to appear
            wait = WebDriverWait(driver, 30)
            wait.until(
                EC.presence_of_element_located((By.CLASS_NAME, "waste-service-image"))
            )

            # Parse the HTML content
            soup = BeautifulSoup(driver.page_source, "html.parser")

            # One heading per waste service
            waste_services = soup.find_all(
                "h3", class_="govuk-heading-m waste-service-name"
            )

            for service in waste_services:
                service_title = service.get_text(strip=True)
                next_collection = service.find_next_sibling().find(
                    "dt", string="Next collection"
                )
                if not next_collection:
                    continue

                next_collection_date = next_collection.find_next_sibling().get_text(
                    strip=True
                )
                # e.g. "Tuesday, 2nd January" -> "2nd January"
                next_collection_date_parse = next_collection_date.split(",")[1].strip()
                day, month = next_collection_date_parse.split()[:2]

                # Strip the ordinal suffix ('st', 'nd', 'rd', 'th') from the day
                if day.endswith(("th", "nd", "rd", "st")):
                    day = day[:-2]

                # Parse without a year (strptime defaults the year to 1900)
                date_object = datetime.strptime(f"{day} {month}", "%d %B")

                # BUG FIX: the parsed date always had year 1900, so the old
                # comparison `date_object < datetime.now()` was always true
                # and every date was pushed into next year. Attach the
                # current year first, then only roll forward if the resulting
                # date has already passed.
                date_with_year = date_object.replace(year=datetime.now().year)
                if date_with_year.date() < datetime.now().date():
                    date_with_year = date_with_year.replace(
                        year=date_with_year.year + 1
                    )

                # Create the dictionary with the formatted data
                data["bins"].append(
                    {
                        "type": service_title,
                        "collectionDate": date_with_year.strftime("%d/%m/%Y"),
                    }
                )
        except Exception as e:
            # Log the failure, then propagate so callers can see it
            print(f"An error occurred: {e}")
            raise
        finally:
            # Always release the browser, even on failure
            if driver:
                driver.quit()
        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/BromsgroveDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BromsgroveDistrictCouncil.py
deleted file mode 100644
index e71f49f55a..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BromsgroveDistrictCouncil.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import requests
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
class CouncilClass(AbstractGetBinDataClass):
    """
    Scrapes Bromsgrove's bin-collection details page for a UPRN.

    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [...]} for the UPRN given in kwargs, sorted by date.

        Raises requests.HTTPError on a non-2xx response and ValueError (via
        strptime) if a collection date cannot be parsed.
        """
        user_uprn = kwargs.get("uprn")
        check_uprn(user_uprn)
        bindata = {"bins": []}

        URI = f"https://bincollections.bromsgrove.gov.uk/BinCollections/Details?uprn={user_uprn}"

        # ROBUSTNESS: time out rather than hang on a stalled server, and
        # fail fast on HTTP errors instead of parsing an error page.
        response = requests.get(URI, timeout=30)
        response.raise_for_status()

        # Parse the HTML
        soup = BeautifulSoup(response.content, "html.parser")

        # Each collection-container holds one bin type and its next date
        for container in soup.find_all(class_="collection-container"):
            # Bin type from the heading
            bin_type = container.find(class_="heading").text.strip()

            # Next collection date from the caption, e.g. "Monday, 6 January 2025"
            next_collection = (
                container.find(class_="caption")
                .text.replace("Next collection ", "")
                .strip()
            )

            bindata["bins"].append(
                {
                    "type": bin_type,
                    "collectionDate": datetime.strptime(
                        next_collection,
                        "%A, %d %B %Y",
                    ).strftime("%d/%m/%Y"),
                }
            )

        # Soonest collection first
        bindata["bins"].sort(
            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
        )

        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py
deleted file mode 100644
index a2e8c61704..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BroxbourneCouncil.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from datetime import datetime
-
-import requests
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
class CouncilClass(AbstractGetBinDataClass):
    """
    Posts the Broxbourne xfp form and parses the resulting collections table.

    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [...]} for the postcode/UPRN in kwargs, date-sorted."""
        user_uprn = kwargs.get("uprn")
        user_postcode = kwargs.get("postcode")
        check_uprn(user_uprn)
        check_postcode(user_postcode)
        bindata = {"bins": []}

        API_URL = "https://www.broxbourne.gov.uk/xfp/form/205"

        post_data = {
            "page": "490",
            "locale": "en_GB",
            "qacf7e570cf99fae4cb3a2e14d5a75fd0d6561058_0_0": user_postcode,
            "qacf7e570cf99fae4cb3a2e14d5a75fd0d6561058_1_0": user_uprn,
            "next": "Next",
        }

        # ROBUSTNESS: time out rather than hang on a stalled server
        r = requests.post(API_URL, data=post_data, timeout=30)
        r.raise_for_status()

        soup = BeautifulSoup(r.content, features="html.parser")

        # The collections table sits inside the form's instructions block
        form__instructions = soup.find(attrs={"class": "form__instructions"})
        table = form__instructions.find("table")
        rows = table.find_all("tr")

        today = datetime.now()

        # Process each row into a list of dictionaries
        for row in rows[1:]:  # Skip the header row
            columns = row.find_all("td")
            collection_date_text = (
                columns[0].get_text(separator=" ").replace("\xa0", " ").strip()
            )
            service = columns[1].get_text(separator=" ").replace("\xa0", " ").strip()

            if not collection_date_text:
                continue

            # Safely try to parse the collection date, e.g. "Mon 6 Jan"
            try:
                collection_date = datetime.strptime(collection_date_text, "%a %d %b")
            except ValueError:
                # Skip invalid collection_date
                continue

            # Dates carry no year. BUG FIX: the old logic only rolled January
            # into next year, so a January date seen in January was wrongly
            # dated next year and a February date seen in December landed in
            # the past. Any month earlier than the current month belongs to
            # next year.
            if collection_date.month < today.month:
                collection_date = collection_date.replace(year=today.year + 1)
            else:
                collection_date = collection_date.replace(year=today.year)

            bindata["bins"].append(
                {
                    "type": service,
                    "collectionDate": collection_date.strftime("%d/%m/%Y"),
                }
            )

        # Sort valid bins by collectionDate
        bindata["bins"].sort(
            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
        )
        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/BroxtoweBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BroxtoweBoroughCouncil.py
deleted file mode 100644
index 0690a9cbd6..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BroxtoweBoroughCouncil.py
+++ /dev/null
@@ -1,107 +0,0 @@
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import Select
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
class CouncilClass(AbstractGetBinDataClass):
    """
    Drives Broxtowe's self-service form with Selenium and scrapes the results.

    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [...]} for the postcode/UPRN in kwargs, date-sorted."""
        driver = None
        try:
            page = "https://selfservice.broxtowe.gov.uk/renderform.aspx?t=217&k=9D2EF214E144EE796430597FB475C3892C43C528"

            data = {"bins": []}

            user_uprn = kwargs.get("uprn")
            user_postcode = kwargs.get("postcode")
            web_driver = kwargs.get("web_driver")
            headless = kwargs.get("headless")
            check_uprn(user_uprn)
            check_postcode(user_postcode)

            # Create Selenium webdriver
            driver = create_webdriver(web_driver, headless, None, __name__)
            driver.get(page)

            # Populate postcode field
            driver.find_element(
                By.ID,
                "ctl00_ContentPlaceHolder1_FF5683TB",
            ).send_keys(user_postcode)

            # Click search button
            driver.find_element(
                By.ID,
                "ctl00_ContentPlaceHolder1_FF5683BTN",
            ).click()

            # Wait for the 'Select address' dropdown, then select by UPRN
            dropdown = WebDriverWait(driver, 10).until(
                EC.presence_of_element_located(
                    (By.ID, "ctl00_ContentPlaceHolder1_FF5683DDL")
                )
            )
            Select(dropdown).select_by_value("U" + user_uprn)

            # Submit to retrieve the collection dates
            WebDriverWait(driver, 10).until(
                EC.presence_of_element_located(
                    (By.ID, "ctl00_ContentPlaceHolder1_btnSubmit")
                )
            ).click()

            soup = BeautifulSoup(driver.page_source, features="html.parser")

            bins_div = soup.find("div", id="ctl00_ContentPlaceHolder1_FF5686FormGroup")
            bins_table = bins_div.find("table") if bins_div else None
            if bins_table:
                for row in bins_table.find_all("tr"):
                    cells = row.find_all("td")
                    # BUG FIX: guard against rows without enough <td> cells
                    # before indexing, to avoid an IndexError on header or
                    # spacer rows.
                    if len(cells) < 4:
                        continue
                    bin_type = cells[0].get_text(strip=True)
                    # Skip the header row and rows with no date
                    if bin_type and bin_type != "Bin Type":
                        date_text = cells[3].get_text(strip=True)
                        if date_text:
                            collection_date = datetime.strptime(
                                date_text, "%A, %d %B %Y"
                            )
                            data["bins"].append(
                                {
                                    "type": bin_type,
                                    "collectionDate": collection_date.strftime(
                                        date_format
                                    ),
                                }
                            )

                # BUG FIX: sort once after the loop; the original re-sorted
                # the list inside the row loop on every iteration.
                data["bins"].sort(
                    key=lambda x: datetime.strptime(
                        x.get("collectionDate"), "%d/%m/%Y"
                    )
                )
        except Exception as e:
            # Log the failure, then propagate so callers can see it
            print(f"An error occurred: {e}")
            raise
        finally:
            # This block ensures that the driver is closed regardless of an exception
            if driver:
                driver.quit()
        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/BuckinghamshireCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BuckinghamshireCouncil.py
deleted file mode 100644
index c51122f7b0..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BuckinghamshireCouncil.py
+++ /dev/null
@@ -1,115 +0,0 @@
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
class CouncilClass(AbstractGetBinDataClass):
    """
    Drives the iTouchVision collection-day lookup for Buckinghamshire.

    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [...]} scraped for the house/postcode in kwargs."""
        browser = None
        try:
            results = {"bins": []}
            house = kwargs.get("paon")
            postcode = kwargs.get("postcode")
            remote_url = kwargs.get("web_driver")
            run_headless = kwargs.get("headless")
            check_paon(house)
            check_postcode(postcode)

            # Spin up the browser and open the lookup page
            browser = create_webdriver(remote_url, run_headless, None, __name__)
            browser.get(
                "https://iapp.itouchvision.com/iappcollectionday/collection-day/?uuid=FA353FC74600CBE61BE409534D00A8EC09BDA3AC&lang=en"
            )

            waiter = WebDriverWait(browser, 10)

            # Type the postcode into the search box once it renders
            waiter.until(
                EC.presence_of_element_located((By.ID, "postcodeSearch"))
            ).send_keys(postcode)

            # Kick off the address search
            waiter.until(
                EC.presence_of_element_located(
                    (By.XPATH, '//button[@class="govuk-button mt-4"]')
                )
            ).click()

            # Pick the dropdown entry whose text contains the house name/number
            waiter.until(
                EC.element_to_be_clickable(
                    (
                        By.XPATH,
                        "//select[@id='addressSelect']//option[contains(., '"
                        + house
                        + "')]",
                    )
                )
            ).click()

            # Block until the collections grid has rendered
            waiter.until(
                EC.presence_of_element_located(
                    (
                        By.XPATH,
                        '//div[@class="ant-row d-flex justify-content-between mb-4 mt-2 css-2rgkd4"]',
                    )
                )
            )

            page_soup = BeautifulSoup(browser.page_source, features="html.parser")

            calendar_root = page_soup.find(
                "div",
                {
                    "class": "ant-row d-flex justify-content-between mb-4 mt-2 css-2rgkd4"
                },
            )

            service_cells = calendar_root.find_all(
                "div",
                {
                    "class": "ant-col ant-col-xs-12 ant-col-sm-12 ant-col-md-12 ant-col-lg-12 ant-col-xl-12 css-2rgkd4"
                },
            )

            now = datetime.now()

            for cell in service_cells:
                bin_name = cell.find("h3").text
                when = datetime.strptime(
                    cell.find("div", {"class": "text-white fw-bold"}).text,
                    "%A %d %B",
                )
                # Dates carry no year: assume the current year unless we are
                # late in the year and the date is early in the next one.
                if (now.month > 10) and (when.month < 3):
                    when = when.replace(year=(now.year + 1))
                else:
                    when = when.replace(year=now.year)

                results["bins"].append(
                    {
                        "type": bin_name,
                        "collectionDate": when.strftime("%d/%m/%Y"),
                    }
                )

        except Exception as e:
            # Here you can log the exception if needed
            print(f"An error occurred: {e}")
            # Optionally, re-raise the exception if you want it to propagate
            raise
        finally:
            # This block ensures that the driver is closed regardless of an exception
            if browser:
                browser.quit()
        return results
diff --git a/uk_bin_collection/uk_bin_collection/councils/BurnleyBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BurnleyBoroughCouncil.py
deleted file mode 100644
index 97283ce09b..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BurnleyBoroughCouncil.py
+++ /dev/null
@@ -1,88 +0,0 @@
-import time
-
-import requests
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
class CouncilClass(AbstractGetBinDataClass):
    """
    Queries Burnley's AchieveForms API broker for bin collections by UPRN.

    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [...]} for the UPRN in kwargs.

        Raises requests.HTTPError on a non-2xx response and ValueError when
        the API returns an unexpected payload shape.
        """
        user_uprn = kwargs.get("uprn")
        check_uprn(user_uprn)
        bindata = {"bins": []}

        SESSION_URL = "https://your.burnley.gov.uk/authapi/isauthenticated?uri=https%253A%252F%252Fyour.burnley.gov.uk%252Fen%252FAchieveForms%252F%253Fform_uri%253Dsandbox-publish%253A%252F%252FAF-Process-b41dcd03-9a98-41be-93ba-6c172ba9f80c%252FAF-Stage-edb97458-fc4d-4316-b6e0-85598ec7fce8%252Fdefinition.json%2526redirectlink%253D%25252Fen%2526cancelRedirectLink%253D%25252Fen%2526consentMessage%253Dyes&hostname=your.burnley.gov.uk&withCredentials=true"

        API_URL = "https://your.burnley.gov.uk/apibroker/runLookup"

        headers = {
            "Content-Type": "application/json",
            "Accept": "application/json",
            "User-Agent": "Mozilla/5.0",
            "X-Requested-With": "XMLHttpRequest",
            "Referer": "https://your.burnley.gov.uk/fillform/?iframe_id=fillform-frame-1&db_id=",
        }
        s = requests.session()
        # Establish an authenticated session and extract its token.
        # ROBUSTNESS: timeouts so a stalled server cannot hang the caller.
        r = s.get(SESSION_URL, timeout=30)
        r.raise_for_status()
        sid = r.json()["auth-session"]

        # CONSISTENCY FIX: the request payload was previously also named
        # `data` and then shadowed by the response JSON below.
        payload = {
            "formValues": {
                "Section 1": {
                    "case_uprn1": {
                        "value": user_uprn,
                    }
                },
            },
        }

        params = {
            "id": "607fe757df87c",
            "repeat_against": "",
            "noRetry": "false",
            "getOnlyTokens": "undefined",
            "log_id": "",
            "app_name": "AF-Renderer::Self",
            # unix_timestamp
            "_": str(int(time.time() * 1000)),
            "sid": sid,
        }

        r = s.post(API_URL, json=payload, headers=headers, params=params, timeout=30)
        r.raise_for_status()

        rows_data = r.json()["integration"]["transformed"]["rows_data"]
        if not isinstance(rows_data, dict):
            raise ValueError("Invalid data returned from API")

        today = datetime.now()
        for value in rows_data.values():
            # Each display string looks like "<bin type> - <day date month>"
            bin_type = value["display"].split(" - ")[0]
            collection_date = datetime.strptime(
                value["display"].split(" - ")[1], "%A %d %B"
            )

            # Dates carry no year. BUG FIX: the old logic only rolled January
            # forward; any month earlier than the current one must belong to
            # next year (and a January date seen in January is this year).
            if collection_date.month < today.month:
                collection_date = collection_date.replace(year=today.year + 1)
            else:
                collection_date = collection_date.replace(year=today.year)

            bindata["bins"].append(
                {
                    "type": bin_type,
                    "collectionDate": collection_date.strftime(date_format),
                }
            )

        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/BuryCouncil.py b/uk_bin_collection/uk_bin_collection/councils/BuryCouncil.py
deleted file mode 100644
index 7497d352a9..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/BuryCouncil.py
+++ /dev/null
@@ -1,65 +0,0 @@
-from bs4 import BeautifulSoup
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
class CouncilClass(AbstractGetBinDataClass):
    """
    Calls Bury's property and bin JSON endpoints and returns next collections.

    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [...]} for the postcode/PAON in kwargs.

        Raises ConnectionAbortedError on non-200 API responses and ValueError
        when no address matches the supplied house name/number.
        """
        collections = []
        data = {"bins": []}

        # Get and check postcode and PAON (house name/number)
        postcode = kwargs.get("postcode")
        paon = kwargs.get("paon")
        check_postcode(postcode)
        check_paon(paon)

        # Look up the property list for the postcode.
        # ROBUSTNESS: timeouts so a stalled server cannot hang the caller.
        addr_response = requests.get(
            f'https://www.bury.gov.uk/app-services/getProperties?postcode={postcode.replace(" ", "")}',
            timeout=30,
        )
        if addr_response.status_code != 200:
            raise ConnectionAbortedError("Issue encountered getting addresses.")
        address_json = json.loads(addr_response.text)["response"]

        # First address whose first line contains the house number; results
        # appear ordered by house number so "3" should hit "3 ..." before
        # "33 ...". BUG FIX: next() without a default raised a bare
        # StopIteration when nothing matched — raise a descriptive error.
        addr = next(
            (item for item in address_json if paon in item["addressLine1"]), None
        )
        if addr is None:
            raise ValueError(f"No address matching '{paon}' found for postcode.")

        # Fetch the bin data for the matched property ID
        response = requests.get(
            f'https://www.bury.gov.uk/app-services/getPropertyById?id={addr.get("id")}',
            timeout=30,
        )
        if response.status_code != 200:
            raise ConnectionAbortedError("Issue encountered getting bin data.")
        bin_list = json.loads(response.text)["response"]["bins"]

        # The JSON also contains a full calendar, but only the next
        # collection per bin colour is used here.
        for bin_colour, collection_data in bin_list.items():
            bin_date = datetime.strptime(
                remove_ordinal_indicator_from_date_string(
                    collection_data.get("nextCollection")
                ),
                "%A %d %B %Y",
            )
            collections.append((bin_colour, bin_date))

        # Dates are ordered correctly - soonest first
        for bin_type, bin_date in sorted(collections, key=lambda x: x[1]):
            data["bins"].append(
                {
                    "type": bin_type,
                    "collectionDate": bin_date.strftime(date_format),
                }
            )

        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CalderdaleCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CalderdaleCouncil.py
deleted file mode 100644
index 428630ab94..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CalderdaleCouncil.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# This script pulls (in one hit) the data from Bromley Council Bins Data
-import datetime
-import time
-from datetime import datetime
-
-import requests
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.common.keys import Keys
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import Select
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
class CouncilClass(AbstractGetBinDataClass):
    """
    Drives Calderdale's bin lookup with Selenium and scrapes the result table.

    Concrete classes have to implement all abstract operations of the
    base class. They can also override some operations with a default
    implementation.
    """

    def parse_data(self, page: str, **kwargs) -> dict:
        """Return {"bins": [...]} for the postcode/UPRN in kwargs, date-sorted.

        kwargs:
            url: lookup page to drive
            uprn / postcode: property identifiers (validated below)
            web_driver / headless: Selenium configuration
        """
        driver = None
        try:
            user_uprn = kwargs.get("uprn")
            user_postcode = kwargs.get("postcode")
            check_uprn(user_uprn)
            check_postcode(user_postcode)

            # CLEANUP: removed unused locals (bin_data_dict, collections)
            # and the duplicate initialisation of `data`.
            data = {"bins": []}
            web_driver = kwargs.get("web_driver")
            headless = kwargs.get("headless")

            # Get our initial session running
            driver = create_webdriver(web_driver, headless, None, __name__)
            driver.get(kwargs.get("url"))

            wait = WebDriverWait(driver, 30)

            # Enter the postcode and search
            postcode = wait.until(
                EC.presence_of_element_located((By.XPATH, '//*[@id="pPostcode"]'))
            )
            postcode.send_keys(user_postcode)
            wait.until(
                EC.element_to_be_clickable((By.CLASS_NAME, "searchbox_submit"))
            ).send_keys(Keys.ENTER)

            # Select the property from the dropdown by UPRN
            dropdown = wait.until(EC.element_to_be_clickable((By.ID, "uprn")))
            Select(dropdown).select_by_value(str(user_uprn))

            # Tick the GDPR terms, then submit the lookup
            wait.until(
                EC.presence_of_element_located((By.ID, "gdprTerms"))
            ).send_keys(Keys.SPACE)
            wait.until(
                EC.element_to_be_clickable((By.CLASS_NAME, "searchbox_submit"))
            ).send_keys(Keys.ENTER)

            # Wait for the results table, then parse the page
            wait.until(EC.presence_of_element_located((By.ID, "collection")))
            soup = BeautifulSoup(driver.page_source, features="html.parser")

            # CLEANUP: slice off the header row instead of the manual
            # row_index counter dance.
            for row in soup.find("table", {"id": "collection"}).find_all("tr")[1:]:
                bin_info = row.find_all("td")
                # Bin type sits in the first cell's <strong>
                bin_type = bin_info[0].find("strong").get_text(strip=True)

                # The third cell holds several <p>s; the one mentioning
                # "your next collection" carries the date.
                collection_date = None
                for p in bin_info[2].find_all("p"):
                    if "your next collection" in p.get_text(strip=True):
                        collection_date = datetime.strptime(
                            " ".join(
                                p.get_text(strip=True)
                                .replace("will be your next collection.", "")
                                .split()
                            ),
                            "%A %d %B %Y",
                        )

                if collection_date is not None:
                    data["bins"].append(
                        {
                            "type": bin_type,
                            "collectionDate": collection_date.strftime(date_format),
                        }
                    )

            # Soonest collection first
            data["bins"].sort(
                key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
            )
        except Exception as e:
            # Log the failure, then propagate so callers can see it
            print(f"An error occurred: {e}")
            raise
        finally:
            # This block ensures that the driver is closed regardless of an exception
            if driver:
                driver.quit()
        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CannockChaseDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CannockChaseDistrictCouncil.py
deleted file mode 100644
index 85ec611b98..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CannockChaseDistrictCouncil.py
+++ /dev/null
@@ -1,65 +0,0 @@
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        user_uprn = kwargs.get("uprn")
-        user_postcode = kwargs.get("postcode")
-        check_uprn(user_uprn)
-        check_postcode(user_postcode)
-
-        # Make SOAP Request
-        response = requests.post(
-            "https://ccdc.opendata.onl/DynamicCall.dll",
-            data="Method=CollectionDates&Postcode="
-            + user_postcode
-            + "&UPRN="
-            + user_uprn,
-            headers={
-                "Content-Type": "application/x-www-form-urlencoded",
-                "Referer": "https://ccdc.opendata.onl/CCDC_WasteCollection",
-                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/118.0.0.0 Safari/537.36",
-            },
-        )
-
-        # Make a BS4 object
-        soup = BeautifulSoup(response.text, "xml")
-        soup.prettify()
-
-        if (
-            soup.find("ErrorDescription")
-            and soup.find("ErrorDescription").get_text(strip=True)
-            == "No results returned"
-        ):
-            raise ValueError("No collection data found for provided Postcode & UPRN.")
-
-        data = {"bins": []}
-
-        collections = soup.find_all("Collection")
-
-        for i in range(len(collections)):
-            dict_data = {
-                "type": collections[i]
-                .Service.get_text()
-                .replace("Collection Service", "")
-                .strip(),
-                "collectionDate": datetime.strptime(
-                    collections[i].Date.get_text(), "%d/%m/%Y %H:%M:%S"
-                ).strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-
-        data["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
-        )
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CanterburyCityCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CanterburyCityCouncil.py
deleted file mode 100644
index 6f9fafb0b0..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CanterburyCityCouncil.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import time
-
-import requests
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        data = {"uprn": user_uprn, "usrn": "1"}
-
-        URI = (
-            "https://zbr7r13ke2.execute-api.eu-west-2.amazonaws.com/Beta/get-bin-dates"
-        )
-
-        # Make the GET request
-        response = requests.post(URI, json=data)
-        response.raise_for_status()
-
-        # Parse the JSON response
-        bin_collection = json.loads(response.json()["dates"])
-        collections = {
-            "General": bin_collection["blackBinDay"],
-            "Recycling": bin_collection["recyclingBinDay"],
-            "Food": bin_collection["foodBinDay"],
-            "Garden": bin_collection["gardenBinDay"],
-        }
-        # Loop through each collection in bin_collection
-        for collection in collections:
-            print(collection)
-
-            if len(collections[collection]) <= 0:
-                continue
-            for date in collections[collection]:
-                date = (
-                    datetime.strptime(date, "%Y-%m-%dT%H:%M:%S").strftime("%d/%m/%Y"),
-                )
-                dict_data = {"type": collection, "collectionDate": date[0]}
-                bindata["bins"].append(dict_data)
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/CardiffCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CardiffCouncil.py
deleted file mode 100644
index eb3120e1d0..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CardiffCouncil.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# So this script is a little different to the others...
-# Essentially, it uses Cardiff Council's waste collection API to return collections for a UPRN by pretending
-# to be Google Chrome
-
-import datetime
-import json
-from datetime import datetime
-
-import requests
-from requests import auth
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# Taken from
-# https://stackoverflow.com/questions/29931671/making-an-api-call-in-python-with-an-api-that-requires-a-bearer-token
-class BearerAuth(requests.auth.AuthBase):
-    def __init__(self, token):
-        self.token = token
-
-    def __call__(self, r):
-        r.headers["authorization"] = "Bearer " + self.token
-        return r
-
-
-def parse_token(text: str) -> str:
-    """
-    Parses the response text to find the JWT token, which will always be the longest item in the list (I think)
-        :param text: The response text from the server
-        :return: Only the JWT token, as a string
-    """
-    # You'd have thought I'd use something like etree for this, but that doesn't work so going for a hacky approach
-    xml_list = text.split('"')
-    bearer_token = max(xml_list, key=len)
-    return bearer_token
-
-
-def get_jwt() -> str:
-    """
-    Gets a JSON web token from the authentication server
-        :return: A JWT token as a string
-    """
-    auth_url = (
-        "https://authwebservice.cardiff.gov.uk/AuthenticationWebService.asmx?op=GetJWT"
-    )
-    options_headers_str = (
-        "Accept: */*|Accept-Encoding: gzip, "
-        "deflate, br|Accept-Language: en-GB,en;q=0.9|Access-Control-Request-Headers: content-type"
-        "|Access-Control-Request-Method: POST|Connection: keep-alive|Host: "
-        "authwebservice.cardiff.gov.uk|Origin: https://www.cardiff.gov.uk|Referer: "
-        "https://www.cardiff.gov.uk/|Sec-Fetch-Dest: empty"
-        "|Sec-Fetch-Mode: cors|Sec-Fetch-Site: same-site|User-Agent: Mozilla/5.0 (Windows NT 10.0; "
-        "Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.134 Safari/537.36 "
-    )
-
-    request_headers_str = (
-        "Accept: */*|Accept-Encoding: gzip, deflate, br|Accept-Language: en-GB,en;q=0.9|Connection: "
-        'keep-alive|Content-Length: 284|Content-Type: text/xml; charset="UTF-8"|Host: '
-        "authwebservice.cardiff.gov.uk|Origin: https://www.cardiff.gov.uk|Referer: "
-        "https://www.cardiff.gov.uk/|Sec-Fetch-Dest: empty|Sec-Fetch-Mode: cors|Sec-Fetch-Site: "
-        "same-site|Sec-GPC: 1|User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
-        "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.134 Safari/537.36 "
-    )
-
-    payload = (
-        " "
-    )
-
-    options_headers = parse_header(options_headers_str)
-    request_headers = parse_header(request_headers_str)
-    try:
-        requests.packages.urllib3.disable_warnings()
-        options = requests.options(auth_url, headers=options_headers)
-        response = requests.post(auth_url, headers=request_headers, data=payload)
-        if not options.ok or not response.ok:
-            raise ValueError("Invalid server response code getting JWT!")
-
-    except Exception as ex:
-        print(f"Exception encountered: {ex}")
-        exit(1)
-    token = parse_token(response.text)
-    options.close()
-    response.close()
-
-    return token
-
-
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the base
-    class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        """
-        Parse council provided CSVs to get the latest bin collections for address
-        """
-        # Change this
-        uprn = kwargs.get("uprn")
-        check_uprn(uprn)
-
-        data = {"bins": []}
-        token = get_jwt()
-
-        api_url = "https://api.cardiff.gov.uk/WasteManagement/api/WasteCollection"
-        options_header_str = (
-            "Accept: */*|Accept-Encoding: gzip, deflate, br|Accept-Language: en-GB,"
-            "en;q=0.9|Access-Control-Request-Headers: authorization,"
-            "content-type|Access-Control-Request-Method: POST|Connection: keep-alive|Host: "
-            "api.cardiff.gov.uk|Origin: https://www.cardiff.gov.uk|Referer: "
-            "https://www.cardiff.gov.uk/|Sec-Fetch-Dest: empty|Sec-Fetch-Mode: cors|Sec-Fetch-Site: "
-            "same-site|User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 ("
-            "KHTML, like Gecko) Chrome/103.0.5060.134 Safari/537.36 "
-        )
-        response_header_str = (
-            "Accept: application/json, text/javascript, */*; q=0.01|Accept-Encoding: gzip, deflate, "
-            f"br|Accept-Language: en-GB,en;q=0.9|Authorization: {token}|Connection: "
-            "keep-alive|Content-Length: 62|Content-Type: application/json; charset=UTF-8|Host: "
-            "api.cardiff.gov.uk|Origin: https://www.cardiff.gov.uk|Referer: "
-            "https://www.cardiff.gov.uk/|Sec-Fetch-Dest: empty|Sec-Fetch-Mode: cors|Sec-Fetch-Site: "
-            "same-site|Sec-GPC: 1|User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
-            "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.134 Safari/537.36 "
-        )
-
-        payload = (
-            '{ "systemReference": "web", "language": "eng", ' f'"uprn": {uprn} ' "}"
-        )
-
-        options_header = parse_header(options_header_str)
-        response_header = parse_header(response_header_str)
-        # Copy the request headers for options and post headers (replacing post auth with token variable) and post
-        # payload, then add here
-        try:
-            requests.packages.urllib3.disable_warnings()
-            options = requests.options(api_url, headers=options_header)
-            response = requests.post(
-                api_url, headers=response_header, auth=BearerAuth(token), data=payload
-            )
-            if not options.ok or not response.ok:
-                raise ValueError("Invalid server response code finding UPRN!")
-
-        except Exception as ex:
-            print(f"Exception encountered: {ex}")
-            exit(1)
-
-        result = json.loads(response.text)
-
-        options.close()
-        response.close()
-
-        collections = result["collectionWeeks"]
-        for week in collections:
-            collection = [(k, v) for k, v in week.items()]
-            collection_date = collection[1][1]
-            collection_date = datetime.strptime(
-                collection_date, "%Y-%m-%dT%H:%M:%S"
-            ).strftime(date_format)
-
-            for bin in collection[3][1]:
-                bin_type = bin.get("type")
-
-                dict_data = {
-                    "type": bin_type,
-                    "collectionDate": collection_date,
-                }
-                data["bins"].append(dict_data)
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CarmarthenshireCountyCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CarmarthenshireCountyCouncil.py
deleted file mode 100644
index 9bedb1ca6b..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CarmarthenshireCountyCouncil.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import requests
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        URI = f"https://www.carmarthenshire.gov.wales/umbraco/Surface/SurfaceRecycling/Index/?uprn={user_uprn}&lang=en-GB"
-
-        # Make the GET request
-        response = requests.get(URI)
-
-        # Parse the HTML
-        soup = BeautifulSoup(response.content, "html.parser")
-
-        # Find each bin collection container
-        for container in soup.find_all(class_="bin-day-container"):
-            # Get the bin type based on the class (e.g., Blue, Black, Garden, Nappy)
-            bin_type = container.get("class")[1]  # Second class represents the bin type
-
-            # Find the next collection date
-            date_tag = container.find(class_="font11 text-center")
-            if date_tag.text.strip() == "":
-                continue
-            else:
-                collection_date = date_tag.text.strip()
-
-            dict_data = {
-                "type": bin_type,
-                "collectionDate": datetime.strptime(
-                    collection_date,
-                    "%A %d/%m/%Y",
-                ).strftime("%d/%m/%Y"),
-            }
-            bindata["bins"].append(dict_data)
-
-        bindata["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
-        )
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/CastlepointDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CastlepointDistrictCouncil.py
deleted file mode 100644
index f523111f91..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CastlepointDistrictCouncil.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from bs4 import BeautifulSoup
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        # Disable the SSL warnings that otherwise break everything
-        requests.packages.urllib3.disable_warnings()
-        try:
-            requests.packages.urllib3.contrib.pyopenssl.util.ssl_.DEFAULT_CIPHERS += (
-                ":HIGH:!DH:!aNULL"
-            )
-        except AttributeError:
-            pass
-
-        # UPRN is street id here
-        uprn = kwargs.get("uprn")
-        check_uprn(uprn)
-
-        post_url = "https://apps.castlepoint.gov.uk/cpapps/index.cfm?fa=wastecalendar.displayDetails"
-        post_header_str = (
-            "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,"
-            "image/apng,"
-            "*/*;q=0.8,application/signed-exchange;v=b3;q=0.9|Accept-Encoding: gzip, deflate, "
-            "br|Accept-Language: en-GB;q=0.8|Cache-Control: max-age=0|Connection: "
-            "keep-alive|Content-Length: "
-            "11|Content-Type: application/x-www-form-urlencoded|Host: apps.castlepoint.gov.uk|Origin: "
-            "https://apps.castlepoint.gov.uk|Referer: "
-            "https://apps.castlepoint.gov.uk/cpapps/index.cfm?fa=wastecalendar|Sec-Fetch-Dest: "
-            "document|Sec-Fetch-Mode: navigate|Sec-Fetch-Site: same-origin|Sec-Fetch-User: ?1|Sec-GPC: "
-            "1|Upgrade-Insecure-Requests: 1|User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
-            "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.134 Safari/537.36 "
-        )
-
-        post_headers = parse_header(post_header_str)
-        form_data = {"roadID": uprn}
-        post_response = requests.post(
-            post_url, headers=post_headers, data=form_data, verify=False
-        )
-
-        # Make a BS4 object
-        soup = BeautifulSoup(post_response.text, features="html.parser")
-        soup.prettify()
-
-        data = {"bins": []}
-        collection_tuple = []
-
-        for i in range(1, 3):
-            calendar = soup.select(
-                f"#wasteCalendarContainer > div:nth-child(2) > div:nth-child({i}) > div"
-            )[0]
-            month = datetime.strptime(
-                calendar.find_next("h2").get_text(), "%B %Y"
-            ).strftime("%m")
-            year = datetime.strptime(
-                calendar.find_next("h2").get_text(), "%B %Y"
-            ).strftime("%Y")
-
-            pink_days = [
-                day.get_text().strip() for day in calendar.find_all("td", class_="pink")
-            ]
-            black_days = [
-                day.get_text().strip()
-                for day in calendar.find_all("td", class_="normal")
-            ]
-
-            for day in pink_days:
-                collection_date = datetime(
-                    year=int(year), month=int(month), day=int(day)
-                )
-                collection_tuple.append(("Pink collection", collection_date))
-
-            for day in black_days:
-                collection_date = datetime(
-                    year=int(year), month=int(month), day=int(day)
-                )
-                collection_tuple.append(("Normal collection", collection_date))
-
-        ordered_data = sorted(collection_tuple, key=lambda x: x[1])
-
-        for item in ordered_data:
-            dict_data = {
-                "type": item[0],
-                "collectionDate": item[1].strftime(date_format),
-            }
-            data["bins"].append(dict_data)
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py
deleted file mode 100644
index 787413bc5e..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CharnwoodBoroughCouncil.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from bs4 import BeautifulSoup
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-from datetime import timedelta
-from dateutil.relativedelta import relativedelta
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        # Make a BS4 object
-        soup = BeautifulSoup(page.text, features="html.parser")
-        soup.prettify()
-
-        data = {"bins": []}
-        curr_date = datetime.today()
-
-        for bins in soup.find_all("ul", {"class": "refuse"}):
-            binCollection = bins.find_all("li")
-
-            if binCollection:
-                for bin in binCollection:
-                    collection_date = (
-                        bin.find("strong", {"class": "date"}).contents[0].strip()
-                    )
-                    if collection_date.lower() == "today":
-                        collection_date = datetime.now()
-                    elif collection_date.lower() == "tomorrow":
-                        collection_date = datetime.now() + timedelta(days=1)
-                    else:
-                        collection_date += f" {curr_date.year}"
-                        collection_date = datetime.strptime(
-                            remove_ordinal_indicator_from_date_string(
-                                collection_date
-                            ).strip(),
-                            "%a %d %b %Y",
-                        )
-                        if curr_date.month == 12 and collection_date.month == 1:
-                            collection_date = collection_date + relativedelta(years=1)
-                    dict_data = {
-                        "type": bin.find("a").contents[0],
-                        "collectionDate": collection_date.strftime(date_format),
-                    }
-
-                    data["bins"].append(dict_data)
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/ChelmsfordCityCouncil.py b/uk_bin_collection/uk_bin_collection/councils/ChelmsfordCityCouncil.py
deleted file mode 100644
index 7f01f869ee..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/ChelmsfordCityCouncil.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# This script pulls (in one hit) the data from Bromley Council Bins Data
-import datetime
-import re
-import time
-from datetime import datetime
-
-import requests
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.common.keys import Keys
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import Select
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        driver = None
-        try:
-            data = {"bins": []}
-            headers = {"User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64)"}
-
-            uprn = kwargs.get("uprn")
-            postcode = kwargs.get("postcode")
-            user_paon = kwargs.get("paon")
-            web_driver = kwargs.get("web_driver")
-            headless = kwargs.get("headless")
-            driver = create_webdriver(web_driver, headless, None, __name__)
-            url = kwargs.get("url")
-
-            driver.execute_script(f"window.location.href='{url}'")
-
-            wait = WebDriverWait(driver, 120)
-            post_code_search = wait.until(
-                EC.presence_of_element_located((By.XPATH, '//input[@name="keyword"]'))
-            )
-
-            post_code_search.send_keys(postcode)
-
-            submit_btn = wait.until(
-                EC.presence_of_element_located((By.CLASS_NAME, "__submitButton"))
-            )
-
-            submit_btn.send_keys(Keys.ENTER)
-
-            address_results = wait.until(
-                EC.presence_of_element_located((By.CLASS_NAME, "directories-table"))
-            )
-            address_link = wait.until(
-                EC.presence_of_element_located(
-                    (By.XPATH, f"//a[contains(text(), '{user_paon}')]")
-                )
-            )
-
-            address_link.send_keys(Keys.ENTER)
-            results = wait.until(
-                EC.presence_of_element_located((By.CLASS_NAME, "usercontent"))
-            )
-
-            # Make a BS4 object
-            soup = BeautifulSoup(driver.page_source, features="html.parser")
-            soup.prettify()
-
-            # Get collection calendar
-            calendar_urls = soup.find_all(
-                "a", string=re.compile(r"view or download the collection calendar")
-            )
-            if len(calendar_urls) > 0:
-                requests.packages.urllib3.disable_warnings()
-                response = requests.get(calendar_urls[0].get("href"), headers=headers)
-
-                # Make a BS4 object
-                soup = BeautifulSoup(response.text, features="html.parser")
-                soup.prettify()
-
-                # Loop the months
-                for month in soup.find_all("div", {"class": "usercontent"}):
-                    year = ""
-                    if month.find("h2") and "calendar" not in month.find("h2").get_text(
-                        strip=True
-                    ):
-                        year = datetime.strptime(
-                            month.find("h2").get_text(strip=True), "%B %Y"
-                        ).strftime("%Y")
-                    elif month.find("h3"):
-                        year = datetime.strptime(
-                            month.find("h3").get_text(strip=True), "%B %Y"
-                        ).strftime("%Y")
-                    if year != "":
-                        for row in month.find_all("li"):
-                            results = re.search(
-                                "([A-Za-z]+ \\d\\d? [A-Za-z]+): (.+)",
-                                row.get_text(strip=True),
-                            )
-                            if results:
-                                dict_data = {
-                                    "type": results.groups()[1].capitalize(),
-                                    "collectionDate": datetime.strptime(
-                                        results.groups()[0] + " " + year, "%A %d %B %Y"
-                                    ).strftime(date_format),
-                                }
-                                data["bins"].append(dict_data)
-
-                # Sort collections
-                data["bins"].sort(
-                    key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
-                )
-        except Exception as e:
-            # Here you can log the exception if needed
-            print(f"An error occurred: {e}")
-            # Optionally, re-raise the exception if you want it to propagate
-            raise
-        finally:
-            # This block ensures that the driver is closed regardless of an exception
-            if driver:
-                driver.quit()
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py
deleted file mode 100644
index fc4fb8ce16..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CheltenhamBoroughCouncil.py
+++ /dev/null
@@ -1,102 +0,0 @@
-import re
-import time
-
-import requests
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import Select
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        collection_day = kwargs.get("paon")
-        collection_week = kwargs.get("postcode")
-        bindata = {"bins": []}
-
-        days_of_week = [
-            "Monday",
-            "Tuesday",
-            "Wednesday",
-            "Thursday",
-            "Friday",
-            "Saturday",
-            "Sunday",
-        ]
-
-        week = ["Week 1", "Week 2"]
-
-        offset_days = days_of_week.index(collection_day)
-        collection_week = week.index(collection_week)
-
-        if collection_week == 0:
-            refusestartDate = datetime(2024, 11, 25)
-            recyclingstartDate = datetime(2024, 11, 18)
-        else:
-            refusestartDate = datetime(2024, 11, 18)
-            recyclingstartDate = datetime(2024, 11, 25)
-
-        refuse_dates = get_dates_every_x_days(refusestartDate, 14, 28)
-        recycling_dates = get_dates_every_x_days(recyclingstartDate, 14, 28)
-        food_dates = get_dates_every_x_days(recyclingstartDate, 7, 56)
-
-        for refuseDate in refuse_dates:
-
-            collection_date = (
-                datetime.strptime(refuseDate, "%d/%m/%Y") + timedelta(days=offset_days)
-            ).strftime("%d/%m/%Y")
-
-            dict_data = {
-                "type": "Refuse Bin",
-                "collectionDate": collection_date,
-            }
-            bindata["bins"].append(dict_data)
-
-        for recyclingDate in recycling_dates:
-
-            collection_date = (
-                datetime.strptime(recyclingDate, "%d/%m/%Y")
-                + timedelta(days=offset_days)
-            ).strftime("%d/%m/%Y")
-
-            dict_data = {
-                "type": "Recycling Bin",
-                "collectionDate": collection_date,
-            }
-            bindata["bins"].append(dict_data)
-
-            dict_data = {
-                "type": "Garden Waste Bin",
-                "collectionDate": collection_date,
-            }
-            bindata["bins"].append(dict_data)
-
-        for food_date in food_dates:
-
-            collection_date = (
-                datetime.strptime(food_date, "%d/%m/%Y") + timedelta(days=offset_days)
-            ).strftime("%d/%m/%Y")
-
-            dict_data = {
-                "type": "Food Waste Bin",
-                "collectionDate": collection_date,
-            }
-            bindata["bins"].append(dict_data)
-
-        bindata["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
-        )
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py
deleted file mode 100644
index 9119e7bea6..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CheshireEastCouncil.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from typing import Dict, Any, Optional
-from bs4 import BeautifulSoup, Tag, NavigableString
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-"""
-This module provides bin collection data for Cheshire East Council.
-"""
-
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    A class to fetch and parse bin collection data for Cheshire East Council.
-    """
-
-    def parse_data(self, page: Any, **kwargs: Any) -> Dict[str, Any]:
-        soup = BeautifulSoup(page.text, features="html.parser")
-
-        bin_data_dict: Dict[str, Any] = {"bins": []}
-
-        table: Optional[Tag | NavigableString] = soup.find(
-            "table", {"class": "job-details"}
-        )
-        if isinstance(table, Tag):  # Ensure we only proceed if 'table' is a Tag
-            rows = table.find_all("tr", {"class": "data-row"})
-
-            for row in rows:
-                cells = row.find_all(
-                    "td",
-                    {
-                        "class": lambda L: isinstance(L, str)
-                        and L.startswith("visible-cell")
-                    },  # Explicitly check if L is a string
-                )
-                labels: list[Tag] = cells[0].find_all("label") if cells else []
-
-                if len(labels) >= 3:
-                    bin_type: str = labels[2].get_text(strip=True)
-                    collection_date: str = labels[1].get_text(strip=True)
-
-                    bin_data_dict["bins"].append(
-                        {
-                            "type": bin_type,
-                            "collectionDate": collection_date,
-                        }
-                    )
-
-        return bin_data_dict
diff --git a/uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py
deleted file mode 100644
index 6c97dd3f0c..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CheshireWestAndChesterCouncil.py
+++ /dev/null
@@ -1,105 +0,0 @@
-import time
-
-import requests
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        SESSION_URL = "https://my.cheshirewestandchester.gov.uk/authapi/isauthenticated?uri=https://my.cheshirewestandchester.gov.uk/fillform/?iframe_id=fillform-frame-1&db_id=&hostname=my.cheshirewestandchester.gov.uk&withCredentials=true"
-
-        API_URL = "https://my.cheshirewestandchester.gov.uk/apibroker/runLookup"
-
-        headers = {
-            "Content-Type": "application/json",
-            "Accept": "application/json",
-            "User-Agent": "Mozilla/5.0",
-            "X-Requested-With": "XMLHttpRequest",
-            "Referer": "https://mycouncil.milton-keynes.gov.uk/fillform/?iframe_id=fillform-frame-1&db_id=",
-        }
-        s = requests.session()
-        r = s.get(SESSION_URL)
-        r.raise_for_status()
-        session_data = r.json()
-        sid = session_data["auth-session"]
-        params = {
-            "id": "609b918c7dd6d",
-            "repeat_against": "",
-            "noRetry": "false",
-            "getOnlyTokens": "undefined",
-            "log_id": "",
-            "app_name": "AchieveForms",
-            # unix_timestamp
-            "_": str(int(time.time() * 1000)),
-            "sid": sid,
-        }
-
-        r = s.post(API_URL, headers=headers, params=params)
-        r.raise_for_status()
-
-        data = r.json()
-        rows_data = data["integration"]["transformed"]["rows_data"]["0"]
-        AuthenticateResponse = rows_data["AuthenticateResponse"]
-
-        params = {
-            "id": "6101d23110243",
-            "repeat_against": "",
-            "noRetry": "false",
-            "getOnlyTokens": "undefined",
-            "log_id": "",
-            "app_name": "AchieveForms",
-            # unix_timestamp
-            "_": str(int(time.time() * 1000)),
-            "sid": sid,
-        }
-
-        data = {
-            "formValues": {
-                "Section 1": {
-                    "UPRN": {
-                        "value": user_uprn,
-                    },
-                    "AuthenticateResponse": {
-                        "value": AuthenticateResponse,
-                    },
-                }
-            },
-        }
-
-        r = s.post(API_URL, json=data, headers=headers, params=params)
-        r.raise_for_status()
-
-        data = r.json()
-        rows_data = data["integration"]["transformed"]["rows_data"]
-        if not isinstance(rows_data, dict):
-            raise ValueError("Invalid data returned from API")
-
-        # Extract each service's relevant details for the bin schedule
-        for item in rows_data.values():
-            dict_data = {
-                "type": item["serviceType"],
-                "collectionDate": datetime.strptime(
-                    item["collectionDateTime"], "%Y-%m-%dT%H:%M:%S"
-                ).strftime(date_format),
-            }
-            bindata["bins"].append(dict_data)
-
-        bindata["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
-        )
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/ChesterfieldBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/ChesterfieldBoroughCouncil.py
deleted file mode 100644
index 97277c8369..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/ChesterfieldBoroughCouncil.py
+++ /dev/null
@@ -1,188 +0,0 @@
-import json
-import logging
-import re
-from datetime import datetime, timedelta
-
-import requests
-from bs4 import BeautifulSoup
-from uk_bin_collection.uk_bin_collection.common import check_uprn, date_format
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-import urllib3
-
-
-# Suppress only the single warning from urllib3 needed.
-urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-
-_LOGGER = logging.getLogger(__name__)
-
-
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Implementation for Chesterfield Borough Council waste collection data retrieval.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        """
-        Fetch and parse waste collection data for Chesterfield Borough Council.
-
-        Args:
-            page (str): Not used in this implementation.
-            **kwargs: Should contain 'uprn' key.
-
-        Returns:
-            dict: Parsed bin collection data.
-        """
-        # Get and check UPRN
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        # Define API URLs
-        API_URLS = {
-            "session": "https://www.chesterfield.gov.uk/bins-and-recycling/bin-collections/check-bin-collections.aspx",
-            "fwuid": "https://myaccount.chesterfield.gov.uk/anonymous/c/cbc_VE_CollectionDaysLO.app?aura.format=JSON&aura.formatAdapter=LIGHTNING_OUT",
-            "search": "https://myaccount.chesterfield.gov.uk/anonymous/aura?r=2&aura.ApexAction.execute=1",
-        }
-
-        HEADERS = {
-            "User-Agent": "Mozilla/5.0",
-            "Content-Type": "application/x-www-form-urlencoded",
-        }
-
-        # Initialize session
-        session = requests.Session()
-
-        try:
-            # Step 1: Get session
-            session.get(API_URLS["session"], headers=HEADERS, verify=False)
-
-            # Step 2: Get fwuid
-            fwuid_response = session.get(
-                API_URLS["fwuid"], headers=HEADERS, verify=False
-            )
-            fwuid_data = fwuid_response.json()
-            fwuid = fwuid_data.get("auraConfig", {}).get("context", {}).get("fwuid")
-
-            if not fwuid:
-                _LOGGER.error("Failed to retrieve fwuid from the response.")
-                return bindata
-
-            # Step 3: Prepare payload for UPRN search
-            payload = {
-                "message": json.dumps(
-                    {
-                        "actions": [
-                            {
-                                "id": "4;a",
-                                "descriptor": "aura://ApexActionController/ACTION$execute",
-                                "callingDescriptor": "UNKNOWN",
-                                "params": {
-                                    "namespace": "",
-                                    "classname": "CBC_VE_CollectionDays",
-                                    "method": "getServicesByUPRN",
-                                    "params": {
-                                        "propertyUprn": user_uprn,
-                                        "executedFrom": "Main Website",
-                                    },
-                                    "cacheable": False,
-                                    "isContinuation": False,
-                                },
-                            }
-                        ]
-                    }
-                ),
-                "aura.context": json.dumps(
-                    {
-                        "mode": "PROD",
-                        "fwuid": fwuid,
-                        "app": "c:cbc_VE_CollectionDaysLO",
-                        "loaded": {
-                            "APPLICATION@markup://c:cbc_VE_CollectionDaysLO": "pqeNg7kPWCbx1pO8sIjdLA"
-                        },
-                        "dn": [],
-                        "globals": {},
-                        "uad": True,
-                    }
-                ),
-                "aura.pageURI": "/bins-and-recycling/bin-collections/check-bin-collections.aspx",
-                "aura.token": "null",
-            }
-
-            # Step 4: Make POST request to fetch collection data
-            search_response = session.post(
-                API_URLS["search"], data=payload, headers=HEADERS, verify=False
-            )
-            search_data = search_response.json()
-
-            # Step 5: Extract service units
-            service_units = (
-                search_data.get("actions", [])[0]
-                .get("returnValue", {})
-                .get("returnValue", {})
-                .get("serviceUnits", [])
-            )
-
-            if not service_units:
-                _LOGGER.warning("No service units found for the given UPRN.")
-                return bindata
-
-            # Initialize dictionary to store bin dates
-            bin_schedule = {}
-
-            # Define icon mapping
-            ICON_MAP = {
-                "DOMESTIC REFUSE": "mdi:trash-can",
-                "DOMESTIC RECYCLING": "mdi:recycle",
-                "DOMESTIC ORGANIC": "mdi:leaf",
-            }
-
-            # Define regex pattern to capture day and date (e.g., Tue 5 Nov)
-            date_pattern = re.compile(r"\b\w{3} \d{1,2} \w{3}\b")
-
-            current_year = datetime.now().year
-
-            # Process each service unit
-            for item in service_units:
-                try:
-                    waste_type = item["serviceTasks"][0]["taskTypeName"]
-                    waste_type = str(waste_type).replace("Collect ", "").upper()
-                except (IndexError, KeyError):
-                    _LOGGER.debug("Skipping a service unit due to missing data.")
-                    continue
-
-                # Extract the next scheduled date
-                try:
-                    dt_zulu = item["serviceTasks"][0]["serviceTaskSchedules"][0][
-                        "nextInstance"
-                    ]["currentScheduledDate"]
-                    dt_utc = datetime.strptime(dt_zulu, "%Y-%m-%dT%H:%M:%S.%f%z")
-                    dt_local = dt_utc.astimezone(None)
-                    collection_date = dt_local.date()
-                except (IndexError, KeyError, ValueError) as e:
-                    _LOGGER.warning(f"Failed to parse date for {waste_type}: {e}")
-                    continue
-
-                # Append to bin_schedule
-                bin_schedule[waste_type] = collection_date.strftime(date_format)
-
-            # Convert bin_schedule to the expected format
-            for bin_type, collection_date in bin_schedule.items():
-                dict_data = {
-                    "type": bin_type,
-                    "collectionDate": collection_date,
-                }
-                bindata["bins"].append(dict_data)
-
-            # Sort the bins by collection date
-            bindata["bins"].sort(
-                key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
-            )
-
-        except requests.RequestException as e:
-            _LOGGER.error(f"Network error occurred: {e}")
-        except json.JSONDecodeError as e:
-            _LOGGER.error(f"JSON decoding failed: {e}")
-        except Exception as e:
-            _LOGGER.error(f"An unexpected error occurred: {e}")
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/ChichesterDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/ChichesterDistrictCouncil.py
deleted file mode 100644
index 63e88a3744..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/ChichesterDistrictCouncil.py
+++ /dev/null
@@ -1,110 +0,0 @@
-import time
-from datetime import datetime
-
-from selenium.webdriver.support.ui import Select
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import Select
-from selenium.webdriver.support.wait import WebDriverWait
-from selenium.webdriver.common.keys import Keys
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        driver = None
-        try:
-            # Make a BS4 object
-
-            page = "https://www.chichester.gov.uk/checkyourbinday"
-
-            user_postcode = kwargs.get("postcode")
-            user_uprn = kwargs.get("uprn")
-            web_driver = kwargs.get("web_driver")
-            headless = kwargs.get("headless")
-            house_number = kwargs.get("paon")
-
-            driver = create_webdriver(web_driver, headless, None, __name__)
-            driver.get(page)
-
-            wait = WebDriverWait(driver, 60)
-
-            inputElement_postcodesearch = wait.until(
-                EC.visibility_of_element_located(
-                    (By.ID, "WASTECOLLECTIONCALENDARV5_CALENDAR_ADDRESSLOOKUPPOSTCODE")
-                )
-            )
-
-            inputElement_postcodesearch.send_keys(user_postcode)
-
-            inputElement_postcodesearch_btn = wait.until(
-                EC.visibility_of_element_located(
-                    (By.ID, "WASTECOLLECTIONCALENDARV5_CALENDAR_ADDRESSLOOKUPSEARCH")
-                )
-            )
-            inputElement_postcodesearch_btn.send_keys(Keys.ENTER)
-
-            inputElement_select_address = wait.until(
-                EC.element_to_be_clickable(
-                    (By.ID, "WASTECOLLECTIONCALENDARV5_CALENDAR_ADDRESSLOOKUPADDRESS")
-                )
-            )
-            dropdown_element = driver.find_element(
-                By.ID, "WASTECOLLECTIONCALENDARV5_CALENDAR_ADDRESSLOOKUPADDRESS"
-            )
-
-            # Now create a Select object based on the found element
-            dropdown = Select(dropdown_element)
-
-            # Select the option by visible text
-            dropdown.select_by_visible_text(house_number)
-
-            results = wait.until(
-                EC.element_to_be_clickable(
-                    (By.CLASS_NAME, "bin-collection-dates-container")
-                )
-            )
-
-            soup = BeautifulSoup(driver.page_source, features="html.parser")
-            soup.prettify()
-
-            # Extract data from the table
-            bin_collection_data = []
-            rows = soup.find(
-                "table", class_="defaultgeneral bin-collection-dates"
-            ).find_all("tr")
-            for row in rows:
-                cells = row.find_all("td")
-                if cells:
-                    date_str = cells[0].text.strip()
-                    bin_type = cells[1].text.strip()
-                    # Convert date string to the required format DD/MM/YYYY
-                    date_obj = datetime.strptime(date_str, "%d %B %Y")
-                    date_formatted = date_obj.strftime(date_format)
-                    bin_collection_data.append(
-                        {"collectionDate": date_formatted, "type": bin_type}
-                    )
-
-            # Convert to JSON
-            json_data = {"bins": bin_collection_data}
-
-        except Exception as e:
-            # Here you can log the exception if needed
-            print(f"An error occurred: {e}")
-            # Optionally, re-raise the exception if you want it to propagate
-            raise
-        finally:
-            # This block ensures that the driver is closed regardless of an exception
-            if driver:
-                driver.quit()
-        return json_data
diff --git a/uk_bin_collection/uk_bin_collection/councils/ChorleyCouncil.py b/uk_bin_collection/uk_bin_collection/councils/ChorleyCouncil.py
deleted file mode 100644
index a7c2ae10bf..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/ChorleyCouncil.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import time
-import urllib.parse
-
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import Select
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-def format_bin_type(bin_colour: str):
-    bin_types = {
-        "grey": "Garden waste (Grey Bin)",
-        "brown": "Paper and card (Brown Bin)",
-        "blue": "Bottles and cans (Blue Bin)",
-        "green": "General waste (Green Bin)",
-    }
-    bin_colour = urllib.parse.unquote(bin_colour).split(" ")[0].lower()
-    return bin_types[bin_colour]
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        driver = None
-        try:
-            data = {"bins": []}
-            user_uprn = kwargs.get("uprn")
-            user_postcode = kwargs.get("postcode")
-            web_driver = kwargs.get("web_driver")
-            headless = kwargs.get("headless")
-            check_uprn(user_uprn)
-            check_postcode(user_postcode)
-
-            # Create Selenium webdriver
-            user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
-            driver = create_webdriver(web_driver, headless, user_agent, __name__)
-            driver.get("https://myaccount.chorley.gov.uk/wastecollections.aspx")
-
-            # Accept cookies banner
-            cookieBanner = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located((By.ID, "PrivacyPolicyNotification"))
-            )
-            cookieClose = cookieBanner.find_element(
-                By.CSS_SELECTOR, "span.ui-icon-circle-close"
-            )
-            cookieClose.click()
-
-            # Populate postcode field
-            inputElement_postcode = driver.find_element(
-                By.ID,
-                "MainContent_addressSearch_txtPostCodeLookup",
-            )
-            inputElement_postcode.send_keys(user_postcode)
-
-            # Click search button
-            findAddress = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located(
-                    (
-                        By.ID,
-                        "MainContent_addressSearch_btnFindAddress",
-                    )
-                )
-            )
-            findAddress.click()
-
-            time.sleep(1)
-
-            # Wait for the 'Select address' dropdown to appear and select option matching UPRN
-            dropdown = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located(
-                    (
-                        By.ID,
-                        "MainContent_addressSearch_ddlAddress",
-                    )
-                )
-            )
-            # Create a 'Select' for it, then select the matching URPN option
-            dropdownSelect = Select(dropdown)
-            dropdownSelect.select_by_value(user_uprn)
-
-            # Wait for the submit button to appear, then click it to get the collection dates
-            submit = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located((By.ID, "MainContent_btnSearch"))
-            )
-            submit.click()
-
-            soup = BeautifulSoup(driver.page_source, features="html.parser")
-
-            # Get the property details
-            property_details = soup.find(
-                "table",
-                {"class": "WasteCollection"},
-            )
-
-            # Get the dates
-            for row in property_details.tbody.find_all("tr", recursive=False):
-                month_col = row.td
-                month = month_col.get_text(strip=True)
-
-                for date_col in month_col.find_next_siblings("td"):
-                    day = date_col.p.contents[0].strip()
-
-                    if day == "":
-                        continue
-
-                    for bin_type in date_col.find_all("img"):
-                        bin_colour = bin_type.get("src").split("/")[-1].split(".")[0]
-                        date_object = datetime.strptime(f"{day} {month}", "%d %B %Y")
-                        date_formatted = date_object.strftime("%d/%m/%Y")
-
-                        dict_data = {
-                            "type": format_bin_type(bin_colour),
-                            "collectionDate": date_formatted,
-                        }
-                        data["bins"].append(dict_data)
-        except Exception as e:
-            # Here you can log the exception if needed
-            print(f"An error occurred: {e}")
-            # Optionally, re-raise the exception if you want it to propagate
-            raise
-        finally:
-            # This block ensures that the driver is closed regardless of an exception
-            if driver:
-                driver.quit()
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/ColchesterCityCouncil.py b/uk_bin_collection/uk_bin_collection/councils/ColchesterCityCouncil.py
deleted file mode 100644
index 919246ba36..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/ColchesterCityCouncil.py
+++ /dev/null
@@ -1,103 +0,0 @@
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        driver = None
-        try:
-            data = {"bins": []}
-            user_paon = kwargs.get("paon")
-            user_postcode = kwargs.get("postcode")
-            web_driver = kwargs.get("web_driver")
-            headless = kwargs.get("headless")
-            check_paon(user_paon)
-            check_postcode(user_postcode)
-
-            # Create Selenium webdriver
-            driver = create_webdriver(web_driver, headless, None, __name__)
-            driver.get(
-                "https://www.colchester.gov.uk/your-recycling-calendar/?start=true"
-            )
-
-            accept_button = WebDriverWait(driver, timeout=30).until(
-                EC.element_to_be_clickable((By.ID, "ccc-notify-accept"))
-            )
-            accept_button.click()
-
-            # Wait for the postcode field to appear then populate it
-            inputElement_postcode = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located((By.CLASS_NAME, "input-text"))
-            )
-            inputElement_postcode.send_keys(user_postcode)
-
-            # Click search button
-            findAddress = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located((By.CLASS_NAME, "button-small"))
-            )
-            findAddress.click()
-
-            # Wait for the 'Select address' dropdown to appear and select option matching the house name/number
-            WebDriverWait(driver, 10).until(
-                EC.element_to_be_clickable(
-                    (
-                        By.XPATH,
-                        "//select[@class='input-select']//option[contains(., '"
-                        + user_paon
-                        + "')]",
-                    )
-                )
-            ).click()
-
-            WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located((By.CLASS_NAME, "button-small"))
-            ).click()
-
-            # Wait for the collections table to appear
-            WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located((By.CLASS_NAME, "recycling-calendar"))
-            )
-
-            soup = BeautifulSoup(driver.page_source, features="html.parser")
-
-            recyclingcalendar = soup.find("div", {"class": "recycling-calendar"})
-
-            rows = recyclingcalendar.find_all(
-                "div", {"class": "recycling-calendar-row"}
-            )
-
-            for row in rows:
-                collectiondate = datetime.strptime(
-                    row.find("strong").get_text(),
-                    "%d %B %Y",
-                )
-                listobj = row.find("ul")
-                for li in listobj.find_all("li"):
-                    dict_data = {
-                        "type": li.get_text().strip(),
-                        "collectionDate": collectiondate.strftime("%d/%m/%Y"),
-                    }
-                    data["bins"].append(dict_data)
-
-        except Exception as e:
-            # Here you can log the exception if needed
-            print(f"An error occurred: {e}")
-            # Optionally, re-raise the exception if you want it to propagate
-            raise
-        finally:
-            # This block ensures that the driver is closed regardless of an exception
-            if driver:
-                driver.quit()
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/ConwyCountyBorough.py b/uk_bin_collection/uk_bin_collection/councils/ConwyCountyBorough.py
deleted file mode 100644
index ee1fc645f3..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/ConwyCountyBorough.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from bs4 import BeautifulSoup
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-from uk_bin_collection.uk_bin_collection.common import *
-from datetime import datetime
-
-
-class CouncilClass(AbstractGetBinDataClass):
-    def parse_data(self, page: str, **kwargs) -> dict:
-        soup = BeautifulSoup(page.text, features="html.parser")
-        data = {"bins": []}
-
-        for bin_section in soup.select('div[class*="containererf"]'):
-            date_text = bin_section.find(id="content").text.strip()
-            collection_date = datetime.strptime(date_text, "%A, %d/%m/%Y")
-
-            bin_types = bin_section.find(id="main1").findAll("li")
-            for bin_type in bin_types:
-                bin_type_name = bin_type.text.split("(")[0].strip()
-
-                data["bins"].append(
-                    {
-                        "type": bin_type_name,
-                        "collectionDate": collection_date.strftime(date_format),
-                    }
-                )
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CopelandBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CopelandBoroughCouncil.py
deleted file mode 100644
index 521a73948f..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CopelandBoroughCouncil.py
+++ /dev/null
@@ -1,93 +0,0 @@
-from xml.etree import ElementTree
-
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    baseclass. They can also override some
-    operations with a default implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        uprn = kwargs.get("uprn")
-        check_uprn(uprn)
-        council = "CPL"
-
-        # Make SOAP request
-        headers = {
-            "Content-Type": "text/xml; charset=UTF-8",
-            "Referer": "https://collections-copeland.azurewebsites.net/calendar.html",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36",
-        }
-        requests.packages.urllib3.disable_warnings()
-        post_data = (
-            ''
-            ''
-            ''
-            "" + council + "" + uprn + ""
-            "Chtml"
-        )
-        response = requests.post(
-            "https://collections-copeland.azurewebsites.net/WSCollExternal.asmx",
-            headers=headers,
-            data=post_data,
-        )
-
-        if response.status_code != 200:
-            raise ValueError("No bin data found for provided UPRN.")
-
-        # Get HTML from SOAP response
-        xmltree = ElementTree.fromstring(response.text)
-        html = xmltree.find(
-            ".//{http://webaspx-collections.azurewebsites.net/}getRoundCalendarForUPRNResult"
-        ).text
-        # Parse with BS4
-        soup = BeautifulSoup(html, features="html.parser")
-        soup.prettify()
-
-        data = {"bins": []}
-        for bin_type in ["Refuse", "Recycling", "Garden"]:
-            bin_el = soup.find("b", string=bin_type)
-            if bin_el:
-                bin_info = bin_el.next_sibling.split(": ")[1]
-                collection_date = ""
-                results = re.search("([A-Za-z]+ \\d\\d? [A-Za-z]+) then", bin_info)
-                if results:
-                    if results[1] == "Today":
-                        date = datetime.now()
-                    elif results[1] == "Tomorrow":
-                        date = datetime.now() + timedelta(days=1)
-                    else:
-                        date = get_next_occurrence_from_day_month(
-                            datetime.strptime(
-                                results[1] + " " + datetime.now().strftime("%Y"),
-                                "%a %d %b %Y",
-                            )
-                        )
-                    if date:
-                        collection_date = date.strftime(date_format)
-                else:
-                    results2 = re.search("([A-Za-z]+) then", bin_info)
-                    if results2:
-                        if results2[1] == "Today":
-                            collection_date = datetime.now().strftime(date_format)
-                        elif results2[1] == "Tomorrow":
-                            collection_date = (
-                                datetime.now() + timedelta(days=1)
-                            ).strftime(date_format)
-                        else:
-                            collection_date = results2[1]
-
-                if collection_date != "":
-                    dict_data = {
-                        "type": bin_type,
-                        "collectionDate": collection_date,
-                    }
-                    data["bins"].append(dict_data)
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CornwallCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CornwallCouncil.py
deleted file mode 100644
index 2170c16f5f..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CornwallCouncil.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from bs4 import BeautifulSoup
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-from dateutil.relativedelta import relativedelta
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        data = {"bins": []}
-        collections = []
-
-        curr_date = datetime.today()
-
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-
-        headers = {
-            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
-            "Accept-Language": "en-GB,en;q=0.9",
-            "Cache-Control": "no-cache",
-            "Connection": "keep-alive",
-            "Pragma": "no-cache",
-            "Sec-Fetch-Dest": "document",
-            "Sec-Fetch-Mode": "navigate",
-            "Sec-Fetch-Site": "none",
-            "Sec-Fetch-User": "?1",
-            "Upgrade-Insecure-Requests": "1",
-            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.6422.143 Safari/537.36",
-            "sec-ch-ua": '"Opera GX";v="111", "Chromium";v="125", "Not.A/Brand";v="24"',
-            "sec-ch-ua-mobile": "?0",
-            "sec-ch-ua-platform": '"Windows"',
-        }
-        params = {
-            "uprn": f"{user_uprn}",
-            # 'uprn': f'100040128734',
-        }
-        response = requests.get(
-            "https://www.cornwall.gov.uk/umbraco/surface/waste/MyCollectionDays",
-            params=params,
-            headers=headers,
-        )
-
-        soup = BeautifulSoup(response.text, features="html.parser")
-        soup.prettify()
-
-        for item in soup.find_all("div", class_="collection text-center service"):
-            bin_type = item.contents[1].text + " bin"
-            collection_date = datetime.strptime(item.contents[5].text, "%d %b").replace(
-                year=curr_date.year
-            )
-            if curr_date.month == 12 and collection_date.month == 1:
-                collection_date = collection_date + relativedelta(years=1)
-            collections.append((bin_type, collection_date))
-
-            ordered_data = sorted(collections, key=lambda x: x[1])
-            data = {"bins": []}
-            for bin in ordered_data:
-                dict_data = {
-                    "type": bin[0].capitalize().strip(),
-                    "collectionDate": bin[1].strftime(date_format),
-                }
-                data["bins"].append(dict_data)
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CotswoldDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CotswoldDistrictCouncil.py
deleted file mode 100644
index 509b1cb9a2..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CotswoldDistrictCouncil.py
+++ /dev/null
@@ -1,120 +0,0 @@
-import time
-from datetime import datetime
-
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.common.keys import Keys
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import Select
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-# import the wonderful Beautiful Soup and the URL grabber
-
-
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        driver = None
-        try:
-            page = "https://community.cotswold.gov.uk/s/waste-collection-enquiry"
-
-            data = {"bins": []}
-
-            house_number = kwargs.get("paon")
-            postcode = kwargs.get("postcode")
-            full_address = f"{house_number}, {postcode}"
-            web_driver = kwargs.get("web_driver")
-            headless = kwargs.get("headless")
-
-            # Create Selenium webdriver
-            driver = create_webdriver(web_driver, headless, None, __name__)
-            driver.get(page)
-
-            # If you bang in the house number (or property name) and postcode in the box it should find your property
-            wait = WebDriverWait(driver, 60)
-            address_entry_field = wait.until(
-                EC.presence_of_element_located(
-                    (By.XPATH, '//*[@id="combobox-input-20"]')
-                )
-            )
-
-            address_entry_field.send_keys(str(full_address))
-
-            address_entry_field = wait.until(
-                EC.element_to_be_clickable((By.XPATH, '//*[@id="combobox-input-20"]'))
-            )
-            address_entry_field.click()
-            address_entry_field.send_keys(Keys.BACKSPACE)
-            address_entry_field.send_keys(str(full_address[len(full_address) - 1]))
-
-            first_found_address = wait.until(
-                EC.element_to_be_clickable(
-                    (By.XPATH, '//*[@id="dropdown-element-20"]/ul')
-                )
-            )
-
-            first_found_address.click()
-            # Wait for the 'Select your property' dropdown to appear and select the first result
-            next_btn = wait.until(
-                EC.element_to_be_clickable((By.XPATH, "//lightning-button/button"))
-            )
-            next_btn.click()
-            bin_data = wait.until(
-                EC.presence_of_element_located(
-                    (By.XPATH, "//span[contains(text(), 'Container')]")
-                )
-            )
-
-            soup = BeautifulSoup(driver.page_source, features="html.parser")
-
-            rows = soup.find_all("tr", class_="slds-hint-parent")
-            current_year = datetime.now().year
-
-            for row in rows:
-                columns = row.find_all("td")
-                if columns:
-                    container_type = row.find("th").text.strip()
-                    if columns[0].get_text() == "Today":
-                        collection_day = datetime.now().strftime("%a, %d %B")
-                    elif columns[0].get_text() == "Tomorrow":
-                        collection_day = (datetime.now() + timedelta(days=1)).strftime(
-                            "%a, %d %B"
-                        )
-                    else:
-                        collection_day = re.sub(
-                            r"[^a-zA-Z0-9,\s]", "", columns[0].get_text()
-                        ).strip()
-
-                    # Parse the date from the string
-                    parsed_date = datetime.strptime(collection_day, "%a, %d %B")
-                    if parsed_date < datetime(
-                        parsed_date.year, parsed_date.month, parsed_date.day
-                    ):
-                        parsed_date = parsed_date.replace(year=current_year + 1)
-                    else:
-                        parsed_date = parsed_date.replace(year=current_year)
-                    # Format the date as %d/%m/%Y
-                    formatted_date = parsed_date.strftime("%d/%m/%Y")
-
-                    # Add the bin type and collection date to the 'data' dictionary
-                    data["bins"].append(
-                        {"type": container_type, "collectionDate": formatted_date}
-                    )
-        except Exception as e:
-            # Here you can log the exception if needed
-            print(f"An error occurred: {e}")
-            # Optionally, re-raise the exception if you want it to propagate
-            raise
-        finally:
-            # This block ensures that the driver is closed regardless of an exception
-            if driver:
-                driver.quit()
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CoventryCityCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CoventryCityCouncil.py
deleted file mode 100644
index 4256e696be..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CoventryCityCouncil.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import requests
-from bs4 import BeautifulSoup
-from dateutil.relativedelta import relativedelta
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        bindata = {"bins": []}
-        curr_date = datetime.today()
-
-        soup = BeautifulSoup(page.content, features="html.parser")
-        button = soup.find("a", text="Find out which bin will be collected when.")
-
-        if button["href"]:
-            URI = button["href"]
-            # Make the GET request
-            response = requests.get(URI)
-            soup = BeautifulSoup(response.content, features="html.parser")
-            divs = soup.find_all("div", {"class": "editor"})
-            for div in divs:
-                lis = div.find_all("li")
-                for li in lis:
-                    collection = li.text.split(": ")
-                    collection_date = datetime.strptime(
-                        collection[0],
-                        "%A %d %B",
-                    ).replace(year=curr_date.year)
-                    if curr_date.month == 12 and collection_date.month == 1:
-                        collection_date = collection_date + relativedelta(years=1)
-                    bin_types = collection[1].split(" and ")
-                    for bin_type in bin_types:
-                        dict_data = {
-                            "type": bin_type,
-                            "collectionDate": collection_date.strftime("%d/%m/%Y"),
-                        }
-                        bindata["bins"].append(dict_data)
-        else:
-            print("Failed to find bin schedule")
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/CrawleyBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CrawleyBoroughCouncil.py
deleted file mode 100644
index 4d5eceed4c..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CrawleyBoroughCouncil.py
+++ /dev/null
@@ -1,112 +0,0 @@
-import time
-
-import requests
-from dateutil.relativedelta import relativedelta
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        # Make a BS4 object
-        uprn = kwargs.get("uprn")
-        usrn = kwargs.get("paon")
-        check_uprn(uprn)
-        check_usrn(usrn)
-        bindata = {"bins": []}
-
-        SESSION_URL = "https://crawleybc-self.achieveservice.com/authapi/isauthenticated?uri=https%253A%252F%252Fcrawleybc-self.achieveservice.com%252Fen%252FAchieveForms%252F%253Fform_uri%253Dsandbox-publish%253A%252F%252FAF-Process-fb73f73e-e8f5-4441-9f83-8b5d04d889d6%252FAF-Stage-ec9ada91-d2d9-43bc-9730-597d15fc8108%252Fdefinition.json%2526redirectlink%253D%252Fen%2526cancelRedirectLink%253D%252Fen%2526noLoginPrompt%253D1%2526accept%253Dyes&hostname=crawleybc-self.achieveservice.com&withCredentials=true"
-
-        API_URL = "https://crawleybc-self.achieveservice.com/apibroker/"
-
-        currentdate = datetime.now().strftime("%d/%m/%Y")
-
-        data = {
-            "formValues": {
-                "Address": {
-                    "address": {
-                        "value": {
-                            "Address": {
-                                "usrn": {
-                                    "value": usrn,
-                                },
-                                "uprn": {
-                                    "value": uprn,
-                                },
-                            }
-                        },
-                    },
-                    "dayConverted": {
-                        "value": currentdate,
-                    },
-                    "getCollection": {
-                        "value": "true",
-                    },
-                    "getWorksheets": {
-                        "value": "false",
-                    },
-                },
-            },
-        }
-
-        headers = {
-            "Content-Type": "application/json",
-            "Accept": "application/json",
-            "User-Agent": "Mozilla/5.0",
-            "X-Requested-With": "XMLHttpRequest",
-            "Referer": "https://crawleybc-self.achieveservice.com/fillform/?iframe_id=fillform-frame-1&db_id=",
-        }
-        s = requests.session()
-        r = s.get(SESSION_URL)
-        r.raise_for_status()
-        session_data = r.json()
-        sid = session_data["auth-session"]
-        params = {
-            "api": "RunLookup",
-            "id": "5b4f0ec5f13f4",
-            "repeat_against": "",
-            "noRetry": "true",
-            "getOnlyTokens": "undefined",
-            "log_id": "",
-            "app_name": "AF-Renderer::Self",
-            # unix_timestamp
-            "_": str(int(time.time() * 1000)),
-            "sid": sid,
-        }
-
-        r = s.post(API_URL, json=data, headers=headers, params=params)
-        r.raise_for_status()
-
-        data = r.json()
-        rows_data = data["integration"]["transformed"]["rows_data"]["0"]
-        if not isinstance(rows_data, dict):
-            raise ValueError("Invalid data returned from API")
-
-        # Extract each service's relevant details for the bin schedule
-        for key, value in rows_data.items():
-            if key.endswith("DateNext"):
-                BinType = key.replace("DateNext", "Service")
-                for key2, value2 in rows_data.items():
-                    if key2 == BinType:
-                        BinType = value2
-                next_collection = datetime.strptime(value, "%A %d %B").replace(
-                    year=datetime.now().year
-                )
-                if datetime.now().month == 12 and next_collection.month == 1:
-                    next_collection = next_collection + relativedelta(years=1)
-
-                dict_data = {
-                    "type": BinType,
-                    "collectionDate": next_collection.strftime(date_format),
-                }
-                bindata["bins"].append(dict_data)
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py
deleted file mode 100644
index d866cfe4a6..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CroydonCouncil.py
+++ /dev/null
@@ -1,286 +0,0 @@
-import time
-
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-def get_headers(base_url: str, method: str) -> dict[str, str]:
-    """
-    Gets request headers
-        :rtype: dict[str, str]
-        :param base_url: Base URL to use
-        :param method: Method to use
-        :return: Request headers
-    """
-    headers = {
-        "Accept-Encoding": "gzip, deflate, br",
-        "Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8",
-        "Cache-Control": "max-age=0",
-        "Connection": "keep-alive",
-        "Host": "service.croydon.gov.uk",
-        "Origin": base_url,
-        "sec-ch-ua": '"Not_A Brand";v="99", "Google Chrome";v="109", "Chromium";v="109"',
-        "sec-ch-ua-mobile": "?0",
-        "sec-ch-ua-platform": "Windows",
-        "Sec-Fetch-Dest": "document",
-        "Sec-Fetch-User": "?1",
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)"
-        " Chrome/109.0.0.0 Safari/537.36",
-    }
-    if method.lower() == "post":
-        headers["Accept"] = "application/json, text/javascript, */*; q=0.01"
-        headers["Content-Type"] = "application/x-www-form-urlencoded; charset=UTF-8"
-        headers["Sec-Fetch-Mode"] = "cors"
-        headers["Sec-Fetch-Mode"] = "same-origin"
-        headers["X-Requested-With"] = "XMLHttpRequest"
-    else:
-        headers["Accept"] = (
-            "text/html,application/xhtml+xml,application/xml;"
-            "q=0.9,image/avif,image/webp,image/apng,*/*;"
-            "q=0.8,application/signed-exchange;v=b3;q=0.9"
-        )
-        headers["Sec-Fetch-Mode"] = "navigate"
-        headers["Sec-Fetch-Mode"] = "none"
-    return headers
-
-
-def get_session_storage_global() -> object:
-    """
-    Gets session storage global object
-        :rtype: object
-        :return: Session storage global object
-    """
-    return {
-        "destination_stack": [
-            "w/webpage/bin-day-enter-address",
-            "w/webpage/your-bin-collection-details?context_record_id=86086077"
-            "&webpage_token=5c047b2c10b4aad66bef2054aac6bea52ad7a5e185ffdf7090b01f8ddc96728f",
-            "w/webpage/bin-day-enter-address",
-            "w/webpage/your-bin-collection-details?context_record_id=86085229"
-            "&webpage_token=cf1b8fd6213f4823277d98c1dd8a992e6ebef1fabc7d892714e5d9dade448c37",
-            "w/webpage/bin-day-enter-address",
-            "w/webpage/your-bin-collection-details?context_record_id=86084221"
-            "&webpage_token=7f52fb51019bf0e6bfe9647b1b31000124bd92a9d95781f1557f58b3ed40da52",
-            "w/webpage/bin-day-enter-address",
-            "w/webpage/your-bin-collection-details?context_record_id=86083209"
-            "&webpage_token=de50c265da927336f526d9d9a44947595c3aa38965aa8c495ac2fb73d272ece8",
-            "w/webpage/bin-day-enter-address",
-        ],
-        "last_context_record_id": "86086077",
-    }
-
-
-def get_csrf_token(s: requests.session, base_url: str) -> str:
-    """
-    Gets a CSRF token
-        :rtype: str
-        :param s: requests.Session() to use
-        :param base_url: Base URL to use
-        :return: CSRF token
-    """
-    csrf_token = ""
-    response = s.get(
-        base_url + "/wasteservices/w/webpage/bin-day-enter-address",
-        headers=get_headers(base_url, "GET"),
-    )
-    if response.status_code == 200:
-        soup = BeautifulSoup(response.text, features="html.parser")
-        soup.prettify()
-        app_body = soup.find("div", {"class": "app-body"})
-        script = app_body.find("script", {"type": "text/javascript"}).string
-        p = re.compile("var CSRF = ('|\")(.*?)('|\");")
-        m = p.search(script)
-        csrf_token = m.groups()[1]
-    else:
-        raise ValueError(
-            "Code 1: Failed to get a CSRF token. Please ensure the council website is online first,"
-            " then open an issue on GitHub."
-        )
-    return csrf_token
-
-
-def get_address_id(
-    s: requests.session, base_url: str, csrf_token: str, postcode: str, paon: str
-) -> str:
-    """
-    Gets the address ID
-        :rtype: str
-        :param s: requests.Session() to use
-        :param base_url: Base URL to use
-        :param csrf_token: CSRF token to use
-        :param postcode: Postcode to use
-        :param paon: House number/address to find
-        :return: address ID
-    """
-    address_id = "0"
-    # Get the addresses for the postcode
-    form_data = {
-        "code_action": "search",
-        "code_params": '{"search_item":"' + postcode + '","is_ss":true}',
-        "fragment_action": "handle_event",
-        "fragment_id": "PCF0020408EECEC1",
-        "fragment_collection_class": "formtable",
-        "fragment_collection_editable_values": '{"PCF0021449EECEC1":"1"}',
-        "_session_storage": json.dumps(
-            {
-                "/wasteservices/w/webpage/bin-day-enter-address": {},
-                "_global": get_session_storage_global(),
-            }
-        ),
-        "action_cell_id": "PCL0005629EECEC1",
-        "action_page_id": "PAG0000898EECEC1",
-        "form_check_ajax": csrf_token,
-    }
-    response = s.post(
-        base_url
-        + "/wasteservices/w/webpage/bin-day-enter-address?webpage_subpage_id=PAG0000898EECEC1"
-        "&webpage_token=faab02e1f62a58f7bad4c2ae5b8622e19846b97dde2a76f546c4bb1230cee044"
-        "&widget_action=fragment_action",
-        headers=get_headers(base_url, "POST"),
-        data=form_data,
-    )
-    if response.status_code == 200:
-        json_response = json.loads(response.text)
-        addresses = json_response["response"]["items"]
-        # Find the matching address id for the paon
-        for address in addresses:
-            # Check for full matches first
-            if address.get("dropdown_display_field") == paon:
-                address_id = address.get("id")
-                break
-        # Check for matching start if no full match found
-        if address_id == "0":
-            for address in addresses:
-                if address.get("dropdown_display_field").split()[0] == paon.strip():
-                    address_id = address.get("id")
-                    break
-        # Check match was found
-        if address_id == "0":
-            raise ValueError(
-                "Code 2: No matching address for house number/full address found."
-            )
-    else:
-        raise ValueError("Code 3: No addresses found for provided postcode.")
-    return address_id
-
-
-def get_collection_data(
-    s: requests.session, base_url: str, csrf_token: str, address_id: str
-) -> str:
-    """
-    Gets the collection data
-        :rtype: str
-        :param s: requests.Session() to use
-        :param base_url: Base URL to use
-        :param csrf_token: CSRF token to use
-        :param address_id: Address id to use
-        :param retries: Retries count
-        :return: Collection data
-    """
-    collection_data = ""
-    if address_id != "0":
-        form_data = {
-            "form_check": csrf_token,
-            "submitted_page_id": "PAG0000898EECEC1",
-            "submitted_widget_group_id": "PWG0002644EECEC1",
-            "submitted_widget_group_type": "modify",
-            "submission_token": "63e9126bacd815.12997577",
-            "payload[PAG0000898EECEC1][PWG0002644EECEC1][PCL0005629EECEC1][formtable]"
-            "[C_63e9126bacfb3][PCF0020408EECEC1]": address_id,
-            "payload[PAG0000898EECEC1][PWG0002644EECEC1][PCL0005629EECEC1][formtable]"
-            "[C_63e9126bacfb3][PCF0021449EECEC1]": "1",
-            "payload[PAG0000898EECEC1][PWG0002644EECEC1][PCL0005629EECEC1][formtable]"
-            "[C_63e9126bacfb3][PCF0020072EECEC1]": "Next",
-            "submit_fragment_id": "PCF0020072EECEC1",
-            "_session_storage": json.dumps({"_global": get_session_storage_global()}),
-            "_update_page_content_request": 1,
-            "form_check_ajax": csrf_token,
-        }
-        response = s.post(
-            base_url
-            + "/wasteservices/w/webpage/bin-day-enter-address?webpage_subpage_id=PAG0000898EECEC1"
-            "&webpage_token=faab02e1f62a58f7bad4c2ae5b8622e19846b97dde2a76f546c4bb1230cee044",
-            headers=get_headers(base_url, "POST"),
-            data=form_data,
-        )
-        if response.status_code == 200 and len(response.text) > 0:
-            json_response = json.loads(response.text)
-            form_data = {
-                "_dummy": 1,
-                "_session_storage": json.dumps(
-                    {"_global": get_session_storage_global()}
-                ),
-                "_update_page_content_request": 1,
-                "form_check_ajax": csrf_token,
-            }
-            response = s.post(
-                base_url + json_response["redirect_url"],
-                headers=get_headers(base_url, "POST"),
-                data=form_data,
-            )
-            if response.status_code == 200 and len(response.text) > 0:
-                json_response = json.loads(response.text)
-                collection_data = json_response["data"]
-            else:
-                raise ValueError("Code 4: Failed to get bin data.")
-        else:
-            raise ValueError(
-                "Code 5: Failed to get bin data. Too many requests. Please wait a few minutes before trying again."
-            )
-    return collection_data
-
-
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        requests.packages.urllib3.disable_warnings()
-        s = requests.Session()
-        base_url = "https://service.croydon.gov.uk"
-        paon = kwargs.get("paon")
-        postcode = kwargs.get("postcode")
-        check_paon(paon)
-        check_postcode(postcode)
-
-        # Firstly, get a CSRF (cross-site request forgery) token
-        csrf_token = get_csrf_token(s, base_url)
-        # Next, get the address_id
-        address_id = get_address_id(s, base_url, csrf_token, postcode, paon)
-        # Finally, use the address_id to get the collection data
-        collection_data = get_collection_data(s, base_url, csrf_token, address_id)
-        if collection_data != "":
-            soup = BeautifulSoup(collection_data, features="html.parser")
-            soup.prettify()
-
-            # Find the list elements
-            collection_record_elements = soup.find_all(
-                "div", {"class": "listing_template_record"}
-            )
-
-            # Form a JSON wrapper
-            data = {"bins": []}
-
-            for e in collection_record_elements:
-                collection_type = e.find("h2").get_text()
-                collection_date = e.find("span", {"class": "value-as-text"}).get_text()
-                dict_data = {
-                    "type": collection_type,
-                    "collectionDate": datetime.strptime(
-                        collection_date, "%A %d %B %Y"
-                    ).strftime(date_format),
-                }
-                data["bins"].append(dict_data)
-
-            if len(data["bins"]) == 0:
-                raise ValueError(
-                    "Code 5: No bin data found. Please ensure the council website is showing data first,"
-                    " then open an issue on GitHub."
-                )
-
-            return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/CumberlandAllerdaleCouncil.py b/uk_bin_collection/uk_bin_collection/councils/CumberlandAllerdaleCouncil.py
deleted file mode 100644
index c50d61cc2b..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/CumberlandAllerdaleCouncil.py
+++ /dev/null
@@ -1,93 +0,0 @@
-import requests
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_postcode = kwargs.get("postcode")
-        user_paon = kwargs.get("paon")
-        check_postcode(user_postcode)
-        check_paon(user_paon)
-        bindata = {"bins": []}
-
-        URI = "https://abc-wrp.whitespacews.com/"
-
-        session = requests.Session()
-
-        # get link from first page as has some kind of unique hash
-        r = session.get(
-            URI,
-        )
-        r.raise_for_status()
-        soup = BeautifulSoup(r.text, features="html.parser")
-
-        alink = soup.find("a", text="View My Collections")
-
-        if alink is None:
-            raise Exception("Initial page did not load correctly")
-
-        # greplace 'seq' query string to skip next step
-        nextpageurl = alink["href"].replace("seq=1", "seq=2")
-
-        data = {
-            "address_name_number": user_paon,
-            "address_postcode": user_postcode,
-        }
-
-        # get list of addresses
-        r = session.post(nextpageurl, data)
-        r.raise_for_status()
-
-        soup = BeautifulSoup(r.text, features="html.parser")
-
-        # get first address (if you don't enter enough argument values this won't find the right address)
-        alink = soup.find("div", id="property_list").find("a")
-
-        if alink is None:
-            raise Exception("Address not found")
-
-        nextpageurl = URI + alink["href"]
-
-        # get collection page
-        r = session.get(
-            nextpageurl,
-        )
-        r.raise_for_status()
-        soup = BeautifulSoup(r.text, features="html.parser")
-
-        if soup.find("span", id="waste-hint"):
-            raise Exception("No scheduled services at this address")
-
-        u1s = soup.find("section", id="scheduled-collections").find_all("u1")
-
-        for u1 in u1s:
-            lis = u1.find_all("li", recursive=False)
-
-            date = lis[1].text.replace("\n", "")
-            bin_type = lis[2].text.replace("\n", "")
-
-            dict_data = {
-                "type": bin_type,
-                "collectionDate": datetime.strptime(
-                    date,
-                    "%d/%m/%Y",
-                ).strftime(date_format),
-            }
-            bindata["bins"].append(dict_data)
-
-        bindata["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
-        )
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/DacorumBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/DacorumBoroughCouncil.py
deleted file mode 100644
index f9b9e3a208..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/DacorumBoroughCouncil.py
+++ /dev/null
@@ -1,102 +0,0 @@
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        driver = None
-        try:
-            data = {"bins": []}
-            user_paon = kwargs.get("paon")
-            user_postcode = kwargs.get("postcode")
-            web_driver = kwargs.get("web_driver")
-            headless = kwargs.get("headless")
-            check_paon(user_paon)
-            check_postcode(user_postcode)
-
-            # Create Selenium webdriver
-            driver = create_webdriver(web_driver, headless, None, __name__)
-            driver.get("https://webapps.dacorum.gov.uk/bincollections/")
-
-            # Wait for the postcode field to appear then populate it
-            inputElement_postcode = WebDriverWait(driver, 30).until(
-                EC.presence_of_element_located((By.ID, "txtBxPCode"))
-            )
-            inputElement_postcode.send_keys(user_postcode)
-
-            # Click search button
-            findAddress = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located((By.ID, "btnFindAddr"))
-            )
-            findAddress.click()
-
-            # Wait for the 'Select address' dropdown to appear and select option matching the house name/number
-            WebDriverWait(driver, 10).until(
-                EC.element_to_be_clickable(
-                    (
-                        By.XPATH,
-                        "//select[@id='lstBxAddrList']//option[contains(., '"
-                        + user_paon
-                        + "')]",
-                    )
-                )
-            ).click()
-
-            # Click search button
-            findDates = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located((By.ID, "MainContent_btnGetSchedules"))
-            )
-            findDates.click()
-
-            # Wait for the collections table to appear
-            WebDriverWait(driver, 30).until(
-                EC.presence_of_element_located((By.ID, "lblSelectedAddr"))
-            )
-
-            soup = BeautifulSoup(driver.page_source, features="html.parser")
-            soup.prettify()
-
-            # Get collections div
-            BinCollectionSchedule = soup.find("div", {"id": "MainContent_updPnl"})
-
-            NextCollections = BinCollectionSchedule.find_all(
-                "div", {"style": " margin:5px;"}
-            )
-
-            for Collection in NextCollections:
-                BinType = Collection.find("strong").text.strip()
-                if BinType:
-                    CollectionDate = datetime.strptime(
-                        Collection.find_all("div", {"style": "display:table-cell;"})[1]
-                        .get_text()
-                        .strip(),
-                        "%a, %d %b %Y",
-                    )
-                    dict_data = {
-                        "type": BinType,
-                        "collectionDate": CollectionDate.strftime("%d/%m/%Y"),
-                    }
-                    data["bins"].append(dict_data)
-
-        except Exception as e:
-            # Here you can log the exception if needed
-            print(f"An error occurred: {e}")
-            # Optionally, re-raise the exception if you want it to propagate
-            raise
-        finally:
-            # This block ensures that the driver is closed regardless of an exception
-            if driver:
-                driver.quit()
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/DartfordBoroughCouncil.py b/uk_bin_collection/uk_bin_collection/councils/DartfordBoroughCouncil.py
deleted file mode 100644
index 86d8f6531d..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/DartfordBoroughCouncil.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from bs4 import BeautifulSoup
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        # Make a BS4 object
-        soup = BeautifulSoup(page.text, features="html.parser")
-        soup.prettify()
-
-        # Extract data
-        bin_data = {"bins": []}
-
-        # Find the table containing the bin collection data
-        table = soup.find("table", {"class": "eb-EVDNdR1G-tableContent"})
-
-        if table:
-            rows = table.find_all("tr", class_="eb-EVDNdR1G-tableRow")
-
-            for row in rows:
-                columns = row.find_all("td")
-                if len(columns) >= 4:
-                    collection_type = columns[1].get_text(strip=True)
-                    collection_date = columns[3].get_text(strip=True)
-
-                    # Validate collection_date format
-                    if re.match(r"\d{2}/\d{2}/\d{4}", collection_date):
-                        bin_entry = {
-                            "type": collection_type,
-                            "collectionDate": collection_date,
-                        }
-                        bin_data["bins"].append(bin_entry)
-
-        return bin_data
diff --git a/uk_bin_collection/uk_bin_collection/councils/DerbyCityCouncil.py b/uk_bin_collection/uk_bin_collection/councils/DerbyCityCouncil.py
deleted file mode 100644
index 8f090ec057..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/DerbyCityCouncil.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import requests
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        URI = f"https://secure.derby.gov.uk/binday/Binday?search.PremisesId={user_uprn}"
-
-        # Make the GET request
-        session = requests.Session()
-        response = session.get(URI)
-
-        soup = BeautifulSoup(response.content, "html.parser")
-
-        # Find all divs with class "binresult" which contain the bin collection information
-        bin_results = soup.find_all("div", class_="binresult")
-
-        # Loop through each bin result to extract date and bin type
-        for result in bin_results:
-            # Find the collection date
-            date_text = result.find("p").strong.get_text(strip=True)
-
-            # Find the bin type by looking at the 'alt' attribute of the img tag
-            bin_type = result.find("img")["alt"]
-
-            if bin_type != "No bins":
-                dict_data = {
-                    "type": bin_type,
-                    "collectionDate": datetime.strptime(
-                        date_text,
-                        "%A, %d %B %Y:",
-                    ).strftime(date_format),
-                }
-                bindata["bins"].append(dict_data)
-
-        bindata["bins"].sort(
-            key=lambda x: datetime.strptime(x.get("collectionDate"), "%d/%m/%Y")
-        )
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/DerbyshireDalesDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/DerbyshireDalesDistrictCouncil.py
deleted file mode 100644
index dd10e04037..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/DerbyshireDalesDistrictCouncil.py
+++ /dev/null
@@ -1,100 +0,0 @@
-from bs4 import BeautifulSoup
-from selenium.webdriver.common.by import By
-from selenium.webdriver.support import expected_conditions as EC
-from selenium.webdriver.support.ui import Select
-from selenium.webdriver.support.wait import WebDriverWait
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        driver = None
-        try:
-            page = "https://selfserve.derbyshiredales.gov.uk/renderform.aspx?t=103&k=9644C066D2168A4C21BCDA351DA2642526359DFF"
-
-            data = {"bins": []}
-
-            user_uprn = kwargs.get("uprn")
-            user_postcode = kwargs.get("postcode")
-            web_driver = kwargs.get("web_driver")
-            headless = kwargs.get("headless")
-            check_uprn(user_uprn)
-            check_postcode(user_postcode)
-
-            # Create Selenium webdriver
-            driver = create_webdriver(web_driver, headless, None, __name__)
-            driver.get(page)
-
-            # Populate postcode field
-            inputElement_postcode = driver.find_element(
-                By.ID,
-                "ctl00_ContentPlaceHolder1_FF2924TB",
-            )
-            inputElement_postcode.send_keys(user_postcode)
-
-            # Click search button
-            driver.find_element(
-                By.ID,
-                "ctl00_ContentPlaceHolder1_FF2924BTN",
-            ).click()
-
-            # Wait for the 'Select address' dropdown to appear and select option matching UPRN
-            dropdown = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located(
-                    (By.ID, "ctl00_ContentPlaceHolder1_FF2924DDL")
-                )
-            )
-            # Create a 'Select' for it, then select the matching URPN option
-            dropdownSelect = Select(dropdown)
-            dropdownSelect.select_by_value("U" + user_uprn)
-
-            # Wait for the submit button to appear, then click it to get the collection dates
-            submit = WebDriverWait(driver, 10).until(
-                EC.presence_of_element_located(
-                    (By.ID, "ctl00_ContentPlaceHolder1_btnSubmit")
-                )
-            )
-            submit.click()
-
-            soup = BeautifulSoup(driver.page_source, features="html.parser")
-
-            bin_rows = (
-                soup.find("div", id="ctl00_ContentPlaceHolder1_pnlConfirmation")
-                .find("div", {"class": "row"})
-                .find_all("div", {"class": "row"})
-            )
-            if bin_rows:
-                for bin_row in bin_rows:
-                    bin_data = bin_row.find_all("div")
-                    if bin_data and bin_data[0] and bin_data[1]:
-                        collection_date = datetime.strptime(
-                            bin_data[0].get_text(strip=True), "%A%d %B, %Y"
-                        )
-                        dict_data = {
-                            "type": bin_data[1].get_text(strip=True),
-                            "collectionDate": collection_date.strftime(date_format),
-                        }
-                        data["bins"].append(dict_data)
-
-            data["bins"].sort(
-                key=lambda x: datetime.strptime(x.get("collectionDate"), date_format)
-            )
-        except Exception as e:
-            # Here you can log the exception if needed
-            print(f"An error occurred: {e}")
-            # Optionally, re-raise the exception if you want it to propagate
-            raise
-        finally:
-            # This block ensures that the driver is closed regardless of an exception
-            if driver:
-                driver.quit()
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/DoncasterCouncil.py b/uk_bin_collection/uk_bin_collection/councils/DoncasterCouncil.py
deleted file mode 100644
index a8ac56368b..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/DoncasterCouncil.py
+++ /dev/null
@@ -1,77 +0,0 @@
-import json
-import math
-from datetime import timedelta
-
-import requests
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        # I need to point out that this one gave me a good head scratch. Mainly because I wrote lots
-        # of code to parse the form and all that, then realised this url returns json data... oops.
-        base_url = "https://www.doncaster.gov.uk/Compass/PremiseDetail/GetCollectionsForCalendar"
-
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-
-        # Working with epoch times, otherwise known as posix/unix timestamps. The number of weeks
-        # to return can actually be customised in the below timedelta
-        today = math.floor(datetime.today().timestamp())
-        four_weeks = math.floor((datetime.today() + timedelta(days=4 * 7)).timestamp())
-
-        # For some reason, the actual web form uses a property id that's completely different
-        # from the uprn - luckily this one is easy to find!
-        params = {
-            "UPRN": user_uprn,
-            "Start": str(today),
-            "End": str(four_weeks),
-        }
-
-        requests.packages.urllib3.disable_warnings()
-        response = requests.get(base_url, params=params)
-
-        # 200 = ok. I got a 500 in testing, so assumed no data for that address
-        if response.status_code != 200:
-            raise ValueError("No bins found for provided UPRN.")
-
-        # Load the json results
-        json_results = json.loads(response.text)["slots"]
-
-        data = {"bins": []}
-        collections = []
-
-        # Each item is a dictionary, so accessing is easy
-        for item in json_results:
-            bin_type = item["title"]
-
-            # item["start"] actually returns a string, so we want to only take digits or +s.
-            # OK, we don't actually want the +s... or anything on the end of them, that's why
-            # we split the string then cast the remaining epoch to a float
-            epoch = "".join([i for i in item["start"] if i.isdigit() or i == "+"])
-            epoch = epoch.split("+")[0]
-            epoch = float(epoch)
-            bin_date = datetime.strptime(
-                str(datetime.fromtimestamp(epoch / 1000)), "%Y-%m-%d %H:%M:%S"
-            )
-            collections.append((bin_type, bin_date))
-
-            # This orders the data we just parsed to date order
-            ordered_data = sorted(collections, key=lambda x: x[1])
-            data = {"bins": []}
-            for bin in ordered_data:
-                dict_data = {
-                    "type": bin[0],
-                    "collectionDate": bin[1].strftime(date_format),
-                }
-                data["bins"].append(dict_data)
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/DorsetCouncil.py b/uk_bin_collection/uk_bin_collection/councils/DorsetCouncil.py
deleted file mode 100644
index 01c3d62bd5..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/DorsetCouncil.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from bs4 import BeautifulSoup, element
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        data = {"bins": []}
-        url_base = "https://geoapi.dorsetcouncil.gov.uk/v1/services/"
-        url_types = ["recyclingday", "refuseday", "foodwasteday", "gardenwasteday"]
-
-        uprn = kwargs.get("uprn")
-        # Check the UPRN is valid
-        check_uprn(uprn)
-
-        for url_type in url_types:
-            response = requests.get(f"{url_base}{url_type}/{uprn}")
-            if response.status_code != 200:
-                raise ConnectionError(f"Could not fetch from {url_type} endpoint")
-            if response.json()["values"]:
-                json_data = response.json()["values"][0]
-
-                next_collection_date = datetime.strptime(
-                    json_data.get("dateNextVisit"), "%Y-%m-%d"
-                )
-
-                dict_data = {
-                    "type": f"{json_data.get('type')} bin",
-                    "collectionDate": next_collection_date.strftime("%d/%m/%Y"),
-                }
-                data["bins"].append(dict_data)
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/DoverDistrictCouncil.py b/uk_bin_collection/uk_bin_collection/councils/DoverDistrictCouncil.py
deleted file mode 100644
index d326fde974..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/DoverDistrictCouncil.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from bs4 import BeautifulSoup
-from datetime import datetime
-import re
-from uk_bin_collection.uk_bin_collection.common import *  # Consider specific imports
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-class CouncilClass(AbstractGetBinDataClass):
-    def parse_data(self, page: str, **kwargs) -> dict:
-        soup = BeautifulSoup(page.text, "html.parser")
-
-        bins_data = {"bins": []}
-        bin_collections = []
-
-        results_wrapper = soup.find("div", {"class": "results-table-wrapper"})
-        if not results_wrapper:
-            return bins_data  # Return empty if the results wrapper is not found
-
-        bins = results_wrapper.find_all("div", {"class": "service-wrapper"})
-        for bin_item in bins:
-            service_name = bin_item.find("h3", {"class": "service-name"})
-            next_service = bin_item.find("td", {"class": "next-service"})
-
-            if service_name and next_service:
-                bin_type = service_name.get_text().replace("Collection", "bin").strip()
-                date_span = next_service.find("span", {"class": "table-label"})
-                date_text = (
-                    date_span.next_sibling.get_text().strip() if date_span else None
-                )
-
-                if date_text and re.match(r"\d{2}/\d{2}/\d{4}", date_text):
-                    try:
-                        bin_date = datetime.strptime(date_text, "%d/%m/%Y")
-                        bin_collections.append((bin_type, bin_date))
-                    except ValueError:
-                        continue
-
-        for bin_type, bin_date in sorted(bin_collections, key=lambda x: x[1]):
-            bins_data["bins"].append(
-                {
-                    "type": bin_type.capitalize(),
-                    "collectionDate": bin_date.strftime("%d/%m/%Y"),
-                }
-            )
-
-        return bins_data
diff --git a/uk_bin_collection/uk_bin_collection/councils/DudleyCouncil.py b/uk_bin_collection/uk_bin_collection/councils/DudleyCouncil.py
deleted file mode 100644
index 866b4e0f92..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/DudleyCouncil.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import time
-
-import requests
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        SESSION_URL = "https://my.dudley.gov.uk/authapi/isauthenticated?uri=https%253A%252F%252Fmy.dudley.gov.uk%252Fen%252FAchieveForms%252F%253Fform_uri%253Dsandbox-publish%253A%252F%252FAF-Process-373f5628-9aae-4e9e-ae09-ea7cd0588201%252FAF-Stage-52ec040b-10e6-440f-b964-23f924741496%252Fdefinition.json%2526redirectlink%253D%25252Fen%2526cancelRedirectLink%253D%25252Fen%2526consentMessage%253Dyes&hostname=my.dudley.gov.uk&withCredentials=true"
-
-        API_URL = "https://my.dudley.gov.uk/apibroker/runLookup"
-
-        data = {
-            "formValues": {
-                "My bins": {
-                    "uprnToCheck": {"value": user_uprn},
-                }
-            },
-        }
-
-        headers = {
-            "Content-Type": "application/json",
-            "Accept": "application/json",
-            "User-Agent": "Mozilla/5.0",
-            "X-Requested-With": "XMLHttpRequest",
-            "Referer": "https://my.dudley.gov.uk/fillform/?iframe_id=fillform-frame-1&db_id=",
-        }
-        s = requests.session()
-        r = s.get(SESSION_URL)
-        r.raise_for_status()
-        session_data = r.json()
-        sid = session_data["auth-session"]
-        params = {
-            "id": "64899d4c2574c",
-            "repeat_against": "",
-            "noRetry": "true",
-            "getOnlyTokens": "undefined",
-            "log_id": "",
-            "app_name": "AF-Renderer::Self",
-            # unix_timestamp
-            "_": str(int(time.time() * 1000)),
-            "sid": sid,
-        }
-        r = s.post(API_URL, json=data, headers=headers, params=params)
-        r.raise_for_status()
-        data = r.json()
-        rows_data = data["integration"]["transformed"]["rows_data"]["0"]
-        if not isinstance(rows_data, dict):
-            raise ValueError("Invalid data returned from API")
-        BIN_TYPES = [
-            ("refuseDate", "Refuse"),
-            ("recyclingDate", "Recycling"),
-            ("gardenDate", "Garden Waste"),
-        ]
-        bin_type_dict = dict(BIN_TYPES)
-
-        for row in rows_data.items():
-            if (row[0].endswith("Date")) and not row[0].endswith("EndDate"):
-                if row[1]:
-                    bin_type = bin_type_dict.get(row[0], row[0])
-                    collection_date = datetime.strptime(row[1], "%Y-%m-%d").strftime(
-                        "%d/%m/%Y"
-                    )
-                    dict_data = {"type": bin_type, "collectionDate": collection_date}
-                    bindata["bins"].append(dict_data)
-
-        return bindata
diff --git a/uk_bin_collection/uk_bin_collection/councils/DurhamCouncil.py b/uk_bin_collection/uk_bin_collection/councils/DurhamCouncil.py
deleted file mode 100644
index ce09124356..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/DurhamCouncil.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import re
-from datetime import datetime
-
-import requests
-from bs4 import BeautifulSoup
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        url = "https://www.durham.gov.uk/bincollections?uprn="
-        uprn = kwargs.get("uprn")
-        check_uprn(uprn)
-        url += uprn
-        requests.packages.urllib3.disable_warnings()
-        page = requests.get(url)
-
-        # Make a BS4 object
-        soup = BeautifulSoup(page.text, features="html.parser")
-
-        data = {"bins": []}
-
-        for bin_type in ["rubbish", "recycling", "gardenwaste"]:
-            bin_info = soup.find(class_=f"bins{bin_type}")
-
-            if bin_info:
-                collection_text = bin_info.get_text(strip=True)
-
-                if collection_text:
-                    results = re.search("\\d\\d? [A-Za-z]+ \\d{4}", collection_text)
-                    if results:
-                        date = datetime.strptime(results[0], "%d %B %Y")
-                        if date:
-                            data["bins"].append(
-                                {
-                                    "type": bin_type,
-                                    "collectionDate": date.strftime(date_format),
-                                }
-                            )
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/EalingCouncil.py b/uk_bin_collection/uk_bin_collection/councils/EalingCouncil.py
deleted file mode 100644
index 9ca266df9f..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/EalingCouncil.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import json
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-        api_url = "https://www.ealing.gov.uk/site/custom_scripts/WasteCollectionWS/home/FindCollection"
-        user_uprn = kwargs.get("uprn")
-
-        # Check the UPRN is valid
-        check_uprn(user_uprn)
-
-        # Create the form data
-        form_data = {
-            "UPRN": user_uprn,
-        }
-
-        # Make a request to the API
-        requests.packages.urllib3.disable_warnings()
-        response = requests.post(api_url, data=form_data)
-
-        json_data = json.loads(response.text)
-
-        data = {"bins": []}
-
-        for param in json_data["param2"]:
-            data["bins"].append(
-                {
-                    "type": param["Service"],
-                    "collectionDate": datetime.strptime(
-                        param["collectionDateString"], "%d/%m/%Y"
-                    ).strftime(date_format),
-                }
-            )
-
-        return data
diff --git a/uk_bin_collection/uk_bin_collection/councils/EastAyrshireCouncil.py b/uk_bin_collection/uk_bin_collection/councils/EastAyrshireCouncil.py
deleted file mode 100644
index f80cdcb076..0000000000
--- a/uk_bin_collection/uk_bin_collection/councils/EastAyrshireCouncil.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import requests
-from bs4 import BeautifulSoup
-
-from uk_bin_collection.uk_bin_collection.common import *
-from uk_bin_collection.uk_bin_collection.get_bin_data import AbstractGetBinDataClass
-
-
-# import the wonderful Beautiful Soup and the URL grabber
-class CouncilClass(AbstractGetBinDataClass):
-    """
-    Concrete classes have to implement all abstract operations of the
-    base class. They can also override some operations with a default
-    implementation.
-    """
-
-    def parse_data(self, page: str, **kwargs) -> dict:
-
-        user_uprn = kwargs.get("uprn")
-        check_uprn(user_uprn)
-        bindata = {"bins": []}
-
-        URI = f"https://www.east-ayrshire.gov.uk/Housing/RubbishAndRecycling/Collection-days/ViewYourRecyclingCalendar.aspx?r={user_uprn}"
-
-        # Make the GET request
-        response = requests.get(URI)
-
-        # Parse the HTML
-        soup = BeautifulSoup(response.content, "html.parser")
-
-        # Find each