Updated ics_calendar to restore compatibility with HA

This commit is contained in:
2025-05-08 10:02:22 +02:00
parent fef90d5a78
commit ca599eab7a
21 changed files with 1329 additions and 234 deletions

View File

@@ -1,23 +1,25 @@
"""Provide CalendarData class."""
import zlib
from datetime import timedelta
from gzip import BadGzipFile, GzipFile
from logging import Logger
from threading import Lock
from urllib.error import ContentTooShortError, HTTPError, URLError
from urllib.request import (
HTTPBasicAuthHandler,
HTTPDigestAuthHandler,
HTTPPasswordMgrWithDefaultRealm,
build_opener,
install_opener,
urlopen,
)
import re
from logging import Logger
from math import floor
import httpx
import httpx_auth
from homeassistant.util.dt import now as hanow
# from urllib.error import ContentTooShortError, HTTPError, URLError
class CalendarData:
class DigestWithMultiAuth(httpx.DigestAuth, httpx_auth.SupportMultiAuth):
    """Digest authentication that can participate in httpx_auth multi-auth.

    httpx's DigestAuth alone cannot be combined with other auth schemes;
    mixing in httpx_auth.SupportMultiAuth allows it to be chained (e.g.
    ``Basic(...) + DigestWithMultiAuth(...)``).
    """

    def __init__(self, username: str, password: str):
        """Initialize the underlying DigestAuth with the given credentials."""
        # Call DigestAuth.__init__ directly; SupportMultiAuth needs no init.
        httpx.DigestAuth.__init__(self, username, password)
class CalendarData: # pylint: disable=R0902
"""CalendarData class.
The CalendarData class is used to download and cache calendar data from a
@@ -25,32 +27,33 @@ class CalendarData:
instance.
"""
opener_lock = Lock()
def __init__(
self, logger: Logger, name: str, url: str, min_update_time: timedelta
self,
async_client: httpx.AsyncClient,
logger: Logger,
conf: dict,
):
"""Construct CalendarData object.
:param async_client: An httpx.AsyncClient object for requests
:type httpx.AsyncClient
:param logger: The logger for reporting problems
:type logger: Logger
:param name: The name of the calendar (used for reporting problems)
:type name: str
:param url: The URL of the calendar
:type url: str
:param min_update_time: The minimum time between downloading data from
the URL when requested
:type min_update_time: timedelta
:param conf: Configuration options
:type conf: dict
"""
self._auth = None
self._calendar_data = None
self._headers = []
self._last_download = None
self._min_update_time = min_update_time
self._opener = None
self._min_update_time = conf["min_update_time"]
self.logger = logger
self.name = name
self.url = url
self.name = conf["name"]
self.url = conf["url"]
self.connection_timeout = None
self._httpx = async_client
def download_calendar(self) -> bool:
async def download_calendar(self) -> bool:
"""Download the calendar data.
This only downloads data if self.min_update_time has passed since the
@@ -59,20 +62,25 @@ class CalendarData:
returns: True if data was downloaded, otherwise False.
rtype: bool
"""
now = hanow()
self.logger.debug("%s: download_calendar start", self.name)
if (
self._calendar_data is None
or self._last_download is None
or (now - self._last_download) > self._min_update_time
or (hanow() - self._last_download) > self._min_update_time
):
self._last_download = now
self._calendar_data = None
next_url: str = self._make_url()
self.logger.debug(
"%s: Downloading calendar data from: %s", self.name, self.url
"%s: Downloading calendar data from: %s",
self.name,
next_url,
)
self._download_data()
await self._download_data(next_url)
self._last_download = hanow()
self.logger.debug("%s: download_calendar done", self.name)
return self._calendar_data is not None
self.logger.debug("%s: download_calendar skipped download", self.name)
return False
def get(self) -> str:
@@ -92,10 +100,8 @@ class CalendarData:
):
"""Set a user agent, accept header, and/or user name and password.
The user name and password will be set into an HTTPBasicAuthHandler and
an HTTPDigestAuthHandler. Both are attached to a new urlopener, so
that HTTP Basic Auth and HTTP Digest Auth will be supported when
opening the URL.
The user name and password will be set into an auth object that
supports both Basic Auth and Digest Auth for httpx.
If the user_agent parameter is not "", a User-agent header will be
added to the urlopener.
@@ -110,81 +116,63 @@ class CalendarData:
:type accept_header: str
"""
if user_name != "" and password != "":
passman = HTTPPasswordMgrWithDefaultRealm()
passman.add_password(None, self.url, user_name, password)
basic_auth_handler = HTTPBasicAuthHandler(passman)
digest_auth_handler = HTTPDigestAuthHandler(passman)
self._opener = build_opener(
digest_auth_handler, basic_auth_handler
)
self._auth = httpx_auth.Basic(
user_name, password
) + DigestWithMultiAuth(user_name, password)
additional_headers = []
if user_agent != "":
additional_headers.append(("User-agent", user_agent))
self._headers.append(("User-agent", user_agent))
if accept_header != "":
additional_headers.append(("Accept", accept_header))
if len(additional_headers) > 0:
if self._opener is None:
self._opener = build_opener()
self._opener.addheaders = additional_headers
self._headers.append(("Accept", accept_header))
def _decode_data(self, conn):
if (
"Content-Encoding" in conn.headers
and conn.headers["Content-Encoding"] == "gzip"
):
reader = GzipFile(fileobj=conn)
else:
reader = conn
try:
return self._decode_stream(reader.read()).replace("\0", "")
except zlib.error:
self.logger.error(
"%s: Failed to uncompress gzip data from url(%s): zlib",
self.name,
self.url,
)
except BadGzipFile as gzip_error:
self.logger.error(
"%s: Failed to uncompress gzip data from url(%s): %s",
self.name,
self.url,
gzip_error.strerror,
)
return None
def set_timeout(self, connection_timeout: float):
"""Set the connection timeout.
def _decode_stream(self, strm):
for encoding in "utf-8-sig", "utf-8", "utf-16":
try:
return strm.decode(encoding)
except UnicodeDecodeError:
continue
return None
:param connection_timeout: The timeout value in seconds.
:type connection_timeout: float
"""
self.connection_timeout = connection_timeout
def _download_data(self):
def _decode_data(self, data):
return data.replace("\0", "")
async def _download_data(self, url): # noqa: C901
"""Download the calendar data."""
self.logger.debug("%s: _download_data start", self.name)
try:
with CalendarData.opener_lock:
if self._opener is not None:
install_opener(self._opener)
with urlopen(self._make_url()) as conn:
self._calendar_data = self._decode_data(conn)
except HTTPError as http_error:
response = await self._httpx.get(
url,
auth=self._auth,
headers=self._headers,
follow_redirects=True,
timeout=self.connection_timeout,
)
if response.status_code >= 400:
raise httpx.HTTPStatusError(
"status error", request=None, response=response
)
self._calendar_data = self._decode_data(response.text)
self.logger.debug("%s: _download_data done", self.name)
except httpx.HTTPStatusError as http_status_error:
self.logger.error(
"%s: Failed to open url(%s): %s",
self.name,
self.url,
http_error.reason,
http_status_error.response.status_code,
)
except ContentTooShortError as content_too_short_error:
except httpx.TimeoutException:
self.logger.error(
"%s: Could not download calendar data: %s",
self.name,
content_too_short_error.reason,
"%s: Timeout opening url: %s", self.name, self.url
)
except URLError as url_error:
except httpx.DecodingError:
self.logger.error(
"%s: Failed to open url: %s", self.name, url_error.reason
"%s: Error decoding data from url: %s", self.name, self.url
)
except httpx.InvalidURL:
self.logger.error("%s: Invalid URL: %s", self.name, self.url)
except httpx.HTTPError:
self.logger.error(
"%s: Error decoding data from url: %s", self.name, self.url
)
except: # pylint: disable=W0702
self.logger.error(
@@ -192,7 +180,45 @@ class CalendarData:
)
def _make_url(self):
"""Replace templates in url and encode."""
now = hanow()
return self.url.replace("{year}", f"{now.year:04}").replace(
"{month}", f"{now.month:02}"
year: int = now.year
month: int = now.month
url = self.url
(month, year, url) = self._get_month_year(url, month, year)
return url.replace("{year}", f"{year:04}").replace(
"{month}", f"{month:02}"
)
def _get_year_as_months(self, url: str, month: int) -> int:
year_match = re.search("\\{year([-+])([0-9]+)\\}", url)
if year_match:
if year_match.group(1) == "-":
month = month - (int(year_match.group(2)) * 12)
else:
month = month + (int(year_match.group(2)) * 12)
url = url.replace(year_match.group(0), "{year}")
return (month, url)
def _get_month_year(self, url: str, month: int, year: int) -> int:
(month, url) = self._get_year_as_months(url, month)
print(f"month: {month}\n")
month_match = re.search("\\{month([-+])([0-9]+)\\}", url)
if month_match:
if month_match.group(1) == "-":
month = month - int(month_match.group(2))
else:
month = month + int(month_match.group(2))
if month < 1:
year -= floor(abs(month) / 12) + 1
month = month % 12
if month == 0:
month = 12
elif month > 12:
year += abs(floor(month / 12))
month = month % 12
if month == 0:
month = 12
year -= 1
url = url.replace(month_match.group(0), "{month}")
return (month, year, url)