mirror of https://github.com/esphome/esphome.git synced 2025-11-13 13:25:50 +00:00
J. Nick Koston
2025-10-19 14:54:31 -10:00
parent 20c65f70ed
commit db42983f0c


@@ -43,15 +43,31 @@ def patch_structhash():
 def patch_file_downloader():
-    """Patch PlatformIO's FileDownloader to add caching and retry on PackageException errors."""
+    """Patch PlatformIO's FileDownloader to add caching and retry on PackageException errors.
+
+    This function attempts to patch PlatformIO's internal download mechanism.
+    If patching fails (due to API changes), it gracefully falls back to no caching.
+    """
+    try:
-    from platformio.package.download import FileDownloader
-    from platformio.package.exception import PackageException
+        from platformio.package.download import FileDownloader
+        from platformio.package.exception import PackageException
+    except ImportError as e:
+        _LOGGER.debug("Could not import PlatformIO modules for patching: %s", e)
+        return
 
     # Import our cache module
     from esphome.github_cache import GitHubCache
 
     _LOGGER.debug("Applying GitHub download cache patch...")
 
+    # Verify the classes have the expected methods before patching
+    if not hasattr(FileDownloader, "__init__") or not hasattr(FileDownloader, "start"):
+        _LOGGER.warning(
+            "PlatformIO FileDownloader API has changed, skipping cache patch"
+        )
+        return
+
+    try:
-    original_init = FileDownloader.__init__
-    original_start = FileDownloader.start
+        original_init = FileDownloader.__init__
+        original_start = FileDownloader.start
@@ -62,9 +78,13 @@ def patch_file_downloader():
-    cache = GitHubCache(cache_dir=cache_dir)
-    if not cache_dir_existed:
-        _LOGGER.info("Created GitHub download cache at: %s", cache.cache_dir)
+        cache = GitHubCache(cache_dir=cache_dir)
+        if not cache_dir_existed:
+            _LOGGER.info("Created GitHub download cache at: %s", cache.cache_dir)
+    except Exception as e:
+        _LOGGER.warning("Failed to initialize GitHub download cache: %s", e)
+        return
 
     def patched_init(self, *args, **kwargs):
         """Patched init that checks cache before making HTTP connection."""
+        try:
-        # Extract URL from args (first positional argument)
-        url = args[0] if args else kwargs.get("url")
-        dest_dir = args[1] if len(args) > 1 else kwargs.get("dest_dir")
+            # Extract URL from args (first positional argument)
+            url = args[0] if args else kwargs.get("url")
+            dest_dir = args[1] if len(args) > 1 else kwargs.get("dest_dir")
@@ -88,7 +108,9 @@ def patch_file_downloader():
                         "[GitHub Cache] Found in cache: %s", self._esphome_use_cache
                     )
                 else:
-                    _LOGGER.debug("[GitHub Cache] Not in cache, will download and cache")
+                    _LOGGER.debug(
+                        "[GitHub Cache] Not in cache, will download and cache"
+                    )
             else:
                 self._esphome_use_cache = None
                 if url and str(url).startswith("http"):
@@ -98,7 +120,12 @@ def patch_file_downloader():
             if self._esphome_use_cache:
                 # Skip HTTP connection, we'll handle this in start()
                 # Set minimal attributes to satisfy FileDownloader
-                self._http_session = None
+                # Create a mock session that can be safely closed in __del__
+                class MockSession:
+                    def close(self):
+                        pass
+
+                self._http_session = MockSession()
                 self._http_response = None
                 self._fname = Path(url.split("?")[0]).name
                 self._destination = self._fname
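Note on the hunk above: replacing self._http_session = None with a tiny MockSession is a null-object fix. The diff's own comment says the object must be "safely closed in __del__", which implies FileDownloader's teardown calls close() on the session. The sketch below is not ESPHome or PlatformIO code; DownloaderLike is a hypothetical stand-in that only mirrors that cleanup pattern under the assumption that teardown calls close().

class MockSession:
    """Stand-in session whose close() is a harmless no-op."""

    def close(self) -> None:
        pass


class DownloaderLike:
    """Hypothetical downloader that closes its session when it is destroyed."""

    def __init__(self, use_mock: bool) -> None:
        self._http_session = MockSession() if use_mock else None

    def __del__(self) -> None:
        # With None this would raise AttributeError inside __del__ (Python reports
        # it as "Exception ignored in ..."); with MockSession it is a silent no-op.
        self._http_session.close()


d = DownloaderLike(use_mock=True)
del d  # teardown runs cleanly because close() exists on the mock object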
@@ -126,9 +153,16 @@ def patch_file_downloader():
                         # Final attempt - re-raise
                         raise
             return None
+        except Exception as e:
+            # If anything goes wrong in our cache logic, fall back to normal download
+            _LOGGER.debug("Cache check failed, falling back to normal download: %s", e)
+            self._esphome_cache_url = None
+            self._esphome_use_cache = None
+            return original_init(self, *args, **kwargs)
 
     def patched_start(self, *args, **kwargs):
         """Patched start that uses cache when available."""
+        try:
-        import shutil
+            import shutil
 
-        # Get the cache URL and path that were set in __init__
+            # Get the cache URL and path that were set in __init__
@@ -140,7 +174,8 @@ def patch_file_downloader():
                 try:
                     shutil.copy2(cached_file, self._destination)
                     _LOGGER.info(
-                        "Restored %s from cache (avoided download)", Path(cached_file).name
+                        "Restored %s from cache (avoided download)",
+                        Path(cached_file).name,
                     )
                     return True
                 except OSError as e:
@@ -158,9 +193,18 @@ def patch_file_downloader():
                     _LOGGER.debug("Failed to save to cache: %s", e)
             return result
+        except Exception as e:
+            # If anything goes wrong, fall back to normal download
+            _LOGGER.debug("Cache restoration failed, using normal download: %s", e)
+            return original_start(self, *args, **kwargs)
 
+    # Apply the patches
+    try:
-    FileDownloader.__init__ = patched_init
-    FileDownloader.start = patched_start
+        FileDownloader.__init__ = patched_init
+        FileDownloader.start = patched_start
+        _LOGGER.debug("GitHub download cache patch applied successfully")
+    except Exception as e:
+        _LOGGER.warning("Failed to apply GitHub download cache patch: %s", e)
 
 IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
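Taken together, the hunks follow one defensive monkey-patching pattern: keep a reference to the original method, try the cache-aware path first, and fall back to the original behavior if anything fails, so an upstream API change degrades to a normal download instead of breaking the build. The sketch below shows that pattern in isolation; ThirdPartyDownloader, fetch_from_cache, and patch_downloader are hypothetical stand-ins, not PlatformIO or ESPHome APIs.

import logging

_LOGGER = logging.getLogger(__name__)


class ThirdPartyDownloader:
    """Hypothetical library class whose behavior we want to extend."""

    def start(self, url: str) -> str:
        return f"downloaded {url}"


def fetch_from_cache(url: str) -> str:
    """Hypothetical cache lookup; here it always misses."""
    raise FileNotFoundError(url)


def patch_downloader() -> None:
    # Keep a reference to the original method so the fallback path still works.
    original_start = ThirdPartyDownloader.start

    def patched_start(self: ThirdPartyDownloader, url: str) -> str:
        try:
            return fetch_from_cache(url)
        except Exception as e:
            # Any cache problem degrades gracefully to the original behavior.
            _LOGGER.debug("Cache failed (%s), falling back to normal download", e)
            return original_start(self, url)

    try:
        ThirdPartyDownloader.start = patched_start
    except Exception as e:
        _LOGGER.warning("Failed to apply downloader patch: %s", e)


patch_downloader()
print(ThirdPartyDownloader().start("https://example.com/file.bin"))
# -> "downloaded https://example.com/file.bin" (cache missed, original method ran)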