secrets tab, drop commute filter, favicon, robust error reports
1. Admin → Geheimnisse sub-tab lets you edit ANTHROPIC_API_KEY + BERLIN_WOHNEN_USERNAME/PASSWORD at runtime. Migration v7 adds a secrets(key,value,updated_at) table; startup seeds missing keys from env (idempotent). web reads secrets DB-first (env fallback) via llm._api_key(); alert fetches them from web /internal/secrets on each scan, passes them into Scraper(). Rotating creds no longer needs a redeploy. Masked display: 6 leading + 4 trailing chars, "…" in the middle. Blank form fields leave the stored value untouched. 2. Drop the max_morning_commute filter from UI + server + FILTER_KEYS + filter summary (the underlying Maps.calculate_score code stays for potential future re-enable). 3. /static/didi.webp wired as favicon via <link rel="icon"> in base.html. 4. apply.open_page wraps page.goto in try/except so a failed load still produces a "goto.failed" step + screenshot instead of returning an empty forensics blob. networkidle + post-submission sleep are also made best-effort. The error ZIP export already writes screenshot+HTML per step and final_html — with this change every apply run leaves a reconstructable trail even when the listing is already offline. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
9fbe1ce728
commit
3bb04210c4
12 changed files with 211 additions and 27 deletions
|
|
@ -71,7 +71,13 @@ class FlatAlerter:
|
|||
|
||||
def scan(self):
|
||||
logger.info("starting scan")
|
||||
scraper = Scraper()
|
||||
# Pull fresh creds from web each scan so admin edits take effect
|
||||
# without a redeploy.
|
||||
secrets = self.web.fetch_secrets()
|
||||
scraper = Scraper(
|
||||
username=secrets.get("BERLIN_WOHNEN_USERNAME", ""),
|
||||
password=secrets.get("BERLIN_WOHNEN_PASSWORD", ""),
|
||||
)
|
||||
if not scraper.login():
|
||||
return
|
||||
flats_data = scraper.get_flats()
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import requests
|
|||
import re
|
||||
import logging
|
||||
from bs4 import BeautifulSoup
|
||||
from settings import BERLIN_WOHNEN_USERNAME, BERLIN_WOHNEN_PASSWORD
|
||||
from settings import BERLIN_WOHNEN_USERNAME, BERLIN_WOHNEN_PASSWORD # env fallback
|
||||
|
||||
logger = logging.getLogger("flat-alert")
|
||||
|
||||
|
|
@ -19,11 +19,16 @@ class Scraper:
|
|||
'Upgrade-Insecure-Requests': '1',
|
||||
}
|
||||
|
||||
def __init__(self, username: str = "", password: str = ""):
    """Create a scraper with its own HTTP session.

    Credentials passed in (e.g. pulled from the web service's runtime
    secrets store) take precedence; empty values fall back to the
    env-derived module constants, which act as bootstrap defaults.
    """
    # Explicit argument wins; the env constant is only the fallback.
    self.username = username if username else BERLIN_WOHNEN_USERNAME
    self.password = password if password else BERLIN_WOHNEN_PASSWORD
    # One shared Session so the login cookie persists across requests.
    session = requests.Session()
    session.headers.update(self.HEADERS)
    self.session = session
|
||||
|
||||
def login(self):
|
||||
if not self.username or not self.password:
|
||||
logger.critical("BERLIN_WOHNEN credentials missing — nothing to log in with")
|
||||
return False
|
||||
logger.info("fetching inberlinwohnen.de login page")
|
||||
resp_login_page = self.session.get(self.URL_LOGIN, timeout=30)
|
||||
token_search = re.search(r'name="csrf-token" content="([^"]+)"', resp_login_page.text)
|
||||
|
|
@ -34,8 +39,8 @@ class Scraper:
|
|||
|
||||
payload_login = {
|
||||
'_token': csrf_token,
|
||||
'email': BERLIN_WOHNEN_USERNAME,
|
||||
'password': BERLIN_WOHNEN_PASSWORD,
|
||||
'email': self.username,
|
||||
'password': self.password,
|
||||
'remember': 'on'
|
||||
}
|
||||
headers_login = self.HEADERS.copy()
|
||||
|
|
|
|||
|
|
@ -20,7 +20,9 @@ TIME_INTERVALL: int = int(getenv("SLEEP_INTERVALL", "60"))
|
|||
WEB_URL: str = getenv("WEB_URL", "http://web:8000")
|
||||
INTERNAL_API_KEY: str = _required("INTERNAL_API_KEY")
|
||||
|
||||
# secrets
|
||||
# secrets — BERLIN_WOHNEN_* env acts as bootstrap only; the web service
|
||||
# owns the live creds in its DB (admin UI), alert fetches them on each
|
||||
# scan via /internal/secrets. GMAPS_API_KEY is still env-only.
|
||||
GMAPS_API_KEY: str = _required("GMAPS_API_KEY")
|
||||
BERLIN_WOHNEN_USERNAME: str = _required("BERLIN_WOHNEN_USERNAME")
|
||||
BERLIN_WOHNEN_PASSWORD: str = _required("BERLIN_WOHNEN_PASSWORD")
|
||||
BERLIN_WOHNEN_USERNAME: str = getenv("BERLIN_WOHNEN_USERNAME", "")
|
||||
BERLIN_WOHNEN_PASSWORD: str = getenv("BERLIN_WOHNEN_PASSWORD", "")
|
||||
|
|
|
|||
|
|
@ -37,3 +37,18 @@ class WebClient:
|
|||
)
|
||||
except requests.RequestException:
|
||||
pass
|
||||
|
||||
def fetch_secrets(self) -> dict:
    """Pull the current runtime secrets dict from web.

    Returns an empty dict on ANY failure — network error, non-2xx
    status, or a malformed response body — so the caller can fall
    back to env values without special-casing.
    """
    try:
        r = requests.get(
            f"{self.base_url}/internal/secrets",
            headers=self.headers,
            timeout=5,
        )
        if r.ok:
            # Normalize a null/absent JSON body to {} for the caller.
            return r.json() or {}
    except (requests.RequestException, ValueError) as e:
        # ValueError (incl. requests' JSONDecodeError subclass) covers a
        # 200 response whose body is not valid JSON — without it, the
        # "empty on failure" contract promised above would be broken by
        # an uncaught exception from r.json().
        logger.warning(f"secrets fetch failed: {e}")
    return {}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue