Container names, FastAPI titles, email subjects, filenames, brand text, session cookie, User-Agent, docstrings, README. Volume lazyflat_data and /data/lazyflat.sqlite already used the new name, so on-disk data is preserved; dropped the now-obsolete legacy-rename comments. Side effect: SESSION_COOKIE_NAME change logs everyone out on deploy. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
102 lines
3.2 KiB
Python
102 lines
3.2 KiB
Python
import logging
|
|
import time
|
|
from rich.console import Console
|
|
from rich.logging import RichHandler
|
|
|
|
from flat import Flat
|
|
from scraper import Scraper
|
|
from settings import TIME_INTERVALL
|
|
from utils import hash_any_object
|
|
from web_client import WebClient
|
|
|
|
|
|
def setup_logging():
    """Route all root logging through a Rich console handler.

    Also raises the level of chatty third-party loggers so that only
    their warnings and errors reach the console.
    """
    console_handler = RichHandler(markup=True, console=Console(width=110))
    logging.basicConfig(
        handlers=[console_handler],
        level=logging.INFO,
        format="%(message)s",
        datefmt="[%X]",
    )
    # These clients log every request at INFO; keep them quiet.
    for noisy in ("googlemaps", "urllib3"):
        logging.getLogger(noisy).setLevel(logging.WARNING)
|
|
|
|
|
|
# Module-wide logger for the alert service. Logging is configured once at
# import time, so any module imported afterwards inherits the Rich handler.
logger = logging.getLogger("alert")
setup_logging()
|
|
|
|
|
|
class FlatAlerter:
    """Polls the flat scraper and pushes new listings to the web backend."""

    def __init__(self):
        # Client for the web backend (secrets, flat submission, heartbeat).
        self.web = WebClient()
        # Hash of the last fully-processed scraper response; lets scan()
        # short-circuit when nothing changed between cycles.
        self.last_response_hash = ""

    def _flat_payload(self, flat: Flat) -> dict:
        """Serialize a Flat into the dict the web API expects."""
        lat, lng = flat.coords
        return {
            "id": flat.id,
            "link": flat.link,
            "address": flat.address,
            "rooms": flat.rooms,
            "size": flat.size,
            "cold_rent": flat.cold_rent,
            "utilities": flat.utilities,
            "total_rent": flat.total_rent,
            "sqm_price": flat.sqm_price,
            "available_from": flat.available_from,
            "published_on": flat.published_on,
            "wbs": flat.wbs,
            "floor": flat.floor,
            "bathrooms": flat.bathrooms,
            "year_built": flat.year_built,
            "heating": flat.heating,
            "energy_carrier": flat.energy_carrier,
            "energy_value": flat.energy_value,
            "energy_certificate": flat.energy_certificate,
            "address_link_gmaps": flat.address_link_gmaps,
            "lat": lat,
            "lng": lng,
            "raw_data": flat.raw_data,
        }

    def scan(self):
        """Run one scrape cycle: log in, fetch flats, submit each to the web.

        Skips the cycle when the scraper response is byte-identical to the
        previous one. The response hash is only remembered once *every* flat
        was submitted successfully — previously it was stored up front, so a
        failed submission was never actually retried despite the log message
        promising a retry (the next scan saw an "unchanged" hash and bailed).
        """
        logger.info("starting scan")
        # Pull fresh creds from web each scan so admin edits take effect
        # without a redeploy.
        secrets = self.web.fetch_secrets()
        scraper = Scraper(
            username=secrets.get("BERLIN_WOHNEN_USERNAME", ""),
            password=secrets.get("BERLIN_WOHNEN_PASSWORD", ""),
        )
        if not scraper.login():
            return
        flats_data = scraper.get_flats()

        response_hashed = hash_any_object(flats_data)
        if response_hashed == self.last_response_hash:
            logger.info("no change since last scan")
            return

        all_submitted = True
        for number, data in enumerate(flats_data, 1):
            flat = Flat(data)
            logger.info("%s: submitting %s", str(number).rjust(2), flat)
            payload = self._flat_payload(flat)
            if not self.web.submit_flat(payload):
                all_submitted = False
                logger.warning("\tcould not submit %s to web, will retry next loop", flat.id)
        # Commit the hash only after a clean run; otherwise the next scan
        # must process the same response again so failed flats get retried.
        if all_submitted:
            self.last_response_hash = response_hashed
        logger.info("scan finished")
|
|
|
|
|
|
if __name__ == "__main__":
    logger.info("starting lazyflat alert service")
    alerter = FlatAlerter()
    running = True
    # Main service loop: scan, heartbeat, sleep — forever, until Ctrl-C.
    while running:
        try:
            alerter.scan()
            alerter.web.heartbeat()
            logger.info(f"sleeping for {TIME_INTERVALL} seconds")
            time.sleep(TIME_INTERVALL)
        except KeyboardInterrupt:
            # Operator requested shutdown; leave the loop cleanly.
            running = False
        except Exception:
            # Top-level boundary: log the full traceback, back off briefly,
            # then keep the service alive.
            logger.exception("unexpected error")
            time.sleep(60)
|