Per review §1 — verified no callers before each deletion:
- _next_scrape_utc (context dict key never read by any template)
- ALERT_SCRAPE_INTERVAL_SECONDS settings import (only _next_scrape_utc read it)
- alert/paths.py (imported by nothing)
- alert/settings.py LANGUAGE (alert doesn't use translations.toml)
- alert/main.py: the vestigial `c = {}` connectivity dict, the comment
about re-enabling it, and the entire connectivity block in
_flat_payload — the web-side columns stay NULL on insert now
- alert/maps.py: DESTINATIONS, calculate_score, _get_next_weekday,
_calculate_transfers (only geocode is used in the scraper)
- alert/flat.py: connectivity + display_address properties,
_connectivity field, unused datetime import
- apply/utils.py str_to_preview (no callers) — file removed
- web/matching.py: max_morning_commute + commute check
- web/app.py: don't pass connectivity dict into flat_matches_filter,
don't write email_address through update_notifications
- web/db.py: get_error (no callers); drop kill_switch,
max_morning_commute, email_address from their allowed-sets so they're
not writable through update_* anymore
- web/settings.py + docker-compose.yml: SMTP_HOST/PORT/USERNAME/PASSWORD/
FROM/STARTTLS (notifications.py is telegram-only now)
DB columns themselves (kill_switch, email_address, max_morning_commute,
connectivity_morning_time, connectivity_night_time) stay in the schema
— SQLite can't drop them cheaply and they're harmless.
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
60 lines
2.4 KiB
Python
import re
|
|
from urllib.parse import quote
|
|
from rich.markup import escape
|
|
|
|
import maps
|
|
|
|
|
|
class Flat:
    """A single flat listing built from a raw scrape dict.

    Numeric fields arrive as German-formatted strings ("1.234,56") and are
    parsed to floats; textual fields are stored as-is. Geocoding is done
    lazily through the ``coords`` property via ``maps.Maps``.
    """

    def __init__(self, data):
        """Initialize from a raw data dict with German field names.

        Missing keys default to '' (textual) or parse to 0.0 (numeric).
        The full dict is retained in ``raw_data`` for debugging.
        """
        self.link = data.get('link', '')
        self.address = data.get('Adresse', '')
        self.rooms = self._parse_german_float(data.get('Zimmeranzahl', '0'))
        self.size = self._parse_german_float(data.get('Wohnfläche', '0'))
        self.cold_rent = self._parse_german_float(data.get('Kaltmiete', '0'))
        self.utilities = self._parse_german_float(data.get('Nebenkosten', '0'))
        self.total_rent = self._parse_german_float(data.get('Gesamtmiete', '0'))
        self.available_from = data.get('Bezugsfertig ab', '')
        self.published_on = data.get('Eingestellt am', '')
        self.wbs = data.get('WBS', '')
        self.floor = data.get('Etage', '')
        self.bathrooms = data.get('Badezimmer', '')
        self.year_built = data.get('Baujahr', '')
        self.heating = data.get('Heizung', '')
        self.energy_carrier = data.get('Hauptenergieträger', '')
        self.energy_value = data.get('Energieverbrauchskennwert', '')
        self.energy_certificate = data.get('Energieausweis', '')
        self.raw_data = data
        self.id = self.link  # we could use data.get('id', None) but link is easier to debug
        self.gmaps = maps.Maps()
        # Lazily populated by the `coords` property; (None, None) marks a
        # failed geocode so we don't retry on every access.
        self._coords = None
        self.address_link_gmaps = f"https://www.google.com/maps/search/?api=1&query={quote(self.address)}"

    def __str__(self):
        """Render as a rich-markup hyperlink labeled with the address."""
        # URL encode the link to ensure it doesn't contain characters that break markup
        # We preserve characters that are standard in URLs but encode problematic ones like brackets and spaces
        safe_chars = ":/?#@!$&'()*+,;=%-"
        escaped_link = quote(self.link, safe=safe_chars)
        return f"[link={escaped_link}]{escape(self.address)}[/link]"

    @staticmethod
    def _parse_german_float(text):
        """Parse a German-formatted number string ("1.234,56") to a float.

        Returns 0.0 for empty or unparseable input. Uses no instance
        state, hence a staticmethod (existing ``self._parse_german_float``
        call sites keep working unchanged).
        """
        if not text:
            return 0.0
        # Drop currency symbols/units, then remove the '.' thousands
        # separators and turn the decimal comma into a decimal point.
        clean_text = re.sub(r'[^\d,.]', '', text)
        clean_text = clean_text.replace('.', '').replace(',', '.')
        try:
            return float(clean_text)
        except ValueError:
            return 0.0

    @property
    def sqm_price(self):
        """Total rent per square meter; 0.0 when the size is unknown."""
        if self.size > 0:
            return self.total_rent / self.size
        return 0.0

    @property
    def coords(self):
        """Geocoded (lat, lon) of the address, cached after first lookup.

        A failed geocode is cached as (None, None) so subsequent accesses
        don't re-hit the geocoder.
        """
        if self._coords is None:
            self._coords = self.gmaps.geocode(self.address) or (None, None)
        return self._coords