# robots.txt for Trackility
# Allow all crawlers by default
User-agent: *
# Disallow private/authenticated pages
Disallow: /dashboard
Disallow: /login
Disallow: /logout
Disallow: /register
Disallow: /settings
# Disallow data entry/modification pages
Disallow: /add_charge_rule
Disallow: /add_counter
Disallow: /add_index
Disallow: /add_invoice
Disallow: /add_location
Disallow: /add_renter_payment
Disallow: /edit_charge_rule
Disallow: /edit_location
Disallow: /delete_location
# Disallow data viewing pages (likely require authentication)
Disallow: /charge_rules
Disallow: /counters
Disallow: /invoices
Disallow: /locations
Disallow: /renter_payments
Disallow: /renter_summary
Disallow: /stats
# Disallow API endpoints
Disallow: /backend
Disallow: /get_counters_for_location
Disallow: /get_recent_readings
# Disallow error pages
Disallow: /404
# Allow public informational pages (welcome, how, privacy, terms, index)
# These are allowed by default since they're not explicitly disallowed
# Crawl-delay (optional - uncomment if needed)
# Crawl-delay: 1