Fix duplicate SEO route registration
Remove sitemap.xml and robots.txt route registrations from app.py since they're already defined in routes.py. This was causing an AssertionError on startup. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
4318b6cec5
commit
8c7c22b958
|
|
@@ -12,7 +12,6 @@ from smarttools.registry import app as registry_app
|
||||||
from . import web_bp
|
from . import web_bp
|
||||||
from .auth import login, register, logout
|
from .auth import login, register, logout
|
||||||
from .filters import register_filters
|
from .filters import register_filters
|
||||||
from .seo import sitemap_response, robots_txt
|
|
||||||
from .sessions import SQLiteSessionInterface, cleanup_expired_sessions
|
from .sessions import SQLiteSessionInterface, cleanup_expired_sessions
|
||||||
|
|
||||||
|
|
||||||
|
|
@@ -46,10 +45,6 @@ def create_web_app() -> Flask:
|
||||||
|
|
||||||
cleanup_expired_sessions()
|
cleanup_expired_sessions()
|
||||||
|
|
||||||
# SEO routes
|
|
||||||
app.add_url_rule("/sitemap.xml", endpoint="web.sitemap", view_func=sitemap_response)
|
|
||||||
app.add_url_rule("/robots.txt", endpoint="web.robots", view_func=robots_txt)
|
|
||||||
|
|
||||||
# Error handlers
|
# Error handlers
|
||||||
@app.errorhandler(404)
|
@app.errorhandler(404)
|
||||||
def not_found_error(error):
|
def not_found_error(error):
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue