create robots.txt to stop crawlers

mdivecky 2022-08-27 13:20:44 +02:00
parent be7b62ee00
commit e61fc639db
2 changed files with 10 additions and 2 deletions


@@ -1,8 +1,7 @@
 #!/usr/bin/python3
 import sqlite3
-from flask import Flask, request, render_template
-from flask import g
+from flask import Flask, request, render_template, g, send_from_directory
 from flask_paginate import Pagination, get_page_parameter
 import confuse
@@ -75,5 +74,12 @@ def feed_list():
         }
         feeds.append(feed)
     return render_template('feeds.html', feeds=feeds)
+
+@app.route('/robots.txt')
+def static_from_root():
+    return send_from_directory(app.static_folder, request.path[1:])
+
 if __name__ == "__main__":
     app.run(host="0.0.0.0")
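
For context: the added route serves robots.txt straight out of Flask's static folder, passing request.path with the leading slash stripped as the filename for send_from_directory. A minimal self-contained sketch of the same pattern, assuming a static/ directory containing robots.txt next to the script (the rest of the app's code is omitted here):

#!/usr/bin/python3
# Standalone sketch of the route added in this commit;
# assumes ./static/robots.txt exists relative to this file.
from flask import Flask, request, send_from_directory

app = Flask(__name__)

@app.route('/robots.txt')
def static_from_root():
    # request.path is "/robots.txt"; [1:] drops the leading slash so
    # send_from_directory looks up "robots.txt" inside app.static_folder.
    return send_from_directory(app.static_folder, request.path[1:])

if __name__ == "__main__":
    app.run(host="0.0.0.0")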

view/static/robots.txt Normal file

@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
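
The two rules above ask every crawler to stay away from the entire site. A small sketch of how a compliant crawler interprets them, using Python's standard urllib.robotparser; the localhost URL and the /feeds path are placeholders for illustration, not part of the commit:

import urllib.robotparser

# Point the parser at the robots.txt now served by the Flask route.
rp = urllib.robotparser.RobotFileParser()
rp.set_url("http://localhost:5000/robots.txt")
rp.read()

# "User-agent: *" plus "Disallow: /" means nothing may be fetched.
print(rp.can_fetch("*", "http://localhost:5000/"))       # False
print(rp.can_fetch("*", "http://localhost:5000/feeds"))  # False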