Mirror of https://git.nolog.cz/NoLog.cz/headline.git, synced 2025-01-31 11:53:35 +01:00
create robots.txt to stop crawlers
commit e61fc639db
parent be7b62ee00
2 changed files with 10 additions and 2 deletions
view/app.py (10 changed lines)

@@ -1,8 +1,7 @@
 #!/usr/bin/python3
 
 import sqlite3
-from flask import Flask, request, render_template
-from flask import g
+from flask import Flask, request, render_template, g, send_from_directory
 from flask_paginate import Pagination, get_page_parameter
 import confuse
 
@@ -75,5 +74,12 @@ def feed_list():
         }
         feeds.append(feed)
     return render_template('feeds.html', feeds=feeds)
+
+
+@app.route('/robots.txt')
+def static_from_root():
+    return send_from_directory(app.static_folder, request.path[1:])
+
+
 if __name__ == "__main__":
     app.run(host="0.0.0.0")
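The new route maps the /robots.txt URL onto Flask's static folder: request.path is "/robots.txt", so request.path[1:] strips the leading slash and send_from_directory serves the file from app.static_folder, which by default is the "static" directory next to the module (here view/static/). A minimal self-contained sketch of the same idea, stripped of the app's other routes and config (not part of this commit):

#!/usr/bin/python3
# Sketch only: shows how the route added in this commit serves robots.txt.
from flask import Flask, request, send_from_directory

app = Flask(__name__)  # static_folder defaults to "static" next to this file

@app.route('/robots.txt')
def static_from_root():
    # GET /robots.txt -> request.path[1:] == "robots.txt",
    # so Flask returns <static folder>/robots.txt
    return send_from_directory(app.static_folder, request.path[1:])

if __name__ == "__main__":
    app.run(host="0.0.0.0")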
view/static/robots.txt (2 changed lines, new file)

@@ -0,0 +1,2 @@
+User-agent: *
+Disallow: /
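These two directives ask every crawler that honours robots.txt to stay away from the whole site: "User-agent: *" matches any bot and "Disallow: /" covers every path. A quick check with Python's standard-library parser (a sketch, not part of this commit) shows the effect:

# Sketch: verify the new robots.txt rules with urllib.robotparser.
from urllib.robotparser import RobotFileParser

rp = RobotFileParser()
rp.parse(["User-agent: *", "Disallow: /"])

print(rp.can_fetch("SomeBot", "/"))       # False: root is disallowed
print(rp.can_fetch("SomeBot", "/feeds"))  # False: every path is disallowed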