add robots.txt
parent b4e01e129d
commit 563630547a
@@ -115,7 +115,7 @@ def worker(backend_url):
     if not selected_model:
         selected_model = backend_info['model']
 
-    logger.debug(f"Starting using {backend_url} and {selected_model}. Online: {backend_info['online']}")
+    logger.debug(f"Starting using {backend_url} and {selected_model}. Online: {backend_info['online']}. Streaming: {do_stream}")
 
     try:
         stream_redis.delete(get_stream_name(worker_id))  # clean up any old streams

server.py (13 lines changed)

@@ -10,7 +10,7 @@ import sys
 from pathlib import Path
 
 import simplejson as json
-from flask import Flask, jsonify, render_template, request
+from flask import Flask, jsonify, render_template, request, Response
 
 import config
 from llm_server import opts
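
Side note on the import change above, since it only makes sense together with the new route below: a bare string returned from a Flask view is served as text/html, so the handler needs a Response object to set a plain-text Content-Type. A minimal standalone sketch (hypothetical app, not part of this commit):

from flask import Flask, Response

app = Flask(__name__)

@app.route('/as-string')
def as_string():
    return "hello"  # served with Flask's default text/html mimetype

@app.route('/as-response')
def as_response():
    r = Response("hello")
    r.headers['Content-Type'] = 'text/plain'  # explicit plain-text header
    return r
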
@@ -168,6 +168,17 @@ def home():
     )
 
 
+@app.route('/robots.txt')
+def robots():
+    # TODO: have config value to deny all
+    # TODO: https://developers.google.com/search/docs/crawling-indexing/robots/create-robots-txt
+    t = """User-agent: *
+Allow: /"""
+    r = Response(t)
+    r.headers['Content-Type'] = 'text/plain'
+    return r
+
+
 @app.route('/<first>')
 @app.route('/<first>/<path:rest>')
 def fallback(first=None, rest=None):
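
A quick way to sanity-check the new endpoint, sketched with Flask's test client (assumption: server.py exposes its Flask application object as app, which the @app.route decorators suggest, and importing the module has no unwanted side effects):

from server import app  # hypothetical import path, per the assumption above

with app.test_client() as client:
    resp = client.get('/robots.txt')
    print(resp.status_code)               # expected: 200
    print(resp.headers['Content-Type'])   # expected: text/plain
    print(resp.get_data(as_text=True))    # expected: "User-agent: *" then "Allow: /"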