Commit 7eeb477a authored by BAIRE Anthony's avatar BAIRE Anthony
Browse files

prevent web crawlers from seeing private apps

parent 4ce034d6
......@@ -9,7 +9,7 @@ RUN apt-get -q update && apt-get -q install \
nginx-light zip gcc python3-dev python3-pip python3-wheel python3-mysqldb \
python-mysqldb python3-crypto gunicorn3 python3-redis python-mysqldb \
python3-crypto python3-natsort python3-aiohttp python3-aioredis supervisor \
python3-ipy python3-django-taggit python3-iso8601
python3-ipy python3-django-taggit python3-iso8601 python3-robot-detection
COPY requirements.txt /tmp/
RUN cd /tmp && pip3 install -r requirements.txt && rm requirements.txt
......@@ -24,6 +24,7 @@ import zipfile
import iso8601
import natsort
import requests
import robot_detection
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.forms import PasswordChangeForm
......@@ -1035,6 +1036,16 @@ class JobCreate(SuccessMessageMixin, CreateView):
webapp = Webapp.objects.get(docker_name=self.kwargs['docker_name'])
kwargs['webapp'] = webapp
# Private apps should not be indexed by search engines
# -> return 404 if we detect a robot
# (but we let authenticated user through, so that the app is still
# usable if robot_detection gets broken)
if (webapp.private and self.request.user.is_anonymous() and
        robot_detection.is_robot(
            self.request.META.get("HTTP_USER_AGENT") or " ")):
    raise Http404
# Check if a readme is declared in the database
if webapp.readme:
readme_file = os.path.join(
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment