# This file is part of Wolnelektury, licensed under GNU Affero GPLv3 or later.
# Copyright © Fundacja Nowoczesna Polska. See NOTICE for more information.
#
+import codecs
+import csv
+import cStringIO
import json
import os
from functools import wraps
import pytz
from inspect import getargspec
+import re
+from django.core.mail import send_mail
from django.http import HttpResponse
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils import timezone
from django.conf import settings
+from django.utils.translation import ugettext
tz = pytz.timezone(settings.TIME_ZONE)
return ajax_view
return decorator
+
+
+def send_noreply_mail(subject, message, recipient_list, **kwargs):
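+    """Sends mail from the no-reply address, with an auto-reply disclaimer appended."""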
+    send_mail(
+        u'[WolneLektury] ' + subject,
+        message + u"\n\n-- \n" + ugettext(u'Message sent automatically. Please do not reply.'),
+        'no-reply@wolnelektury.pl', recipient_list, **kwargs)
+
+
+# source: https://docs.python.org/2/library/csv.html#examples
+class UnicodeCSVWriter(object):
+ """
+ A CSV writer which will write rows to CSV file "f",
+ which is encoded in the given encoding.
+ """
+
+    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
+        # Redirect output to a queue
+        self.queue = cStringIO.StringIO()
+        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
+        self.stream = f
+        self.encoder = codecs.getincrementalencoder(encoding)()
+
+    def writerow(self, row):
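+        # "row" must be a sequence of unicode strings.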
+        self.writer.writerow([s.encode("utf-8") for s in row])
+        # Fetch UTF-8 output from the queue ...
+        data = self.queue.getvalue()
+        data = data.decode("utf-8")
+        # ... and reencode it into the target encoding
+        data = self.encoder.encode(data)
+        # write to the target stream
+        self.stream.write(data)
+        # empty queue
+        self.queue.truncate(0)
+
+    def writerows(self, rows):
+        for row in rows:
+            self.writerow(row)
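+
+# A minimal usage sketch (file name is illustrative): the target file
+# should be opened in binary mode, and rows must be unicode strings.
+#
+#     writer = UnicodeCSVWriter(open('books.csv', 'wb'), encoding='utf-8')
+#     writer.writerow([u'Pan Tadeusz', u'Adam Mickiewicz'])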
+
+
+# Python 2's re.escape escapes every non-alphanumeric character, which mangles
+# unicode letters; escape only the actual regex metacharacters instead.
+def re_escape(s):
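+    """Escapes regex metacharacters in "s", leaving all other characters intact."""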
+    return re.sub(r"[(){}\[\].*?|^$\\+-]", r"\\\g<0>", s)
+
+
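+# Substrings that commonly appear in bot and crawler User-Agent strings.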
+BOT_BITS = ['bot', 'slurp', 'spider', 'facebook', 'crawler', 'parser', 'http']
+
+
+def is_crawler(request):
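+    """Guesses whether the request comes from a bot rather than a human visitor.
+
+    Requests without a User-Agent header are treated as bots.
+    """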
+    user_agent = request.META.get('HTTP_USER_AGENT')
+    if not user_agent:
+        return True
+    user_agent = user_agent.lower()
+    return any(bot_bit in user_agent for bot_bit in BOT_BITS)