import os
+import logging
+logger = logging.getLogger("fnp.wiki")
from django.conf import settings
+
from django.views.generic.simple import direct_to_template
-from django.http import HttpResponse, Http404
-from django.utils import simplejson as json
+from django.views.decorators.http import require_POST, require_GET
+from django.core.urlresolvers import reverse
+from wiki.helpers import (JSONResponse, JSONFormInvalid, JSONServerError,
+ ajax_require_permission, recursive_groupby)
+from django import http
+from django.shortcuts import get_object_or_404, redirect
+from django.http import Http404
-from wiki.models import storage, Document, DocumentNotFound
-from wiki.forms import DocumentForm
+from wiki.models import Book, Chunk, Theme
+from wiki.forms import (DocumentTextSaveForm, DocumentTextRevertForm,
+        DocumentTagForm, DocumentCreateForm, DocumentsUploadForm)
from datetime import datetime
+from django.utils.encoding import smart_unicode
+from django.utils.translation import ugettext_lazy as _
+from django.utils.decorators import decorator_from_middleware
+from django.middleware.gzip import GZipMiddleware
+
+
+#
+# Quick hack around caching problems, TODO: use ETags
+#
+from django.views.decorators.cache import never_cache
-# import google_diff
-# import difflib
import nice_diff
import operator
MAX_LAST_DOCS = 10
-class DateTimeEncoder(json.JSONEncoder):
- def default(self, obj):
- if isinstance(obj, datetime):
- return datetime.ctime(obj) + " " + (datetime.tzname(obj) or 'GMT')
- return json.JSONEncoder.default(self, obj)
-def document_list(request, template_name = 'wiki/document_list.html'):
- # TODO: find a way to cache "Storage All"
- return direct_to_template(request, template_name, extra_context = {
- 'document_list': storage.all(),
- 'last_docs': sorted(request.session.get("wiki_last_docs", {}).items(),
- key=operator.itemgetter(1), reverse = True)
- })
+@never_cache
+def document_list(request):
+ return direct_to_template(request, 'wiki/document_list.html', extra_context={
+ 'books': Book.objects.all(),
+ 'last_books': sorted(request.session.get("wiki_last_books", {}).items(),
+ key=lambda x: x[1]['time'], reverse=True),
+ })
-def document_detail(request, name, template_name = 'wiki/document_details.html'):
+@never_cache
+def editor(request, slug, chunk=None, template_name='wiki/document_details.html'):
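+    """Edit a chunk of a book; redirects to the create-missing view if the book does not exist."""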
try:
- document = storage.get(name)
- except DocumentNotFound:
+ chunk = Chunk.get(slug, chunk)
+ except Chunk.MultipleObjectsReturned:
+ # TODO: choice page
raise Http404
-
+ except Chunk.DoesNotExist:
+ if chunk is None:
+ try:
+ book = Book.objects.get(slug=slug)
+ except Book.DoesNotExist:
+ return http.HttpResponseRedirect(reverse("wiki_create_missing", args=[slug]))
+ else:
+ raise Http404
+
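+    # Remember recently visited documents in the session (trimmed to MAX_LAST_DOCS below).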
access_time = datetime.now()
- last_documents = request.session.get("wiki_last_docs", {})
- last_documents[name] = access_time
-
- if len(last_documents) > MAX_LAST_DOCS:
- oldest_key = min(last_documents, key = last_documents.__getitem__)
- del last_documents[oldest_key]
- request.session['wiki_last_docs'] = last_documents
-
+ last_books = request.session.get("wiki_last_books", {})
+ last_books[slug, chunk.slug] = {
+ 'time': access_time,
+ 'title': chunk.pretty_name(),
+ }
+
+ if len(last_books) > MAX_LAST_DOCS:
+ oldest_key = min(last_books, key=lambda x: last_books[x]['time'])
+ del last_books[oldest_key]
+ request.session['wiki_last_books'] = last_books
+
+ return direct_to_template(request, template_name, extra_context={
+ 'chunk': chunk,
+ 'forms': {
+ "text_save": DocumentTextSaveForm(prefix="textsave"),
+ "text_revert": DocumentTextRevertForm(prefix="textrevert"),
+ "add_tag": DocumentTagForm(prefix="addtag"),
+ },
+ 'REDMINE_URL': settings.REDMINE_URL,
+ })
+
+
+@require_GET
+def editor_readonly(request, slug, chunk=None, template_name='wiki/document_details_readonly.html'):
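+    """Read-only view of a chunk at the revision given in the 'revision' GET parameter."""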
+ try:
+ chunk = Chunk.get(slug, chunk)
+ revision = request.GET['revision']
+ except (Chunk.MultipleObjectsReturned, Chunk.DoesNotExist, KeyError):
+ raise Http404
+
+ access_time = datetime.now()
+ last_books = request.session.get("wiki_last_books", {})
+ last_books[slug, chunk.slug] = {
+ 'time': access_time,
+ 'title': chunk.book.title,
+ }
+
+ if len(last_books) > MAX_LAST_DOCS:
+ oldest_key = min(last_books, key=lambda x: last_books[x]['time'])
+ del last_books[oldest_key]
+ request.session['wiki_last_books'] = last_books
+
+ return direct_to_template(request, template_name, extra_context={
+ 'chunk': chunk,
+ 'revision': revision,
+ 'readonly': True,
+ 'REDMINE_URL': settings.REDMINE_URL,
+ })
+
+
+def create_missing(request, slug):
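+    """Create a new book under the given slug, with a title initially guessed from the slug."""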
+ slug = slug.replace(' ', '-')
+
+ if request.method == "POST":
+ form = DocumentCreateForm(request.POST, request.FILES)
+ if form.is_valid():
+
+ if request.user.is_authenticated():
+ creator = request.user
+ else:
+ creator = None
+ book = Book.create(creator=creator,
+ slug=form.cleaned_data['slug'],
+ title=form.cleaned_data['title'],
+ text=form.cleaned_data['text'],
+ )
+
+ return http.HttpResponseRedirect(reverse("wiki_editor", args=[book.slug]))
+ else:
+ form = DocumentCreateForm(initial={
+ "slug": slug,
+ "title": slug.replace('-', ' ').title(),
+ })
+
+ return direct_to_template(request, "wiki/document_create_missing.html", extra_context={
+ "slug": slug,
+ "form": form,
+ })
+
+
+def upload(request):
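+    """Create books in bulk from an uploaded ZIP archive of UTF-8 encoded XML files."""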
+ if request.method == "POST":
+ form = DocumentsUploadForm(request.POST, request.FILES)
+ if form.is_valid():
+            from slughifi import slughifi
+
+ if request.user.is_authenticated():
+ creator = request.user
+ else:
+ creator = None
+
+ zip = form.cleaned_data['zip']
+ skipped_list = []
+ ok_list = []
+ error_list = []
+ slugs = {}
+ existing = [book.slug for book in Book.objects.all()]
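+            # First pass: validate every entry (XML extension, unique slug, UTF-8) without creating anything.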
+ for filename in zip.namelist():
+ if filename[-1] == '/':
+ continue
+ title = os.path.basename(filename)[:-4]
+ slug = slughifi(title)
+ if not (slug and filename.endswith('.xml')):
+ skipped_list.append(filename)
+ elif slug in slugs:
+                    error_list.append((filename, slug, _('Slug already used for %s') % slugs[slug]))
+ elif slug in existing:
+ error_list.append((filename, slug, _('Slug already used in repository.')))
+ else:
+ try:
+ zip.read(filename).decode('utf-8') # test read
+ ok_list.append((filename, slug, title))
+ except UnicodeDecodeError:
+ error_list.append((filename, title, _('File should be UTF-8 encoded.')))
+ slugs[slug] = filename
+
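+
+            # Second pass: create the books only if the whole archive validated cleanly.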
+ if not error_list:
+ for filename, slug, title in ok_list:
+ Book.create(creator=creator,
+ slug=slug,
+ title=title,
+ text=zip.read(filename).decode('utf-8'),
+ )
+
+ return direct_to_template(request, "wiki/document_upload.html", extra_context={
+ "form": form,
+ "ok_list": ok_list,
+ "skipped_list": skipped_list,
+ "error_list": error_list,
+ })
+ else:
+ form = DocumentsUploadForm()
+
+ return direct_to_template(request, "wiki/document_upload.html", extra_context={
+ "form": form,
+ })
+
+
+@never_cache
+@decorator_from_middleware(GZipMiddleware)
+def text(request, slug, chunk=None):
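+    """Return (GET) or save (POST) the text of a chunk."""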
+ try:
+ doc = Chunk.get(slug, chunk).doc
+ except (Chunk.MultipleObjectsReturned, Chunk.DoesNotExist):
+ raise Http404
+
if request.method == 'POST':
-
- form = DocumentForm(request.POST, instance = document)
+ form = DocumentTextSaveForm(request.POST, prefix="textsave")
if form.is_valid():
- document = form.save()
- return HttpResponse(json.dumps({'text': document.plain_text, 'meta': document.meta(), 'revision': document.revision()}))
+ # TODO:
+ # - stage completion should be stored (as a relation)
+
+ if request.user.is_authenticated():
+ author = request.user
+ else:
+ author = None
+ text = form.cleaned_data['text']
+ parent_revision = form.cleaned_data['parent_revision']
+ parent = doc.at_revision(parent_revision)
+ doc.commit(author=author,
+ text=text,
+ parent=parent,
+ description=form.cleaned_data['comment'],
+ )
+ revision = doc.revision()
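+            # Include the stored text in the response only when the resulting
+            # head revision differs from the one the client saved against.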
+ return JSONResponse({
+ 'text': doc.materialize() if parent_revision != revision else None,
+ 'meta': {},
+ 'revision': revision,
+ })
else:
- return HttpResponse(json.dumps({'errors': form.errors}))
+ return JSONFormInvalid(form)
else:
- form = DocumentForm(instance = document)
+ revision = request.GET.get("revision", None)
+
+ try:
+ revision = int(revision)
+ except (ValueError, TypeError):
+ revision = None
- return direct_to_template(request, template_name, extra_context = {
- 'document': document,
- 'form': form,
- })
+ return JSONResponse({
+ 'text': doc.at_revision(revision).materialize(),
+ 'meta': {},
+ 'revision': revision if revision else doc.revision(),
+ })
+
+
+@never_cache
+def compiled(request, slug):
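+    """Serve the book's full XML text as a downloadable .xml file."""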
+ text = get_object_or_404(Book, slug=slug).materialize()
+
+    response = http.HttpResponse(text, mimetype='application/wl+xml')
+ response['Content-Disposition'] = 'attachment; filename=%s.xml' % slug
+ return response
+
+
+@never_cache
+@require_POST
+def revert(request, slug, chunk=None):
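+    """Revert a chunk to an earlier revision; the revert is recorded as a new commit."""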
+ form = DocumentTextRevertForm(request.POST, prefix="textrevert")
+ if form.is_valid():
+ try:
+ doc = Chunk.get(slug, chunk).doc
+ except (Chunk.MultipleObjectsReturned, Chunk.DoesNotExist):
+ raise Http404
+
+ revision = form.cleaned_data['revision']
+
+ comment = form.cleaned_data['comment']
+ comment += "\n#revert to %s" % revision
+
+ if request.user.is_authenticated():
+ author = request.user
+ else:
+ author = None
+ before = doc.revision()
+ logger.info("Reverting %s to %s", slug, revision)
+ doc.at_revision(revision).revert(author=author, description=comment)
-def document_gallery(request, directory):
+ return JSONResponse({
+ 'text': doc.materialize() if before != doc.revision() else None,
+ 'meta': {},
+ 'revision': doc.revision(),
+ })
+ else:
+ return JSONFormInvalid(form)
+
+
+@never_cache
+def gallery(request, directory):
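+    """Return a JSON list of image URLs (.jpg/.jpeg/.png) found in the given gallery directory."""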
try:
- base_dir = os.path.join(settings.MEDIA_ROOT, settings.FILEBROWSER_DIRECTORY, directory)
- images = [u'%s%s%s/%s' % (settings.MEDIA_URL, settings.FILEBROWSER_DIRECTORY, directory, f) for f in os.listdir(base_dir) if os.path.splitext(f)[1].lower() in (u'.jpg', u'.jpeg', u'.png')]
+ base_url = ''.join((
+ smart_unicode(settings.MEDIA_URL),
+ smart_unicode(settings.FILEBROWSER_DIRECTORY),
+ smart_unicode(directory)))
+
+ base_dir = os.path.join(
+ smart_unicode(settings.MEDIA_ROOT),
+ smart_unicode(settings.FILEBROWSER_DIRECTORY),
+ smart_unicode(directory))
+
+ def map_to_url(filename):
+ return "%s/%s" % (base_url, smart_unicode(filename))
+
+ def is_image(filename):
+            return os.path.splitext(filename)[1].lower() in (u'.jpg', u'.jpeg', u'.png')
+
+ images = [map_to_url(f) for f in map(smart_unicode, os.listdir(base_dir)) if is_image(f)]
images.sort()
- return HttpResponse(json.dumps(images))
- except (IndexError, OSError), exc:
- import traceback
- traceback.print_exc()
+ return JSONResponse(images)
+ except (IndexError, OSError):
+ logger.exception("Unable to fetch gallery")
+ raise http.Http404
+
+
+@never_cache
+def diff(request, slug, chunk=None):
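+    """Return an HTML diff table between the 'from' and 'to' revisions of a chunk."""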
+ revA = int(request.GET.get('from', 0))
+ revB = int(request.GET.get('to', 0))
+ if revA > revB:
+ revA, revB = revB, revA
+
+ if revB == 0:
+ revB = None
+
+ try:
+ doc = Chunk.get(slug, chunk).doc
+ except (Chunk.MultipleObjectsReturned, Chunk.DoesNotExist):
raise Http404
-
-def document_diff(request, name, revA, revB):
- docA = storage.get(name, int(revA))
- docB = storage.get(name, int(revB))
-
-
- return HttpResponse(nice_diff.html_diff_table(docA.plain_text.splitlines(),
- docB.plain_text.splitlines()) )
-
-
-def document_history(reuqest, name):
- return HttpResponse( json.dumps(storage.history(name), cls=DateTimeEncoder), mimetype='application/json')
+ docA = doc.at_revision(revA).materialize()
+ docB = doc.at_revision(revB).materialize()
+
+ return http.HttpResponse(nice_diff.html_diff_table(docA.splitlines(),
+ docB.splitlines(), context=3))
+
+
+@never_cache
+def revision(request, slug, chunk=None):
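+    """Return the chunk's current revision number as plain text."""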
+ try:
+ doc = Chunk.get(slug, chunk).doc
+ except (Chunk.MultipleObjectsReturned, Chunk.DoesNotExist):
+ raise Http404
+ return http.HttpResponse(str(doc.revision()))
+
+
+@never_cache
+def history(request, slug, chunk=None):
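+    """Return the chunk's change history as JSON, newest change first."""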
+ # TODO: pagination
+ try:
+ doc = Chunk.get(slug, chunk).doc
+ except (Chunk.MultipleObjectsReturned, Chunk.DoesNotExist):
+ raise Http404
+
+ changes = []
+ for change in doc.history().order_by('-created_at'):
+ if change.author:
+ author = "%s %s <%s>" % (
+ change.author.first_name,
+ change.author.last_name,
+ change.author.email)
+ else:
+ author = None
+ changes.append({
+ "version": change.revision,
+ "description": change.description,
+ "author": author,
+ "date": change.created_at,
+ "tag": [],
+ })
+ return JSONResponse(changes)
+
+
+
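+# The tagging and publishing views below are kept disabled: they still reference
+# the old storage backend and the external wlapi client.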
+"""
+import wlapi
+
+
+@require_POST
+@ajax_require_permission('wiki.can_change_tags')
+def add_tag(request, name):
+ name = normalize_name(name)
+ storage = getstorage()
+
+ form = DocumentTagForm(request.POST, prefix="addtag")
+ if form.is_valid():
+ doc = storage.get_or_404(form.cleaned_data['id'])
+ doc.add_tag(tag=form.cleaned_data['tag'],
+ revision=form.cleaned_data['revision'],
+ author=request.user.username)
+ return JSONResponse({"message": _("Tag added")})
+ else:
+ return JSONFormInvalid(form)
+
+
+@require_POST
+@ajax_require_permission('wiki.can_publish')
+def publish(request, name):
+ name = normalize_name(name)
+
+ storage = getstorage()
+ document = storage.get_by_tag(name, "ready_to_publish")
+
+ api = wlapi.WLAPI(**settings.WL_API_CONFIG)
+
+ try:
+ return JSONResponse({"result": api.publish_book(document)})
+ except wlapi.APICallException, e:
+ return JSONServerError({"message": str(e)})
+"""
+
+def themes(request):
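+    """Autocomplete endpoint: theme names starting with the 'q' GET parameter, one per line."""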
+ prefix = request.GET.get('q', '')
+    return http.HttpResponse('\n'.join(smart_unicode(t) for t in Theme.objects.filter(name__istartswith=prefix)))