Merge branch 'development'
@@ -1,29 +1,31 @@
image: python:2.7
image: ubuntu:16.10
services:
- mysql
- mysql:latest
- postgres
before_script:
- apt-get update -qq
- apt-get install -qq python-dev gcc tesseract-ocr tesseract-ocr-deu unpaper ghostscript libjpeg-dev libpng-dev libtiff-dev poppler-utils libreoffice
- apt-get install -qq python-dev python-pip gcc gnupg1 tesseract-ocr tesseract-ocr-deu ghostscript libjpeg-dev libpng-dev libtiff-dev poppler-utils libreoffice
variables:
POSTGRES_DB: "mayan_edms"
POSTGRES_PASSWORD: "postgres"
MYSQL_ALLOW_EMPTY_PASSWORD: "yes"
MYSQL_DATABASE: "mayan_edms"
#test:mysql:
# script:
# - pip install -r requirements/testing.txt
# - pip install -q mysql-python
# - apt-get install -qq mysql-client
# - mysql -h"$MYSQL_PORT_3306_TCP_ADDR" -P"$MYSQL_PORT_3306_TCP_PORT" -uroot -p"$MYSQL_ENV_MYSQL_ROOT_PASSWORD" -e "ALTER DATABASE $MYSQL_DATABASE CHARACTER SET utf8 COLLATE utf8_unicode_ci;"
# - coverage run manage.py runtests --settings=mayan.settings.testing.gitlab-ci.db_mysql --nomigrations
# - bash <(curl https://raw.githubusercontent.com/codecov/codecov-bash/master/codecov) -t $CODECOV_TOKEN
# tags:
# - mysql
test:mysql:
script:
- apt-get install -qq libmysqlclient-dev
- pip install -r requirements/testing.txt
- pip install mysql-python
- apt-get install -qq mysql-client
- mysql -h"$MYSQL_PORT_3306_TCP_ADDR" -P"$MYSQL_PORT_3306_TCP_PORT" -uroot -p"$MYSQL_ENV_MYSQL_ROOT_PASSWORD" -e "ALTER DATABASE $MYSQL_DATABASE CHARACTER SET utf8 COLLATE utf8_unicode_ci;"
- coverage run manage.py runtests --settings=mayan.settings.testing.gitlab-ci.db_mysql --nomigrations
- bash <(curl https://raw.githubusercontent.com/codecov/codecov-bash/master/codecov) -t $CODECOV_TOKEN
tags:
- mysql
test:postgres:
script:
- apt-get install -qq libpq-dev
- pip install -r requirements/testing.txt
- pip install -q psycopg2
- pip install psycopg2
- coverage run manage.py runtests --settings=mayan.settings.testing.gitlab-ci.db_postgres --nomigrations
- bash <(curl https://raw.githubusercontent.com/codecov/codecov-bash/master/codecov) -t $CODECOV_TOKEN
tags:

Makefile (6 changes)
@@ -32,6 +32,7 @@ help:
@echo "release - Package (sdist and wheel) and upload a release."

@echo "runserver - Run the development server."
@echo "shell_plus - Run the shell_plus command."

# Cleaning
@@ -111,3 +112,8 @@ wheel: clean
runserver:
$(BROWSER) http://127.0.0.1:8000
./manage.py runserver

shell_plus:
./manage.py shell_plus --settings=mayan.settings.development

@@ -38,7 +38,6 @@ extensions = ['djangodocs', 'sphinxcontrib.blockdiag']
blockdiag_antialias = True
blockdiag_html_image_format = "SVG"
blockdiag_latex_image_format = "PDF"
blockdiag_tex_image_format = "PDF"

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

@@ -10,7 +10,7 @@ and install it from PyPI with the following commands:

.. code-block:: bash

    sudo apt-get install libjpeg-dev libmagic1 libpng-dev libreoffice libtiff-dev gcc ghostscript gpgv python-dev python-virtualenv tesseract-ocr poppler-utils -y
    sudo apt-get install libjpeg-dev libmagic1 libpng-dev libreoffice libtiff-dev gcc ghostscript gnupg1 python-dev python-virtualenv tesseract-ocr poppler-utils -y
    virtualenv venv
    source venv/bin/activate
    pip install mayan-edms

docs/releases/2.1.4.rst (new file)
@@ -0,0 +1,75 @@
===============================
Mayan EDMS v2.1.4 release notes
===============================

Released: XX, 2016

What's new
==========

This is a bug-fix release and all users are encouraged to upgrade.

Other changes
-------------
- Fix statistics namespace list display view
- Fix events list display view
- Update to Django 1.8.15

Removals
--------
* None

Upgrading from a previous version
---------------------------------

Using PIP
~~~~~~~~~

Type in the console::

    $ pip install -U mayan-edms

the requirements will also be updated automatically.

Using Git
~~~~~~~~~

If you installed Mayan EDMS by cloning the Git repository issue the commands::

    $ git reset --hard HEAD
    $ git pull

otherwise download the compressed archived and uncompress it overriding the
existing installation.

Next upgrade/add the new requirements::

    $ pip install --upgrade -r requirements.txt

Common steps
~~~~~~~~~~~~

Migrate existing database schema with::

    $ mayan-edms.py performupgrade

Add new static media::

    $ mayan-edms.py collectstatic --noinput

The upgrade procedure is now complete.


Backward incompatible changes
=============================

* None

Bugs fixed or issues closed
===========================

* `GitLab issue #311 <https://gitlab.com/mayan-edms/mayan-edms/issues/311>`_ acl page return ContentType:Document
* `GitLab issue #316 <https://gitlab.com/mayan-edms/mayan-edms/issues/316>`_ Error when trying to access the statistics
* `GitLab issue #324 <https://gitlab.com/mayan-edms/mayan-edms/issues/324>`_ Document signature tests fail in Ubuntu 16.10

.. _PyPI: https://pypi.python.org/pypi/mayan-edms/

@@ -22,6 +22,8 @@ versions of the documentation contain the release notes for any later releases.
.. toctree::
    :maxdepth: 1

=======
    2.1.4
    2.1.3
    2.1.2
    2.1.1

@@ -2,15 +2,17 @@
Deploying
=========

OS "bare metal"
===============
Below are some ways to deploye and use Mayan EDMS. Do use more than one method.

OS "bare metal" method
======================

Like other Django based projects Mayan EDMS can be deployed in a wide variety
of ways. The method provided below is only a bare minimum example.
These instructions are independent of the instructions mentioned in the
:doc:`installation` chapter but assume you have already made a test install to
test the compatibility of your operating system. These instruction are for
Ubuntu 15.04.
Ubuntu 16.10.

Switch to superuser::

@@ -20,8 +22,8 @@ Install all system dependencies::

    apt-get install nginx supervisor redis-server postgresql \
    libpq-dev libjpeg-dev libmagic1 libpng-dev libreoffice \
    libtiff-dev gcc ghostscript gpgv python-dev python-virtualenv \
    tesseract-ocr unpaper poppler-utils -y
    libtiff-dev gcc ghostscript gnupg1 python-dev python-virtualenv \
    tesseract-ocr poppler-utils -y

Change to the directory where the project will be deployed::

@@ -79,6 +81,7 @@ Append the following to the ``mayan/settings/local.py`` file, paying attention t

    BROKER_URL = 'redis://127.0.0.1:6379/0'
    CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/0'
    SIGNATURES_GPG_PATH = '/usr/bin/gpg1'

Migrate the database or initialize the project::

@@ -189,8 +192,8 @@ Restart the services::
    /etc/init.d/nginx restart
    /etc/init.d/supervisor restart

Docker
======
Docker method
=============

Deploy the Docker image stack::

@@ -205,8 +208,8 @@ with::
    docker logs mayan-edms

Docker Compose
==============
Docker Compose method
=====================

Create a file named ``environment`` with the following content::

@@ -248,8 +251,8 @@ with::

    docker logs mayanedms_mayan-edms_1

Vagrant
=======
Vagrant method
==============
Make sure you have Vagrant and a provider properly installed as per
https://docs.vagrantup.com/v2/installation/index.html
Clone the repository and execute::

@@ -2,8 +2,9 @@
OCR backend
===========

Mayan EDMS ships an OCR backend that uses the FLOSS engine Tesseract, but it can
use other engines. To support other engines a wrapper that subclasess the
Mayan EDMS ships an OCR backend that uses the FLOSS engine Tesseract
(https://github.com/tesseract-ocr/tesseract/), but it can
use other engines. To support other engines crate a wrapper that subclasess the
``OCRBackendBase`` class defined in mayan/apps/ocr/classes. This subclass should
expose the ``execute`` method. For an example of how the Tesseract backend
is implemented take a look at the file ``mayan/apps/ocr/backends/tesseract.py``
@@ -13,3 +14,8 @@ OCR_BACKEND and point it to your new OCR backend class path.

The default value of OCR_BACKEND is ``"ocr.backends.tesseract.Tesseract"``

To add support to OCR more languages when using Tesseract, install the
corresponding language file. If using a Debian based OS, this command will
display the available language files:

    apt-cache search tesseract-ocr
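
As a rough illustration of the wrapper described above, a custom backend could look like the following sketch. It is not part of this commit; the class name and the permissive *args/**kwargs signature of execute() are assumptions, the authoritative interface is the one in ``mayan/apps/ocr/backends/tesseract.py``.

    # Hypothetical OCR backend sketch, not taken from the Mayan EDMS source.
    from ocr.classes import OCRBackendBase


    class NullOCRBackend(OCRBackendBase):
        """Engine stub that recognizes nothing; replace the body of
        execute() with a call to the OCR engine of your choice."""

        def execute(self, *args, **kwargs):
            # A real backend would hand the page image over to an
            # external engine here and return the recognized text.
            return ''

The OCR_BACKEND setting would then point to the dotted path of such a class, for example ``'ocr_custom.backends.NullOCRBackend'`` (a hypothetical path).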

@@ -21,4 +21,6 @@ keys no longer needed can also be deleted from this menu.

Only `GNU Privacy Guard`_ signatures are support at the moment.

Only version 1 of `GNU Privacy Guard`_ is supported for now.

.. _`GNU Privacy Guard`: www.gnupg.org/

@@ -8,7 +8,7 @@ from user_management.tests import (
)

from ..models import AccessControlList
from ..permissions import permission_acl_edit
from ..permissions import permission_acl_edit, permission_acl_view


class AccessControlListViewTestCase(GenericDocumentViewTestCase):
@@ -109,3 +109,60 @@ class AccessControlListViewTestCase(GenericDocumentViewTestCase):

        self.assertNotContains(response, text='optgroup', status_code=200)
        self.assertEqual(AccessControlList.objects.count(), 1)

    def test_acl_list_view_no_permission(self):
        self.login(username=TEST_USER_USERNAME, password=TEST_USER_PASSWORD)

        document = self.document.add_as_recent_document_for_user(
            self.user
        ).document

        acl = AccessControlList.objects.create(
            content_object=document, role=self.role
        )
        acl.permissions.add(permission_acl_edit.stored_permission)

        content_type = ContentType.objects.get_for_model(document)

        view_arguments = {
            'app_label': content_type.app_label,
            'model': content_type.model,
            'object_id': document.pk
        }

        response = self.get(
            viewname='acls:acl_list', kwargs=view_arguments
        )

        self.assertNotContains(response, text=document.label, status_code=403)
        self.assertNotContains(response, text='otal: 1', status_code=403)

    def test_acl_list_view_with_permission(self):
        self.login(username=TEST_USER_USERNAME, password=TEST_USER_PASSWORD)

        self.role.permissions.add(
            permission_acl_view.stored_permission
        )

        document = self.document.add_as_recent_document_for_user(
            self.user
        ).document

        acl = AccessControlList.objects.create(
            content_object=document, role=self.role
        )
        acl.permissions.add(permission_acl_view.stored_permission)

        content_type = ContentType.objects.get_for_model(document)

        view_arguments = {
            'app_label': content_type.app_label,
            'model': content_type.model,
            'object_id': document.pk
        }

        response = self.get(
            viewname='acls:acl_list', kwargs=view_arguments
        )
        self.assertContains(response, text=document.label, status_code=200)
        self.assertContains(response, text='otal: 1', status_code=200)

@@ -121,13 +121,13 @@ class ACLDeleteView(SingleObjectDeleteView):

class ACLListView(SingleObjectListView):
    def dispatch(self, request, *args, **kwargs):
        self.content_type = get_object_or_404(
        self.object_content_type = get_object_or_404(
            ContentType, app_label=self.kwargs['app_label'],
            model=self.kwargs['model']
        )

        try:
            self.content_object = self.content_type.get_object_for_this_type(
            self.content_object = self.object_content_type.get_object_for_this_type(
                pk=self.kwargs['object_id']
            )
        except self.content_type.model_class().DoesNotExist:
@@ -153,7 +153,7 @@ class ACLListView(SingleObjectListView):

    def get_queryset(self):
        return AccessControlList.objects.filter(
            content_type=self.content_type, object_id=self.content_object.pk
            content_type=self.object_content_type, object_id=self.content_object.pk
        )

@@ -2,10 +2,10 @@ from __future__ import unicode_literals

from django.test import TestCase

from .mixins import OpenFileCheckMixin, TempfileCheckMixin
from .mixins import ContentTypeCheckMixin, OpenFileCheckMixin, TempfileCheckMixin


class BaseTestCase(OpenFileCheckMixin, TempfileCheckMixin, TestCase):
class BaseTestCase(ContentTypeCheckMixin, OpenFileCheckMixin, TempfileCheckMixin, TestCase):
    """
    This is the most basic test case class any test in the project should use.
    """

@@ -7,6 +7,30 @@ import psutil

from ..settings import setting_temporary_directory


class ContentTypeCheckMixin(object):
    expected_content_type = 'text/html; charset=utf-8'

    def _pre_setup(self):
        super(ContentTypeCheckMixin, self)._pre_setup()
        test_instance = self

        class CustomClient(self.client_class):
            def request(self, *args, **kwargs):
                response = super(CustomClient, self).request(*args, **kwargs)

                content_type = response._headers['content-type'][1]
                test_instance.assertEqual(
                    content_type, test_instance.expected_content_type,
                    msg='Unexpected response content type: {}, expected: {}.'.format(
                        content_type, test_instance.expected_content_type
                    )
                )

                return response

        self.client = CustomClient()


class TempfileCheckMixin(object):
    def _get_temporary_entries(self):
        return os.listdir(setting_temporary_directory.value)
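
The mixin above wraps the test client so that every response served during a test is checked against expected_content_type, which defaults to HTML; tests that serve binary content override that attribute, as the test changes later in this commit do. A minimal sketch of the pattern follows; the test class itself is made up, only the ``common.tests.mixins`` import path and the attribute name come from this commit.

    # Hypothetical test case, not part of this commit.
    from django.test import TestCase

    from common.tests.mixins import ContentTypeCheckMixin


    class PNGDownloadTestCase(ContentTypeCheckMixin, TestCase):
        # Every response in this test case must be served as image/png
        # instead of the mixin default 'text/html; charset=utf-8'.
        expected_content_type = 'image/png'

        def test_download(self):
            # Requests made through self.client here are checked
            # automatically by the wrapped client installed in _pre_setup().
            pass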

@@ -1,6 +1,136 @@
from __future__ import absolute_import, unicode_literals

from datetime import date
import os
import shutil

import gnupg

from common.utils import mkdtemp


class GPGBackend(object):
    def __init__(self, **kwargs):
        self.kwargs = kwargs


class PythonGNUPGBackend(GPGBackend):
    @staticmethod
    def _import_key(gpg, **kwargs):
        return gpg.import_keys(**kwargs)

    @staticmethod
    def _list_keys(gpg, **kwargs):
        return gpg.list_keys(**kwargs)

    @staticmethod
    def _import_and_list_keys(gpg, **kwargs):
        import_results = gpg.import_keys(**kwargs)
        return import_results, gpg.list_keys(
            keys=import_results.fingerprints[0]
        )[0]

    @staticmethod
    def _sign_file(gpg, file_object, key_data, passphrase, clearsign, detached, binary, output):
        import_results = gpg.import_keys(key_data=key_data)

        return gpg.sign_file(
            file=file_object, keyid=import_results.fingerprints[0],
            passphrase=passphrase, clearsign=clearsign, detach=detached,
            binary=binary, output=output
        )

    @staticmethod
    def _decrypt_file(gpg, file_object, keys):
        for key in keys:
            gpg.import_keys(key_data=key['key_data'])

        return gpg.decrypt_file(file=file_object)

    @staticmethod
    def _verify_file(gpg, file_object, keys, data_filename=None):
        for key in keys:
            gpg.import_keys(key_data=key['key_data'])

        return gpg.verify_file(
            file=file_object, data_filename=data_filename
        )

    @staticmethod
    def _recv_keys(gpg, keyserver, key_id):
        import_results = gpg.recv_keys(keyserver, key_id)
        if import_results.count:
            key_data = gpg.export_keys(import_results.fingerprints[0])
        else:
            key_data = None
        return key_data

    @staticmethod
    def _search_keys(gpg, keyserver, query):
        return gpg.search_keys(
            keyserver=keyserver, query=query
        )

    def gpg_command(self, function, **kwargs):
        temporary_directory = mkdtemp()
        os.chmod(temporary_directory, 0x1C0)

        gpg = gnupg.GPG(
            gnupghome=temporary_directory, gpgbinary=self.kwargs['binary_path']
        )

        result = function(gpg=gpg, **kwargs)

        shutil.rmtree(temporary_directory)

        return result

    def import_key(self, key_data):
        return self.gpg_command(
            function=PythonGNUPGBackend._import_key, key_data=key_data
        )

    def list_keys(self, keys):
        return self.gpg_command(
            function=PythonGNUPGBackend._list_keys, keys=keys
        )

    def import_and_list_keys(self, key_data):
        return self.gpg_command(
            function=PythonGNUPGBackend._import_and_list_keys,
            key_data=key_data
        )

    def sign_file(self, file_object, key_data, passphrase, clearsign, detached, binary, output):
        return self.gpg_command(
            function=PythonGNUPGBackend._sign_file, file_object=file_object,
            key_data=key_data, passphrase=passphrase, clearsign=clearsign,
            detached=detached, binary=binary, output=output
        )

    def decrypt_file(self, file_object, keys):
        return self.gpg_command(
            function=PythonGNUPGBackend._decrypt_file, file_object=file_object,
            keys=keys
        )

    def verify_file(self, file_object, keys, data_filename=None):
        return self.gpg_command(
            function=PythonGNUPGBackend._verify_file, file_object=file_object,
            keys=keys, data_filename=data_filename
        )

    def recv_keys(self, keyserver, key_id):
        return self.gpg_command(
            function=PythonGNUPGBackend._recv_keys, keyserver=keyserver,
            key_id=key_id
        )

    def search_keys(self, keyserver, query):
        return self.gpg_command(
            function=PythonGNUPGBackend._search_keys, keyserver=keyserver,
            query=query
        )


class KeyStub(object):

@@ -3,64 +3,56 @@ from __future__ import absolute_import, unicode_literals
import io
import logging
import os
import shutil

import gnupg

from django.db import models

from common.utils import mkdtemp, mkstemp
from common.utils import mkstemp

from .classes import KeyStub, SignatureVerification
from .exceptions import (
    DecryptionError, KeyDoesNotExist, KeyFetchingError, VerificationError
)
from .literals import KEY_TYPE_PUBLIC, KEY_TYPE_SECRET
from .settings import setting_gpg_path, setting_keyserver
from .runtime import gpg_backend
from .settings import setting_keyserver

logger = logging.getLogger(__name__)


class KeyManager(models.Manager):
    def decrypt_file(self, file_object, all_keys=False, key_fingerprint=None, key_id=None):
        temporary_directory = mkdtemp()

        os.chmod(temporary_directory, 0x1C0)

        gpg = gnupg.GPG(
            gnupghome=temporary_directory, gpgbinary=setting_gpg_path.value
        )

    def _preload_keys(self, all_keys=False, key_fingerprint=None, key_id=None):
        # Preload keys
        if all_keys:
            logger.debug('preloading all keys')
            for key in self.all():
                gpg.import_keys(key_data=key.key_data)
            keys = self.values()
        elif key_fingerprint:
            logger.debug('preloading key fingerprint: %s', key_fingerprint)
            try:
                key = self.get(fingerprint=key_fingerprint)
            except self.model.DoesNotExist:
            keys = self.filter(fingerprint=key_fingerprint).values()
            if not keys:
                logger.debug('key fingerprint %s not found', key_fingerprint)
                shutil.rmtree(temporary_directory)
                raise KeyDoesNotExist(
                    'Specified key for verification not found'
                )
            else:
                gpg.import_keys(key_data=key.key_data)
        elif key_id:
            logger.debug('preloading key id: %s', key_id)
            try:
                key = self.get(fingerprint__endswith=key_id)
            except self.model.DoesNotExist:
                logger.debug('key id %s not found', key_id)
            else:
                gpg.import_keys(key_data=key.key_data)
            keys = self.filter(fingerprint__endswith=key_id).values()
            if keys:
                logger.debug('key id %s impored', key_id)
            else:
                logger.debug('key id %s not found', key_id)
        else:
            keys = ()

        decrypt_result = gpg.decrypt_file(file=file_object)
        return keys

        shutil.rmtree(temporary_directory)
    def decrypt_file(self, file_object, all_keys=False, key_fingerprint=None, key_id=None):
        keys = self._preload_keys(
            all_keys=all_keys, key_fingerprint=key_fingerprint, key_id=key_id
        )

        decrypt_result = gpg_backend.decrypt_file(
            file_object=file_object, keys=keys
        )

        logger.debug('decrypt_result.status: %s', decrypt_result.status)

@@ -72,40 +64,20 @@ class KeyManager(models.Manager):
        return io.BytesIO(decrypt_result.data)

    def receive_key(self, key_id):
        temporary_directory = mkdtemp()

        os.chmod(temporary_directory, 0x1C0)

        gpg = gnupg.GPG(
            gnupghome=temporary_directory, gpgbinary=setting_gpg_path.value
        key_data = gpg_backend.recv_keys(
            keyserver=setting_keyserver.value, key_id=key_id
        )

        import_results = gpg.recv_keys(setting_keyserver.value, key_id)

        if not import_results.count:
            shutil.rmtree(temporary_directory)
        if not key_data:
            raise KeyFetchingError('No key found')
        else:
            key_data = gpg.export_keys(import_results.fingerprints[0])

            shutil.rmtree(temporary_directory)

            return self.create(key_data=key_data)

    def search(self, query):
        temporary_directory = mkdtemp()

        os.chmod(temporary_directory, 0x1C0)

        gpg = gnupg.GPG(
            gnupghome=temporary_directory, gpgbinary=setting_gpg_path.value
        key_data_list = gpg_backend.search_keys(
            keyserver=setting_keyserver.value, query=query
        )

        key_data_list = gpg.search_keys(
            query=query, keyserver=setting_keyserver.value
        )
        shutil.rmtree(temporary_directory)

        result = []
        for key_data in key_data_list:
            result.append(KeyStub(raw=key_data))
@@ -119,41 +91,10 @@ class KeyManager(models.Manager):

        return self.filter(key_type=KEY_TYPE_SECRET)

    def verify_file(self, file_object, signature_file=None, all_keys=False, key_fingerprint=None, key_id=None):
        temporary_directory = mkdtemp()

        os.chmod(temporary_directory, 0x1C0)

        gpg = gnupg.GPG(
            gnupghome=temporary_directory, gpgbinary=setting_gpg_path.value
        keys = self._preload_keys(
            all_keys=all_keys, key_fingerprint=key_fingerprint, key_id=key_id
        )

        # Preload keys
        if all_keys:
            logger.debug('preloading all keys')
            for key in self.all():
                gpg.import_keys(key_data=key.key_data)
        elif key_fingerprint:
            logger.debug('preloading key fingerprint: %s', key_fingerprint)
            try:
                key = self.get(fingerprint=key_fingerprint)
            except self.model.DoesNotExist:
                logger.debug('key fingerprint %s not found', key_fingerprint)
                shutil.rmtree(temporary_directory)
                raise KeyDoesNotExist(
                    'Specified key for verification not found'
                )
            else:
                gpg.import_keys(key_data=key.key_data)
        elif key_id:
            logger.debug('preloading key id: %s', key_id)
            try:
                key = self.get(fingerprint__endswith=key_id)
            except self.model.DoesNotExist:
                logger.debug('key id %s not found', key_id)
            else:
                gpg.import_keys(key_data=key.key_data)
                logger.debug('key id %s impored', key_id)

        if signature_file:
            # Save the original data and invert the argument order
            # Signature first, file second
@@ -165,18 +106,19 @@ class KeyManager(models.Manager):
            signature_file_buffer.write(signature_file.read())
            signature_file_buffer.seek(0)
            signature_file.seek(0)
            verify_result = gpg.verify_file(
                file=signature_file_buffer, data_filename=temporary_filename
            verify_result = gpg_backend.verify_file(
                file_object=signature_file_buffer,
                data_filename=temporary_filename, keys=keys
            )
            signature_file_buffer.close()
            os.unlink(temporary_filename)
        else:
            verify_result = gpg.verify_file(file=file_object)
            verify_result = gpg_backend.verify_file(
                file_object=file_object, keys=keys
            )

        logger.debug('verify_result.status: %s', verify_result.status)

        shutil.rmtree(temporary_directory)

        if verify_result:
            # Signed and key present
            logger.debug('signed and key present')

@@ -14,6 +14,9 @@ class Migration(migrations.Migration):
        migrations.AlterField(
            model_name='key',
            name='key_data',
            field=models.TextField(help_text='ASCII armored version of the key.', verbose_name='Key data'),
            field=models.TextField(
                help_text='ASCII armored version of the key.',
                verbose_name='Key data'
            ),
        ),
    ]

@@ -2,10 +2,6 @@ from __future__ import absolute_import, unicode_literals

from datetime import date
import logging
import os
import shutil

import gnupg

from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
@@ -13,8 +9,6 @@ from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _

from common.utils import mkdtemp

from .exceptions import NeedPassphrase, PassphraseError
from .literals import (
    ERROR_MSG_NEED_PASSPHRASE, ERROR_MSG_BAD_PASSPHRASE,
@@ -22,26 +16,11 @@ from .literals import (
    OUTPUT_MESSAGE_CONTAINS_PRIVATE_KEY
)
from .managers import KeyManager
from .settings import setting_gpg_path
from .runtime import gpg_backend

logger = logging.getLogger(__name__)


def gpg_command(function):
    temporary_directory = mkdtemp()
    os.chmod(temporary_directory, 0x1C0)

    gpg = gnupg.GPG(
        gnupghome=temporary_directory, gpgbinary=setting_gpg_path.value
    )

    result = function(gpg=gpg)

    shutil.rmtree(temporary_directory)

    return result


@python_2_unicode_compatible
class Key(models.Model):
    key_data = models.TextField(
@@ -78,10 +57,7 @@ class Key(models.Model):
        verbose_name_plural = _('Keys')

    def clean(self):
        def import_key(gpg):
            return gpg.import_keys(key_data=self.key_data)

        import_results = gpg_command(function=import_key)
        import_results = gpg_backend.import_key(key_data=self.key_data)

        if not import_results.count:
            raise ValidationError(_('Invalid key data'))
@@ -93,22 +69,11 @@ class Key(models.Model):
        return reverse('django_gpg:key_detail', args=(self.pk,))

    def save(self, *args, **kwargs):
        temporary_directory = mkdtemp()

        os.chmod(temporary_directory, 0x1C0)

        gpg = gnupg.GPG(
            gnupghome=temporary_directory, gpgbinary=setting_gpg_path.value
        import_results, key_info = gpg_backend.import_and_list_keys(
            key_data=self.key_data
        )

        import_results = gpg.import_keys(key_data=self.key_data)

        key_info = gpg.list_keys(keys=import_results.fingerprints[0])[0]

        logger.debug('key_info: %s', key_info)

        shutil.rmtree(temporary_directory)

        self.algorithm = key_info['algo']
        self.creation_date = date.fromtimestamp(int(key_info['date']))
        if key_info['expires']:
@@ -134,24 +99,12 @@ class Key(models.Model):
        # file, and appear to be due to random data being inserted in the
        # output data stream."

        temporary_directory = mkdtemp()

        os.chmod(temporary_directory, 0x1C0)

        gpg = gnupg.GPG(
            gnupghome=temporary_directory, gpgbinary=setting_gpg_path.value
        )

        import_results = gpg.import_keys(key_data=self.key_data)

        file_sign_results = gpg.sign_file(
            file=file_object, keyid=import_results.fingerprints[0],
            passphrase=passphrase, clearsign=clearsign, detach=detached,
        file_sign_results = gpg_backend.sign_file(
            file_object=file_object, key_data=self.key_data,
            passphrase=passphrase, clearsign=clearsign, detached=detached,
            binary=binary, output=output
        )

        shutil.rmtree(temporary_directory)

        logger.debug('file_sign_results.stderr: %s', file_sign_results.stderr)

        if ERROR_MSG_NEED_PASSPHRASE in file_sign_results.stderr:

mayan/apps/django_gpg/runtime.py (new file)
@@ -0,0 +1,10 @@
from django.utils.module_loading import import_string

from .settings import setting_gpg_path

# TODO: This will become an setting option in 2.2
SETTING_GPG_BACKEND = 'django_gpg.classes.PythonGNUPGBackend'

gpg_backend = import_string(SETTING_GPG_BACKEND)(
    binary_path=setting_gpg_path.value
)
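
Until the TODO above becomes a real configuration option, swapping backends means pointing SETTING_GPG_BACKEND at another class. The following is a minimal sketch of what such a class could look like, assuming only what this commit shows (GPGBackend stores its keyword arguments, and callers use methods such as decrypt_file, verify_file, import_key, import_and_list_keys, sign_file, recv_keys and search_keys); the class and module names are hypothetical.

    # Hypothetical wrapper backend, not part of this commit.
    from django_gpg.classes import GPGBackend, PythonGNUPGBackend


    class LoggingGPGBackend(GPGBackend):
        """Delegate to the shipped python-gnupg backend, logging calls."""

        def __init__(self, **kwargs):
            super(LoggingGPGBackend, self).__init__(**kwargs)
            self._delegate = PythonGNUPGBackend(**kwargs)

        def verify_file(self, file_object, keys, data_filename=None):
            print('verify_file called with %d preloaded key(s)' % len(keys))
            return self._delegate.verify_file(
                file_object=file_object, keys=keys,
                data_filename=data_filename
            )

        # decrypt_file, import_key, import_and_list_keys, sign_file,
        # recv_keys and search_keys would be forwarded the same way.

    # Selecting it would then be a one line change in runtime.py:
    # SETTING_GPG_BACKEND = 'myapp.gpg.LoggingGPGBackend'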

@@ -32,6 +32,8 @@ class KeyViewTestCase(GenericViewTestCase):

        self.role.permissions.add(permission_key_download.stored_permission)

        self.expected_content_type = 'application/octet-stream; charset=utf-8'

        response = self.get(
            viewname='django_gpg:key_download', args=(key.pk,)
        )

@@ -213,6 +213,8 @@ class SignaturesViewTestCase(GenericDocumentViewTestCase):
            permission_document_version_signature_download.stored_permission
        )

        self.expected_content_type = 'application/octet-stream; charset=utf-8'

        response = self.get(
            'signatures:document_version_signature_download',
            args=(signature.pk,),

@@ -52,6 +52,9 @@ class DocumentEventsTestCase(GenericDocumentViewTestCase):
        self.role.permissions.add(
            permission_document_download.stored_permission
        )

        self.expected_content_type = 'image/png'

        self.post(
            'documents:document_download', args=(self.document.pk,),
        )

@@ -247,6 +247,9 @@ class DocumentsViewsTestCase(GenericDocumentViewTestCase):
            permission_document_download.stored_permission
        )

        # Set the expected_content_type for common.tests.mixins.ContentTypeCheckMixin
        self.expected_content_type = self.document.file_mimetype

        response = self.post(
            'documents:document_download', args=(self.document.pk,)
        )
@@ -284,6 +287,9 @@ class DocumentsViewsTestCase(GenericDocumentViewTestCase):
            permission_document_download.stored_permission
        )

        # Set the expected_content_type for common.tests.mixins.ContentTypeCheckMixin
        self.expected_content_type = self.document.file_mimetype

        response = self.post(
            'documents:document_multiple_download',
            data={'id_list': self.document.pk}
@@ -323,6 +329,9 @@ class DocumentsViewsTestCase(GenericDocumentViewTestCase):
            permission_document_download.stored_permission
        )

        # Set the expected_content_type for common.tests.mixins.ContentTypeCheckMixin
        self.expected_content_type = self.document.file_mimetype

        response = self.post(
            'documents:document_version_download', args=(
                self.document.latest_version.pk,

@@ -15,6 +15,7 @@ from .widgets import event_type_link

class EventsApp(MayanAppConfig):
    name = 'events'
    test = True
    verbose_name = _('Events')

    def ready(self):

mayan/apps/events/tests/__init__.py (new file, empty)
mayan/apps/events/tests/test_views.py (new file)
@@ -0,0 +1,72 @@
from __future__ import absolute_import, unicode_literals

from django.contrib.contenttypes.models import ContentType

from acls.models import AccessControlList
from documents.tests.test_views import GenericDocumentViewTestCase
from user_management.tests import (
    TEST_USER_USERNAME, TEST_USER_PASSWORD
)

from ..permissions import permission_events_view


class EventsViewTestCase(GenericDocumentViewTestCase):
    def setUp(self):
        super(EventsViewTestCase, self).setUp()

        content_type = ContentType.objects.get_for_model(self.document)

        self.view_arguments = {
            'app_label': content_type.app_label,
            'model': content_type.model,
            'object_id': self.document.pk
        }

    def test_events_for_object_view_no_permission(self):
        self.login(username=TEST_USER_USERNAME, password=TEST_USER_PASSWORD)

        document = self.document.add_as_recent_document_for_user(
            self.user
        ).document

        content_type = ContentType.objects.get_for_model(document)

        view_arguments = {
            'app_label': content_type.app_label,
            'model': content_type.model,
            'object_id': document.pk
        }

        response = self.get(
            viewname='events:events_for_object', kwargs=view_arguments
        )

        self.assertNotContains(response, text=document.label, status_code=403)
        self.assertNotContains(response, text='otal:', status_code=403)

    def test_events_for_object_view_with_permission(self):
        self.login(username=TEST_USER_USERNAME, password=TEST_USER_PASSWORD)

        self.role.permissions.add(
            permission_events_view.stored_permission
        )

        document = self.document.add_as_recent_document_for_user(
            self.user
        ).document

        content_type = ContentType.objects.get_for_model(document)

        view_arguments = {
            'app_label': content_type.app_label,
            'model': content_type.model,
            'object_id': document.pk
        }

        response = self.get(
            viewname='events:events_for_object', kwargs=view_arguments
        )

        self.assertContains(response, text=document.label, status_code=200)
        self.assertNotContains(response, text='otal: 0', status_code=200)

@@ -43,16 +43,16 @@ class ObjectEventListView(EventListView):
    view_permissions = None

    def dispatch(self, request, *args, **kwargs):
        self.content_type = get_object_or_404(
        self.object_content_type = get_object_or_404(
            ContentType, app_label=self.kwargs['app_label'],
            model=self.kwargs['model']
        )

        try:
            self.content_object = self.content_type.get_object_for_this_type(
            self.content_object = self.object_content_type.get_object_for_this_type(
                pk=self.kwargs['object_id']
            )
        except self.content_type.model_class().DoesNotExist:
        except self.object_content_type.model_class().DoesNotExist:
            raise Http404

        try:

@@ -367,7 +367,7 @@ class SourceColumn(object):
            # Special case for queryset items produced from
            # .defer() or .only() optimizations
            return cls._registry[source._meta.parents.items()[0][0]]
        except (KeyError, IndexError):
        except (AttributeError, KeyError, IndexError):
            return ()
        except TypeError:
            # unhashable type: list

@@ -34,3 +34,20 @@ class StatisticsViewTestCase(GenericViewTestCase):
        )

        self.assertEqual(response.status_code, 200)

    def test_statistic_namespace_list_view_no_permissions(self):
        self.login(username=TEST_USER_USERNAME, password=TEST_USER_PASSWORD)

        response = self.get('statistics:namespace_list')

        self.assertEqual(response.status_code, 403)

    def test_statistic_namespace_list_view_with_permissions(self):
        self.login(username=TEST_USER_USERNAME, password=TEST_USER_PASSWORD)

        self.role.permissions.add(permission_statistics_view.stored_permission)

        response = self.get('statistics:namespace_list')

        self.assertEqual(response.status_code, 200)

@@ -2,6 +2,8 @@ from __future__ import absolute_import, unicode_literals

from ..base import * # NOQA

SIGNATURES_GPG_PATH = '/usr/bin/gpg1'

INSTALLED_APPS += ('test_without_migrations',)
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',

@@ -30,7 +30,7 @@ pdfminer==20140328
pycountry==1.19
pytesseract==0.1.6
python-dateutil==2.4.2
python-gnupg==0.3.8
python-gnupg==0.3.9
python-magic==0.4.10
pytz==2015.4

@@ -1,2 +1,2 @@
-r base.txt
Django==1.8.13
Django==1.8.15