diff --git a/mayan/apps/common/migrations/0012_auto_20181130_2348.py b/mayan/apps/common/migrations/0012_auto_20181202_0803.py
similarity index 53%
rename from mayan/apps/common/migrations/0012_auto_20181130_2348.py
rename to mayan/apps/common/migrations/0012_auto_20181202_0803.py
index e3bee0c4a9..b23eac2acc 100644
--- a/mayan/apps/common/migrations/0012_auto_20181130_2348.py
+++ b/mayan/apps/common/migrations/0012_auto_20181202_0803.py
@@ -1,9 +1,7 @@
 # -*- coding: utf-8 -*-
-# Generated by Django 1.11.16 on 2018-11-30 23:48
+# Generated by Django 1.11.16 on 2018-12-02 08:03
 from __future__ import unicode_literals
 
-import common.models
-import django.core.files.storage
 from django.db import migrations, models
 import django.db.models.deletion
 
@@ -11,7 +9,6 @@ import django.db.models.deletion
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('contenttypes', '0002_remove_content_type_name'),
         ('common', '0011_auto_20180429_0758'),
     ]
 
@@ -20,12 +17,10 @@ class Migration(migrations.Migration):
             name='Cache',
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('name', models.CharField(max_length=128, verbose_name='Name')),
+                ('name', models.CharField(max_length=128, unique=True, verbose_name='Name')),
                 ('label', models.CharField(max_length=128, verbose_name='Label')),
                 ('maximum_size', models.PositiveIntegerField(verbose_name='Maximum size')),
-                ('object_id', models.PositiveIntegerField()),
-                ('storage_instance_path', models.CharField(max_length=255, verbose_name='Storage instance path')),
-                ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
+                ('storage_instance_path', models.CharField(max_length=255, unique=True, verbose_name='Storage instance path')),
             ],
             options={
                 'verbose_name': 'Cache',
@@ -33,23 +28,38 @@ class Migration(migrations.Migration):
             },
         ),
         migrations.CreateModel(
-            name='CacheFile',
+            name='CachePartition',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('name', models.CharField(max_length=128, verbose_name='Name')),
+                ('cache', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='partitions', to='common.Cache', verbose_name='Cache')),
+            ],
+            options={
+                'verbose_name': 'Cache partition',
+                'verbose_name_plural': 'Cache partitions',
+            },
+        ),
+        migrations.CreateModel(
+            name='CachePartitionFile',
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                 ('datetime', models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='Date time')),
-                ('filename', models.CharField(max_length=128, verbose_name='Filename')),
+                ('filename', models.CharField(max_length=255, verbose_name='Filename')),
                 ('file_size', models.PositiveIntegerField(db_index=True, default=0, verbose_name='File size')),
-                ('cache', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='common.Cache', verbose_name='Cache')),
+                ('partition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='common.CachePartition', verbose_name='Cache partition')),
             ],
             options={
                 'get_latest_by': 'datetime',
-                'verbose_name': 'Cache file',
-                'verbose_name_plural': 'Cache files',
+                'verbose_name': 'Cache partition file',
+                'verbose_name_plural': 'Cache partition files',
             },
         ),
-        migrations.AlterField(
-            model_name='shareduploadedfile',
-            name='file',
-            field=models.FileField(storage=django.core.files.storage.FileSystemStorage(location='/usr/local/development/mayan-edms/mayan/media/shared_files'), upload_to=common.models.upload_to, verbose_name='File'),
+        migrations.AlterUniqueTogether(
+            name='cachepartitionfile',
+            unique_together=set([('partition', 'filename')]),
+        ),
+        migrations.AlterUniqueTogether(
+            name='cachepartition',
+            unique_together=set([('cache', 'name')]),
         ),
     ]
diff --git a/mayan/apps/common/models.py b/mayan/apps/common/models.py
index 6e535202fe..6dd2e84797 100644
--- a/mayan/apps/common/models.py
+++ b/mayan/apps/common/models.py
@@ -1,5 +1,7 @@
 from __future__ import unicode_literals
 
+from contextlib import contextmanager
+import logging
 import uuid
 
 from pytz import common_timezones
@@ -7,16 +9,22 @@ from pytz import common_timezones
 from django.conf import settings
 from django.contrib.contenttypes.fields import GenericForeignKey
 from django.contrib.contenttypes.models import ContentType
-from django.db import models
+from django.core.files.base import ContentFile
+from django.db import models, transaction, OperationalError
 from django.db.models import Sum
 from django.utils.encoding import force_text, python_2_unicode_compatible
 from django.utils.functional import cached_property
 from django.utils.module_loading import import_string
 from django.utils.translation import ugettext_lazy as _
 
+from lock_manager import LockError
+from lock_manager.runtime import locking_backend
+
 from .managers import ErrorLogEntryManager, UserLocaleProfileManager
 from .storages import storage_sharedupload
 
+logger = logging.getLogger(__name__)
+
 
 def upload_to(instance, filename):
     return 'shared-file-{}'.format(uuid.uuid4().hex)
@@ -24,14 +32,13 @@ def upload_to(instance, filename):
 
 @python_2_unicode_compatible
 class Cache(models.Model):
-    name = models.CharField(max_length=128, verbose_name=_('Name'))
+    name = models.CharField(
+        max_length=128, unique=True, verbose_name=_('Name')
+    )
     label = models.CharField(max_length=128, verbose_name=_('Label'))
     maximum_size = models.PositiveIntegerField(verbose_name=_('Maximum size'))
-    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
-    object_id = models.PositiveIntegerField()
-    content_object = GenericForeignKey('content_type', 'object_id')
     storage_instance_path = models.CharField(
-        max_length=255, verbose_name=_('Storage instance path')
+        max_length=255, unique=True, verbose_name=_('Storage instance path')
     )
 
     class Meta:
@@ -41,46 +48,143 @@ class Cache(models.Model):
     def __str__(self):
         return self.label
 
+    def get_files(self):
+        return CachePartitionFile.objects.filter(partition__cache__id=self.pk)
+
     def get_total_size(self):
-        return self.files.aggregate(
+        return self.get_files().aggregate(
             file_size__sum=Sum('file_size')
-        )['file_size__sum']
+        )['file_size__sum'] or 0
 
     def prune(self):
         while self.get_total_size() > self.maximum_size:
-            self.files.earliest().delete()
+            self.get_files().earliest().delete()
+
+    def purge(self):
+        for partition in self.partitions.all():
+            partition.purge()
 
     @cached_property
-    def storage_instance(self):
+    def storage(self):
         return import_string(self.storage_instance_path)
 
 
-class CacheFile(models.Model):
+class CachePartition(models.Model):
     cache = models.ForeignKey(
-        on_delete=models.CASCADE, related_name='files',
+        on_delete=models.CASCADE, related_name='partitions',
         to=Cache, verbose_name=_('Cache')
     )
+    name = models.CharField(
+        max_length=128, verbose_name=_('Name')
+    )
+
+    class Meta:
+        unique_together = ('cache', 'name')
+        verbose_name = _('Cache partition')
+        verbose_name_plural = _('Cache partitions')
+
+    @staticmethod
+    def get_combined_filename(parent, filename):
+        return '{}-{}'.format(parent, filename)
+
+    @contextmanager
+    def create_file(self, filename):
+        lock_id = 'cache_partition-create_file-{}-{}'.format(self.pk, filename)
+        try:
+            logger.debug('trying to acquire lock: %s', lock_id)
+            lock = locking_backend.acquire_lock(lock_id)
+            logger.debug('acquired lock: %s', lock_id)
+            try:
+                self.cache.prune()
+
+                # Since open "wb+" doesn't create files, force the creation of
+                # an empty file.
+                self.cache.storage.save(
+                    name=self.get_full_filename(filename=filename),
+                    content=ContentFile(content='')
+                )
+
+                try:
+                    with transaction.atomic():
+                        partition_file = self.files.create(filename=filename)
+                        yield partition_file.open(mode='wb')
+                        partition_file.update_size()
+                except Exception as exception:
+                    logger.error(
+                        'Unexpected exception while trying to save new '
+                        'cache file; %s', exception
+                    )
+                    self.cache.storage.delete(
+                        name=self.get_full_filename(filename=filename)
+                    )
+                    raise
+            finally:
+                lock.release()
+        except LockError:
+            logger.debug('unable to obtain lock: %s', lock_id)
+            raise
+
+    def get_file(self, filename):
+        try:
+            return self.files.get(filename=filename)
+        except self.files.model.DoesNotExist:
+            return None
+
+    def get_full_filename(self, filename):
+        return CachePartition.get_combined_filename(
+            parent=self.name, filename=filename
+        )
+
+    def purge(self):
+        for partition_file in self.files.all():
+            partition_file.delete()
+
+
+class CachePartitionFile(models.Model):
+    partition = models.ForeignKey(
+        on_delete=models.CASCADE, related_name='files',
+        to=CachePartition, verbose_name=_('Cache partition')
+    )
     datetime = models.DateTimeField(
         auto_now_add=True, db_index=True, verbose_name=_('Date time')
     )
-    filename = models.CharField(max_length=128, verbose_name=_('Filename'))
+    filename = models.CharField(max_length=255, verbose_name=_('Filename'))
    file_size = models.PositiveIntegerField(
         db_index=True, default=0, verbose_name=_('File size')
     )
 
     class Meta:
         get_latest_by = 'datetime'
-        verbose_name = _('Cache file')
-        verbose_name_plural = _('Cache files')
+        unique_together = ('partition', 'filename')
+        verbose_name = _('Cache partition file')
+        verbose_name_plural = _('Cache partition files')
 
     def delete(self, *args, **kwargs):
-        self.cache.storage_instance.delete(self.filename)
-        return super(CacheFile, self).delete(*args, **kwargs)
+        self.partition.cache.storage.delete(name=self.full_filename)
+        return super(CachePartitionFile, self).delete(*args, **kwargs)
 
-    def save(self, *args, **kwargs):
-        self.cache.prune()
-        self.file_size = self.cache.storage_instance.size(self.filename)
-        return super(CacheFile, self).save(*args, **kwargs)
+    @cached_property
+    def full_filename(self):
+        return CachePartition.get_combined_filename(
+            parent=self.partition.name, filename=self.filename
+        )
+
+    def open(self, mode='rb'):
+        try:
+            return self.partition.cache.storage.open(
+                name=self.full_filename, mode=mode
+            )
+        except Exception as exception:
+            logger.error(
+                'Unexpected exception opening the cache file; %s', exception
+            )
+            raise
+
+    def update_size(self):
+        self.file_size = self.partition.cache.storage.size(
+            name=self.full_filename
+        )
+        self.save()
 
 
 class ErrorLogEntry(models.Model):
diff --git a/mayan/apps/documents/api_views.py b/mayan/apps/documents/api_views.py
index e86e54be10..52ffb2663b 100644
--- a/mayan/apps/documents/api_views.py
+++ b/mayan/apps/documents/api_views.py
@@ -234,7 +234,7 @@ class APIDocumentPageImageView(generics.RetrieveAPIView):
             )
             cache_filename = task.get(timeout=DOCUMENT_IMAGE_TASK_TIMEOUT)
 
-            with storage_documentimagecache.open(cache_filename) as file_object:
+            with self.get_object().cache_partition.get_file(filename=cache_filename).open() as file_object:
                 response = HttpResponse(file_object.read(), content_type='image')
                 if '_hash' in request.GET:
                     patch_cache_control(
diff --git a/mayan/apps/documents/handlers.py b/mayan/apps/documents/handlers.py
index c59e666a60..d0e45f35d2 100644
--- a/mayan/apps/documents/handlers.py
+++ b/mayan/apps/documents/handlers.py
@@ -3,7 +3,11 @@ from __future__ import unicode_literals
 from django.apps import apps
 from django.utils.translation import ugettext_lazy as _
 
-from .literals import DEFAULT_DOCUMENT_TYPE_LABEL
+from .literals import (
+    DEFAULT_DOCUMENT_TYPE_LABEL, DOCUMENT_CACHE_STORAGE_INSTANCE_PATH,
+    DOCUMENT_IMAGES_CACHE_NAME
+)
+from .settings import setting_document_cache_maximum_size
 from .signals import post_initial_document_type
 from .tasks import task_clean_empty_duplicate_lists, task_scan_duplicates_for
 
@@ -24,9 +28,12 @@ def create_default_document_type(sender, **kwargs):
 
 def handler_create_document_cache(sender, **kwargs):
     Cache = apps.get_model(app_label='common', model_name='Cache')
-    Cache.objects.get_or_create(
-        name='document_images', label=_('Document images'),
-        storage_instance_path='documents.storages.storage_documentimagecache'
+    Cache.objects.update_or_create(
+        defaults={
+            'label': _('Document images'),
+            'storage_instance_path': DOCUMENT_CACHE_STORAGE_INSTANCE_PATH,
+            'maximum_size': setting_document_cache_maximum_size.value,
+        }, name=DOCUMENT_IMAGES_CACHE_NAME,
     )
 
 
diff --git a/mayan/apps/documents/literals.py b/mayan/apps/documents/literals.py
index 8a6da28df8..f4aa65c2bb 100644
--- a/mayan/apps/documents/literals.py
+++ b/mayan/apps/documents/literals.py
@@ -9,7 +9,7 @@ CHECK_TRASH_PERIOD_INTERVAL = 60
 DELETE_STALE_STUBS_INTERVAL = 60 * 10  # 10 minutes
 DEFAULT_DELETE_PERIOD = 30
 DEFAULT_DELETE_TIME_UNIT = TIME_DELTA_UNIT_DAYS
-DEFAULT_DOCUMENTS_CACHE_MAXIMUM_SIZE = 50 * 2 ** 20  # 50 Megabytes
+DEFAULT_DOCUMENTS_CACHE_MAXIMUM_SIZE = 100 * 2 ** 20  # 100 Megabytes
 DEFAULT_LANGUAGE = 'eng'
 DEFAULT_LANGUAGE_CODES = (
     'ilo', 'run', 'uig', 'hin', 'pan', 'pnb', 'wuu', 'msa', 'kxd', 'ind',
@@ -30,6 +30,8 @@ DEFAULT_LANGUAGE_CODES = (
 DEFAULT_ZIP_FILENAME = 'document_bundle.zip'
 DEFAULT_DOCUMENT_TYPE_LABEL = _('Default')
 DOCUMENT_IMAGE_TASK_TIMEOUT = 120
+DOCUMENT_IMAGES_CACHE_NAME = 'document_images'
+DOCUMENT_CACHE_STORAGE_INSTANCE_PATH = 'documents.storages.storage_documentimagecache'
 STUB_EXPIRATION_INTERVAL = 60 * 60 * 24  # 24 hours
 UPDATE_PAGE_COUNT_RETRY_DELAY = 10
 UPLOAD_NEW_VERSION_RETRY_DELAY = 10
diff --git a/mayan/apps/documents/models.py b/mayan/apps/documents/models.py
index 7a9a849d41..796c929c49 100644
--- a/mayan/apps/documents/models.py
+++ b/mayan/apps/documents/models.py
@@ -7,6 +7,7 @@ import uuid
 
 from furl import furl
 
+from django.apps import apps
 from django.conf import settings
 from django.core.files import File
 from django.core.files.base import ContentFile
@@ -14,6 +15,7 @@ from django.db import models, transaction
 from django.template import Template, Context
 from django.urls import reverse
 from django.utils.encoding import force_text, python_2_unicode_compatible
+from django.utils.functional import cached_property
 from django.utils.timezone import now
 from django.utils.translation import ugettext, ugettext_lazy as _
 
@@ -26,6 +28,7 @@ from converter import (
 from converter.exceptions import InvalidOfficeFormat, PageCountError
 from converter.literals import DEFAULT_ZOOM_LEVEL, DEFAULT_ROTATION
 from converter.models import Transformation
+from lock_manager import LockError
 from mimetype.api import get_mimetype
 
 from .events import (
@@ -34,7 +37,9 @@ from .events import (
     event_document_type_created, event_document_type_edited,
     event_document_version_revert
 )
-from .literals import DEFAULT_DELETE_PERIOD, DEFAULT_DELETE_TIME_UNIT
+from .literals import (
+    DEFAULT_DELETE_PERIOD, DEFAULT_DELETE_TIME_UNIT, DOCUMENT_IMAGES_CACHE_NAME
+)
 from .managers import (
     DocumentManager, DocumentPageCachedImage, DocumentPageManager,
     DocumentVersionManager, DocumentTypeManager, DuplicatedDocumentManager,
@@ -50,7 +55,7 @@ from .settings import (
 from .signals import (
     post_document_created, post_document_type_change, post_version_upload
 )
-from .storages import storage_documentversion, storage_documentimagecache
+from .storages import storage_documentversion
 
 logger = logging.getLogger(__name__)
 
@@ -472,9 +477,17 @@ class DocumentVersion(models.Model):
     def __str__(self):
         return self.get_rendered_string()
 
-    @property
-    def cache_filename(self):
-        return 'document-version-{}'.format(self.uuid)
+    @cached_property
+    def cache(self):
+        Cache = apps.get_model(app_label='common', model_name='Cache')
+        return Cache.objects.get(name=DOCUMENT_IMAGES_CACHE_NAME)
+
+    @cached_property
+    def cache_partition(self):
+        partition, created = self.cache.partitions.get_or_create(
+            name='version-{}'.format(self.uuid)
+        )
+        return partition
 
     def delete(self, *args, **kwargs):
         for page in self.pages.all():
@@ -511,34 +524,34 @@ class DocumentVersion(models.Model):
         return first_page.get_api_image_url(*args, **kwargs)
 
     def get_intermidiate_file(self):
-        cache_filename = self.cache_filename
-        logger.debug('Intermidiate filename: %s', cache_filename)
+        import time
 
-        if storage_documentimagecache.exists(cache_filename):
-            logger.debug('Intermidiate file "%s" found.', cache_filename)
-
-            return storage_documentimagecache.open(cache_filename)
+        cache_file = self.cache_partition.get_file(filename='intermediate_file')
+        if cache_file:
+            logger.debug('Intermediate file found.')
+            return cache_file.open()
         else:
-            logger.debug('Intermidiate file "%s" not found.', cache_filename)
+            logger.debug('Intermediate file not found.')
 
             try:
                 converter = converter_class(file_object=self.open())
                 pdf_file_object = converter.to_pdf()
 
-                with storage_documentimagecache.open(cache_filename, mode='wb+') as file_object:
-                    for chunk in pdf_file_object:
-                        file_object.write(chunk)
+                try:
+                    with self.cache_partition.create_file(filename='intermediate_file') as file_object:
+                        for chunk in pdf_file_object:
+                            file_object.write(chunk)
+                except LockError:
+                    time.sleep(0.1)
+                    return self.get_intermidiate_file()
 
-                return storage_documentimagecache.open(cache_filename)
+                return self.cache_partition.get_file(filename='intermediate_file').open()
             except InvalidOfficeFormat:
                 return self.open()
             except Exception as exception:
-                # Cleanup in case of error
                 logger.error(
-                    'Error creating intermediate file "%s"; %s.',
-                    cache_filename, exception
+                    'Error creating intermediate file; %s.', exception
                 )
-                storage_documentimagecache.delete(cache_filename)
                 raise
 
     def get_rendered_string(self, preserve_extension=False):
@@ -562,7 +575,7 @@ class DocumentVersion(models.Model):
     natural_key.dependencies = ['documents.Document']
 
     def invalidate_cache(self):
-        storage_documentimagecache.delete(self.cache_filename)
+        self.cache_partition.purge()
         for page in self.pages.all():
             page.invalidate_cache()
 
@@ -743,37 +756,12 @@ class DocumentVersion(models.Model):
 
         return detected_pages
 
-    @property
+    @cached_property
     def uuid(self):
         # Make cache UUID a mix of document UUID, version ID
         return '{}-{}'.format(self.document.uuid, self.pk)
 
 
-@python_2_unicode_compatible
-class DocumentTypeFilename(models.Model):
-    """
-    List of labels available to a specific document type for the
-    quick rename functionality
-    """
-    document_type = models.ForeignKey(
-        on_delete=models.CASCADE, related_name='filenames', to=DocumentType,
-        verbose_name=_('Document type')
-    )
-    filename = models.CharField(
-        db_index=True, max_length=128, verbose_name=_('Label')
-    )
-    enabled = models.BooleanField(default=True, verbose_name=_('Enabled'))
-
-    class Meta:
-        ordering = ('filename',)
-        unique_together = ('document_type', 'filename')
-        verbose_name = _('Quick label')
-        verbose_name_plural = _('Quick labels')
-
-    def __str__(self):
-        return self.filename
-
-
 @python_2_unicode_compatible
 class DocumentPage(models.Model):
     """
@@ -804,9 +792,12 @@ class DocumentPage(models.Model):
             'total_pages': self.document_version.pages.count()
         }
 
-    @property
-    def cache_filename(self):
-        return 'page-cache-{}'.format(self.uuid)
+    @cached_property
+    def cache_partition(self):
+        partition, created = self.document_version.cache.partitions.get_or_create(
+            name=self.uuid
+        )
+        return partition
 
     def delete(self, *args, **kwargs):
         self.invalidate_cache()
@@ -822,35 +813,30 @@ class DocumentPage(models.Model):
             page_number=self.page_number
         )
 
-    @property
+    @cached_property
     def document(self):
         return self.document_version.document
 
     def generate_image(self, *args, **kwargs):
         transformation_list = self.get_combined_transformation_list(*args, **kwargs)
-
-        cache_filename = '{}-{}'.format(
-            self.cache_filename, BaseTransformation.combine(transformation_list)
-        )
+        combined_cache_filename = BaseTransformation.combine(transformation_list)
 
         # Check is transformed image is available
-        logger.debug('transformations cache filename: %s', cache_filename)
+        logger.debug('transformations cache filename: %s', combined_cache_filename)
 
-        if not setting_disable_transformed_image_cache.value and storage_documentimagecache.exists(cache_filename):
+        if not setting_disable_transformed_image_cache.value and self.cache_partition.get_file(filename=combined_cache_filename):
             logger.debug(
-                'transformations cache file "%s" found', cache_filename
+                'transformations cache file "%s" found', combined_cache_filename
             )
         else:
             logger.debug(
-                'transformations cache file "%s" not found', cache_filename
+                'transformations cache file "%s" not found', combined_cache_filename
             )
             image = self.get_image(transformations=transformation_list)
-            with storage_documentimagecache.open(cache_filename, 'wb+') as file_object:
+            with self.cache_partition.create_file(filename=combined_cache_filename) as file_object:
                 file_object.write(image.getvalue())
 
-            self.cached_images.create(filename=cache_filename)
-
-        return cache_filename
+        return combined_cache_filename
 
     def get_absolute_url(self):
         return reverse('documents:document_page_view', args=(self.pk,))
@@ -913,7 +899,6 @@ class DocumentPage(models.Model):
             zoom_level = setting_zoom_max_level.value
 
         # Generate transformation hash
-
         transformation_list = []
 
         # Stored transformations first
@@ -940,42 +925,29 @@ class DocumentPage(models.Model):
         return transformation_list
 
     def get_image(self, transformations=None):
-        cache_filename = self.cache_filename
+        cache_filename = 'base_image'
         logger.debug('Page cache filename: %s', cache_filename)
 
-        if not setting_disable_base_image_cache.value and storage_documentimagecache.exists(cache_filename):
+        cache_file = self.cache_partition.get_file(filename=cache_filename)
+        if not setting_disable_base_image_cache.value and cache_file:
             logger.debug('Page cache file "%s" found', cache_filename)
 
             converter = converter_class(
-                file_object=storage_documentimagecache.open(cache_filename)
+                file_object=cache_file.open()
             )
             converter.seek(0)
         else:
             logger.debug('Page cache file "%s" not found', cache_filename)
 
-            try:
-                converter = converter_class(
-                    file_object=self.document_version.get_intermidiate_file()
-                )
-                converter.seek(page_number=self.page_number - 1)
+            converter = converter_class(
+                file_object=self.document_version.get_intermidiate_file()
+            )
+            converter.seek(page_number=self.page_number - 1)
 
-                page_image = converter.get_page()
+            page_image = converter.get_page()
 
-                # Since open "wb+" doesn't create files, check if the file
-                # exists, if not then create it
-                if not storage_documentimagecache.exists(cache_filename):
-                    storage_documentimagecache.save(name=cache_filename, content=ContentFile(content=''))
-
-                with storage_documentimagecache.open(cache_filename, 'wb+') as file_object:
-                    file_object.write(page_image.getvalue())
-            except Exception as exception:
-                # Cleanup in case of error
-                logger.error(
-                    'Error creating page cache file "%s"; %s',
-                    cache_filename, exception
-                )
-                storage_documentimagecache.delete(cache_filename)
-                raise
+            with self.cache_partition.create_file(filename=cache_filename) as file_object:
+                file_object.write(page_image.getvalue())
 
         for transformation in transformations:
             converter.transform(transformation=transformation)
@@ -983,9 +955,7 @@ class DocumentPage(models.Model):
         return converter.get_page()
 
     def invalidate_cache(self):
-        storage_documentimagecache.delete(self.cache_filename)
-        for cached_image in self.cached_images.all():
-            cached_image.delete()
+        self.cache_partition.purge()
 
     @property
     def is_in_trash(self):
@@ -1001,10 +971,10 @@ class DocumentPage(models.Model):
             document_version=self.document_version
         )
 
-    @property
+    @cached_property
     def uuid(self):
         """
-        Make cache UUID a mix of version ID and page ID to avoid using stale
+        Make cache UUID a mix of version UUID and page ID to avoid using stale
        images
         """
         return '{}-{}'.format(self.document_version.uuid, self.pk)
@@ -1029,17 +999,17 @@ class DocumentPage(models.Model):
         verbose_name = _('Document page cached image')
         verbose_name_plural = _('Document page cached images')
 
-    def delete(self, *args, **kwargs):
-        storage_documentimagecache.delete(self.filename)
-        return super(DocumentPageCachedImage, self).delete(*args, **kwargs)
+    #def delete(self, *args, **kwargs):
+    #    storage_documentimagecache.delete(self.filename)
+    #    return super(DocumentPageCachedImage, self).delete(*args, **kwargs)
 
     def natural_key(self):
         return (self.filename, self.document_page.natural_key())
     natural_key.dependencies = ['documents.DocumentPage']
 
-    def save(self, *args, **kwargs):
-        self.file_size = storage_documentimagecache.size(self.filename)
-        return super(DocumentPageCachedImage, self).save(*args, **kwargs)
+    #def save(self, *args, **kwargs):
+    #    self.file_size = storage_documentimagecache.size(self.filename)
+    #    return super(DocumentPageCachedImage, self).save(*args, **kwargs)
 
 
 class DocumentPageResult(DocumentPage):
@@ -1050,6 +1020,31 @@
         verbose_name_plural = _('Document pages')
 
 
+@python_2_unicode_compatible
+class DocumentTypeFilename(models.Model):
+    """
+    List of labels available to a specific document type for the
+    quick rename functionality
+    """
+    document_type = models.ForeignKey(
+        on_delete=models.CASCADE, related_name='filenames', to=DocumentType,
+        verbose_name=_('Document type')
+    )
+    filename = models.CharField(
+        db_index=True, max_length=128, verbose_name=_('Label')
+    )
+    enabled = models.BooleanField(default=True, verbose_name=_('Enabled'))
+
+    class Meta:
+        ordering = ('filename',)
+        unique_together = ('document_type', 'filename')
+        verbose_name = _('Quick label')
+        verbose_name_plural = _('Quick labels')
+
+    def __str__(self):
+        return self.filename
+
+
 @python_2_unicode_compatible
 class DuplicatedDocument(models.Model):
     document = models.ForeignKey(