Reorganize and sort models and managers according to Mayan's best practices.
Signed-off-by: Michael Price <loneviking72@gmail.com>
Committed by: Roberto Rosario
Parent: a172538dfc
Commit: 28aa0b913c
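The hunks below all apply the same kind of change. As a rough guide, the layout they converge on looks like the following sketch; it is not part of the commit, and the `ExampleModel` name and its fields are placeholders only: field definitions first, then the custom manager, then `Meta`, then `__str__()`, then the remaining methods in alphabetical order.

    from django.db import models
    from django.utils.encoding import python_2_unicode_compatible
    from django.utils.translation import ugettext_lazy as _


    @python_2_unicode_compatible
    class ExampleModel(models.Model):
        # Field definitions come first. Illustrative field only.
        label = models.CharField(max_length=128, verbose_name=_('Label'))

        # Manager assignment immediately after the fields.
        objects = models.Manager()

        # Meta before any methods.
        class Meta:
            ordering = ('label',)
            verbose_name = _('Example')
            verbose_name_plural = _('Examples')

        # __str__() first, remaining methods in alphabetical order.
        def __str__(self):
            return self.label

        def get_absolute_url(self):
            return '#'

        def save(self, *args, **kwargs):
            return super(ExampleModel, self).save(*args, **kwargs)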
@@ -13,6 +13,14 @@ logger = logging.getLogger(__name__)


class TransformationManager(models.Manager):
    def add_for_model(self, obj, transformation, arguments=None):
        content_type = ContentType.objects.get_for_model(obj)

        self.create(
            content_type=content_type, object_id=obj.pk,
            name=transformation.name, arguments=arguments
        )

    def copy(self, source, targets):
        """
        Copy transformation from source to all targets

@@ -102,11 +110,3 @@ class TransformationManager(models.Manager):
            return result
        else:
            return transformations

    def add_for_model(self, obj, transformation, arguments=None):
        content_type = ContentType.objects.get_for_model(obj)

        self.create(
            content_type=content_type, object_id=obj.pk,
            name=transformation.name, arguments=arguments
        )

@@ -52,6 +52,12 @@ class Transformation(models.Model):

    objects = TransformationManager()

    class Meta:
        ordering = ('order',)
        unique_together = ('content_type', 'object_id', 'order')
        verbose_name = _('Transformation')
        verbose_name_plural = _('Transformations')

    def __str__(self):
        return self.get_name_display()

@@ -63,9 +69,3 @@ class Transformation(models.Model):
        if last_order is not None:
            self.order = last_order + 1
        super(Transformation, self).save(*args, **kwargs)

    class Meta:
        ordering = ('order',)
        unique_together = ('content_type', 'object_id', 'order')
        verbose_name = _('Transformation')
        verbose_name_plural = _('Transformations')
@@ -63,6 +63,12 @@ class KeyManager(models.Manager):

        return io.BytesIO(decrypt_result.data)

    def private_keys(self):
        return self.filter(key_type=KEY_TYPE_SECRET)

    def public_keys(self):
        return self.filter(key_type=KEY_TYPE_PUBLIC)

    def receive_key(self, key_id):
        key_data = gpg_backend.recv_keys(
            keyserver=setting_keyserver.value, key_id=key_id

@@ -84,12 +90,6 @@ class KeyManager(models.Manager):

        return result

    def public_keys(self):
        return self.filter(key_type=KEY_TYPE_PUBLIC)

    def private_keys(self):
        return self.filter(key_type=KEY_TYPE_SECRET)

    def verify_file(self, file_object, signature_file=None, all_keys=False, key_fingerprint=None, key_id=None):
        keys = self._preload_keys(
            all_keys=all_keys, key_fingerprint=key_fingerprint, key_id=key_id

@@ -61,6 +61,9 @@ class Key(models.Model):
        verbose_name = _('Key')
        verbose_name_plural = _('Keys')

    def __str__(self):
        return '{} - {}'.format(self.key_id, self.user_id)

    def clean(self):
        import_results = gpg_backend.import_key(key_data=self.key_data)

@@ -93,9 +96,6 @@ class Key(models.Model):

        super(Key, self).save(*args, **kwargs)

    def __str__(self):
        return '{} - {}'.format(self.key_id, self.user_id)

    def sign_file(self, file_object, passphrase=None, clearsign=False, detached=False, binary=False, output=None):
        # WARNING: using clearsign=True and subsequent decryption corrupts the
        # file. Appears to be a problem in python-gnupg or gpg itself.
@@ -33,9 +33,25 @@ class Comment(models.Model):
        verbose_name=_('Date time submitted')
    )

    class Meta:
        get_latest_by = 'submit_date'
        ordering = ('-submit_date',)
        verbose_name = _('Comment')
        verbose_name_plural = _('Comments')

    def __str__(self):
        return self.comment

    def delete(self, *args, **kwargs):
        user = kwargs.pop('_user', None)
        super(Comment, self).delete(*args, **kwargs)
        if user:
            event_document_comment_delete.commit(
                actor=user, target=self.document
            )
        else:
            event_document_comment_delete.commit(target=self.document)

    def save(self, *args, **kwargs):
        user = kwargs.pop('_user', None) or self.user
        is_new = not self.pk

@@ -55,19 +71,3 @@ class Comment(models.Model):
            'Comment "%s" added to document "%s"', self.comment,
            self.document
        )

    def delete(self, *args, **kwargs):
        user = kwargs.pop('_user', None)
        super(Comment, self).delete(*args, **kwargs)
        if user:
            event_document_comment_delete.commit(
                actor=user, target=self.document
            )
        else:
            event_document_comment_delete.commit(target=self.document)

    class Meta:
        get_latest_by = 'submit_date'
        ordering = ('-submit_date',)
        verbose_name = _('Comment')
        verbose_name_plural = _('Comments')
@@ -9,13 +9,13 @@ class DocumentIndexInstanceNodeManager(models.Manager):


class IndexManager(models.Manager):
    def get_by_natural_key(self, name):
        return self.get(name=name)

    def index_document(self, document):
        for index in self.filter(enabled=True, document_types=document.document_type):
            index.index_document(document=document)

    def get_by_natural_key(self, name):
        return self.get(name=name)

    def rebuild(self):
        for index in self.all():
            index.rebuild()

@@ -68,21 +68,6 @@ class Index(models.Model):
        except IndexInstanceNode.DoesNotExist:
            return '#'

    def save(self, *args, **kwargs):
        """
        Automatically create the root index template node
        """
        super(Index, self).save(*args, **kwargs)
        IndexTemplateNode.objects.get_or_create(parent=None, index=self)

    @property
    def instance_root(self):
        return self.template_root.index_instance_nodes.get()

    @property
    def template_root(self):
        return self.node_templates.get(parent=None)

    def get_document_types_names(self):
        return ', '.join(
            [

@@ -94,6 +79,10 @@ class Index(models.Model):
        logger.debug('Index; Indexing document: %s', document)
        self.template_root.index_document(document=document)

    @property
    def instance_root(self):
        return self.template_root.index_instance_nodes.get()

    def rebuild(self):
        """
        Delete and reconstruct the index by deleting of all its instance nodes

@@ -118,8 +107,24 @@ class Index(models.Model):
            # associated with this index.
            self.index_document(document=document)

    def save(self, *args, **kwargs):
        """
        Automatically create the root index template node
        """
        super(Index, self).save(*args, **kwargs)
        IndexTemplateNode.objects.get_or_create(parent=None, index=self)

    @property
    def template_root(self):
        return self.node_templates.get(parent=None)


class IndexInstance(Index):
    class Meta:
        proxy = True
        verbose_name = _('Index instance')
        verbose_name_plural = _('Index instances')

    def get_instance_node_count(self):
        try:
            return self.instance_root.get_descendant_count()

@@ -132,11 +137,6 @@ class IndexInstance(Index):
        except IndexInstanceNode.DoesNotExist:
            return 0

    class Meta:
        proxy = True
        verbose_name = _('Index instance')
        verbose_name_plural = _('Index instances')


@python_2_unicode_compatible
class IndexTemplateNode(MPTTModel):

@@ -300,50 +300,11 @@ class IndexInstanceNode(MPTTModel):
    def __str__(self):
        return self.value

    def get_absolute_url(self):
        return reverse('indexing:index_instance_node_view', args=(self.pk,))

    @property
    def children(self):
        # Convenience method for serializer
        return self.get_children()

    def get_children_count(self):
        return self.get_children().count()

    def get_descendants_count(self):
        return self.get_descendants().count()

    def get_descendants_document_count(self, user):
        return AccessControlList.objects.filter_by_access(
            permission=permission_document_view, user=user,
            queryset=Document.objects.filter(
                index_instance_nodes__in=self.get_descendants(
                    include_self=True
                )
            )
        ).count()

    def get_item_count(self, user):
        if self.index_template_node.link_documents:
            queryset = AccessControlList.objects.filter_by_access(
                permission_document_view, user, queryset=self.documents
            )

            return queryset.count()
        else:
            return self.get_children().count()

    def get_full_path(self):
        result = []
        for node in self.get_ancestors(include_self=True):
            if node.is_root_node():
                result.append(force_text(self.index()))
            else:
                result.append(force_text(node))

        return ' / '.join(result)

    def delete_empty(self, acquire_lock=True):
        """
        The argument `acquire_lock` controls whether or not this method

@@ -369,6 +330,45 @@ class IndexInstanceNode(MPTTModel):
            if acquire_lock:
                lock.release()

    def get_absolute_url(self):
        return reverse('indexing:index_instance_node_view', args=(self.pk,))

    def get_children_count(self):
        return self.get_children().count()

    def get_descendants_count(self):
        return self.get_descendants().count()

    def get_descendants_document_count(self, user):
        return AccessControlList.objects.filter_by_access(
            permission=permission_document_view, user=user,
            queryset=Document.objects.filter(
                index_instance_nodes__in=self.get_descendants(
                    include_self=True
                )
            )
        ).count()

    def get_full_path(self):
        result = []
        for node in self.get_ancestors(include_self=True):
            if node.is_root_node():
                result.append(force_text(self.index()))
            else:
                result.append(force_text(node))

        return ' / '.join(result)

    def get_item_count(self, user):
        if self.index_template_node.link_documents:
            queryset = AccessControlList.objects.filter_by_access(
                permission_document_view, user, queryset=self.documents
            )

            return queryset.count()
        else:
            return self.get_children().count()

    def index(self):
        return IndexInstance.objects.get(pk=self.index_template_node.index.pk)
@@ -19,13 +19,13 @@ class DocumentPageContent(models.Model):

    objects = DocumentPageContentManager()

    def __str__(self):
        return force_text(self.document_page)

    class Meta:
        verbose_name = _('Document page content')
        verbose_name_plural = _('Document pages contents')

    def __str__(self):
        return force_text(self.document_page)


@python_2_unicode_compatible
class DocumentVersionParseError(models.Model):

@@ -38,10 +38,10 @@ class DocumentVersionParseError(models.Model):
    )
    result = models.TextField(blank=True, null=True, verbose_name=_('Result'))

    def __str__(self):
        return force_text(self.document_version)

    class Meta:
        ordering = ('datetime_submitted',)
        verbose_name = _('Document version parse error')
        verbose_name_plural = _('Document version parse errors')

    def __str__(self):
        return force_text(self.document_version)
@@ -28,11 +28,6 @@ class EmbeddedSignatureManager(models.Manager):
        else:
            return file_object

    def unsigned_document_versions(self):
        return DocumentVersion.objects.exclude(
            pk__in=self.values('document_version')
        )

    def sign_document_version(self, document_version, key, passphrase=None, user=None):
        temporary_file_object, temporary_filename = mkstemp()

@@ -53,3 +48,8 @@ class EmbeddedSignatureManager(models.Manager):
        os.unlink(temporary_filename)

        return new_version

    def unsigned_document_versions(self):
        return DocumentVersion.objects.exclude(
            pk__in=self.values('document_version')
        )
@@ -54,6 +54,11 @@ class Workflow(models.Model):

    objects = WorkflowManager()

    class Meta:
        ordering = ('label',)
        verbose_name = _('Workflow')
        verbose_name_plural = _('Workflows')

    def __str__(self):
        return self.label

@@ -123,11 +128,6 @@ class Workflow(models.Model):

        return diagram.pipe()

    class Meta:
        ordering = ('label',)
        verbose_name = _('Workflow')
        verbose_name_plural = _('Workflows')


@python_2_unicode_compatible
class WorkflowState(models.Model):

@@ -169,11 +169,6 @@ class WorkflowState(models.Model):
    def __str__(self):
        return self.label

    def save(self, *args, **kwargs):
        if self.initial:
            self.workflow.states.all().update(initial=False)
        return super(WorkflowState, self).save(*args, **kwargs)

    @property
    def entry_actions(self):
        return self.actions.filter(when=WORKFLOW_ACTION_ON_ENTRY)

@@ -207,6 +202,11 @@ class WorkflowState(models.Model):
            )
        ).distinct()

    def save(self, *args, **kwargs):
        if self.initial:
            self.workflow.states.all().update(initial=False)
        return super(WorkflowState, self).save(*args, **kwargs)


@python_2_unicode_compatible
class WorkflowStateAction(models.Model):

@@ -231,15 +231,15 @@ class WorkflowStateAction(models.Model):
        blank=True, verbose_name=_('Entry action data')
    )

    def __str__(self):
        return self.label

    class Meta:
        ordering = ('label',)
        unique_together = ('state', 'label')
        verbose_name = _('Workflow state action')
        verbose_name_plural = _('Workflow state actions')

    def __str__(self):
        return self.label

    def dumps(self, data):
        self.action_data = json.dumps(data)
        self.save()

@@ -260,12 +260,12 @@ class WorkflowStateAction(models.Model):
    def get_class(self):
        return import_string(self.action_path)

    def get_class_label(self):
        return self.get_class().label

    def get_class_instance(self):
        return self.get_class()(form_data=self.loads())

    def get_class_label(self):
        return self.get_class().label

    def loads(self):
        return json.loads(self.action_data)

@@ -287,9 +287,6 @@ class WorkflowTransition(models.Model):
        verbose_name=_('Destination state')
    )

    def __str__(self):
        return self.label

    class Meta:
        ordering = ('label',)
        unique_together = (

@@ -298,6 +295,9 @@ class WorkflowTransition(models.Model):
        verbose_name = _('Workflow transition')
        verbose_name_plural = _('Workflow transitions')

    def __str__(self):
        return self.label


@python_2_unicode_compatible
class WorkflowTransitionTriggerEvent(models.Model):

@@ -329,14 +329,15 @@ class WorkflowInstance(models.Model):
        verbose_name=_('Document')
    )

    class Meta:
        ordering = ('workflow',)
        unique_together = ('document', 'workflow')
        verbose_name = _('Workflow instance')
        verbose_name_plural = _('Workflow instances')

    def __str__(self):
        return force_text(self.workflow)

    def get_absolute_url(self):
        return reverse(
            'document_states:workflow_instance_detail', args=(str(self.pk),)
        )

    def do_transition(self, transition, user=None, comment=None):
        try:
            if transition in self.get_current_state().origin_transitions.all():

@@ -347,6 +348,11 @@ class WorkflowInstance(models.Model):
            # No initial state has been set for this workflow
            pass

    def get_absolute_url(self):
        return reverse(
            'document_states:workflow_instance_detail', args=(str(self.pk),)
        )

    def get_current_state(self):
        """
        Actual State - The current state of the workflow. If there are

@@ -419,12 +425,6 @@ class WorkflowInstance(models.Model):

        return WorkflowTransition.objects.none()

    class Meta:
        ordering = ('workflow',)
        unique_together = ('document', 'workflow')
        verbose_name = _('Workflow instance')
        verbose_name_plural = _('Workflow instances')


@python_2_unicode_compatible
class WorkflowInstanceLogEntry(models.Model):

@@ -452,14 +452,14 @@ class WorkflowInstanceLogEntry(models.Model):
    )
    comment = models.TextField(blank=True, verbose_name=_('Comment'))

    def __str__(self):
        return force_text(self.transition)

    class Meta:
        ordering = ('datetime',)
        verbose_name = _('Workflow instance log entry')
        verbose_name_plural = _('Workflow instance log entries')

    def __str__(self):
        return force_text(self.transition)

    def clean(self):
        if self.transition not in self.workflow_instance.get_transition_choices(_user=self.user):
            raise ValidationError(_('Not a valid transition choice.'))
@@ -91,6 +91,11 @@ class DocumentType(models.Model):

    objects = DocumentTypeManager()

    class Meta:
        ordering = ('label',)
        verbose_name = _('Document type')
        verbose_name_plural = _('Documents types')

    def __str__(self):
        return self.label

@@ -100,23 +105,15 @@ class DocumentType(models.Model):

        return super(DocumentType, self).delete(*args, **kwargs)

    @property
    def deleted_documents(self):
        return DeletedDocument.objects.filter(document_type=self)

    def get_absolute_url(self):
        return reverse(
            'documents:document_type_document_list', args=(self.pk,)
        )

    def natural_key(self):
        return (self.label,)

    class Meta:
        ordering = ('label',)
        verbose_name = _('Document type')
        verbose_name_plural = _('Documents types')

    @property
    def deleted_documents(self):
        return DeletedDocument.objects.filter(document_type=self)

    def get_document_count(self, user):
        queryset = AccessControlList.objects.filter_by_access(
            permission_document_view, user, queryset=self.documents

@@ -124,6 +121,9 @@ class DocumentType(models.Model):

        return queryset.count()

    def natural_key(self):
        return (self.label,)

    def new_document(self, file_object, label=None, description=None, language=None, _user=None):
        try:
            with transaction.atomic():

@@ -203,6 +203,9 @@ class Document(models.Model):
    def __str__(self):
        return self.label or ugettext('Document stub, id: %d') % self.pk

    def add_as_recent_document_for_user(self, user):
        return RecentDocument.objects.add_document_for_user(user, self)

    def delete(self, *args, **kwargs):
        to_trash = kwargs.pop('to_trash', True)

@@ -216,36 +219,6 @@ class Document(models.Model):

        return super(Document, self).delete(*args, **kwargs)

    def get_absolute_url(self):
        return reverse('documents:document_preview', args=(self.pk,))

    def natural_key(self):
        return (self.uuid,)
    natural_key.dependencies = ['documents.DocumentType']

    def save(self, *args, **kwargs):
        user = kwargs.pop('_user', None)
        _commit_events = kwargs.pop('_commit_events', True)
        new_document = not self.pk
        super(Document, self).save(*args, **kwargs)

        if new_document:
            if user:
                self.add_as_recent_document_for_user(user)
                event_document_create.commit(
                    actor=user, target=self, action_object=self.document_type
                )
            else:
                event_document_create.commit(
                    target=self, action_object=self.document_type
                )
        else:
            if _commit_events:
                event_document_properties_edit.commit(actor=user, target=self)

    def add_as_recent_document_for_user(self, user):
        return RecentDocument.objects.add_document_for_user(user, self)

    def exists(self):
        """
        Returns a boolean value that indicates if the document's

@@ -257,6 +230,9 @@ class Document(models.Model):
        else:
            return False

    def get_absolute_url(self):
        return reverse('documents:document_preview', args=(self.pk,))

    def get_api_image_url(self):
        latest_version = self.latest_version
        if latest_version:

@@ -268,6 +244,10 @@ class Document(models.Model):
        for document_version in self.versions.all():
            document_version.invalidate_cache()

    def natural_key(self):
        return (self.uuid,)
    natural_key.dependencies = ['documents.DocumentType']

    def new_version(self, file_object, comment=None, _user=None):
        logger.info('Creating new document version for document: %s', self)

@@ -290,6 +270,26 @@ class Document(models.Model):
        self.in_trash = False
        self.save()

    def save(self, *args, **kwargs):
        user = kwargs.pop('_user', None)
        _commit_events = kwargs.pop('_commit_events', True)
        new_document = not self.pk
        super(Document, self).save(*args, **kwargs)

        if new_document:
            if user:
                self.add_as_recent_document_for_user(user)
                event_document_create.commit(
                    actor=user, target=self, action_object=self.document_type
                )
            else:
                event_document_create.commit(
                    target=self, action_object=self.document_type
                )
        else:
            if _commit_events:
                event_document_properties_edit.commit(actor=user, target=self)

    def save_to_file(self, *args, **kwargs):
        return self.latest_version.save_to_file(*args, **kwargs)

@@ -417,6 +417,10 @@ class DocumentVersion(models.Model):
    def __str__(self):
        return self.get_rendered_string()

    @property
    def cache_filename(self):
        return 'document-version-{}'.format(self.uuid)

    def delete(self, *args, **kwargs):
        for page in self.pages.all():
            page.delete()

@@ -425,6 +429,24 @@ class DocumentVersion(models.Model):

        return super(DocumentVersion, self).delete(*args, **kwargs)

    def exists(self):
        """
        Returns a boolean value that indicates if the document's file
        exists in storage. Returns True if the document's file is verified to
        be in the document storage. This is a diagnostic flag to help users
        detect if the storage has desynchronized (ie: Amazon's S3).
        """
        return self.file.storage.exists(self.file.name)

    def fix_orientation(self):
        for page in self.pages.all():
            degrees = page.detect_orientation()
            if degrees:
                Transformation.objects.add_for_model(
                    obj=page, transformation=TransformationRotate,
                    arguments='{{"degrees": {}}}'.format(360 - degrees)
                )

    def get_absolute_url(self):
        return reverse('documents:document_version_view', args=(self.pk,))

@@ -507,28 +529,6 @@ class DocumentVersion(models.Model):
            sender=Document, instance=self.document
        )

    @property
    def cache_filename(self):
        return 'document-version-{}'.format(self.uuid)

    def exists(self):
        """
        Returns a boolean value that indicates if the document's file
        exists in storage. Returns True if the document's file is verified to
        be in the document storage. This is a diagnostic flag to help users
        detect if the storage has desynchronized (ie: Amazon's S3).
        """
        return self.file.storage.exists(self.file.name)

    def fix_orientation(self):
        for page in self.pages.all():
            degrees = page.detect_orientation()
            if degrees:
                Transformation.objects.add_for_model(
                    obj=page, transformation=TransformationRotate,
                    arguments='{{"degrees": {}}}'.format(360 - degrees)
                )

    def get_intermidiate_file(self):
        cache_filename = self.cache_filename
        logger.debug('Intermidiate filename: %s', cache_filename)

@@ -727,6 +727,11 @@ class DocumentPage(models.Model):
        verbose_name=_('Page number')
    )

    class Meta:
        ordering = ('page_number',)
        verbose_name = _('Document page')
        verbose_name_plural = _('Document pages')

    def __str__(self):
        return _(
            'Page %(page_num)d out of %(total_pages)d of %(document)s'

@@ -736,25 +741,13 @@ class DocumentPage(models.Model):
            'total_pages': self.document_version.pages.count()
        }

    def delete(self, *args, **kwargs):
        self.invalidate_cache()
        super(DocumentPage, self).delete(*args, **kwargs)

    def get_absolute_url(self):
        return reverse('documents:document_page_view', args=(self.pk,))

    class Meta:
        ordering = ('page_number',)
        verbose_name = _('Document page')
        verbose_name_plural = _('Document pages')

    @property
    def cache_filename(self):
        return 'page-cache-{}'.format(self.uuid)

    @property
    def document(self):
        return self.document_version.document

    def delete(self, *args, **kwargs):
        self.invalidate_cache()
        super(DocumentPage, self).delete(*args, **kwargs)

    def detect_orientation(self):
        with self.document_version.open() as file_object:

@@ -766,6 +759,10 @@ class DocumentPage(models.Model):
            page_number=self.page_number
        )

    @property
    def document(self):
        return self.document_version.document

    def generate_image(self, *args, **kwargs):
        # Convert arguments into transformations
        transformations = kwargs.get('transformations', [])

@@ -831,6 +828,16 @@ class DocumentPage(models.Model):

        return cache_filename

    def get_absolute_url(self):
        return reverse('documents:document_page_view', args=(self.pk,))

    def get_api_image_url(self):
        return reverse(
            'rest_api:documentpage-image', args=(
                self.document.pk, self.document_version.pk, self.pk
            )
        )

    def get_image(self, transformations=None):
        cache_filename = self.cache_filename
        logger.debug('Page cache filename: %s', cache_filename)

@@ -888,13 +895,6 @@ class DocumentPage(models.Model):
        """
        return '{}-{}'.format(self.document_version.uuid, self.pk)

    def get_api_image_url(self):
        return reverse(
            'rest_api:documentpage-image', args=(
                self.document.pk, self.document_version.pk, self.pk
            )
        )


class DocumentPageCachedImage(models.Model):
    document_page = models.ForeignKey(

@@ -940,6 +940,11 @@ class RecentDocument(models.Model):

    objects = RecentDocumentManager()

    class Meta:
        ordering = ('-datetime_accessed',)
        verbose_name = _('Recent document')
        verbose_name_plural = _('Recent documents')

    def __str__(self):
        return force_text(self.document)

@@ -947,11 +952,6 @@ class RecentDocument(models.Model):
        return self.document.natural_key() + self.user.natural_key()
    natural_key.dependencies = ['documents.Document', settings.AUTH_USER_MODEL]

    class Meta:
        ordering = ('-datetime_accessed',)
        verbose_name = _('Recent document')
        verbose_name_plural = _('Recent documents')


@python_2_unicode_compatible
class DuplicatedDocument(models.Model):

@@ -968,9 +968,9 @@ class DuplicatedDocument(models.Model):

    objects = DuplicatedDocumentManager()

    def __str__(self):
        return force_text(self.document)

    class Meta:
        verbose_name = _('Duplicated document')
        verbose_name_plural = _('Duplicated documents')

    def __str__(self):
        return force_text(self.document)
@@ -34,6 +34,11 @@ class SmartLink(models.Model):

    objects = SmartLinkManager()

    class Meta:
        ordering = ('label',)
        verbose_name = _('Smart link')
        verbose_name_plural = _('Smart links')

    def __str__(self):
        return self.label

@@ -91,11 +96,6 @@ class SmartLink(models.Model):
            smart_link=self, queryset=self.get_linked_document_for(document)
        )

    class Meta:
        ordering = ('label',)
        verbose_name = _('Smart link')
        verbose_name_plural = _('Smart links')


class ResolvedSmartLink(SmartLink):
    class Meta:

@@ -132,13 +132,13 @@ class SmartLinkCondition(models.Model):
    )
    enabled = models.BooleanField(default=True, verbose_name=_('Enabled'))

    class Meta:
        verbose_name = _('Link condition')
        verbose_name_plural = _('Link conditions')

    def __str__(self):
        return '%s foreign %s %s %s %s' % (
            self.get_inclusion_display(),
            self.foreign_document_data, _('not') if self.negated else '',
            self.get_operator_display(), self.expression
        )

    class Meta:
        verbose_name = _('Link condition')
        verbose_name_plural = _('Link conditions')
@@ -22,15 +22,13 @@ class Lock(models.Model):

    objects = LockManager()

    class Meta:
        verbose_name = _('Lock')
        verbose_name_plural = _('Locks')

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        if not self.timeout and not kwargs.get('timeout'):
            self.timeout = setting_default_lock_timeout.value

        super(Lock, self).save(*args, **kwargs)

    def release(self):
        try:
            lock = Lock.objects.get(

@@ -42,6 +40,8 @@ class Lock(models.Model):
        else:
            lock.delete()

    class Meta:
        verbose_name = _('Lock')
        verbose_name_plural = _('Locks')

    def save(self, *args, **kwargs):
        if not self.timeout and not kwargs.get('timeout'):
            self.timeout = setting_default_lock_timeout.value

        super(Lock, self).save(*args, **kwargs)
@@ -56,17 +56,13 @@ class UserMailer(models.Model):
    def __str__(self):
        return self.label

    def save(self, *args, **kwargs):
        if self.default:
            UserMailer.objects.select_for_update().exclude(pk=self.pk).update(
                default=False
            )

        return super(UserMailer, self).save(*args, **kwargs)

    def backend_label(self):
        return self.get_backend().label

    def dumps(self, data):
        self.backend_data = json.dumps(data)
        self.save()

    def get_backend(self):
        return import_string(self.backend_path)

@@ -78,9 +74,13 @@ class UserMailer(models.Model):
    def loads(self):
        return json.loads(self.backend_data)

    def dumps(self, data):
        self.backend_data = json.dumps(data)
        self.save()

    def save(self, *args, **kwargs):
        if self.default:
            UserMailer.objects.select_for_update().exclude(pk=self.pk).update(
                default=False
            )

        return super(UserMailer, self).save(*args, **kwargs)

    def send(self, subject='', body='', to=None, document=None, as_attachment=False):
        recipient_list = split_recipient_list(recipients=[to])
@@ -17,16 +17,16 @@ class StatisticResult(models.Model):
    )
    serialize_data = models.TextField(blank=True, verbose_name=_('Data'))

    class Meta:
        verbose_name = _('Statistics result')
        verbose_name_plural = _('Statistics results')

    def __str__(self):
        return self.slug

    def get_data(self):
        return json.loads(self.serialize_data)

    def store_data(self, data):
        self.serialize_data = json.dumps(data)
        self.save()

    def __str__(self):
        return self.slug

    class Meta:
        verbose_name = _('Statistics result')
        verbose_name_plural = _('Statistics results')
@@ -80,17 +80,14 @@ class MetadataType(models.Model):

    objects = MetadataTypeManager()

    def __str__(self):
        return self.label

    def natural_key(self):
        return (self.name,)

    class Meta:
        ordering = ('label',)
        verbose_name = _('Metadata type')
        verbose_name_plural = _('Metadata types')

    def __str__(self):
        return self.label

    @staticmethod
    def comma_splitter(string):
        splitter = shlex.shlex(string.encode('utf-8'), posix=True)

@@ -114,6 +111,9 @@ class MetadataType(models.Model):
            required=True, metadata_type=self
        ).exists()

    def natural_key(self):
        return (self.name,)

    def validate_value(self, document_type, value):
        # Check default
        if not value and self.default:

@@ -171,6 +171,13 @@ class DocumentMetadata(models.Model):
    def __str__(self):
        return force_text(self.metadata_type)

    def clean_fields(self, *args, **kwargs):
        super(DocumentMetadata, self).clean_fields(*args, **kwargs)

        self.value = self.metadata_type.validate_value(
            document_type=self.document.document_type, value=self.value
        )

    def delete(self, enforce_required=True, *args, **kwargs):
        """
        enforce_required prevents deletion of required metadata at the

@@ -184,6 +191,12 @@ class DocumentMetadata(models.Model):

        return super(DocumentMetadata, self).delete(*args, **kwargs)

    @property
    def is_required(self):
        return self.metadata_type.get_required_for(
            document_type=self.document.document_type
        )

    def save(self, *args, **kwargs):
        if self.metadata_type.pk not in self.document.document_type.metadata.values_list('metadata_type', flat=True):
            raise ValidationError(

@@ -192,19 +205,6 @@ class DocumentMetadata(models.Model):

        return super(DocumentMetadata, self).save(*args, **kwargs)

    def clean_fields(self, *args, **kwargs):
        super(DocumentMetadata, self).clean_fields(*args, **kwargs)

        self.value = self.metadata_type.validate_value(
            document_type=self.document.document_type, value=self.value
        )

    @property
    def is_required(self):
        return self.metadata_type.get_required_for(
            document_type=self.document.document_type
        )


@python_2_unicode_compatible
class DocumentTypeMetadataType(models.Model):

@@ -220,11 +220,11 @@ class DocumentTypeMetadataType(models.Model):

    objects = DocumentTypeMetadataTypeManager()

    def __str__(self):
        return force_text(self.metadata_type)

    class Meta:
        ordering = ('metadata_type',)
        unique_together = ('document_type', 'metadata_type')
        verbose_name = _('Document type metadata type options')
        verbose_name_plural = _('Document type metadata types options')

    def __str__(self):
        return force_text(self.metadata_type)
@@ -18,6 +18,34 @@ logger = logging.getLogger(__name__)


class DocumentPageOCRContentManager(models.Manager):
    def process_document_page(self, document_page):
        logger.info(
            'Processing page: %d of document version: %s',
            document_page.page_number, document_page.document_version
        )

        DocumentPageOCRContent = apps.get_model(
            app_label='ocr', model_name='DocumentPageOCRContent'
        )

        # TODO: Call task and wait
        cache_filename = document_page.generate_image()

        with cache_storage_backend.open(cache_filename) as file_object:
            document_page_content, created = DocumentPageOCRContent.objects.get_or_create(
                document_page=document_page
            )
            document_page_content.content = ocr_backend.execute(
                file_object=file_object,
                language=document_page.document.language
            )
            document_page_content.save()

        logger.info(
            'Finished processing page: %d of document version: %s',
            document_page.page_number, document_page.document_version
        )

    def process_document_version(self, document_version):
        logger.info('Starting OCR for document version: %s', document_version)
        logger.debug('document version: %d', document_version.pk)

@@ -55,31 +83,3 @@ class DocumentPageOCRContentManager(models.Manager):
        post_document_version_ocr.send(
            sender=document_version.__class__, instance=document_version
        )

    def process_document_page(self, document_page):
        logger.info(
            'Processing page: %d of document version: %s',
            document_page.page_number, document_page.document_version
        )

        DocumentPageOCRContent = apps.get_model(
            app_label='ocr', model_name='DocumentPageOCRContent'
        )

        # TODO: Call task and wait
        cache_filename = document_page.generate_image()

        with cache_storage_backend.open(cache_filename) as file_object:
            document_page_content, created = DocumentPageOCRContent.objects.get_or_create(
                document_page=document_page
            )
            document_page_content.content = ocr_backend.execute(
                file_object=file_object,
                language=document_page.document.language
            )
            document_page_content.save()

        logger.info(
            'Finished processing page: %d of document version: %s',
            document_page.page_number, document_page.document_version
        )

@@ -37,13 +37,13 @@ class DocumentPageOCRContent(models.Model):

    objects = DocumentPageOCRContentManager()

    def __str__(self):
        return force_text(self.document_page)

    class Meta:
        verbose_name = _('Document page OCR content')
        verbose_name_plural = _('Document pages OCR contents')

    def __str__(self):
        return force_text(self.document_page)


@python_2_unicode_compatible
class DocumentVersionOCRError(models.Model):

@@ -56,10 +56,10 @@ class DocumentVersionOCRError(models.Model):
    )
    result = models.TextField(blank=True, null=True, verbose_name=_('Result'))

    def __str__(self):
        return force_text(self.document_version)

    class Meta:
        ordering = ('datetime_submitted',)
        verbose_name = _('Document version OCR error')
        verbose_name_plural = _('Document version OCR errors')

    def __str__(self):
        return force_text(self.document_version)
@@ -21,6 +21,12 @@ class StoredPermission(models.Model):

    objects = StoredPermissionManager()

    class Meta:
        ordering = ('namespace',)
        unique_together = ('namespace', 'name')
        verbose_name = _('Permission')
        verbose_name_plural = _('Permissions')

    def __init__(self, *args, **kwargs):
        super(StoredPermission, self).__init__(*args, **kwargs)
        try:

@@ -39,12 +45,6 @@ class StoredPermission(models.Model):
    def natural_key(self):
        return (self.namespace, self.name)

    class Meta:
        ordering = ('namespace',)
        unique_together = ('namespace', 'name')
        verbose_name = _('Permission')
        verbose_name_plural = _('Permissions')

    def requester_has_this(self, user):
        if user.is_superuser or user.is_staff:
            logger.debug(

@@ -83,6 +83,11 @@ class Role(models.Model):

    objects = RoleManager()

    class Meta:
        ordering = ('label',)
        verbose_name = _('Role')
        verbose_name_plural = _('Roles')

    def __str__(self):
        return self.label

@@ -92,8 +97,3 @@ class Role(models.Model):
    def natural_key(self):
        return (self.label,)
    natural_key.dependencies = ['auth.Group', 'permissions.StoredPermission']

    class Meta:
        ordering = ('label',)
        verbose_name = _('Role')
        verbose_name_plural = _('Roles')
@@ -68,16 +68,56 @@ class Source(models.Model):
        verbose_name = _('Source')
        verbose_name_plural = _('Sources')

    def __str__(self):
        return '%s' % self.label

    @classmethod
    def class_fullname(cls):
        return force_text(dict(SOURCE_CHOICES).get(cls.source_type))

    def __str__(self):
        return '%s' % self.label

    def clean_up_upload_file(self, upload_file_object):
        pass
        # TODO: Should raise NotImplementedError?

    def fullname(self):
        return ' '.join([self.class_fullname(), '"%s"' % self.label])

    def handle_upload(self, file_object, description=None, document_type=None, expand=False, label=None, language=None, metadata_dict_list=None, metadata_dictionary=None, tag_ids=None, user=None):
        """
        Handle an upload request from a file object which may be an individual
        document or a compressed file containing multiple documents.
        """
        if not document_type:
            document_type = self.document_type

        kwargs = {
            'description': description, 'document_type': document_type,
            'label': label, 'language': language,
            'metadata_dict_list': metadata_dict_list,
            'metadata_dictionary': metadata_dictionary, 'tag_ids': tag_ids,
            'user': user
        }

        if expand:
            try:
                compressed_file = CompressedFile(file_object)
                for compressed_file_child in compressed_file.children():
                    kwargs.update({'label': force_text(compressed_file_child)})
                    self.upload_document(
                        file_object=File(compressed_file_child), **kwargs
                    )
                    compressed_file_child.close()

            except NotACompressedFile:
                logging.debug('Exception: NotACompressedFile')
                self.upload_document(file_object=file_object, **kwargs)
        else:
            self.upload_document(file_object=file_object, **kwargs)

    def get_upload_file_object(self, form_data):
        pass
        # TODO: Should raise NotImplementedError?

    def upload_document(self, file_object, document_type, description=None, label=None, language=None, metadata_dict_list=None, metadata_dictionary=None, tag_ids=None, user=None):
        """
        Upload an individual document

@@ -133,46 +173,6 @@ class Source(models.Model):
            document.delete(to_trash=False)
            raise

    def handle_upload(self, file_object, description=None, document_type=None, expand=False, label=None, language=None, metadata_dict_list=None, metadata_dictionary=None, tag_ids=None, user=None):
        """
        Handle an upload request from a file object which may be an individual
        document or a compressed file containing multiple documents.
        """
        if not document_type:
            document_type = self.document_type

        kwargs = {
            'description': description, 'document_type': document_type,
            'label': label, 'language': language,
            'metadata_dict_list': metadata_dict_list,
            'metadata_dictionary': metadata_dictionary, 'tag_ids': tag_ids,
            'user': user
        }

        if expand:
            try:
                compressed_file = CompressedFile(file_object)
                for compressed_file_child in compressed_file.children():
                    kwargs.update({'label': force_text(compressed_file_child)})
                    self.upload_document(
                        file_object=File(compressed_file_child), **kwargs
                    )
                    compressed_file_child.close()

            except NotACompressedFile:
                logging.debug('Exception: NotACompressedFile')
                self.upload_document(file_object=file_object, **kwargs)
        else:
            self.upload_document(file_object=file_object, **kwargs)

    def get_upload_file_object(self, form_data):
        pass
        # TODO: Should raise NotImplementedError?

    def clean_up_upload_file(self, upload_file_object):
        pass
        # TODO: Should raise NotImplementedError?


class InteractiveSource(Source):
    objects = InheritanceManager()

@@ -336,6 +336,19 @@ class StagingFolderSource(InteractiveSource):
        verbose_name = _('Staging folder')
        verbose_name_plural = _('Staging folders')

    def clean_up_upload_file(self, upload_file_object):
        if self.delete_after_upload:
            try:
                upload_file_object.extra_data.delete()
            except Exception as exception:
                logger.error(
                    'Error deleting staging file: %s; %s', upload_file_object,
                    exception
                )
                raise Exception(
                    _('Error deleting staging file; %s') % exception
                )

    def get_file(self, *args, **kwargs):
        return StagingFile(staging_folder=self, *args, **kwargs)

@@ -360,19 +373,6 @@ class StagingFolderSource(InteractiveSource):
            source=self, file=staging_file.as_file(), extra_data=staging_file
        )

    def clean_up_upload_file(self, upload_file_object):
        if self.delete_after_upload:
            try:
                upload_file_object.extra_data.delete()
            except Exception as exception:
                logger.error(
                    'Error deleting staging file: %s; %s', upload_file_object,
                    exception
                )
                raise Exception(
                    _('Error deleting staging file; %s') % exception
                )


class WebFormSource(InteractiveSource):
    """

@@ -387,20 +387,20 @@ class WebFormSource(InteractiveSource):
    is_interactive = True
    source_type = SOURCE_CHOICE_WEB_FORM

    objects = models.Manager()

    class Meta:
        verbose_name = _('Web form')
        verbose_name_plural = _('Web forms')

    # TODO: unify uncompress as an InteractiveSource field
    uncompress = models.CharField(
        choices=SOURCE_INTERACTIVE_UNCOMPRESS_CHOICES,
        help_text=_('Whether to expand or not compressed archives.'),
        max_length=1, verbose_name=_('Uncompress')
    )
    # Default path

    objects = models.Manager()

    class Meta:
        verbose_name = _('Web form')
        verbose_name_plural = _('Web forms')

    # Default path
    def get_upload_file_object(self, form_data):
        return SourceUploadedFile(source=self, file=form_data['file'])

@@ -440,9 +440,6 @@ class IntervalBaseModel(OutOfProcessSource):
        verbose_name = _('Interval source')
        verbose_name_plural = _('Interval sources')

    def _get_periodic_task_name(self, pk=None):
        return 'check_interval_source-%i' % (pk or self.pk)

    def _delete_periodic_task(self, pk=None):
        try:
            periodic_task = PeriodicTask.objects.get(

@@ -462,6 +459,14 @@ class IntervalBaseModel(OutOfProcessSource):
                self._get_periodic_task_name(pk)
            )

    def _get_periodic_task_name(self, pk=None):
        return 'check_interval_source-%i' % (pk or self.pk)

    def delete(self, *args, **kwargs):
        pk = self.pk
        super(IntervalBaseModel, self).delete(*args, **kwargs)
        self._delete_periodic_task(pk)

    def save(self, *args, **kwargs):
        new_source = not self.pk
        super(IntervalBaseModel, self).save(*args, **kwargs)

@@ -480,11 +485,6 @@ class IntervalBaseModel(OutOfProcessSource):
            kwargs=json.dumps({'source_id': self.pk})
        )

    def delete(self, *args, **kwargs):
        pk = self.pk
        super(IntervalBaseModel, self).delete(*args, **kwargs)
        self._delete_periodic_task(pk)


class EmailBaseModel(IntervalBaseModel):
    """
@@ -24,23 +24,23 @@ class Tag(models.Model):
        Document, related_name='tags', verbose_name=_('Documents')
    )

    def __str__(self):
        return self.label

    def get_absolute_url(self):
        return reverse('tags:tag_tagged_item_list', args=(str(self.pk),))

    class Meta:
        ordering = ('label',)
        verbose_name = _('Tag')
        verbose_name_plural = _('Tags')

    def __str__(self):
        return self.label

    def attach_to(self, document, user=None):
        self.documents.add(document)
        event_tag_attach.commit(
            action_object=self, actor=user, target=document
        )

    def get_absolute_url(self):
        return reverse('tags:tag_tagged_item_list', args=(str(self.pk),))

    def get_document_count(self, user):
        queryset = AccessControlList.objects.filter_by_access(
            permission_document_view, user, queryset=self.documents