PEP8 Cleanups.
This commit is contained in:
@@ -111,10 +111,14 @@ def urlquote(link=None, get=None):
|
||||
|
||||
Example:
|
||||
|
||||
urlquote('/wiki/Python_(programming_language)') --> '/wiki/Python_%28programming_language%29'
|
||||
urlquote('/mypath/', {'key': 'value'}) --> '/mypath/?key=value'
|
||||
urlquote('/mypath/', {'key': ['value1', 'value2']}) --> '/mypath/?key=value1&key=value2'
|
||||
urlquote({'key': ['value1', 'value2']}) --> 'key=value1&key=value2'
|
||||
urlquote('/wiki/Python_(programming_language)')
|
||||
--> '/wiki/Python_%28programming_language%29'
|
||||
urlquote('/mypath/', {'key': 'value'})
|
||||
--> '/mypath/?key=value'
|
||||
urlquote('/mypath/', {'key': ['value1', 'value2']})
|
||||
--> '/mypath/?key=value1&key=value2'
|
||||
urlquote({'key': ['value1', 'value2']})
|
||||
--> 'key=value1&key=value2'
|
||||
"""
|
||||
if get is None:
|
||||
get = []
|
||||
|
||||
@@ -15,13 +15,23 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='transformation',
|
||||
name='arguments',
|
||||
field=models.TextField(help_text='Enter the arguments for the transformation as a YAML dictionary. ie: {"degrees": 180}', blank=True, verbose_name='Arguments', validators=[converter.validators.YAMLValidator()]),
|
||||
field=models.TextField(
|
||||
help_text='Enter the arguments for the transformation as a '
|
||||
'YAML dictionary. ie: {"degrees": 180}', blank=True,
|
||||
verbose_name='Arguments',
|
||||
validators=[converter.validators.YAMLValidator()]
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='transformation',
|
||||
name='name',
|
||||
field=models.CharField(max_length=128, verbose_name='Name', choices=[('rotate', 'Rotate: degrees'), ('zoom', 'Zoom: percent'), ('resize', 'Resize: width, height')]),
|
||||
field=models.CharField(
|
||||
max_length=128, verbose_name='Name', choices=[
|
||||
('rotate', 'Rotate: degrees'), ('zoom', 'Zoom: percent'),
|
||||
('resize', 'Resize: width, height')
|
||||
]
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -15,7 +15,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='transformation',
|
||||
name='arguments',
|
||||
field=models.TextField(help_text='Enter the arguments for the transformation as a YAML dictionary. ie: {"degrees": 180}', blank=True, verbose_name='Arguments', validators=[converter.validators.YAMLValidator()]),
|
||||
field=models.TextField(
|
||||
help_text='Enter the arguments for the transformation as a '
|
||||
'YAML dictionary. ie: {"degrees": 180}', blank=True,
|
||||
verbose_name='Arguments',
|
||||
validators=[converter.validators.YAMLValidator()]
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -14,7 +14,11 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='transformation',
|
||||
name='order',
|
||||
field=models.PositiveIntegerField(default=0, help_text='Order in which the transformations will be executed.', db_index=True, verbose_name='Order', blank=True),
|
||||
field=models.PositiveIntegerField(
|
||||
default=0, help_text='Order in which the transformations '
|
||||
'will be executed.', db_index=True, verbose_name='Order',
|
||||
blank=True
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
|
||||
@@ -14,7 +14,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='transformation',
|
||||
name='order',
|
||||
field=models.PositiveIntegerField(default=0, help_text='Order in which the transformations will be executed. If left unchanged, an automatic order value will be assigned.', db_index=True, verbose_name='Order', blank=True),
|
||||
field=models.PositiveIntegerField(
|
||||
default=0, help_text='Order in which the transformations '
|
||||
'will be executed. If left unchanged, an automatic order '
|
||||
'value will be assigned.', db_index=True,
|
||||
verbose_name='Order', blank=True
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -14,7 +14,14 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='transformation',
|
||||
name='name',
|
||||
field=models.CharField(max_length=128, verbose_name='Name', choices=[('rotate', 'Rotate: degrees'), ('zoom', 'Zoom: percent'), ('resize', 'Resize: width, height'), ('crop', 'Crop: left, top, right, bottom')]),
|
||||
field=models.CharField(
|
||||
max_length=128, verbose_name='Name',
|
||||
choices=[
|
||||
('rotate', 'Rotate: degrees'), ('zoom', 'Zoom: percent'),
|
||||
('resize', 'Resize: width, height'),
|
||||
('crop', 'Crop: left, top, right, bottom')
|
||||
]
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -9,7 +9,9 @@ from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from documents.models import Document
|
||||
|
||||
from .events import event_document_comment_create, event_document_comment_delete
|
||||
from .events import (
|
||||
event_document_comment_create, event_document_comment_delete
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -43,16 +45,24 @@ class Comment(models.Model):
|
||||
event_document_comment_create.commit(
|
||||
actor=user, target=self.document
|
||||
)
|
||||
logger.info('Comment "%s" added to document "%s" by user "%s"', self.comment, self.document, user)
|
||||
logger.info(
|
||||
'Comment "%s" added to document "%s" by user "%s"',
|
||||
self.comment, self.document, user
|
||||
)
|
||||
else:
|
||||
event_document_comment_create.commit(target=self.document)
|
||||
logger.info('Comment "%s" added to document "%s"', self.comment, self.document)
|
||||
logger.info(
|
||||
'Comment "%s" added to document "%s"', self.comment,
|
||||
self.document
|
||||
)
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
user = kwargs.pop('_user', None)
|
||||
super(Comment, self).delete(*args, **kwargs)
|
||||
if user:
|
||||
event_document_comment_delete.commit(actor=user, target=self.document)
|
||||
event_document_comment_delete.commit(
|
||||
actor=user, target=self.document
|
||||
)
|
||||
else:
|
||||
event_document_comment_delete.commit(target=self.document)
|
||||
|
||||
|
||||
@@ -34,7 +34,9 @@ class DocumentCommentCreateView(SingleObjectCreateView):
|
||||
permission_comment_create, request.user, self.get_document()
|
||||
)
|
||||
|
||||
return super(DocumentCommentCreateView, self).dispatch(request, *args, **kwargs)
|
||||
return super(
|
||||
DocumentCommentCreateView, self
|
||||
).dispatch(request, *args, **kwargs)
|
||||
|
||||
def get_document(self):
|
||||
return get_object_or_404(Document, pk=self.kwargs['pk'])
|
||||
@@ -66,10 +68,13 @@ class DocumentCommentDeleteView(SingleObjectDeleteView):
|
||||
)
|
||||
except PermissionDenied:
|
||||
AccessControlList.objects.check_access(
|
||||
permission_comment_delete, request.user, self.get_object().document
|
||||
permission_comment_delete, request.user,
|
||||
self.get_object().document
|
||||
)
|
||||
|
||||
return super(DocumentCommentDeleteView, self).dispatch(request, *args, **kwargs)
|
||||
return super(
|
||||
DocumentCommentDeleteView, self
|
||||
).dispatch(request, *args, **kwargs)
|
||||
|
||||
def get_delete_extra_data(self):
|
||||
return {'_user': self.request.user}
|
||||
@@ -82,7 +87,8 @@ class DocumentCommentDeleteView(SingleObjectDeleteView):
|
||||
|
||||
def get_post_action_redirect(self):
|
||||
return reverse(
|
||||
'comments:comments_for_document', args=(self.get_object().document.pk,)
|
||||
'comments:comments_for_document',
|
||||
args=(self.get_object().document.pk,)
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -59,8 +59,8 @@ def task_index_document(self, document_id):
|
||||
IndexInstanceNode.objects.index_document(document)
|
||||
except OperationalError as exception:
|
||||
logger.warning(
|
||||
'Operational error while trying to index document: %s; %s',
|
||||
document, exception
|
||||
'Operational error while trying to index document: '
|
||||
'%s; %s', document, exception
|
||||
)
|
||||
lock.release()
|
||||
raise self.retry(exc=exception)
|
||||
|
||||
@@ -45,52 +45,93 @@ class IndexTestCase(TestCase):
|
||||
|
||||
# Create simple index template
|
||||
root = index.template_root
|
||||
index.node_templates.create(parent=root, expression='{{ document.metadata_value_of.test }}', link_documents=True)
|
||||
self.assertEqual(list(IndexTemplateNode.objects.values_list('expression', flat=True)), ['', '{{ document.metadata_value_of.test }}'])
|
||||
index.node_templates.create(
|
||||
parent=root, expression='{{ document.metadata_value_of.test }}',
|
||||
link_documents=True
|
||||
)
|
||||
self.assertEqual(
|
||||
list(
|
||||
IndexTemplateNode.objects.values_list('expression', flat=True)
|
||||
), ['', '{{ document.metadata_value_of.test }}']
|
||||
)
|
||||
|
||||
# Add document metadata value to trigger index node instance creation
|
||||
self.document.metadata.create(metadata_type=metadata_type, value='0001')
|
||||
self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), ['', '0001'])
|
||||
self.assertEqual(
|
||||
list(
|
||||
IndexInstanceNode.objects.values_list('value', flat=True)
|
||||
), ['', '0001']
|
||||
)
|
||||
|
||||
# Check that document is in instance node
|
||||
instance_node = IndexInstanceNode.objects.get(value='0001')
|
||||
self.assertQuerysetEqual(instance_node.documents.all(), [repr(self.document)])
|
||||
self.assertQuerysetEqual(
|
||||
instance_node.documents.all(), [repr(self.document)]
|
||||
)
|
||||
|
||||
# Change document metadata value to trigger index node instance update
|
||||
document_metadata = self.document.metadata.get(metadata_type=metadata_type)
|
||||
document_metadata.value = '0002'
|
||||
document_metadata.save()
|
||||
self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), ['', '0002'])
|
||||
self.assertEqual(
|
||||
list(
|
||||
IndexInstanceNode.objects.values_list('value', flat=True)
|
||||
), ['', '0002']
|
||||
)
|
||||
|
||||
# Check that document is in new instance node
|
||||
instance_node = IndexInstanceNode.objects.get(value='0002')
|
||||
self.assertQuerysetEqual(instance_node.documents.all(), [repr(self.document)])
|
||||
self.assertQuerysetEqual(
|
||||
instance_node.documents.all(), [repr(self.document)]
|
||||
)
|
||||
|
||||
# Check node instance is destoyed when no metadata is available
|
||||
self.document.metadata.get(metadata_type=metadata_type).delete()
|
||||
self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), [''])
|
||||
self.assertEqual(
|
||||
list(
|
||||
IndexInstanceNode.objects.values_list('value', flat=True)
|
||||
), ['']
|
||||
)
|
||||
|
||||
# Add document metadata value again to trigger index node instance creation
|
||||
self.document.metadata.create(metadata_type=metadata_type, value='0003')
|
||||
self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), ['', '0003'])
|
||||
self.document.metadata.create(
|
||||
metadata_type=metadata_type, value='0003'
|
||||
)
|
||||
self.assertEqual(
|
||||
list(
|
||||
IndexInstanceNode.objects.values_list('value', flat=True)
|
||||
), ['', '0003']
|
||||
)
|
||||
|
||||
# Check node instance is destroyed when no documents are contained
|
||||
self.document.delete()
|
||||
|
||||
# Document is in trash, index structure should remain unchanged
|
||||
self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), ['', '0003'])
|
||||
self.assertEqual(
|
||||
list(
|
||||
IndexInstanceNode.objects.values_list('value', flat=True)
|
||||
), ['', '0003']
|
||||
)
|
||||
|
||||
# Document deleted from, index structure should update
|
||||
self.document.delete()
|
||||
self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), [''])
|
||||
self.assertEqual(
|
||||
list(
|
||||
IndexInstanceNode.objects.values_list('value', flat=True)
|
||||
), ['']
|
||||
)
|
||||
|
||||
def test_rebuild_all_indexes(self):
|
||||
# Add metadata type and connect to document type
|
||||
metadata_type = MetadataType.objects.create(name='test', label='test')
|
||||
DocumentTypeMetadataType.objects.create(document_type=self.document_type, metadata_type=metadata_type)
|
||||
DocumentTypeMetadataType.objects.create(
|
||||
document_type=self.document_type, metadata_type=metadata_type
|
||||
)
|
||||
|
||||
# Add document metadata value
|
||||
self.document.metadata.create(metadata_type=metadata_type, value='0001')
|
||||
self.document.metadata.create(
|
||||
metadata_type=metadata_type, value='0001'
|
||||
)
|
||||
|
||||
# Create empty index
|
||||
index = Index.objects.create(label='test')
|
||||
@@ -98,12 +139,21 @@ class IndexTestCase(TestCase):
|
||||
|
||||
# Add our document type to the new index
|
||||
index.document_types.add(self.document_type)
|
||||
self.assertQuerysetEqual(index.document_types.all(), [repr(self.document_type)])
|
||||
self.assertQuerysetEqual(
|
||||
index.document_types.all(), [repr(self.document_type)]
|
||||
)
|
||||
|
||||
# Create simple index template
|
||||
root = index.template_root
|
||||
index.node_templates.create(parent=root, expression='{{ document.metadata_value_of.test }}', link_documents=True)
|
||||
self.assertEqual(list(IndexTemplateNode.objects.values_list('expression', flat=True)), ['', '{{ document.metadata_value_of.test }}'])
|
||||
index.node_templates.create(
|
||||
parent=root, expression='{{ document.metadata_value_of.test }}',
|
||||
link_documents=True
|
||||
)
|
||||
self.assertEqual(
|
||||
list(
|
||||
IndexTemplateNode.objects.values_list('expression', flat=True)
|
||||
), ['', '{{ document.metadata_value_of.test }}']
|
||||
)
|
||||
|
||||
# There should be no index instances
|
||||
self.assertEqual(list(IndexInstanceNode.objects.all()), [])
|
||||
@@ -113,4 +163,6 @@ class IndexTestCase(TestCase):
|
||||
|
||||
# Check that document is in instance node
|
||||
instance_node = IndexInstanceNode.objects.get(value='0001')
|
||||
self.assertQuerysetEqual(instance_node.documents.all(), [repr(self.document)])
|
||||
self.assertQuerysetEqual(
|
||||
instance_node.documents.all(), [repr(self.document)]
|
||||
)
|
||||
|
||||
@@ -16,10 +16,32 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='DocumentVersionSignature',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('signature_file', models.FileField(storage=storage.backends.filebasedstorage.FileBasedStorage(), upload_to=document_signatures.models.upload_to, blank=True, editable=False, null=True, verbose_name='Signature file')),
|
||||
('has_embedded_signature', models.BooleanField(default=False, verbose_name='Has embedded signature', editable=False)),
|
||||
('document_version', models.ForeignKey(editable=False, to='documents.DocumentVersion', verbose_name='Document version')),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'signature_file', models.FileField(
|
||||
storage=storage.backends.filebasedstorage.FileBasedStorage(),
|
||||
upload_to=document_signatures.models.upload_to,
|
||||
blank=True, editable=False, null=True,
|
||||
verbose_name='Signature file'
|
||||
)
|
||||
),
|
||||
(
|
||||
'has_embedded_signature', models.BooleanField(
|
||||
default=False, verbose_name='Has embedded signature',
|
||||
editable=False
|
||||
)
|
||||
),
|
||||
(
|
||||
'document_version', models.ForeignKey(
|
||||
editable=False, to='documents.DocumentVersion',
|
||||
verbose_name='Document version'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Document version signature',
|
||||
|
||||
@@ -33,7 +33,9 @@ def document_verify(request, document_pk):
|
||||
document = get_object_or_404(Document, pk=document_pk)
|
||||
|
||||
try:
|
||||
Permission.check_permissions(request.user, (permission_document_verify,))
|
||||
Permission.check_permissions(
|
||||
request.user, (permission_document_verify,)
|
||||
)
|
||||
except PermissionDenied:
|
||||
AccessControlList.objects.check_access(permission_document_verify, request.user, document)
|
||||
|
||||
|
||||
@@ -37,4 +37,6 @@ class WorkflowInstanceTransitionForm(forms.Form):
|
||||
self.fields['transition'].choices = workflow.get_transition_choices().values_list('pk', 'label')
|
||||
|
||||
transition = forms.ChoiceField(label=_('Transition'))
|
||||
comment = forms.CharField(label=_('Comment'), required=False, widget=forms.widgets.Textarea())
|
||||
comment = forms.CharField(
|
||||
label=_('Comment'), required=False, widget=forms.widgets.Textarea()
|
||||
)
|
||||
|
||||
@@ -16,9 +16,24 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='Workflow',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('label', models.CharField(unique=True, max_length=255, verbose_name='Label')),
|
||||
('document_types', models.ManyToManyField(related_name='workflows', verbose_name='Document types', to='documents.DocumentType')),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'label', models.CharField(
|
||||
unique=True, max_length=255, verbose_name='Label'
|
||||
)
|
||||
),
|
||||
(
|
||||
'document_types', models.ManyToManyField(
|
||||
related_name='workflows',
|
||||
verbose_name='Document types',
|
||||
to='documents.DocumentType'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Workflow',
|
||||
@@ -29,9 +44,24 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='WorkflowInstance',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('document', models.ForeignKey(related_name='workflows', verbose_name='Document', to='documents.Document')),
|
||||
('workflow', models.ForeignKey(related_name='instances', verbose_name='Workflow', to='document_states.Workflow')),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'document', models.ForeignKey(
|
||||
related_name='workflows', verbose_name='Document',
|
||||
to='documents.Document'
|
||||
)
|
||||
),
|
||||
(
|
||||
'workflow', models.ForeignKey(
|
||||
related_name='instances', verbose_name='Workflow',
|
||||
to='document_states.Workflow'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Workflow instance',
|
||||
@@ -42,9 +72,23 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='WorkflowInstanceLogEntry',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('datetime', models.DateTimeField(auto_now_add=True, verbose_name='Datetime', db_index=True)),
|
||||
('comment', models.TextField(verbose_name='Comment', blank=True)),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'datetime', models.DateTimeField(
|
||||
auto_now_add=True, verbose_name='Datetime',
|
||||
db_index=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'comment', models.TextField(
|
||||
verbose_name='Comment', blank=True
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Workflow instance log entry',
|
||||
@@ -55,10 +99,30 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='WorkflowState',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('label', models.CharField(max_length=255, verbose_name='Label')),
|
||||
('initial', models.BooleanField(default=False, help_text='Select if this will be the state with which you want the workflow to start in. Only one state can be the initial state.', verbose_name='Initial')),
|
||||
('workflow', models.ForeignKey(related_name='states', verbose_name='Workflow', to='document_states.Workflow')),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'label', models.CharField(
|
||||
max_length=255, verbose_name='Label'
|
||||
)
|
||||
),
|
||||
(
|
||||
'initial', models.BooleanField(
|
||||
default=False,
|
||||
help_text='Select if this will be the state with which you want the workflow to start in. Only one state can be the initial state.',
|
||||
verbose_name='Initial'
|
||||
)
|
||||
),
|
||||
(
|
||||
'workflow', models.ForeignKey(
|
||||
related_name='states', verbose_name='Workflow',
|
||||
to='document_states.Workflow'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Workflow state',
|
||||
@@ -69,11 +133,37 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='WorkflowTransition',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('label', models.CharField(max_length=255, verbose_name='Label')),
|
||||
('destination_state', models.ForeignKey(related_name='destination_transitions', verbose_name='Destination state', to='document_states.WorkflowState')),
|
||||
('origin_state', models.ForeignKey(related_name='origin_transitions', verbose_name='Origin state', to='document_states.WorkflowState')),
|
||||
('workflow', models.ForeignKey(related_name='transitions', verbose_name='Workflow', to='document_states.Workflow')),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'label', models.CharField(
|
||||
max_length=255, verbose_name='Label'
|
||||
)
|
||||
),
|
||||
(
|
||||
'destination_state', models.ForeignKey(
|
||||
related_name='destination_transitions',
|
||||
verbose_name='Destination state',
|
||||
to='document_states.WorkflowState'
|
||||
)
|
||||
),
|
||||
(
|
||||
'origin_state', models.ForeignKey(
|
||||
related_name='origin_transitions',
|
||||
verbose_name='Origin state',
|
||||
to='document_states.WorkflowState'
|
||||
)
|
||||
),
|
||||
(
|
||||
'workflow', models.ForeignKey(
|
||||
related_name='transitions', verbose_name='Workflow',
|
||||
to='document_states.Workflow'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Workflow transition',
|
||||
@@ -83,7 +173,9 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='workflowtransition',
|
||||
unique_together=set([('workflow', 'label', 'origin_state', 'destination_state')]),
|
||||
unique_together=set(
|
||||
[('workflow', 'label', 'origin_state', 'destination_state')]
|
||||
),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='workflowstate',
|
||||
@@ -92,19 +184,27 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='workflowinstancelogentry',
|
||||
name='transition',
|
||||
field=models.ForeignKey(verbose_name='Transition', to='document_states.WorkflowTransition'),
|
||||
field=models.ForeignKey(
|
||||
verbose_name='Transition',
|
||||
to='document_states.WorkflowTransition'
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowinstancelogentry',
|
||||
name='user',
|
||||
field=models.ForeignKey(verbose_name='User', to=settings.AUTH_USER_MODEL),
|
||||
field=models.ForeignKey(
|
||||
verbose_name='User', to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='workflowinstancelogentry',
|
||||
name='workflow_instance',
|
||||
field=models.ForeignKey(related_name='log_entries', verbose_name='Workflow instance', to='document_states.WorkflowInstance'),
|
||||
field=models.ForeignKey(
|
||||
related_name='log_entries', verbose_name='Workflow instance',
|
||||
to='document_states.WorkflowInstance'
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -13,18 +13,31 @@ class Migration(migrations.Migration):
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='document',
|
||||
options={'ordering': ('-date_added',), 'verbose_name': 'Document', 'verbose_name_plural': 'Documents'},
|
||||
options={
|
||||
'ordering': ('-date_added',), 'verbose_name': 'Document',
|
||||
'verbose_name_plural': 'Documents'
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='documentpage',
|
||||
options={'ordering': ('page_number',), 'verbose_name': 'Document page', 'verbose_name_plural': 'Document pages'},
|
||||
options={
|
||||
'ordering': ('page_number',), 'verbose_name': 'Document page',
|
||||
'verbose_name_plural': 'Document pages'
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='documenttype',
|
||||
options={'ordering': ('name',), 'verbose_name': 'Document type', 'verbose_name_plural': 'Documents types'},
|
||||
options={
|
||||
'ordering': ('name',), 'verbose_name': 'Document type',
|
||||
'verbose_name_plural': 'Documents types'
|
||||
},
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name='documenttypefilename',
|
||||
options={'ordering': ('filename',), 'verbose_name': 'Document type quick rename filename', 'verbose_name_plural': 'Document types quick rename filenames'},
|
||||
options={
|
||||
'ordering': ('filename',),
|
||||
'verbose_name': 'Document type quick rename filename',
|
||||
'verbose_name_plural': 'Document types quick rename filenames'
|
||||
},
|
||||
),
|
||||
]
|
||||
|
||||
@@ -14,31 +14,49 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='documenttype',
|
||||
name='delete_time_period',
|
||||
field=models.PositiveIntegerField(default=30, verbose_name='Delete time period'),
|
||||
field=models.PositiveIntegerField(
|
||||
default=30, verbose_name='Delete time period'
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='documenttype',
|
||||
name='delete_time_unit',
|
||||
field=models.CharField(default='days', max_length=8, verbose_name='Delete time unit', choices=[('days', 'Days'), ('hours', 'Hours'), ('minutes', 'Minutes'), ('seconds', 'Seconds')]),
|
||||
field=models.CharField(
|
||||
default='days', max_length=8, verbose_name='Delete time unit',
|
||||
choices=[
|
||||
('days', 'Days'), ('hours', 'Hours'),
|
||||
('minutes', 'Minutes'), ('seconds', 'Seconds')
|
||||
]
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='documenttype',
|
||||
name='trash_time_period',
|
||||
field=models.PositiveIntegerField(null=True, verbose_name='Trash time period', blank=True),
|
||||
field=models.PositiveIntegerField(
|
||||
null=True, verbose_name='Trash time period', blank=True
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='documenttype',
|
||||
name='trash_time_unit',
|
||||
field=models.CharField(blank=True, max_length=8, null=True, verbose_name='Trash time unit', choices=[('days', 'Days'), ('hours', 'Hours'), ('minutes', 'Minutes'), ('seconds', 'Seconds')]),
|
||||
field=models.CharField(
|
||||
blank=True, max_length=8, null=True,
|
||||
verbose_name='Trash time unit', choices=[
|
||||
('days', 'Days'), ('hours', 'Hours'),
|
||||
('minutes', 'Minutes'), ('seconds', 'Seconds')
|
||||
]
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='document',
|
||||
name='deleted_date_time',
|
||||
field=models.DateTimeField(verbose_name='Date and time trashed', blank=True),
|
||||
field=models.DateTimeField(
|
||||
verbose_name='Date and time trashed', blank=True
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -14,31 +14,53 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='document',
|
||||
name='deleted_date_time',
|
||||
field=models.DateTimeField(null=True, verbose_name='Date and time trashed', blank=True),
|
||||
field=models.DateTimeField(
|
||||
null=True, verbose_name='Date and time trashed', blank=True
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='documenttype',
|
||||
name='delete_time_period',
|
||||
field=models.PositiveIntegerField(default=30, help_text='Amount of time after which documents of this type in the trash will be deleted.', verbose_name='Delete time period'),
|
||||
field=models.PositiveIntegerField(
|
||||
default=30, help_text='Amount of time after which documents '
|
||||
'of this type in the trash will be deleted.',
|
||||
verbose_name='Delete time period'
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='documenttype',
|
||||
name='delete_time_unit',
|
||||
field=models.CharField(default='days', max_length=8, verbose_name='Delete time unit', choices=[('days', 'Days'), ('hours', 'Hours'), ('minutes', 'Minutes')]),
|
||||
field=models.CharField(
|
||||
default='days', max_length=8, verbose_name='Delete time unit',
|
||||
choices=[
|
||||
('days', 'Days'), ('hours', 'Hours'),
|
||||
('minutes', 'Minutes')
|
||||
]
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='documenttype',
|
||||
name='trash_time_period',
|
||||
field=models.PositiveIntegerField(help_text='Amount of time after which documents of this type will be moved to the trash.', null=True, verbose_name='Trash time period', blank=True),
|
||||
field=models.PositiveIntegerField(
|
||||
help_text='Amount of time after which documents of this type '
|
||||
'will be moved to the trash.', null=True,
|
||||
verbose_name='Trash time period', blank=True
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='documenttype',
|
||||
name='trash_time_unit',
|
||||
field=models.CharField(blank=True, max_length=8, null=True, verbose_name='Trash time unit', choices=[('days', 'Days'), ('hours', 'Hours'), ('minutes', 'Minutes')]),
|
||||
field=models.CharField(
|
||||
blank=True, max_length=8, null=True,
|
||||
verbose_name='Trash time unit', choices=[
|
||||
('days', 'Days'), ('hours', 'Hours'),
|
||||
('minutes', 'Minutes')
|
||||
]
|
||||
),
|
||||
preserve_default=True,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -13,7 +13,10 @@ class Migration(migrations.Migration):
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='documenttype',
|
||||
options={'ordering': ('label',), 'verbose_name': 'Document type', 'verbose_name_plural': 'Documents types'},
|
||||
options={
|
||||
'ordering': ('label',), 'verbose_name': 'Document type',
|
||||
'verbose_name_plural': 'Documents types'
|
||||
},
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='documenttype',
|
||||
|
||||
@@ -40,7 +40,9 @@ class DocumentPageSerializer(serializers.HyperlinkedModelSerializer):
|
||||
class Meta:
|
||||
extra_kwargs = {
|
||||
'url': {'view_name': 'rest_api:documentpage-detail'},
|
||||
'document_version': {'view_name': 'rest_api:documentversion-detail'}
|
||||
'document_version': {
|
||||
'view_name': 'rest_api:documentversion-detail'
|
||||
}
|
||||
}
|
||||
model = DocumentPage
|
||||
|
||||
@@ -68,7 +70,9 @@ class DocumentTypeSerializer(serializers.HyperlinkedModelSerializer):
|
||||
|
||||
class DocumentVersionSerializer(serializers.HyperlinkedModelSerializer):
|
||||
pages = DocumentPageSerializer(many=True, required=False, read_only=True)
|
||||
revert = serializers.HyperlinkedIdentityField(view_name='rest_api:documentversion-revert')
|
||||
revert = serializers.HyperlinkedIdentityField(
|
||||
view_name='rest_api:documentversion-revert'
|
||||
)
|
||||
|
||||
class Meta:
|
||||
extra_kwargs = {
|
||||
@@ -103,7 +107,9 @@ class NewDocumentVersionSerializer(serializers.Serializer):
|
||||
|
||||
class DeletedDocumentSerializer(serializers.HyperlinkedModelSerializer):
|
||||
document_type_label = serializers.SerializerMethodField()
|
||||
restore = serializers.HyperlinkedIdentityField(view_name='rest_api:deleteddocument-restore')
|
||||
restore = serializers.HyperlinkedIdentityField(
|
||||
view_name='rest_api:deleteddocument-restore'
|
||||
)
|
||||
|
||||
def get_document_type_label(self, instance):
|
||||
return instance.document_type.label
|
||||
@@ -141,9 +147,9 @@ class DocumentSerializer(serializers.HyperlinkedModelSerializer):
|
||||
'url': {'view_name': 'rest_api:document-detail'}
|
||||
}
|
||||
fields = (
|
||||
'date_added', 'description', 'document_type', 'document_type_label',
|
||||
'id', 'label', 'language', 'latest_version', 'url', 'uuid',
|
||||
'versions',
|
||||
'date_added', 'description', 'document_type',
|
||||
'document_type_label', 'id', 'label', 'language',
|
||||
'latest_version', 'url', 'uuid', 'versions',
|
||||
)
|
||||
model = Document
|
||||
|
||||
@@ -155,8 +161,12 @@ class NewDocumentSerializer(serializers.ModelSerializer):
|
||||
document = Document.objects.create(
|
||||
description=self.validated_data.get('description', ''),
|
||||
document_type=self.validated_data['document_type'],
|
||||
label=self.validated_data.get('label', unicode(self.validated_data['file'])),
|
||||
language=self.validated_data.get('language', setting_language.value)
|
||||
label=self.validated_data.get(
|
||||
'label', unicode(self.validated_data['file'])
|
||||
),
|
||||
language=self.validated_data.get(
|
||||
'language', setting_language.value
|
||||
)
|
||||
)
|
||||
document.save(_user=_user)
|
||||
|
||||
|
||||
@@ -100,7 +100,9 @@ def document_html_widget(document_page, click_view=None, click_view_arguments=No
|
||||
alt_text = _('Document page image')
|
||||
|
||||
if not document_page:
|
||||
return mark_safe('<span class="fa-stack fa-lg"><i class="fa fa-file-o fa-stack-2x"></i><i class="fa fa-question fa-stack-1x text-danger"></i></span>')
|
||||
return mark_safe(
|
||||
'<span class="fa-stack fa-lg"><i class="fa fa-file-o fa-stack-2x"></i><i class="fa fa-question fa-stack-1x text-danger"></i></span>'
|
||||
)
|
||||
|
||||
document = document_page.document
|
||||
|
||||
@@ -118,7 +120,8 @@ def document_html_widget(document_page, click_view=None, click_view_arguments=No
|
||||
query_string = urlencode(query_dict)
|
||||
|
||||
preview_view = '%s?%s' % (
|
||||
reverse('rest_api:documentpage-image', args=(document_page.pk,)), query_string
|
||||
reverse('rest_api:documentpage-image', args=(document_page.pk,)),
|
||||
query_string
|
||||
)
|
||||
|
||||
result.append(
|
||||
|
||||
@@ -18,7 +18,9 @@ class EventsApp(MayanAppConfig):
|
||||
def ready(self):
|
||||
super(EventsApp, self).ready()
|
||||
|
||||
SourceColumn(source=Action, label=_('Timestamp'), attribute='timestamp')
|
||||
SourceColumn(
|
||||
source=Action, label=_('Timestamp'), attribute='timestamp'
|
||||
)
|
||||
SourceColumn(source=Action, label=_('Actor'), attribute='actor')
|
||||
SourceColumn(
|
||||
source=Action, label=_('Verb'),
|
||||
|
||||
@@ -26,7 +26,9 @@ class Event(object):
|
||||
model = apps.get_model('events', 'EventType')
|
||||
|
||||
if not self.event_type:
|
||||
self.event_type, created = model.objects.get_or_create(name=self.name)
|
||||
self.event_type, created = model.objects.get_or_create(
|
||||
name=self.name
|
||||
)
|
||||
|
||||
action.send(
|
||||
actor or target, actor=actor, verb=self.name,
|
||||
|
||||
@@ -10,7 +10,9 @@ from .permissions import permission_events_view
|
||||
|
||||
def get_kwargs_factory(variable_name):
|
||||
def get_kwargs(context):
|
||||
content_type = ContentType.objects.get_for_model(context[variable_name])
|
||||
content_type = ContentType.objects.get_for_model(
|
||||
context[variable_name]
|
||||
)
|
||||
return {
|
||||
'app_label': '"{}"'.format(content_type.app_label),
|
||||
'model': '"{}"'.format(content_type.model),
|
||||
|
||||
@@ -13,8 +13,17 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='EventType',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('name', models.CharField(unique=True, max_length=64, verbose_name='Name')),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'name', models.CharField(
|
||||
unique=True, max_length=64, verbose_name='Name'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Event type',
|
||||
|
||||
@@ -14,11 +14,35 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='SmartLink',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('title', models.CharField(max_length=96, verbose_name='Title')),
|
||||
('dynamic_title', models.CharField(help_text='This expression will be evaluated against the current selected document.', max_length=96, verbose_name='Dynamic title', blank=True)),
|
||||
('enabled', models.BooleanField(default=True, verbose_name='Enabled')),
|
||||
('document_types', models.ManyToManyField(to='documents.DocumentType', verbose_name='Document types')),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'title', models.CharField(
|
||||
max_length=96, verbose_name='Title'
|
||||
)
|
||||
),
|
||||
(
|
||||
'dynamic_title', models.CharField(
|
||||
help_text='This expression will be evaluated against the current selected document.',
|
||||
max_length=96, verbose_name='Dynamic title',
|
||||
blank=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'enabled', models.BooleanField(
|
||||
default=True, verbose_name='Enabled'
|
||||
)
|
||||
),
|
||||
(
|
||||
'document_types', models.ManyToManyField(
|
||||
to='documents.DocumentType',
|
||||
verbose_name='Document types'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Smart link',
|
||||
@@ -29,14 +53,69 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='SmartLinkCondition',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('inclusion', models.CharField(default='&', help_text='The inclusion is ignored for the first item.', max_length=16, choices=[('&', 'and'), ('|', 'or')])),
|
||||
('foreign_document_data', models.CharField(help_text='This represents the metadata of all other documents.', max_length=128, verbose_name='Foreign document attribute')),
|
||||
('operator', models.CharField(max_length=16, choices=[('exact', 'is equal to'), ('iexact', 'is equal to (case insensitive)'), ('contains', 'contains'), ('icontains', 'contains (case insensitive)'), ('in', 'is in'), ('gt', 'is greater than'), ('gte', 'is greater than or equal to'), ('lt', 'is less than'), ('lte', 'is less than or equal to'), ('startswith', 'starts with'), ('istartswith', 'starts with (case insensitive)'), ('endswith', 'ends with'), ('iendswith', 'ends with (case insensitive)'), ('regex', 'is in regular expression'), ('iregex', 'is in regular expression (case insensitive)')])),
|
||||
('expression', models.TextField(help_text='This expression will be evaluated against the current document.', verbose_name='Expression')),
|
||||
('negated', models.BooleanField(default=False, help_text='Inverts the logic of the operator.', verbose_name='Negated')),
|
||||
('enabled', models.BooleanField(default=True, verbose_name='Enabled')),
|
||||
('smart_link', models.ForeignKey(related_name='conditions', verbose_name='Smart link', to='linking.SmartLink')),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'inclusion', models.CharField(
|
||||
default='&',
|
||||
help_text='The inclusion is ignored for the first item.',
|
||||
max_length=16, choices=[('&', 'and'), ('|', 'or')]
|
||||
)
|
||||
),
|
||||
(
|
||||
'foreign_document_data', models.CharField(
|
||||
help_text='This represents the metadata of all other documents.',
|
||||
max_length=128,
|
||||
verbose_name='Foreign document attribute'
|
||||
)
|
||||
),
|
||||
(
|
||||
'operator', models.CharField(
|
||||
max_length=16, choices=[
|
||||
('exact', 'is equal to'),
|
||||
('iexact', 'is equal to (case insensitive)'),
|
||||
('contains', 'contains'),
|
||||
('icontains', 'contains (case insensitive)'),
|
||||
('in', 'is in'), ('gt', 'is greater than'),
|
||||
('gte', 'is greater than or equal to'),
|
||||
('lt', 'is less than'),
|
||||
('lte', 'is less than or equal to'),
|
||||
('startswith', 'starts with'),
|
||||
('istartswith', 'starts with (case insensitive)'),
|
||||
('endswith', 'ends with'),
|
||||
('iendswith', 'ends with (case insensitive)'),
|
||||
('regex', 'is in regular expression'),
|
||||
('iregex', 'is in regular expression (case insensitive)')
|
||||
]
|
||||
)
|
||||
),
|
||||
(
|
||||
'expression', models.TextField(
|
||||
help_text='This expression will be evaluated against the current document.',
|
||||
verbose_name='Expression'
|
||||
)
|
||||
),
|
||||
(
|
||||
'negated', models.BooleanField(
|
||||
default=False, help_text='Inverts the logic of the operator.',
|
||||
verbose_name='Negated'
|
||||
)
|
||||
),
|
||||
(
|
||||
'enabled', models.BooleanField(
|
||||
default=True, verbose_name='Enabled'
|
||||
)
|
||||
),
|
||||
(
|
||||
'smart_link', models.ForeignKey(
|
||||
related_name='conditions', verbose_name='Smart link',
|
||||
to='linking.SmartLink'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Link condition',
|
||||
|
||||
@@ -219,7 +219,9 @@ class SmartLinkConditionListView(SingleObjectListView):
|
||||
return {
|
||||
'hide_link': True,
|
||||
'object': self.get_smart_link(),
|
||||
'title': _('Conditions for smart link: %s') % self.get_smart_link(),
|
||||
'title': _(
|
||||
'Conditions for smart link: %s'
|
||||
) % self.get_smart_link(),
|
||||
}
|
||||
|
||||
def get_smart_link(self):
|
||||
|
||||
@@ -10,7 +10,7 @@ from django.utils.translation import ugettext_lazy as _
|
||||
from acls import ModelPermission
|
||||
from common import (
|
||||
MayanAppConfig, menu_facet, menu_multi_item, menu_object, menu_secondary,
|
||||
menu_setup, menu_sidebar, menu_tools
|
||||
menu_setup, menu_sidebar
|
||||
)
|
||||
from common.classes import ModelAttribute, Filter
|
||||
from common.widgets import two_state_template
|
||||
|
||||
@@ -1,16 +1,11 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import shlex
|
||||
|
||||
from django import forms
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.forms.formsets import formset_factory
|
||||
from django.template import Context, Template
|
||||
from django.utils.module_loading import import_string
|
||||
from django.utils.translation import string_concat, ugettext_lazy as _
|
||||
|
||||
from .classes import MetadataLookup
|
||||
from .models import DocumentMetadata, MetadataType
|
||||
from .models import MetadataType
|
||||
|
||||
|
||||
class MetadataForm(forms.Form):
|
||||
|
||||
@@ -133,6 +133,7 @@ class MetadataType(models.Model):
|
||||
value = parser.parse(value)
|
||||
|
||||
return value
|
||||
|
||||
class Meta:
|
||||
ordering = ('label',)
|
||||
verbose_name = _('Metadata type')
|
||||
|
||||
@@ -19,7 +19,6 @@ from documents.models import Document, DocumentType
|
||||
from documents.permissions import (
|
||||
permission_document_type_edit
|
||||
)
|
||||
from documents.views import DocumentListView
|
||||
from permissions import Permission
|
||||
|
||||
from .api import save_metadata_list
|
||||
|
||||
@@ -186,7 +186,9 @@ class IndexFS(Operations):
|
||||
yield '..'
|
||||
|
||||
# Nodes
|
||||
queryset = node.get_children().values('value').exclude(value__contains='/')
|
||||
queryset = node.get_children().values('value').exclude(
|
||||
value__contains='/'
|
||||
)
|
||||
|
||||
for duplicate in queryset.order_by().annotate(count_id=Count('id')).filter(count_id__gt=1):
|
||||
queryset = queryset.exclude(label=duplicate['label'])
|
||||
@@ -196,7 +198,9 @@ class IndexFS(Operations):
|
||||
|
||||
# Documents
|
||||
if node.index_template_node.link_documents:
|
||||
queryset = node.documents.values('label').exclude(label__contains='/')
|
||||
queryset = node.documents.values('label').exclude(
|
||||
label__contains='/'
|
||||
)
|
||||
|
||||
for duplicate in queryset.order_by().annotate(count_id=Count('id')).filter(count_id__gt=1):
|
||||
queryset = queryset.exclude(label=duplicate['label'])
|
||||
|
||||
@@ -15,9 +15,22 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='Source',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('title', models.CharField(max_length=64, verbose_name='Title')),
|
||||
('enabled', models.BooleanField(default=True, verbose_name='Enabled')),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'title', models.CharField(
|
||||
max_length=64, verbose_name='Title'
|
||||
)
|
||||
),
|
||||
(
|
||||
'enabled', models.BooleanField(
|
||||
default=True, verbose_name='Enabled'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'ordering': ('title',),
|
||||
@@ -29,7 +42,12 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='OutOfProcessSource',
|
||||
fields=[
|
||||
('source_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.Source')),
|
||||
(
|
||||
'source_ptr', models.OneToOneField(
|
||||
parent_link=True, auto_created=True, primary_key=True,
|
||||
serialize=False, to='sources.Source'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Out of process',
|
||||
@@ -40,9 +58,25 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='IntervalBaseModel',
|
||||
fields=[
|
||||
('outofprocesssource_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.OutOfProcessSource')),
|
||||
('interval', models.PositiveIntegerField(default=600, help_text='Interval in seconds between checks for new documents.', verbose_name='Interval')),
|
||||
('uncompress', models.CharField(help_text='Whether to expand or not, compressed archives.', max_length=1, verbose_name='Uncompress', choices=[('y', 'Always'), ('n', 'Never')])),
|
||||
(
|
||||
'outofprocesssource_ptr', models.OneToOneField(
|
||||
parent_link=True, auto_created=True, primary_key=True,
|
||||
serialize=False, to='sources.OutOfProcessSource'
|
||||
)
|
||||
),
|
||||
(
|
||||
'interval', models.PositiveIntegerField(
|
||||
default=600, help_text='Interval in seconds between '
|
||||
'checks for new documents.', verbose_name='Interval'
|
||||
)
|
||||
),
|
||||
(
|
||||
'uncompress', models.CharField(
|
||||
help_text='Whether to expand or not, compressed '
|
||||
'archives.', max_length=1, verbose_name='Uncompress',
|
||||
choices=[('y', 'Always'), ('n', 'Never')]
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Interval source',
|
||||
@@ -53,12 +87,39 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='EmailBaseModel',
|
||||
fields=[
|
||||
('intervalbasemodel_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.IntervalBaseModel')),
|
||||
('host', models.CharField(max_length=128, verbose_name='Host')),
|
||||
('ssl', models.BooleanField(default=True, verbose_name='SSL')),
|
||||
('port', models.PositiveIntegerField(help_text='Typical choices are 110 for POP3, 995 for POP3 over SSL, 143 for IMAP, 993 for IMAP over SSL.', null=True, verbose_name='Port', blank=True)),
|
||||
('username', models.CharField(max_length=96, verbose_name='Username')),
|
||||
('password', models.CharField(max_length=96, verbose_name='Password')),
|
||||
(
|
||||
'intervalbasemodel_ptr', models.OneToOneField(
|
||||
parent_link=True, auto_created=True, primary_key=True,
|
||||
serialize=False, to='sources.IntervalBaseModel'
|
||||
)
|
||||
),
|
||||
(
|
||||
'host', models.CharField(
|
||||
max_length=128, verbose_name='Host'
|
||||
)
|
||||
),
|
||||
(
|
||||
'ssl', models.BooleanField(
|
||||
default=True, verbose_name='SSL'
|
||||
)
|
||||
),
|
||||
(
|
||||
'port', models.PositiveIntegerField(
|
||||
help_text='Typical choices are 110 for POP3, 995 for '
|
||||
'POP3 over SSL, 143 for IMAP, 993 for IMAP over SSL.',
|
||||
null=True, verbose_name='Port', blank=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'username', models.CharField(
|
||||
max_length=96, verbose_name='Username'
|
||||
)
|
||||
),
|
||||
(
|
||||
'password', models.CharField(
|
||||
max_length=96, verbose_name='Password'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Email source',
|
||||
@@ -69,8 +130,17 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='POP3Email',
|
||||
fields=[
|
||||
('emailbasemodel_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.EmailBaseModel')),
|
||||
('timeout', models.PositiveIntegerField(default=60, verbose_name='Timeout')),
|
||||
(
|
||||
'emailbasemodel_ptr', models.OneToOneField(
|
||||
parent_link=True, auto_created=True, primary_key=True,
|
||||
serialize=False, to='sources.EmailBaseModel'
|
||||
)
|
||||
),
|
||||
(
|
||||
'timeout', models.PositiveIntegerField(
|
||||
default=60, verbose_name='Timeout'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'POP email',
|
||||
@@ -81,8 +151,19 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='IMAPEmail',
|
||||
fields=[
|
||||
('emailbasemodel_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.EmailBaseModel')),
|
||||
('mailbox', models.CharField(default='INBOX', help_text='Mail from which to check for messages with attached documents.', max_length=64, verbose_name='Mailbox')),
|
||||
(
|
||||
'emailbasemodel_ptr', models.OneToOneField(
|
||||
parent_link=True, auto_created=True, primary_key=True,
|
||||
serialize=False, to='sources.EmailBaseModel'
|
||||
)
|
||||
),
|
||||
(
|
||||
'mailbox', models.CharField(
|
||||
default='INBOX', help_text='Mail from which to check '
|
||||
'for messages with attached documents.',
|
||||
max_length=64, verbose_name='Mailbox'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'IMAP email',
|
||||
@@ -93,7 +174,12 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='InteractiveSource',
|
||||
fields=[
|
||||
('source_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.Source')),
|
||||
(
|
||||
'source_ptr', models.OneToOneField(
|
||||
parent_link=True, auto_created=True, primary_key=True,
|
||||
serialize=False, to='sources.Source'
|
||||
)
|
||||
),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Interactive source',
|
||||
@@ -104,12 +190,42 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='SourceTransformation',
|
||||
fields=[
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('object_id', models.PositiveIntegerField()),
|
||||
('order', models.PositiveIntegerField(default=0, null=True, verbose_name='Order', db_index=True, blank=True)),
|
||||
('transformation', models.CharField(max_length=128, verbose_name='Transformation', choices=[('resize', 'Resize'), ('rotate', 'Rotate'), ('zoom', 'Zoom')])),
|
||||
('arguments', models.TextField(blank=True, help_text="Use dictionaries to indentify arguments, example: {'degrees':90}", null=True, verbose_name='Arguments', validators=[])),
|
||||
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
|
||||
(
|
||||
'id', models.AutoField(
|
||||
verbose_name='ID', serialize=False, auto_created=True,
|
||||
primary_key=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'object_id', models.PositiveIntegerField()
|
||||
),
|
||||
(
|
||||
'order', models.PositiveIntegerField(
|
||||
default=0, null=True, verbose_name='Order',
|
||||
db_index=True, blank=True
|
||||
)
|
||||
),
|
||||
(
|
||||
'transformation', models.CharField(
|
||||
max_length=128, verbose_name='Transformation',
|
||||
choices=[
|
||||
('resize', 'Resize'), ('rotate', 'Rotate'),
|
||||
('zoom', 'Zoom')
|
||||
]
|
||||
)
|
||||
),
|
||||
(
|
||||
'arguments', models.TextField(
|
||||
blank=True, help_text="Use dictionaries to indentify "
|
||||
"arguments, example: {'degrees':90}", null=True,
|
||||
verbose_name='Arguments', validators=[]
|
||||
)
|
||||
),
|
||||
(
|
||||
'content_type', models.ForeignKey(
to='contenttypes.ContentType'
)
),
],
options={
'ordering': ('order',),
@@ -121,12 +237,47 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='StagingFolderSource',
fields=[
('interactivesource_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.InteractiveSource')),
('folder_path', models.CharField(help_text='Server side filesystem path.', max_length=255, verbose_name='Folder path')),
('preview_width', models.IntegerField(help_text='Width value to be passed to the converter backend.', verbose_name='Preview width')),
('preview_height', models.IntegerField(help_text='Height value to be passed to the converter backend.', null=True, verbose_name='Preview height', blank=True)),
('uncompress', models.CharField(help_text='Whether to expand or not compressed archives.', max_length=1, verbose_name='Uncompress', choices=[('y', 'Always'), ('n', 'Never'), ('a', 'Ask user')])),
('delete_after_upload', models.BooleanField(default=True, help_text='Delete the file after is has been successfully uploaded.', verbose_name='Delete after upload')),
(
'interactivesource_ptr', models.OneToOneField(
parent_link=True, auto_created=True, primary_key=True,
serialize=False, to='sources.InteractiveSource'
)
),
(
'folder_path', models.CharField(
help_text='Server side filesystem path.',
max_length=255, verbose_name='Folder path'
)
),
(
'preview_width', models.IntegerField(
help_text='Width value to be passed to the converter '
'backend.', verbose_name='Preview width'
)
),
(
'preview_height', models.IntegerField(
help_text='Height value to be passed to the '
'converter backend.', null=True,
verbose_name='Preview height', blank=True
)
),
(
'uncompress', models.CharField(
help_text='Whether to expand or not compressed '
'archives.', max_length=1, verbose_name='Uncompress',
choices=[
('y', 'Always'), ('n', 'Never'), ('a', 'Ask user')
]
)
),
(
'delete_after_upload', models.BooleanField(
default=True, help_text='Delete the file after is '
'has been successfully uploaded.',
verbose_name='Delete after upload'
)
),
],
options={
'verbose_name': 'Staging folder',
@@ -137,8 +288,18 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='WatchFolderSource',
fields=[
('intervalbasemodel_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.IntervalBaseModel')),
('folder_path', models.CharField(help_text='Server side filesystem path.', max_length=255, verbose_name='Folder path')),
(
'intervalbasemodel_ptr', models.OneToOneField(
parent_link=True, auto_created=True, primary_key=True,
serialize=False, to='sources.IntervalBaseModel'
)
),
(
'folder_path', models.CharField(
help_text='Server side filesystem path.',
max_length=255, verbose_name='Folder path'
)
),
],
options={
'verbose_name': 'Watch folder',
@@ -149,8 +310,21 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='WebFormSource',
fields=[
('interactivesource_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.InteractiveSource')),
('uncompress', models.CharField(help_text='Whether to expand or not compressed archives.', max_length=1, verbose_name='Uncompress', choices=[('y', 'Always'), ('n', 'Never'), ('a', 'Ask user')])),
(
'interactivesource_ptr', models.OneToOneField(
parent_link=True, auto_created=True, primary_key=True,
serialize=False, to='sources.InteractiveSource'
)
),
(
'uncompress', models.CharField(
help_text='Whether to expand or not compressed '
'archives.', max_length=1, verbose_name='Uncompress',
choices=[
('y', 'Always'), ('n', 'Never'), ('a', 'Ask user')
]
)
),
],
options={
'verbose_name': 'Web form',
@@ -161,7 +335,11 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='intervalbasemodel',
name='document_type',
field=models.ForeignKey(verbose_name='Document type', to='documents.DocumentType', help_text='Assign a document type to documents uploaded from this source.'),
field=models.ForeignKey(
verbose_name='Document type', to='documents.DocumentType',
help_text='Assign a document type to documents uploaded from '
'this source.'
),
preserve_default=True,
),
]

@@ -13,6 +13,10 @@ class Migration(migrations.Migration):
operations = [
migrations.AlterModelOptions(
name='sourcelog',
options={'ordering': ('-datetime',), 'get_latest_by': 'datetime', 'verbose_name': 'Log entry', 'verbose_name_plural': 'Log entries'},
options={
'ordering': ('-datetime',), 'get_latest_by': 'datetime',
'verbose_name': 'Log entry',
'verbose_name_plural': 'Log entries'
},
),
]

@@ -13,7 +13,10 @@ class Migration(migrations.Migration):
operations = [
migrations.AlterModelOptions(
name='source',
options={'ordering': ('label',), 'verbose_name': 'Source', 'verbose_name_plural': 'Sources'},
options={
'ordering': ('label',), 'verbose_name': 'Source',
'verbose_name_plural': 'Sources'
},
),
migrations.RenameField(
model_name='source',

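The wrapped help_text strings in the hunks above rely on Python joining adjacent string literals at compile time, so the multi-line form stays the exact same string as the original one-liner. A minimal standalone sketch of that pattern (plain Python, no Django required; the variable name is illustrative rather than taken from the commit):

# Sketch only: adjacent literals inside the parentheses are concatenated
# at compile time; no newline is introduced by the wrapping.
help_text = (
    'Assign a document type to documents uploaded from '
    'this source.'
)
assert '\n' not in help_text
assert help_text == (
    'Assign a document type to documents uploaded from this source.'
)
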
@@ -86,7 +86,8 @@ class Source(models.Model):

except Exception as exception:
logger.critical(
'Unexpected exception while trying to create new document "%s" from source "%s"; %s',
'Unexpected exception while trying to create new document '
'"%s" from source "%s"; %s',
label or unicode(file_object), self, exception
)
raise
@@ -164,7 +165,9 @@ class StagingFolderSource(InteractiveSource):
)
delete_after_upload = models.BooleanField(
default=True,
help_text=_('Delete the file after is has been successfully uploaded.'),
help_text=_(
'Delete the file after is has been successfully uploaded.'
),
verbose_name=_('Delete after upload')
)

@@ -254,7 +257,9 @@ class IntervalBaseModel(OutOfProcessSource):
)
document_type = models.ForeignKey(
DocumentType,
help_text=_('Assign a document type to documents uploaded from this source.'),
help_text=_(
'Assign a document type to documents uploaded from this source.'
),
verbose_name=_('Document type')
)
uncompress = models.CharField(
@@ -317,8 +322,8 @@ class EmailBaseModel(IntervalBaseModel):
host = models.CharField(max_length=128, verbose_name=_('Host'))
ssl = models.BooleanField(default=True, verbose_name=_('SSL'))
port = models.PositiveIntegerField(blank=True, null=True, help_text=_(
'Typical choices are 110 for POP3, 995 for POP3 over SSL, 143 for IMAP, 993 for IMAP over SSL.'),
verbose_name=_('Port')
'Typical choices are 110 for POP3, 995 for POP3 over SSL, 143 for '
'IMAP, 993 for IMAP over SSL.'), verbose_name=_('Port')
)
username = models.CharField(max_length=96, verbose_name=_('Username'))
password = models.CharField(max_length=96, verbose_name=_('Password'))
@@ -331,8 +336,10 @@ class EmailBaseModel(IntervalBaseModel):
), max_length=128, verbose_name=_('Metadata attachment name')
)

# From: http://bookmarks.honewatson.com/2009/08/11/python-gmail-imaplib-search-subject-get-attachments/
# TODO: Add lock to avoid running more than once concurrent same document download
# From: http://bookmarks.honewatson.com/2009/08/11/
# python-gmail-imaplib-search-subject-get-attachments/
# TODO: Add lock to avoid running more than once concurrent same document
# download
# TODO: Use message ID for lock
@staticmethod
def process_message(source, message):
@@ -357,7 +364,9 @@ class EmailBaseModel(IntervalBaseModel):

with Attachment(part, name=filename) as file_object:
if filename == source.metadata_attachment_name:
metadata_dictionary = yaml.safe_load(file_object.read())
metadata_dictionary = yaml.safe_load(
file_object.read()
)
logger.debug(
'Got metadata dictionary: %s', metadata_dictionary
)
@@ -425,8 +434,9 @@ class IMAPEmail(EmailBaseModel):

mailbox = models.CharField(
default=DEFAULT_IMAP_MAILBOX,
help_text=_('Mail from which to check for messages with attached documents.'),
max_length=64, verbose_name=_('Mailbox')
help_text=_(
'Mail from which to check for messages with attached documents.'
), max_length=64, verbose_name=_('Mailbox')
)

# http://www.doughellmann.com/PyMOTW/imaplib/
@@ -451,7 +461,9 @@ class IMAPEmail(EmailBaseModel):
for message_number in messages_info:
logger.debug('message_number: %s', message_number)
status, data = mailbox.fetch(message_number, '(RFC822)')
EmailBaseModel.process_message(source=self, message=data[0][1])
EmailBaseModel.process_message(
source=self, message=data[0][1]
)
mailbox.store(message_number, '+FLAGS', '\\Deleted')

mailbox.expunge()

@@ -53,7 +53,8 @@ def task_upload_document(self, source_id, document_type_id, shared_uploaded_file

except OperationalError as exception:
logger.warning(
'Operational exception while trying to create new document "%s" from source id %d; %s. Retying.',
'Operational exception while trying to create new document "%s" '
'from source id %d; %s. Retying.',
label or shared_upload.filename, source_id, exception
)
raise self.retry(exc=exception)
@@ -62,8 +63,8 @@ def task_upload_document(self, source_id, document_type_id, shared_uploaded_file
shared_upload.delete()
except OperationalError as exception:
logger.warning(
'Operational error during attempt to delete shared upload file: %s; %s. Retrying.',
shared_upload, exception
'Operational error during attempt to delete shared upload '
'file: %s; %s. Retrying.', shared_upload, exception
)

@@ -80,8 +81,8 @@ def task_source_handle_upload(self, document_type_id, shared_uploaded_file_id, s

except OperationalError as exception:
logger.warning(
'Operational error during attempt to load data to handle source upload: %s. Retrying.',
exception
'Operational error during attempt to load data to handle source '
'upload: %s. Retrying.', exception
)
raise self.retry(exc=exception)

@@ -111,8 +112,8 @@ def task_source_handle_upload(self, document_type_id, shared_uploaded_file_id, s
)
except OperationalError as exception:
logger.warning(
'Operational error while preparing to upload child document: %s. Rescheduling.',
exception
'Operational error while preparing to upload '
'child document: %s. Rescheduling.', exception
)

task_source_handle_upload.delay(
@@ -139,8 +140,9 @@ def task_source_handle_upload(self, document_type_id, shared_uploaded_file_id, s
shared_upload.delete()
except OperationalError as exception:
logger.warning(
'Operational error during attempt to delete shared upload file: %s; %s. Retrying.',
shared_upload, exception
'Operational error during attempt to delete shared '
'upload file: %s; %s. Retrying.', shared_upload,
exception
)
except NotACompressedFile:
logging.debug('Exception: NotACompressedFile')

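The logger.warning() calls rewrapped above keep the format string and its arguments separate, so interpolation is deferred until the record is actually emitted. A runnable sketch of that pattern outside the Celery task (the stand-in values below are illustrative, not from the commit):

# Sketch only: stand-ins for the shared upload object and the caught
# OperationalError used in the task above.
import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

shared_upload = 'shared-upload-0001.pdf'
exception = RuntimeError('database is locked')

# The split literals form one format string; the arguments are passed
# separately so logging interpolates them lazily.
logger.warning(
    'Operational error during attempt to delete shared upload '
    'file: %s; %s. Retrying.', shared_upload, exception
)
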
@@ -51,7 +51,9 @@ class SourceLogListView(SingleObjectListView):
view_permission = permission_sources_setup_view

def get_source(self):
return get_object_or_404(Source.objects.select_subclasses(), pk=self.kwargs['pk'])
return get_object_or_404(
Source.objects.select_subclasses(), pk=self.kwargs['pk']
)

def get_queryset(self):
return self.get_source().logs.all()
@@ -132,12 +134,17 @@ class UploadBaseView(MultiFormView):
pk=kwargs['source_id']
)
else:
self.source = InteractiveSource.objects.filter(enabled=True).select_subclasses().first()
self.source = InteractiveSource.objects.filter(
enabled=True
).select_subclasses().first()

if InteractiveSource.objects.filter(enabled=True).count() == 0:
messages.error(
request,
_('No interactive document sources have been defined or none have been enabled, create one before proceeding.')
_(
'No interactive document sources have been defined or '
'none have been enabled, create one before proceeding.'
)
)
return HttpResponseRedirect(reverse('sources:setup_source_list'))

@@ -211,7 +218,9 @@ class UploadInteractiveView(UploadBaseView):

self.tab_links = UploadBaseView.get_active_tab_links()

return super(UploadInteractiveView, self).dispatch(request, *args, **kwargs)
return super(
UploadInteractiveView, self
).dispatch(request, *args, **kwargs)

def forms_valid(self, forms):
if self.source.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK:
@@ -259,7 +268,10 @@ class UploadInteractiveView(UploadBaseView):
))
messages.success(
self.request,
_('New document queued for uploaded and will be available shortly.')
_(
'New document queued for uploaded and will be available '
'shortly.'
)
)
return HttpResponseRedirect(self.request.get_full_path())

@@ -314,7 +326,9 @@ class UploadInteractiveVersionView(UploadBaseView):

self.tab_links = UploadBaseView.get_active_tab_links(self.document)

return super(UploadInteractiveVersionView, self).dispatch(request, *args, **kwargs)
return super(
UploadInteractiveVersionView, self
).dispatch(request, *args, **kwargs)

def forms_valid(self, forms):
uploaded_file = self.source.get_upload_file_object(
@@ -344,10 +358,15 @@ class UploadInteractiveVersionView(UploadBaseView):

messages.success(
self.request,
_('New document version queued for uploaded and will be available shortly.')
_(
'New document version queued for uploaded and will be '
'available shortly.'
)
)
return HttpResponseRedirect(
reverse('documents:document_version_list', args=(self.document.pk,))
reverse(
'documents:document_version_list', args=(self.document.pk,)
)
)

def create_source_form_form(self, **kwargs):
@@ -373,7 +392,9 @@ class UploadInteractiveVersionView(UploadBaseView):
}

def get_context_data(self, **kwargs):
context = super(UploadInteractiveVersionView, self).get_context_data(**kwargs)
context = super(
UploadInteractiveVersionView, self
).get_context_data(**kwargs)
context['object'] = self.document
context['title'] = _(
'Upload a new version from source: %s'
@@ -437,7 +458,9 @@ class SetupSourceCreateView(SingleObjectCreateView):
def get_extra_context(self):
return {
'object': self.kwargs['source_type'],
'title': _('Create new source of type: %s') % get_class(self.kwargs['source_type']).class_fullname(),
'title': _(
'Create new source of type: %s'
) % get_class(self.kwargs['source_type']).class_fullname(),
}

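The dispatch() overrides above split the super(...) call across lines once the one-liner exceeds the line limit, keeping the chained method call on the closing line. A self-contained sketch of that wrapping style, using stand-in classes rather than the actual Django views:

# Sketch only: Base and ExampleView stand in for the view classes; the
# wrapping of super(...) mirrors the style applied in the hunks above.
class Base(object):
    def dispatch(self, request, *args, **kwargs):
        return 'handled %r' % (request,)


class ExampleView(Base):
    def dispatch(self, request, *args, **kwargs):
        return super(
            ExampleView, self
        ).dispatch(request, *args, **kwargs)


print(ExampleView().dispatch('GET /documents/upload/'))
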
@@ -33,10 +33,15 @@ class DocumentCreateWizard(ViewPermissionCheckMixin, SessionWizardView):
if InteractiveSource.objects.filter(enabled=True).count() == 0:
messages.error(
request,
_('No interactive document sources have been defined or none have been enabled, create one before proceeding.')
_(
'No interactive document sources have been defined or '
'none have been enabled, create one before proceeding.'
)
)
return HttpResponseRedirect(reverse('sources:setup_source_list'))
return super(DocumentCreateWizard, self).dispatch(request, *args, **kwargs)
return super(
DocumentCreateWizard, self
).dispatch(request, *args, **kwargs)

def __init__(self, *args, **kwargs):
super(DocumentCreateWizard, self).__init__(*args, **kwargs)
@@ -62,7 +67,9 @@ class DocumentCreateWizard(ViewPermissionCheckMixin, SessionWizardView):
return self.initial_dict.get(step, {})

def get_context_data(self, form, **kwargs):
context = super(DocumentCreateWizard, self).get_context_data(form=form, **kwargs)
context = super(
DocumentCreateWizard, self
).get_context_data(form=form, **kwargs)
context.update({
'step_title': self.step_titles[self.steps.step0],
'submit_label': _('Next step'),
@@ -85,5 +92,10 @@ class DocumentCreateWizard(ViewPermissionCheckMixin, SessionWizardView):
except TypeError:
pass

url = '?'.join([reverse('sources:upload_interactive'), urlencode(query_dict, doseq=True)])
url = '?'.join(
    [
reverse('sources:upload_interactive'),
urlencode(query_dict, doseq=True)
]
)
return HttpResponseRedirect(url)