PEP8 Cleanups.

Roberto Rosario
2015-08-24 20:55:45 -04:00
parent f54c0d5058
commit 7392e80fc2
39 changed files with 1376 additions and 226 deletions
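The hunks below all apply the same two PEP 8 fixes: calls that overflow the 79-character limit are re-wrapped with a hanging indent after the opening parenthesis, and long string arguments are split across adjacent literals. A minimal, self-contained sketch of the call-wrapping half (the function and values here are illustrative only, not taken from the Mayan EDMS code):

```python
# Illustrative stand-in for a keyword-argument-heavy call such as
# migrations.AlterField(); not part of Mayan EDMS.
def alter_field(model_name, name, field, preserve_default=True):
    return (model_name, name, field, preserve_default)

# Before: a single line well past the 79-character limit.
operation = alter_field(model_name='transformation', name='order', field={'default': 0, 'db_index': True, 'blank': True}, preserve_default=True)

# After: the same call wrapped with a hanging indent so every line fits.
operation_wrapped = alter_field(
    model_name='transformation', name='order',
    field={'default': 0, 'db_index': True, 'blank': True},
    preserve_default=True
)

assert operation == operation_wrapped
```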

View File

@@ -111,10 +111,14 @@ def urlquote(link=None, get=None):
     Example:
-        urlquote('/wiki/Python_(programming_language)') --> '/wiki/Python_%28programming_language%29'
-        urlquote('/mypath/', {'key': 'value'}) --> '/mypath/?key=value'
-        urlquote('/mypath/', {'key': ['value1', 'value2']}) --> '/mypath/?key=value1&key=value2'
-        urlquote({'key': ['value1', 'value2']}) --> 'key=value1&key=value2'
+        urlquote('/wiki/Python_(programming_language)')
+        --> '/wiki/Python_%28programming_language%29'
+        urlquote('/mypath/', {'key': 'value'})
+        --> '/mypath/?key=value'
+        urlquote('/mypath/', {'key': ['value1', 'value2']})
+        --> '/mypath/?key=value1&key=value2'
+        urlquote({'key': ['value1', 'value2']})
+        --> 'key=value1&key=value2'
     """
     if get is None:
         get = []

View File

@@ -15,13 +15,23 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='transformation',
             name='arguments',
-            field=models.TextField(help_text='Enter the arguments for the transformation as a YAML dictionary. ie: {"degrees": 180}', blank=True, verbose_name='Arguments', validators=[converter.validators.YAMLValidator()]),
+            field=models.TextField(
+                help_text='Enter the arguments for the transformation as a '
+                'YAML dictionary. ie: {"degrees": 180}', blank=True,
+                verbose_name='Arguments',
+                validators=[converter.validators.YAMLValidator()]
+            ),
             preserve_default=True,
         ),
         migrations.AlterField(
             model_name='transformation',
             name='name',
-            field=models.CharField(max_length=128, verbose_name='Name', choices=[('rotate', 'Rotate: degrees'), ('zoom', 'Zoom: percent'), ('resize', 'Resize: width, height')]),
+            field=models.CharField(
+                max_length=128, verbose_name='Name', choices=[
+                    ('rotate', 'Rotate: degrees'), ('zoom', 'Zoom: percent'),
+                    ('resize', 'Resize: width, height')
+                ]
+            ),
             preserve_default=True,
         ),
     ]
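The split help_text values above rely on Python's implicit concatenation of adjacent string literals, so the value stored in the migration is unchanged. A quick equivalence check (the variable names are just for illustration):

```python
# Adjacent string literals are joined at compile time, so the wrapped
# form yields exactly the same help_text value as the original
# single-line literal.
single_line = 'Enter the arguments for the transformation as a YAML dictionary. ie: {"degrees": 180}'
wrapped = (
    'Enter the arguments for the transformation as a '
    'YAML dictionary. ie: {"degrees": 180}'
)
assert single_line == wrapped
```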

View File

@@ -15,7 +15,12 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='transformation',
             name='arguments',
-            field=models.TextField(help_text='Enter the arguments for the transformation as a YAML dictionary. ie: {"degrees": 180}', blank=True, verbose_name='Arguments', validators=[converter.validators.YAMLValidator()]),
+            field=models.TextField(
+                help_text='Enter the arguments for the transformation as a '
+                'YAML dictionary. ie: {"degrees": 180}', blank=True,
+                verbose_name='Arguments',
+                validators=[converter.validators.YAMLValidator()]
+            ),
             preserve_default=True,
         ),
     ]

View File

@@ -14,7 +14,11 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='transformation',
             name='order',
-            field=models.PositiveIntegerField(default=0, help_text='Order in which the transformations will be executed.', db_index=True, verbose_name='Order', blank=True),
+            field=models.PositiveIntegerField(
+                default=0, help_text='Order in which the transformations '
+                'will be executed.', db_index=True, verbose_name='Order',
+                blank=True
+            ),
             preserve_default=True,
         ),
         migrations.AlterUniqueTogether(

View File

@@ -14,7 +14,12 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='transformation',
             name='order',
-            field=models.PositiveIntegerField(default=0, help_text='Order in which the transformations will be executed. If left unchanged, an automatic order value will be assigned.', db_index=True, verbose_name='Order', blank=True),
+            field=models.PositiveIntegerField(
+                default=0, help_text='Order in which the transformations '
+                'will be executed. If left unchanged, an automatic order '
+                'value will be assigned.', db_index=True,
+                verbose_name='Order', blank=True
+            ),
             preserve_default=True,
         ),
     ]

View File

@@ -14,7 +14,14 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='transformation',
             name='name',
-            field=models.CharField(max_length=128, verbose_name='Name', choices=[('rotate', 'Rotate: degrees'), ('zoom', 'Zoom: percent'), ('resize', 'Resize: width, height'), ('crop', 'Crop: left, top, right, bottom')]),
+            field=models.CharField(
+                max_length=128, verbose_name='Name',
+                choices=[
+                    ('rotate', 'Rotate: degrees'), ('zoom', 'Zoom: percent'),
+                    ('resize', 'Resize: width, height'),
+                    ('crop', 'Crop: left, top, right, bottom')
+                ]
+            ),
             preserve_default=True,
         ),
     ]

View File

@@ -9,7 +9,9 @@ from django.utils.translation import ugettext_lazy as _
 from documents.models import Document
 
-from .events import event_document_comment_create, event_document_comment_delete
+from .events import (
+    event_document_comment_create, event_document_comment_delete
+)
 
 logger = logging.getLogger(__name__)
@@ -43,16 +45,24 @@ class Comment(models.Model):
             event_document_comment_create.commit(
                 actor=user, target=self.document
             )
-            logger.info('Comment "%s" added to document "%s" by user "%s"', self.comment, self.document, user)
+            logger.info(
+                'Comment "%s" added to document "%s" by user "%s"',
+                self.comment, self.document, user
+            )
         else:
             event_document_comment_create.commit(target=self.document)
-            logger.info('Comment "%s" added to document "%s"', self.comment, self.document)
+            logger.info(
+                'Comment "%s" added to document "%s"', self.comment,
+                self.document
+            )
 
     def delete(self, *args, **kwargs):
         user = kwargs.pop('_user', None)
         super(Comment, self).delete(*args, **kwargs)
 
         if user:
-            event_document_comment_delete.commit(actor=user, target=self.document)
+            event_document_comment_delete.commit(
+                actor=user, target=self.document
+            )
         else:
             event_document_comment_delete.commit(target=self.document)
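Splitting the logger calls above changes only their layout; the format string and its arguments are still passed separately, so interpolation stays lazy. A small sketch under that assumption (the logger name and values are illustrative):

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('comments.example')

comment, document, user = 'example comment', 'example document', 'example user'

# The wrapped call is equivalent to the original one-liner; the logging
# module only interpolates the %s placeholders if the record is emitted.
logger.info(
    'Comment "%s" added to document "%s" by user "%s"',
    comment, document, user
)
```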

View File

@@ -34,7 +34,9 @@ class DocumentCommentCreateView(SingleObjectCreateView):
             permission_comment_create, request.user, self.get_document()
         )
 
-        return super(DocumentCommentCreateView, self).dispatch(request, *args, **kwargs)
+        return super(
+            DocumentCommentCreateView, self
+        ).dispatch(request, *args, **kwargs)
 
     def get_document(self):
         return get_object_or_404(Document, pk=self.kwargs['pk'])
@@ -66,10 +68,13 @@ class DocumentCommentDeleteView(SingleObjectDeleteView):
             )
         except PermissionDenied:
             AccessControlList.objects.check_access(
-                permission_comment_delete, request.user, self.get_object().document
+                permission_comment_delete, request.user,
+                self.get_object().document
             )
 
-        return super(DocumentCommentDeleteView, self).dispatch(request, *args, **kwargs)
+        return super(
+            DocumentCommentDeleteView, self
+        ).dispatch(request, *args, **kwargs)
 
     def get_delete_extra_data(self):
         return {'_user': self.request.user}
@@ -82,7 +87,8 @@ class DocumentCommentDeleteView(SingleObjectDeleteView):
     def get_post_action_redirect(self):
         return reverse(
-            'comments:comments_for_document', args=(self.get_object().document.pk,)
+            'comments:comments_for_document',
+            args=(self.get_object().document.pk,)
         )

View File

@@ -59,8 +59,8 @@ def task_index_document(self, document_id):
         IndexInstanceNode.objects.index_document(document)
     except OperationalError as exception:
         logger.warning(
-            'Operational error while trying to index document: %s; %s',
-            document, exception
+            'Operational error while trying to index document: '
+            '%s; %s', document, exception
         )
         lock.release()
         raise self.retry(exc=exception)

View File

@@ -45,52 +45,93 @@ class IndexTestCase(TestCase):
         # Create simple index template
         root = index.template_root
-        index.node_templates.create(parent=root, expression='{{ document.metadata_value_of.test }}', link_documents=True)
-        self.assertEqual(list(IndexTemplateNode.objects.values_list('expression', flat=True)), ['', '{{ document.metadata_value_of.test }}'])
+        index.node_templates.create(
+            parent=root, expression='{{ document.metadata_value_of.test }}',
+            link_documents=True
+        )
+        self.assertEqual(
+            list(
+                IndexTemplateNode.objects.values_list('expression', flat=True)
+            ), ['', '{{ document.metadata_value_of.test }}']
+        )
 
         # Add document metadata value to trigger index node instance creation
         self.document.metadata.create(metadata_type=metadata_type, value='0001')
-        self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), ['', '0001'])
+        self.assertEqual(
+            list(
+                IndexInstanceNode.objects.values_list('value', flat=True)
+            ), ['', '0001']
+        )
 
         # Check that document is in instance node
         instance_node = IndexInstanceNode.objects.get(value='0001')
-        self.assertQuerysetEqual(instance_node.documents.all(), [repr(self.document)])
+        self.assertQuerysetEqual(
+            instance_node.documents.all(), [repr(self.document)]
+        )
 
         # Change document metadata value to trigger index node instance update
         document_metadata = self.document.metadata.get(metadata_type=metadata_type)
         document_metadata.value = '0002'
         document_metadata.save()
-        self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), ['', '0002'])
+        self.assertEqual(
+            list(
+                IndexInstanceNode.objects.values_list('value', flat=True)
+            ), ['', '0002']
+        )
 
         # Check that document is in new instance node
         instance_node = IndexInstanceNode.objects.get(value='0002')
-        self.assertQuerysetEqual(instance_node.documents.all(), [repr(self.document)])
+        self.assertQuerysetEqual(
+            instance_node.documents.all(), [repr(self.document)]
+        )
 
         # Check node instance is destoyed when no metadata is available
         self.document.metadata.get(metadata_type=metadata_type).delete()
-        self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), [''])
+        self.assertEqual(
+            list(
+                IndexInstanceNode.objects.values_list('value', flat=True)
+            ), ['']
+        )
 
         # Add document metadata value again to trigger index node instance creation
-        self.document.metadata.create(metadata_type=metadata_type, value='0003')
-        self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), ['', '0003'])
+        self.document.metadata.create(
+            metadata_type=metadata_type, value='0003'
+        )
+        self.assertEqual(
+            list(
+                IndexInstanceNode.objects.values_list('value', flat=True)
+            ), ['', '0003']
+        )
 
         # Check node instance is destroyed when no documents are contained
         self.document.delete()
 
         # Document is in trash, index structure should remain unchanged
-        self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), ['', '0003'])
+        self.assertEqual(
+            list(
+                IndexInstanceNode.objects.values_list('value', flat=True)
+            ), ['', '0003']
+        )
 
         # Document deleted from, index structure should update
         self.document.delete()
-        self.assertEqual(list(IndexInstanceNode.objects.values_list('value', flat=True)), [''])
+        self.assertEqual(
+            list(
+                IndexInstanceNode.objects.values_list('value', flat=True)
+            ), ['']
+        )
 
     def test_rebuild_all_indexes(self):
         # Add metadata type and connect to document type
         metadata_type = MetadataType.objects.create(name='test', label='test')
-        DocumentTypeMetadataType.objects.create(document_type=self.document_type, metadata_type=metadata_type)
+        DocumentTypeMetadataType.objects.create(
+            document_type=self.document_type, metadata_type=metadata_type
+        )
 
         # Add document metadata value
-        self.document.metadata.create(metadata_type=metadata_type, value='0001')
+        self.document.metadata.create(
+            metadata_type=metadata_type, value='0001'
+        )
 
         # Create empty index
         index = Index.objects.create(label='test')
@@ -98,12 +139,21 @@ class IndexTestCase(TestCase):
         # Add our document type to the new index
         index.document_types.add(self.document_type)
-        self.assertQuerysetEqual(index.document_types.all(), [repr(self.document_type)])
+        self.assertQuerysetEqual(
+            index.document_types.all(), [repr(self.document_type)]
+        )
 
         # Create simple index template
         root = index.template_root
-        index.node_templates.create(parent=root, expression='{{ document.metadata_value_of.test }}', link_documents=True)
-        self.assertEqual(list(IndexTemplateNode.objects.values_list('expression', flat=True)), ['', '{{ document.metadata_value_of.test }}'])
+        index.node_templates.create(
+            parent=root, expression='{{ document.metadata_value_of.test }}',
+            link_documents=True
+        )
+        self.assertEqual(
+            list(
+                IndexTemplateNode.objects.values_list('expression', flat=True)
+            ), ['', '{{ document.metadata_value_of.test }}']
+        )
 
         # There should be no index instances
         self.assertEqual(list(IndexInstanceNode.objects.all()), [])
@@ -113,4 +163,6 @@ class IndexTestCase(TestCase):
         # Check that document is in instance node
         instance_node = IndexInstanceNode.objects.get(value='0001')
-        self.assertQuerysetEqual(instance_node.documents.all(), [repr(self.document)])
+        self.assertQuerysetEqual(
+            instance_node.documents.all(), [repr(self.document)]
+        )

View File

@@ -16,10 +16,32 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='DocumentVersionSignature',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('signature_file', models.FileField(storage=storage.backends.filebasedstorage.FileBasedStorage(), upload_to=document_signatures.models.upload_to, blank=True, editable=False, null=True, verbose_name='Signature file')),
-                ('has_embedded_signature', models.BooleanField(default=False, verbose_name='Has embedded signature', editable=False)),
-                ('document_version', models.ForeignKey(editable=False, to='documents.DocumentVersion', verbose_name='Document version')),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'signature_file', models.FileField(
+                        storage=storage.backends.filebasedstorage.FileBasedStorage(),
+                        upload_to=document_signatures.models.upload_to,
+                        blank=True, editable=False, null=True,
+                        verbose_name='Signature file'
+                    )
+                ),
+                (
+                    'has_embedded_signature', models.BooleanField(
+                        default=False, verbose_name='Has embedded signature',
+                        editable=False
+                    )
+                ),
+                (
+                    'document_version', models.ForeignKey(
+                        editable=False, to='documents.DocumentVersion',
+                        verbose_name='Document version'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Document version signature',

View File

@@ -33,7 +33,9 @@ def document_verify(request, document_pk):
     document = get_object_or_404(Document, pk=document_pk)
 
     try:
-        Permission.check_permissions(request.user, (permission_document_verify,))
+        Permission.check_permissions(
+            request.user, (permission_document_verify,)
+        )
     except PermissionDenied:
         AccessControlList.objects.check_access(permission_document_verify, request.user, document)

View File

@@ -37,4 +37,6 @@ class WorkflowInstanceTransitionForm(forms.Form):
         self.fields['transition'].choices = workflow.get_transition_choices().values_list('pk', 'label')
 
     transition = forms.ChoiceField(label=_('Transition'))
-    comment = forms.CharField(label=_('Comment'), required=False, widget=forms.widgets.Textarea())
+    comment = forms.CharField(
+        label=_('Comment'), required=False, widget=forms.widgets.Textarea()
+    )

View File

@@ -16,9 +16,24 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='Workflow',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('label', models.CharField(unique=True, max_length=255, verbose_name='Label')),
-                ('document_types', models.ManyToManyField(related_name='workflows', verbose_name='Document types', to='documents.DocumentType')),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'label', models.CharField(
+                        unique=True, max_length=255, verbose_name='Label'
+                    )
+                ),
+                (
+                    'document_types', models.ManyToManyField(
+                        related_name='workflows',
+                        verbose_name='Document types',
+                        to='documents.DocumentType'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Workflow',
@@ -29,9 +44,24 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='WorkflowInstance',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('document', models.ForeignKey(related_name='workflows', verbose_name='Document', to='documents.Document')),
-                ('workflow', models.ForeignKey(related_name='instances', verbose_name='Workflow', to='document_states.Workflow')),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'document', models.ForeignKey(
+                        related_name='workflows', verbose_name='Document',
+                        to='documents.Document'
+                    )
+                ),
+                (
+                    'workflow', models.ForeignKey(
+                        related_name='instances', verbose_name='Workflow',
+                        to='document_states.Workflow'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Workflow instance',
@@ -42,9 +72,23 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='WorkflowInstanceLogEntry',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('datetime', models.DateTimeField(auto_now_add=True, verbose_name='Datetime', db_index=True)),
-                ('comment', models.TextField(verbose_name='Comment', blank=True)),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'datetime', models.DateTimeField(
+                        auto_now_add=True, verbose_name='Datetime',
+                        db_index=True
+                    )
+                ),
+                (
+                    'comment', models.TextField(
+                        verbose_name='Comment', blank=True
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Workflow instance log entry',
@@ -55,10 +99,30 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='WorkflowState',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('label', models.CharField(max_length=255, verbose_name='Label')),
-                ('initial', models.BooleanField(default=False, help_text='Select if this will be the state with which you want the workflow to start in. Only one state can be the initial state.', verbose_name='Initial')),
-                ('workflow', models.ForeignKey(related_name='states', verbose_name='Workflow', to='document_states.Workflow')),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'label', models.CharField(
+                        max_length=255, verbose_name='Label'
+                    )
+                ),
+                (
+                    'initial', models.BooleanField(
+                        default=False,
+                        help_text='Select if this will be the state with which you want the workflow to start in. Only one state can be the initial state.',
+                        verbose_name='Initial'
+                    )
+                ),
+                (
+                    'workflow', models.ForeignKey(
+                        related_name='states', verbose_name='Workflow',
+                        to='document_states.Workflow'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Workflow state',
@@ -69,11 +133,37 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='WorkflowTransition',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('label', models.CharField(max_length=255, verbose_name='Label')),
-                ('destination_state', models.ForeignKey(related_name='destination_transitions', verbose_name='Destination state', to='document_states.WorkflowState')),
-                ('origin_state', models.ForeignKey(related_name='origin_transitions', verbose_name='Origin state', to='document_states.WorkflowState')),
-                ('workflow', models.ForeignKey(related_name='transitions', verbose_name='Workflow', to='document_states.Workflow')),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'label', models.CharField(
+                        max_length=255, verbose_name='Label'
+                    )
+                ),
+                (
+                    'destination_state', models.ForeignKey(
+                        related_name='destination_transitions',
+                        verbose_name='Destination state',
+                        to='document_states.WorkflowState'
+                    )
+                ),
+                (
+                    'origin_state', models.ForeignKey(
+                        related_name='origin_transitions',
+                        verbose_name='Origin state',
+                        to='document_states.WorkflowState'
+                    )
+                ),
+                (
+                    'workflow', models.ForeignKey(
+                        related_name='transitions', verbose_name='Workflow',
+                        to='document_states.Workflow'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Workflow transition',
@@ -83,7 +173,9 @@ class Migration(migrations.Migration):
         ),
         migrations.AlterUniqueTogether(
             name='workflowtransition',
-            unique_together=set([('workflow', 'label', 'origin_state', 'destination_state')]),
+            unique_together=set(
+                [('workflow', 'label', 'origin_state', 'destination_state')]
+            ),
         ),
         migrations.AlterUniqueTogether(
             name='workflowstate',
@@ -92,19 +184,27 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='workflowinstancelogentry',
             name='transition',
-            field=models.ForeignKey(verbose_name='Transition', to='document_states.WorkflowTransition'),
+            field=models.ForeignKey(
+                verbose_name='Transition',
+                to='document_states.WorkflowTransition'
+            ),
             preserve_default=True,
         ),
         migrations.AddField(
             model_name='workflowinstancelogentry',
             name='user',
-            field=models.ForeignKey(verbose_name='User', to=settings.AUTH_USER_MODEL),
+            field=models.ForeignKey(
+                verbose_name='User', to=settings.AUTH_USER_MODEL
+            ),
             preserve_default=True,
         ),
         migrations.AddField(
             model_name='workflowinstancelogentry',
             name='workflow_instance',
-            field=models.ForeignKey(related_name='log_entries', verbose_name='Workflow instance', to='document_states.WorkflowInstance'),
+            field=models.ForeignKey(
+                related_name='log_entries', verbose_name='Workflow instance',
+                to='document_states.WorkflowInstance'
+            ),
             preserve_default=True,
         ),
         migrations.AlterUniqueTogether(

File diff suppressed because one or more lines are too long

View File

@@ -13,18 +13,31 @@ class Migration(migrations.Migration):
     operations = [
         migrations.AlterModelOptions(
             name='document',
-            options={'ordering': ('-date_added',), 'verbose_name': 'Document', 'verbose_name_plural': 'Documents'},
+            options={
+                'ordering': ('-date_added',), 'verbose_name': 'Document',
+                'verbose_name_plural': 'Documents'
+            },
         ),
         migrations.AlterModelOptions(
             name='documentpage',
-            options={'ordering': ('page_number',), 'verbose_name': 'Document page', 'verbose_name_plural': 'Document pages'},
+            options={
+                'ordering': ('page_number',), 'verbose_name': 'Document page',
+                'verbose_name_plural': 'Document pages'
+            },
         ),
         migrations.AlterModelOptions(
             name='documenttype',
-            options={'ordering': ('name',), 'verbose_name': 'Document type', 'verbose_name_plural': 'Documents types'},
+            options={
+                'ordering': ('name',), 'verbose_name': 'Document type',
+                'verbose_name_plural': 'Documents types'
+            },
         ),
         migrations.AlterModelOptions(
             name='documenttypefilename',
-            options={'ordering': ('filename',), 'verbose_name': 'Document type quick rename filename', 'verbose_name_plural': 'Document types quick rename filenames'},
+            options={
+                'ordering': ('filename',),
+                'verbose_name': 'Document type quick rename filename',
+                'verbose_name_plural': 'Document types quick rename filenames'
+            },
         ),
     ]

View File

@@ -14,31 +14,49 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='documenttype',
             name='delete_time_period',
-            field=models.PositiveIntegerField(default=30, verbose_name='Delete time period'),
+            field=models.PositiveIntegerField(
+                default=30, verbose_name='Delete time period'
+            ),
             preserve_default=True,
         ),
         migrations.AddField(
             model_name='documenttype',
             name='delete_time_unit',
-            field=models.CharField(default='days', max_length=8, verbose_name='Delete time unit', choices=[('days', 'Days'), ('hours', 'Hours'), ('minutes', 'Minutes'), ('seconds', 'Seconds')]),
+            field=models.CharField(
+                default='days', max_length=8, verbose_name='Delete time unit',
+                choices=[
+                    ('days', 'Days'), ('hours', 'Hours'),
+                    ('minutes', 'Minutes'), ('seconds', 'Seconds')
+                ]
+            ),
             preserve_default=True,
         ),
         migrations.AddField(
             model_name='documenttype',
             name='trash_time_period',
-            field=models.PositiveIntegerField(null=True, verbose_name='Trash time period', blank=True),
+            field=models.PositiveIntegerField(
+                null=True, verbose_name='Trash time period', blank=True
+            ),
             preserve_default=True,
         ),
         migrations.AddField(
             model_name='documenttype',
             name='trash_time_unit',
-            field=models.CharField(blank=True, max_length=8, null=True, verbose_name='Trash time unit', choices=[('days', 'Days'), ('hours', 'Hours'), ('minutes', 'Minutes'), ('seconds', 'Seconds')]),
+            field=models.CharField(
+                blank=True, max_length=8, null=True,
+                verbose_name='Trash time unit', choices=[
+                    ('days', 'Days'), ('hours', 'Hours'),
+                    ('minutes', 'Minutes'), ('seconds', 'Seconds')
+                ]
+            ),
             preserve_default=True,
         ),
         migrations.AlterField(
             model_name='document',
             name='deleted_date_time',
-            field=models.DateTimeField(verbose_name='Date and time trashed', blank=True),
+            field=models.DateTimeField(
+                verbose_name='Date and time trashed', blank=True
+            ),
             preserve_default=True,
         ),
     ]

View File

@@ -14,31 +14,53 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='document',
             name='deleted_date_time',
-            field=models.DateTimeField(null=True, verbose_name='Date and time trashed', blank=True),
+            field=models.DateTimeField(
+                null=True, verbose_name='Date and time trashed', blank=True
+            ),
             preserve_default=True,
         ),
         migrations.AlterField(
             model_name='documenttype',
             name='delete_time_period',
-            field=models.PositiveIntegerField(default=30, help_text='Amount of time after which documents of this type in the trash will be deleted.', verbose_name='Delete time period'),
+            field=models.PositiveIntegerField(
+                default=30, help_text='Amount of time after which documents '
+                'of this type in the trash will be deleted.',
+                verbose_name='Delete time period'
+            ),
             preserve_default=True,
         ),
         migrations.AlterField(
             model_name='documenttype',
             name='delete_time_unit',
-            field=models.CharField(default='days', max_length=8, verbose_name='Delete time unit', choices=[('days', 'Days'), ('hours', 'Hours'), ('minutes', 'Minutes')]),
+            field=models.CharField(
+                default='days', max_length=8, verbose_name='Delete time unit',
+                choices=[
+                    ('days', 'Days'), ('hours', 'Hours'),
+                    ('minutes', 'Minutes')
+                ]
+            ),
             preserve_default=True,
         ),
         migrations.AlterField(
             model_name='documenttype',
             name='trash_time_period',
-            field=models.PositiveIntegerField(help_text='Amount of time after which documents of this type will be moved to the trash.', null=True, verbose_name='Trash time period', blank=True),
+            field=models.PositiveIntegerField(
+                help_text='Amount of time after which documents of this type '
+                'will be moved to the trash.', null=True,
+                verbose_name='Trash time period', blank=True
+            ),
             preserve_default=True,
         ),
         migrations.AlterField(
             model_name='documenttype',
             name='trash_time_unit',
-            field=models.CharField(blank=True, max_length=8, null=True, verbose_name='Trash time unit', choices=[('days', 'Days'), ('hours', 'Hours'), ('minutes', 'Minutes')]),
+            field=models.CharField(
+                blank=True, max_length=8, null=True,
+                verbose_name='Trash time unit', choices=[
+                    ('days', 'Days'), ('hours', 'Hours'),
+                    ('minutes', 'Minutes')
+                ]
+            ),
             preserve_default=True,
         ),
     ]

View File

@@ -13,7 +13,10 @@ class Migration(migrations.Migration):
     operations = [
         migrations.AlterModelOptions(
             name='documenttype',
-            options={'ordering': ('label',), 'verbose_name': 'Document type', 'verbose_name_plural': 'Documents types'},
+            options={
+                'ordering': ('label',), 'verbose_name': 'Document type',
+                'verbose_name_plural': 'Documents types'
+            },
         ),
         migrations.RenameField(
             model_name='documenttype',

View File

@@ -40,7 +40,9 @@ class DocumentPageSerializer(serializers.HyperlinkedModelSerializer):
     class Meta:
         extra_kwargs = {
             'url': {'view_name': 'rest_api:documentpage-detail'},
-            'document_version': {'view_name': 'rest_api:documentversion-detail'}
+            'document_version': {
+                'view_name': 'rest_api:documentversion-detail'
+            }
         }
         model = DocumentPage
@@ -68,7 +70,9 @@ class DocumentTypeSerializer(serializers.HyperlinkedModelSerializer):
 class DocumentVersionSerializer(serializers.HyperlinkedModelSerializer):
     pages = DocumentPageSerializer(many=True, required=False, read_only=True)
-    revert = serializers.HyperlinkedIdentityField(view_name='rest_api:documentversion-revert')
+    revert = serializers.HyperlinkedIdentityField(
+        view_name='rest_api:documentversion-revert'
+    )
 
     class Meta:
         extra_kwargs = {
@@ -103,7 +107,9 @@ class NewDocumentVersionSerializer(serializers.Serializer):
 class DeletedDocumentSerializer(serializers.HyperlinkedModelSerializer):
     document_type_label = serializers.SerializerMethodField()
-    restore = serializers.HyperlinkedIdentityField(view_name='rest_api:deleteddocument-restore')
+    restore = serializers.HyperlinkedIdentityField(
+        view_name='rest_api:deleteddocument-restore'
+    )
 
     def get_document_type_label(self, instance):
         return instance.document_type.label
@@ -141,9 +147,9 @@ class DocumentSerializer(serializers.HyperlinkedModelSerializer):
             'url': {'view_name': 'rest_api:document-detail'}
         }
         fields = (
-            'date_added', 'description', 'document_type', 'document_type_label',
-            'id', 'label', 'language', 'latest_version', 'url', 'uuid',
-            'versions',
+            'date_added', 'description', 'document_type',
+            'document_type_label', 'id', 'label', 'language',
+            'latest_version', 'url', 'uuid', 'versions',
         )
         model = Document
@@ -155,8 +161,12 @@ class NewDocumentSerializer(serializers.ModelSerializer):
         document = Document.objects.create(
             description=self.validated_data.get('description', ''),
             document_type=self.validated_data['document_type'],
-            label=self.validated_data.get('label', unicode(self.validated_data['file'])),
-            language=self.validated_data.get('language', setting_language.value)
+            label=self.validated_data.get(
+                'label', unicode(self.validated_data['file'])
+            ),
+            language=self.validated_data.get(
+                'language', setting_language.value
+            )
         )
 
         document.save(_user=_user)

View File

@@ -100,7 +100,9 @@ def document_html_widget(document_page, click_view=None, click_view_arguments=No
     alt_text = _('Document page image')
 
     if not document_page:
-        return mark_safe('<span class="fa-stack fa-lg"><i class="fa fa-file-o fa-stack-2x"></i><i class="fa fa-question fa-stack-1x text-danger"></i></span>')
+        return mark_safe(
+            '<span class="fa-stack fa-lg"><i class="fa fa-file-o fa-stack-2x"></i><i class="fa fa-question fa-stack-1x text-danger"></i></span>'
+        )
 
     document = document_page.document
@@ -118,7 +120,8 @@ def document_html_widget(document_page, click_view=None, click_view_arguments=No
     query_string = urlencode(query_dict)
 
     preview_view = '%s?%s' % (
-        reverse('rest_api:documentpage-image', args=(document_page.pk,)), query_string
+        reverse('rest_api:documentpage-image', args=(document_page.pk,)),
+        query_string
     )
 
     result.append(

View File

@@ -18,7 +18,9 @@ class EventsApp(MayanAppConfig):
     def ready(self):
         super(EventsApp, self).ready()
 
-        SourceColumn(source=Action, label=_('Timestamp'), attribute='timestamp')
+        SourceColumn(
+            source=Action, label=_('Timestamp'), attribute='timestamp'
+        )
         SourceColumn(source=Action, label=_('Actor'), attribute='actor')
         SourceColumn(
             source=Action, label=_('Verb'),

View File

@@ -26,7 +26,9 @@ class Event(object):
         model = apps.get_model('events', 'EventType')
 
         if not self.event_type:
-            self.event_type, created = model.objects.get_or_create(name=self.name)
+            self.event_type, created = model.objects.get_or_create(
+                name=self.name
+            )
 
         action.send(
             actor or target, actor=actor, verb=self.name,

View File

@@ -10,7 +10,9 @@ from .permissions import permission_events_view
 def get_kwargs_factory(variable_name):
     def get_kwargs(context):
-        content_type = ContentType.objects.get_for_model(context[variable_name])
+        content_type = ContentType.objects.get_for_model(
+            context[variable_name]
+        )
         return {
             'app_label': '"{}"'.format(content_type.app_label),
             'model': '"{}"'.format(content_type.model),

View File

@@ -13,8 +13,17 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='EventType',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('name', models.CharField(unique=True, max_length=64, verbose_name='Name')),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'name', models.CharField(
+                        unique=True, max_length=64, verbose_name='Name'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Event type',

View File

@@ -14,11 +14,35 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='SmartLink',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('title', models.CharField(max_length=96, verbose_name='Title')),
-                ('dynamic_title', models.CharField(help_text='This expression will be evaluated against the current selected document.', max_length=96, verbose_name='Dynamic title', blank=True)),
-                ('enabled', models.BooleanField(default=True, verbose_name='Enabled')),
-                ('document_types', models.ManyToManyField(to='documents.DocumentType', verbose_name='Document types')),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'title', models.CharField(
+                        max_length=96, verbose_name='Title'
+                    )
+                ),
+                (
+                    'dynamic_title', models.CharField(
+                        help_text='This expression will be evaluated against the current selected document.',
+                        max_length=96, verbose_name='Dynamic title',
+                        blank=True
+                    )
+                ),
+                (
+                    'enabled', models.BooleanField(
+                        default=True, verbose_name='Enabled'
+                    )
+                ),
+                (
+                    'document_types', models.ManyToManyField(
+                        to='documents.DocumentType',
+                        verbose_name='Document types'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Smart link',
@@ -29,14 +53,69 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='SmartLinkCondition',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('inclusion', models.CharField(default='&', help_text='The inclusion is ignored for the first item.', max_length=16, choices=[('&', 'and'), ('|', 'or')])),
-                ('foreign_document_data', models.CharField(help_text='This represents the metadata of all other documents.', max_length=128, verbose_name='Foreign document attribute')),
-                ('operator', models.CharField(max_length=16, choices=[('exact', 'is equal to'), ('iexact', 'is equal to (case insensitive)'), ('contains', 'contains'), ('icontains', 'contains (case insensitive)'), ('in', 'is in'), ('gt', 'is greater than'), ('gte', 'is greater than or equal to'), ('lt', 'is less than'), ('lte', 'is less than or equal to'), ('startswith', 'starts with'), ('istartswith', 'starts with (case insensitive)'), ('endswith', 'ends with'), ('iendswith', 'ends with (case insensitive)'), ('regex', 'is in regular expression'), ('iregex', 'is in regular expression (case insensitive)')])),
-                ('expression', models.TextField(help_text='This expression will be evaluated against the current document.', verbose_name='Expression')),
-                ('negated', models.BooleanField(default=False, help_text='Inverts the logic of the operator.', verbose_name='Negated')),
-                ('enabled', models.BooleanField(default=True, verbose_name='Enabled')),
-                ('smart_link', models.ForeignKey(related_name='conditions', verbose_name='Smart link', to='linking.SmartLink')),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'inclusion', models.CharField(
+                        default='&',
+                        help_text='The inclusion is ignored for the first item.',
+                        max_length=16, choices=[('&', 'and'), ('|', 'or')]
+                    )
+                ),
+                (
+                    'foreign_document_data', models.CharField(
+                        help_text='This represents the metadata of all other documents.',
+                        max_length=128,
+                        verbose_name='Foreign document attribute'
+                    )
+                ),
+                (
+                    'operator', models.CharField(
+                        max_length=16, choices=[
+                            ('exact', 'is equal to'),
+                            ('iexact', 'is equal to (case insensitive)'),
+                            ('contains', 'contains'),
+                            ('icontains', 'contains (case insensitive)'),
+                            ('in', 'is in'), ('gt', 'is greater than'),
+                            ('gte', 'is greater than or equal to'),
+                            ('lt', 'is less than'),
+                            ('lte', 'is less than or equal to'),
+                            ('startswith', 'starts with'),
+                            ('istartswith', 'starts with (case insensitive)'),
+                            ('endswith', 'ends with'),
+                            ('iendswith', 'ends with (case insensitive)'),
+                            ('regex', 'is in regular expression'),
+                            ('iregex', 'is in regular expression (case insensitive)')
+                        ]
+                    )
+                ),
+                (
+                    'expression', models.TextField(
+                        help_text='This expression will be evaluated against the current document.',
+                        verbose_name='Expression'
+                    )
+                ),
+                (
+                    'negated', models.BooleanField(
+                        default=False, help_text='Inverts the logic of the operator.',
+                        verbose_name='Negated'
+                    )
+                ),
+                (
+                    'enabled', models.BooleanField(
+                        default=True, verbose_name='Enabled'
+                    )
+                ),
+                (
+                    'smart_link', models.ForeignKey(
+                        related_name='conditions', verbose_name='Smart link',
+                        to='linking.SmartLink'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Link condition',

View File

@@ -219,7 +219,9 @@ class SmartLinkConditionListView(SingleObjectListView):
         return {
             'hide_link': True,
             'object': self.get_smart_link(),
-            'title': _('Conditions for smart link: %s') % self.get_smart_link(),
+            'title': _(
+                'Conditions for smart link: %s'
+            ) % self.get_smart_link(),
         }
 
     def get_smart_link(self):

View File

@@ -10,7 +10,7 @@ from django.utils.translation import ugettext_lazy as _
 from acls import ModelPermission
 from common import (
     MayanAppConfig, menu_facet, menu_multi_item, menu_object, menu_secondary,
-    menu_setup, menu_sidebar, menu_tools
+    menu_setup, menu_sidebar
 )
 from common.classes import ModelAttribute, Filter
 from common.widgets import two_state_template

View File

@@ -1,16 +1,11 @@
 from __future__ import unicode_literals
 
-import shlex
-
 from django import forms
-from django.core.exceptions import ValidationError
 from django.forms.formsets import formset_factory
-from django.template import Context, Template
-from django.utils.module_loading import import_string
 from django.utils.translation import string_concat, ugettext_lazy as _
 
 from .classes import MetadataLookup
-from .models import DocumentMetadata, MetadataType
+from .models import MetadataType
 
 
 class MetadataForm(forms.Form):

View File

@@ -133,6 +133,7 @@ class MetadataType(models.Model):
         value = parser.parse(value)
 
         return value
+
     class Meta:
         ordering = ('label',)
         verbose_name = _('Metadata type')

View File

@@ -19,7 +19,6 @@ from documents.models import Document, DocumentType
 from documents.permissions import (
     permission_document_type_edit
 )
-from documents.views import DocumentListView
 from permissions import Permission
 
 from .api import save_metadata_list

View File

@@ -186,7 +186,9 @@ class IndexFS(Operations):
         yield '..'
 
         # Nodes
-        queryset = node.get_children().values('value').exclude(value__contains='/')
+        queryset = node.get_children().values('value').exclude(
+            value__contains='/'
+        )
 
         for duplicate in queryset.order_by().annotate(count_id=Count('id')).filter(count_id__gt=1):
             queryset = queryset.exclude(label=duplicate['label'])
@@ -196,7 +198,9 @@ class IndexFS(Operations):
         # Documents
         if node.index_template_node.link_documents:
-            queryset = node.documents.values('label').exclude(label__contains='/')
+            queryset = node.documents.values('label').exclude(
+                label__contains='/'
+            )
 
             for duplicate in queryset.order_by().annotate(count_id=Count('id')).filter(count_id__gt=1):
                 queryset = queryset.exclude(label=duplicate['label'])

View File

@@ -15,9 +15,22 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='Source',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('title', models.CharField(max_length=64, verbose_name='Title')),
-                ('enabled', models.BooleanField(default=True, verbose_name='Enabled')),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'title', models.CharField(
+                        max_length=64, verbose_name='Title'
+                    )
+                ),
+                (
+                    'enabled', models.BooleanField(
+                        default=True, verbose_name='Enabled'
+                    )
+                ),
             ],
             options={
                 'ordering': ('title',),
@@ -29,7 +42,12 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='OutOfProcessSource',
             fields=[
-                ('source_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.Source')),
+                (
+                    'source_ptr', models.OneToOneField(
+                        parent_link=True, auto_created=True, primary_key=True,
+                        serialize=False, to='sources.Source'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Out of process',
@@ -40,9 +58,25 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='IntervalBaseModel',
             fields=[
-                ('outofprocesssource_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.OutOfProcessSource')),
-                ('interval', models.PositiveIntegerField(default=600, help_text='Interval in seconds between checks for new documents.', verbose_name='Interval')),
-                ('uncompress', models.CharField(help_text='Whether to expand or not, compressed archives.', max_length=1, verbose_name='Uncompress', choices=[('y', 'Always'), ('n', 'Never')])),
+                (
+                    'outofprocesssource_ptr', models.OneToOneField(
+                        parent_link=True, auto_created=True, primary_key=True,
+                        serialize=False, to='sources.OutOfProcessSource'
+                    )
+                ),
+                (
+                    'interval', models.PositiveIntegerField(
+                        default=600, help_text='Interval in seconds between '
+                        'checks for new documents.', verbose_name='Interval'
+                    )
+                ),
+                (
+                    'uncompress', models.CharField(
+                        help_text='Whether to expand or not, compressed '
+                        'archives.', max_length=1, verbose_name='Uncompress',
+                        choices=[('y', 'Always'), ('n', 'Never')]
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Interval source',
@@ -53,12 +87,39 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='EmailBaseModel',
             fields=[
-                ('intervalbasemodel_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.IntervalBaseModel')),
-                ('host', models.CharField(max_length=128, verbose_name='Host')),
-                ('ssl', models.BooleanField(default=True, verbose_name='SSL')),
-                ('port', models.PositiveIntegerField(help_text='Typical choices are 110 for POP3, 995 for POP3 over SSL, 143 for IMAP, 993 for IMAP over SSL.', null=True, verbose_name='Port', blank=True)),
-                ('username', models.CharField(max_length=96, verbose_name='Username')),
-                ('password', models.CharField(max_length=96, verbose_name='Password')),
+                (
+                    'intervalbasemodel_ptr', models.OneToOneField(
+                        parent_link=True, auto_created=True, primary_key=True,
+                        serialize=False, to='sources.IntervalBaseModel'
+                    )
+                ),
+                (
+                    'host', models.CharField(
+                        max_length=128, verbose_name='Host'
+                    )
+                ),
+                (
+                    'ssl', models.BooleanField(
+                        default=True, verbose_name='SSL'
+                    )
+                ),
+                (
+                    'port', models.PositiveIntegerField(
+                        help_text='Typical choices are 110 for POP3, 995 for '
+                        'POP3 over SSL, 143 for IMAP, 993 for IMAP over SSL.',
+                        null=True, verbose_name='Port', blank=True
+                    )
+                ),
+                (
+                    'username', models.CharField(
+                        max_length=96, verbose_name='Username'
+                    )
+                ),
+                (
+                    'password', models.CharField(
+                        max_length=96, verbose_name='Password'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Email source',
@@ -69,8 +130,17 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='POP3Email',
             fields=[
-                ('emailbasemodel_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.EmailBaseModel')),
-                ('timeout', models.PositiveIntegerField(default=60, verbose_name='Timeout')),
+                (
+                    'emailbasemodel_ptr', models.OneToOneField(
+                        parent_link=True, auto_created=True, primary_key=True,
+                        serialize=False, to='sources.EmailBaseModel'
+                    )
+                ),
+                (
+                    'timeout', models.PositiveIntegerField(
+                        default=60, verbose_name='Timeout'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'POP email',
@@ -81,8 +151,19 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='IMAPEmail',
             fields=[
-                ('emailbasemodel_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.EmailBaseModel')),
-                ('mailbox', models.CharField(default='INBOX', help_text='Mail from which to check for messages with attached documents.', max_length=64, verbose_name='Mailbox')),
+                (
+                    'emailbasemodel_ptr', models.OneToOneField(
+                        parent_link=True, auto_created=True, primary_key=True,
+                        serialize=False, to='sources.EmailBaseModel'
+                    )
+                ),
+                (
+                    'mailbox', models.CharField(
+                        default='INBOX', help_text='Mail from which to check '
+                        'for messages with attached documents.',
+                        max_length=64, verbose_name='Mailbox'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'IMAP email',
@@ -93,7 +174,12 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='InteractiveSource',
             fields=[
-                ('source_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.Source')),
+                (
+                    'source_ptr', models.OneToOneField(
+                        parent_link=True, auto_created=True, primary_key=True,
+                        serialize=False, to='sources.Source'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Interactive source',
@@ -104,12 +190,42 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='SourceTransformation',
             fields=[
-                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
-                ('object_id', models.PositiveIntegerField()),
-                ('order', models.PositiveIntegerField(default=0, null=True, verbose_name='Order', db_index=True, blank=True)),
-                ('transformation', models.CharField(max_length=128, verbose_name='Transformation', choices=[('resize', 'Resize'), ('rotate', 'Rotate'), ('zoom', 'Zoom')])),
-                ('arguments', models.TextField(blank=True, help_text="Use dictionaries to indentify arguments, example: {'degrees':90}", null=True, verbose_name='Arguments', validators=[])),
-                ('content_type', models.ForeignKey(to='contenttypes.ContentType')),
+                (
+                    'id', models.AutoField(
+                        verbose_name='ID', serialize=False, auto_created=True,
+                        primary_key=True
+                    )
+                ),
+                (
+                    'object_id', models.PositiveIntegerField()
+                ),
+                (
+                    'order', models.PositiveIntegerField(
+                        default=0, null=True, verbose_name='Order',
+                        db_index=True, blank=True
+                    )
+                ),
+                (
+                    'transformation', models.CharField(
+                        max_length=128, verbose_name='Transformation',
+                        choices=[
+                            ('resize', 'Resize'), ('rotate', 'Rotate'),
+                            ('zoom', 'Zoom')
+                        ]
+                    )
+                ),
+                (
+                    'arguments', models.TextField(
+                        blank=True, help_text="Use dictionaries to indentify "
+                        "arguments, example: {'degrees':90}", null=True,
+                        verbose_name='Arguments', validators=[]
+                    )
+                ),
+                (
+                    'content_type', models.ForeignKey(
+                        to='contenttypes.ContentType'
+                    )
+                ),
             ],
             options={
                 'ordering': ('order',),
@@ -121,12 +237,47 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='StagingFolderSource',
             fields=[
-                ('interactivesource_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.InteractiveSource')),
-                ('folder_path', models.CharField(help_text='Server side filesystem path.', max_length=255, verbose_name='Folder path')),
-                ('preview_width', models.IntegerField(help_text='Width value to be passed to the converter backend.', verbose_name='Preview width')),
-                ('preview_height', models.IntegerField(help_text='Height value to be passed to the converter backend.', null=True, verbose_name='Preview height', blank=True)),
-                ('uncompress', models.CharField(help_text='Whether to expand or not compressed archives.', max_length=1, verbose_name='Uncompress', choices=[('y', 'Always'), ('n', 'Never'), ('a', 'Ask user')])),
-                ('delete_after_upload', models.BooleanField(default=True, help_text='Delete the file after is has been successfully uploaded.', verbose_name='Delete after upload')),
+                (
+                    'interactivesource_ptr', models.OneToOneField(
+                        parent_link=True, auto_created=True, primary_key=True,
+                        serialize=False, to='sources.InteractiveSource'
+                    )
+                ),
+                (
+                    'folder_path', models.CharField(
+                        help_text='Server side filesystem path.',
+                        max_length=255, verbose_name='Folder path'
+                    )
+                ),
+                (
+                    'preview_width', models.IntegerField(
+                        help_text='Width value to be passed to the converter '
+                        'backend.', verbose_name='Preview width'
+                    )
+                ),
+                (
+                    'preview_height', models.IntegerField(
+                        help_text='Height value to be passed to the '
+                        'converter backend.', null=True,
+                        verbose_name='Preview height', blank=True
+                    )
+                ),
+                (
+                    'uncompress', models.CharField(
+                        help_text='Whether to expand or not compressed '
+                        'archives.', max_length=1, verbose_name='Uncompress',
+                        choices=[
+                            ('y', 'Always'), ('n', 'Never'), ('a', 'Ask user')
+                        ]
+                    )
+                ),
+                (
+                    'delete_after_upload', models.BooleanField(
+                        default=True, help_text='Delete the file after is '
+                        'has been successfully uploaded.',
+                        verbose_name='Delete after upload'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Staging folder',
@@ -137,8 +288,18 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name='WatchFolderSource',
             fields=[
-                ('intervalbasemodel_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.IntervalBaseModel')),
-                ('folder_path', models.CharField(help_text='Server side filesystem path.', max_length=255, verbose_name='Folder path')),
+                (
+                    'intervalbasemodel_ptr', models.OneToOneField(
+                        parent_link=True, auto_created=True, primary_key=True,
+                        serialize=False, to='sources.IntervalBaseModel'
+                    )
+                ),
+                (
+                    'folder_path', models.CharField(
+                        help_text='Server side filesystem path.',
+                        max_length=255, verbose_name='Folder path'
+                    )
+                ),
             ],
             options={
                 'verbose_name': 'Watch folder',
@@ -149,8 +310,21 @@ class Migration(migrations.Migration):
migrations.CreateModel( migrations.CreateModel(
name='WebFormSource', name='WebFormSource',
fields=[ fields=[
('interactivesource_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='sources.InteractiveSource')), (
('uncompress', models.CharField(help_text='Whether or not to expand compressed archives.', max_length=1, verbose_name='Uncompress', choices=[('y', 'Always'), ('n', 'Never'), ('a', 'Ask user')])), (
parent_link=True, auto_created=True, primary_key=True,
serialize=False, to='sources.InteractiveSource'
)
),
(
'uncompress', models.CharField(
help_text='Whether or not to expand compressed '
'archives.', max_length=1, verbose_name='Uncompress',
choices=[
('y', 'Always'), ('n', 'Never'), ('a', 'Ask user')
]
)
),
], ],
options={ options={
'verbose_name': 'Web form', 'verbose_name': 'Web form',
@@ -161,7 +335,11 @@ class Migration(migrations.Migration):
migrations.AddField( migrations.AddField(
model_name='intervalbasemodel', model_name='intervalbasemodel',
name='document_type', name='document_type',
field=models.ForeignKey(verbose_name='Document type', to='documents.DocumentType', help_text='Assign a document type to documents uploaded from this source.'), field=models.ForeignKey(
verbose_name='Document type', to='documents.DocumentType',
help_text='Assign a document type to documents uploaded from '
'this source.'
),
preserve_default=True, preserve_default=True,
), ),
] ]
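Note on the wrapping style used throughout these migrations: Python concatenates adjacent string literals at compile time, so splitting a long help_text across lines inside parentheses does not change its value. A minimal standalone sketch (variable name arbitrary, text taken from one of the fields above):

# Minimal illustration of the wrapping pattern applied in this migration.
help_text = (
    'Width value to be passed to the converter '
    'backend.'
)

assert help_text == 'Width value to be passed to the converter backend.'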

View File

@@ -13,6 +13,10 @@ class Migration(migrations.Migration):
operations = [ operations = [
migrations.AlterModelOptions( migrations.AlterModelOptions(
name='sourcelog', name='sourcelog',
options={'ordering': ('-datetime',), 'get_latest_by': 'datetime', 'verbose_name': 'Log entry', 'verbose_name_plural': 'Log entries'}, options={
'ordering': ('-datetime',), 'get_latest_by': 'datetime',
'verbose_name': 'Log entry',
'verbose_name_plural': 'Log entries'
},
), ),
] ]

View File

@@ -13,7 +13,10 @@ class Migration(migrations.Migration):
operations = [ operations = [
migrations.AlterModelOptions( migrations.AlterModelOptions(
name='source', name='source',
options={'ordering': ('label',), 'verbose_name': 'Source', 'verbose_name_plural': 'Sources'}, options={
'ordering': ('label',), 'verbose_name': 'Source',
'verbose_name_plural': 'Sources'
},
), ),
migrations.RenameField( migrations.RenameField(
model_name='source', model_name='source',

View File

@@ -86,7 +86,8 @@ class Source(models.Model):
except Exception as exception: except Exception as exception:
logger.critical( logger.critical(
'Unexpected exception while trying to create new document "%s" from source "%s"; %s', 'Unexpected exception while trying to create new document '
'"%s" from source "%s"; %s',
label or unicode(file_object), self, exception label or unicode(file_object), self, exception
) )
raise raise
@@ -164,7 +165,9 @@ class StagingFolderSource(InteractiveSource):
) )
delete_after_upload = models.BooleanField( delete_after_upload = models.BooleanField(
default=True, default=True,
help_text=_('Delete the file after it has been successfully uploaded.'), help_text=_(
'Delete the file after it has been successfully uploaded.'
),
verbose_name=_('Delete after upload') verbose_name=_('Delete after upload')
) )
@@ -254,7 +257,9 @@ class IntervalBaseModel(OutOfProcessSource):
) )
document_type = models.ForeignKey( document_type = models.ForeignKey(
DocumentType, DocumentType,
help_text=_('Assign a document type to documents uploaded from this source.'), help_text=_(
'Assign a document type to documents uploaded from this source.'
),
verbose_name=_('Document type') verbose_name=_('Document type')
) )
uncompress = models.CharField( uncompress = models.CharField(
@@ -317,8 +322,8 @@ class EmailBaseModel(IntervalBaseModel):
host = models.CharField(max_length=128, verbose_name=_('Host')) host = models.CharField(max_length=128, verbose_name=_('Host'))
ssl = models.BooleanField(default=True, verbose_name=_('SSL')) ssl = models.BooleanField(default=True, verbose_name=_('SSL'))
port = models.PositiveIntegerField(blank=True, null=True, help_text=_( port = models.PositiveIntegerField(blank=True, null=True, help_text=_(
'Typical choices are 110 for POP3, 995 for POP3 over SSL, 143 for IMAP, 993 for IMAP over SSL.'), 'Typical choices are 110 for POP3, 995 for POP3 over SSL, 143 for '
verbose_name=_('Port') 'IMAP, 993 for IMAP over SSL.'), verbose_name=_('Port')
) )
username = models.CharField(max_length=96, verbose_name=_('Username')) username = models.CharField(max_length=96, verbose_name=_('Username'))
password = models.CharField(max_length=96, verbose_name=_('Password')) password = models.CharField(max_length=96, verbose_name=_('Password'))
@@ -331,8 +336,10 @@ class EmailBaseModel(IntervalBaseModel):
), max_length=128, verbose_name=_('Metadata attachment name') ), max_length=128, verbose_name=_('Metadata attachment name')
) )
# From: http://bookmarks.honewatson.com/2009/08/11/python-gmail-imaplib-search-subject-get-attachments/ # From: http://bookmarks.honewatson.com/2009/08/11/
# TODO: Add lock to avoid running more than once concurrent same document download # python-gmail-imaplib-search-subject-get-attachments/
# TODO: Add lock to avoid running more than once concurrent same document
# download
# TODO: Use message ID for lock # TODO: Use message ID for lock
@staticmethod @staticmethod
def process_message(source, message): def process_message(source, message):
@@ -357,7 +364,9 @@ class EmailBaseModel(IntervalBaseModel):
with Attachment(part, name=filename) as file_object: with Attachment(part, name=filename) as file_object:
if filename == source.metadata_attachment_name: if filename == source.metadata_attachment_name:
metadata_dictionary = yaml.safe_load(file_object.read()) metadata_dictionary = yaml.safe_load(
file_object.read()
)
logger.debug( logger.debug(
'Got metadata dictionary: %s', metadata_dictionary 'Got metadata dictionary: %s', metadata_dictionary
) )
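For context on the hunk above, a minimal sketch of the metadata parsing step; the attachment contents below are invented, while the real code reads file_object.read() from the email part whose filename matches metadata_attachment_name:

import yaml

# Hypothetical attachment contents; in process_message() the data comes from
# the matching email attachment.
attachment_data = 'title: Sample document\nlanguage: eng\n'

# safe_load() only builds plain Python objects, which is why it is preferred
# over yaml.load() for content arriving from untrusted mail.
metadata_dictionary = yaml.safe_load(attachment_data)

print(metadata_dictionary)  # {'title': 'Sample document', 'language': 'eng'}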
@@ -425,8 +434,9 @@ class IMAPEmail(EmailBaseModel):
mailbox = models.CharField( mailbox = models.CharField(
default=DEFAULT_IMAP_MAILBOX, default=DEFAULT_IMAP_MAILBOX,
help_text=_('Mailbox from which to check for messages with attached documents.'), help_text=_(
max_length=64, verbose_name=_('Mailbox') 'Mailbox from which to check for messages with attached documents.'
), max_length=64, verbose_name=_('Mailbox')
) )
# http://www.doughellmann.com/PyMOTW/imaplib/ # http://www.doughellmann.com/PyMOTW/imaplib/
@@ -451,7 +461,9 @@ class IMAPEmail(EmailBaseModel):
for message_number in messages_info: for message_number in messages_info:
logger.debug('message_number: %s', message_number) logger.debug('message_number: %s', message_number)
status, data = mailbox.fetch(message_number, '(RFC822)') status, data = mailbox.fetch(message_number, '(RFC822)')
EmailBaseModel.process_message(source=self, message=data[0][1]) EmailBaseModel.process_message(
source=self, message=data[0][1]
)
mailbox.store(message_number, '+FLAGS', '\\Deleted') mailbox.store(message_number, '+FLAGS', '\\Deleted')
mailbox.expunge() mailbox.expunge()
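The reflowed fetch loop can be hard to read in diff form; the following standalone sketch shows the same imaplib call sequence, with host, credentials, mailbox and search criteria as placeholders rather than the model's actual field values:

import imaplib

# Placeholder connection details; the real values come from the IMAPEmail
# model fields (host, port, username, password, mailbox).
server = imaplib.IMAP4_SSL('imap.example.com', 993)
server.login('user@example.com', 'secret')
server.select('INBOX')

status, messages_info = server.search(None, 'NOT DELETED')
for message_number in messages_info[0].split():
    status, data = server.fetch(message_number, '(RFC822)')
    raw_message = data[0][1]  # bytes handed to EmailBaseModel.process_message
    server.store(message_number, '+FLAGS', '\\Deleted')

server.expunge()
server.logout()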

View File

@@ -53,7 +53,8 @@ def task_upload_document(self, source_id, document_type_id, shared_uploaded_file
except OperationalError as exception: except OperationalError as exception:
logger.warning( logger.warning(
'Operational exception while trying to create new document "%s" from source id %d; %s. Retrying.', 'Operational exception while trying to create new document "%s" '
'from source id %d; %s. Retrying.',
label or shared_upload.filename, source_id, exception label or shared_upload.filename, source_id, exception
) )
raise self.retry(exc=exception) raise self.retry(exc=exception)
@@ -62,8 +63,8 @@ def task_upload_document(self, source_id, document_type_id, shared_uploaded_file
shared_upload.delete() shared_upload.delete()
except OperationalError as exception: except OperationalError as exception:
logger.warning( logger.warning(
'Operational error during attempt to delete shared upload file: %s; %s. Retrying.', 'Operational error during attempt to delete shared upload '
shared_upload, exception 'file: %s; %s. Retrying.', shared_upload, exception
) )
@@ -80,8 +81,8 @@ def task_source_handle_upload(self, document_type_id, shared_uploaded_file_id, s
except OperationalError as exception: except OperationalError as exception:
logger.warning( logger.warning(
'Operational error during attempt to load data to handle source upload: %s. Retrying.', 'Operational error during attempt to load data to handle source '
exception 'upload: %s. Retrying.', exception
) )
raise self.retry(exc=exception) raise self.retry(exc=exception)
@@ -111,8 +112,8 @@ def task_source_handle_upload(self, document_type_id, shared_uploaded_file_id, s
) )
except OperationalError as exception: except OperationalError as exception:
logger.warning( logger.warning(
'Operational error while preparing to upload child document: %s. Rescheduling.', 'Operational error while preparing to upload '
exception 'child document: %s. Rescheduling.', exception
) )
task_source_handle_upload.delay( task_source_handle_upload.delay(
@@ -139,8 +140,9 @@ def task_source_handle_upload(self, document_type_id, shared_uploaded_file_id, s
shared_upload.delete() shared_upload.delete()
except OperationalError as exception: except OperationalError as exception:
logger.warning( logger.warning(
'Operational error during attempt to delete shared upload file: %s; %s. Retrying.', 'Operational error during attempt to delete shared '
shared_upload, exception 'upload file: %s; %s. Retrying.', shared_upload,
exception
) )
except NotACompressedFile: except NotACompressedFile:
logging.debug('Exception: NotACompressedFile') logging.debug('Exception: NotACompressedFile')
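For reference, a condensed sketch of the retry pattern these tasks share; the task name and the do_work() stand-in are invented, only the OperationalError handling and the self.retry() call mirror the code above:

import logging

from celery import shared_task
from django.db import OperationalError

logger = logging.getLogger(__name__)


def do_work(upload_id):
    """Stand-in for the document creation performed by the real tasks."""


@shared_task(bind=True, default_retry_delay=10)
def example_upload_task(self, upload_id):
    try:
        do_work(upload_id)
    except OperationalError as exception:
        logger.warning(
            'Operational error while handling upload %s; %s. Retrying.',
            upload_id, exception
        )
        # bind=True exposes the task instance, so the task can reschedule
        # itself instead of failing outright.
        raise self.retry(exc=exception)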

View File

@@ -51,7 +51,9 @@ class SourceLogListView(SingleObjectListView):
view_permission = permission_sources_setup_view view_permission = permission_sources_setup_view
def get_source(self): def get_source(self):
return get_object_or_404(Source.objects.select_subclasses(), pk=self.kwargs['pk']) return get_object_or_404(
Source.objects.select_subclasses(), pk=self.kwargs['pk']
)
def get_queryset(self): def get_queryset(self):
return self.get_source().logs.all() return self.get_source().logs.all()
@@ -132,12 +134,17 @@ class UploadBaseView(MultiFormView):
pk=kwargs['source_id'] pk=kwargs['source_id']
) )
else: else:
self.source = InteractiveSource.objects.filter(enabled=True).select_subclasses().first() self.source = InteractiveSource.objects.filter(
enabled=True
).select_subclasses().first()
if InteractiveSource.objects.filter(enabled=True).count() == 0: if InteractiveSource.objects.filter(enabled=True).count() == 0:
messages.error( messages.error(
request, request,
_('No interactive document sources have been defined or none have been enabled, create one before proceeding.') _(
'No interactive document sources have been defined or '
'none have been enabled, create one before proceeding.'
)
) )
return HttpResponseRedirect(reverse('sources:setup_source_list')) return HttpResponseRedirect(reverse('sources:setup_source_list'))
@@ -211,7 +218,9 @@ class UploadInteractiveView(UploadBaseView):
self.tab_links = UploadBaseView.get_active_tab_links() self.tab_links = UploadBaseView.get_active_tab_links()
return super(UploadInteractiveView, self).dispatch(request, *args, **kwargs) return super(
UploadInteractiveView, self
).dispatch(request, *args, **kwargs)
def forms_valid(self, forms): def forms_valid(self, forms):
if self.source.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK: if self.source.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK:
@@ -259,7 +268,10 @@ class UploadInteractiveView(UploadBaseView):
)) ))
messages.success( messages.success(
self.request, self.request,
_('New document queued for upload and will be available shortly.') _(
'New document queued for upload and will be available '
'shortly.'
)
) )
return HttpResponseRedirect(self.request.get_full_path()) return HttpResponseRedirect(self.request.get_full_path())
@@ -314,7 +326,9 @@ class UploadInteractiveVersionView(UploadBaseView):
self.tab_links = UploadBaseView.get_active_tab_links(self.document) self.tab_links = UploadBaseView.get_active_tab_links(self.document)
return super(UploadInteractiveVersionView, self).dispatch(request, *args, **kwargs) return super(
UploadInteractiveVersionView, self
).dispatch(request, *args, **kwargs)
def forms_valid(self, forms): def forms_valid(self, forms):
uploaded_file = self.source.get_upload_file_object( uploaded_file = self.source.get_upload_file_object(
@@ -344,10 +358,15 @@ class UploadInteractiveVersionView(UploadBaseView):
messages.success( messages.success(
self.request, self.request,
_('New document version queued for upload and will be available shortly.') _(
'New document version queued for upload and will be '
'available shortly.'
)
) )
return HttpResponseRedirect( return HttpResponseRedirect(
reverse('documents:document_version_list', args=(self.document.pk,)) reverse(
'documents:document_version_list', args=(self.document.pk,)
)
) )
def create_source_form_form(self, **kwargs): def create_source_form_form(self, **kwargs):
@@ -373,7 +392,9 @@ class UploadInteractiveVersionView(UploadBaseView):
} }
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
context = super(UploadInteractiveVersionView, self).get_context_data(**kwargs) context = super(
UploadInteractiveVersionView, self
).get_context_data(**kwargs)
context['object'] = self.document context['object'] = self.document
context['title'] = _( context['title'] = _(
'Upload a new version from source: %s' 'Upload a new version from source: %s'
@@ -437,7 +458,9 @@ class SetupSourceCreateView(SingleObjectCreateView):
def get_extra_context(self): def get_extra_context(self):
return { return {
'object': self.kwargs['source_type'], 'object': self.kwargs['source_type'],
'title': _('Create new source of type: %s') % get_class(self.kwargs['source_type']).class_fullname(), 'title': _(
'Create new source of type: %s'
) % get_class(self.kwargs['source_type']).class_fullname(),
} }
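Aside on the select_subclasses() calls reformatted above: they appear to come from django-model-utils' InheritanceManager. A rough sketch of the fallback lookup in UploadBaseView, assuming that manager and import path (not verified here), to be run inside a configured Django project shell:

from sources.models import InteractiveSource  # assumed import path

source = InteractiveSource.objects.filter(
    enabled=True
).select_subclasses().first()

if source is None:
    # Mirrors the error branch above: no enabled interactive source exists.
    print('No interactive document sources have been defined or enabled.')
else:
    # select_subclasses() returns concrete instances (WebFormSource,
    # StagingFolderSource, ...), not bare InteractiveSource rows.
    print(type(source).__name__)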

View File

@@ -33,10 +33,15 @@ class DocumentCreateWizard(ViewPermissionCheckMixin, SessionWizardView):
if InteractiveSource.objects.filter(enabled=True).count() == 0: if InteractiveSource.objects.filter(enabled=True).count() == 0:
messages.error( messages.error(
request, request,
_('No interactive document sources have been defined or none have been enabled, create one before proceeding.') _(
'No interactive document sources have been defined or '
'none have been enabled, create one before proceeding.'
)
) )
return HttpResponseRedirect(reverse('sources:setup_source_list')) return HttpResponseRedirect(reverse('sources:setup_source_list'))
return super(DocumentCreateWizard, self).dispatch(request, *args, **kwargs) return super(
DocumentCreateWizard, self
).dispatch(request, *args, **kwargs)
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(DocumentCreateWizard, self).__init__(*args, **kwargs) super(DocumentCreateWizard, self).__init__(*args, **kwargs)
@@ -62,7 +67,9 @@ class DocumentCreateWizard(ViewPermissionCheckMixin, SessionWizardView):
return self.initial_dict.get(step, {}) return self.initial_dict.get(step, {})
def get_context_data(self, form, **kwargs): def get_context_data(self, form, **kwargs):
context = super(DocumentCreateWizard, self).get_context_data(form=form, **kwargs) context = super(
DocumentCreateWizard, self
).get_context_data(form=form, **kwargs)
context.update({ context.update({
'step_title': self.step_titles[self.steps.step0], 'step_title': self.step_titles[self.steps.step0],
'submit_label': _('Next step'), 'submit_label': _('Next step'),
@@ -85,5 +92,10 @@ class DocumentCreateWizard(ViewPermissionCheckMixin, SessionWizardView):
except TypeError: except TypeError:
pass pass
url = '?'.join([reverse('sources:upload_interactive'), urlencode(query_dict, doseq=True)]) url = '?'.join(
[
reverse('sources:upload_interactive'),
urlencode(query_dict, doseq=True)
]
)
return HttpResponseRedirect(url) return HttpResponseRedirect(url)
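Finally, a small standalone illustration of the URL assembly in done(); the query keys and the choice of django.utils.http.urlencode are assumptions for the example, the relevant behaviour is doseq=True expanding list values into repeated parameters:

from django.utils.http import urlencode

# Hypothetical wizard results.
query_dict = {'document_type_id': 4, 'tags': [1, 2]}

url = '?'.join(
    ['/sources/upload/interactive/', urlencode(query_dict, doseq=True)]
)

print(url)  # e.g. /sources/upload/interactive/?document_type_id=4&tags=1&tags=2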