Import the entire post_init.py file, removing the need to separate init_ prefixed functions

This commit is contained in:
Roberto Rosario
2012-09-26 22:32:34 -04:00
parent 11aa9e64f1
commit 7cebb82e1c
12 changed files with 160 additions and 108 deletions

View File

@@ -1,7 +1,5 @@
from __future__ import absolute_import
import inspect
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.utils.importlib import import_module
@@ -14,13 +12,8 @@ def register_apps():
App.register(app_name)
try:
post_init = import_module('%s.post_init' % app_name)
except ImportError:
except ImportError as exception:
pass
else:
if post_init:
for name, value in inspect.getmembers(post_init):
if hasattr(value, '__call__') and name.startswith('init'):
value()
register_apps()

View File

@@ -60,18 +60,22 @@ class App(TranslatableLabelMixin, LiveObjectMixin, models.Model):
app, created = App.objects.get_or_create(name=app_name)
except DatabaseError:
transaction.rollback()
raise cls.UnableToRegister
# If database is not ready create a memory only app instance
app = App()
app.label = getattr(registration, 'label', app_name)
app.description = getattr(registration, 'description', u'')
else:
# If there are no errors, go ahead with the stored app instance
app.label = getattr(registration, 'label', app_name)
app.description = getattr(registration, 'description', u'')
app.dependencies.clear()
app.save()
app.icon = getattr(registration, 'icon', None)
for dependency_name in getattr(registration, 'dependencies', []):
dependency, created = App.objects.get_or_create(name=dependency_name)
app.dependencies.add(dependency)
app.icon = getattr(registration, 'icon', None)
settings = getattr(registration, 'settings', None)
if settings:

View File

@@ -11,6 +11,7 @@ from .links import tool_link, node_list
from .models import Node
from .settings import NODE_HEARTBEAT_INTERVAL, DEAD_NODE_REMOVAL_INTERVAL
@transaction.commit_on_success
def add_clustering_jobs():
clustering_scheduler = LocalScheduler('clustering', _(u'Clustering'))
@@ -23,6 +24,5 @@ def add_clustering_jobs():
clustering_scheduler.start()
def init_clustering():
add_clustering_jobs()
bind_links([Node, 'node_list'], [node_list], menu_name='secondary_menu')

View File

@@ -5,7 +5,7 @@ import tempfile
from .utils import validate_path
import common.settings as common_settings
from .settings import TEMPORARY_DIRECTORY
def init_validate_temp_path():
if (validate_path(common_settings.TEMPORARY_DIRECTORY) == False) or (not common_settings.TEMPORARY_DIRECTORY):
if (validate_path(getattr(common_settings, 'TEMPORARY_DIRECTORY')) == False) or (not getattr(common_settings, 'TEMPORARY_DIRECTORY')):
setattr(common_settings, 'TEMPORARY_DIRECTORY', tempfile.mkdtemp())

View File

@@ -73,7 +73,6 @@ def document_post_save_hook(instance):
# if kwargs.get('created', False):
# DocumentVersionSignature.objects.signature_state(instance.document)
def init_app():
bind_links([Document], [document_verify], menu_name='form_header')
bind_links(['document_verify', 'document_signature_upload', 'document_signature_download', 'document_signature_delete'], [document_signature_upload, document_signature_download, document_signature_delete], menu_name='sidebar')
@@ -86,4 +85,3 @@ def init_app():
])
DocumentVersionSignature._meta.get_field('signature_file').storage=STORAGE_BACKEND()

View File

@@ -11,9 +11,8 @@ from .models import DocumentVersion, get_filename_from_uuid
from .settings import STORAGE_BACKEND, CACHE_PATH
def init_validate_cache_path():
#TODO: fix with method to set a settings value
if (validate_path(CACHE_PATH) == False) or (not CACHE_PATH):
setattr(document_settings, 'CACHE_PATH', tempfile.mkdtemp())
def init_set_storage_backend():
DocumentVersion._meta.get_field('file').storage=STORAGE_BACKEND()

View File

@@ -8,7 +8,6 @@ from .settings import INDEX_UPDATE_INTERVAL
from .jobs import search_index_update
def init_start_indexing_job():
dynamic_search_scheduler = LocalScheduler('search', _(u'Search'))
dynamic_search_scheduler.add_interval_job('search_index_update', _(u'Update the search index with the most recent modified documents.'), search_index_update, seconds=INDEX_UPDATE_INTERVAL)
dynamic_search_scheduler.start()

View File

@@ -65,22 +65,17 @@ def kill_all_node_processes():
process.join()
def init_job_processor():
atexit.register(kill_all_node_processes)
#atexit.register(kill_all_node_processes)
add_job_queue_jobs()
bind_links([JobQueue, 'job_queues'], [job_queues], menu_name='secondary_menu')
bind_links([JobQueue], [job_queue_start, job_queue_stop, job_queue_items_pending, job_queue_items_active, job_queue_items_error])
bind_links([Node], [node_workers])
bind_links([JobQueueItem], [job_requeue, job_delete])
bind_links([Worker], [worker_terminate])
Node.add_to_class('workers', lambda node: node.worker_set)
register_model_list_columns(Node, [
{
'name': _(u'active workers'),
'attribute': encapsulate(lambda x: x.workers().all().count())
},
])
bind_links([JobQueue, 'job_queues'], [job_queues], menu_name='secondary_menu')
bind_links([JobQueue], [job_queue_start, job_queue_stop, job_queue_items_pending, job_queue_items_active, job_queue_items_error])
bind_links([Node], [node_workers])
bind_links([JobQueueItem], [job_requeue, job_delete])
bind_links([Worker], [worker_terminate])

View File

@@ -8,6 +8,5 @@ from .models import MetadataType
available_models_string = (_(u' Available models: %s') % u','.join([name for name, model in AVAILABLE_MODELS.items()])) if AVAILABLE_MODELS else u''
available_functions_string = (_(u' Available functions: %s') % u','.join([u'%s()' % name for name, function in AVAILABLE_FUNCTIONS.items()])) if AVAILABLE_FUNCTIONS else u''
def init_set_metadata_type_help_texts():
MetadataType._meta.get_field('default').help_text=_(u'Enter a string to be evaluated.%s') % available_functions_string
MetadataType._meta.get_field('lookup').help_text=_(u'Enter a string to be evaluated. Example: [user.get_full_name() for user in User.objects.all()].%s') % available_models_string

View File

@@ -3,24 +3,18 @@ from __future__ import absolute_import
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.core.exceptions import ObjectDoesNotExist
from django.dispatch import receiver
def user_post_save(sender, instance, **kwargs):
from .settings import DEFAULT_ROLES
@receiver(post_save, dispatch_uid='set_default_roles', sender=User)
def set_default_roles(sender, instance, **kwargs):
if kwargs.get('created', False):
for default_role in SETTING_DEFAULT_ROLES:
if isinstance(default_role, Role):
#If a model is passed, execute method
default_role.add_member(instance)
else:
#If a role name is passed, lookup the corresponding model
for default_role in DEFAULT_ROLES:
try:
role = Role.objects.get(name=default_role)
role.add_member(instance)
except ObjectDoesNotExist:
pass
def init_signal_handler():
post_save.connect(user_post_save, sender=User)

View File

@@ -0,0 +1,71 @@
from __future__ import absolute_import
from south.signals import post_migrate
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth import models as auth_models
from django.contrib.auth.models import User
from django.dispatch import receiver
from django.conf import settings
from django.db.models.signals import post_save
from django.contrib.auth.models import User, Group
from django.utils.translation import ugettext_lazy as _
from navigation.api import bind_links, register_multi_item_links
from .links import (user_list, user_edit, user_add, user_delete,
user_multiple_delete, user_set_password, user_multiple_set_password,
group_list, group_edit, group_add, group_delete,
group_multiple_delete, group_members)
from .models import AutoAdminSingleton
from .settings import (AUTO_CREATE_ADMIN, AUTO_ADMIN_USERNAME,
AUTO_ADMIN_PASSWORD)
@receiver(post_migrate, dispatch_uid='create_superuser')
def create_superuser(sender, **kwargs):
"""
From https://github.com/lambdalisue/django-qwert/blob/master/qwert/autoscript/__init__.py
From http://stackoverflow.com/questions/1466827/ --
Prevent interactive question about wanting a superuser created. (This code
has to go in this otherwise empty "models" module so that it gets processed by
the "syncdb" command during database creation.)
Create our own admin super user automatically.
"""
if AUTO_CREATE_ADMIN and kwargs['app'] == 'user_management':
try:
auth_models.User.objects.get(username=AUTO_ADMIN_USERNAME)
except auth_models.User.DoesNotExist:
print '*' * 80
print 'Creating super admin user -- login: %s, password: %s' % (AUTO_ADMIN_USERNAME, AUTO_ADMIN_PASSWORD)
print '*' * 80
assert auth_models.User.objects.create_superuser(AUTO_ADMIN_USERNAME, 'autoadmin@autoadmin.com', AUTO_ADMIN_PASSWORD)
admin = auth_models.User.objects.get(username=AUTO_ADMIN_USERNAME)
# Store the auto admin password properties to display the first login message
auto_admin_properties = AutoAdminSingleton.get()
auto_admin_properties.account = admin
auto_admin_properties.password = AUTO_ADMIN_PASSWORD
auto_admin_properties.password_hash = admin.password
auto_admin_properties.save()
else:
print 'Super admin user already exists. -- login: %s' % AUTO_ADMIN_USERNAME
@receiver(post_save, dispatch_uid='auto_admin_account_passwd_change', sender=User)
def auto_admin_account_passwd_change(sender, instance, **kwargs):
auto_admin_properties = AutoAdminSingleton.get()
if instance == auto_admin_properties.account and instance.password != auto_admin_properties.password_hash:
# Only delete the auto admin properties when the password has been changed
auto_admin_properties.delete(force=True)
bind_links([User], [user_edit, user_set_password, user_delete])
bind_links(['user_multiple_set_password', 'user_set_password', 'user_multiple_delete', 'user_delete', 'user_edit', 'user_list', 'user_add'], [user_list, user_add], menu_name=u'secondary_menu')
register_multi_item_links(['user_list'], [user_multiple_set_password, user_multiple_delete])
bind_links([Group], [group_edit, group_members, group_delete])
bind_links(['group_multiple_delete', 'group_delete', 'group_edit', 'group_list', 'group_add', 'group_members'], [group_list, group_add], menu_name=u'secondary_menu')
register_multi_item_links(['group_list'], [group_multiple_delete])