Repository: hanson007/FirstBlood Branch: master Commit: fef5c9887eea Files: 115 Total size: 1.9 MB Directory structure: gitextract_epu7r_5a/ ├── .gitignore ├── FirstBlood/ │ ├── __init__.py │ ├── celery.py │ ├── settings.py │ ├── urls.py │ ├── views.py │ └── wsgi.py ├── LICENSE ├── README.md ├── batch_job/ │ ├── __init__.py │ ├── admin.py │ ├── apps.py │ ├── conf/ │ │ ├── __init__.py │ │ └── config.py │ ├── models.py │ ├── tests.py │ ├── urls.py │ └── views.py ├── controller/ │ ├── __init__.py │ ├── core/ │ │ ├── __init__.py │ │ ├── access.py │ │ ├── excel.py │ │ ├── local_mysql.py │ │ ├── mailtable.py │ │ ├── public.py │ │ ├── query_sql.py │ │ └── unicode_width.py │ └── public/ │ ├── __init__.py │ ├── dataconn.py │ ├── log.py │ ├── mailclass.py │ ├── mysql_helper.py │ ├── pagination.py │ └── sqlserver_helper.py ├── create_table.sql ├── datax_web/ │ ├── __init__.py │ ├── admin.py │ ├── apps.py │ ├── conf/ │ │ ├── __init__.py │ │ └── config.py │ ├── models.py │ ├── tests.py │ ├── urls.py │ └── views.py ├── manage.py ├── requirements.txt ├── static/ │ ├── css/ │ │ └── login.css │ ├── js/ │ │ └── csrf.js │ ├── plugins/ │ │ ├── bootstarp-table/ │ │ │ └── bootstrap-table-zh-CN.js │ │ ├── datatables/ │ │ │ ├── css/ │ │ │ │ ├── jquery.dataTables.css │ │ │ │ └── jquery.dataTables_themeroller.css │ │ │ └── js/ │ │ │ └── jquery.dataTables.js │ │ └── layer/ │ │ ├── layer.js │ │ └── skin/ │ │ └── layer.css │ └── template/ │ ├── bootstrap/ │ │ └── css/ │ │ └── bootstrap.css │ ├── css/ │ │ ├── bootstrap-datepicker.css │ │ ├── bootstrap-timepicker.css │ │ ├── bootstrap-wysihtml5.css │ │ ├── colorbox/ │ │ │ └── colorbox.css │ │ ├── datepicker.css │ │ ├── dropzone/ │ │ │ └── dropzone.css │ │ ├── endless-skin.css │ │ ├── endless.css │ │ ├── fullcalendar.css │ │ ├── gritter/ │ │ │ └── jquery.gritter.css │ │ ├── jcarousel.responsive.css │ │ ├── jquery.dataTables_themeroller.css │ │ ├── jquery.tagsinput.css │ │ ├── morris.css │ │ ├── pace.css │ │ ├── prettify.css │ │ └── slider.css │ 
└── js/ │ ├── bootstrap-datepicker.js │ ├── endless/ │ │ ├── endless.js │ │ ├── endless_dashboard.js │ │ ├── endless_form.js │ │ └── endless_wizard.js │ ├── jquery-ui.js │ └── uncompressed/ │ ├── bootstrap-wysihtml5.js │ ├── holder.js │ ├── pace.js │ └── run_prettify.js ├── supervisord/ │ ├── FirstBlood_dev.conf │ └── FirstBlood_pro.conf ├── templates/ │ ├── 403.html │ ├── 404.html │ ├── base.html │ ├── batch_job/ │ │ ├── add_batch_job.html │ │ ├── add_crontabs.html │ │ ├── batch_job_instance.html │ │ ├── batch_job_instance_details.html │ │ ├── crontabs.html │ │ ├── index.html │ │ ├── mod_crontabs.html │ │ ├── update_batch_job.html │ │ └── update_job.html │ ├── datax_web/ │ │ ├── add_crontabs.html │ │ ├── add_job.html │ │ ├── crontabs.html │ │ ├── index.html │ │ ├── mod_crontabs.html │ │ ├── mod_periodic_task.html │ │ ├── monitor_job.html │ │ ├── monitor_job_detail.html │ │ └── update_job.html │ ├── index.html │ └── registered/ │ ├── login.html │ ├── password_change.html │ └── password_change_done.html └── websockted/ ├── CHANGES ├── LICENSE ├── README.md ├── __init__.py ├── datax_web_job_instance.py └── websocketd ================================================ FILE CONTENTS ================================================ ================================================ FILE: .gitignore ================================================ # python pyc pyo *.pyc *.pyo # log *.log # out *.out # pid *.pid # pycharm .idea # datax datax # create table sql */migrations ================================================ FILE: FirstBlood/__init__.py ================================================ from __future__ import absolute_import # This will make sure the app is always imported when # Django starts so that shared_task will use this app. 
from .celery import app as celery_app ================================================ FILE: FirstBlood/celery.py ================================================ #!/usr/bin/python env # -*- coding: UTF-8 -*- # Description: # Author: 黄小雪 # Date: 2017年03月25日 # Company: 东方银谷 from __future__ import absolute_import import os from celery import Celery from django.conf import settings # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'FirstBlood.settings') app = Celery('FirstBlood') # Using a string here means the worker will not have to # pickle the object when using Windows. app.config_from_object('django.conf:settings') app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) ================================================ FILE: FirstBlood/settings.py ================================================ # -*-coding:utf-8-*- """ Django settings for FirstBlood project. Generated by 'django-admin startproject' using Django 1.9.5. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'on9c1o9$*rvub+_(z!_n!b$!2mwo1h8(pd_h#n_q$=2s84+^9(' # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True ALLOWED_HOSTS = ['*'] # 根据系统变量判断生产、测试环境来加载配置文件 SYSTEM_ENVIRONMENT_VARIABLES = 'development_environment' # Application definition INSTALLED_APPS = [ 'datax_web', 'batch_job', 'djcelery', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'FirstBlood.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'FirstBlood.wsgi.application' # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases # 测试环境 DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'FirstBlood', # Or path to database file if using sqlite3. 'USER': 'root', # Not used with sqlite3. 'PASSWORD': '123.com', # Not used with sqlite3. 'HOST': '127.0.0.1', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '3306', # Set to empty string for default. Not used with sqlite3. 
'STORAGE_ENGINE': 'MYISAM' # 'OPTIONS': {"init_command": "SET foreign_key_checks = 0;",}, } } CELERYD_CONCURRENCY = 3 # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'Asia/Shanghai' USE_I18N = True USE_L10N = True USE_TZ = False # redis celery # CELERY STUFF import djcelery djcelery.setup_loader() BROKER_URL = 'redis://localhost:6379' CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler' # 定时任务 CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend' # CELERY_RESULT_BACKEND = 'redis://localhost:6379' CELERY_ACCEPT_CONTENT = ['application/json'] CELERY_TASK_SERIALIZER = 'json' CELERY_RESULT_SERIALIZER = 'json' CELERY_TIMEZONE = 'Asia/Shanghai' CELERY_ENABLE_UTC=False # CELERYD_CONCURRENCY = 10 CELERYD_MAX_TASKS_PER_CHILD = 1 # 每个worker最多执行3个任务就会被销毁,可防止内存泄露 # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/' STATICFILES_DIRS = [ os.path.join(BASE_DIR, "static"), ] LOG_FILE_DIR = os.path.join(BASE_DIR, "log/") LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'standard': { 'format': '%(levelname)s %(asctime)s %(message)s' }, }, 'filters': { }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'class': 'django.utils.log.AdminEmailHandler', 'formatter':'standard', }, 'datax_web_handler': { 'level':'DEBUG', 'class':'logging.handlers.RotatingFileHandler', 'filename':'%s%s' % (LOG_FILE_DIR, 'datax_web.log'), 'formatter':'standard', }, 
'batch_job_handler': { 'level':'DEBUG', 'class':'logging.handlers.RotatingFileHandler', 'filename':'%s%s' % (LOG_FILE_DIR, 'batch_job.log'), 'formatter':'standard', }, }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, 'datax_web':{ 'handlers': ['datax_web_handler'], 'level': 'INFO', 'propagate': False }, 'batch_job':{ 'handlers': ['batch_job_handler'], 'level': 'INFO', 'propagate': False }, } } ================================================ FILE: FirstBlood/urls.py ================================================ # -*- coding: UTF-8 -*- """FirstBlood URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. 
Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.views.static import serve from django.conf.urls import include, url from django.conf import settings from django.contrib import admin admin.autodiscover() from django.contrib.auth.views import (logout,login,password_change,password_change_done) from views import * urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^index/$', index), # 首页 url(r'^$', index, name='index'), url(r'^accounts/login/$', login, {'template_name': 'registered/login.html'}, name='django.contrib.auth.views.login'), url(r'^accounts/logout/$', logout, name='django.contrib.auth.views.logout'), url(r'^password_change/$', password_change, { 'post_change_redirect': '/password_change_done/', 'template_name': 'registered/password_change.html'}, name='django.contrib.auth.views.password_change'), url(r'^password_change_done/$', password_change_done, { 'template_name': 'registered/password_change_done.html'}, name='django.contrib.auth.views.password_change_done'), url(r'^get_username/$', get_username), # 获取当前登陆用户名 url(r'^check_permission/$', check_permission), # 检测用户权限 url(r'^static/(?P.*)$', serve, {'document_root': settings.STATIC_ROOT}), # 数据同步 url(r'^datax_web/', include('datax_web.urls')), # 批处理作业 url(r'^batch_job/', include('batch_job.urls')), ] ================================================ FILE: FirstBlood/views.py ================================================ # -*- coding: UTF-8 -*- from django.contrib.auth.decorators import permission_required from django.contrib.auth.decorators import login_required from django.shortcuts import render from django.template import RequestContext from django.http import HttpResponse from django.contrib import auth from controller.core.public import Currency import json # Create your views here. 
@login_required def index(request): # 首页 nowuser = auth.get_user(request) return render(request, 'index.html', locals()) def page_not_found(request): return render("404.html") def permission_denied(request): return render("403.html") @login_required def get_username(request): # 获取当前登陆的用户名 nowuser = auth.get_user(request) username = nowuser.get_username() response = HttpResponse() response.write(json.dumps(username)) return response @login_required def check_permission(request): # 检测用户权限 nowuser = auth.get_user(request) cur = Currency(request) permission = cur.rq_post('permission') status = 0 if nowuser.has_perm(permission) else 1 response = HttpResponse() response.write(json.dumps({'status': status})) return response ================================================ FILE: FirstBlood/wsgi.py ================================================ """ WSGI config for FirstBlood project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "FirstBlood.settings") application = get_wsgi_application() ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2022 盲僧 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: README.md ================================================ 精简版ETL数据转换工具 ========== ## 功能 1.数据同步(目前只支持MySQL) 2.执行SQL脚本 (后期开发) 3.定时执行 ## 安装(两种方法) ### 1.Docker 镜像安装 #### 系统已打包成docker镜像,3个多G,可以直接使用 1.1 拉取镜像 docker pull hanson001/first_blood:v1 1.2 运行容器 docker run -itd --privileged -p 10028:22 -p 9028:9000 -p 3328:3306 -p 8080:8080 --name FirstBlood_test hanson001/first_blood:v1 /usr/sbin/init 10028映射centos系统ssh连接端口,用户名root 密码 123.com 3328映射MySQL数据库端口,用户名root 密码123.com 8080映射内部websocket端口 9028映射内部系统访问端口,登录地址:http://服务器IP:9028 用户名 admim 密码123456.com ### 2.源码配置安装 #### 准备环境 1.python 环境 2.7 2.安装requirements.txt里的依赖包,若运行时还报缺少模块的错误,再安装缺少的模块。 3.创建日志目录 cd <项目路径/FirstBlood> mkdir log #### 建表 整个项目所需要的表,关于用户认证、权限控制等等使用django自带的,而项目其它功能模块使用原生SQL语句创建。 涉及到项目功能模块增删改查,全部使用原生SQL语句。 涉及到用户认证、权限控制等等,使用Django的orm。 1.create_table.sql 执行建表语句,创建项目中各个模块所需要的表 2.执行 python manage.py migrate,创建项目用户认证、权限控制所需要的表(Django自带) #### 运行其它服务 由于项目使用到定时任务和异步实时查询日志功能,所以需要使用到celery和websockted。 这两个服务的启动和运行全部交给supervisord托管,所以需要配置好supervisord配置文件。 supervisord配置文件分两个,dev为开发环境的配置文件,pro为生产环境的配置文件。 里面的路径需要根据自己实际的环境配置。 1.配置完成后执行以下命令,启动celery和websocketed服务 supervisord -c 项目路径/FirstBlood/supervisord/FirstBlood_pro.conf (开发环境使用FirstBlood_dev.conf文件) 2.根据配置文件里的日志路径查看是否报错,有报错百度、Google。 如果错误不影响功能的使用,则忽略。 #### settings配置文件 由于项目的settings配置文件,需要根据开发环境、生产环境来连接不同环境的数据库,所以需要在开发环境添加变量。 settings文件里就可以通过development_environment变量,选择是连接生产数据库,还是开发环境数据库。 1.1 修改 bash_profile 文件 vim ~/.bash_profile # 程序根据不同环境变量加载测试或生产的配置文件 
development_environment=1 export development_environment #### 下载阿里开发数据同步工具datax 5.1 下载datax工具,放在项目目录下 项目路径/FirstBlood/datax 5.2 创建日志目录 由于项目的数据同步,底层使用的datax,而datax产生的日志文件名是固定长度,且以datax的 json配置文件名命名, 当配置文件名超过日志文件名的固定长度时,datax将会以固定长度截取配置文件名,来命名日志文件名称。所以无法以 datax的自生的日志来实时显示同步日志。所以需要新创建日志目录 web_log,以任务ID+13位时间戳命名日志文件名, 将datax产生的日志导入web_log目录里。 操作命令: cd 项目路径/FirstBlood/datax/ mkdir web_log ## 启动 以上步骤执行完成后,就可以运行项目。若有错误,百度Google。 ## 使用 大部分功能操作参照博客 databaseinfo 表,需要预先导入生产库的表信息 ================================================ FILE: batch_job/__init__.py ================================================ ================================================ FILE: batch_job/admin.py ================================================ # -*- coding: utf-8 -*- from __future__ import unicode_literals from django.contrib import admin # Register your models here. ================================================ FILE: batch_job/apps.py ================================================ # -*- coding: utf-8 -*- from __future__ import unicode_literals from django.apps import AppConfig class BatchJobConfig(AppConfig): name = 'batch_job' ================================================ FILE: batch_job/conf/__init__.py ================================================ #!/usr/bin/python3 # -*- coding: utf-8 -*- # @Function: # @Time : 2018/7/20 15:34 # @Author : Hanson # @Email : 229396865@qq.com # @File : __init__.py.py # @Software: PyCharm # @Company : 东方银谷 ================================================ FILE: batch_job/conf/config.py ================================================ #!/usr/bin/python3 # -*- coding: utf-8 -*- # @Function: 批处理作业配置文件 # @Time : 2018/7/20 15:35 # @Author : Hanson # @Email : @qq.com # @File : config.py # @Software: PyCharm # @Company : 东方银谷 # 最大任务个数 maxtasksperchild = 20 query_batch_job_by_name_sql = "SELECT * FROM FirstBlood.batch_job bj WHERE bj.`name` = '%s';" query_batch_job_sql1 = "SELECT * FROM FirstBlood.batch_job bj WHERE bj.`name` = '%s' and bj.id!=%s;" 
insert_batch_job_sql = "insert into FirstBlood.`batch_job` (`name`, `description`) values ('%s', '%s');" insert_batch_job_details_sql = "insert into FirstBlood.`batch_job_details` (`batch_job_id`, `subjob_id`, `type`) values" query_batch_job_sql2 = """ SELECT bj.*, dp.task, dp.enabled, concat(dc.`minute`,' ',dc.`hour`,' ',dc.day_of_week,' ',dc.day_of_month, ' ',dc.month_of_year, ' (m/h/d/dM/MY)') crontab FROM FirstBlood.batch_job bj LEFT JOIN FirstBlood.djcelery_periodictask dp ON CONCAT("[",bj.id,"]")=dp.args LEFT JOIN FirstBlood.djcelery_crontabschedule dc on dp.crontab_id=dc.id ORDER BY bj.id """ query_batch_job_sql3 = """ SELECT bj.*, dp.task, dp.enabled, dp.crontab_id, concat(dc.`minute`,' ',dc.`hour`,' ',dc.day_of_week,' ',dc.day_of_month, ' ',dc.month_of_year, ' (m/h/d/dM/MY)') crontab FROM FirstBlood.batch_job bj LEFT JOIN FirstBlood.djcelery_periodictask dp ON CONCAT("[",bj.id,"]")=dp.args LEFT JOIN FirstBlood.djcelery_crontabschedule dc on dp.crontab_id=dc.id WHERE bj.id = %s """ query_batch_job_sub_job_by_id_sql = """ SELECT bjd.*, dj.`name`, dj.description FROM FirstBlood.batch_job_details bjd LEFT JOIN FirstBlood.datax_job dj on bjd.subjob_id=dj.id WHERE bjd.batch_job_id = %s """ update_batch_job_by_id_sql = """ update FirstBlood.batch_job set `name` = '%s', `description` = '%s' where id = %s """ delete_batch_job_details_by_id_sql = """ delete from FirstBlood.batch_job_details where batch_job_id =%s; """ insert_batch_job_instance_sql = """ insert into FirstBlood.batch_job_instance ( `instance_id`, `name`, `description`, `trigger_mode` ) values (%s, '%s', '%s', %s); """ update_batch_job_instance_by_id_sql = """ update FirstBlood.batch_job_instance set `status`=%s, `result`=%s, `end_time`='%s' where instance_id=%s; """ insert_batch_job_instance_details_sql = """ insert into FirstBlood.batch_job_instance_details ( `instance_id`, `subjob_instance_id`, `type` ) values (%s, %s, %s); """ select_batch_job_instance_sql = "select * from 
FirstBlood.batch_job_instance bji" count_batch_job_instance_sql = "select count(1) count from FirstBlood.batch_job_instance bji" select_batch_job_instance_by_id_sql = "select * from FirstBlood.batch_job_instance bji where bji.instance_id=%s" # 根据ID获取子作业类型为数据同步的信息 select_sub_job_datax_instance_by_id_sql = """ select bjid.instance_id, bjid.subjob_instance_id, bjid.type, dji.`name`, dji.description, dji.trigger_mode, dji.`status`, dji.result, dji.start_time, dji.end_time from FirstBlood.batch_job_instance_details bjid LEFT JOIN FirstBlood.datax_job_instance dji on bjid.subjob_instance_id=dji.instance_id where bjid.instance_id=%s and bjid.type=1 """ ================================================ FILE: batch_job/models.py ================================================ # -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models # Create your models here. class BatchJobPermission(models.Model): """ 批处理作业权限 """ class Meta: db_table = 'batch_job_permission' permissions = ( ("viewBatchJob", u"查看批处理作业"), ("editBatchJob", u"修改批处理祖业") ) ================================================ FILE: batch_job/tests.py ================================================ # -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase # Create your tests here. 
import requests import json import pprint # url1 = 'http://httpbin.org/get?name=gemey&age=22' # url2 = 'http://192.168.190.132:9000/batch_job/get_batch_job_instance/?username=admin&password=123456.com&limit=10&offset=0&name=test2&description=&status=&result=&trigger_mode=' url3 = 'http://172.24.132.144:9000/batch_job/get_batch_job_instance/?limit=10&offset=0&name=big_data&description=&status=&result=&trigger_mode=' response = requests.get(url3) pprint.pprint(json.loads(response.text)) ================================================ FILE: batch_job/urls.py ================================================ # -*- coding: UTF-8 -*- from django.conf.urls import url import views urlpatterns = [ # Examples: # url(r'^$', 'YinguOnline.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^index/$', views.index), # 首页 url(r'^add_batch_job/$', views.add_batch_job), # 新增批处理作业 url(r'^update_batch_job/(?P\d+)/$', views.update_batch_job), # 更新批处理作业 url(r'^batch_job_instance/$', views.batch_job_instance), # 批处理作业执行历史 url(r'^batch_job_instance_details/(?P\d+)/$', views.batch_job_instance_details), # 批处理作业详情执行历史 url(r'^get_task_template/$', views.get_task_template), # 获取任务模板 url(r'^get_crontab/$', views.get_crontab), # 获取crontab url(r'^add_crontab/$', views.add_crontab), # 新增 crontab 定时时间 url(r'^add_batch_job_data/$', views.add_batch_job_data), # 提交新增或更新批处理作业数据 # url(r'^add_job_data/$', views.add_job_data), # 新增或修改任务数据 url(r'^get_batch_job_data/$', views.get_batch_job_data), # 获取批处理作业数据 url(r'^get_batch_job_data_by_id/$', views.get_batch_job_data_by_id), # 根据ID获取需要更新的任务数据 url(r'^get_batch_job_sub_job_by_id/$', views.get_batch_job_sub_job_by_id), # 根据ID获取需要更新的子作业数据 url(r'^get_batch_job_instance/$', views.get_batch_job_instance), # 获取批处理作业实例数据 url(r'^get_batch_job_instance_data_by_id/$', views.get_batch_job_instance_data_by_id), # 根据ID获取批处理作业实例数据 url(r'^get_batch_job_sub_job_instance_data_by_id/$', views.get_batch_job_sub_job_instance_data_by_id), # 
根据ID获取批处理作业子作业实例数据 url(r'^run_batch_job_task/$', views.run_batch_job_task), # 运行批处理任务 # url(r'^get_database/$', views.get_database), # 获取数据库信息 ] ================================================ FILE: batch_job/views.py ================================================ # -*- coding: utf-8 -*- from __future__ import unicode_literals from django.contrib.auth.decorators import permission_required from django.contrib.auth.decorators import login_required from django.core.exceptions import PermissionDenied from django.shortcuts import render from django.http import HttpResponse from functools import wraps from controller.core.public import (Currency, DatetimeHelp) from controller.core import query_sql from controller.public import dataconn from djcelery import loaders from djcelery.models import PeriodicTask, CrontabSchedule from djcelery.schedulers import ModelEntry # from multiprocessing import Manager,Pool import multiprocessing as mp from anyjson import loads, dumps from celery import shared_task from celery import registry from celery import schedules from conf import config from datax_web.conf import config as datax_web_config from datax_web.views import run as datax_web_run from datax_web.views import Datax from datax_web.views import JobData as DataxJobData import commands import logging import sys import json reload(sys) sys.setdefaultencoding("utf-8") # Create your views here. logger = logging.getLogger('batch_job') _SUCCESS = dict(status=0, msg=u'检测成功') _str = (str, unicode) # 操作类型 _OPERATION_TYPE = (1, 2) # 1:新增批处理作业 2:修改批处理作业 # 触发模式 _TRIGGER_MODE = (1, 2) # 1:自动 2:手动 _TRIGGER_MODE_STR = u'TRIGGER_MODE = (1, 2) # 1:自动 2:手动' # 子作业类型 _SUBJOB_TYPE = (1, 2, 3) # 1 数据同步 2 SQL脚本 3 备份。 主要用于后期扩展 # 执行状态 _STATUS = (0, 1) # 状态 0 正在执行 1 执行完成 def verification(CheckClass): """ 装饰器用于检测用户提交的信息是否合法. check_class 检测类 Decorator for views that checks that the user submitted information, redirecting to the log-in page if necessary. 
The test should be a callable that takes the user object and returns True if the user passes. """ def decorator(view_func): @wraps(view_func) def _wrapped_view(request, *args, **kwargs): response = HttpResponse() ccl = CheckClass(request) result = ccl.total_check() if result['status']: response.write(json.dumps(result)) return response return view_func(request, *args, **kwargs) return _wrapped_view return decorator class BatchJobData(object): """ 新增、更新、手工运行任务时处理数据 """ def __init__(self, data): # id 为 batch_job_id self._batch_job_id = data.get('_id', None) self.name = data.get('name', '') self.description = data.get('description', '') self.task_template = data.get('task_template', '') self.is_enable = data.get('is_enable', '') self.crontab = data.get('crontab', '') self.batch_job_details = data.get('batch_job_details', []) self.trigger_mode = data.get('trigger_mode', '') self.operation_type = data.get('operation_type', '') self.dtconn = dataconn.DatabaseConnection(logger) self.dtsf = dataconn.DataTransform() self.dh = DatetimeHelp() self.__timestamp1 = self.dh.timestamp1 @property def timestamp1(self): return self.__timestamp1 def _get_schedule_dict(self): schedule = CrontabSchedule.objects.get(pk=self.crontab) return { 'crontab': schedule, 'kwargs': dumps({}), 'task': self.task_template, 'enabled': self.is_enable, 'name': self.name } @property def schedule_dict(self): return self._get_schedule_dict() # def get_batch_job_id(self): # """ # 新建批处理任务数据时 batch_id 为 插入数据后的返回id # 更新或手动运行任务时 batch_id 为 页面体检的id # :return: batch_id # """ # return self._batch_job_id or self.dtconn.ygol.insert_id @property def batch_job_id(self): """ 新建批处理任务数据时 batch_id 为 插入数据后的返回id 更新或手动运行任务时 batch_id 为 页面体检的id :return: batch_id """ return self._batch_job_id @batch_job_id.setter def batch_job_id(self, batch_job_id): self._batch_job_id = batch_job_id def get_insert_datax_job_sql(self): # 在 batch_job表里创建新的任务 - 新增SQL return config.insert_batch_job_sql % (self.name, self.description) def 
get_update_batch_job_by_id_sql(self): # 获取batch_job表里更新任务 - 更新SQL return config.update_batch_job_by_id_sql % (self.name, self.description, self.batch_job_id) def get_insert_batch_job_details_sql(self): # 拼接插入批处理作业详情表SQL insert into values ('user_id', 1), ('card_name', 1) values_list = ["('%s', %s, %s)" % (self.batch_job_id, subjob['subjob_id'], subjob['type']) for subjob in self.batch_job_details] return config.insert_batch_job_details_sql + ','.join(values_list) def get_delete_batch_job_details_by_id_sql(self): # 获取根据ID删除batch_job_details 表的sql return config.delete_batch_job_details_by_id_sql % self.batch_job_id def create(self): # batch_job表里创建新的任务 result = _SUCCESS.copy() sql1 = self.get_insert_datax_job_sql() self.dtconn.ygol.transaction_start() self.dtconn.ygol.transaction_execute(sql1) if self.dtconn.ygol.status: msg = u'batch_job表里创建新的任务,SQL:%s 插入数据失败。 - Msg: %s' % \ (sql1, self.dtconn.ygol.msg) logger.error(msg) result = dict(status=500, msg=msg) else: self.batch_job_id = self.dtconn.ygol.insert_id sql2 = self.get_insert_batch_job_details_sql() self.dtconn.ygol.transaction_execute(sql2) if self.dtconn.ygol.status: msg = u'创建批处理作业详情,SQL:%s 插入数据失败。 - Msg: %s' % \ (sql2, self.dtconn.ygol.msg) logger.error(msg) result = dict(status=500, msg=msg) self.dtconn.ygol.transaction_commit_and_close() return result def create_PeriodicTask(self): """ 创建定时任务 在 PeriodicTask表里创建新的定时任务,并在表的args字段里保存批处理作业表的batch_job_id。这样, 就可以把批处理作业和定时调度关联起来。 :return: """ obj = PeriodicTask.objects.create(**self.schedule_dict) obj.args = dumps([self.batch_job_id]) obj.save() return obj def update(self): # 更新任务 result = _SUCCESS.copy() sql1 = self.get_update_batch_job_by_id_sql() self.dtconn.ygol.transaction_start() self.dtconn.ygol.transaction_execute(sql1) if self.dtconn.ygol.status: msg = u'batch_job表,SQL:%s 更新数据失败。 - Msg: %s' % \ (sql1, self.dtconn.ygol.msg) logger.error(msg) result = dict(status=500, msg=msg) else: sql2 = self.get_delete_batch_job_details_by_id_sql() sql3 = 
self.get_insert_batch_job_details_sql() self.dtconn.ygol.transaction_execute(sql2) self.dtconn.ygol.transaction_execute(sql3) if self.dtconn.ygol.status: msg = u'batch_job_details表里更新列 - SQL2:%s - SQL3: %s -' \ u' 更新数据失败。 - Msg: %s' % \ (sql2, sql3, self.dtconn.ygol.msg) logger.error(msg) result = dict(status=500, msg=msg) self.dtconn.ygol.transaction_commit_and_close() return result def update_PeriodicTask(self): # 更新PeriodicTask obj = PeriodicTask.objects.get(args="[%s]" % self.batch_job_id) for k, v in self.schedule_dict.items(): setattr(obj, k, v) obj.save() return obj def get_batch_job_by_id(self, batch_job_id): # 根据批处理作业id,获取批处理作业信息 sql = config.query_batch_job_sql3 % batch_job_id source_data = self.dtconn.ygol.getsingle(sql) return self.dtsf.get_row_by_dict_to_user(source_data) def get_batch_job_details_by_id(self, batch_job_id): # 根据批处理作业id,获取批处理作业子作业batch_job_details表信息 sql = config.query_batch_job_sub_job_by_id_sql % batch_job_id source_data = self.dtconn.ygol.getall(sql) return map(self.dtsf.get_row_by_dict_to_user, source_data) class BatchJobInstanceData(object): """ 批处理作业实例数据处理 data 为批处理作业更新页面提及的任务数据 数据格式: {u'_id': u'30', u'batch_job_details': [{u'batch_job_id': u'30', u'create_time': u'2018-07-24 21:18:31', u'description': u'\u6570\u636e\u540c\u6b65\u6d4b\u8bd5', u'id': u'37', u'modify_time': u'2018-07-24 21:18:31', u'name': u'test', u'subjob_id': u'28', u'type': u'1'}], u'crontab': u'2', u'description': u'DIY\u6d4b\u8bd5\u7ec4\u88c5\u673a1', u'is_enable': False, u'name': u'test1', u'operation_type': 2, u'task_template': u'celery.chunks', u'trigger_mode': 2} _id:batch_job_id 批处理作业表 id """ def __init__(self, data): self._batch_job_id = data.get('_id', None) self.name = data.get('name', '') self.description = data.get('description', '') self._trigger_mode = data.get('trigger_mode', None) self.dtconn = dataconn.DatabaseConnection(logger) self.dtsf = dataconn.DataTransform() self.dh = DatetimeHelp() self.__timestamp1 = self.dh.timestamp1 
        self._batch_job_instance_id = None
        # Assigning through the property setter combines batch_job_id + timestamp.
        self.batch_job_instance_id = self._batch_job_id

    @property
    def batch_job_instance_id(self):
        """Batch-job instance ID: batch job ID concatenated with a ms timestamp.

        :return: e.g. 30 + 1532522114566 -> 301532522114566
        """
        return self._batch_job_instance_id

    @batch_job_instance_id.setter
    def batch_job_instance_id(self, batch_job_id):
        self._batch_job_instance_id = '%s%s' % (batch_job_id, self.__timestamp1)

    @property
    def trigger_mode(self):
        return self._trigger_mode

    @trigger_mode.setter
    def trigger_mode(self, val):
        # Invalid values are logged and ignored (the old value is kept).
        if val not in _TRIGGER_MODE:
            msg = u'触发模式值错误 - trigger_mode:%s - %s' % (val, _TRIGGER_MODE_STR)
            logger.error(msg)
        else:
            self._trigger_mode = val

    def get_insert_batch_job_instance_sql(self):
        # Build the INSERT for a new batch_job_instance row.
        return config.insert_batch_job_instance_sql % (
            self.batch_job_instance_id,
            self.name,
            self.description,
            self.trigger_mode
        )

    def get_update_batch_job_instance_by_id_sql(self, result):
        # Build the UPDATE storing final status/result/end-time for this instance.
        return config.update_batch_job_instance_by_id_sql % (
            _STATUS[1],
            result,
            self.dh.nowtimestrf1,
            self.batch_job_instance_id
        )

    @staticmethod
    def get_select_batch_job_instance_by_id_sql(batch_job_instance_id):
        return config.select_batch_job_instance_by_id_sql % batch_job_instance_id

    def get_batch_job_instance_by_id(self, batch_job_instance_id):
        # Fetch one batch_job_instance row, converted for display.
        sql = self.get_select_batch_job_instance_by_id_sql(batch_job_instance_id)
        souce_data = self.dtconn.ygol.getsingle(sql)
        return self.dtsf.get_row_by_dict_to_user(souce_data)

    def start_log(self):
        # Record the start of a run in batch_job_instance; failures are logged only.
        sql = self.get_insert_batch_job_instance_sql()
        self.dtconn.ygol.insert(sql)
        if self.dtconn.ygol.status:
            logger.error(u'记录任务日志到batch_job_instance 失败 - SQL: %s - msg: %s' % (sql, self.dtconn.ygol.msg))

    def record_result_log(self, result):
        # Record the final result of a run in batch_job_instance; failures are logged only.
        sql = self.get_update_batch_job_instance_by_id_sql(result)
        self.dtconn.ygol.update(sql)
        if self.dtconn.ygol.status:
            logger.error(u'记录任务执行结果 batch_job_instance 失败 - SQL: %s - msg: %s' % (sql, self.dtconn.ygol.msg))


class BatchJobSubjobInstanceData(object):
    """Run-instance bookkeeping for the sub-jobs of a batch job.

    ``data`` is one element of the ``batch_job_details`` list, e.g.::

        {u'batch_job_id': u'30', u'create_time': u'2018-07-24 21:18:31',
         u'description': u'...', u'id': u'37',
         u'modify_time': u'2018-07-24 21:18:31', u'name': u'test',
         u'subjob_id': u'28', u'type': u'1'}
    """

    def __init__(self, data):
        # Instance ids are assigned later by the caller via the property setters.
        self._subjob_instance_id = None
        self._batch_job_instance_id = None
        self.subjob_id = data.get('subjob_id')
        self.type = data.get('type')
        self.dtconn = dataconn.DatabaseConnection(logger)
        self.dtsf = dataconn.DataTransform()
        self.dh = DatetimeHelp()
        self.__timestamp1 = self.dh.timestamp1

    @property
    def subjob_instance_id(self):
        return self._subjob_instance_id

    @subjob_instance_id.setter
    def subjob_instance_id(self, subjob_instance_id):
        self._subjob_instance_id = subjob_instance_id

    @property
    def batch_job_instance_id(self):
        return self._batch_job_instance_id

    @batch_job_instance_id.setter
    def batch_job_instance_id(self, batch_job_instance_id):
        self._batch_job_instance_id = batch_job_instance_id

    def get_insert_batch_job_instance_details_sql_sql(self, _type):
        # Build the INSERT for batch_job_instance_details (note the doubled
        # "_sql_sql" in the name — kept for caller compatibility).
        return config.insert_batch_job_instance_details_sql % (
            self.batch_job_instance_id,
            self.subjob_instance_id,
            _type
        )

    def start_subjob_log(self, _type):
        # Record the start of one sub-job run; failures are logged only.
        sql = self.get_insert_batch_job_instance_details_sql_sql(_type)
        self.dtconn.ygol.insert(sql)
        if self.dtconn.ygol.status:
            logger.error(u'记录子任务日志到batch_job_instance_details 失败 - SQL: %s - msg: %s' % (sql, self.dtconn.ygol.msg))

    @classmethod
    def run_sub_job(cls, _type, subjob_id, results, batch_job_instance_id, trigger_mode):
        """Execute one sub-job of a batch job.

        :param _type: sub-job type (only _SUBJOB_TYPE[0], datax sync, is handled here)
        :param subjob_id: sub-job id
        :param results: shared list collecting every sub-job's result (0 ok / 1 failed)
        :param batch_job_instance_id: parent batch-job instance ID
        :param trigger_mode: manual/automatic trigger flag
        :return: None
        """
        result = None
        if _type == _SUBJOB_TYPE[0]:
            # Data-sync sub-job: load the datax job definition and its writer columns.
            dataxjd = DataxJobData({})
            dataxjob_data = dataxjd.get_job_data_by_id(subjob_id)
            dataxjob_writer_column = dataxjd.get_datax_job_writer_column_by_id(subjob_id)
            writer_column_id = [dt['name'] for dt in dataxjob_writer_column]
            dataxjob_data['_id'] = subjob_id
            dataxjob_data['trigger_mode'] = trigger_mode
            dataxjob_data['writer_column_id'] = writer_column_id
            bjsid = cls(dataxjob_data)
            dx = Datax(dataxjob_data)
            # Write the datax job JSON file, then log the datax instance start.
            dx.jd.create_file(dx.job_json_file, dx.get_job_json())
            dx.jd.start_log()
            bjsid.batch_job_instance_id = batch_job_instance_id
            bjsid.subjob_instance_id = dx.jd.datax_job_instance_id
            bjsid.start_subjob_log(_type)  # log the sub-job start
            # Shell out to datax; non-zero exit status means failure.
            (status, output) = commands.getstatusoutput(dx.cmd)
            result = 1 if status else 0
            dx.jd.record_result_log(result)
        # NOTE(review): placement relative to the `if` is ambiguous in the
        # original layout; `result = None` above suggests the append happens
        # for every type — TODO confirm against upstream history.
        results.append(result)

    @staticmethod
    def get_select_sub_job_datax_instance_by_id_sql(batch_job_instance_id):
        return config.select_sub_job_datax_instance_by_id_sql % batch_job_instance_id

    @staticmethod
    def get_sub_job_datax_instance_data_by_id(batch_job_instance_id):
        # Fetch all datax sub-job instance rows of one batch-job instance, converted for display.
        dtconn = dataconn.DatabaseConnection(logger)
        dtsf = dataconn.DataTransform()
        sql = BatchJobSubjobInstanceData.get_select_sub_job_datax_instance_by_id_sql(batch_job_instance_id)
        source_data = dtconn.ygol.getall(sql)
        return map(dtsf.get_row_by_dict_to_user, source_data)


class CheckBatchJob(object):
    """Validate the payload posted when creating / updating / manually running a batch job.

    Each check_* method sets ``self.result`` to an error dict on failure;
    ``total_check`` runs them in order and stops at the first error.

    :return result format: {'status': 1, 'msg': u'操作类型错误'}
    """

    _SUCCESS = _SUCCESS.copy()
    _OPERATION_TYPE_ERROR1 = dict(status=1, msg=u'操作类型不能为空')
    _OPERATION_TYPE_ERROR2 = dict(status=2, msg=u'操作类型错误')
    _DESCRIPTION_ERROR1 = dict(status=3, msg=u'任务描述不能为空')
    _NAME_ERROR1 = dict(status=4, msg=u'任务名称不能为空')
    _NAME_ERROR2 = dict(status=5, msg=u'任务名称已存在')
    # NOTE(review): both template errors share status=6 in the original.
    _TASK_TEMPLATE_ERROR1 = dict(status=6, msg=u'任务模板不能为空')
    _TASK_TEMPLATE_ERROR2 = dict(status=6, msg=u'任务模板不存在')
    _IS_ENABLE_ERROR1 = dict(status=7, msg=u'是否启用值错误')
    _BATCH_JOB_DETAILS_ERROR1 = dict(status=8, msg=u'批处理作业详情不能为空')
    _BATCH_JOB_DETAILS_ERROR2 = dict(status=9, msg=u'批处理作业详情,子作业 %s %s 类型错误')
    _BATCH_JOB_DETAILS_ERROR3 = dict(status=10, msg=u'批处理作业详情,子作业 %s %s ID %s 不存在')
    _TRIGGER_MODE_ERROR1 = dict(status=11, msg=u'触发模式 不存在')
    _TRIGGER_MODE_ERROR2 = dict(status=12, msg=u'触发模式值错误')
_CRONTAB_ERROR = dict(status=13, msg=u'执行时间错误') _BATCH_JOB_ID_ERROR1 = dict(status=14, msg=u'batch_job_id 不能为空') _BATCH_JOB_ID_ERROR2 = dict(status=15, msg=u'batch_job_id 不存在') def __init__(self, request): cur = Currency(request) data = cur.rq_post_json('data') self.dtconn = dataconn.DatabaseConnection(logger) self.jd = BatchJobData(data) self.error_msg = [] self.result = self._SUCCESS def check_operation_type(self): # 检测操作类型 operation_type = self.jd.operation_type if not operation_type: self.result = self._OPERATION_TYPE_ERROR1 else: if operation_type not in _OPERATION_TYPE: self.result = self._OPERATION_TYPE_ERROR2 def check_name_by_operation_type(self): # 根据操作类型 检测任务名称 name = self.jd.name if self.jd.operation_type == _OPERATION_TYPE[0]: sql = config.query_batch_job_by_name_sql % name self.check_name(name, sql) if self.jd.operation_type == _OPERATION_TYPE[1]: sql = config.query_batch_job_sql1 % (name, self.jd.batch_job_id) self.check_name(name, sql) def check_name(self, name, sql): # 新增、更新、运行批处理作业时,检测名称 if name: data = self.dtconn.ygol.getsingle(sql) if self.dtconn.ygol.status: _msg = u'检测任务名称时数据库错误。 - Msg: %s' % self.dtconn.ygol.msg logger.error(_msg) self.result = dict(status=500, msg=_msg) else: if data: self.result = self._NAME_ERROR2 else: self.result = self._NAME_ERROR1 def check_description(self): # 检测任务描述 description = self.jd.description if not description: self.result = self._DESCRIPTION_ERROR1 def check_task_template(self): # 检测任务模板 loaders.autodiscover() tasks = list(sorted(registry.tasks.regular().keys())) if self.jd.task_template: if self.jd.task_template not in tasks: self.result = self._TASK_TEMPLATE_ERROR1 else: self.result = self._TASK_TEMPLATE_ERROR2 def check_is_enable(self): # 检测“是否启用” is_enable = self.jd.is_enable if not isinstance(is_enable, bool): self.result = self._IS_ENABLE_ERROR1 def check_crontab(self): crontab = self.jd.crontab crons = CrontabSchedule.objects.values('id') try: if long(crontab) not in [c['id'] for c in crons]: 
self.result = self._CRONTAB_ERROR except Exception, e: self.result = self._CRONTAB_ERROR def check_batch_job_details(self): """ 检查批处理作业详情 先验证子作业类型,再验证子作业是否存在 :return: """ data = self.jd.batch_job_details if data: for dt in data: try: _type = int(dt['type']) except Exception as e: msg = self._BATCH_JOB_DETAILS_ERROR2.get('msg') % (dt['name'], dt['description']) status = self._BATCH_JOB_DETAILS_ERROR2.get('status') self.result = dict(status=status, msg=msg) break else: if _type not in _SUBJOB_TYPE: msg = self._BATCH_JOB_DETAILS_ERROR2.get('msg') % (dt['name'], dt['description']) status = self._BATCH_JOB_DETAILS_ERROR2.get('status') self.result = dict(status=status, msg=msg) break else: msg = self._BATCH_JOB_DETAILS_ERROR3.get('msg') % (dt['name'], dt['description'], dt['subjob_id']) status = self._BATCH_JOB_DETAILS_ERROR3.get('status') # 数据同步 if _type == _SUBJOB_TYPE[0]: sql = datax_web_config.query_datax_job_by_id_sql % dt['subjob_id'] data = self.dtconn.ygol.getsingle(sql) if self.dtconn.ygol.status: _msg = u'检测datax_job_id 错误 - SQL: %s。 - Msg: %s' % (sql, self.dtconn.ygol.msg) logger.error(_msg) self.result = dict(status=500, msg=_msg) else: if not data: self.result = dict(status=status, msg=msg) else: self.result = self._BATCH_JOB_DETAILS_ERROR1 def check_batch_job_id(self): # 检测批处理作业ID if self.jd.operation_type == _OPERATION_TYPE[1]: _id = self.jd.batch_job_id if isinstance(_id, _str) and _id and _id.isdigit(): sql = config.query_batch_job_sql3 % _id data = self.dtconn.ygol.getsingle(sql) if self.dtconn.ygol.status: _msg = u'检测batch_job_id 错误 - SQL: %s。 - Msg: %s' % (sql, self.dtconn.ygol.msg) logger.error(_msg) self.result = dict(status=500, msg=_msg) else: if not data: self.result = self._BATCH_JOB_ID_ERROR2 else: self.result = self._BATCH_JOB_ID_ERROR1 def check_trigger_mode(self): # 检测触发模式 trigger_mode = self.jd.trigger_mode if self.jd.operation_type == _OPERATION_TYPE[1]: if not trigger_mode: self.result = self._TRIGGER_MODE_ERROR1 else: if trigger_mode not 
in _TRIGGER_MODE: self.result = self._TRIGGER_MODE_ERROR2 def total_check(self): check_func = ['check_operation_type', 'check_name_by_operation_type', 'check_description', 'check_trigger_mode', 'check_task_template', 'check_is_enable', 'check_crontab', 'check_batch_job_details', 'check_batch_job_id' ] for func_name in check_func: getattr(self, func_name)() if self.result['status']: break return self.result class BatchJobInstanceSql(object): # datax job instance 查询sql _table_bji = { 'name': {'data_type': 'str', 'val': ''}, 'description': {'data_type': 'str', 'val': ''}, 'status': {'data_type': 'str', 'val': ''}, 'result': {'data_type': 'str', 'val': ''}, 'trigger_mode': {'data_type': 'str', 'val': ''}, } _order_by = [{'table': 'bji', 'field': 'start_time', 'rule': 'DESC'}] def __init__(self, request): self.cur = Currency(request) self.rq_get = self.cur.rq_get self._offset = int(self.rq_get('offset')) self._limit = int(self.rq_get('limit')) self._SQL = config.select_batch_job_instance_sql self._TOTAL_SQL = config.count_batch_job_instance_sql self._set_table(self._table_bji) def _set_table(self, table): for field, attr in table.items(): val = self.rq_get(field) attr['val'] = val return table @property def tables(self): _tables = {'bji': self._table_bji} return _tables @property def cvtpara(self): _cvtpara = { 'offset': self._offset, 'limit': self._limit, 'sql': self._SQL, 'total_sql': self._TOTAL_SQL, 'order_by': self._order_by, 'order_rule': self._order_by } return _cvtpara class PaginatorBatchJobInstance(dataconn.DatabaseConnection, query_sql.Q_Data): # 分页访问数据 def __init__(self, qs): super(PaginatorBatchJobInstance, self).__init__(logger) query_sql.Q_Data.__init__(self, qs) @property def rows(self): return self._get_rows(self.ygol) @property def total(self): return self._get_total(self.ygol) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def index(request): # 批处理作业首页 return render(request, 'batch_job/index.html', 
locals()) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) @permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied) def add_batch_job(request): # 新建批处理作业 return render(request, 'batch_job/add_batch_job.html', locals()) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def update_batch_job(request, id): # 更新批处理作业 return render(request, 'batch_job/update_batch_job.html', locals()) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def batch_job_instance(request): # 批处理作业执行历史 return render(request, 'batch_job/batch_job_instance.html', locals()) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def batch_job_instance_details(request, id): # 批处理作业详情执行历史 return render(request, 'batch_job/batch_job_instance_details.html', locals()) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_task_template(request): # 任务模板 irrelevant_tasks = ['FirstBlood.celery.debug_task', 'celery.backend_cleanup', 'celery.chain', 'celery.chord', 'celery.chord_unlock', 'celery.chunks', 'celery.group', 'celery.map', 'celery.starmap', 'run', u'run_batch_job'] loaders.autodiscover() response = HttpResponse() tasks = list(sorted(registry.tasks.regular().keys())) for t in irrelevant_tasks: tasks.remove(t) response.write(json.dumps(tasks)) return response @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_crontab(request): # 获取 crontab 定时时间 response = HttpResponse() data = CrontabSchedule.objects.values() response.write(json.dumps(list(data))) return response @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) @permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied) def add_crontab(request): # 新增 crontab 定时时间 response = 
HttpResponse() cur = Currency(request) rq_post = getattr(cur, 'rq_post') jdata = rq_post('data') data = json.loads(jdata) ndata = dict([(k, v.replace(' ', '')) for k, v in data.items()]) # Remove all spaces crobj = schedules.crontab(**ndata) to_model_schedule = ModelEntry.to_model_schedule model_schedule, model_field = to_model_schedule(crobj) response.write(json.dumps(ndata)) return response @login_required @verification(CheckBatchJob) @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) @permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied) def add_batch_job_data(request): # 新增或者修改任务数据 response = HttpResponse() cur = Currency(request) data = cur.rq_post_json('data') jd = BatchJobData(data) if jd.operation_type == _OPERATION_TYPE[0]: result = jd.create() # 新批处理作业 jd.create_PeriodicTask() # 创建定时任务 else: result = jd.update() jd.update_PeriodicTask() response.write(json.dumps(result)) return response @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_batch_job_data(request): # 获取批处理作业数据 sql = config.query_batch_job_sql2 dtconn = dataconn.DatabaseConnection(logger) dtsf = dataconn.DataTransform() source_data = dtconn.ygol.getall(sql) data = [dtsf.get_row_by_dict_to_user(dt) for dt in source_data] response = HttpResponse() response.write(json.dumps(data)) return response @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_batch_job_data_by_id(request): """ 根据ID获取批处理作业数据 :param request: id :return: """ cur = Currency(request) _id = cur.rq_post('_id') sql = config.query_batch_job_sql3 % _id dtconn = dataconn.DatabaseConnection(logger) dtsf = dataconn.DataTransform() source_data = dtconn.ygol.getsingle(sql) response = HttpResponse() response.write(json.dumps(dtsf.get_row_by_dict_to_user(source_data))) return response @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) 
def get_batch_job_sub_job_by_id(request):
    """Return the sub-jobs of one batch job by id.

    Extension note: sub-jobs are currently only data-sync jobs; once SQL
    script / SQL backup types are added, the type must be inspected first and
    the details fetched from the matching type-specific table.

    :param request: POST with '_id'
    :return: JSON list of batch_job_details rows
    """
    cur = Currency(request)
    _id = cur.rq_post('_id')
    sql = config.query_batch_job_sub_job_by_id_sql % _id
    dtconn = dataconn.DatabaseConnection(logger)
    dtsf = dataconn.DataTransform()
    source_data = dtconn.ygol.getall(sql)
    data = [dtsf.get_row_by_dict_to_user(dt) for dt in source_data]
    response = HttpResponse()
    response.write(json.dumps(data))
    return response


@login_required
@verification(CheckBatchJob)
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
@permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied)
def run_batch_job_task(request):
    # Kick off a batch job asynchronously via celery; responds immediately.
    response = HttpResponse()
    cur = Currency(request)
    rq_post = getattr(cur, 'rq_post')
    jdata = rq_post('data')
    data = json.loads(jdata)
    # run_batch_job(**data)
    run_batch_job.delay(**data)
    response.write(json.dumps({'status': 0, 'msg': u'操作成功'}))
    return response


def _process_run_sub_job(_type, subjob_id, results, batch_job_instance_id, trigger_mode):
    # Worker-process entry point: run one sub-job of the batch job.
    BatchJobSubjobInstanceData.run_sub_job(_type, subjob_id, results, batch_job_instance_id, trigger_mode)


@shared_task(name='run_batch_job')
def run_batch_job(**data):
    """Run a batch job asynchronously (manual trigger).

    ``data`` is the validated payload described on BatchJobInstanceData
    (``_id`` = batch_job table id, ``batch_job_details`` = sub-job list).
    Sub-jobs are fanned out to a multiprocessing pool; each appends its
    result to a shared list, and 0/1 (ok/failed) is recorded overall.

    :return: None
    """
    bjid = BatchJobInstanceData(data)
    bjid.start_log()
    # Inside a celery worker the process is daemonic and cannot spawn
    # children, so temporarily flip the daemon flag off while creating
    # the Manager/Pool, then restore it.
    curr_proc = mp.current_process()
    curr_proc.daemon = False
    manager = mp.Manager()
    results = manager.list()  # collects every sub-job's result
    p = mp.Pool(config.maxtasksperchild)
    curr_proc.daemon = True
    batch_job_details = data.get('batch_job_details')
    for sj in batch_job_details:
        _type = int(sj.get('type'))
        subjob_id = sj.get('subjob_id')
        p.apply_async(_process_run_sub_job,
                      args=(_type, subjob_id, results, bjid.batch_job_instance_id, bjid.trigger_mode))
    p.close()
    p.join()
    # Overall result: failed (1) if any sub-job failed, else ok (0).
    batch_job_result = 0 if 1 not in results else 1
    bjid.record_result_log(batch_job_result)


@shared_task(name='batch_job_periodictask')
def batch_job_periodictask(batch_job_id):
    """Run a batch job on schedule (automatic trigger).

    Looks the job up by ``batch_job_id`` (batch_job table id) and executes
    its sub-jobs exactly like run_batch_job above.

    :param batch_job_id: batch job id
    :return: None
    """
    bjd = BatchJobData({})
    batch_job_data = bjd.get_batch_job_by_id(batch_job_id)
    bjid = BatchJobInstanceData(batch_job_data)
    bjid.batch_job_instance_id = batch_job_id
    bjid.trigger_mode = _TRIGGER_MODE[0]  # trigger mode: automatic
    bjid.start_log()
    # Same daemon-flag dance as run_batch_job: celery workers are daemonic.
    curr_proc = mp.current_process()
    curr_proc.daemon = False
    manager = mp.Manager()
    results = manager.list()  # collects every sub-job's result
    p = mp.Pool(config.maxtasksperchild)
    curr_proc.daemon = True
    batch_job_details = bjd.get_batch_job_details_by_id(batch_job_id)
    for sj in batch_job_details:
        _type = int(sj.get('type'))
        subjob_id = sj.get('subjob_id')
        p.apply_async(_process_run_sub_job,
                      args=(_type, subjob_id, results, bjid.batch_job_instance_id, bjid.trigger_mode))
    p.close()
    p.join()
    batch_job_result = 0 if 1 not in results else 1
    bjid.record_result_log(batch_job_result)


# @login_required
# @permission_required('change.view_delivery', raise_exception=PermissionDenied)
# @permission_required('change.edit_delivery', raise_exception=PermissionDenied)
def get_batch_job_instance(request):
    # Paginated query over batch-job instances (auth decorators commented out upstream).
    dsql = BatchJobInstanceSql(request)
    cvtpara = dsql.cvtpara
    tables = dsql.tables
    qs = query_sql.Q_Sql(cvtpara, **tables)
    pd = PaginatorBatchJobInstance(qs)
response = HttpResponse() response.write(json.dumps({'rows': pd.rows, 'total': pd.total})) return response @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_batch_job_instance_data_by_id(request): # 根据ID获取批处理作业数据 cur = Currency(request) batch_job_instance_id = cur.rq_post('instance_id') bjid = BatchJobInstanceData({}) data = bjid.get_batch_job_instance_by_id(batch_job_instance_id) response = HttpResponse() response.write(json.dumps(data)) return response @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_batch_job_sub_job_instance_data_by_id(request): # 根据ID获取批处理作业数据 cur = Currency(request) batch_job_instance_id = cur.rq_get('instance_id') # if _type == 1: 数据同步类型为1 data = BatchJobSubjobInstanceData.get_sub_job_datax_instance_data_by_id(batch_job_instance_id) response = HttpResponse() response.write(json.dumps(data)) return response ================================================ FILE: controller/__init__.py ================================================ ================================================ FILE: controller/core/__init__.py ================================================ ================================================ FILE: controller/core/access.py ================================================ # -*- coding: UTF-8 -*- # Description: # Author: 黄小雪 # Date: 2017年09月07日 # Company: 东方银谷 from public import * from django.http import HttpResponse from business_query.configuration.sqlList import * from dtmt.query import DatabaseConnection from functools import wraps import json import logging def verification(check_class): """ 装饰器用于检测用户提交的信息是否合法. check_class 检测类 Decorator for views that checks that the user submitted information, redirecting to the log-in page if necessary. The test should be a callable that takes the user object and returns True if the user passes. 
""" def decorator(view_func): @wraps(view_func) def _wrapped_view(request, *args, **kwargs): response = HttpResponse() ccl = check_class(request) check_status, error_msg = ccl.total_check() if check_status: response.write(json.dumps({'status': check_status, 'msg': error_msg})) return response return view_func(request, *args, **kwargs) return _wrapped_view return decorator class Check_IBQ(object): """ 检测投资批量查询提交的信息 error_msg 存放所有错误消息 check_status 错误状态 1 错误,0 正常,主要用于前端的JavaScript进行判断 total_check 启动所有检测,返回检测状态和错误消息 """ def __init__(self, request): cur = Currency(request) rq_post = getattr(cur, 'rq_post') jdata = rq_post('data') data = json.loads(jdata) self.data = data self.conf = investment_batch_query_conf self.error_msg = [] def check_data(self): # 检测脚本名称 isdigit = [d for d in self.data if str(d).isdigit()] if not isdigit: self.error_msg.append(u'请输入手机号') else: if len(self.data) > self.conf['maxNum']: self.error_msg.append(u'每次查询量不能超过%s' % self.conf['maxNum']) def total_check(self): self.check_data() status = 1 if self.error_msg else 0 return status, self.error_msg class Check_PCI(object): """ 普惠离职员工客户信息查询 error_msg 存放所有错误消息 check_status 错误状态 1 错误,0 正常,主要用于前端的JavaScript进行判断 total_check 启动所有检测,返回检测状态和错误消息 """ def __init__(self, request): cur = Currency(request) rq_post = getattr(cur, 'rq_post') jdata = rq_post('data') data = json.loads(jdata) self.logger = logging.getLogger('business_query') self.dc = DatabaseConnection(self.logger) self.data = data self.conf = puhuiCustomerInfoConf self.error_msg = [] def check_data(self): # 检测脚本名称 isdigit = [d for d in self.data if str(d)] if not isdigit: self.error_msg.append(u'请输入工号') else: if len(self.data) > self.conf['maxNum']: self.error_msg.append(u'每次查询量不能超过%s' % self.conf['maxNum']) def checkDateMaxNum(self): # 每天最多查询5次 res = self.dc._r.hmget('puhuiCustomerInfo', 'count')[0] count = int(res) if res else 0 if count >= 5: self.error_msg.append(u'今天已查询5次,欢迎明天再来查询!') def total_check(self): self.check_data() 
self.checkDateMaxNum() status = 1 if self.error_msg else 0 return status, self.error_msg class Check_people_upload(object): """ 检测人力上传的文件 错误码 错误消息 1 上传文件为空 2 只能上传excel文件 """ def __init__(self, request): self.fileobj = request.FILES.get('file', None) self.error_msg = [] self.error_code = [] def check_file_exists(self): """ 检测文件是否存在 :return: """ if self.fileobj is None: self.error_msg.append(u'上传文件为空') self.error_code.append(1) def check_filename(self): """ 检测文件名后缀必须为xlsx、xls :return: """ legal_file_suffixes = ['xlsx', 'xls'] if self.fileobj.name.split('.')[-1] not in legal_file_suffixes: self.error_msg.append(u'只能上传excel文件') self.error_code.append(2) def total_check(self): self.check_file_exists() if 1 not in self.error_code: self.check_filename() status = 1 if self.error_msg else 0 return status, self.error_msg ================================================ FILE: controller/core/excel.py ================================================ #! /usr/bin/env python # -*-coding:utf-8-*- ################################################## # Function: 银谷在线注册统计及出借统计脚本 # Usage: python start.py # Author: 黄小雪 # Date: 2016年7月19日 # Company: # Version: 1.2 ################################################## import xlwt import xlrd from unicode_width import * from openpyxl import Workbook from openpyxl.utils import get_column_letter from openpyxl.styles import Font, Alignment def set_style(name, height, bold=False): # 设置单元格样式 style = xlwt.XFStyle() # 初始化样式 font = xlwt.Font() # 为样式创建字体 font.name = name # 'Times New Roman' font.bold = bold font.color_index = 4 font.height = height al = xlwt.Alignment() al.horz = xlwt.Alignment.HORZ_CENTER # 设置水平居中 al.vert = xlwt.Alignment.VERT_CENTER # 设置垂直居中 style.font = font style.alignment = al return style def get_table(file, table=False): # 获取表格数据 data = xlrd.open_workbook(file) sheets = False if table: table = data.sheets()[0] else: sheets = data.sheets() return table or sheets def sheet_write(f, sheet_name, row0, rows, width): # 写入工作簿 sheet = 
f.add_sheet(sheet_name, cell_overwrite_ok=True) # 创建sheet # 生成第一行 for i in range(0, len(row0)): sheet.write(0, i, row0[i], set_style('Times New Roman', 220, True)) sheet.col(i).width = 256 * width[i] for j in range(0, len(rows)): row = rows[j] for i in range(0, len(row)): sheet.write(j + 1, i, row[i], set_style('Times New Roman', 220, False)) class Openpyxl(object): # openpyxl 生成excel文件 def __init__(self, filename): self.ft1 = Font(name='Calibri', bold=True) self.ft2 = Font(name='Calibri') self.al = Alignment(horizontal='center', vertical='center') self.filename = filename self.wb = Workbook() def __set_width(self, rows, ws): widths = get_width(*rows) for i in xrange(len(widths)): ws.column_dimensions[get_column_letter(i + 1)].width = widths[i] def __get_new_ws(self, title): new_ws = self.wb.get_active_sheet() if len(new_ws.get_cell_collection()) == 0: new_ws.title = title else: new_ws = self.wb.create_sheet(title=title) return new_ws def add_sheet(self, title, rows): new_ws = self.__get_new_ws(title) for i in range(len(rows)): row = rows[i] for j in range(len(row)): new_ws.cell(row=i + 1, column=j + 1).value = row[j] if i == 0: new_ws.cell(row=i + 1, column=j + 1).font = self.ft1 else: new_ws.cell(row=i + 1, column=j + 1).font = self.ft2 new_ws.cell(row=i + 1, column=j + 1).alignment = self.al self.__set_width(rows, new_ws) def save(self): self.wb.save(filename=self.filename) ================================================ FILE: controller/core/local_mysql.py ================================================ #! 
/usr/bin/env python # -*-coding:utf-8-*- ################################################## # Function: 银谷在线注册统计及出借统计脚本 # Usage: python start.py # Author: 黄小雪 # Date: 2016年7月19日 # Company: # Version: 1.2 ################################################## from controller.public.mysql_helper import Business Business(host, user, passwd, db) ================================================ FILE: controller/core/mailtable.py ================================================ #!/usr/bin/python env # -*- coding: UTF-8 -*- # Description: # Author: 黄小雪 # Date: 2017年07月04日 # Company: 东方银谷 class MailTable(object): """ 邮件html表格 """ def __init__(self): pass @property def style(self): _style = """ """ return _style def table(self, caption, rows): row0 = rows[0] tr0 = self._tr0_list(row0) total_tr_list = [self._tr_list(row) for row in rows[1:]] tr_body = ''.join(total_tr_list) _table = """ %s %s
%s
""" % (caption, tr0, tr_body) return _table def _tr_list(self, row): _tr_list = ['%s' % r for r in row] _tr = '%s' % ''.join(_tr_list) return _tr def _tr0_list(self, row): _tr_list = ['%s' % r for r in row] _tr = '%s' % ''.join(_tr_list) return _tr ================================================ FILE: controller/core/public.py ================================================ # -*- coding: UTF-8 -*- import datetime import time import json class Currency(object): # 通用帮助 def __init__(self, request): self.request = request def rq_get(self, key): return self.request.GET.get(key, '').strip() def rq_post(self, key): return self.request.POST.get(key, '').strip() def rq_get_json(self, key): return json.loads(self.rq_get(key)) def rq_post_json(self, key): return json.loads(self.rq_post(key)) class DatetimeHelp(object): # 日期时间帮助 def __init__(self): pass @property def now_time(self): return datetime.datetime.now() def strptime(self, value, format): return datetime.datetime.strptime(value, format) @property def nowtimestrf1(self): return self.now_time.strftime(u'%Y-%m-%d %H:%M:%S') @property def nowtimestrf2(self): return self.now_time.strftime(u'%Y年%m月%d日 %H点%M分%S秒') @property def nowtimestrf3(self): return self.now_time.strftime(u'%Y%m%d%H%M%S') @property def nowtimestrf4(self): return self.now_time.strftime(u'%Y%m%d') @property def nowtimestrf5(self): return self.now_time.strftime(u'%Y-%m-%d') @property def nowtimestrf6(self): return self.now_time.strftime(u'%Y年%m月%d日') @property def yesterday(self): yd = self.now_time - datetime.timedelta(days=1) return yd @property def yesterdaystrf4(self): return self.yesterday.strftime(u'%Y%m%d') @property def yesterdaystrf5(self): return self.yesterday.strftime(u'%Y-%m-%d') @property def yesterdaystrf6(self): return self.yesterday.strftime(u'%Y年%m月%d日') @property def timestamp1(self): """返回当前时间的13位毫秒时间戳 :return: 13 位的毫秒时间戳 1456402864242 """ return self.datetime_to_timestamp(self.now_time) @staticmethod def 
datetime_to_timestamp(datetime_obj): """将本地(local) datetime 格式的时间 (含毫秒) 转为毫秒时间戳 :param datetime_obj: {datetime}2016-02-25 20:21:04.242000 :return: 13 位的毫秒时间戳 1456402864242 """ local_timestamp = long(time.mktime(datetime_obj.timetuple()) * 1000.0 + datetime_obj.microsecond / 1000.0) return local_timestamp if __name__ == '__main__': dth = DatetimeHelp() print dth.timestamp1 ================================================ FILE: controller/core/query_sql.py ================================================ #!/usr/bin/python env # -*- coding: UTF-8 -*- # Description: # Author: 黄小雪 # Date: 2017年03月29日 # Company: 东方银谷 from controller.core.public import * import decimal class Q_Sql(object): """ # 查询sql table_a = { 'delivery_id': {'data_type': 'str', 'val':''}, 'customer': {'data_type': 'str', 'val':''}, 'customer_cn': {'data_type': 'str', 'val':''}, 'employee': {'data_type': 'str', 'val':''}, 'employee_cn': {'data_type': 'str', 'val':''}, 'former_employee': {'data_type': 'str', 'val':''}, 'former_employee_cn': {'data_type': 'str', 'val':''}, 'result': {'data_type': 'str', 'val':''}, 'start_time': {'data_type': 'datetime', 'val':''}, 'end_time': {'data_type': 'datetime', 'val':''}, } table_b = { 'large_area': {'data_type': 'str', 'val': '北区'}, 'store': {'data_type': 'str', 'val': '安阳一部'}, 'emp_num': {'data_type': 'str', 'val': 'CF400721'} } table_c ={ ... ... } ... ... tables = {'a':table_a, 'b':table_b, 'c':table_c ... 
...} """ def __init__(self, cvtpara, **tables): self._offset = cvtpara['offset'] self._limit = cvtpara['limit'] self.tables = tables self._SQL = cvtpara['sql'] self._TOTAL_SQL = cvtpara['total_sql'] self._order_by = self._set_order_by(cvtpara['order_by']) self._para = [] self._condition = [] self._condition_sql = '' self._data() self._set_condition() def _data(self): # 获取数据 for t, table in self.tables.items(): self._set_data(t, table) def _set_data(self, t, table): # 设置查询条件 for field, attr in table.items(): if attr['val']: if attr['data_type'] == 'str': self._set_str(t, field, **attr) if attr['data_type'] == 'datetime': self._set_datetime(t, field, **attr) def _set_str(self, t, field, **attr): val = attr['val'] self._condition.append('%s.%s = %%s' % (t, field)) self._para.append(val) def _set_datetime(self, t, field, **attr): val = attr['val'] tfield = field.split('_', 1)[1] # 获取时间字段名称 judge = field.split('_', 1)[0] # 判断是开始还是结束时间 'start' or 'end' if judge == 'start': self._condition.append('%s.%s >= %%s' % (t, tfield)) self._para.append(val) elif judge == 'end': self._condition.append('%s.%s <= %%s' % (t, tfield)) self._para.append('%s 23:59:59' % val) def _set_order_by(self, order_by): _order_by_str = '' _fields = [] if order_by: for dt in order_by: _table = dt['table'] _field = dt['field'] _rule = dt['rule'] _field = '%s.%s %s' % (_table, _field, _rule) _fields.append(_field) _fields_str = ','.join(_fields) _order_by_str = 'ORDER BY %s' % _fields_str return _order_by_str def _set_condition(self): if self._condition: and_sql = '\nand '.join(self._condition) self._condition_sql = 'where \n%s' % and_sql @property def para(self): import copy _para = copy.deepcopy(self._para) _para.append(self._offset) _para.append(self._limit) return _para @property def total_para(self): return self._para @property def sql(self): _sql = '\n'.join([self._SQL, self._condition_sql, self._order_by, 'limit %s,%s']) return _sql @property def total_sql(self): _total_sql = 
'\n'.join([self._TOTAL_SQL, self._condition_sql]) return _total_sql class Q_Data(object): # 设置投资数据 def __init__(self, qs): self._sql = qs.sql self._para = qs.para self._total_sql = qs.total_sql self._total_para = qs.total_para def _get_data(self, databases_c): return databases_c.getall(self._sql, self._para) def _data_clean(self, data): res = {} for key,val in data.items(): res[key] = self._data_conversion(val) return res def _data_conversion(self, val): # 数据转换 new_val = None if isinstance(val, datetime.datetime): new_val = val.strftime('%Y-%m-%d %H:%M:%S') if isinstance(val, decimal.Decimal): new_val = float(val) return new_val or val def _get_rows(self, databases_c): # rows [{key1: val1, key2: val2, ... ...}] rows = [] data = self._get_data(databases_c) for dt in data: row = self._data_clean(dt) rows.append(row) return rows def _get_total(self, databases_c): total_data = databases_c.getall(self._total_sql, self._total_para) total = total_data[0]['count'] return total class Download_Sql(Q_Sql): """ 导出文件的sql 查询数据时使用重写的sql和total_para去查询 主要是去掉offset limit,导出数据时不需要分页查询 """ def __init__(self, cvtpara, **tables): super(Download_Sql, self).__init__(cvtpara, **tables) @property def sql(self): _sql = '\n'.join([self._SQL, self._condition_sql, self._order_by]) return _sql @property def para(self): return self._para ================================================ FILE: controller/core/unicode_width.py ================================================ #!/usr/bin/env python #-*-coding:utf-8-*- def get_max_length(arg): length = str_len(arg[0]) + 2 return length def sort_arg(arg): arg = list(arg) arg.sort(cmp=cmp_length) return arg def cmp_length(a, b): la = str_len(a) lb = str_len(b) if la > lb: return -1 elif la < lb: return 1 else: return 0 def str_len(string): try: string = u'%s' % string row_l=len(string) utf8_l=len(string.encode('utf-8')) return (utf8_l-row_l)/2+row_l except: return row_l def get_width(*var): out = zip(*var) res = map(sort_arg, out) width = 
# ================================ FILE: controller/public/__init__.py ================================

# ================================ FILE: controller/public/dataconn.py ================================
#!/usr/bin/python env
# -*- coding: UTF-8 -*-
# Description: database connection factory + row value normalisation helpers
# Author: 黄小雪
# Date: 2018-01-04

import re
import datetime
import decimal
from django.conf import settings
from mysql_helper import BusinessMysql
from sqlserver_helper import BusinessSqlserver

# Local (FirstBlood) database settings.
_database = settings.DATABASES['default']

# Base query for the stored connection credentials.
dtbsif_sql = 'select * from FirstBlood.databaseinfo '


class DatabaseConnection(object):
    """Look up credentials in FirstBlood.databaseinfo and hand out the
    matching helper object (MySQL or SQL Server)."""

    def __init__(self, logger):
        self._logger = logger
        self.ygol = BusinessMysql(_database['HOST'], _database['USER'],
                                  _database['PASSWORD'], _database['NAME'])

    def get_dtbs_conn(self, name):
        """Return a DB helper for the connection registered under `name`."""
        datainfo = self.get_datainfo(name)
        businessType = {'mysql': BusinessMysql, 'sqlserver': BusinessSqlserver}
        business = businessType[datainfo['type']]
        return business(datainfo['host'], datainfo['user'],
                        datainfo['passwd'], datainfo['db'])

    def get_dtbs_conn_by_id(self, _id):
        """
        Return a DB helper for the databaseinfo row with primary key `_id`.
        :param _id: databaseinfo primary key
        :return: database helper object
        """
        datainfo = self.get_datainfo_by_id(_id)
        businessType = {'mysql': BusinessMysql, 'sqlserver': BusinessSqlserver}
        business = businessType[datainfo['type']]
        return business(datainfo['host'], datainfo['user'],
                        datainfo['passwd'], datainfo['db'])

    def get_datainfo(self, name):
        # BUGFIX: parameterized query -- was built via %-interpolation of a
        # user-controllable name (SQL injection).
        data = self.ygol.getsingle(dtbsif_sql + 'where `name`=%s', (name,))
        if self.ygol.status:
            # BUGFIX: the format string had no %s placeholder, so the old
            # `u'...' % msg` raised TypeError instead of logging the error.
            self._logger.error(u'根据数据库信息表的名称,获取数据库信息失败. - Msg: %s' % self.ygol.msg)
        return data

    def get_datainfo_by_id(self, _id):
        # Fetch connection info by primary key (parameterized, see above).
        data = self.ygol.getsingle(dtbsif_sql + 'where `id`=%s', (_id,))
        if self.ygol.status:
            self._logger.error(u'根据数据库信息表的主键id,获取数据库信息失败. - Msg: %s' % self.ygol.msg)
        return data


class DataTransform(object):
    """Normalise raw DB row values for SQL re-insertion / JSON / Excel export."""

    def __init__(self):
        # Control characters that are illegal in XML/Excel output.
        self._ILLEGAL_CHARACTERS_RE = re.compile(r'[\000-\010]|[\013-\014]|[\016-\037]')

    def get_row_by_list(self, dt, database_type):
        """Render a row (sequence) as quoted SQL literal fragments."""
        special_characters_conversion = self.special_characters(database_type)
        row = []
        for val in dt:
            if isinstance(val, long):                  # Python 2 builtin
                val = '%s' % str(val)
            if isinstance(val, str) or isinstance(val, unicode):
                if next(self._ILLEGAL_CHARACTERS_RE.finditer(val), None):
                    val = re.sub(self._ILLEGAL_CHARACTERS_RE, "", val)
            if isinstance(val, bool):
                val = 'true' if val else 'false'
            if isinstance(val, str):
                val = "'%s'" % special_characters_conversion(val)
            if isinstance(val, unicode):
                val = "'%s'" % special_characters_conversion(val)
            if isinstance(val, datetime.datetime):
                val = "'%s'" % val
            if isinstance(val, datetime.date):
                val = "'%s'" % val
            if val is None:
                val = 'null'
            row.append(val)
        return row

    def get_row_by_dict(self, dt, database_type):
        """Render a row (dict) with escaped but unquoted values."""
        special_characters_conversion = self.special_characters(database_type)
        row = {}
        for key, val in dt.items():
            if isinstance(val, long):
                val = '%s' % str(val)
            if isinstance(val, str) or isinstance(val, unicode):
                if next(self._ILLEGAL_CHARACTERS_RE.finditer(val), None):
                    val = re.sub(self._ILLEGAL_CHARACTERS_RE, "", val)
            if isinstance(val, bool):
                val = 'true' if val else 'false'
            if isinstance(val, str):
                val = "%s" % special_characters_conversion(val)
            if isinstance(val, unicode):
                val = "%s" % special_characters_conversion(val)
            if isinstance(val, datetime.datetime):
                val = "%s" % val
            if isinstance(val, datetime.date):
                val = "%s" % val
            if val is None:
                val = 'null'
            if isinstance(val, decimal.Decimal):
                val = float(val)
            row[key] = val
        return row

    def get_row_by_dict_to_user(self, dt):
        # Human-friendly representation returned to the end user (no escaping).
        row = {}
        for key, val in dt.items():
            if isinstance(val, long):
                val = '%s' % str(val)
            if isinstance(val, str) or isinstance(val, unicode):
                if next(self._ILLEGAL_CHARACTERS_RE.finditer(val), None):
                    val = re.sub(self._ILLEGAL_CHARACTERS_RE, "", val)
            if isinstance(val, bool):
                val = 'true' if val else 'false'
            if isinstance(val, datetime.datetime):
                val = "%s" % val
            if isinstance(val, datetime.date):
                val = "%s" % val
            if val is None:
                val = 'null'
            if isinstance(val, decimal.Decimal):
                val = float(val)
            row[key] = val
        return row

    def get_row_by_list_to_excel(self, dt):
        # List form used when generating Excel files: strip illegal chars only.
        row = []
        for val in dt:
            if isinstance(val, long):
                val = str(val)
            if isinstance(val, str) or isinstance(val, unicode):
                if next(self._ILLEGAL_CHARACTERS_RE.finditer(val), None):
                    val = re.sub(self._ILLEGAL_CHARACTERS_RE, "", val)
            row.append(val)
        return row

    @staticmethod
    def special_characters_mysql(string):
        """Escape backslashes and quotes for MySQL string literals."""
        double_slash = re.compile(r'\\')
        single_quotes = re.compile(r'\'')
        double_quotation_marks = re.compile(r'\"')
        # BUGFIX: the old replacement "\\\\" collapsed to a single backslash
        # in the output (a no-op); r'\\\\' emits the doubled backslash MySQL
        # requires.
        string = re.sub(double_slash, r'\\\\', string)
        string = re.sub(single_quotes, "\\'", string)
        string = re.sub(double_quotation_marks, "\\\"", string)
        return string

    @staticmethod
    def special_characters_sqlserver(string):
        """Escape quotes for SQL Server by doubling them."""
        string = string.replace("'", "''")
        string = string.replace('"', '""')
        return string

    def special_characters(self, database_type):
        # Dispatch to the escaping routine for the given database type.
        func = {'mysql': self.special_characters_mysql,
                'sqlserver': self.special_characters_sqlserver}
        return func[database_type]
val) if isinstance(val, bool): val = 'true' if val else 'false' if isinstance(val, datetime.datetime): val = "%s" % val if isinstance(val, datetime.date): val = "%s" % val if val is None: val = 'null' if isinstance(val, decimal.Decimal): val = float(val) row[key] = val return row def get_row_by_list_to_excel(self, dt): # 列表数据,用于生成excel文件 row = [] for val in dt: if isinstance(val, long): val = str(val) if isinstance(val, str) or isinstance(val, unicode): if next(self._ILLEGAL_CHARACTERS_RE.finditer(val), None): val = re.sub(self._ILLEGAL_CHARACTERS_RE, "", val) row.append(val) return row @staticmethod def special_characters_mysql(string): double_slash = re.compile(r'\\') single_quotes = re.compile(r'\'') double_quotation_marks = re.compile(r'\"') string = re.sub(double_slash, "\\\\", string) string = re.sub(single_quotes, "\\'", string) string = re.sub(double_quotation_marks, "\\\"", string) return string @staticmethod def special_characters_sqlserver(string): string = string.replace("'", "''") string = string.replace('"', '""') return string def special_characters(self, database_type): # 特殊字符转义 func = {'mysql': self.special_characters_mysql, 'sqlserver': self.special_characters_sqlserver} return func[database_type] ================================================ FILE: controller/public/log.py ================================================ #!/usr/bin/python env # -*- coding: UTF-8 -*- # Description: 日志记录 # Author: 黄小雪 # Date: 2017年02月15日 # Company: 东方银谷 import logging # 开发一个日志系统, 既要把日志输出到控制台, 还要写入日志文件 class Logger(object): # 用字典保存日志级别 _format_dict = { 1: logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'), 2: logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'), 3: logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'), 4: logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'), 5: logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') } def __init__(self, logname, 
# ================================ FILE: controller/public/mailclass.py ================================
#! /usr/bin/env python
# -*- coding: UTF-8 -*-
##################################################
# Function: HTML mail sender for registration / lending statistics reports
# Author: 黄小雪
# Date: 2016-07-19
# Version: 1.2
##################################################
import os
import sys
import xlrd
import smtplib
import datetime
from email.header import Header
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.utils import parseaddr, formataddr

reload(sys)                        # Python 2 only: force utf-8 default encoding
sys.setdefaultencoding("utf-8")


class MailHelper(object):
    """Compose and send an HTML mail with optional attachments/inline images.

    status/msg mirror the project's DB helpers: status==1 and msg set on failure.
    """

    def __init__(self, mail_host, mail_user, mail_pass, sender, sender_zh_name,
                 receivers, cc):
        self.mail_host = mail_host          # SMTP server
        self.mail_user = mail_user          # SMTP user
        self.mail_pass = mail_pass          # SMTP password
        self.sender = sender
        self.sender_zh_name = sender_zh_name
        self.receivers = receivers          # list of recipient addresses
        self.cc = cc                        # list of CC addresses
        self.message = MIMEMultipart()      # multipart container for the mail
        self.status = 0                     # 0 = ok, 1 = send failed
        self.msg = ''                       # error message on failure

    def add_attch(self, res_file):
        """Attach the file at absolute path res_file."""
        # BUGFIX: the file handle was opened and never closed; use `with`.
        with open(res_file, 'rb') as f:
            att1 = MIMEText(f.read(), 'base64', 'utf-8')
        att1["Content-Type"] = 'application/octet-stream'
        # The filename below is what mail clients display for the attachment.
        att1["Content-Disposition"] = 'attachment; filename=%s' % Header(res_file.split('/')[-1], 'UTF-8')
        self.message.attach(att1)

    def insert_img(self, file):
        """Embed an inline image (referenced by Content-ID from the HTML body)."""
        with open(file, 'rb') as fp:
            msgImage = MIMEImage(fp.read())
        msgImage.add_header('Content-ID', '')
        self.message.attach(msgImage)

    def add_content(self, content, subject):
        """Set subject/addresses and attach the HTML body."""
        self.message['Subject'] = Header(subject, 'utf-8')
        self.message['From'] = self._format_addr(u'%s <%s>' % (self.sender_zh_name, self.sender))
        self.message['To'] = ''.join(self._cvt_receivers(self.receivers))
        self.message['Cc'] = ''.join(self._cvt_receivers(self.cc))
        head_content = """"""
        mail_msg = ''.join([head_content, content])
        self.message.attach(MIMEText(mail_msg, 'html', 'utf-8'))

    def _cvt_receivers(self, receivers):
        # Wrap every address in <> to avoid mojibake in some clients.
        return [self._cvt_user(u) for u in receivers]

    def _cvt_user(self, user):
        return ''.join(['<', user, '>'])

    def send_htm(self):
        """Send the message; on failure set status=1 and keep the error in msg."""
        smtpObj = None
        try:
            smtpObj = smtplib.SMTP()
            smtpObj.connect(self.mail_host, 25)   # 25 = SMTP port
            smtpObj.login(self.mail_user, self.mail_pass)
            smtpObj.sendmail(self.sender, self.receivers + self.cc,
                             self.message.as_string())
        except smtplib.SMTPException as e:   # `as` form is valid Py2.6+ and Py3
            self.status = 1
            self.msg = u"%s" % e
        finally:
            # BUGFIX: the SMTP connection was never closed.
            if smtpObj is not None:
                try:
                    smtpObj.quit()
                except Exception:
                    pass

    @staticmethod
    def _format_addr(s):
        # Encode the display name so non-ASCII sender names survive transport.
        name, addr = parseaddr(s)
        return formataddr((Header(name, 'utf-8').encode(),
                           addr.encode('utf-8') if isinstance(addr, unicode) else addr))


# ================================ FILE: controller/public/mysql_helper.py ================================
#!/usr/bin/python env
# -*- coding: UTF-8 -*-
# Description: thin MySQLdb data-access layer
# Author: 黄小雪
# Date: 2017-07-12

import MySQLdb


class MysqlHelper(object):
    """
    Data access layer.
    status: 0 = last call ok, 1 = last call failed (msg holds the error text).
    Every public method opens a fresh connection and closes it before return.
    """

    def __init__(self, host, user, passwd, db):
        self.__host = host
        self.__user = user
        self.__passwd = passwd
        self.__db = db
        self.row0 = None        # column names of the last result set
        self.rowcount = None    # row count of the last result set
        self.msg = ''
        self.status = 0

    def __conn(self):
        try:
            conn = MySQLdb.connect(host=self.__host,
                                   user=self.__user,
                                   passwd=self.__passwd,
                                   db=self.__db,
                                   init_command="set names utf8;set net_write_timeout=3600;",
                                   charset='utf8',
                                   )
        except Exception as e:
            self.msg = '%s' % e
            self.status = 1
            conn = None
        return conn

    def _run(self, work, cursorclass=None):
        """Open connection + cursor, run `work(cur, conn)`, always clean up.

        Factors out the try/except/finally boilerplate previously duplicated
        in every method. BUGFIX: if conn.cursor() itself raised, the old
        `finally` blocks referenced an unbound `cur` and raised NameError
        from the cleanup path; the guard below prevents that.
        """
        conn = self.__conn()
        if not conn:
            return None
        cur = None
        data = None
        try:
            if cursorclass is not None:
                cur = conn.cursor(cursorclass=cursorclass)
            else:
                cur = conn.cursor()
            data = work(cur, conn)
        except Exception as e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            if cur is not None:
                cur.close()
            conn.commit()
            conn.close()
        return data

    def getall(self, sql, paramters=None):
        """Fetch all rows as dicts; records rowcount/row0 metadata."""
        def work(cur, conn):
            cur.execute(sql, paramters)
            data = cur.fetchall()
            self.rowcount = cur.rowcount
            self.row0 = [d[0] for d in cur.description]
            return data
        return self._run(work, MySQLdb.cursors.DictCursor)

    def getallmany(self, sql, paramters=None):
        """executemany + fetchall, rows as dicts."""
        def work(cur, conn):
            cur.executemany(sql, paramters)
            return cur.fetchall()
        return self._run(work, MySQLdb.cursors.DictCursor)

    def getsingle(self, sql, paramters=None):
        """Fetch a single row as a dict (or None)."""
        def work(cur, conn):
            cur.execute(sql, paramters)
            return cur.fetchone()
        return self._run(work, MySQLdb.cursors.DictCursor)

    def insertmany(self, sql, paramters=None):
        def work(cur, conn):
            cur.executemany(sql, paramters)
        return self._run(work, MySQLdb.cursors.DictCursor)

    def insert(self, sql, paramters=None):
        """Insert one row; exposes the auto-increment id as self.insert_id."""
        def work(cur, conn):
            cur.execute(sql, paramters)
            setattr(self, 'insert_id', conn.insert_id())
        return self._run(work, MySQLdb.cursors.DictCursor)

    def getall_list(self, sql, paramters=None):
        """Fetch all rows as tuples; records rowcount/row0 metadata."""
        def work(cur, conn):
            cur.execute(sql, paramters)
            data = cur.fetchall()
            self.rowcount = cur.rowcount
            self.row0 = [d[0] for d in cur.description]
            return data
        return self._run(work)

    def getall_list_sqls(self, sqls, paramters=None):
        """
        Execute several statements on one connection, tuple results.
        sqls = [sql1, sql2, ...]; the last statement's result is returned.
        """
        def work(cur, conn):
            for sql in sqls:
                cur.execute(sql, paramters)
            data = cur.fetchall()
            self.rowcount = cur.rowcount
            self.row0 = [d[0] for d in cur.description]
            return data
        return self._run(work)

    def delete(self, sql, paramters=None):
        def work(cur, conn):
            cur.execute(sql, paramters)
        return self._run(work, MySQLdb.cursors.DictCursor)

    def update(self, sql, paramters=None):
        def work(cur, conn):
            cur.execute(sql, paramters)
        return self._run(work, MySQLdb.cursors.DictCursor)

    def dict_generator(self, sql, paramters=None):
        """
        Stream rows as dicts via a server-side cursor; use for large result
        sets that must not be materialised in memory.
        """
        conn = self.__conn()
        try:
            if conn:
                cur = conn.cursor(cursorclass=MySQLdb.cursors.SSDictCursor)
                cur.execute(sql, paramters)
                self.rowcount = cur.rowcount
                self.row0 = [d[0] for d in cur.description]
                data = cur.fetchone()
                while data:
                    yield data
                    data = cur.fetchone()
                cur.close()
                conn.commit()
                conn.close()
        except Exception as e:
            self.msg = '%s' % e
            self.status = 1

    def tuple_generator(self, sql, paramters=None):
        """
        Stream rows as tuples via a server-side (SSCursor) cursor -- the
        result set stays buffered on the server.
        """
        conn = self.__conn()
        try:
            if conn:
                cur = conn.cursor(cursorclass=MySQLdb.cursors.SSCursor)
                cur.execute(sql, paramters)
                self.rowcount = cur.rowcount
                self.row0 = [d[0] for d in cur.description]
                data = cur.fetchone()
                while data:
                    yield data
                    data = cur.fetchone()
                cur.close()
                conn.commit()
                conn.close()
        except Exception as e:
            self.msg = '%s' % e
            self.status = 1

    def transaction_start(self):
        """Open a connection/cursor pair kept on self for a manual transaction."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)
            setattr(self, 'conn', conn)
            setattr(self, 'cur', cur)
        except Exception as e:
            self.msg = 'transaction_start - %s' % e
            self.status = 1
            conn.commit()
            conn.close()
        return None

    def transaction_execute(self, sql, paramters=None):
        # Transactional insert/update/delete on the cursor from transaction_start.
        if hasattr(self, 'cur') and hasattr(self, 'conn'):
            try:
                self.cur.execute(sql, paramters)
                setattr(self, 'insert_id', self.conn.insert_id())
            except Exception as e:
                self.msg = 'transaction_insert - %s' % e
                self.status = 1
        return None

    def transaction_commit_and_close(self):
        # Commit on success, roll back on any recorded failure, then close.
        if hasattr(self, 'cur') and hasattr(self, 'conn'):
            self.cur.close()
            if self.status:
                self.conn.rollback()
            else:
                self.conn.commit()
            self.conn.close()
        return None


class BusinessMysql(MysqlHelper):
    # Business layer: currently just a `search` convenience wrapper.
    def __init__(self, host, user, passwd, db):
        super(BusinessMysql, self).__init__(host, user, passwd, db)

    def search(self, sql, para=None):
        return self.getsingle(sql, para)
# ================================ FILE: controller/public/pagination.py ================================
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django.core.paginator import Paginator


class Paginator_help:
    """Classic page-number pagination helper for server-rendered templates."""

    def __init__(self, page_num, queryset, PAGE_SIZE, current_page_total, request):
        self.page_num = self.check_page_num(page_num)     # current page number
        self.current_page_total = current_page_total      # number of page links shown
        self.queryset = queryset                          # collection to paginate
        self.PAGE_SIZE = PAGE_SIZE                        # items per page
        self.pages = self.get_Paginator_obj()             # django Paginator
        self.page_range = self.get_page_range()           # visible page index window
        self.qstr = self.get_qstr(request)                # querystring minus page param
        self.current_page = self.get_current_page()       # current Page object

    def get_Paginator_obj(self):
        return Paginator(self.queryset, self.PAGE_SIZE)

    def check_page_num(self, page_num):
        # Clamp invalid (<= 0) page numbers to the first page.
        if page_num <= 0:
            page_num = 1
        return page_num

    def get_current_page(self):
        return self.pages.page(self.page_num)

    def get_qstr(self, request):
        # Rebuild the querystring without the page parameter 'p'.
        return '&'.join(['%s=%s' % (k, v) for k, v in request.GET.items() if k != 'p'])

    def calculate_begin_end(self):
        """Compute the [begin, end) slice of page indexes to display."""
        page_total = self.pages.num_pages
        begin = 0
        end = 0
        # BUGFIX: // keeps the Python-2 integer-division semantics on
        # Python 3 as well (plain / would produce float indexes there).
        if page_total <= self.current_page_total:
            begin = 0
            end = page_total
        else:
            if self.page_num <= self.current_page_total // 2:
                begin = 0
                end = self.current_page_total
            else:
                begin = self.page_num - self.current_page_total // 2
                end = self.page_num + self.current_page_total // 2
                if (self.current_page_total % 2) != 0:
                    # Odd window size: widen by one on the right.
                    end += 1
                if end > page_total:
                    end = page_total
                    begin = page_total - self.current_page_total
        return begin, end

    def get_page_range(self):
        begin, end = self.calculate_begin_end()
        # Paginator.page_range is a lazy range object; materialize to slice it.
        page_range = [i for i in self.pages.page_range]
        return page_range[begin:end]


class Paginator_ajax(object):
    """Server-side pagination feeding the bootstrap-table plugin."""

    def __init__(self, offset, queryset, PAGE_SIZE):
        # BUGFIX: offset // PAGE_SIZE -- floor division, identical to the old
        # behaviour on Python 2 but still an int on Python 3.
        self.page_num = offset // PAGE_SIZE + 1   # current page number
        self.queryset = queryset                  # collection to paginate
        self.PAGE_SIZE = PAGE_SIZE                # items per page
        self.pages = self.get_Paginator_obj       # Paginator object
        self.current_page = self.get_current_page # current Page object
        self.total = self.pages.count             # total item count
        self.rows = self._get_rows

    @property
    def get_current_page(self):
        return self.pages.page(self.page_num)

    @property
    def get_Paginator_obj(self):
        return Paginator(self.queryset, self.PAGE_SIZE)

    @property
    def _get_rows(self):
        return list(self.current_page)

    @property
    def data(self):
        return {'total': self.total, 'rows': self.rows}


class Paginator_sql(object):
    """Server-side pagination for result sets fetched via raw SQL.

    NOTE(review): structurally identical to Paginator_ajax apart from the
    stored offset; kept separate to preserve the existing public interface.
    """

    def __init__(self, offset, queryset, PAGE_SIZE):
        self.offset = offset                      # row offset
        self.PAGE_SIZE = PAGE_SIZE                # items per page
        self.page_num = offset // PAGE_SIZE + 1   # current page number (BUGFIX: //)
        self.queryset = queryset                  # collection to paginate
        self.pages = self.get_Paginator_obj
        self.current_page = self.get_current_page
        self.total = self.pages.count
        self.rows = self._get_rows

    @property
    def get_current_page(self):
        return self.pages.page(self.page_num)

    @property
    def get_Paginator_obj(self):
        return Paginator(self.queryset, self.PAGE_SIZE)

    @property
    def _get_rows(self):
        return list(self.current_page)

    @property
    def data(self):
        return {'total': self.total, 'rows': self.rows}
for d in cur.description] except Exception, e: self.msg = '%s' % e self.status = 1 data = None finally: cur.close() conn.commit() conn.close() return data def getallmany(self, sql, paramters=None): conn = self.__conn() if not conn: return None try: cur = conn.cursor(as_dict=True) # 返回字典 cur.executemany(sql, paramters) data = cur.fetchall() except Exception, e: self.msg = '%s' % e self.status = 1 data = None finally: cur.close() conn.commit() conn.close() return data def getsingle(self, sql, paramters=None): conn = self.__conn() if not conn: return None try: cur = conn.cursor(as_dict=True) # 返回字典 cur.execute(sql, paramters) data = cur.fetchone() except Exception, e: self.msg = '%s' % e self.status = 1 data = None finally: cur.close() conn.commit() conn.close() return data def insertmany(self, sql, paramters=None): conn = self.__conn() if not conn: return None try: cur = conn.cursor(as_dict=True) # 返回字典 cur.executemany(sql, paramters) except Exception, e: self.msg = '%s' % e self.status = 1 finally: cur.close() conn.commit() conn.close() return None def insert(self, sql, paramters=None): conn = self.__conn() if not conn: return None try: cur = conn.cursor(as_dict=True) # 返回字典 cur.execute(sql, paramters) except Exception, e: self.msg = '%s' % e self.status = 1 finally: cur.close() conn.commit() conn.close() return None def getall_list(self, sql, paramters=None): # 返回列表形式结果 conn = self.__conn() if not conn: return None try: cur = conn.cursor() # 返回列表 cur.execute(sql, paramters) data = cur.fetchall() self.rowcount = cur.rowcount self.row0 = [d[0] for d in cur.description] except Exception, e: self.msg = '%s' % e self.status = 1 data = None finally: cur.close() conn.commit() conn.close() return data def getall_list_sqls(self, sqls, paramters=None): """ 执行多个sql语句,返回列表形式结果 sqls = [sql1, sql2, ... ...] 
""" conn = self.__conn() if not conn: return None try: cur = conn.cursor() # 返回列表 for sql in sqls: cur.execute(sql, paramters) data = cur.fetchall() self.rowcount = cur.rowcount self.row0 = [d[0] for d in cur.description] except Exception, e: self.msg = '%s' % e self.status = 1 data = None finally: cur.close() conn.commit() conn.close() return data def delete(self, sql, paramters=None): conn = self.__conn() if not conn: return None try: cur = conn.cursor(as_dict=True) # 返回字典 cur.execute(sql, paramters) except Exception, e: self.msg = '%s' % e self.status = 1 finally: cur.close() conn.commit() conn.close() return None def dict_generator(self, sql, paramters=None): """ 以生成器方式获取数据,用于数据量大的时候 :param sql: :param paramters: :return: """ conn = self.__conn() try: if conn: cur = conn.cursor(as_dict=True) # 返回字典 cur.execute(sql, paramters) self.rowcount = cur.rowcount self.row0 = [d[0] for d in cur.description] data = cur.fetchone() while data: yield data data = cur.fetchone() cur.close() conn.commit() conn.close() except Exception, e: self.msg = '%s' % e self.status = 1 def tuple_generator(self, sql, paramters=None): """ 以生成器方式获取数据,用于数据量大的时候 :param sql: :param paramters: :return: """ conn = self.__conn() try: if conn: cur = conn.cursor() # 返回字典 cur.execute(sql, paramters) self.rowcount = cur.rowcount self.row0 = [d[0] for d in cur.description] data = cur.fetchone() while data: yield data data = cur.fetchone() cur.close() conn.commit() conn.close() except Exception, e: self.msg = '%s' % e self.status = 1 class BusinessSqlserver(SqlserverHelper): # 业务处理层 def __init__(self, host, user, passwd, db): super(BusinessSqlserver, self).__init__(host, user, passwd, db) def search(self, sql, para=None): return self.getsingle(sql, para) ================================================ FILE: create_table.sql ================================================ /* * 创建数据库 */ create database FirstBlood default character set utf8 collate utf8_bin; /* 进入数据库 */ use FirstBlood; /* * 数据库信息 */ 
CREATE TABLE `databaseinfo` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(255) DEFAULT NULL COMMENT '名称', `description` varchar(255) DEFAULT NULL COMMENT '描述', `host` varchar(255) DEFAULT NULL COMMENT '主机', `user` varchar(255) DEFAULT NULL COMMENT '用户', `passwd` varchar(255) DEFAULT NULL COMMENT '密码', `db` varchar(255) DEFAULT NULL COMMENT '数据库', `type` varchar(255) DEFAULT NULL COMMENT '类型', `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', PRIMARY KEY (`id`), UNIQUE KEY `databaseinfo_host_c254f05e_uniq` (`host`), UNIQUE KEY `databaseinfo_name_a3bc8190_uniq` (`name`) ) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='数据库信息'; /* * 数据同步任务 */ drop table if exists `datax_job`; CREATE TABLE `datax_job` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(255) DEFAULT NULL COMMENT '名称', `description` varchar(255) DEFAULT NULL COMMENT '描述', `querySql` longtext COLLATE utf8_bin NOT NULL COMMENT '查询SQL语句', `reader_databaseinfo_id` int(11) NOT NULL COMMENT '读取数据库', `writer_table` varchar(255) DEFAULT NULL COMMENT '写入表名', `writer_databaseinfo_id` int(11) NOT NULL COMMENT '写入数据库', `writer_preSql` longtext COLLATE utf8_bin NOT NULL COMMENT '写入数据前执行的SQL语句', `writer_postSql` longtext COLLATE utf8_bin NOT NULL COMMENT '写入数据后执行的SQL语句', `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', PRIMARY KEY (`id`), UNIQUE KEY `datax_job_name_uniq` (`name`) ) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='datax数据同步任务'; /* * 写入表的列信息 */ drop table if exists `datax_job_writer_column`; CREATE TABLE `datax_job_writer_column` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(255) DEFAULT NULL COMMENT '列名', `datax_job_id` int(11) NOT NULL COMMENT '数据同步任务ID', `create_time` datetime NOT NULL DEFAULT 
CURRENT_TIMESTAMP COMMENT '创建时间', `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='写入表的列信息'; /* * 数据同步任务实例 */ drop table if exists `datax_job_instance`; CREATE TABLE `datax_job_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT, `instance_id` bigint(20) NOT NULL COMMENT '任务实例ID', `name` varchar(255) DEFAULT NULL COMMENT '名称', `description` varchar(255) DEFAULT NULL COMMENT '描述', `querySql` longtext COLLATE utf8_bin NOT NULL COMMENT '查询SQL语句', `reader_databaseinfo_host` varchar(255) NOT NULL COMMENT '读取数据库IP', `reader_databaseinfo_description` varchar(255) NOT NULL COMMENT '读取数据库描述', `writer_table` varchar(255) DEFAULT NULL COMMENT '写入表名', `writer_databaseinfo_host` varchar(255) NOT NULL COMMENT '写入数据库IP', `writer_databaseinfo_description` varchar(255) NOT NULL COMMENT '写入数据库描述', `writer_preSql` longtext COLLATE utf8_bin NOT NULL COMMENT '写入数据前执行的SQL语句', `writer_postSql` longtext COLLATE utf8_bin NOT NULL COMMENT '写入数据后执行的SQL语句', `trigger_mode` int(2) DEFAULT '1' COMMENT '触发模式 1 自动 2 手动(默认自动)', `status` int(2) DEFAULT '0' COMMENT '状态 0 正在执行 1 执行完成', `result` int(2) DEFAULT '2' COMMENT '执行结果 0 成功 1 失败 2 未知', `start_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '开始时间', `end_time` datetime DEFAULT NULL COMMENT '结束时间', PRIMARY KEY (`id`), UNIQUE KEY `datax_job_instance_id_uniq` (`instance_id`) ) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='datax数据同步任务实例'; /* * 批处理作业 */ drop table if exists `batch_job`; CREATE TABLE `batch_job` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(255) DEFAULT NULL COMMENT '名称', `description` varchar(255) DEFAULT NULL COMMENT '描述', `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', PRIMARY KEY (`id`), UNIQUE KEY `batch_job_name_uniq` (`name`) ) 
ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='批处理作业'; /* * 批处理作业详情 */ drop table if exists `batch_job_details`; CREATE TABLE `batch_job_details` ( `id` int(11) NOT NULL AUTO_INCREMENT, `batch_job_id` int(11) NOT NULL COMMENT '批处理作业ID', `subjob_id` int(11) NOT NULL COMMENT '子作业ID', `type` int(2) NOT NUll COMMENT '类型 1 数据同步 2 SQL脚本 3 备份。 主要用于后期扩展', `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='批处理作业详情'; /* * 批处理作业执行实例 */ drop table if exists `batch_job_instance`; CREATE TABLE `batch_job_instance` ( `id` int(11) NOT NULL AUTO_INCREMENT, `instance_id` bigint(20) NOT NULL COMMENT '实例ID', `name` varchar(255) DEFAULT NULL COMMENT '名称', `description` varchar(255) DEFAULT NULL COMMENT '描述', `trigger_mode` int(2) DEFAULT '1' COMMENT '触发模式 1 自动 2 手动(默认自动)', `status` int(2) DEFAULT '0' COMMENT '状态 0 正在执行 1 执行完成', `result` int(2) DEFAULT '2' COMMENT '执行结果 0 成功 1 失败 2 未知', `start_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '开始时间', `end_time` datetime DEFAULT NULL COMMENT '结束时间', PRIMARY KEY (`id`), UNIQUE KEY `batch_job_instance_id_uniq` (`instance_id`) ) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='批处理作业执行实例'; /* * 批处理作业执行实例详情 */ drop table if exists `batch_job_instance_details`; CREATE TABLE `batch_job_instance_details` ( `id` int(11) NOT NULL AUTO_INCREMENT, `instance_id` bigint(20) NOT NULL COMMENT '实例ID', `subjob_instance_id` bigint(20) NOT NULL COMMENT '子作业实例ID', `type` int(2) NOT NUll COMMENT '类型 1 数据同步 2 SQL脚本 3 备份。 主要用于后期扩展', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='批处理作业执行实例详情'; ================================================ FILE: datax_web/__init__.py ================================================ ================================================ FILE: 
datax_web/admin.py ================================================ from django.contrib import admin # Register your models here. ================================================ FILE: datax_web/apps.py ================================================ from __future__ import unicode_literals from django.apps import AppConfig class DataxWebConfig(AppConfig): name = 'datax_web' ================================================ FILE: datax_web/conf/__init__.py ================================================ ================================================ FILE: datax_web/conf/config.py ================================================ #!/usr/bin/python env # -*- coding: UTF-8 -*- import sys import os reload(sys) sys.setdefaultencoding("utf-8") _parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) _FirstBlood_dir = os.path.dirname(_parentdir) sys.path.append(_parentdir) sys.path.append(_FirstBlood_dir) _log_file_dir = _FirstBlood_dir + '/log/' img_dir = _FirstBlood_dir + '/static/img/' # datax job 路径 datax_dir = _FirstBlood_dir + '/datax' datax_job_dir = '/tmp' datax_log_dir = datax_dir + '/web_log' # 数据申请定时任务执行日志 scheduled_tasks_log_file = _log_file_dir + u'scheduled_tasks.log' # 日志主键 primary_key = 'task_instance_id' # 定时任务实例状态 1:开始执行 2:正在执行 3:执行完成 status = [1, 2, 3] # 响应类型 RESPONSE_TYPE = dict(small=1, large=2, html=3) # 操作类型 OPERATION_TYPE = ['add', 'mod'] # 触发模式 TRIGGER_MODE = [1, 2] # 1 自动 2 手动 # 数据库类型 DATABASE_TYPE = 'mysql' # 以名称查询任务 query_datax_job_by_name_sql = "SELECT * FROM FirstBlood.datax_job dj WHERE dj.`name` = '%s';" # 以名称和ID查询任务 query_datax_job_sql2 = "SELECT * FROM FirstBlood.datax_job dj WHERE dj.`name` = '%s' and dj.id!=%s;" # 以ID查询任务 query_datax_job_by_id_sql = """ SELECT dj.*, ( SELECT GROUP_CONCAT( djwc.`name` SEPARATOR '\n' ) FROM FirstBlood.datax_job_writer_column djwc WHERE djwc.datax_job_id = dj.id ) writer_column_id FROM FirstBlood.datax_job dj WHERE dj.id = %s; """ # 以ID查询数据同步任务需要写入的列 query_datax_job_writer_column_by_id_sql 
= """ SELECT * FROM FirstBlood.datax_job_writer_column WHERE datax_job_id = %s order by id; """ # 查询所有任务 query_datax_job_sql = """ SELECT dj.id, dj.`name`, dj.description, dj.querySql, concat(rdbi.description,' ', rdbi.`host`) reader_databaseinfo_id, dj.writer_table, concat(wdbi.description,' ', wdbi.`host`) writer_databaseinfo_id, dj.create_time, dj.modify_time FROM FirstBlood.datax_job dj LEFT JOIN FirstBlood.databaseinfo rdbi on dj.reader_databaseinfo_id=rdbi.id LEFT JOIN FirstBlood.databaseinfo wdbi on dj.writer_databaseinfo_id=wdbi.id """ insert_datax_job_sql = """ INSERT INTO FirstBlood.datax_job ( `name`, `description`, `querySql`, `reader_databaseinfo_id`, `writer_table`, `writer_databaseinfo_id`, `writer_preSql`, `writer_postSql` ) VALUES ('%s','%s','%s',%s,'%s',%s,'%s','%s') """ insert_datax_job_writer_column_sql = """ INSERT INTO FirstBlood.datax_job_writer_column (`name`, `datax_job_id`) VALUES """ update_datax_job_by_id_sql = """ update FirstBlood.datax_job set `name` = '%s', `description` = '%s', `querySql` = '%s', `reader_databaseinfo_id` = %s, `writer_table` = '%s', `writer_databaseinfo_id` = %s, `writer_preSql` = '%s', `writer_postSql` = '%s' where id = %s """ delete_datax_job_writer_column_by_id_sql = """ delete from FirstBlood.datax_job_writer_column where datax_job_id =%s; """ insert_datax_job_instance_sql = """ insert into FirstBlood.datax_job_instance ( `instance_id`, `name`, `description`, `querySql`, `reader_databaseinfo_host`, `reader_databaseinfo_description`, `writer_table`, `writer_databaseinfo_host`, `writer_databaseinfo_description`, `trigger_mode`, `writer_preSql`, `writer_postSql` ) values (%s, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', %s, '%s', '%s'); """ update_datax_job_instance_by_instance_id_sql = """ update FirstBlood.datax_job_instance set `status`=%s, `result`=%s, `end_time`='%s' where instance_id=%s; """ select_datax_job_instance_sql = """ select dji.id, dji.instance_id, dji.`name`, dji.description, dji.querySql, 
concat(dji.reader_databaseinfo_description,' ',dji.reader_databaseinfo_host) 'reader_databaseinfo_host', dji.writer_table, concat(dji.writer_databaseinfo_description,' ',dji.writer_databaseinfo_host) 'writer_databaseinfo_host', dji.writer_preSql, dji.writer_postSql, dji.trigger_mode, dji.`status`, dji.result, dji.start_time, dji.end_time FROM FirstBlood.datax_job_instance dji """ count_datax_job_instance_sql = """ SELECT count(1) count FROM FirstBlood.datax_job_instance dji """ select_datax_job_instance_by_id_sql = select_datax_job_instance_sql + "\n where dji.id = %s" datax_job_template = """ { "job": { "content": [ { "reader": { "name": "mysqlreader", "parameter": { "connection": [ { "jdbcUrl": ["%s"], "querySql": ["%s"], } ], "password": "%s", "username": "%s", "where": "" } }, "writer": { "name": "mysqlwriter", "parameter": { "column": %s, "connection": [ { "jdbcUrl": "%s", "table": ["%s"] } ], "password": "%s", "preSql": [%s], "postSql": [%s], "session": [], "username": "%s", "writeMode": "insert" } } } ], "setting": { "speed": { "record": "1000" } } } } """ ================================================ FILE: datax_web/models.py ================================================ from __future__ import unicode_literals from django.db import models # Create your models here. ================================================ FILE: datax_web/tests.py ================================================ # -*- coding: UTF-8 -*- from django.test import TestCase # Create your tests here. #!/usr/bin/python # Copyright 2013 Joe Walnes and the websocketd team. # All rights reserved. # Use of this source code is governed by a BSD-style # license that can be found in the LICENSE file. 
import time from sys import stdin, stdout def follow(thefile): thefile.seek(0,2) while True: line = thefile.readline() if not line: time.sleep(1) continue yield line log_dir = '/opt/django/FirstBlood/datax/web_log/%s.json.log' file_id = stdin.readline().strip() logfile = log_dir % file_id print('Hello fuck %s!' % logfile) stdout.flush() # Remember to flush # For each line FOO received on STDIN, respond with "Hello FOO!". logfile_open = open(logfile, 'r') loglines = follow(logfile_open) print 'ok' for line in loglines: print line, stdout.flush() # Remember to flush ================================================ FILE: datax_web/urls.py ================================================ # -*- coding: UTF-8 -*- from django.conf.urls import url import views urlpatterns = [ # Examples: # url(r'^$', 'YinguOnline.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^index/$', views.index), # 数据同步 url(r'^add_job/$', views.add_job), # 新增任务 url(r'^monitor_job/$', views.monitor_job), # 任务执行实例 url(r'^monitor_job_detail/(?P\d+)/$', views.monitor_job_detail), # 任务执行详情 url(r'^update_job/(?P\d+)/$', views.update_job), # 更新任务 url(r'^get_database/$', views.get_database), # 获取数据库信息 url(r'^add_job_data/$', views.add_job_data), # 新增或修改任务数据 url(r'^get_job_data/$', views.get_job_data), # 获取任务数据 url(r'^get_update_job_data/$', views.get_update_job_data), # 获取需要更新的任务数据 url(r'^get_datax_job_instance/$', views.get_datax_job_instance), # 获取任务实例数据 url(r'^get_datax_job_instance_by_id/$', views.get_datax_job_instance_by_id), # 根据ID获取任务实例数据 url(r'^run_job/$', views.run_job), # 运行 任务 url(r'^get_database/$', views.get_database), # 获取数据库信息 ] ================================================ FILE: datax_web/views.py ================================================ # -*- coding: UTF-8 -*- from django.contrib.auth.decorators import permission_required from django.contrib.auth.decorators import login_required from django.core.exceptions import PermissionDenied from django.shortcuts 
import render from django.http import HttpResponse from functools import wraps from controller.core.public import (Currency, DatetimeHelp) from controller.core import query_sql from controller.public import dataconn from celery import shared_task from conf import config import commands import logging import sys import json reload(sys) sys.setdefaultencoding("utf-8") logger = logging.getLogger('datax_web') _SUCCESS = dict(status=0, msg=u'检测成功') _str = (str, unicode) def verification(check_class): """ 装饰器用于检测用户提交的信息是否合法. check_class 检测类 Decorator for views that checks that the user submitted information, redirecting to the log-in page if necessary. The test should be a callable that takes the user object and returns True if the user passes. """ def decorator(view_func): @wraps(view_func) def _wrapped_view(request, *args, **kwargs): response = HttpResponse() ccl = check_class(request) result = ccl.total_check() if result['status']: response.write(json.dumps(result)) return response return view_func(request, *args, **kwargs) return _wrapped_view return decorator class JobData(object): """ 新增或更新定时任务时处理数据 data 数据格式 {u'_id': u'28', u'description': u'\u6570\u636e\u540c\u6b65\u6d4b\u8bd5', u'name': u'test', u'operation_type': u'mod', u'querySql': u'select * from `admin-service`.as_user_info limit 10;', u'reader_databaseinfo_id': u'1', u'trigger_mode': 2, u'writer_column_id': [u'*'], u'writer_databaseinfo_id': u'22', u'writer_postSql': u'', u'writer_preSql': u'truncate table `admin-service`.as_user_info;', u'writer_table': u'`admin-service`.as_user_info'} _id: datax_job_id """ def __init__(self, data): # id 为 datax_job_id self.id = data.get('_id', 0) self.name = data.get('name', '') self.description = data.get('description', '') self.querySql = data.get('querySql', '') self.reader_databaseinfo_id = data.get('reader_databaseinfo_id', '') self.writer_table = data.get('writer_table', '') self.writer_column = data.get('writer_column_id', []) self.writer_databaseinfo_id = 
data.get('writer_databaseinfo_id', '') self.writer_preSql = data.get('writer_preSql', '') self.writer_postSql = data.get('writer_postSql', '') self.operation_type = data.get('operation_type', '') self.trigger_mode = data.get('trigger_mode', '') self.dtconn = dataconn.DatabaseConnection(logger) self.dtsf = dataconn.DataTransform() self.dh = DatetimeHelp() self.__timestamp1 = self.dh.timestamp1 self.reader_dtbs = self._get_reader_dtbs() if self.reader_databaseinfo_id else None self.writer_dtbs = self._get_writer_dtbs() if self.writer_databaseinfo_id else None @property def timestamp1(self): return self.__timestamp1 def _get_reader_dtbs(self): return self.dtconn.get_datainfo_by_id(self.reader_databaseinfo_id) def _get_writer_dtbs(self): return self.dtconn.get_datainfo_by_id(self.writer_databaseinfo_id) def get_insert_datax_job_sql(self): # 在 datax_job表里创建新的任务 - 新增SQL querySql = self.dtsf.special_characters_mysql(self.querySql) writer_preSql = self.dtsf.special_characters_mysql(self.writer_preSql) writer_postSql = self.dtsf.special_characters_mysql(self.writer_postSql) return config.insert_datax_job_sql % ( self.name, self.description, querySql, self.reader_databaseinfo_id, self.writer_table, self.writer_databaseinfo_id, writer_preSql, writer_postSql ) def get_update_datax_job_by_id_sql(self): # 在 datax_job表里更新任务 - 更新SQL querySql = self.dtsf.special_characters_mysql(self.querySql) writer_preSql = self.dtsf.special_characters_mysql(self.writer_preSql) writer_postSql = self.dtsf.special_characters_mysql(self.writer_postSql) return config.update_datax_job_by_id_sql % ( self.name, self.description, querySql, self.reader_databaseinfo_id, self.writer_table, self.writer_databaseinfo_id, writer_preSql, writer_postSql, self.id ) def get_insert_datax_job_writer_column_sql(self): # 拼接写入列SQL insert into values ('user_id', 1), ('card_name', 1) datax_job_id = self.id or self.dtconn.ygol.insert_id values_list = ["('%s', %s)" % (column, datax_job_id) for column in self.writer_column] 
return config.insert_datax_job_writer_column_sql + ','.join(values_list) def get_delete_datax_job_writer_column_by_id_sql(self): # 获取删除写入列SQL , 根据ID删除 return config.delete_datax_job_writer_column_by_id_sql % self.id def get_insert_datax_job_instance_sql(self): querySql = self.dtsf.special_characters_mysql(self.querySql) writer_preSql = self.dtsf.special_characters_mysql(self.writer_preSql) writer_postSql = self.dtsf.special_characters_mysql(self.writer_postSql) return config.insert_datax_job_instance_sql % ( self.datax_job_instance_id, self.name, self.description, querySql, self.reader_dtbs['host'], self.reader_dtbs['description'], self.writer_table, self.writer_dtbs['host'], self.writer_dtbs['description'], self.trigger_mode, writer_preSql, writer_postSql ) def get_update_datax_job_instance_by_instance_id_sql(self, result): return config.update_datax_job_instance_by_instance_id_sql % ( 1, result, self.dh.now_time, self.datax_job_instance_id ) @property def datax_job_instance_id(self): return '%s%s' % (self.id, self.__timestamp1) def start_log(self): # 开始记录任务日志到datax_job_instance sql = self.get_insert_datax_job_instance_sql() self.dtconn.ygol.insert(sql) if self.dtconn.ygol.status: logger.error(u'记录任务日志到datax_job_instance 失败 - SQL: %s - msg: %s' % (sql, self.dtconn.ygol.msg)) def record_result_log(self, result): # 记录任务执行结果 datax_job_instance sql = self.get_update_datax_job_instance_by_instance_id_sql(result) self.dtconn.ygol.update(sql) if self.dtconn.ygol.status: logger.error(u'记录任务执行结果 datax_job_instance 失败 - SQL: %s - msg: %s' % (sql, self.dtconn.ygol.msg)) def create(self): # datax_job表里创建新的任务 result = _SUCCESS.copy() sql1 = self.get_insert_datax_job_sql() self.dtconn.ygol.transaction_start() self.dtconn.ygol.transaction_execute(sql1) if self.dtconn.ygol.status: msg = u'datax_job表里创建新的任务,SQL:%s 插入数据失败。 - Msg: %s' % \ (sql1, self.dtconn.ygol.msg) logger.error(msg) result = dict(status=500, msg=msg) else: sql2 = self.get_insert_datax_job_writer_column_sql() 
self.dtconn.ygol.transaction_execute(sql2) if self.dtconn.ygol.status: msg = u'datax_job_writer_column表里创建新的列,SQL:%s 插入数据失败。 - Msg: %s' % \ (sql2, self.dtconn.ygol.msg) logger.error(msg) result = dict(status=500, msg=msg) self.dtconn.ygol.transaction_commit_and_close() return result def update(self): # 更新任务 result = _SUCCESS.copy() sql1 = self.get_update_datax_job_by_id_sql() self.dtconn.ygol.transaction_start() self.dtconn.ygol.transaction_execute(sql1) if self.dtconn.ygol.status: msg = u'datax_job表,SQL:%s 更新数据失败。 - Msg: %s' % \ (sql1, self.dtconn.ygol.msg) logger.error(msg) result = dict(status=500, msg=msg) else: sql2 = self.get_delete_datax_job_writer_column_by_id_sql() sql3 = self.get_insert_datax_job_writer_column_sql() self.dtconn.ygol.transaction_execute(sql2) self.dtconn.ygol.transaction_execute(sql3) if self.dtconn.ygol.status: msg = u'datax_job_writer_column表里更新列 - SQL2:%s - SQL3: %s -' \ u' 更新数据失败。 - Msg: %s' % \ (sql2, sql3, self.dtconn.ygol.msg) logger.error(msg) result = dict(status=500, msg=msg) self.dtconn.ygol.transaction_commit_and_close() return result def get_job_data(self): # 获取任务数据 source_data = self.dtconn.ygol.getall(config.query_datax_job_sql) if self.dtconn.ygol.status: logger.error(u'获取datax_job信息失败 %s' % self.dtconn.ygol.msg) return [] else: return [self.dtsf.get_row_by_dict_to_user(dt) for dt in source_data] def get_job_data_by_id(self, _id): """ 根据ID获取任务数据 :param _id: datax_job_id :return: datax_job """ source_data = self.dtconn.ygol.getsingle(config.query_datax_job_by_id_sql % _id) if self.dtconn.ygol.status: logger.error(u'根据ID %s 获取任务数据信息失败 %s' % (self.id, self.dtconn.ygol.msg)) return None else: return self.dtsf.get_row_by_dict_to_user(source_data) def get_datax_job_writer_column_by_id(self, _id): """ 根据ID获取任务需要写入的列 :param _id: datax_job_id :return: datax_job_writer_column """ source_data = self.dtconn.ygol.getall(config.query_datax_job_writer_column_by_id_sql % _id) if self.dtconn.ygol.status: logger.error(u'根据ID %s 获取任务数据信息失败 
%s' % (self.id, self.dtconn.ygol.msg)) return None else: return [self.dtsf.get_row_by_dict_to_user(dt) for dt in source_data] @staticmethod def create_file(file, content): # 创建文件 with open(file, 'w') as f: f.write(content) class CheckJob(object): """ 检测新增任务提交的信息 :return result 格式: {'status': 1, 'msg': '操作类型错误'} total_check 启动所有检测,返回检测状态和错误消息 """ _SUCCESS = _SUCCESS.copy() _OPERATION_TYPE_ERROR1 = dict(status=1, msg=u'操作类型不能为空') _OPERATION_TYPE_ERROR2 = dict(status=2, msg=u'操作类型错误') _DESCRIPTION_ERROR1 = dict(status=3, msg=u'任务描述不能为空') _NAME_ERROR1 = dict(status=4, msg=u'任务名称不能为空') _NAME_ERROR2 = dict(status=5, msg=u'任务名称已存在') _QUERY_SQL_ERROR1 = dict(status=6, msg=u'查询SQL语句不能为空') _READER_DATABASEINFO_ID_ERROR1 = dict(status=7, msg=u'读取数据库不能为空,必须为数字') _READER_DATABASEINFO_ID_ERROR2 = dict(status=8, msg=u'读取数据库ID不存在') _WRITER_TABLE_ERROR1 = dict(status=10, msg=u'写入表不能为空') _WRITER_COLUMN_ERROR1 = dict(status=11, msg=u'写入列不能为空') _WRITER_DATABASEINFO_ID_ERROR1 = dict(status=12, msg=u'写入数据库不能为空') _WRITER_DATABASEINFO_ID_ERROR2 = dict(status=13, msg=u'写入数据库ID不存在') _DATAX_JOB_ID_ERROR1 = dict(status=1, msg=u'datax_job_id 不能为空') _DATAX_JOB_ID_ERROR2 = dict(status=2, msg=u'datax_job_id 不存在') _TRIGGER_MODE_ERROR1 = dict(status=2, msg=u'触发模式 不存在') _TRIGGER_MODE_ERROR2 = dict(status=2, msg=u'触发模式值错误') def __init__(self, request): """ RESPONSE_TYPE 返回给用户数据的方式 1:20万行以内的数据,以excel方式返回 2:超过20万行的数据,需要分批处理 3:小量的数据以HTML表格的方式返回' """ cur = Currency(request) data = cur.rq_post_json('data') self.dtconn = dataconn.DatabaseConnection(logger) self.jd = JobData(data) self.error_msg = [] self.result = self._SUCCESS def check_operation_type(self): # 检测操作类型 operation_type = self.jd.operation_type if not operation_type: self.result = self._OPERATION_TYPE_ERROR1 else: if operation_type not in config.OPERATION_TYPE: self.result = self._OPERATION_TYPE_ERROR2 def check_name_by_operation_type(self): # 根据操作类型add/mod 检测任务名称 name = self.jd.name if self.jd.operation_type == config.OPERATION_TYPE[0]: sql = 
config.query_datax_job_by_name_sql % name self.check_name(name, sql) if self.jd.operation_type == config.OPERATION_TYPE[1]: sql = config.query_datax_job_sql2 % (name, self.jd.id) self.check_name(name, sql) def check_name(self, name, sql): # 修改任务时,检测任务名称 if name: data = self.dtconn.ygol.getsingle(sql) if self.dtconn.ygol.status: _msg = u'检测任务名称时数据库错误。 - Msg: %s' % self.dtconn.ygol.msg logger.error(_msg) self.result = dict(status=500, msg=_msg) else: if data: self.result = self._NAME_ERROR2 else: self.result = self._NAME_ERROR1 def check_description(self): # 检测任务描述 description = self.jd.description if not description: self.result = self._DESCRIPTION_ERROR1 def check_querySql(self): # 检测查询SQL语句 querySql = self.jd.querySql if not querySql: self.result = self._DESCRIPTION_ERROR1 def check_reader_databaseinfo_id(self): # 检测读取数据库 kwargs = { '_id': self.jd.reader_databaseinfo_id, 'operation_type': u'读取', 'ERROR1': self._READER_DATABASEINFO_ID_ERROR1, 'ERROR2': self._READER_DATABASEINFO_ID_ERROR2, } self.check_databaseinfo_id(**kwargs) def check_writer_table(self): # 检测写入表 writer_table = self.jd.writer_table if not writer_table: self.result = self._WRITER_TABLE_ERROR1 def check_writer_column(self): # 检测写入列 writer_column = self.jd.writer_column if not writer_column: self.result = self._WRITER_COLUMN_ERROR1 def check_writer_databaseinfo_id(self): # 检测写入数据库 kwargs = { '_id': self.jd.writer_databaseinfo_id, 'operation_type': u'写入', 'ERROR1': self._WRITER_DATABASEINFO_ID_ERROR1, 'ERROR2': self._WRITER_DATABASEINFO_ID_ERROR2, } self.check_databaseinfo_id(**kwargs) def check_databaseinfo_id(self, _id, operation_type, ERROR1, ERROR2): # 检测数据库ID if _id and _id.isdigit(): data = self.dtconn.get_datainfo_by_id(int(_id)) if self.dtconn.ygol.status: _msg = u'检测%s数据库错误。 - Msg: %s' % (operation_type, self.dtconn.ygol.msg) logger.error(_msg) self.result = dict(status=500, msg=_msg) else: if not data: self.result = ERROR2 else: self.result = ERROR1 def check_datax_job_id(self): # 检测任务ID if 
self.jd.operation_type == config.OPERATION_TYPE[1]: _id = self.jd.id if isinstance(_id, _str) and _id and _id.isdigit(): sql = config.query_datax_job_by_id_sql % _id data = self.dtconn.ygol.getsingle(sql) if self.dtconn.ygol.status: _msg = u'检测datax_job_id 错误 - SQL: %s。 - Msg: %s' % (sql, self.dtconn.ygol.msg) logger.error(_msg) self.result = dict(status=500, msg=_msg) else: if not data: self.result = self._DATAX_JOB_ID_ERROR2 else: self.result = self._DATAX_JOB_ID_ERROR1 def check_trigger_mode(self): # 检测触发模式 trigger_mode = self.jd.trigger_mode if self.jd.operation_type == config.OPERATION_TYPE[1]: if not trigger_mode: self.result = self._TRIGGER_MODE_ERROR1 else: if trigger_mode not in config.TRIGGER_MODE: self.result = self._TRIGGER_MODE_ERROR2 def total_check(self): check_func = ['check_operation_type', 'check_datax_job_id', 'check_description', 'check_name_by_operation_type', 'check_querySql', 'check_reader_databaseinfo_id', 'check_writer_table', 'check_writer_column', 'check_writer_databaseinfo_id', 'check_trigger_mode' ] for func_name in check_func: getattr(self, func_name)() if self.result['status']: break return self.result class Datax(object): """ 处理和datax相关的操作 """ def __init__(self, data): self.jd = JobData(data) def get_reader(self): return dict( jdbcUrl = 'jdbc:mysql://%s/%s' % (self.jd.reader_dtbs['host'], self.jd.reader_dtbs['db']), querySql = self.jd.querySql, password = self.jd.reader_dtbs['passwd'], username = self.jd.reader_dtbs['user'] ) def get_writer(self): return dict( column = "[%s]" % ','.join(['"%s"' % str(c) for c in self.jd.writer_column]), jdbcUrl = 'jdbc:mysql://%s/%s' % (self.jd.writer_dtbs['host'], self.jd.writer_dtbs['db']), table = self.jd.writer_table, password = self.jd.writer_dtbs['passwd'], preSql = ','.join(map(lambda x:'"%s"' % x, self.jd.writer_preSql.split(';'))), postSql = ','.join(map(lambda x:'"%s"' % x, self.jd.writer_postSql.split(';'))), username = self.jd.writer_dtbs['user'] ) def get_job_json(self): reader = 
self.get_reader() writer = self.get_writer() return config.datax_job_template % ( reader['jdbcUrl'], reader['querySql'], reader['password'], reader['username'], writer['column'], writer['jdbcUrl'], writer['table'], writer['password'], writer['preSql'], writer['postSql'], writer['username'] ) @property def job_json_file_name(self): return u'%s.json' % self.jd.datax_job_instance_id @property def job_json_file(self): return config.datax_job_dir + '/' + self.job_json_file_name @staticmethod def create_file(file, content): # 创建文件 with open(file, 'w') as f: f.write(content) @property def cmd(self): return 'python %s/bin/datax.py %s > %s/%s.log' % \ (config.datax_dir, self.job_json_file, config.datax_log_dir, self.job_json_file_name) @shared_task(name='run') def run(**data): # 执行任务 dx = Datax(data) dx.jd.create_file(dx.job_json_file, dx.get_job_json()) dx.jd.start_log() (status, output) = commands.getstatusoutput(dx.cmd) if status: logger.error("status:%s output:%s" % (status, output)) result = 1 if status else 0 dx.jd.record_result_log(result) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def index(request): # 数据同步 return render(request, 'datax_web/index.html', locals()) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) @permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied) def add_job(request): # 新增任务 return render(request, 'datax_web/add_job.html', locals()) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def update_job(request, id): # 更新任务 return render(request, 'datax_web/update_job.html', locals()) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def monitor_job(request): # 任务执行实例 return render(request, 'datax_web/monitor_job.html', locals()) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def 
monitor_job_detail(request, id): # 任务执行详情 return render(request, 'datax_web/monitor_job_detail.html', locals()) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_database(request): # 获取 数据库信息 def _data_processing(dt): # 清除数据里的密码,并对数据格式化 del dt['passwd'] return dtsf.get_row_by_dict_to_user(dt) response = HttpResponse() dtconn = dataconn.DatabaseConnection(logger) data = dtconn.ygol.getall(dataconn.dtbsif_sql) dtsf = dataconn.DataTransform() if dtconn.ygol.status: logger.error(u'获取数据库信息失败 %s' % dtconn.ygol.msg) response.write(json.dumps(map(_data_processing, data))) return response @login_required @verification(CheckJob) @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) @permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied) def add_job_data(request): # 新增或者修改任务数据 response = HttpResponse() cur = Currency(request) data = cur.rq_post_json('data') jd = JobData(data) if jd.operation_type == config.OPERATION_TYPE[0]: result = jd.create() # 新增 else: result = jd.update() # 更新 response.write(json.dumps(result)) return response @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_job_data(request): # 获取 任务列表数据 response = HttpResponse() jd = JobData({}) response.write(json.dumps(jd.get_job_data())) return response @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_update_job_data(request): # 获取 更新任务数据 response = HttpResponse() cur = Currency(request) _id = cur.rq_post_json('_id') jd = JobData({}) response.write(json.dumps(jd.get_job_data_by_id(_id))) return response @login_required @verification(CheckJob) @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) @permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied) def run_job(request): # 执行任务 response = HttpResponse() cur = Currency(request) data = 
cur.rq_post_json('data') # run(**data) run.delay(**data) response.write(json.dumps(_SUCCESS)) return response class DataxJobInstanceSql(object): # datax job instance 查询sql _table_dji = { 'name': {'data_type': 'str', 'val': ''}, 'description': {'data_type': 'str', 'val': ''}, 'reader_databaseinfo_host': {'data_type': 'str', 'val': ''}, 'writer_table': {'data_type': 'str', 'val': ''}, 'writer_databaseinfo_host': {'data_type': 'str', 'val': ''}, 'status': {'data_type': 'str', 'val': ''}, 'result': {'data_type': 'str', 'val': ''}, 'trigger_mode': {'data_type': 'str', 'val': ''}, } _order_by = [{'table': 'dji', 'field': 'start_time', 'rule': 'DESC'}] def __init__(self, request): self.cur = Currency(request) self.rq_get = self.cur.rq_get self._offset = int(self.rq_get('offset')) self._limit = int(self.rq_get('limit')) self._SQL = config.select_datax_job_instance_sql self._TOTAL_SQL = config.count_datax_job_instance_sql self._set_table(self._table_dji) def _set_table(self, table): for field, attr in table.items(): val = self.rq_get(field) attr['val'] = val return table @property def tables(self): _tables = {'dji': self._table_dji} return _tables @property def cvtpara(self): _cvtpara = { 'offset': self._offset, 'limit': self._limit, 'sql': self._SQL, 'total_sql': self._TOTAL_SQL, 'order_by': self._order_by, 'order_rule': self._order_by } return _cvtpara class PaginatorData(dataconn.DatabaseConnection, query_sql.Q_Data): # 分页访问数据 def __init__(self, qs): super(PaginatorData, self).__init__(logger) query_sql.Q_Data.__init__(self, qs) @property def rows(self): return self._get_rows(self.ygol) @property def total(self): return self._get_total(self.ygol) @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_datax_job_instance(request): # 分页查询任务实例 dsql = DataxJobInstanceSql(request) cvtpara = dsql.cvtpara tables = dsql.tables qs = query_sql.Q_Sql(cvtpara, **tables) pd = PaginatorData(qs) response = HttpResponse() 
response.write(json.dumps({'rows': pd.rows, 'total': pd.total})) return response @login_required @permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied) def get_datax_job_instance_by_id(request): # 根据ID查询任务实例 cur = Currency(request) _id = cur.rq_post('_id') conn = dataconn.DatabaseConnection(logger) dtf = dataconn.DataTransform() sql = config.select_datax_job_instance_by_id_sql % _id source_data = conn.ygol.getsingle(sql) response = HttpResponse() response.write(json.dumps(dtf.get_row_by_dict_to_user(source_data))) return response ================================================ FILE: manage.py ================================================ #!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "FirstBlood.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) ================================================ FILE: requirements.txt ================================================ celery==3.1.25 celery-with-redis==3.0 Django==1.11.14 django-celery==3.2.1 pymssql==2.1.4 redis==2.10.6 supervisor==3.3.4 ================================================ FILE: static/css/login.css ================================================ body{background: url(/static/img/login_bg.jpg) 0 -200px no-repeat #000; background-size: cover;margin:0; color:#FFF} *{-webkit-box-sizing:border-box;box-sizing:border-box;} a{-webkit-transition: All 1s ease;text-decoration: none; color: #dfc684;} input , button{border:0;background:none;color: #dfc684;} #login-box{position:absolute; width:500px; left:30%; top:29%;} .login-box-wh{height:53px;width:800px;margin-bottom:16px;} .to-index{padding-left:16px;overflow:hidden; position:absolute;left:69px;} .to-index-m{width:360px; height:53px; margin-top:-53px;margin-left:-16px;position:absolute;z-index:-1;background:yellow;transition:margin 0.8s;-moz-transition:margin 0.8s; -webkit-transition:margin 0.8s 0.3s; } 
.login-box-wh:hover .to-index-m{margin-top:0;} .login-box-wh:hover a{color:#f00} .box-mov{width: 53px;height: 53px;float: left; display: inline-block;opacity: .72;filter: alpha(opacity=72);background: url(/static/img/icons.png) -212px 0 #000; margin-right: 16px;position: relative;overflow:hidden;z-index:0;} .box-mov-s{width: 53px;height: 53px;z-index:1;position:absolute;background:#ff0;margin-left:-53px; background:yellow; transition:margin 0.8s;-moz-transition:margin 0.8s; -webkit-transition:margin 0.8s; } .login-box-wh:hover .box-mov-s{margin:0;} .box-mov-d{width: 53px;height: 53px;z-index:2;position:absolute;background:url(/static/img/icons.png) -212px -53px;);opacity:0; } .login-box-wh:hover .box-mov-d {opacity:1;} .text-box{width:360px; height:53px;background:#000;opacity: .72; float:left;line-height:53px;} #vdcode{width:270px;} .text-box-login{} .text-box-login-btn{width:360px; height:53px;background:#000;opacity: .72; float:left;line-height:53px;-webkit-box-sizing:border-box; position:absolute;left:69px;overflow:hidden; } .login-btn-m{width:360px; height:53px;margin-top:-53px; position:absolute;z-index:1;background:yellow;transition:margin 0.8s;-moz-transition:margin 0.8s; -webkit-transition:margin 0.8s 0.3s;} .text-box-login:hover .login-btn-m{margin-top:0} .login-btn{width:100%;height:100%; cursor:pointer;font-size:16px;position:absolute;z-index:5;transition: 0.8s;-moz-transition: 0.8s; -webkit-transition: 0.8s 0.3s;} .login-btn:hover{color:#f00;} input{height:100%;width:100%;padding:8px 0 8px 8px;background:none;color:#fff;} input:focus { outline:none; } ================================================ FILE: static/js/csrf.js ================================================ function getCookie(name) { var cookieValue = null; if (document.cookie && document.cookie != '') { var cookies = document.cookie.split(';'); for (var i = 0; i < cookies.length; i++) { var cookie = jQuery.trim(cookies[i]); // Does this cookie string begin with the name we want? 
if (cookie.substring(0, name.length + 1) == (name + '=')) { cookieValue = decodeURIComponent(cookie.substring(name.length + 1)); break; } } } return cookieValue; } var csrftoken = getCookie('csrftoken'); function csrfSafeMethod(method) { // these HTTP methods do not require CSRF protection return (/^(GET|HEAD|OPTIONS|TRACE)$/.test(method)); } $.ajaxSetup({ beforeSend: function(xhr, settings) { if (!csrfSafeMethod(settings.type) && !this.crossDomain) { xhr.setRequestHeader("X-CSRFToken", csrftoken); } } }); ================================================ FILE: static/plugins/bootstarp-table/bootstrap-table-zh-CN.js ================================================ /** * Bootstrap Table Chinese translation * Author: Zhixin Wen */ (function ($) { 'use strict'; $.fn.bootstrapTable.locales['zh-CN'] = { formatLoadingMessage: function () { return '正在努力地加载数据中,请稍候……'; }, formatRecordsPerPage: function (pageNumber) { return '每页显示 ' + pageNumber + ' 条记录'; }, formatShowingRows: function (pageFrom, pageTo, totalRows) { return '显示第 ' + pageFrom + ' 到第 ' + pageTo + ' 条记录,总共 ' + totalRows + ' 条记录'; }, formatSearch: function () { return '搜索'; }, formatNoMatches: function () { return '没有找到匹配的记录'; }, formatPaginationSwitch: function () { return '隐藏/显示分页'; }, formatRefresh: function () { return '刷新'; }, formatToggle: function () { return '切换'; }, formatColumns: function () { return '列'; }, formatExport: function () { return '导出数据'; }, formatClearFilters: function () { return '清空过滤'; } }; $.extend($.fn.bootstrapTable.defaults, $.fn.bootstrapTable.locales['zh-CN']); })(jQuery); ================================================ FILE: static/plugins/datatables/css/jquery.dataTables.css ================================================ /* * Table styles */ table.dataTable { width: 100%; margin: 0 auto; clear: both; border-collapse: separate; border-spacing: 0; /* * Header and footer styles */ /* * Body styles */ } table.dataTable thead th, table.dataTable tfoot th { font-weight: bold; } 
table.dataTable thead th, table.dataTable thead td { padding: 10px 18px; border-bottom: 1px solid #111; } table.dataTable thead th:active, table.dataTable thead td:active { outline: none; } table.dataTable tfoot th, table.dataTable tfoot td { padding: 10px 18px 6px 18px; border-top: 1px solid #111; } table.dataTable thead .sorting, table.dataTable thead .sorting_asc, table.dataTable thead .sorting_desc { cursor: pointer; *cursor: hand; } table.dataTable thead .sorting, table.dataTable thead .sorting_asc, table.dataTable thead .sorting_desc, table.dataTable thead .sorting_asc_disabled, table.dataTable thead .sorting_desc_disabled { background-repeat: no-repeat; background-position: center right; } table.dataTable thead .sorting { background-image: url("../images/sort_both.png"); } table.dataTable thead .sorting_asc { background-image: url("../images/sort_asc.png"); } table.dataTable thead .sorting_desc { background-image: url("../images/sort_desc.png"); } table.dataTable thead .sorting_asc_disabled { background-image: url("../images/sort_asc_disabled.png"); } table.dataTable thead .sorting_desc_disabled { background-image: url("../images/sort_desc_disabled.png"); } table.dataTable tbody tr { background-color: #ffffff; } table.dataTable tbody tr.selected { background-color: #B0BED9; } table.dataTable tbody th, table.dataTable tbody td { padding: 8px 10px; } table.dataTable.row-border tbody th, table.dataTable.row-border tbody td, table.dataTable.display tbody th, table.dataTable.display tbody td { border-top: 1px solid #ddd; } table.dataTable.row-border tbody tr:first-child th, table.dataTable.row-border tbody tr:first-child td, table.dataTable.display tbody tr:first-child th, table.dataTable.display tbody tr:first-child td { border-top: none; } table.dataTable.cell-border tbody th, table.dataTable.cell-border tbody td { border-top: 1px solid #ddd; border-right: 1px solid #ddd; } table.dataTable.cell-border tbody tr th:first-child, table.dataTable.cell-border tbody 
tr td:first-child { border-left: 1px solid #ddd; } table.dataTable.cell-border tbody tr:first-child th, table.dataTable.cell-border tbody tr:first-child td { border-top: none; } table.dataTable.stripe tbody tr.odd, table.dataTable.display tbody tr.odd { background-color: #f9f9f9; } table.dataTable.stripe tbody tr.odd.selected, table.dataTable.display tbody tr.odd.selected { background-color: #acbad4; } table.dataTable.hover tbody tr:hover, table.dataTable.display tbody tr:hover { background-color: #f6f6f6; } table.dataTable.hover tbody tr:hover.selected, table.dataTable.display tbody tr:hover.selected { background-color: #aab7d1; } table.dataTable.order-column tbody tr > .sorting_1, table.dataTable.order-column tbody tr > .sorting_2, table.dataTable.order-column tbody tr > .sorting_3, table.dataTable.display tbody tr > .sorting_1, table.dataTable.display tbody tr > .sorting_2, table.dataTable.display tbody tr > .sorting_3 { background-color: #fafafa; } table.dataTable.order-column tbody tr.selected > .sorting_1, table.dataTable.order-column tbody tr.selected > .sorting_2, table.dataTable.order-column tbody tr.selected > .sorting_3, table.dataTable.display tbody tr.selected > .sorting_1, table.dataTable.display tbody tr.selected > .sorting_2, table.dataTable.display tbody tr.selected > .sorting_3 { background-color: #acbad5; } table.dataTable.display tbody tr.odd > .sorting_1, table.dataTable.order-column.stripe tbody tr.odd > .sorting_1 { background-color: #f1f1f1; } table.dataTable.display tbody tr.odd > .sorting_2, table.dataTable.order-column.stripe tbody tr.odd > .sorting_2 { background-color: #f3f3f3; } table.dataTable.display tbody tr.odd > .sorting_3, table.dataTable.order-column.stripe tbody tr.odd > .sorting_3 { background-color: whitesmoke; } table.dataTable.display tbody tr.odd.selected > .sorting_1, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_1 { background-color: #a6b4cd; } table.dataTable.display tbody tr.odd.selected > 
.sorting_2, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_2 { background-color: #a8b5cf; } table.dataTable.display tbody tr.odd.selected > .sorting_3, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_3 { background-color: #a9b7d1; } table.dataTable.display tbody tr.even > .sorting_1, table.dataTable.order-column.stripe tbody tr.even > .sorting_1 { background-color: #fafafa; } table.dataTable.display tbody tr.even > .sorting_2, table.dataTable.order-column.stripe tbody tr.even > .sorting_2 { background-color: #fcfcfc; } table.dataTable.display tbody tr.even > .sorting_3, table.dataTable.order-column.stripe tbody tr.even > .sorting_3 { background-color: #fefefe; } table.dataTable.display tbody tr.even.selected > .sorting_1, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_1 { background-color: #acbad5; } table.dataTable.display tbody tr.even.selected > .sorting_2, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_2 { background-color: #aebcd6; } table.dataTable.display tbody tr.even.selected > .sorting_3, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_3 { background-color: #afbdd8; } table.dataTable.display tbody tr:hover > .sorting_1, table.dataTable.order-column.hover tbody tr:hover > .sorting_1 { background-color: #eaeaea; } table.dataTable.display tbody tr:hover > .sorting_2, table.dataTable.order-column.hover tbody tr:hover > .sorting_2 { background-color: #ececec; } table.dataTable.display tbody tr:hover > .sorting_3, table.dataTable.order-column.hover tbody tr:hover > .sorting_3 { background-color: #efefef; } table.dataTable.display tbody tr:hover.selected > .sorting_1, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_1 { background-color: #a2aec7; } table.dataTable.display tbody tr:hover.selected > .sorting_2, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_2 { background-color: #a3b0c9; } 
table.dataTable.display tbody tr:hover.selected > .sorting_3, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_3 { background-color: #a5b2cb; } table.dataTable.no-footer { border-bottom: 1px solid #111; } table.dataTable.nowrap th, table.dataTable.nowrap td { white-space: nowrap; } table.dataTable.compact thead th, table.dataTable.compact thead td { padding: 4px 17px 4px 4px; } table.dataTable.compact tfoot th, table.dataTable.compact tfoot td { padding: 4px; } table.dataTable.compact tbody th, table.dataTable.compact tbody td { padding: 4px; } table.dataTable th.dt-left, table.dataTable td.dt-left { text-align: left; } table.dataTable th.dt-center, table.dataTable td.dt-center, table.dataTable td.dataTables_empty { text-align: center; } table.dataTable th.dt-right, table.dataTable td.dt-right { text-align: right; } table.dataTable th.dt-justify, table.dataTable td.dt-justify { text-align: justify; } table.dataTable th.dt-nowrap, table.dataTable td.dt-nowrap { white-space: nowrap; } table.dataTable thead th.dt-head-left, table.dataTable thead td.dt-head-left, table.dataTable tfoot th.dt-head-left, table.dataTable tfoot td.dt-head-left { text-align: left; } table.dataTable thead th.dt-head-center, table.dataTable thead td.dt-head-center, table.dataTable tfoot th.dt-head-center, table.dataTable tfoot td.dt-head-center { text-align: center; } table.dataTable thead th.dt-head-right, table.dataTable thead td.dt-head-right, table.dataTable tfoot th.dt-head-right, table.dataTable tfoot td.dt-head-right { text-align: right; } table.dataTable thead th.dt-head-justify, table.dataTable thead td.dt-head-justify, table.dataTable tfoot th.dt-head-justify, table.dataTable tfoot td.dt-head-justify { text-align: justify; } table.dataTable thead th.dt-head-nowrap, table.dataTable thead td.dt-head-nowrap, table.dataTable tfoot th.dt-head-nowrap, table.dataTable tfoot td.dt-head-nowrap { white-space: nowrap; } table.dataTable tbody th.dt-body-left, table.dataTable 
tbody td.dt-body-left { text-align: left; } table.dataTable tbody th.dt-body-center, table.dataTable tbody td.dt-body-center { text-align: center; } table.dataTable tbody th.dt-body-right, table.dataTable tbody td.dt-body-right { text-align: right; } table.dataTable tbody th.dt-body-justify, table.dataTable tbody td.dt-body-justify { text-align: justify; } table.dataTable tbody th.dt-body-nowrap, table.dataTable tbody td.dt-body-nowrap { white-space: nowrap; } table.dataTable, table.dataTable th, table.dataTable td { -webkit-box-sizing: content-box; box-sizing: content-box; } /* * Control feature layout */ .dataTables_wrapper { position: relative; clear: both; *zoom: 1; zoom: 1; } .dataTables_wrapper .dataTables_length { float: left; } .dataTables_wrapper .dataTables_filter { float: right; text-align: right; } .dataTables_wrapper .dataTables_filter input { margin-left: 0.5em; } .dataTables_wrapper .dataTables_info { clear: both; float: left; padding-top: 0.755em; } .dataTables_wrapper .dataTables_paginate { float: right; text-align: right; padding-top: 0.25em; } .dataTables_wrapper .dataTables_paginate .paginate_button { box-sizing: border-box; display: inline-block; min-width: 1.5em; padding: 0.5em 1em; margin-left: 2px; text-align: center; text-decoration: none !important; cursor: pointer; *cursor: hand; color: #333 !important; border: 1px solid transparent; border-radius: 2px; } .dataTables_wrapper .dataTables_paginate .paginate_button.current, .dataTables_wrapper .dataTables_paginate .paginate_button.current:hover { color: #333 !important; border: 1px solid #979797; background-color: white; background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, white), color-stop(100%, #dcdcdc)); /* Chrome,Safari4+ */ background: -webkit-linear-gradient(top, white 0%, #dcdcdc 100%); /* Chrome10+,Safari5.1+ */ background: -moz-linear-gradient(top, white 0%, #dcdcdc 100%); /* FF3.6+ */ background: -ms-linear-gradient(top, white 0%, #dcdcdc 100%); /* IE10+ */ 
background: -o-linear-gradient(top, white 0%, #dcdcdc 100%); /* Opera 11.10+ */ background: linear-gradient(to bottom, white 0%, #dcdcdc 100%); /* W3C */ } .dataTables_wrapper .dataTables_paginate .paginate_button.disabled, .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:hover, .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:active { cursor: default; color: #666 !important; border: 1px solid transparent; background: transparent; box-shadow: none; } .dataTables_wrapper .dataTables_paginate .paginate_button:hover { color: white !important; border: 1px solid #111; background-color: #585858; background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #585858), color-stop(100%, #111)); /* Chrome,Safari4+ */ background: -webkit-linear-gradient(top, #585858 0%, #111 100%); /* Chrome10+,Safari5.1+ */ background: -moz-linear-gradient(top, #585858 0%, #111 100%); /* FF3.6+ */ background: -ms-linear-gradient(top, #585858 0%, #111 100%); /* IE10+ */ background: -o-linear-gradient(top, #585858 0%, #111 100%); /* Opera 11.10+ */ background: linear-gradient(to bottom, #585858 0%, #111 100%); /* W3C */ } .dataTables_wrapper .dataTables_paginate .paginate_button:active { outline: none; background-color: #2b2b2b; background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #2b2b2b), color-stop(100%, #0c0c0c)); /* Chrome,Safari4+ */ background: -webkit-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%); /* Chrome10+,Safari5.1+ */ background: -moz-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%); /* FF3.6+ */ background: -ms-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%); /* IE10+ */ background: -o-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%); /* Opera 11.10+ */ background: linear-gradient(to bottom, #2b2b2b 0%, #0c0c0c 100%); /* W3C */ box-shadow: inset 0 0 3px #111; } .dataTables_wrapper .dataTables_paginate .ellipsis { padding: 0 1em; } .dataTables_wrapper .dataTables_processing { position: absolute; top: 50%; left: 
50%; width: 100%; height: 40px; margin-left: -50%; margin-top: -25px; padding-top: 20px; text-align: center; font-size: 1.2em; background-color: white; background: -webkit-gradient(linear, left top, right top, color-stop(0%, rgba(255, 255, 255, 0)), color-stop(25%, rgba(255, 255, 255, 0.9)), color-stop(75%, rgba(255, 255, 255, 0.9)), color-stop(100%, rgba(255, 255, 255, 0))); background: -webkit-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%); background: -moz-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%); background: -ms-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%); background: -o-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%); background: linear-gradient(to right, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%); } .dataTables_wrapper .dataTables_length, .dataTables_wrapper .dataTables_filter, .dataTables_wrapper .dataTables_info, .dataTables_wrapper .dataTables_processing, .dataTables_wrapper .dataTables_paginate { color: #333; } .dataTables_wrapper .dataTables_scroll { clear: both; } .dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody { *margin-top: -1px; -webkit-overflow-scrolling: touch; } .dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody th, .dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody td { vertical-align: middle; } .dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody th > div.dataTables_sizing, .dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody td > div.dataTables_sizing { height: 0; overflow: hidden; margin: 0 !important; padding: 0 !important; } 
.dataTables_wrapper.no-footer .dataTables_scrollBody { border-bottom: 1px solid #111; } .dataTables_wrapper.no-footer div.dataTables_scrollHead table, .dataTables_wrapper.no-footer div.dataTables_scrollBody table { border-bottom: none; } .dataTables_wrapper:after { visibility: hidden; display: block; content: ""; clear: both; height: 0; } @media screen and (max-width: 767px) { .dataTables_wrapper .dataTables_info, .dataTables_wrapper .dataTables_paginate { float: none; text-align: center; } .dataTables_wrapper .dataTables_paginate { margin-top: 0.5em; } } @media screen and (max-width: 640px) { .dataTables_wrapper .dataTables_length, .dataTables_wrapper .dataTables_filter { float: none; text-align: center; } .dataTables_wrapper .dataTables_filter { margin-top: 0.5em; } } ================================================ FILE: static/plugins/datatables/css/jquery.dataTables_themeroller.css ================================================ /* * Table styles */ table.dataTable { width: 100%; margin: 0 auto; clear: both; border-collapse: separate; border-spacing: 0; /* * Header and footer styles */ /* * Body styles */ } table.dataTable thead th, table.dataTable thead td, table.dataTable tfoot th, table.dataTable tfoot td { padding: 4px 10px; } table.dataTable thead th, table.dataTable tfoot th { font-weight: bold; } table.dataTable thead th:active, table.dataTable thead td:active { outline: none; } table.dataTable thead .sorting_asc, table.dataTable thead .sorting_desc, table.dataTable thead .sorting { cursor: pointer; *cursor: hand; } table.dataTable thead th div.DataTables_sort_wrapper { position: relative; padding-right: 10px; } table.dataTable thead th div.DataTables_sort_wrapper span { position: absolute; top: 50%; margin-top: -8px; right: -5px; } table.dataTable thead th.ui-state-default { border-right-width: 0; } table.dataTable thead th.ui-state-default:last-child { border-right-width: 1px; } table.dataTable tbody tr { background-color: #ffffff; } table.dataTable 
tbody tr.selected { background-color: #B0BED9; } table.dataTable tbody th, table.dataTable tbody td { padding: 8px 10px; } table.dataTable th.center, table.dataTable td.center, table.dataTable td.dataTables_empty { text-align: center; } table.dataTable th.right, table.dataTable td.right { text-align: right; } table.dataTable.row-border tbody th, table.dataTable.row-border tbody td, table.dataTable.display tbody th, table.dataTable.display tbody td { border-top: 1px solid #ddd; } table.dataTable.row-border tbody tr:first-child th, table.dataTable.row-border tbody tr:first-child td, table.dataTable.display tbody tr:first-child th, table.dataTable.display tbody tr:first-child td { border-top: none; } table.dataTable.cell-border tbody th, table.dataTable.cell-border tbody td { border-top: 1px solid #ddd; border-right: 1px solid #ddd; } table.dataTable.cell-border tbody tr th:first-child, table.dataTable.cell-border tbody tr td:first-child { border-left: 1px solid #ddd; } table.dataTable.cell-border tbody tr:first-child th, table.dataTable.cell-border tbody tr:first-child td { border-top: none; } table.dataTable.stripe tbody tr.odd, table.dataTable.display tbody tr.odd { background-color: #f9f9f9; } table.dataTable.stripe tbody tr.odd.selected, table.dataTable.display tbody tr.odd.selected { background-color: #abb9d3; } table.dataTable.hover tbody tr:hover, table.dataTable.hover tbody tr.odd:hover, table.dataTable.hover tbody tr.even:hover, table.dataTable.display tbody tr:hover, table.dataTable.display tbody tr.odd:hover, table.dataTable.display tbody tr.even:hover { background-color: whitesmoke; } table.dataTable.hover tbody tr:hover.selected, table.dataTable.hover tbody tr.odd:hover.selected, table.dataTable.hover tbody tr.even:hover.selected, table.dataTable.display tbody tr:hover.selected, table.dataTable.display tbody tr.odd:hover.selected, table.dataTable.display tbody tr.even:hover.selected { background-color: #a9b7d1; } table.dataTable.order-column tbody tr > 
.sorting_1, table.dataTable.order-column tbody tr > .sorting_2, table.dataTable.order-column tbody tr > .sorting_3, table.dataTable.display tbody tr > .sorting_1, table.dataTable.display tbody tr > .sorting_2, table.dataTable.display tbody tr > .sorting_3 { background-color: #f9f9f9; } table.dataTable.order-column tbody tr.selected > .sorting_1, table.dataTable.order-column tbody tr.selected > .sorting_2, table.dataTable.order-column tbody tr.selected > .sorting_3, table.dataTable.display tbody tr.selected > .sorting_1, table.dataTable.display tbody tr.selected > .sorting_2, table.dataTable.display tbody tr.selected > .sorting_3 { background-color: #acbad4; } table.dataTable.display tbody tr.odd > .sorting_1, table.dataTable.order-column.stripe tbody tr.odd > .sorting_1 { background-color: #f1f1f1; } table.dataTable.display tbody tr.odd > .sorting_2, table.dataTable.order-column.stripe tbody tr.odd > .sorting_2 { background-color: #f3f3f3; } table.dataTable.display tbody tr.odd > .sorting_3, table.dataTable.order-column.stripe tbody tr.odd > .sorting_3 { background-color: whitesmoke; } table.dataTable.display tbody tr.odd.selected > .sorting_1, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_1 { background-color: #a6b3cd; } table.dataTable.display tbody tr.odd.selected > .sorting_2, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_2 { background-color: #a7b5ce; } table.dataTable.display tbody tr.odd.selected > .sorting_3, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_3 { background-color: #a9b6d0; } table.dataTable.display tbody tr.even > .sorting_1, table.dataTable.order-column.stripe tbody tr.even > .sorting_1 { background-color: #f9f9f9; } table.dataTable.display tbody tr.even > .sorting_2, table.dataTable.order-column.stripe tbody tr.even > .sorting_2 { background-color: #fbfbfb; } table.dataTable.display tbody tr.even > .sorting_3, table.dataTable.order-column.stripe tbody tr.even > .sorting_3 { 
background-color: #fdfdfd; } table.dataTable.display tbody tr.even.selected > .sorting_1, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_1 { background-color: #acbad4; } table.dataTable.display tbody tr.even.selected > .sorting_2, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_2 { background-color: #adbbd6; } table.dataTable.display tbody tr.even.selected > .sorting_3, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_3 { background-color: #afbdd8; } table.dataTable.display tbody tr:hover > .sorting_1, table.dataTable.display tbody tr.odd:hover > .sorting_1, table.dataTable.display tbody tr.even:hover > .sorting_1, table.dataTable.order-column.hover tbody tr:hover > .sorting_1, table.dataTable.order-column.hover tbody tr.odd:hover > .sorting_1, table.dataTable.order-column.hover tbody tr.even:hover > .sorting_1 { background-color: #eaeaea; } table.dataTable.display tbody tr:hover > .sorting_2, table.dataTable.display tbody tr.odd:hover > .sorting_2, table.dataTable.display tbody tr.even:hover > .sorting_2, table.dataTable.order-column.hover tbody tr:hover > .sorting_2, table.dataTable.order-column.hover tbody tr.odd:hover > .sorting_2, table.dataTable.order-column.hover tbody tr.even:hover > .sorting_2 { background-color: #ebebeb; } table.dataTable.display tbody tr:hover > .sorting_3, table.dataTable.display tbody tr.odd:hover > .sorting_3, table.dataTable.display tbody tr.even:hover > .sorting_3, table.dataTable.order-column.hover tbody tr:hover > .sorting_3, table.dataTable.order-column.hover tbody tr.odd:hover > .sorting_3, table.dataTable.order-column.hover tbody tr.even:hover > .sorting_3 { background-color: #eeeeee; } table.dataTable.display tbody tr:hover.selected > .sorting_1, table.dataTable.display tbody tr.odd:hover.selected > .sorting_1, table.dataTable.display tbody tr.even:hover.selected > .sorting_1, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_1, 
table.dataTable.order-column.hover tbody tr.odd:hover.selected > .sorting_1, table.dataTable.order-column.hover tbody tr.even:hover.selected > .sorting_1 { background-color: #a1aec7; } table.dataTable.display tbody tr:hover.selected > .sorting_2, table.dataTable.display tbody tr.odd:hover.selected > .sorting_2, table.dataTable.display tbody tr.even:hover.selected > .sorting_2, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_2, table.dataTable.order-column.hover tbody tr.odd:hover.selected > .sorting_2, table.dataTable.order-column.hover tbody tr.even:hover.selected > .sorting_2 { background-color: #a2afc8; } table.dataTable.display tbody tr:hover.selected > .sorting_3, table.dataTable.display tbody tr.odd:hover.selected > .sorting_3, table.dataTable.display tbody tr.even:hover.selected > .sorting_3, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_3, table.dataTable.order-column.hover tbody tr.odd:hover.selected > .sorting_3, table.dataTable.order-column.hover tbody tr.even:hover.selected > .sorting_3 { background-color: #a4b2cb; } table.dataTable.nowrap th, table.dataTable.nowrap td { white-space: nowrap; } table.dataTable.compact thead th, table.dataTable.compact thead td { padding: 5px 9px; } table.dataTable.compact tfoot th, table.dataTable.compact tfoot td { padding: 5px 9px 3px 9px; } table.dataTable.compact tbody th, table.dataTable.compact tbody td { padding: 4px 5px; } table.dataTable th.dt-left, table.dataTable td.dt-left { text-align: left; } table.dataTable th.dt-center, table.dataTable td.dt-center, table.dataTable td.dataTables_empty { text-align: center; } table.dataTable th.dt-right, table.dataTable td.dt-right { text-align: right; } table.dataTable th.dt-justify, table.dataTable td.dt-justify { text-align: justify; } table.dataTable th.dt-nowrap, table.dataTable td.dt-nowrap { white-space: nowrap; } table.dataTable thead th.dt-head-left, table.dataTable thead td.dt-head-left, table.dataTable tfoot 
th.dt-head-left, table.dataTable tfoot td.dt-head-left { text-align: left; } table.dataTable thead th.dt-head-center, table.dataTable thead td.dt-head-center, table.dataTable tfoot th.dt-head-center, table.dataTable tfoot td.dt-head-center { text-align: center; } table.dataTable thead th.dt-head-right, table.dataTable thead td.dt-head-right, table.dataTable tfoot th.dt-head-right, table.dataTable tfoot td.dt-head-right { text-align: right; } table.dataTable thead th.dt-head-justify, table.dataTable thead td.dt-head-justify, table.dataTable tfoot th.dt-head-justify, table.dataTable tfoot td.dt-head-justify { text-align: justify; } table.dataTable thead th.dt-head-nowrap, table.dataTable thead td.dt-head-nowrap, table.dataTable tfoot th.dt-head-nowrap, table.dataTable tfoot td.dt-head-nowrap { white-space: nowrap; } table.dataTable tbody th.dt-body-left, table.dataTable tbody td.dt-body-left { text-align: left; } table.dataTable tbody th.dt-body-center, table.dataTable tbody td.dt-body-center { text-align: center; } table.dataTable tbody th.dt-body-right, table.dataTable tbody td.dt-body-right { text-align: right; } table.dataTable tbody th.dt-body-justify, table.dataTable tbody td.dt-body-justify { text-align: justify; } table.dataTable tbody th.dt-body-nowrap, table.dataTable tbody td.dt-body-nowrap { white-space: nowrap; } table.dataTable, table.dataTable th, table.dataTable td { -webkit-box-sizing: content-box; -moz-box-sizing: content-box; box-sizing: content-box; } /* * Control feature layout */ .dataTables_wrapper { position: relative; clear: both; *zoom: 1; zoom: 1; } .dataTables_wrapper .dataTables_length { float: left; } .dataTables_wrapper .dataTables_filter { float: right; text-align: right; } .dataTables_wrapper .dataTables_filter input { margin-left: 0.5em; } .dataTables_wrapper .dataTables_info { clear: both; float: left; padding-top: 0.55em; } .dataTables_wrapper .dataTables_paginate { float: right; text-align: right; } .dataTables_wrapper 
.dataTables_paginate .fg-button { box-sizing: border-box; display: inline-block; min-width: 1.5em; padding: 0.5em; margin-left: 2px; text-align: center; text-decoration: none !important; cursor: pointer; *cursor: hand; color: #333 !important; border: 1px solid transparent; } .dataTables_wrapper .dataTables_paginate .fg-button:active { outline: none; } .dataTables_wrapper .dataTables_paginate .fg-button:first-child { border-top-left-radius: 3px; border-bottom-left-radius: 3px; } .dataTables_wrapper .dataTables_paginate .fg-button:last-child { border-top-right-radius: 3px; border-bottom-right-radius: 3px; } .dataTables_wrapper .dataTables_processing { position: absolute; top: 50%; left: 50%; width: 100%; height: 40px; margin-left: -50%; margin-top: -25px; padding-top: 20px; text-align: center; font-size: 1.2em; background-color: white; background: -webkit-gradient(linear, left top, right top, color-stop(0%, rgba(255, 255, 255, 0)), color-stop(25%, rgba(255, 255, 255, 0.9)), color-stop(75%, rgba(255, 255, 255, 0.9)), color-stop(100%, rgba(255, 255, 255, 0))); /* Chrome,Safari4+ */ background: -webkit-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%); /* Chrome10+,Safari5.1+ */ background: -moz-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%); /* FF3.6+ */ background: -ms-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%); /* IE10+ */ background: -o-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%); /* Opera 11.10+ */ background: linear-gradient(to right, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%); /* W3C */ } .dataTables_wrapper .dataTables_length, 
.dataTables_wrapper .dataTables_filter, .dataTables_wrapper .dataTables_info, .dataTables_wrapper .dataTables_processing, .dataTables_wrapper .dataTables_paginate { color: #333; } .dataTables_wrapper .dataTables_scroll { clear: both; } .dataTables_wrapper .dataTables_scrollBody { *margin-top: -1px; -webkit-overflow-scrolling: touch; } .dataTables_wrapper .ui-widget-header { font-weight: normal; } .dataTables_wrapper .ui-toolbar { padding: 8px; } .dataTables_wrapper:after { visibility: hidden; display: block; content: ""; clear: both; height: 0; } @media screen and (max-width: 767px) { .dataTables_wrapper .dataTables_length, .dataTables_wrapper .dataTables_filter, .dataTables_wrapper .dataTables_info, .dataTables_wrapper .dataTables_paginate { float: none; text-align: center; } .dataTables_wrapper .dataTables_filter, .dataTables_wrapper .dataTables_paginate { margin-top: 0.5em; } } ================================================ FILE: static/plugins/datatables/js/jquery.dataTables.js ================================================ /*! DataTables 1.10.12 * ©2008-2015 SpryMedia Ltd - datatables.net/license */ /** * @summary DataTables * @description Paginate, search and order HTML tables * @version 1.10.12 * @file jquery.dataTables.js * @author SpryMedia Ltd (www.sprymedia.co.uk) * @contact www.sprymedia.co.uk/contact * @copyright Copyright 2008-2015 SpryMedia Ltd. * * This source file is free software, available under the following license: * MIT license - http://datatables.net/license * * This source file is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details. 
* * For details please refer to: http://www.datatables.net */ /*jslint evil: true, undef: true, browser: true */ /*globals $,require,jQuery,define,_selector_run,_selector_opts,_selector_first,_selector_row_indexes,_ext,_Api,_api_register,_api_registerPlural,_re_new_lines,_re_html,_re_formatted_numeric,_re_escape_regex,_empty,_intVal,_numToDecimal,_isNumber,_isHtml,_htmlNumeric,_pluck,_pluck_order,_range,_stripHtml,_unique,_fnBuildAjax,_fnAjaxUpdate,_fnAjaxParameters,_fnAjaxUpdateDraw,_fnAjaxDataSrc,_fnAddColumn,_fnColumnOptions,_fnAdjustColumnSizing,_fnVisibleToColumnIndex,_fnColumnIndexToVisible,_fnVisbleColumns,_fnGetColumns,_fnColumnTypes,_fnApplyColumnDefs,_fnHungarianMap,_fnCamelToHungarian,_fnLanguageCompat,_fnBrowserDetect,_fnAddData,_fnAddTr,_fnNodeToDataIndex,_fnNodeToColumnIndex,_fnGetCellData,_fnSetCellData,_fnSplitObjNotation,_fnGetObjectDataFn,_fnSetObjectDataFn,_fnGetDataMaster,_fnClearTable,_fnDeleteIndex,_fnInvalidate,_fnGetRowElements,_fnCreateTr,_fnBuildHead,_fnDrawHead,_fnDraw,_fnReDraw,_fnAddOptionsHtml,_fnDetectHeader,_fnGetUniqueThs,_fnFeatureHtmlFilter,_fnFilterComplete,_fnFilterCustom,_fnFilterColumn,_fnFilter,_fnFilterCreateSearch,_fnEscapeRegex,_fnFilterData,_fnFeatureHtmlInfo,_fnUpdateInfo,_fnInfoMacros,_fnInitialise,_fnInitComplete,_fnLengthChange,_fnFeatureHtmlLength,_fnFeatureHtmlPaginate,_fnPageChange,_fnFeatureHtmlProcessing,_fnProcessingDisplay,_fnFeatureHtmlTable,_fnScrollDraw,_fnApplyToChildren,_fnCalculateColumnWidths,_fnThrottle,_fnConvertToWidth,_fnGetWidestNode,_fnGetMaxLenString,_fnStringToCss,_fnSortFlatten,_fnSort,_fnSortAria,_fnSortListener,_fnSortAttachListener,_fnSortingClasses,_fnSortData,_fnSaveState,_fnLoadState,_fnSettingsFromNode,_fnLog,_fnMap,_fnBindAction,_fnCallbackReg,_fnCallbackFire,_fnLengthOverflow,_fnRenderer,_fnDataSource,_fnRowAttributes*/ (function( factory ) { "use strict"; if ( typeof define === 'function' && define.amd ) { // AMD define( ['jquery'], function ( $ ) { return factory( $, window, document 
); } ); } else if ( typeof exports === 'object' ) { // CommonJS module.exports = function (root, $) { if ( ! root ) { // CommonJS environments without a window global must pass a // root. This will give an error otherwise root = window; } if ( ! $ ) { $ = typeof window !== 'undefined' ? // jQuery's factory checks for a global window require('jquery') : require('jquery')( root ); } return factory( $, root, root.document ); }; } else { // Browser factory( jQuery, window, document ); } } (function( $, window, document, undefined ) { "use strict"; /** * DataTables is a plug-in for the jQuery Javascript library. It is a highly * flexible tool, based upon the foundations of progressive enhancement, * which will add advanced interaction controls to any HTML table. For a * full list of features please refer to * [DataTables.net](href="http://datatables.net). * * Note that the `DataTable` object is not a global variable but is aliased * to `jQuery.fn.DataTable` and `jQuery.fn.dataTable` through which it may * be accessed. * * @class * @param {object} [init={}] Configuration object for DataTables. Options * are defined by {@link DataTable.defaults} * @requires jQuery 1.7+ * * @example * // Basic initialisation * $(document).ready( function { * $('#example').dataTable(); * } ); * * @example * // Initialisation with configuration options - in this case, disable * // pagination and sorting. * $(document).ready( function { * $('#example').dataTable( { * "paginate": false, * "sort": false * } ); * } ); */ var DataTable = function ( options ) { /** * Perform a jQuery selector action on the table's TR elements (from the tbody) and * return the resulting jQuery object. * @param {string|node|jQuery} sSelector jQuery selector or node collection to act on * @param {object} [oOpts] Optional parameters for modifying the rows to be included * @param {string} [oOpts.filter=none] Select TR elements that meet the current filter * criterion ("applied") or all TR elements (i.e. no filter). 
* @param {string} [oOpts.order=current] Order of the TR elements in the processed array. * Can be either 'current', whereby the current sorting of the table is used, or * 'original' whereby the original order the data was read into the table is used. * @param {string} [oOpts.page=all] Limit the selection to the currently displayed page * ("current") or not ("all"). If 'current' is given, then order is assumed to be * 'current' and filter is 'applied', regardless of what they might be given as. * @returns {object} jQuery object, filtered by the given selector. * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Highlight every second row * oTable.$('tr:odd').css('backgroundColor', 'blue'); * } ); * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Filter to rows with 'Webkit' in them, add a background colour and then * // remove the filter, thus highlighting the 'Webkit' rows only. * oTable.fnFilter('Webkit'); * oTable.$('tr', {"search": "applied"}).css('backgroundColor', 'blue'); * oTable.fnFilter(''); * } ); */ this.$ = function ( sSelector, oOpts ) { return this.api(true).$( sSelector, oOpts ); }; /** * Almost identical to $ in operation, but in this case returns the data for the matched * rows - as such, the jQuery selector used should match TR row nodes or TD/TH cell nodes * rather than any descendants, so the data can be obtained for the row/cell. If matching * rows are found, the data returned is the original data array/object that was used to * create the row (or a generated array if from a DOM source). * * This method is often useful in-combination with $ where both functions are given the * same parameters and the array indexes will match identically. 
* @param {string|node|jQuery} sSelector jQuery selector or node collection to act on * @param {object} [oOpts] Optional parameters for modifying the rows to be included * @param {string} [oOpts.filter=none] Select elements that meet the current filter * criterion ("applied") or all elements (i.e. no filter). * @param {string} [oOpts.order=current] Order of the data in the processed array. * Can be either 'current', whereby the current sorting of the table is used, or * 'original' whereby the original order the data was read into the table is used. * @param {string} [oOpts.page=all] Limit the selection to the currently displayed page * ("current") or not ("all"). If 'current' is given, then order is assumed to be * 'current' and filter is 'applied', regardless of what they might be given as. * @returns {array} Data for the matched elements. If any elements, as a result of the * selector, were not TR, TD or TH elements in the DataTable, they will have a null * entry in the array. * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Get the data from the first row in the table * var data = oTable._('tr:first'); * * // Do something useful with the data * alert( "First cell is: "+data[0] ); * } ); * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Filter to 'Webkit' and get all data for * oTable.fnFilter('Webkit'); * var data = oTable._('tr', {"search": "applied"}); * * // Do something with the data * alert( data.length+" rows matched the search" ); * } ); */ this._ = function ( sSelector, oOpts ) { return this.api(true).rows( sSelector, oOpts ).data(); }; /** * Create a DataTables Api instance, with the currently selected tables for * the Api's context. 
* @param {boolean} [traditional=false] Set the API instance's context to be * only the table referred to by the `DataTable.ext.iApiIndex` option, as was * used in the API presented by DataTables 1.9- (i.e. the traditional mode), * or if all tables captured in the jQuery object should be used. * @return {DataTables.Api} */ this.api = function ( traditional ) { return traditional ? new _Api( _fnSettingsFromNode( this[ _ext.iApiIndex ] ) ) : new _Api( this ); }; /** * Add a single new row or multiple rows of data to the table. Please note * that this is suitable for client-side processing only - if you are using * server-side processing (i.e. "bServerSide": true), then to add data, you * must add it to the data source, i.e. the server-side, through an Ajax call. * @param {array|object} data The data to be added to the table. This can be: *
    *
  • 1D array of data - add a single row with the data provided
  • *
  • 2D array of arrays - add multiple rows in a single call
  • *
  • object - data object when using mData
  • *
  • array of objects - multiple data objects when using mData
  • *
* @param {bool} [redraw=true] redraw the table or not * @returns {array} An array of integers, representing the list of indexes in * aoData ({@link DataTable.models.oSettings}) that have been added to * the table. * @dtopt API * @deprecated Since v1.10 * * @example * // Global var for counter * var giCount = 2; * * $(document).ready(function() { * $('#example').dataTable(); * } ); * * function fnClickAddRow() { * $('#example').dataTable().fnAddData( [ * giCount+".1", * giCount+".2", * giCount+".3", * giCount+".4" ] * ); * * giCount++; * } */ this.fnAddData = function( data, redraw ) { var api = this.api( true ); /* Check if we want to add multiple rows or not */ var rows = $.isArray(data) && ( $.isArray(data[0]) || $.isPlainObject(data[0]) ) ? api.rows.add( data ) : api.row.add( data ); if ( redraw === undefined || redraw ) { api.draw(); } return rows.flatten().toArray(); }; /** * This function will make DataTables recalculate the column sizes, based on the data * contained in the table and the sizes applied to the columns (in the DOM, CSS or * through the sWidth parameter). This can be useful when the width of the table's * parent element changes (for example a window resize). 
* @param {boolean} [bRedraw=true] Redraw the table or not, you will typically want to * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable( { * "sScrollY": "200px", * "bPaginate": false * } ); * * $(window).bind('resize', function () { * oTable.fnAdjustColumnSizing(); * } ); * } ); */ this.fnAdjustColumnSizing = function ( bRedraw ) { var api = this.api( true ).columns.adjust(); var settings = api.settings()[0]; var scroll = settings.oScroll; if ( bRedraw === undefined || bRedraw ) { api.draw( false ); } else if ( scroll.sX !== "" || scroll.sY !== "" ) { /* If not redrawing, but scrolling, we want to apply the new column sizes anyway */ _fnScrollDraw( settings ); } }; /** * Quickly and simply clear a table * @param {bool} [bRedraw=true] redraw the table or not * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Immediately 'nuke' the current rows (perhaps waiting for an Ajax callback...) * oTable.fnClearTable(); * } ); */ this.fnClearTable = function( bRedraw ) { var api = this.api( true ).clear(); if ( bRedraw === undefined || bRedraw ) { api.draw(); } }; /** * The exact opposite of 'opening' a row, this function will close any rows which * are currently 'open'. 
* @param {node} nTr the table row to 'close' * @returns {int} 0 on success, or 1 if failed (can't find the row) * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable; * * // 'open' an information row when a row is clicked on * $('#example tbody tr').click( function () { * if ( oTable.fnIsOpen(this) ) { * oTable.fnClose( this ); * } else { * oTable.fnOpen( this, "Temporary row opened", "info_row" ); * } * } ); * * oTable = $('#example').dataTable(); * } ); */ this.fnClose = function( nTr ) { this.api( true ).row( nTr ).child.hide(); }; /** * Remove a row for the table * @param {mixed} target The index of the row from aoData to be deleted, or * the TR element you want to delete * @param {function|null} [callBack] Callback function * @param {bool} [redraw=true] Redraw the table or not * @returns {array} The row that was deleted * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Immediately remove the first row * oTable.fnDeleteRow( 0 ); * } ); */ this.fnDeleteRow = function( target, callback, redraw ) { var api = this.api( true ); var rows = api.rows( target ); var settings = rows.settings()[0]; var data = settings.aoData[ rows[0][0] ]; rows.remove(); if ( callback ) { callback.call( this, settings, data ); } if ( redraw === undefined || redraw ) { api.draw(); } return data; }; /** * Restore the table to it's original state in the DOM by removing all of DataTables * enhancements, alterations to the DOM structure of the table and event listeners. 
* @param {boolean} [remove=false] Completely remove the table from the DOM * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * // This example is fairly pointless in reality, but shows how fnDestroy can be used * var oTable = $('#example').dataTable(); * oTable.fnDestroy(); * } ); */ this.fnDestroy = function ( remove ) { this.api( true ).destroy( remove ); }; /** * Redraw the table * @param {bool} [complete=true] Re-filter and resort (if enabled) the table before the draw. * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Re-draw the table - you wouldn't want to do it here, but it's an example :-) * oTable.fnDraw(); * } ); */ this.fnDraw = function( complete ) { // Note that this isn't an exact match to the old call to _fnDraw - it takes // into account the new data, but can hold position. this.api( true ).draw( complete ); }; /** * Filter the input based on data * @param {string} sInput String to filter the table on * @param {int|null} [iColumn] Column to limit filtering to * @param {bool} [bRegex=false] Treat as regular expression or not * @param {bool} [bSmart=true] Perform smart filtering or not * @param {bool} [bShowGlobal=true] Show the input global filter in it's input box(es) * @param {bool} [bCaseInsensitive=true] Do case-insensitive matching (true) or not (false) * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Sometime later - filter... 
* oTable.fnFilter( 'test string' ); * } ); */ this.fnFilter = function( sInput, iColumn, bRegex, bSmart, bShowGlobal, bCaseInsensitive ) { var api = this.api( true ); if ( iColumn === null || iColumn === undefined ) { api.search( sInput, bRegex, bSmart, bCaseInsensitive ); } else { api.column( iColumn ).search( sInput, bRegex, bSmart, bCaseInsensitive ); } api.draw(); }; /** * Get the data for the whole table, an individual row or an individual cell based on the * provided parameters. * @param {int|node} [src] A TR row node, TD/TH cell node or an integer. If given as * a TR node then the data source for the whole row will be returned. If given as a * TD/TH cell node then iCol will be automatically calculated and the data for the * cell returned. If given as an integer, then this is treated as the aoData internal * data index for the row (see fnGetPosition) and the data for that row used. * @param {int} [col] Optional column index that you want the data of. * @returns {array|object|string} If mRow is undefined, then the data for all rows is * returned. If mRow is defined, just data for that row, and is iCol is * defined, only data for the designated cell is returned. * @dtopt API * @deprecated Since v1.10 * * @example * // Row data * $(document).ready(function() { * oTable = $('#example').dataTable(); * * oTable.$('tr').click( function () { * var data = oTable.fnGetData( this ); * // ... do something with the array / object of data for the row * } ); * } ); * * @example * // Individual cell data * $(document).ready(function() { * oTable = $('#example').dataTable(); * * oTable.$('td').click( function () { * var sData = oTable.fnGetData( this ); * alert( 'The cell clicked on had the value of '+sData ); * } ); * } ); */ this.fnGetData = function( src, col ) { var api = this.api( true ); if ( src !== undefined ) { var type = src.nodeName ? src.nodeName.toLowerCase() : ''; return col !== undefined || type == 'td' || type == 'th' ? 
api.cell( src, col ).data() : api.row( src ).data() || null; } return api.data().toArray(); }; /** * Get an array of the TR nodes that are used in the table's body. Note that you will * typically want to use the '$' API method in preference to this as it is more * flexible. * @param {int} [iRow] Optional row index for the TR element you want * @returns {array|node} If iRow is undefined, returns an array of all TR elements * in the table's body, or iRow is defined, just the TR element requested. * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Get the nodes from the table * var nNodes = oTable.fnGetNodes( ); * } ); */ this.fnGetNodes = function( iRow ) { var api = this.api( true ); return iRow !== undefined ? api.row( iRow ).node() : api.rows().nodes().flatten().toArray(); }; /** * Get the array indexes of a particular cell from it's DOM element * and column index including hidden columns * @param {node} node this can either be a TR, TD or TH in the table's body * @returns {int} If nNode is given as a TR, then a single index is returned, or * if given as a cell, an array of [row index, column index (visible), * column index (all)] is given. 
* @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * $('#example tbody td').click( function () { * // Get the position of the current data from the node * var aPos = oTable.fnGetPosition( this ); * * // Get the data array for this row * var aData = oTable.fnGetData( aPos[0] ); * * // Update the data array and return the value * aData[ aPos[1] ] = 'clicked'; * this.innerHTML = 'clicked'; * } ); * * // Init DataTables * oTable = $('#example').dataTable(); * } ); */ this.fnGetPosition = function( node ) { var api = this.api( true ); var nodeName = node.nodeName.toUpperCase(); if ( nodeName == 'TR' ) { return api.row( node ).index(); } else if ( nodeName == 'TD' || nodeName == 'TH' ) { var cell = api.cell( node ).index(); return [ cell.row, cell.columnVisible, cell.column ]; } return null; }; /** * Check to see if a row is 'open' or not. * @param {node} nTr the table row to check * @returns {boolean} true if the row is currently open, false otherwise * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable; * * // 'open' an information row when a row is clicked on * $('#example tbody tr').click( function () { * if ( oTable.fnIsOpen(this) ) { * oTable.fnClose( this ); * } else { * oTable.fnOpen( this, "Temporary row opened", "info_row" ); * } * } ); * * oTable = $('#example').dataTable(); * } ); */ this.fnIsOpen = function( nTr ) { return this.api( true ).row( nTr ).child.isShown(); }; /** * This function will place a new row directly after a row which is currently * on display on the page, with the HTML contents that is passed into the * function. This can be used, for example, to ask for confirmation that a * particular record should be deleted. * @param {node} nTr The table row to 'open' * @param {string|node|jQuery} mHtml The HTML to put into the row * @param {string} sClass Class to give the new TD cell * @returns {node} The row opened. 
Note that if the table row passed in as the * first parameter, is not found in the table, this method will silently * return. * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable; * * // 'open' an information row when a row is clicked on * $('#example tbody tr').click( function () { * if ( oTable.fnIsOpen(this) ) { * oTable.fnClose( this ); * } else { * oTable.fnOpen( this, "Temporary row opened", "info_row" ); * } * } ); * * oTable = $('#example').dataTable(); * } ); */ this.fnOpen = function( nTr, mHtml, sClass ) { return this.api( true ) .row( nTr ) .child( mHtml, sClass ) .show() .child()[0]; }; /** * Change the pagination - provides the internal logic for pagination in a simple API * function. With this function you can have a DataTables table go to the next, * previous, first or last pages. * @param {string|int} mAction Paging action to take: "first", "previous", "next" or "last" * or page number to jump to (integer), note that page 0 is the first page. 
* @param {bool} [bRedraw=true] Redraw the table or not * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * oTable.fnPageChange( 'next' ); * } ); */ this.fnPageChange = function ( mAction, bRedraw ) { var api = this.api( true ).page( mAction ); if ( bRedraw === undefined || bRedraw ) { api.draw(false); } }; /** * Show a particular column * @param {int} iCol The column whose display should be changed * @param {bool} bShow Show (true) or hide (false) the column * @param {bool} [bRedraw=true] Redraw the table or not * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Hide the second column after initialisation * oTable.fnSetColumnVis( 1, false ); * } ); */ this.fnSetColumnVis = function ( iCol, bShow, bRedraw ) { var api = this.api( true ).column( iCol ).visible( bShow ); if ( bRedraw === undefined || bRedraw ) { api.columns.adjust().draw(); } }; /** * Get the settings for a particular table for external manipulation * @returns {object} DataTables settings object. See * {@link DataTable.models.oSettings} * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * var oSettings = oTable.fnSettings(); * * // Show an example parameter from the settings * alert( oSettings._iDisplayStart ); * } ); */ this.fnSettings = function() { return _fnSettingsFromNode( this[_ext.iApiIndex] ); }; /** * Sort the table by a particular column * @param {int} iCol the data index to sort on. 
Note that this will not match the * 'display index' if you have hidden data entries * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Sort immediately with columns 0 and 1 * oTable.fnSort( [ [0,'asc'], [1,'asc'] ] ); * } ); */ this.fnSort = function( aaSort ) { this.api( true ).order( aaSort ).draw(); }; /** * Attach a sort listener to an element for a given column * @param {node} nNode the element to attach the sort listener to * @param {int} iColumn the column that a click on this node will sort on * @param {function} [fnCallback] callback function when sort is run * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * * // Sort on column 1, when 'sorter' is clicked on * oTable.fnSortListener( document.getElementById('sorter'), 1 ); * } ); */ this.fnSortListener = function( nNode, iColumn, fnCallback ) { this.api( true ).order.listener( nNode, iColumn, fnCallback ); }; /** * Update a table cell or row - this method will accept either a single value to * update the cell with, an array of values with one element for each column or * an object in the same format as the original data source. The function is * self-referencing in order to make the multi column updates easier. * @param {object|array|string} mData Data to update the cell/row with * @param {node|int} mRow TR element you want to update or the aoData index * @param {int} [iColumn] The column to update, give as null or undefined to * update a whole row. 
* @param {bool} [bRedraw=true] Redraw the table or not * @param {bool} [bAction=true] Perform pre-draw actions or not * @returns {int} 0 on success, 1 on error * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * oTable.fnUpdate( 'Example update', 0, 0 ); // Single cell * oTable.fnUpdate( ['a', 'b', 'c', 'd', 'e'], $('tbody tr')[0] ); // Row * } ); */ this.fnUpdate = function( mData, mRow, iColumn, bRedraw, bAction ) { var api = this.api( true ); if ( iColumn === undefined || iColumn === null ) { api.row( mRow ).data( mData ); } else { api.cell( mRow, iColumn ).data( mData ); } if ( bAction === undefined || bAction ) { api.columns.adjust(); } if ( bRedraw === undefined || bRedraw ) { api.draw(); } return 0; }; /** * Provide a common method for plug-ins to check the version of DataTables being used, in order * to ensure compatibility. * @param {string} sVersion Version string to check for, in the format "X.Y.Z". Note that the * formats "X" and "X.Y" are also acceptable. * @returns {boolean} true if this version of DataTables is greater or equal to the required * version, or false if this version of DataTales is not suitable * @method * @dtopt API * @deprecated Since v1.10 * * @example * $(document).ready(function() { * var oTable = $('#example').dataTable(); * alert( oTable.fnVersionCheck( '1.9.0' ) ); * } ); */ this.fnVersionCheck = _ext.fnVersionCheck; var _that = this; var emptyInit = options === undefined; var len = this.length; if ( emptyInit ) { options = {}; } this.oApi = this.internal = _ext.internal; // Extend with old style plug-in API methods for ( var fn in DataTable.ext.internal ) { if ( fn ) { this[fn] = _fnExternApiFunc(fn); } } this.each(function() { // For each initialisation we want to give it a clean initialisation // object that can be bashed around var o = {}; var oInit = len > 1 ? 
// optimisation for single table case _fnExtend( o, options, true ) : options; /*global oInit,_that,emptyInit*/ var i=0, iLen, j, jLen, k, kLen; var sId = this.getAttribute( 'id' ); var bInitHandedOff = false; var defaults = DataTable.defaults; var $this = $(this); /* Sanity check */ if ( this.nodeName.toLowerCase() != 'table' ) { _fnLog( null, 0, 'Non-table node initialisation ('+this.nodeName+')', 2 ); return; } /* Backwards compatibility for the defaults */ _fnCompatOpts( defaults ); _fnCompatCols( defaults.column ); /* Convert the camel-case defaults to Hungarian */ _fnCamelToHungarian( defaults, defaults, true ); _fnCamelToHungarian( defaults.column, defaults.column, true ); /* Setting up the initialisation object */ _fnCamelToHungarian( defaults, $.extend( oInit, $this.data() ) ); /* Check to see if we are re-initialising a table */ var allSettings = DataTable.settings; for ( i=0, iLen=allSettings.length ; i').appendTo(this); } oSettings.nTHead = thead[0]; var tbody = $this.children('tbody'); if ( tbody.length === 0 ) { tbody = $('').appendTo(this); } oSettings.nTBody = tbody[0]; var tfoot = $this.children('tfoot'); if ( tfoot.length === 0 && captions.length > 0 && (oSettings.oScroll.sX !== "" || oSettings.oScroll.sY !== "") ) { // If we are a scrolling table, and no footer has been given, then we need to create // a tfoot element for the caption element to be appended to tfoot = $('').appendTo(this); } if ( tfoot.length === 0 || tfoot.children().length === 0 ) { $this.addClass( oClasses.sNoFooter ); } else if ( tfoot.length > 0 ) { oSettings.nTFoot = tfoot[0]; _fnDetectHeader( oSettings.aoFooter, oSettings.nTFoot ); } /* Check if there is data passing into the constructor */ if ( oInit.aaData ) { for ( i=0 ; i/g; var _re_date_start = /^[\w\+\-]/; var _re_date_end = /[\w\+\-]$/; // Escape regular expression special characters var _re_escape_regex = new RegExp( '(\\' + [ '/', '.', '*', '+', '?', '|', '(', ')', '[', ']', '{', '}', '\\', '$', '^', '-' 
].join('|\\') + ')', 'g' ); // http://en.wikipedia.org/wiki/Foreign_exchange_market // - \u20BD - Russian ruble. // - \u20a9 - South Korean Won // - \u20BA - Turkish Lira // - \u20B9 - Indian Rupee // - R - Brazil (R$) and South Africa // - fr - Swiss Franc // - kr - Swedish krona, Norwegian krone and Danish krone // - \u2009 is thin space and \u202F is narrow no-break space, both used in many // standards as thousands separators. var _re_formatted_numeric = /[',$£€¥%\u2009\u202F\u20BD\u20a9\u20BArfk]/gi; var _empty = function ( d ) { return !d || d === true || d === '-' ? true : false; }; var _intVal = function ( s ) { var integer = parseInt( s, 10 ); return !isNaN(integer) && isFinite(s) ? integer : null; }; // Convert from a formatted number with characters other than `.` as the // decimal place, to a Javascript number var _numToDecimal = function ( num, decimalPoint ) { // Cache created regular expressions for speed as this function is called often if ( ! _re_dic[ decimalPoint ] ) { _re_dic[ decimalPoint ] = new RegExp( _fnEscapeRegex( decimalPoint ), 'g' ); } return typeof num === 'string' && decimalPoint !== '.' ? num.replace( /\./g, '' ).replace( _re_dic[ decimalPoint ], '.' 
) : num; }; var _isNumber = function ( d, decimalPoint, formatted ) { var strType = typeof d === 'string'; // If empty return immediately so there must be a number if it is a // formatted string (this stops the string "k", or "kr", etc being detected // as a formatted number for currency if ( _empty( d ) ) { return true; } if ( decimalPoint && strType ) { d = _numToDecimal( d, decimalPoint ); } if ( formatted && strType ) { d = d.replace( _re_formatted_numeric, '' ); } return !isNaN( parseFloat(d) ) && isFinite( d ); }; // A string without HTML in it can be considered to be HTML still var _isHtml = function ( d ) { return _empty( d ) || typeof d === 'string'; }; var _htmlNumeric = function ( d, decimalPoint, formatted ) { if ( _empty( d ) ) { return true; } var html = _isHtml( d ); return ! html ? null : _isNumber( _stripHtml( d ), decimalPoint, formatted ) ? true : null; }; var _pluck = function ( a, prop, prop2 ) { var out = []; var i=0, ien=a.length; // Could have the test in the loop for slightly smaller code, but speed // is essential here if ( prop2 !== undefined ) { for ( ; i') .css( { position: 'fixed', top: 0, left: 0, height: 1, width: 1, overflow: 'hidden' } ) .append( $('
') .css( { position: 'absolute', top: 1, left: 1, width: 100, overflow: 'scroll' } ) .append( $('
') .css( { width: '100%', height: 10 } ) ) ) .appendTo( 'body' ); var outer = n.children(); var inner = outer.children(); // Numbers below, in order, are: // inner.offsetWidth, inner.clientWidth, outer.offsetWidth, outer.clientWidth // // IE6 XP: 100 100 100 83 // IE7 Vista: 100 100 100 83 // IE 8+ Windows: 83 83 100 83 // Evergreen Windows: 83 83 100 83 // Evergreen Mac with scrollbars: 85 85 100 85 // Evergreen Mac without scrollbars: 100 100 100 100 // Get scrollbar width browser.barWidth = outer[0].offsetWidth - outer[0].clientWidth; // IE6/7 will oversize a width 100% element inside a scrolling element, to // include the width of the scrollbar, while other browsers ensure the inner // element is contained without forcing scrolling browser.bScrollOversize = inner[0].offsetWidth === 100 && outer[0].clientWidth !== 100; // In rtl text layout, some browsers (most, but not all) will place the // scrollbar on the left, rather than the right. browser.bScrollbarLeft = Math.round( inner.offset().left ) !== 1; // IE8- don't provide height and width for getBoundingClientRect browser.bBounding = n[0].getBoundingClientRect().width ? true : false; n.remove(); } $.extend( settings.oBrowser, DataTable.__browser ); settings.oScroll.iBarWidth = DataTable.__browser.barWidth; } /** * Array.prototype reduce[Right] method, used for browsers which don't support * JS 1.6. Done this way to reduce code size, since we iterate either way * @param {object} settings dataTables settings object * @memberof DataTable#oApi */ function _fnReduce ( that, fn, init, start, end, inc ) { var i = start, value, isSet = false; if ( init !== undefined ) { value = init; isSet = true; } while ( i !== end ) { if ( ! that.hasOwnProperty(i) ) { continue; } value = isSet ? 
fn( value, that[i], i, that ) : that[i]; isSet = true; i += inc; } return value; } /** * Add a column to the list used for the table with default values * @param {object} oSettings dataTables settings object * @param {node} nTh The th element for this column * @memberof DataTable#oApi */ function _fnAddColumn( oSettings, nTh ) { // Add column to aoColumns array var oDefaults = DataTable.defaults.column; var iCol = oSettings.aoColumns.length; var oCol = $.extend( {}, DataTable.models.oColumn, oDefaults, { "nTh": nTh ? nTh : document.createElement('th'), "sTitle": oDefaults.sTitle ? oDefaults.sTitle : nTh ? nTh.innerHTML : '', "aDataSort": oDefaults.aDataSort ? oDefaults.aDataSort : [iCol], "mData": oDefaults.mData ? oDefaults.mData : iCol, idx: iCol } ); oSettings.aoColumns.push( oCol ); // Add search object for column specific search. Note that the `searchCols[ iCol ]` // passed into extend can be undefined. This allows the user to give a default // with only some of the parameters defined, and also not give a default var searchCols = oSettings.aoPreSearchCols; searchCols[ iCol ] = $.extend( {}, DataTable.models.oSearch, searchCols[ iCol ] ); // Use the default column options function to initialise classes etc _fnColumnOptions( oSettings, iCol, $(nTh).data() ); } /** * Apply options for a column * @param {object} oSettings dataTables settings object * @param {int} iCol column index to consider * @param {object} oOptions object with sType, bVisible and bSearchable etc * @memberof DataTable#oApi */ function _fnColumnOptions( oSettings, iCol, oOptions ) { var oCol = oSettings.aoColumns[ iCol ]; var oClasses = oSettings.oClasses; var th = $(oCol.nTh); // Try to get width information from the DOM. We can't get it from CSS // as we'd need to parse the CSS stylesheet. `width` option can override if ( ! 
oCol.sWidthOrig ) { // Width attribute oCol.sWidthOrig = th.attr('width') || null; // Style attribute var t = (th.attr('style') || '').match(/width:\s*(\d+[pxem%]+)/); if ( t ) { oCol.sWidthOrig = t[1]; } } /* User specified column options */ if ( oOptions !== undefined && oOptions !== null ) { // Backwards compatibility _fnCompatCols( oOptions ); // Map camel case parameters to their Hungarian counterparts _fnCamelToHungarian( DataTable.defaults.column, oOptions ); /* Backwards compatibility for mDataProp */ if ( oOptions.mDataProp !== undefined && !oOptions.mData ) { oOptions.mData = oOptions.mDataProp; } if ( oOptions.sType ) { oCol._sManualType = oOptions.sType; } // `class` is a reserved word in Javascript, so we need to provide // the ability to use a valid name for the camel case input if ( oOptions.className && ! oOptions.sClass ) { oOptions.sClass = oOptions.className; } $.extend( oCol, oOptions ); _fnMap( oCol, oOptions, "sWidth", "sWidthOrig" ); /* iDataSort to be applied (backwards compatibility), but aDataSort will take * priority if defined */ if ( oOptions.iDataSort !== undefined ) { oCol.aDataSort = [ oOptions.iDataSort ]; } _fnMap( oCol, oOptions, "aDataSort" ); } /* Cache the data get and set functions for speed */ var mDataSrc = oCol.mData; var mData = _fnGetObjectDataFn( mDataSrc ); var mRender = oCol.mRender ? _fnGetObjectDataFn( oCol.mRender ) : null; var attrTest = function( src ) { return typeof src === 'string' && src.indexOf('@') !== -1; }; oCol._bAttrSrc = $.isPlainObject( mDataSrc ) && ( attrTest(mDataSrc.sort) || attrTest(mDataSrc.type) || attrTest(mDataSrc.filter) ); oCol._setter = null; oCol.fnGetData = function (rowData, type, meta) { var innerData = mData( rowData, type, undefined, meta ); return mRender && type ? 
mRender( innerData, type, rowData, meta ) : innerData; }; oCol.fnSetData = function ( rowData, val, meta ) { return _fnSetObjectDataFn( mDataSrc )( rowData, val, meta ); }; // Indicate if DataTables should read DOM data as an object or array // Used in _fnGetRowElements if ( typeof mDataSrc !== 'number' ) { oSettings._rowReadObject = true; } /* Feature sorting overrides column specific when off */ if ( !oSettings.oFeatures.bSort ) { oCol.bSortable = false; th.addClass( oClasses.sSortableNone ); // Have to add class here as order event isn't called } /* Check that the class assignment is correct for sorting */ var bAsc = $.inArray('asc', oCol.asSorting) !== -1; var bDesc = $.inArray('desc', oCol.asSorting) !== -1; if ( !oCol.bSortable || (!bAsc && !bDesc) ) { oCol.sSortingClass = oClasses.sSortableNone; oCol.sSortingClassJUI = ""; } else if ( bAsc && !bDesc ) { oCol.sSortingClass = oClasses.sSortableAsc; oCol.sSortingClassJUI = oClasses.sSortJUIAscAllowed; } else if ( !bAsc && bDesc ) { oCol.sSortingClass = oClasses.sSortableDesc; oCol.sSortingClassJUI = oClasses.sSortJUIDescAllowed; } else { oCol.sSortingClass = oClasses.sSortable; oCol.sSortingClassJUI = oClasses.sSortJUI; } } /** * Adjust the table column widths for new data. Note: you would probably want to * do a redraw after calling this function! * @param {object} settings dataTables settings object * @memberof DataTable#oApi */ function _fnAdjustColumnSizing ( settings ) { /* Not interested in doing column width calculation if auto-width is disabled */ if ( settings.oFeatures.bAutoWidth !== false ) { var columns = settings.aoColumns; _fnCalculateColumnWidths( settings ); for ( var i=0 , iLen=columns.length ; i