Showing preview only (2,040K chars total). Download the full file or copy to clipboard to get everything.
Repository: hanson007/FirstBlood
Branch: master
Commit: fef5c9887eea
Files: 115
Total size: 1.9 MB
Directory structure:
gitextract_epu7r_5a/
├── .gitignore
├── FirstBlood/
│ ├── __init__.py
│ ├── celery.py
│ ├── settings.py
│ ├── urls.py
│ ├── views.py
│ └── wsgi.py
├── LICENSE
├── README.md
├── batch_job/
│ ├── __init__.py
│ ├── admin.py
│ ├── apps.py
│ ├── conf/
│ │ ├── __init__.py
│ │ └── config.py
│ ├── models.py
│ ├── tests.py
│ ├── urls.py
│ └── views.py
├── controller/
│ ├── __init__.py
│ ├── core/
│ │ ├── __init__.py
│ │ ├── access.py
│ │ ├── excel.py
│ │ ├── local_mysql.py
│ │ ├── mailtable.py
│ │ ├── public.py
│ │ ├── query_sql.py
│ │ └── unicode_width.py
│ └── public/
│ ├── __init__.py
│ ├── dataconn.py
│ ├── log.py
│ ├── mailclass.py
│ ├── mysql_helper.py
│ ├── pagination.py
│ └── sqlserver_helper.py
├── create_table.sql
├── datax_web/
│ ├── __init__.py
│ ├── admin.py
│ ├── apps.py
│ ├── conf/
│ │ ├── __init__.py
│ │ └── config.py
│ ├── models.py
│ ├── tests.py
│ ├── urls.py
│ └── views.py
├── manage.py
├── requirements.txt
├── static/
│ ├── css/
│ │ └── login.css
│ ├── js/
│ │ └── csrf.js
│ ├── plugins/
│ │ ├── bootstarp-table/
│ │ │ └── bootstrap-table-zh-CN.js
│ │ ├── datatables/
│ │ │ ├── css/
│ │ │ │ ├── jquery.dataTables.css
│ │ │ │ └── jquery.dataTables_themeroller.css
│ │ │ └── js/
│ │ │ └── jquery.dataTables.js
│ │ └── layer/
│ │ ├── layer.js
│ │ └── skin/
│ │ └── layer.css
│ └── template/
│ ├── bootstrap/
│ │ └── css/
│ │ └── bootstrap.css
│ ├── css/
│ │ ├── bootstrap-datepicker.css
│ │ ├── bootstrap-timepicker.css
│ │ ├── bootstrap-wysihtml5.css
│ │ ├── colorbox/
│ │ │ └── colorbox.css
│ │ ├── datepicker.css
│ │ ├── dropzone/
│ │ │ └── dropzone.css
│ │ ├── endless-skin.css
│ │ ├── endless.css
│ │ ├── fullcalendar.css
│ │ ├── gritter/
│ │ │ └── jquery.gritter.css
│ │ ├── jcarousel.responsive.css
│ │ ├── jquery.dataTables_themeroller.css
│ │ ├── jquery.tagsinput.css
│ │ ├── morris.css
│ │ ├── pace.css
│ │ ├── prettify.css
│ │ └── slider.css
│ └── js/
│ ├── bootstrap-datepicker.js
│ ├── endless/
│ │ ├── endless.js
│ │ ├── endless_dashboard.js
│ │ ├── endless_form.js
│ │ └── endless_wizard.js
│ ├── jquery-ui.js
│ └── uncompressed/
│ ├── bootstrap-wysihtml5.js
│ ├── holder.js
│ ├── pace.js
│ └── run_prettify.js
├── supervisord/
│ ├── FirstBlood_dev.conf
│ └── FirstBlood_pro.conf
├── templates/
│ ├── 403.html
│ ├── 404.html
│ ├── base.html
│ ├── batch_job/
│ │ ├── add_batch_job.html
│ │ ├── add_crontabs.html
│ │ ├── batch_job_instance.html
│ │ ├── batch_job_instance_details.html
│ │ ├── crontabs.html
│ │ ├── index.html
│ │ ├── mod_crontabs.html
│ │ ├── update_batch_job.html
│ │ └── update_job.html
│ ├── datax_web/
│ │ ├── add_crontabs.html
│ │ ├── add_job.html
│ │ ├── crontabs.html
│ │ ├── index.html
│ │ ├── mod_crontabs.html
│ │ ├── mod_periodic_task.html
│ │ ├── monitor_job.html
│ │ ├── monitor_job_detail.html
│ │ └── update_job.html
│ ├── index.html
│ └── registered/
│ ├── login.html
│ ├── password_change.html
│ └── password_change_done.html
└── websockted/
├── CHANGES
├── LICENSE
├── README.md
├── __init__.py
├── datax_web_job_instance.py
└── websocketd
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
# python pyc pyo
*.pyc
*.pyo
# log
*.log
# out
*.out
# pid
*.pid
# pycharm
.idea
# datax
datax
# create table sql
*/migrations
================================================
FILE: FirstBlood/__init__.py
================================================
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
================================================
FILE: FirstBlood/celery.py
================================================
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Description: Celery application bootstrap for the FirstBlood project.
#   (Shebang fixed: was "#!/usr/bin/python env".)
# Author: 黄小雪
# Date: 2017-03-25
# Company: 东方银谷
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'FirstBlood.settings')
app = Celery('FirstBlood')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
# Discover tasks.py modules in every app listed in INSTALLED_APPS.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
    # Diagnostic task: prints the raw task request (id, args, hostname, ...).
    print('Request: {0!r}'.format(self.request))
================================================
FILE: FirstBlood/settings.py
================================================
# -*-coding:utf-8-*-
"""
Django settings for FirstBlood project.
Generated by 'django-admin startproject' using Django 1.9.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the secret key is committed to the repo — rotate it and load
# from the environment before any real deployment.
SECRET_KEY = 'on9c1o9$*rvub+_(z!_n!b$!2mwo1h8(pd_h#n_q$=2s84+^9('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Name of the shell environment variable used to tell production apart from
# development when loading per-environment configuration (see README).
SYSTEM_ENVIRONMENT_VARIABLES = 'development_environment'
# Application definition
INSTALLED_APPS = [
    'datax_web',
    'batch_job',
    'djcelery',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'FirstBlood.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'FirstBlood.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# Development/test environment database.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',  # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'FirstBlood',  # Or path to database file if using sqlite3.
        'USER': 'root',  # Not used with sqlite3.
        'PASSWORD': '123.com',  # Not used with sqlite3.
        'HOST': '127.0.0.1',  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '3306',  # Set to empty string for default. Not used with sqlite3.
        'STORAGE_ENGINE': 'MYISAM'
        # 'OPTIONS': {"init_command": "SET foreign_key_checks = 0;",},
    }
}
CELERYD_CONCURRENCY = 3
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# redis celery
# CELERY STUFF
import djcelery
djcelery.setup_loader()
BROKER_URL = 'redis://localhost:6379'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'  # database-backed periodic tasks
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
# CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Asia/Shanghai'
CELERY_ENABLE_UTC=False
# CELERYD_CONCURRENCY = 10
# Recycle each worker process after it has executed this many tasks, to guard
# against memory leaks.  (Original comment said 3 but the value here is 1.)
CELERYD_MAX_TASKS_PER_CHILD = 1
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static"),
]
# Directory for the app log files; must exist (see README "mkdir log").
LOG_FILE_DIR = os.path.join(BASE_DIR, "log/")
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'standard': {
            'format': '%(levelname)s %(asctime)s %(message)s'
        },
    },
    'filters': {
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler',
            'formatter':'standard',
        },
        'datax_web_handler': {
            'level':'DEBUG',
            'class':'logging.handlers.RotatingFileHandler',
            'filename':'%s%s' % (LOG_FILE_DIR, 'datax_web.log'),
            'formatter':'standard',
        },
        'batch_job_handler': {
            'level':'DEBUG',
            'class':'logging.handlers.RotatingFileHandler',
            'filename':'%s%s' % (LOG_FILE_DIR, 'batch_job.log'),
            'formatter':'standard',
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
        'datax_web':{
            'handlers': ['datax_web_handler'],
            'level': 'INFO',
            'propagate': False
        },
        'batch_job':{
            'handlers': ['batch_job_handler'],
            'level': 'INFO',
            'propagate': False
        },
    }
}
================================================
FILE: FirstBlood/urls.py
================================================
# -*- coding: UTF-8 -*-
"""FirstBlood URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.views.static import serve
from django.conf.urls import include, url
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
from django.contrib.auth.views import (logout,login,password_change,password_change_done)
# Python 2 implicit-relative import of FirstBlood/views.py
# (provides index, get_username, check_permission).
from views import *
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^index/$', index),  # home page
    url(r'^$', index, name='index'),
    url(r'^accounts/login/$', login, {'template_name': 'registered/login.html'}, name='django.contrib.auth.views.login'),
    url(r'^accounts/logout/$', logout, name='django.contrib.auth.views.logout'),
    url(r'^password_change/$', password_change, {
        'post_change_redirect': '/password_change_done/',
        'template_name': 'registered/password_change.html'},
        name='django.contrib.auth.views.password_change'),
    url(r'^password_change_done/$', password_change_done, {
        'template_name': 'registered/password_change_done.html'},
        name='django.contrib.auth.views.password_change_done'),
    url(r'^get_username/$', get_username),  # current login name as JSON
    url(r'^check_permission/$', check_permission),  # permission-check endpoint
    # NOTE(review): serving static files through Django is fine for dev; in
    # production a front-end server should serve STATIC_ROOT instead.
    url(r'^static/(?P<path>.*)$', serve, {'document_root': settings.STATIC_ROOT}),
    # Data-synchronisation module
    url(r'^datax_web/', include('datax_web.urls')),
    # Batch-job module
    url(r'^batch_job/', include('batch_job.urls')),
]
================================================
FILE: FirstBlood/views.py
================================================
# -*- coding: UTF-8 -*-
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from django.template import RequestContext
from django.http import HttpResponse
from django.contrib import auth
from controller.core.public import Currency
import json
# Create your views here.
@login_required
def index(request):
    # Home page: render index.html with locals() as context.
    # NOTE: because locals() is the template context, the variable name
    # ``nowuser`` is part of the template contract — do not rename it.
    nowuser = auth.get_user(request)
    return render(request, 'index.html', locals())
def page_not_found(request):
    """Render the custom 404 page.

    Bug fix: ``django.shortcuts.render`` takes the request as its first
    argument; the original passed only the template name (as the request),
    which raised a TypeError whenever this handler ran.  Also returns the
    proper HTTP 404 status instead of 200.
    """
    return render(request, "404.html", status=404)
def permission_denied(request):
    """Render the custom 403 page.

    Bug fix: ``django.shortcuts.render`` takes the request as its first
    argument; the original passed only the template name (as the request),
    which raised a TypeError whenever this handler ran.  Also returns the
    proper HTTP 403 status instead of 200.
    """
    return render(request, "403.html", status=403)
@login_required
def get_username(request):
    """Return the logged-in user's username as a JSON string."""
    current_user = auth.get_user(request)
    resp = HttpResponse()
    resp.write(json.dumps(current_user.get_username()))
    return resp
@login_required
def check_permission(request):
    """Check whether the current user holds the POSTed permission.

    Reads the permission codename from the request via ``Currency.rq_post``
    and writes JSON ``{"status": 0}`` when granted, ``{"status": 1}`` when not.
    """
    user = auth.get_user(request)
    helper = Currency(request)
    wanted_perm = helper.rq_post('permission')
    if user.has_perm(wanted_perm):
        status = 0
    else:
        status = 1
    response = HttpResponse()
    response.write(json.dumps({'status': status}))
    return response
================================================
FILE: FirstBlood/wsgi.py
================================================
"""
WSGI config for FirstBlood project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before building the WSGI callable.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "FirstBlood.settings")
# ``application`` is the entry point WSGI servers (gunicorn/uwsgi/mod_wsgi) import.
application = get_wsgi_application()
================================================
FILE: LICENSE
================================================
MIT License
Copyright (c) 2022 盲僧
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: README.md
================================================
精简版ETL数据转换工具
==========
## 功能
1.数据同步(目前只支持MySQL)
2.执行SQL脚本 (后期开发)
3.定时执行
## 安装(两种方法)
### 1.Docker 镜像安装
#### 系统已打包成docker镜像,3个多G,可以直接使用
1.1 拉取镜像
docker pull hanson001/first_blood:v1
1.2 运行容器
docker run -itd --privileged -p 10028:22 -p 9028:9000 -p 3328:3306 -p 8080:8080 --name FirstBlood_test hanson001/first_blood:v1 /usr/sbin/init
10028映射centos系统ssh连接端口,用户名root 密码 123.com
3328映射MySQL数据库端口,用户名root 密码123.com
8080映射内部websocket端口
9028映射内部系统访问端口,登录地址:http://服务器IP:9028 用户名 admin 密码123456.com
### 2.源码配置安装
#### 准备环境
1.python 环境 2.7
2.安装requirements.txt里的依赖包,若运行时还报缺少模块的错误,再安装缺少的模块。
3.创建日志目录
cd <项目路径/FirstBlood>
mkdir log
#### 建表
整个项目所需要的表,关于用户认证、权限控制等等使用django自带的,而项目其它功能模块使用原生SQL语句创建。
涉及到项目功能模块增删改查,全部使用原生SQL语句。
涉及到用户认证、权限控制等等,使用Django的orm。
1.create_table.sql 执行建表语句,创建项目中各个模块所需要的表
2.执行 python manage.py migrate,创建项目用户认证、权限控制所需要的表(Django自带)
#### 运行其它服务
由于项目使用到定时任务和异步实时查询日志功能,所以需要使用到celery和websockted。
这两个服务的启动和运行全部交给supervisord托管,所以需要配置好supervisord配置文件。
supervisord配置文件分两个,dev为开发环境的配置文件,pro为生产环境的配置文件。
里面的路径需要根据自己实际的环境配置。
1.配置完成后执行以下命令,启动celery和websocketed服务
supervisord -c 项目路径/FirstBlood/supervisord/FirstBlood_pro.conf (开发环境使用FirstBlood_dev.conf文件)
2.根据配置文件里的日志路径查看是否报错,有报错百度、Google。
如果错误不影响功能的使用,则忽略。
#### settings配置文件
由于项目的settings配置文件,需要根据开发环境、生产环境来连接不同环境的数据库,所以需要在开发环境添加变量。
settings文件里就可以通过development_environment变量,选择是连接生产数据库,还是开发环境数据库。
1.1 修改 bash_profile 文件
vim ~/.bash_profile
# 程序根据不同环境变量加载测试或生产的配置文件
development_environment=1
export development_environment
#### 下载阿里开发数据同步工具datax
5.1 下载datax工具,放在项目目录下
项目路径/FirstBlood/datax
5.2 创建日志目录
由于项目的数据同步,底层使用的datax,而datax产生的日志文件名是固定长度,且以datax的 json配置文件名命名,
当配置文件名超过日志文件名的固定长度时,datax将会以固定长度截取配置文件名,来命名日志文件名称。所以无法以
datax的自身的日志来实时显示同步日志。所以需要新创建日志目录 web_log,以任务ID+13位时间戳命名日志文件名,
将datax产生的日志导入web_log目录里。
操作命令:
cd 项目路径/FirstBlood/datax/
mkdir web_log
## 启动
以上步骤执行完成后,就可以运行项目。若有错误,百度Google。
## 使用
大部分功能操作参照博客
databaseinfo 表,需要预先导入生产库的表信息
================================================
FILE: batch_job/__init__.py
================================================
================================================
FILE: batch_job/admin.py
================================================
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
# Register your models here.
================================================
FILE: batch_job/apps.py
================================================
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class BatchJobConfig(AppConfig):
    """Django app configuration for the batch_job application."""
    name = 'batch_job'
================================================
FILE: batch_job/conf/__init__.py
================================================
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# @Function:
# @Time : 2018/7/20 15:34
# @Author : Hanson
# @Email : 229396865@qq.com
# @File : __init__.py.py
# @Software: PyCharm
# @Company : 东方银谷
================================================
FILE: batch_job/conf/config.py
================================================
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# @Function: configuration (SQL templates) for the batch-job module
# @Time    : 2018/7/20 15:35
# @Author  : Hanson
# @Email   : @qq.com
# @File    : config.py
# @Software: PyCharm
# @Company : 东方银谷
# NOTE(review): every template below is filled in with Python %-formatting
# instead of parameterized queries.  Callers must interpolate only trusted
# values, otherwise these statements are SQL-injectable.
# Maximum number of tasks per worker process.
maxtasksperchild = 20
# Look up a batch job by name (duplicate-name check on create).
query_batch_job_by_name_sql = "SELECT * FROM FirstBlood.batch_job bj WHERE bj.`name` = '%s';"
# Look up a batch job by name excluding a given id (duplicate check on update).
query_batch_job_sql1 = "SELECT * FROM FirstBlood.batch_job bj WHERE bj.`name` = '%s' and bj.id!=%s;"
# Insert a new batch_job row.
insert_batch_job_sql = "insert into FirstBlood.`batch_job` (`name`, `description`) values ('%s', '%s');"
# Prefix of a multi-row insert; callers append the "(v1, v2, v3), ..." tuples.
insert_batch_job_details_sql = "insert into FirstBlood.`batch_job_details` (`batch_job_id`, `subjob_id`, `type`) values"
# All batch jobs joined with their djcelery schedule; the join key is the
# PeriodicTask args field, which stores "[<batch_job_id>]".
query_batch_job_sql2 = """
SELECT
bj.*,
dp.task,
dp.enabled,
concat(dc.`minute`,' ',dc.`hour`,' ',dc.day_of_week,' ',dc.day_of_month, ' ',dc.month_of_year, ' (m/h/d/dM/MY)') crontab
FROM
FirstBlood.batch_job bj
LEFT JOIN FirstBlood.djcelery_periodictask dp ON CONCAT("[",bj.id,"]")=dp.args
LEFT JOIN FirstBlood.djcelery_crontabschedule dc on dp.crontab_id=dc.id
ORDER BY bj.id
"""
# One batch job (by id) joined with its schedule, including crontab_id.
query_batch_job_sql3 = """
SELECT
bj.*,
dp.task,
dp.enabled,
dp.crontab_id,
concat(dc.`minute`,' ',dc.`hour`,' ',dc.day_of_week,' ',dc.day_of_month, ' ',dc.month_of_year, ' (m/h/d/dM/MY)') crontab
FROM
FirstBlood.batch_job bj
LEFT JOIN FirstBlood.djcelery_periodictask dp ON CONCAT("[",bj.id,"]")=dp.args
LEFT JOIN FirstBlood.djcelery_crontabschedule dc on dp.crontab_id=dc.id
WHERE bj.id = %s
"""
# Sub-jobs of one batch job, joined with the datax job they point at.
query_batch_job_sub_job_by_id_sql = """
SELECT
bjd.*,
dj.`name`,
dj.description
FROM
FirstBlood.batch_job_details bjd
LEFT JOIN FirstBlood.datax_job dj on bjd.subjob_id=dj.id
WHERE
bjd.batch_job_id = %s
"""
# Rename/re-describe a batch job.
update_batch_job_by_id_sql = """
update FirstBlood.batch_job set
`name` = '%s',
`description` = '%s'
where
id = %s
"""
# Remove all sub-job rows of a batch job (used before re-inserting on update).
delete_batch_job_details_by_id_sql = """
delete from FirstBlood.batch_job_details where batch_job_id =%s;
"""
# Open an execution-history row for a batch-job run.
insert_batch_job_instance_sql = """
insert into FirstBlood.batch_job_instance (
`instance_id`,
`name`,
`description`,
`trigger_mode`
)
values (%s, '%s', '%s', %s);
"""
# Close an execution-history row with its final status/result/end time.
update_batch_job_instance_by_id_sql = """
update FirstBlood.batch_job_instance set `status`=%s, `result`=%s, `end_time`='%s' where instance_id=%s;
"""
# Link a sub-job run to its parent batch-job run.
insert_batch_job_instance_details_sql = """
insert into FirstBlood.batch_job_instance_details (
`instance_id`,
`subjob_instance_id`,
`type`
)
values (%s, %s, %s);
"""
select_batch_job_instance_sql = "select * from FirstBlood.batch_job_instance bji"
count_batch_job_instance_sql = "select count(1) count from FirstBlood.batch_job_instance bji"
select_batch_job_instance_by_id_sql = "select * from FirstBlood.batch_job_instance bji where bji.instance_id=%s"
# Fetch the sub-job runs of type "data sync" (type=1) for one batch-job run.
select_sub_job_datax_instance_by_id_sql = """
select
bjid.instance_id,
bjid.subjob_instance_id,
bjid.type,
dji.`name`,
dji.description,
dji.trigger_mode,
dji.`status`,
dji.result,
dji.start_time,
dji.end_time
from
FirstBlood.batch_job_instance_details bjid
LEFT JOIN FirstBlood.datax_job_instance dji on bjid.subjob_instance_id=dji.instance_id
where
bjid.instance_id=%s and bjid.type=1
"""
================================================
FILE: batch_job/models.py
================================================
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# Create your models here.
class BatchJobPermission(models.Model):
    """Field-less model that only carries the custom batch-job permissions.

    Django's auth framework creates the ``viewBatchJob``/``editBatchJob``
    permissions from ``Meta.permissions`` during migration; no table rows
    are ever stored in ``batch_job_permission`` itself.
    """
    class Meta:
        db_table = 'batch_job_permission'
        permissions = (
            ("viewBatchJob", u"查看批处理作业"),
            # Fixed typo in the display name: 祖业 -> 作业 ("batch job").
            ("editBatchJob", u"修改批处理作业")
        )
================================================
FILE: batch_job/tests.py
================================================
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
# Create your tests here.
import requests
import json
import pprint
# url1 = 'http://httpbin.org/get?name=gemey&age=22'
# url2 = 'http://192.168.190.132:9000/batch_job/get_batch_job_instance/?username=admin&password=123456.com&limit=10&offset=0&name=test2&description=&status=&result=&trigger_mode='
url3 = 'http://172.24.132.144:9000/batch_job/get_batch_job_instance/?limit=10&offset=0&name=big_data&description=&status=&result=&trigger_mode='
if __name__ == '__main__':
    # Bug fix: this request used to run at module import time, so merely
    # importing the module (e.g. during `manage.py test` discovery) fired a
    # live HTTP call against a hard-coded internal host.  Guarding it under
    # __main__ keeps the manual smoke check runnable without the side effect.
    response = requests.get(url3)
    pprint.pprint(json.loads(response.text))
================================================
FILE: batch_job/urls.py
================================================
# -*- coding: UTF-8 -*-
from django.conf.urls import url
# Python 2 implicit-relative import of batch_job/views.py
import views
urlpatterns = [
    # Examples:
    # url(r'^$', 'YinguOnline.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^index/$', views.index),  # batch-job home page
    url(r'^add_batch_job/$', views.add_batch_job),  # create a batch job
    url(r'^update_batch_job/(?P<id>\d+)/$', views.update_batch_job),  # update a batch job
    url(r'^batch_job_instance/$', views.batch_job_instance),  # execution history
    url(r'^batch_job_instance_details/(?P<id>\d+)/$', views.batch_job_instance_details),  # execution-history details
    url(r'^get_task_template/$', views.get_task_template),  # fetch task templates
    url(r'^get_crontab/$', views.get_crontab),  # fetch crontab entries
    url(r'^add_crontab/$', views.add_crontab),  # add a crontab schedule
    url(r'^add_batch_job_data/$', views.add_batch_job_data),  # submit create/update data
    # url(r'^add_job_data/$', views.add_job_data),
    url(r'^get_batch_job_data/$', views.get_batch_job_data),  # list batch jobs
    url(r'^get_batch_job_data_by_id/$', views.get_batch_job_data_by_id),  # one batch job by id
    url(r'^get_batch_job_sub_job_by_id/$', views.get_batch_job_sub_job_by_id),  # sub-jobs of a batch job
    url(r'^get_batch_job_instance/$', views.get_batch_job_instance),  # list batch-job instances
    url(r'^get_batch_job_instance_data_by_id/$', views.get_batch_job_instance_data_by_id),  # one instance by id
    url(r'^get_batch_job_sub_job_instance_data_by_id/$',
        views.get_batch_job_sub_job_instance_data_by_id),  # sub-job instances of one run
    url(r'^run_batch_job_task/$', views.run_batch_job_task),  # run a batch job manually
    # url(r'^get_database/$', views.get_database),
]
================================================
FILE: batch_job/views.py
================================================
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.shortcuts import render
from django.http import HttpResponse
from functools import wraps
from controller.core.public import (Currency, DatetimeHelp)
from controller.core import query_sql
from controller.public import dataconn
from djcelery import loaders
from djcelery.models import PeriodicTask, CrontabSchedule
from djcelery.schedulers import ModelEntry
# from multiprocessing import Manager,Pool
import multiprocessing as mp
from anyjson import loads, dumps
from celery import shared_task
from celery import registry
from celery import schedules
from conf import config
from datax_web.conf import config as datax_web_config
from datax_web.views import run as datax_web_run
from datax_web.views import Datax
from datax_web.views import JobData as DataxJobData
import commands
import logging
import sys
import json
reload(sys)
# Python 2 legacy hack: force the default codec to UTF-8 so implicit
# str<->unicode conversions of Chinese text do not raise UnicodeDecodeError.
sys.setdefaultencoding("utf-8")
# Create your views here.
logger = logging.getLogger('batch_job')
# Template of a successful validation result; always .copy() before mutating.
_SUCCESS = dict(status=0, msg=u'检测成功')
# Both Python 2 string types, for isinstance checks.
_str = (str, unicode)
# Operation type: 1 = create batch job, 2 = update batch job
_OPERATION_TYPE = (1, 2)
# Trigger mode: 1 = automatic (scheduled), 2 = manual
_TRIGGER_MODE = (1, 2)
# Human-readable form of the trigger-mode contract, used in error messages.
_TRIGGER_MODE_STR = u'TRIGGER_MODE = (1, 2) # 1:自动 2:手动'
# Sub-job type: 1 = data sync, 2 = SQL script, 3 = backup (reserved for later)
_SUBJOB_TYPE = (1, 2, 3)
# Execution status: 0 = running, 1 = finished
_STATUS = (0, 1)
def verification(CheckClass):
    """Decorator factory that validates a request before the view runs.

    ``CheckClass`` is instantiated with the request and must expose a
    ``total_check()`` method returning a dict with a ``status`` key.  A
    truthy status short-circuits the view and returns the check result as
    JSON; a falsy status lets the wrapped view execute normally.
    """
    def decorator(view_func):
        @wraps(view_func)
        def _wrapped_view(request, *args, **kwargs):
            checker = CheckClass(request)
            outcome = checker.total_check()
            if not outcome['status']:
                # Validation passed: run the real view.
                return view_func(request, *args, **kwargs)
            # Validation failed: report the check result instead.
            failure = HttpResponse()
            failure.write(json.dumps(outcome))
            return failure
        return _wrapped_view
    return decorator
class BatchJobData(object):
    """Builds and executes the SQL needed to create/update a batch job.

    Used when a batch job is created, updated, or run manually; ``data`` is
    the dict submitted from the batch-job page.

    NOTE(review): every SQL statement is assembled with %-formatting from
    form input — assumed to be trusted admin input; parameterized queries
    would be safer.
    """
    def __init__(self, data):
        # ``_id`` is the batch_job primary key (None when creating a new job).
        self._batch_job_id = data.get('_id', None)
        self.name = data.get('name', '')
        self.description = data.get('description', '')
        self.task_template = data.get('task_template', '')
        self.is_enable = data.get('is_enable', '')
        self.crontab = data.get('crontab', '')
        self.batch_job_details = data.get('batch_job_details', [])
        self.trigger_mode = data.get('trigger_mode', '')
        self.operation_type = data.get('operation_type', '')
        self.dtconn = dataconn.DatabaseConnection(logger)
        self.dtsf = dataconn.DataTransform()
        self.dh = DatetimeHelp()
        # Timestamp captured once at construction time.
        self.__timestamp1 = self.dh.timestamp1
    @property
    def timestamp1(self):
        # Construction-time timestamp (read-only).
        return self.__timestamp1
    def _get_schedule_dict(self):
        # Build the kwargs for creating/updating a djcelery PeriodicTask from
        # the submitted crontab id and task template.
        schedule = CrontabSchedule.objects.get(pk=self.crontab)
        return {
            'crontab': schedule,
            'kwargs': dumps({}),
            'task': self.task_template,
            'enabled': self.is_enable,
            'name': self.name
        }
    @property
    def schedule_dict(self):
        return self._get_schedule_dict()
    @property
    def batch_job_id(self):
        """batch_job primary key.

        When creating a job this is set from the insert's auto-increment id
        (see ``create``); when updating or running manually it is the id
        submitted from the page.
        """
        return self._batch_job_id
    @batch_job_id.setter
    def batch_job_id(self, batch_job_id):
        self._batch_job_id = batch_job_id
    def get_insert_datax_job_sql(self):
        # INSERT statement for a new batch_job row.
        # (Name says "datax" but it targets the batch_job table.)
        return config.insert_batch_job_sql % (self.name, self.description)
    def get_update_batch_job_by_id_sql(self):
        # UPDATE statement for batch_job by id.
        return config.update_batch_job_by_id_sql % (self.name, self.description, self.batch_job_id)
    def get_insert_batch_job_details_sql(self):
        # Multi-row INSERT for batch_job_details, e.g.
        # insert into ... values ('30', 28, 1), ('30', 29, 1)
        values_list = ["('%s', %s, %s)" % (self.batch_job_id, subjob['subjob_id'], subjob['type']) for subjob in self.batch_job_details]
        return config.insert_batch_job_details_sql + ','.join(values_list)
    def get_delete_batch_job_details_by_id_sql(self):
        # DELETE statement for this batch job's rows in batch_job_details.
        return config.delete_batch_job_details_by_id_sql % self.batch_job_id
    def create(self):
        # Insert the batch_job row plus its detail rows in one transaction.
        # Returns _SUCCESS, or dict(status=500, msg=...) on failure.
        result = _SUCCESS.copy()
        sql1 = self.get_insert_datax_job_sql()
        self.dtconn.ygol.transaction_start()
        self.dtconn.ygol.transaction_execute(sql1)
        # ``status`` is truthy when the DB helper recorded an error.
        if self.dtconn.ygol.status:
            msg = u'batch_job表里创建新的任务,SQL:%s 插入数据失败。 - Msg: %s' % \
                  (sql1, self.dtconn.ygol.msg)
            logger.error(msg)
            result = dict(status=500, msg=msg)
        else:
            # Use the new row's auto-increment id for the detail rows.
            self.batch_job_id = self.dtconn.ygol.insert_id
            sql2 = self.get_insert_batch_job_details_sql()
            self.dtconn.ygol.transaction_execute(sql2)
            if self.dtconn.ygol.status:
                msg = u'创建批处理作业详情,SQL:%s 插入数据失败。 - Msg: %s' % \
                      (sql2, self.dtconn.ygol.msg)
                logger.error(msg)
                result = dict(status=500, msg=msg)
        self.dtconn.ygol.transaction_commit_and_close()
        return result
    def create_PeriodicTask(self):
        """Create the djcelery PeriodicTask for this batch job.

        The batch_job id is stored in the task's ``args`` field (as "[id]"),
        which is how a batch job and its schedule are linked together.
        """
        obj = PeriodicTask.objects.create(**self.schedule_dict)
        obj.args = dumps([self.batch_job_id])
        obj.save()
        return obj
    def update(self):
        # Update the batch_job row, then replace its detail rows
        # (delete + re-insert) in one transaction.
        result = _SUCCESS.copy()
        sql1 = self.get_update_batch_job_by_id_sql()
        self.dtconn.ygol.transaction_start()
        self.dtconn.ygol.transaction_execute(sql1)
        if self.dtconn.ygol.status:
            msg = u'batch_job表,SQL:%s 更新数据失败。 - Msg: %s' % \
                  (sql1, self.dtconn.ygol.msg)
            logger.error(msg)
            result = dict(status=500, msg=msg)
        else:
            sql2 = self.get_delete_batch_job_details_by_id_sql()
            sql3 = self.get_insert_batch_job_details_sql()
            self.dtconn.ygol.transaction_execute(sql2)
            self.dtconn.ygol.transaction_execute(sql3)
            if self.dtconn.ygol.status:
                msg = u'batch_job_details表里更新列 - SQL2:%s - SQL3: %s -' \
                      u' 更新数据失败。 - Msg: %s' % \
                      (sql2, sql3, self.dtconn.ygol.msg)
                logger.error(msg)
                result = dict(status=500, msg=msg)
        self.dtconn.ygol.transaction_commit_and_close()
        return result
    def update_PeriodicTask(self):
        # Update the PeriodicTask linked to this batch job via its args field.
        obj = PeriodicTask.objects.get(args="[%s]" % self.batch_job_id)
        for k, v in self.schedule_dict.items():
            setattr(obj, k, v)
        obj.save()
        return obj
    def get_batch_job_by_id(self, batch_job_id):
        # Fetch one batch job (joined with its schedule) as a display dict.
        sql = config.query_batch_job_sql3 % batch_job_id
        source_data = self.dtconn.ygol.getsingle(sql)
        return self.dtsf.get_row_by_dict_to_user(source_data)
    def get_batch_job_details_by_id(self, batch_job_id):
        # Fetch the sub-jobs (batch_job_details rows) of one batch job.
        sql = config.query_batch_job_sub_job_by_id_sql % batch_job_id
        source_data = self.dtconn.ygol.getall(sql)
        return map(self.dtsf.get_row_by_dict_to_user, source_data)
class BatchJobInstanceData(object):
    """Handles batch-job *instance* (execution-history) rows.

    ``data`` is the job data submitted from the batch-job update page, e.g.::

        {u'_id': u'30',
         u'batch_job_details': [{u'batch_job_id': u'30',
                                 u'create_time': u'2018-07-24 21:18:31',
                                 u'description': u'...',
                                 u'id': u'37',
                                 u'modify_time': u'2018-07-24 21:18:31',
                                 u'name': u'test',
                                 u'subjob_id': u'28',
                                 u'type': u'1'}],
         u'crontab': u'2',
         u'description': u'...',
         u'is_enable': False,
         u'name': u'test1',
         u'operation_type': 2,
         u'task_template': u'celery.chunks',
         u'trigger_mode': 2}

    ``_id`` is the primary key of the batch_job table.
    """
    def __init__(self, data):
        self._batch_job_id = data.get('_id', None)
        self.name = data.get('name', '')
        self.description = data.get('description', '')
        self._trigger_mode = data.get('trigger_mode', None)
        self.dtconn = dataconn.DatabaseConnection(logger)
        self.dtsf = dataconn.DataTransform()
        self.dh = DatetimeHelp()
        # 13-digit millisecond timestamp captured once at construction time.
        self.__timestamp1 = self.dh.timestamp1
        self._batch_job_instance_id = None
        # Derive the instance id from the batch-job id (see setter below).
        self.batch_job_instance_id = self._batch_job_id
    @property
    def batch_job_instance_id(self):
        """Batch-job instance id: the batch-job id concatenated with the
        construction timestamp, e.g. 30 + 1532522114566 -> 301532522114566."""
        return self._batch_job_instance_id
    @batch_job_instance_id.setter
    def batch_job_instance_id(self, batch_job_id):
        self._batch_job_instance_id = '%s%s' % (batch_job_id, self.__timestamp1)
    @property
    def trigger_mode(self):
        return self._trigger_mode
    @trigger_mode.setter
    def trigger_mode(self, val):
        # Reject values outside _TRIGGER_MODE (1=automatic, 2=manual); the
        # previous value is silently kept and the error is only logged.
        if val not in _TRIGGER_MODE:
            msg = u'触发模式值错误 - trigger_mode:%s - %s' % (val, _TRIGGER_MODE_STR)
            logger.error(msg)
        else:
            self._trigger_mode = val
    def get_insert_batch_job_instance_sql(self):
        # INSERT that opens this run's history row.
        return config.insert_batch_job_instance_sql % (
            self.batch_job_instance_id,
            self.name,
            self.description,
            self.trigger_mode
        )
    def get_update_batch_job_instance_by_id_sql(self, result):
        # UPDATE that closes the history row: status=finished, given result,
        # end_time=now.
        return config.update_batch_job_instance_by_id_sql % (
            _STATUS[1],
            result,
            self.dh.nowtimestrf1,
            self.batch_job_instance_id
        )
    @staticmethod
    def get_select_batch_job_instance_by_id_sql(batch_job_instance_id):
        return config.select_batch_job_instance_by_id_sql % batch_job_instance_id
    def get_batch_job_instance_by_id(self, batch_job_instance_id):
        # Fetch one instance row as a display dict.
        sql = self.get_select_batch_job_instance_by_id_sql(batch_job_instance_id)
        souce_data = self.dtconn.ygol.getsingle(sql)
        return self.dtsf.get_row_by_dict_to_user(souce_data)
    def start_log(self):
        # Open the execution-history row in batch_job_instance.
        sql = self.get_insert_batch_job_instance_sql()
        self.dtconn.ygol.insert(sql)
        if self.dtconn.ygol.status:
            logger.error(u'记录任务日志到batch_job_instance 失败 - SQL: %s - msg: %s' %
                         (sql, self.dtconn.ygol.msg))
    def record_result_log(self, result):
        # Close the execution-history row in batch_job_instance with the result.
        sql = self.get_update_batch_job_instance_by_id_sql(result)
        self.dtconn.ygol.update(sql)
        if self.dtconn.ygol.status:
            logger.error(u'记录任务执行结果 batch_job_instance 失败 - SQL: %s - msg: %s' %
                         (sql, self.dtconn.ygol.msg))
class BatchJobSubjobInstanceData(object):
    """
    Handling for one sub-job instance inside a batch job.

    ``data`` format (one element of ``batch_job_details``)::

        {u'batch_job_id': u'30',
         u'create_time': u'2018-07-24 21:18:31',
         u'description': u'...',
         u'id': u'37',
         u'modify_time': u'2018-07-24 21:18:31',
         u'name': u'test',
         u'subjob_id': u'28',
         u'type': u'1'}
    """
    def __init__(self, data):
        # Instance ids are assigned later through the property setters below.
        self._subjob_instance_id = None
        self._batch_job_instance_id = None
        self.subjob_id = data.get('subjob_id')
        self.type = data.get('type')
        self.dtconn = dataconn.DatabaseConnection(logger)
        self.dtsf = dataconn.DataTransform()
        self.dh = DatetimeHelp()
        self.__timestamp1 = self.dh.timestamp1

    @property
    def subjob_instance_id(self):
        # Instance id of the concrete sub-job run (e.g. the datax job instance id).
        return self._subjob_instance_id

    @subjob_instance_id.setter
    def subjob_instance_id(self, subjob_instance_id):
        self._subjob_instance_id = subjob_instance_id

    @property
    def batch_job_instance_id(self):
        # Id of the enclosing batch-job instance.
        return self._batch_job_instance_id

    @batch_job_instance_id.setter
    def batch_job_instance_id(self, batch_job_instance_id):
        self._batch_job_instance_id = batch_job_instance_id

    def get_insert_batch_job_instance_details_sql_sql(self, _type):
        # Build the INSERT for batch_job_instance_details.
        # NOTE(review): the doubled "_sql_sql" looks like a naming typo, but it
        # is public API — renaming would break callers.
        return config.insert_batch_job_instance_details_sql % (
            self.batch_job_instance_id,
            self.subjob_instance_id,
            _type
        )

    def start_subjob_log(self, _type):
        # Record the sub-job start in batch_job_instance_details; ygol.status
        # is truthy when the INSERT failed.
        sql = self.get_insert_batch_job_instance_details_sql_sql(_type)
        self.dtconn.ygol.insert(sql)
        if self.dtconn.ygol.status:
            logger.error(u'记录子任务日志到batch_job_instance_details 失败 - SQL: %s - msg: %s' %
                         (sql, self.dtconn.ygol.msg))

    @classmethod
    def run_sub_job(cls, _type, subjob_id, results, batch_job_instance_id, trigger_mode):
        """
        Execute one sub-job of a batch job.

        :param _type: sub-job type (_SUBJOB_TYPE[0] == data sync / datax)
        :param subjob_id: sub-job id
        :param results: shared list collecting each sub-job result
                        (0 = success, 1 = failure)
        :param batch_job_instance_id: enclosing batch-job instance id
        :param trigger_mode: how the run was triggered
        :return: None
        """
        result = None
        if _type == _SUBJOB_TYPE[0]:
            # Assemble the datax job definition and its writer columns.
            dataxjd = DataxJobData({})
            dataxjob_data = dataxjd.get_job_data_by_id(subjob_id)
            dataxjob_writer_column = dataxjd.get_datax_job_writer_column_by_id(subjob_id)
            writer_column_id = [dt['name'] for dt in dataxjob_writer_column]
            dataxjob_data['_id'] = subjob_id
            dataxjob_data['trigger_mode'] = trigger_mode
            dataxjob_data['writer_column_id'] = writer_column_id
            bjsid = cls(dataxjob_data)
            dx = Datax(dataxjob_data)
            dx.jd.create_file(dx.job_json_file, dx.get_job_json())
            dx.jd.start_log()  # log the datax_job instance run
            bjsid.batch_job_instance_id = batch_job_instance_id
            bjsid.subjob_instance_id = dx.jd.datax_job_instance_id
            bjsid.start_subjob_log(_type)  # log the sub-job run
            # commands is Python-2 only; a non-zero exit status means failure.
            (status, output) = commands.getstatusoutput(dx.cmd)
            result = 1 if status else 0
            dx.jd.record_result_log(result)
            results.append(result)

    @staticmethod
    def get_select_sub_job_datax_instance_by_id_sql(batch_job_instance_id):
        # SELECT for the datax sub-job instances of one batch-job instance.
        return config.select_sub_job_datax_instance_by_id_sql % batch_job_instance_id

    @staticmethod
    def get_sub_job_datax_instance_data_by_id(batch_job_instance_id):
        # Fetch datax sub-job instance rows; map() yields a list on Python 2.
        dtconn = dataconn.DatabaseConnection(logger)
        dtsf = dataconn.DataTransform()
        sql = BatchJobSubjobInstanceData.get_select_sub_job_datax_instance_by_id_sql(batch_job_instance_id)
        source_data = dtconn.ygol.getall(sql)
        return map(dtsf.get_row_by_dict_to_user, source_data)
class CheckBatchJob(object):
    """
    Validate the data submitted when adding, updating or manually running
    a batch job.

    Each ``check_*`` method sets ``self.result`` to an error dict on
    failure; ``total_check`` runs them in a fixed order and stops at the
    first failure.

    result format: {'status': 1, 'msg': u'操作类型错误'}; status 0 means OK.
    """
    _SUCCESS = _SUCCESS.copy()
    _OPERATION_TYPE_ERROR1 = dict(status=1, msg=u'操作类型不能为空')
    _OPERATION_TYPE_ERROR2 = dict(status=2, msg=u'操作类型错误')
    _DESCRIPTION_ERROR1 = dict(status=3, msg=u'任务描述不能为空')
    _NAME_ERROR1 = dict(status=4, msg=u'任务名称不能为空')
    _NAME_ERROR2 = dict(status=5, msg=u'任务名称已存在')
    # NOTE(review): both task-template errors share status=6 — looks like a
    # copy-paste slip; kept as-is because the frontend may match on it.
    _TASK_TEMPLATE_ERROR1 = dict(status=6, msg=u'任务模板不能为空')
    _TASK_TEMPLATE_ERROR2 = dict(status=6, msg=u'任务模板不存在')
    _IS_ENABLE_ERROR1 = dict(status=7, msg=u'是否启用值错误')
    _BATCH_JOB_DETAILS_ERROR1 = dict(status=8, msg=u'批处理作业详情不能为空')
    _BATCH_JOB_DETAILS_ERROR2 = dict(status=9, msg=u'批处理作业详情,子作业 %s %s 类型错误')
    _BATCH_JOB_DETAILS_ERROR3 = dict(status=10, msg=u'批处理作业详情,子作业 %s %s ID %s 不存在')
    _TRIGGER_MODE_ERROR1 = dict(status=11, msg=u'触发模式 不存在')
    _TRIGGER_MODE_ERROR2 = dict(status=12, msg=u'触发模式值错误')
    _CRONTAB_ERROR = dict(status=13, msg=u'执行时间错误')
    _BATCH_JOB_ID_ERROR1 = dict(status=14, msg=u'batch_job_id 不能为空')
    _BATCH_JOB_ID_ERROR2 = dict(status=15, msg=u'batch_job_id 不存在')

    def __init__(self, request):
        cur = Currency(request)
        data = cur.rq_post_json('data')
        self.dtconn = dataconn.DatabaseConnection(logger)
        self.jd = BatchJobData(data)
        self.error_msg = []
        self.result = self._SUCCESS

    def check_operation_type(self):
        # Operation type must be present and one of _OPERATION_TYPE.
        operation_type = self.jd.operation_type
        if not operation_type:
            self.result = self._OPERATION_TYPE_ERROR1
        elif operation_type not in _OPERATION_TYPE:
            self.result = self._OPERATION_TYPE_ERROR2

    def check_name_by_operation_type(self):
        # Name uniqueness check; on update the job itself is excluded from
        # the duplicate search.
        name = self.jd.name
        if self.jd.operation_type == _OPERATION_TYPE[0]:
            sql = config.query_batch_job_by_name_sql % name
            self.check_name(name, sql)
        if self.jd.operation_type == _OPERATION_TYPE[1]:
            sql = config.query_batch_job_sql1 % (name, self.jd.batch_job_id)
            self.check_name(name, sql)

    def check_name(self, name, sql):
        """Shared name check for add/update/run: non-empty and not taken."""
        if name:
            data = self.dtconn.ygol.getsingle(sql)
            if self.dtconn.ygol.status:
                _msg = u'检测任务名称时数据库错误。 - Msg: %s' % self.dtconn.ygol.msg
                logger.error(_msg)
                self.result = dict(status=500, msg=_msg)
            elif data:
                self.result = self._NAME_ERROR2
        else:
            self.result = self._NAME_ERROR1

    def check_description(self):
        # Description is mandatory.
        if not self.jd.description:
            self.result = self._DESCRIPTION_ERROR1

    def check_task_template(self):
        # The celery task template must be present and registered.
        loaders.autodiscover()
        tasks = list(sorted(registry.tasks.regular().keys()))
        if self.jd.task_template:
            if self.jd.task_template not in tasks:
                # BUGFIX: the two error dicts were swapped — an unknown
                # template reported "不能为空" and an empty one "不存在".
                self.result = self._TASK_TEMPLATE_ERROR2
        else:
            self.result = self._TASK_TEMPLATE_ERROR1

    def check_is_enable(self):
        # is_enable must be a real boolean.
        if not isinstance(self.jd.is_enable, bool):
            self.result = self._IS_ENABLE_ERROR1

    def check_crontab(self):
        # The submitted crontab value must be an existing CrontabSchedule id.
        crontab = self.jd.crontab
        crons = CrontabSchedule.objects.values('id')
        try:
            # int() instead of the Python-2-only long(); valid on 2 and 3.
            if int(crontab) not in [c['id'] for c in crons]:
                self.result = self._CRONTAB_ERROR
        except Exception:
            # Missing or non-numeric value.
            self.result = self._CRONTAB_ERROR

    def check_batch_job_details(self):
        """
        Validate the sub-job list: first each sub-job's type, then that the
        referenced sub-job actually exists.
        """
        data = self.jd.batch_job_details
        if data:
            for dt in data:
                try:
                    _type = int(dt['type'])
                except Exception:
                    msg = self._BATCH_JOB_DETAILS_ERROR2.get('msg') % (dt['name'], dt['description'])
                    status = self._BATCH_JOB_DETAILS_ERROR2.get('status')
                    self.result = dict(status=status, msg=msg)
                    break
                if _type not in _SUBJOB_TYPE:
                    msg = self._BATCH_JOB_DETAILS_ERROR2.get('msg') % (dt['name'], dt['description'])
                    status = self._BATCH_JOB_DETAILS_ERROR2.get('status')
                    self.result = dict(status=status, msg=msg)
                    break
                msg = self._BATCH_JOB_DETAILS_ERROR3.get('msg') % (dt['name'], dt['description'], dt['subjob_id'])
                status = self._BATCH_JOB_DETAILS_ERROR3.get('status')
                # Type _SUBJOB_TYPE[0]: data synchronisation (datax) sub-job.
                if _type == _SUBJOB_TYPE[0]:
                    sql = datax_web_config.query_datax_job_by_id_sql % dt['subjob_id']
                    # Renamed from `data`, which shadowed the list being iterated.
                    row = self.dtconn.ygol.getsingle(sql)
                    if self.dtconn.ygol.status:
                        _msg = u'检测datax_job_id 错误 - SQL: %s。 - Msg: %s' % (sql, self.dtconn.ygol.msg)
                        logger.error(_msg)
                        self.result = dict(status=500, msg=_msg)
                    elif not row:
                        self.result = dict(status=status, msg=msg)
        else:
            self.result = self._BATCH_JOB_DETAILS_ERROR1

    def check_batch_job_id(self):
        # On update the batch_job_id must be a digit string naming an
        # existing batch_job row. `_str` is the module-level string-type
        # alias defined earlier in this file.
        if self.jd.operation_type == _OPERATION_TYPE[1]:
            _id = self.jd.batch_job_id
            if isinstance(_id, _str) and _id and _id.isdigit():
                sql = config.query_batch_job_sql3 % _id
                data = self.dtconn.ygol.getsingle(sql)
                if self.dtconn.ygol.status:
                    _msg = u'检测batch_job_id 错误 - SQL: %s。 - Msg: %s' % (sql, self.dtconn.ygol.msg)
                    logger.error(_msg)
                    self.result = dict(status=500, msg=_msg)
                elif not data:
                    self.result = self._BATCH_JOB_ID_ERROR2
            else:
                self.result = self._BATCH_JOB_ID_ERROR1

    def check_trigger_mode(self):
        # Trigger mode is only validated on update/run.
        trigger_mode = self.jd.trigger_mode
        if self.jd.operation_type == _OPERATION_TYPE[1]:
            if not trigger_mode:
                self.result = self._TRIGGER_MODE_ERROR1
            elif trigger_mode not in _TRIGGER_MODE:
                self.result = self._TRIGGER_MODE_ERROR2

    def total_check(self):
        """Run every check in order; stop at the first non-zero status."""
        check_func = ['check_operation_type', 'check_name_by_operation_type',
                      'check_description', 'check_trigger_mode', 'check_task_template',
                      'check_is_enable', 'check_crontab', 'check_batch_job_details',
                      'check_batch_job_id'
                      ]
        for func_name in check_func:
            getattr(self, func_name)()
            if self.result['status']:
                break
        return self.result
class BatchJobInstanceSql(object):
    # Query-building input for batch_job_instance paging.
    # (The original comment said "datax job instance" — presumably a
    # copy-paste from the datax module.)

    # Filterable columns of the bji (batch_job_instance) table; 'val' is
    # filled from the request's GET parameters in _set_table().
    _table_bji = {
        'name': {'data_type': 'str', 'val': ''},
        'description': {'data_type': 'str', 'val': ''},
        'status': {'data_type': 'str', 'val': ''},
        'result': {'data_type': 'str', 'val': ''},
        'trigger_mode': {'data_type': 'str', 'val': ''},
    }
    # Newest runs first.
    _order_by = [{'table': 'bji', 'field': 'start_time', 'rule': 'DESC'}]

    def __init__(self, request):
        self.cur = Currency(request)
        self.rq_get = self.cur.rq_get
        # bootstrap-table style paging parameters.
        self._offset = int(self.rq_get('offset'))
        self._limit = int(self.rq_get('limit'))
        self._SQL = config.select_batch_job_instance_sql
        self._TOTAL_SQL = config.count_batch_job_instance_sql
        self._set_table(self._table_bji)

    def _set_table(self, table):
        # Copy each field's filter value from the request into the table spec.
        for field, attr in table.items():
            val = self.rq_get(field)
            attr['val'] = val
        return table

    @property
    def tables(self):
        # Alias -> table spec mapping consumed by query_sql.Q_Sql.
        _tables = {'bji': self._table_bji}
        return _tables

    @property
    def cvtpara(self):
        # Parameter bundle for Q_Sql.
        # NOTE(review): 'order_rule' just repeats _order_by and is not read by
        # Q_Sql.__init__ (which only uses 'order_by') — likely dead weight.
        _cvtpara = {
            'offset': self._offset,
            'limit': self._limit,
            'sql': self._SQL,
            'total_sql': self._TOTAL_SQL,
            'order_by': self._order_by,
            'order_rule': self._order_by
        }
        return _cvtpara
class PaginatorBatchJobInstance(dataconn.DatabaseConnection, query_sql.Q_Data):
    # Paged data access: combines a DB connection with Q_Data's row/total logic.
    def __init__(self, qs):
        # super() only initialises DatabaseConnection along this MRO, so
        # Q_Data is initialised with an explicit direct call.
        super(PaginatorBatchJobInstance, self).__init__(logger)
        query_sql.Q_Data.__init__(self, qs)

    @property
    def rows(self):
        # Current page of rows, fetched through the `ygol` connection.
        return self._get_rows(self.ygol)

    @property
    def total(self):
        # Total row count (for the pagination widget).
        return self._get_total(self.ygol)
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def index(request):
    """Batch-job landing page."""
    # locals() passes every local (request, ...) to the template context.
    return render(request, 'batch_job/index.html', locals())


@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
@permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied)
def add_batch_job(request):
    """Page for creating a new batch job."""
    return render(request, 'batch_job/add_batch_job.html', locals())


@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def update_batch_job(request, id):
    """Page for editing the batch job identified by *id*."""
    return render(request, 'batch_job/update_batch_job.html', locals())


@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def batch_job_instance(request):
    """Batch-job execution-history page."""
    return render(request, 'batch_job/batch_job_instance.html', locals())


@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def batch_job_instance_details(request, id):
    """Execution-history page of the sub-jobs in one batch-job instance."""
    return render(request, 'batch_job/batch_job_instance_details.html', locals())
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_task_template(request):
    """
    Return the names of registered celery tasks usable as batch-job task
    templates as a JSON array, with internal/bookkeeping tasks filtered out.
    """
    irrelevant_tasks = ['FirstBlood.celery.debug_task',
                        'celery.backend_cleanup',
                        'celery.chain',
                        'celery.chord',
                        'celery.chord_unlock',
                        'celery.chunks',
                        'celery.group',
                        'celery.map',
                        'celery.starmap',
                        'run',
                        u'run_batch_job']
    loaders.autodiscover()
    response = HttpResponse()
    # Filter instead of list.remove(): remove() raises ValueError as soon as
    # one of the irrelevant tasks happens not to be registered.
    tasks = [t for t in sorted(registry.tasks.regular().keys())
             if t not in irrelevant_tasks]
    response.write(json.dumps(tasks))
    return response
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_crontab(request):
    """Return all CrontabSchedule rows as a JSON array of dicts."""
    response = HttpResponse()
    data = CrontabSchedule.objects.values()
    response.write(json.dumps(list(data)))
    return response


@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
@permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied)
def add_crontab(request):
    """
    Create a new crontab schedule from the posted 'data' JSON object
    (minute/hour/... fields) and echo the cleaned fields back as JSON.
    """
    response = HttpResponse()
    cur = Currency(request)
    rq_post = getattr(cur, 'rq_post')
    jdata = rq_post('data')
    data = json.loads(jdata)
    ndata = dict([(k, v.replace(' ', '')) for k, v in data.items()])  # Remove all spaces
    # schedules.crontab validates the field values; to_model_schedule
    # persists (or fetches) the matching CrontabSchedule row.
    crobj = schedules.crontab(**ndata)
    to_model_schedule = ModelEntry.to_model_schedule
    # NOTE(review): model_schedule/model_field are unused and the response
    # does not include the new row's id — the frontend presumably re-fetches
    # the list via get_crontab; confirm.
    model_schedule, model_field = to_model_schedule(crobj)
    response.write(json.dumps(ndata))
    return response
@login_required
@verification(CheckBatchJob)
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
@permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied)
def add_batch_job_data(request):
    """
    Create or update a batch job (the posted data has already been
    validated by the @verification(CheckBatchJob) decorator).
    """
    response = HttpResponse()
    cur = Currency(request)
    data = cur.rq_post_json('data')
    jd = BatchJobData(data)
    if jd.operation_type == _OPERATION_TYPE[0]:
        result = jd.create()  # new batch job
        jd.create_PeriodicTask()  # create the matching scheduled task
    else:
        result = jd.update()
        jd.update_PeriodicTask()
    response.write(json.dumps(result))
    return response
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_batch_job_data(request):
    """Return every batch job as a JSON array of user-facing dicts."""
    dtconn = dataconn.DatabaseConnection(logger)
    dtsf = dataconn.DataTransform()
    source_rows = dtconn.ygol.getall(config.query_batch_job_sql2)
    payload = [dtsf.get_row_by_dict_to_user(row) for row in source_rows]
    response = HttpResponse()
    response.write(json.dumps(payload))
    return response
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_batch_job_data_by_id(request):
    """
    Return one batch job (looked up by the posted '_id') as JSON.
    :param request: POST with '_id'
    :return: HttpResponse with the JSON-encoded row
    """
    cur = Currency(request)
    _id = cur.rq_post('_id')
    sql = config.query_batch_job_sql3 % _id
    dtconn = dataconn.DatabaseConnection(logger)
    dtsf = dataconn.DataTransform()
    source_data = dtconn.ygol.getsingle(sql)
    response = HttpResponse()
    response.write(json.dumps(dtsf.get_row_by_dict_to_user(source_data)))
    return response


@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_batch_job_sub_job_by_id(request):
    """
    Return the sub-jobs of one batch job (looked up by the posted '_id').

    Extension note (translated from the original): currently the only
    sub-job kind is data synchronisation.  Once SQL-script / backup kinds
    are added, the sub-job type must be inspected first and the details
    fetched from the table matching that type.

    :param request: POST with '_id'
    :return: HttpResponse with a JSON array of sub-job rows
    """
    cur = Currency(request)
    _id = cur.rq_post('_id')
    sql = config.query_batch_job_sub_job_by_id_sql % _id
    dtconn = dataconn.DatabaseConnection(logger)
    dtsf = dataconn.DataTransform()
    source_data = dtconn.ygol.getall(sql)
    data = [dtsf.get_row_by_dict_to_user(dt) for dt in source_data]
    response = HttpResponse()
    response.write(json.dumps(data))
    return response
@login_required
@verification(CheckBatchJob)
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
@permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied)
def run_batch_job_task(request):
    """Manually trigger a batch job: validate, then hand off to celery."""
    response = HttpResponse()
    cur = Currency(request)
    rq_post = getattr(cur, 'rq_post')
    jdata = rq_post('data')
    data = json.loads(jdata)
    # run_batch_job(**data)
    # The commented call above is the synchronous variant (debug aid).
    run_batch_job.delay(**data)
    response.write(json.dumps({'status': 0, 'msg': u'操作成功'}))
    return response


def _process_run_sub_job(_type, subjob_id, results, batch_job_instance_id, trigger_mode):
    # Module-level wrapper so multiprocessing can pickle the target; runs
    # one sub-job of a batch job inside a worker process.
    BatchJobSubjobInstanceData.run_sub_job(_type, subjob_id, results, batch_job_instance_id, trigger_mode)
@shared_task(name='run_batch_job')
def run_batch_job(**data):
    """
    Run a batch job asynchronously (manual trigger).

    :param data: the batch-job payload, e.g.::

        {u'_id': u'30',
         u'batch_job_details': [{u'batch_job_id': u'30',
                                 u'subjob_id': u'28',
                                 u'type': u'1', ...}],
         u'crontab': u'2',
         u'description': u'...',
         u'is_enable': False,
         u'name': u'test1',
         u'operation_type': 2,
         u'task_template': u'celery.chunks',
         u'trigger_mode': 2}

        _id: id of the batch_job row.
    :return: None
    """
    bjid = BatchJobInstanceData(data)
    bjid.start_log()
    curr_proc = mp.current_process()
    # Inside a celery worker the process is daemonic and may not spawn
    # children, so temporarily clear the flag, start the pool, then restore it.
    curr_proc.daemon = False
    manager = mp.Manager()
    results = manager.list()  # shared list of per-sub-job results
    # NOTE(review): Pool's first positional argument is the *number of worker
    # processes*; passing config.maxtasksperchild here looks like a misused
    # setting (maxtasksperchild is a different keyword) — confirm intent.
    p = mp.Pool(config.maxtasksperchild)
    curr_proc.daemon = True
    batch_job_details = data.get('batch_job_details')
    for sj in batch_job_details:
        _type = int(sj.get('type'))
        subjob_id = sj.get('subjob_id')
        p.apply_async(_process_run_sub_job, args=(_type, subjob_id, results,
                                                  bjid.batch_job_instance_id,
                                                  bjid.trigger_mode))
    p.close()
    p.join()
    # Overall result: 1 (failure) if any sub-job reported 1, else 0.
    batch_job_result = 0 if 1 not in results else 1
    bjid.record_result_log(batch_job_result)
@shared_task(name='batch_job_periodictask')
def batch_job_periodictask(batch_job_id):
    """
    Scheduled execution of a batch job.

    The job definition is loaded from the database by *batch_job_id*.

    :param batch_job_id: batch-job id (batch_job table id)
    :return: None
    """
    bjd = BatchJobData({})
    batch_job_data = bjd.get_batch_job_by_id(batch_job_id)
    bjid = BatchJobInstanceData(batch_job_data)
    # The DB row has no '_id' key, so re-assign through the property setter
    # to build a proper instance id (batch_job_id + timestamp).
    bjid.batch_job_instance_id = batch_job_id
    bjid.trigger_mode = _TRIGGER_MODE[0]  # trigger mode: automatic
    bjid.start_log()
    curr_proc = mp.current_process()
    # Inside a celery worker the process is daemonic and may not spawn
    # children, so temporarily clear the flag, start the pool, then restore it.
    curr_proc.daemon = False
    manager = mp.Manager()
    results = manager.list()  # shared list of per-sub-job results
    # NOTE(review): same mp.Pool(config.maxtasksperchild) oddity as in
    # run_batch_job — the value names a different Pool keyword; confirm.
    p = mp.Pool(config.maxtasksperchild)
    curr_proc.daemon = True
    batch_job_details = bjd.get_batch_job_details_by_id(batch_job_id)
    for sj in batch_job_details:
        _type = int(sj.get('type'))
        subjob_id = sj.get('subjob_id')
        p.apply_async(_process_run_sub_job, args=(_type, subjob_id, results,
                                                  bjid.batch_job_instance_id,
                                                  bjid.trigger_mode))
    p.close()
    p.join()
    # Overall result: 1 (failure) if any sub-job reported 1, else 0.
    batch_job_result = 0 if 1 not in results else 1
    bjid.record_result_log(batch_job_result)
# @login_required
# @permission_required('change.view_delivery', raise_exception=PermissionDenied)
# @permission_required('change.edit_delivery', raise_exception=PermissionDenied)
def get_batch_job_instance(request):
    """Paged query over batch-job instances: JSON {'rows': ..., 'total': ...}."""
    # SECURITY NOTE(review): the auth decorators above are commented out, so
    # this endpoint is reachable without login — confirm this is intentional.
    dsql = BatchJobInstanceSql(request)
    cvtpara = dsql.cvtpara
    tables = dsql.tables
    qs = query_sql.Q_Sql(cvtpara, **tables)
    pd = PaginatorBatchJobInstance(qs)
    response = HttpResponse()
    response.write(json.dumps({'rows': pd.rows, 'total': pd.total}))
    return response
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_batch_job_instance_data_by_id(request):
    """Return one batch-job instance (POST 'instance_id') as JSON."""
    cur = Currency(request)
    batch_job_instance_id = cur.rq_post('instance_id')
    bjid = BatchJobInstanceData({})
    data = bjid.get_batch_job_instance_by_id(batch_job_instance_id)
    response = HttpResponse()
    response.write(json.dumps(data))
    return response


@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_batch_job_sub_job_instance_data_by_id(request):
    """Return the sub-job instances of one batch-job instance (GET 'instance_id')."""
    cur = Currency(request)
    batch_job_instance_id = cur.rq_get('instance_id')
    # if _type == 1: data-sync sub-jobs have type 1 (only kind implemented).
    data = BatchJobSubjobInstanceData.get_sub_job_datax_instance_data_by_id(batch_job_instance_id)
    response = HttpResponse()
    response.write(json.dumps(data))
    return response
================================================
FILE: controller/__init__.py
================================================
================================================
FILE: controller/core/__init__.py
================================================
================================================
FILE: controller/core/access.py
================================================
# -*- coding: UTF-8 -*-
# Description:
# Author: 黄小雪
# Date: 2017年09月07日
# Company: 东方银谷
from public import *
from django.http import HttpResponse
from business_query.configuration.sqlList import *
from dtmt.query import DatabaseConnection
from functools import wraps
import json
import logging
def verification(check_class):
    """
    Decorator that validates the data a view receives before running it.

    ``check_class`` is instantiated with the request and must expose
    ``total_check()`` returning ``(status, error_msg)``; a truthy status
    short-circuits the view and returns the errors as JSON instead.

    NOTE(review): the check classes in this module return a (status, list)
    tuple, which unpacks correctly here.  CheckBatchJob.total_check in
    batch_job/views returns a *dict*, which would unpack into its keys —
    presumably batch_job uses its own verification; confirm.
    """
    def decorator(view_func):
        @wraps(view_func)
        def _wrapped_view(request, *args, **kwargs):
            response = HttpResponse()
            ccl = check_class(request)
            check_status, error_msg = ccl.total_check()
            if check_status:
                response.write(json.dumps({'status': check_status, 'msg': error_msg}))
                return response
            return view_func(request, *args, **kwargs)
        return _wrapped_view
    return decorator
class Check_IBQ(object):
    """
    Validate the payload of the bulk investment query.

    error_msg   collects every error message
    total_check runs all checks and returns (status, error_msg) where
                status is 1 on error, 0 when OK (consumed by the frontend JS).
    """
    def __init__(self, request):
        payload = Currency(request).rq_post('data')
        self.data = json.loads(payload)
        self.conf = investment_batch_query_conf
        self.error_msg = []

    def check_data(self):
        # At least one entry must look like a phone number (all digits),
        # and the batch may not exceed the configured maximum size.
        has_number = any(str(item).isdigit() for item in self.data)
        if not has_number:
            self.error_msg.append(u'请输入手机号')
        elif len(self.data) > self.conf['maxNum']:
            self.error_msg.append(u'每次查询量不能超过%s' % self.conf['maxNum'])

    def total_check(self):
        self.check_data()
        return (1 if self.error_msg else 0), self.error_msg
class Check_PCI(object):
    """
    Validate the query for customers of departed Puhui employees.

    error_msg   collects every error message
    total_check runs all checks and returns (status, error_msg) where
                status is 1 on error, 0 when OK (consumed by the frontend JS).
    """
    def __init__(self, request):
        cur = Currency(request)
        rq_post = getattr(cur, 'rq_post')
        jdata = rq_post('data')
        data = json.loads(jdata)
        self.logger = logging.getLogger('business_query')
        self.dc = DatabaseConnection(self.logger)
        self.data = data
        self.conf = puhuiCustomerInfoConf
        self.error_msg = []

    def check_data(self):
        # Unlike Check_IBQ this only requires non-empty entries (employee
        # ids need not be all digits), and caps the batch size.
        isdigit = [d for d in self.data if str(d)]
        if not isdigit:
            self.error_msg.append(u'请输入工号')
        else:
            if len(self.data) > self.conf['maxNum']:
                self.error_msg.append(u'每次查询量不能超过%s' % self.conf['maxNum'])

    def checkDateMaxNum(self):
        # At most 5 queries per day; the counter lives in redis under the
        # 'puhuiCustomerInfo' hash (field 'count').
        res = self.dc._r.hmget('puhuiCustomerInfo', 'count')[0]
        count = int(res) if res else 0
        if count >= 5:
            self.error_msg.append(u'今天已查询5次,欢迎明天再来查询!')

    def total_check(self):
        self.check_data()
        self.checkDateMaxNum()
        status = 1 if self.error_msg else 0
        return status, self.error_msg
class Check_people_upload(object):
    """
    Validate the workbook uploaded by HR.

    error codes / messages:
        1   no file uploaded
        2   not an excel file
    total_check returns (status, error_msg): status 1 on error, 0 when OK.
    """
    def __init__(self, request):
        self.fileobj = request.FILES.get('file', None)
        self.error_msg = []
        self.error_code = []

    def check_file_exists(self):
        """Record error 1 when no file was posted."""
        if self.fileobj is None:
            self.error_msg.append(u'上传文件为空')
            self.error_code.append(1)

    def check_filename(self):
        """
        Record error 2 unless the file name ends in xlsx/xls.
        The comparison is case-insensitive so 'report.XLSX' is accepted too
        (previously rejected).
        """
        legal_file_suffixes = ['xlsx', 'xls']
        if self.fileobj.name.split('.')[-1].lower() not in legal_file_suffixes:
            self.error_msg.append(u'只能上传excel文件')
            self.error_code.append(2)

    def total_check(self):
        self.check_file_exists()
        # Only inspect the name when a file actually exists (error 1 absent).
        if 1 not in self.error_code:
            self.check_filename()
        status = 1 if self.error_msg else 0
        return status, self.error_msg
================================================
FILE: controller/core/excel.py
================================================
#! /usr/bin/env python
# -*-coding:utf-8-*-
##################################################
# Function: 银谷在线注册统计及出借统计脚本
# Usage: python start.py
# Author: 黄小雪
# Date: 2016年7月19日
# Company:
# Version: 1.2
##################################################
import xlwt
import xlrd
from unicode_width import *
from openpyxl import Workbook
from openpyxl.utils import get_column_letter
from openpyxl.styles import Font, Alignment
def set_style(name, height, bold=False):
    """
    Build an xlwt cell style: the given font (centred both ways).

    :param name: font name, e.g. 'Times New Roman'
    :param height: xlwt font height units (callers pass 220)
    :param bold: bold font when True
    :return: an xlwt.XFStyle
    """
    style = xlwt.XFStyle()  # base style
    font = xlwt.Font()  # font for the style
    font.name = name  # 'Times New Roman'
    font.bold = bold
    # xlwt palette index 4 — presumably the intended header colour; confirm.
    font.color_index = 4
    font.height = height
    al = xlwt.Alignment()
    al.horz = xlwt.Alignment.HORZ_CENTER  # centre horizontally
    al.vert = xlwt.Alignment.VERT_CENTER  # centre vertically
    style.font = font
    style.alignment = al
    return style
def get_table(file, table=False):
    """
    Load an xls workbook with xlrd.

    :param file: path of the workbook to open
    :param table: when truthy return only the first sheet,
                  otherwise the list of all sheets
    :return: first sheet, or list of sheets
    """
    workbook = xlrd.open_workbook(file)
    all_sheets = workbook.sheets()
    return all_sheets[0] if table else all_sheets
def sheet_write(f, sheet_name, row0, rows, width):
    """
    Add one sheet to an xlwt workbook and fill it.

    :param f: xlwt Workbook
    :param sheet_name: name of the new sheet
    :param row0: header cells (written bold on row 0)
    :param rows: data rows (written from row 1 on)
    :param width: per-column widths, in characters (multiplied by 256,
                  xlwt's width unit)
    """
    sheet = f.add_sheet(sheet_name, cell_overwrite_ok=True)  # create the sheet
    # Header row with bold, centred style; also sets the column widths.
    for i in range(0, len(row0)):
        sheet.write(0, i, row0[i], set_style('Times New Roman', 220, True))
        sheet.col(i).width = 256 * width[i]
    for j in range(0, len(rows)):
        row = rows[j]
        for i in range(0, len(row)):
            sheet.write(j + 1, i, row[i], set_style('Times New Roman', 220, False))
class Openpyxl(object):
    # Build an .xlsx file with openpyxl: one styled sheet per add_sheet()
    # call, column widths derived from the cell contents.
    def __init__(self, filename):
        self.ft1 = Font(name='Calibri', bold=True)  # header font
        self.ft2 = Font(name='Calibri')  # body font
        self.al = Alignment(horizontal='center', vertical='center')
        self.filename = filename
        self.wb = Workbook()

    def __set_width(self, rows, ws):
        # get_width (from unicode_width) computes display widths per column;
        # xrange is Python-2 only.
        widths = get_width(*rows)
        for i in xrange(len(widths)):
            ws.column_dimensions[get_column_letter(i + 1)].width = widths[i]

    def __get_new_ws(self, title):
        # Reuse the workbook's initial empty sheet for the first call, create
        # fresh sheets afterwards.
        # NOTE(review): get_active_sheet/get_cell_collection are deprecated in
        # newer openpyxl (use .active / iterating cells) — pin the version or
        # migrate.
        new_ws = self.wb.get_active_sheet()
        if len(new_ws.get_cell_collection()) == 0:
            new_ws.title = title
        else:
            new_ws = self.wb.create_sheet(title=title)
        return new_ws

    def add_sheet(self, title, rows):
        """Write *rows* to a sheet named *title*; row 0 is styled as header."""
        new_ws = self.__get_new_ws(title)
        for i in range(len(rows)):
            row = rows[i]
            for j in range(len(row)):
                new_ws.cell(row=i + 1, column=j + 1).value = row[j]
                if i == 0:
                    new_ws.cell(row=i + 1, column=j + 1).font = self.ft1
                else:
                    new_ws.cell(row=i + 1, column=j + 1).font = self.ft2
                new_ws.cell(row=i + 1, column=j + 1).alignment = self.al
        self.__set_width(rows, new_ws)

    def save(self):
        # Persist the workbook to the path given at construction.
        self.wb.save(filename=self.filename)
================================================
FILE: controller/core/local_mysql.py
================================================
#! /usr/bin/env python
# -*-coding:utf-8-*-
##################################################
# Function: 银谷在线注册统计及出借统计脚本
# Usage: python start.py
# Author: 黄小雪
# Date: 2016年7月19日
# Company:
# Version: 1.2
##################################################
from controller.public.mysql_helper import Business
# NOTE(review): host/user/passwd/db are not defined anywhere in this module —
# importing it raises NameError, and the Business instance is discarded.
# Presumably these names came from a removed config import; restore before use.
Business(host, user, passwd, db)
================================================
FILE: controller/core/mailtable.py
================================================
#!/usr/bin/python env
# -*- coding: UTF-8 -*-
# Description:
# Author: 黄小雪
# Date: 2017年07月04日
# Company: 东方银谷
class MailTable(object):
    """
    HTML table fragments for e-mail bodies: a CSS block plus a table
    builder (header row styled via <th>, data rows via <td>).
    """
    def __init__(self):
        pass

    @property
    def style(self):
        # Inline <style> block for the 'imagetable' class used by table().
        _style = """
<style type="text/css">
table.imagetable {
    font-family: verdana,arial,sans-serif;
    font-size:11px;
    color:#333333;
    border-width: 1px;
    border-color: #999999;
    border-collapse: collapse;
}
table.imagetable th {
    background:#b5cfd2 url('cell-blue.jpg');
    border-width: 1px;
    padding: 8px;
    border-style: solid;
    border-color: #999999;
    white-space:nowrap;
}
table.imagetable td {
    background:#dcddc0 url('cell-grey.jpg');
    border-width: 1px;
    padding: 8px;
    border-style: solid;
    border-color: #999999;
    white-space:nowrap;
}
</style>
"""
        return _style

    def table(self, caption, rows):
        """
        Render *rows* as an HTML table; rows[0] becomes the header.
        :param caption: table caption text
        :param rows: list of rows (each an iterable of cell values)
        """
        row0 = rows[0]
        tr0 = self._tr0_list(row0)
        total_tr_list = [self._tr_list(row) for row in rows[1:]]
        tr_body = ''.join(total_tr_list)
        _table = """
<table class="imagetable">
<caption align="top">%s</caption>
%s
%s
</table>
""" % (caption, tr0, tr_body)
        return _table

    def _tr_list(self, row):
        # One data row: <tr><td>…</td>…</tr>
        _tr_list = ['<td>%s</td>' % r for r in row]
        _tr = '<tr>%s</tr>' % ''.join(_tr_list)
        return _tr

    def _tr0_list(self, row):
        # Header row: <tr><th>…</th>…</tr>
        _tr_list = ['<th>%s</th>' % r for r in row]
        _tr = '<tr>%s</tr>' % ''.join(_tr_list)
        return _tr
================================================
FILE: controller/core/public.py
================================================
# -*- coding: UTF-8 -*-
import datetime
import time
import json
class Currency(object):
    """Small helpers for pulling cleaned values out of a Django request."""

    def __init__(self, request):
        self.request = request

    def _clean(self, container, key):
        # Missing keys yield '' so callers never see None.
        return container.get(key, '').strip()

    def rq_get(self, key):
        """Stripped GET parameter *key* ('' when absent)."""
        return self._clean(self.request.GET, key)

    def rq_post(self, key):
        """Stripped POST parameter *key* ('' when absent)."""
        return self._clean(self.request.POST, key)

    def rq_get_json(self, key):
        """GET parameter *key* parsed as JSON."""
        return json.loads(self.rq_get(key))

    def rq_post_json(self, key):
        """POST parameter *key* parsed as JSON."""
        return json.loads(self.rq_post(key))
class DatetimeHelp(object):
    # Date/time helpers: formatted "now"/"yesterday" strings and
    # millisecond timestamps.
    def __init__(self):
        pass

    @property
    def now_time(self):
        """Current local datetime (re-evaluated on every access)."""
        return datetime.datetime.now()

    def strptime(self, value, format):
        """Parse *value* with the given strftime-style *format*."""
        return datetime.datetime.strptime(value, format)

    @property
    def nowtimestrf1(self):
        return self.now_time.strftime(u'%Y-%m-%d %H:%M:%S')

    @property
    def nowtimestrf2(self):
        return self.now_time.strftime(u'%Y年%m月%d日 %H点%M分%S秒')

    @property
    def nowtimestrf3(self):
        return self.now_time.strftime(u'%Y%m%d%H%M%S')

    @property
    def nowtimestrf4(self):
        return self.now_time.strftime(u'%Y%m%d')

    @property
    def nowtimestrf5(self):
        return self.now_time.strftime(u'%Y-%m-%d')

    @property
    def nowtimestrf6(self):
        return self.now_time.strftime(u'%Y年%m月%d日')

    @property
    def yesterday(self):
        """Local datetime exactly 24h before now."""
        yd = self.now_time - datetime.timedelta(days=1)
        return yd

    @property
    def yesterdaystrf4(self):
        return self.yesterday.strftime(u'%Y%m%d')

    @property
    def yesterdaystrf5(self):
        return self.yesterday.strftime(u'%Y-%m-%d')

    @property
    def yesterdaystrf6(self):
        return self.yesterday.strftime(u'%Y年%m月%d日')

    @property
    def timestamp1(self):
        """Current time as a 13-digit millisecond timestamp.
        :return: e.g. 1456402864242
        """
        return self.datetime_to_timestamp(self.now_time)

    @staticmethod
    def datetime_to_timestamp(datetime_obj):
        """Convert a local datetime (incl. microseconds) to a millisecond timestamp.
        :param datetime_obj: {datetime}2016-02-25 20:21:04.242000
        :return: 13-digit millisecond timestamp, e.g. 1456402864242
        """
        # int() replaces the Python-2-only long(); on Python 2 ints
        # auto-promote, so behaviour is unchanged there too.
        local_timestamp = int(time.mktime(datetime_obj.timetuple()) * 1000.0 + datetime_obj.microsecond / 1000.0)
        return local_timestamp
if __name__ == '__main__':
    # Smoke test: print the current millisecond timestamp.
    # print(...) works on both Python 2 and 3 for a single argument,
    # unlike the original bare print statement.
    dth = DatetimeHelp()
    print(dth.timestamp1)
================================================
FILE: controller/core/query_sql.py
================================================
#!/usr/bin/python env
# -*- coding: UTF-8 -*-
# Description:
# Author: 黄小雪
# Date: 2017年03月29日
# Company: 东方银谷
from controller.core.public import *
import decimal
class Q_Sql(object):
    """Builds a parameterised, paginated query from per-table filter specs.

    ``tables`` maps a table alias to a dict of field specs, e.g.::

        table_a = {
            'delivery_id': {'data_type': 'str', 'val': ''},
            'customer':    {'data_type': 'str', 'val': ''},
            'start_time':  {'data_type': 'datetime', 'val': ''},
            'end_time':    {'data_type': 'datetime', 'val': ''},
        }
        table_b = {
            'large_area': {'data_type': 'str', 'val': '北区'},
            'store':      {'data_type': 'str', 'val': '安阳一部'},
            'emp_num':    {'data_type': 'str', 'val': 'CF400721'}
        }
        tables = {'a': table_a, 'b': table_b, ...}

    ``cvtpara`` supplies the base SQL, the COUNT SQL, offset/limit and the
    ORDER BY rules.  Field specs whose ``val`` is falsy are skipped.
    """
    def __init__(self, cvtpara, **tables):
        self._offset = cvtpara['offset']
        self._limit = cvtpara['limit']
        self.tables = tables
        self._SQL = cvtpara['sql']
        self._TOTAL_SQL = cvtpara['total_sql']
        self._order_by = self._set_order_by(cvtpara['order_by'])
        self._para = []          # positional parameters for the WHERE clause
        self._condition = []     # individual 'alias.field <op> %s' fragments
        self._condition_sql = ''
        self._data()
        self._set_condition()
    def _data(self):
        # Collect filter conditions from every table spec.
        for t, table in self.tables.items():
            self._set_data(t, table)
    def _set_data(self, t, table):
        # Build conditions for one table alias; empty values are ignored.
        for field, attr in table.items():
            if attr['val']:
                if attr['data_type'] == 'str':
                    self._set_str(t, field, **attr)
                if attr['data_type'] == 'datetime':
                    self._set_datetime(t, field, **attr)
    def _set_str(self, t, field, **attr):
        # Equality filter; '%%s' leaves a literal %s placeholder for the driver.
        val = attr['val']
        self._condition.append('%s.%s = %%s' % (t, field))
        self._para.append(val)
    def _set_datetime(self, t, field, **attr):
        # 'start_<col>' / 'end_<col>' become >= / <= range filters on <col>.
        val = attr['val']
        tfield = field.split('_', 1)[1]  # actual column name after the prefix
        judge = field.split('_', 1)[0]   # 'start' or 'end'
        if judge == 'start':
            self._condition.append('%s.%s >= %%s' % (t, tfield))
            self._para.append(val)
        elif judge == 'end':
            # Append the end-of-day time so a whole-day value is inclusive.
            self._condition.append('%s.%s <= %%s' % (t, tfield))
            self._para.append('%s 23:59:59' % val)
    def _set_order_by(self, order_by):
        # Build 'ORDER BY a.f1 asc,b.f2 desc' from the rule dicts ('' if none).
        _order_by_str = ''
        _fields = []
        if order_by:
            for dt in order_by:
                _table = dt['table']
                _field = dt['field']
                _rule = dt['rule']
                _field = '%s.%s %s' % (_table, _field, _rule)
                _fields.append(_field)
            _fields_str = ','.join(_fields)
            _order_by_str = 'ORDER BY %s' % _fields_str
        return _order_by_str
    def _set_condition(self):
        # Join the collected fragments into a single WHERE clause.
        if self._condition:
            and_sql = '\nand '.join(self._condition)
            self._condition_sql = 'where \n%s' % and_sql
    @property
    def para(self):
        # Parameters for the paginated query: WHERE params + offset + limit.
        import copy
        _para = copy.deepcopy(self._para)
        _para.append(self._offset)
        _para.append(self._limit)
        return _para
    @property
    def total_para(self):
        # Parameters for the COUNT query (no pagination values).
        return self._para
    @property
    def sql(self):
        # Full paginated SQL; the 'limit %s,%s' placeholders are filled by `para`.
        _sql = '\n'.join([self._SQL, self._condition_sql,
                          self._order_by, 'limit %s,%s'])
        return _sql
    @property
    def total_sql(self):
        # COUNT query sharing the same WHERE clause.
        _total_sql = '\n'.join([self._TOTAL_SQL, self._condition_sql])
        return _total_sql
class Q_Data(object):
    """Fetches and cleans rows/total for a query built by a Q_Sql object."""
    def __init__(self, qs):
        self._sql = qs.sql
        self._para = qs.para
        self._total_sql = qs.total_sql
        self._total_para = qs.total_para
    def _get_data(self, databases_c):
        # Run the paginated query through the supplied DB helper.
        return databases_c.getall(self._sql, self._para)
    def _data_clean(self, data):
        """Return a copy of the row dict with every value converted."""
        res = {}
        for key, val in data.items():
            res[key] = self._data_conversion(val)
        return res
    def _data_conversion(self, val):
        """Convert datetime -> 'YYYY-mm-dd HH:MM:SS' string, Decimal -> float.

        Bug fix: the previous implementation returned ``new_val or val``,
        which handed back the ORIGINAL value whenever the converted one was
        falsy — e.g. Decimal('0') converted to 0.0 but the Decimal leaked
        through.  Explicit returns avoid the truthiness trap.
        """
        if isinstance(val, datetime.datetime):
            return val.strftime('%Y-%m-%d %H:%M:%S')
        if isinstance(val, decimal.Decimal):
            return float(val)
        return val
    def _get_rows(self, databases_c):
        """Return cleaned rows: [{col1: val1, col2: val2, ...}, ...]."""
        rows = []
        data = self._get_data(databases_c)
        for dt in data:
            row = self._data_clean(dt)
            rows.append(row)
        return rows
    def _get_total(self, databases_c):
        # Total row count; the COUNT SQL must alias its aggregate as `count`.
        total_data = databases_c.getall(self._total_sql, self._total_para)
        total = total_data[0]['count']
        return total
class Download_Sql(Q_Sql):
    """SQL builder for file export.

    Identical to Q_Sql except that the generated query carries no
    offset/limit clause — an export fetches every matching row, so the
    pagination placeholders (and their parameters) are dropped.
    """
    def __init__(self, cvtpara, **tables):
        super(Download_Sql, self).__init__(cvtpara, **tables)
    @property
    def sql(self):
        # Same assembly as the parent, minus the 'limit %s,%s' tail.
        parts = [self._SQL, self._condition_sql, self._order_by]
        return '\n'.join(parts)
    @property
    def para(self):
        # No pagination values appended — just the WHERE-clause parameters.
        return self._para
================================================
FILE: controller/core/unicode_width.py
================================================
#!/usr/bin/env python
#-*-coding:utf-8-*-
def get_max_length(arg):
    # Display width of the first entry (the widest one, after sort_arg)
    # plus 2 padding columns.
    return str_len(arg[0]) + 2
def sort_arg(arg):
    """Return *arg* as a list sorted by display width, widest first.

    Fix: the original used the Python-2-only ``cmp=cmp_length`` keyword,
    which does not exist on Python 3.  ``key=str_len, reverse=True``
    produces the identical ordering (including stable order for entries of
    equal width) and works on both interpreter lines.
    """
    arg = list(arg)
    arg.sort(key=str_len, reverse=True)
    return arg
def cmp_length(a, b):
    """cmp-style comparator ordering by display width, widest first:
    -1 when a is wider, 1 when b is wider, 0 when equal."""
    wa = str_len(a)
    wb = str_len(b)
    # Boolean subtraction yields exactly -1 / 0 / 1.
    return (wb > wa) - (wb < wa)
def str_len(string):
    """Return the display width of *string*: each multi-byte (CJK fullwidth)
    character counts as 2 columns, everything else as 1.

    Fixes two defects in the original:
    - ``except: return row_l`` raised NameError when the failure happened
      before ``row_l`` was bound (e.g. the u'%s' coercion itself failed);
      the fallback now computes a plain length safely.
    - ``//`` replaces ``/``: identical for ints on Python 2, and keeps the
      result an int instead of a float on Python 3.
    """
    try:
        string = u'%s' % string
        char_count = len(string)
        byte_count = len(string.encode('utf-8'))
        # A 3-byte UTF-8 CJK char contributes 2 extra bytes -> +1 column.
        return (byte_count - char_count) // 2 + char_count
    except Exception:
        # Best-effort fallback: plain length, or 0 if even that fails.
        try:
            return len(string)
        except Exception:
            return 0
def get_width(*var):
    """Column widths for tabular output.

    ``zip(*var)`` transposes the rows into columns; each column's width is
    the display length of its widest cell plus 2 padding columns.

    Fix: the original returned the raw result of ``map``, which on Python 3
    is a one-shot lazy iterator (breaking repeated iteration/indexing).
    The comprehension returns a real list on both Python 2 and 3.
    """
    return [get_max_length(sort_arg(column)) for column in zip(*var)]
================================================
FILE: controller/public/__init__.py
================================================
================================================
FILE: controller/public/dataconn.py
================================================
#!/usr/bin/python env
# -*- coding: UTF-8 -*-
# Description:
# Author: 黄小雪
# Date: 2018年01月04日
# Company: 东方银谷
import re
import datetime
import decimal
from django.conf import settings
from mysql_helper import BusinessMysql
from sqlserver_helper import BusinessSqlserver
# 业绩平台数据库
_database = settings.DATABASES['default']
# 数据库用户名密码SQL
dtbsif_sql = 'select * from FirstBlood.databaseinfo '
class DatabaseConnection(object):
    """Factory for DB helper connections, driven by the `databaseinfo` table.

    ``self.ygol`` is the connection to the local FirstBlood metadata
    database; the get_dtbs_* methods look a target database up in
    `databaseinfo` and hand back the matching mysql/sqlserver helper.
    """
    def __init__(self, logger):
        self._logger = logger
        self.ygol = BusinessMysql(_database['HOST'], _database['USER'],
                                  _database['PASSWORD'], _database['NAME'])
    def get_dtbs_conn(self, name):
        """Return a BusinessMysql/BusinessSqlserver helper for the database
        registered under *name* in the `databaseinfo` table."""
        datainfo = self.get_datainfo(name)
        businessType = {'mysql': BusinessMysql, 'sqlserver': BusinessSqlserver}
        business = businessType[datainfo['type']]
        return business(datainfo['host'], datainfo['user'], datainfo['passwd'], datainfo['db'])
    def get_dtbs_conn_by_id(self, _id):
        """
        Return a helper for the database whose `databaseinfo` primary key
        is *_id*.
        :param _id: primary key of the databaseinfo row
        :return: database helper object
        """
        datainfo = self.get_datainfo_by_id(_id)
        businessType = {'mysql': BusinessMysql, 'sqlserver': BusinessSqlserver}
        business = businessType[datainfo['type']]
        return business(datainfo['host'], datainfo['user'], datainfo['passwd'], datainfo['db'])
    def get_datainfo(self, name):
        """Look up one databaseinfo row by name.

        Fixes: the query is now parameterised instead of interpolating
        *name* into the SQL string, and the error log gained the missing
        %s placeholder (the original `u'...Msg:' % msg` raised TypeError
        whenever it was reached).
        """
        data = self.ygol.getsingle(dtbsif_sql + "where `name`=%s", (name,))
        if self.ygol.status:
            self._logger.error(u'根据数据库信息表的名称,获取数据库信息失败. - Msg: %s' % self.ygol.msg)
        return data
    def get_datainfo_by_id(self, _id):
        """Look up one databaseinfo row by its primary key id
        (parameterised query; same log-format fix as get_datainfo)."""
        data = self.ygol.getsingle(dtbsif_sql + "where `id`=%s", (_id,))
        if self.ygol.status:
            self._logger.error(u'根据数据库信息表的主键id,获取数据库信息失败. - Msg: %s' % self.ygol.msg)
        return data
class DataTransform(object):
    """
    Value conversion/escaping helpers used when moving rows between
    databases, exporting to Excel, or returning JSON to the user.
    """
    def __init__(self):
        # Control characters that are illegal in XML/Excel output.
        self._ILLEGAL_CHARACTERS_RE = re.compile(r'[\000-\010]|[\013-\014]|[\016-\037]')
    def get_row_by_list(self, dt, database_type):
        """Convert a tuple/list row into SQL-literal fragments
        ("'quoted'", 'null', ...) for the given target database type
        ('mysql' or 'sqlserver', selecting the escaping rules)."""
        special_characters_conversion = self.special_characters(database_type)
        row = []
        for val in dt:
            if isinstance(val, long):
                val = '%s' % str(val)  # Python 2 long -> plain string
            if isinstance(val, str) or isinstance(val, unicode):
                # Strip characters that the target cannot store.
                if next(self._ILLEGAL_CHARACTERS_RE.finditer(val), None):
                    val = re.sub(self._ILLEGAL_CHARACTERS_RE, "", val)
            if isinstance(val, bool):
                val = 'true' if val else 'false'
                # NOTE(review): the str branch below then quotes this as
                # "'true'"/"'false'" — confirm that is intended.
            if isinstance(val, str):
                val = "'%s'" % special_characters_conversion(val)
            if isinstance(val, unicode):
                val = "'%s'" % special_characters_conversion(val)
            if isinstance(val, datetime.datetime):
                val = "'%s'" % val
            if isinstance(val, datetime.date):
                val = "'%s'" % val
            if val is None:
                val = 'null'
            row.append(val)
        return row
    def get_row_by_dict(self, dt, database_type):
        """Convert a dict row for transfer: escape strings (unquoted),
        stringify datetimes, Decimal -> float, None -> 'null'."""
        special_characters_conversion = self.special_characters(database_type)
        row = {}
        for key, val in dt.items():
            if isinstance(val, long):
                val = '%s' % str(val)
            if isinstance(val, str) or isinstance(val, unicode):
                if next(self._ILLEGAL_CHARACTERS_RE.finditer(val), None):
                    val = re.sub(self._ILLEGAL_CHARACTERS_RE, "", val)
            if isinstance(val, bool):
                val = 'true' if val else 'false'
            if isinstance(val, str):
                val = "%s" % special_characters_conversion(val)
            if isinstance(val, unicode):
                val = "%s" % special_characters_conversion(val)
            if isinstance(val, datetime.datetime):
                val = "%s" % val
            if isinstance(val, datetime.date):
                val = "%s" % val
            if val is None:
                val = 'null'
            if isinstance(val, decimal.Decimal):
                val = float(val)
            row[key] = val
        return row
    def get_row_by_dict_to_user(self, dt):
        # User-facing dict row: same conversions but no SQL escaping.
        row = {}
        for key, val in dt.items():
            if isinstance(val, long):
                val = '%s' % str(val)
            if isinstance(val, str) or isinstance(val, unicode):
                if next(self._ILLEGAL_CHARACTERS_RE.finditer(val), None):
                    val = re.sub(self._ILLEGAL_CHARACTERS_RE, "", val)
            if isinstance(val, bool):
                val = 'true' if val else 'false'
            if isinstance(val, datetime.datetime):
                val = "%s" % val
            if isinstance(val, datetime.date):
                val = "%s" % val
            if val is None:
                val = 'null'
            if isinstance(val, decimal.Decimal):
                val = float(val)
            row[key] = val
        return row
    def get_row_by_list_to_excel(self, dt):
        # List row destined for an Excel file: only strip characters that
        # openpyxl/xlwt reject; leave all other values untouched.
        row = []
        for val in dt:
            if isinstance(val, long):
                val = str(val)
            if isinstance(val, str) or isinstance(val, unicode):
                if next(self._ILLEGAL_CHARACTERS_RE.finditer(val), None):
                    val = re.sub(self._ILLEGAL_CHARACTERS_RE, "", val)
            row.append(val)
        return row
    @staticmethod
    def special_characters_mysql(string):
        # Escape backslash, single and double quotes for a MySQL literal.
        double_slash = re.compile(r'\\')
        single_quotes = re.compile(r'\'')
        double_quotation_marks = re.compile(r'\"')
        string = re.sub(double_slash, "\\\\", string)
        string = re.sub(single_quotes, "\\'", string)
        string = re.sub(double_quotation_marks, "\\\"", string)
        return string
    @staticmethod
    def special_characters_sqlserver(string):
        # SQL Server escapes quotes by doubling them.
        string = string.replace("'", "''")
        string = string.replace('"', '""')
        return string
    def special_characters(self, database_type):
        # Dispatch: pick the escaping function for the database type.
        func = {'mysql': self.special_characters_mysql, 'sqlserver': self.special_characters_sqlserver}
        return func[database_type]
================================================
FILE: controller/public/log.py
================================================
#!/usr/bin/python env
# -*- coding: UTF-8 -*-
# Description: 日志记录
# Author: 黄小雪
# Date: 2017年02月15日
# Company: 东方银谷
import logging
# 开发一个日志系统, 既要把日志输出到控制台, 还要写入日志文件
class Logger(object):
    """Logger that writes every record both to a file and to the console."""
    # Level -> formatter map.  All five entries are currently identical;
    # the dict exists so per-level formats can be configured later.
    _format_dict = {
        1: logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'),
        2: logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'),
        3: logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'),
        4: logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'),
        5: logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    }
    def __init__(self, logname, loglevel, logger):
        '''
        :param logname: path of the log file to write
        :param loglevel: key into _format_dict selecting the record format
        :param logger: logger name passed to logging.getLogger
        '''
        self.logger = logging.getLogger(logger)
        self.logger.setLevel(logging.DEBUG)
        # logging.getLogger returns the SAME object for the same name, so
        # the original attached a new handler pair on every construction,
        # duplicating each log line.  Only attach handlers once.
        if not self.logger.handlers:
            formatter = self._format_dict[int(loglevel)]
            # File handler.
            fh = logging.FileHandler(logname)
            fh.setLevel(logging.DEBUG)
            fh.setFormatter(formatter)
            # Console handler.
            ch = logging.StreamHandler()
            ch.setLevel(logging.DEBUG)
            ch.setFormatter(formatter)
            self.logger.addHandler(fh)
            self.logger.addHandler(ch)
    def getlog(self):
        """Return the configured logging.Logger instance."""
        return self.logger
================================================
FILE: controller/public/mailclass.py
================================================
#! /usr/bin/env python
# -*- coding: UTF-8 -*-
##################################################
# Function: 银谷在线注册统计及出借统计脚本
# Usage: python start.py
# Author: 黄小雪
# Date: 2016年7月19日
# Company:
# Version: 1.2
##################################################
import os
import sys
import xlrd
import smtplib
import datetime
from email.header import Header
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.utils import parseaddr, formataddr
reload(sys)
sys.setdefaultencoding("utf-8")
class MailHelper(object):
    """Send HTML mail (optionally with attachments / inline images) via SMTP."""
    def __init__(self, mail_host, mail_user, mail_pass,
                 sender, sender_zh_name, receivers, cc):
        # Third-party SMTP service credentials and addressing.
        self.mail_host = mail_host            # SMTP server host
        self.mail_user = mail_user            # SMTP login user
        self.mail_pass = mail_pass            # SMTP login password
        self.sender = sender                  # sender address
        self.sender_zh_name = sender_zh_name  # sender display name
        self.receivers = receivers            # list of recipient addresses
        self.cc = cc                          # list of CC addresses
        self.message = MIMEMultipart()        # container for body + attachments
        self.status = 0                       # 0 = OK, 1 = send failed
        self.msg = ''                         # error message when status == 1
    def add_attch(self, res_file):
        """Attach the file at absolute path *res_file*; the attachment is
        named after the path's last component."""
        att1 = MIMEText(open(res_file, 'rb').read(), 'base64', 'utf-8')
        att1["Content-Type"] = 'application/octet-stream'
        # The filename shown in the mail client comes from this header.
        att1["Content-Disposition"] = 'attachment; filename=%s' % Header(res_file.split('/')[-1], 'UTF-8')
        self.message.attach(att1)
    def insert_img(self, file):
        """Attach an image that the HTML body references as cid:image1."""
        fp = open(file, 'rb')
        msgImage = MIMEImage(fp.read())
        fp.close()
        # Content-ID referenced from the HTML body.
        msgImage.add_header('Content-ID', '<image1>')
        self.message.attach(msgImage)
    def add_content(self, content, subject):
        """Set Subject/From/To/Cc headers and attach the HTML body."""
        self.message['Subject'] = Header(subject, 'utf-8')
        self.message['From'] = self._format_addr(u'%s <%s>'% (self.sender_zh_name, self.sender))
        self.message['To'] = ''.join(self._cvt_receivers(self.receivers))
        self.message['Cc'] = ''.join(self._cvt_receivers(self.cc))
        head_content = """"""
        mail_msg = ''.join([head_content, content])
        self.message.attach(MIMEText(mail_msg, 'html', 'utf-8'))
    def _cvt_receivers(self, receivers):
        # Wrap every address in <...> to avoid mangled recipient headers.
        return [self._cvt_user(u) for u in receivers]
    def _cvt_user(self, user):
        return ''.join(['<', user, '>'])
    def send_htm(self):
        """Send the assembled message; on failure set status=1 and msg."""
        try:
            smtpObj = smtplib.SMTP()
            smtpObj.connect(self.mail_host, 25)  # 25 = standard SMTP port
            smtpObj.login(self.mail_user, self.mail_pass)
            smtpObj.sendmail(self.sender, self.receivers + self.cc, self.message.as_string())
        except smtplib.SMTPException, e:  # Python 2 except syntax
            self.status = 1
            self.msg = u"%s" % e
    @staticmethod
    def _format_addr(s):
        # Encode a 'Display Name <addr>' pair RFC-2047-safely.
        name, addr = parseaddr(s)
        return formataddr((Header(name, 'utf-8').encode(),
                           addr.encode('utf-8') if isinstance(addr, unicode) else addr))
================================================
FILE: controller/public/mysql_helper.py
================================================
#!/usr/bin/python env
# -*- coding: UTF-8 -*-
# Description:
# Author: 黄小雪
# Date: 2017年07月12日
# Company: 东方银谷
import MySQLdb
class MysqlHelper(object):
    """
    MySQL data-access layer.

    Every public method opens its own connection and closes it before
    returning, so instances are cheap but each call pays a connect.
    status: 0 = last call OK, 1 = failed (msg holds the error text).
    row0:   column names of the last result set; rowcount: its row count.
    """
    def __init__(self, host, user, passwd, db):
        self.__host = host
        self.__user = user
        self.__passwd = passwd
        self.__db = db
        self.row0 = None      # column names of the last query
        self.rowcount = None  # row count of the last query
        self.msg = ''         # last error message
        self.status = 0       # 0 = OK, 1 = error
    def __conn(self):
        # Open a fresh connection; on failure record the error, return None.
        try:
            conn = MySQLdb.connect(host=self.__host, user=self.__user,
                                   passwd=self.__passwd, db=self.__db,
                                   init_command="set names utf8;set net_write_timeout=3600;",
                                   charset='utf8',
                                   # cursorclass=MySQLdb.cursors.SSCursor
                                   )
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            conn = None
        return conn
    def getall(self, sql, paramters=None):
        """Execute *sql* and return all rows as dicts (None on failure)."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)  # dict rows
            cur.execute(sql, paramters)
            data = cur.fetchall()
            self.rowcount = cur.rowcount
            self.row0 = [d[0] for d in cur.description]
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            # NOTE(review): if conn.cursor() itself raised, `cur` would be
            # unbound here and close() would raise NameError.  The same
            # pattern repeats in every method below — confirm and harden.
            cur.close()
            conn.commit()
            conn.close()
        return data
    def getallmany(self, sql, paramters=None):
        """executemany variant of getall; *paramters* is a sequence of rows."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)  # dict rows
            cur.executemany(sql, paramters)
            data = cur.fetchall()
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return data
    def getsingle(self, sql, paramters=None):
        """Execute *sql* and return the first row as a dict (None on failure)."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)  # dict rows
            cur.execute(sql, paramters)
            data = cur.fetchone()
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return data
    def insertmany(self, sql, paramters=None):
        """Bulk insert via executemany; errors reported through status/msg."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)  # dict rows
            cur.executemany(sql, paramters)
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return None
    def insert(self, sql, paramters=None):
        """Insert one row; the new AUTO_INCREMENT id is stored on
        self.insert_id."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)  # dict rows
            cur.execute(sql, paramters)
            setattr(self, 'insert_id', conn.insert_id())
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return None
    def getall_list(self, sql, paramters=None):
        # Like getall, but rows come back as tuples (plain cursor).
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor()  # tuple rows
            cur.execute(sql, paramters)
            data = cur.fetchall()
            self.rowcount = cur.rowcount
            self.row0 = [d[0] for d in cur.description]
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return data
    def getall_list_sqls(self, sqls, paramters=None):
        """
        Execute several statements on one connection; data/metadata come
        from the LAST statement.  sqls = [sql1, sql2, ...]
        """
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor()  # tuple rows
            for sql in sqls:
                cur.execute(sql, paramters)
            data = cur.fetchall()
            self.rowcount = cur.rowcount
            self.row0 = [d[0] for d in cur.description]
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return data
    def delete(self, sql, paramters=None):
        """Execute a DELETE; errors reported through status/msg."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)  # dict rows
            cur.execute(sql, paramters)
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return None
    def update(self, sql, paramters=None):
        """Execute an UPDATE; errors reported through status/msg."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)  # dict rows
            cur.execute(sql, paramters)
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return None
    def dict_generator(self, sql, paramters=None):
        """
        Stream rows as dicts via a server-side cursor — for large result
        sets that must not be loaded into memory at once.
        :param sql: SQL statement
        :param paramters: optional query parameters
        :return: generator of row dicts
        """
        conn = self.__conn()
        try:
            if conn:
                cur = conn.cursor(cursorclass=MySQLdb.cursors.SSDictCursor)  # streaming dict rows
                cur.execute(sql, paramters)
                self.rowcount = cur.rowcount
                self.row0 = [d[0] for d in cur.description]
                data = cur.fetchone()
                while data:
                    yield data
                    data = cur.fetchone()
                cur.close()
                conn.commit()
                conn.close()
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
    def tuple_generator(self, sql, paramters=None):
        """
        Stream rows as tuples via a server-side cursor — for large result
        sets.  Row format: row1 = (v1, v2, ...); data = (row1, row2, ...)
        :param sql: SQL statement
        :param paramters: optional query parameters
        :return: generator of row tuples
        """
        conn = self.__conn()
        try:
            if conn:
                # SSCursor buffers the result set on the server side.
                cur = conn.cursor(cursorclass=MySQLdb.cursors.SSCursor)  # tuple rows
                cur.execute(sql, paramters)
                self.rowcount = cur.rowcount
                self.row0 = [d[0] for d in cur.description]
                data = cur.fetchone()
                while data:
                    yield data
                    data = cur.fetchone()
                cur.close()
                conn.commit()
                conn.close()
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
    def transaction_start(self):
        """Open a connection/cursor pair kept on self.conn / self.cur for a
        multi-statement transaction."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(cursorclass=MySQLdb.cursors.DictCursor)  # dict rows
            setattr(self, 'conn', conn)
            setattr(self, 'cur', cur)
        except Exception, e:
            self.msg = 'transaction_start - %s' % e
            self.status = 1
            conn.commit()
            conn.close()
        return None
    def transaction_execute(self, sql, paramters=None):
        # Execute insert/update/delete inside the open transaction;
        # the last insert id is kept on self.insert_id.
        if hasattr(self, 'cur') and hasattr(self, 'conn'):
            try:
                self.cur.execute(sql, paramters)
                setattr(self, 'insert_id', self.conn.insert_id())
            except Exception, e:
                self.msg = 'transaction_insert - %s' % e
                self.status = 1
        return None
    def transaction_commit_and_close(self):
        # Commit on success, roll back if any statement failed, then close.
        if hasattr(self, 'cur') and hasattr(self, 'conn'):
            self.cur.close()
            if self.status:
                self.conn.rollback()
            else:
                self.conn.commit()
            self.conn.close()
        return None
class BusinessMysql(MysqlHelper):
    """Business layer on top of the MySQL data-access helper."""
    def __init__(self, host, user, passwd, db):
        # Nothing extra to initialise; delegate straight to the data layer.
        super(BusinessMysql, self).__init__(host, user, passwd, db)
    def search(self, sql, para=None):
        """Fetch a single row for *sql* (thin alias for getsingle)."""
        return self.getsingle(sql, para)
================================================
FILE: controller/public/pagination.py
================================================
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from django.core.paginator import Paginator
class Paginator_help:
    """Classic page-number pagination helper (old-style class, Python 2)."""
    def __init__(self, page_num, queryset, PAGE_SIZE, current_page_total, request):
        self.page_num = self.check_page_num(page_num)  # current page number
        self.current_page_total = current_page_total   # how many page links to show
        self.queryset = queryset                       # collection to paginate
        self.PAGE_SIZE = PAGE_SIZE                     # rows per page
        self.pages = self.get_Paginator_obj()          # Django Paginator
        self.page_range = self.get_page_range()        # visible page-number window
        self.qstr = self.get_qstr(request)
        self.current_page = self.get_current_page()    # current Page object
    def get_Paginator_obj(self):
        # Build the Django Paginator.
        pages = Paginator(self.queryset, self.PAGE_SIZE)
        return pages
    def check_page_num(self, page_num):
        # Clamp non-positive page numbers to 1.
        if page_num <= 0:
            page_num = 1
        return page_num
    def get_current_page(self):
        # Page object for the current page number.
        current_page = self.pages.page(self.page_num)
        return current_page
    def get_qstr(self, request):
        # Rebuild the query string minus the page parameter 'p'.
        qstr = '&'.join(['%s=%s' % (k, v) for k, v in request.GET.items() if k != 'p'])
        return qstr
    def calculate_begin_end(self):
        """Compute the [begin, end) slice of page numbers to display,
        centring the window on the current page.
        NOTE: relies on Python 2 integer division for the `/ 2` terms.
        """
        page_total = self.pages.num_pages
        begin = 0
        end = 0
        if page_total <= self.current_page_total:
            # Fewer pages than the window: show them all.
            begin = 0
            end = page_total
        else:
            if self.page_num <= self.current_page_total / 2:
                # Near the start: pin the window to the first pages.
                begin = 0
                end = self.current_page_total
            else:
                begin = self.page_num - self.current_page_total / 2
                end = self.page_num + self.current_page_total / 2
                if (self.current_page_total % 2) != 0:  # odd window size
                    end += 1
                if end > page_total:
                    # Near the end: pin the window to the last pages.
                    end = page_total
                    begin = page_total - self.current_page_total
        return begin, end
    def get_page_range(self):
        # Django 1.9's Paginator.page_range returns an xrange, which cannot
        # be sliced, so materialise it as a list before slicing.
        begin, end = self.calculate_begin_end()
        page_range = [i for i in self.pages.page_range]
        page_range = page_range[begin:end]
        return page_range
class Paginator_ajax(object):
    """Server-side pagination helper for the bootstrap-table plugin."""
    def __init__(self, offset, queryset, PAGE_SIZE):
        self.queryset = queryset              # collection to paginate
        self.PAGE_SIZE = PAGE_SIZE            # rows per page
        self.page_num = offset/PAGE_SIZE + 1  # 1-based page (Py2 int division)
        self.pages = self.get_Paginator_obj
        self.current_page = self.get_current_page
        self.total = self.pages.count         # total number of objects
        self.rows = self._get_rows
    @property
    def get_Paginator_obj(self):
        """Django Paginator over the queryset."""
        return Paginator(self.queryset, self.PAGE_SIZE)
    @property
    def get_current_page(self):
        """Page object for the computed page number."""
        return self.pages.page(self.page_num)
    @property
    def _get_rows(self):
        """Rows of the current page as a plain list."""
        return list(self.current_page)
    @property
    def data(self):
        """Payload understood by bootstrap-table: total count + rows."""
        return dict(total=self.total, rows=self.rows)
class Paginator_sql(object):
    """Server-side pagination (bootstrap-table) for raw-SQL result sets
    fetched from a remote database."""
    def __init__(self, offset, queryset, PAGE_SIZE):
        self.queryset = queryset              # collection to paginate
        self.PAGE_SIZE = PAGE_SIZE            # rows per page
        self.offset = offset                  # row offset from the client
        self.page_num = offset/PAGE_SIZE + 1  # 1-based page (Py2 int division)
        self.pages = self.get_Paginator_obj
        self.current_page = self.get_current_page
        self.total = self.pages.count         # total number of objects
        self.rows = self._get_rows
    @property
    def get_Paginator_obj(self):
        """Django Paginator over the queryset."""
        return Paginator(self.queryset, self.PAGE_SIZE)
    @property
    def get_current_page(self):
        """Page object for the computed page number."""
        return self.pages.page(self.page_num)
    @property
    def _get_rows(self):
        """Rows of the current page as a plain list."""
        return list(self.current_page)
    @property
    def data(self):
        """Payload understood by bootstrap-table: total count + rows."""
        return dict(total=self.total, rows=self.rows)
================================================
FILE: controller/public/sqlserver_helper.py
================================================
#!/usr/bin/python env
# -*- coding: UTF-8 -*-
# Description:
# Author: 黄小雪
# Date: 2017年11月03日
# Company: 东方银谷
import pymssql
class SqlserverHelper(object):
    """
    SQL Server data-access layer (pymssql).

    Mirrors MysqlHelper: each public method opens its own connection and
    closes it before returning.
    status: 0 = last call OK, 1 = failed (msg holds the error text).
    row0:   column names of the last result set; rowcount: its row count.
    """
    def __init__(self, host, user, passwd, db):
        self.__host = host
        self.__user = user
        self.__passwd = passwd
        self.__db = db
        self.row0 = None      # column names of the last query
        self.rowcount = None  # row count of the last query
        self.msg = ''         # last error message
        self.status = 0       # 0 = OK, 1 = error
    def __conn(self):
        # Open a fresh connection; on failure record the error, return None.
        try:
            conn = pymssql.connect(server=self.__host, user=self.__user,
                                   password=self.__passwd, database=self.__db,
                                   # init_command="set names utf8",
                                   charset='utf8')
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            conn = None
        return conn
    def getall(self, sql, paramters=None):
        """Execute *sql* and return all rows as dicts (None on failure)."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(as_dict=True)  # dict rows
            cur.execute(sql, paramters)
            data = cur.fetchall()
            self.rowcount = cur.rowcount
            self.row0 = [d[0] for d in cur.description]
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            # NOTE(review): if conn.cursor() itself raised, `cur` would be
            # unbound here and close() would raise NameError.  The same
            # pattern repeats in every method below — confirm and harden.
            cur.close()
            conn.commit()
            conn.close()
        return data
    def getallmany(self, sql, paramters=None):
        """executemany variant of getall; *paramters* is a sequence of rows."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(as_dict=True)  # dict rows
            cur.executemany(sql, paramters)
            data = cur.fetchall()
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return data
    def getsingle(self, sql, paramters=None):
        """Execute *sql* and return the first row as a dict (None on failure)."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(as_dict=True)  # dict rows
            cur.execute(sql, paramters)
            data = cur.fetchone()
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return data
    def insertmany(self, sql, paramters=None):
        """Bulk insert via executemany; errors reported through status/msg."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(as_dict=True)  # dict rows
            cur.executemany(sql, paramters)
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return None
    def insert(self, sql, paramters=None):
        """Insert one row; errors reported through status/msg."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(as_dict=True)  # dict rows
            cur.execute(sql, paramters)
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return None
    def getall_list(self, sql, paramters=None):
        # Like getall, but rows come back as tuples (plain cursor).
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor()  # tuple rows
            cur.execute(sql, paramters)
            data = cur.fetchall()
            self.rowcount = cur.rowcount
            self.row0 = [d[0] for d in cur.description]
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return data
    def getall_list_sqls(self, sqls, paramters=None):
        """
        Execute several statements on one connection; data/metadata come
        from the LAST statement.  sqls = [sql1, sql2, ...]
        """
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor()  # tuple rows
            for sql in sqls:
                cur.execute(sql, paramters)
            data = cur.fetchall()
            self.rowcount = cur.rowcount
            self.row0 = [d[0] for d in cur.description]
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
            data = None
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return data
    def delete(self, sql, paramters=None):
        """Execute a DELETE; errors reported through status/msg."""
        conn = self.__conn()
        if not conn:
            return None
        try:
            cur = conn.cursor(as_dict=True)  # dict rows
            cur.execute(sql, paramters)
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
        finally:
            cur.close()
            conn.commit()
            conn.close()
        return None
    def dict_generator(self, sql, paramters=None):
        """
        Stream rows as dicts — for result sets too large to fetch at once.
        :param sql: SQL statement
        :param paramters: optional query parameters
        :return: generator of row dicts
        """
        conn = self.__conn()
        try:
            if conn:
                cur = conn.cursor(as_dict=True)  # dict rows
                cur.execute(sql, paramters)
                self.rowcount = cur.rowcount
                self.row0 = [d[0] for d in cur.description]
                data = cur.fetchone()
                while data:
                    yield data
                    data = cur.fetchone()
                cur.close()
                conn.commit()
                conn.close()
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
    def tuple_generator(self, sql, paramters=None):
        """
        Stream rows as tuples — for result sets too large to fetch at once.
        :param sql: SQL statement
        :param paramters: optional query parameters
        :return: generator of row tuples
        """
        conn = self.__conn()
        try:
            if conn:
                cur = conn.cursor()  # tuple rows (plain cursor)
                cur.execute(sql, paramters)
                self.rowcount = cur.rowcount
                self.row0 = [d[0] for d in cur.description]
                data = cur.fetchone()
                while data:
                    yield data
                    data = cur.fetchone()
                cur.close()
                conn.commit()
                conn.close()
        except Exception, e:
            self.msg = '%s' % e
            self.status = 1
class BusinessSqlserver(SqlserverHelper):
    """Business layer on top of the SQL Server data-access helper."""
    def __init__(self, host, user, passwd, db):
        # Nothing extra to initialise; delegate straight to the data layer.
        super(BusinessSqlserver, self).__init__(host, user, passwd, db)
    def search(self, sql, para=None):
        """Fetch a single row for *sql* (thin alias for getsingle)."""
        return self.getsingle(sql, para)
================================================
FILE: create_table.sql
================================================
/*
 * Create the database
 */
create database FirstBlood default character set utf8 collate utf8_bin;
/* Switch to the new database */
use FirstBlood;
/*
 * Registered database connection info (read by controller/public/dataconn.py)
 */
CREATE TABLE `databaseinfo` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `name` varchar(255) DEFAULT NULL COMMENT '名称',
  `description` varchar(255) DEFAULT NULL COMMENT '描述',
  `host` varchar(255) DEFAULT NULL COMMENT '主机',
  `user` varchar(255) DEFAULT NULL COMMENT '用户',
  `passwd` varchar(255) DEFAULT NULL COMMENT '密码',
  `db` varchar(255) DEFAULT NULL COMMENT '数据库',
  `type` varchar(255) DEFAULT NULL COMMENT '类型',
  `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
  `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
  PRIMARY KEY (`id`),
  UNIQUE KEY `databaseinfo_host_c254f05e_uniq` (`host`),
  UNIQUE KEY `databaseinfo_name_a3bc8190_uniq` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='数据库信息';
/*
 * Data synchronisation (datax) jobs
 */
drop table if exists `datax_job`;
CREATE TABLE `datax_job` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `name` varchar(255) DEFAULT NULL COMMENT '名称',
  `description` varchar(255) DEFAULT NULL COMMENT '描述',
  `querySql` longtext COLLATE utf8_bin NOT NULL COMMENT '查询SQL语句',
  `reader_databaseinfo_id` int(11) NOT NULL COMMENT '读取数据库',
  `writer_table` varchar(255) DEFAULT NULL COMMENT '写入表名',
  `writer_databaseinfo_id` int(11) NOT NULL COMMENT '写入数据库',
  `writer_preSql` longtext COLLATE utf8_bin NOT NULL COMMENT '写入数据前执行的SQL语句',
  `writer_postSql` longtext COLLATE utf8_bin NOT NULL COMMENT '写入数据后执行的SQL语句',
  `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
  `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
  PRIMARY KEY (`id`),
  UNIQUE KEY `datax_job_name_uniq` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='datax数据同步任务';
/*
 * Columns to write for each datax job
 */
drop table if exists `datax_job_writer_column`;
CREATE TABLE `datax_job_writer_column` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `name` varchar(255) DEFAULT NULL COMMENT '列名',
  `datax_job_id` int(11) NOT NULL COMMENT '数据同步任务ID',
  `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
  `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='写入表的列信息';
/*
 * Datax job execution instances (one row per run; snapshot of the job config)
 */
drop table if exists `datax_job_instance`;
CREATE TABLE `datax_job_instance` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `instance_id` bigint(20) NOT NULL COMMENT '任务实例ID',
  `name` varchar(255) DEFAULT NULL COMMENT '名称',
  `description` varchar(255) DEFAULT NULL COMMENT '描述',
  `querySql` longtext COLLATE utf8_bin NOT NULL COMMENT '查询SQL语句',
  `reader_databaseinfo_host` varchar(255) NOT NULL COMMENT '读取数据库IP',
  `reader_databaseinfo_description` varchar(255) NOT NULL COMMENT '读取数据库描述',
  `writer_table` varchar(255) DEFAULT NULL COMMENT '写入表名',
  `writer_databaseinfo_host` varchar(255) NOT NULL COMMENT '写入数据库IP',
  `writer_databaseinfo_description` varchar(255) NOT NULL COMMENT '写入数据库描述',
  `writer_preSql` longtext COLLATE utf8_bin NOT NULL COMMENT '写入数据前执行的SQL语句',
  `writer_postSql` longtext COLLATE utf8_bin NOT NULL COMMENT '写入数据后执行的SQL语句',
  `trigger_mode` int(2) DEFAULT '1' COMMENT '触发模式 1 自动 2 手动(默认自动)',
  `status` int(2) DEFAULT '0' COMMENT '状态 0 正在执行 1 执行完成',
  `result` int(2) DEFAULT '2' COMMENT '执行结果 0 成功 1 失败 2 未知',
  `start_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '开始时间',
  `end_time` datetime DEFAULT NULL COMMENT '结束时间',
  PRIMARY KEY (`id`),
  UNIQUE KEY `datax_job_instance_id_uniq` (`instance_id`)
) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='datax数据同步任务实例';
/*
 * Batch jobs (a batch groups several sub-jobs)
 */
drop table if exists `batch_job`;
CREATE TABLE `batch_job` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `name` varchar(255) DEFAULT NULL COMMENT '名称',
  `description` varchar(255) DEFAULT NULL COMMENT '描述',
  `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
  `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
  PRIMARY KEY (`id`),
  UNIQUE KEY `batch_job_name_uniq` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='批处理作业';
/*
 * Batch job details (links a batch to its sub-jobs)
 */
drop table if exists `batch_job_details`;
CREATE TABLE `batch_job_details` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `batch_job_id` int(11) NOT NULL COMMENT '批处理作业ID',
  `subjob_id` int(11) NOT NULL COMMENT '子作业ID',
  `type` int(2) NOT NUll COMMENT '类型 1 数据同步 2 SQL脚本 3 备份。 主要用于后期扩展', -- NOTE(review): odd 'NUll' casing (valid — SQL keywords are case-insensitive)
  `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
  `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='批处理作业详情';
/*
 * Batch job execution instances
 */
drop table if exists `batch_job_instance`;
CREATE TABLE `batch_job_instance` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `instance_id` bigint(20) NOT NULL COMMENT '实例ID',
  `name` varchar(255) DEFAULT NULL COMMENT '名称',
  `description` varchar(255) DEFAULT NULL COMMENT '描述',
  `trigger_mode` int(2) DEFAULT '1' COMMENT '触发模式 1 自动 2 手动(默认自动)',
  `status` int(2) DEFAULT '0' COMMENT '状态 0 正在执行 1 执行完成',
  `result` int(2) DEFAULT '2' COMMENT '执行结果 0 成功 1 失败 2 未知',
  `start_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '开始时间',
  `end_time` datetime DEFAULT NULL COMMENT '结束时间',
  PRIMARY KEY (`id`),
  UNIQUE KEY `batch_job_instance_id_uniq` (`instance_id`)
) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='批处理作业执行实例';
/*
 * Batch job execution instance details (links a run to its sub-job runs)
 */
drop table if exists `batch_job_instance_details`;
CREATE TABLE `batch_job_instance_details` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `instance_id` bigint(20) NOT NULL COMMENT '实例ID',
  `subjob_instance_id` bigint(20) NOT NULL COMMENT '子作业实例ID',
  `type` int(2) NOT NUll COMMENT '类型 1 数据同步 2 SQL脚本 3 备份。 主要用于后期扩展',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=28 DEFAULT CHARSET=utf8 COMMENT='批处理作业执行实例详情';
================================================
FILE: datax_web/__init__.py
================================================
================================================
FILE: datax_web/admin.py
================================================
from django.contrib import admin
# Register your models here.
================================================
FILE: datax_web/apps.py
================================================
from __future__ import unicode_literals
from django.apps import AppConfig
class DataxWebConfig(AppConfig):
name = 'datax_web'
================================================
FILE: datax_web/conf/__init__.py
================================================
================================================
FILE: datax_web/conf/config.py
================================================
#!/usr/bin/python env
# -*- coding: UTF-8 -*-
# NOTE(review): the shebang above is malformed; it should read
# `#!/usr/bin/env python`. Left unchanged because the OS reads it if the
# file is ever executed directly.
# Configuration constants for the datax_web app: filesystem paths,
# enum-like value lists, and raw SQL templates.
import sys
import os
reload(sys)
sys.setdefaultencoding("utf-8")  # Python 2 only: force UTF-8 default codec
# Resolve the app directory and the FirstBlood project root from this file.
_parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
_FirstBlood_dir = os.path.dirname(_parentdir)
sys.path.append(_parentdir)
sys.path.append(_FirstBlood_dir)
_log_file_dir = _FirstBlood_dir + '/log/'
img_dir = _FirstBlood_dir + '/static/img/'
# DataX installation path, and where generated job JSON / logs are written.
datax_dir = _FirstBlood_dir + '/datax'
datax_job_dir = '/tmp'
datax_log_dir = datax_dir + '/web_log'
# Log file for the scheduled data-request tasks.
scheduled_tasks_log_file = _log_file_dir + u'scheduled_tasks.log'
# Primary key field name used when logging.
primary_key = 'task_instance_id'
# Scheduled-task instance status: 1 = started, 2 = running, 3 = finished
status = [1, 2, 3]
# Response type: small = excel (<200k rows), large = batched, html = table
RESPONSE_TYPE = dict(small=1, large=2, html=3)
# Operation type accepted from the UI.
OPERATION_TYPE = ['add', 'mod']
# Trigger mode.
TRIGGER_MODE = [1, 2]  # 1 = automatic, 2 = manual
# Database type.
DATABASE_TYPE = 'mysql'
# SQL templates below are filled in with Python `%` string interpolation by
# the callers in datax_web/views.py.
# NOTE(review): values are interpolated directly into the SQL text rather
# than passed as query parameters, so these are injectable if any caller
# passes unescaped user input — confirm callers sanitise via
# DataTransform.special_characters_mysql.
# Query a job by name.
query_datax_job_by_name_sql = "SELECT * FROM FirstBlood.datax_job dj WHERE dj.`name` = '%s';"
# Query a job by name excluding a given id (used when renaming on update).
query_datax_job_sql2 = "SELECT * FROM FirstBlood.datax_job dj WHERE dj.`name` = '%s' and dj.id!=%s;"
# Query a job by id, including its writer columns joined with '\n'.
query_datax_job_by_id_sql = """
SELECT
dj.*,
( SELECT GROUP_CONCAT( djwc.`name` SEPARATOR '\n' ) FROM FirstBlood.datax_job_writer_column djwc WHERE djwc.datax_job_id = dj.id ) writer_column_id
FROM
FirstBlood.datax_job dj
WHERE
dj.id = %s;
"""
# Query the columns a sync job writes, by job id.
query_datax_job_writer_column_by_id_sql = """
SELECT
*
FROM
FirstBlood.datax_job_writer_column
WHERE
datax_job_id = %s
order by id;
"""
# Query all jobs (reader/writer database info joined in for display).
query_datax_job_sql = """
SELECT
dj.id,
dj.`name`,
dj.description,
dj.querySql,
concat(rdbi.description,' ', rdbi.`host`) reader_databaseinfo_id,
dj.writer_table,
concat(wdbi.description,' ', wdbi.`host`) writer_databaseinfo_id,
dj.create_time,
dj.modify_time
FROM
FirstBlood.datax_job dj
LEFT JOIN FirstBlood.databaseinfo rdbi on dj.reader_databaseinfo_id=rdbi.id
LEFT JOIN FirstBlood.databaseinfo wdbi on dj.writer_databaseinfo_id=wdbi.id
"""
insert_datax_job_sql = """
INSERT INTO FirstBlood.datax_job (
`name`,
`description`,
`querySql`,
`reader_databaseinfo_id`,
`writer_table`,
`writer_databaseinfo_id`,
`writer_preSql`,
`writer_postSql`
) VALUES
('%s','%s','%s',%s,'%s',%s,'%s','%s')
"""
# Values tuples are appended by the caller: ('col', job_id), ...
insert_datax_job_writer_column_sql = """
INSERT INTO FirstBlood.datax_job_writer_column (`name`, `datax_job_id`) VALUES
"""
update_datax_job_by_id_sql = """
update FirstBlood.datax_job set
`name` = '%s',
`description` = '%s',
`querySql` = '%s',
`reader_databaseinfo_id` = %s,
`writer_table` = '%s',
`writer_databaseinfo_id` = %s,
`writer_preSql` = '%s',
`writer_postSql` = '%s'
where
id = %s
"""
delete_datax_job_writer_column_by_id_sql = """
delete from FirstBlood.datax_job_writer_column where datax_job_id =%s;
"""
insert_datax_job_instance_sql = """
insert into FirstBlood.datax_job_instance (
`instance_id`,
`name`,
`description`,
`querySql`,
`reader_databaseinfo_host`,
`reader_databaseinfo_description`,
`writer_table`,
`writer_databaseinfo_host`,
`writer_databaseinfo_description`,
`trigger_mode`,
`writer_preSql`,
`writer_postSql`
)
values (%s, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', %s, '%s', '%s');
"""
update_datax_job_instance_by_instance_id_sql = """
update FirstBlood.datax_job_instance set `status`=%s, `result`=%s, `end_time`='%s' where instance_id=%s;
"""
select_datax_job_instance_sql = """
select
dji.id,
dji.instance_id,
dji.`name`,
dji.description,
dji.querySql,
concat(dji.reader_databaseinfo_description,' ',dji.reader_databaseinfo_host) 'reader_databaseinfo_host',
dji.writer_table,
concat(dji.writer_databaseinfo_description,' ',dji.writer_databaseinfo_host) 'writer_databaseinfo_host',
dji.writer_preSql,
dji.writer_postSql,
dji.trigger_mode,
dji.`status`,
dji.result,
dji.start_time,
dji.end_time
FROM
FirstBlood.datax_job_instance dji
"""
count_datax_job_instance_sql = """
SELECT
count(1) count
FROM
FirstBlood.datax_job_instance dji
"""
select_datax_job_instance_by_id_sql = select_datax_job_instance_sql + "\n where dji.id = %s"
datax_job_template = """
{
"job": {
"content": [
{
"reader": {
"name": "mysqlreader",
"parameter": {
"connection": [
{
"jdbcUrl": ["%s"],
"querySql": ["%s"],
}
],
"password": "%s",
"username": "%s",
"where": ""
}
},
"writer": {
"name": "mysqlwriter",
"parameter": {
"column": %s,
"connection": [
{
"jdbcUrl": "%s",
"table": ["%s"]
}
],
"password": "%s",
"preSql": [%s],
"postSql": [%s],
"session": [],
"username": "%s",
"writeMode": "insert"
}
}
}
],
"setting": {
"speed": {
"record": "1000"
}
}
}
}
"""
================================================
FILE: datax_web/models.py
================================================
from __future__ import unicode_literals
from django.db import models
# Create your models here.
================================================
FILE: datax_web/tests.py
================================================
# -*- coding: UTF-8 -*-
from django.test import TestCase
# Create your tests here.
#!/usr/bin/python
# Copyright 2013 Joe Walnes and the websocketd team.
# All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import time
from sys import stdin, stdout
def follow(thefile):
    """Tail *thefile* forever, yielding each new line as it is appended.

    Seeks to the current end of file first, so only lines written after
    the generator starts are produced. Polls once per second while no
    new data is available.
    """
    thefile.seek(0, 2)  # jump to EOF: only follow newly appended lines
    while True:
        current = thefile.readline()
        if current:
            yield current
        else:
            # Nothing new yet; pause briefly before polling again.
            time.sleep(1)
# Standalone websocketd-style script (Python 2): reads a job-instance id
# from STDIN, then tails the corresponding DataX log file to STDOUT.
# Blocks forever in the `follow` loop by design.
log_dir = '/opt/django/FirstBlood/datax/web_log/%s.json.log'
file_id = stdin.readline().strip()
logfile = log_dir % file_id
print('Hello fuck %s!' % logfile)
stdout.flush()  # Remember to flush
# For each line FOO received on STDIN, respond with "Hello FOO!".
logfile_open = open(logfile, 'r')  # NOTE(review): never closed; fine for a run-forever script
loglines = follow(logfile_open)
print 'ok'
for line in loglines:
    print line,
    stdout.flush()  # Remember to flush
================================================
FILE: datax_web/urls.py
================================================
# -*- coding: UTF-8 -*-
from django.conf.urls import url
import views
# URL routes for the datax_web app.
# Fix: the `^get_database/$` route was registered twice; Django always
# matches the first pattern, so the duplicate was dead code and is removed.
urlpatterns = [
    # Examples:
    # url(r'^$', 'YinguOnline.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^index/$', views.index),  # data sync index page
    url(r'^add_job/$', views.add_job),  # add-job page
    url(r'^monitor_job/$', views.monitor_job),  # job execution instances page
    url(r'^monitor_job_detail/(?P<id>\d+)/$', views.monitor_job_detail),  # run detail page
    url(r'^update_job/(?P<id>\d+)/$', views.update_job),  # update-job page
    url(r'^get_database/$', views.get_database),  # database info (JSON)
    url(r'^add_job_data/$', views.add_job_data),  # create or update a job
    url(r'^get_job_data/$', views.get_job_data),  # list jobs (JSON)
    url(r'^get_update_job_data/$', views.get_update_job_data),  # job data for the edit form
    url(r'^get_datax_job_instance/$', views.get_datax_job_instance),  # paged job instances
    url(r'^get_datax_job_instance_by_id/$', views.get_datax_job_instance_by_id),  # single instance
    url(r'^run_job/$', views.run_job),  # run a job
]
================================================
FILE: datax_web/views.py
================================================
# -*- coding: UTF-8 -*-
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.shortcuts import render
from django.http import HttpResponse
from functools import wraps
from controller.core.public import (Currency, DatetimeHelp)
from controller.core import query_sql
from controller.public import dataconn
from celery import shared_task
from conf import config
import commands
import logging
import sys
import json
reload(sys)
sys.setdefaultencoding("utf-8")
logger = logging.getLogger('datax_web')
_SUCCESS = dict(status=0, msg=u'检测成功')
_str = (str, unicode)
def verification(check_class):
    """
    Decorator factory that validates user-submitted data before a view runs.

    ``check_class`` is instantiated with the request and must expose a
    ``total_check()`` method returning a dict with a ``status`` key:
    non-zero status short-circuits the view and returns the check result
    as a JSON body instead.
    """
    def decorator(view_func):
        @wraps(view_func)
        def _wrapped_view(request, *args, **kwargs):
            resp = HttpResponse()
            outcome = check_class(request).total_check()
            if outcome['status']:
                # Validation failed: report the error instead of running the view.
                resp.write(json.dumps(outcome))
                return resp
            return view_func(request, *args, **kwargs)
        return _wrapped_view
    return decorator
class JobData(object):
    """
    Holds and processes the data submitted when creating or updating a
    DataX sync job, and builds the SQL statements used to persist it.

    Expected ``data`` format (example):
        {u'_id': u'28',
         u'description': u'...',
         u'name': u'test',
         u'operation_type': u'mod',
         u'querySql': u'select * from `admin-service`.as_user_info limit 10;',
         u'reader_databaseinfo_id': u'1',
         u'trigger_mode': 2,
         u'writer_column_id': [u'*'],
         u'writer_databaseinfo_id': u'22',
         u'writer_postSql': u'',
         u'writer_preSql': u'truncate table `admin-service`.as_user_info;',
         u'writer_table': u'`admin-service`.as_user_info'}

    ``_id`` is the datax_job id (falsy/0 when creating a new job).
    """
    def __init__(self, data):
        # `_id` is the datax_job id.
        self.id = data.get('_id', 0)
        self.name = data.get('name', '')
        self.description = data.get('description', '')
        self.querySql = data.get('querySql', '')
        self.reader_databaseinfo_id = data.get('reader_databaseinfo_id', '')
        self.writer_table = data.get('writer_table', '')
        self.writer_column = data.get('writer_column_id', [])
        self.writer_databaseinfo_id = data.get('writer_databaseinfo_id', '')
        self.writer_preSql = data.get('writer_preSql', '')
        self.writer_postSql = data.get('writer_postSql', '')
        self.operation_type = data.get('operation_type', '')
        self.trigger_mode = data.get('trigger_mode', '')
        self.dtconn = dataconn.DatabaseConnection(logger)
        self.dtsf = dataconn.DataTransform()
        self.dh = DatetimeHelp()
        # Captured once at construction so datax_job_instance_id is stable
        # for the lifetime of this object.
        self.__timestamp1 = self.dh.timestamp1
        # Reader/writer database info rows; only fetched when an id was given.
        self.reader_dtbs = self._get_reader_dtbs() if self.reader_databaseinfo_id else None
        self.writer_dtbs = self._get_writer_dtbs() if self.writer_databaseinfo_id else None

    @property
    def timestamp1(self):
        # Read-only view of the construction-time timestamp.
        return self.__timestamp1

    def _get_reader_dtbs(self):
        # Fetch the databaseinfo row for the reader (source) database.
        return self.dtconn.get_datainfo_by_id(self.reader_databaseinfo_id)

    def _get_writer_dtbs(self):
        # Fetch the databaseinfo row for the writer (target) database.
        return self.dtconn.get_datainfo_by_id(self.writer_databaseinfo_id)

    def get_insert_datax_job_sql(self):
        # Build the INSERT for a new row in datax_job.
        # Escape user-provided SQL fragments before interpolating them.
        querySql = self.dtsf.special_characters_mysql(self.querySql)
        writer_preSql = self.dtsf.special_characters_mysql(self.writer_preSql)
        writer_postSql = self.dtsf.special_characters_mysql(self.writer_postSql)
        return config.insert_datax_job_sql % (
            self.name, self.description, querySql, self.reader_databaseinfo_id,
            self.writer_table, self.writer_databaseinfo_id, writer_preSql, writer_postSql
        )

    def get_update_datax_job_by_id_sql(self):
        # Build the UPDATE for an existing datax_job row.
        querySql = self.dtsf.special_characters_mysql(self.querySql)
        writer_preSql = self.dtsf.special_characters_mysql(self.writer_preSql)
        writer_postSql = self.dtsf.special_characters_mysql(self.writer_postSql)
        return config.update_datax_job_by_id_sql % (
            self.name, self.description, querySql, self.reader_databaseinfo_id,
            self.writer_table, self.writer_databaseinfo_id, writer_preSql, writer_postSql,
            self.id
        )

    def get_insert_datax_job_writer_column_sql(self):
        # Build the writer-column INSERT, e.g.
        # insert into ... values ('user_id', 1), ('card_name', 1)
        # On create, self.id is falsy so the freshly inserted job id is used.
        datax_job_id = self.id or self.dtconn.ygol.insert_id
        values_list = ["('%s', %s)" % (column, datax_job_id) for column in self.writer_column]
        return config.insert_datax_job_writer_column_sql + ','.join(values_list)

    def get_delete_datax_job_writer_column_by_id_sql(self):
        # Build the DELETE removing all writer columns for this job id.
        return config.delete_datax_job_writer_column_by_id_sql % self.id

    def get_insert_datax_job_instance_sql(self):
        # Build the INSERT that records a new run in datax_job_instance.
        querySql = self.dtsf.special_characters_mysql(self.querySql)
        writer_preSql = self.dtsf.special_characters_mysql(self.writer_preSql)
        writer_postSql = self.dtsf.special_characters_mysql(self.writer_postSql)
        return config.insert_datax_job_instance_sql % (
            self.datax_job_instance_id,
            self.name,
            self.description,
            querySql,
            self.reader_dtbs['host'],
            self.reader_dtbs['description'],
            self.writer_table,
            self.writer_dtbs['host'],
            self.writer_dtbs['description'],
            self.trigger_mode,
            writer_preSql,
            writer_postSql
        )

    def get_update_datax_job_instance_by_instance_id_sql(self, result):
        # Build the UPDATE marking the run finished (status=1) with `result`.
        return config.update_datax_job_instance_by_instance_id_sql % (
            1, result, self.dh.now_time, self.datax_job_instance_id
        )

    @property
    def datax_job_instance_id(self):
        # Run identifier: job id concatenated with the construction timestamp.
        return '%s%s' % (self.id, self.__timestamp1)

    def start_log(self):
        # Record the start of a run in datax_job_instance.
        sql = self.get_insert_datax_job_instance_sql()
        self.dtconn.ygol.insert(sql)
        if self.dtconn.ygol.status:
            logger.error(u'记录任务日志到datax_job_instance 失败 - SQL: %s - msg: %s' %
                         (sql, self.dtconn.ygol.msg))

    def record_result_log(self, result):
        # Record the run's final result in datax_job_instance.
        sql = self.get_update_datax_job_instance_by_instance_id_sql(result)
        self.dtconn.ygol.update(sql)
        if self.dtconn.ygol.status:
            logger.error(u'记录任务执行结果 datax_job_instance 失败 - SQL: %s - msg: %s' %
                         (sql, self.dtconn.ygol.msg))

    def create(self):
        # Create a new job: insert the datax_job row and its writer columns
        # in one transaction. Returns a {'status', 'msg'} dict.
        result = _SUCCESS.copy()
        sql1 = self.get_insert_datax_job_sql()
        self.dtconn.ygol.transaction_start()
        self.dtconn.ygol.transaction_execute(sql1)
        if self.dtconn.ygol.status:
            msg = u'datax_job表里创建新的任务,SQL:%s 插入数据失败。 - Msg: %s' % \
                  (sql1, self.dtconn.ygol.msg)
            logger.error(msg)
            result = dict(status=500, msg=msg)
        else:
            sql2 = self.get_insert_datax_job_writer_column_sql()
            self.dtconn.ygol.transaction_execute(sql2)
            if self.dtconn.ygol.status:
                msg = u'datax_job_writer_column表里创建新的列,SQL:%s 插入数据失败。 - Msg: %s' % \
                      (sql2, self.dtconn.ygol.msg)
                logger.error(msg)
                result = dict(status=500, msg=msg)
        # NOTE(review): commit runs even on error; presumably the helper
        # rolls back when status is set — confirm in mysql_helper.
        self.dtconn.ygol.transaction_commit_and_close()
        return result

    def update(self):
        # Update an existing job: rewrite the datax_job row, then replace
        # its writer columns (delete + re-insert) in one transaction.
        result = _SUCCESS.copy()
        sql1 = self.get_update_datax_job_by_id_sql()
        self.dtconn.ygol.transaction_start()
        self.dtconn.ygol.transaction_execute(sql1)
        if self.dtconn.ygol.status:
            msg = u'datax_job表,SQL:%s 更新数据失败。 - Msg: %s' % \
                  (sql1, self.dtconn.ygol.msg)
            logger.error(msg)
            result = dict(status=500, msg=msg)
        else:
            sql2 = self.get_delete_datax_job_writer_column_by_id_sql()
            sql3 = self.get_insert_datax_job_writer_column_sql()
            self.dtconn.ygol.transaction_execute(sql2)
            self.dtconn.ygol.transaction_execute(sql3)
            if self.dtconn.ygol.status:
                msg = u'datax_job_writer_column表里更新列 - SQL2:%s - SQL3: %s -' \
                      u' 更新数据失败。 - Msg: %s' % \
                      (sql2, sql3, self.dtconn.ygol.msg)
                logger.error(msg)
                result = dict(status=500, msg=msg)
        self.dtconn.ygol.transaction_commit_and_close()
        return result

    def get_job_data(self):
        # Fetch all jobs; returns [] on query failure.
        source_data = self.dtconn.ygol.getall(config.query_datax_job_sql)
        if self.dtconn.ygol.status:
            logger.error(u'获取datax_job信息失败 %s' % self.dtconn.ygol.msg)
            return []
        else:
            return [self.dtsf.get_row_by_dict_to_user(dt) for dt in source_data]

    def get_job_data_by_id(self, _id):
        """
        Fetch one job row by id.
        :param _id: datax_job_id
        :return: datax_job row (formatted for the UI), or None on failure
        """
        source_data = self.dtconn.ygol.getsingle(config.query_datax_job_by_id_sql % _id)
        if self.dtconn.ygol.status:
            logger.error(u'根据ID %s 获取任务数据信息失败 %s' % (self.id, self.dtconn.ygol.msg))
            return None
        else:
            return self.dtsf.get_row_by_dict_to_user(source_data)

    def get_datax_job_writer_column_by_id(self, _id):
        """
        Fetch the columns a job writes, by job id.
        :param _id: datax_job_id
        :return: list of datax_job_writer_column rows, or None on failure
        """
        source_data = self.dtconn.ygol.getall(config.query_datax_job_writer_column_by_id_sql % _id)
        if self.dtconn.ygol.status:
            logger.error(u'根据ID %s 获取任务数据信息失败 %s' % (self.id, self.dtconn.ygol.msg))
            return None
        else:
            return [self.dtsf.get_row_by_dict_to_user(dt) for dt in source_data]

    @staticmethod
    def create_file(file, content):
        # Write `content` to `file`, truncating any existing content.
        with open(file, 'w') as f:
            f.write(content)
class CheckJob(object):
    """
    Validates the data submitted when adding or updating a DataX job.

    Each ``check_*`` method inspects one field and, on failure, stores an
    error dict in ``self.result``. ``total_check`` runs the checks in a
    fixed order and stops at the first failure.

    Result format: ``{'status': 1, 'msg': u'...'}``; status 0 means OK.
    """
    _SUCCESS = _SUCCESS.copy()
    _OPERATION_TYPE_ERROR1 = dict(status=1, msg=u'操作类型不能为空')
    _OPERATION_TYPE_ERROR2 = dict(status=2, msg=u'操作类型错误')
    _DESCRIPTION_ERROR1 = dict(status=3, msg=u'任务描述不能为空')
    _NAME_ERROR1 = dict(status=4, msg=u'任务名称不能为空')
    _NAME_ERROR2 = dict(status=5, msg=u'任务名称已存在')
    _QUERY_SQL_ERROR1 = dict(status=6, msg=u'查询SQL语句不能为空')
    _READER_DATABASEINFO_ID_ERROR1 = dict(status=7, msg=u'读取数据库不能为空,必须为数字')
    _READER_DATABASEINFO_ID_ERROR2 = dict(status=8, msg=u'读取数据库ID不存在')
    _WRITER_TABLE_ERROR1 = dict(status=10, msg=u'写入表不能为空')
    _WRITER_COLUMN_ERROR1 = dict(status=11, msg=u'写入列不能为空')
    _WRITER_DATABASEINFO_ID_ERROR1 = dict(status=12, msg=u'写入数据库不能为空')
    _WRITER_DATABASEINFO_ID_ERROR2 = dict(status=13, msg=u'写入数据库ID不存在')
    _DATAX_JOB_ID_ERROR1 = dict(status=1, msg=u'datax_job_id 不能为空')
    _DATAX_JOB_ID_ERROR2 = dict(status=2, msg=u'datax_job_id 不存在')
    _TRIGGER_MODE_ERROR1 = dict(status=2, msg=u'触发模式 不存在')
    _TRIGGER_MODE_ERROR2 = dict(status=2, msg=u'触发模式值错误')

    def __init__(self, request):
        """
        Parse the submitted JSON payload from the request and prepare the
        database connection and JobData wrapper used by the checks.
        """
        cur = Currency(request)
        data = cur.rq_post_json('data')
        self.dtconn = dataconn.DatabaseConnection(logger)
        self.jd = JobData(data)
        self.error_msg = []
        self.result = self._SUCCESS

    def check_operation_type(self):
        # The operation type must be present and one of OPERATION_TYPE.
        operation_type = self.jd.operation_type
        if not operation_type:
            self.result = self._OPERATION_TYPE_ERROR1
        else:
            if operation_type not in config.OPERATION_TYPE:
                self.result = self._OPERATION_TYPE_ERROR2

    def check_name_by_operation_type(self):
        # Validate the job name according to the operation: on 'add' the
        # name must be globally unique; on 'mod' unique among other jobs.
        name = self.jd.name
        if self.jd.operation_type == config.OPERATION_TYPE[0]:
            sql = config.query_datax_job_by_name_sql % name
            self.check_name(name, sql)
        if self.jd.operation_type == config.OPERATION_TYPE[1]:
            sql = config.query_datax_job_sql2 % (name, self.jd.id)
            self.check_name(name, sql)

    def check_name(self, name, sql):
        # Shared name check: non-empty and not already taken per `sql`.
        if name:
            data = self.dtconn.ygol.getsingle(sql)
            if self.dtconn.ygol.status:
                _msg = u'检测任务名称时数据库错误。 - Msg: %s' % self.dtconn.ygol.msg
                logger.error(_msg)
                self.result = dict(status=500, msg=_msg)
            else:
                if data:
                    self.result = self._NAME_ERROR2
        else:
            self.result = self._NAME_ERROR1

    def check_description(self):
        # The job description must not be empty.
        description = self.jd.description
        if not description:
            self.result = self._DESCRIPTION_ERROR1

    def check_querySql(self):
        # The reader query SQL must not be empty.
        # BUG FIX: previously reported _DESCRIPTION_ERROR1 ('任务描述不能为空')
        # here; the dedicated (and otherwise unused) _QUERY_SQL_ERROR1 is
        # clearly the intended error.
        querySql = self.jd.querySql
        if not querySql:
            self.result = self._QUERY_SQL_ERROR1

    def check_reader_databaseinfo_id(self):
        # The reader databaseinfo id must be a digit referring to a real row.
        kwargs = {
            '_id': self.jd.reader_databaseinfo_id,
            'operation_type': u'读取',
            'ERROR1': self._READER_DATABASEINFO_ID_ERROR1,
            'ERROR2': self._READER_DATABASEINFO_ID_ERROR2,
        }
        self.check_databaseinfo_id(**kwargs)

    def check_writer_table(self):
        # The target table must not be empty.
        writer_table = self.jd.writer_table
        if not writer_table:
            self.result = self._WRITER_TABLE_ERROR1

    def check_writer_column(self):
        # At least one target column must be given.
        writer_column = self.jd.writer_column
        if not writer_column:
            self.result = self._WRITER_COLUMN_ERROR1

    def check_writer_databaseinfo_id(self):
        # The writer databaseinfo id must be a digit referring to a real row.
        kwargs = {
            '_id': self.jd.writer_databaseinfo_id,
            'operation_type': u'写入',
            'ERROR1': self._WRITER_DATABASEINFO_ID_ERROR1,
            'ERROR2': self._WRITER_DATABASEINFO_ID_ERROR2,
        }
        self.check_databaseinfo_id(**kwargs)

    def check_databaseinfo_id(self, _id, operation_type, ERROR1, ERROR2):
        # Shared databaseinfo check: `_id` must be a non-empty digit string
        # and must exist in the databaseinfo table.
        if _id and _id.isdigit():
            data = self.dtconn.get_datainfo_by_id(int(_id))
            if self.dtconn.ygol.status:
                _msg = u'检测%s数据库错误。 - Msg: %s' % (operation_type, self.dtconn.ygol.msg)
                logger.error(_msg)
                self.result = dict(status=500, msg=_msg)
            else:
                if not data:
                    self.result = ERROR2
        else:
            self.result = ERROR1

    def check_datax_job_id(self):
        # On 'mod', the submitted job id must be a digit string referring
        # to an existing datax_job row.
        if self.jd.operation_type == config.OPERATION_TYPE[1]:
            _id = self.jd.id
            if isinstance(_id, _str) and _id and _id.isdigit():
                sql = config.query_datax_job_by_id_sql % _id
                data = self.dtconn.ygol.getsingle(sql)
                if self.dtconn.ygol.status:
                    _msg = u'检测datax_job_id 错误 - SQL: %s。 - Msg: %s' % (sql, self.dtconn.ygol.msg)
                    logger.error(_msg)
                    self.result = dict(status=500, msg=_msg)
                else:
                    if not data:
                        self.result = self._DATAX_JOB_ID_ERROR2
            else:
                self.result = self._DATAX_JOB_ID_ERROR1

    def check_trigger_mode(self):
        # On 'mod', the trigger mode must be present and one of TRIGGER_MODE.
        trigger_mode = self.jd.trigger_mode
        if self.jd.operation_type == config.OPERATION_TYPE[1]:
            if not trigger_mode:
                self.result = self._TRIGGER_MODE_ERROR1
            else:
                if trigger_mode not in config.TRIGGER_MODE:
                    self.result = self._TRIGGER_MODE_ERROR2

    def total_check(self):
        # Run all checks in order; stop at the first one that sets an error.
        check_func = ['check_operation_type', 'check_datax_job_id',
                      'check_description', 'check_name_by_operation_type',
                      'check_querySql', 'check_reader_databaseinfo_id',
                      'check_writer_table', 'check_writer_column',
                      'check_writer_databaseinfo_id', 'check_trigger_mode'
                      ]
        for func_name in check_func:
            getattr(self, func_name)()
            if self.result['status']:
                break
        return self.result
class Datax(object):
    """
    Builds the DataX job JSON file and the shell command that runs it.
    """
    def __init__(self, data):
        self.jd = JobData(data)

    def get_reader(self):
        # Reader (source) half of the DataX job config.
        return dict(
            jdbcUrl = 'jdbc:mysql://%s/%s' % (self.jd.reader_dtbs['host'], self.jd.reader_dtbs['db']),
            querySql = self.jd.querySql,
            password = self.jd.reader_dtbs['passwd'],
            username = self.jd.reader_dtbs['user']
        )

    def get_writer(self):
        # Writer (target) half of the DataX job config.
        # NOTE(review): splitting pre/postSql on ';' leaves a trailing empty
        # element when the SQL ends with ';' (e.g. '"stmt",""') — confirm
        # DataX tolerates the empty statement.
        return dict(
            column = "[%s]" % ','.join(['"%s"' % str(c) for c in self.jd.writer_column]),
            jdbcUrl = 'jdbc:mysql://%s/%s' % (self.jd.writer_dtbs['host'], self.jd.writer_dtbs['db']),
            table = self.jd.writer_table,
            password = self.jd.writer_dtbs['passwd'],
            preSql = ','.join(map(lambda x:'"%s"' % x, self.jd.writer_preSql.split(';'))),
            postSql = ','.join(map(lambda x:'"%s"' % x, self.jd.writer_postSql.split(';'))),
            username = self.jd.writer_dtbs['user']
        )

    def get_job_json(self):
        # Render the full DataX job JSON from the config template.
        reader = self.get_reader()
        writer = self.get_writer()
        return config.datax_job_template % (
            reader['jdbcUrl'], reader['querySql'], reader['password'], reader['username'],
            writer['column'], writer['jdbcUrl'], writer['table'], writer['password'],
            writer['preSql'], writer['postSql'], writer['username']
        )

    @property
    def job_json_file_name(self):
        # JSON file name derived from the run id, e.g. '28<timestamp>.json'.
        return u'%s.json' % self.jd.datax_job_instance_id

    @property
    def job_json_file(self):
        # Absolute path of the generated job JSON file.
        return config.datax_job_dir + '/' + self.job_json_file_name

    @staticmethod
    def create_file(file, content):
        # Write `content` to `file`, truncating any existing content.
        # NOTE(review): duplicates JobData.create_file.
        with open(file, 'w') as f:
            f.write(content)

    @property
    def cmd(self):
        # Shell command that runs DataX on the job file, redirecting output
        # to the web log directory.
        # NOTE(review): the command is interpolated into a shell string;
        # paths/names derived from user input could allow shell injection —
        # confirm datax_job_instance_id is always numeric.
        return 'python %s/bin/datax.py %s > %s/%s.log' % \
               (config.datax_dir, self.job_json_file, config.datax_log_dir, self.job_json_file_name)
@shared_task(name='run')
def run(**data):
    """Celery task: generate the DataX job file, run it, and log the result.

    ``data`` is the submitted job payload (see JobData docstring).
    Result recorded: 0 = success, 1 = failure (non-zero exit status).
    """
    dx = Datax(data)
    # Write the job JSON, then record the run start in datax_job_instance.
    dx.jd.create_file(dx.job_json_file, dx.get_job_json())
    dx.jd.start_log()
    # commands.getstatusoutput runs via the shell (Python 2 stdlib).
    (status, output) = commands.getstatusoutput(dx.cmd)
    if status:
        logger.error("status:%s output:%s" % (status, output))
    result = 1 if status else 0
    dx.jd.record_result_log(result)
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def index(request):
    # Data-sync index page.
    return render(request, 'datax_web/index.html', locals())
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
@permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied)
def add_job(request):
    # Add-job form page.
    return render(request, 'datax_web/add_job.html', locals())
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def update_job(request, id):
    # Update-job form page; `id` is the datax_job id from the URL.
    return render(request, 'datax_web/update_job.html', locals())
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def monitor_job(request):
    # Job execution-instances (monitoring) page.
    return render(request, 'datax_web/monitor_job.html', locals())
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def monitor_job_detail(request, id):
    # Execution detail page for one run; `id` is the instance row id.
    return render(request, 'datax_web/monitor_job_detail.html', locals())
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_database(request):
    """Return the list of configured databases as JSON, passwords stripped.

    Fix: on a failed query the original logged the error but still tried to
    serialize the (possibly None/invalid) result, which could raise; now an
    empty list is returned instead.
    """
    def _data_processing(dt):
        # Strip the password before the row leaves the server.
        del dt['passwd']
        return dtsf.get_row_by_dict_to_user(dt)
    response = HttpResponse()
    dtconn = dataconn.DatabaseConnection(logger)
    data = dtconn.ygol.getall(dataconn.dtbsif_sql)
    dtsf = dataconn.DataTransform()
    if dtconn.ygol.status:
        logger.error(u'获取数据库信息失败 %s' % dtconn.ygol.msg)
        data = []  # don't serialize rows from a failed query
    response.write(json.dumps(map(_data_processing, data)))
    return response
@login_required
@verification(CheckJob)
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
@permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied)
def add_job_data(request):
    # Create or update a job; payload is pre-validated by @verification.
    response = HttpResponse()
    cur = Currency(request)
    data = cur.rq_post_json('data')
    jd = JobData(data)
    if jd.operation_type == config.OPERATION_TYPE[0]:
        result = jd.create()  # 'add'
    else:
        result = jd.update()  # 'mod'
    response.write(json.dumps(result))
    return response
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_job_data(request):
    # Return the full job list as JSON.
    response = HttpResponse()
    jd = JobData({})
    response.write(json.dumps(jd.get_job_data()))
    return response
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_update_job_data(request):
    # Return one job's data (for the edit form) as JSON.
    response = HttpResponse()
    cur = Currency(request)
    _id = cur.rq_post_json('_id')
    jd = JobData({})
    response.write(json.dumps(jd.get_job_data_by_id(_id)))
    return response
@login_required
@verification(CheckJob)
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
@permission_required('batch_job.editBatchJob', raise_exception=PermissionDenied)
def run_job(request):
    # Queue a job run on the celery worker and return success immediately.
    response = HttpResponse()
    cur = Currency(request)
    data = cur.rq_post_json('data')
    # run(**data)  # synchronous variant, kept for local debugging
    run.delay(**data)
    response.write(json.dumps(_SUCCESS))
    return response
class DataxJobInstanceSql(object):
    """
    Builds the parameters for a paged datax_job_instance query from the
    request's GET arguments (offset/limit plus per-field filters).
    """
    # Filterable fields of the dji table; `val` is filled from the request.
    # NOTE(review): this dict is a class attribute and _set_table mutates it
    # in place, so filter values are shared across instances (and across
    # concurrent requests) — confirm this is acceptable.
    _table_dji = {
        'name': {'data_type': 'str', 'val': ''},
        'description': {'data_type': 'str', 'val': ''},
        'reader_databaseinfo_host': {'data_type': 'str', 'val': ''},
        'writer_table': {'data_type': 'str', 'val': ''},
        'writer_databaseinfo_host': {'data_type': 'str', 'val': ''},
        'status': {'data_type': 'str', 'val': ''},
        'result': {'data_type': 'str', 'val': ''},
        'trigger_mode': {'data_type': 'str', 'val': ''},
    }
    # Default ordering: newest runs first.
    _order_by = [{'table': 'dji', 'field': 'start_time', 'rule': 'DESC'}]

    def __init__(self, request):
        self.cur = Currency(request)
        self.rq_get = self.cur.rq_get
        self._offset = int(self.rq_get('offset'))
        self._limit = int(self.rq_get('limit'))
        self._SQL = config.select_datax_job_instance_sql
        self._TOTAL_SQL = config.count_datax_job_instance_sql
        self._set_table(self._table_dji)

    def _set_table(self, table):
        # Copy each field's filter value from the request into the table spec.
        for field, attr in table.items():
            val = self.rq_get(field)
            attr['val'] = val
        return table

    @property
    def tables(self):
        # Table specs keyed by SQL alias, as expected by query_sql.Q_Sql.
        _tables = {'dji': self._table_dji}
        return _tables

    @property
    def cvtpara(self):
        # Pagination/ordering parameters for Q_Sql.
        # NOTE(review): 'order_rule' is given the same list as 'order_by';
        # presumably Q_Sql expects a rule structure here — confirm.
        _cvtpara = {
            'offset': self._offset,
            'limit': self._limit,
            'sql': self._SQL,
            'total_sql': self._TOTAL_SQL,
            'order_by': self._order_by,
            'order_rule': self._order_by
        }
        return _cvtpara
class PaginatorData(dataconn.DatabaseConnection, query_sql.Q_Data):
    """
    Paged data accessor: combines a database connection with the Q_Data
    query helper built from a Q_Sql spec.
    """
    def __init__(self, qs):
        # Initialise DatabaseConnection via the MRO, then Q_Data explicitly
        # (Q_Data presumably doesn't cooperate with super() — hence the
        # direct call).
        super(PaginatorData, self).__init__(logger)
        query_sql.Q_Data.__init__(self, qs)

    @property
    def rows(self):
        # Current page of rows.
        return self._get_rows(self.ygol)

    @property
    def total(self):
        # Total row count for the pager.
        return self._get_total(self.ygol)
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_datax_job_instance(request):
    # Paged query of job run instances; returns {'rows': [...], 'total': n}.
    dsql = DataxJobInstanceSql(request)
    cvtpara = dsql.cvtpara
    tables = dsql.tables
    qs = query_sql.Q_Sql(cvtpara, **tables)
    pd = PaginatorData(qs)
    response = HttpResponse()
    response.write(json.dumps({'rows': pd.rows, 'total': pd.total}))
    return response
@login_required
@permission_required('batch_job.viewBatchJob', raise_exception=PermissionDenied)
def get_datax_job_instance_by_id(request):
    # Return one job run instance (by row id) as JSON.
    cur = Currency(request)
    _id = cur.rq_post('_id')
    conn = dataconn.DatabaseConnection(logger)
    dtf = dataconn.DataTransform()
    # NOTE(review): `_id` is interpolated straight into the SQL text — SQL
    # injection risk unless rq_post guarantees a numeric value; confirm.
    sql = config.select_datax_job_instance_by_id_sql % _id
    source_data = conn.ygol.getsingle(sql)
    response = HttpResponse()
    response.write(json.dumps(dtf.get_row_by_dict_to_user(source_data)))
    return response
================================================
FILE: manage.py
================================================
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django management entry point.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "FirstBlood.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
================================================
FILE: requirements.txt
================================================
celery==3.1.25
celery-with-redis==3.0
Django==1.11.14
django-celery==3.2.1
pymssql==2.1.4
redis==2.10.6
supervisor==3.3.4
================================================
FILE: static/css/login.css
================================================
/* Login page styles. Fix: `.box-mov-d` contained a stray ");" after the
   background declaration, which invalidated the following `opacity:0`
   declaration in CSS parsers. */
body{background: url(/static/img/login_bg.jpg) 0 -200px no-repeat #000;
background-size: cover;margin:0; color:#FFF}
*{-webkit-box-sizing:border-box;box-sizing:border-box;}
a{-webkit-transition: All 1s ease;text-decoration: none;
color: #dfc684;}
input , button{border:0;background:none;color: #dfc684;}
#login-box{position:absolute; width:500px; left:30%; top:29%;}
.login-box-wh{height:53px;width:800px;margin-bottom:16px;}
.to-index{padding-left:16px;overflow:hidden; position:absolute;left:69px;}
.to-index-m{width:360px; height:53px; margin-top:-53px;margin-left:-16px;position:absolute;z-index:-1;background:yellow;transition:margin 0.8s;-moz-transition:margin 0.8s; -webkit-transition:margin 0.8s 0.3s; }
.login-box-wh:hover .to-index-m{margin-top:0;}
.login-box-wh:hover a{color:#f00}
.box-mov{width: 53px;height: 53px;float: left; display: inline-block;opacity: .72;filter: alpha(opacity=72);background: url(/static/img/icons.png) -212px 0 #000; margin-right: 16px;position: relative;overflow:hidden;z-index:0;}
.box-mov-s{width: 53px;height: 53px;z-index:1;position:absolute;background:#ff0;margin-left:-53px;
background:yellow;
transition:margin 0.8s;-moz-transition:margin 0.8s; -webkit-transition:margin 0.8s; }
.login-box-wh:hover .box-mov-s{margin:0;}
.box-mov-d{width: 53px;height: 53px;z-index:2;position:absolute;background:url(/static/img/icons.png) -212px -53px;opacity:0; }
.login-box-wh:hover .box-mov-d {opacity:1;}
.text-box{width:360px; height:53px;background:#000;opacity: .72; float:left;line-height:53px;}
#vdcode{width:270px;}
.text-box-login{}
.text-box-login-btn{width:360px; height:53px;background:#000;opacity: .72; float:left;line-height:53px;-webkit-box-sizing:border-box; position:absolute;left:69px;overflow:hidden; }
.login-btn-m{width:360px; height:53px;margin-top:-53px; position:absolute;z-index:1;background:yellow;transition:margin 0.8s;-moz-transition:margin 0.8s; -webkit-transition:margin 0.8s 0.3s;}
.text-box-login:hover .login-btn-m{margin-top:0}
.login-btn{width:100%;height:100%; cursor:pointer;font-size:16px;position:absolute;z-index:5;transition: 0.8s;-moz-transition: 0.8s; -webkit-transition: 0.8s 0.3s;}
.login-btn:hover{color:#f00;}
input{height:100%;width:100%;padding:8px 0 8px 8px;background:none;color:#fff;}
input:focus {
outline:none;
}
================================================
FILE: static/js/csrf.js
================================================
// Read a cookie value by name from document.cookie (standard Django
// CSRF helper snippet). Returns null when the cookie is absent.
function getCookie(name) {
    var cookieValue = null;
    if (document.cookie && document.cookie != '') {
        var cookies = document.cookie.split(';');
        for (var i = 0; i < cookies.length; i++) {
            var cookie = jQuery.trim(cookies[i]);
            // Does this cookie string begin with the name we want?
            if (cookie.substring(0, name.length + 1) == (name + '=')) {
                cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
                break;
            }
        }
    }
    return cookieValue;
}
var csrftoken = getCookie('csrftoken');
function csrfSafeMethod(method) {
    // Only state-changing HTTP verbs need the CSRF token header.
    var SAFE_METHODS = ['GET', 'HEAD', 'OPTIONS', 'TRACE'];
    return SAFE_METHODS.indexOf(method) !== -1;
}
// Attach Django's CSRF token to every same-origin jQuery AJAX request that
// uses a state-changing HTTP method; safe methods and cross-domain requests
// are left untouched.
$.ajaxSetup({
beforeSend: function(xhr, settings) {
if (!csrfSafeMethod(settings.type) && !this.crossDomain) {
xhr.setRequestHeader("X-CSRFToken", csrftoken);
}
}
});
================================================
FILE: static/plugins/bootstarp-table/bootstrap-table-zh-CN.js
================================================
/**
 * Bootstrap Table Chinese (zh-CN) translation.
 * Author: Zhixin Wen<wenzhixin2010@gmail.com>
 *
 * Builds the locale object once, registers it under the 'zh-CN' key, then
 * merges it into the plugin defaults so it takes effect immediately.
 */
(function ($) {
    'use strict';

    var zhCN = {
        formatLoadingMessage: function () {
            return '正在努力地加载数据中,请稍候……';
        },
        formatRecordsPerPage: function (pageNumber) {
            return '每页显示 ' + pageNumber + ' 条记录';
        },
        formatShowingRows: function (pageFrom, pageTo, totalRows) {
            return '显示第 ' + pageFrom + ' 到第 ' + pageTo + ' 条记录,总共 ' + totalRows + ' 条记录';
        },
        formatSearch: function () {
            return '搜索';
        },
        formatNoMatches: function () {
            return '没有找到匹配的记录';
        },
        formatPaginationSwitch: function () {
            return '隐藏/显示分页';
        },
        formatRefresh: function () {
            return '刷新';
        },
        formatToggle: function () {
            return '切换';
        },
        formatColumns: function () {
            return '列';
        },
        formatExport: function () {
            return '导出数据';
        },
        formatClearFilters: function () {
            return '清空过滤';
        }
    };

    $.fn.bootstrapTable.locales['zh-CN'] = zhCN;
    $.extend($.fn.bootstrapTable.defaults, zhCN);
})(jQuery);
================================================
FILE: static/plugins/datatables/css/jquery.dataTables.css
================================================
/* Third-party DataTables default stylesheet (static/plugins/datatables).
 * Keep in sync with upstream rather than hand-editing individual rules. */
/*
 * Table styles
 */
table.dataTable {
width: 100%;
margin: 0 auto;
clear: both;
border-collapse: separate;
border-spacing: 0;
/*
 * Header and footer styles
 */
/*
 * Body styles
 */
}
table.dataTable thead th,
table.dataTable tfoot th {
font-weight: bold;
}
table.dataTable thead th,
table.dataTable thead td {
padding: 10px 18px;
border-bottom: 1px solid #111;
}
table.dataTable thead th:active,
table.dataTable thead td:active {
outline: none;
}
table.dataTable tfoot th,
table.dataTable tfoot td {
padding: 10px 18px 6px 18px;
border-top: 1px solid #111;
}
/* Sort-state indicators: arrow sprites anchored to the right of each header
 * cell. The starred *cursor lines are legacy IE6/7 hacks. */
table.dataTable thead .sorting,
table.dataTable thead .sorting_asc,
table.dataTable thead .sorting_desc {
cursor: pointer;
*cursor: hand;
}
table.dataTable thead .sorting,
table.dataTable thead .sorting_asc,
table.dataTable thead .sorting_desc,
table.dataTable thead .sorting_asc_disabled,
table.dataTable thead .sorting_desc_disabled {
background-repeat: no-repeat;
background-position: center right;
}
table.dataTable thead .sorting {
background-image: url("../images/sort_both.png");
}
table.dataTable thead .sorting_asc {
background-image: url("../images/sort_asc.png");
}
table.dataTable thead .sorting_desc {
background-image: url("../images/sort_desc.png");
}
table.dataTable thead .sorting_asc_disabled {
background-image: url("../images/sort_asc_disabled.png");
}
table.dataTable thead .sorting_desc_disabled {
background-image: url("../images/sort_desc_disabled.png");
}
table.dataTable tbody tr {
background-color: #ffffff;
}
table.dataTable tbody tr.selected {
background-color: #B0BED9;
}
table.dataTable tbody th,
table.dataTable tbody td {
padding: 8px 10px;
}
/* Optional feature classes (row-border, cell-border, stripe, hover,
 * order-column); .display is shorthand that combines them. */
table.dataTable.row-border tbody th, table.dataTable.row-border tbody td, table.dataTable.display tbody th, table.dataTable.display tbody td {
border-top: 1px solid #ddd;
}
table.dataTable.row-border tbody tr:first-child th,
table.dataTable.row-border tbody tr:first-child td, table.dataTable.display tbody tr:first-child th,
table.dataTable.display tbody tr:first-child td {
border-top: none;
}
table.dataTable.cell-border tbody th, table.dataTable.cell-border tbody td {
border-top: 1px solid #ddd;
border-right: 1px solid #ddd;
}
table.dataTable.cell-border tbody tr th:first-child,
table.dataTable.cell-border tbody tr td:first-child {
border-left: 1px solid #ddd;
}
table.dataTable.cell-border tbody tr:first-child th,
table.dataTable.cell-border tbody tr:first-child td {
border-top: none;
}
table.dataTable.stripe tbody tr.odd, table.dataTable.display tbody tr.odd {
background-color: #f9f9f9;
}
table.dataTable.stripe tbody tr.odd.selected, table.dataTable.display tbody tr.odd.selected {
background-color: #acbad4;
}
table.dataTable.hover tbody tr:hover, table.dataTable.display tbody tr:hover {
background-color: #f6f6f6;
}
table.dataTable.hover tbody tr:hover.selected, table.dataTable.display tbody tr:hover.selected {
background-color: #aab7d1;
}
/* Shading for the columns currently being sorted (sorting_1..3), in every
 * combination of odd/even stripe, selected and hover states. */
table.dataTable.order-column tbody tr > .sorting_1,
table.dataTable.order-column tbody tr > .sorting_2,
table.dataTable.order-column tbody tr > .sorting_3, table.dataTable.display tbody tr > .sorting_1,
table.dataTable.display tbody tr > .sorting_2,
table.dataTable.display tbody tr > .sorting_3 {
background-color: #fafafa;
}
table.dataTable.order-column tbody tr.selected > .sorting_1,
table.dataTable.order-column tbody tr.selected > .sorting_2,
table.dataTable.order-column tbody tr.selected > .sorting_3, table.dataTable.display tbody tr.selected > .sorting_1,
table.dataTable.display tbody tr.selected > .sorting_2,
table.dataTable.display tbody tr.selected > .sorting_3 {
background-color: #acbad5;
}
table.dataTable.display tbody tr.odd > .sorting_1, table.dataTable.order-column.stripe tbody tr.odd > .sorting_1 {
background-color: #f1f1f1;
}
table.dataTable.display tbody tr.odd > .sorting_2, table.dataTable.order-column.stripe tbody tr.odd > .sorting_2 {
background-color: #f3f3f3;
}
table.dataTable.display tbody tr.odd > .sorting_3, table.dataTable.order-column.stripe tbody tr.odd > .sorting_3 {
background-color: whitesmoke;
}
table.dataTable.display tbody tr.odd.selected > .sorting_1, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_1 {
background-color: #a6b4cd;
}
table.dataTable.display tbody tr.odd.selected > .sorting_2, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_2 {
background-color: #a8b5cf;
}
table.dataTable.display tbody tr.odd.selected > .sorting_3, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_3 {
background-color: #a9b7d1;
}
table.dataTable.display tbody tr.even > .sorting_1, table.dataTable.order-column.stripe tbody tr.even > .sorting_1 {
background-color: #fafafa;
}
table.dataTable.display tbody tr.even > .sorting_2, table.dataTable.order-column.stripe tbody tr.even > .sorting_2 {
background-color: #fcfcfc;
}
table.dataTable.display tbody tr.even > .sorting_3, table.dataTable.order-column.stripe tbody tr.even > .sorting_3 {
background-color: #fefefe;
}
table.dataTable.display tbody tr.even.selected > .sorting_1, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_1 {
background-color: #acbad5;
}
table.dataTable.display tbody tr.even.selected > .sorting_2, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_2 {
background-color: #aebcd6;
}
table.dataTable.display tbody tr.even.selected > .sorting_3, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_3 {
background-color: #afbdd8;
}
table.dataTable.display tbody tr:hover > .sorting_1, table.dataTable.order-column.hover tbody tr:hover > .sorting_1 {
background-color: #eaeaea;
}
table.dataTable.display tbody tr:hover > .sorting_2, table.dataTable.order-column.hover tbody tr:hover > .sorting_2 {
background-color: #ececec;
}
table.dataTable.display tbody tr:hover > .sorting_3, table.dataTable.order-column.hover tbody tr:hover > .sorting_3 {
background-color: #efefef;
}
table.dataTable.display tbody tr:hover.selected > .sorting_1, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_1 {
background-color: #a2aec7;
}
table.dataTable.display tbody tr:hover.selected > .sorting_2, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_2 {
background-color: #a3b0c9;
}
table.dataTable.display tbody tr:hover.selected > .sorting_3, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_3 {
background-color: #a5b2cb;
}
table.dataTable.no-footer {
border-bottom: 1px solid #111;
}
table.dataTable.nowrap th, table.dataTable.nowrap td {
white-space: nowrap;
}
table.dataTable.compact thead th,
table.dataTable.compact thead td {
padding: 4px 17px 4px 4px;
}
table.dataTable.compact tfoot th,
table.dataTable.compact tfoot td {
padding: 4px;
}
table.dataTable.compact tbody th,
table.dataTable.compact tbody td {
padding: 4px;
}
/* Per-cell alignment helper classes (dt-left / dt-center / ... and the
 * head-/body-scoped variants). */
table.dataTable th.dt-left,
table.dataTable td.dt-left {
text-align: left;
}
table.dataTable th.dt-center,
table.dataTable td.dt-center,
table.dataTable td.dataTables_empty {
text-align: center;
}
table.dataTable th.dt-right,
table.dataTable td.dt-right {
text-align: right;
}
table.dataTable th.dt-justify,
table.dataTable td.dt-justify {
text-align: justify;
}
table.dataTable th.dt-nowrap,
table.dataTable td.dt-nowrap {
white-space: nowrap;
}
table.dataTable thead th.dt-head-left,
table.dataTable thead td.dt-head-left,
table.dataTable tfoot th.dt-head-left,
table.dataTable tfoot td.dt-head-left {
text-align: left;
}
table.dataTable thead th.dt-head-center,
table.dataTable thead td.dt-head-center,
table.dataTable tfoot th.dt-head-center,
table.dataTable tfoot td.dt-head-center {
text-align: center;
}
table.dataTable thead th.dt-head-right,
table.dataTable thead td.dt-head-right,
table.dataTable tfoot th.dt-head-right,
table.dataTable tfoot td.dt-head-right {
text-align: right;
}
table.dataTable thead th.dt-head-justify,
table.dataTable thead td.dt-head-justify,
table.dataTable tfoot th.dt-head-justify,
table.dataTable tfoot td.dt-head-justify {
text-align: justify;
}
table.dataTable thead th.dt-head-nowrap,
table.dataTable thead td.dt-head-nowrap,
table.dataTable tfoot th.dt-head-nowrap,
table.dataTable tfoot td.dt-head-nowrap {
white-space: nowrap;
}
table.dataTable tbody th.dt-body-left,
table.dataTable tbody td.dt-body-left {
text-align: left;
}
table.dataTable tbody th.dt-body-center,
table.dataTable tbody td.dt-body-center {
text-align: center;
}
table.dataTable tbody th.dt-body-right,
table.dataTable tbody td.dt-body-right {
text-align: right;
}
table.dataTable tbody th.dt-body-justify,
table.dataTable tbody td.dt-body-justify {
text-align: justify;
}
table.dataTable tbody th.dt-body-nowrap,
table.dataTable tbody td.dt-body-nowrap {
white-space: nowrap;
}
table.dataTable,
table.dataTable th,
table.dataTable td {
-webkit-box-sizing: content-box;
box-sizing: content-box;
}
/*
 * Control feature layout
 */
.dataTables_wrapper {
position: relative;
clear: both;
*zoom: 1;
zoom: 1;
}
.dataTables_wrapper .dataTables_length {
float: left;
}
.dataTables_wrapper .dataTables_filter {
float: right;
text-align: right;
}
.dataTables_wrapper .dataTables_filter input {
margin-left: 0.5em;
}
.dataTables_wrapper .dataTables_info {
clear: both;
float: left;
padding-top: 0.755em;
}
/* Pagination buttons, with vendor-prefixed gradient fallbacks for the
 * current / hover / active states. */
.dataTables_wrapper .dataTables_paginate {
float: right;
text-align: right;
padding-top: 0.25em;
}
.dataTables_wrapper .dataTables_paginate .paginate_button {
box-sizing: border-box;
display: inline-block;
min-width: 1.5em;
padding: 0.5em 1em;
margin-left: 2px;
text-align: center;
text-decoration: none !important;
cursor: pointer;
*cursor: hand;
color: #333 !important;
border: 1px solid transparent;
border-radius: 2px;
}
.dataTables_wrapper .dataTables_paginate .paginate_button.current, .dataTables_wrapper .dataTables_paginate .paginate_button.current:hover {
color: #333 !important;
border: 1px solid #979797;
background-color: white;
background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, white), color-stop(100%, #dcdcdc));
/* Chrome,Safari4+ */
background: -webkit-linear-gradient(top, white 0%, #dcdcdc 100%);
/* Chrome10+,Safari5.1+ */
background: -moz-linear-gradient(top, white 0%, #dcdcdc 100%);
/* FF3.6+ */
background: -ms-linear-gradient(top, white 0%, #dcdcdc 100%);
/* IE10+ */
background: -o-linear-gradient(top, white 0%, #dcdcdc 100%);
/* Opera 11.10+ */
background: linear-gradient(to bottom, white 0%, #dcdcdc 100%);
/* W3C */
}
.dataTables_wrapper .dataTables_paginate .paginate_button.disabled, .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:hover, .dataTables_wrapper .dataTables_paginate .paginate_button.disabled:active {
cursor: default;
color: #666 !important;
border: 1px solid transparent;
background: transparent;
box-shadow: none;
}
.dataTables_wrapper .dataTables_paginate .paginate_button:hover {
color: white !important;
border: 1px solid #111;
background-color: #585858;
background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #585858), color-stop(100%, #111));
/* Chrome,Safari4+ */
background: -webkit-linear-gradient(top, #585858 0%, #111 100%);
/* Chrome10+,Safari5.1+ */
background: -moz-linear-gradient(top, #585858 0%, #111 100%);
/* FF3.6+ */
background: -ms-linear-gradient(top, #585858 0%, #111 100%);
/* IE10+ */
background: -o-linear-gradient(top, #585858 0%, #111 100%);
/* Opera 11.10+ */
background: linear-gradient(to bottom, #585858 0%, #111 100%);
/* W3C */
}
.dataTables_wrapper .dataTables_paginate .paginate_button:active {
outline: none;
background-color: #2b2b2b;
background: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #2b2b2b), color-stop(100%, #0c0c0c));
/* Chrome,Safari4+ */
background: -webkit-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);
/* Chrome10+,Safari5.1+ */
background: -moz-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);
/* FF3.6+ */
background: -ms-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);
/* IE10+ */
background: -o-linear-gradient(top, #2b2b2b 0%, #0c0c0c 100%);
/* Opera 11.10+ */
background: linear-gradient(to bottom, #2b2b2b 0%, #0c0c0c 100%);
/* W3C */
box-shadow: inset 0 0 3px #111;
}
.dataTables_wrapper .dataTables_paginate .ellipsis {
padding: 0 1em;
}
/* "Processing" overlay, centered over the table with a horizontal fade. */
.dataTables_wrapper .dataTables_processing {
position: absolute;
top: 50%;
left: 50%;
width: 100%;
height: 40px;
margin-left: -50%;
margin-top: -25px;
padding-top: 20px;
text-align: center;
font-size: 1.2em;
background-color: white;
background: -webkit-gradient(linear, left top, right top, color-stop(0%, rgba(255, 255, 255, 0)), color-stop(25%, rgba(255, 255, 255, 0.9)), color-stop(75%, rgba(255, 255, 255, 0.9)), color-stop(100%, rgba(255, 255, 255, 0)));
background: -webkit-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%);
background: -moz-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%);
background: -ms-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%);
background: -o-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%);
background: linear-gradient(to right, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%);
}
.dataTables_wrapper .dataTables_length,
.dataTables_wrapper .dataTables_filter,
.dataTables_wrapper .dataTables_info,
.dataTables_wrapper .dataTables_processing,
.dataTables_wrapper .dataTables_paginate {
color: #333;
}
.dataTables_wrapper .dataTables_scroll {
clear: both;
}
.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody {
*margin-top: -1px;
-webkit-overflow-scrolling: touch;
}
.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody th, .dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody td {
vertical-align: middle;
}
.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody th > div.dataTables_sizing,
.dataTables_wrapper .dataTables_scroll div.dataTables_scrollBody td > div.dataTables_sizing {
height: 0;
overflow: hidden;
margin: 0 !important;
padding: 0 !important;
}
.dataTables_wrapper.no-footer .dataTables_scrollBody {
border-bottom: 1px solid #111;
}
.dataTables_wrapper.no-footer div.dataTables_scrollHead table,
.dataTables_wrapper.no-footer div.dataTables_scrollBody table {
border-bottom: none;
}
/* Clearfix for the floated controls above/below the table. */
.dataTables_wrapper:after {
visibility: hidden;
display: block;
content: "";
clear: both;
height: 0;
}
/* Responsive tweaks: stack and center the controls on narrow screens. */
@media screen and (max-width: 767px) {
.dataTables_wrapper .dataTables_info,
.dataTables_wrapper .dataTables_paginate {
float: none;
text-align: center;
}
.dataTables_wrapper .dataTables_paginate {
margin-top: 0.5em;
}
}
@media screen and (max-width: 640px) {
.dataTables_wrapper .dataTables_length,
.dataTables_wrapper .dataTables_filter {
float: none;
text-align: center;
}
.dataTables_wrapper .dataTables_filter {
margin-top: 0.5em;
}
}
================================================
FILE: static/plugins/datatables/css/jquery.dataTables_themeroller.css
================================================
/* Third-party DataTables stylesheet, jQuery UI ThemeRoller variant
 * (static/plugins/datatables). Keep in sync with upstream. */
/*
 * Table styles
 */
table.dataTable {
width: 100%;
margin: 0 auto;
clear: both;
border-collapse: separate;
border-spacing: 0;
/*
 * Header and footer styles
 */
/*
 * Body styles
 */
}
table.dataTable thead th,
table.dataTable thead td,
table.dataTable tfoot th,
table.dataTable tfoot td {
padding: 4px 10px;
}
table.dataTable thead th,
table.dataTable tfoot th {
font-weight: bold;
}
table.dataTable thead th:active,
table.dataTable thead td:active {
outline: none;
}
/* Sortable headers; the starred *cursor line is a legacy IE6/7 hack. */
table.dataTable thead .sorting_asc,
table.dataTable thead .sorting_desc,
table.dataTable thead .sorting {
cursor: pointer;
*cursor: hand;
}
/* Wrapper that positions the jQuery UI sort icon inside the header cell. */
table.dataTable thead th div.DataTables_sort_wrapper {
position: relative;
padding-right: 10px;
}
table.dataTable thead th div.DataTables_sort_wrapper span {
position: absolute;
top: 50%;
margin-top: -8px;
right: -5px;
}
table.dataTable thead th.ui-state-default {
border-right-width: 0;
}
table.dataTable thead th.ui-state-default:last-child {
border-right-width: 1px;
}
table.dataTable tbody tr {
background-color: #ffffff;
}
table.dataTable tbody tr.selected {
background-color: #B0BED9;
}
table.dataTable tbody th,
table.dataTable tbody td {
padding: 8px 10px;
}
table.dataTable th.center,
table.dataTable td.center,
table.dataTable td.dataTables_empty {
text-align: center;
}
table.dataTable th.right,
table.dataTable td.right {
text-align: right;
}
/* Optional feature classes (row-border, cell-border, stripe, hover,
 * order-column); .display combines them. */
table.dataTable.row-border tbody th, table.dataTable.row-border tbody td, table.dataTable.display tbody th, table.dataTable.display tbody td {
border-top: 1px solid #ddd;
}
table.dataTable.row-border tbody tr:first-child th,
table.dataTable.row-border tbody tr:first-child td, table.dataTable.display tbody tr:first-child th,
table.dataTable.display tbody tr:first-child td {
border-top: none;
}
table.dataTable.cell-border tbody th, table.dataTable.cell-border tbody td {
border-top: 1px solid #ddd;
border-right: 1px solid #ddd;
}
table.dataTable.cell-border tbody tr th:first-child,
table.dataTable.cell-border tbody tr td:first-child {
border-left: 1px solid #ddd;
}
table.dataTable.cell-border tbody tr:first-child th,
table.dataTable.cell-border tbody tr:first-child td {
border-top: none;
}
table.dataTable.stripe tbody tr.odd, table.dataTable.display tbody tr.odd {
background-color: #f9f9f9;
}
table.dataTable.stripe tbody tr.odd.selected, table.dataTable.display tbody tr.odd.selected {
background-color: #abb9d3;
}
table.dataTable.hover tbody tr:hover,
table.dataTable.hover tbody tr.odd:hover,
table.dataTable.hover tbody tr.even:hover, table.dataTable.display tbody tr:hover,
table.dataTable.display tbody tr.odd:hover,
table.dataTable.display tbody tr.even:hover {
background-color: whitesmoke;
}
table.dataTable.hover tbody tr:hover.selected,
table.dataTable.hover tbody tr.odd:hover.selected,
table.dataTable.hover tbody tr.even:hover.selected, table.dataTable.display tbody tr:hover.selected,
table.dataTable.display tbody tr.odd:hover.selected,
table.dataTable.display tbody tr.even:hover.selected {
background-color: #a9b7d1;
}
/* Shading for the columns being sorted (sorting_1..3), in every
 * combination of stripe, selected and hover states. */
table.dataTable.order-column tbody tr > .sorting_1,
table.dataTable.order-column tbody tr > .sorting_2,
table.dataTable.order-column tbody tr > .sorting_3, table.dataTable.display tbody tr > .sorting_1,
table.dataTable.display tbody tr > .sorting_2,
table.dataTable.display tbody tr > .sorting_3 {
background-color: #f9f9f9;
}
table.dataTable.order-column tbody tr.selected > .sorting_1,
table.dataTable.order-column tbody tr.selected > .sorting_2,
table.dataTable.order-column tbody tr.selected > .sorting_3, table.dataTable.display tbody tr.selected > .sorting_1,
table.dataTable.display tbody tr.selected > .sorting_2,
table.dataTable.display tbody tr.selected > .sorting_3 {
background-color: #acbad4;
}
table.dataTable.display tbody tr.odd > .sorting_1, table.dataTable.order-column.stripe tbody tr.odd > .sorting_1 {
background-color: #f1f1f1;
}
table.dataTable.display tbody tr.odd > .sorting_2, table.dataTable.order-column.stripe tbody tr.odd > .sorting_2 {
background-color: #f3f3f3;
}
table.dataTable.display tbody tr.odd > .sorting_3, table.dataTable.order-column.stripe tbody tr.odd > .sorting_3 {
background-color: whitesmoke;
}
table.dataTable.display tbody tr.odd.selected > .sorting_1, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_1 {
background-color: #a6b3cd;
}
table.dataTable.display tbody tr.odd.selected > .sorting_2, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_2 {
background-color: #a7b5ce;
}
table.dataTable.display tbody tr.odd.selected > .sorting_3, table.dataTable.order-column.stripe tbody tr.odd.selected > .sorting_3 {
background-color: #a9b6d0;
}
table.dataTable.display tbody tr.even > .sorting_1, table.dataTable.order-column.stripe tbody tr.even > .sorting_1 {
background-color: #f9f9f9;
}
table.dataTable.display tbody tr.even > .sorting_2, table.dataTable.order-column.stripe tbody tr.even > .sorting_2 {
background-color: #fbfbfb;
}
table.dataTable.display tbody tr.even > .sorting_3, table.dataTable.order-column.stripe tbody tr.even > .sorting_3 {
background-color: #fdfdfd;
}
table.dataTable.display tbody tr.even.selected > .sorting_1, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_1 {
background-color: #acbad4;
}
table.dataTable.display tbody tr.even.selected > .sorting_2, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_2 {
background-color: #adbbd6;
}
table.dataTable.display tbody tr.even.selected > .sorting_3, table.dataTable.order-column.stripe tbody tr.even.selected > .sorting_3 {
background-color: #afbdd8;
}
table.dataTable.display tbody tr:hover > .sorting_1,
table.dataTable.display tbody tr.odd:hover > .sorting_1,
table.dataTable.display tbody tr.even:hover > .sorting_1, table.dataTable.order-column.hover tbody tr:hover > .sorting_1,
table.dataTable.order-column.hover tbody tr.odd:hover > .sorting_1,
table.dataTable.order-column.hover tbody tr.even:hover > .sorting_1 {
background-color: #eaeaea;
}
table.dataTable.display tbody tr:hover > .sorting_2,
table.dataTable.display tbody tr.odd:hover > .sorting_2,
table.dataTable.display tbody tr.even:hover > .sorting_2, table.dataTable.order-column.hover tbody tr:hover > .sorting_2,
table.dataTable.order-column.hover tbody tr.odd:hover > .sorting_2,
table.dataTable.order-column.hover tbody tr.even:hover > .sorting_2 {
background-color: #ebebeb;
}
table.dataTable.display tbody tr:hover > .sorting_3,
table.dataTable.display tbody tr.odd:hover > .sorting_3,
table.dataTable.display tbody tr.even:hover > .sorting_3, table.dataTable.order-column.hover tbody tr:hover > .sorting_3,
table.dataTable.order-column.hover tbody tr.odd:hover > .sorting_3,
table.dataTable.order-column.hover tbody tr.even:hover > .sorting_3 {
background-color: #eeeeee;
}
table.dataTable.display tbody tr:hover.selected > .sorting_1,
table.dataTable.display tbody tr.odd:hover.selected > .sorting_1,
table.dataTable.display tbody tr.even:hover.selected > .sorting_1, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_1,
table.dataTable.order-column.hover tbody tr.odd:hover.selected > .sorting_1,
table.dataTable.order-column.hover tbody tr.even:hover.selected > .sorting_1 {
background-color: #a1aec7;
}
table.dataTable.display tbody tr:hover.selected > .sorting_2,
table.dataTable.display tbody tr.odd:hover.selected > .sorting_2,
table.dataTable.display tbody tr.even:hover.selected > .sorting_2, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_2,
table.dataTable.order-column.hover tbody tr.odd:hover.selected > .sorting_2,
table.dataTable.order-column.hover tbody tr.even:hover.selected > .sorting_2 {
background-color: #a2afc8;
}
table.dataTable.display tbody tr:hover.selected > .sorting_3,
table.dataTable.display tbody tr.odd:hover.selected > .sorting_3,
table.dataTable.display tbody tr.even:hover.selected > .sorting_3, table.dataTable.order-column.hover tbody tr:hover.selected > .sorting_3,
table.dataTable.order-column.hover tbody tr.odd:hover.selected > .sorting_3,
table.dataTable.order-column.hover tbody tr.even:hover.selected > .sorting_3 {
background-color: #a4b2cb;
}
table.dataTable.nowrap th, table.dataTable.nowrap td {
white-space: nowrap;
}
table.dataTable.compact thead th,
table.dataTable.compact thead td {
padding: 5px 9px;
}
table.dataTable.compact tfoot th,
table.dataTable.compact tfoot td {
padding: 5px 9px 3px 9px;
}
table.dataTable.compact tbody th,
table.dataTable.compact tbody td {
padding: 4px 5px;
}
/* Per-cell alignment helper classes (dt-left / dt-center / ... and the
 * head-/body-scoped variants). */
table.dataTable th.dt-left,
table.dataTable td.dt-left {
text-align: left;
}
table.dataTable th.dt-center,
table.dataTable td.dt-center,
table.dataTable td.dataTables_empty {
text-align: center;
}
table.dataTable th.dt-right,
table.dataTable td.dt-right {
text-align: right;
}
table.dataTable th.dt-justify,
table.dataTable td.dt-justify {
text-align: justify;
}
table.dataTable th.dt-nowrap,
table.dataTable td.dt-nowrap {
white-space: nowrap;
}
table.dataTable thead th.dt-head-left,
table.dataTable thead td.dt-head-left,
table.dataTable tfoot th.dt-head-left,
table.dataTable tfoot td.dt-head-left {
text-align: left;
}
table.dataTable thead th.dt-head-center,
table.dataTable thead td.dt-head-center,
table.dataTable tfoot th.dt-head-center,
table.dataTable tfoot td.dt-head-center {
text-align: center;
}
table.dataTable thead th.dt-head-right,
table.dataTable thead td.dt-head-right,
table.dataTable tfoot th.dt-head-right,
table.dataTable tfoot td.dt-head-right {
text-align: right;
}
table.dataTable thead th.dt-head-justify,
table.dataTable thead td.dt-head-justify,
table.dataTable tfoot th.dt-head-justify,
table.dataTable tfoot td.dt-head-justify {
text-align: justify;
}
table.dataTable thead th.dt-head-nowrap,
table.dataTable thead td.dt-head-nowrap,
table.dataTable tfoot th.dt-head-nowrap,
table.dataTable tfoot td.dt-head-nowrap {
white-space: nowrap;
}
table.dataTable tbody th.dt-body-left,
table.dataTable tbody td.dt-body-left {
text-align: left;
}
table.dataTable tbody th.dt-body-center,
table.dataTable tbody td.dt-body-center {
text-align: center;
}
table.dataTable tbody th.dt-body-right,
table.dataTable tbody td.dt-body-right {
text-align: right;
}
table.dataTable tbody th.dt-body-justify,
table.dataTable tbody td.dt-body-justify {
text-align: justify;
}
table.dataTable tbody th.dt-body-nowrap,
table.dataTable tbody td.dt-body-nowrap {
white-space: nowrap;
}
table.dataTable,
table.dataTable th,
table.dataTable td {
-webkit-box-sizing: content-box;
-moz-box-sizing: content-box;
box-sizing: content-box;
}
/*
 * Control feature layout
 */
.dataTables_wrapper {
position: relative;
clear: both;
*zoom: 1;
zoom: 1;
}
.dataTables_wrapper .dataTables_length {
float: left;
}
.dataTables_wrapper .dataTables_filter {
float: right;
text-align: right;
}
.dataTables_wrapper .dataTables_filter input {
margin-left: 0.5em;
}
.dataTables_wrapper .dataTables_info {
clear: both;
float: left;
padding-top: 0.55em;
}
/* Pagination uses jQuery UI .fg-button styling instead of custom gradients. */
.dataTables_wrapper .dataTables_paginate {
float: right;
text-align: right;
}
.dataTables_wrapper .dataTables_paginate .fg-button {
box-sizing: border-box;
display: inline-block;
min-width: 1.5em;
padding: 0.5em;
margin-left: 2px;
text-align: center;
text-decoration: none !important;
cursor: pointer;
*cursor: hand;
color: #333 !important;
border: 1px solid transparent;
}
.dataTables_wrapper .dataTables_paginate .fg-button:active {
outline: none;
}
.dataTables_wrapper .dataTables_paginate .fg-button:first-child {
border-top-left-radius: 3px;
border-bottom-left-radius: 3px;
}
.dataTables_wrapper .dataTables_paginate .fg-button:last-child {
border-top-right-radius: 3px;
border-bottom-right-radius: 3px;
}
/* "Processing" overlay, centered over the table with a horizontal fade. */
.dataTables_wrapper .dataTables_processing {
position: absolute;
top: 50%;
left: 50%;
width: 100%;
height: 40px;
margin-left: -50%;
margin-top: -25px;
padding-top: 20px;
text-align: center;
font-size: 1.2em;
background-color: white;
background: -webkit-gradient(linear, left top, right top, color-stop(0%, rgba(255, 255, 255, 0)), color-stop(25%, rgba(255, 255, 255, 0.9)), color-stop(75%, rgba(255, 255, 255, 0.9)), color-stop(100%, rgba(255, 255, 255, 0)));
/* Chrome,Safari4+ */
background: -webkit-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%);
/* Chrome10+,Safari5.1+ */
background: -moz-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%);
/* FF3.6+ */
background: -ms-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%);
/* IE10+ */
background: -o-linear-gradient(left, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%);
/* Opera 11.10+ */
background: linear-gradient(to right, rgba(255, 255, 255, 0) 0%, rgba(255, 255, 255, 0.9) 25%, rgba(255, 255, 255, 0.9) 75%, rgba(255, 255, 255, 0) 100%);
/* W3C */
}
.dataTables_wrapper .dataTables_length,
.dataTables_wrapper .dataTables_filter,
.dataTables_wrapper .dataTables_info,
.dataTables_wrapper .dataTables_processing,
.dataTables_wrapper .dataTables_paginate {
color: #333;
}
.dataTables_wrapper .dataTables_scroll {
clear: both;
}
.dataTables_wrapper .dataTables_scrollBody {
*margin-top: -1px;
-webkit-overflow-scrolling: touch;
}
.dataTables_wrapper .ui-widget-header {
font-weight: normal;
}
.dataTables_wrapper .ui-toolbar {
padding: 8px;
}
/* Clearfix for the floated controls above/below the table. */
.dataTables_wrapper:after {
visibility: hidden;
display: block;
content: "";
clear: both;
height: 0;
}
/* Responsive tweaks: stack and center the controls on narrow screens. */
@media screen and (max-width: 767px) {
.dataTables_wrapper .dataTables_length,
.dataTables_wrapper .dataTables_filter,
.dataTables_wrapper .dataTables_info,
.dataTables_wrapper .dataTables_paginate {
float: none;
text-align: center;
}
.dataTables_wrapper .dataTables_filter,
.dataTables_wrapper .dataTables_paginate {
margin-top: 0.5em;
}
}
================================================
FILE: static/plugins/datatables/js/jquery.dataTables.js
================================================
/*! DataTables 1.10.12
* ©2008-2015 SpryMedia Ltd - datatables.net/license
*/
/**
* @summary DataTables
* @description Paginate, search and order HTML tables
* @version 1.10.12
* @file jquery.dataTables.js
* @author SpryMedia Ltd (www.sprymedia.co.uk)
* @contact www.sprymedia.co.uk/contact
* @copyright Copyright 2008-2015 SpryMedia Ltd.
*
* This source file is free software, available under the following license:
* MIT license - http://datatables.net/license
*
* This source file is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
*
* For details please refer to: http://www.datatables.net
*/
/*jslint evil: true, undef: true, browser: true */
/*globals $,require,jQuery,define,_selector_run,_selector_opts,_selector_first,_selector_row_indexes,_ext,_Api,_api_register,_api_registerPlural,_re_new_lines,_re_html,_re_formatted_numeric,_re_escape_regex,_empty,_intVal,_numToDecimal,_isNumber,_isHtml,_htmlNumeric,_pluck,_pluck_order,_range,_stripHtml,_unique,_fnBuildAjax,_fnAjaxUpdate,_fnAjaxParameters,_fnAjaxUpdateDraw,_fnAjaxDataSrc,_fnAddColumn,_fnColumnOptions,_fnAdjustColumnSizing,_fnVisibleToColumnIndex,_fnColumnIndexToVisible,_fnVisbleColumns,_fnGetColumns,_fnColumnTypes,_fnApplyColumnDefs,_fnHungarianMap,_fnCamelToHungarian,_fnLanguageCompat,_fnBrowserDetect,_fnAddData,_fnAddTr,_fnNodeToDataIndex,_fnNodeToColumnIndex,_fnGetCellData,_fnSetCellData,_fnSplitObjNotation,_fnGetObjectDataFn,_fnSetObjectDataFn,_fnGetDataMaster,_fnClearTable,_fnDeleteIndex,_fnInvalidate,_fnGetRowElements,_fnCreateTr,_fnBuildHead,_fnDrawHead,_fnDraw,_fnReDraw,_fnAddOptionsHtml,_fnDetectHeader,_fnGetUniqueThs,_fnFeatureHtmlFilter,_fnFilterComplete,_fnFilterCustom,_fnFilterColumn,_fnFilter,_fnFilterCreateSearch,_fnEscapeRegex,_fnFilterData,_fnFeatureHtmlInfo,_fnUpdateInfo,_fnInfoMacros,_fnInitialise,_fnInitComplete,_fnLengthChange,_fnFeatureHtmlLength,_fnFeatureHtmlPaginate,_fnPageChange,_fnFeatureHtmlProcessing,_fnProcessingDisplay,_fnFeatureHtmlTable,_fnScrollDraw,_fnApplyToChildren,_fnCalculateColumnWidths,_fnThrottle,_fnConvertToWidth,_fnGetWidestNode,_fnGetMaxLenString,_fnStringToCss,_fnSortFlatten,_fnSort,_fnSortAria,_fnSortListener,_fnSortAttachListener,_fnSortingClasses,_fnSortData,_fnSaveState,_fnLoadState,_fnSettingsFromNode,_fnLog,_fnMap,_fnBindAction,_fnCallbackReg,_fnCallbackFire,_fnLengthOverflow,_fnRenderer,_fnDataSource,_fnRowAttributes*/
(function( factory ) {
"use strict";
if ( typeof define === 'function' && define.amd ) {
// AMD
define( ['jquery'], function ( $ ) {
return factory( $, window, document );
} );
}
else if ( typeof exports === 'object' ) {
// CommonJS
module.exports = function (root, $) {
if ( ! root ) {
// CommonJS environments without a window global must pass a
// root. This will give an error otherwise
root = window;
}
if ( ! $ ) {
$ = typeof window !== 'undefined' ? // jQuery's factory checks for a global window
require('jquery') :
require('jquery')( root );
}
return factory( $, root, root.document );
};
}
else {
// Browser
factory( jQuery, window, document );
}
}
(function( $, window, document, undefined ) {
"use strict";
/**
* DataTables is a plug-in for the jQuery Javascript library. It is a highly
* flexible tool, based upon the foundations of progressive enhancement,
* which will add advanced interaction controls to any HTML table. For a
* full list of features please refer to
* [DataTables.net](href="http://datatables.net).
*
* Note that the `DataTable` object is not a global variable but is aliased
* to `jQuery.fn.DataTable` and `jQuery.fn.dataTable` through which it may
* be accessed.
*
* @class
* @param {object} [init={}] Configuration object for DataTables. Options
* are defined by {@link DataTable.defaults}
* @requires jQuery 1.7+
*
* @example
* // Basic initialisation
* $(document).ready( function {
* $('#example').dataTable();
* } );
*
* @example
* // Initialisation with configuration options - in this case, disable
* // pagination and sorting.
* $(document).ready( function {
* $('#example').dataTable( {
* "paginate": false,
* "sort": false
* } );
* } );
*/
var DataTable = function ( options )
{
/**
* Perform a jQuery selector action on the table's TR elements (from the tbody) and
* return the resulting jQuery object.
* @param {string|node|jQuery} sSelector jQuery selector or node collection to act on
* @param {object} [oOpts] Optional parameters for modifying the rows to be included
* @param {string} [oOpts.filter=none] Select TR elements that meet the current filter
* criterion ("applied") or all TR elements (i.e. no filter).
* @param {string} [oOpts.order=current] Order of the TR elements in the processed array.
* Can be either 'current', whereby the current sorting of the table is used, or
* 'original' whereby the original order the data was read into the table is used.
* @param {string} [oOpts.page=all] Limit the selection to the currently displayed page
* ("current") or not ("all"). If 'current' is given, then order is assumed to be
* 'current' and filter is 'applied', regardless of what they might be given as.
* @returns {object} jQuery object, filtered by the given selector.
* @dtopt API
* @deprecated Since v1.10
*
* @example
* $(document).ready(function() {
* var oTable = $('#example').dataTable();
*
* // Highlight every second row
* oTable.$('tr:odd').css('backgroundColor', 'blue');
* } );
*
* @example
* $(document).ready(function() {
* var oTable = $('#example').dataTable();
*
* // Filter to rows with 'Webkit' in them, add a background colour and then
* // remove the filter, thus highlighting the 'Webkit' rows only.
* oTable.fnFilter('Webkit');
* oTable.$('tr', {"search": "applied"}).css('backgroundColor', 'blue');
* oTable.fnFilter('');
* } );
*/
this.$ = function ( sSelector, oOpts )
{
	// Legacy shim: delegate the TR selection to the 1.10+ API, running in
	// traditional (single table, DataTable.ext.iApiIndex) mode
	var legacyApi = this.api( true );
	return legacyApi.$( sSelector, oOpts );
};
/**
* Almost identical to $ in operation, but in this case returns the data for the matched
* rows - as such, the jQuery selector used should match TR row nodes or TD/TH cell nodes
* rather than any descendants, so the data can be obtained for the row/cell. If matching
* rows are found, the data returned is the original data array/object that was used to
* create the row (or a generated array if from a DOM source).
*
* This method is often useful in-combination with $ where both functions are given the
* same parameters and the array indexes will match identically.
* @param {string|node|jQuery} sSelector jQuery selector or node collection to act on
* @param {object} [oOpts] Optional parameters for modifying the rows to be included
* @param {string} [oOpts.filter=none] Select elements that meet the current filter
* criterion ("applied") or all elements (i.e. no filter).
* @param {string} [oOpts.order=current] Order of the data in the processed array.
* Can be either 'current', whereby the current sorting of the table is used, or
* 'original' whereby the original order the data was read into the table is used.
* @param {string} [oOpts.page=all] Limit the selection to the currently displayed page
* ("current") or not ("all"). If 'current' is given, then order is assumed to be
* 'current' and filter is 'applied', regardless of what they might be given as.
* @returns {array} Data for the matched elements. If any elements, as a result of the
* selector, were not TR, TD or TH elements in the DataTable, they will have a null
* entry in the array.
* @dtopt API
* @deprecated Since v1.10
*
* @example
* $(document).ready(function() {
* var oTable = $('#example').dataTable();
*
* // Get the data from the first row in the table
* var data = oTable._('tr:first');
*
* // Do something useful with the data
* alert( "First cell is: "+data[0] );
* } );
*
* @example
* $(document).ready(function() {
* var oTable = $('#example').dataTable();
*
* // Filter to 'Webkit' and get all data for
* oTable.fnFilter('Webkit');
* var data = oTable._('tr', {"search": "applied"});
*
* // Do something with the data
* alert( data.length+" rows matched the search" );
* } );
*/
this._ = function ( sSelector, oOpts )
{
	// Legacy shim: resolve the selector through the modern API's row
	// methods and return the underlying data for the matched rows
	return this
		.api( true )
		.rows( sSelector, oOpts )
		.data();
};
/**
* Create a DataTables Api instance, with the currently selected tables for
* the Api's context.
* @param {boolean} [traditional=false] Set the API instance's context to be
* only the table referred to by the `DataTable.ext.iApiIndex` option, as was
* used in the API presented by DataTables 1.9- (i.e. the traditional mode),
* or if all tables captured in the jQuery object should be used.
* @return {DataTables.Api}
*/
this.api = function ( traditional )
{
	if ( traditional ) {
		// 1.9-compatible mode: context is only the single table referred
		// to by the DataTable.ext.iApiIndex option
		var settings = _fnSettingsFromNode( this[ _ext.iApiIndex ] );
		return new _Api( settings );
	}

	// Modern mode: every table captured in this jQuery object forms the
	// API instance's context
	return new _Api( this );
};
/**
* Add a single new row or multiple rows of data to the table. Please note
* that this is suitable for client-side processing only - if you are using
* server-side processing (i.e. "bServerSide": true), then to add data, you
* must add it to the data source, i.e. the server-side, through an Ajax call.
* @param {array|object} data The data to be added to the table. This can be:
* <ul>
* <li>1D array of data - add a single row with the data provided</li>
* <li>2D array of arrays - add multiple rows in a single call</li>
* <li>object - data object when using <i>mData</i></li>
* <li>array of objects - multiple data objects when using <i>mData</i></li>
* </ul>
* @param {bool} [redraw=true] redraw the table or not
* @returns {array} An array of integers, representing the list of indexes in
* <i>aoData</i> ({@link DataTable.models.oSettings}) that have been added to
* the table.
* @dtopt API
* @deprecated Since v1.10
*
* @example
* // Global var for counter
* var giCount = 2;
*
* $(document).ready(function() {
* $('#example').dataTable();
* } );
*
* function fnClickAddRow() {
* $('#example').dataTable().fnAddData( [
* giCount+".1",
* giCount+".2",
* giCount+".3",
* giCount+".4" ]
* );
*
* giCount++;
* }
*/
this.fnAddData = function( data, redraw )
{
	var api = this.api( true );

	/* Check if we want to add multiple rows or not: a 2D array or an
	 * array of plain row objects means a bulk add, anything else is a
	 * single row. Array.isArray replaces jQuery's $.isArray, which has
	 * been deprecated since jQuery 3.3 - it is a direct alias of the
	 * native method, so behaviour is identical. */
	var rows = Array.isArray(data) && ( Array.isArray(data[0]) || $.isPlainObject(data[0]) ) ?
		api.rows.add( data ) :
		api.row.add( data );

	// Redraw by default, matching the documented [redraw=true] behaviour
	if ( redraw === undefined || redraw ) {
		api.draw();
	}

	// Return the aoData indexes of the rows that were added
	return rows.flatten().toArray();
};
/**
* This function will make DataTables recalculate the column sizes, based on the data
* contained in the table and the sizes applied to the columns (in the DOM, CSS or
* through the sWidth parameter). This can be useful when the width of the table's
* parent element changes (for example a window resize).
* @param {boolean} [bRedraw=true] Redraw the table or not, you will typically want to
* @dtopt API
* @deprecated Since v1.10
*
* @example
* $(document).ready(function() {
* var oTable = $('#example').dataTable( {
* "sScrollY": "200px",
* "bPaginate": false
* } );
*
* $(window).bind('resize', f
gitextract_epu7r_5a/
├── .gitignore
├── FirstBlood/
│ ├── __init__.py
│ ├── celery.py
│ ├── settings.py
│ ├── urls.py
│ ├── views.py
│ └── wsgi.py
├── LICENSE
├── README.md
├── batch_job/
│ ├── __init__.py
│ ├── admin.py
│ ├── apps.py
│ ├── conf/
│ │ ├── __init__.py
│ │ └── config.py
│ ├── models.py
│ ├── tests.py
│ ├── urls.py
│ └── views.py
├── controller/
│ ├── __init__.py
│ ├── core/
│ │ ├── __init__.py
│ │ ├── access.py
│ │ ├── excel.py
│ │ ├── local_mysql.py
│ │ ├── mailtable.py
│ │ ├── public.py
│ │ ├── query_sql.py
│ │ └── unicode_width.py
│ └── public/
│ ├── __init__.py
│ ├── dataconn.py
│ ├── log.py
│ ├── mailclass.py
│ ├── mysql_helper.py
│ ├── pagination.py
│ └── sqlserver_helper.py
├── create_table.sql
├── datax_web/
│ ├── __init__.py
│ ├── admin.py
│ ├── apps.py
│ ├── conf/
│ │ ├── __init__.py
│ │ └── config.py
│ ├── models.py
│ ├── tests.py
│ ├── urls.py
│ └── views.py
├── manage.py
├── requirements.txt
├── static/
│ ├── css/
│ │ └── login.css
│ ├── js/
│ │ └── csrf.js
│ ├── plugins/
│ │ ├── bootstarp-table/
│ │ │ └── bootstrap-table-zh-CN.js
│ │ ├── datatables/
│ │ │ ├── css/
│ │ │ │ ├── jquery.dataTables.css
│ │ │ │ └── jquery.dataTables_themeroller.css
│ │ │ └── js/
│ │ │ └── jquery.dataTables.js
│ │ └── layer/
│ │ ├── layer.js
│ │ └── skin/
│ │ └── layer.css
│ └── template/
│ ├── bootstrap/
│ │ └── css/
│ │ └── bootstrap.css
│ ├── css/
│ │ ├── bootstrap-datepicker.css
│ │ ├── bootstrap-timepicker.css
│ │ ├── bootstrap-wysihtml5.css
│ │ ├── colorbox/
│ │ │ └── colorbox.css
│ │ ├── datepicker.css
│ │ ├── dropzone/
│ │ │ └── dropzone.css
│ │ ├── endless-skin.css
│ │ ├── endless.css
│ │ ├── fullcalendar.css
│ │ ├── gritter/
│ │ │ └── jquery.gritter.css
│ │ ├── jcarousel.responsive.css
│ │ ├── jquery.dataTables_themeroller.css
│ │ ├── jquery.tagsinput.css
│ │ ├── morris.css
│ │ ├── pace.css
│ │ ├── prettify.css
│ │ └── slider.css
│ └── js/
│ ├── bootstrap-datepicker.js
│ ├── endless/
│ │ ├── endless.js
│ │ ├── endless_dashboard.js
│ │ ├── endless_form.js
│ │ └── endless_wizard.js
│ ├── jquery-ui.js
│ └── uncompressed/
│ ├── bootstrap-wysihtml5.js
│ ├── holder.js
│ ├── pace.js
│ └── run_prettify.js
├── supervisord/
│ ├── FirstBlood_dev.conf
│ └── FirstBlood_pro.conf
├── templates/
│ ├── 403.html
│ ├── 404.html
│ ├── base.html
│ ├── batch_job/
│ │ ├── add_batch_job.html
│ │ ├── add_crontabs.html
│ │ ├── batch_job_instance.html
│ │ ├── batch_job_instance_details.html
│ │ ├── crontabs.html
│ │ ├── index.html
│ │ ├── mod_crontabs.html
│ │ ├── update_batch_job.html
│ │ └── update_job.html
│ ├── datax_web/
│ │ ├── add_crontabs.html
│ │ ├── add_job.html
│ │ ├── crontabs.html
│ │ ├── index.html
│ │ ├── mod_crontabs.html
│ │ ├── mod_periodic_task.html
│ │ ├── monitor_job.html
│ │ ├── monitor_job_detail.html
│ │ └── update_job.html
│ ├── index.html
│ └── registered/
│ ├── login.html
│ ├── password_change.html
│ └── password_change_done.html
└── websockted/
├── CHANGES
├── LICENSE
├── README.md
├── __init__.py
├── datax_web_job_instance.py
└── websocketd
SYMBOL INDEX (521 symbols across 30 files)
FILE: FirstBlood/celery.py
function debug_task (line 25) | def debug_task(self):
FILE: FirstBlood/views.py
function index (line 15) | def index(request):
function page_not_found (line 21) | def page_not_found(request):
function permission_denied (line 25) | def permission_denied(request):
function get_username (line 30) | def get_username(request):
function check_permission (line 40) | def check_permission(request):
FILE: batch_job/apps.py
class BatchJobConfig (line 7) | class BatchJobConfig(AppConfig):
FILE: batch_job/models.py
class BatchJobPermission (line 7) | class BatchJobPermission(models.Model):
class Meta (line 11) | class Meta:
FILE: batch_job/views.py
function verification (line 50) | def verification(CheckClass):
class BatchJobData (line 74) | class BatchJobData(object):
method __init__ (line 78) | def __init__(self, data):
method timestamp1 (line 97) | def timestamp1(self):
method _get_schedule_dict (line 100) | def _get_schedule_dict(self):
method schedule_dict (line 111) | def schedule_dict(self):
method batch_job_id (line 123) | def batch_job_id(self):
method batch_job_id (line 132) | def batch_job_id(self, batch_job_id):
method get_insert_datax_job_sql (line 135) | def get_insert_datax_job_sql(self):
method get_update_batch_job_by_id_sql (line 139) | def get_update_batch_job_by_id_sql(self):
method get_insert_batch_job_details_sql (line 143) | def get_insert_batch_job_details_sql(self):
method get_delete_batch_job_details_by_id_sql (line 148) | def get_delete_batch_job_details_by_id_sql(self):
method create (line 152) | def create(self):
method create_PeriodicTask (line 175) | def create_PeriodicTask(self):
method update (line 188) | def update(self):
method update_PeriodicTask (line 213) | def update_PeriodicTask(self):
method get_batch_job_by_id (line 221) | def get_batch_job_by_id(self, batch_job_id):
method get_batch_job_details_by_id (line 227) | def get_batch_job_details_by_id(self, batch_job_id):
class BatchJobInstanceData (line 234) | class BatchJobInstanceData(object):
method __init__ (line 259) | def __init__(self, data):
method batch_job_instance_id (line 274) | def batch_job_instance_id(self):
method batch_job_instance_id (line 283) | def batch_job_instance_id(self, batch_job_id):
method trigger_mode (line 287) | def trigger_mode(self):
method trigger_mode (line 291) | def trigger_mode(self, val):
method get_insert_batch_job_instance_sql (line 298) | def get_insert_batch_job_instance_sql(self):
method get_update_batch_job_instance_by_id_sql (line 306) | def get_update_batch_job_instance_by_id_sql(self, result):
method get_select_batch_job_instance_by_id_sql (line 315) | def get_select_batch_job_instance_by_id_sql(batch_job_instance_id):
method get_batch_job_instance_by_id (line 318) | def get_batch_job_instance_by_id(self, batch_job_instance_id):
method start_log (line 323) | def start_log(self):
method record_result_log (line 331) | def record_result_log(self, result):
class BatchJobSubjobInstanceData (line 340) | class BatchJobSubjobInstanceData(object):
method __init__ (line 354) | def __init__(self, data):
method subjob_instance_id (line 367) | def subjob_instance_id(self):
method subjob_instance_id (line 371) | def subjob_instance_id(self, subjob_instance_id):
method batch_job_instance_id (line 375) | def batch_job_instance_id(self):
method batch_job_instance_id (line 379) | def batch_job_instance_id(self, batch_job_instance_id):
method get_insert_batch_job_instance_details_sql_sql (line 382) | def get_insert_batch_job_instance_details_sql_sql(self, _type):
method start_subjob_log (line 389) | def start_subjob_log(self, _type):
method run_sub_job (line 398) | def run_sub_job(cls, _type, subjob_id, results, batch_job_instance_id,...
method get_select_sub_job_datax_instance_by_id_sql (line 431) | def get_select_sub_job_datax_instance_by_id_sql(batch_job_instance_id):
method get_sub_job_datax_instance_data_by_id (line 435) | def get_sub_job_datax_instance_data_by_id(batch_job_instance_id):
class CheckBatchJob (line 443) | class CheckBatchJob(object):
method __init__ (line 468) | def __init__(self, request):
method check_operation_type (line 477) | def check_operation_type(self):
method check_name_by_operation_type (line 486) | def check_name_by_operation_type(self):
method check_name (line 497) | def check_name(self, name, sql):
method check_description (line 511) | def check_description(self):
method check_task_template (line 517) | def check_task_template(self):
method check_is_enable (line 527) | def check_is_enable(self):
method check_crontab (line 533) | def check_crontab(self):
method check_batch_job_details (line 542) | def check_batch_job_details(self):
method check_batch_job_id (line 581) | def check_batch_job_id(self):
method check_trigger_mode (line 598) | def check_trigger_mode(self):
method total_check (line 609) | def total_check(self):
class BatchJobInstanceSql (line 624) | class BatchJobInstanceSql(object):
method __init__ (line 636) | def __init__(self, request):
method _set_table (line 646) | def _set_table(self, table):
method tables (line 653) | def tables(self):
method cvtpara (line 658) | def cvtpara(self):
class PaginatorBatchJobInstance (line 670) | class PaginatorBatchJobInstance(dataconn.DatabaseConnection, query_sql.Q...
method __init__ (line 672) | def __init__(self, qs):
method rows (line 677) | def rows(self):
method total (line 681) | def total(self):
function index (line 687) | def index(request):
function add_batch_job (line 694) | def add_batch_job(request):
function update_batch_job (line 700) | def update_batch_job(request, id):
function batch_job_instance (line 706) | def batch_job_instance(request):
function batch_job_instance_details (line 713) | def batch_job_instance_details(request, id):
function get_task_template (line 720) | def get_task_template(request):
function get_crontab (line 745) | def get_crontab(request):
function add_crontab (line 756) | def add_crontab(request):
function add_batch_job_data (line 775) | def add_batch_job_data(request):
function get_batch_job_data (line 794) | def get_batch_job_data(request):
function get_batch_job_data_by_id (line 808) | def get_batch_job_data_by_id(request):
function get_batch_job_sub_job_by_id (line 827) | def get_batch_job_sub_job_by_id(request):
function run_batch_job_task (line 855) | def run_batch_job_task(request):
function _process_run_sub_job (line 868) | def _process_run_sub_job(_type, subjob_id, results, batch_job_instance_i...
function run_batch_job (line 874) | def run_batch_job(**data):
function batch_job_periodictask (line 924) | def batch_job_periodictask(batch_job_id):
function get_batch_job_instance (line 965) | def get_batch_job_instance(request):
function get_batch_job_instance_data_by_id (line 979) | def get_batch_job_instance_data_by_id(request):
function get_batch_job_sub_job_instance_data_by_id (line 992) | def get_batch_job_sub_job_instance_data_by_id(request):
FILE: controller/core/access.py
function verification (line 15) | def verification(check_class):
class Check_IBQ (line 39) | class Check_IBQ(object):
method __init__ (line 46) | def __init__(self, request):
method check_data (line 55) | def check_data(self):
method total_check (line 64) | def total_check(self):
class Check_PCI (line 71) | class Check_PCI(object):
method __init__ (line 78) | def __init__(self, request):
method check_data (line 89) | def check_data(self):
method checkDateMaxNum (line 98) | def checkDateMaxNum(self):
method total_check (line 105) | def total_check(self):
class Check_people_upload (line 113) | class Check_people_upload(object):
method __init__ (line 120) | def __init__(self, request):
method check_file_exists (line 125) | def check_file_exists(self):
method check_filename (line 134) | def check_filename(self):
method total_check (line 144) | def total_check(self):
FILE: controller/core/excel.py
function set_style (line 20) | def set_style(name, height, bold=False):
function get_table (line 39) | def get_table(file, table=False):
function sheet_write (line 51) | def sheet_write(f, sheet_name, row0, rows, width):
class Openpyxl (line 66) | class Openpyxl(object):
method __init__ (line 68) | def __init__(self, filename):
method __set_width (line 75) | def __set_width(self, rows, ws):
method __get_new_ws (line 80) | def __get_new_ws(self, title):
method add_sheet (line 90) | def add_sheet(self, title, rows):
method save (line 106) | def save(self):
FILE: controller/core/mailtable.py
class MailTable (line 9) | class MailTable(object):
method __init__ (line 13) | def __init__(self):
method style (line 17) | def style(self):
method table (line 48) | def table(self, caption, rows):
method _tr_list (line 62) | def _tr_list(self, row):
method _tr0_list (line 67) | def _tr0_list(self, row):
FILE: controller/core/public.py
class Currency (line 6) | class Currency(object):
method __init__ (line 8) | def __init__(self, request):
method rq_get (line 11) | def rq_get(self, key):
method rq_post (line 14) | def rq_post(self, key):
method rq_get_json (line 17) | def rq_get_json(self, key):
method rq_post_json (line 20) | def rq_post_json(self, key):
class DatetimeHelp (line 23) | class DatetimeHelp(object):
method __init__ (line 25) | def __init__(self):
method now_time (line 29) | def now_time(self):
method strptime (line 32) | def strptime(self, value, format):
method nowtimestrf1 (line 36) | def nowtimestrf1(self):
method nowtimestrf2 (line 40) | def nowtimestrf2(self):
method nowtimestrf3 (line 44) | def nowtimestrf3(self):
method nowtimestrf4 (line 48) | def nowtimestrf4(self):
method nowtimestrf5 (line 52) | def nowtimestrf5(self):
method nowtimestrf6 (line 56) | def nowtimestrf6(self):
method yesterday (line 60) | def yesterday(self):
method yesterdaystrf4 (line 65) | def yesterdaystrf4(self):
method yesterdaystrf5 (line 69) | def yesterdaystrf5(self):
method yesterdaystrf6 (line 73) | def yesterdaystrf6(self):
method timestamp1 (line 77) | def timestamp1(self):
method datetime_to_timestamp (line 84) | def datetime_to_timestamp(datetime_obj):
FILE: controller/core/query_sql.py
class Q_Sql (line 11) | class Q_Sql(object):
method __init__ (line 41) | def __init__(self, cvtpara, **tables):
method _data (line 57) | def _data(self):
method _set_data (line 62) | def _set_data(self, t, table):
method _set_str (line 73) | def _set_str(self, t, field, **attr):
method _set_datetime (line 78) | def _set_datetime(self, t, field, **attr):
method _set_order_by (line 89) | def _set_order_by(self, order_by):
method _set_condition (line 107) | def _set_condition(self):
method para (line 113) | def para(self):
method total_para (line 121) | def total_para(self):
method sql (line 125) | def sql(self):
method total_sql (line 131) | def total_sql(self):
class Q_Data (line 136) | class Q_Data(object):
method __init__ (line 138) | def __init__(self, qs):
method _get_data (line 144) | def _get_data(self, databases_c):
method _data_clean (line 147) | def _data_clean(self, data):
method _data_conversion (line 153) | def _data_conversion(self, val):
method _get_rows (line 164) | def _get_rows(self, databases_c):
method _get_total (line 174) | def _get_total(self, databases_c):
class Download_Sql (line 180) | class Download_Sql(Q_Sql):
method __init__ (line 186) | def __init__(self, cvtpara, **tables):
method sql (line 190) | def sql(self):
method para (line 196) | def para(self):
FILE: controller/core/unicode_width.py
function get_max_length (line 5) | def get_max_length(arg):
function sort_arg (line 10) | def sort_arg(arg):
function cmp_length (line 16) | def cmp_length(a, b):
function str_len (line 28) | def str_len(string):
function get_width (line 38) | def get_width(*var):
FILE: controller/public/dataconn.py
class DatabaseConnection (line 21) | class DatabaseConnection(object):
method __init__ (line 23) | def __init__(self, logger):
method get_dtbs_conn (line 28) | def get_dtbs_conn(self, name):
method get_dtbs_conn_by_id (line 35) | def get_dtbs_conn_by_id(self, _id):
method get_datainfo (line 46) | def get_datainfo(self, name):
method get_datainfo_by_id (line 54) | def get_datainfo_by_id(self, _id):
class DataTransform (line 63) | class DataTransform(object):
method __init__ (line 67) | def __init__(self):
method get_row_by_list (line 70) | def get_row_by_list(self, dt, database_type):
method get_row_by_dict (line 96) | def get_row_by_dict(self, dt, database_type):
method get_row_by_dict_to_user (line 124) | def get_row_by_dict_to_user(self, dt):
method get_row_by_list_to_excel (line 147) | def get_row_by_list_to_excel(self, dt):
method special_characters_mysql (line 161) | def special_characters_mysql(string):
method special_characters_sqlserver (line 172) | def special_characters_sqlserver(string):
method special_characters (line 177) | def special_characters(self, database_type):
FILE: controller/public/log.py
class Logger (line 11) | class Logger(object):
method __init__ (line 21) | def __init__(self, logname, loglevel, logger):
method getlog (line 49) | def getlog(self):
FILE: controller/public/mailclass.py
class MailHelper (line 27) | class MailHelper(object):
method __init__ (line 29) | def __init__(self, mail_host, mail_user, mail_pass,
method add_attch (line 46) | def add_attch(self, res_file):
method insert_img (line 57) | def insert_img(self, file):
method add_content (line 68) | def add_content(self, content, subject):
method _cvt_receivers (line 77) | def _cvt_receivers(self, receivers):
method _cvt_user (line 81) | def _cvt_user(self, user):
method send_htm (line 84) | def send_htm(self):
method _format_addr (line 98) | def _format_addr(s):
FILE: controller/public/mysql_helper.py
class MysqlHelper (line 10) | class MysqlHelper(object):
method __init__ (line 15) | def __init__(self, host, user, passwd, db):
method __conn (line 25) | def __conn(self):
method getall (line 39) | def getall(self, sql, paramters=None):
method getallmany (line 59) | def getallmany(self, sql, paramters=None):
method getsingle (line 77) | def getsingle(self, sql, paramters=None):
method insertmany (line 95) | def insertmany(self, sql, paramters=None):
method insert (line 111) | def insert(self, sql, paramters=None):
method getall_list (line 128) | def getall_list(self, sql, paramters=None):
method getall_list_sqls (line 149) | def getall_list_sqls(self, sqls, paramters=None):
method delete (line 175) | def delete(self, sql, paramters=None):
method update (line 191) | def update(self, sql, paramters=None):
method dict_generator (line 207) | def dict_generator(self, sql, paramters=None):
method tuple_generator (line 232) | def tuple_generator(self, sql, paramters=None):
method transaction_start (line 261) | def transaction_start(self):
method transaction_execute (line 276) | def transaction_execute(self, sql, paramters=None):
method transaction_commit_and_close (line 287) | def transaction_commit_and_close(self):
class BusinessMysql (line 299) | class BusinessMysql(MysqlHelper):
method __init__ (line 301) | def __init__(self, host, user, passwd, db):
method search (line 304) | def search(self, sql, para=None):
FILE: controller/public/pagination.py
class Paginator_help (line 7) | class Paginator_help:
method __init__ (line 10) | def __init__(self, page_num, queryset, PAGE_SIZE, current_page_total, ...
method get_Paginator_obj (line 20) | def get_Paginator_obj(self):
method check_page_num (line 25) | def check_page_num(self, page_num):
method get_current_page (line 31) | def get_current_page(self):
method get_qstr (line 36) | def get_qstr(self, request):
method calculate_begin_end (line 40) | def calculate_begin_end(self):
method get_page_range (line 62) | def get_page_range(self):
class Paginator_ajax (line 73) | class Paginator_ajax(object):
method __init__ (line 76) | def __init__(self, offset, queryset, PAGE_SIZE):
method get_current_page (line 87) | def get_current_page(self):
method get_Paginator_obj (line 92) | def get_Paginator_obj(self):
method _get_rows (line 97) | def _get_rows(self):
method data (line 101) | def data(self):
class Paginator_sql (line 105) | class Paginator_sql(object):
method __init__ (line 108) | def __init__(self, offset, queryset, PAGE_SIZE):
method get_current_page (line 122) | def get_current_page(self):
method get_Paginator_obj (line 127) | def get_Paginator_obj(self):
method _get_rows (line 132) | def _get_rows(self):
method data (line 136) | def data(self):
FILE: controller/public/sqlserver_helper.py
class SqlserverHelper (line 10) | class SqlserverHelper(object):
method __init__ (line 15) | def __init__(self, host, user, passwd, db):
method __conn (line 25) | def __conn(self):
method getall (line 37) | def getall(self, sql, paramters=None):
method getallmany (line 57) | def getallmany(self, sql, paramters=None):
method getsingle (line 75) | def getsingle(self, sql, paramters=None):
method insertmany (line 93) | def insertmany(self, sql, paramters=None):
method insert (line 109) | def insert(self, sql, paramters=None):
method getall_list (line 125) | def getall_list(self, sql, paramters=None):
method getall_list_sqls (line 146) | def getall_list_sqls(self, sqls, paramters=None):
method delete (line 172) | def delete(self, sql, paramters=None):
method dict_generator (line 188) | def dict_generator(self, sql, paramters=None):
method tuple_generator (line 213) | def tuple_generator(self, sql, paramters=None):
class BusinessSqlserver (line 239) | class BusinessSqlserver(SqlserverHelper):
method __init__ (line 241) | def __init__(self, host, user, passwd, db):
method search (line 244) | def search(self, sql, para=None):
FILE: create_table.sql
type `databaseinfo` (line 13) | CREATE TABLE `databaseinfo` (
type `datax_job` (line 34) | CREATE TABLE `datax_job` (
type `datax_job_writer_column` (line 55) | CREATE TABLE `datax_job_writer_column` (
type `datax_job_instance` (line 69) | CREATE TABLE `datax_job_instance` (
type `batch_job` (line 96) | CREATE TABLE `batch_job` (
type `batch_job_details` (line 111) | CREATE TABLE `batch_job_details` (
type `batch_job_instance` (line 126) | CREATE TABLE `batch_job_instance` (
type `batch_job_instance_details` (line 145) | CREATE TABLE `batch_job_instance_details` (
FILE: datax_web/apps.py
class DataxWebConfig (line 6) | class DataxWebConfig(AppConfig):
FILE: datax_web/tests.py
function follow (line 16) | def follow(thefile):
FILE: datax_web/views.py
function verification (line 25) | def verification(check_class):
class JobData (line 49) | class JobData(object):
method __init__ (line 69) | def __init__(self, data):
method timestamp1 (line 93) | def timestamp1(self):
method _get_reader_dtbs (line 96) | def _get_reader_dtbs(self):
method _get_writer_dtbs (line 99) | def _get_writer_dtbs(self):
method get_insert_datax_job_sql (line 102) | def get_insert_datax_job_sql(self):
method get_update_datax_job_by_id_sql (line 113) | def get_update_datax_job_by_id_sql(self):
method get_insert_datax_job_writer_column_sql (line 125) | def get_insert_datax_job_writer_column_sql(self):
method get_delete_datax_job_writer_column_by_id_sql (line 131) | def get_delete_datax_job_writer_column_by_id_sql(self):
method get_insert_datax_job_instance_sql (line 135) | def get_insert_datax_job_instance_sql(self):
method get_update_datax_job_instance_by_instance_id_sql (line 155) | def get_update_datax_job_instance_by_instance_id_sql(self, result):
method datax_job_instance_id (line 161) | def datax_job_instance_id(self):
method start_log (line 164) | def start_log(self):
method record_result_log (line 172) | def record_result_log(self, result):
method create (line 180) | def create(self):
method update (line 202) | def update(self):
method get_job_data (line 227) | def get_job_data(self):
method get_job_data_by_id (line 236) | def get_job_data_by_id(self, _id):
method get_datax_job_writer_column_by_id (line 250) | def get_datax_job_writer_column_by_id(self, _id):
method create_file (line 264) | def create_file(file, content):
class CheckJob (line 270) | class CheckJob(object):
method __init__ (line 296) | def __init__(self, request):
method check_operation_type (line 312) | def check_operation_type(self):
method check_name_by_operation_type (line 321) | def check_name_by_operation_type(self):
method check_name (line 332) | def check_name(self, name, sql):
method check_description (line 346) | def check_description(self):
method check_querySql (line 352) | def check_querySql(self):
method check_reader_databaseinfo_id (line 358) | def check_reader_databaseinfo_id(self):
method check_writer_table (line 368) | def check_writer_table(self):
method check_writer_column (line 374) | def check_writer_column(self):
method check_writer_databaseinfo_id (line 380) | def check_writer_databaseinfo_id(self):
method check_databaseinfo_id (line 390) | def check_databaseinfo_id(self, _id, operation_type, ERROR1, ERROR2):
method check_datax_job_id (line 404) | def check_datax_job_id(self):
method check_trigger_mode (line 422) | def check_trigger_mode(self):
method total_check (line 433) | def total_check(self):
class Datax (line 449) | class Datax(object):
method __init__ (line 453) | def __init__(self, data):
method get_reader (line 456) | def get_reader(self):
method get_writer (line 464) | def get_writer(self):
method get_job_json (line 475) | def get_job_json(self):
method job_json_file_name (line 485) | def job_json_file_name(self):
method job_json_file (line 489) | def job_json_file(self):
method create_file (line 493) | def create_file(file, content):
method cmd (line 499) | def cmd(self):
function run (line 504) | def run(**data):
function index (line 518) | def index(request):
function add_job (line 526) | def add_job(request):
function update_job (line 533) | def update_job(request, id):
function monitor_job (line 540) | def monitor_job(request):
function monitor_job_detail (line 547) | def monitor_job_detail(request, id):
function get_database (line 554) | def get_database(request):
function add_job_data (line 574) | def add_job_data(request):
function get_job_data (line 591) | def get_job_data(request):
function get_update_job_data (line 601) | def get_update_job_data(request):
function run_job (line 615) | def run_job(request):
class DataxJobInstanceSql (line 626) | class DataxJobInstanceSql(object):
method __init__ (line 641) | def __init__(self, request):
method _set_table (line 651) | def _set_table(self, table):
method tables (line 658) | def tables(self):
method cvtpara (line 663) | def cvtpara(self):
class PaginatorData (line 675) | class PaginatorData(dataconn.DatabaseConnection, query_sql.Q_Data):
method __init__ (line 677) | def __init__(self, qs):
method rows (line 682) | def rows(self):
method total (line 686) | def total(self):
function get_datax_job_instance (line 692) | def get_datax_job_instance(request):
function get_datax_job_instance_by_id (line 706) | def get_datax_job_instance_by_id(request):
FILE: static/js/csrf.js
function getCookie (line 1) | function getCookie(name) {
function csrfSafeMethod (line 17) | function csrfSafeMethod(method) {
FILE: static/plugins/datatables/js/jquery.dataTables.js
function _fnHungarianMap (line 1650) | function _fnHungarianMap ( o )
function _fnCamelToHungarian (line 1688) | function _fnCamelToHungarian ( src, user, force )
function _fnLanguageCompat (line 1727) | function _fnLanguageCompat( lang )
function _fnCompatOpts (line 1779) | function _fnCompatOpts ( init )
function _fnCompatCols (line 1820) | function _fnCompatCols ( init )
function _fnBrowserDetect (line 1840) | function _fnBrowserDetect( settings )
function _fnReduce (line 1920) | function _fnReduce ( that, fn, init, start, end, inc )
function _fnAddColumn (line 1954) | function _fnAddColumn( oSettings, nTh )
function _fnColumnOptions (line 1986) | function _fnColumnOptions( oSettings, iCol, oOptions )
function _fnAdjustColumnSizing (line 2114) | function _fnAdjustColumnSizing ( settings )
function _fnVisibleToColumnIndex (line 2146) | function _fnVisibleToColumnIndex( oSettings, iMatch )
function _fnColumnIndexToVisible (line 2164) | function _fnColumnIndexToVisible( oSettings, iMatch )
function _fnVisbleColumns (line 2179) | function _fnVisbleColumns( oSettings )
function _fnGetColumns (line 2202) | function _fnGetColumns( oSettings, sParam )
function _fnColumnTypes (line 2221) | function _fnColumnTypes ( settings )
function _fnApplyColumnDefs (line 2292) | function _fnApplyColumnDefs( oSettings, aoColDefs, aoCols, fn )
function _fnAddData (line 2372) | function _fnAddData ( oSettings, aDataIn, nTr, anTds )
function _fnAddTr (line 2422) | function _fnAddTr( settings, trs )
function _fnNodeToDataIndex (line 2445) | function _fnNodeToDataIndex( oSettings, n )
function _fnNodeToColumnIndex (line 2459) | function _fnNodeToColumnIndex( oSettings, iRow, n )
function _fnGetCellData (line 2474) | function _fnGetCellData( settings, rowIdx, colIdx, type )
function _fnSetCellData (line 2522) | function _fnSetCellData( settings, rowIdx, colIdx, val )
function _fnSplitObjNotation (line 2544) | function _fnSplitObjNotation( str )
function _fnGetObjectDataFn (line 2559) | function _fnGetObjectDataFn( mSource )
function _fnSetObjectDataFn (line 2684) | function _fnSetObjectDataFn( mSource )
function _fnGetDataMaster (line 2803) | function _fnGetDataMaster ( settings )
function _fnClearTable (line 2814) | function _fnClearTable( settings )
function _fnDeleteIndex (line 2830) | function _fnDeleteIndex( a, iTarget, splice )
function _fnInvalidate (line 2869) | function _fnInvalidate( settings, rowIdx, src, colIdx )
function _fnGetRowElements (line 2947) | function _fnGetRowElements( settings, row, colIdx, d )
function _fnCreateTr (line 3056) | function _fnCreateTr ( oSettings, iRow, nTrIn, anTds )
function _fnRowAttributes (line 3140) | function _fnRowAttributes( settings, row )
function _fnBuildHead (line 3180) | function _fnBuildHead( oSettings )
function _fnDrawHead (line 3266) | function _fnDrawHead( oSettings, aoSource, bIncludeHidden )
function _fnDraw (line 3364) | function _fnDraw( oSettings )
function _fnReDraw (line 3505) | function _fnReDraw( settings, holdPosition )
function _fnAddOptionsHtml (line 3543) | function _fnAddOptionsHtml ( oSettings )
function _fnDetectHeader (line 3699) | function _fnDetectHeader ( aLayout, nThead )
function _fnGetUniqueThs (line 3774) | function _fnGetUniqueThs ( oSettings, nHeader, aLayout )
function _fnBuildAjax (line 3811) | function _fnBuildAjax( oSettings, data, fn )
function _fnAjaxUpdate (line 3944) | function _fnAjaxUpdate( settings )
function _fnAjaxParameters (line 3975) | function _fnAjaxParameters( settings )
function _fnAjaxUpdateDraw (line 4083) | function _fnAjaxUpdateDraw ( settings, json )
function _fnAjaxDataSrc (line 4133) | function _fnAjaxDataSrc ( oSettings, json )
function _fnFeatureHtmlFilter (line 4156) | function _fnFeatureHtmlFilter ( settings )
function _fnFilterComplete (line 4244) | function _fnFilterComplete ( oSettings, oInput, iForce )
function _fnFilterCustom (line 4297) | function _fnFilterCustom( settings )
function _fnFilterColumn (line 4334) | function _fnFilterColumn ( settings, searchStr, colIdx, regex, smart, ca...
function _fnFilter (line 4364) | function _fnFilter( settings, input, force, regex, smart, caseInsensitive )
function _fnFilterCreateSearch (line 4416) | function _fnFilterCreateSearch( search, regex, smart, caseInsensitive )
function _fnFilterData (line 4458) | function _fnFilterData ( settings )
function _fnSearchToCamel (line 4531) | function _fnSearchToCamel ( obj )
function _fnSearchToHung (line 4550) | function _fnSearchToHung ( obj )
function _fnFeatureHtmlInfo (line 4566) | function _fnFeatureHtmlInfo ( settings )
function _fnUpdateInfo (line 4600) | function _fnUpdateInfo ( settings )
function _fnInfoMacros (line 4638) | function _fnInfoMacros ( settings, str )
function _fnInitialise (line 4665) | function _fnInitialise ( settings )
function _fnInitComplete (line 4749) | function _fnInitComplete ( settings, json )
function _fnLengthChange (line 4764) | function _fnLengthChange ( settings, val )
function _fnFeatureHtmlLength (line 4782) | function _fnFeatureHtmlLength ( settings )
function _fnFeatureHtmlPaginate (line 4843) | function _fnFeatureHtmlPaginate ( settings )
function _fnPageChange (line 4904) | function _fnPageChange ( settings, action, redraw )
function _fnFeatureHtmlProcessing (line 4977) | function _fnFeatureHtmlProcessing ( settings )
function _fnProcessingDisplay (line 4994) | function _fnProcessingDisplay ( settings, show )
function _fnFeatureHtmlTable (line 5009) | function _fnFeatureHtmlTable ( settings )
function _fnScrollDraw (line 5167) | function _fnScrollDraw ( settings )
function _fnApplyToChildren (line 5446) | function _fnApplyToChildren( fn, an1, an2 )
function _fnCalculateColumnWidths (line 5485) | function _fnCalculateColumnWidths ( oSettings )
function _fnConvertToWidth (line 5723) | function _fnConvertToWidth ( width, parent )
function _fnGetWidestNode (line 5747) | function _fnGetWidestNode( settings, colIdx )
function _fnGetMaxLenString (line 5768) | function _fnGetMaxLenString( settings, colIdx )
function _fnStringToCss (line 5793) | function _fnStringToCss( s )
function _fnSortFlatten (line 5813) | function _fnSortFlatten ( settings )
function _fnSort (line 5885) | function _fnSort ( oSettings )
function _fnSortAria (line 6011) | function _fnSortAria ( settings )
function _fnSortListener (line 6066) | function _fnSortListener ( settings, colIdx, append, callback )
function _fnSortAttachListener (line 6150) | function _fnSortAttachListener ( settings, attachTo, colIdx, callback )
function _fnSortingClasses (line 6188) | function _fnSortingClasses( settings )
function _fnSortData (line 6221) | function _fnSortData( settings, idx )
function _fnSaveState (line 6264) | function _fnSaveState ( settings )
function _fnLoadState (line 6299) | function _fnLoadState ( settings, oInit )
function _fnSettingsFromNode (line 6386) | function _fnSettingsFromNode ( table )
function _fnLog (line 6405) | function _fnLog( settings, level, msg, tn )
function _fnMap (line 6448) | function _fnMap( ret, src, name, mappedName )
function _fnExtend (line 6490) | function _fnExtend( out, extender, breakRefs )
function _fnBindAction (line 6526) | function _fnBindAction( n, oData, fn )
function _fnCallbackReg (line 6555) | function _fnCallbackReg( oSettings, sStore, fn, sName )
function _fnCallbackFire (line 6581) | function _fnCallbackFire( settings, callbackArr, eventName, args )
function _fnLengthOverflow (line 6603) | function _fnLengthOverflow ( settings )
function _fnRenderer (line 6628) | function _fnRenderer( settings, type )
function _fnDataSource (line 6657) | function _fnDataSource ( settings )
function _numbers (line 14444) | function _numbers ( page, pages ) {
function _addNumericSort (line 14731) | function _addNumericSort ( decimalPlace ) {
function _fnExternApiFunc (line 14979) | function _fnExternApiFunc (fn)
FILE: static/plugins/layer/layer.js
function b (line 2) | function b(a){a=g.find(a),a.height(i[1]-j-k-2*(0|parseFloat(a.css("paddi...
function a (line 2) | function a(){var a=g.cancel&&g.cancel(b.index,d);a===!1||f.close(b.index)}
function b (line 2) | function b(){a.css({top:f+(e.config.fix?d.scrollTop():0)})}
function g (line 2) | function g(a,b,c){var d=new Image;return d.src=a,d.complete?b(d):(d.onlo...
FILE: static/template/js/bootstrap-datepicker.js
function UTCDate (line 17) | function UTCDate(){
function UTCToday (line 20) | function UTCToday(){
function isUTCEquals (line 24) | function isUTCEquals(date1, date2) {
function alias (line 31) | function alias(method){
function isValidDate (line 36) | function isValidDate(d) {
function opts_from_el (line 1586) | function opts_from_el(el, prefix){
function opts_from_locale (line 1603) | function opts_from_locale(lang){
function applyNearbyYear (line 1816) | function applyNearbyYear(year, threshold){
function match_part (line 1866) | function match_part(){
FILE: static/template/js/jquery-ui.js
function focusable (line 122) | function focusable( element, isTabIndexNotNaN ) {
function visible (line 143) | function visible( element ) {
function reduce (line 185) | function reduce( elem, size, border, margin ) {
function handlerProxy (line 692) | function handlerProxy() {
function handlerProxy (line 728) | function handlerProxy() {
function isOverAxis (line 1931) | function isOverAxis( x, reference, size ) {
function num (line 2288) | function num(v) {
function isNumber (line 2292) | function isNumber(value) {
function isOverAxis (line 3507) | function isOverAxis( x, reference, size ) {
function isFloating (line 3511) | function isFloating(item) {
function clamp (line 4930) | function clamp( value, prop, allowEmpty ) {
function stringParse (line 4956) | function stringParse( string ) {
function hue2rgb (line 5206) | function hue2rgb( p, q, h ) {
function getElementStyles (line 5474) | function getElementStyles( elem ) {
function styleDifference (line 5502) | function styleDifference( oldStyle, newStyle ) {
function _normalizeArguments (line 5823) | function _normalizeArguments( effect, options, speed, callback ) {
function standardAnimationOption (line 5875) | function standardAnimationOption( option ) {
function run (line 5920) | function run( next ) {
function Datepicker (line 7626) | function Datepicker() {
function bindHover (line 9558) | function bindHover(dpDiv) {
function extendRemove (line 9584) | function extendRemove(target, props) {
function checkFocus (line 9877) | function checkFocus() {
function filteredUi (line 10054) | function filteredUi( ui ) {
function filteredUi (line 10096) | function filteredUi( ui ) {
function childComplete (line 10731) | function childComplete() {
function animComplete (line 10780) | function animComplete() {
function escape (line 11585) | function escape( value ) {
function getOffsets (line 12042) | function getOffsets( offsets, width, height ) {
function parseCss (line 12049) | function parseCss( element, property ) {
function getDimensions (line 12053) | function getDimensions( elem ) {
function modifier (line 13307) | function modifier( fn ) {
function checkFocus (line 13427) | function checkFocus() {
function getNextTabId (line 13789) | function getNextTabId() {
function isLocal (line 13793) | function isLocal( anchor ) {
function constrain (line 14010) | function constrain() {
function complete (line 14356) | function complete() {
function show (line 14361) | function show() {
function addDescribedBy (line 14621) | function addDescribedBy( elem, id ) {
function removeDescribedBy (line 14629) | function removeDescribedBy( elem ) {
function position (line 14850) | function position( event ) {
FILE: static/template/js/uncompressed/holder.js
function contentLoaded (line 139) | function contentLoaded(n,t){var l="complete",s="readystatechange",u=!1,h...
function selector (line 142) | function selector(a){
function extend (line 148) | function extend(a,b){
function text_size (line 172) | function text_size(width, height, template) {
function draw (line 184) | function draw(args) {
function render (line 225) | function render(mode, el, holder, src) {
function dimension_check (line 287) | function dimension_check(el, callback) {
function resizable_update (line 308) | function resizable_update(element) {
function parse_flags (line 348) | function parse_flags(flags, options) {
FILE: static/template/js/uncompressed/pace.js
function ctor (line 5) | function ctor() { this.constructor = child; }
function Evented (line 136) | function Evented() {}
function NoTargetError (line 223) | function NoTargetError() {
function Bar (line 233) | function Bar() {
function Events (line 310) | function Events() {
function RequestIntercept (line 404) | function RequestIntercept() {
function AjaxMonitor (line 505) | function AjaxMonitor() {
function XHRRequestTracker (line 529) | function XHRRequestTracker(request) {
function SocketRequestTracker (line 568) | function SocketRequestTracker(request) {
function ElementMonitor (line 586) | function ElementMonitor(options) {
function ElementTracker (line 607) | function ElementTracker(selector) {
function DocumentMonitor (line 639) | function DocumentMonitor() {
function EventLagMonitor (line 657) | function EventLagMonitor() {
function Scaler (line 688) | function Scaler(source) {
FILE: static/template/js/uncompressed/run_prettify.js
function contentLoaded (line 76) | function contentLoaded(callback) {
function loadStylesheetsFallingBack (line 116) | function loadStylesheetsFallingBack(stylesheets) {
function checkPendingLanguages (line 211) | function checkPendingLanguages() {
function combinePrefixPatterns (line 472) | function combinePrefixPatterns(regexs) {
function extractSourceSpans (line 750) | function extractSourceSpans(node, isPreformatted) {
function appendDecorations (line 802) | function appendDecorations(basePos, sourceCode, langHandler, out) {
function childContentWrapper (line 825) | function childContentWrapper(element) {
function createSimpleLexer (line 884) | function createSimpleLexer(shortcutStylePatterns, fallthroughStylePatter...
function sourceDecorator (line 1027) | function sourceDecorator(options) {
function numberLines (line 1213) | function numberLines(node, opt_startLineNum, isPreformatted) {
function recombineTagsAndDecorations (line 1349) | function recombineTagsAndDecorations(job) {
function registerLangHandler (line 1477) | function registerLangHandler(handler, fileExtensions) {
function langHandlerForExtension (line 1487) | function langHandlerForExtension(extension, source) {
function applyDecorator (line 1602) | function applyDecorator(job) {
function $prettyPrintOne (line 1635) | function $prettyPrintOne(sourceCodeHtml, opt_langExtension, opt_numberLi...
function $prettyPrint (line 1669) | function $prettyPrint(opt_whenDone, opt_root) {
function onLangsLoaded (line 1884) | function onLangsLoaded() {
FILE: websockted/datax_web_job_instance.py
function follow (line 22) | def follow(thefile):
Condensed preview — 115 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (2,205K chars).
[
{
"path": ".gitignore",
"chars": 134,
"preview": "# python pyc pyo\n*.pyc\n*.pyo\n\n# log\n*.log\n\n# out\n*.out\n\n# pid \n*.pid\n\n# pycharm\n.idea\n\n# datax\ndatax\n\n# create table sql"
},
{
"path": "FirstBlood/__init__.py",
"chars": 190,
"preview": "from __future__ import absolute_import\r\n\r\n# This will make sure the app is always imported when\r\n# Django starts so that"
},
{
"path": "FirstBlood/celery.py",
"chars": 756,
"preview": "#!/usr/bin/python env\r\n# -*- coding: UTF-8 -*-\r\n# Description: \r\n# Author: 黄小雪\r\n# Date: "
},
{
"path": "FirstBlood/settings.py",
"chars": 5874,
"preview": "# -*-coding:utf-8-*-\n\"\"\"\nDjango settings for FirstBlood project.\n\nGenerated by 'django-admin startproject' using Django "
},
{
"path": "FirstBlood/urls.py",
"chars": 2023,
"preview": "# -*- coding: UTF-8 -*-\n\"\"\"FirstBlood URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more informati"
},
{
"path": "FirstBlood/views.py",
"chars": 1188,
"preview": "# -*- coding: UTF-8 -*-\nfrom django.contrib.auth.decorators import permission_required\nfrom django.contrib.auth.decorato"
},
{
"path": "FirstBlood/wsgi.py",
"chars": 397,
"preview": "\"\"\"\nWSGI config for FirstBlood project.\n\nIt exposes the WSGI callable as a module-level variable named ``application``.\n"
},
{
"path": "LICENSE",
"chars": 1059,
"preview": "MIT License\n\nCopyright (c) 2022 盲僧\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this"
},
{
"path": "README.md",
"chars": 2162,
"preview": "精简版ETL数据转换工具\n==========\n## 功能\n 1.数据同步(目前只支持MySQL)\n 2.执行SQL脚本 (后期开发)\n 3.定时执行\n \n## 安装(两种方法)\n### 1.Docker 镜像安装\n"
},
{
"path": "batch_job/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "batch_job/admin.py",
"chars": 128,
"preview": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.contrib import admin\n\n# Register your model"
},
{
"path": "batch_job/apps.py",
"chars": 157,
"preview": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.apps import AppConfig\n\n\nclass BatchJobConfi"
},
{
"path": "batch_job/conf/__init__.py",
"chars": 209,
"preview": "#!/usr/bin/python3\r\n# -*- coding: utf-8 -*-\r\n# @Function:\r\n# @Time : 2018/7/20 15:34\r\n# @Author : Hanson\r\n# @Email "
},
{
"path": "batch_job/conf/config.py",
"chars": 3290,
"preview": "#!/usr/bin/python3\r\n# -*- coding: utf-8 -*-\r\n# @Function: 批处理作业配置文件\r\n# @Time : 2018/7/20 15:35\r\n# @Author : Hanson\r\n"
},
{
"path": "batch_job/models.py",
"chars": 365,
"preview": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models\n\n# Create your models here"
},
{
"path": "batch_job/tests.py",
"chars": 618,
"preview": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.test import TestCase\n\n# Create your tests h"
},
{
"path": "batch_job/urls.py",
"chars": 1620,
"preview": "# -*- coding: UTF-8 -*-\nfrom django.conf.urls import url\nimport views\n\nurlpatterns = [\n # Examples:\n # url(r'^$', "
},
{
"path": "batch_job/views.py",
"chars": 35811,
"preview": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.contrib.auth.decorators import permission_r"
},
{
"path": "controller/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "controller/core/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "controller/core/access.py",
"chars": 4448,
"preview": "# -*- coding: UTF-8 -*-\r\n# Description: \r\n# Author: 黄小雪\r\n# Date: 2017年09月07日\r\n#"
},
{
"path": "controller/core/excel.py",
"chars": 3096,
"preview": "#! /usr/bin/env python\r\n# -*-coding:utf-8-*-\r\n##################################################\r\n# Function: 银谷在"
},
{
"path": "controller/core/local_mysql.py",
"chars": 420,
"preview": "#! /usr/bin/env python\r\n# -*-coding:utf-8-*-\r\n##################################################\r\n# Function: 银谷在"
},
{
"path": "controller/core/mailtable.py",
"chars": 1912,
"preview": "#!/usr/bin/python env\r\n# -*- coding: UTF-8 -*-\r\n# Description: \r\n# Author: 黄小雪\r\n# Date: "
},
{
"path": "controller/core/public.py",
"chars": 2466,
"preview": "# -*- coding: UTF-8 -*-\r\nimport datetime\r\nimport time\r\nimport json\r\n\r\nclass Currency(object):\r\n # 通用帮助\r\n def __in"
},
{
"path": "controller/core/query_sql.py",
"chars": 5695,
"preview": "#!/usr/bin/python env\r\n# -*- coding: UTF-8 -*-\r\n# Description: \r\n# Author: 黄小雪\r\n# Date: "
},
{
"path": "controller/core/unicode_width.py",
"chars": 742,
"preview": "#!/usr/bin/env python\r\n#-*-coding:utf-8-*-\r\n\r\n\r\ndef get_max_length(arg):\r\n length = str_len(arg[0]) + 2\r\n return l"
},
{
"path": "controller/public/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "controller/public/dataconn.py",
"chars": 6601,
"preview": "#!/usr/bin/python env\r\n# -*- coding: UTF-8 -*-\r\n# Description: \r\n# Author: 黄小雪\r\n# Date: "
},
{
"path": "controller/public/log.py",
"chars": 1607,
"preview": "#!/usr/bin/python env\r\n# -*- coding: UTF-8 -*-\r\n# Description: 日志记录\r\n# Author: 黄小雪\r\n# Date: 2"
},
{
"path": "controller/public/mailclass.py",
"chars": 3343,
"preview": "#! /usr/bin/env python\r\n# -*- coding: UTF-8 -*-\r\n##################################################\r\n# Function: 银谷在线"
},
{
"path": "controller/public/mysql_helper.py",
"chars": 9378,
"preview": "#!/usr/bin/python env\r\n# -*- coding: UTF-8 -*-\r\n# Description:\r\n# Author: 黄小雪\r\n# Date: 2017年07月12日"
},
{
"path": "controller/public/pagination.py",
"chars": 4215,
"preview": "#!/usr/bin/env python\r\n# -*- coding: UTF-8 -*-\r\n\r\nfrom django.core.paginator import Paginator\r\n\r\n\r\nclass Paginator_help:"
},
{
"path": "controller/public/sqlserver_helper.py",
"chars": 7091,
"preview": "#!/usr/bin/python env\r\n# -*- coding: UTF-8 -*-\r\n# Description: \r\n# Author: 黄小雪\r\n# Date: "
},
{
"path": "create_table.sql",
"chars": 6240,
"preview": "/*\r\n* 创建数据库\r\n*/\r\ncreate database FirstBlood default character set utf8 collate utf8_bin;\r\n\r\n/* 进入数据库 */\r\nuse FirstBlood;"
},
{
"path": "datax_web/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "datax_web/admin.py",
"chars": 63,
"preview": "from django.contrib import admin\n\n# Register your models here.\n"
},
{
"path": "datax_web/apps.py",
"chars": 133,
"preview": "from __future__ import unicode_literals\n\nfrom django.apps import AppConfig\n\n\nclass DataxWebConfig(AppConfig):\n name ="
},
{
"path": "datax_web/conf/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "datax_web/conf/config.py",
"chars": 5714,
"preview": "#!/usr/bin/python env\r\n# -*- coding: UTF-8 -*-\r\nimport sys\r\nimport os\r\nreload(sys)\r\nsys.setdefaultencoding(\"utf-8\")\r\n\r\n_"
},
{
"path": "datax_web/models.py",
"chars": 98,
"preview": "from __future__ import unicode_literals\n\nfrom django.db import models\n\n# Create your models here.\n"
},
{
"path": "datax_web/tests.py",
"chars": 923,
"preview": "# -*- coding: UTF-8 -*-\nfrom django.test import TestCase\n\n# Create your tests here.\n\n#!/usr/bin/python\n\n# Copyright 2013"
},
{
"path": "datax_web/urls.py",
"chars": 1036,
"preview": "# -*- coding: UTF-8 -*-\nfrom django.conf.urls import url\nimport views\n\nurlpatterns = [\n # Examples:\n # url(r'^$', "
},
{
"path": "datax_web/views.py",
"chars": 25317,
"preview": "# -*- coding: UTF-8 -*-\nfrom django.contrib.auth.decorators import permission_required\nfrom django.contrib.auth.decorato"
},
{
"path": "manage.py",
"chars": 253,
"preview": "#!/usr/bin/env python\nimport os\nimport sys\n\nif __name__ == \"__main__\":\n os.environ.setdefault(\"DJANGO_SETTINGS_MODULE"
},
{
"path": "requirements.txt",
"chars": 123,
"preview": "celery==3.1.25\ncelery-with-redis==3.0\nDjango==1.11.14\ndjango-celery==3.2.1\npymssql==2.1.4\nredis==2.10.6\nsupervisor==3.3."
},
{
"path": "static/css/login.css",
"chars": 2340,
"preview": "\r\nbody{background: url(/static/img/login_bg.jpg) 0 -200px no-repeat #000;\r\nbackground-size: cover;margin:0; color:#FFF}\r"
},
{
"path": "static/js/csrf.js",
"chars": 979,
"preview": "function getCookie(name) {\r\n var cookieValue = null;\r\n if (document.cookie && document.cookie != '') {\r\n va"
},
{
"path": "static/plugins/bootstarp-table/bootstrap-table-zh-CN.js",
"chars": 1266,
"preview": "/**\n * Bootstrap Table Chinese translation\n * Author: Zhixin Wen<wenzhixin2010@gmail.com>\n */\n(function ($) {\n 'use s"
},
{
"path": "static/plugins/datatables/css/jquery.dataTables.css",
"chars": 15423,
"preview": "/*\n * Table styles\n */\ntable.dataTable {\n width: 100%;\n margin: 0 auto;\n clear: both;\n border-collapse: separate;\n "
},
{
"path": "static/plugins/datatables/css/jquery.dataTables_themeroller.css",
"chars": 14229,
"preview": "/*\n * Table styles\n */\ntable.dataTable {\n width: 100%;\n margin: 0 auto;\n clear: both;\n border-collapse: separate;\n "
},
{
"path": "static/plugins/datatables/js/jquery.dataTables.js",
"chars": 447277,
"preview": "/*! DataTables 1.10.12\n * ©2008-2015 SpryMedia Ltd - datatables.net/license\n */\n\n/**\n * @summary DataTables\n * @desc"
},
{
"path": "static/plugins/layer/layer.js",
"chars": 19831,
"preview": "/*! layer-v2.4 弹层组件 License LGPL http://layer.layui.com/ By 贤心 */\n;!function(a,b){\"use strict\";var c,d,e={getPath:funct"
},
{
"path": "static/plugins/layer/skin/layer.css",
"chars": 14042,
"preview": "/*!\r\n \r\n @Name: layer's style\r\n @Author: 贤心\r\n @Blog: sentsin.com\r\n \r\n */.layui-layer-imgbar,.layui-layer-imgtit a,.layui"
},
{
"path": "static/template/bootstrap/css/bootstrap.css",
"chars": 146010,
"preview": "/*!\n * Bootstrap v3.3.7 (http://getbootstrap.com)\n * Copyright 2011-2016 Twitter, Inc.\n * Licensed under MIT (https://gi"
},
{
"path": "static/template/css/bootstrap-datepicker.css",
"chars": 17311,
"preview": "/*!\n * Datepicker for Bootstrap v1.7.0-dev (https://github.com/uxsolutions/bootstrap-datepicker)\n *\n * Licensed under th"
},
{
"path": "static/template/css/bootstrap-timepicker.css",
"chars": 2780,
"preview": "/*!\n * Timepicker Component for Twitter Bootstrap\n *\n * Copyright 2013 Joris de Wit\n *\n * Contributors https://github.co"
},
{
"path": "static/template/css/bootstrap-wysihtml5.css",
"chars": 2334,
"preview": "ul.wysihtml5-toolbar {\n\tmargin: 0;\n\tpadding: 0;\n\tdisplay: block;\n}\n\nul.wysihtml5-toolbar::after {\n\tclear: both;\n\tdisplay"
},
{
"path": "static/template/css/colorbox/colorbox.css",
"chars": 4340,
"preview": "/*\n Colorbox Core Style:\n The following CSS is consistent between example themes and should not be altered.\n*/\n#co"
},
{
"path": "static/template/css/datepicker.css",
"chars": 4953,
"preview": "/*!\n * Datepicker for Bootstrap\n *\n * Copyright 2012 Stefan Petre\n * Licensed under the Apache License v2.0\n * http://ww"
},
{
"path": "static/template/css/dropzone/dropzone.css",
"chars": 11650,
"preview": "/* The MIT License */\n.dropzone,\n.dropzone *,\n.dropzone-previews,\n.dropzone-previews * {\n -webkit-box-sizing: border-bo"
},
{
"path": "static/template/css/endless-skin.css",
"chars": 21080,
"preview": "aside.skin-1 {\n background: #efefef;\n}\naside.skin-1 .brand {\n background: #3c8dbc;\n color: #fff;\n border-bottom-colo"
},
{
"path": "static/template/css/endless.css",
"chars": 165309,
"preview": "@import url(http://fonts.googleapis.com/css?family=Open+Sans);\n#top-nav {\n height: 45px;\n padding-right: 10px;\n backg"
},
{
"path": "static/template/css/fullcalendar.css",
"chars": 10998,
"preview": "/*!\n * FullCalendar v1.6.0 Stylesheet\n * Docs & License: http://arshaw.com/fullcalendar/\n * (c) 2013 Adam Shaw\n */\n\n\n.fc"
},
{
"path": "static/template/css/gritter/jquery.gritter.css",
"chars": 1936,
"preview": "/* the norm */\n#gritter-notice-wrapper {\n\tposition:fixed;\n\ttop:20px;\n\tright:20px;\n\twidth:301px;\n\tz-index:9999;\n}\n#gritte"
},
{
"path": "static/template/css/jcarousel.responsive.css",
"chars": 2248,
"preview": ".jcarousel-wrapper {\n margin: 20px auto;\n position: relative;\n border: 10px solid #fff;\n -webkit-border-radi"
},
{
"path": "static/template/css/jquery.dataTables_themeroller.css",
"chars": 4559,
"preview": "\n\n/*\n * Table\n */\ntable.dataTable {\n\tmargin: 0 auto;\n\tclear: both;\n\twidth: 100%;\n\tborder-collapse: collapse;\n}\n\ntable.da"
},
{
"path": "static/template/css/jquery.tagsinput.css",
"chars": 904,
"preview": "div.tagsinput { border:1px solid #CCC; background: #FFF; padding:5px; width:300px; height:100px; overflow-y: auto;}\ndiv."
},
{
"path": "static/template/css/morris.css",
"chars": 443,
"preview": ".morris-hover{position:absolute;z-index:1000;}.morris-hover.morris-default-style{border-radius:10px;padding:6px;color:#6"
},
{
"path": "static/template/css/pace.css",
"chars": 2270,
"preview": "/* This is a compiled file, you should be editing the file in the templates directory */\n.pace {\n -webkit-pointer-event"
},
{
"path": "static/template/css/prettify.css",
"chars": 1570,
"preview": "/* Pretty printing styles. Used with prettify.js. */\n\n/* SPAN elements with the classes below are added by prettyprint. "
},
{
"path": "static/template/css/slider.css",
"chars": 4165,
"preview": "/*!\n * Slider for Bootstrap\n *\n * Copyright 2012 Stefan Petre\n * Licensed under the Apache License v2.0\n * http://www.ap"
},
{
"path": "static/template/js/bootstrap-datepicker.js",
"chars": 56558,
"preview": "/*!\n * Datepicker for Bootstrap v1.7.0-dev (https://github.com/uxsolutions/bootstrap-datepicker)\n *\n * Licensed under th"
},
{
"path": "static/template/js/endless/endless.js",
"chars": 9180,
"preview": "\n$(function\t()\t{\n\n\t// Cookie validation\n\tif(jQuery.type($.cookie('skin_color')) != 'undefined')\t{\n\t\n\t\t$('aside').removeC"
},
{
"path": "static/template/js/endless/endless_dashboard.js",
"chars": 6499,
"preview": "$(function\t()\t{\n\n\t//Flot Chart\n\t//Website traffic chart\n\tvar init = { data: [[0, 5], [1, 8], [2, 5], [3, 8], [4, 7], [5,"
},
{
"path": "static/template/js/endless/endless_form.js",
"chars": 1527,
"preview": "$(function()\t{\n\t// Chosen \n\t$(\".chzn-select\").chosen();\n\t\n\t// Datepicker\n\t$('.datepicker').datepicker();\n\n\t// Timepicker"
},
{
"path": "static/template/js/endless/endless_wizard.js",
"chars": 3854,
"preview": "$(function\t()\t{\n\n\t//Form Wizard 1\n\tvar currentStep_1 = 1;\n\t\n\t//Form Wizard 2\n\tvar currentStep_2 = 1; \n\t\n\t$('.wizard-demo"
},
{
"path": "static/template/js/jquery-ui.js",
"chars": 435844,
"preview": "/*! jQuery UI - v1.10.3 - 2013-05-03\n* http://jqueryui.com\n* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui"
},
{
"path": "static/template/js/uncompressed/bootstrap-wysihtml5.js",
"chars": 23154,
"preview": "!function($, wysi) {\n \"use strict\";\n\n var tpl = {\n \"font-styles\": function(locale, options) {\n v"
},
{
"path": "static/template/js/uncompressed/holder.js",
"chars": 14727,
"preview": "/*\n\nHolder - 2.2 - client side image placeholders\n(c) 2012-2013 Ivan Malopinsky / http://imsky.co\n\nProvided under the MI"
},
{
"path": "static/template/js/uncompressed/pace.js",
"chars": 25115,
"preview": "(function() {\n var AjaxMonitor, Bar, DocumentMonitor, ElementMonitor, ElementTracker, EventLagMonitor, Evented, Events,"
},
{
"path": "static/template/js/uncompressed/run_prettify.js",
"chars": 79658,
"preview": "// Copyright (C) 2013 Google Inc.\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not u"
},
{
"path": "supervisord/FirstBlood_dev.conf",
"chars": 11283,
"preview": "; Sample supervisor config file.\n;\n; For more information on the config file, please see:\n; http://supervisord.org/confi"
},
{
"path": "supervisord/FirstBlood_pro.conf",
"chars": 11380,
"preview": "; Sample supervisor config file.\n;\n; For more information on the config file, please see:\n; http://supervisord.org/confi"
},
{
"path": "templates/403.html",
"chars": 2573,
"preview": "<!DOCTYPE html>\n<html lang=\"en\">\n <head>\n <meta charset=\"utf-8\">\n <title>Endless Admin</title>\n <meta name=\"vi"
},
{
"path": "templates/404.html",
"chars": 2598,
"preview": "<!DOCTYPE html>\n<html lang=\"en\">\n <head>\n <meta charset=\"utf-8\">\n <title>Endless Admin</title>\n <meta name=\"vi"
},
{
"path": "templates/base.html",
"chars": 10733,
"preview": "<!DOCTYPE html>\n<html lang=\"en\">\n <head>\n <meta charset=\"utf-8\">\n <title>First Blood</title>\n <meta name=\"view"
},
{
"path": "templates/batch_job/add_batch_job.html",
"chars": 19520,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/batch_job/add_crontabs.html",
"chars": 6175,
"preview": "{% extends 'base.html' %}\n{% load ygol_filter_tag %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n displa"
},
{
"path": "templates/batch_job/batch_job_instance.html",
"chars": 8537,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/batch_job/batch_job_instance_details.html",
"chars": 12335,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/batch_job/crontabs.html",
"chars": 5082,
"preview": "{% extends 'base.html' %}\n{% load ygol_filter_tag %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n displa"
},
{
"path": "templates/batch_job/index.html",
"chars": 5993,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/batch_job/mod_crontabs.html",
"chars": 7866,
"preview": "{% extends 'base.html' %}\n{% load ygol_filter_tag %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n displa"
},
{
"path": "templates/batch_job/update_batch_job.html",
"chars": 21206,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/batch_job/update_job.html",
"chars": 11549,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/datax_web/add_crontabs.html",
"chars": 6175,
"preview": "{% extends 'base.html' %}\n{% load ygol_filter_tag %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n displa"
},
{
"path": "templates/datax_web/add_job.html",
"chars": 15144,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/datax_web/crontabs.html",
"chars": 5082,
"preview": "{% extends 'base.html' %}\n{% load ygol_filter_tag %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n displa"
},
{
"path": "templates/datax_web/index.html",
"chars": 6472,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/datax_web/mod_crontabs.html",
"chars": 7866,
"preview": "{% extends 'base.html' %}\n{% load ygol_filter_tag %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n displa"
},
{
"path": "templates/datax_web/mod_periodic_task.html",
"chars": 31370,
"preview": "{% extends 'base.html' %}\n{% load ygol_filter_tag %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n displa"
},
{
"path": "templates/datax_web/monitor_job.html",
"chars": 10915,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/datax_web/monitor_job_detail.html",
"chars": 3456,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/datax_web/update_job.html",
"chars": 12032,
"preview": "{% extends 'base.html' %}\n\n{% block header_content %}\n<style>\n.center {\n width: auto;\n display: table;\n margin-left: aut"
},
{
"path": "templates/index.html",
"chars": 22383,
"preview": "{% extends 'base.html' %}\n\n{% block container %}\n<div id=\"main-container\">\n\t\t\t<div id=\"breadcrumb\">\n\t\t\t\t<ul class=\"bread"
},
{
"path": "templates/registered/login.html",
"chars": 3866,
"preview": "<!DOCTYPE html>\n<html lang=\"en\">\n <head>\n <meta charset=\"utf-8\">\n <title>Login</title>\n <meta name=\"viewport\" "
},
{
"path": "templates/registered/password_change.html",
"chars": 3371,
"preview": "<!DOCTYPE html>\n<html lang=\"en\">\n <head>\n <meta charset=\"utf-8\">\n <title>Password Change</title>\n <meta name=\""
},
{
"path": "templates/registered/password_change_done.html",
"chars": 1300,
"preview": "{% extends 'base.html' %}\n\n{% block container %}\n<div id=\"main-container\">\n <div id=\"breadcrumb\">\n <ul class=\""
},
{
"path": "websockted/CHANGES",
"chars": 1462,
"preview": "Version 0.3.0 (??, 2017)\n\n* Migration of underlying websocket server to Gorilla Websocket lib.\n* Binaries build code sw"
},
{
"path": "websockted/LICENSE",
"chars": 1327,
"preview": "Copyright (c) 2014, Joe Walnes and the websocketd authors.\nAll rights reserved.\n\nRedistribution and use in source and bi"
},
{
"path": "websockted/README.md",
"chars": 4909,
"preview": "websocketd\n==========\n\n`websocketd` is a small command-line tool that will wrap an existing command-line interface progr"
},
{
"path": "websockted/__init__.py",
"chars": 0,
"preview": ""
},
{
"path": "websockted/datax_web_job_instance.py",
"chars": 1199,
"preview": "#!/usr/bin/python3\r\n# -*- coding: utf-8 -*-\r\n# @Function: 实时获取 datax 任务实例执行详情\r\n# @Time : 2018/7/13 9:34\r\n# @Author :"
}
]
// ... and 1 more file (download for full content)
About this extraction
This page contains the full source code of the hanson007/FirstBlood GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 115 files (1.9 MB), approximately 566.2k tokens, and a symbol index with 521 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.