# -*- coding: utf-8 -*-
# __author__ = "dukun"
import json
import time
import pymysql
import logging
import datetime
import numpy as np
import pandas as pd
import configparser
from celery_tasks import cel
# from DBUtils.PooledDB import PooledDB
from dbutils.pooled_db import PooledDB

# Log output format
LOG_FORMAT = "%(asctime)s %(filename)s[line:%(lineno)d] %(name)s %(levelname)s %(pathname)s %(message)s "
# Timestamp format; take care not to mix up the month and day fields
DATE_FORMAT = '%Y-%m-%d %H:%M:%S %a '
logging.basicConfig(level=logging.INFO,
                    format=LOG_FORMAT,
                    datefmt=DATE_FORMAT,
                    filename=r"./logs/channel.log"  # with filename set, logs are written to this file instead of the console
                    )

cf = configparser.RawConfigParser()
# cf = configparser.ConfigParser()
cf.read(r"./config.ini")  # read the config file; with an absolute path the os module is not needed
CHANNEL_IDS = json.loads(cf.get("channel_ids", "channel_ids"))
CHANNEL_HOST = cf.get("database1", "host")
CHANNEL_PORT = int(cf.get("database1", "port"))
CHANNEL_USER = cf.get("database1", "user")
CHANNEL_PASSWORD = cf.get("database1", "password")
CHANNEL_DB = cf.get("database1", "db")
STAT_HOST = cf.get("database2", "host")
STAT_PORT = int(cf.get("database2", "port"))
STAT_USER = cf.get("database2", "user")
STAT_PASSWORD = cf.get("database2", "password")
STAT_DB = cf.get("database2", "db")

# Database connection settings
channel_db_config = {
    'host': CHANNEL_HOST,
    'port': CHANNEL_PORT,
    'user': CHANNEL_USER,
    'password': CHANNEL_PASSWORD,
    'db': CHANNEL_DB,
    'charset': 'utf8',
    'autocommit': 1
}
stat_db_config = {
    'host': STAT_HOST,
    'port': STAT_PORT,
    'user': STAT_USER,
    'password': STAT_PASSWORD,
    'db': STAT_DB,
    'charset': 'utf8',
    'autocommit': 1
}

# Create a database connection pool
def createPool(db_config):
    spool = PooledDB(pymysql, mincached=5, **db_config)
    return spool


@cel.task
def get_channel_use_time():
    conn = createPool(channel_db_config).connection()
    stat_conn = createPool(stat_db_config).connection()
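    # For each dispatch record created in the last 2 hours, sum the processing time
    # (modify_date - create_date) and count the records per channel (trans_id),
    # face value (flow_amount) and send_status, then attach the channel name
    # from access_channel_info.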
    channel_sql = """SELECT trans_id, channel_name, flow_amount, use_time, status, cnt
                     FROM
                         (SELECT trans_id, flow_amount,
                                 SUM(UNIX_TIMESTAMP(modify_date) - UNIX_TIMESTAMP(create_date)) use_time,
                                 send_status status, COUNT(*) cnt
                          FROM mobile_flow_dispatch_rec
                          WHERE UNIX_TIMESTAMP(now()) - UNIX_TIMESTAMP(create_date) <= (2 * 3600)
                          GROUP BY trans_id, flow_amount, status) t1
                     LEFT JOIN access_channel_info
                         ON access_channel_info.channel_seq_id = t1.trans_id
                     ORDER BY channel_name; """
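    # One row per (stat time, channel id, operator, channel name, face value, stat type, avg seconds);
    # the leading 0 is assumed to be the auto-increment primary key of channel_use_time.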
    ins_sql = """ INSERT INTO channel_use_time VALUES(0,%s,%s,%s,%s,%s,%s,%s) """
    df = pd.read_sql(channel_sql, conn)

    ins_list = []
    faces = [30, 50, 100, 200, 300, 500]  # flow package face values
    date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
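
    # For every configured channel and every face value, compute the overall,
    # success (status '2') and failure (status '4') average processing times
    # and queue one insert row per statistic type (1/2/3).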
    for channel_id in CHANNEL_IDS:
        channel_df = df[df['trans_id'] == channel_id]
        if channel_df.empty:
            continue
        channel_name = channel_df['channel_name'].iloc[0]
        operator, channel_name = get_operator(channel_name)
        for face in faces:
            face_df = channel_df[channel_df['flow_amount'] == face]
            if face_df.empty:
                continue
            succ_df = face_df[face_df['status'] == '2']
            fail_df = face_df[face_df['status'] == '4']
            # average time of successful dispatches
            if not succ_df.empty:
                succ_time = succ_df['use_time'].iloc[0]
                succ_count = succ_df['cnt'].iloc[0]
                succ_avg_time = float(succ_time / succ_count)
            else:
                succ_avg_time = 0
            # average time of failed dispatches
            if not fail_df.empty:
                fail_time = fail_df['use_time'].iloc[0]
                fail_count = fail_df['cnt'].iloc[0]
                fail_avg_time = float(fail_time / fail_count)
            else:
                fail_avg_time = 0
            # overall average time across all statuses
            total_df = face_df.groupby('trans_id').agg({'use_time': 'sum', 'cnt': 'sum'})
            total_time = total_df['use_time'].iloc[0]
            total_count = total_df['cnt'].iloc[0]
            total_avg_time = float(total_time / total_count)
            ins_list.append((date, channel_id, operator, channel_name, face, 1, total_avg_time))  # overall average time
            ins_list.append((date, channel_id, operator, channel_name, face, 2, succ_avg_time))  # average time of successes
            ins_list.append((date, channel_id, operator, channel_name, face, 3, fail_avg_time))  # average time of failures

    stat_cursor = stat_conn.cursor()
    stat_cursor.executemany(ins_sql, ins_list)
    stat_conn.commit()  # persist the statistics (autocommit is also enabled in the pool config)
    stat_cursor.close()
    conn.close()
    stat_conn.close()
    logging.info("channel_use_time: inserted %d rows", len(ins_list))
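

# Detect which carrier a channel belongs to from its name and strip the
# carrier from the channel name; defaults to '移动' if no carrier is matched.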
def get_operator(channel_name):
    operator = '移动'
    if '移动' in channel_name:
        operator = '移动'
    elif '联通' in channel_name:
        operator = '联通'
    elif '电信' in channel_name:
        operator = '电信'
    channel_name = channel_name.replace(operator, "")
    return operator, channel_name


if __name__ == '__main__':
    # run once directly for a manual test; in production the task is triggered through Celery
    get_channel_use_time()