# -*- coding: utf-8 -*-
# Author: dukun
import re
import os
import sys
import time
import json
import pymysql
import hashlib
import requests
import pandas as pd
from DBUtils.PooledDB import PooledDB

# Matches // line comments and /* ... */ block comments so that
# "JSON with comments" files can be cleaned before json.loads().
# NOTE(review): a "//" inside a JSON string value (e.g. "http://...")
# would also match -- assumes config values contain no such sequences.
comment_re = re.compile(
    r'(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?',
    re.DOTALL | re.MULTILINE
)


def parse_json(filename):
    """Parse a JSON file that may contain // or /* ... */ comments.

    Comments are stripped with ``comment_re`` and the remaining text is
    handed to the standard ``json`` module.

    :param filename: path of the (commented) JSON file, read as UTF-8
    :return: the deserialized Python object
    :raises json.JSONDecodeError: if the stripped text is not valid JSON
    """
    with open(filename, encoding='utf8') as f:
        content = f.read()
    # Remove one comment at a time and re-scan from the top, so matches
    # created by joining the surrounding text are still caught.
    match = comment_re.search(content)
    while match:
        content = content[:match.start()] + content[match.end():]
        match = comment_re.search(content)
    return json.loads(content)


def changeChannelGroup():
    """Reconcile ``channel_to_group`` rows with the layout in ./group.json.

    For each (channel_group_id, channel_seq_id) pair in the JSON file,
    make the number of mapping rows equal ``run_count`` and renumber
    their weights consecutively starting at ``low_weight``:

    * equal count   -> only the weights are updated;
    * too few rows  -> missing rows are inserted, then the full set is
      re-read and renumbered;
    * too many rows -> the first ``run_count`` rows are renumbered and
      the surplus rows are deleted.

    Rows with weight >= 45 and weight < 45 form two separate pools; the
    target ``low_weight`` selects which pool is rewritten.

    Uses the module-level ``conn`` (created in ``__main__``) and closes
    it when finished.  The connection is opened with autocommit=1; on a
    per-channel error the statement is rolled back, the error is
    reported, and processing continues with the next channel.
    """
    # Parameterized templates: values are bound by the driver, never
    # interpolated into the SQL text (the original used str.format).
    que_sql = """
        SELECT seq_id FROM channel_to_group
        WHERE channel_group_id = %s AND channel_seq_id = %s AND weight >= 45"""
    que_sql_45 = """
        SELECT seq_id FROM channel_to_group
        WHERE channel_group_id = %s AND channel_seq_id = %s AND weight < 45"""
    del_sql = "DELETE FROM channel_to_group WHERE seq_id = %s"
    up_sql = "UPDATE channel_to_group SET weight = %s WHERE seq_id = %s"
    ins_sql = "INSERT INTO channel_to_group VALUES (0, %s, %s, %s)"

    json_data = parse_json(r'./group.json')
    cursor = conn.cursor()
    for item in json_data:
        channel_group_id = item['channel_group_id']
        for channel in item['channels']:
            channel_seq_id = channel['channel_seq_id']
            count = channel['run_count']
            weight = channel['low_weight']
            params = (channel_group_id, channel_seq_id)
            # Pick the weight pool the new weights will live in.
            sel_sql = que_sql if weight >= 45 else que_sql_45
            df = pd.read_sql(sel_sql, conn, params=params)
            que_count = len(df)
            try:
                seq_ids = df['seq_id'].tolist()  # hoisted: was rebuilt per index
                if count == que_count:
                    # Same number of rows: just renumber the weights.
                    up_list = [(weight + i, seq_ids[i]) for i in range(count)]
                    cursor.executemany(up_sql, up_list)
                elif count > que_count:
                    # Too few rows: insert the missing ones ...
                    ins_list = [
                        (channel_group_id, channel_seq_id, weight + i)
                        for i in range(count - que_count)
                    ]
                    cursor.executemany(ins_sql, ins_list)
                    # ... then re-read the pool and renumber the full set.
                    temp_df = pd.read_sql(sel_sql, conn, params=params)
                    temp_ids = temp_df['seq_id'].tolist()
                    up_list = [(weight + i, temp_ids[i]) for i in range(count)]
                    cursor.executemany(up_sql, up_list)
                else:
                    # Too many rows: renumber the first ``count`` rows and
                    # delete the surplus.
                    up_list = [(weight + i, seq_ids[i]) for i in range(count)]
                    cursor.executemany(up_sql, up_list)
                    del_list = [(sid,) for sid in seq_ids[count:]]
                    cursor.executemany(del_sql, del_list)
            except Exception as exc:
                # Was a bare ``except:`` that swallowed every error silently.
                # Keep the best-effort behavior (continue with the next
                # channel) but surface what went wrong.
                conn.rollback()
                print("数据回滚")
                print(exc, file=sys.stderr)
    cursor.close()
    conn.close()


def createPool(db_config):
    """Create a DBUtils connection pool over pymysql.

    :param db_config: keyword arguments forwarded to ``pymysql.connect``
    :return: a ``PooledDB`` instance keeping at least 5 cached connections
    """
    return PooledDB(pymysql, 5, **db_config)


if __name__ == '__main__':
    # NOTE(review): database credentials are hard-coded in source and
    # were committed; move them to environment variables or a config
    # file outside version control, and rotate these passwords.
    # Production database connection info.
    chanel_db_config = {
        'host': '47.95.217.180',
        'port': 3306,
        'user': 'root',
        'password': '93DkChZMgZRyCbWh',
        'db': 'fmp',
        'charset': 'utf8',
        'autocommit': 1,
    }
    # Test database connection info (unused; kept for manual switching).
    test_db_config = {
        'host': '192.168.101.7',
        'port': 3306,
        'user': 'root',
        'password': 'nibuzhidaowozhidao',
        'db': 'fmp',
        'charset': 'utf8',
        'autocommit': 1,
    }
    conn = createPool(chanel_db_config).connection()
    changeChannelGroup()