#!/usr/bin/python
# encoding=utf-8
# author: tangwy
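
"""Merge the cleaned JSON result files produced by the data-clean jobs.

entry()             -- half-hourly merge: combines the YYYYMMDD-HHMM.json pieces in the
                       clean-file directory into one per-day file (YYYY-MM-DD.json) and
                       moves any result file larger than 500 MB to the merge_file directory.
merge_large_entry() -- pre-insert merge: merges the files waiting in the merge_file
                       directory before they are loaded into the database.
"""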

import re, os, json, time
import codecs

from db2json import DBUtils
from datetime import datetime, timedelta
from base_dataclean_pg import TRACE_KEY
from ext_logging import logger_cron, get_clean_file_path, merge_large_file_path, logger_trace
from file_helper import (read_large_json_file, write_large_file, get_file_content,
                         delete_frile, is_file_larger_than_500mb, merge_data_new)
from collections import defaultdict

# Cleaned data files are named like 20240808-2359.json
date_pattern = re.compile(r'^\d{8}-\d{4}\.json$')


def get_all_files(path):
    # List all file names that match the date pattern
    files = []
    for filename in os.listdir(path):
        if date_pattern.search(filename):
            files.append(filename)
    return files


# Group the files to be merged by date
def get_file_merge_array(filenames):
    # Use a defaultdict to collect the files of each date
    file_dict = defaultdict(list)
    for filename in filenames:
        date = filename[:8]
        file_dict[date].append(filename)

    file_dict = dict(file_dict)
    return file_dict
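
# Example with hypothetical names: ["20240721-0030.json", "20240721-0100.json", "20240722-0030.json"]
# is grouped into {"20240721": ["20240721-0030.json", "20240721-0100.json"],
#                  "20240722": ["20240722-0030.json"]}.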


# Move the previous day's result file (e.g. 2024-08-08.json) into the large-file directory,
# renamed to 20240808-2359.json, so it can wait there for the pre-insert merge.
# Only the previous day's file is moved.
def mv_root_file_to_current(base_path, new_path):
    new_file_name = (datetime.now() - timedelta(days=1)).strftime("%Y%m%d-2359.json")
    old_file_name = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d.json")

    old_full_path = os.path.join(base_path, old_file_name)
    new_full_path = os.path.join(new_path, new_file_name)
    if os.path.exists(old_full_path):
        if os.path.exists(new_full_path):
            logger_cron.error("MERGE_LARGE_FILE: file " + new_full_path + " already exists and will be overwritten by the move")
        os.rename(old_full_path, new_full_path)


# Merge files larger than 500 MB (the pre-insert merge in the merge_file directory)
def merge_large_files(file_dict, base_path):
    # Walk every date group in the dict
    for date_str, files in file_dict.items():
        # 20240721 -> 2024-07-21.json
        root_file_path = "{}-{}-{}.json".format(date_str[:4], date_str[4:6], date_str[6:])
        full_root_file_path = os.path.join(base_path, root_file_path)
        if len(files) > 0:
            file_objs = []
            file_full_path = []
            # Accumulator for the merged data
            merge_tmp_data = {}
            for filename in files:
                # e.g. 20240721-0170.json
                full_path = os.path.join(base_path, filename)
                file_full_path.append(full_path)
                logger_cron.info("INSERT: about to read file for merging " + full_path)
                tmp_data = read_large_json_file(full_path)
                logger_cron.info("INSERT: record count " + str(len(tmp_data)))

                file_objs.append(tmp_data)
                file_objs.append(merge_tmp_data)
                merge_tmp_data = merge_data_new(file_objs)
                logger_cron.info("INSERT: merge finished " + full_path)
                # Drop the inputs that have already been merged
                del file_objs[:]

            # Check whether a result file (e.g. 2024-08-08.json) already exists in the directory
            if os.path.exists(full_root_file_path):
                logger_cron.info("INSERT: about to read file for merging " + full_root_file_path)
                root_data = read_large_json_file(full_root_file_path)
                logger_cron.info("INSERT: record count " + str(len(root_data)))
                file_objs.append(root_data)
                file_objs.append(merge_tmp_data)
                merge_tmp_data = merge_data_new(file_objs)
                logger_cron.info("INSERT: merge finished " + full_root_file_path)
            logger_cron.info("INSERT: about to write the merged file")

            ###################### troubleshooting trace
            key = get_file_content()
            if key in merge_tmp_data:
                logger_trace.info("largefilemerge:" + full_root_file_path + ":" + key + ":" + str(merge_tmp_data[key]))

            write_large_file(full_root_file_path, json.dumps(merge_tmp_data))
            logger_cron.info("INSERT: finished writing the merged file")
            # Delete the files that have been merged
            for del_file in file_full_path:
                logger_cron.info("INSERT: about to delete " + del_file)
                delete_frile(del_file)
                #os.rename(del_file, del_file + ".cmp")
                logger_cron.info("INSERT: finished deleting " + del_file)


# Rename 2024-08-23.json to 20240823-HHMM.json, where HHMM is the current hour and minute
def get_new_file_name(old_file_path):
    file_name_with_ext = os.path.basename(old_file_path)
    file_name, file_extension = os.path.splitext(file_name_with_ext)
    sf = time.strftime("%H%M.json", time.localtime())
    new_name = file_name.replace("-", "")
    res_name = new_name + "-" + sf

    return res_name


# Merge all pending files for each date group (the half-hourly merge)
def merge_all_files(file_dict, base_path):
    # Walk every date group in the dict
    for date_str, files in file_dict.items():
        # 20240721 -> 2024-07-21.json
        root_file_path = "{}-{}-{}.json".format(date_str[:4], date_str[4:6], date_str[6:])
        full_root_file_path = os.path.join(base_path, root_file_path)
        if len(files) > 0:
            file_objs = []
            file_full_path = []
            # Accumulator for the merged data
            merge_tmp_data = {}

            for filename in files:
                # e.g. 20240721-0170.json
                full_path = os.path.join(base_path, filename)
                file_full_path.append(full_path)
                logger_cron.info("FILE_MERGE: about to read file for merging " + full_path)

                tmp_data = read_large_json_file(full_path)
                file_objs.append(tmp_data)
                file_objs.append(merge_tmp_data)
                merge_tmp_data = merge_data_new(file_objs)
                logger_cron.info("FILE_MERGE: file merge finished " + full_path)
                # Drop the inputs that have already been merged
                del file_objs[:]

            if os.path.exists(full_root_file_path):
                flag = is_file_larger_than_500mb(full_root_file_path)
                if flag:
                    logger_cron.info("FILE_MERGE: file exceeds 500 MB, moving it to the merge_file directory " + full_root_file_path)
                    large_file_root_path = merge_large_file_path()

                    # New file name
                    new_file_name = get_new_file_name(full_root_file_path)
                    logger_cron.info("FILE_MERGE: new file name " + new_file_name)
                    large_file_path = os.path.join(large_file_root_path, new_file_name)
                    logger_cron.info("FILE_MERGE: oldpath " + full_root_file_path)
                    if os.path.exists(large_file_path):
                        logger_cron.error("FILE_MERGE: file " + large_file_path + " already exists and will be overwritten by the move")

                    os.rename(full_root_file_path, large_file_path)
                    logger_cron.info("FILE_MERGE: newpath " + large_file_path + " moved successfully")
                else:
                    logger_cron.info("FILE_MERGE: file is under 500 MB and joins the merge " + full_root_file_path)
                    root_data = read_large_json_file(full_root_file_path)
                    file_objs.append(root_data)
                    file_objs.append(merge_tmp_data)
                    merge_tmp_data = merge_data_new(file_objs)

            ################### troubleshooting trace
            key = get_file_content()
            if key in merge_tmp_data:
                logger_trace.info("filemerge:" + full_root_file_path + ":" + key + ":" + str(merge_tmp_data[key]))

            logger_cron.info("FILE_MERGE: all files merged")
            write_large_file(full_root_file_path, json.dumps(merge_tmp_data))
            logger_cron.info("FILE_MERGE: finished writing the merged file")
            # Delete the files that have been merged
            for del_file in file_full_path:
                logger_cron.info("FILE_MERGE: about to delete " + del_file)
                delete_frile(del_file)
                #os.rename(del_file, del_file + ".cmp")
                logger_cron.info("FILE_MERGE: finished deleting " + del_file)


# Merge job executed every half hour
def entry():
    # Cleaned-data directory
    base_path = get_clean_file_path()
    # Collect the files that are waiting to be merged
    files = get_all_files(base_path)
    # Group the pending files by date
    file_dict = get_file_merge_array(files)
    # Merge all files
    logger_cron.info("FILE_MERGE: about to run the file merge")
    merge_all_files(file_dict, base_path)


# Large-file merge executed right before database insertion
def merge_large_entry():
    # Cleaned-data directory
    base_path = get_clean_file_path()
    # merge_file (large-file) directory
    new_base_path = merge_large_file_path()
    # Move 2024-08-08.json into the merge_file directory so it can wait for the pre-insert merge
    mv_root_file_to_current(base_path, new_base_path)
    # Collect the files that are waiting to be merged
    files = get_all_files(new_base_path)
    logger_cron.info("INSERT: files waiting to be merged " + json.dumps(files))
    # Group the pending files by date
    file_dict = get_file_merge_array(files)
    # Merge all files
    logger_cron.info("INSERT: about to run the file merge")
    merge_large_files(file_dict, new_base_path)
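

# Minimal sketch of how the two entry points could be invoked when the script is run
# directly. This is an assumption for illustration only -- in the original deployment the
# functions are presumably scheduled by cron, and the "--insert" flag below is hypothetical.
if __name__ == "__main__":
    import sys
    if len(sys.argv) > 1 and sys.argv[1] == "--insert":
        # Pre-insert merge of the large files
        merge_large_entry()
    else:
        # Regular half-hourly merge
        entry()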