Compare commits
No commits in common. '216269d3cbb366cf0e1e39abe7cf6068eb51b474' and '10096ae4ec70f2c2c9ac4ebb55e5b163d3d04b90' have entirely different histories.
216269d3cb
...
10096ae4ec
@ -1,3 +0,0 @@ |
|||||||
{ |
|
||||||
"search_limit": 15 |
|
||||||
} |
|
@ -1,176 +0,0 @@ |
|||||||
# coding:utf-8 |
|
||||||
|
|
||||||
import sys |
|
||||||
import uuid |
|
||||||
import json |
|
||||||
import time |
|
||||||
import random |
|
||||||
|
|
||||||
# path = str(sys.path[0]) |
|
||||||
# home_path = path.split("isop_uebaapiData")[0] |
|
||||||
# sys.path.append(home_path) |
|
||||||
from util import send_logs |
|
||||||
|
|
||||||
def alarm(cookies, api):
    """Build one synthetic HTTP access-log record (scenario 2: HTTP logs).

    Starts from a canned UTS log template, then overrides the fields that
    must vary per record (timestamp, session id, source IP, cookie, and
    randomized account / jobnum / menu / interface), and returns the
    two-element [header, event] list serialized back to a JSON string.

    :param cookies: cookie header string embedded into the HTTP detail.
    :param api: host name embedded into the HTTP detail.
    :return: JSON string of the full record.
    """
    template = '''[{"msgtype":1,"hash":"8DE9-BDAB-F622-2FA8","dev_ip":"10.67.5.17","product":"uts"},{"sid":"6004744450036c44f815500016d00a5f5151105430a3ed","timestamp":1567673939,"sip":"10.67.0.52","sport":5624,"dip":"10.67.0.53","dport":80,"protocol":6,"app":3087428650795009,"app_proto":8,"direct":4,"app.detail":{"method":"GET","http_protocol":"1.1","ret_code":200,"host":"10.67.1.1","uri":"/webtest/uploadFile.php","referer":"http://[2222::65]/webtest/","content_type":" multipart/form-data; boundary=----WebKitFormBoundary2zcCUl4lQf1h7A7S","content_type_server":" text/html","server":"Apache/2.4.4 (Win32) OpenSSL/0.9.8y PHP/5.4.19","user_agent":"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36","link":"","cookies":"loginmainacctid=wangshiguang;operatorId=d2601586;com.huawei.boss.CURRENT_MENUID=BLAR_ChargeCrm3_WEB;","content_encoding":"","location":"","content_length":70080,"content_length_server":200,"set_cookie":"","range":"","connection":"keep-alive","connection_server":"Keep-Alive","x_forwarded_for":"","post_data":"LS0tLS0tV2ViS2l0Rm9ybUJvdW5kYXJ5MnpjQ1VsNGxRZjFoN0E3Uw0KQ29udGVudC1EaXNwb3NpdGlvbjogZm9ybS1kYXRhOyBuYW1lPSJmaWxlIjsgZmlsZW5hbWU9IjAwMDFhZDQ0MWRjYjM4NjIxOGE3Njk5MmFjZjhiNzA1Ig0=","response_body":"VXBsb2FkOiAwMDAxYWQ0NDFkY2IzODYyMThhNzY5OTJhY2Y4YjcwNTxiciAvPlR5cGU6IGFwcGxpY2F0aW9uL29jdGV0LXN0cmVhbTxiciAvPlNpemU6IDY4LjEyNzkyOTY4NzUgS2I8YnIgLz5UZW1wIGZpbGU6IEQ6XHhhbXBwXHRtcFxwaHA2ODI1LnRtcDxiciAvPjAwMDFhZDQ0MWRjYjM4NjIxOGE3Njk5MmFjZjhiNzA1IGFscmVhZHkgZXhpc3RzLiA="}}]'''
    record = json.loads(template, strict=False)
    event = record[1]
    # Freshen the identifying fields so every generated record is unique.
    event["timestamp"] = int(time.time())
    event["sid"] = str(uuid.uuid1())
    event["sip"] = generate_random_ip()
    event["dip"] = "10.67.1.1"
    # NOTE(review): dport is assigned as the string "8180" while the template
    # carries an int -- presumably harmless downstream, but confirm.
    event["dport"] = "8180"
    detail = event["app.detail"]
    detail["uri"] = "/alarmtest.action?BMECID=352432757&BMETimestamp=1692788489260&queryNumber=158713459"
    detail["host"] = api
    detail["cookies"] = cookies
    # New top-level keys carrying the randomized identity dimensions.
    event["account"] = get_random_person()
    event["trojan_type"] = get_random_jobnum()
    event["worm_family"] = get_random_menu()
    event["interface"] = get_random_inteface()
    return json.dumps(record)
|
||||||
|
|
||||||
def generate_random_ip():
    """Return a random source IP in 192.168.19.0 .. 192.168.19.50.

    The first three octets are fixed (192.168.19); only the last octet
    is randomized.
    """
    last_octet = random.randint(0, 50)
    return "192.168.19.{}".format(last_octet)
|
||||||
|
|
||||||
def AbIDVisitAPINums510404():
    """Generate and ship the alarm logs for the 510405 scenario.

    Two fixed operator cookies (Wuhan, Jingzhou) issue 60 and 120
    requests respectively against randomly chosen hosts; the resulting
    records are pushed out through send_logs.

    :return: status message (Chinese) confirming the data was produced.
    """
    datalist = {"TCP_5011": list()}
    # Area -> operator id lists; only 武汉[0] and 荆州[2] are used below.
    ID2Area = {
        "武汉": ["1101820", "1101821", "1101822", "1101823", "1101825"],
        "荆州": ["2001800", "2001801", "2001808"],
        "江汉": ["1801820", "1801810"],
        "省公司市场部": ["1002011", "1002012", "1002013"]
    }
    api_list = ["test.alarm.com/webtest", "alarm.com/testalarm", "business.system..alarmcom", "hhh.alarm.com",
                "good.alarm.com"]
    info_list = [
        ["u-locale=zh_CN;loginmainacctid=zhang3;operatorId=" + ID2Area["武汉"][0]
         + ";com.huawei.boss.CURRENT_MENUID=BLAR_ChargeCrm3_WEB;", 60],
        ["u-locale=zh_CN;loginmainacctid=zhang3;operatorId=" + ID2Area["荆州"][2]
         + ";com.huawei.boss.CURRENT_MENUID=BLAR_ChargeCrm3_WEB;", 120]
    ]
    for cookie, count in info_list:
        for _ in range(count):
            datalist["TCP_5011"].append(alarm(cookie, random.choice(api_list)))
    for key in datalist.keys():
        send_logs(datalist[key])
    return "510405场景的告警数据已生成"
|
||||||
|
|
||||||
def get_random_jobnum():
    """Return a random fake operator job number.

    Numbers are grouped by prefix family (10x, 110x, 140x, 260x, 200x,
    190x, 1802x). A family is chosen first, then a member of it, so each
    family is equally likely regardless of its size.
    """
    families = [
        ['10243', '10895', '10134', '10781', '10962'],       # 10-prefixed
        ['11089', '11057', '11023', '11016', '11030'],       # 110-prefixed
        ['14076', '14049', '14098', '14032', '14061'],       # 140-prefixed
        ['26054', '26013', '26087', '26029', '26061'],       # 260-prefixed
        ['20083', '20015', '20072', '20096', '20048'],       # 200-prefixed
        ['19035', '19017', '19049', '19082', '19096'],       # 190-prefixed
        ['180237', '180276', '180204', '180295', '180219']   # 1802-prefixed
    ]
    return random.choice(random.choice(families))
|
||||||
|
|
||||||
def get_random_person():
    """Return a random fake account name.

    NOTE(review): "Grace2" appears twice and plain "Grace" is absent --
    presumably a fixture typo; the list is kept as-is so the output
    distribution is unchanged.
    """
    people_list = [
        "Alice", "Bob", "Charlie", "David", "Emma", "Frank", "Grace2",
        "Alice2", "Bob2", "Charlie2", "David2", "Emma2", "Frank2", "Grace2",
    ]
    return random.choice(people_list)
|
||||||
|
|
||||||
def get_random_menu():
    """Return a random menu-name token (Chinese, two characters)."""
    # Fixed pool of system menu names; duplicates ("更新", "测试") slightly
    # raise their selection probability -- kept as in the original fixture.
    system_menu = [
        "开发", "测试", "部署", "配置", "设置", "安装", "卸载", "升级", "更新",
        "修复", "修正", "修补", "更新", "安全", "保护", "防护", "防御", "防止",
        "检查", "扫描", "监控", "跟踪", "追踪", "审计", "审查", "测试", "测量"
    ]
    return random.choice(system_menu)
|
||||||
|
|
||||||
def get_random_inteface():
    """Return a random fake API endpoint URL.

    (Function name keeps the historical "inteface" spelling because
    callers reference it by this name.)
    """
    endpoints = [
        "http://bai1.doc.com/api", "http://bai2.doc.com/api", "http://bai3.doc.com/api",
        "http://bai4.doc.com/api", "http://bai5.doc.com/api", "http://bai12.doc.com/api",
        "http://bai13.doc.com/api", "http://bai19.doc.com/api", "http://bai6.doc.com/api",
        "http://bai7.doc.com/api", "http://bai8.doc.com/api", "http://bai9.doc.com/api",
        "http://bai11.doc.com/api"
    ]
    return random.choice(endpoints)
|
||||||
|
|
||||||
if __name__ == '__main__': |
|
||||||
datalist = {"TCP_5011": list()} |
|
||||||
ID2Area = { |
|
||||||
"武汉": ["1101820", "1101821", "1101822", "1101823", "1101825"], |
|
||||||
"荆州": ["2001800", "2001801", "2001808"], |
|
||||||
"江汉": ["1801820", "1801810"], |
|
||||||
"省公司市场部": ["1002011", "1002012", "1002013"] |
|
||||||
} |
|
||||||
api_list = ["test.alarm.com/webtest", "alarm.com/testalarm", "business.system..alarmcom", "hhh.alarm.com", "good.alarm.com","baidu.com","sohu.com","xinlang.com","erpx.com"] |
|
||||||
info_list = [ |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 100], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 400], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 300], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 200], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 100], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 400], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 300], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 200], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 100], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 400], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 300], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 200], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 100], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 400], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 300], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 200], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 100], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 400], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 300], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 200], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 100], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 400], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 300], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 200], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 100], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 400], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 300], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 200], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 100], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 400], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 300], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 200], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 100], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 400], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 300], |
|
||||||
["u-locale=zh_CN; loginmainacctid="+get_random_person()+"; userticket=209@9889@23223@10.0.1.183@lis8; operatorId=" + get_random_jobnum() + "; com.huawei.boss.CURRENT_MENUID="+get_random_menu()+";", 200] |
|
||||||
] |
|
||||||
|
|
||||||
for i in range(len(info_list)): |
|
||||||
cookies = info_list[i][0] |
|
||||||
count = info_list[i][1] |
|
||||||
for j in range(count): |
|
||||||
api = random.choice(api_list) |
|
||||||
datalist["TCP_5011"].append(alarm(cookies, api)) |
|
||||||
for key in datalist.keys(): |
|
||||||
send_logs(datalist[key]) |
|
||||||
print "510405场景的告警数据已生成" |
|
Binary file not shown.
@ -1,134 +0,0 @@ |
|||||||
#!/usr/bin/python |
|
||||||
# encoding=utf-8 |
|
||||||
# author: tangwy |
|
||||||
from __future__ import division |
|
||||||
import json |
|
||||||
import os, re |
|
||||||
import codecs |
|
||||||
import traceback |
|
||||||
from datetime import datetime, timedelta |
|
||||||
from collections import defaultdict |
|
||||||
from dataInterface.functions import CFunction |
|
||||||
from dataInterface.db.params import CPgSqlParam |
|
||||||
from ext_logging import logger |
|
||||||
|
|
||||||
|
|
||||||
TABLE_NAME = "ueba_analysis_schema.logs" |
|
||||||
|
|
||||||
DATA_TYPE = { |
|
||||||
"IP": 1, |
|
||||||
"ACCOUNT": 2, |
|
||||||
"INTERFACE": 3, |
|
||||||
"MENU": 4, |
|
||||||
} |
|
||||||
|
|
||||||
#安全除 |
|
||||||
def safe_divide(numerator, denominator):
    """Return numerator / denominator, or None when the denominator is 0.

    NOTE(review): callers feed the result straight into round(), which
    raises on None -- confirm a zero denominator cannot reach them.
    """
    return None if denominator == 0 else numerator / denominator
|
||||||
#ip维度 |
|
||||||
def get_ip_data(startTime, endTime, keyword):
    """IP-dimension drill-down: top 200 (ip, jobnum) pairs by request count.

    :param startTime: range start (inclusive)
    :param endTime: range end (inclusive)
    :param keyword: company filter value
    :return: list of dicts with req_ip / req_jobnum / req_frequency
    """
    sql = """ select ip,jobnum, sum(count) as count from {TABLE_NAME}
            where logdate >= %s and logdate <= %s and data_type = %s and company = %s
            group by ip,jobnum order by count desc limit 200""".format(TABLE_NAME=TABLE_NAME)
    rows = json.loads(CFunction.execute(
        CPgSqlParam(sql, params=(startTime, endTime, DATA_TYPE["IP"], keyword))))
    return [
        {"req_ip": row[0], "req_jobnum": row[1], "req_frequency": row[2]}
        for row in (rows or [])
    ]
|
||||||
#账号维度 |
|
||||||
def get_account_data(startTime, endTime, keyword):
    """Account-dimension drill-down: top 200 (account, jobnum) pairs by count.

    :param startTime: range start (inclusive)
    :param endTime: range end (inclusive)
    :param keyword: company filter value
    :return: list of dicts with req_account / req_jobnum / req_frequency
    """
    sql = """ select account,jobnum, sum(count) as count from {TABLE_NAME}
            where logdate >= %s and logdate <= %s and data_type = %s and company = %s
            group by account,jobnum order by count desc limit 200""".format(TABLE_NAME=TABLE_NAME)
    rows = json.loads(CFunction.execute(
        CPgSqlParam(sql, params=(startTime, endTime, DATA_TYPE["ACCOUNT"], keyword))))
    return [
        {"req_account": row[0], "req_jobnum": row[1], "req_frequency": row[2]}
        for row in (rows or [])
    ]
|
||||||
|
|
||||||
#接口维度 |
|
||||||
def get_interface_data(startTime, endTime, keyword):
    """Interface-dimension drill-down: top 200 (ip, account, jobnum) rows.

    :param startTime: range start (inclusive)
    :param endTime: range end (inclusive)
    :param keyword: interface address filter; echoed back on every row
    :return: list of dicts with req_ip / req_jobnum / req_account /
             req_frequency / interface_addr
    """
    sql = """select ip,account,jobnum,sum(count) as count from {TABLE_NAME}
            where logdate >= %s and logdate <= %s and data_type = %s and interface = %s
            group by ip,account,jobnum order by count desc limit 200""".format(TABLE_NAME=TABLE_NAME)
    rows = json.loads(CFunction.execute(
        CPgSqlParam(sql, params=(startTime, endTime, DATA_TYPE["INTERFACE"], keyword))))
    return [
        {
            "req_ip": row[0],
            "req_jobnum": row[2],
            "req_account": row[1],
            "req_frequency": row[3],
            "interface_addr": keyword,
        }
        for row in (rows or [])
    ]
|
||||||
|
|
||||||
#菜单维度 |
|
||||||
def get_menu_data(startTime, endTime, keyword):
    """Menu-dimension drill-down: top 200 (ip, jobnum, account) rows.

    :param startTime: range start (inclusive)
    :param endTime: range end (inclusive)
    :param keyword: menu name filter; echoed back on every row
    :return: list of dicts with req_ip / req_jobnum / req_account /
             req_frequency / menu_name
    """
    result = []
    sql = """select ip,jobnum,account,sum(count) as count from {TABLE_NAME}
            where logdate >= %s and logdate <= %s and data_type = %s and menu = %s
            group by ip,jobnum,account order by count desc limit 200""".format(TABLE_NAME=TABLE_NAME)
    logger.info(sql)
    rows = json.loads(CFunction.execute(
        CPgSqlParam(sql, params=(startTime, endTime, DATA_TYPE["MENU"], keyword))))
    if rows:
        logger.info(str(len(rows)))
        for row in rows:
            result.append({
                "req_ip": row[0],
                "req_jobnum": row[1],
                "req_account": row[2],
                "req_frequency": row[3],
                "menu_name": keyword,
            })
    return result
|
||||||
|
|
||||||
#入口 |
|
||||||
def detail_data_entry(startTime, endTime, data_type, keyWord):
    """Dispatch a detail query by data_type code.

    Codes: "1" IP, "2" account, "3" interface, "4" menu.
    Returns the handler's list, or the original empty-dict fallback when
    the code is unrecognized.
    """
    handlers = {
        "1": get_ip_data,
        "2": get_account_data,
        "3": get_interface_data,
        "4": get_menu_data,
    }
    handler = handlers.get(data_type)
    if handler is None:
        # NOTE(review): unknown codes yield {} while known codes yield a
        # list -- preserved from the original; confirm callers cope.
        return {}
    return handler(startTime=startTime, endTime=endTime, keyword=keyWord)
|
||||||
|
|
@ -1,247 +0,0 @@ |
|||||||
#!/usr/bin/python |
|
||||||
# encoding=utf-8 |
|
||||||
# author: tangwy |
|
||||||
from __future__ import division |
|
||||||
import json |
|
||||||
import os, re |
|
||||||
import codecs |
|
||||||
import traceback |
|
||||||
from datetime import datetime, timedelta |
|
||||||
from collections import defaultdict |
|
||||||
from dashboard_data_conversion import adjust_times |
|
||||||
from dataInterface.functions import CFunction |
|
||||||
from dataInterface.db.params import CPgSqlParam |
|
||||||
from ext_logging import logger |
|
||||||
|
|
||||||
TABLE_NAME = "ueba_analysis_schema.logs" |
|
||||||
|
|
||||||
DATA_TYPE = { |
|
||||||
"IP": 1, |
|
||||||
"ACCOUNT": 2, |
|
||||||
"INTERFACE": 3, |
|
||||||
"MENU": 4, |
|
||||||
} |
|
||||||
|
|
||||||
#安全除 |
|
||||||
def safe_divide(numerator, denominator):
    """Divide the two numbers; yield None instead of raising on a 0 denominator.

    NOTE(review): downstream round() calls would raise on None -- confirm
    the zero case never reaches them in practice.
    """
    if denominator == 0:
        return None
    return numerator / denominator
|
||||||
#ip维度 |
|
||||||
def get_ip_summary_data(startTime, endTime):
    """Total IP-dimension request count per company within [startTime, endTime].

    :return: dict mapping company -> summed count (empty when no rows).
    """
    sql = """ select company, sum(count) as count from {TABLE_NAME}
            where logdate >= %s and logdate <= %s and data_type = %s
            group by company""".format(TABLE_NAME=TABLE_NAME)
    rows = json.loads(CFunction.execute(
        CPgSqlParam(sql, params=(startTime, endTime, DATA_TYPE["IP"]))))
    return {row[0]: row[1] for row in rows} if rows else {}
|
||||||
#账号维度 |
|
||||||
def get_account_summary_data(startTime, endTime):
    """Total account-dimension request count per company within the range.

    :return: dict mapping company -> summed count (empty when no rows).
    """
    sql = """ select company, sum(count) as count from {TABLE_NAME}
            where logdate >= %s and logdate <= %s and data_type = %s
            group by company""".format(TABLE_NAME=TABLE_NAME)
    rows = json.loads(CFunction.execute(
        CPgSqlParam(sql, params=(startTime, endTime, DATA_TYPE["ACCOUNT"]))))
    return {row[0]: row[1] for row in rows} if rows else {}
|
||||||
|
|
||||||
#接口维度 |
|
||||||
def get_interface_summary_data(startTime, endTime):
    """Top-20 interfaces by request count within [startTime, endTime].

    :return: dict mapping interface -> summed count (empty when no rows).
    """
    sql = """select interface, sum(count) as count from {TABLE_NAME}
            where logdate >= %s and logdate <= %s and data_type = %s
            group by interface order by count desc limit 20""".format(TABLE_NAME=TABLE_NAME)
    rows = json.loads(CFunction.execute(
        CPgSqlParam(sql, params=(startTime, endTime, DATA_TYPE["INTERFACE"]))))
    return {row[0]: row[1] for row in rows} if rows else {}
|
||||||
|
|
||||||
#菜单维度 |
|
||||||
def get_menu_summary_data(startTime, endTime):
    """Request count per menu within [startTime, endTime].

    :return: dict mapping menu -> summed count (empty when no rows).
    """
    sql = """select menu, sum(count) as count from {TABLE_NAME}
            where logdate >= %s and logdate <= %s and data_type = %s
            group by menu""".format(TABLE_NAME=TABLE_NAME)
    rows = json.loads(CFunction.execute(
        CPgSqlParam(sql, params=(startTime, endTime, DATA_TYPE["MENU"]))))
    return {row[0]: row[1] for row in rows} if rows else {}
|
||||||
|
|
||||||
#获取IP count |
|
||||||
def get_ip_count(startTime, endTime):
    """Distinct IP count per company within [startTime, endTime].

    :return: dict mapping company -> distinct-ip count (empty when no rows).
    """
    sql = """select company, count(distinct ip) as count from {TABLE_NAME}
            where logdate >= %s and logdate <= %s and data_type = %s
            group by company """.format(TABLE_NAME=TABLE_NAME)
    rows = json.loads(CFunction.execute(
        CPgSqlParam(sql, params=(startTime, endTime, DATA_TYPE["IP"]))))
    return {row[0]: row[1] for row in rows} if rows else {}
|
||||||
|
|
||||||
#获取account count |
|
||||||
def get_account_count(startTime, endTime):
    """Distinct account count per company within [startTime, endTime].

    :return: dict mapping company -> distinct-account count (empty when no rows).
    """
    sql = """select company ,count(distinct account) as count from {TABLE_NAME}
            where logdate >= %s and logdate <= %s and data_type = %s
            group by company """.format(TABLE_NAME=TABLE_NAME)
    rows = json.loads(CFunction.execute(
        CPgSqlParam(sql, params=(startTime, endTime, DATA_TYPE["ACCOUNT"]))))
    return {row[0]: row[1] for row in rows} if rows else {}
|
||||||
|
|
||||||
#获取前一个周期数据 |
|
||||||
def get_pre_date(startTime, endTime):
    """Map the current window to the preceding comparison window.

    Parses "%Y-%m-%d %H:%M:%S" timestamps, truncates them to dates, and
    delegates to adjust_times to compute the previous period.

    :return: (old_start, old_end) as produced by adjust_times.
    """
    fmt = "%Y-%m-%d %H:%M:%S"
    day_start = datetime.strptime(startTime, fmt).strftime('%Y-%m-%d')
    day_end = datetime.strptime(endTime, fmt).strftime('%Y-%m-%d')
    return adjust_times(day_start, day_end)
|
||||||
|
|
||||||
#ip维度汇总数据计算 |
|
||||||
def ip_summary_calcule(startTime, endTime):
    """Company-level IP summary: frequency share, IP share, per-IP average,
    and trend versus the preceding period.

    :return: {"summary": {"ip": [row, ...]}, "detail": {"ip": {}}}
    """
    logger.info("begin")
    old_start, old_end = get_pre_date(startTime, endTime)
    pre_data = get_ip_summary_data(startTime=old_start, endTime=old_end)
    logger.info("完成pre_data查询")
    data = get_ip_summary_data(startTime=startTime, endTime=endTime)
    ip_count_data = get_ip_count(startTime, endTime)
    total_ip_count = sum(ip_count_data.itervalues())
    total_frequency = sum(data.itervalues())
    res_data = []
    for company, freq in data.iteritems():
        row = {
            "company": company,
            "req_frequency": freq,
            "frequency_rate": round(safe_divide(freq, total_frequency), 4),
            "ip_rate": round(safe_divide(ip_count_data[company], total_ip_count), 4),
            "ip_count": ip_count_data[company],
            "ip_avg": round(safe_divide(freq, ip_count_data[company]), 4),
        }
        # Trend against the previous window; 0 when the company is new.
        if company in pre_data:
            row["trend"] = round(safe_divide(freq - pre_data[company], pre_data[company]), 4)
        else:
            row["trend"] = 0
        res_data.append(row)
    return {"summary": {"ip": res_data}, "detail": {"ip": {}}}
|
||||||
|
|
||||||
#account维度汇总数据计算 |
|
||||||
def account_summary_calcule(startTime, endTime):
    """Company-level account summary: frequency share, account share,
    per-account average, and trend versus the preceding period.

    :return: {"summary": {"account": [row, ...]}, "detail": {"account": {}}}
    """
    old_start, old_end = get_pre_date(startTime, endTime)
    pre_data = get_account_summary_data(startTime=old_start, endTime=old_end)

    data = get_account_summary_data(startTime=startTime, endTime=endTime)
    account_count_data = get_account_count(startTime, endTime)
    total_account_count = sum(account_count_data.itervalues())
    total_frequency = sum(data.itervalues())
    res_data = []
    for company, freq in data.iteritems():
        row = {
            "company": company,
            "req_frequency": freq,
            "frequency_rate": round(safe_divide(freq, total_frequency), 4),
            "account_rate": round(safe_divide(account_count_data[company], total_account_count), 4),
            "account_count": account_count_data[company],
            "account_avg": round(safe_divide(freq, account_count_data[company]), 4),
        }
        # Trend against the previous window; 0 when the company is new.
        if company in pre_data:
            row["trend"] = round(safe_divide(freq - pre_data[company], pre_data[company]), 4)
        else:
            row["trend"] = 0
        res_data.append(row)
    return {"summary": {"account": res_data}, "detail": {"account": {}}}
|
||||||
|
|
||||||
#接口维度汇总数据计算 |
|
||||||
def interface_summary_calcule(startTime, endTime):
    """Interface-level summary: frequency share, average, and trend versus
    the preceding period.

    NOTE(review): the average divides by a hard-coded 20 (the SQL's LIMIT)
    rather than the actual row count -- preserved as-is; confirm intent.

    :return: {"summary": {"interface": [row, ...]}, "detail": {"interface": {}}}
    """
    old_start, old_end = get_pre_date(startTime, endTime)
    pre_data = get_interface_summary_data(startTime=old_start, endTime=old_end)

    data = get_interface_summary_data(startTime=startTime, endTime=endTime)
    total_frequency = sum(data.itervalues())
    res_data = []
    for addr, freq in data.iteritems():
        row = {
            "interface_addr": addr,
            "req_frequency": freq,
            "frequency_rate": round(safe_divide(freq, total_frequency), 4),
            "frequency_avg": round(safe_divide(freq, 20), 4),
        }
        if addr in pre_data:
            row["trend"] = round(safe_divide(freq - pre_data[addr], pre_data[addr]), 4)
        else:
            row["trend"] = 0
        res_data.append(row)
    return {"summary": {"interface": res_data}, "detail": {"interface": {}}}
|
||||||
|
|
||||||
#菜单维度汇总数据计算 |
|
||||||
def menu_summary_calcule(startTime, endTime):
    """Menu-level summary: frequency share, average over the number of
    menus, and trend versus the preceding period.

    :return: {"summary": {"menu": [row, ...]}, "detail": {"menu": {}}}
    """
    logger.info("begin")
    old_start, old_end = get_pre_date(startTime, endTime)
    pre_data = get_menu_summary_data(startTime=old_start, endTime=old_end)
    logger.info("完成pre_data查询")
    data = get_menu_summary_data(startTime=startTime, endTime=endTime)
    logger.info("完成data查询")
    total_frequency = sum(data.itervalues())
    logger.info("完成合计计算")
    res_data = []
    for menu, freq in data.iteritems():
        row = {
            "menu_name": menu,
            "req_frequency": freq,
            "frequency_rate": round(safe_divide(freq, total_frequency), 4),
            "frequency_avg": round(safe_divide(freq, len(data)), 4),
        }
        if menu in pre_data:
            row["trend"] = round(safe_divide(freq - pre_data[menu], pre_data[menu]), 4)
        else:
            row["trend"] = 0
        res_data.append(row)
    logger.info("完成数据处理")
    return {"summary": {"menu": res_data}, "detail": {"menu": {}}}
|
||||||
|
|
||||||
#入口 |
|
||||||
def summary_data_entry(startTime, endTime, data_type):
    """Dispatch a summary calculation by data_type code.

    Codes: "1" IP, "2" account, "3" interface, "4" menu.
    Returns {} (the original fallback) for unrecognized codes.
    """
    calculators = {
        "1": ip_summary_calcule,
        "2": account_summary_calcule,
        "3": interface_summary_calcule,
        "4": menu_summary_calcule,
    }
    calculate = calculators.get(data_type)
    if calculate is None:
        return {}
    return calculate(startTime=startTime, endTime=endTime)
|
@ -1,178 +0,0 @@ |
|||||||
#!/usr/bin/python |
|
||||||
#encoding=utf-8 |
|
||||||
# author: tangwy |
|
||||||
import re,os,json |
|
||||||
import codecs,csv |
|
||||||
from db2json import DBUtils |
|
||||||
from datetime import datetime, timedelta |
|
||||||
from ext_logging import logger_cron,get_clean_file_path |
|
||||||
from file_helper import read_large_json_file |
|
||||||
from file_merge import entry as merge_entry |
|
||||||
from appsUtils.confutil import ConfUtil |
|
||||||
from dataInterface.functions import CFunction |
|
||||||
from dataInterface.db.params import CPgSqlParam |
|
||||||
|
|
||||||
date_pattern = re.compile(r'^\d{4}-\d{2}-\d{2}.json$') |
|
||||||
|
|
||||||
LOG_TABLE_NAME = "ueba_analysis_schema.logs2" |
|
||||||
|
|
||||||
DATA_TYPE = { |
|
||||||
"IP": 1, |
|
||||||
"ACCOUNT": 2, |
|
||||||
"INTERFACE": 3, |
|
||||||
"MENU": 4, |
|
||||||
} |
|
||||||
|
|
||||||
# Current date rendered as "YYYY_MM" — used as the table-name suffix.
def get_current_year_month():
    """Return the current year and month formatted with "%Y_%m"."""
    return datetime.now().strftime("%Y_%m")
|
||||||
|
|
||||||
# First day of the current month, formatted "YYYY-MM-DD".
def get_first_day_of_current_month():
    """Return the first day of the current month as "%Y-%m-%d"."""
    return datetime.now().replace(day=1).strftime("%Y-%m-%d")
|
||||||
|
|
||||||
# First day of the month after the current one, formatted "YYYY-MM-DD".
def get_first_day_of_next_month():
    """Return the first day of next month as "%Y-%m-%d".

    Rolls the year over when the current month is December.
    """
    today = datetime.now()
    # divmod rolls 12 -> (1, 0): bump the year, land on month 1.
    year_carry, month_index = divmod(today.month, 12)
    next_first = today.replace(year=today.year + year_carry,
                               month=month_index + 1,
                               day=1)
    return next_first.strftime("%Y-%m-%d")
|
||||||
|
|
||||||
# 获取表名: monthly partition name for the logs table.
def get_table_name():
    """Return the per-month table name "<LOG_TABLE_NAME>_<YYYY_MM>"."""
    return LOG_TABLE_NAME + '_' + get_current_year_month()
|
||||||
|
|
||||||
# 获取表区间: the [start, end) date range covered by this month's table.
def get_table_data_range():
    """Return (first day of this month, first day of next month) as "%Y-%m-%d" strings."""
    return get_first_day_of_current_month(), get_first_day_of_next_month()
|
||||||
|
|
||||||
|
|
||||||
def get_all_files(path):
    """List the aggregate files under *path* that are ready to load.

    Keeps names matching date_pattern and skips today's file: the cron
    job runs at 03:00, so only fully-written (yesterday's and older)
    files are processed.  Returns [{"filename": ..., "path": ...}, ...].
    """
    today_name = datetime.now().strftime("%Y-%m-%d") + ".json"
    return [
        {"filename": name, "path": os.path.join(path, name)}
        for name in os.listdir(path)
        if date_pattern.search(name) and name != today_name
    ]
|
||||||
|
|
||||||
def json_to_csvFile(json_data, csv_file):
    """Write a list of flat dicts to *csv_file*.

    The column order comes from the first row's keys; all rows are
    assumed to share the same schema.  Unicode values are encoded to
    UTF-8 because the Python-2 csv module only handles byte strings
    (hence the 'wb' mode as well).

    FIX: returns without creating the file when *json_data* is empty —
    the original raised IndexError on json_data[0].
    """
    if not json_data:
        return
    fields = json_data[0].keys()  # first row defines the header
    with open(csv_file, 'wb') as csvfile:  # 'wb': Python-2 csv writes bytes
        writer = csv.DictWriter(csvfile, fieldnames=fields)
        writer.writeheader()
        for row in json_data:
            row = {k: v.encode('utf-8') if isinstance(v, unicode) else v
                   for k, v in row.items()}
            writer.writerow(row)
|
||||||
def csv_to_pg(sql):
    """Execute *sql* through the psql CLI against the configured database.

    NOTE(review): the statement is spliced into a shell command line and
    run via os.popen — acceptable only because *sql* is built internally
    by this module, never from user input; confirm before reusing.
    """
    logger_cron.info("INSERT: 准备数据入库")
    pg_conf = ConfUtil().getPostgresqlConf()
    cmd = """psql {} -U {} -w -c \"{}\"""".format(
        pg_conf["database"], pg_conf["username"], sql)
    logger_cron.info("INSERT: " + cmd)
    output_lines = os.popen(cmd).readlines()
    logger_cron.info("INSERT: " + json.dumps(output_lines))
    logger_cron.info("INSERT: 数据入库完成")
|
||||||
|
|
||||||
#数据入库
def _dimension_records(items, datatype, key_fields, log_date):
    """Normalize one dimension's aggregate rows into uniform DB records.

    Every record carries the same columns; "keys" is a JSON array of the
    values that identify a row within this dimension (e.g. [ip, jobnum]).
    """
    records = []
    for item in items:
        row = {
            "menu": item.get('menu', ''),
            "ip": item.get('ip', '0.0.0.0'),
            "account": item.get('account', ''),
            "jobnum": item.get('jobnum', ''),
            "count": item.get('count', 0),
            "logdate": log_date,
            "data_type": datatype,
            "interface": item.get('interface', ''),
        }
        row["keys"] = json.dumps([row[field] for field in key_fields])
        records.append(row)
    return records


def insert_data(files):
    """Bulk-load each aggregated JSON file into PostgreSQL via CSV + \\copy.

    For every file: read it, flatten the four dimensions (ip / account /
    interface / menu) into uniform records, dump them to a CSV named after
    the log date, load it with psql, then rename both the JSON and the CSV
    to "done_*" so the next run skips them.

    FIX: the four copy-pasted per-dimension loops are collapsed into one
    table-driven pass over _dimension_records; output rows, log messages
    and their order are unchanged.
    """
    # (JSON key, data_type code, fields forming "keys", log label)
    dimensions = [
        ("ip", DATA_TYPE.get("IP", 1), ["ip", "jobnum"], "IP"),
        ("account", DATA_TYPE.get("ACCOUNT", 2), ["account", "jobnum"], "ACCOUNT"),
        ("interface", DATA_TYPE.get("INTERFACE", 3), ["interface", "ip", "account", "jobnum"], "INTERFACE"),
        ("menu", DATA_TYPE.get("MENU", 4), ["menu", "ip", "account", "jobnum"], "MENU"),
    ]
    for itemFile in files:
        path = itemFile.get("path", '')
        if not os.path.exists(path):
            continue
        data = read_large_json_file(path)
        logger_cron.info("INSERT: 准备读取聚合文件:" + itemFile.get('path', ''))
        logger_cron.info("INSERT: 读取聚合文件完成")

        # File basename is the log date, e.g. "2024-01-01".
        log_date, _ = os.path.splitext(itemFile.get('filename', ''))

        records = []
        for dim_key, datatype, key_fields, label in dimensions:
            items = data.get(dim_key, [])
            logger_cron.info("INSERT: " + label + "维度 " + str(len(items)))
            records.extend(_dimension_records(items, datatype, key_fields, log_date))

        csv_file = get_clean_file_path() + "/" + log_date + ".csv"
        logger_cron.info("INSERT: 开始写csv文件")
        json_to_csvFile(records, csv_file)
        sql = "\copy ueba_analysis_schema.logs2(count,account,logdate,data_type,ip,interface,menu,jobnum,keys) from '{}' with csv header DELIMITER ',';".format(csv_file)
        csv_to_pg(sql)

        #重命名文件 — mark both source files as processed
        logger_cron.info(itemFile.get('path', ''))
        logger_cron.info("done_" + itemFile.get('filename', ''))
        os.rename(itemFile.get('path', ''),
                  get_clean_file_path() + "/done_" + itemFile.get('filename', ''))
        logger_cron.info("INSERT: 重命名文件完成," + itemFile.get('filename', ''))

        logger_cron.info("done_" + itemFile.get('filename', ''))
        os.rename(csv_file, get_clean_file_path() + "/done_" + log_date + ".csv")
        logger_cron.info("INSERT: csv重命名文件完成")
|
||||||
|
|
||||||
def entry():
    """Cron entry point: collect pending aggregate files and load them.

    NOTE(review): merge_entry is imported at the top of the file but not
    invoked here, despite the original "合并文件" comment — confirm the
    merge step was intentionally removed.
    """
    base_path = get_clean_file_path()
    pending = get_all_files(base_path)
    logger_cron.info("INSERT:获取文件数量" + str(len(pending)))
    #数据入库
    insert_data(pending)
|
||||||
|
|
||||||
entry()  # module-level invocation: the cron scheduler runs this file as a script, so the load starts on import/execution
|
Loading…
Reference in new issue