123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573 |
# -*- coding: utf-8 -*-
- # @Time : 2022/7/13 9:51
- # @Author : Clown
- # @File : test_FlowByHour_All.py
- # @Software : PyCharm
# -*- coding: utf-8 -*-
- # @Time : 2022/7/5 17:45
- # @Author : Clown
- # @File : test_FlowByHour_M.py
- # @Software : PyCharm
- import pymysql
- import json
- import requests
- import csv
- import os
- import time
- from datetime import datetime,timedelta
- import sys
- import traceback
- from all_key_table import update_key_value_pair
- import schedule
def linkTomySql(host, passwd, db_name):
    '''Connect to MySQL and return the connection object.

    host: server host (localhost locally, 124.222.188.59 on the server)
    passwd: password for user "root"
    db_name: schema to open

    local_infile=1 enables LOAD DATA LOCAL INFILE (v2 addition).
    On failure returns the string '连接失败' instead of raising — callers
    rely on this sentinel, so the contract is kept.
    '''
    try:
        db = pymysql.connect(
            host=host, user="root",
            passwd=passwd,
            db=db_name,
            charset='utf8mb4',
            local_infile=1,
            port=63306)
        print('\nconnect to mysql server 成功')
        print('---------------------------------------')
    except Exception as exc:  # fix: was a bare except that hid the real error
        print("\ncould not connect to mysql server")
        print(exc)
        db = "连接失败"
    return db
def read_key_value_pair(db, brand_name, wm_plate, owner):
    '''Read the key_value_pair JSON column from all_key_table as a dict.

    db: open database connection
    brand_name: brand label
    wm_plate: delivery platform, MEITUAN or ELEME
    owner: account scope, all or one

    Raises IndexError when no row matches (unchanged behaviour; callers
    catch broad Exception and refresh the key table).
    '''
    cursor = db.cursor()
    # fix: bind values as parameters instead of f-string interpolation (SQL injection)
    sql = ('SELECT key_value_pair FROM all_key_table '
           'WHERE brand_name = %s AND wm_plate = %s AND owner = %s;')
    cursor.execute(sql, (brand_name, wm_plate, owner))
    pair = json.loads(cursor.fetchall()[0][0])
    cursor.close()
    return pair
def loadDataInfile(file_path, csv_name, db, table_name):
    '''Bulk-load a CSV file into *table_name* via LOAD DATA LOCAL INFILE.

    local_infile is switched on just for the load and off again afterwards;
    ENCLOSED BY '"' keeps commas inside quoted fields from splitting columns.
    '''
    load_sql = f'''LOAD DATA LOCAL INFILE '{file_path+'/'+csv_name}' REPLACE INTO TABLE {table_name} FIELDS TERMINATED BY ',' ENCLOSED BY '"' LINES TERMINATED BY '\r\n' IGNORE 1 LINES ;'''
    statements = (
        "SET global local_infile = 1;",
        load_sql,
        "SET global local_infile = 0;",
    )
    cursor = db.cursor()
    for statement in statements:
        cursor.execute(statement)
        db.commit()
    cursor.close()
def update_scriptrunninglogs(log_date, log_time, running_Time, operation_results, table_name, db):
    '''Append one run record to the scriptrunninglogs_test table (best effort).

    log_date: run date, e.g. datetime.now().strftime("%Y-%m-%d")
    log_time: run time, e.g. datetime.now().strftime('%H:%M:%S')
    running_Time: script duration in seconds (time.time() - time_begin)
    operation_results: '执行成功' or a traceback string from the caller
    table_name: label of the table(s) the run touched
    db: open database connection

    Never raises: logging must not kill the job, so failures are printed.
    '''
    try:
        # columns: date, time, running_Time, script_name, operation_results, table_name, id
        sql = "INSERT INTO scriptrunninglogs_test VALUES (%s,%s,%s,%s,%s,%s,%s)"
        script_name = os.path.basename(sys.argv[0])
        value = (log_date, log_time, running_Time, script_name, operation_results, table_name, 0)
        cursor = db.cursor()
        cursor.execute(sql, value)
        db.commit()
        cursor.close()
    except Exception as exc:  # fix: was a bare except printing only a fixed message
        print("数据库连接失败", exc)
def reset_mysql_table(db, table_name):
    '''Delete today's rows from *table_name* so fresh data can be re-imported.'''
    sql = f'DELETE FROM {table_name} WHERE log_date = CURRENT_DATE() '
    cur = db.cursor()
    cur.execute(sql)
    db.commit()
    print(f'【{table_name}】当日数据重置成功,可导入新数据')
def get_shops_info_to_list(db, brand_name, wm_plate, key_name):
    '''Read shop info from shops_info_for_script and return a list of dicts.

    db: database connection
    brand_name: brand label
    wm_plate: delivery platform
    key_name: extra JSON key to extract from info_for_script; '' for none

    Returns a list of dicts with keys shop_id, shop_name, update_datetime,
    plus either 'info_for_script' (empty string) when key_name == '' or
    the requested key_name -> JSON value otherwise.
    '''
    cursor = db.cursor()
    # fix: merged the two near-identical branches; brand/platform values are
    # now bound parameters. The JSON path member cannot be parameterised, so
    # key_name is still interpolated — it comes from trusted script config.
    if key_name == '':
        sql = ('SELECT shop_id,shop_name,update_datetime '
               'FROM shops_info_for_script WHERE brand_name = %s AND wm_plate = %s;')
    else:
        sql = (f'SELECT shop_id,shop_name,update_datetime,info_for_script -> "$.{key_name}" '
               'FROM shops_info_for_script WHERE brand_name = %s AND wm_plate = %s;')
    cursor.execute(sql, (brand_name, wm_plate))
    shops_info = cursor.fetchall()
    cursor.close()
    value_key = 'info_for_script' if key_name == '' else key_name
    shops_info_df = []
    for shop_info in shops_info:
        shops_info_df.append({
            'shop_id': shop_info[0],
            'shop_name': shop_info[1],
            'update_datetime': shop_info[2],
            value_key: shop_info[3] if key_name != '' else '',
        })
    return shops_info_df
def test_FlowByHour_M(file_path, brand_name, mt_pair, shops_info_df):
    '''Fetch today's Meituan flow and bidding stats for one shop; write two CSVs.

    file_path: directory the two CSV files are written into
    brand_name: brand label stamped into every row
    mt_pair: credential dict from all_key_table; reads
        mt_pair['data']['session']['acctId'] and ['token']
    shops_info_df: unused in this single-shop debug version (the batch loop
        over shops is disabled)

    return: (csv_name_storage, csv_name_application)
    raises: ZeroDivisionError when every shop errored — callers catch any
        Exception, refresh the key table and retry; the exception type is
        kept from the original "e = 0/0" hack for compatibility (v2优化).
    '''
    # storage CSV: raw JSON payloads, one row per shop
    csv_name_storage = 'test_FlowByHour_M_storage.csv'
    title_storage = ['script_name', 'brand_name', 'shop_id', 'wm_plate', 'date', 'time', 'data_details', 'id']
    f_storage = open(file_path + '/' + csv_name_storage, mode='w', encoding='utf-8-sig', newline="")
    writer_storage = csv.writer(f_storage)
    writer_storage.writerow(title_storage)
    # application CSV: parsed metrics, one row per shop
    csv_name_application = 'test_FlowByHour_M_application.csv'
    title_application = ['id', 'log_date', 'date_of_data', 'brand_name', 'shop_id', 'shop_name', 'timeSign',
                         'today_exp_pv', 'today_exp_uv', 'today_clk_pv', 'today_clk_uv', 'today_valid_order_cnt', 'today_valid_order_user_cnt', 'today_clk_exp_rate', 'today_ord_clk_rate',
                         'last_day_exp_pv', 'last_day_exp_uv', 'last_day_clk_pv', 'last_day_clk_uv', 'last_day_valid_order_cnt', 'last_day_valid_order_user_cnt', 'last_day_clk_exp_rate', 'last_day_ord_clk_rate',
                         'totalExposureAmount', 'uvCount', 'totalCost', 'averageCost']
    f_application = open(file_path + '/' + csv_name_application, mode='w', encoding='utf-8-sig', newline="")
    writer_application = csv.writer(f_application)
    writer_application.writerow(title_application)
    # shared parameters
    script_name = 'test_FlowByHour_M'
    wm_plate = 'MEITUAN'
    date = datetime.now().strftime('%Y-%m-%d')
    time = datetime.now().strftime('%H:%M:%S')  # NOTE: shadows the time module inside this function only
    beginTime = (datetime.today()).strftime('%Y-%m-%d')  # e.g. 2022-06-23
    timeSign = datetime.now().strftime('%H:%M')  # 20220706 tweak: hour:minute bucket
    # shops_cnt = len(shops_info_df)  # batch mode (v2)
    shops_cnt = 1
    check_error_cnt = 0  # counts shops whose fetch failed (v2)
    acctId = mt_pair['data']['session']['acctId']
    token = mt_pair['data']['session']['token']
    # shop info — single-shop debug values; the loop over shops_info_df is disabled
    shop_id = 8038750
    shop_name = '浆小白·粉面粥·豆浆夜市(周浦万达店)'
    info_update_datetime = datetime.today()
    # request targets
    url_flow = f'https://waimaieapp.meituan.com/bizdata/flow/single/pc/overview?acctId={acctId}&wmPoiId={shop_id}&durationType=5&tabType=3&circleType=1'
    cookie = f'acctId={acctId}; ' \
             f'token={token}; ' \
             f'wmPoiId={shop_id}; '
    headers_flow = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36',
        'cookie': cookie}
    url_wager = f'https://waimaieapp.meituan.com/ad/v3/statistics/cpc/today/info?acctId={acctId}&wmPoiId={shop_id}&token={token}&platform=0&bizad_cityId=310100&bizad_second_city_id=310100&bizad_third_city_id=310115'
    headers_wager = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36'}
    # only fetch when the shop info is at least as fresh as today
    if int(info_update_datetime.strftime("%Y%m%d")) >= int(datetime.strptime(beginTime, "%Y-%m-%d").strftime("%Y%m%d")):
        try:
            data_details_flow = requests.get(url_flow, headers=headers_flow).json()
            data_details_wager = requests.get(url_wager, headers=headers_wager).json()
            data_details = {"data_flow": data_details_flow,
                            "data_wager": data_details_wager}
            # raw payload row for the storage CSV
            row_storage = [script_name, brand_name, shop_id, wm_plate, date, time, json.dumps(data_details, ensure_ascii=False), 0]
            try:
                # flow metrics: 'base' is today's value; today - baseDelta = yesterday's value
                data_all_flow = data_details_flow['data']
                today_exp_pv = data_all_flow['exposureCnt']['base']  # exposure count
                today_exp_uv = data_all_flow['exposureNum']['base']  # exposure users
                today_clk_pv = data_all_flow['visitCnt']['base']  # visit count
                today_clk_uv = data_all_flow['visitNum']['base']  # visit users
                today_valid_order_cnt = data_all_flow['orderCnt']['base']  # order count
                today_valid_order_user_cnt = data_all_flow['orderNum']['base']  # order users
                today_clk_exp_rate = data_all_flow['visitRate']['base']  # visit conversion rate
                today_ord_clk_rate = data_all_flow['orderRate']['base']  # order conversion rate
                last_day_exp_pv = today_exp_pv - data_all_flow['exposureCnt']['baseDelta']
                last_day_exp_uv = today_exp_uv - data_all_flow['exposureNum']['baseDelta']
                last_day_clk_pv = today_clk_pv - data_all_flow['visitCnt']['baseDelta']
                last_day_clk_uv = today_clk_uv - data_all_flow['visitNum']['baseDelta']
                last_day_valid_order_cnt = today_valid_order_cnt - data_all_flow['orderCnt']['baseDelta']
                last_day_valid_order_user_cnt = today_valid_order_user_cnt - data_all_flow['orderNum']['baseDelta']
                last_day_clk_exp_rate = today_clk_exp_rate - data_all_flow['visitRate']['baseDelta']
                last_day_ord_clk_rate = today_ord_clk_rate - data_all_flow['orderRate']['baseDelta']
                # bidding (CPC) metrics
                data_all_wager = data_details_wager['data']
                totalExposureAmount = data_all_wager['showCount']  # cumulative exposures
                uvCount = data_all_wager['clickCount']  # bid visit count
                totalCost = data_all_wager['cost']  # cumulative bid cost
                averageCost = data_all_wager['avgPrice']  # cost per click
                row_application = [0, date, beginTime, brand_name, shop_id, shop_name, timeSign,
                                   today_exp_pv, today_exp_uv, today_clk_pv, today_clk_uv, today_valid_order_cnt, today_valid_order_user_cnt, today_clk_exp_rate, today_ord_clk_rate,
                                   last_day_exp_pv, last_day_exp_uv, last_day_clk_pv, last_day_clk_uv, last_day_valid_order_cnt, last_day_valid_order_user_cnt, last_day_clk_exp_rate, last_day_ord_clk_rate,
                                   totalExposureAmount, uvCount, totalCost, averageCost]
                writer_application.writerow(row_application)
            except Exception:  # fix: was bare except — payload shape changed; write a -1 marker row
                row_application = [0, date, beginTime, brand_name, shop_id, shop_name, timeSign, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
                writer_application.writerow(row_application)
        except Exception:  # fix: was bare except — the HTTP request itself failed
            check_error_cnt += 1
            data_details = {'result': '此门店未打开营业or账户号已无效or相关界面无参数,请核实!'}
            row_storage = [script_name, brand_name, shop_id, wm_plate, date, time, json.dumps(data_details, ensure_ascii=False), 0]
            row_application = [0, date, beginTime, brand_name, shop_id, shop_name, timeSign, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
            writer_application.writerow(row_application)
    else:
        # stale shop info: record an error row instead of fetching
        check_error_cnt += 1
        data_details = {'result': '此门店未打开营业or账户号已无效or相关界面无参数,请核实!'}
        row_storage = [script_name, brand_name, shop_id, wm_plate, date, time, json.dumps(data_details, ensure_ascii=False), 0]
        row_application = [0, date, beginTime, brand_name, shop_id, shop_name, timeSign, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        writer_application.writerow(row_application)
    writer_storage.writerow(row_storage)
    # fix: close the CSVs so buffers are flushed before LOAD DATA reads them
    f_storage.close()
    f_application.close()
    if shops_cnt == check_error_cnt:  # v2: every shop failed
        raise ZeroDivisionError('所有门店数据获取失败')  # was "e = 0/0"
    print(f'{brand_name}\n【{csv_name_storage}】\n【{csv_name_application}】\n加载成功!\n')
    return csv_name_storage, csv_name_application
def running_m():
    '''Run one Meituan collection cycle: fetch, write CSVs, LOAD DATA, log the run.

    Returns the elapsed wall-clock time of the whole cycle in seconds.
    '''
    # run-log parameters
    time_begin = time.time()
    log_date = datetime.now().strftime("%Y-%m-%d")  # log record date
    log_time = datetime.now().strftime('%H:%M:%S')  # log record time
    # shared parameters
    # NOTE(review): database credentials and the output path are hard-coded;
    # consider moving them to config/environment.
    host = 'localhost'
    passwd = '111???clown'
    db_name = 'hexingxing'
    # brand_names = ['粥小鲜','浆小白']
    wm_plate = 'MEITUAN'
    owner = 'all'
    key_name = ''
    file_path = 'C:/Users/ClownHe/Desktop/goods'
    table_name = '[test_flowbyhour_m] Storage And App'
    table_name_storage = 'data_storage_test_flowbyhour_m'
    table_name_app = 'data_app_test_flowbyhour_m'
    operation_results = '执行成功'
    brand_name = '浆小白'
    # script execution
    db = linkTomySql(host, passwd, db_name)
    # for brand_name in brand_names:
    mt_pair = read_key_value_pair(db, brand_name, wm_plate, owner)
    # shops_info_df = get_shops_info_to_list(db,brand_name,wm_plate,key_name)
    shops_info_df = 0
    try:
        try:
            csv_name_storage, csv_name_application = test_FlowByHour_M(file_path, brand_name, mt_pair,
                                                                       shops_info_df)
        except Exception as x:
            # a failure usually means a stale session key: refresh and retry once
            print('重载关键表', x)
            update_key_value_pair(db, brand_name, wm_plate, owner, '')  # on error, refresh【all_key_table】
            # NOTE(review): mt_pair is not re-read after the refresh, so the
            # retry appears to reuse the stale credentials — confirm intent.
            csv_name_storage, csv_name_application = test_FlowByHour_M(file_path, brand_name, mt_pair,
                                                                       shops_info_df)
            operation_results = operation_results + f',更新【{brand_name}】【{wm_plate}】【all_key_table】'
        loadDataInfile(file_path, csv_name_storage, db, table_name_storage)
        print(f'【{csv_name_storage}】导入成功')
        loadDataInfile(file_path, csv_name_application, db, table_name_app)
        print(f'【{csv_name_application}】导入成功')
        print(f'本次脚本执行总用时{(time.time() - time_begin) / 60} min')
    except Exception as e:
        # any remaining failure is recorded in the run log instead of raised
        operation_results = traceback.format_exc()
    running_Time = time.time() - time_begin  # script duration in seconds
    db = linkTomySql(host, passwd, db_name)  # reconnect in case the old handle died
    update_scriptrunninglogs(log_date, log_time, running_Time, operation_results, table_name, db)
    print('Working done!')
    time_kill = time.time() - time_begin
    return time_kill
def test_FlowByHour_E(file_path, brand_name, elm_pair, shops_info_df):
    '''Fetch today's Eleme flow and bidding stats for one shop; write two CSVs.

    file_path: directory the two CSV files are written into
    brand_name: brand label stamped into every row
    elm_pair: credential dict from all_key_table; reads
        elm_pair['data']['session']['ksid']
    shops_info_df: unused in this single-shop debug version (the batch loop
        over shops is disabled)

    return: (csv_name_storage, csv_name_application)
    raises: ZeroDivisionError when every shop errored — callers catch any
        Exception, refresh the key table and retry; the exception type is
        kept from the original "e = 0/0" hack for compatibility (v2优化).
    '''
    # storage CSV: raw JSON payloads, one row per shop
    csv_name_storage = 'test_FlowByHour_E_storage.csv'
    title_storage = ['script_name', 'brand_name', 'shop_id', 'wm_plate', 'date', 'time', 'data_details', 'id']
    f_storage = open(file_path + '/' + csv_name_storage, mode='w', encoding='utf-8-sig', newline="")
    writer_storage = csv.writer(f_storage)
    writer_storage.writerow(title_storage)
    # application CSV: parsed metrics, one row per shop
    csv_name_application = 'test_FlowByHour_E_application.csv'
    title_application = ['id', 'log_date', 'date_of_data', 'brand_name', 'shop_id', 'shop_name', 'timeSign',
                         'today_exp_pv', 'today_exp_uv', 'today_clk_pv', 'today_clk_uv', 'today_valid_order_cnt', 'today_valid_order_user_cnt', 'today_clk_exp_rate', 'today_ord_clk_rate',
                         'last_day_exp_pv', 'last_day_exp_uv', 'last_day_clk_pv', 'last_day_clk_uv', 'last_day_valid_order_cnt', 'last_day_valid_order_user_cnt', 'last_day_clk_exp_rate', 'last_day_ord_clk_rate',
                         'totalExposureAmount', 'uvCount', 'totalCost', 'averageCost']
    f_application = open(file_path + '/' + csv_name_application, mode='w', encoding='utf-8-sig', newline="")
    writer_application = csv.writer(f_application)
    writer_application.writerow(title_application)
    # shared parameters
    script_name = 'test_FlowByHour_E'
    wm_plate = 'ELEME'
    date = datetime.now().strftime('%Y-%m-%d')
    time = datetime.now().strftime('%H:%M:%S')  # NOTE: shadows the time module inside this function only
    beginTime = (datetime.today()).strftime('%Y-%m-%d')  # e.g. 2022-06-23
    timeSign = datetime.now().strftime('%H:%M')  # 20220706 tweak: hour:minute bucket
    # shops_cnt = len(shops_info_df)  # batch mode (v2)
    shops_cnt = 1
    check_error_cnt = 0  # counts shops whose fetch failed (v2)
    ksid = elm_pair['data']['session']['ksid']
    url_flow = 'https://httpizza.ele.me/napos-miracle-stats/SingleFlowService.realtimeFlowSummary'
    url_wager = 'https://app-api.shop.ele.me/alchemy/invoke/?method=wager.getUVSummary'
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.85 Safari/537.36',
        'content-type': 'application/json'}
    # shop info — single-shop debug values; the loop over shops_info_df is disabled
    shop_id = 173858169
    shop_name = '浆小白·粉面粥·豆浆夜市(周浦万达店)'
    info_update_datetime = datetime.today()
    # request payloads
    json_params_flow = {"ksid": ksid, "requestParam": {"shopId": shop_id, "contrastType": "yesterday"}}
    json_params_wager = {"id": "3FC49D97DC28490990FC243CE038B9CA|1657016560816",
                         "metas": {"appName": "bs-river-bid",
                                   "appVersion": "4.4.0",
                                   "ksid": ksid},
                         "service": "wager",
                         "method": "getUVSummary",
                         "params": {"restaurantId": shop_id,
                                    "type": None,
                                    "beginTime": f"{datetime.today().strftime('%Y-%m-%d')}",
                                    "endTime": f"{datetime.today().strftime('%Y-%m-%d')}"},
                         "ncp": "2.0.0"}
    # only fetch when the shop info is at least as fresh as today
    if int(info_update_datetime.strftime("%Y%m%d")) >= int(datetime.strptime(beginTime, "%Y-%m-%d").strftime("%Y%m%d")):
        try:
            data_details_flow = requests.post(url_flow, json=json_params_flow, headers=headers).json()
            data_details_wager = requests.post(url_wager, json=json_params_wager, headers=headers).json()
            data_details = {"data_flow": data_details_flow,
                            "data_wager": data_details_wager}
            # raw payload row for the storage CSV
            row_storage = [script_name, brand_name, shop_id, wm_plate, date, time, json.dumps(data_details, ensure_ascii=False), 0]
            try:
                # flow metrics come back pre-split into today/yesterday fields
                data_all_flow = data_details_flow['data']['result'][0]
                today_exp_pv = data_all_flow['today_exp_pv']  # exposure count
                today_exp_uv = data_all_flow['today_exp_uv']  # exposure users
                today_clk_pv = data_all_flow['today_clk_pv']  # visit count
                today_clk_uv = data_all_flow['today_clk_uv']  # visit users
                today_valid_order_cnt = data_all_flow['today_valid_order_cnt']  # order count
                today_valid_order_user_cnt = data_all_flow['today_valid_order_user_cnt']  # order users
                today_clk_exp_rate = data_all_flow['today_clk_exp_rate']  # visit conversion rate
                today_ord_clk_rate = data_all_flow['today_ord_clk_rate']  # order conversion rate
                last_day_exp_pv = data_all_flow['last_day_exp_pv']
                last_day_exp_uv = data_all_flow['last_day_exp_uv']
                last_day_clk_pv = data_all_flow['last_day_clk_pv']
                last_day_clk_uv = data_all_flow['last_day_clk_uv']
                last_day_valid_order_cnt = data_all_flow['last_day_valid_order_cnt']
                last_day_valid_order_user_cnt = data_all_flow['last_day_valid_order_user_cnt']
                last_day_clk_exp_rate = data_all_flow['last_day_clk_exp_rate']
                last_day_ord_clk_rate = data_all_flow['last_day_ord_clk_rate']
                # bidding metrics
                data_all_wager = data_details_wager['result']
                totalExposureAmount = data_all_wager['totalExposureAmount']  # cumulative exposures
                uvCount = data_all_wager['uvCount']  # bid visit count
                totalCost = data_all_wager['totalCost']  # cumulative bid cost
                averageCost = data_all_wager['averageCost']  # cost per click
                row_application = [0, date, beginTime, brand_name, shop_id, shop_name, timeSign,
                                   today_exp_pv, today_exp_uv, today_clk_pv, today_clk_uv, today_valid_order_cnt, today_valid_order_user_cnt, today_clk_exp_rate, today_ord_clk_rate,
                                   last_day_exp_pv, last_day_exp_uv, last_day_clk_pv, last_day_clk_uv, last_day_valid_order_cnt, last_day_valid_order_user_cnt, last_day_clk_exp_rate, last_day_ord_clk_rate,
                                   totalExposureAmount, uvCount, totalCost, averageCost]
                writer_application.writerow(row_application)
            except Exception:  # fix: was bare except — payload shape changed; write a -1 marker row
                row_application = [0, date, beginTime, brand_name, shop_id, shop_name, timeSign, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
                writer_application.writerow(row_application)
        except Exception:  # fix: was bare except — the HTTP request itself failed
            check_error_cnt += 1
            data_details = {'result': '此门店未打开营业or账户号已无效or相关界面无参数,请核实!'}
            row_storage = [script_name, brand_name, shop_id, wm_plate, date, time, json.dumps(data_details, ensure_ascii=False), 0]
            row_application = [0, date, beginTime, brand_name, shop_id, shop_name, timeSign, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
            writer_application.writerow(row_application)
    else:
        # stale shop info: record an error row instead of fetching
        check_error_cnt += 1
        data_details = {'result': '此门店未打开营业or账户号已无效or相关界面无参数,请核实!'}
        row_storage = [script_name, brand_name, shop_id, wm_plate, date, time, json.dumps(data_details, ensure_ascii=False), 0]
        row_application = [0, date, beginTime, brand_name, shop_id, shop_name, timeSign, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        writer_application.writerow(row_application)
    writer_storage.writerow(row_storage)
    # fix: close the CSVs so buffers are flushed before LOAD DATA reads them
    f_storage.close()
    f_application.close()
    if shops_cnt == check_error_cnt:  # v2: every shop failed
        raise ZeroDivisionError('所有门店数据获取失败')  # was "e = 0/0"
    print(f'{brand_name}\n【{csv_name_storage}】\n【{csv_name_application}】\n加载成功!\n')
    return csv_name_storage, csv_name_application
def running_e():
    '''Run one Eleme collection cycle: fetch, write CSVs, LOAD DATA, log the run.

    Returns the elapsed wall-clock time of the whole cycle in seconds.
    '''
    # run-log parameters
    time_begin = time.time()
    log_date = datetime.now().strftime("%Y-%m-%d")  # log record date
    log_time = datetime.now().strftime('%H:%M:%S')  # log record time
    # shared parameters
    # NOTE(review): database credentials and the output path are hard-coded;
    # consider moving them to config/environment.
    host = 'localhost'
    passwd = '111???clown'
    db_name = 'hexingxing'
    # brand_names = ['粥小鲜','浆小白']
    wm_plate = 'ELEME'
    owner = 'all'
    key_name = ''
    file_path = 'C:/Users/ClownHe/Desktop/goods'
    table_name = '[test_flowbyhour_e] Storage And App'
    table_name_storage = 'data_storage_test_flowbyhour_e'
    table_name_app = 'data_app_test_flowbyhour_e'
    operation_results = '执行成功'
    brand_name = '浆小白'
    # script execution
    db = linkTomySql(host, passwd, db_name)
    # for brand_name in brand_names:
    elm_pair = read_key_value_pair(db, brand_name, wm_plate, owner)
    # shops_info_df = get_shops_info_to_list(db,brand_name,wm_plate,key_name)
    shops_info_df = 0
    try:
        try:
            csv_name_storage, csv_name_application = test_FlowByHour_E(file_path, brand_name, elm_pair,
                                                                       shops_info_df)
        except Exception as x:
            # a failure usually means a stale session key: refresh and retry once
            print('重载关键表', x)
            update_key_value_pair(db, brand_name, wm_plate, owner, '')  # on error, refresh【all_key_table】
            # NOTE(review): elm_pair is not re-read after the refresh, so the
            # retry appears to reuse the stale credentials — confirm intent.
            csv_name_storage, csv_name_application = test_FlowByHour_E(file_path, brand_name, elm_pair,
                                                                       shops_info_df)
            operation_results = operation_results + f',更新【{brand_name}】【{wm_plate}】【all_key_table】'
        loadDataInfile(file_path, csv_name_storage, db, table_name_storage)
        print(f'【{csv_name_storage}】导入成功')
        loadDataInfile(file_path, csv_name_application, db, table_name_app)
        print(f'【{csv_name_application}】导入成功')
        print(f'本次脚本执行总用时{(time.time() - time_begin) / 60} min')
    except Exception as e:
        # any remaining failure is recorded in the run log instead of raised
        operation_results = traceback.format_exc()
    running_Time = time.time() - time_begin  # script duration in seconds
    db = linkTomySql(host, passwd, db_name)  # reconnect in case the old handle died
    update_scriptrunninglogs(log_date, log_time, running_Time, operation_results, table_name, db)
    print('Working done!')
    time_kill = time.time() - time_begin
    return time_kill
if __name__ == '__main__':
    # Run the Meituan job at second :10 and the Eleme job at second :15 of
    # every minute. (Older per-hour schedule variants were removed; see VCS
    # history if they are ever needed again.)
    schedule.every().minute.at(":10").do(running_m)
    schedule.every().minute.at(":15").do(running_e)
    while True:
        schedule.run_pending()  # run every job that is due
        time.sleep(1)  # fix: avoid busy-spinning the CPU between checks
|