-
Notifications
You must be signed in to change notification settings - Fork 1
/
conversion-stats.py
83 lines (67 loc) · 3.42 KB
/
conversion-stats.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
import logging
import os
from datetime import datetime, timedelta
from time import sleep
from decouple import config
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from core.models import Base, ConversionPixels, ConversionPixelsMetaNames
from core.util import DBMQuery, clean_date_value
CWD = os.path.dirname(os.path.abspath(__file__))
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# 1. Kick off the DBM report query covering yesterday's data.
# The API key file path comes from the environment (.env) via decouple.
report_date = datetime.today() - timedelta(days=1)
dbm = DBMQuery(os.path.join(CWD, config('API_KEY_FILE')))
query_id = config('QUERY_CONVERSION_STATS')
logger.info("Running query {} with data from {}...".format(query_id, report_date))
dbm.run_query(query_id, 'CUSTOM_DATES',
              start_date=report_date, end_date=report_date,
              timezone="Europe/Warsaw")
# -----------------------------------------------------------------------------------
# 2. Fetch the finished report from the DBM API.
# download_query raises RuntimeWarning while the query is still running on
# Google's side, so poll every 30 seconds until the report becomes available.
logger.info("Downloading report...")
pending = True
while pending:
    try:
        report = dbm.download_query(query_id, type='dict')
        logger.info("Downloaded!")
        pending = False
    except RuntimeWarning:
        logger.info("Query is still running, waiting...")
        sleep(30)
# -----------------------------------------------------------------------------------
# 3. Save report data to SQL.
logger.info("Connecting to DB")
engine = create_engine(config('DB_URI'), echo=True)
# Create all tables if they don't exist; SQLAlchemy skips the ones that do.
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()

# Conversion pixel ids already present in the metadata table, kept as a set
# so the per-row membership test below is O(1) instead of O(n).
conversion_ids_from_db = {x.conversion_id for x in session.query(ConversionPixelsMetaNames).all()}

for row in report:
    if row["Date"] == '':
        # Every csv report ends with summary and metadata rows that we don't
        # need. An empty Date marks the first of them, so stop processing here.
        logger.info("Reached csv file's metadata, breaking loop.")
        break
    if row['DV360 Activity'] == 'Total':
        # 'Total' in the report is a sum of all LI conversions -- skip it.
        continue
    # Hoist the repeated cast: the activity id is used as an int everywhere.
    activity_id = int(row['DV360 Activity ID'])
    if activity_id not in conversion_ids_from_db:
        # New pixel: insert its metadata row.
        record_meta = ConversionPixelsMetaNames(conversion_id=activity_id,
                                                conversion_name=row['DV360 Activity'])
        conversion_ids_from_db.add(activity_id)
    else:
        # Known pixel: refresh its display name.
        # BUGFIX: filter with the int id (the column is populated with ints
        # above), not the raw csv string -- a string here can miss the row on
        # back-ends with strict typing.
        record_meta = session.query(ConversionPixelsMetaNames).filter_by(
            conversion_id=activity_id).first()
        record_meta.conversion_name = row['DV360 Activity']
    session.add(record_meta)
    # NOTE(review): the revenue columns are passed through as raw csv strings
    # while the count columns are cast -- presumably the model/DB coerces
    # them; confirm before changing.
    record_stats = ConversionPixels(date=clean_date_value(row['Date']),
                                    line_item_id=int(row['Line Item ID']),
                                    conversion_id=activity_id,
                                    total_conversions=int(float(row['Total Conversions'])),
                                    post_click_conversions=int(float(row['Post-Click Conversions'])),
                                    post_click_revenue=row['CM Post-Click Revenue'],
                                    post_view_revenue=row['CM Post-View Revenue'])
    session.add(record_stats)

session.commit()
session.close()  # release the DB connection now that the commit succeeded