Skip to content

Commit 7751e64

Browse files
committed
Merge branch '494-logging' into 393-foster-import
2 parents 753485b + 8e480d5 commit 7751e64

19 files changed

+222
-113
lines changed

src/server/alembic/generate_rfm_mapping.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import itertools
2-
2+
import structlog
3+
logger = structlog.get_logger()
34

45
def get_all_combinations(chars):
56
yield from itertools.product(*([chars] * 3))
@@ -71,7 +72,7 @@ def start():
7172
f.write("%s\n" % item)
7273

7374

74-
print('done')
75+
logger.debug('Completed generate_rfm_mapping')
7576

7677

7778
start()

src/server/api/API_ingest/dropbox_handler.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,20 @@
11
import dropbox
2+
import structlog
3+
logger = structlog.get_logger()
24

35
try:
46
from secrets_dict import DROPBOX_APP
57
except ImportError:
68
# Not running locally
7-
print("Couldn't get DROPBOX_APP from file, trying environment **********")
9+
logger.debug("Couldn't get DROPBOX_APP from file, trying environment **********")
810
from os import environ
911

1012
try:
1113
DROPBOX_APP = environ['DROPBOX_APP']
1214
except KeyError:
1315
# Not in environment
1416
# You're SOL for now
15-
print("Couldn't get DROPBOX_APP from file or environment")
17+
logger.error("Couldn't get DROPBOX_APP from file or environment")
1618

1719

1820
class TransferData:

src/server/api/API_ingest/ingest_sources_from_api.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,4 +16,4 @@ def start(conn):
1616
print("Finished fetching raw data from different API sources")
1717

1818

19-
#TODO: Return object with count for each data source?
19+
#TODO: Return object with count for each data source?

src/server/api/API_ingest/shelterluv_api_handler.py

Lines changed: 10 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@
77
from api.API_ingest.dropbox_handler import upload_file_to_dropbox
88
from constants import RAW_DATA_PATH
99
from models import ShelterluvPeople
10+
import structlog
11+
logger = structlog.get_logger()
1012

1113

1214
TEST_MODE = os.getenv("TEST_MODE")
@@ -15,15 +17,15 @@
1517
from secrets_dict import SHELTERLUV_SECRET_TOKEN
1618
except ImportError:
1719
# Not running locally
18-
print("Couldn't get SHELTERLUV_SECRET_TOKEN from file, trying environment **********")
20+
logger.debug("Couldn't get SHELTERLUV_SECRET_TOKEN from file, trying environment **********")
1921
from os import environ
2022

2123
try:
2224
SHELTERLUV_SECRET_TOKEN = environ['SHELTERLUV_SECRET_TOKEN']
2325
except KeyError:
2426
# Not in environment
2527
# You're SOL for now
26-
print("Couldn't get SHELTERLUV_SECRET_TOKEN from file or environment")
28+
logger.error("Couldn't get SHELTERLUV_SECRET_TOKEN from file or environment")
2729

2830

2931
def write_csv(json_data):
@@ -71,7 +73,7 @@ def store_shelterluv_people_all(conn):
7173
has_more = True
7274
shelterluv_people = []
7375

74-
print("Start getting shelterluv contacts from people table")
76+
logger.debug("Start getting shelterluv contacts from people table")
7577

7678
while has_more:
7779
r = requests.get("http://shelterluv.com/api/v1/people?limit={}&offset={}".format(LIMIT, offset),
@@ -90,7 +92,7 @@ def store_shelterluv_people_all(conn):
9092

9193
print("Finish getting shelterluv contacts from people table")
9294

93-
print("Start storing latest shelterluvpeople results to container")
95+
logger.debug("Start storing latest shelterluvpeople results to container")
9496
if os.listdir(RAW_DATA_PATH):
9597
for file_name in os.listdir(RAW_DATA_PATH):
9698
file_path = os.path.join(RAW_DATA_PATH, file_name)
@@ -100,13 +102,13 @@ def store_shelterluv_people_all(conn):
100102
os.remove(file_path)
101103

102104
file_path = write_csv(shelterluv_people)
103-
print("Finish storing latest shelterluvpeople results to container")
105+
logger.debug("Finish storing latest shelterluvpeople results to container")
104106

105-
print("Start storing " + '/shelterluv/' + "results to dropbox")
107+
logger.debug("Start storing " + '/shelterluv/' + "results to dropbox")
106108
upload_file_to_dropbox(file_path, '/shelterluv/' + file_path.split('/')[-1])
107-
print("Finish storing " + '/shelterluv/' + "results to dropbox")
109+
logger.debug("Finish storing " + '/shelterluv/' + "results to dropbox")
108110

109-
print("Uploading shelterluvpeople csv to database")
111+
logger.debug("Uploading shelterluvpeople csv to database")
110112
ShelterluvPeople.insert_from_df(pd.read_csv(file_path, dtype="string"), conn)
111113

112114
return offset

src/server/api/admin_api.py

Lines changed: 22 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,10 @@
1616
from api import jwt_ops
1717
from config import RAW_DATA_PATH
1818

19+
import structlog
20+
logger = structlog.get_logger()
21+
22+
1923
ALLOWED_EXTENSIONS = {"csv", "xlsx"}
2024

2125

@@ -32,7 +36,7 @@ def upload_csv():
3236
try:
3337
validate_and_arrange_upload(file)
3438
except Exception as e:
35-
current_app.logger.exception(e)
39+
logger.exception(e)
3640
finally:
3741
file.close()
3842

@@ -44,7 +48,7 @@ def upload_csv():
4448
def list_current_files():
4549
result = None
4650

47-
current_app.logger.info("Start returning file list")
51+
logger.info("Start returning file list")
4852
file_list_result = os.listdir(RAW_DATA_PATH)
4953

5054
if len(file_list_result) > 0:
@@ -56,9 +60,9 @@ def list_current_files():
5660
@admin_api.route("/api/execute", methods=["POST"])
5761
@jwt_ops.admin_required
5862
def execute():
59-
current_app.logger.info("Execute flow")
63+
logger.info("Execute flow")
6064
job_outcome = flow_script.start_flow() # 'busy', 'completed', or 'nothing to do'
61-
current_app.logger.info("Job outcome: " + str(job_outcome))
65+
logger.info("Job outcome: %s", str(job_outcome))
6266

6367

6468
# -------- Skip update if 'busy' or 'nothing to do' as nothing changed ? ------
@@ -87,8 +91,8 @@ def execute():
8791
try:
8892
connection.execute(upsert)
8993
except Exception as e:
90-
current_app.logger.error("Insert/Update failed on Last Execution stats")
91-
current_app.logger.exception(e)
94+
logger.error("Insert/Update failed on Last Execution stats")
95+
logger.error(e)
9296
# -------------------------------------------------------------------------------
9397

9498
if job_outcome == 'busy':
@@ -127,7 +131,7 @@ def get_statistics():
127131
@jwt_ops.admin_required
128132
def list_statistics():
129133
""" Pull Last Execution stats from DB. """
130-
current_app.logger.info("list_statistics() request")
134+
logger.info("list_statistics() request")
131135
last_execution_details = '{}' # Empty but valid JSON
132136

133137
engine.dispose() # we don't want other process's conn pool
@@ -143,7 +147,7 @@ def list_statistics():
143147
last_execution_details = result.fetchone()[0]
144148

145149
except Exception as e:
146-
current_app.logger.error("Failure reading Last Execution stats from DB - OK on first run")
150+
logger.error("Failure reading Last Execution stats from DB - OK on first run")
147151
# Will happen on first run, shouldn't after
148152

149153
return last_execution_details
@@ -220,10 +224,10 @@ def start_job():
220224

221225
if running_job :
222226
# There was a running job already
223-
current_app.logger.info("Request to start job, but job_id " + str(running_job) + " already executing")
227+
logger.warn("Request to start job, but job_id " + str(running_job) + " already executing")
224228
return None
225229
else:
226-
current_app.logger.info("Assigned job_id " + job_id )
230+
logger.info("Assigned job_id %s", str(job_id))
227231
return job_id
228232

229233

@@ -269,7 +273,7 @@ def import_rfm_csv():
269273
with open('C:\\Projects\\paws-stuff\\score_tuples.csv', 'r') as csvfile:
270274
reader = csv.reader(csvfile, delimiter=',')
271275
hdr = next(reader)
272-
print('Skipping header: ', hdr)
276+
logger.debug('Skipping header: %s', hdr)
273277
for row in reader:
274278
score_list.append(row)
275279

@@ -302,14 +306,14 @@ def write_rfm_edges(rfm_dict : dict) :
302306
try:
303307
connection.execute(upsert)
304308
except Exception as e:
305-
current_app.logger.error("Insert/Update failed on rfm edge ")
306-
current_app.logger.exception(e)
309+
logger.error("Insert/Update failed on rfm edge ")
310+
logger.error(e)
307311
return None
308312

309313
return 0
310314

311315
else : # Malformed dict
312-
current_app.logger.error("Received rfm_edge dictionary with " + str(len(rfm_dict)) + " entries - expected 3")
316+
logger.error("Received rfm_edge dictionary with %s entries - expected 3", str(len(rfm_dict)))
313317
return None
314318

315319

@@ -321,14 +325,14 @@ def read_rfm_edges() :
321325
with engine.begin() as connection: # BEGIN TRANSACTION
322326
q_result = connection.execute(q)
323327
if q_result.rowcount == 0:
324-
current_app.logger.error("No rfm_edge entry found in DB")
328+
logger.error("No rfm_edge entry found in DB")
325329
return None
326330
else:
327331
edge_string = q_result.fetchone()[0]
328332
try:
329333
edge_dict = json.loads(edge_string) # Convert stored string to dict
330334
except json.decoder.JSONDecodeError:
331-
current_app.logger.error("rfm_edge entry found in DB was malformed")
335+
logger.error("rfm_edge entry found in DB was malformed")
332336
return None
333337

334338
return edge_dict
@@ -380,9 +384,9 @@ def generate_dummy_rfm_scores():
380384

381385
# return jsonify(sfd_list) # enable if using endpoint, but it returns a lot of data
382386

383-
current_app.logger.debug("Inserting dummy scores...")
387+
logger.debug("Inserting dummy scores...")
384388
count = insert_rfm_scores(dummy_scores)
385-
current_app.logger.debug("Finished inserting")
389+
logger.debug("Finished inserting")
386390

387391

388392
return count

src/server/api/common_api.py

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -6,19 +6,22 @@
66
import time
77
from datetime import datetime
88

9+
import structlog
10+
logger = structlog.get_logger()
11+
12+
913
from api.fake_data import sl_mock_data
1014

1115
try:
1216
from secrets_dict import SHELTERLUV_SECRET_TOKEN
1317
except ImportError:
1418
# Not running locally
15-
print("Couldn't get SHELTERLUV_SECRET_TOKEN from file, trying environment **********")
19+
logger.debug("Couldn't get SHELTERLUV_SECRET_TOKEN from file, trying environment **********")
1620
from os import getenv
1721

1822
SHELTERLUV_SECRET_TOKEN = getenv('SHELTERLUV_SECRET_TOKEN')
1923
if not SHELTERLUV_SECRET_TOKEN:
20-
print("Couldn't get secrets from file or environment",
21-
"Defaulting to Fake Data")
24+
logger.warn("Couldn't get secrets from file or environment - defaulting to Fake Data")
2225

2326
from api import jwt_ops
2427

@@ -262,7 +265,7 @@ def get_support_oview(matching_id):
262265
if row['source_id'].isalnum():
263266
id_list.append(row['source_id'])
264267
else:
265-
current_app.logger.warn("salesforcecontacts source_id " + row['source_id'] + "has non-alphanumeric characters; will not be used")
268+
logger.warn("salesforcecontacts source_id %s has non-alphanumeric characters; will not be used", str(row['source_id']))
266269

267270
if len(id_list) == 0: # No ids to query
268271
oview_fields['number_of_gifts'] = 0 # Marker for no support data
@@ -379,7 +382,7 @@ def get_support_oview(matching_id):
379382

380383

381384
else: # len(rows) == 0
382-
current_app.logger.debug('No SF contact IDs found for matching_id ' + str(matching_id))
385+
logger.warn('No SF contact IDs found for matching_id %s', str(matching_id))
383386
oview_fields['number_of_gifts'] = 0 # Marker for no data
384387
return jsonify(oview_fields)
385388

src/server/api/file_uploader.py

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,14 @@
66
from shifts_importer import validate_import_vs
77
from werkzeug.utils import secure_filename
88

9+
import structlog
10+
logger = structlog.get_logger()
11+
912
SUCCESS_MSG = "Uploaded Successfully!"
1013

1114

1215
def validate_and_arrange_upload(file):
13-
current_app.logger.info("Start uploading file: " + file.filename)
16+
logger.info("Start uploading file: %s ", file.filename)
1417
filename = secure_filename(file.filename)
1518
file_extension = filename.rpartition(".")[2]
1619
with engine.begin() as conn:
@@ -23,18 +26,22 @@ def determine_upload_type(file, file_extension, conn):
2326
# automatically pulling from vendor APIs directly, in which case we'd know
2427
# what kind of data we had.
2528
if file_extension == "csv":
29+
logger.debug("File extension is CSV")
2630
df = pd.read_csv(file, dtype="string")
2731

2832
if {"salesforcecontacts", "volgistics", "shelterluvpeople"}.issubset(df.columns):
33+
logger.debug("File appears to be salesforcecontacts, volgistics, or shelterluvpeople (manual)")
2934
ManualMatches.insert_from_df(df, conn)
3035
return
3136
elif {"Animal_ids", "Internal-ID"}.issubset(df.columns):
37+
logger.debug("File appears to be shelterluvpeople")
3238
ShelterluvPeople.insert_from_df(df, conn)
3339
return
3440

3541
if file_extension == "xlsx":
3642
excel_file = pd.ExcelFile(file)
3743
if {"Master", "Service"}.issubset(excel_file.sheet_names):
44+
logger.debug("File appears to be Volgistics")
3845
# Volgistics
3946
validate_import_vs(file, conn)
4047
Volgistics.insert_from_file(excel_file, conn)
@@ -45,11 +52,13 @@ def determine_upload_type(file, file_extension, conn):
4552
# Salesforce something-or-other
4653
if "Amount" in df.columns:
4754
# Salesforce donations
55+
logger.debug("File appears to be Salesforce donations")
4856
validate_import_sfd(file, conn)
4957
return
5058
else:
5159
# Salesforce contacts
60+
logger.debug("File appears to be Salesforce contacts")
5261
SalesForceContacts.insert_from_file_df(df, conn)
5362
return
5463

55-
current_app.logger.error(f"Don't know how to process file {file.filename}")
64+
logger.error("Don't know how to process file: %s", file.filename)

src/server/api/internal_api.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,9 @@
55
from api.API_ingest import ingest_sources_from_api
66
from rfm_funcs.create_scores import create_scores
77

8+
import structlog
9+
logger = structlog.get_logger()
10+
811
### Internal API endpoints can only be accessed from inside the cluster;
912
### they are blocked by location rule in NGINX config
1013

@@ -28,14 +31,14 @@ def ingest_raw_data():
2831
with engine.begin() as conn:
2932
ingest_sources_from_api.start(conn)
3033
except Exception as e:
31-
current_app.logger.exception(e)
34+
logger.error(e)
3235

3336
return jsonify({'outcome': 'OK'}), 200
3437

3538

3639
@internal_api.route("/api/internal/create_scores", methods=["GET"])
3740
def hit_create_scores():
38-
current_app.logger.info("Hitting create_scores() ")
41+
logger.info("Hitting create_scores() ")
3942
tuple_count = create_scores()
40-
current_app.logger.info("create_scores() processed " + str(tuple_count) + " scores")
43+
logger.info("create_scores() processed %s scores", str(tuple_count) )
4144
return jsonify(200)

0 commit comments

Comments
 (0)