import base64
import requests
import pandas as pd
from six.moves import urllib
from datetime import datetime, timedelta
import numpy as np
import pyodbc


## Getting last record date from SQL Server ##
cnxn = pyodbc.connect(r"Driver={ODBC Driver 17 for SQL Server};"
                      r"Server=TDRS706RTSQL2;"
                      r"Database=ERAP;"
                      r"UID=erap_worker;"
                      r"PWD=6k2XgAy6d9kkVV2r;"
                      r"Trusted_Connection=no"
                      )

cursor = cnxn.cursor()
cursor.execute('SELECT MAX([EntryDateTime]) FROM [dbo].[GreenvilleCountySC_AuditLog]')
records = cursor.fetchall()
records = records[0]

# Most recent EntryDateTime already loaded; used as the start of the incremental pull
gen_startDate = records[0]
#gen_startDate = gen_startDate + timedelta(seconds=5)
endDate = datetime.now()
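
# If the audit table is empty, MAX() returns NULL and gen_startDate is None.
# Minimal fallback sketch: the 2021-01-01 floor date is an assumption, not a
# value from the original script; adjust it to the program's actual start date.
if gen_startDate is None:
    gen_startDate = datetime(2021, 1, 1)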



## Obtaining the access token from the Token API ##
url = "https://portal.neighborlysoftware.com/Token"

data = {
    "grant_type": "password",
    "auth_type": "password",
    "tenantcode": "ERAP-GREENVILLECOUNTYSC",
    "machine_code": "7fe4fe85-4c1e-4743-b752-ac0de8f78f5e",
    "username": "TDR.ERAPData@TetraTech.com",
    "password": "TDRCategory5$"
}

page = requests.post(url, data=data)
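
# Defensive check (sketch): fail fast if the token request was rejected. Only
# the HTTP status is verified; the Token API's error body format is not
# documented in this script.
page.raise_for_status()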
token_dict = page.json()

new_token = token_dict["access_token"]

## Pulling the new data from the Audit API ##

url_pull = "https://portal.neighborlysoftware.com/api/Reporting/GetAuditLogReport"

pull_headers = {
    'Authorization': "Bearer " + new_token,
    'X-NBLY-Tenant-Code': 'ERAP-GREENVILLECOUNTYSC'
}

pull_params = {
    'workflowId': '1437',
    'startDate': gen_startDate,
    'endDate': endDate,
    'entryTypes': '0,1,2,3,4,21,22,23,24,25,26,27,28,29,30,55,56,57,59'
}
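
# requests will serialize the datetime objects above with str(). If the report
# endpoint expects ISO 8601 strings instead (an assumption, not confirmed
# here), format them explicitly:
# pull_params['startDate'] = gen_startDate.isoformat()
# pull_params['endDate'] = endDate.isoformat()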


pull_response = requests.get(url_pull, headers=pull_headers, params=pull_params)
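
# Defensive check (sketch): stop before parsing if the report request failed.
# The lines below also assume the response carries a non-empty "DataSet" list.
pull_response.raise_for_status()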
pulled_data_list = []
record_data = pull_response.json()
data = record_data["DataSet"]
pulled_data_list.extend(data)

df = pd.DataFrame(pulled_data_list)
df.columns = ["EntryID", "CaseID", "CaseName", "EntryType", "EntryDateTime", "User", "Note", "drop1"]
del df['drop1']
df['TimeInStatus'] = np.nan
df['AssignmentProcessed'] = np.nan
# Reorder the columns to match the SQL table layout
df = df[["CaseID", "CaseName", "EntryID", "EntryType", "Note", "User", "EntryDateTime", 'TimeInStatus', 'AssignmentProcessed']]
# Drop the last row of the report output
df = df.iloc[:-1]
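
# Because startDate is the MAX(EntryDateTime) already stored, the pull can
# return the boundary record a second time. A client-side de-duplication
# sketch (assumes EntryDateTime parses cleanly with pandas and gen_startDate
# is a datetime):
# df = df[pd.to_datetime(df['EntryDateTime']) > gen_startDate]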

#print(df)

## SQL Stuff ##

from sqlalchemy import create_engine, MetaData, Table, select, exc

sql_server = "TDRS706RTSQL2"
sql_db = "ERAP"
meta = MetaData()
table_name = "GreenvilleCountySC_AuditLog"
#table_name_long = Table("GreenvilleSC_AuditLog", meta,
#    Column("ID", Integer, primary_key=True, autoincrement=True),
#    Column("CaseID", Integer),
#    Column("CaseName", String),
#    Column("EntryType", String),
#    Column("EntryDateTime", DateTime),
#    Column("User", String),
#    Column("Note", String),
#    Column("TimeInStatus", BigInteger),
#    Column("Note", String))
sql_username = "erap_worker"
sql_password = "6k2XgAy6d9kkVV2r"


engine = create_engine("mssql+pyodbc://" + sql_username + ":" + sql_password + "@" + sql_server + "/" + sql_db + "?driver=ODBC+Driver+17+for+SQL+Server")

connection = engine.connect()

#Only needed if updating primary key column
#engine.execute("SET IDENTITY_INSERT GreenvilleSC_AuditLog ON")

#print(engine.table_names())

df.to_sql(table_name, con=engine, index=False, chunksize=50, method='multi', if_exists='append')
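
# Cleanup sketch (not part of the original script): release the connections
# once the load finishes. For larger loads, create_engine(...,
# fast_executemany=True) on the mssql+pyodbc dialect can also speed up inserts.
connection.close()
engine.dispose()
cnxn.close()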