Create a Service User in the PrecisionLender Administration tab
import http.client
import getpass
import json
import urllib.parse
import pandas as pd
from io import StringIO
import os
# Service-user credentials for the PrecisionLender L3 Workbench API.
# (The original also assigned a personal email to `username` first, which
# was immediately overwritten -- dead assignment removed.)
# NOTE(review): clientid is hard-coded; move it to config/env before
# sharing this notebook.
clientid = "a9cdadd4-1324-4a7a-842e-fe8fc9fe37f1"
username = "L3_Workbench_Service_User"
# Prompt interactively so the password is never stored in the notebook text.
password = getpass.getpass()
Connect to the L3 Workbench as the Service User
# Authenticate to the L3 Workbench API as the service user. The
# credentials travel as request headers; the response body carries the
# OAuth bearer token used by every subsequent data call.
conn = http.client.HTTPSConnection("api.precisionlender.com")
headers = dict(clientid=clientid, username=username, password=password)
conn.request("POST", "/L3/v1/workbench/auth", headers=headers)
res = conn.getresponse()
data = res.read()
print(data.decode("utf-8"))
{"token_type":"Bearer","expires_in":3599,"ext_expires_in":3599,"access_token":"eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImtnMkxZczJUMENUaklmajRydDZKSXluZW4zOCIsImtpZCI6ImtnMkxZczJUMENUaklmajRydDZKSXluZW4zOCJ9.eyJhdWQiOiJodHRwczovL3N0b3JhZ2UuYXp1cmUuY29tIiwiaXNzIjoiaHR0cHM6Ly9zdHMud2luZG93cy5uZXQvZjA2ZDQ1OWItZDkzNS00YWQ3LWE5ZDMtYTgyMzQzYzRjOWRhLyIsImlhdCI6MTYwNjkyNTMxOCwibmJmIjoxNjA2OTI1MzE4LCJleHAiOjE2MDY5MjkyMTgsImFpbyI6IkUyUmdZSGhuelJod1o4WjU4YjhkUWU5V3lRV2ZBUUE9IiwiYXBwaWQiOiIwN2JlNDJlYS0xY2Y2LTQ5YTYtYmIzYy1lMTVmOGU2Y2U2MTciLCJhcHBpZGFjciI6IjEiLCJpZHAiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC9mMDZkNDU5Yi1kOTM1LTRhZDctYTlkMy1hODIzNDNjNGM5ZGEvIiwib2lkIjoiNDhlYzMyMzgtYTZjZi00NTYwLWE4MjAtYzBiNjgzOGE1NGI5IiwicmgiOiIwLkFBQUFtMFZ0OERYWjEwcXAwNmdqUThUSjJ1cEN2Z2YySEtaSnV6emhYNDVzNWhkYkFBQS4iLCJzdWIiOiI0OGVjMzIzOC1hNmNmLTQ1NjAtYTgyMC1jMGI2ODM4YTU0YjkiLCJ0aWQiOiJmMDZkNDU5Yi1kOTM1LTRhZDctYTlkMy1hODIzNDNjNGM5ZGEiLCJ1dGkiOiI5ZFhycWdHVUJVNjZfUU1VVW5nVUFBIiwidmVyIjoiMS4wIn0.HFxRD52MTiHir3_tFHJiR-RErZ_hZ0tG1m0soC7hg4qpxndNC2IlUrJVQ0xVBtjlP00sPVDtZwBdLp-gHDmBx63s8Dp1mRutjwEMyogh6HNYSYL77A6kE4aS1h9wG4Tk-agNhhPZCZ6z4sIowGD8WE6BmNcnvHW4mkoFvAfcH4uyzOLBw_82ufOKH9EtHPN0b59AaWkwsl3oqybPs08Tumu2xns2YUSpPfS3uAeZBWx6fM7Y14hhV8Tf9fSTL3-9qOi0aiqQ0IkDnwtprSa1GKqsAoYJF4zfHmQ5_huD_u4Jm8CTqiqB-U6Z1qs3yYBYzwZIPHVpGHiUyMtMDKm1OA"}
# Parse the auth response as JSON. Never eval() server-supplied text:
# a compromised or spoofed endpoint could return arbitrary Python and
# have it executed here. json.loads is also what L45 already uses, so
# this makes the notebook self-consistent.
access_token = json.loads(data.decode("utf-8"))["access_token"]
print(access_token)
eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImtnMkxZczJUMENUaklmajRydDZKSXluZW4zOCIsImtpZCI6ImtnMkxZczJUMENUaklmajRydDZKSXluZW4zOCJ9.eyJhdWQiOiJodHRwczovL3N0b3JhZ2UuYXp1cmUuY29tIiwiaXNzIjoiaHR0cHM6Ly9zdHMud2luZG93cy5uZXQvZjA2ZDQ1OWItZDkzNS00YWQ3LWE5ZDMtYTgyMzQzYzRjOWRhLyIsImlhdCI6MTYwNjkyNTMxOCwibmJmIjoxNjA2OTI1MzE4LCJleHAiOjE2MDY5MjkyMTgsImFpbyI6IkUyUmdZSGhuelJod1o4WjU4YjhkUWU5V3lRV2ZBUUE9IiwiYXBwaWQiOiIwN2JlNDJlYS0xY2Y2LTQ5YTYtYmIzYy1lMTVmOGU2Y2U2MTciLCJhcHBpZGFjciI6IjEiLCJpZHAiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC9mMDZkNDU5Yi1kOTM1LTRhZDctYTlkMy1hODIzNDNjNGM5ZGEvIiwib2lkIjoiNDhlYzMyMzgtYTZjZi00NTYwLWE4MjAtYzBiNjgzOGE1NGI5IiwicmgiOiIwLkFBQUFtMFZ0OERYWjEwcXAwNmdqUThUSjJ1cEN2Z2YySEtaSnV6emhYNDVzNWhkYkFBQS4iLCJzdWIiOiI0OGVjMzIzOC1hNmNmLTQ1NjAtYTgyMC1jMGI2ODM4YTU0YjkiLCJ0aWQiOiJmMDZkNDU5Yi1kOTM1LTRhZDctYTlkMy1hODIzNDNjNGM5ZGEiLCJ1dGkiOiI5ZFhycWdHVUJVNjZfUU1VVW5nVUFBIiwidmVyIjoiMS4wIn0.HFxRD52MTiHir3_tFHJiR-RErZ_hZ0tG1m0soC7hg4qpxndNC2IlUrJVQ0xVBtjlP00sPVDtZwBdLp-gHDmBx63s8Dp1mRutjwEMyogh6HNYSYL77A6kE4aS1h9wG4Tk-agNhhPZCZ6z4sIowGD8WE6BmNcnvHW4mkoFvAfcH4uyzOLBw_82ufOKH9EtHPN0b59AaWkwsl3oqybPs08Tumu2xns2YUSpPfS3uAeZBWx6fM7Y14hhV8Tf9fSTL3-9qOi0aiqQ0IkDnwtprSa1GKqsAoYJF4zfHmQ5_huD_u4Jm8CTqiqB-U6Z1qs3yYBYzwZIPHVpGHiUyMtMDKm1OA
Do not forget to URL-encode the directory path!
# List the files under this client's Pipeline-Quality directory.
filesystem = 'datamart'   # path parameter
recursive = False         # query parameter
# All data calls authenticate with the bearer token obtained above.
headers = {
    'clientid': clientid,
    'authorization': "Bearer " + access_token
}
# The directory goes into the query string, so it must be URL-encoded.
directory = "Workspace/Derived/InstancePartition=" + clientid + "/Pipeline-Quality"
encoded_directory = urllib.parse.quote_plus(directory)
listing_url = (
    "/L3/v1/workbench/" + filesystem
    + "?directory=" + encoded_directory
    + "&recursive=" + str(recursive)
)
conn.request("GET", listing_url, headers=headers)
res = conn.getresponse()
data = res.read()
l3_paths = json.loads(data.decode("utf-8"))
l3_paths["paths"]
[{'contentLength': '14648811', 'etag': '0x8D7BC7D2B711F9E', 'lastModified': 'Fri, 28 Feb 2020 18:36:48 GMT', 'name': 'Workspace/Derived/InstancePartition=a9cdadd4-1324-4a7a-842e-fe8fc9fe37f1/Pipeline-Quality/pipeline_quality.csv'}]
# Download the pipeline-quality CSV for this client and load it into a
# pandas DataFrame.
filesystem = "datamart"
path = "/Workspace/Derived/InstancePartition=" + clientid + "/Pipeline-Quality/pipeline_quality.csv"
# NOTE(review): the original computed urllib.parse.quote_plus(path) but
# never used it (dead code, removed). quote_plus would also have encoded
# the "/" separators and broken the route; if the path ever needs
# escaping, use urllib.parse.quote(path), which leaves "/" intact.
conn.request("GET", "/L3/v1/workbench/" + filesystem + path, headers=headers)
res = conn.getresponse()
data = res.read()
# Wrap the decoded body in StringIO so pandas can read it like a file.
data_string = StringIO(data.decode("utf-8"))
pipeline_quality_df = pd.read_csv(data_string)
pipeline_quality_df.head(n=5)
ReportDate | DatePartition | InstancePartition | LastModified | RelationshipId | RelationshipIdentifierId | OpportunityId | OpportunityName | OpportunityUrl | PricingDate | ... | DaysSinceCreatedBin | DaysSinceModifiedBin | DaysToCloseBin | LoanCommitment | LoanBalance | DepositAverageBalance | OtherIncome | AverageAssets | AverageEquity | NetIncome | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | 20200228 | 20191129 | 99b9412b-70fd-4221-8431-d1b3934889ec | 2019-11-29T02:01:04.0827583 | NaN | NaN | 025ea80d-42e8-4c14-b07d-caa34d352cf4 | New Opportunity | NaN | 2019-11-29T02:01:01.771 | ... | 60 to 90 Days | 60 to 90 Days | 60 to 90 Days | 1000000.0 | 0.0 | 0.0 | 0.0 | 0.0 | 50000.0 | -2941.828333 |
1 | 20200228 | 20191129 | 99b9412b-70fd-4221-8431-d1b3934889ec | 2019-11-29T02:01:04.0827583 | NaN | NaN | 025ea80d-42e8-4c14-b07d-caa34d352cf4 | New Opportunity | NaN | 2019-11-29T02:01:01.771 | ... | 60 to 90 Days | 60 to 90 Days | 60 to 90 Days | 1000000.0 | 0.0 | 0.0 | 0.0 | 0.0 | 50000.0 | -2941.828333 |
2 | 20200228 | 20191129 | 99b9412b-70fd-4221-8431-d1b3934889ec | 2019-11-29T02:01:04.0827583 | NaN | NaN | 025ea80d-42e8-4c14-b07d-caa34d352cf4 | New Opportunity | NaN | 2019-11-29T02:01:01.771 | ... | 60 to 90 Days | 60 to 90 Days | 60 to 90 Days | 1000000.0 | 0.0 | 0.0 | 0.0 | 0.0 | 50000.0 | -2941.828333 |
3 | 20200228 | 20191129 | 99b9412b-70fd-4221-8431-d1b3934889ec | 2019-11-29T02:01:04.0827583 | NaN | NaN | 025ea80d-42e8-4c14-b07d-caa34d352cf4 | New Opportunity | NaN | 2019-11-29T02:01:01.771 | ... | 60 to 90 Days | 60 to 90 Days | 60 to 90 Days | 1000000.0 | 0.0 | 0.0 | 0.0 | 0.0 | 50000.0 | -2941.828333 |
4 | 20200228 | 20191129 | 99b9412b-70fd-4221-8431-d1b3934889ec | 2019-11-29T02:01:04.0827583 | NaN | NaN | 025ea80d-42e8-4c14-b07d-caa34d352cf4 | New Opportunity | NaN | 2019-11-29T02:01:01.771 | ... | 60 to 90 Days | 60 to 90 Days | 60 to 90 Days | 1000000.0 | 0.0 | 0.0 | 0.0 | 0.0 | 50000.0 | -2941.828333 |
5 rows × 42 columns
# # API Credentials
# ANDI_API_URL = "https://api-andi.precisionlender.com/"
# API_KEY = # PUT ANDI API KEY HERE
# # Field Tag Skill
# SKILL_PACKAGE_ID = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
# DATASETS_FILE_NAME = "Filename"
# dataString = pipeline_quality_df.to_csv(index=False)
# # builds URL for location of datasets
# andiSkillUrl = "{andiApiUrl}skills/{skillPackageId}/dataSets/{file}".format(andiApiUrl=ANDI_API_URL,
# skillPackageId=SKILL_PACKAGE_ID,
# file=DATASETS_FILE_NAME)
# # pass in API key
# dataHeaders = {'Authorization': "Bearer {apiKey}".format(apiKey=API_KEY)}
# # pass in data file we're putting into skill storage
# andiSkillFile = {'files': dataString}
# # make request, dropping file into skill storage
# andiSkillResponse = requests.post(andiSkillUrl, headers=dataHeaders, files=andiSkillFile)
# print(andiSkillResponse)