summaryrefslogtreecommitdiff
path: root/AllAboutData
diff options
context:
space:
mode:
author Rasmus Luha <rasmus.luha@gmail.com> 2022-03-17 21:09:15 +0200
committer Rasmus Luha <rasmus.luha@gmail.com> 2022-03-17 21:09:15 +0200
commit 2e4f88a0c7dc518fe4c064d4428bef82f6fee1c8 (patch)
tree a27fd54bfb5cadf4a9f8004b264920d6522450ac /AllAboutData
parent 9381ccc0b597ea148b0e037ba64712bdae57c542 (diff)
API error handling
Diffstat (limited to 'AllAboutData')
-rw-r--r-- AllAboutData/getData.py | 23
1 file changed, 17 insertions(+), 6 deletions(-)
diff --git a/AllAboutData/getData.py b/AllAboutData/getData.py
index fe00c2d..0a1a1f5 100644
--- a/AllAboutData/getData.py
+++ b/AllAboutData/getData.py
@@ -47,9 +47,20 @@ def getTeamsData(url, headers):
querystring = {"page": "0"}
response = requests.request("GET", url+"teams", headers=headers, params=querystring)
+ #If API_KEY doesn't match, or other error, then stop function
+ if response.status_code != 200:
+ print("Failed to fetch from API: response code: " + str(response.status_code))
+ print(response.text)
+ return
+
teamsDf = pd.DataFrame(response.json()["data"])
teamsDf.set_index("id")
teamsDf = teamsDf.drop("id", axis=1)
+
+ # Creating new Data dir to avoid duplicates (due appending)
+ utils.deleteDataDir()
+ utils.addDataDir()
+
teamsDf.to_csv(teamsFile)
print("Teams data stored in Data directory as \"NBAteams.csv\"")
@@ -58,14 +69,17 @@ def getPlayerData(url, headers):
'''Requests Data about NBA players and stores it, based on teams
Takes API url as first and its headers as second argument.'''
-
- print("Stared reading players data")
# First request is made to get the page count to loop
querystring = {"per_page": "100","page":"0"}
response = requests.request("GET", url+"players", headers=headers, params=querystring)
+
+ #If API_KEY doesn't match, or other error, then stop function
+ if response.status_code != 200:
+ return
+
pageCount = response.json()["meta"]["total_pages"] # Got the page count here
-
+ print("Stared reading players data")
print("Pages to read: "+str(pageCount))
for el in range(1, pageCount+1):
@@ -101,9 +115,6 @@ def getPlayerData(url, headers):
if __name__ == "__main__":
- # Creating new Data dir to avoid duplicates (due appending)
- utils.deleteDataDir()
- utils.addDataDir()
getTeamsData(url, headers)
getPlayerData(url, headers)