fetchart: Add some error handling to prevent crashes
Today I had some network problems reaching dbpedia.org, which made
beets crash because a requests.exceptions.ConnectionError was raised
("[Errno 113] No route to host"). This commit adds error handling
around network requests to prevent such crashes in the future.
parent 81c5ae3fdf
commit c07903ed66
1 changed file with 66 additions and 43 deletions
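Every hunk below applies the same pattern: the network call goes inside a try/except for requests.RequestException (the base class of requests.exceptions.ConnectionError, timeouts, and other transport-level errors), a debug message is logged, and the art source gives up instead of letting the exception propagate. A minimal standalone sketch of the idea follows; the function name fetch_candidates and the module-level logger are illustrative placeholders, not part of the fetchart plugin:

import logging

import requests

log = logging.getLogger(__name__)


def fetch_candidates(url, **kwargs):
    """Return the HTTP response, or None if the network request fails."""
    try:
        # Hypothetical stand-in for the plugin's self.request() helper.
        response = requests.get(url, timeout=10, **kwargs)
    except requests.RequestException:
        # Covers ConnectionError ("[Errno 113] No route to host" lands
        # here), DNS failures, and timeouts.
        log.debug(u'error receiving response from %s', url)
        return None
    return response

Callers treat a None result as "no art available from this source" and move on, which is exactly how the early returns in the hunks below behave.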
@@ -365,12 +365,17 @@ class GoogleImages(RemoteArtSource):
         if not (album.albumartist and album.album):
             return
         search_string = (album.albumartist + ',' + album.album).encode('utf-8')
-        response = self.request(self.URL, params={
-            'key': self.key,
-            'cx': self.cx,
-            'q': search_string,
-            'searchType': 'image'
-        })
+
+        try:
+            response = self.request(self.URL, params={
+                'key': self.key,
+                'cx': self.cx,
+                'q': search_string,
+                'searchType': 'image'
+            })
+        except requests.RequestException:
+            self._log.debug(u'google: error receiving response')
+            return
 
         # Get results using JSON.
         try:
@@ -406,10 +411,14 @@ class FanartTV(RemoteArtSource):
         if not album.mb_releasegroupid:
             return
 
-        response = self.request(
-            self.API_ALBUMS + album.mb_releasegroupid,
-            headers={'api-key': self.PROJECT_KEY,
-                     'client-key': self.client_key})
+        try:
+            response = self.request(
+                self.API_ALBUMS + album.mb_releasegroupid,
+                headers={'api-key': self.PROJECT_KEY,
+                         'client-key': self.client_key})
+        except requests.RequestException:
+            self._log.debug(u'fanart.tv: error receiving response')
+            return
 
         try:
             data = response.json()
@@ -545,16 +554,22 @@ class Wikipedia(RemoteArtSource):
 
         # Find the name of the cover art filename on DBpedia
         cover_filename, page_id = None, None
-        dbpedia_response = self.request(
-            self.DBPEDIA_URL,
-            params={
-                'format': 'application/sparql-results+json',
-                'timeout': 2500,
-                'query': self.SPARQL_QUERY.format(
-                    artist=album.albumartist.title(), album=album.album)
-            },
-            headers={'content-type': 'application/json'},
-        )
+
+        try:
+            dbpedia_response = self.request(
+                self.DBPEDIA_URL,
+                params={
+                    'format': 'application/sparql-results+json',
+                    'timeout': 2500,
+                    'query': self.SPARQL_QUERY.format(
+                        artist=album.albumartist.title(), album=album.album)
+                },
+                headers={'content-type': 'application/json'},
+            )
+        except requests.RequestException:
+            self._log.debug(u'dbpedia: error receiving response')
+            return
+
         try:
             data = dbpedia_response.json()
             results = data['results']['bindings']
@@ -584,17 +599,21 @@ class Wikipedia(RemoteArtSource):
             lpart, rpart = cover_filename.rsplit(' .', 1)
 
         # Query all the images in the page
-        wikipedia_response = self.request(
-            self.WIKIPEDIA_URL,
-            params={
-                'format': 'json',
-                'action': 'query',
-                'continue': '',
-                'prop': 'images',
-                'pageids': page_id,
-            },
-            headers={'content-type': 'application/json'},
-        )
+        try:
+            wikipedia_response = self.request(
+                self.WIKIPEDIA_URL,
+                params={
+                    'format': 'json',
+                    'action': 'query',
+                    'continue': '',
+                    'prop': 'images',
+                    'pageids': page_id,
+                },
+                headers={'content-type': 'application/json'},
+            )
+        except requests.RequestException:
+            self._log.debug(u'wikipedia: error receiving response')
+            return
 
         # Try to see if one of the images on the pages matches our
         # incomplete cover_filename
@@ -613,18 +632,22 @@ class Wikipedia(RemoteArtSource):
             return
 
         # Find the absolute url of the cover art on Wikipedia
-        wikipedia_response = self.request(
-            self.WIKIPEDIA_URL,
-            params={
-                'format': 'json',
-                'action': 'query',
-                'continue': '',
-                'prop': 'imageinfo',
-                'iiprop': 'url',
-                'titles': cover_filename.encode('utf-8'),
-            },
-            headers={'content-type': 'application/json'},
-        )
+        try:
+            wikipedia_response = self.request(
+                self.WIKIPEDIA_URL,
+                params={
+                    'format': 'json',
+                    'action': 'query',
+                    'continue': '',
+                    'prop': 'imageinfo',
+                    'iiprop': 'url',
+                    'titles': cover_filename.encode('utf-8'),
+                },
+                headers={'content-type': 'application/json'},
+            )
+        except requests.RequestException:
+            self._log.debug(u'wikipedia: error receiving response')
+            return
 
         try:
             data = wikipedia_response.json()