Merge pull request #3767 from jackwilsdon/skip-azlyrics

Skip AZLyrics on GitHub actions
This commit is contained in:
Adrian Sampson 2020-10-05 20:48:49 -04:00 committed by GitHub
commit ce80459110
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@@ -270,7 +270,9 @@ class LyricsPluginSourcesTest(LyricsGoogleBaseTest):
DEFAULT_SOURCES = [
# dict(artist=u'Santana', title=u'Black magic woman',
# backend=lyrics.MusiXmatch),
dict(DEFAULT_SONG, backend=lyrics.Genius),
dict(DEFAULT_SONG, backend=lyrics.Genius,
# GitHub actions is on some form of Cloudflare blacklist.
skip=os.environ.get('GITHUB_ACTIONS') == 'true'),
]
GOOGLE_SOURCES = [
@@ -279,7 +281,9 @@ class LyricsPluginSourcesTest(LyricsGoogleBaseTest):
path=u'/lyrics/view/the_beatles/lady_madonna'),
dict(DEFAULT_SONG,
url=u'http://www.azlyrics.com',
path=u'/lyrics/beatles/ladymadonna.html'),
path=u'/lyrics/beatles/ladymadonna.html',
# AZLyrics returns a 403 on GitHub actions.
skip=os.environ.get('GITHUB_ACTIONS') == 'true'),
dict(DEFAULT_SONG,
url=u'http://www.chartlyrics.com',
path=u'/_LsLsZ7P4EK-F-LD4dJgDQ/Lady+Madonna.aspx'),
@@ -327,11 +331,8 @@ class LyricsPluginSourcesTest(LyricsGoogleBaseTest):
"""Test default backends with songs known to exist in respective databases.
"""
errors = []
# GitHub actions seems to be on a Cloudflare blacklist, so we can't
# contact genius.
sources = [s for s in self.DEFAULT_SOURCES if
s['backend'] != lyrics.Genius or
os.environ.get('GITHUB_ACTIONS') != 'true']
# Don't test any sources marked as skipped.
sources = [s for s in self.DEFAULT_SOURCES if not s.get("skip", False)]
for s in sources:
res = s['backend'](self.plugin.config, self.plugin._log).fetch(
s['artist'], s['title'])
@@ -346,7 +347,9 @@ class LyricsPluginSourcesTest(LyricsGoogleBaseTest):
"""Test if lyrics present on websites registered in beets google custom
search engine are correctly scraped.
"""
for s in self.GOOGLE_SOURCES:
# Don't test any sources marked as skipped.
sources = [s for s in self.GOOGLE_SOURCES if not s.get("skip", False)]
for s in sources:
url = s['url'] + s['path']
res = lyrics.scrape_lyrics_from_html(
raw_backend.fetch_url(url))