Stars: 1 · Forks: 0
Mirror of https://github.com/kemayo/leech — last synced 2025-12-06 08:22:56 +01:00

Add a bit more messaging around logging in to sites

This commit is contained in:
David Lynch 2025-06-09 20:06:54 -05:00
parent 5bfd1b40a0
commit 6fddf628fb
2 changed files with 20 additions and 16 deletions

View file

@@ -108,6 +108,7 @@ def open_story(site, url, session, login, options):
         )
     if login:
+        logger.info("Attempting to log in as %s", login[0])
         handler.login(login)
     try:

View file

@@ -21,22 +21,25 @@ class ArchiveOfOurOwn(Site):
     def login(self, login_details):
         with requests_cache.disabled():
-            # Can't just pass this url to _soup because I need the cookies later
-            login = self.session.get('https://archiveofourown.org/users/login')
-            soup, nobase = self._soup(login.text)
-            post, action, method = self._form_data(soup.find(id='new_user'))
-            post['user[login]'] = login_details[0]
-            post['user[password]'] = login_details[1]
-            # I feel the session *should* handle this cookies bit for me. But
-            # it doesn't. And I don't know why.
-            result = self.session.post(
-                self._join_url(login.url, action),
-                data=post, cookies=login.cookies
-            )
-            if result.ok:
-                logger.info("Logged in as %s", login_details[0])
-            else:
-                logger.error("Failed to log in as %s", login_details[0])
+            try:
+                # Can't just pass this url to _soup because I need the cookies later
+                login = self.session.get('https://archiveofourown.org/users/login')
+                soup, nobase = self._soup(login.text)
+                post, action, method = self._form_data(soup.find(id='new_user'))
+                post['user[login]'] = login_details[0]
+                post['user[password]'] = login_details[1]
+                # I feel the session *should* handle this cookies bit for me. But
+                # it doesn't. And I don't know why.
+                result = self.session.post(
+                    self._join_url(login.url, action),
+                    data=post, cookies=login.cookies
+                )
+                if result.ok:
+                    logger.info("Logged in as %s", login_details[0])
+                else:
+                    logger.error("Failed to log in as %s", login_details[0])
+            except Exception as e:
+                logger.error("Failed to log in as %s, but so hard that it threw an exception %s", login_details[0], e)

     def extract(self, url):
         workid = re.match(r'^https?://(?:www\.)?archiveofourown\.org/works/(\d+)/?.*', url).group(1)