Mirror of https://github.com/kemayo/leech
Rearrange the image options to match cover options
parent 542774543a
commit 9508b00bcb
3 changed files with 21 additions and 28 deletions
@@ -82,10 +82,12 @@ Example:
     "logins": {
         "QuestionableQuesting": ["username", "password"]
     },
-    "images": true,
-    "image_format": "png",
-    "compress_images": true,
-    "max_image_size": 100000,
+    "images": {
+        "image_fetch": true,
+        "image_format": "png",
+        "compress_images": true,
+        "max_image_size": 100000,
+    },
     "cover": {
         "fontname": "Comic Sans MS",
         "fontsize": 30,
@@ -101,7 +103,7 @@ Example:
     }
 }
 ```
-> Note: The `images` key is a boolean and can only be `true` or `false`. Booleans in JSON are written in lowercase.
+> Note: The `image_fetch` key is a boolean and can only be `true` or `false`. Booleans in JSON are written in lowercase.
 > If it is `false`, Leech will not download any images.
 > Leech will also ignore the `image_format` key if `images` is `false`.
 
@@ -81,7 +81,7 @@ class CoverOptions:
 
 def chapter_html(
         story,
-        image_bool=False,
+        image_fetch=False,
         image_format="JPEG",
         compress_images=False,
         max_image_size=1_000_000,
@@ -97,15 +97,12 @@ def chapter_html(
                 chapter, titleprefix=title, normalize=normalize))
         else:
             soup = BeautifulSoup(chapter.contents, 'html5lib')
-            if image_bool:
-                all_images = soup.find_all('img')
+            if image_fetch:
+                all_images = soup.find_all('img', src=True)
                 len_of_all_images = len(all_images)
                 print(f"Found {len_of_all_images} images in chapter {i}")
 
                 for count, img in enumerate(all_images):
-                    if not img.has_attr('src'):
-                        print(f"Image {count} has no src attribute, skipping...")
-                        continue
                     print(f"[Chapter {i}] Image ({count+1} out of {len_of_all_images}). Source: ", end="")
                     img_contents = get_image_from_url(img['src'], image_format, compress_images, max_image_size)
                     chapter.images.append(Image(
@@ -154,7 +151,7 @@ def chapter_html(
 def generate_epub(story, cover_options={}, image_options=None, output_filename=None, output_dir=None, normalize=False):
     if image_options is None:
         image_options = {
-            'image_bool': False,
+            'image_fetch': False,
             'image_format': 'JPEG',
             'compress_images': False,
             'max_image_size': 1_000_000
@@ -203,7 +200,7 @@ def generate_epub(story, cover_options={}, image_options=None, output_filename=
             now=datetime.datetime.now(), **metadata)),
         *chapter_html(
             story,
-            image_bool=image_options.get('image_bool'),
+            image_fetch=image_options.get('image_fetch'),
             image_format=image_options.get('image_format'),
             compress_images=image_options.get('compress_images'),
             max_image_size=image_options.get('max_image_size'),

leech.py
@@ -58,26 +58,20 @@ def load_on_disk_options(site):
         with open('leech.json') as store_file:
             store = json.load(store_file)
             login = store.get('logins', {}).get(site.site_key(), False)
-            image_bool: bool = store.get('images', False)
-            image_format: str = store.get('image_format', 'jpeg')
-            compress_images: bool = store.get('compress_images', False)
-            max_image_size: int = store.get('max_image_size', 1_000_000)
             configured_site_options = store.get('site_options', {}).get(site.site_key(), {})
             cover_options = store.get('cover', {})
+            image_options = store.get('images', {})
             output_dir = store.get('output_dir', False)
     except FileNotFoundError:
         logger.info("Unable to locate leech.json. Continuing assuming it does not exist.")
         login = False
-        image_bool = False
-        image_format = 'jpeg'
-        compress_images = False
-        max_image_size = 1_000_000
         configured_site_options = {}
+        image_options = {}
         cover_options = {}
         output_dir = False
     if output_dir and 'output_dir' not in configured_site_options:
         configured_site_options['output_dir'] = output_dir
-    return configured_site_options, login, cover_options, image_bool, image_format, compress_images, max_image_size
+    return configured_site_options, login, cover_options, image_options
 
 
 def create_options(site, site_options, unused_flags):
@@ -88,7 +82,7 @@ def create_options(site, site_options, unused_flags):
 
     flag_specified_site_options = site.interpret_site_specific_options(**unused_flags)
 
-    configured_site_options, login, cover_options, image_bool, image_format, compress_images, max_image_size = load_on_disk_options(site)
+    configured_site_options, login, cover_options, image_options = load_on_disk_options(site)
 
     overridden_site_options = json.loads(site_options)
 
@@ -100,7 +94,7 @@ def create_options(site, site_options, unused_flags):
         list(overridden_site_options.items()) +
         list(flag_specified_site_options.items()) +
         list(cover_options.items()) +
-        list({'image_bool': image_bool, 'image_format': image_format, 'compress_images': compress_images, 'max_image_size': max_image_size}.items())
+        list(image_options.items())
     )
     return options, login
 
@@ -179,10 +173,10 @@ def download(urls, site_options, cache, verbose, normalize, output_dir, **other_
         filename = ebook.generate_epub(
             story, options,
             image_options={
-                'image_bool': options['image_bool'] or False,
-                'image_format': options['image_format'] or 'jpeg',
-                'compress_images': options['compress_images'] or False,
-                'max_image_size': options['max_image_size'] or 1_000_000
+                'image_fetch': options.get('image_fetch', False),
+                'image_format': options.get('image_format', 'jpeg'),
+                'compress_images': options.get('compress_images', False),
+                'max_image_size': options.get('max_image_size', 1_000_000)
             },
             normalize=normalize,
             output_dir=output_dir or options.get('output_dir', os.getcwd())
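For readers following the option flow end to end, here is a minimal sketch (not part of the commit) of how the reorganized options are consumed after this change: `load_on_disk_options` now reads the nested `images` block from `leech.json` as a plain dict, and `generate_epub`/`download` fall back to per-key defaults when entries are missing. The standalone helper name `load_image_options` is hypothetical; only the key names and default values come from the diff above.

```python
import json

def load_image_options(path="leech.json"):
    """Hypothetical standalone helper mirroring the flow shown in the diff above."""
    # Per-key defaults as used by generate_epub()/download() in this commit.
    defaults = {
        'image_fetch': False,
        'image_format': 'JPEG',
        'compress_images': False,
        'max_image_size': 1_000_000,
    }
    try:
        with open(path) as store_file:
            store = json.load(store_file)
    except FileNotFoundError:
        store = {}
    images = store.get('images', {})
    # Guard against the old boolean layout ("images": true); only a dict
    # from the new nested layout is merged over the defaults.
    if not isinstance(images, dict):
        images = {}
    return {**defaults, **images}

if __name__ == '__main__':
    print(load_image_options())
```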