Mirror of https://github.com/kemayo/leech, synced 2025-12-06 08:22:56 +01:00
Configurable whether to always convert images
This commit is contained in:
parent b6310658e8
commit 400c5cc801

4 changed files with 20 additions and 10 deletions
@@ -65,9 +65,6 @@ Leech can not save images in SVG because it is not supported by Pillow.
 Leech uses [Pillow](https://pillow.readthedocs.io/en/stable/index.html) for image manipulation and conversion. If you want to use a different
 image format, you can install the required dependencies for Pillow and you will probably have to tinker with Leech. See the [Pillow documentation](https://pillow.readthedocs.io/en/stable/installation.html#external-libraries) for more information.
 
-By default, Leech will try and save all non-animated images as JPEG.
-The only animated images that Leech will save are GIFs.
-
 To configure image support, you will need to create a file called `leech.json`. See the section below for more information.
 
 Configuration
@@ -87,6 +84,7 @@ Example:
         "image_format": "png",
         "compress_images": true,
         "max_image_size": 100000,
+        "always_convert_images": true
     },
     "cover": {
         "fontname": "Comic Sans MS",
@@ -128,6 +126,8 @@ Example:
 
 > Warning: Leech will not compress GIFs, that might damage the animation.
 
+> Note: if `always_convert_images` is `true`, Leech will convert all non-GIF images to the specified `image_format`.
+
 Arbitrary Sites
 ---
 
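Inside Leech, the `images` block of `leech.json` described above ends up as a plain Python dict of options (see the generate_epub and leech.py hunks below). As a quick illustration, here is a hedged sketch of that dict with every key the new code reads; the values are examples rather than defaults, and the comments state assumed meanings, not documented ones.

# Illustrative only: the keys match the diff below, the values are made up.
image_options = {
    'image_fetch': True,             # fetch and embed images at all (assumed meaning)
    'image_format': 'png',           # target format used when converting
    'compress_images': True,         # shrink images above max_image_size
    'max_image_size': 100_000,       # compression threshold (assumed to be bytes)
    'always_convert_images': True,   # new flag: force every non-GIF image into image_format
}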
@@ -83,6 +83,7 @@ def chapter_html(
     story,
     image_fetch=False,
     image_format="JPEG",
+    always_convert_images=False,
     compress_images=False,
     max_image_size=1_000_000,
     titleprefix=None,
@@ -104,7 +105,7 @@ def chapter_html(
 
         for count, img in enumerate(all_images):
             print(f"[{chapter.title}] Image ({count+1} out of {len_of_all_images}). Source: ", end="")
-            img_contents = get_image_from_url(img['src'], image_format, compress_images, max_image_size)
+            img_contents = get_image_from_url(img['src'], image_format, compress_images, max_image_size, always_convert_images)
             chapter.images.append(Image(
                 path=f"images/ch{i}_leechimage_{count}.{img_contents[1]}",
                 contents=img_contents[0],
@@ -154,7 +155,8 @@ def generate_epub(story, cover_options={}, image_options=None, output_filename=
         'image_fetch': False,
         'image_format': 'JPEG',
         'compress_images': False,
-        'max_image_size': 1_000_000
+        'max_image_size': 1_000_000,
+        'always_convert_images': False,
     }
     dates = list(story.dates())
     metadata = {
@@ -204,6 +206,7 @@ def generate_epub(story, cover_options={}, image_options=None, output_filename=
             image_format=image_options.get('image_format'),
             compress_images=image_options.get('compress_images'),
             max_image_size=image_options.get('max_image_size'),
+            always_convert_images=image_options.get('always_convert_images'),
             normalize=normalize
         ),
         EpubFile(
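Since generate_epub reads each image option with .get(), a caller enabling the new behaviour would pass the whole dict rather than a single key. A minimal, hypothetical call site might look like the sketch below; the `story` object is assumed to come from one of Leech's site scrapers.

# Hypothetical usage sketch; argument names follow the generate_epub signature above.
generate_epub(
    story,
    image_options={
        'image_fetch': True,
        'image_format': 'JPEG',
        'compress_images': False,
        'max_image_size': 1_000_000,
        'always_convert_images': True,   # re-encode every non-GIF image as JPEG
    },
)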
@@ -85,7 +85,8 @@ def get_image_from_url(
     url: str,
     image_format: str = "JPEG",
     compress_images: bool = False,
-    max_image_size: int = 1_000_000
+    max_image_size: int = 1_000_000,
+    always_convert: bool = False
 ) -> Tuple[bytes, str, str]:
     """
     Based on make_cover_from_url(), this function takes in the image url usually gotten from the `src` attribute of
@@ -128,16 +129,21 @@ def get_image_from_url(
 
         PIL_image = Image.open(image)
 
-        if str(PIL_image.format).lower() == "gif":
+        current_format = str(PIL_image.format)
+
+        if current_format.lower() == "gif":
             PIL_image = Image.open(image)
             if PIL_image.info['version'] not in [b"GIF89a", "GIF89a"]:
                 PIL_image.info['version'] = b"GIF89a"
             return PIL_Image_to_bytes(PIL_image, "GIF"), "gif", "image/gif"
 
         if compress_images:
-            PIL_image = compress_image(image, max_image_size, str(PIL_image.format))
+            PIL_image = compress_image(image, max_image_size, current_format)
 
-        return PIL_Image_to_bytes(PIL_image, image_format), image_format, f"image/{image_format.lower()}"
+        if always_convert:
+            current_format = image_format
+
+        return PIL_Image_to_bytes(PIL_image, current_format), current_format, f"image/{current_format.lower()}"
 
     except Exception as e:
         logger.info("Encountered an error downloading image: " + str(e))
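Outside the diff, the new decision order in get_image_from_url is: pass GIFs through untouched, optionally compress, and only re-encode to `image_format` when `always_convert` is set; otherwise the image keeps its original format. Here is a minimal standalone sketch of that flow using Pillow; it is an illustration under stated assumptions (no download session, no compression step, made-up function name), not Leech's actual function.

import io
from typing import Tuple

from PIL import Image


def convert_like_leech(data: bytes, image_format: str = "JPEG",
                       always_convert: bool = False) -> Tuple[bytes, str, str]:
    """Illustration of the GIF pass-through and always_convert logic above."""
    pil_image = Image.open(io.BytesIO(data))
    current_format = str(pil_image.format)

    # GIFs keep their own format so animation frames are not flattened.
    if current_format.lower() == "gif":
        return data, "gif", "image/gif"

    # The compress_images / max_image_size branch is omitted from this sketch.

    # Only force a re-encode when always_convert is set; otherwise keep the
    # format the image arrived in, which is the new default behaviour.
    if always_convert:
        current_format = image_format

    if current_format.upper() == "JPEG":
        pil_image = pil_image.convert("RGB")  # JPEG has no alpha channel
    buffer = io.BytesIO()
    pil_image.save(buffer, format=current_format.upper())
    return buffer.getvalue(), current_format.lower(), f"image/{current_format.lower()}"

With always_convert=False a downloaded PNG stays a PNG; with always_convert=True it is re-encoded into image_format, which is the behaviour the new README note describes.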

leech.py
@@ -176,7 +176,8 @@ def download(urls, site_options, cache, verbose, normalize, output_dir, **other_
             'image_fetch': options.get('image_fetch', False),
             'image_format': options.get('image_format', 'jpeg'),
             'compress_images': options.get('compress_images', False),
-            'max_image_size': options.get('max_image_size', 1_000_000)
+            'max_image_size': options.get('max_image_size', 1_000_000),
+            'always_convert_images': options.get('always_convert_images', False)
         },
         normalize=normalize,
         output_dir=output_dir or options.get('output_dir', os.getcwd())