Mirror of https://github.com/Readarr/Readarr (synced 2026-01-03 22:28:23 +01:00)
Fixed: Download mediacover using configured proxy.
parent 3fa605177c
commit 138a188cc9
4 changed files with 42 additions and 10 deletions
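The old GZipWebClient call never went through the application's request pipeline, so the proxy configured in the settings was not applied to media cover downloads. The hunks below route the download through the shared HttpClient instead: the request carries a target stream (the new HttpRequest.ResponseStream property) and the dispatcher copies the response body into it. A minimal BCL-only sketch of the underlying idea, using a placeholder proxy address and URL rather than the project's types:

using System.IO;
using System.Net;

class ProxiedDownloadSketch
{
    static void Main()
    {
        // Placeholder values; in the real code the proxy comes from the app's settings.
        var request = (HttpWebRequest)WebRequest.Create("https://example.com/cover.jpg");
        request.Proxy = new WebProxy("http://127.0.0.1:8080");

        using (var response = (HttpWebResponse)request.GetResponse())
        using (var body = response.GetResponseStream())
        using (var file = new FileStream("cover.jpg", FileMode.Create, FileAccess.ReadWrite))
        {
            // Same pattern the commit introduces: stream the body straight to disk.
            body.CopyTo(file);
        }
    }
}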
@@ -278,6 +278,18 @@ public void should_send_headers(string header, string value)
             response.Resource.Headers[header].ToString().Should().Be(value);
         }
 
+        [Test]
+        public void should_download_file()
+        {
+            var file = GetTempFilePath();
+
+            var url = "https://sonarr.tv/img/slider/seriesdetails.png";
+
+            Subject.DownloadFile(url, file);
+
+            File.Exists(file).Should().BeTrue();
+        }
+
         [Test]
         public void should_not_download_file_with_error()
         {
@@ -32,7 +32,7 @@ public HttpResponse GetResponse(HttpRequest request, CookieContainer cookies)
         {
             var webRequest = (HttpWebRequest)WebRequest.Create((Uri)request.Url);
 
-            if (PlatformInfo.IsMono)
+            if (PlatformInfo.IsMono && request.ResponseStream == null)
             {
                 // On Mono GZipStream/DeflateStream leaks memory if an exception is thrown, use an intermediate buffer in that case.
                 webRequest.AutomaticDecompression = DecompressionMethods.None;
@@ -120,12 +120,20 @@ public HttpResponse GetResponse(HttpRequest request, CookieContainer cookies)
             {
                 try
                 {
-                    data = responseStream.ToBytes();
-
-                    if (PlatformInfo.IsMono && httpWebResponse.ContentEncoding == "gzip")
+                    if (request.ResponseStream != null)
                     {
-                        data = data.Decompress();
-                        httpWebResponse.Headers.Remove("Content-Encoding");
+                        // A target ResponseStream was specified, write to that instead.
+                        responseStream.CopyTo(request.ResponseStream);
+                    }
+                    else
+                    {
+                        data = responseStream.ToBytes();
+
+                        if (PlatformInfo.IsMono && httpWebResponse.ContentEncoding == "gzip")
+                        {
+                            data = data.Decompress();
+                            httpWebResponse.Headers.Remove("Content-Encoding");
+                        }
                     }
                 }
                 catch (Exception ex)
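The dispatcher changes above are the core of the mechanism: when a request carries a ResponseStream, the body is copied straight into that stream and the data buffer stays null, and the Mono intermediate-buffer workaround is skipped so the framework's automatic decompression still applies before the copy. The next hunk guards the response-content trace log for exactly this null-data case. A condensed sketch of the branch with illustrative names, not the project's API:

using System.IO;

static class ResponseBodySketch
{
    // Condensed illustration of the new branch in GetResponse: copy into the
    // caller-supplied stream when one is given, otherwise buffer to a byte array
    // as before. Names and signature are illustrative only.
    public static byte[] ReadBody(Stream responseStream, Stream targetStream)
    {
        if (targetStream != null)
        {
            // A target stream was specified, write to it instead of buffering;
            // the returned buffer stays null in this case.
            responseStream.CopyTo(targetStream);
            return null;
        }

        using (var buffer = new MemoryStream())
        {
            responseStream.CopyTo(buffer);
            return buffer.ToArray();
        }
    }
}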
@@ -133,7 +133,7 @@ private HttpResponse ExecuteRequest(HttpRequest request, CookieContainer cookieC
                 response = interceptor.PostResponse(response);
             }
 
-            if (request.LogResponseContent)
+            if (request.LogResponseContent && response.ResponseData != null)
             {
                 _logger.Trace("Response content ({0} bytes): {1}", response.ResponseData.Length, response.Content);
             }
@@ -242,9 +242,19 @@ public void DownloadFile(string url, string fileName, string userAgent = null)
                 _logger.Debug("Downloading [{0}] to [{1}]", url, fileName);
 
                 var stopWatch = Stopwatch.StartNew();
-                var webClient = new GZipWebClient();
-                webClient.Headers.Add(HttpRequestHeader.UserAgent, userAgent ?? _userAgentBuilder.GetUserAgent());
-                webClient.DownloadFile(url, fileName);
+                using (var fileStream = new FileStream(fileName, FileMode.Create, FileAccess.ReadWrite))
+                {
+                    var request = new HttpRequest(url);
+
+                    if (userAgent.IsNotNullOrWhiteSpace())
+                    {
+                        request.Headers.Set("User-Agent", userAgent);
+                    }
+
+                    request.ResponseStream = fileStream;
+                    var response = Get(request);
+                }
+
                 stopWatch.Stop();
                 _logger.Debug("Downloading Completed. took {0:0}s", stopWatch.Elapsed.Seconds);
             }
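DownloadFile now opens the destination file itself, attaches it to the request as the ResponseStream and sends the request through Get, so the configured proxy, the default user agent and the rest of the shared pipeline apply to cover downloads; the using block also ensures the FileStream is flushed and closed before the elapsed time is logged. A usage sketch from a caller's point of view, assuming the IHttpClient interface from the surrounding codebase (only DownloadFile's signature is taken from the hunk above):

using NzbDrone.Common.Http;

// Hypothetical consumer; the class name is illustrative.
public class CoverFetcher
{
    private readonly IHttpClient _httpClient;

    public CoverFetcher(IHttpClient httpClient)
    {
        _httpClient = httpClient;
    }

    public void Fetch(string coverUrl, string destinationPath)
    {
        // Goes through the shared request pipeline, so the configured proxy and
        // the default user agent are applied, unlike the old WebClient-based path.
        _httpClient.DownloadFile(coverUrl, destinationPath);
    }
}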
@@ -1,5 +1,6 @@
 using System;
 using System.Collections.Generic;
+using System.IO;
 using System.Text;
 using NzbDrone.Common.EnvironmentInfo;
 using NzbDrone.Common.Extensions;
@@ -42,6 +43,7 @@ public HttpRequest(string url, HttpAccept httpAccept = null)
         public bool StoreResponseCookie { get; set; }
         public TimeSpan RequestTimeout { get; set; }
         public TimeSpan RateLimit { get; set; }
+        public Stream ResponseStream { get; set; }
 
         public override string ToString()
         {
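ResponseStream is a plain System.IO.Stream, so the target does not have to be a file; the caller owns the stream's lifetime, and the dispatcher only copies into it, leaving the buffered ResponseData null for such requests. A sketch that streams a response into memory instead, again assuming an IHttpClient instance from the surrounding codebase (only HttpRequest and ResponseStream come from the hunks above):

using System.IO;
using NzbDrone.Common.Http;

public static class ResponseStreamUsage
{
    public static byte[] GetToMemory(IHttpClient client, string url)
    {
        using (var memory = new MemoryStream())
        {
            var request = new HttpRequest(url);

            // The dispatcher copies the body into this stream instead of
            // buffering it into response.ResponseData.
            request.ResponseStream = memory;
            client.Get(request);

            return memory.ToArray();
        }
    }
}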