When requesting a page with GZip compression, I am getting a lot of the following errors:

    System.IO.InvalidDataException: The CRC in GZip footer does not match the CRC calculated from the decompressed data
I am using the native GZipStream to decompress and am looking at addressing this. With that in mind, is there a workaround for this, or another (free?) GZip library that handles this issue properly?
I am verifying that the webResponse ContentEncoding is GZIP.
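One workaround I'm weighing (just a sketch, not yet tested against the failing server) is to drop the manual GZipStream entirely and let HttpWebRequest decompress via AutomaticDecompression:

// Sketch only: let HttpWebRequest negotiate and decompress gzip/deflate itself
// instead of wrapping the response stream in GZipStream manually.
HttpWebRequest webRequest = (HttpWebRequest)WebRequest.Create(url);
webRequest.Method = WebRequestMethods.Http.Get;
webRequest.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;
// No explicit Accept-Encoding header needed; AutomaticDecompression adds it.

using (HttpWebResponse webResponse = (HttpWebResponse)webRequest.GetResponse())
using (Stream stream = webResponse.GetResponseStream())
{
    // stream is already decompressed here
}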
Update 5/11
A simplified snippet:
//Caller
public void SOSampleGet(string url)
{
    // Initialize the WebRequest.
    HttpWebRequest webRequest = (HttpWebRequest)WebRequest.Create(url);
    webRequest.Method = WebRequestMethods.Http.Get;
    webRequest.KeepAlive = true;
    webRequest.Accept = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
    webRequest.Headers.Add("Accept-Encoding", "gzip,deflate");
    webRequest.Referer = WebUtil.GetDomain(url);

    HttpWebResponse webResponse = (HttpWebResponse)webRequest.GetResponse();
    using (Stream stream = GetStreamForResponse(webResponse, READTIMEOUT_CONST))
    {
        //use stream
    }
}
//Method
private static Stream GetStreamForResponse(HttpWebResponse webResponse, int readTimeOut)
{
    Stream stream;
    switch (webResponse.ContentEncoding.ToUpperInvariant())
    {
        case "GZIP":
            stream = new GZipStream(webResponse.GetResponseStream(), CompressionMode.Decompress);
            break;
        case "DEFLATE":
            stream = new DeflateStream(webResponse.GetResponseStream(), CompressionMode.Decompress);
            break;
        default:
            stream = webResponse.GetResponseStream();
            stream.ReadTimeout = readTimeOut;
            break;
    }
    return stream;
}
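For what it's worth, a buffered variant I may also try (the method name here is a placeholder, not part of my current code) reads the whole compressed body into memory before decompressing, to rule out a truncated read on the network stream (assumes .NET 4's Stream.CopyTo):

// Hypothetical diagnostic: copy the raw response bytes into memory first,
// then decompress, to separate network read problems from genuinely bad gzip data.
private static byte[] DecompressBuffered(HttpWebResponse webResponse)
{
    using (Stream responseStream = webResponse.GetResponseStream())
    using (MemoryStream raw = new MemoryStream())
    {
        responseStream.CopyTo(raw);      // read the full compressed payload
        raw.Position = 0;

        using (GZipStream gzip = new GZipStream(raw, CompressionMode.Decompress))
        using (MemoryStream decompressed = new MemoryStream())
        {
            gzip.CopyTo(decompressed);   // CRC is checked when the gzip footer is read
            return decompressed.ToArray();
        }
    }
}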