Fix a tight-loop when a recursion level is used.

Giuseppe Scrivano 2012-12-09 22:04:58 +01:00
parent 3217905144
commit 5a0f05423a
2 changed files with 7 additions and 3 deletions

src/ChangeLog

@@ -1,5 +1,8 @@
 2012-12-09  Giuseppe Scrivano  <gscrivano@gnu.org>
 
+	* recur.c (retrieve_tree): Do not attempt to re-download a file if
+	`next_url' is known.
+
 	* test.c: Include "wget.h".
 
 2012-12-04  Giuseppe Scrivano  <gscrivano@gnu.org>

src/recur.c

@@ -267,6 +267,7 @@ retrieve_tree (struct url *start_url_parsed, struct iri *pi)
       char *url = NULL, *referer;
       int depth;
       bool html_allowed, css_allowed;
+      bool dequed = false;
 #ifdef ENABLE_THREADS
       int index = 0;
 #endif
@@ -286,10 +287,10 @@ retrieve_tree (struct url *start_url_parsed, struct iri *pi)
 #else
       if (next_url == NULL)
         {
-          if (! url_dequeue (queue, (struct iri **) &next_i,
+          if (url_dequeue (queue, (struct iri **) &next_i,
                         (const char **)&next_url, (const char **)&next_referer,
                         &next_depth, &next_html_allowed, &next_css_allowed))
-            url = NULL;
+            dequed = true;
         }
       i = next_i;
@@ -308,7 +309,7 @@ retrieve_tree (struct url *start_url_parsed, struct iri *pi)
          and again under URL2, but at a different (possibly smaller)
          depth, we want the URL's children to be taken into account
          the second time. */
-      if (url && dl_url_file_map && hash_table_contains (dl_url_file_map, url))
+      if (dequed && url && dl_url_file_map && hash_table_contains (dl_url_file_map, url))
        {
          file = xstrdup (hash_table_get (dl_url_file_map, url));
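
For context, the sketch below shows the shape of the fix. It is not wget's
actual code: queue_pop(), the sample URLs, and the printf calls are
hypothetical stand-ins for url_dequeue() and the real download logic, used
only to illustrate how the new `dequed' flag keeps a URL carried over in
`next_url' out of the already-downloaded branch.

/* Minimal sketch of the control flow after this commit.  NOT wget's
   actual code: queue_pop() stands in for url_dequeue(), and printf
   stands in for the download / file-reuse logic.  */
#include <stdbool.h>
#include <stdio.h>

static const char *pending[] = { "http://example.com/a",
                                 "http://example.com/b" };
static size_t pending_pos = 0;

/* Hypothetical stand-in for url_dequeue(): returns true and yields
   the next queued URL, or false once the queue is empty.  */
static bool
queue_pop (const char **out)
{
  if (pending_pos >= sizeof pending / sizeof pending[0])
    return false;
  *out = pending[pending_pos++];
  return true;
}

int
main (void)
{
  /* Look-ahead slot: a URL may already be known from the previous
     iteration, like `next_url' in retrieve_tree.  */
  const char *next_url = "http://example.com/start";

  while (1)
    {
      const char *url;
      bool dequed = false;        /* the flag added by this commit */

      if (next_url == NULL)
        {
          if (queue_pop (&next_url))
            dequed = true;        /* set only on a fresh dequeue */
        }
      if (next_url == NULL)
        break;                    /* queue drained */

      url = next_url;
      next_url = NULL;

      /* The already-downloaded branch (hash_table_contains on
         dl_url_file_map in the real code) now requires `dequed', so a
         URL that was merely carried over in next_url cannot re-enter
         it on every pass -- which, per the commit message and
         ChangeLog entry, is what caused the tight loop.  */
      if (dequed)
        printf ("freshly dequeued, may reuse a downloaded file: %s\n", url);
      else
        printf ("carried over via next_url, download normally: %s\n", url);
    }
  return 0;
}

The design mirrors the diff: url_dequeue's result now records whether the
URL was freshly dequeued, and the already-downloaded check runs only in
that case, so a URL already known through `next_url' is never re-fetched
from the download map on every iteration.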