mirror of https://github.com/mirror/wget.git
[svn] Record downloaded files and downloaded HTML files in all cases.
Published under the subject "Link conversion fix" in <sxsn1a2n2zd.fsf@florida.arsdigita.de>.
parent e4d49f0b40
commit 728584d072
src/ChangeLog
@@ -1,3 +1,13 @@
+2001-03-31  Hrvoje Niksic  <hniksic@arsdigita.com>
+
+	* retr.c (retrieve_url): Call register_download() for downloaded
+	files and register_html() for downloaded HTML files.
+
+	* recur.c (register_download): New function; register here that a
+	file has been downloaded, rather than in recursive_retrieve().
+	(register_html): New function; enqueue the location of HTML files
+	here rather than in recursive_retrieve().
+
 2001-03-31  Hrvoje Niksic  <hniksic@arsdigita.com>
 
 	* main.c (print_help): Use multiple fputs instead of a single ugly
src/recur.c (62 lines changed)
@@ -54,8 +54,9 @@ extern char *version_string;
 static struct hash_table *dl_file_url_map;
 static struct hash_table *dl_url_file_map;
 
-/* List of HTML URLs.  */
-static slist *urls_html;
+/* List of HTML files downloaded in this Wget run.  Used for link
+   conversion after Wget is done.  */
+static slist *downloaded_html_files;
 
 /* List of undesirable-to-load URLs.  */
 static struct hash_table *undesirable_urls;
@@ -106,8 +107,8 @@ recursive_cleanup (void)
   undesirable_urls = NULL;
   free_vec (forbidden);
   forbidden = NULL;
-  slist_free (urls_html);
-  urls_html = NULL;
+  slist_free (downloaded_html_files);
+  downloaded_html_files = NULL;
   FREE_MAYBE (base_dir);
   FREE_MAYBE (robots_host);
   first_time = 1;
@@ -153,25 +154,17 @@ recursive_retrieve (const char *file, const char *this_url)
 	 run.  They should probably be at a different location.  */
       if (!undesirable_urls)
 	undesirable_urls = make_string_hash_table (0);
-      if (!dl_file_url_map)
-	dl_file_url_map = make_string_hash_table (0);
-      if (!dl_url_file_map)
-	dl_url_file_map = make_string_hash_table (0);
 
       hash_table_clear (undesirable_urls);
       string_set_add (undesirable_urls, this_url);
       hash_table_clear (dl_file_url_map);
       hash_table_clear (dl_url_file_map);
-      urls_html = NULL;
       /* Enter this_url to the hash table, in original and "enhanced" form.  */
       u = newurl ();
       err = parseurl (this_url, u, 0);
       if (err == URLOK)
 	{
 	  string_set_add (undesirable_urls, u->url);
-	  hash_table_put (dl_file_url_map, xstrdup (file), xstrdup (u->url));
-	  hash_table_put (dl_url_file_map, xstrdup (u->url), xstrdup (file));
-	  urls_html = slist_prepend (urls_html, file);
 	  if (opt.no_parent)
 	    base_dir = xstrdup (u->dir); /* Set the base dir.  */
 	  /* Set the canonical this_url to be sent as referer.  This
@@ -469,22 +462,6 @@ recursive_retrieve (const char *file, const char *this_url)
 	      xfree (constr);
 	      constr = newloc;
 	    }
-	  /* In case of convert_links: If there was no error, add it to
-	     the list of downloaded URLs.  We might need it for
-	     conversion.  */
-	  if (opt.convert_links && filename)
-	    {
-	      if (dt & RETROKF)
-		{
-		  hash_table_put (dl_file_url_map,
-				  xstrdup (filename), xstrdup (constr));
-		  hash_table_put (dl_url_file_map,
-				  xstrdup (constr), xstrdup (filename));
-		  /* If the URL is HTML, note it.  */
-		  if (dt & TEXTHTML)
-		    urls_html = slist_prepend (urls_html, filename);
-		}
-	    }
 	  /* If there was no error, and the type is text/html, parse
 	     it recursively.  */
 	  if (dt & TEXTHTML)
@@ -547,6 +524,27 @@ recursive_retrieve (const char *file, const char *this_url)
   return RETROK;
 }
 
+void
+register_download (const char *url, const char *file)
+{
+  if (!opt.convert_links)
+    return;
+  if (!dl_file_url_map)
+    dl_file_url_map = make_string_hash_table (0);
+  hash_table_put (dl_file_url_map, xstrdup (file), xstrdup (url));
+  if (!dl_url_file_map)
+    dl_url_file_map = make_string_hash_table (0);
+  hash_table_put (dl_url_file_map, xstrdup (url), xstrdup (file));
+}
+
+void
+register_html (const char *url, const char *file)
+{
+  if (!opt.convert_links)
+    return;
+  downloaded_html_files = slist_prepend (downloaded_html_files, file);
+}
+
 /* convert_links() is called from recursive_retrieve() after we're
    done with an HTML file.  This call to convert_links is not complete
    because it converts only the downloaded files, and Wget cannot know
@@ -570,7 +568,7 @@ recursive_retrieve (const char *file, const char *this_url)
    convert_all_links to go once more through the entire list of
    retrieved HTMLs, and re-convert them.
 
-   All the downloaded HTMLs are kept in urls_html, and downloaded URLs
+   All the downloaded HTMLs are kept in downloaded_html_files, and downloaded URLs
    in urls_downloaded.  From these two lists information is
    extracted. */
 void
@@ -578,11 +576,11 @@ convert_all_links (void)
 {
   slist *html;
 
-  /* Destructively reverse urls_html to get it in the right order.
+  /* Destructively reverse downloaded_html_files to get it in the right order.
      recursive_retrieve() used slist_prepend() consistently.  */
-  urls_html = slist_nreverse (urls_html);
+  downloaded_html_files = slist_nreverse (downloaded_html_files);
 
-  for (html = urls_html; html; html = html->next)
+  for (html = downloaded_html_files; html; html = html->next)
     {
       urlpos *urls, *cur_url;
       char *url;
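Aside: the prepend-then-reverse pattern that register_html() and convert_all_links() rely on is easy to check in isolation. Below is a minimal standalone sketch, not Wget code: the slist type and its helpers are simplified stand-ins for the ones in Wget's utils.c, and the file names are invented.

/* sketch.c -- standalone illustration of the slist bookkeeping used
   by register_html() and convert_all_links().  */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

typedef struct slist_ {
  char *string;
  struct slist_ *next;
} slist;

/* Prepend a copy of STR to LIST in O(1), like Wget's slist_prepend.  */
static slist *
slist_prepend (slist *list, const char *str)
{
  slist *node = malloc (sizeof (slist));
  node->string = strdup (str);
  node->next = list;
  return node;
}

/* Destructively reverse LIST, like Wget's slist_nreverse, so a single
   pass visits the files in download order.  */
static slist *
slist_nreverse (slist *list)
{
  slist *prev = NULL;
  while (list)
    {
      slist *next = list->next;
      list->next = prev;
      prev = list;
      list = next;
    }
  return prev;
}

int
main (void)
{
  slist *downloaded_html_files = NULL;
  slist *html;

  /* Three register_html()-style registrations, in download order.  */
  downloaded_html_files = slist_prepend (downloaded_html_files, "index.html");
  downloaded_html_files = slist_prepend (downloaded_html_files, "a.html");
  downloaded_html_files = slist_prepend (downloaded_html_files, "b.html");

  /* convert_all_links()-style traversal: reverse once, then walk.  */
  downloaded_html_files = slist_nreverse (downloaded_html_files);
  for (html = downloaded_html_files; html; html = html->next)
    printf ("convert links in %s\n", html->string);

  return 0;
}

Printing "index.html", then "a.html", then "b.html" confirms the traversal order matches the download order, which is what the comment in convert_all_links() promises.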
src/recur.h
@@ -24,6 +24,8 @@ void recursive_cleanup PARAMS ((void));
 void recursive_reset PARAMS ((void));
 uerr_t recursive_retrieve PARAMS ((const char *, const char *));
 
+void register_download PARAMS ((const char *, const char *));
+void register_html PARAMS ((const char *, const char *));
 void convert_all_links PARAMS ((void));
 
 #endif /* RECUR_H */
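A note on the PARAMS macro in these declarations: it is the GNU-style wrapper that lets one header serve both ANSI and pre-ANSI compilers. The definition below is the conventional one and is an assumption here, not taken from this commit (Wget defines its own in wget.h):

/* Assumed conventional definition of PARAMS; Wget's actual macro
   lives in wget.h.  With an ANSI compiler the argument list becomes
   a real prototype; otherwise it is dropped.  */
#ifdef PROTOTYPES
# define PARAMS(args) args
#else
# define PARAMS(args) ()
#endif

/* Under ANSI C the new declarations therefore expand to:
   void register_download (const char *, const char *);
   void register_html (const char *, const char *);  */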
src/retr.c (18 lines changed)
@@ -446,11 +446,11 @@ retrieve_url (const char *origurl, char **file, char **newloc,
   assert (u->proto != URLFILE);	/* #### Implement me! */
   mynewloc = NULL;
 
+  if (u->proto == URLHTTP
 #ifdef HAVE_SSL
-  if (u->proto == URLHTTP || u->proto == URLHTTPS )
-#else
-  if (u->proto == URLHTTP)
-#endif /* HAVE_SSL */
+      || u->proto == URLHTTPS
+#endif
+      )
     result = http_loop (u, &mynewloc, dt);
   else if (u->proto == URLFTP)
     {
@@ -546,6 +546,16 @@ retrieve_url (const char *origurl, char **file, char **newloc,
       goto redirected;
     }
 
+  if (u->local)
+    {
+      if (*dt & RETROKF)
+	{
+	  register_download (url, u->local);
+	  if (*dt & TEXTHTML)
+	    register_html (url, u->local);
+	}
+    }
+
   if (file)
     {
       if (u->local)
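The nesting of the flag tests in the new retr.c block matters: register_download() records every successfully saved file, while register_html() additionally queues only the text/html subset. A condensed standalone sketch of that gating follows; the flag values and the maybe_register() helper are invented for illustration (the real RETROKF and TEXTHTML come from wget.h):

#include <stdio.h>

#define RETROKF  0x01   /* stand-in: retrieval succeeded */
#define TEXTHTML 0x02   /* stand-in: document is text/html */

static void register_download (const char *url, const char *file)
{ printf ("downloaded: %s -> %s\n", url, file); }

static void register_html (const char *url, const char *file)
{ printf ("html:       %s -> %s\n", url, file); }

/* Hypothetical helper mirroring the new block in retrieve_url().  */
static void
maybe_register (const char *url, const char *local, int dt)
{
  if (local)                  /* the document was saved to a local file */
    {
      if (dt & RETROKF)       /* ...and the retrieval went OK */
	{
	  register_download (url, local);
	  if (dt & TEXTHTML)  /* HTML also feeds convert_all_links() */
	    register_html (url, local);
	}
    }
}

int
main (void)
{
  maybe_register ("http://example.com/",        "index.html", RETROKF | TEXTHTML);
  maybe_register ("http://example.com/img.png", "img.png",    RETROKF);
  maybe_register ("http://example.com/404",     "404.html",   0);
  return 0;
}

Only the first call reaches register_html(); the second registers a download only; the third registers nothing, which is exactly why this commit fixes link conversion: registration now happens on every successful retrieval path, not just the recursive one.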