Update initial URL check and final download printing in main.c. Update the logic of merge_temp_files() in multi.c.

Ilim Ugur 2012-08-12 23:57:59 +03:00
parent 54e5b5d373
commit 5abb1dc188
4 changed files with 33 additions and 16 deletions

View File

@@ -1207,7 +1207,11 @@ for details.\n\n"));
       exit (1);
     }
-  if (!nurl && !opt.input_filename)
+  if (!nurl && !opt.input_filename
+#ifdef ENABLE_METALINK
+      && !opt.metalink_file
+#endif
+      )
     {
       /* No URL specified. */
       fprintf (stderr, _("%s: missing URL\n"), exec_name);
@@ -1452,6 +1456,7 @@ outputting to a regular file.\n"));
     {
       int count;
       int status;
       status = retrieve_from_file (opt.input_filename, opt.force_html, &count);
       inform_exit_status (status);
       if (!count)
@@ -1459,6 +1464,21 @@ outputting to a regular file.\n"));
                    opt.input_filename);
     }
+#ifdef ENABLE_METALINK
+  /* And then from the metalink file, if any. */
+  if (opt.metalink_file)
+    {
+      int count;
+      int status;
+      status = retrieve_from_file (opt.metalink_file, opt.force_html, &count);
+      inform_exit_status (status);
+      if (!count)
+        logprintf (LOG_NOTQUIET, _("No URLs found in %s.\n"),
+                   opt.input_filename);
+    }
+#endif
   /* Print broken links. */
   if (opt.recursive && opt.spider)
     print_broken_links ();
@@ -1466,7 +1486,11 @@ outputting to a regular file.\n"));
   /* Print the downloaded sum. */
   if ((opt.recursive || opt.page_requisites
        || nurl > 1
-       || (opt.input_filename && total_downloaded_bytes != 0))
+       || (opt.input_filename && total_downloaded_bytes != 0)
+#ifdef ENABLE_METALINK
+       || (opt.metalink_file && total_downloaded_bytes != 0)
+#endif
+      )
       &&
       total_downloaded_bytes != 0)
     {

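Note on the metalink retrieval block added above: it mirrors the existing --input-file handling, but the logprintf call in the added hunk still reports opt.input_filename even though this branch reads from opt.metalink_file. A minimal sketch of that message with the metalink path instead; this correction is not part of the commit, only the final argument differs:

      if (!count)
        /* Report the metalink file rather than the --input-file name. */
        logprintf (LOG_NOTQUIET, _("No URLs found in %s.\n"),
                   opt.metalink_file);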
View File

@@ -63,34 +63,26 @@ segmented_retrieve_url (void *arg)
 }
 void
-merge_temp_files(const char *file, int numfiles)
+merge_temp_files(const char **inputs, const char *output, int numfiles)
 {
   FILE *out, *in;
-  char *file_name = malloc(strlen("temp_") + strlen(file) + (sizeof ".")-1
-                           + (numfiles/10 + 1) + sizeof "");
   int j, ret;
-  void *buf = malloc(MIN_CHUNK_SIZE);
+  /* FIXME: Check for errors in allocations. */
+  void *buf = malloc (MIN_CHUNK_SIZE);
 
-  sprintf(file_name, "%s", file);
-  out = fopen(file_name,"w");
+  out = fopen (output, "w");
   for(j = 0; j < numfiles; ++j)
     {
-      sprintf(file_name, TEMP_PREFIX "%s.%d", file, j);
-      in = fopen(file_name,"r");
+      in = fopen(inputs[j],"r");
       ret = MIN_CHUNK_SIZE;
       while(ret == MIN_CHUNK_SIZE)
         {
           ret = fread(buf, 1, MIN_CHUNK_SIZE, in);
           fwrite(buf, 1, ret, out);
           /* FIXME: CHECK FOR ERRORS. */
         }
       fclose(in);
     }
   fclose(out);
   free(buf);
-  free(file_name);
 }
 void

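The rewritten merge_temp_files() now takes the list of per-thread temporary files and the output name from the caller instead of rebuilding the names itself, but the FIXMEs above leave malloc, fopen, fread and fwrite unchecked. A minimal sketch of what those checks could look like, assuming the same headers multi.c already uses (stdio.h, stdlib.h) and the existing MIN_CHUNK_SIZE constant; the int return value and the helper name are illustrative, not part of this commit:

/* Sketch only: merge with the error checks the FIXMEs call for.
   Returns 0 on success, -1 on any allocation or I/O failure.  */
static int
merge_temp_files_checked (const char **inputs, const char *output, int numfiles)
{
  FILE *out, *in;
  size_t ret;
  int j, err = 0;
  void *buf = malloc (MIN_CHUNK_SIZE);

  if (!buf)
    return -1;
  out = fopen (output, "w");
  if (!out)
    {
      free (buf);
      return -1;
    }
  for (j = 0; j < numfiles && !err; ++j)
    {
      in = fopen (inputs[j], "r");
      if (!in)
        {
          err = 1;
          break;
        }
      ret = MIN_CHUNK_SIZE;
      while (ret == MIN_CHUNK_SIZE && !err)
        {
          ret = fread (buf, 1, MIN_CHUNK_SIZE, in);
          /* A short read with ferror set, or a short write, aborts the merge. */
          if (ferror (in) || fwrite (buf, 1, ret, out) != ret)
            err = 1;
        }
      fclose (in);
    }
  if (fclose (out) != 0)
    err = 1;
  free (buf);
  return err ? -1 : 0;
}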
View File

@@ -64,7 +64,7 @@ int collect_thread (sem_t *, struct s_thread_ctx *);
 static void * segmented_retrieve_url (void *);
-void merge_temp_files(const char *, int);
+void merge_temp_files(const char **, const char *, int);
 void delete_temp_files(const char *, int);

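With the new prototype, callers own the temporary-file names and pass them in. A tiny usage sketch under the new signature; the file names here are made up for illustration (following the TEMP_PREFIX "%s.%d" pattern removed from merge_temp_files()), and delete_temp_files() presumably still derives the per-chunk names itself from its base-name argument:

  const char *chunk_paths[2] = { "temp_example.bin.0", "temp_example.bin.1" };
  merge_temp_files (chunk_paths, "example.bin", 2);
  delete_temp_files ("example.bin", 2);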
View File

@@ -1067,6 +1067,7 @@ retrieve_from_file (const char *file, bool html, int *count)
   if(j < N_THREADS)
     N_THREADS = j;
+  /* Assign temporary file names. */
   for (j = 0; j < N_THREADS; ++j)
     files[j] = malloc(strlen("temp_") + strlen(file->name)
                       + (sizeof ".")-1 + (N_THREADS/10 + 1) + sizeof "");
@@ -1198,7 +1199,7 @@ retrieve_from_file (const char *file, bool html, int *count)
     {
       int res;
-      merge_temp_files(file->name, N_THREADS);
+      merge_temp_files(files, file->name, N_THREADS);
       res = verify_file_hash(file->name, file->checksums);
       if(!res)
         {
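The per-name allocation in the first hunk sizes each buffer for the "temp_" prefix, the target name, the separating dot, enough digits for the chunk index, and the terminating NUL. A sketch of how each entry of files[] is presumably filled before the threads run and before the merge call shown in the second hunk; the formatting follows the sprintf removed from merge_temp_files() in multi.c, assuming TEMP_PREFIX expands to "temp_":

  for (j = 0; j < N_THREADS; ++j)
    sprintf (files[j], TEMP_PREFIX "%s.%d", file->name, j);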