Add a default value for opt.jobs. Remove a now-redundant variable.

Ilim Ugur 2012-08-16 15:39:24 +03:00
parent c25d01feb7
commit 76790a386a
4 changed files with 35 additions and 33 deletions
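In short, the commit gives opt.jobs a compile-time default (1 when built with ENABLE_THREADS) and drops the local N_THREADS/numthreads copies, letting the segmented-download helpers read opt.jobs directly. The following is a minimal, self-contained sketch of that pattern only; the tiny struct options, defaults() and init_ranges() stubs are illustrative stand-ins, not wget's real definitions.

#include <stdio.h>
#include <stdlib.h>

struct options { int jobs; };        /* stand-in for wget's struct options */
static struct options opt;           /* one global, as with wget's opt */
static int *ranges;                  /* stand-in for the range table */

static void
defaults (void)
{
  opt.jobs = 1;                      /* default: a single download thread */
}

/* Before: init_ranges (int numthreads).  After: no parameter; the helper
   reads the global option itself, so callers cannot pass a stale copy.  */
static void
init_ranges (void)
{
  ranges = malloc (opt.jobs * sizeof *ranges);
}

int
main (void)
{
  defaults ();
  init_ranges ();
  printf ("allocated %d range slot(s)\n", opt.jobs);
  free (ranges);
  return 0;
}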

@@ -342,6 +342,9 @@ defaults (void)
 opt.cookies = true;
 opt.verbose = -1;
 opt.ntry = 20;
+#ifdef ENABLE_THREADS
+opt.jobs = 1;
+#endif
 #ifdef ENABLE_METALINK
 opt.n_retries = 1;
 #endif

@@ -13,13 +13,13 @@
 static struct range *ranges;
 void
-init_ranges(int numthreads)
+init_ranges()
 {
-ranges = malloc (numthreads * (sizeof *ranges));
+ranges = malloc (opt.jobs * (sizeof *ranges));
 }
 int
-fill_ranges_data(int numthreads, int num_of_resources, long long int file_size,
+fill_ranges_data(int num_of_resources, long long int file_size,
 long int chunk_size)
 {
 int i, r;
@@ -74,14 +74,14 @@ spawn_thread (struct s_thread_ctx *thread_ctx, int index, int resource)
 }
 int
-collect_thread (sem_t *retr_sem, struct s_thread_ctx *thread_ctx, int numthreads)
+collect_thread (sem_t *retr_sem, struct s_thread_ctx *thread_ctx)
 {
 int k, ret;
 do
 ret = sem_wait (retr_sem);
 while (ret < 0 && errno == EINTR);
-for (k = 0; k < numthreads; k++)
+for (k = 0; k < opt.jobs; k++)
 if (thread_ctx[k].used && thread_ctx[k].terminated)
 {
 url_free (thread_ctx[k].url_parsed);
@@ -106,14 +106,14 @@ segmented_retrieve_url (void *arg)
 }
 void
-merge_temp_files(char **inputs, const char *output, int numfiles)
+merge_temp_files(char **inputs, const char *output)
 {
 FILE *out, *in;
 int j, ret;
 void *buf = malloc (MIN_CHUNK_SIZE);
 out = fopen (output, "w");
-for(j = 0; j < numfiles; ++j)
+for(j = 0; j < opt.jobs; ++j)
 {
 in = fopen(inputs[j],"r");
 ret = MIN_CHUNK_SIZE;
@@ -129,10 +129,10 @@ merge_temp_files(char **inputs, const char *output, int numfiles)
 }
 void
-delete_temp_files(char **files, int numfiles)
+delete_temp_files(char **files)
 {
 int j = 0;
-while(j < numfiles)
+while(j < opt.jobs)
 unlink(files[j++]);
 }

@@ -56,9 +56,9 @@ struct s_thread_ctx
 uerr_t status;
 };
-void init_ranges(int);
+void init_ranges();
-int fill_ranges_data(int, int, long long int, long int);
+int fill_ranges_data(int, long long int, long int);
 void clean_range_res_data(int);
@@ -66,12 +66,12 @@ void clean_ranges();
 int spawn_thread (struct s_thread_ctx*, int, int);
-int collect_thread (sem_t *, struct s_thread_ctx *, int);
+int collect_thread (sem_t *, struct s_thread_ctx *);
 static void * segmented_retrieve_url (void *);
-void merge_temp_files(char **, const char *, int);
+void merge_temp_files(char **, const char *);
-void delete_temp_files(char **, int);
+void delete_temp_files(char **);
 #endif /* MULTI_H */

@@ -1070,7 +1070,7 @@ retrieve_from_file (const char *file, bool html, int *count)
 /*GSoC wget*/
 char *temp, **files;
 int i, j, r, index, dt, url_err, retries;
-int ret, N_THREADS = opt.jobs > 0 ? opt.jobs : 1;
+int ret;
 int ranges_covered, chunk_size, num_of_resources;
 pthread_t thread;
 sem_t retr_sem;
@@ -1079,31 +1079,30 @@
 metalink_resource_t* resource;
 struct s_thread_ctx *thread_ctx;
-files = malloc (N_THREADS * (sizeof *files));
-init_ranges (N_THREADS);
-thread_ctx = malloc (N_THREADS * (sizeof *thread_ctx));
+files = malloc (opt.jobs * (sizeof *files));
+init_ranges ();
+thread_ctx = malloc (opt.jobs * (sizeof *thread_ctx));
 retries = 0;
 i = 0;
 while ((file = metalink->files[i]) != NULL)
 {
-N_THREADS = (opt.jobs > 0 ? opt.jobs : 1);
-memset(thread_ctx, '\0', N_THREADS * (sizeof *thread_ctx));
+memset(thread_ctx, '\0', opt.jobs * (sizeof *thread_ctx));
 num_of_resources = 0;
 while (file->resources[num_of_resources])
 ++num_of_resources;
-chunk_size = (file->size) / N_THREADS;
+chunk_size = (file->size) / opt.jobs;
 if(chunk_size < MIN_CHUNK_SIZE)
 chunk_size = MIN_CHUNK_SIZE;
-j = fill_ranges_data(N_THREADS, num_of_resources, file->size, chunk_size);
+j = fill_ranges_data(num_of_resources, file->size, chunk_size);
-if(j < N_THREADS)
-N_THREADS = j;
+if(j < opt.jobs)
+opt.jobs = j;
 /* Assign temporary file names. */
-for (j = 0; j < N_THREADS; ++j)
+for (j = 0; j < opt.jobs; ++j)
 {
 files[j] = malloc(L_tmpnam * sizeof(char));
 temp = tmpnam(files[j]);
@@ -1117,7 +1116,7 @@ retrieve_from_file (const char *file, bool html, int *count)
 sem_init (&retr_sem, 0, 0);
 j = ranges_covered = 0;
-for (r = 0; r < N_THREADS; ++r)
+for (r = 0; r < opt.jobs; ++r)
 {
 resource = file->resources[j];
 if (!resource)
@@ -1143,7 +1142,7 @@ retrieve_from_file (const char *file, bool html, int *count)
 free(thread_ctx);
 clean_range_res_data(num_of_resources);
 clean_ranges ();
-for (r = 0; r < N_THREADS; ++r)
+for (r = 0; r < opt.jobs; ++r)
 free(files[r]);
 free(files);
 return URLERROR;
@@ -1151,9 +1150,9 @@ retrieve_from_file (const char *file, bool html, int *count)
 ++j;
 }
-while(ranges_covered < N_THREADS)
+while (ranges_covered < opt.jobs)
 {
-r = collect_thread (&retr_sem, thread_ctx, N_THREADS);
+r = collect_thread (&retr_sem, thread_ctx);
 ++ranges_covered;
 status = thread_ctx[r].status;
@@ -1200,7 +1199,7 @@ retrieve_from_file (const char *file, bool html, int *count)
 free(thread_ctx);
 clean_range_res_data(num_of_resources);
 clean_ranges ();
-for (r = 0; r < N_THREADS; ++r)
+for (r = 0; r < opt.jobs; ++r)
 free(files[r]);
 free(files);
 return URLERROR;
@@ -1234,7 +1233,7 @@ retrieve_from_file (const char *file, bool html, int *count)
 {
 int res;
-merge_temp_files(files, file->name, N_THREADS);
+merge_temp_files(files, file->name);
 res = verify_file_hash(file->name, file->checksums);
 if(!res)
 {
@@ -1253,10 +1252,10 @@ retrieve_from_file (const char *file, bool html, int *count)
 }
 }
-delete_temp_files(files, N_THREADS);
+delete_temp_files(files);
 clean_range_res_data(num_of_resources);
-for (j = 0; j < N_THREADS; ++j)
+for (j = 0; j < opt.jobs; ++j)
 free(files[j]);
 ++i;
 }
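
For reference, the chunking arithmetic in the hunk at -1079 works out as follows: the file is split into opt.jobs pieces, the piece size is clamped to MIN_CHUNK_SIZE, and fill_ranges_data() can therefore come back with fewer ranges than requested, at which point opt.jobs is lowered to match. A rough worked example follows; the 64 KiB MIN_CHUNK_SIZE and the ceiling division are assumptions for illustration, not values taken from the multi.* sources.

#include <stdio.h>

#define MIN_CHUNK_SIZE (64 * 1024)   /* assumed value, illustration only */

int
main (void)
{
  long long file_size = 100 * 1024;  /* a 100 KiB file */
  int jobs = 4;                      /* what the user asked for (opt.jobs) */

  long long chunk_size = file_size / jobs;   /* 25 KiB */
  if (chunk_size < MIN_CHUNK_SIZE)
    chunk_size = MIN_CHUNK_SIZE;             /* clamped to 64 KiB */

  /* Ranges actually needed; this mirrors why fill_ranges_data() can
     return fewer ranges than opt.jobs, which the caller then adopts
     as the new opt.jobs.  */
  int needed = (int) ((file_size + chunk_size - 1) / chunk_size);    /* 2 */

  printf ("chunk_size=%lld bytes, %d of %d requested threads needed\n",
          chunk_size, needed, jobs);
  return 0;
}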