Fix a hang when dealing with really large numbers of files in an incremental recursion scan.
This commit is contained in:
Wayne Davison
2009-10-29 17:35:50 -07:00
parent 23a0d1e200
commit fe16d9a67d
2 changed files with 5 additions and 0 deletions

View File

@@ -2349,6 +2349,7 @@ struct file_list *send_file_list(int f, int argc, char *argv[])
flist->sorted = flist->files;
flist_sort_and_clean(flist, 0);
file_total += flist->used;
file_old_total += flist->used;
if (numeric_ids <= 0 && !inc_recurse)
send_id_list(f);

View File

@@ -42,6 +42,7 @@ extern int make_backups;
extern int inplace;
extern int batch_fd;
extern int write_batch;
extern int file_old_total;
extern struct stats stats;
extern struct file_list *cur_flist, *first_flist, *dir_flist;
@@ -197,8 +198,11 @@ void send_files(int f_in, int f_out)
end_progress(0);
}
if (inc_recurse && first_flist) {
file_old_total -= first_flist->used;
flist_free(first_flist);
if (first_flist) {
if (first_flist == cur_flist)
file_old_total = cur_flist->used;
write_ndx(f_out, NDX_DONE);
continue;
}