Compare commits

266 Commits
v12.0 ... v14.1

Author SHA1 Message Date
Andrey Prygunkov
64abcbb577 version 14.1 (27 Nov. 2014) 2015-02-15 13:38:40 +00:00
Andrey Prygunkov
f1b6492d1c fixed: unlike all other scripts, the update-script should not be automatically terminated when the program quits 2015-02-14 21:12:42 +00:00
Andrey Prygunkov
19d297f934 fixed: the program could crash during download when article cache was active (more likely on very high download speeds) 2015-02-11 22:38:59 +00:00
Andrey Prygunkov
a23128f25f addition to r1205: when sorting by priority in auto mode (without specifying + or -) the default order is descending since that is more logical for priority 2015-02-07 22:23:17 +00:00
Andrey Prygunkov
567f7c3028 added on-demand queue sorting; one click on a column title in web interface sorts the selected or all items; if the items were already sorted in that order they are sorted backwards; in other words the second click sorts in descending order; when sorting selected items they are also grouped together in case there were holes between selected items; RPC-method "editqueue" has new command "GroupSort", parameter "Text" must be one of: "name", "priority", "category", "size", "left"; add character "+" or "-" to sort to explicitly define ascending or descending order (for example "name-"); if none of these characters are used the auto-mode is active: the items are sorted in ascending order first, and if nothing changed they are sorted again in descending order 2015-02-07 19:17:49 +00:00
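A minimal sketch of triggering the new "GroupSort" command from Python over NZBGet's XML-RPC interface; the URL, the default credentials and the exact parameter order of "editqueue" are assumptions here, so verify them against the API documentation of your version.

```python
# Sketch: sort the whole queue by name, descending, via the "GroupSort"
# command. URL/credentials and the editqueue parameter order
# (Command, Offset, Text, IDs) are assumptions.
from xmlrpc.client import ServerProxy

# assumed defaults: control port 6789, user "nzbget", default control password
server = ServerProxy("http://nzbget:tegbzn6789@127.0.0.1:6789/xmlrpc")

# "name-" = sort by name, "-" forces descending order;
# an empty ID list applies the sort to all items
ok = server.editqueue("GroupSort", 0, "name-", [])
print("sorted" if ok else "sort failed")
```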
Andrey Prygunkov
30af4cfc3d fixed: XML-RPC method "history" returned invalid xml when used with parameter "hidden=true" (JSON-RPC worked correctly) 2015-02-06 21:33:42 +00:00
Andrey Prygunkov
019fcf519a addition to r1182 and fix for r1193: unused connections are now closed only if there are no active connections on the same level; this reduces the reconnects during active download (which may be caused by the random connection pick-up implemented in r1182) 2015-02-03 20:05:49 +00:00
Andrey Prygunkov
1645562d78 eliminated compiler warning 2015-02-01 15:02:10 +00:00
Andrey Prygunkov
fab726482c improved windows installer: during an update the installer stops a possibly running NZBGet automatically 2015-01-27 20:32:08 +00:00
Andrey Prygunkov
351cb9835f suppress printing of memory leak reports when the program terminates because of wrong command line switches (Windows debug mode only) 2015-01-27 20:30:14 +00:00
Andrey Prygunkov
d0d59469bc fixed: remote command "-L HA" (which prints the history including hidden records) could crash 2015-01-27 20:26:41 +00:00
Andrey Prygunkov
577d934ccd improved timeout handling when establishing connections 2015-01-27 20:23:46 +00:00
Andrey Prygunkov
4438131d56 fixed: web-browser was launched on program reload; now it is launched only if the reload is initiated via tray menu (Windows only) 2015-01-26 21:26:32 +00:00
Andrey Prygunkov
7b13c9a9ba addition to r1182: fixed compilation error on certain systems (added missing include-directive) 2015-01-25 20:15:18 +00:00
Andrey Prygunkov
7d60566f3c reverted r1193 because of many problems reported by users (as a temporary solution) 2015-01-25 20:08:59 +00:00
Andrey Prygunkov
e9a7c2f184 fixed possible crash when using remote command "-B dump" to print debug info 2015-01-24 19:25:43 +00:00
Andrey Prygunkov
3e07873575 addition to r1182: unused connections are now closed only if there are no active connections on the same level; this reduces the reconnects during active download (which may be caused by the random connection pick-up implemented in r1182) 2015-01-24 18:49:59 +00:00
Andrey Prygunkov
2f17584ab4 update info in about dialogs (Windows and Mac OSX) 2015-01-24 12:44:27 +00:00
Andrey Prygunkov
02f87b23fb fixed: command "download again" was not disabled for hidden history records and resulted in a crash 2015-01-23 20:01:08 +00:00
Andrey Prygunkov
31032e29f5 when launching web-browser from the tray icon now using the real IP-address from option "ControlIP" instead of hard coded "127.0.0.1" (windows only) 2015-01-23 19:41:45 +00:00
Andrey Prygunkov
11bfb57809 added support for password list file; new option "UnpackPassFile" to set the location of the file; during unpack the passwords are tried from the file until unpack succeeds or all passwords have been tried; implemented different strategies for rar4- and rar5-archives taking into account the features of the formats; for rar5-archives a wrong password is reported by unrar unambiguously and the program can immediately try other passwords from the password list; for rar4-archives and for 7z-archives it is not possible to differentiate between a damaged archive and a wrong password; for those archives, if the first unpack attempt (without password) fails the program executes par-check (preferably quick par-check if enabled via option "ParQuick") before trying the passwords from the list; another optimization is that the password list is tried only when the first unpack attempt (without password) reports a password error or decryption errors; this saves unnecessary unpack attempts for damaged unencrypted archives 2015-01-22 20:57:39 +00:00
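For illustration, a hedged sketch of preparing a password list for the new "UnpackPassFile" option; the one-password-per-line format and the paths are assumptions based on this description, not confirmed by the entry itself.

```python
# Sketch: write a simple password list for the "UnpackPassFile" option.
# The one-password-per-line format and the path are assumptions.
from pathlib import Path

passwords = ["first-guess", "second-guess", "release-group-default"]
pass_file = Path("/home/user/.nzbget/unpack-passwords.txt")  # hypothetical path
pass_file.write_text("\n".join(passwords) + "\n")

# In nzbget.conf the option would then point at this file, e.g.:
# UnpackPassFile=/home/user/.nzbget/unpack-passwords.txt
```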
Andrey Prygunkov
0e83ef32bb addition to r1187: renaming of hidden history items is now also supported 2015-01-17 16:34:49 +00:00
Andrey Prygunkov
86ae9e94cd name and category of history items can now be changed in web-interface; RPC-API method "editqueue" extended with new actions "HistorySetName" and "HistorySetCategory" 2015-01-15 20:46:17 +00:00
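A brief sketch of the new history-edit actions over XML-RPC; the connection details and the "editqueue" parameter order carry the same caveats as the GroupSort sketch above, and NZBID 42 is a made-up example.

```python
# Sketch: rename a history item and move it to another category via the
# new "HistorySetName" / "HistorySetCategory" actions. Parameter order
# and credentials are assumptions.
from xmlrpc.client import ServerProxy

server = ServerProxy("http://nzbget:tegbzn6789@127.0.0.1:6789/xmlrpc")
server.editqueue("HistorySetName", 0, "My.Renamed.Download", [42])
server.editqueue("HistorySetCategory", 0, "Movies", [42])
```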
Andrey Prygunkov
2388250dfa added optional parameters to remote command "--append/-A" allowing duplicate key, duplicate mode and duplicate score to be passed; removed parameters "F" and "U" of command "--append/-A", which were used to set the mode (file or URL), which is now detected automatically; the parameters are still supported for compatibility 2015-01-15 18:09:37 +00:00
Andrey Prygunkov
d947ea65a2 added confirmation dialogs to recently implemented mass history edit commands "mark as good" and "mark as bad" 2015-01-13 21:37:04 +00:00
Andrey Prygunkov
4a11c04742 added subcommand "HA" to remote command "--list/-L" to list the whole history including hidden records 2015-01-06 20:00:22 +00:00
Andrey Prygunkov
14b24e6050 added support for negative numeric values in rss filter (useful for fields "dupescore" and "priority") 2014-12-21 19:28:38 +00:00
Andrey Prygunkov
4402d6fbd6 improved news server connections handling: if a download of an article fails due to connection error the news server becomes temporarily disabled (blocked) for several seconds (defined by option "RetryInterval"); the download is then retried on another news server (of the same level) if available; if no other news servers (of the same level) exist the program will retry the same news server after its block interval expires; this increases failure tolerance when multiple news servers are used 2014-12-21 18:21:49 +00:00
Andrey Prygunkov
c69b81404c small change in error message text 2014-12-21 18:21:16 +00:00
Andrey Prygunkov
241a3efacf options "UnrarCmd" and "SevenZipCmd" can include extra switches to pass to unrar/7-zip. This allows easy passing of additional parameters without creating proxy shell scripts. 2014-12-12 18:22:20 +00:00
Andrey Prygunkov
185d52a9d4 added new option "ServerX.Retention" to define server retention time (days); files older than configured server retention time are not even tried on this server 2014-12-11 20:45:08 +00:00
Andrey Prygunkov
6b933f18dd options "ParIgnoreExt" and "ExtCleanupDisk" can now contain wildcard characters * and ? 2014-12-08 21:36:23 +00:00
Andrey Prygunkov
31dbbb546c created installer for windows; the program is installed into "program files" by default; the working directory with all subdirectories is now placed into "AppData" directory; the batch files nzbget-start.bat and nzbget-recovery-mode.bat are not needed and not installed anymore 2014-11-30 17:08:00 +00:00
Andrey Prygunkov
ac4f8a30e5 improved application for Windows: added tray icon (near clock); left click on icon pauses/resumes download; right click opens a menu with important functions; console window can be shown/hidden via preferences (is hidden by default); new preference to automatically start the program after login; new preference to show browser on start; new preference to hide tray icon; menu commands to show important folders in windows explorer (destination, etc.); on first start the config file is now placed into subdirectory "NZBGet" inside standard AppData-directory; default destination and other directories are now placed in the AppData\NZBGet-directory instead of programs directory; this allows installing the program into "program files"-directory since the program does not write into the programs directory anymore; the program exe has an icon now; if the exe is started from windows explorer the program starts in application mode; if the exe is called from command prompt the program works in console mode 2014-11-30 14:24:23 +00:00
Andrey Prygunkov
a060531ae3 actions for history items can now be performed for multiple (selected) records: post-process again, download again, mark as good, mark as bad; extended RPC-API method "editqueue": for history-records of type "URL" the action "HistoryRedownload" can now be used as synonym to "HistoryReturn" (makes it easier to redownload multiple items of different types (URL and NZB) with one API call) 2014-11-25 19:23:17 +00:00
Andrey Prygunkov
fb77937acd fixed: unrar may sometimes fail with message "no files to extract" 2014-11-25 19:18:07 +00:00
Andrey Prygunkov
9d9a81710f fixed false memory leak warning when compiled in debug mode (Windows only) 2014-11-24 22:31:57 +00:00
Andrey Prygunkov
c3b4438d1f fixed: program could crash during unpack (bug introduced in v14-r1130) 2014-11-22 18:03:08 +00:00
Andrey Prygunkov
eeb3679b82 addition to r1159: fixed: menubar icon was not visible on OSX in dark mode 2014-11-18 18:26:24 +00:00
Andrey Prygunkov
d2d9bfb4bd system sleep on idle state is now prevented during download and post-processing (Mac OSX only) 2014-11-16 16:24:06 +00:00
Andrey Prygunkov
2dcbe4628b fixed: menubar icon was not visible on OSX in dark mode 2014-11-15 19:05:45 +00:00
Andrey Prygunkov
634247676a fixed: quick par-check could hang on certain nzb-files containing multiple par-sets (occurred only in 64 bit mode) 2014-11-14 19:38:41 +00:00
Andrey Prygunkov
1a01b323e5 updated version string to 15.0-testing 2014-11-14 19:29:27 +00:00
Andrey Prygunkov
c71a33eba0 updated version string (preparing to release 14.0) 2014-11-09 10:04:04 +00:00
Andrey Prygunkov
0387c7a8e1 updated ChangeLog 2014-11-09 09:50:41 +00:00
Andrey Prygunkov
1ae0404592 addition to r1152: fixed: the old directory was sometimes not removed when the download was renamed or assigned to another category (bug introduced in v14) 2014-11-03 19:55:25 +00:00
Andrey Prygunkov
6796bef261 fixed: the old directory was sometimes not removed when the download was renamed or assigned to another category (bug introduced in v14) 2014-11-01 13:05:30 +00:00
Andrey Prygunkov
a5bd6dc7c5 fixed: description was not shown correctly for queue scripts with defined events (bug introduced in r1148) 2014-11-01 11:00:40 +00:00
Andrey Prygunkov
4e7b9290ac fixed: program could crash during restart if an extension script was running; now all active scripts are terminated during restart 2014-10-21 20:21:31 +00:00
Andrey Prygunkov
9acbee976d fixed potential crash which could happen in debug mode during program restart 2014-10-21 19:32:07 +00:00
Andrey Prygunkov
e6f4f8c05e queue scripts can now define what events they are interested in; this avoids unnecessary calling of the scripts which do not process certain events 2014-10-20 21:17:54 +00:00
Andrey Prygunkov
c89cb3d287 addition to r1145: fixed a compiling error on OS/2 2014-10-19 21:03:11 +00:00
Andrey Prygunkov
c5cb95fd8c additional parameters (env. vars) are now passed to scan scripts: NZBNP_DUPEKEY, NZBNP_DUPESCORE, NZBNP_DUPEMODE; scan-scripts can now set dupekey, dupemode and dupescore by printing new special commands 2014-10-16 20:40:09 +00:00
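A hedged sketch of a scan-script using the new environment variables named in this entry; the exact text of the "[NZB] DUPESCORE=..." command is an assumption modelled on the "[NZB] ..." command style quoted elsewhere in this log.

```python
#!/usr/bin/env python
# Sketch of a scan-script: read the duplicate-related variables added in
# this commit and adjust the dupe score. The "[NZB] DUPESCORE=..." command
# text is an assumption; verify against the scan-script documentation.
import os

dupekey = os.environ.get("NZBNP_DUPEKEY", "")
dupescore = int(os.environ.get("NZBNP_DUPESCORE", "0") or 0)
dupemode = os.environ.get("NZBNP_DUPEMODE", "SCORE")

# Hypothetical policy: boost the score for items that carry a dupekey
if dupekey and dupescore == 0:
    print("[NZB] DUPESCORE=%d" % (dupescore + 100))
```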
Andrey Prygunkov
fa46714b19 debug builds for Windows now print call stack on crash to the log-file, which is very useful for debugging 2014-10-15 21:58:30 +00:00
Andrey Prygunkov
bfbcde3b47 fixed: RPC-method "editqueue" with action "HistoryReturn" caused a crash if the history item did not have any remaining (parked) files 2014-10-14 16:12:25 +00:00
Andrey Prygunkov
c6dc66cb45 addition to r1128: paths with drive letters are now considered absolute on all OSes not only on Windows because there are also other OSes using drive letters 2014-10-12 21:34:26 +00:00
Andrey Prygunkov
a9e6912a2f added column "age" to history tab in web-interface 2014-10-12 14:23:54 +00:00
Andrey Prygunkov
eb8885b915 fixed: a superfluous comma at the end of option "TaskX.Time" was interpreted as an error or could cause a crash 2014-10-11 22:10:51 +00:00
Andrey Prygunkov
029c808458 added news server name to message "Cancelling hanging download ..." to help identify problematic servers 2014-10-10 21:13:02 +00:00
Andrey Prygunkov
9269f69a38 improvement in quick par-verification: if unpack fails (excluding invalid password errors) and quick par-check does not find any errors or quick par-check was already performed the full par-check is performed; this helps in rare situations when files were correctly downloaded (and therefore assumed correct by quick par-check) but incorrectly written to disk due to abnormal program termination (caused by bugs or hardware crashes) 2014-10-09 21:11:42 +00:00
Andrey Prygunkov
63d938ae04 fixed: RPC-method "saveconfig" did not work via XML-RPC (but worked via JSON-RPC) 2014-10-09 16:06:39 +00:00
Andrey Prygunkov
a8aa110f43 added missing new line character at the end of the help screen printed by "nzbget -h" 2014-10-05 15:13:30 +00:00
Andrey Prygunkov
6f7af5aef4 option "ParThreads" can now be set to "0" (which is a default setting now) to let the program automatically determine the number of CPU cores; this works on major modern platforms 2014-10-04 19:34:03 +00:00
Andrey Prygunkov
6afbade8f7 improved scan-scripts: if the category of nzb-file is changed by the scan-script the assigned post-processing scripts are now automatically reset according to the new category 2014-10-03 20:58:11 +00:00
Andrey Prygunkov
5ec38498f1 quick par verification now works even if articles do not contain CRCs (although it is a rare case) 2014-09-27 22:18:49 +00:00
Andrey Prygunkov
e206d3a833 fixed several compiler warnings 2014-09-27 21:04:06 +00:00
Andrey Prygunkov
6529cf6498 addition to r1089: fixed: env. var "NZBPP_PARSTATUS" was not set to "FAILURE" for deleted/marked downloads 2014-09-27 21:03:23 +00:00
Andrey Prygunkov
21f5de8de8 improved cleanup: 1) disk cleanup is now not performed if unrar failed even if par-check was successful; 2) queue cleanup (for remaining par2-files) is now smarter: the files are kept (parked) if they can be used by command "post-process again" and are removed otherwise 2014-09-25 22:08:57 +00:00
Andrey Prygunkov
837d5c7f68 unpack is now immediately aborted if unrar reports wrong password (works for rar5 as well as for older formats); the unpack error status "PASSWORD" is now set for older formats (not only rar5) 2014-09-24 20:52:28 +00:00
Andrey Prygunkov
f90a53c2b0 addition to r1127: better compatibility with unrar 5 2014-09-22 19:37:52 +00:00
Andrey Prygunkov
e184e5b7c5 fixed: relative destination paths (options "DestDir" and "CategoryX.DestDir") caused failures during unrar 2014-09-21 15:37:10 +00:00
Andrey Prygunkov
1ca1381e05 unpack is now immediately aborted when unrar reports CRC errors 2014-09-18 16:48:00 +00:00
Andrey Prygunkov
811f807de6 fixed: split .cbr-files were not properly joined 2014-09-16 22:15:02 +00:00
Andrey Prygunkov
95b76bc586 when option "ContinuePartial" is active the current state is saved not more often than once per second instead of after every downloaded article; this significantly reduces the amount of disk writes at high download speeds 2014-09-16 20:54:50 +00:00
Andrey Prygunkov
90fac39a26 added commands "PausePostProcess" and "UnpausePostProcess" to scheduler 2014-09-15 16:28:55 +00:00
Andrey Prygunkov
44cf680f14 an improvement in duplicate check: if a new download with empty dupekey and empty dupescore is marked as "dupe" and another download with the same name has a non-empty dupekey or dupescore, these properties are copied from that download; this is useful because the new download is most likely another upload of the same file and it should have the same duplicate properties for best duplicate handling results 2014-09-13 21:30:42 +00:00
Andrey Prygunkov
d0754e022f addition to r1121: now fixed on windows too: inner files (files listed in nzb) bigger than 2GB could not be downloaded 2014-09-08 19:35:11 +00:00
Andrey Prygunkov
ed7245c852 fixed: inner files (files listed in nzb) bigger than 2GB could not be downloaded 2014-09-07 10:00:52 +00:00
Andrey Prygunkov
2b44618858 added validation check for option "ParBuffer" when compiled in 32-bit 2014-09-06 19:50:23 +00:00
Andrey Prygunkov
a3634d689e fixed: web interface showed an error box when trying to submit files with extensions other than .nzb, although these files could be processed by a scan-script; now the error is not shown if any scan-script is set in options 2014-09-05 20:22:49 +00:00
Andrey Prygunkov
96e8cbd3c1 small improvement in multithreading par-repair: the number of repair threads is now automatically reduced to the amount of bad blocks if there are too few of them; if there is only one bad block the multithreading par-repair is switched off to avoid overhead of thread synchronisation (which does not make sense for one working thread) 2014-09-03 17:34:36 +00:00
Andrey Prygunkov
658d41f0fd refactor: moved nzbget specific code from libpar2 into nzbget units in order to make updates of libpar2 easier in the future 2014-09-03 17:28:29 +00:00
Andrey Prygunkov
9dab8fd7dc added multithreading par-repair: does not depend on other libraries and works on all platforms and all CPUs (with multiple cores); new option "ParThreads" to set the number of threads for repairing; new option "ParBuffer" to define the memory limit to use during par-repair 2014-09-02 23:07:32 +00:00
Andrey Prygunkov
2cb9d81a3c fixed: the program could crash during deleting of active download (bug introduced in r1108) 2014-08-30 15:10:49 +00:00
Andrey Prygunkov
2b4662856e better error reporting if a temp file could not be found 2014-08-29 18:12:44 +00:00
Andrey Prygunkov
44e949eafe fixed: crash and possible queue corruption when option "ParCleanupQueue" was active (which is a default setting) (bug introduced in r1108) 2014-08-29 15:05:27 +00:00
Andrey Prygunkov
aa3acd12a6 for downloads delayed due to propagation delay (option "PropagationDelay") a new badge "propagation" is now shown near download name 2014-08-28 20:51:29 +00:00
Andrey Prygunkov
1c00e62d3e fixed: the "pause extra pars"-state was missing in the pause/resume-loop of curses interface, key "P" 2014-08-28 20:29:51 +00:00
Andrey Prygunkov
0d630d9ea3 when connecting in remote mode using command line parameter "--connect/-C" the option "ControlIP" is now interpreted as "127.0.0.1" if it is set to "0.0.0.0" (instead of failing with an error message) 2014-08-28 20:22:20 +00:00
Andrey Prygunkov
7de78cd088 added new option "UrlTimeout" to set timeout for URL fetching and RSS feed fetching; renamed option "ConnectionTimeout" to "ArticleTimeout" 2014-08-28 19:31:31 +00:00
Andrey Prygunkov
0f98c72f1e fixed: cancelling of post-processing could delete the nzb-item completely (bug introduced in v14) 2014-08-28 19:15:42 +00:00
Andrey Prygunkov
459a79a1f1 improved pp-script EMail.py: now it can send time statistics (thanks to JVM for the patch) 2014-08-27 16:27:40 +00:00
Andrey Prygunkov
aaea8d9717 fixed: scheduler tasks were not checked after wake up if the sleep time was longer than 90 minutes 2014-08-25 20:12:38 +00:00
Andrey Prygunkov
d5b99732d1 fixed: no warning was printed for invalid values of option "ArticleCache" (max value 1900 when compiled in 32 bit mode) 2014-08-25 20:04:09 +00:00
Andrey Prygunkov
f5cef8a997 fixed: par-check could fail on valid files (bug introduced in libpar2 0.3) 2014-08-24 12:51:42 +00:00
Andrey Prygunkov
44907aa700 when quick par verification is active the repaired files are not verified to save time; the only reason for incorrect files after repair can be hardware errors (memory, disk) but this is not something NZBGet should care about 2014-08-22 17:24:34 +00:00
Andrey Prygunkov
54303d464b fixed: one log-message was printed only to global log but not to nzb-item pp-log 2014-08-22 17:05:30 +00:00
Andrey Prygunkov
4e83a68bf1 when a download is downloaded again (from history) the queue-scripts are now called with event "NZB_ADDED" 2014-08-22 16:57:54 +00:00
Andrey Prygunkov
00893a6cca updated configure-script to not require gcrypt for newer GnuTLS versions (when gcrypt is not needed) 2014-08-20 20:57:40 +00:00
Andrey Prygunkov
008768cea1 better error reporting during par-check 2014-08-20 18:51:13 +00:00
Andrey Prygunkov
43e096c6dc refactor: eliminated two compiler warnings 2014-08-19 20:53:00 +00:00
Andrey Prygunkov
b10b48f5e9 the list of scripts (pp-scripts, queue-scripts, etc.) is now read once on program start instead of every time a script is executed; that eliminates unnecessary disk access; the list of post-processing parameters shown on page "Postprocess" of download details dialog is now built using the preloaded list of scripts instead of reading the script config sections on every load of web-interface; the settings page of web-interface loads available scripts every time the page is shown; this allows configuring newly added scripts without restarting the program first (just like it was before); a restart is still required to apply the settings (just like it was before); RPC-method "configtemplates" has new parameter "loadFromDisk" 2014-08-19 19:56:09 +00:00
Andrey Prygunkov
1a76c72bf3 fixed: the program could crash during executing of queue-scripts (bug introduced in r1094); the list of queue-scripts is now read only once, at program start; queue-scripts added to scripts-directory after the program was started can be selected in download details dialog on page "Postprocess" but will not be executed until the program is restarted 2014-08-19 19:47:49 +00:00
Andrey Prygunkov
74a1f6301a added option "EventInterval" allowing to reduce the number of calls of queue-scripts, which can be useful on slow systems 2014-08-19 19:45:30 +00:00
Andrey Prygunkov
dd22ec68fc improvement in support for detection of bad downloads (fakes, etc.): scripts supporting two modes (post-processing-mode and queue-mode) are now executed if selected in post-processing parameters: either in options "PostScript" and "CategoryX.PostScript" or manually on page "Postprocess" of download details dialog in web-interface; it is not necessary to select dual-mode scripts in option "QueueScript"; that provides more flexibility: the scripts can be selected per-category or activated/deactivated for each nzb individually 2014-08-17 23:07:48 +00:00
Andrey Prygunkov
6ecdfc25fd updated description in config file template 2014-08-15 22:28:09 +00:00
Andrey Prygunkov
f439f09c2e improvement in support for detection of bad downloads (fakes, etc.): queue-scripts are now called after every downloaded file included in nzb; new event "FILE_DOWNLOADED" of parameter "NZBNA_EVENT"; event "UNPACK" removed; instead added event "NZB_DOWNLOADED" which is similar to "UNPACK" but is called for every download, even for downloads without archive files and even if unpack is disabled; the execution of queue-scripts is serialized - only one script is executed at a time and other scripts wait in script-queue; the script-queue is compressed so that the same script for the same event is not queued more than once; this reduces the number of calls of scripts if files are downloaded faster than queue-scripts can process them; a call for event "NZB_DOWNLOADED" is always performed even if the previous calls for events "FILE_DOWNLOADED" were skipped; when a script marks nzb as bad the nzb is deleted from queue, no further internal post-processing (par, unrar, etc.) is made for the nzb but all post-processing scripts are executed; if option "DeleteCleanupDisk" is active the already downloaded files are deleted; new status "BAD" for field "DeleteStatus" of nzb-item in RPC-method "history"; queue-scripts can set post-processing parameters by printing a special command, just like post-processing-scripts can do that; this simplifies transferring small amounts of information between queue-scripts and post-processing-scripts 2014-08-15 22:24:53 +00:00
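A hedged sketch of a queue-script built around the events described above; the event and variable names NZBNA_EVENT, FILE_DOWNLOADED and NZB_DOWNLOADED come from this entry, while NZBNA_NZBNAME and the "[NZB] MARK=BAD" command text are assumptions to be checked against the documentation.

```python
#!/usr/bin/env python
# Sketch of a queue-script reacting to the new events. NZBNA_NZBNAME and
# the "[NZB] MARK=BAD" command text are assumptions.
import os

def looks_fake(name):
    # hypothetical detection rule; replace with real checks
    return "fake" in name.lower()

event = os.environ.get("NZBNA_EVENT", "")
nzb_name = os.environ.get("NZBNA_NZBNAME", "")

if event == "FILE_DOWNLOADED":
    pass  # cheap per-file check, called after every downloaded file
elif event == "NZB_DOWNLOADED" and looks_fake(nzb_name):
    # marking as bad deletes the download and triggers duplicate handling
    print("[NZB] MARK=BAD")
```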
Andrey Prygunkov
ebe955020c addition to r1072: fixed: renaming of active downloads was broken (bug introduced in r1070) 2014-08-15 17:17:05 +00:00
Andrey Prygunkov
60119a89c0 fixed: compiler error if configured using parameter "--disable-gzip" 2014-08-13 21:14:11 +00:00
Andrey Prygunkov
6a14353391 added support for detection of bad downloads (fakes, etc.): extended queue-scripts with new event "UNPACK", scripts are called before unpack and have a chance to detect bad downloads before unpacking; queue-scripts and post-processing scripts can mark downloads as bad by printing special command; marked downloads become status "FAILURE/BAD" and are processed by the program as failures (triggering duplicate handling); scripts executed thereafter see the new status and can react accordingly (inform an indexer or a third-party automation tool); new env. var "NZBNA_DIRECTORY" passed to queue scripts 2014-08-11 23:15:58 +00:00
Andrey Prygunkov
9090fe5fc9 fixed: not all statistic fields were reset when using command "Download again" (bug introduced in v14) 2014-08-11 18:10:47 +00:00
Andrey Prygunkov
93bc9a4293 fixed: malformed articles could crash the program (bug introduced in v14) 2014-08-11 18:02:15 +00:00
Andrey Prygunkov
80b2e22d9d added new search field "dupestatus" for use in rss filters: the search is performed through download queue and history testing items with the same dupekey or title as current rss item; the field contains comma-separated list of following possible statuses (if duplicates were found): QUEUED, DOWNLOADING, SUCCESS, WARNING, FAILURE or an empty string if there were no matching items found 2014-08-10 22:14:03 +00:00
Andrey Prygunkov
5a6a098990 suppressed certain warning types in VC++ project file (Windows) 2014-08-10 21:59:06 +00:00
Andrey Prygunkov
c64ef201ff addition to r1079: fixed: par-check could not be cancelled. 2014-08-10 16:42:23 +00:00
Andrey Prygunkov
817ae02295 fixed: damaged files could be ignored during par-check and no repair was performed (bug introduced in r1071) 2014-08-09 22:39:39 +00:00
Andrey Prygunkov
910dab98f1 fixed memory error which could lead to segfault (bug introduced in r1074) 2014-08-09 21:50:50 +00:00
Andrey Prygunkov
b9c59ffad4 fixed a few compiler warnings 2014-08-09 15:50:09 +00:00
Andrey Prygunkov
79426ec959 fixed: when rotating log-files the option TimeCorrection was not respected when building the new file name - the filename could have a wrong date stamp in the name (bug introduced in r1059) 2014-08-09 10:42:13 +00:00
Andrey Prygunkov
2e0ba0e3d1 integrated par2-module (libpar2) into NZBGet’s source code tree; the par2-module is now built automatically during building of NZBGet; this eliminates the dependency on external libpar2 and libsigc++ making it much easier for users to compile NZBGet with the newest recommended patches for libpar2 2014-08-08 22:37:30 +00:00
Andrey Prygunkov
0c3ce58ffa fixed: cleanup may leave some files undeleted (Mac OSX only) 2014-08-06 19:56:12 +00:00
Andrey Prygunkov
c482820746 addition to r1074: changed a few info messages to debug as they were supposed to be 2014-08-06 19:43:39 +00:00
Andrey Prygunkov
195bc1f290 addition to r1075: added missing changed file 2014-08-06 18:29:43 +00:00
Andrey Prygunkov
d8108f998b disabled block-by-block scan during par verification because: 1) it could cause incorrect verification results for certain kinds of damaged files; 2) after implementing quick scan for damaged files the block-by-block scan was not necessary anymore; block-by-block scan was also removed from the libpar2-patch 2014-08-06 15:24:25 +00:00
Andrey Prygunkov
40de60dd8b added quick par verification for damaged (partially downloaded) files 2014-08-06 00:11:07 +00:00
Andrey Prygunkov
c9981472a8 refactor: disk state now holds info about failed files: their IDs, CRCs of downloaded articles and full initial article information; these data can be used later to retry download of failed articles and for quick par-verification of damaged files 2014-08-05 23:45:28 +00:00
Andrey Prygunkov
83b3789282 fixed: renaming of active downloads was broken (bug introduced in r1070) 2014-08-02 16:41:27 +00:00
Andrey Prygunkov
0078e9e225 options "ParIgnoreExt" and "ExtCleanupDisk" are now respected by par-check (in addition to being respected by par-rename): if all damaged or missing files are covered by these options then no par-repair is performed and the download is assumed successful 2014-07-30 22:10:50 +00:00
Andrey Prygunkov
a62966227a added quick file verification during par-check/repair; if par-repair is required for download the files downloaded without errors are verified quickly by comparing their checksums against the checksums stored in the par2-file; this makes the verification of undamaged files almost instant; damaged files are verified as usual; new option "ParQuick" (active by default); added support for block-by-block scan of files during verification, which improves scan speed of damaged files; the quick par-verification requires a patch for libpar2 (see http://nzbget.net/libpar2 for details) 2014-07-27 21:59:00 +00:00
Andrey Prygunkov
5f0ccf3257 fixed: certain nzb-files failed to download (with decoding errors) if article cache was active 2014-07-25 22:16:33 +00:00
Andrey Prygunkov
61d0a1d498 fixed: program could crash during download if there were missing articles, DirectWrite was disabled and ArticleCache was enabled 2014-07-25 21:57:14 +00:00
Andrey Prygunkov
c626528a83 fixed: post-process time (statistic) was not correctly reset when post-processing again 2014-07-25 21:53:40 +00:00
Andrey Prygunkov
2e0e8e18ef removed accidentally committed debug logging 2014-07-25 21:51:36 +00:00
Andrey Prygunkov
54d98a6cad if an nzb has only a few failed articles its completion may be shown as 100%; now it is shown as 99.9% to indicate that not everything was successfully downloaded 2014-07-21 19:44:35 +00:00
Andrey Prygunkov
0fe503658b pp-script "EMail.py" now supports mail server relays (thanks l2g for the patch) 2014-07-20 16:20:24 +00:00
Andrey Prygunkov
5941464402 addition to r1057 (added article cache): fixed a segfault which could happen if none of the articles of a file could be downloaded 2014-07-19 00:17:39 +00:00
Andrey Prygunkov
3074ea62dc added per-nzb time and size statistics: total time, download, verify, repair and unpack times, downloaded size and average speed, shown in history details dialog via click on the row with total size in statistics block; RPC-methods "listgroups" and "history" return new fields: "DownloadedSizeLo", "DownloadedSizeHi", "DownloadedSizeMB", "DownloadTimeSec", "PostTotalTimeSec", "ParTimeSec", "RepairTimeSec", "UnpackTimeSec" 2014-07-19 00:06:28 +00:00
Andrey Prygunkov
312bf91003 improved joining of split files: instead of performing par-repair the files are now joined by the unpacker, which is much faster; files split before creation of the par-sets are now joined as well (they were not joined in v13 because par-repair has nothing to repair in this case); the unpacker can detect missing fragments and requests par-check if necessary 2014-07-18 23:27:41 +00:00
Andrey Prygunkov
a42c323343 refactor: removed old commented-out code 2014-07-18 23:19:46 +00:00
Andrey Prygunkov
39d9fe2794 added log file rotation; options "CreateLog" and "ResetLog" replaced with new option "WriteLog" (none, append, reset, rotate); new option "RotateLog" defines the rotation period; when compiled in debug mode a new field "process id" is printed to the file log for each row (it is easier to identify processes than threads) 2014-07-18 23:17:16 +00:00
Andrey Prygunkov
7993e2971c renamed option "WriteBufferSize" to "WriteBuffer"; changed the dimension - the option is now set in kilobytes instead of bytes; the old name and value are automatically converted; if the size of an article is below the value defined by the option, the buffer is allocated with the article's size (to not waste memory); therefore the special value "-1" is not required anymore; during conversion "-1" is replaced with "1024" (1 megabyte) but it can of course be changed manually to any other value later 2014-07-18 23:06:45 +00:00
Andrey Prygunkov
ba9efe43be added article cache: new option "ArticleCache" defines the memory limit to use for the cache; when the cache is active the articles are written into the cache first and then flushed to disk into the destination file; article cache reduces disk IO and may reduce file fragmentation improving post-processing speed (unpack); it works with both writing modes (direct write on and off); when option "DirectWrite" is disabled the cache should be big enough (for best performance) to accommodate all articles of one file (sometimes up to 500 MB) in order to avoid writing articles into temporary files, otherwise temporary files are used for articles which do not fit into the cache; when used in combination with DirectWrite there is no such limitation and even a small cache (100 MB or even less) can be used effectively; when the cache becomes full it is flushed automatically (directly into destination file) providing room for new articles; new row in the "statistics and status dialog" in web-interface indicates the amount of memory used for cache; new fields "ArticleCacheLo", "ArticleCacheHi" and "ArticleCacheMB" returned by RPC-method "status"; refactor: parts of unit "ArticleDownloader" responsible for writing to disk were moved into new unit "ArticleWriter" 2014-07-18 22:48:35 +00:00
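A small sketch reading the new cache field from the "status" RPC-method; the field name comes from this entry, while the connection details are assumed NZBGet defaults.

```python
# Sketch: report the current article cache usage using the new
# "ArticleCacheMB" field of RPC-method "status". URL/credentials are
# assumptions (NZBGet defaults).
from xmlrpc.client import ServerProxy

server = ServerProxy("http://nzbget:tegbzn6789@127.0.0.1:6789/xmlrpc")
status = server.status()
print("article cache in use: %s MB" % status.get("ArticleCacheMB", 0))
```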
Andrey Prygunkov
cfa5e7d19c updated version string to 14.0-testing 2014-07-18 15:51:38 +00:00
Andrey Prygunkov
7acd2ad884 updated version string (preparing to release 13.0) 2014-07-14 20:22:36 +00:00
Andrey Prygunkov
1f474c3097 updated ChangeLog 2014-07-05 21:56:20 +00:00
Andrey Prygunkov
c8b4f6e985 removed libpar2-patches from NZBGet source tree; the documentation now suggests to use the libpar2 version maintained by Debian/Ubuntu team, which already includes all necessary patches; also removed patches to create libpar2 and libsigc++ project files for Visual Studio on Windows, no one needed them anyway 2014-07-04 21:01:13 +00:00
Andrey Prygunkov
fc20bcca91 pp-script "EMail.py" now takes the status of previous pp-scripts into account and reports a failure if any of the scripts has failed 2014-07-04 19:50:11 +00:00
Andrey Prygunkov
702b635826 improved RPC-API: history items now preserve "NZBID" from queue items; that makes the tracking of items across queue and history easier for third-party apps; field "NZBID" returned by RPC-method "history" is now available for history items of all kinds (NZB, URL, DUP); field "ID" is deprecated and should not be used 2014-07-04 19:07:51 +00:00
Andrey Prygunkov
990c5f67e4 fixed: current download could be damaged if the program was restarted during download and the option "ContinuePartial" was active (bug introduced in v13) 2014-07-03 20:45:53 +00:00
Andrey Prygunkov
8ef4ca2ce8 fixed: port number was not sent in headers when downloading from URLs which could cause issues with RSS for web-sites using non-standard http ports 2014-06-30 20:42:50 +00:00
Andrey Prygunkov
b105ce6698 fixed: queued nzb-files were not deleted from disk when deleting a download without history tracking 2014-06-29 21:24:58 +00:00
Andrey Prygunkov
6c93b836f5 fixed: check for file or directory existence could fail sometimes (Windows only, bug introduced in v13) 2014-06-26 18:29:55 +00:00
Andrey Prygunkov
f0e60ee577 improvement in RPC-API: method "append" now returns the id of the added nzb-file or "0" on error; this makes it easier for third-party apps to track added nzb-files; for backward compatibility with older software expecting a boolean result the old version of method "append" is still supported; the new version of method "append" has a different signature (order of parameters); parameter "content" can now be either nzb-file content (encoded in base64) or a URL; this makes the method "appendurl" obsolete (still supported for compatibility); if a URL was added to the queue the queue entry created for the fetched nzb-file has the same "NZBID" for easier tracking 2014-06-19 15:00:46 +00:00
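A hedged sketch of the reworked "append" call; the entry only says the signature changed, so the parameter order shown here (filename, content, category, priority, add-to-top, add-paused, dupekey, dupescore, dupemode) is an assumption and must be checked against the RPC documentation for this version.

```python
# Sketch: add an nzb-file with the reworked "append" method. The parameter
# order is an assumption; the file name is a hypothetical example.
from base64 import b64encode
from xmlrpc.client import ServerProxy

server = ServerProxy("http://nzbget:tegbzn6789@127.0.0.1:6789/xmlrpc")

with open("example.nzb", "rb") as f:  # hypothetical file
    content = b64encode(f.read()).decode("ascii")

# returns the NZBID of the added item, or 0 on error
nzb_id = server.append("example.nzb", content, "Movies", 0,
                       False, False, "", 0, "SCORE")
print("added with NZBID", nzb_id)
```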
Andrey Prygunkov
2cfbb2373a fixed: scheduler command "FetchFeed" did not work properly with parameter "0" (fetch all feeds) 2014-06-17 21:31:09 +00:00
Andrey Prygunkov
d26d04d92b when changing category in web-interface the post-processing parameters are now automatically updated according to new category settings; only parameters which are different in old and new category are changed; parameters which are present in both or in neither category are not changed; that ensures that only the relevant parameters are updated and parameters which were manually changed by the user retain their settings when it makes sense; in the "download details dialog" the new parameters are updated on the postprocess-tab directly after changing of category and can be controlled before saving; in the "edit multiple downloads dialog" the parameters are updated individually for each download on saving; new action "CP" of remote command "--edit/-E" for groups to set category and apply parameters; new action "GroupApplyCategory" of RPC-method "editqueue" for the same purpose 2014-06-13 21:53:27 +00:00
Andrey Prygunkov
0d6fe32246 to detect daylight saving activation/deactivation the time zone information is now checked every minute if a download is active or once in 3 hours if the program is in stand-by; these delays should work well with hibernation mode on synology 2014-06-12 20:57:00 +00:00
Andrey Prygunkov
36de8073f2 apostrophe is not considered an invalid file name character anymore 2014-06-11 21:15:36 +00:00
Andrey Prygunkov
5aaaa1e6a7 fixed: the program could crash during cleanup if files with invalid timestamps were found in the directory (windows only) 2014-06-09 20:52:23 +00:00
Andrey Prygunkov
a4126a52ce fixed: par-rename initiated an unnecessary par-check if option "InterDir" was not active (bug introduced in r1030) 2014-06-06 21:49:11 +00:00
Andrey Prygunkov
076017128e added support for power management on windows to avoid the PC going into sleep mode during download or post-processing 2014-06-06 19:25:02 +00:00
Andrey Prygunkov
7240147418 added option "ParIgnoreExt" which lists files which do not trigger par-repair if they are missing 2014-06-03 20:47:28 +00:00
Andrey Prygunkov
0923f2bb5c added new choice "Always" for option "ParCheck"; it forces the par-check for every (even undamaged) download but in contrast to choice "Force" only one par2-file is downloaded first; additional files are downloaded if needed 2014-06-02 20:43:37 +00:00
Andrey Prygunkov
5ce0b9985a corrected a typing error in a month name 2014-06-02 20:28:36 +00:00
Andrey Prygunkov
cd76375d8e post-processing scripts which move the whole download into a new location can inform the program about new location using command "[NZB] DIRECTORY=/new/path", allowing other scripts to process files further 2014-05-30 22:09:50 +00:00
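A hedged sketch of a post-processing script using the "[NZB] DIRECTORY=..." command quoted in this entry; the destination path is hypothetical and NZBPP_DIRECTORY is assumed to hold the current download directory.

```python
#!/usr/bin/env python
# Sketch of a pp-script that moves the download and tells NZBGet about the
# new location using the command quoted in this commit. NZBPP_DIRECTORY
# and the destination path are assumptions.
import os, shutil

src = os.environ.get("NZBPP_DIRECTORY", "")
dst = os.path.join("/srv/media/incoming", os.path.basename(src))  # hypothetical

if src and os.path.isdir(src):
    shutil.move(src, dst)
    # let the following scripts see the files at their new location
    print("[NZB] DIRECTORY=%s" % dst)
```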
Andrey Prygunkov
df2ef01494 when checking for missing files the files whose extensions match with option "ExtCleanupDisk" are ignored now (to avoid time consuming restoring of files which will be deleted later anyway) 2014-05-30 21:35:30 +00:00
Andrey Prygunkov
1d3d875f3d refactor: created new class "Tokenizer" and replaced all usages of function "strtok_r" with new class; also created new function "MatchFileExt" for the similar code used in two places 2014-05-29 21:38:27 +00:00
Andrey Prygunkov
48446367f4 post-processing scripts now have two new parameters: env. var "NZBPP_STATUS" indicates the status of the download including the total status (SUCCESS, FAILURE, etc.) and the detail field (for example in case of failures: PAR, UNPACK, etc.); env. var "NZBPP_TOTALSTATUS" is equal to the total status of parameter "NZBPP_STATUS" and is provided for convenience (to avoid parsing of "NZBPP_STATUS"); the new parameters provide a simple way for pp-scripts to determine download status without the guesswork needed in previous versions; parameters "NZBPP_PARSTATUS" and "NZBPP_UNPACKSTATUS" are now considered deprecated (still passed for compatibility); updated script "EMail.py" to use new parameters "NZBPP_TOTALSTATUS" and "NZBPP_STATUS" instead of "NZBPP_PARSTATUS" and "NZBPP_UNPACKSTATUS" 2014-05-28 22:19:39 +00:00
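A minimal sketch of a post-processing script reading the new status variables; the variable names come from this entry, while the exit codes (93 for success, 94 for error) and the "[ERROR] ..." log prefix are assumptions based on the usual pp-script conventions.

```python
#!/usr/bin/env python
# Sketch of a pp-script using NZBPP_TOTALSTATUS / NZBPP_STATUS.
# Exit codes 93/94 and the "[ERROR]" log prefix are assumptions.
import os, sys

total_status = os.environ.get("NZBPP_TOTALSTATUS", "")  # e.g. SUCCESS, FAILURE
status = os.environ.get("NZBPP_STATUS", "")             # e.g. FAILURE/UNPACK

if total_status == "SUCCESS":
    # ... do the actual work here ...
    sys.exit(93)
else:
    print("[ERROR] download failed with status %s" % status)
    sys.exit(94)
```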
Andrey Prygunkov
fb1f293a17 improved fast par-renamer: it can now detect missing files (files listed in par2-files but not present on disk) 2014-05-28 21:50:15 +00:00
Andrey Prygunkov
f85533d608 fixed: some nzb-file data were not calculated for history items loaded from disk state; this may cause problems for commands "Post-process again" and "Download remaining files" (bug introduced in v13) 2014-05-28 21:37:44 +00:00
Andrey Prygunkov
e32faf6053 better error reporting if deleting of directories fails 2014-05-25 20:42:50 +00:00
Andrey Prygunkov
a429ea4679 windows version is now configured to use OpenSSL instead of GnuTLS 2014-05-24 17:37:42 +00:00
Andrey Prygunkov
9112d2277e fixed: an incorrect number of paused files was shown in curses output mode 2014-05-24 12:25:24 +00:00
Andrey Prygunkov
a9050045f3 renamed section "SCRIPTS" to "EXTENSION SCRIPTS" in the settings 2014-05-24 12:25:08 +00:00
Andrey Prygunkov
ed3cad6e9c when building nzbget, if both OpenSSL and GnuTLS are available, OpenSSL is now used by default (the preferred library can still be selected with configure-parameter --with-tlslib=OpenSSL/GnuTLS) 2014-05-23 18:12:57 +00:00
Andrey Prygunkov
deee5aff00 rolled back changes made in r1019 (not necessary anymore) 2014-05-22 17:09:34 +00:00
Andrey Prygunkov
8c36a4d4c6 fixed: renaming or deleting of temporary files could fail, especially when options "UnpackPauseQueue" and "ScriptPauseQueue" were not active (windows only) 2014-05-22 16:58:16 +00:00
Andrey Prygunkov
157074db29 fixed small memory leak (bug introduced in r1012) 2014-05-22 15:50:12 +00:00
Andrey Prygunkov
14ff04d2e3 improved error reporting: added error check when closing article file for writing 2014-05-21 21:27:51 +00:00
Andrey Prygunkov
d0e2d439aa if renaming of files fails, a few more attempts are made; this should improve compatibility with virus scanners or sync software; better error reporting if renaming still fails 2014-05-20 21:20:49 +00:00
Andrey Prygunkov
159340a396 fixed: remaining size and time were not printed in remote console mode (bug introduced somewhere in v13) 2014-05-19 21:28:35 +00:00
Andrey Prygunkov
de6625bcaf updated links to doc-article "Extension scripts" 2014-05-18 21:24:34 +00:00
Andrey Prygunkov
0d7ed691e6 fixed: program could hang when adding nzb-files from fetched RSS feed (bug introduced in r966) 2014-05-17 21:39:49 +00:00
Andrey Prygunkov
0721f723be fixed: nzb-files were sometimes not deleted from NzbDir (option "NzbCleanupDisk") 2014-05-15 17:02:10 +00:00
Andrey Prygunkov
2da7239ac6 fixed: if post-processing step "move" failed, the command "post-process again" did not try to move again 2014-05-08 19:54:25 +00:00
Andrey Prygunkov
7b4c07c837 refactor: better handling of completed URL downloads 2014-05-07 19:58:47 +00:00
Andrey Prygunkov
169c56f105 implemented general scripts concept which is an extension of the post-processing scripts concept initially introduced in v11; the general scripts concept applies to all scripts used in the program: scan-script, queue-script and scheduler-script (in addition to post-processing scripts); option "NzbProcess" renamed to "ScanScript"; option "NzbAddedProcess" renamed to "QueueScript"; option "DefScript" and "CategoryX.DefScript" renamed to "PostScript" and "CategoryX.PostScript" (options with old names are recognized and automatically converted on first settings saving); new option "TaskX.Script"; old option "TaskX.Process" kept for scheduling of external programs not related to nzbget (to avoid writing intermediate proxy scripts); scan-script, queue-script and scheduler-script now work similarly to post-processing scripts: scripts must be put into the scripts-directory; scripts can be configured via web-interface and can have options; multiple scripts can be chosen for each scripts-option, and all chosen scripts are executed; program and script options are passed to the script as env. variables; renamed default directory with scripts from "ppscripts" to "scripts"; the script signature indicates the type of script (post-processing, scan, queue or scheduler); one script can have a mixed signature allowing it to be used for multiple purposes (for example a notification script can send a notification on both events: after adding to queue and after post-processing); result of RPC-method "configtemplates" has new fields "PostScript", "ScanScript", "QueueScript", "SchedulerScript" to indicate the purpose of the script; queue-script (formerly NzbAddedProcess) has new parameter "NZBNA_EVENT" indicating the reason of calling the script; currently the script is called only after adding of files to the download queue and therefore the parameter is always set to "NZB_ADDED", but the queue-script can be called on other events in the future too 2014-05-06 15:36:15 +00:00
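For illustration, a hedged sketch of the "script signature" idea described above: a header block that tells NZBGet which purposes a script serves. The exact marker text used here is an assumption; copy the real markers from a shipped script such as EMail.py rather than from this sketch.

```python
#!/usr/bin/env python
# Sketch of a dual-purpose extension script. The signature marker text
# below is an assumption, not taken verbatim from the source.

### NZBGET QUEUE/POST-PROCESSING SCRIPT ###
# Notify on queue events and after post-processing.
### NZBGET QUEUE/POST-PROCESSING SCRIPT ###

import os

if "NZBNA_EVENT" in os.environ:
    print("[INFO] called as queue-script, event=%s" % os.environ["NZBNA_EVENT"])
else:
    print("[INFO] called as post-processing script")
```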
Andrey Prygunkov
d51cdfd7c4 increased a few wait intervals which were unnecessarily small 2014-05-04 13:18:28 +00:00
Andrey Prygunkov
3c02b139e8 eliminated loop waiting time in queue coordinator on certain conditions - may improve performance on very high speed connections 2014-05-03 11:28:39 +00:00
Andrey Prygunkov
9d660b9d4e extended info printed by remote command "nzbget -B dump" 2014-05-02 19:36:02 +00:00
Andrey Prygunkov
eaf7c61f01 fixed: for downloads with force priority the status was shown orange (instead of green) and the progress info was not shown during post-processing if the program was paused 2014-04-27 12:05:56 +00:00
Andrey Prygunkov
1234c05690 small adjustment in speed formatting 2014-04-26 21:40:18 +00:00
Andrey Prygunkov
fd5b6769fa small fix: data sizes exactly equal to 10, 100, 1000 MB or GB were formatted using 4 digits instead of 3 (one digit after decimal point too much) 2014-04-26 21:29:49 +00:00
Andrey Prygunkov
63db34070e data sizes above 1000 GB are now shown as TB in web-interface (instead of GB) 2014-04-26 21:21:29 +00:00
Andrey Prygunkov
b41cd3ff97 addition to r945: adjusted modules initialization to avoid possible bugs due to delayed thread starts 2014-04-25 23:02:51 +00:00
Andrey Prygunkov
f2406ee0e4 fixed: queue was not locked during loading on program start and that could cause problems 2014-04-25 22:56:33 +00:00
Andrey Prygunkov
4712c6a372 fixed: errors during loading of the queue from disk state could render the already loaded parts useless too; now at least those parts of the queue are used 2014-04-25 22:51:22 +00:00
Andrey Prygunkov
56dc1b2b6c fixed: the program could crash during parsing of malformed nzb-files 2014-04-25 21:37:47 +00:00
Andrey Prygunkov
cb13d00844 added force-priorities; downloads with priorities equal to or greater than 900 are downloaded and post-processed even if the program is in paused state (force mode); in web-interface the combo for choosing priority has new entry "force" (priority value 900); new fields "ForcedSizeLo", "ForcedSizeHi" and "ForcedSizeMB" returned by RPC-method "status"; 2014-04-22 20:26:29 +00:00
Andrey Prygunkov
7a11e8eb19 split files are now joined automatically (again) 2014-04-17 16:33:20 +00:00
Andrey Prygunkov
482af25c90 fixed: field "STATUS" was not set correctly for par-checked downloads without unpack (bug introduced in r992) 2014-04-16 17:51:28 +00:00
Andrey Prygunkov
0c17e21b85 fixed: par-check could hang on renamed and split files 2014-04-16 17:49:41 +00:00
Andrey Prygunkov
0acb6ac548 fixed: cancelling of active par-job sometimes didn't work 2014-04-16 17:48:44 +00:00
Andrey Prygunkov
7f339860ad fixed: command "Pause" was not shown in actions menu in download details dialog (bug introduced in r987) 2014-04-15 19:11:56 +00:00
Andrey Prygunkov
67cf38a291 experimental: download speeds above 1024 KB/s are now indicated in MB/s 2014-04-15 16:09:35 +00:00
Andrey Prygunkov
cad28b9fd5 addition to r990: fixed: download speeds above approx. 70 MB/s were not indicated correctly in web-interface and by RPC-method "status" 2014-04-15 16:04:18 +00:00
Andrey Prygunkov
80ceca6e28 new field "STATUS" in RPC-method "history" allowing third-party apps to more easily determine the status of an item without inspecting the status-fields of every processing step; web-interface uses the new field "STATUS" 2014-04-14 22:06:23 +00:00
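A small sketch listing history items with the new status field; the connection details, the "hidden" argument and the result field names ("Status", "Name") are assumptions to verify against the API documentation.

```python
# Sketch: list history items using the new status field. Field name casing
# and the "hidden" parameter are assumptions.
from xmlrpc.client import ServerProxy

server = ServerProxy("http://nzbget:tegbzn6789@127.0.0.1:6789/xmlrpc")
for item in server.history(False):
    print(item.get("NZBID"), item.get("Status"), item.get("Name"))
```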
Andrey Prygunkov
bdcb8864fb fixed: history status "SKIPPED" and "SCAN" for URL-items were not properly read from disk state 2014-04-14 20:55:04 +00:00
Andrey Prygunkov
ced444282f fixed: download speeds above approx. 70 MB/s were not indicated correctly in web-interface and by RPC-method "status" 2014-04-14 19:47:18 +00:00
Andrey Prygunkov
5c7c11e3f4 fixed: status "PP-QUEUED" was shown in green/orange instead of gray (bug introduced in r987) 2014-04-13 08:22:27 +00:00
Andrey Prygunkov
2b4628fb43 fixed: estimated time was not shown during download (bug introduced in r987) 2014-04-13 08:13:11 +00:00
Andrey Prygunkov
a0dbd75f35 RPC-method "listgroups" now returns new field "Status" making it easier for third-party apps to determine the status of a download entry; added prefix "Post" to new post-processing fields added in r984; changed web-interface to use new field "Status"; fixed: progress-label during post-processing did not show output of the pp-scripts (bug introduced in r984); fixed: button "Log" was not shown in the download details dialog for active post-processing download (bug introduced in r984) 2014-04-12 21:30:19 +00:00
Andrey Prygunkov
a20877ea80 corrected html formatting for statistics data in details dialog 2014-04-12 21:21:05 +00:00
Andrey Prygunkov
e151691711 fixed: file states were not fully reloaded when option "ContinuePartial" was used (bug introduced in r982) 2014-04-12 08:14:01 +00:00
Andrey Prygunkov
f42db27eaa RPC-method "listgroups" now returns info about post-processing similar to the info returned by method "postqueue"; RPC-method "postqueue" is obsolete now; web-interface requires fewer requests to NZBGet on each page update and it is now easier for third-party developers to obtain the info about download and post-processing status (no need to merge download queue and post queue) 2014-04-10 20:45:46 +00:00
Andrey Prygunkov
724eab69d8 per-server/per-nzb article completion statistics are now available for active downloads in details dialog (not only for history); the info on that page is constantly updated as long as the page is active (unless refresh is disabled); download age info removed from details dialog to save space (it is shown in the download list anyway); if backup news-servers start to be used for an nzb-file, a badge appears in the download list showing the percentage of articles downloaded from backup servers; a click on the badge opens the download details dialog directly on the completion page 2014-04-10 20:24:28 +00:00
Andrey Prygunkov
a83dbccc6c changed the way option "ContinuePartial" works: now the information about completed articles is stored in a special file in QueueDir; when option "DirectWrite" is active no separate flag-files per article are created in TempDir; the file contains additional information, which was not stored/available before; fixed: per-server/per-nzb article completion statistics could be inaccurate for nzb-files whose download was interrupted by reload/restart; per-server/per-nzb article completion statistics are now available via RPC-method "listgroups" for active downloads (not only for "history") 2014-04-10 20:06:55 +00:00
Andrey Prygunkov
178e987650 fixed: seconds/minutes/hours slots of volume statistics could be incorrectly cleared on program start due to time zone offset not yet initialized at the time the volume data was loaded 2014-04-09 21:09:45 +00:00
Andrey Prygunkov
3cd126f08d fixed: after deleting servers from config file the program could crash on start when loading server volume statistics data from disk 2014-04-09 20:09:08 +00:00
Andrey Prygunkov
b109123a43 fixed: data volume dialog could show a wrong current date due to incorrect time zone calculation 2014-04-05 23:29:52 +00:00
Andrey Prygunkov
c97e97d2cc updated all links to go to new domain (nzbget.net) 2014-04-04 21:45:48 +00:00
Andrey Prygunkov
160d098510 extended data volume statistics dialog with numbers for current day, month, all-time total and custom counter; the custom counter can be manually reset; new fields in the result of RPC-method "servervolumes"; new RPC-method "resetservervolume" 2014-04-04 20:44:46 +00:00
Andrey Prygunkov
a72e1924ca updated options descriptions in template config file 2014-04-03 20:34:27 +00:00
Andrey Prygunkov
fd7508f152 better handling of backwards system clock changes in data volume meter 2014-04-03 20:21:59 +00:00
Andrey Prygunkov
1de995f9d5 better handling of incorrect system clock date (such as 01-01-2000) in data volume meter 2014-04-02 20:56:11 +00:00
Andrey Prygunkov
47fbe6423e added collecting of download volume statistics data per news server; in web-interface the data is shown as chart in "Statistics and Status" dialog; new RPC-method "servervolumes" returns the collected data 2014-04-01 21:06:31 +00:00
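A short sketch of fetching the per-server counters via the new "servervolumes" RPC-method named in this entry; the connection details and the printed field names are assumptions beyond what the entry states.

```python
# Sketch: fetch per-server download volume counters. "ServerID" and
# "TotalSizeMB" are assumed field names; inspect the raw result first.
from xmlrpc.client import ServerProxy

server = ServerProxy("http://nzbget:tegbzn6789@127.0.0.1:6789/xmlrpc")
for volume in server.servervolumes():
    print(volume.get("ServerID", "?"), volume.get("TotalSizeMB", "?"), "MB")
```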
Andrey Prygunkov
461c2a38a5 fixed: sometimes URLs were removed too early from the feed history causing them to be detected as "new" and fetched again; if duplicate check was not active the same nzb-files could be downloaded again 2014-03-28 21:32:55 +00:00
Andrey Prygunkov
d89036f9f3 1) the current time zone is now determined once on program start and if a clock adjustment is detected using system function "localtime"; the function "localtime" is no longer constantly used by the scheduler; this should solve the hibernation problem on synology NAS, even when task scheduler is used; 2) fixed: RSS feed preview dialog displayed slightly incorrect post ages because of the wrong time zone conversion 2014-03-21 21:35:32 +00:00
Andrey Prygunkov
998cb16bfa added new files 2014-03-20 21:44:00 +00:00
Andrey Prygunkov
4c2a8c2892 refactor: moved speed meter code from "QueueCoordinator" into new module "StatMeter" 2014-03-20 21:37:32 +00:00
Andrey Prygunkov
8d3afa0bb6 remote command "-B dump" now can be used also in release (non-debug) versions and prints useful debug data as "INFO" instead of "DEBUG" 2014-03-20 21:18:27 +00:00
Andrey Prygunkov
3fd7bbc0a3 refactor: reducing dependencies between modules 2014-03-20 21:14:39 +00:00
Andrey Prygunkov
bf66500aac reworking queue (continued): merged url queue into main download queue: urls added to queue are now immediately shown in web-interface; urls can be reordered and deleted; when urls are fetched the downloaded nzb-files are put into queue at the positions of their urls; this solves the problem with fetched nzb-files ordered differently than the urls if the fetching of upper (position-wise) urls completed after the lower ones; removed options "ReloadUrlQueue" and "ReloadPostQueue" since there are no separate url- and post-queues anymore; nzb-files added via urls have new field "URL" which can be accessed via RPC-methods "listgroups" and "history"; new env. var. "NZBNP_URL", "NZBNA_URL" and "NZBPP_URL" passed to NzbProcess, NzbAddedProcess and PostProcess-scripts; removed remote command "--list U", urls are now shown as groups by command "--list G"; RPC-method "urlqueue" is still supported for compatibility but should not be used since the urls are now returned by method "listgroups", the entries have new field "Kind" which can be "NZB" or "URL" 2014-03-18 22:35:58 +00:00
Andrey Prygunkov
e28da0d7fd added new option "PropagationDelay", which sets the minimum post age to download; newer posts are kept on hold in download queue until they get older than the defined delay, after that they are downloaded 2014-03-11 22:05:27 +00:00
Andrey Prygunkov
f10bc886c4 column "age" in web-interface now shows minutes for recent posts (instead of "0 h") 2014-03-11 21:42:54 +00:00
Andrey Prygunkov
df578ac78b updated MSVC project file 2014-03-09 21:51:07 +00:00
Andrey Prygunkov
18e1557cf3 fixed: if during par-repair the downloaded extra par-files were damaged and the repair was terminated with failure status the post-processing scripts were executed twice sometimes 2014-03-09 21:18:57 +00:00
Andrey Prygunkov
30e6131cd7 improved par-check for damaged collections with multiple par-sets and having missing files: only orphaned files (not belonging to any par-set) are scanned when looking for missing files; this greatly decreases the par-check time for big collections 2014-03-05 23:46:29 +00:00
Andrey Prygunkov
44310fda20 improved error reporting if the par-renamer fails to rename files 2014-03-04 21:29:27 +00:00
Andrey Prygunkov
fb7431abb5 improved error reporting if the unpacker fails to move files 2014-03-04 18:24:16 +00:00
Andrey Prygunkov
5b109ea3ce fixed missing includes (bug introduced in r957) 2014-02-26 21:34:55 +00:00
Andrey Prygunkov
a671e9f925 refactor: split unit ScriptController.cpp into three units: Script.cpp, QueueScript.cpp, PostScript.cpp 2014-02-26 21:28:15 +00:00
Andrey Prygunkov
8168804f05 reorganized source code directory structure: created directory 'daemon' with several subdirectories and put all source code files there 2014-02-24 22:11:14 +00:00
Andrey Prygunkov
ec576ad0a9 fixed: damaged nzb-files containing multiple par-sets and not having enough par-blocks could cause a crash during par-check 2014-02-22 23:23:45 +00:00
Andrey Prygunkov
fa3abcfdec reworking queue (continued): refactor: download queue can now be accessed without QueueCoordinator; edit and save functions can now be called directly on download queue without accessing global objects QueueEditor and DiskState (the calls are rerouted to these objects internally) 2014-02-22 23:21:20 +00:00
Andrey Prygunkov
33864614e7 eliminated the distinction between manual pause and soft-pause; there is only one pause register now; options "ParPauseQueue", "UnpackPauseQueue" and "ScriptPauseQueue" do not change the state of the pause but instead are respected directly; RPC-methods "pausedownload2" and "resumedownload2" are aliases to "pausedownload" and "resumedownload" (kept for compatibility); field "Download2Paused" of RPC-method "status" is an alias to "DownloadPaused" (kept for compatibility); action "D2" of remote commands "--pause/-P" and "--unpause/-U" is not supported anymore 2014-02-19 21:45:56 +00:00
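A short sketch of the simplified pause handling from a client's point of view is shown below; the control URL and the default credentials are assumptions and must be adapted locally.

from xmlrpc.client import ServerProxy

nzbget = ServerProxy("http://nzbget:tegbzn6789@localhost:6789/xmlrpc")

nzbget.pausedownload()                  # there is only one pause register now
status = nzbget.status()
print(status["DownloadPaused"])         # True; "Download2Paused" mirrors it for compatibility
nzbget.resumedownload()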
Andrey Prygunkov
f4bf68ee59 refactor: moved parts from unit "PrePostProcessor.cpp" into new unit "HistoryCoordinator.cpp" 2014-02-19 21:17:24 +00:00
Andrey Prygunkov
2b3d6f976d refactor: removed an unneeded parameter in one function 2014-02-14 21:04:20 +00:00
Andrey Prygunkov
641a3313ea fixed: health check action (pause or delete) didn't work properly (bug introduced in r949) 2014-02-14 20:55:21 +00:00
Andrey Prygunkov
08e6665ffc reworking queue (continued): remote command "-E/--edit" and RPC-method "editqueue" now use NZBIDs of groups to edit groups (instead of using the ID of any file in the group as in older versions); remote command "-L/--list" for groups (G) and the group-view in the curses-frontend now print NZBIDs instead of "FirstID-LastID"; RPC-method "listgroups" returns NZBIDs in fields "FirstID" and "LastID", which are usually used as arguments to "editqueue" (for compatibility with existing third-party software); items queued for post-processing and not having any remaining files can now be edited (to cancel post-processing), which was not possible before due to the lack of "LastID" in empty groups; edit commands for the download queue and the post-processing queue now both use the same IDs (NZBIDs) 2014-02-12 21:24:46 +00:00
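A sketch of editing groups by NZBID via the RPC interface follows. The control URL and credentials, the numeric "number of log entries" argument of "listgroups", the "editqueue" parameter order (command, offset, text, id list) and the "GroupPause" command name are assumptions based on the v13-era API rather than facts stated in this commit.

from xmlrpc.client import ServerProxy

nzbget = ServerProxy("http://nzbget:tegbzn6789@localhost:6789/xmlrpc")

# "LastID" (and "FirstID") now carry the NZBID of the group.
groups = nzbget.listgroups(0)
nzbids = [group["LastID"] for group in groups]

if nzbids:
    # Pause all groups, addressing them by NZBID.
    nzbget.editqueue("GroupPause", 0, "", nzbids)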
Andrey Prygunkov
e6a7af4ab3 fixed: crash during post-processing if history was disabled (bug introduced in r943) 2014-02-10 20:56:54 +00:00
Andrey Prygunkov
a0030a7909 fixed: strange (damaged?) par2-files could cause a crash during par-renaming 2014-02-10 20:54:12 +00:00
Andrey Prygunkov
13c7a7986e fixed: when splitting paused downloads the destination download showed an incorrect paused size (bug introduced in r934) 2014-02-09 21:01:52 +00:00
Andrey Prygunkov
bd1ea872be adjusted modules initialization to avoid possible bugs due to delayed thread starts 2014-02-08 22:09:44 +00:00
Andrey Prygunkov
dfcd595bc1 fixed a locking issue occurring in non-daemon mode (bug introduced in r943) 2014-02-05 22:06:43 +00:00
Andrey Prygunkov
f77c97c66a reworking queue (continued): merged post-processing queue into main download queue; changing the order of (pp-queued) items in the download queue now also means changing the order of post-processing jobs; priorities of downloads are now respected when picking the next queued post-processing job; the moving of download items in the web-interface is now allowed for downloads queued for post-processing; removed actions of remote command "--edit/-E" and of RPC-method "editqueue" used to move post-processing jobs in the post-processing queue (the moving of download items should be used instead) 2014-02-04 22:30:52 +00:00
Andrey Prygunkov
ca53391bdb reworked download queue (continued): removed a few (no longer necessary) checks from the duplicate manager 2014-02-03 20:50:53 +00:00
Andrey Prygunkov
d01dd904da reworking queue (continued): field "Priority" was removed from individual files; instead nzb-files (collections) now have field "Priority"; nzb-files now also have new fields "MinTime" and "MaxTime", which are set when the nzb-file is parsed and then kept; this eliminates the need to recalculate file statistics (min and max priority, min and max time); removed action "FileSetPriority" from RPC-command "editqueue"; removed action "I" from remote command "--edit/-E" for individual files (it is now allowed for groups only) 2014-01-31 20:51:14 +00:00
Andrey Prygunkov
bb885bddd4 for downloads not having any (obviously named) par2-files the critical health is assumed to be 85% instead of the 100% that the absence of par2-files would suggest; this avoids possible false triggering of the health-check action (delete or pause) for downloads having misnamed (obfuscated) par2-files; combined with the improved fast par-renamer this provides proper processing of downloads with misnamed (obfuscated) par2-files 2014-01-28 22:14:50 +00:00
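An illustrative sketch of the relaxed threshold (not the actual implementation):

def triggers_health_action(health_percent, critical_health_percent=85.0):
    # Before this change a par2-less download had a critical health of 100%,
    # so any loss at all could trigger the delete/pause action.
    return health_percent < critical_health_percent

print(triggers_health_action(90.0))   # False: 10% loss is tolerated, hidden par2-files may still repair it
print(triggers_health_action(80.0))   # True: too much damage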
Andrey Prygunkov
3d8f2c62ea improved fast par-renamer: it now automatically detects and renames misnamed (obfuscated) par2-files 2014-01-28 22:06:56 +00:00
Andrey Prygunkov
7cdb5e86c6 reworked download queue (continued): removed fields FirstID and LastID from internal nzb-file data; RPC-method "listgroups" returns the ID of the last file in the group for both the FirstID and LastID fields; the only usage of these fields was in RPC-method "editqueue", where LastID was preferred anyway; remote command "--list/-L" for groups now shows only LastID; the curses-interface shows only LastID 2014-01-26 21:33:26 +00:00
Andrey Prygunkov
255b2b464d fixed: download priority was not shown correctly in web-interface (and via RPC) (bug introduced in r934) 2014-01-25 10:30:04 +00:00
Andrey Prygunkov
0ef771ca15 avoid unnecessary calls to system function "localtime" from the scheduler if no tasks are defined; this solves hibernation issues on Synology NAS (but only as long as the scheduler is not used) 2014-01-23 21:03:04 +00:00
Andrey Prygunkov
a3207496b6 fixed: post-processing scripts were not executed in standalone mode ("nzbget /path/to/file.nzb") 2014-01-23 20:46:37 +00:00
Andrey Prygunkov
741724973c reworked download queue (continued): 1) current download data such as remaining size or size of paused files is now internally updated automatically on related events (download of an article is completed, queue edited, etc.); 2) this eliminates the need to calculate this data on each RPC-request (from the web-interface) and greatly decreases the CPU load of processing RPC-requests when having a large download queue (and/or large nzb-files in queue) 2014-01-21 21:56:43 +00:00
Andrey Prygunkov
3375c91b56 reworked download queue: 1) queue now holds nzb-jobs instead of individual files (contained within nzbs); 2) this drastically improves performance when managing a queue containing big nzb-files for operations such as pause/unpause/move items; 3) tested with a queue of 30 nzb-files, each 40-100GB in size (total queue size 1.5TB) - queue management is fast even on a slow device; 4) limitation: individual files (contained within nzbs) now cannot be moved beyond nzb borders (in older versions it was possible to move individual files freely and mix files from different nzbs, although this feature was not supported in the web-interface and therefore was not widely known); 5) this change opens the door for further speed optimizations and for integration of the download queue with the post-processing queue and possibly the url-queue; 6) NOTE: make a backup of your queue-directory before trying this (devel) version 2014-01-21 21:45:47 +00:00
Andrey Prygunkov
67da9d7233 updated version string to 13.0-testing 2014-01-21 20:51:58 +00:00
201 changed files with 48356 additions and 18161 deletions

29 AUTHORS
View File

@@ -1,4 +1,27 @@
nzbget:
Sven Henkel <sidddy@users.sourceforge.net> (versions 0.1.0 - ?)
Bo Cordes Petersen <placebodk@users.sourceforge.net> (versions ? - 0.2.3)
NZBGet:
Andrey Prygunkov <hugbug@users.sourceforge.net> (versions 0.3.0 and later)
Bo Cordes Petersen <placebodk@users.sourceforge.net> (versions ? - 0.2.3)
Sven Henkel <sidddy@users.sourceforge.net> (versions 0.1.0 - ?)
PAR2:
Peter Brian Clements <peterbclements@users.sourceforge.net>
PAR2 library API:
Francois Lesueur <flesueur@users.sourceforge.net>
jQuery:
John Resig <http://jquery.com>
The Dojo Foundation <http://sizzlejs.com>
Bootstrap:
Twitter, Inc <http://twitter.github.com/bootstrap>
Raphaël:
Dmitry Baranovskiy <http://raphaeljs.com>
Sencha Labs <http://sencha.com>
Elycharts:
Void Labs s.n.c. <http://void.it>
iconSweets:
Yummygum <http://yummygum.com>

View File

File diff suppressed because it is too large

2281 ChangeLog
View File

File diff suppressed because it is too large

View File

File diff suppressed because it is too large

View File

@@ -1,80 +0,0 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef DISKSTATE_H
#define DISKSTATE_H
#include "DownloadInfo.h"
#include "FeedInfo.h"
#include "NewsServer.h"
class DiskState
{
private:
int fscanf(FILE* infile, const char* Format, ...);
int ParseFormatVersion(const char* szFormatSignature);
bool SaveFileInfo(FileInfo* pFileInfo, const char* szFilename);
bool LoadFileInfo(FileInfo* pFileInfo, const char* szFilename, bool bFileSummary, bool bArticles);
void SaveNZBList(DownloadQueue* pDownloadQueue, FILE* outfile);
bool LoadNZBList(DownloadQueue* pDownloadQueue, FILE* infile, int iFormatVersion);
void SaveFileQueue(DownloadQueue* pDownloadQueue, FileQueue* pFileQueue, FILE* outfile);
bool LoadFileQueue(DownloadQueue* pDownloadQueue, FileQueue* pFileQueue, FILE* infile, int iFormatVersion);
void SavePostQueue(DownloadQueue* pDownloadQueue, FILE* outfile);
bool LoadPostQueue(DownloadQueue* pDownloadQueue, FILE* infile, int iFormatVersion);
bool LoadOldPostQueue(DownloadQueue* pDownloadQueue);
void SaveUrlQueue(DownloadQueue* pDownloadQueue, FILE* outfile);
bool LoadUrlQueue(DownloadQueue* pDownloadQueue, FILE* infile, int iFormatVersion);
void SaveUrlInfo(UrlInfo* pUrlInfo, FILE* outfile);
bool LoadUrlInfo(UrlInfo* pUrlInfo, FILE* infile, int iFormatVersion);
void SaveDupInfo(DupInfo* pDupInfo, FILE* outfile);
bool LoadDupInfo(DupInfo* pDupInfo, FILE* infile, int iFormatVersion);
void SaveHistory(DownloadQueue* pDownloadQueue, FILE* outfile);
bool LoadHistory(DownloadQueue* pDownloadQueue, FILE* infile, int iFormatVersion);
int FindNZBInfoIndex(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
bool SaveFeedStatus(Feeds* pFeeds, FILE* outfile);
bool LoadFeedStatus(Feeds* pFeeds, FILE* infile, int iFormatVersion);
bool SaveFeedHistory(FeedHistory* pFeedHistory, FILE* outfile);
bool LoadFeedHistory(FeedHistory* pFeedHistory, FILE* infile, int iFormatVersion);
void CalcCriticalHealth(DownloadQueue* pDownloadQueue);
bool SaveServerStats(Servers* pServers, FILE* outfile);
bool LoadServerStats(Servers* pServers, FILE* infile, int iFormatVersion);
void ConvertDupeKey(char* buf, int bufsize);
public:
bool DownloadQueueExists();
bool SaveDownloadQueue(DownloadQueue* pDownloadQueue);
bool LoadDownloadQueue(DownloadQueue* pDownloadQueue);
bool SaveFile(FileInfo* pFileInfo);
bool LoadArticles(FileInfo* pFileInfo);
void DiscardDownloadQueue();
bool DiscardFile(FileInfo* pFileInfo);
bool SaveFeeds(Feeds* pFeeds, FeedHistory* pFeedHistory);
bool LoadFeeds(Feeds* pFeeds, FeedHistory* pFeedHistory);
bool SaveStats(Servers* pServers);
bool LoadStats(Servers* pServers);
void CleanupTempDir(DownloadQueue* pDownloadQueue);
};
#endif

View File

@@ -1,7 +1,7 @@
#
# This file if part of nzbget
# This file is part of nzbget
#
# Copyright (C) 2008-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
# Copyright (C) 2008-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -22,67 +22,255 @@
bin_PROGRAMS = nzbget
nzbget_SOURCES = \
ArticleDownloader.cpp ArticleDownloader.h BinRpc.cpp BinRpc.h \
ColoredFrontend.cpp ColoredFrontend.h Connection.cpp Connection.h Decoder.cpp Decoder.h \
DiskState.cpp DiskState.h DownloadInfo.cpp DownloadInfo.h DupeCoordinator.cpp DupeCoordinator.h \
Frontend.cpp Frontend.h FeedCoordinator.cpp FeedCoordinator.h FeedFile.cpp FeedFile.h \
FeedFilter.cpp FeedFilter.h FeedInfo.cpp FeedInfo.h Log.cpp Log.h LoggableFrontend.cpp \
LoggableFrontend.h Maintenance.cpp Maintenance.h MessageBase.h NCursesFrontend.cpp \
NCursesFrontend.h NNTPConnection.cpp \
NNTPConnection.h NZBFile.cpp NZBFile.h NewsServer.cpp NewsServer.h Observer.cpp \
Observer.h Options.cpp Options.h ParChecker.cpp ParChecker.h ParRenamer.cpp ParRenamer.h \
ParCoordinator.cpp ParCoordinator.h PrePostProcessor.cpp PrePostProcessor.h QueueCoordinator.cpp \
QueueCoordinator.h QueueEditor.cpp QueueEditor.h RemoteClient.cpp RemoteClient.h \
RemoteServer.cpp RemoteServer.h Scanner.cpp Scanner.h Scheduler.cpp Scheduler.h ScriptController.cpp \
ScriptController.h ServerPool.cpp ServerPool.h svn_version.cpp TLS.cpp TLS.h Thread.cpp Thread.h \
Util.cpp Util.h XmlRpc.cpp XmlRpc.h WebDownloader.cpp WebDownloader.h WebServer.cpp WebServer.h \
UrlCoordinator.cpp UrlCoordinator.h Unpack.cpp Unpack.h nzbget.cpp nzbget.h
daemon/connect/Connection.cpp \
daemon/connect/Connection.h \
daemon/connect/TLS.cpp \
daemon/connect/TLS.h \
daemon/connect/WebDownloader.cpp \
daemon/connect/WebDownloader.h \
daemon/feed/FeedCoordinator.cpp \
daemon/feed/FeedCoordinator.h \
daemon/feed/FeedFile.cpp \
daemon/feed/FeedFile.h \
daemon/feed/FeedFilter.cpp \
daemon/feed/FeedFilter.h \
daemon/feed/FeedInfo.cpp \
daemon/feed/FeedInfo.h \
daemon/frontend/ColoredFrontend.cpp \
daemon/frontend/ColoredFrontend.h \
daemon/frontend/Frontend.cpp \
daemon/frontend/Frontend.h \
daemon/frontend/LoggableFrontend.cpp \
daemon/frontend/LoggableFrontend.h \
daemon/frontend/NCursesFrontend.cpp \
daemon/frontend/NCursesFrontend.h \
daemon/main/Maintenance.cpp \
daemon/main/Maintenance.h \
daemon/main/nzbget.cpp \
daemon/main/nzbget.h \
daemon/main/Options.cpp \
daemon/main/Options.h \
daemon/main/Scheduler.cpp \
daemon/main/Scheduler.h \
daemon/main/StackTrace.cpp \
daemon/main/StackTrace.h \
daemon/nntp/ArticleDownloader.cpp \
daemon/nntp/ArticleDownloader.h \
daemon/nntp/ArticleWriter.cpp \
daemon/nntp/ArticleWriter.h \
daemon/nntp/Decoder.cpp \
daemon/nntp/Decoder.h \
daemon/nntp/NewsServer.cpp \
daemon/nntp/NewsServer.h \
daemon/nntp/NNTPConnection.cpp \
daemon/nntp/NNTPConnection.h \
daemon/nntp/ServerPool.cpp \
daemon/nntp/ServerPool.h \
daemon/nntp/StatMeter.cpp \
daemon/nntp/StatMeter.h \
daemon/postprocess/ParChecker.cpp \
daemon/postprocess/ParChecker.h \
daemon/postprocess/ParCoordinator.cpp \
daemon/postprocess/ParCoordinator.h \
daemon/postprocess/ParRenamer.cpp \
daemon/postprocess/ParRenamer.h \
daemon/postprocess/PostScript.cpp \
daemon/postprocess/PostScript.h \
daemon/postprocess/PrePostProcessor.cpp \
daemon/postprocess/PrePostProcessor.h \
daemon/postprocess/Unpack.cpp \
daemon/postprocess/Unpack.h \
daemon/queue/DiskState.cpp \
daemon/queue/DiskState.h \
daemon/queue/DownloadInfo.cpp \
daemon/queue/DownloadInfo.h \
daemon/queue/DupeCoordinator.cpp \
daemon/queue/DupeCoordinator.h \
daemon/queue/HistoryCoordinator.cpp \
daemon/queue/HistoryCoordinator.h \
daemon/queue/NZBFile.cpp \
daemon/queue/NZBFile.h \
daemon/queue/QueueCoordinator.cpp \
daemon/queue/QueueCoordinator.h \
daemon/queue/QueueEditor.cpp \
daemon/queue/QueueEditor.h \
daemon/queue/QueueScript.cpp \
daemon/queue/QueueScript.h \
daemon/queue/Scanner.cpp \
daemon/queue/Scanner.h \
daemon/queue/UrlCoordinator.cpp \
daemon/queue/UrlCoordinator.h \
daemon/remote/BinRpc.cpp \
daemon/remote/BinRpc.h \
daemon/remote/MessageBase.h \
daemon/remote/RemoteClient.cpp \
daemon/remote/RemoteClient.h \
daemon/remote/RemoteServer.cpp \
daemon/remote/RemoteServer.h \
daemon/remote/WebServer.cpp \
daemon/remote/WebServer.h \
daemon/remote/XmlRpc.cpp \
daemon/remote/XmlRpc.h \
daemon/util/Log.cpp \
daemon/util/Log.h \
daemon/util/Observer.cpp \
daemon/util/Observer.h \
daemon/util/Script.cpp \
daemon/util/Script.h \
daemon/util/Thread.cpp \
daemon/util/Thread.h \
daemon/util/Util.cpp \
daemon/util/Util.h \
svn_version.cpp
if WITH_PAR2
nzbget_SOURCES += \
lib/par2/commandline.cpp \
lib/par2/commandline.h \
lib/par2/crc.cpp \
lib/par2/crc.h \
lib/par2/creatorpacket.cpp \
lib/par2/creatorpacket.h \
lib/par2/criticalpacket.cpp \
lib/par2/criticalpacket.h \
lib/par2/datablock.cpp \
lib/par2/datablock.h \
lib/par2/descriptionpacket.cpp \
lib/par2/descriptionpacket.h \
lib/par2/diskfile.cpp \
lib/par2/diskfile.h \
lib/par2/filechecksummer.cpp \
lib/par2/filechecksummer.h \
lib/par2/galois.cpp \
lib/par2/galois.h \
lib/par2/letype.h \
lib/par2/mainpacket.cpp \
lib/par2/mainpacket.h \
lib/par2/md5.cpp \
lib/par2/md5.h \
lib/par2/par2cmdline.h \
lib/par2/par2creatorsourcefile.cpp \
lib/par2/par2creatorsourcefile.h \
lib/par2/par2fileformat.cpp \
lib/par2/par2fileformat.h \
lib/par2/par2repairer.cpp \
lib/par2/par2repairer.h \
lib/par2/par2repairersourcefile.cpp \
lib/par2/par2repairersourcefile.h \
lib/par2/parheaders.cpp \
lib/par2/parheaders.h \
lib/par2/recoverypacket.cpp \
lib/par2/recoverypacket.h \
lib/par2/reedsolomon.cpp \
lib/par2/reedsolomon.h \
lib/par2/verificationhashtable.cpp \
lib/par2/verificationhashtable.h \
lib/par2/verificationpacket.cpp \
lib/par2/verificationpacket.h
endif
AM_CPPFLAGS = \
-I$(srcdir)/daemon/connect \
-I$(srcdir)/daemon/feed \
-I$(srcdir)/daemon/frontend \
-I$(srcdir)/daemon/main \
-I$(srcdir)/daemon/nntp \
-I$(srcdir)/daemon/postprocess \
-I$(srcdir)/daemon/queue \
-I$(srcdir)/daemon/remote \
-I$(srcdir)/daemon/util \
-I$(srcdir)/lib/par2
EXTRA_DIST = \
Makefile.cvs nzbgetd \
$(patches_FILES) $(windows_FILES) $(osx_FILES)
patches_FILES = \
libpar2-0.2-bugfixes.patch libpar2-0.2-cancel.patch \
libpar2-0.2-MSVC8.patch libsigc++-2.0.18-MSVC8.patch
Makefile.cvs \
nzbgetd \
$(windows_FILES) \
$(osx_FILES)
windows_FILES = \
win32.h NTService.cpp NTService.h nzbget.sln nzbget.vcproj nzbget-shell.bat
daemon/windows/NTService.cpp \
daemon/windows/NTService.h \
daemon/windows/win32.h \
nzbget.sln \
nzbget.vcproj \
nzbget-shell.bat
osx_FILES = \
osx/App_Prefix.pch osx/NZBGet-Info.plist \
osx/DaemonController.h osx/DaemonController.m \
osx/MainApp.h osx/MainApp.m osx/MainApp.xib \
osx/PFMoveApplication.h osx/PFMoveApplication.m \
osx/PreferencesDialog.h osx/PreferencesDialog.m osx/PreferencesDialog.xib \
osx/RPC.h osx/RPC.m osx/WebClient.h osx/WebClient.m \
osx/WelcomeDialog.h osx/WelcomeDialog.m osx/WelcomeDialog.xib \
osx/App_Prefix.pch \
osx/NZBGet-Info.plist \
osx/DaemonController.h \
osx/DaemonController.m \
osx/MainApp.h \
osx/MainApp.m \
osx/MainApp.xib \
osx/PFMoveApplication.h \
osx/PFMoveApplication.m \
osx/PreferencesDialog.h \
osx/PreferencesDialog.m \
osx/PreferencesDialog.xib \
osx/RPC.h \
osx/RPC.m \
osx/WebClient.h \
osx/WebClient.m \
osx/WelcomeDialog.h \
osx/WelcomeDialog.m \
osx/WelcomeDialog.xib \
osx/NZBGet.xcodeproj/project.pbxproj \
osx/Resources/Images/mainicon.icns osx/Resources/Images/statusicon.png \
osx/Resources/Images/statusicon@2x.png osx/Resources/Images/statusicon-inv.png \
osx/Resources/Images/statusicon-inv@2x.png osx/Resources/licenses/license-bootstrap.txt \
osx/Resources/licenses/license-jquery-GPL.txt osx/Resources/licenses/license-jquery-MIT.txt \
osx/Resources/Credits.rtf osx/Resources/Localizable.strings osx/Resources/Welcome.rtf
osx/Resources/Images/mainicon.icns \
osx/Resources/Images/statusicon.png \
osx/Resources/Images/statusicon@2x.png \
osx/Resources/licenses/license-bootstrap.txt \
osx/Resources/licenses/license-jquery-GPL.txt \
osx/Resources/licenses/license-jquery-MIT.txt \
osx/Resources/Credits.rtf \
osx/Resources/Localizable.strings \
osx/Resources/Welcome.rtf
doc_FILES = \
README ChangeLog COPYING
README \
ChangeLog \
COPYING \
lib/par2/AUTHORS \
lib/par2/README
exampleconf_FILES = \
nzbget.conf
webui_FILES = \
webui/index.html webui/index.js webui/downloads.js webui/edit.js webui/fasttable.js \
webui/history.js webui/messages.js webui/status.js webui/style.css webui/upload.js \
webui/util.js webui/config.js webui/feed.js \
webui/lib/bootstrap.js webui/lib/bootstrap.min.js webui/lib/bootstrap.css \
webui/lib/jquery.js webui/lib/jquery.min.js \
webui/img/icons.png webui/img/icons-2x.png \
webui/img/transmit.gif webui/img/transmit-file.gif webui/img/favicon.ico \
webui/img/download-anim-green-2x.png webui/img/download-anim-orange-2x.png \
webui/index.html \
webui/index.js \
webui/downloads.js \
webui/edit.js \
webui/fasttable.js \
webui/history.js \
webui/messages.js \
webui/status.js \
webui/style.css \
webui/upload.js \
webui/util.js \
webui/config.js \
webui/feed.js \
webui/lib/bootstrap.js \
webui/lib/bootstrap.min.js \
webui/lib/bootstrap.css \
webui/lib/jquery.js \
webui/lib/jquery.min.js \
webui/lib/raphael.js \
webui/lib/raphael.min.js \
webui/lib/elycharts.js \
webui/lib/elycharts.min.js \
webui/img/icons.png \
webui/img/icons-2x.png \
webui/img/transmit.gif \
webui/img/transmit-file.gif \
webui/img/favicon.ico \
webui/img/download-anim-green-2x.png \
webui/img/download-anim-orange-2x.png \
webui/img/transmit-reload-2x.gif
ppscripts_FILES = \
ppscripts/EMail.py ppscripts/Logger.py
scripts_FILES = \
scripts/EMail.py \
scripts/Logger.py
# Install
sbin_SCRIPTS = nzbgetd
@@ -91,8 +279,8 @@ exampleconfdir = $(datadir)/nzbget
dist_exampleconf_DATA = $(exampleconf_FILES)
webuidir = $(datadir)/nzbget
nobase_dist_webui_DATA = $(webui_FILES)
ppscriptsdir = $(datadir)/nzbget
nobase_dist_ppscripts_SCRIPTS = $(ppscripts_FILES)
scriptsdir = $(datadir)/nzbget
nobase_dist_scripts_SCRIPTS = $(scripts_FILES)
# Note about "sed":
# We need to make some changes in installed files.
@@ -118,7 +306,7 @@ install-data-hook:
sed 's:configuration file (typically installed:configuration file (installed:' < "$(DESTDIR)$(exampleconfdir)/nzbget.conf" > "$(DESTDIR)$(exampleconfdir)/nzbget.conf.temp"
sed 's:/usr/local/share/nzbget/nzbget.conf):$(exampleconfdir)/nzbget.conf):' < "$(DESTDIR)$(exampleconfdir)/nzbget.conf.temp" > "$(DESTDIR)$(exampleconfdir)/nzbget.conf"
sed 's:^WebDir=:WebDir=$(webuidir)/webui:' < "$(DESTDIR)$(exampleconfdir)/nzbget.conf" > "$(DESTDIR)$(exampleconfdir)/nzbget.conf.temp"
sed 's:typically installed to /usr/local/share/nzbget/ppscripts:installed to $(ppscriptsdir)/ppscripts:' < "$(DESTDIR)$(exampleconfdir)/nzbget.conf.temp" > "$(DESTDIR)$(exampleconfdir)/nzbget.conf"
sed 's:typically installed to /usr/local/share/nzbget/scripts:installed to $(scriptsdir)/scripts:' < "$(DESTDIR)$(exampleconfdir)/nzbget.conf.temp" > "$(DESTDIR)$(exampleconfdir)/nzbget.conf"
rm "$(DESTDIR)$(exampleconfdir)/nzbget.conf.temp"
# Install configuration files into /etc
@@ -178,6 +366,7 @@ clean-bak: rm *~
# Fix premissions
dist-hook:
chmod -x $(distdir)/*.cpp $(distdir)/*.h
find $(distdir)/daemon -type f -print -exec chmod -x {} \;
find $(distdir)/webui -type f -print -exec chmod -x {} \;
find $(distdir)/lib -type f -print -exec chmod -x {} \;

View File

File diff suppressed because it is too large

View File

@@ -1,997 +0,0 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef WIN32
#include "win32.h"
#endif
#ifndef DISABLE_PARCHECK
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <ctype.h>
#ifdef WIN32
#include <par2cmdline.h>
#include <par2repairer.h>
#else
#include <unistd.h>
#include <libpar2/par2cmdline.h>
#include <libpar2/par2repairer.h>
#endif
#include <algorithm>
#include "nzbget.h"
#include "ParChecker.h"
#include "ParCoordinator.h"
#include "Log.h"
#include "Options.h"
#include "Util.h"
extern Options* g_pOptions;
const char* Par2CmdLineErrStr[] = { "OK",
"data files are damaged and there is enough recovery data available to repair them",
"data files are damaged and there is insufficient recovery data available to be able to repair them",
"there was something wrong with the command line arguments",
"the PAR2 files did not contain sufficient information about the data files to be able to verify them",
"repair completed but the data files still appear to be damaged",
"an error occured when accessing files",
"internal error occurred",
"out of memory" };
class Repairer : public Par2Repairer
{
private:
CommandLine commandLine;
public:
Result PreProcess(const char *szParFilename);
Result Process(bool dorepair);
friend class ParChecker;
};
Result Repairer::PreProcess(const char *szParFilename)
{
#ifdef HAVE_PAR2_BUGFIXES_V2
// Ensure linking against the patched version of libpar2
BugfixesPatchVersion2();
#endif
if (g_pOptions->GetParScan() == Options::psFull)
{
char szWildcardParam[1024];
strncpy(szWildcardParam, szParFilename, 1024);
szWildcardParam[1024-1] = '\0';
char* szBasename = Util::BaseFileName(szWildcardParam);
if (szBasename != szWildcardParam && strlen(szBasename) > 0)
{
szBasename[0] = '*';
szBasename[1] = '\0';
}
const char* argv[] = { "par2", "r", "-v", "-v", szParFilename, szWildcardParam };
if (!commandLine.Parse(6, (char**)argv))
{
return eInvalidCommandLineArguments;
}
}
else
{
const char* argv[] = { "par2", "r", "-v", "-v", szParFilename };
if (!commandLine.Parse(5, (char**)argv))
{
return eInvalidCommandLineArguments;
}
}
return Par2Repairer::PreProcess(commandLine);
}
Result Repairer::Process(bool dorepair)
{
return Par2Repairer::Process(commandLine, dorepair);
}
class MissingFilesComparator
{
private:
const char* m_szBaseParFilename;
public:
MissingFilesComparator(const char* szBaseParFilename) : m_szBaseParFilename(szBaseParFilename) {}
bool operator()(CommandLine::ExtraFile* pFirst, CommandLine::ExtraFile* pSecond) const;
};
/*
* Files with the same name as in par-file (and a differnt extension) are
* placed at the top of the list to be scanned first.
*/
bool MissingFilesComparator::operator()(CommandLine::ExtraFile* pFile1, CommandLine::ExtraFile* pFile2) const
{
char name1[1024];
strncpy(name1, Util::BaseFileName(pFile1->FileName().c_str()), 1024);
name1[1024-1] = '\0';
if (char* ext = strrchr(name1, '.')) *ext = '\0'; // trim extension
char name2[1024];
strncpy(name2, Util::BaseFileName(pFile2->FileName().c_str()), 1024);
name2[1024-1] = '\0';
if (char* ext = strrchr(name2, '.')) *ext = '\0'; // trim extension
return strcmp(name1, m_szBaseParFilename) == 0 && strcmp(name1, name2) != 0;
}
ParChecker::ParChecker()
{
debug("Creating ParChecker");
m_eStatus = psFailed;
m_szDestDir = NULL;
m_szNZBName = NULL;
m_szParFilename = NULL;
m_szInfoName = NULL;
m_szErrMsg = NULL;
m_szProgressLabel = (char*)malloc(1024);
m_pRepairer = NULL;
m_iFileProgress = 0;
m_iStageProgress = 0;
m_iExtraFiles = 0;
m_bVerifyingExtraFiles = false;
m_bCancelled = false;
m_eStage = ptLoadingPars;
}
ParChecker::~ParChecker()
{
debug("Destroying ParChecker");
free(m_szDestDir);
free(m_szNZBName);
free(m_szInfoName);
free(m_szProgressLabel);
Cleanup();
}
void ParChecker::Cleanup()
{
delete (Repairer*)m_pRepairer;
m_pRepairer = NULL;
for (FileList::iterator it = m_QueuedParFiles.begin(); it != m_QueuedParFiles.end() ;it++)
{
free(*it);
}
m_QueuedParFiles.clear();
for (FileList::iterator it = m_ProcessedFiles.begin(); it != m_ProcessedFiles.end() ;it++)
{
free(*it);
}
m_ProcessedFiles.clear();
m_sourceFiles.clear();
free(m_szErrMsg);
m_szErrMsg = NULL;
}
void ParChecker::SetDestDir(const char * szDestDir)
{
free(m_szDestDir);
m_szDestDir = strdup(szDestDir);
}
void ParChecker::SetNZBName(const char * szNZBName)
{
free(m_szNZBName);
m_szNZBName = strdup(szNZBName);
}
void ParChecker::SetInfoName(const char * szInfoName)
{
free(m_szInfoName);
m_szInfoName = strdup(szInfoName);
}
void ParChecker::Run()
{
ParCoordinator::FileList fileList;
if (!ParCoordinator::FindMainPars(m_szDestDir, &fileList))
{
PrintMessage(Message::mkError, "Could not start par-check for %s. Could not find any par-files", m_szNZBName);
m_eStatus = psFailed;
Completed();
return;
}
m_eStatus = psRepairNotNeeded;
m_bCancelled = false;
for (ParCoordinator::FileList::iterator it = fileList.begin(); it != fileList.end(); it++)
{
char* szParFilename = *it;
debug("Found par: %s", szParFilename);
if (!IsStopped() && !m_bCancelled)
{
char szFullParFilename[1024];
snprintf(szFullParFilename, 1024, "%s%c%s", m_szDestDir, (int)PATH_SEPARATOR, szParFilename);
szFullParFilename[1024-1] = '\0';
char szInfoName[1024];
int iBaseLen = 0;
ParCoordinator::ParseParFilename(szParFilename, &iBaseLen, NULL);
int maxlen = iBaseLen < 1024 ? iBaseLen : 1024 - 1;
strncpy(szInfoName, szParFilename, maxlen);
szInfoName[maxlen] = '\0';
char szParInfoName[1024];
snprintf(szParInfoName, 1024, "%s%c%s", m_szNZBName, (int)PATH_SEPARATOR, szInfoName);
szParInfoName[1024-1] = '\0';
SetInfoName(szParInfoName);
EStatus eStatus = RunParCheck(szFullParFilename);
// accumulate total status, the worst status has priority
if (m_eStatus > eStatus)
{
m_eStatus = eStatus;
}
if (g_pOptions->GetCreateBrokenLog())
{
WriteBrokenLog(eStatus);
}
}
free(szParFilename);
}
Completed();
}
ParChecker::EStatus ParChecker::RunParCheck(const char* szParFilename)
{
Cleanup();
m_szParFilename = szParFilename;
m_eStage = ptLoadingPars;
m_iProcessedFiles = 0;
m_iExtraFiles = 0;
m_bVerifyingExtraFiles = false;
EStatus eStatus = psFailed;
PrintMessage(Message::mkInfo, "Verifying %s", m_szInfoName);
debug("par: %s", m_szParFilename);
snprintf(m_szProgressLabel, 1024, "Verifying %s", m_szInfoName);
m_szProgressLabel[1024-1] = '\0';
m_iFileProgress = 0;
m_iStageProgress = 0;
UpdateProgress();
Result res = (Result)PreProcessPar();
if (IsStopped() || res != eSuccess)
{
Cleanup();
return psFailed;
}
m_eStage = ptVerifyingSources;
Repairer* pRepairer = (Repairer*)m_pRepairer;
res = pRepairer->Process(false);
debug("ParChecker: Process-result=%i", res);
if (!IsStopped() && pRepairer->missingfilecount > 0 && g_pOptions->GetParScan() == Options::psAuto && AddMissingFiles())
{
res = pRepairer->Process(false);
debug("ParChecker: Process-result=%i", res);
}
if (!IsStopped() && res == eRepairNotPossible && CheckSplittedFragments())
{
pRepairer->UpdateVerificationResults();
res = pRepairer->Process(false);
debug("ParChecker: Process-result=%i", res);
}
if (!IsStopped() && res == eRepairNotPossible)
{
res = (Result)ProcessMorePars();
}
if (IsStopped())
{
Cleanup();
return psFailed;
}
eStatus = psFailed;
if (res == eSuccess)
{
PrintMessage(Message::mkInfo, "Repair not needed for %s", m_szInfoName);
eStatus = psRepairNotNeeded;
}
else if (res == eRepairPossible)
{
eStatus = psRepairPossible;
if (g_pOptions->GetParRepair())
{
PrintMessage(Message::mkInfo, "Repairing %s", m_szInfoName);
SaveSourceList();
snprintf(m_szProgressLabel, 1024, "Repairing %s", m_szInfoName);
m_szProgressLabel[1024-1] = '\0';
m_iFileProgress = 0;
m_iStageProgress = 0;
m_iProcessedFiles = 0;
m_eStage = ptRepairing;
m_iFilesToRepair = pRepairer->damagedfilecount + pRepairer->missingfilecount;
UpdateProgress();
res = pRepairer->Process(true);
debug("ParChecker: Process-result=%i", res);
if (res == eSuccess)
{
PrintMessage(Message::mkInfo, "Successfully repaired %s", m_szInfoName);
eStatus = psRepaired;
DeleteLeftovers();
}
}
else
{
PrintMessage(Message::mkInfo, "Repair possible for %s", m_szInfoName);
}
}
if (m_bCancelled)
{
if (m_eStage >= ptRepairing)
{
PrintMessage(Message::mkWarning, "Repair cancelled for %s", m_szInfoName);
m_szErrMsg = strdup("repair cancelled");
eStatus = psRepairPossible;
}
else
{
PrintMessage(Message::mkWarning, "Par-check cancelled for %s", m_szInfoName);
m_szErrMsg = strdup("par-check cancelled");
eStatus = psFailed;
}
}
else if (eStatus == psFailed)
{
if (!m_szErrMsg && (int)res >= 0 && (int)res <= 8)
{
m_szErrMsg = strdup(Par2CmdLineErrStr[res]);
}
PrintMessage(Message::mkError, "Repair failed for %s: %s", m_szInfoName, m_szErrMsg ? m_szErrMsg : "");
}
Cleanup();
return eStatus;
}
int ParChecker::PreProcessPar()
{
Result res = eRepairFailed;
while (!IsStopped() && res != eSuccess)
{
Cleanup();
Repairer* pRepairer = new Repairer();
m_pRepairer = pRepairer;
pRepairer->sig_filename.connect(sigc::mem_fun(*this, &ParChecker::signal_filename));
pRepairer->sig_progress.connect(sigc::mem_fun(*this, &ParChecker::signal_progress));
pRepairer->sig_done.connect(sigc::mem_fun(*this, &ParChecker::signal_done));
res = pRepairer->PreProcess(m_szParFilename);
debug("ParChecker: PreProcess-result=%i", res);
if (IsStopped())
{
PrintMessage(Message::mkError, "Could not verify %s: stopping", m_szInfoName);
m_szErrMsg = strdup("par-check was stopped");
return eRepairFailed;
}
if (res == eInvalidCommandLineArguments)
{
PrintMessage(Message::mkError, "Could not start par-check for %s. Par-file: %s", m_szInfoName, m_szParFilename);
m_szErrMsg = strdup("Command line could not be parsed");
return res;
}
if (res != eSuccess)
{
PrintMessage(Message::mkWarning, "Could not verify %s: par2-file could not be processed", m_szInfoName);
PrintMessage(Message::mkInfo, "Requesting more par2-files for %s", m_szInfoName);
bool bHasMorePars = LoadMainParBak();
if (!bHasMorePars)
{
PrintMessage(Message::mkWarning, "No more par2-files found");
break;
}
}
}
if (res != eSuccess)
{
PrintMessage(Message::mkError, "Could not verify %s: par2-file could not be processed", m_szInfoName);
m_szErrMsg = strdup("par2-file could not be processed");
return res;
}
return res;
}
bool ParChecker::LoadMainParBak()
{
while (!IsStopped())
{
m_mutexQueuedParFiles.Lock();
bool hasMorePars = !m_QueuedParFiles.empty();
for (FileList::iterator it = m_QueuedParFiles.begin(); it != m_QueuedParFiles.end() ;it++)
{
free(*it);
}
m_QueuedParFiles.clear();
m_mutexQueuedParFiles.Unlock();
if (hasMorePars)
{
return true;
}
int iBlockFound = 0;
bool requested = RequestMorePars(1, &iBlockFound);
if (requested)
{
strncpy(m_szProgressLabel, "Awaiting additional par-files", 1024);
m_szProgressLabel[1024-1] = '\0';
m_iFileProgress = 0;
UpdateProgress();
}
m_mutexQueuedParFiles.Lock();
hasMorePars = !m_QueuedParFiles.empty();
m_bQueuedParFilesChanged = false;
m_mutexQueuedParFiles.Unlock();
if (!requested && !hasMorePars)
{
return false;
}
if (!hasMorePars)
{
// wait until new files are added by "AddParFile" or a change is signaled by "QueueChanged"
bool bQueuedParFilesChanged = false;
while (!bQueuedParFilesChanged && !IsStopped())
{
m_mutexQueuedParFiles.Lock();
bQueuedParFilesChanged = m_bQueuedParFilesChanged;
m_mutexQueuedParFiles.Unlock();
usleep(100 * 1000);
}
}
}
return false;
}
int ParChecker::ProcessMorePars()
{
Result res = eRepairNotPossible;
Repairer* pRepairer = (Repairer*)m_pRepairer;
bool bMoreFilesLoaded = true;
while (!IsStopped() && res == eRepairNotPossible)
{
int missingblockcount = pRepairer->missingblockcount - pRepairer->recoverypacketmap.size();
if (bMoreFilesLoaded)
{
PrintMessage(Message::mkInfo, "Need more %i par-block(s) for %s", missingblockcount, m_szInfoName);
}
m_mutexQueuedParFiles.Lock();
bool hasMorePars = !m_QueuedParFiles.empty();
m_mutexQueuedParFiles.Unlock();
if (!hasMorePars)
{
int iBlockFound = 0;
bool requested = RequestMorePars(missingblockcount, &iBlockFound);
if (requested)
{
strncpy(m_szProgressLabel, "Awaiting additional par-files", 1024);
m_szProgressLabel[1024-1] = '\0';
m_iFileProgress = 0;
UpdateProgress();
}
m_mutexQueuedParFiles.Lock();
hasMorePars = !m_QueuedParFiles.empty();
m_bQueuedParFilesChanged = false;
m_mutexQueuedParFiles.Unlock();
if (!requested && !hasMorePars)
{
m_szErrMsg = (char*)malloc(1024);
snprintf(m_szErrMsg, 1024, "not enough par-blocks, %i block(s) needed, but %i block(s) available", missingblockcount, iBlockFound);
m_szErrMsg[1024-1] = '\0';
break;
}
if (!hasMorePars)
{
// wait until new files are added by "AddParFile" or a change is signaled by "QueueChanged"
bool bQueuedParFilesChanged = false;
while (!bQueuedParFilesChanged && !IsStopped())
{
m_mutexQueuedParFiles.Lock();
bQueuedParFilesChanged = m_bQueuedParFilesChanged;
m_mutexQueuedParFiles.Unlock();
usleep(100 * 1000);
}
}
}
if (IsStopped())
{
break;
}
bMoreFilesLoaded = LoadMorePars();
if (bMoreFilesLoaded)
{
pRepairer->UpdateVerificationResults();
res = pRepairer->Process(false);
debug("ParChecker: Process-result=%i", res);
}
}
return res;
}
bool ParChecker::LoadMorePars()
{
m_mutexQueuedParFiles.Lock();
FileList moreFiles;
moreFiles.assign(m_QueuedParFiles.begin(), m_QueuedParFiles.end());
m_QueuedParFiles.clear();
m_mutexQueuedParFiles.Unlock();
for (FileList::iterator it = moreFiles.begin(); it != moreFiles.end() ;it++)
{
char* szParFilename = *it;
bool loadedOK = ((Repairer*)m_pRepairer)->LoadPacketsFromFile(szParFilename);
if (loadedOK)
{
PrintMessage(Message::mkInfo, "File %s successfully loaded for par-check", Util::BaseFileName(szParFilename), m_szInfoName);
}
else
{
PrintMessage(Message::mkInfo, "Could not load file %s for par-check", Util::BaseFileName(szParFilename), m_szInfoName);
}
free(szParFilename);
}
return !moreFiles.empty();
}
void ParChecker::AddParFile(const char * szParFilename)
{
m_mutexQueuedParFiles.Lock();
m_QueuedParFiles.push_back(strdup(szParFilename));
m_bQueuedParFilesChanged = true;
m_mutexQueuedParFiles.Unlock();
}
void ParChecker::QueueChanged()
{
m_mutexQueuedParFiles.Lock();
m_bQueuedParFilesChanged = true;
m_mutexQueuedParFiles.Unlock();
}
bool ParChecker::CheckSplittedFragments()
{
bool bFragmentsAdded = false;
for (std::vector<Par2RepairerSourceFile*>::iterator it = ((Repairer*)m_pRepairer)->sourcefiles.begin();
it != ((Repairer*)m_pRepairer)->sourcefiles.end(); it++)
{
Par2RepairerSourceFile *sourcefile = *it;
if (AddSplittedFragments(sourcefile->TargetFileName().c_str()))
{
bFragmentsAdded = true;
}
}
return bFragmentsAdded;
}
bool ParChecker::AddSplittedFragments(const char* szFilename)
{
char szDirectory[1024];
strncpy(szDirectory, szFilename, 1024);
szDirectory[1024-1] = '\0';
char* szBasename = Util::BaseFileName(szDirectory);
if (szBasename == szDirectory)
{
return false;
}
szBasename[-1] = '\0';
int iBaseLen = strlen(szBasename);
std::list<CommandLine::ExtraFile> extrafiles;
DirBrowser dir(szDirectory);
while (const char* filename = dir.Next())
{
if (!strncasecmp(filename, szBasename, iBaseLen))
{
const char* p = filename + iBaseLen;
if (*p == '.')
{
for (p++; *p && strchr("0123456789", *p); p++) ;
if (!*p)
{
debug("Found splitted fragment %s", filename);
char fullfilename[1024];
snprintf(fullfilename, 1024, "%s%c%s", szDirectory, PATH_SEPARATOR, filename);
fullfilename[1024-1] = '\0';
CommandLine::ExtraFile extrafile(fullfilename, Util::FileSize(fullfilename));
extrafiles.push_back(extrafile);
}
}
}
}
bool bFragmentsAdded = false;
if (!extrafiles.empty())
{
m_iExtraFiles += extrafiles.size();
m_bVerifyingExtraFiles = true;
bFragmentsAdded = ((Repairer*)m_pRepairer)->VerifyExtraFiles(extrafiles);
m_bVerifyingExtraFiles = false;
}
return bFragmentsAdded;
}
bool ParChecker::AddMissingFiles()
{
PrintMessage(Message::mkInfo, "Performing extra par-scan for %s", m_szInfoName);
char szDirectory[1024];
strncpy(szDirectory, m_szParFilename, 1024);
szDirectory[1024-1] = '\0';
char* szBasename = Util::BaseFileName(szDirectory);
if (szBasename == szDirectory)
{
return false;
}
szBasename[-1] = '\0';
std::list<CommandLine::ExtraFile*> extrafiles;
DirBrowser dir(szDirectory);
while (const char* filename = dir.Next())
{
if (strcmp(filename, ".") && strcmp(filename, "..") && strcmp(filename, "_brokenlog.txt"))
{
bool bAlreadyScanned = false;
for (FileList::iterator it = m_ProcessedFiles.begin(); it != m_ProcessedFiles.end(); it++)
{
const char* szProcessedFilename = *it;
if (!strcasecmp(Util::BaseFileName(szProcessedFilename), filename))
{
bAlreadyScanned = true;
break;
}
}
if (!bAlreadyScanned)
{
char fullfilename[1024];
snprintf(fullfilename, 1024, "%s%c%s", szDirectory, PATH_SEPARATOR, filename);
fullfilename[1024-1] = '\0';
extrafiles.push_back(new CommandLine::ExtraFile(fullfilename, Util::FileSize(fullfilename)));
}
}
}
// Sort the list
char* szBaseParFilename = strdup(Util::BaseFileName(m_szParFilename));
if (char* ext = strrchr(szBaseParFilename, '.')) *ext = '\0'; // trim extension
extrafiles.sort(MissingFilesComparator(szBaseParFilename));
free(szBaseParFilename);
// Scan files
bool bFilesAdded = false;
if (!extrafiles.empty())
{
m_iExtraFiles += extrafiles.size();
m_bVerifyingExtraFiles = true;
std::list<CommandLine::ExtraFile> extrafiles1;
// adding files one by one until all missing files are found
while (!IsStopped() && !m_bCancelled && extrafiles.size() > 0 && ((Repairer*)m_pRepairer)->missingfilecount > 0)
{
CommandLine::ExtraFile* pExtraFile = extrafiles.front();
extrafiles.pop_front();
extrafiles1.clear();
extrafiles1.push_back(*pExtraFile);
bFilesAdded = ((Repairer*)m_pRepairer)->VerifyExtraFiles(extrafiles1) || bFilesAdded;
((Repairer*)m_pRepairer)->UpdateVerificationResults();
delete pExtraFile;
}
m_bVerifyingExtraFiles = false;
// free any remaining objects
for (std::list<CommandLine::ExtraFile*>::iterator it = extrafiles.begin(); it != extrafiles.end() ;it++)
{
delete *it;
}
}
return bFilesAdded;
}
void ParChecker::signal_filename(std::string str)
{
const char* szStageMessage[] = { "Loading file", "Verifying file", "Repairing file", "Verifying repaired file" };
if (m_eStage == ptRepairing)
{
m_eStage = ptVerifyingRepaired;
}
PrintMessage(Message::mkInfo, "%s %s", szStageMessage[m_eStage], str.c_str());
if (m_eStage == ptLoadingPars || m_eStage == ptVerifyingSources)
{
m_ProcessedFiles.push_back(strdup(str.c_str()));
}
snprintf(m_szProgressLabel, 1024, "%s %s", szStageMessage[m_eStage], str.c_str());
m_szProgressLabel[1024-1] = '\0';
m_iFileProgress = 0;
UpdateProgress();
}
void ParChecker::signal_progress(double progress)
{
m_iFileProgress = (int)progress;
if (m_eStage == ptRepairing)
{
// calculating repair-data for all files
m_iStageProgress = m_iFileProgress;
}
else
{
// processing individual files
int iTotalFiles = 0;
if (m_eStage == ptVerifyingRepaired)
{
// repairing individual files
iTotalFiles = m_iFilesToRepair;
}
else
{
// verifying individual files
iTotalFiles = ((Repairer*)m_pRepairer)->sourcefiles.size() + m_iExtraFiles;
}
if (iTotalFiles > 0)
{
if (m_iFileProgress < 1000)
{
m_iStageProgress = (m_iProcessedFiles * 1000 + m_iFileProgress) / iTotalFiles;
}
else
{
m_iStageProgress = m_iProcessedFiles * 1000 / iTotalFiles;
}
}
else
{
m_iStageProgress = 0;
}
}
debug("Current-progres: %i, Total-progress: %i", m_iFileProgress, m_iStageProgress);
UpdateProgress();
}
void ParChecker::signal_done(std::string str, int available, int total)
{
m_iProcessedFiles++;
if (m_eStage == ptVerifyingSources)
{
if (available < total && !m_bVerifyingExtraFiles)
{
bool bFileExists = true;
for (std::vector<Par2RepairerSourceFile*>::iterator it = ((Repairer*)m_pRepairer)->sourcefiles.begin();
it != ((Repairer*)m_pRepairer)->sourcefiles.end(); it++)
{
Par2RepairerSourceFile *sourcefile = *it;
if (sourcefile && !strcmp(str.c_str(), Util::BaseFileName(sourcefile->TargetFileName().c_str())) &&
!sourcefile->GetTargetExists())
{
bFileExists = false;
break;
}
}
if (bFileExists)
{
PrintMessage(Message::mkWarning, "File %s has %i bad block(s) of total %i block(s)", str.c_str(), total - available, total);
}
else
{
PrintMessage(Message::mkWarning, "File %s with %i block(s) is missing", str.c_str(), total);
}
}
}
}
void ParChecker::Cancel()
{
#ifdef HAVE_PAR2_CANCEL
((Repairer*)m_pRepairer)->cancelled = true;
m_bCancelled = true;
#else
PrintMessage(Message::mkError, "Could not cancel par-repair. The program was compiled using version of libpar2 which doesn't support cancelling of par-repair. Please apply libpar2-patches supplied with NZBGet and recompile libpar2 and NZBGet (see README for details).");
#endif
}
void ParChecker::WriteBrokenLog(EStatus eStatus)
{
char szBrokenLogName[1024];
snprintf(szBrokenLogName, 1024, "%s%c_brokenlog.txt", m_szDestDir, (int)PATH_SEPARATOR);
szBrokenLogName[1024-1] = '\0';
if (eStatus != psRepairNotNeeded || Util::FileExists(szBrokenLogName))
{
FILE* file = fopen(szBrokenLogName, "ab");
if (file)
{
if (eStatus == psFailed)
{
if (m_bCancelled)
{
fprintf(file, "Repair cancelled for %s\n", m_szInfoName);
}
else
{
fprintf(file, "Repair failed for %s: %s\n", m_szInfoName, m_szErrMsg ? m_szErrMsg : "");
}
}
else if (eStatus == psRepairPossible)
{
fprintf(file, "Repair possible for %s\n", m_szInfoName);
}
else if (eStatus == psRepaired)
{
fprintf(file, "Successfully repaired %s\n", m_szInfoName);
}
else if (eStatus == psRepairNotNeeded)
{
fprintf(file, "Repair not needed for %s\n", m_szInfoName);
}
fclose(file);
}
else
{
PrintMessage(Message::mkError, "Could not open file %s", szBrokenLogName);
}
}
}
void ParChecker::SaveSourceList()
{
// Buliding a list of DiskFile-objects, marked as source-files
for (std::vector<Par2RepairerSourceFile*>::iterator it = ((Repairer*)m_pRepairer)->sourcefiles.begin();
it != ((Repairer*)m_pRepairer)->sourcefiles.end(); it++)
{
Par2RepairerSourceFile* sourcefile = (Par2RepairerSourceFile*)*it;
vector<DataBlock>::iterator it2 = sourcefile->SourceBlocks();
for (int i = 0; i < (int)sourcefile->BlockCount(); i++, it2++)
{
DataBlock block = *it2;
DiskFile* pSourceFile = block.GetDiskFile();
if (pSourceFile &&
std::find(m_sourceFiles.begin(), m_sourceFiles.end(), pSourceFile) == m_sourceFiles.end())
{
m_sourceFiles.push_back(pSourceFile);
}
}
}
}
void ParChecker::DeleteLeftovers()
{
// After repairing check if all DiskFile-objects saved by "SaveSourceList()" have
// corresponding target-files. If not - the source file was replaced. In this case
// the DiskFile-object points to the renamed bak-file, which we can delete.
for (SourceList::iterator it = m_sourceFiles.begin(); it != m_sourceFiles.end(); it++)
{
DiskFile* pSourceFile = (DiskFile*)*it;
bool bFound = false;
for (std::vector<Par2RepairerSourceFile*>::iterator it2 = ((Repairer*)m_pRepairer)->sourcefiles.begin();
it2 != ((Repairer*)m_pRepairer)->sourcefiles.end(); it2++)
{
Par2RepairerSourceFile* sourcefile = *it2;
if (sourcefile->GetTargetFile() == pSourceFile)
{
bFound = true;
break;
}
}
if (!bFound)
{
PrintMessage(Message::mkInfo, "Deleting file %s", Util::BaseFileName(pSourceFile->FileName().c_str()));
remove(pSourceFile->FileName().c_str());
}
}
}
#endif

View File

File diff suppressed because it is too large

View File

@@ -1,143 +0,0 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef PREPOSTPROCESSOR_H
#define PREPOSTPROCESSOR_H
#include <deque>
#include "Thread.h"
#include "Observer.h"
#include "DownloadInfo.h"
#include "ParCoordinator.h"
#include "DupeCoordinator.h"
class PrePostProcessor : public Thread
{
public:
// NOTE: changes to this enum must be synced with "eRemoteEditAction" in unit "MessageBase.h"
enum EEditAction
{
eaPostMoveOffset = 51, // move post to m_iOffset relative to the current position in post-queue
eaPostMoveTop,
eaPostMoveBottom,
eaPostDelete,
eaHistoryDelete,
eaHistoryFinalDelete,
eaHistoryReturn,
eaHistoryProcess,
eaHistoryRedownload,
eaHistorySetParameter,
eaHistorySetDupeKey,
eaHistorySetDupeScore,
eaHistorySetDupeMode,
eaHistorySetDupeBackup,
eaHistoryMarkBad,
eaHistoryMarkGood
};
private:
class QueueCoordinatorObserver: public Observer
{
public:
PrePostProcessor* m_pOwner;
virtual void Update(Subject* Caller, void* Aspect) { m_pOwner->QueueCoordinatorUpdate(Caller, Aspect); }
};
class PostParCoordinator: public ParCoordinator
{
private:
PrePostProcessor* m_pOwner;
protected:
virtual bool PauseDownload() { return m_pOwner->PauseDownload(); }
virtual bool UnpauseDownload() { return m_pOwner->UnpauseDownload(); }
friend class PrePostProcessor;
};
class PostDupeCoordinator: public DupeCoordinator
{
private:
PrePostProcessor* m_pOwner;
protected:
virtual void HistoryRedownload(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo);
virtual void DeleteQueuedFile(const char* szQueuedFile) { m_pOwner->DeleteQueuedFile(szQueuedFile); }
friend class PrePostProcessor;
};
private:
PostParCoordinator m_ParCoordinator;
PostDupeCoordinator m_DupeCoordinator;
QueueCoordinatorObserver m_QueueCoordinatorObserver;
bool m_bHasMoreJobs;
bool m_bSchedulerPauseChanged;
bool m_bSchedulerPause;
bool m_bPostPause;
const char* m_szPauseReason;
bool IsNZBFileCompleted(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo,
bool bIgnorePausedPars, bool bAllowOnlyOneDeleted);
void CheckPostQueue();
void JobCompleted(DownloadQueue* pDownloadQueue, PostInfo* pPostInfo);
void StartJob(DownloadQueue* pDownloadQueue, PostInfo* pPostInfo);
void SaveQueue(DownloadQueue* pDownloadQueue);
void SanitisePostQueue(PostQueue* pPostQueue);
void CheckDiskSpace();
void ApplySchedulerState();
void CheckScheduledResume();
void UpdatePauseState(bool bNeedPause, const char* szReason);
bool PauseDownload();
bool UnpauseDownload();
void NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void NZBAdded(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void NZBDownloaded(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void NZBDeleted(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void NZBCompleted(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo, bool bSaveQueue);
void DeleteQueuedFile(const char* szQueuedFile);
int FindGroupID(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
bool PostQueueMove(IDList* pIDList, EEditAction eAction, int iOffset);
bool PostQueueDelete(IDList* pIDList);
bool HistoryEdit(IDList* pIDList, EEditAction eAction, int iOffset, const char* szText);
void HistoryDelete(DownloadQueue* pDownloadQueue, HistoryList::iterator itHistory, HistoryInfo* pHistoryInfo, bool bFinal);
void HistoryReturn(DownloadQueue* pDownloadQueue, HistoryList::iterator itHistory, HistoryInfo* pHistoryInfo, bool bReprocess);
void HistoryRedownload(DownloadQueue* pDownloadQueue, HistoryList::iterator itHistory, HistoryInfo* pHistoryInfo, bool bRestorePauseState);
void HistorySetParameter(HistoryInfo* pHistoryInfo, const char* szText);
void HistorySetDupeParam(HistoryInfo* pHistoryInfo, EEditAction eAction, const char* szText);
void HistoryTransformToDup(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo, int rindex);
void CheckHistory();
void Cleanup();
FileInfo* GetQueueGroup(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void DeletePostThread(PostInfo* pPostInfo);
public:
PrePostProcessor();
virtual ~PrePostProcessor();
virtual void Run();
virtual void Stop();
void QueueCoordinatorUpdate(Subject* Caller, void* Aspect);
bool HasMoreJobs() { return m_bHasMoreJobs; }
bool QueueEditList(IDList* pIDList, EEditAction eAction, int iOffset, const char* szText);
};
#endif

View File

File diff suppressed because it is too large

View File

@@ -1,138 +0,0 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef QUEUECOORDINATOR_H
#define QUEUECOORDINATOR_H
#include <deque>
#include <list>
#include <time.h>
#include "Thread.h"
#include "NZBFile.h"
#include "ArticleDownloader.h"
#include "DownloadInfo.h"
#include "Observer.h"
#include "QueueEditor.h"
#include "NNTPConnection.h"
class QueueCoordinator : public Thread, public Observer, public Subject, public DownloadSpeedMeter, public DownloadQueueHolder
{
public:
typedef std::list<ArticleDownloader*> ActiveDownloads;
enum EAspectAction
{
eaNZBFileFound,
eaNZBFileAdded,
eaFileCompleted,
eaFileDeleted
};
struct Aspect
{
EAspectAction eAction;
DownloadQueue* pDownloadQueue;
NZBInfo* pNZBInfo;
FileInfo* pFileInfo;
};
private:
DownloadQueue m_DownloadQueue;
ActiveDownloads m_ActiveDownloads;
QueueEditor m_QueueEditor;
Mutex m_mutexDownloadQueue;
bool m_bHasMoreJobs;
int m_iDownloadsLimit;
int m_iServerConfigGeneration;
// statistics
static const int SPEEDMETER_SLOTS = 30;
static const int SPEEDMETER_SLOTSIZE = 1; //Split elapsed time into this number of secs.
int m_iSpeedBytes[SPEEDMETER_SLOTS];
int m_iSpeedTotalBytes;
int m_iSpeedTime[SPEEDMETER_SLOTS];
int m_iSpeedStartTime;
time_t m_tSpeedCorrection;
#ifdef HAVE_SPINLOCK
SpinLock m_spinlockSpeed;
#else
Mutex m_mutexSpeed;
#endif
int m_iSpeedBytesIndex;
long long m_iAllBytes;
time_t m_tStartServer;
time_t m_tLastCheck;
time_t m_tStartDownload;
time_t m_tPausedFrom;
bool m_bStandBy;
Mutex m_mutexStat;
bool GetNextArticle(FileInfo* &pFileInfo, ArticleInfo* &pArticleInfo);
void StartArticleDownload(FileInfo* pFileInfo, ArticleInfo* pArticleInfo, NNTPConnection* pConnection);
void ArticleCompleted(ArticleDownloader* pArticleDownloader);
void DeleteFileInfo(FileInfo* pFileInfo, bool bCompleted);
void StatFileInfo(FileInfo* pFileInfo, bool bCompleted);
void CheckHealth(FileInfo* pFileInfo);
void ResetHangingDownloads();
void ResetSpeedStat();
void EnterLeaveStandBy(bool bEnter);
void AdjustStartTime();
void AdjustDownloadsLimit();
public:
QueueCoordinator();
virtual ~QueueCoordinator();
virtual void Run();
virtual void Stop();
void Update(Subject* Caller, void* Aspect);
// statistics
long long CalcRemainingSize();
virtual int CalcCurrentDownloadSpeed();
virtual void AddSpeedReading(int iBytes);
void CalcStat(int* iUpTimeSec, int* iDnTimeSec, long long* iAllBytes, bool* bStandBy);
// Editing the queue
DownloadQueue* LockQueue();
void UnlockQueue() ;
void AddNZBFileToQueue(NZBFile* pNZBFile, bool bAddFirst);
void AddFileInfosToFileQueue(NZBFile* pNZBFile, FileQueue* pFileQueue, bool bAddFirst);
bool HasMoreJobs() { return m_bHasMoreJobs; }
bool GetStandBy() { return m_bStandBy; }
bool DeleteQueueEntry(FileInfo* pFileInfo);
bool SetQueueEntryNZBCategory(NZBInfo* pNZBInfo, const char* szCategory);
bool SetQueueEntryNZBName(NZBInfo* pNZBInfo, const char* szName);
bool MergeQueueEntries(NZBInfo* pDestNZBInfo, NZBInfo* pSrcNZBInfo);
bool SplitQueueEntries(FileQueue* pFileList, const char* szName, NZBInfo** pNewNZBInfo);
void DiscardDiskFile(FileInfo* pFileInfo);
QueueEditor* GetQueueEditor() { return &m_QueueEditor; }
void LogDebugInfo();
};
#endif
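
Note: the speed-meter members in the header above (SPEEDMETER_SLOTS, m_iSpeedBytes, m_iSpeedTotalBytes, m_iSpeedBytesIndex) describe a per-second sliding window; the corresponding implementation file is not shown in this compare view. The following is only a minimal standalone sketch of how such a ring buffer is commonly driven, with illustrative names, not the actual implementation:

    #include <time.h>

    static const int SPEEDMETER_SLOTS = 30;
    static int iSpeedBytes[SPEEDMETER_SLOTS];     // bytes counted per one-second slot
    static int iSpeedTotalBytes = 0;              // running sum over all slots
    static int iSpeedBytesIndex = 0;
    static time_t tLastSlotTime = 0;

    void AddSpeedReading(int iBytes)
    {
        time_t tNow = time(NULL);
        if (tLastSlotTime == 0 || tNow - tLastSlotTime >= SPEEDMETER_SLOTS)
        {
            // first call, or the whole window has expired: start over
            for (int i = 0; i < SPEEDMETER_SLOTS; i++)
            {
                iSpeedBytes[i] = 0;
            }
            iSpeedTotalBytes = 0;
            tLastSlotTime = tNow;
        }
        while (tLastSlotTime < tNow)
        {
            // advance the ring to the current second, clearing expired slots
            tLastSlotTime++;
            iSpeedBytesIndex = (iSpeedBytesIndex + 1) % SPEEDMETER_SLOTS;
            iSpeedTotalBytes -= iSpeedBytes[iSpeedBytesIndex];
            iSpeedBytes[iSpeedBytesIndex] = 0;
        }
        iSpeedBytes[iSpeedBytesIndex] += iBytes;
        iSpeedTotalBytes += iBytes;
    }

    int CalcCurrentDownloadSpeed()
    {
        // average bytes per second over the window
        return iSpeedTotalBytes / SPEEDMETER_SLOTS;
    }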

File diff suppressed because it is too large

@@ -1,128 +0,0 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef QUEUEEDITOR_H
#define QUEUEEDITOR_H
#include <vector>
#include "DownloadInfo.h"
class QueueEditor
{
public:
// NOTE: changes to this enum must be synced with "eRemoteEditAction" in unit "MessageBase.h"
enum EEditAction
{
eaFileMoveOffset = 1, // move to m_iOffset relative to the current position in queue
eaFileMoveTop,
eaFileMoveBottom,
eaFilePause,
eaFileResume,
eaFileDelete,
eaFilePauseAllPars,
eaFilePauseExtraPars,
eaFileSetPriority,
eaFileReorder,
eaFileSplit,
eaGroupMoveOffset, // move to m_iOffset relative to the current position in queue
eaGroupMoveTop,
eaGroupMoveBottom,
eaGroupPause,
eaGroupResume,
eaGroupDelete,
eaGroupDupeDelete,
eaGroupFinalDelete,
eaGroupPauseAllPars,
eaGroupPauseExtraPars,
eaGroupSetPriority,
eaGroupSetCategory,
eaGroupMerge,
eaGroupSetParameter,
eaGroupSetName,
eaGroupSetDupeKey,
eaGroupSetDupeScore,
eaGroupSetDupeMode
};
enum EMatchMode
{
mmID = 1,
mmName,
mmRegEx
};
private:
class EditItem
{
public:
int m_iOffset;
FileInfo* m_pFileInfo;
EditItem(FileInfo* pFileInfo, int iOffset);
};
typedef std::vector<EditItem*> ItemList;
typedef std::vector<FileInfo*> FileList;
private:
FileInfo* FindFileInfo(DownloadQueue* pDownloadQueue, int iID);
int FindFileInfoEntry(DownloadQueue* pDownloadQueue, FileInfo* pFileInfo);
bool InternEditList(DownloadQueue* pDownloadQueue, IDList* pIDList, bool bSmartOrder, EEditAction eAction, int iOffset, const char* szText);
void PrepareList(DownloadQueue* pDownloadQueue, ItemList* pItemList, IDList* pIDList, bool bSmartOrder, EEditAction eAction, int iOffset);
bool BuildIDListFromNameList(DownloadQueue* pDownloadQueue, IDList* pIDList, NameList* pNameList, EMatchMode eMatchMode, EEditAction eAction);
bool EditGroup(DownloadQueue* pDownloadQueue, FileInfo* pFileInfo, EEditAction eAction, int iOffset, const char* szText);
void BuildGroupList(DownloadQueue* pDownloadQueue, FileList* pGroupList);
void AlignAffectedGroups(DownloadQueue* pDownloadQueue, IDList* pIDList, bool bSmartOrder, int iOffset);
bool ItemExists(FileList* pFileList, FileInfo* pFileInfo);
void AlignGroup(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void PauseParsInGroups(ItemList* pItemList, bool bExtraParsOnly);
void PausePars(FileList* pFileList, bool bExtraParsOnly);
void SetNZBCategory(NZBInfo* pNZBInfo, const char* szCategory);
void SetNZBName(NZBInfo* pNZBInfo, const char* szName);
bool CanCleanupDisk(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
bool MergeGroups(DownloadQueue* pDownloadQueue, ItemList* pItemList);
bool SplitGroup(DownloadQueue* pDownloadQueue, ItemList* pItemList, const char* szName);
void ReorderFiles(DownloadQueue* pDownloadQueue, ItemList* pItemList);
void SetNZBParameter(NZBInfo* pNZBInfo, const char* szParamString);
void SetNZBDupeParam(NZBInfo* pNZBInfo, EEditAction eAction, const char* szText);
void PauseUnpauseEntry(FileInfo* pFileInfo, bool bPause);
void DeleteEntry(FileInfo* pFileInfo);
void MoveEntry(DownloadQueue* pDownloadQueue, FileInfo* pFileInfo, int iOffset);
void SetPriorityEntry(FileInfo* pFileInfo, const char* szPriority);
public:
QueueEditor();
~QueueEditor();
bool EditEntry(int ID, bool bSmartOrder, EEditAction eAction, int iOffset, const char* szText);
bool EditList(IDList* pIDList, NameList* pNameList, EMatchMode eMatchMode, bool bSmartOrder, EEditAction eAction, int iOffset, const char* szText);
bool LockedEditEntry(DownloadQueue* pDownloadQueue, int ID, bool bSmartOrder, EEditAction eAction, int iOffset, const char* szText);
bool LockedEditList(DownloadQueue* pDownloadQueue, IDList* pIDList, bool bSmartOrder, EEditAction eAction, int iOffset, const char* szText);
};
#endif
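
A brief illustration (hypothetical caller, not from the source) of the interface declared above, mirroring the remote command "nzbget -E T 2" from the README further below, which moves the file with ID 2 to the top of the queue:

    // pQueueEditor is assumed to be the instance returned by
    // QueueCoordinator::GetQueueEditor() shown above.
    bool bOK = pQueueEditor->EditEntry(2, true, QueueEditor::eaFileMoveTop, 0, NULL);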

README

@@ -4,7 +4,7 @@
This is a short documentation. For more information please
visit NZBGet home page at
http://nzbget.sourceforge.net
http://nzbget.net
Contents
--------
@@ -85,15 +85,11 @@ And the following libraries are optional:
- libcurses (usually part of commercial systems)
or (better)
- libncurses (http://invisible-island.net/ncurses)
- for par-check and -repair (enabled by default):
- libpar2 (http://parchive.sourceforge.net)
- libsigc++ (http://libsigc.sourceforge.net)
- for encrypted connections (TLS/SSL):
- GnuTLS (http://www.gnu.org/software/gnutls)
or
- OpenSSL (http://www.openssl.org)
or
- GnuTLS (http://www.gnu.org/software/gnutls)
- for gzip support in web-server and web-client (enabled by default):
- zlib (http://www.zlib.net)
@@ -151,13 +147,13 @@ You may run configure with additional arguments:
if you can not use curses/ncurses.
--disable-parcheck - to make without parcheck-support. Use this option
if you can not use libpar2 or libsigc++.
if you have troubles when compiling par2-module.
--with-tlslib=(GnuTLS, OpenSSL) - to select which TLS/SSL library
--with-tlslib=(OpenSSL, GnuTLS) - to select which TLS/SSL library
should be used for encrypted server connections.
--disable-tls - to make without TLS/SSL support. Use this option if
you can not neither GnuTLS nor OpenSSL.
you can not neither OpenSSL nor GnuTLS.
--disable-gzip - to make without gzip support. Use this option
if you can not use zlib.
@@ -168,37 +164,13 @@ You may run configure with additional arguments:
Optional package: par-check
---------------------------
NZBGet can check and repair downloaded files for you. For this purpose
it uses library par2 (libpar2), which needs sigc++ on its part.
it uses library par2.
The libpar2 and libsigc++ (version 2 or later) must be installed on your
system. On most linux distributions these libraries are available as packages.
If you do not have these packages you can compile them yourself.
Following configure-parameters may be usefull:
For your convenience the source code of libpar2 is integrated into
NZBGets source tree and is compiled automatically when you make NZBGet.
--with-libpar2-includes
--with-libpar2-libraries
--with-libsigc-includes
--with-libsigc-libraries
The library libsigc++ must be installed first, since libpar2 requires it.
If you use nzbget on a very slow computer like NAS-device, it may be good to
limit the time allowed for par-repair (option "ParTimeLimit" in nzbget
configuration file). This feature requires a patched version of libpar2.
To compile that version download the original source code of libpar2
(version 0.2) and apply patches "libpar2-0.2-bugfixes.patch" and
"libpar2-0.2-cancel.patch", provided with nzbget:
cd libpar2-0.2
cp ~/nzbget/libpar2-0.2-*.patch .
patch < libpar2-0.2-bugfixes.patch
patch < libpar2-0.2-cancel.patch
./configure
make
make install
If you are not able to use libpar2 or libsigc++ or do not want them you can
make nzbget without support for par-check using option "--disable-parcheck":
In a case errors occur during this process the inclusion of par2-module
can be disabled using configure option "--disable-parcheck":
./configure --disable-parcheck
@@ -206,7 +178,7 @@ Optional package: curses
-------------------------
For curses-outputmode you need ncurses or curses on your system.
If you do not have one of them you can download and compile ncurses yourself.
Following configure-parameters may be usefull:
Following configure-parameters may be useful:
--with-libcurses-includes
--with-libcurses-libraries
@@ -219,14 +191,14 @@ make the program without support for curses using option "--disable-curses":
Optional package: TLS
-------------------------
To enable encrypted server connections (TLS/SSL) you need to build the program
with TLS/SSL support. NZBGet can use two libraries: GnuTLS or OpenSSL.
with TLS/SSL support. NZBGet can use two libraries: OpenSSL or GnuTLS.
Configure-script checks which library is installed and use it. If both are
avialable it gives the precedence to GnuTLS. You may override that with
the option --with-tlslib=(GnuTLS, OpenSSL). For example to build whith OpenSSL:
available it gives the precedence to OpenSSL. You may override that with
the option --with-tlslib=(OpenSSL, GnuTLS). For example to build with GnuTLS:
./configure --with-tlslib=OpenSSL
./configure --with-tlslib=GnuTLS
Following configure-parameters may be usefull:
Following configure-parameters may be useful:
--with-libtls-includes
--with-libtls-libraries
@@ -247,28 +219,14 @@ NZBGet is developed using MS Visual C++ 2005. The project file and solution
are provided. If you use MS Visual C++ 2005 Express you need to download
and install Platform SDK.
To compile the program with par-check-support you also need the following
libraries:
- libsigc++ (http://libsigc.sourceforge.net)
- libpar2 (http://parchive.sourceforge.net)
Download these libaries, then use patch-files provided with NZBGet to create
preconfigured project files and solutions for each library.
Look at http://gnuwin32.sourceforge.net/packages/patch.htm for info on how
to use patch-files, if you do not familiar with this technique.
To compile the program with TLS/SSL support you also need the library:
To compile the program with TLS/SSL support you need either OpenSSL or GnuTLS:
- OpenSSL (http://www.openssl.org)
or
- GnuTLS (http://www.gnu.org/software/gnutls)
Download a precompiled version of GnuTLS from http://josefsson.org/gnutls4win
and create lib-file as described there in section "Using the GnuTLS DLL from
your Visual Studio program".
After libsigc++ and libpar2 are compiled in static libraries (.lib), the
library for GnuTLS is created and include- and libraries-paths are configured
in MS Visual C++ 2005 you should be able to compile NZBGet.
Also required are:
- Regex (http://gnuwin32.sourceforge.net/packages/regex.htm)
- Zlib (http://gnuwin32.sourceforge.net/packages/zlib.htm)
=====================================
6. Configuration
@@ -387,9 +345,18 @@ It prints something like:
[1] nzbname\filename1.rar (50.00 MB)
[2] nzbname\filename1.r01 (50.00 MB)
[3] another-nzb\filename3.r01 (100.00 MB)
[4] another-nzb\filename3.r02 (100.00 MB)
The numbers in square braces are ID's of files in queue. They can be used
in edit-command. For example to move file with ID 2 to the top of queue:
This is the list of individual files listed within nzb-file. To print
the list of nzb-files (without content) add G-modifier to the list command:
[1] nzbname (4.56 GB)
[2] another-nzb (4.20 GB)
The numbers in square braces are ID's of files or groups in queue.
They can be used in edit-command. For example to move file with
ID 2 to the top of queue:
nzbget -E T 2
@@ -402,8 +369,8 @@ or to delete files from queue:
nzbget -E D 3 10-15 20-21 16
The edit-command has also a group-mode which affects all files from the
same nzb-request. You need to pass one ID of any file in the group. For
example to delete all files from the first nzb-request:
same nzb-file. You need to pass an ID of the group. For example to delete
the whole group 1:
nzbget -E G D 1
@@ -444,10 +411,10 @@ Post processing scripts
After the download of nzb-file is completed nzbget can call post-processing
scripts, defined in configuration file.
Example post-processing scripts are provided in directory "ppscripts".
Example post-processing scripts are provided in directory "scripts".
To use the scripts copy them into your local directory and set options
<ScriptDir>, <DefScript> and <ScriptOrder>.
<ScriptDir>, <PostScript> and <ScriptOrder>.
For information on writing your own post-processing scripts please
visit NZBGet web site.
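
As an illustration only (directory and script names here are hypothetical), the three options mentioned above could be set in the configuration file like this:

    ScriptDir=${MainDir}/scripts
    PostScript=EMail.py
    ScriptOrder=EMail.py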
@@ -469,13 +436,14 @@ and port defined in NZBGet configuration file in options "ControlIP" and
http://localhost:6789/
For login credentials type username "nzbget" (predefined and not changeable)
and the password from the option "ControlPassword" (default is tegbzn6789).
For login credentials type username and the password defined by
options "ControlUsername" (default "nzbget") and "ControlPassword"
(default "tegbzn6789").
In a case your browser forget credentials, to prevent typing them each
time, there is a workaround - use URL in the form:
http://localhost:6789/nzbget:password/
http://localhost:6789/username:password/
Please note, that in this case the password is saved in a bookmark or in
browser history in plain text and is easy to find by persons having
@@ -494,6 +462,32 @@ Bo Cordes Petersen (placebodk@users.sourceforge.net) until 2005.
In 2007 the abandoned project was overtaken by Andrey Prygunkov.
Since then the program has been completely rewritten.
NZBGet distribution archive includes additional components
written by other authors:
PAR2:
Peter Brian Clements <peterbclements@users.sourceforge.net>
PAR2 library API:
Francois Lesueur <flesueur@users.sourceforge.net>
jQuery:
John Resig <http://jquery.com>
The Dojo Foundation <http://sizzlejs.com>
Bootstrap:
Twitter, Inc <http://twitter.github.com/bootstrap>
Raphaël:
Dmitry Baranovskiy <http://raphaeljs.com>
Sencha Labs <http://sencha.com>
Elycharts:
Void Labs s.n.c. <http://void.it>
iconSweets:
Yummygum <http://yummygum.com>
=====================================
9. Copyright
=====================================
@@ -507,21 +501,13 @@ The complete content of license is provided in file COPYING.
Additional exemption: compiling, linking, and/or using OpenSSL is allowed.
Binary distribution for Windows contains code from the following libraries:
- libpar2 (http://parchive.sourceforge.net)
- libsigc++ (http://libsigc.sourceforge.net)
- GnuTLS (http://www.gnu.org/software/gnutls)
libpar2 is distributed under GPL; libsigc++ and GnuTLS - under LGPL.
=====================================
10. Contact
=====================================
If you encounter any problem, feel free to use the forum
nzbget.sourceforge.net/forum
nzbget.net/forum
or contact me at


@@ -1,352 +0,0 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2008-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef WIN32
#include "win32.h"
#else
#include <unistd.h>
#endif
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include "nzbget.h"
#include "Scheduler.h"
#include "ScriptController.h"
#include "Options.h"
#include "Log.h"
#include "NewsServer.h"
#include "ServerPool.h"
#include "FeedInfo.h"
#include "FeedCoordinator.h"
extern Options* g_pOptions;
extern ServerPool* g_pServerPool;
extern FeedCoordinator* g_pFeedCoordinator;
Scheduler::Task::Task(int iHours, int iMinutes, int iWeekDaysBits, ECommand eCommand, const char* szParam)
{
m_iHours = iHours;
m_iMinutes = iMinutes;
m_iWeekDaysBits = iWeekDaysBits;
m_eCommand = eCommand;
m_szParam = szParam ? strdup(szParam) : NULL;
m_tLastExecuted = 0;
}
Scheduler::Task::~Task()
{
free(m_szParam);
}
Scheduler::Scheduler()
{
debug("Creating Scheduler");
m_tLastCheck = 0;
m_TaskList.clear();
}
Scheduler::~Scheduler()
{
debug("Destroying Scheduler");
for (TaskList::iterator it = m_TaskList.begin(); it != m_TaskList.end(); it++)
{
delete *it;
}
}
void Scheduler::AddTask(Task* pTask)
{
m_mutexTaskList.Lock();
m_TaskList.push_back(pTask);
m_mutexTaskList.Unlock();
}
bool Scheduler::CompareTasks(Scheduler::Task* pTask1, Scheduler::Task* pTask2)
{
return (pTask1->m_iHours < pTask2->m_iHours) ||
((pTask1->m_iHours == pTask2->m_iHours) && (pTask1->m_iMinutes < pTask2->m_iMinutes));
}
void Scheduler::FirstCheck()
{
m_mutexTaskList.Lock();
m_TaskList.sort(CompareTasks);
m_mutexTaskList.Unlock();
// check all tasks for the last week
time_t tCurrent = time(NULL);
m_tLastCheck = tCurrent - 60*60*24*7;
m_bDetectClockChanges = false;
m_bExecuteProcess = false;
CheckTasks();
}
void Scheduler::IntervalCheck()
{
m_bDetectClockChanges = true;
m_bExecuteProcess = true;
CheckTasks();
}
void Scheduler::CheckTasks()
{
PrepareLog();
m_mutexTaskList.Lock();
time_t tCurrent = time(NULL);
if (m_bDetectClockChanges)
{
// Detect large step changes of system time
time_t tDiff = tCurrent - m_tLastCheck;
if (tDiff > 60*90 || tDiff < -60*90)
{
debug("Reset scheduled tasks (detected clock adjustment greater than 90 minutes)");
m_bExecuteProcess = false;
m_tLastCheck = tCurrent;
for (TaskList::iterator it = m_TaskList.begin(); it != m_TaskList.end(); it++)
{
Task* pTask = *it;
pTask->m_tLastExecuted = 0;
}
}
}
tm tmCurrent;
localtime_r(&tCurrent, &tmCurrent);
tm tmLastCheck;
localtime_r(&m_tLastCheck, &tmLastCheck);
tm tmLoop;
memcpy(&tmLoop, &tmLastCheck, sizeof(tmLastCheck));
tmLoop.tm_hour = tmCurrent.tm_hour;
tmLoop.tm_min = tmCurrent.tm_min;
tmLoop.tm_sec = tmCurrent.tm_sec;
time_t tLoop = mktime(&tmLoop);
while (tLoop <= tCurrent)
{
for (TaskList::iterator it = m_TaskList.begin(); it != m_TaskList.end(); it++)
{
Task* pTask = *it;
if (pTask->m_tLastExecuted != tLoop)
{
tm tmAppoint;
memcpy(&tmAppoint, &tmLoop, sizeof(tmLoop));
tmAppoint.tm_hour = pTask->m_iHours;
tmAppoint.tm_min = pTask->m_iMinutes;
tmAppoint.tm_sec = 0;
time_t tAppoint = mktime(&tmAppoint);
tAppoint -= g_pOptions->GetTimeCorrection();
int iWeekDay = tmAppoint.tm_wday;
if (iWeekDay == 0)
{
iWeekDay = 7;
}
bool bWeekDayOK = pTask->m_iWeekDaysBits == 0 || (pTask->m_iWeekDaysBits & (1 << (iWeekDay - 1)));
bool bDoTask = bWeekDayOK && m_tLastCheck < tAppoint && tAppoint <= tCurrent;
//debug("TEMP: 1) m_tLastCheck=%i, tCurrent=%i, tLoop=%i, tAppoint=%i, bWeekDayOK=%i, bDoTask=%i", m_tLastCheck, tCurrent, tLoop, tAppoint, (int)bWeekDayOK, (int)bDoTask);
if (bDoTask)
{
ExecuteTask(pTask);
pTask->m_tLastExecuted = tLoop;
}
}
}
tLoop += 60*60*24; // inc day
localtime_r(&tLoop, &tmLoop);
}
m_tLastCheck = tCurrent;
m_mutexTaskList.Unlock();
PrintLog();
}
void Scheduler::ExecuteTask(Task* pTask)
{
const char* szCommandName[] = { "Pause", "Unpause", "Set download rate", "Execute program", "Pause Scan", "Unpause Scan",
"Enable Server", "Disable Server", "Fetch Feed" };
debug("Executing scheduled command: %s", szCommandName[pTask->m_eCommand]);
switch (pTask->m_eCommand)
{
case scDownloadRate:
if (!Util::EmptyStr(pTask->m_szParam))
{
g_pOptions->SetDownloadRate(atoi(pTask->m_szParam) * 1024);
m_bDownloadRateChanged = true;
}
break;
case scPauseDownload:
case scUnpauseDownload:
m_bPauseDownload = pTask->m_eCommand == scPauseDownload;
m_bPauseDownloadChanged = true;
break;
case scProcess:
if (m_bExecuteProcess)
{
SchedulerScriptController::StartScript(pTask->m_szParam);
}
break;
case scPauseScan:
case scUnpauseScan:
g_pOptions->SetPauseScan(pTask->m_eCommand == scPauseScan);
m_bPauseScanChanged = true;
break;
case scActivateServer:
case scDeactivateServer:
EditServer(pTask->m_eCommand == scActivateServer, pTask->m_szParam);
break;
case scFetchFeed:
if (m_bExecuteProcess)
{
FetchFeed(pTask->m_szParam);
break;
}
}
}
void Scheduler::PrepareLog()
{
m_bDownloadRateChanged = false;
m_bPauseDownloadChanged = false;
m_bPauseScanChanged = false;
m_bServerChanged = false;
}
void Scheduler::PrintLog()
{
if (m_bDownloadRateChanged)
{
info("Scheduler: setting download rate to %i KB/s", g_pOptions->GetDownloadRate() / 1024);
}
if (m_bPauseScanChanged)
{
info("Scheduler: %s scan", g_pOptions->GetPauseScan() ? "pausing" : "unpausing");
}
if (m_bServerChanged)
{
int index = 0;
for (Servers::iterator it = g_pServerPool->GetServers()->begin(); it != g_pServerPool->GetServers()->end(); it++, index++)
{
NewsServer* pServer = *it;
if (pServer->GetActive() != m_ServerStatusList[index])
{
info("Scheduler: %s %s", pServer->GetActive() ? "activating" : "deactivating", pServer->GetName());
}
}
g_pServerPool->Changed();
}
}
void Scheduler::EditServer(bool bActive, const char* szServerList)
{
char* szServerList2 = strdup(szServerList);
char* saveptr;
char* szServer = strtok_r(szServerList2, ",;", &saveptr);
while (szServer)
{
szServer = Util::Trim(szServer);
if (!Util::EmptyStr(szServer))
{
int iID = atoi(szServer);
for (Servers::iterator it = g_pServerPool->GetServers()->begin(); it != g_pServerPool->GetServers()->end(); it++)
{
NewsServer* pServer = *it;
if ((iID > 0 && pServer->GetID() == iID) ||
!strcasecmp(pServer->GetName(), szServer))
{
if (!m_bServerChanged)
{
// store old server status for logging
m_ServerStatusList.clear();
m_ServerStatusList.reserve(g_pServerPool->GetServers()->size());
for (Servers::iterator it2 = g_pServerPool->GetServers()->begin(); it2 != g_pServerPool->GetServers()->end(); it2++)
{
NewsServer* pServer2 = *it2;
m_ServerStatusList.push_back(pServer2->GetActive());
}
}
m_bServerChanged = true;
pServer->SetActive(bActive);
break;
}
}
}
szServer = strtok_r(NULL, ",;", &saveptr);
}
free(szServerList2);
}
void Scheduler::FetchFeed(const char* szFeedList)
{
char* szFeedList2 = strdup(szFeedList);
char* saveptr;
char* szFeed = strtok_r(szFeedList2, ",;", &saveptr);
while (szFeed)
{
szFeed = Util::Trim(szFeed);
if (!Util::EmptyStr(szFeed))
{
int iID = atoi(szFeed);
for (Feeds::iterator it = g_pFeedCoordinator->GetFeeds()->begin(); it != g_pFeedCoordinator->GetFeeds()->end(); it++)
{
FeedInfo* pFeed = *it;
if (pFeed->GetID() == iID ||
!strcasecmp(pFeed->GetName(), szFeed) ||
!strcasecmp("0", szFeed))
{
g_pFeedCoordinator->FetchFeed(pFeed->GetID());
break;
}
}
}
szFeed = strtok_r(NULL, ",;", &saveptr);
}
free(szFeedList2);
}
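
A minimal standalone sketch (the helper name is illustrative, not part of the source) of the week-day test performed inside Scheduler::CheckTasks() above: bit 0 of the mask stands for Monday, bit 6 for Sunday, and an empty mask matches every day:

    #include <time.h>

    static bool TaskRunsOnDay(int iWeekDaysBits, const tm& tmAppoint)
    {
        int iWeekDay = tmAppoint.tm_wday;   // struct tm counts 0 = Sunday
        if (iWeekDay == 0)
        {
            iWeekDay = 7;                   // the scheduler counts Sunday as day 7
        }
        return iWeekDaysBits == 0 || (iWeekDaysBits & (1 << (iWeekDay - 1)));
    }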


@@ -3,13 +3,17 @@
/* Define to 1 to include debug-code */
#undef DEBUG
/* Define to 1 if deleting of files during reading of directory is not
properly supported by OS */
#undef DIRBROWSER_SNAPSHOT
/* Define to 1 to not use curses */
#undef DISABLE_CURSES
/* Define to 1 to disable gzip-support */
#undef DISABLE_GZIP
/* Define to 1 to disable smart par-verification and restoration */
/* Define to 1 to disable par-verification and repair */
#undef DISABLE_PARCHECK
/* Define to 1 to not use TLS/SSL */
@@ -31,6 +35,16 @@
/* Define to 1 if you have the <curses.h> header file. */
#undef HAVE_CURSES_H
/* Define to 1 if you have the <dirent.h> header file, and it defines `DIR'.
*/
#undef HAVE_DIRENT_H
/* Define to 1 if you have the <endian.h> header file. */
#undef HAVE_ENDIAN_H
/* Define to 1 if fseeko (and presumably ftello) exists and is declared. */
#undef HAVE_FSEEKO
/* Define to 1 if getaddrinfo is supported */
#undef HAVE_GETADDRINFO
@@ -46,6 +60,12 @@
/* Define to 1 if gethostbyname_r takes 6 arguments */
#undef HAVE_GETHOSTBYNAME_R_6
/* Define to 1 if you have the `getopt' function. */
#undef HAVE_GETOPT
/* Define to 1 if you have the <getopt.h> header file. */
#undef HAVE_GETOPT_H
/* Define to 1 if getopt_long is supported */
#undef HAVE_GETOPT_LONG
@@ -55,6 +75,9 @@
/* Define to 1 to use GnuTLS library for TLS/SSL-support. */
#undef HAVE_LIBGNUTLS
/* Define to 1 if you have the `memcpy' function. */
#undef HAVE_MEMCPY
/* Define to 1 if you have the <memory.h> header file. */
#undef HAVE_MEMORY_H
@@ -64,33 +87,56 @@
/* Define to 1 if you have the <ncurses/ncurses.h> header file. */
#undef HAVE_NCURSES_NCURSES_H
/* Define to 1 if you have the <ndir.h> header file, and it defines `DIR'. */
#undef HAVE_NDIR_H
/* Define to 1 to use OpenSSL library for TLS/SSL-support. */
#undef HAVE_OPENSSL
/* Define to 1 if libpar2 has recent bugfixes-patch (version 2) */
#undef HAVE_PAR2_BUGFIXES_V2
/* Define to 1 if libpar2 supports cancelling (needs a special patch) */
#undef HAVE_PAR2_CANCEL
/* Define to 1 if you have the <regex.h> header file. */
#undef HAVE_REGEX_H
/* Define to 1 if _SC_NPROCESSORS_ONLN is present in unistd.h */
#undef HAVE_SC_NPROCESSORS_ONLN
/* Define to 1 if spinlocks are supported */
#undef HAVE_SPINLOCK
/* Define to 1 if stdbool.h conforms to C99. */
#undef HAVE_STDBOOL_H
/* Define to 1 if you have the <stdint.h> header file. */
#undef HAVE_STDINT_H
/* Define to 1 if you have the <stdio.h> header file. */
#undef HAVE_STDIO_H
/* Define to 1 if you have the <stdlib.h> header file. */
#undef HAVE_STDLIB_H
/* Define to 1 if you have the `strcasecmp' function. */
#undef HAVE_STRCASECMP
/* Define to 1 if you have the `strchr' function. */
#undef HAVE_STRCHR
/* Define to 1 if you have the `stricmp' function. */
#undef HAVE_STRICMP
/* Define to 1 if you have the <strings.h> header file. */
#undef HAVE_STRINGS_H
/* Define to 1 if you have the <string.h> header file. */
#undef HAVE_STRING_H
/* Define to 1 if you have the <sys/dir.h> header file, and it defines `DIR'.
*/
#undef HAVE_SYS_DIR_H
/* Define to 1 if you have the <sys/ndir.h> header file, and it defines `DIR'.
*/
#undef HAVE_SYS_NDIR_H
/* Define to 1 if you have the <sys/prctl.h> header file. */
#undef HAVE_SYS_PRCTL_H
@@ -106,6 +152,9 @@
/* Define to 1 if variadic macros are supported */
#undef HAVE_VARIADIC_MACROS
/* Define to 1 if the system has the type `_Bool'. */
#undef HAVE__BOOL
/* Name of package */
#undef PACKAGE
@@ -136,8 +185,27 @@
/* Version number of package */
#undef VERSION
/* Define to 1 if your processor stores words with the most significant byte
first (like Motorola and SPARC, unlike Intel and VAX). */
#undef WORDS_BIGENDIAN
/* Number of bits in a file offset, on hosts where this is settable. */
#undef _FILE_OFFSET_BITS
/* Define to 1 to make fseeko visible on some hosts (e.g. glibc 2.2). */
#undef _LARGEFILE_SOURCE
/* Define for large files, on AIX-style hosts. */
#undef _LARGE_FILES
/* Define to empty if `const' does not conform to ANSI C. */
#undef const
/* Define to `__inline__' or `__inline' if that's what the C compiler
calls it, or to nothing if 'inline' is not supported under any name. */
#ifndef __cplusplus
#undef inline
#endif
/* Define to `unsigned int' if <sys/types.h> does not define. */
#undef size_t

configure (vendored)
File diff suppressed because it is too large

@@ -1,11 +1,32 @@
#
# This file is part of nzbget
#
# Copyright (C) 2008-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# -*- Autoconf -*-
# Process this file with autoconf to produce a configure script.
AC_PREREQ(2.59)
AC_INIT(nzbget, 12.0, hugbug@users.sourceforge.net)
AC_INIT(nzbget, 14.1, hugbug@users.sourceforge.net)
AC_CANONICAL_SYSTEM
AM_INIT_AUTOMAKE(nzbget, 12.0)
AC_CONFIG_SRCDIR([nzbget.cpp])
AM_INIT_AUTOMAKE(nzbget, 14.1)
AC_CONFIG_SRCDIR([daemon/main/nzbget.cpp])
AC_CONFIG_HEADERS([config.h])
@@ -143,7 +164,7 @@ fi
dnl
dnl cCheck if spinlocks are available
dnl Check if spinlocks are available
dnl
AC_CHECK_FUNC(pthread_spin_init,
[AC_DEFINE([HAVE_SPINLOCK], 1, [Define to 1 if spinlocks are supported])]
@@ -180,6 +201,31 @@ AC_TRY_COMPILE([
AC_DEFINE_UNQUOTED(SOCKLEN_T, $SOCKLEN_T, [Determine what socket length (socklen_t) data type is])
dnl
dnl Dir-browser's snapshot
dnl
AC_MSG_CHECKING(whether dir-browser snapshot workaround is needed)
if test "$target_vendor" == "apple"; then
AC_MSG_RESULT([[yes]])
AC_DEFINE([DIRBROWSER_SNAPSHOT], 1, [Define to 1 if deleting of files during reading of directory is not properly supported by OS])
else
AC_MSG_RESULT([[no]])
fi
dnl
dnl check cpu cores via sysconf
dnl
AC_MSG_CHECKING(for cpu cores via sysconf)
AC_TRY_COMPILE(
[#include <unistd.h>],
[ int a = _SC_NPROCESSORS_ONLN; ],
FOUND="yes"
AC_MSG_RESULT([[yes]])
AC_DEFINE([HAVE_SC_NPROCESSORS_ONLN], 1, [Define to 1 if _SC_NPROCESSORS_ONLN is present in unistd.h]),
FOUND="no")
dnl
dnl checks for libxml2 includes and libraries.
dnl
@@ -196,7 +242,8 @@ AC_ARG_WITH(libxml2_libraries,
if test "$INCVAL" = "no" -o "$LIBVAL" = "no"; then
PKG_CHECK_MODULES(libxml2, libxml-2.0,
[LIBS="${LIBS} $libxml2_LIBS"]
[CPPFLAGS="${CPPFLAGS} $libxml2_CFLAGS"])
[CPPFLAGS="${CPPFLAGS} $libxml2_CFLAGS"],
AC_MSG_ERROR("libxml2 library not found"))
fi
AC_CHECK_HEADER(libxml/tree.h,,
AC_MSG_ERROR("libxml2 header files not found"))
@@ -252,116 +299,37 @@ fi
dnl
dnl Use libpar2 for par-checking. Deafult: no
dnl Use par-checking. Deafult: yes.
dnl
AC_MSG_CHECKING(whether to include code for par-checking)
AC_ARG_ENABLE(parcheck,
[AS_HELP_STRING([--disable-parcheck], [do not include par-check/-repair-support (removes dependency from libpar2- and libsigc-libraries)])],
[AS_HELP_STRING([--disable-parcheck], [do not include par-check/-repair-support])],
[ ENABLEPARCHECK=$enableval ],
[ ENABLEPARCHECK=yes] )
AC_MSG_RESULT($ENABLEPARCHECK)
if test "$ENABLEPARCHECK" = "yes"; then
dnl PAR2 checks.
dnl
dnl checks for libsigc++ includes and libraries (required for libpar2).
dnl
AC_ARG_WITH(libsigc_includes,
[AS_HELP_STRING([--with-libsigc-includes=DIR], [libsigc++-2.0 include directory])],
[CPPFLAGS="${CPPFLAGS} -I${withval}"]
[INCVAL="yes"],
[INCVAL="no"])
AC_ARG_WITH(libsigc_libraries,
[AS_HELP_STRING([--with-libsigc-libraries=DIR], [libsigc++-2.0 library directory])],
[LDFLAGS="${LDFLAGS} -L${withval}"]
[CPPFLAGS="${CPPFLAGS} -I${withval}/sigc++-2.0/include"]
[LIBVAL="yes"],
[LIBVAL="no"])
if test "$INCVAL" = "no" -o "$LIBVAL" = "no"; then
PKG_CHECK_MODULES(libsigc, sigc++-2.0,
[LIBS="${LIBS} $libsigc_LIBS"]
[CPPFLAGS="${CPPFLAGS} $libsigc_CFLAGS"])
fi
AC_CHECK_HEADER(sigc++/type_traits.h,,
AC_MSG_ERROR("libsigc++-2.0 header files not found"))
dnl
dnl checks for libpar2 includes and libraries.
dnl
INCVAL="${LIBPREF}/include"
LIBVAL="${LIBPREF}/lib"
AC_ARG_WITH(libpar2_includes,
[AS_HELP_STRING([--with-libpar2-includes=DIR], [libpar2 include directory])],
[INCVAL="$withval"])
CPPFLAGS="${CPPFLAGS} -I${INCVAL}"
AC_CHECK_HEADER(libpar2/libpar2.h,,
AC_MSG_ERROR("libpar2 header files not found"))
AC_ARG_WITH(libpar2_libraries,
[AS_HELP_STRING([--with-libpar2-libraries=DIR], [libpar2 library directory])],
[LIBVAL="$withval"])
LDFLAGS="${LDFLAGS} -L${LIBVAL}"
AC_SEARCH_LIBS([_ZN12Par2RepairerC1Ev], [par2], ,
AC_MSG_ERROR("libpar2 library not found"))
dnl
dnl check if libpar2 library is linkable
dnl
AC_MSG_CHECKING(for libpar2 linking)
AC_TRY_LINK(
[#include <libpar2/par2cmdline.h>]
[#include <libpar2/par2repairer.h>]
[ class Repairer : public Par2Repairer { }; ],
[ Repairer* p = new Repairer(); ],
AC_MSG_RESULT([[yes]]),
AC_MSG_RESULT([[no]])
AC_MSG_ERROR("libpar2 library not found"))
dnl
dnl check if libpar2 has support for cancelling
dnl
AC_MSG_CHECKING(whether libpar2 supports cancelling)
AC_TRY_COMPILE(
[#include <libpar2/par2cmdline.h>]
[#include <libpar2/par2repairer.h>]
[ class Repairer : public Par2Repairer { void test() { cancelled = true; } }; ],
[],
AC_MSG_RESULT([[yes]])
AC_DEFINE([HAVE_PAR2_CANCEL], 1, [Define to 1 if libpar2 supports cancelling (needs a special patch)]),
AC_MSG_RESULT([[no]]))
dnl
dnl check if libpar2 has recent bugfixes-patch
dnl
AC_MSG_CHECKING(whether libpar2 has recent bugfixes-patch (version 2))
AC_TRY_COMPILE(
[#include <libpar2/par2cmdline.h>]
[#include <libpar2/par2repairer.h>]
[ class Repairer : public Par2Repairer { void test() { BugfixesPatchVersion2(); } }; ],
[],
AC_MSG_RESULT([[yes]])
PAR2PATCHV2=yes
AC_DEFINE([HAVE_PAR2_BUGFIXES_V2], 1, [Define to 1 if libpar2 has recent bugfixes-patch (version 2)]),
AC_MSG_RESULT([[no]])
PAR2PATCHV2=no)
if test "$PAR2PATCHV2" = "no" ; then
AC_ARG_ENABLE(libpar2-bugfixes-check,
[AS_HELP_STRING([--disable-libpar2-bugfixes-check], [do not check if libpar2 has recent bugfixes-patch applied])],
[ PAR2PATCHCHECK=$enableval ],
[ PAR2PATCHCHECK=yes] )
if test "$PAR2PATCHCHECK" = "yes"; then
AC_ERROR([Your version of libpar2 doesn't include the recent bugfixes-patch (version 2, updated Dec 3, 2012). Please patch libpar2 with the patches supplied with NZBGet (see README for details). If you cannot patch libpar2, you can use configure parameter --disable-libpar2-bugfixes-check to suppress the check. Please note however that in this case the program may crash during par-check/repair. The patch is highly recommended!])
fi
fi
dnl Checks for header files.
AC_HEADER_DIRENT
AC_HEADER_STDBOOL
AC_HEADER_STDC
AC_CHECK_HEADERS([stdio.h] [endian.h] [getopt.h])
dnl Checks for typedefs, structures, and compiler characteristics.
AC_TYPE_SIZE_T
AC_C_BIGENDIAN
AC_C_CONST
AC_C_INLINE
AC_FUNC_FSEEKO
dnl Checks for library functions.
AC_FUNC_MEMCMP
AC_CHECK_FUNCS([stricmp] [strcasecmp])
AC_CHECK_FUNCS([strchr] [memcpy])
AC_CHECK_FUNCS([getopt])
AM_CONDITIONAL(WITH_PAR2, true)
else
AC_DEFINE([DISABLE_PARCHECK],1,[Define to 1 to disable smart par-verification and restoration])
AC_DEFINE([DISABLE_PARCHECK],1,[Define to 1 to disable par-verification and repair])
AM_CONDITIONAL(WITH_PAR2, false)
fi
@@ -376,47 +344,12 @@ AC_ARG_ENABLE(tls,
AC_MSG_RESULT($USETLS)
if test "$USETLS" = "yes"; then
AC_ARG_WITH(tlslib,
[AS_HELP_STRING([--with-tlslib=(GnuTLS, OpenSSL)], [TLS/SSL library to use])],
[AS_HELP_STRING([--with-tlslib=(OpenSSL, GnuTLS)], [TLS/SSL library to use])],
[TLSLIB="$withval"])
if test "$TLSLIB" != "GnuTLS" -a "$TLSLIB" != "OpenSSL" -a "$TLSLIB" != ""; then
AC_MSG_ERROR([Invalid argument for option --with-tlslib])
fi
if test "$TLSLIB" = "GnuTLS" -o "$TLSLIB" = ""; then
INCVAL="${LIBPREF}/include"
LIBVAL="${LIBPREF}/lib"
AC_ARG_WITH(libgnutls_includes,
[AS_HELP_STRING([--with-libgnutls-includes=DIR], [GnuTLS include directory])],
[INCVAL="$withval"])
CPPFLAGS="${CPPFLAGS} -I${INCVAL}"
AC_ARG_WITH(libgnutls_libraries,
[AS_HELP_STRING([--with-libgnutls-libraries=DIR], [GnuTLS library directory])],
[LIBVAL="$withval"])
LDFLAGS="${LDFLAGS} -L${LIBVAL}"
AC_CHECK_HEADER(gnutls/gnutls.h,
FOUND=yes
TLSHEADERS=yes,
FOUND=no)
if test "$FOUND" = "no" -a "$TLSLIB" = "GnuTLS"; then
AC_MSG_ERROR([Couldn't find GnuTLS headers (gnutls.h)])
fi
if test "$FOUND" = "yes"; then
AC_SEARCH_LIBS([gnutls_global_init], [gnutls],
AC_SEARCH_LIBS([gcry_control], [gnutls gcrypt],
FOUND=yes,
FOUND=no),
FOUND=no)
if test "$FOUND" = "no" -a "$TLSLIB" = "GnuTLS"; then
AC_MSG_ERROR([Couldn't find GnuTLS library])
fi
if test "$FOUND" = "yes"; then
TLSLIB="GnuTLS"
AC_DEFINE([HAVE_LIBGNUTLS],1,[Define to 1 to use GnuTLS library for TLS/SSL-support.])
fi
fi
fi
if test "$TLSLIB" = "OpenSSL" -o "$TLSLIB" = ""; then
AC_ARG_WITH(openssl_includes,
[AS_HELP_STRING([--with-openssl-includes=DIR], [OpenSSL include directory])],
@@ -429,9 +362,10 @@ if test "$USETLS" = "yes"; then
[LIBVAL="yes"],
[LIBVAL="no"])
if test "$INCVAL" = "no" -o "$LIBVAL" = "no"; then
PKG_CHECK_MODULES(openssl, openssl,
PKG_CHECK_MODULES([openssl], [openssl],
[LIBS="${LIBS} $openssl_LIBS"]
[CPPFLAGS="${CPPFLAGS} $openssl_CFLAGS"])
[CPPFLAGS="${CPPFLAGS} $openssl_CFLAGS"],
FOUND=no)
fi
AC_CHECK_HEADER(openssl/ssl.h,
@@ -456,12 +390,66 @@ if test "$USETLS" = "yes"; then
fi
fi
fi
if test "$TLSLIB" = "GnuTLS" -o "$TLSLIB" = ""; then
INCVAL="${LIBPREF}/include"
LIBVAL="${LIBPREF}/lib"
AC_ARG_WITH(libgnutls_includes,
[AS_HELP_STRING([--with-libgnutls-includes=DIR], [GnuTLS include directory])],
[INCVAL="$withval"])
CPPFLAGS="${CPPFLAGS} -I${INCVAL}"
AC_ARG_WITH(libgnutls_libraries,
[AS_HELP_STRING([--with-libgnutls-libraries=DIR], [GnuTLS library directory])],
[LIBVAL="$withval"])
LDFLAGS="${LDFLAGS} -L${LIBVAL}"
AC_CHECK_HEADER(gnutls/gnutls.h,
FOUND=yes
TLSHEADERS=yes,
FOUND=no)
if test "$FOUND" = "no" -a "$TLSLIB" = "GnuTLS"; then
AC_MSG_ERROR([Couldn't find GnuTLS headers (gnutls.h)])
fi
if test "$FOUND" = "yes"; then
AC_SEARCH_LIBS([gnutls_global_init], [gnutls],
FOUND=yes,
FOUND=no)
if test "$FOUND" = "yes"; then
dnl gcrypt is optional
AC_MSG_CHECKING([whether gcrypt is needed])
AC_TRY_COMPILE(
[#include <gnutls/gnutls.h>]
[#if GNUTLS_VERSION_NUMBER <= 0x020b00]
[compile error]
[#endif],
[int a;],
AC_MSG_RESULT([no])
GCRYPT=no,
AC_MSG_RESULT([yes])
GCRYPT=yes)
if test "$GCRYPT" = "yes"; then
AC_CHECK_HEADER([gcrypt.h],
AC_SEARCH_LIBS([gcry_control], [gnutls gcrypt],
FOUND=yes,
FOUND=no),
FOUND=yes)
fi
fi
if test "$FOUND" = "no" -a "$TLSLIB" = "GnuTLS"; then
AC_MSG_ERROR([Couldn't find GnuTLS library])
fi
if test "$FOUND" = "yes"; then
TLSLIB="GnuTLS"
AC_DEFINE([HAVE_LIBGNUTLS],1,[Define to 1 to use GnuTLS library for TLS/SSL-support.])
fi
fi
fi
if test "$TLSLIB" = ""; then
if test "$TLSHEADERS" = ""; then
AC_MSG_ERROR([Couldn't find neither GnuTLS nor OpenSSL headers (gnutls.h or ssl.h)])
AC_MSG_ERROR([Couldn't find neither OpenSSL nor GnuTLS headers (ssl.h or gnutls.h)])
else
AC_MSG_ERROR([Couldn't find neither GnuTLS nor OpenSSL library])
AC_MSG_ERROR([Couldn't find neither OpenSSL nor GnuTLS library])
fi
fi
else


@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -117,19 +117,20 @@ Connection::Connection(const char* szHost, int iPort, bool bTLS)
{
debug("Creating Connection");
m_szHost = NULL;
m_iPort = iPort;
m_bTLS = bTLS;
m_szCipher = NULL;
m_eStatus = csDisconnected;
m_iSocket = INVALID_SOCKET;
m_iBufAvail = 0;
m_iTimeout = 60;
m_bSuppressErrors = true;
m_szReadBuf = (char*)malloc(CONNECTION_READBUFFER_SIZE + 1);
m_szHost = NULL;
m_iPort = iPort;
m_bTLS = bTLS;
m_szCipher = NULL;
m_eStatus = csDisconnected;
m_iSocket = INVALID_SOCKET;
m_iBufAvail = 0;
m_iTimeout = 60;
m_bSuppressErrors = true;
m_szReadBuf = (char*)malloc(CONNECTION_READBUFFER_SIZE + 1);
m_iTotalBytesRead = 0;
#ifndef DISABLE_TLS
m_pTLSSocket = NULL;
m_bTLSError = false;
m_pTLSSocket = NULL;
m_bTLSError = false;
#endif
if (szHost)
@@ -438,7 +439,9 @@ char* Connection::ReadLine(char* pBuffer, int iSize, int* pBytesRead)
{
*pBytesRead = iBytesRead;
}
m_iTotalBytesRead += iBytesRead;
if (pBufPtr == pBuffer)
{
return NULL;
@@ -858,3 +861,10 @@ const char* Connection::GetRemoteAddr()
return m_szRemoteAddr;
}
int Connection::FetchTotalBytesRead()
{
int iTotal = m_iTotalBytesRead;
m_iTotalBytesRead = 0;
return iTotal;
}
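
FetchTotalBytesRead() returns the number of bytes read since the previous call and resets the counter. A hypothetical caller (names assumed, not taken from the diff) could drain it periodically to feed a speed meter:

    // pConnection points to an active Connection; AddSpeedReading() is,
    // for example, the speed meter sketched earlier.
    int iBytes = pConnection->FetchTotalBytesRead();   // returns and zeroes the counter
    if (iBytes > 0)
    {
        AddSpeedReading(iBytes);
    }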


@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -60,6 +60,7 @@ protected:
int m_iTimeout;
bool m_bSuppressErrors;
char m_szRemoteAddr[20];
int m_iTotalBytesRead;
#ifndef DISABLE_TLS
TLSSocket* m_pTLSSocket;
bool m_bTLSError;
@@ -112,6 +113,7 @@ public:
#ifndef DISABLE_TLS
bool StartTLS(bool bIsClient, const char* szCertFile, const char* szKeyFile);
#endif
int FetchTotalBytesRead();
};
#endif


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2012-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2012-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -124,19 +124,19 @@ void WebDownloader::Run()
if ((((Status == adFailed) && (iRemainedDownloadRetries > 1)) ||
((Status == adConnectError) && (iRemainedConnectRetries > 1)))
&& !IsStopped() && !(!m_bForce && (g_pOptions->GetPauseDownload() || g_pOptions->GetPauseDownload2())))
&& !IsStopped() && !(!m_bForce && g_pOptions->GetPauseDownload()))
{
detail("Waiting %i sec to retry", g_pOptions->GetRetryInterval());
int msec = 0;
while (!IsStopped() && (msec < g_pOptions->GetRetryInterval() * 1000) &&
!(!m_bForce && (g_pOptions->GetPauseDownload() || g_pOptions->GetPauseDownload2())))
!(!m_bForce && g_pOptions->GetPauseDownload()))
{
usleep(100 * 1000);
msec += 100;
}
}
if (IsStopped() || (!m_bForce && (g_pOptions->GetPauseDownload() || g_pOptions->GetPauseDownload2())))
if (IsStopped() || (!m_bForce && g_pOptions->GetPauseDownload()))
{
Status = adRetry;
break;
@@ -207,6 +207,7 @@ WebDownloader::EStatus WebDownloader::Download()
return Status;
}
m_pConnection->SetTimeout(g_pOptions->GetUrlTimeout());
m_pConnection->SetSuppressErrors(false);
// connection
@@ -298,7 +299,15 @@ void WebDownloader::SendHeaders(URL *pUrl)
tmp[1024-1] = '\0';
m_pConnection->WriteLine(tmp);
snprintf(tmp, 1024, "Host: %s\r\n", pUrl->GetHost());
if ((!strcasecmp(pUrl->GetProtocol(), "http") && (pUrl->GetPort() == 80 || pUrl->GetPort() == 0)) ||
(!strcasecmp(pUrl->GetProtocol(), "https") && (pUrl->GetPort() == 443 || pUrl->GetPort() == 0)))
{
snprintf(tmp, 1024, "Host: %s\r\n", pUrl->GetHost());
}
else
{
snprintf(tmp, 1024, "Host: %s:%i\r\n", pUrl->GetHost(), pUrl->GetPort());
}
tmp[1024-1] = '\0';
m_pConnection->WriteLine(tmp);
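
The change above follows the HTTP convention of omitting the port from the Host header when it equals the protocol default. An illustrative helper (the function name is not part of the source) capturing that rule:

    #include <strings.h>   // strcasecmp

    static bool IsDefaultPort(const char* szProtocol, int iPort)
    {
        return (!strcasecmp(szProtocol, "http") && (iPort == 80 || iPort == 0)) ||
            (!strcasecmp(szProtocol, "https") && (iPort == 443 || iPort == 0));
    }
    // When IsDefaultPort() is false the header is sent as "Host: <host>:<port>".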
@@ -650,15 +659,15 @@ bool WebDownloader::PrepareFile()
// prepare file for writing
const char* szFilename = m_szOutputFilename;
m_pOutFile = fopen(szFilename, "wb");
m_pOutFile = fopen(szFilename, FOPEN_WB);
if (!m_pOutFile)
{
error("Could not %s file %s", "create", szFilename);
return false;
}
if (g_pOptions->GetWriteBufferSize() > 0)
if (g_pOptions->GetWriteBuffer() > 0)
{
setvbuf(m_pOutFile, (char *)NULL, _IOFBF, g_pOptions->GetWriteBufferSize());
setvbuf(m_pOutFile, NULL, _IOFBF, g_pOptions->GetWriteBuffer() * 1024);
}
return true;
@@ -673,7 +682,7 @@ void WebDownloader::LogDebugInfo()
ctime_r(&m_tLastUpdateTime, szTime);
#endif
debug(" Web-Download: status=%i, LastUpdateTime=%s, filename=%s", m_eStatus, szTime, Util::BaseFileName(m_szOutputFilename));
info(" Web-Download: status=%i, LastUpdateTime=%s, filename=%s", m_eStatus, szTime, Util::BaseFileName(m_szOutputFilename));
}
void WebDownloader::Stop()


@@ -88,7 +88,7 @@ protected:
public:
WebDownloader();
~WebDownloader();
virtual ~WebDownloader();
EStatus GetStatus() { return m_eStatus; }
virtual void Run();
virtual void Stop();


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -44,18 +44,14 @@
#include "FeedCoordinator.h"
#include "Options.h"
#include "WebDownloader.h"
#include "Log.h"
#include "Util.h"
#include "FeedFile.h"
#include "FeedFilter.h"
#include "UrlCoordinator.h"
#include "DiskState.h"
extern Options* g_pOptions;
extern UrlCoordinator* g_pUrlCoordinator;
extern DiskState* g_pDiskState;
FeedCoordinator::FeedCacheItem::FeedCacheItem(const char* szUrl, int iCacheTimeSec,const char* szCacheId,
time_t tLastUsage, FeedItemInfos* pFeedItemInfos)
{
@@ -80,8 +76,12 @@ FeedCoordinator::FeedCoordinator()
m_bForce = false;
m_bSave = false;
m_UrlCoordinatorObserver.m_pOwner = this;
g_pUrlCoordinator->Attach(&m_UrlCoordinatorObserver);
g_pLog->RegisterDebuggable(this);
m_DownloadQueueObserver.m_pOwner = this;
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
pDownloadQueue->Attach(&m_DownloadQueueObserver);
DownloadQueue::Unlock();
}
FeedCoordinator::~FeedCoordinator()
@@ -89,6 +89,8 @@ FeedCoordinator::~FeedCoordinator()
debug("Destroying FeedCoordinator");
// Cleanup
g_pLog->UnregisterDebuggable(this);
debug("Deleting FeedDownloaders");
for (ActiveDownloads::iterator it = m_ActiveDownloads.begin(); it != m_ActiveDownloads.end(); it++)
{
@@ -122,12 +124,17 @@ void FeedCoordinator::Run()
{
debug("Entering FeedCoordinator-loop");
m_mutexDownloads.Lock();
while (!DownloadQueue::IsLoaded())
{
usleep(20 * 1000);
}
if (g_pOptions->GetServerMode() && g_pOptions->GetSaveQueue() && g_pOptions->GetReloadQueue())
{
m_mutexDownloads.Lock();
g_pDiskState->LoadFeeds(&m_Feeds, &m_FeedHistory);
m_mutexDownloads.Unlock();
}
m_mutexDownloads.Unlock();
int iSleepInterval = 100;
int iUpdateCounter = 0;
@@ -142,7 +149,7 @@ void FeedCoordinator::Run()
{
// this code should not be called too often, once per second is OK
if (!(g_pOptions->GetPauseDownload() || g_pOptions->GetPauseDownload2()) || m_bForce || g_pOptions->GetUrlForce())
if (!g_pOptions->GetPauseDownload() || m_bForce || g_pOptions->GetUrlForce())
{
m_mutexDownloads.Lock();
time_t tCurrent = time(NULL);
@@ -258,11 +265,10 @@ void FeedCoordinator::ResetHangingDownloads()
void FeedCoordinator::LogDebugInfo()
{
debug(" FeedCoordinator");
debug(" ----------------");
info(" ---------- FeedCoordinator");
m_mutexDownloads.Lock();
debug(" Active Downloads: %i", m_ActiveDownloads.size());
info(" Active Downloads: %i", m_ActiveDownloads.size());
for (ActiveDownloads::iterator it = m_ActiveDownloads.begin(); it != m_ActiveDownloads.end(); it++)
{
FeedDownloader* pFeedDownloader = *it;
@@ -273,7 +279,7 @@ void FeedCoordinator::LogDebugInfo()
void FeedCoordinator::StartFeedDownload(FeedInfo* pFeedInfo, bool bForce)
{
debug("Starting new FeedDownloader for %", pFeedInfo->GetName());
debug("Starting new FeedDownloader for %s", pFeedInfo->GetName());
FeedDownloader* pFeedDownloader = new FeedDownloader();
pFeedDownloader->SetAutoDestroy(true);
@@ -287,7 +293,7 @@ void FeedCoordinator::StartFeedDownload(FeedInfo* pFeedInfo, bool bForce)
else
{
char szUrlName[1024];
UrlInfo::MakeNiceName(pFeedInfo->GetUrl(), "", szUrlName, sizeof(szUrlName));
NZBInfo::MakeNiceUrlName(pFeedInfo->GetUrl(), "", szUrlName, sizeof(szUrlName));
pFeedDownloader->SetInfoName(szUrlName);
}
pFeedDownloader->SetForce(bForce || g_pOptions->GetUrlForce());
@@ -358,19 +364,27 @@ void FeedCoordinator::FeedCompleted(FeedDownloader* pFeedDownloader)
FeedFile* pFeedFile = FeedFile::Create(pFeedInfo->GetOutputFilename());
remove(pFeedInfo->GetOutputFilename());
m_mutexDownloads.Lock();
NZBList addedNZBs;
m_mutexDownloads.Lock();
if (pFeedFile)
{
ProcessFeed(pFeedInfo, pFeedFile->GetFeedItemInfos());
ProcessFeed(pFeedInfo, pFeedFile->GetFeedItemInfos(), &addedNZBs);
delete pFeedFile;
}
pFeedInfo->SetLastUpdate(time(NULL));
pFeedInfo->SetForce(false);
m_bSave = true;
m_mutexDownloads.Unlock();
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
for (NZBList::iterator it = addedNZBs.begin(); it != addedNZBs.end(); it++)
{
NZBInfo* pNZBInfo = *it;
pDownloadQueue->GetQueue()->Add(pNZBInfo, false);
}
pDownloadQueue->Save();
DownloadQueue::Unlock();
}
pFeedInfo->SetStatus(FeedInfo::fsFinished);
}
@@ -410,7 +424,7 @@ void FeedCoordinator::FilterFeed(FeedInfo* pFeedInfo, FeedItemInfos* pFeedItemIn
delete pFeedFilter;
}
void FeedCoordinator::ProcessFeed(FeedInfo* pFeedInfo, FeedItemInfos* pFeedItemInfos)
void FeedCoordinator::ProcessFeed(FeedInfo* pFeedInfo, FeedItemInfos* pFeedItemInfos, NZBList* pAddedNZBs)
{
debug("Process feed %s", pFeedInfo->GetName());
@@ -432,7 +446,8 @@ void FeedCoordinator::ProcessFeed(FeedInfo* pFeedInfo, FeedItemInfos* pFeedItemI
}
else if (!pFeedHistoryInfo)
{
DownloadItem(pFeedInfo, pFeedItemInfo);
NZBInfo* pNZBInfo = CreateNZBInfo(pFeedInfo, pFeedItemInfo);
pAddedNZBs->Add(pNZBInfo, false);
eStatus = FeedHistoryInfo::hsFetched;
iAdded++;
}
@@ -458,12 +473,13 @@ void FeedCoordinator::ProcessFeed(FeedInfo* pFeedInfo, FeedItemInfos* pFeedItemI
}
}
void FeedCoordinator::DownloadItem(FeedInfo* pFeedInfo, FeedItemInfo* pFeedItemInfo)
NZBInfo* FeedCoordinator::CreateNZBInfo(FeedInfo* pFeedInfo, FeedItemInfo* pFeedItemInfo)
{
debug("Download %s from %s", pFeedItemInfo->GetUrl(), pFeedInfo->GetName());
UrlInfo* pUrlInfo = new UrlInfo();
pUrlInfo->SetURL(pFeedItemInfo->GetUrl());
NZBInfo* pNZBInfo = new NZBInfo();
pNZBInfo->SetKind(NZBInfo::nkUrl);
pNZBInfo->SetURL(pFeedItemInfo->GetUrl());
// add .nzb-extension if not present
char szNZBName[1024];
@@ -479,17 +495,17 @@ void FeedCoordinator::DownloadItem(FeedInfo* pFeedInfo, FeedItemInfo* pFeedItemI
Util::MakeValidFilename(szNZBName2, '_', false);
if (strlen(szNZBName) > 0)
{
pUrlInfo->SetNZBFilename(szNZBName2);
pNZBInfo->SetFilename(szNZBName2);
}
pUrlInfo->SetCategory(pFeedItemInfo->GetAddCategory());
pUrlInfo->SetPriority(pFeedItemInfo->GetPriority());
pUrlInfo->SetAddPaused(pFeedItemInfo->GetPauseNzb());
pUrlInfo->SetDupeKey(pFeedItemInfo->GetDupeKey());
pUrlInfo->SetDupeScore(pFeedItemInfo->GetDupeScore());
pUrlInfo->SetDupeMode(pFeedItemInfo->GetDupeMode());
pUrlInfo->SetForce(pFeedInfo->GetForce() || g_pOptions->GetUrlForce());
g_pUrlCoordinator->AddUrlToQueue(pUrlInfo, false);
pNZBInfo->SetCategory(pFeedItemInfo->GetAddCategory());
pNZBInfo->SetPriority(pFeedItemInfo->GetPriority());
pNZBInfo->SetAddUrlPaused(pFeedItemInfo->GetPauseNzb());
pNZBInfo->SetDupeKey(pFeedItemInfo->GetDupeKey());
pNZBInfo->SetDupeScore(pFeedItemInfo->GetDupeScore());
pNZBInfo->SetDupeMode(pFeedItemInfo->GetDupeMode());
return pNZBInfo;
}
bool FeedCoordinator::ViewFeed(int iID, FeedItemInfos** ppFeedItemInfos)
@@ -627,22 +643,22 @@ void FeedCoordinator::FetchFeed(int iID)
m_mutexDownloads.Unlock();
}
void FeedCoordinator::UrlCoordinatorUpdate(Subject* pCaller, void* pAspect)
void FeedCoordinator::DownloadQueueUpdate(Subject* pCaller, void* pAspect)
{
debug("Notification from URL-Coordinator received");
UrlCoordinator::Aspect* pUrlAspect = (UrlCoordinator::Aspect*)pAspect;
if (pUrlAspect->eAction == UrlCoordinator::eaUrlCompleted)
DownloadQueue::Aspect* pQueueAspect = (DownloadQueue::Aspect*)pAspect;
if (pQueueAspect->eAction == DownloadQueue::eaUrlCompleted)
{
m_mutexDownloads.Lock();
FeedHistoryInfo* pFeedHistoryInfo = m_FeedHistory.Find(pUrlAspect->pUrlInfo->GetURL());
FeedHistoryInfo* pFeedHistoryInfo = m_FeedHistory.Find(pQueueAspect->pNZBInfo->GetURL());
if (pFeedHistoryInfo)
{
pFeedHistoryInfo->SetStatus(FeedHistoryInfo::hsFetched);
}
else
{
m_FeedHistory.Add(pUrlAspect->pUrlInfo->GetURL(), FeedHistoryInfo::hsFetched, time(NULL));
m_FeedHistory.Add(pQueueAspect->pNZBInfo->GetURL(), FeedHistoryInfo::hsFetched, time(NULL));
}
m_bSave = true;
m_mutexDownloads.Unlock();
@@ -689,7 +705,7 @@ void FeedCoordinator::CleanupHistory()
}
}
time_t tBorderDate = tOldestUpdate - g_pOptions->GetFeedHistory();
time_t tBorderDate = tOldestUpdate - g_pOptions->GetFeedHistory() * 60*60*24;
int i = 0;
for (FeedHistory::iterator it = m_FeedHistory.begin(); it != m_FeedHistory.end(); )
{


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -30,6 +30,7 @@
#include <list>
#include <time.h>
#include "Log.h"
#include "Thread.h"
#include "WebDownloader.h"
#include "DownloadInfo.h"
@@ -39,17 +40,14 @@
class FeedDownloader;
class FeedCoordinator : public Thread, public Observer, public Subject
class FeedCoordinator : public Thread, public Observer, public Subject, public Debuggable
{
public:
typedef std::list<FeedDownloader*> ActiveDownloads;
private:
class UrlCoordinatorObserver: public Observer
class DownloadQueueObserver: public Observer
{
public:
FeedCoordinator* m_pOwner;
virtual void Update(Subject* pCaller, void* pAspect) { m_pOwner->UrlCoordinatorUpdate(pCaller, pAspect); }
virtual void Update(Subject* pCaller, void* pAspect) { m_pOwner->DownloadQueueUpdate(pCaller, pAspect); }
};
class FeedCacheItem
@@ -74,13 +72,14 @@ private:
};
typedef std::deque<FeedCacheItem*> FeedCache;
typedef std::list<FeedDownloader*> ActiveDownloads;
private:
Feeds m_Feeds;
ActiveDownloads m_ActiveDownloads;
FeedHistory m_FeedHistory;
Mutex m_mutexDownloads;
UrlCoordinatorObserver m_UrlCoordinatorObserver;
DownloadQueueObserver m_DownloadQueueObserver;
bool m_bForce;
bool m_bSave;
FeedCache m_FeedCache;
@@ -88,14 +87,17 @@ private:
void StartFeedDownload(FeedInfo* pFeedInfo, bool bForce);
void FeedCompleted(FeedDownloader* pFeedDownloader);
void FilterFeed(FeedInfo* pFeedInfo, FeedItemInfos* pFeedItemInfos);
void ProcessFeed(FeedInfo* pFeedInfo, FeedItemInfos* pFeedItemInfos);
void DownloadItem(FeedInfo* pFeedInfo, FeedItemInfo* pFeedItemInfo);
void ProcessFeed(FeedInfo* pFeedInfo, FeedItemInfos* pFeedItemInfos, NZBList* pAddedNZBs);
NZBInfo* CreateNZBInfo(FeedInfo* pFeedInfo, FeedItemInfo* pFeedItemInfo);
void ResetHangingDownloads();
void UrlCoordinatorUpdate(Subject* pCaller, void* pAspect);
void DownloadQueueUpdate(Subject* pCaller, void* pAspect);
void CleanupHistory();
void CleanupCache();
void CheckSaveFeeds();
protected:
virtual void LogDebugInfo();
public:
FeedCoordinator();
virtual ~FeedCoordinator();
@@ -110,8 +112,6 @@ public:
void FetchFeed(int iID);
bool HasActiveDownloads();
Feeds* GetFeeds() { return &m_Feeds; }
void LogDebugInfo();
};
class FeedDownloader : public WebDownloader


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -84,7 +84,7 @@ FeedFile::~FeedFile()
void FeedFile::LogDebugInfo()
{
debug(" FeedFile %s", m_szFileName);
info(" FeedFile %s", m_szFileName);
}
void FeedFile::AddItem(FeedItemInfo* pFeedItemInfo)


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -153,22 +153,17 @@ bool FeedFilter::Term::MatchText(const char* szStrValue)
// Word-search
// split szStrValue into tokens
char* szStrValue2 = strdup(szStrValue);
char* saveptr;
char* szWord = strtok_r(szStrValue2, WORD_SEPARATORS, &saveptr);
while (szWord)
Tokenizer tok(szStrValue, WORD_SEPARATORS);
while (const char* szWord = tok.Next())
{
szWord = Util::Trim(szWord);
WildMask mask(m_szParam, m_pRefValues != NULL);
bMatch = *szWord && mask.Match(szWord);
bMatch = mask.Match(szWord);
if (bMatch)
{
FillWildMaskRefValues(szWord, &mask, 0);
break;
}
szWord = strtok_r(NULL, WORD_SEPARATORS, &saveptr);
}
free(szStrValue2);
}
else
{
@@ -335,7 +330,7 @@ bool FeedFilter::Term::Compile(char* szToken)
}
/*
* If pFeedItemInfo is NULL, only field type info is returned
* If pFeedItemInfo is NULL, only field name is validated
*/
bool FeedFilter::Term::GetFieldData(const char* szField, FeedItemInfo* pFeedItemInfo,
const char** StrValue, long long* IntValue)
@@ -413,6 +408,11 @@ bool FeedFilter::Term::GetFieldData(const char* szField, FeedItemInfo* pFeedItem
*IntValue = pFeedItemInfo ? pFeedItemInfo->GetDupeScore() : 0;
return true;
}
else if (!strcasecmp(szField, "dupestatus"))
{
*StrValue = pFeedItemInfo ? pFeedItemInfo->GetDupeStatus() : NULL;
return true;
}
else if (!strncasecmp(szField, "attr-", 5))
{
if (pFeedItemInfo)
@@ -732,146 +732,139 @@ char* FeedFilter::Rule::CompileOptions(char* szRule)
// split command into tokens
*p = '\0';
char* saveptr;
char* szToken = strtok_r(szRule, ",", &saveptr);
while (szToken)
Tokenizer tok(szRule, ",", true);
while (char* szOption = tok.Next())
{
szToken = Util::Trim(szToken);
if (*szToken)
const char* szValue = "";
char* szColon = strchr(szOption, ':');
if (szColon)
{
char* szOption = szToken;
const char* szValue = "";
char* szColon = strchr(szToken, ':');
if (szColon)
{
*szColon = '\0';
szValue = Util::Trim(szColon + 1);
}
*szColon = '\0';
szValue = Util::Trim(szColon + 1);
}
if (!strcasecmp(szOption, "category") || !strcasecmp(szOption, "cat") || !strcasecmp(szOption, "c"))
if (!strcasecmp(szOption, "category") || !strcasecmp(szOption, "cat") || !strcasecmp(szOption, "c"))
{
m_bHasCategory = true;
free(m_szCategory);
m_szCategory = strdup(szValue);
m_bPatCategory = strstr(szValue, "${");
}
else if (!strcasecmp(szOption, "pause") || !strcasecmp(szOption, "p"))
{
m_bHasPause = true;
m_bPause = !*szValue || !strcasecmp(szValue, "yes") || !strcasecmp(szValue, "y");
if (!m_bPause && !(!strcasecmp(szValue, "no") || !strcasecmp(szValue, "n")))
{
m_bHasCategory = true;
free(m_szCategory);
m_szCategory = strdup(szValue);
m_bPatCategory = strstr(szValue, "${");
// error
return NULL;
}
else if (!strcasecmp(szOption, "pause") || !strcasecmp(szOption, "p"))
}
else if (!strcasecmp(szOption, "priority") || !strcasecmp(szOption, "pr") || !strcasecmp(szOption, "r"))
{
if (!strchr("0123456789-+", *szValue))
{
m_bHasPause = true;
m_bPause = !*szValue || !strcasecmp(szValue, "yes") || !strcasecmp(szValue, "y");
if (!m_bPause && !(!strcasecmp(szValue, "no") || !strcasecmp(szValue, "n")))
{
// error
return NULL;
}
// error
return NULL;
}
else if (!strcasecmp(szOption, "priority") || !strcasecmp(szOption, "pr") || !strcasecmp(szOption, "r"))
m_bHasPriority = true;
m_iPriority = atoi(szValue);
}
else if (!strcasecmp(szOption, "priority+") || !strcasecmp(szOption, "pr+") || !strcasecmp(szOption, "r+"))
{
if (!strchr("0123456789-+", *szValue))
{
if (!strchr("0123456789-+", *szValue))
{
// error
return NULL;
}
m_bHasPriority = true;
m_iPriority = atoi(szValue);
// error
return NULL;
}
else if (!strcasecmp(szOption, "priority+") || !strcasecmp(szOption, "pr+") || !strcasecmp(szOption, "r+"))
m_bHasAddPriority = true;
m_iAddPriority = atoi(szValue);
}
else if (!strcasecmp(szOption, "dupescore") || !strcasecmp(szOption, "ds") || !strcasecmp(szOption, "s"))
{
if (!strchr("0123456789-+", *szValue))
{
if (!strchr("0123456789-+", *szValue))
{
// error
return NULL;
}
m_bHasAddPriority = true;
m_iAddPriority = atoi(szValue);
// error
return NULL;
}
else if (!strcasecmp(szOption, "dupescore") || !strcasecmp(szOption, "ds") || !strcasecmp(szOption, "s"))
m_bHasDupeScore = true;
m_iDupeScore = atoi(szValue);
}
else if (!strcasecmp(szOption, "dupescore+") || !strcasecmp(szOption, "ds+") || !strcasecmp(szOption, "s+"))
{
if (!strchr("0123456789-+", *szValue))
{
if (!strchr("0123456789-+", *szValue))
{
// error
return NULL;
}
m_bHasDupeScore = true;
m_iDupeScore = atoi(szValue);
// error
return NULL;
}
else if (!strcasecmp(szOption, "dupescore+") || !strcasecmp(szOption, "ds+") || !strcasecmp(szOption, "s+"))
m_bHasAddDupeScore = true;
m_iAddDupeScore = atoi(szValue);
}
else if (!strcasecmp(szOption, "dupekey") || !strcasecmp(szOption, "dk") || !strcasecmp(szOption, "k"))
{
m_bHasDupeKey = true;
free(m_szDupeKey);
m_szDupeKey = strdup(szValue);
m_bPatDupeKey = strstr(szValue, "${");
}
else if (!strcasecmp(szOption, "dupekey+") || !strcasecmp(szOption, "dk+") || !strcasecmp(szOption, "k+"))
{
m_bHasAddDupeKey = true;
free(m_szAddDupeKey);
m_szAddDupeKey = strdup(szValue);
m_bPatAddDupeKey = strstr(szValue, "${");
}
else if (!strcasecmp(szOption, "dupemode") || !strcasecmp(szOption, "dm") || !strcasecmp(szOption, "m"))
{
m_bHasDupeMode = true;
if (!strcasecmp(szValue, "score") || !strcasecmp(szValue, "s"))
{
if (!strchr("0123456789-+", *szValue))
{
// error
return NULL;
}
m_bHasAddDupeScore = true;
m_iAddDupeScore = atoi(szValue);
m_eDupeMode = dmScore;
}
else if (!strcasecmp(szOption, "dupekey") || !strcasecmp(szOption, "dk") || !strcasecmp(szOption, "k"))
else if (!strcasecmp(szValue, "all") || !strcasecmp(szValue, "a"))
{
m_bHasDupeKey = true;
free(m_szDupeKey);
m_szDupeKey = strdup(szValue);
m_bPatDupeKey = strstr(szValue, "${");
m_eDupeMode = dmAll;
}
else if (!strcasecmp(szOption, "dupekey+") || !strcasecmp(szOption, "dk+") || !strcasecmp(szOption, "k+"))
else if (!strcasecmp(szValue, "force") || !strcasecmp(szValue, "f"))
{
m_bHasAddDupeKey = true;
free(m_szAddDupeKey);
m_szAddDupeKey = strdup(szValue);
m_bPatAddDupeKey = strstr(szValue, "${");
}
else if (!strcasecmp(szOption, "dupemode") || !strcasecmp(szOption, "dm") || !strcasecmp(szOption, "m"))
{
m_bHasDupeMode = true;
if (!strcasecmp(szValue, "score") || !strcasecmp(szValue, "s"))
{
m_eDupeMode = dmScore;
}
else if (!strcasecmp(szValue, "all") || !strcasecmp(szValue, "a"))
{
m_eDupeMode = dmAll;
}
else if (!strcasecmp(szValue, "force") || !strcasecmp(szValue, "f"))
{
m_eDupeMode = dmForce;
}
else
{
// error
return NULL;
}
}
else if (!strcasecmp(szOption, "rageid"))
{
m_bHasRageId = true;
free(m_szRageId);
m_szRageId = strdup(szValue);
}
else if (!strcasecmp(szOption, "series"))
{
m_bHasSeries = true;
free(m_szSeries);
m_szSeries = strdup(szValue);
}
// for compatibility with older version we support old commands too
else if (!strcasecmp(szOption, "paused") || !strcasecmp(szOption, "unpaused"))
{
m_bHasPause = true;
m_bPause = !strcasecmp(szOption, "paused");
}
else if (strchr("0123456789-+", *szOption))
{
m_bHasPriority = true;
m_iPriority = atoi(szOption);
m_eDupeMode = dmForce;
}
else
{
m_bHasCategory = true;
free(m_szCategory);
m_szCategory = strdup(szOption);
// error
return NULL;
}
}
szToken = strtok_r(NULL, ",", &saveptr);
else if (!strcasecmp(szOption, "rageid"))
{
m_bHasRageId = true;
free(m_szRageId);
m_szRageId = strdup(szValue);
}
else if (!strcasecmp(szOption, "series"))
{
m_bHasSeries = true;
free(m_szSeries);
m_szSeries = strdup(szValue);
}
// for compatibility with older version we support old commands too
else if (!strcasecmp(szOption, "paused") || !strcasecmp(szOption, "unpaused"))
{
m_bHasPause = true;
m_bPause = !strcasecmp(szOption, "paused");
}
else if (strchr("0123456789-+", *szOption))
{
m_bHasPriority = true;
m_iPriority = atoi(szOption);
}
else
{
m_bHasCategory = true;
free(m_szCategory);
m_szCategory = strdup(szOption);
}
}
szRule = p + 1;
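The rewritten CompileOptions loop above drops the manual strtok_r bookkeeping in favour of the project's Tokenizer helper and then splits each comma-separated token at the first colon into an option name and an optional value. The following is a compact, self-contained sketch of that parsing pattern using only the standard library; ParseOptions is a hypothetical helper and is not NZBGet's actual Tokenizer or Util::Trim code.

#include <cstdio>
#include <string>
#include <utility>
#include <vector>

// Mimics the "split on commas, then split each token at the first colon
// into option:value" behaviour of FeedFilter::Rule::CompileOptions above.
static std::vector<std::pair<std::string, std::string> > ParseOptions(const char* szRule)
{
	std::vector<std::pair<std::string, std::string> > options;
	std::string rule(szRule ? szRule : "");
	size_t pos = 0;
	while (pos <= rule.size())
	{
		size_t comma = rule.find(',', pos);
		if (comma == std::string::npos) comma = rule.size();
		std::string token = rule.substr(pos, comma - pos);
		pos = comma + 1;

		// trim surrounding whitespace (Util::Trim in the original code)
		size_t begin = token.find_first_not_of(" \t");
		size_t end = token.find_last_not_of(" \t");
		if (begin == std::string::npos) continue;  // empty token, skip it
		token = token.substr(begin, end - begin + 1);

		// split "option:value"; the value stays empty when no colon is present
		size_t colon = token.find(':');
		std::string name = (colon == std::string::npos) ? token : token.substr(0, colon);
		std::string value = (colon == std::string::npos) ? std::string() : token.substr(colon + 1);
		options.push_back(std::make_pair(name, value));
	}
	return options;
}

int main()
{
	std::vector<std::pair<std::string, std::string> > options = ParseOptions("category:tv, pause:yes, priority:+50");
	for (size_t i = 0; i < options.size(); i++)
	{
		std::printf("option=%s value=%s\n", options[i].first.c_str(), options[i].second.c_str());
	}
	return 0;
}

Feeding it a rule such as "category:tv, pause:yes, priority:+50" yields the same name/value pairs that the real code then inspects with strcasecmp before setting the corresponding m_bHas* flags.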

View File

View File

@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -39,8 +39,10 @@
#include "nzbget.h"
#include "FeedInfo.h"
#include "DupeCoordinator.h"
#include "Util.h"
extern DupeCoordinator* g_pDupeCoordinator;
FeedInfo::FeedInfo(int iID, const char* szName, const char* szUrl, int iInterval,
const char* szFilter, bool bPauseNzb, const char* szCategory, int iPriority)
@@ -49,7 +51,7 @@ FeedInfo::FeedInfo(int iID, const char* szName, const char* szUrl, int iInterval
m_szName = strdup(szName ? szName : "");
m_szUrl = strdup(szUrl ? szUrl : "");
m_szFilter = strdup(szFilter ? szFilter : "");
m_iFilterHash = Util::HashBJ96(szFilter, strlen(szFilter), 0);
m_iFilterHash = Util::HashBJ96(m_szFilter, strlen(m_szFilter), 0);
m_szCategory = strdup(szCategory ? szCategory : "");
m_iInterval = iInterval;
m_bPauseNzb = bPauseNzb;
@@ -145,6 +147,7 @@ FeedItemInfo::FeedItemInfo()
m_szDupeKey = NULL;
m_iDupeScore = 0;
m_eDupeMode = dmScore;
m_szDupeStatus = NULL;
}
FeedItemInfo::~FeedItemInfo()
@@ -158,6 +161,7 @@ FeedItemInfo::~FeedItemInfo()
free(m_szEpisode);
free(m_szAddCategory);
free(m_szDupeKey);
free(m_szDupeStatus);
}
void FeedItemInfo::SetTitle(const char* szTitle)
@@ -319,6 +323,37 @@ void FeedItemInfo::ParseSeasonEpisode()
}
}
const char* FeedItemInfo::GetDupeStatus()
{
if (!m_szDupeStatus)
{
const char* szDupeStatusName[] = { "", "QUEUED", "DOWNLOADING", "3", "SUCCESS", "5", "6", "7", "WARNING",
"9", "10", "11", "12", "13", "14", "15", "FAILURE" };
char szStatuses[200];
szStatuses[0] = '\0';
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
DupeCoordinator::EDupeStatus eDupeStatus = g_pDupeCoordinator->GetDupeStatus(pDownloadQueue, m_szTitle, m_szDupeKey);
DownloadQueue::Unlock();
for (int i = 1; i <= (int)DupeCoordinator::dsFailure; i = i << 1)
{
if (eDupeStatus & i)
{
if (*szStatuses)
{
strcat(szStatuses, ",");
}
strcat(szStatuses, szDupeStatusName[i]);
}
}
m_szDupeStatus = strdup(szStatuses);
}
return m_szDupeStatus;
}
FeedHistoryInfo::FeedHistoryInfo(const char* szUrl, FeedHistoryInfo::EStatus eStatus, time_t tLastSeen)
{
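GetDupeStatus above turns the DupeCoordinator bit flags into a comma-separated string by stepping through the powers of two and indexing a name table with the bit value itself. A stripped-down sketch of that decoding step, with hypothetical flag values standing in for DupeCoordinator::EDupeStatus, could be:

#include <cstdio>
#include <string>

// Hypothetical flag set; each status is a distinct bit, like EDupeStatus.
enum EStatus { stQueued = 1, stDownloading = 2, stSuccess = 4, stFailure = 8 };

// Decode a bitmask into "NAME1,NAME2,...": iterate i = 1, 2, 4, ... and
// append the table entry for every bit that is set, as GetDupeStatus does.
static std::string DecodeStatus(int iStatus)
{
	static const char* szNames[] = { "", "QUEUED", "DOWNLOADING", "", "SUCCESS", "", "", "", "FAILURE" };
	std::string result;
	for (int i = 1; i <= (int)stFailure; i <<= 1)
	{
		if (iStatus & i)
		{
			if (!result.empty()) result += ",";
			result += szNames[i];
		}
	}
	return result;
}

int main()
{
	std::printf("%s\n", DecodeStatus(stQueued | stFailure).c_str());  // prints "QUEUED,FAILURE"
	return 0;
}

Because the table is indexed by the bit value rather than the bit position, the slots between powers of two remain unused placeholders, which is why the original array carries filler entries such as "3", "5" and "9".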

View File

@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -165,6 +165,7 @@ private:
char* m_szDupeKey;
int m_iDupeScore;
EDupeMode m_eDupeMode;
char* m_szDupeStatus;
SharedFeedData* m_pSharedFeedData;
Attributes m_Attributes;
@@ -219,6 +220,7 @@ public:
void SetDupeScore(int iDupeScore) { m_iDupeScore = iDupeScore; }
EDupeMode GetDupeMode() { return m_eDupeMode; }
void SetDupeMode(EDupeMode eDupeMode) { m_eDupeMode = eDupeMode; }
const char* GetDupeStatus();
Attributes* GetAttributes() { return &m_Attributes; }
};

View File

@@ -76,7 +76,7 @@ void ColoredFrontend::PrintStatus()
timeString[0] = '\0';
int iCurrentDownloadSpeed = m_bStandBy ? 0 : m_iCurrentDownloadSpeed;
if (iCurrentDownloadSpeed > 0 && !(m_bPauseDownload || m_bPauseDownload2))
if (iCurrentDownloadSpeed > 0 && !m_bPauseDownload)
{
long long remain_sec = (long long)(m_lRemainingSize / iCurrentDownloadSpeed);
int h = (int)(remain_sec / 3600);
@@ -115,7 +115,7 @@ void ColoredFrontend::PrintStatus()
snprintf(tmp, 1024, " %d threads, %.*f KB/s, %.2f MB remaining%s%s%s%s%s\n",
m_iThreadCount, (iCurrentDownloadSpeed >= 10*1024 ? 0 : 1), (float)iCurrentDownloadSpeed / 1024.0,
(float)(Util::Int64ToFloat(m_lRemainingSize) / 1024.0 / 1024.0), timeString, szPostStatus,
m_bPauseDownload || m_bPauseDownload2 ? (m_bStandBy ? ", Paused" : ", Pausing") : "",
m_bPauseDownload ? (m_bStandBy ? ", Paused" : ", Pausing") : "",
szDownloadLimit, szControlSeq);
tmp[1024-1] = '\0';
printf("%s", tmp);

View File

@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -47,12 +47,12 @@
#include "Log.h"
#include "Connection.h"
#include "MessageBase.h"
#include "QueueCoordinator.h"
#include "RemoteClient.h"
#include "Util.h"
#include "StatMeter.h"
extern QueueCoordinator* g_pQueueCoordinator;
extern Options* g_pOptions;
extern StatMeter* g_pStatMeter;
Frontend::Frontend()
{
@@ -65,7 +65,6 @@ Frontend::Frontend()
m_iCurrentDownloadSpeed = 0;
m_lRemainingSize = 0;
m_bPauseDownload = false;
m_bPauseDownload2 = false;
m_iDownloadLimit = 0;
m_iThreadCount = 0;
m_iPostJobCount = 0;
@@ -95,16 +94,22 @@ bool Frontend::PrepareData()
{
if (m_bSummary)
{
m_iCurrentDownloadSpeed = g_pQueueCoordinator->CalcCurrentDownloadSpeed();
m_lRemainingSize = g_pQueueCoordinator->CalcRemainingSize();
m_iCurrentDownloadSpeed = g_pStatMeter->CalcCurrentDownloadSpeed();
m_bPauseDownload = g_pOptions->GetPauseDownload();
m_bPauseDownload2 = g_pOptions->GetPauseDownload2();
m_iDownloadLimit = g_pOptions->GetDownloadRate();
m_iThreadCount = Thread::GetThreadCount();
PostQueue* pPostQueue = g_pQueueCoordinator->LockQueue()->GetPostQueue();
m_iPostJobCount = pPostQueue->size();
g_pQueueCoordinator->UnlockQueue();
g_pQueueCoordinator->CalcStat(&m_iUpTimeSec, &m_iDnTimeSec, &m_iAllBytes, &m_bStandBy);
g_pStatMeter->CalcTotalStat(&m_iUpTimeSec, &m_iDnTimeSec, &m_iAllBytes, &m_bStandBy);
DownloadQueue *pDownloadQueue = DownloadQueue::Lock();
m_iPostJobCount = 0;
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
NZBInfo* pNZBInfo = *it;
m_iPostJobCount += pNZBInfo->GetPostInfo() ? 1 : 0;
}
pDownloadQueue->CalcRemainingSize(&m_lRemainingSize, NULL);
DownloadQueue::Unlock();
}
}
return true;
@@ -120,15 +125,13 @@ void Frontend::FreeData()
}
m_RemoteMessages.clear();
for (FileQueue::iterator it = m_RemoteQueue.GetFileQueue()->begin(); it != m_RemoteQueue.GetFileQueue()->end(); it++)
{
delete *it;
}
m_RemoteQueue.GetFileQueue()->clear();
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
pDownloadQueue->GetQueue()->Clear();
DownloadQueue::Unlock();
}
}
Log::Messages * Frontend::LockMessages()
Log::Messages* Frontend::LockMessages()
{
if (IsRemoteMode())
{
@@ -150,22 +153,12 @@ void Frontend::UnlockMessages()
DownloadQueue* Frontend::LockQueue()
{
if (IsRemoteMode())
{
return &m_RemoteQueue;
}
else
{
return g_pQueueCoordinator->LockQueue();
}
return DownloadQueue::Lock();
}
void Frontend::UnlockQueue()
{
if (!IsRemoteMode())
{
g_pQueueCoordinator->UnlockQueue();
}
DownloadQueue::Unlock();
}
bool Frontend::IsRemoteMode()
@@ -173,23 +166,16 @@ bool Frontend::IsRemoteMode()
return g_pOptions->GetRemoteClientMode();
}
void Frontend::ServerPauseUnpause(bool bPause, bool bSecondRegister)
void Frontend::ServerPauseUnpause(bool bPause)
{
if (IsRemoteMode())
{
RequestPauseUnpause(bPause, bSecondRegister);
RequestPauseUnpause(bPause);
}
else
{
g_pOptions->SetResumeTime(0);
if (bSecondRegister)
{
g_pOptions->SetPauseDownload2(bPause);
}
else
{
g_pOptions->SetPauseDownload(bPause);
}
g_pOptions->SetPauseDownload(bPause);
}
}
@@ -205,27 +191,18 @@ void Frontend::ServerSetDownloadRate(int iRate)
}
}
void Frontend::ServerDumpDebug()
bool Frontend::ServerEditQueue(DownloadQueue::EEditAction eAction, int iOffset, int iID)
{
if (IsRemoteMode())
{
RequestDumpDebug();
return RequestEditQueue(eAction, iOffset, iID);
}
else
{
g_pQueueCoordinator->LogDebugInfo();
}
}
bool Frontend::ServerEditQueue(QueueEditor::EEditAction eAction, int iOffset, int iID)
{
if (IsRemoteMode())
{
return RequestEditQueue((eRemoteEditAction)eAction, iOffset, iID);
}
else
{
return g_pQueueCoordinator->GetQueueEditor()->EditEntry(iID, true, eAction, iOffset, NULL);
DownloadQueue* pDownloadQueue = LockQueue();
bool bOK = pDownloadQueue->EditEntry(iID, eAction, iOffset, NULL);
UnlockQueue();
return bOK;
}
return false;
}
@@ -360,7 +337,6 @@ bool Frontend::RequestFileList()
if (m_bSummary)
{
m_bPauseDownload = ntohl(ListResponse.m_bDownloadPaused);
m_bPauseDownload2 = ntohl(ListResponse.m_bDownload2Paused);
m_lRemainingSize = Util::JoinInt64(ntohl(ListResponse.m_iRemainingSizeHi), ntohl(ListResponse.m_iRemainingSizeLo));
m_iCurrentDownloadSpeed = ntohl(ListResponse.m_iDownloadRate);
m_iDownloadLimit = ntohl(ListResponse.m_iDownloadLimit);
@@ -376,7 +352,10 @@ bool Frontend::RequestFileList()
{
RemoteClient client;
client.SetVerbose(false);
client.BuildFileList(&ListResponse, pBuf, &m_RemoteQueue);
DownloadQueue* pDownloadQueue = LockQueue();
client.BuildFileList(&ListResponse, pBuf, pDownloadQueue);
UnlockQueue();
}
if (pBuf)
@@ -387,11 +366,11 @@ bool Frontend::RequestFileList()
return true;
}
bool Frontend::RequestPauseUnpause(bool bPause, bool bSecondRegister)
bool Frontend::RequestPauseUnpause(bool bPause)
{
RemoteClient client;
client.SetVerbose(false);
return client.RequestServerPauseUnpause(bPause, bSecondRegister ? eRemotePauseUnpauseActionDownload2 : eRemotePauseUnpauseActionDownload);
return client.RequestServerPauseUnpause(bPause, eRemotePauseUnpauseActionDownload);
}
bool Frontend::RequestSetDownloadRate(int iRate)
@@ -401,16 +380,9 @@ bool Frontend::RequestSetDownloadRate(int iRate)
return client.RequestServerSetDownloadRate(iRate);
}
bool Frontend::RequestDumpDebug()
bool Frontend::RequestEditQueue(DownloadQueue::EEditAction eAction, int iOffset, int iID)
{
RemoteClient client;
client.SetVerbose(false);
return client.RequestServerDumpDebug();
}
bool Frontend::RequestEditQueue(eRemoteEditAction iAction, int iOffset, int iID)
{
RemoteClient client;
client.SetVerbose(false);
return client.RequestServerEditQueue(iAction, iOffset, NULL, &iID, 1, NULL, eRemoteMatchModeID, false);
return client.RequestServerEditQueue(eAction, iOffset, NULL, &iID, 1, NULL, eRemoteMatchModeID);
}
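Throughout this change set the frontends stop going through g_pQueueCoordinator->LockQueue() and instead use the static DownloadQueue::Lock()/Unlock() pair, as PrepareData and ServerEditQueue do above. The following is a rough, self-contained sketch of such a globally locked singleton queue; the names and the std::mutex are placeholder assumptions, not the project's actual DownloadQueue implementation.

#include <cstdio>
#include <mutex>

// Hypothetical stand-in for the DownloadQueue::Lock()/Unlock() pattern.
class DownloadQueue
{
private:
	static DownloadQueue g_Queue;
	static std::mutex g_Mutex;
public:
	int m_iPostJobCount = 0;

	// Lock() hands out the single global queue; every caller must pair it
	// with Unlock(), exactly as the refactored Frontend code does above.
	static DownloadQueue* Lock() { g_Mutex.lock(); return &g_Queue; }
	static void Unlock() { g_Mutex.unlock(); }
};

DownloadQueue DownloadQueue::g_Queue;
std::mutex DownloadQueue::g_Mutex;

int main()
{
	DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
	pDownloadQueue->m_iPostJobCount++;
	DownloadQueue::Unlock();

	std::printf("post jobs: %i\n", DownloadQueue::Lock()->m_iPostJobCount);
	DownloadQueue::Unlock();
	return 0;
}

Every Lock() has to be matched by an Unlock() on all code paths, which is why the diff keeps each call pair tightly scoped around the queue access.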

View File

@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2010 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -37,7 +37,6 @@ class Frontend : public Thread
{
private:
Log::Messages m_RemoteMessages;
DownloadQueue m_RemoteQueue;
bool RequestMessages();
bool RequestFileList();
@@ -53,7 +52,6 @@ protected:
int m_iCurrentDownloadSpeed;
long long m_lRemainingSize;
bool m_bPauseDownload;
bool m_bPauseDownload2;
int m_iDownloadLimit;
int m_iThreadCount;
int m_iPostJobCount;
@@ -70,14 +68,12 @@ protected:
void UnlockQueue();
bool IsRemoteMode();
void InitMessageBase(SNZBRequestBase* pMessageBase, int iRequest, int iSize);
void ServerPauseUnpause(bool bPause, bool bSecondRegister);
bool RequestPauseUnpause(bool bPause, bool bSecondRegister);
void ServerPauseUnpause(bool bPause);
bool RequestPauseUnpause(bool bPause);
void ServerSetDownloadRate(int iRate);
bool RequestSetDownloadRate(int iRate);
void ServerDumpDebug();
bool RequestDumpDebug();
bool ServerEditQueue(QueueEditor::EEditAction eAction, int iOffset, int iEntry);
bool RequestEditQueue(eRemoteEditAction iAction, int iOffset, int iID);
bool ServerEditQueue(DownloadQueue::EEditAction eAction, int iOffset, int iEntry);
bool RequestEditQueue(DownloadQueue::EEditAction eAction, int iOffset, int iID);
public:
Frontend();

View File

@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2011 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -43,6 +43,7 @@
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#ifndef WIN32
#include <unistd.h>
#endif
@@ -269,7 +270,6 @@ void NCursesFrontend::Run()
}
FreeData();
ClearGroupQueue();
debug("Exiting NCursesFrontend-loop");
}
@@ -288,13 +288,11 @@ void NCursesFrontend::Update(int iKey)
if (m_iDataUpdatePos <= 0)
{
FreeData();
ClearGroupQueue();
m_iNeededLogEntries = m_iMessagesWinClientHeight;
if (!PrepareData())
{
return;
}
PrepareGroupQueue();
// recalculate frame sizes
CalcWindowSizes();
@@ -387,17 +385,22 @@ void NCursesFrontend::CalcWindowSizes()
int NCursesFrontend::CalcQueueSize()
{
int iQueueSize = 0;
DownloadQueue* pDownloadQueue = LockQueue();
if (m_bGroupFiles)
{
return m_groupQueue.size();
iQueueSize = pDownloadQueue->GetQueue()->size();
}
else
{
DownloadQueue* pDownloadQueue = LockQueue();
int iQueueSize = pDownloadQueue->GetFileQueue()->size();
UnlockQueue();
return iQueueSize;
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
NZBInfo* pNZBInfo = *it;
iQueueSize += pNZBInfo->GetFileList()->size();
}
}
UnlockQueue();
return iQueueSize;
}
void NCursesFrontend::PlotLine(const char * szString, int iRow, int iPos, int iColorPair)
@@ -611,7 +614,7 @@ void NCursesFrontend::PrintStatus()
timeString[0] = '\0';
int iCurrentDownloadSpeed = m_bStandBy ? 0 : m_iCurrentDownloadSpeed;
if (iCurrentDownloadSpeed > 0 && !(m_bPauseDownload || m_bPauseDownload2))
if (iCurrentDownloadSpeed > 0 && !m_bPauseDownload)
{
long long remain_sec = (long long)(m_lRemainingSize / iCurrentDownloadSpeed);
int h = (int)(remain_sec / 3600);
@@ -642,12 +645,10 @@ void NCursesFrontend::PrintStatus()
float fAverageSpeed = (float)(Util::Int64ToFloat(m_iDnTimeSec > 0 ? m_iAllBytes / m_iDnTimeSec : 0) / 1024.0);
snprintf(tmp, MAX_SCREEN_WIDTH, " %d threads, %.*f KB/s, %.2f MB remaining%s%s%s%s%s, Avg. %.*f KB/s",
snprintf(tmp, MAX_SCREEN_WIDTH, " %d threads, %.*f KB/s, %.2f MB remaining%s%s%s%s, Avg. %.*f KB/s",
m_iThreadCount, (iCurrentDownloadSpeed >= 10*1024 ? 0 : 1), (float)iCurrentDownloadSpeed / 1024.0,
(float)(Util::Int64ToFloat(m_lRemainingSize) / 1024.0 / 1024.0), timeString, szPostStatus,
m_bPauseDownload || m_bPauseDownload2 ? (m_bStandBy ? ", Paused" : ", Pausing") : "",
m_bPauseDownload || m_bPauseDownload2 ?
(m_bPauseDownload && m_bPauseDownload2 ? " (+2)" : m_bPauseDownload2 ? " (2)" : "") : "",
m_bPauseDownload ? (m_bStandBy ? ", Paused" : ", Pausing") : "",
szDownloadLimit, (fAverageSpeed >= 10 ? 0 : 1), fAverageSpeed);
tmp[MAX_SCREEN_WIDTH - 1] = '\0';
PlotLine(tmp, iStatusRow, 0, NCURSES_COLORPAIR_STATUS);
@@ -743,31 +744,24 @@ void NCursesFrontend::PrintQueue()
void NCursesFrontend::PrintFileQueue()
{
int iLineNr = m_iQueueWinTop;
DownloadQueue* pDownloadQueue = LockQueue();
if (pDownloadQueue->GetFileQueue()->empty())
{
char szBuffer[MAX_SCREEN_WIDTH];
snprintf(szBuffer, sizeof(szBuffer), "%s Files for downloading", m_bUseColor ? "" : "*** ");
szBuffer[MAX_SCREEN_WIDTH - 1] = '\0';
PrintTopHeader(szBuffer, iLineNr++, true);
PlotLine("Ready to receive nzb-job", iLineNr++, 0, NCURSES_COLORPAIR_TEXT);
}
else
{
iLineNr++;
long long lRemaining = 0;
long long lPaused = 0;
int iPausedFiles = 0;
int i = 0;
for (FileQueue::iterator it = pDownloadQueue->GetFileQueue()->begin(); it != pDownloadQueue->GetFileQueue()->end(); it++, i++)
{
FileInfo* pFileInfo = *it;
if (i >= m_iQueueScrollOffset && i < m_iQueueScrollOffset + m_iQueueWinHeight -1)
int iLineNr = m_iQueueWinTop + 1;
long long lRemaining = 0;
long long lPaused = 0;
int iPausedFiles = 0;
int iFileNum = 0;
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
NZBInfo* pNZBInfo = *it;
for (FileList::iterator it2 = pNZBInfo->GetFileList()->begin(); it2 != pNZBInfo->GetFileList()->end(); it2++, iFileNum++)
{
FileInfo* pFileInfo = *it2;
if (iFileNum >= m_iQueueScrollOffset && iFileNum < m_iQueueScrollOffset + m_iQueueWinHeight -1)
{
PrintFilename(pFileInfo, iLineNr++, i == m_iSelectedQueueEntry);
PrintFilename(pFileInfo, iLineNr++, iFileNum == m_iSelectedQueueEntry);
}
if (pFileInfo->GetPaused())
@@ -776,8 +770,11 @@ void NCursesFrontend::PrintFileQueue()
lPaused += pFileInfo->GetRemainingSize();
}
lRemaining += pFileInfo->GetRemainingSize();
}
}
}
if (iFileNum > 0)
{
char szRemaining[20];
Util::FormatFileSize(szRemaining, sizeof(szRemaining), lRemaining);
@@ -786,11 +783,21 @@ void NCursesFrontend::PrintFileQueue()
char szBuffer[MAX_SCREEN_WIDTH];
snprintf(szBuffer, sizeof(szBuffer), " %sFiles for downloading - %i / %i files in queue - %s / %s",
m_bUseColor ? "" : "*** ", (int)pDownloadQueue->GetFileQueue()->size(),
(int)pDownloadQueue->GetFileQueue()->size() - iPausedFiles, szRemaining, szUnpaused);
m_bUseColor ? "" : "*** ", iFileNum,
iFileNum - iPausedFiles, szRemaining, szUnpaused);
szBuffer[MAX_SCREEN_WIDTH - 1] = '\0';
PrintTopHeader(szBuffer, m_iQueueWinTop, true);
}
else
{
iLineNr--;
char szBuffer[MAX_SCREEN_WIDTH];
snprintf(szBuffer, sizeof(szBuffer), "%s Files for downloading", m_bUseColor ? "" : "*** ");
szBuffer[MAX_SCREEN_WIDTH - 1] = '\0';
PrintTopHeader(szBuffer, iLineNr++, true);
PlotLine("Ready to receive nzb-job", iLineNr++, 0, NCURSES_COLORPAIR_TEXT);
}
UnlockQueue();
}
@@ -821,9 +828,9 @@ void NCursesFrontend::PrintFilename(FileInfo * pFileInfo, int iRow, bool bSelect
char szPriority[100];
szPriority[0] = '\0';
if (pFileInfo->GetPriority() != 0)
if (pFileInfo->GetNZBInfo()->GetPriority() != 0)
{
sprintf(szPriority, " [%+i]", pFileInfo->GetPriority());
sprintf(szPriority, " [%+i]", pFileInfo->GetNZBInfo()->GetPriority());
}
char szCompleted[20];
@@ -920,9 +927,8 @@ void NCursesFrontend::PrintGroupQueue()
{
int iLineNr = m_iQueueWinTop;
LockQueue();
GroupQueue* pGroupQueue = &m_groupQueue;
if (pGroupQueue->empty())
DownloadQueue* pDownloadQueue = LockQueue();
if (pDownloadQueue->GetQueue()->empty())
{
char szBuffer[MAX_SCREEN_WIDTH];
snprintf(szBuffer, sizeof(szBuffer), "%s NZBs for downloading", m_bUseColor ? "" : "*** ");
@@ -937,30 +943,27 @@ void NCursesFrontend::PrintGroupQueue()
ResetColWidths();
int iCalcLineNr = iLineNr;
int i = 0;
for (GroupQueue::iterator it = pGroupQueue->begin(); it != pGroupQueue->end(); it++, i++)
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++, i++)
{
GroupInfo* pGroupInfo = *it;
NZBInfo* pNZBInfo = *it;
if (i >= m_iQueueScrollOffset && i < m_iQueueScrollOffset + m_iQueueWinHeight -1)
{
PrintGroupname(pGroupInfo, iCalcLineNr++, false, true);
PrintGroupname(pNZBInfo, iCalcLineNr++, false, true);
}
}
long long lRemaining = 0;
long long lPaused = 0;
i = 0;
for (GroupQueue::iterator it = pGroupQueue->begin(); it != pGroupQueue->end(); it++, i++)
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++, i++)
{
GroupInfo* pGroupInfo = *it;
NZBInfo* pNZBInfo = *it;
if (i >= m_iQueueScrollOffset && i < m_iQueueScrollOffset + m_iQueueWinHeight -1)
{
PrintGroupname(pGroupInfo, iLineNr++, i == m_iSelectedQueueEntry, false);
PrintGroupname(pNZBInfo, iLineNr++, i == m_iSelectedQueueEntry, false);
}
lRemaining += pGroupInfo->GetRemainingSize();
lPaused += pGroupInfo->GetPausedSize();
lRemaining += pNZBInfo->GetRemainingSize();
lPaused += pNZBInfo->GetPausedSize();
}
char szRemaining[20];
@@ -971,7 +974,7 @@ void NCursesFrontend::PrintGroupQueue()
char szBuffer[MAX_SCREEN_WIDTH];
snprintf(szBuffer, sizeof(szBuffer), " %sNZBs for downloading - %i NZBs in queue - %s / %s",
m_bUseColor ? "" : "*** ", (int)pGroupQueue->size(), szRemaining, szUnpaused);
m_bUseColor ? "" : "*** ", (int)pDownloadQueue->GetQueue()->size(), szRemaining, szUnpaused);
szBuffer[MAX_SCREEN_WIDTH - 1] = '\0';
PrintTopHeader(szBuffer, m_iQueueWinTop, false);
}
@@ -985,7 +988,7 @@ void NCursesFrontend::ResetColWidths()
m_iColWidthLeft = 0;
}
void NCursesFrontend::PrintGroupname(GroupInfo * pGroupInfo, int iRow, bool bSelected, bool bCalcColWidth)
void NCursesFrontend::PrintGroupname(NZBInfo* pNZBInfo, int iRow, bool bSelected, bool bCalcColWidth)
{
int color = NCURSES_COLORPAIR_TEXT;
char chBrace1 = '[';
@@ -1001,28 +1004,21 @@ void NCursesFrontend::PrintGroupname(GroupInfo * pGroupInfo, int iRow, bool bSel
}
const char* szDownloading = "";
if (pGroupInfo->GetActiveDownloads() > 0)
if (pNZBInfo->GetActiveDownloads() > 0)
{
szDownloading = " *";
}
long long lUnpausedRemainingSize = pGroupInfo->GetRemainingSize() - pGroupInfo->GetPausedSize();
long long lUnpausedRemainingSize = pNZBInfo->GetRemainingSize() - pNZBInfo->GetPausedSize();
char szRemaining[20];
Util::FormatFileSize(szRemaining, sizeof(szRemaining), lUnpausedRemainingSize);
char szPriority[100];
szPriority[0] = '\0';
if (pGroupInfo->GetMinPriority() != 0 || pGroupInfo->GetMaxPriority() != 0)
if (pNZBInfo->GetPriority() != 0)
{
if (pGroupInfo->GetMinPriority() == pGroupInfo->GetMaxPriority())
{
sprintf(szPriority, " [%+i]", pGroupInfo->GetMinPriority());
}
else
{
sprintf(szPriority, " [%+i..%+i]", pGroupInfo->GetMinPriority(), pGroupInfo->GetMaxPriority());
}
sprintf(szPriority, " [%+i]", pNZBInfo->GetPriority());
}
char szBuffer[MAX_SCREEN_WIDTH];
@@ -1046,26 +1042,26 @@ void NCursesFrontend::PrintGroupname(GroupInfo * pGroupInfo, int iRow, bool bSel
if (bPrintFormatted)
{
char szFiles[20];
snprintf(szFiles, 20, "%i/%i", pGroupInfo->GetRemainingFileCount(), pGroupInfo->GetPausedFileCount());
snprintf(szFiles, 20, "%i/%i", (int)pNZBInfo->GetFileList()->size(), pNZBInfo->GetPausedFileCount());
szFiles[20-1] = '\0';
char szTotal[20];
Util::FormatFileSize(szTotal, sizeof(szTotal), pGroupInfo->GetNZBInfo()->GetSize());
Util::FormatFileSize(szTotal, sizeof(szTotal), pNZBInfo->GetSize());
char szNameWithIds[1024];
snprintf(szNameWithIds, 1024, "%c%i-%i%c%s%s %s", chBrace1, pGroupInfo->GetFirstID(), pGroupInfo->GetLastID(), chBrace2,
szPriority, szDownloading, pGroupInfo->GetNZBInfo()->GetName());
snprintf(szNameWithIds, 1024, "%c%i%c%s%s %s", chBrace1, pNZBInfo->GetID(), chBrace2,
szPriority, szDownloading, pNZBInfo->GetName());
szNameWithIds[iNameLen] = '\0';
char szTime[100];
szTime[0] = '\0';
int iCurrentDownloadSpeed = m_bStandBy ? 0 : m_iCurrentDownloadSpeed;
if (pGroupInfo->GetPausedSize() > 0 && lUnpausedRemainingSize == 0)
if (pNZBInfo->GetPausedSize() > 0 && lUnpausedRemainingSize == 0)
{
snprintf(szTime, 100, "[paused]");
Util::FormatFileSize(szRemaining, sizeof(szRemaining), pGroupInfo->GetRemainingSize());
Util::FormatFileSize(szRemaining, sizeof(szRemaining), pNZBInfo->GetRemainingSize());
}
else if (iCurrentDownloadSpeed > 0 && !(m_bPauseDownload || m_bPauseDownload2))
else if (iCurrentDownloadSpeed > 0 && !m_bPauseDownload)
{
long long remain_sec = (long long)(lUnpausedRemainingSize / iCurrentDownloadSpeed);
int h = (int)(remain_sec / 3600);
@@ -1099,8 +1095,8 @@ void NCursesFrontend::PrintGroupname(GroupInfo * pGroupInfo, int iRow, bool bSel
}
else
{
snprintf(szBuffer, MAX_SCREEN_WIDTH, "%c%i-%i%c%s %s", chBrace1, pGroupInfo->GetFirstID(),
pGroupInfo->GetLastID(), chBrace2, szDownloading, pGroupInfo->GetNZBInfo()->GetName());
snprintf(szBuffer, MAX_SCREEN_WIDTH, "%c%i%c%s %s", chBrace1, pNZBInfo->GetID(),
chBrace2, szDownloading, pNZBInfo->GetName());
}
szBuffer[MAX_SCREEN_WIDTH - 1] = '\0';
@@ -1111,75 +1107,73 @@ void NCursesFrontend::PrintGroupname(GroupInfo * pGroupInfo, int iRow, bool bSel
}
}
void NCursesFrontend::PrepareGroupQueue()
{
m_groupQueue.clear();
DownloadQueue* pDownloadQueue = LockQueue();
pDownloadQueue->BuildGroups(&m_groupQueue);
UnlockQueue();
}
void NCursesFrontend::ClearGroupQueue()
{
m_groupQueue.Clear();
}
bool NCursesFrontend::EditQueue(QueueEditor::EEditAction eAction, int iOffset)
bool NCursesFrontend::EditQueue(DownloadQueue::EEditAction eAction, int iOffset)
{
int ID = 0;
if (m_bGroupFiles)
{
if (m_iSelectedQueueEntry >= 0 && m_iSelectedQueueEntry < (int)m_groupQueue.size())
DownloadQueue* pDownloadQueue = LockQueue();
if (m_iSelectedQueueEntry >= 0 && m_iSelectedQueueEntry < (int)pDownloadQueue->GetQueue()->size())
{
GroupInfo* pGroupInfo = m_groupQueue[m_iSelectedQueueEntry];
ID = pGroupInfo->GetLastID();
if (eAction == QueueEditor::eaFilePause)
NZBInfo* pNZBInfo = pDownloadQueue->GetQueue()->at(m_iSelectedQueueEntry);
ID = pNZBInfo->GetID();
if (eAction == DownloadQueue::eaFilePause)
{
if (pGroupInfo->GetRemainingSize() == pGroupInfo->GetPausedSize())
if (pNZBInfo->GetRemainingSize() == pNZBInfo->GetPausedSize())
{
eAction = QueueEditor::eaFileResume;
eAction = DownloadQueue::eaFileResume;
}
else if (pGroupInfo->GetPausedSize() == 0 && (pGroupInfo->GetRemainingParCount() > 0) &&
else if (pNZBInfo->GetPausedSize() == 0 && (pNZBInfo->GetRemainingParCount() > 0) &&
!(m_bLastPausePars && m_iLastEditEntry == m_iSelectedQueueEntry))
{
eAction = QueueEditor::eaFilePauseExtraPars;
eAction = DownloadQueue::eaFilePauseExtraPars;
m_bLastPausePars = true;
}
else
{
eAction = QueueEditor::eaFilePause;
eAction = DownloadQueue::eaFilePause;
m_bLastPausePars = false;
}
}
}
UnlockQueue();
// map file-edit-actions to group-edit-actions
QueueEditor::EEditAction FileToGroupMap[] = {
(QueueEditor::EEditAction)0,
QueueEditor::eaGroupMoveOffset,
QueueEditor::eaGroupMoveTop,
QueueEditor::eaGroupMoveBottom,
QueueEditor::eaGroupPause,
QueueEditor::eaGroupResume,
QueueEditor::eaGroupDelete,
QueueEditor::eaGroupPauseAllPars,
QueueEditor::eaGroupPauseExtraPars };
DownloadQueue::EEditAction FileToGroupMap[] = {
(DownloadQueue::EEditAction)0,
DownloadQueue::eaGroupMoveOffset,
DownloadQueue::eaGroupMoveTop,
DownloadQueue::eaGroupMoveBottom,
DownloadQueue::eaGroupPause,
DownloadQueue::eaGroupResume,
DownloadQueue::eaGroupDelete,
DownloadQueue::eaGroupPauseAllPars,
DownloadQueue::eaGroupPauseExtraPars };
eAction = FileToGroupMap[eAction];
}
else
{
DownloadQueue* pDownloadQueue = LockQueue();
if (m_iSelectedQueueEntry >= 0 && m_iSelectedQueueEntry < (int)pDownloadQueue->GetFileQueue()->size())
int iFileNum = 0;
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
FileInfo* pFileInfo = pDownloadQueue->GetFileQueue()->at(m_iSelectedQueueEntry);
ID = pFileInfo->GetID();
if (eAction == QueueEditor::eaFilePause)
NZBInfo* pNZBInfo = *it;
for (FileList::iterator it2 = pNZBInfo->GetFileList()->begin(); it2 != pNZBInfo->GetFileList()->end(); it2++, iFileNum++)
{
eAction = !pFileInfo->GetPaused() ? QueueEditor::eaFilePause : QueueEditor::eaFileResume;
if (m_iSelectedQueueEntry == iFileNum)
{
FileInfo* pFileInfo = *it2;
ID = pFileInfo->GetID();
if (eAction == DownloadQueue::eaFilePause)
{
eAction = !pFileInfo->GetPaused() ? DownloadQueue::eaFilePause : DownloadQueue::eaFileResume;
}
}
}
}
UnlockQueue();
}
@@ -1296,12 +1290,9 @@ void NCursesFrontend::UpdateInput(int initialKey)
// Key 'p' for pause
if (!IsRemoteMode())
{
info(m_bPauseDownload || m_bPauseDownload2 ? "Unpausing download" : "Pausing download");
info(m_bPauseDownload ? "Unpausing download" : "Pausing download");
}
ServerPauseUnpause(!(m_bPauseDownload || m_bPauseDownload2), m_bPauseDownload2 && !m_bPauseDownload);
break;
case '\'':
ServerDumpDebug();
ServerPauseUnpause(!m_bPauseDownload);
break;
case 'e':
case 10: // return
@@ -1387,38 +1378,38 @@ void NCursesFrontend::UpdateInput(int initialKey)
break;
case 'p':
// Key 'p' for pause
EditQueue(QueueEditor::eaFilePause, 0);
EditQueue(DownloadQueue::eaFilePause, 0);
break;
case 'd':
SetHint(" Use Uppercase \"D\" for delete");
break;
case 'D':
// Delete entry
if (EditQueue(QueueEditor::eaFileDelete, 0))
if (EditQueue(DownloadQueue::eaFileDelete, 0))
{
SetCurrentQueueEntry(m_iSelectedQueueEntry);
}
break;
case 'u':
if (EditQueue(QueueEditor::eaFileMoveOffset, -1))
if (EditQueue(DownloadQueue::eaFileMoveOffset, -1))
{
SetCurrentQueueEntry(m_iSelectedQueueEntry - 1);
}
break;
case 'n':
if (EditQueue(QueueEditor::eaFileMoveOffset, +1))
if (EditQueue(DownloadQueue::eaFileMoveOffset, +1))
{
SetCurrentQueueEntry(m_iSelectedQueueEntry + 1);
}
break;
case 't':
if (EditQueue(QueueEditor::eaFileMoveTop, 0))
if (EditQueue(DownloadQueue::eaFileMoveTop, 0))
{
SetCurrentQueueEntry(0);
}
break;
case 'b':
if (EditQueue(QueueEditor::eaFileMoveBottom, 0))
if (EditQueue(DownloadQueue::eaFileMoveBottom, 0))
{
SetCurrentQueueEntry(iQueueSize > 0 ? iQueueSize - 1 : 0);
}
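When group mode is active, EditQueue above converts the file-level action into its group-level counterpart through a lookup array indexed by the enum value. A tiny illustration of that table-driven mapping, with hypothetical action enums in place of DownloadQueue::EEditAction, could be:

#include <cstdio>

// Hypothetical action enums; the real code maps file actions such as
// eaFilePause and eaFileMoveTop to their group equivalents.
enum EFileAction  { faNone, faMoveOffset, faMoveTop, faPause };
enum EGroupAction { gaNone, gaMoveOffset, gaMoveTop, gaPause };

static EGroupAction MapFileToGroup(EFileAction eAction)
{
	// The entries must stay in the same order as the file-action enum,
	// exactly like the FileToGroupMap array in NCursesFrontend above.
	static const EGroupAction FileToGroupMap[] = { gaNone, gaMoveOffset, gaMoveTop, gaPause };
	return FileToGroupMap[eAction];
}

int main()
{
	std::printf("%d\n", (int)MapFileToGroup(faMoveTop));  // prints 2 (gaMoveTop)
	return 0;
}

The mapping is only as correct as the ordering of its entries, which is the same constraint the original lookup table relies on.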

View File

@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2009 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -62,7 +62,6 @@ private:
int m_iLastEditEntry;
bool m_bLastPausePars;
int m_iQueueScrollOffset;
GroupQueue m_groupQueue;
char* m_szHint;
time_t m_tStartHint;
int m_iColWidthFiles;
@@ -99,10 +98,8 @@ private:
void PrintFilename(FileInfo* pFileInfo, int iRow, bool bSelected);
void PrintGroupQueue();
void ResetColWidths();
void PrintGroupname(GroupInfo * pGroupInfo, int iRow, bool bSelected, bool bCalcColWidth);
void PrepareGroupQueue();
void PrintGroupname(NZBInfo* pNZBInfo, int iRow, bool bSelected, bool bCalcColWidth);
void PrintTopHeader(char* szHeader, int iLineNr, bool bUpTime);
void ClearGroupQueue();
int PrintMessage(Message* Msg, int iRow, int iMaxLines);
void PrintKeyInputBar();
void PrintStatus();
@@ -114,7 +111,7 @@ private:
int ReadConsoleKey();
int CalcQueueSize();
void NeedUpdateData();
bool EditQueue(QueueEditor::EEditAction eAction, int iOffset);
bool EditQueue(DownloadQueue::EEditAction eAction, int iOffset);
void SetHint(const char* szHint);
protected:

View File

@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -44,11 +44,11 @@
#include "Log.h"
#include "Util.h"
#include "Maintenance.h"
#include "Options.h"
extern Options* g_pOptions;
extern Maintenance* g_pMaintenance;
Maintenance::Maintenance()
{
m_iIDMessageGen = 0;

View File

@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -26,11 +26,10 @@
#define MAINTENANCE_H
#include "Thread.h"
#include "ScriptController.h"
#include "Script.h"
#include "Log.h"
#include "Util.h"
class UpdateScriptController;
class Maintenance

View File

File diff suppressed because it is too large


View File

@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -57,15 +57,19 @@ public:
opClientRequestScanAsync,
opClientRequestDownloadPause,
opClientRequestDownloadUnpause,
opClientRequestDownload2Pause,
opClientRequestDownload2Unpause,
opClientRequestPostPause,
opClientRequestPostUnpause,
opClientRequestScanPause,
opClientRequestScanUnpause,
opClientRequestHistory,
opClientRequestDownloadUrl,
opClientRequestUrlQueue
opClientRequestDownloadUrl
};
enum EWriteLog
{
wlNone,
wlAppend,
wlReset,
wlRotate
};
enum EMessageTarget
{
@@ -83,6 +87,7 @@ public:
enum EParCheck
{
pcAuto,
pcAlways,
pcForce,
pcManual
};
@@ -98,15 +103,6 @@ public:
hcDelete,
hcNone
};
enum EScriptLogKind
{
slNone,
slDetail,
slInfo,
slWarning,
slError,
slDebug
};
enum EMatchMode
{
mmID = 1,
@@ -147,31 +143,6 @@ public:
OptEntry* FindOption(const char* szName);
};
class ConfigTemplate
{
private:
char* m_szName;
char* m_szDisplayName;
char* m_szTemplate;
friend class Options;
public:
ConfigTemplate(const char* szName, const char* szDisplayName, const char* szTemplate);
~ConfigTemplate();
const char* GetName() { return m_szName; }
const char* GetDisplayName() { return m_szDisplayName; }
const char* GetTemplate() { return m_szTemplate; }
};
typedef std::vector<ConfigTemplate*> ConfigTemplatesBase;
class ConfigTemplates: public ConfigTemplatesBase
{
public:
~ConfigTemplates();
};
typedef std::vector<char*> NameList;
class Category
@@ -180,16 +151,16 @@ public:
char* m_szName;
char* m_szDestDir;
bool m_bUnpack;
char* m_szDefScript;
char* m_szPostScript;
NameList m_Aliases;
public:
Category(const char* szName, const char* szDestDir, bool bUnpack, const char* szDefScript);
Category(const char* szName, const char* szDestDir, bool bUnpack, const char* szPostScript);
~Category();
const char* GetName() { return m_szName; }
const char* GetDestDir() { return m_szDestDir; }
bool GetUnpack() { return m_bUnpack; }
const char* GetDefScript() { return m_szDefScript; }
const char* GetPostScript() { return m_szPostScript; }
NameList* GetAliases() { return &m_Aliases; }
};
@@ -208,6 +179,11 @@ public:
char* m_szName;
char* m_szLocation;
char* m_szDisplayName;
bool m_bPostScript;
bool m_bScanScript;
bool m_bQueueScript;
bool m_bSchedulerScript;
char* m_szQueueEvents;
public:
Script(const char* szName, const char* szLocation);
@@ -216,22 +192,58 @@ public:
const char* GetLocation() { return m_szLocation; }
void SetDisplayName(const char* szDisplayName);
const char* GetDisplayName() { return m_szDisplayName; }
bool GetPostScript() { return m_bPostScript; }
void SetPostScript(bool bPostScript) { m_bPostScript = bPostScript; }
bool GetScanScript() { return m_bScanScript; }
void SetScanScript(bool bScanScript) { m_bScanScript = bScanScript; }
bool GetQueueScript() { return m_bQueueScript; }
void SetQueueScript(bool bQueueScript) { m_bQueueScript = bQueueScript; }
bool GetSchedulerScript() { return m_bSchedulerScript; }
void SetSchedulerScript(bool bSchedulerScript) { m_bSchedulerScript = bSchedulerScript; }
void SetQueueEvents(const char* szQueueEvents);
const char* GetQueueEvents() { return m_szQueueEvents; }
};
typedef std::list<Script*> ScriptListBase;
typedef std::list<Script*> ScriptsBase;
class ScriptList: public ScriptListBase
class Scripts: public ScriptsBase
{
public:
~ScriptList();
~Scripts();
void Clear();
Script* Find(const char* szName);
};
class ConfigTemplate
{
private:
Script* m_pScript;
char* m_szTemplate;
friend class Options;
public:
ConfigTemplate(Script* pScript, const char* szTemplate);
~ConfigTemplate();
Script* GetScript() { return m_pScript; }
const char* GetTemplate() { return m_szTemplate; }
};
typedef std::vector<ConfigTemplate*> ConfigTemplatesBase;
class ConfigTemplates: public ConfigTemplatesBase
{
public:
~ConfigTemplates();
};
private:
OptEntries m_OptEntries;
bool m_bConfigInitialized;
Mutex m_mutexOptEntries;
Categories m_Categories;
Scripts m_Scripts;
ConfigTemplates m_ConfigTemplates;
// Options
bool m_bConfigErrors;
@@ -252,8 +264,8 @@ private:
EMessageTarget m_eDetailTarget;
bool m_bDecode;
bool m_bCreateBrokenLog;
bool m_bResetLog;
int m_iConnectionTimeout;
int m_iArticleTimeout;
int m_iUrlTimeout;
int m_iTerminateTimeout;
bool m_bAppendCategoryDir;
bool m_bContinuePartial;
@@ -274,21 +286,23 @@ private:
char* m_szDaemonUsername;
EOutputMode m_eOutputMode;
bool m_bReloadQueue;
bool m_bReloadUrlQueue;
bool m_bReloadPostQueue;
int m_iUrlConnections;
int m_iLogBufferSize;
bool m_bCreateLog;
EWriteLog m_eWriteLog;
int m_iRotateLog;
char* m_szLogFile;
EParCheck m_eParCheck;
bool m_bParRepair;
EParScan m_eParScan;
bool m_bParQuick;
bool m_bParRename;
int m_iParBuffer;
int m_iParThreads;
EHealthCheck m_eHealthCheck;
char* m_szDefScript;
char* m_szPostScript;
char* m_szScriptOrder;
char* m_szNZBProcess;
char* m_szNZBAddedProcess;
char* m_szScanScript;
char* m_szQueueScript;
bool m_bNoConfig;
int m_iUMask;
int m_iUpdateInterval;
@@ -297,7 +311,7 @@ private:
bool m_bCursesGroup;
bool m_bCrcCheck;
bool m_bDirectWrite;
int m_iWriteBufferSize;
int m_iWriteBuffer;
int m_iNzbDirInterval;
int m_iNzbDirFileAge;
bool m_bParCleanupQueue;
@@ -317,9 +331,13 @@ private:
char* m_szSevenZipCmd;
bool m_bUnpackPauseQueue;
char* m_szExtCleanupDisk;
char* m_szParIgnoreExt;
int m_iFeedHistory;
bool m_bUrlForce;
int m_iTimeCorrection;
int m_iPropagationDelay;
int m_iArticleCache;
int m_iEventInterval;
// Parsed command-line parameters
bool m_bServerMode;
@@ -347,12 +365,13 @@ private:
// Current state
bool m_bPauseDownload;
bool m_bPauseDownload2;
bool m_bPausePostProcess;
bool m_bPauseScan;
bool m_bTempPauseDownload;
int m_iDownloadRate;
EClientOperation m_eClientOperation;
time_t m_tResumeTime;
int m_iLocalTimeOffset;
void InitDefault();
void InitOptFile();
@@ -363,6 +382,8 @@ private:
void InitCategories();
void InitScheduler();
void InitFeeds();
void InitScripts();
void InitConfigTemplates();
void CheckOptions();
void PrintUsage(char* com);
void Dump();
@@ -374,20 +395,22 @@ private:
void SetOption(const char* optname, const char* value);
bool SetOptionString(const char* option);
bool SplitOptionString(const char* option, char** pOptName, char** pOptValue);
bool ValidateOptionName(const char* optname);
bool ValidateOptionName(const char* optname, const char* optvalue);
void LoadConfigFile();
void CheckDir(char** dir, const char* szOptionName, bool bAllowEmpty, bool bCreate);
void CheckDir(char** dir, const char* szOptionName, const char* szParentDir,
bool bAllowEmpty, bool bCreate);
void ParseFileIDList(int argc, char* argv[], int optind);
void ParseFileNameList(int argc, char* argv[], int optind);
bool ParseTime(const char** pTime, int* pHours, int* pMinutes);
bool ParseTime(const char* szTime, int* pHours, int* pMinutes);
bool ParseWeekDays(const char* szWeekDays, int* pWeekDaysBits);
void ConfigError(const char* msg, ...);
void ConfigWarn(const char* msg, ...);
void LocateOptionSrcPos(const char *szOptionName);
void ConvertOldOption(char *szOption, int iOptionBufLen, char *szValue, int iValueBufLen);
static bool CompareScripts(Script* pScript1, Script* pScript2);
void LoadScriptDir(ScriptList* pScriptList, const char* szDirectory, bool bIsSubDir);
void BuildScriptDisplayNames(ScriptList* pScriptList);
void LoadScriptDir(Scripts* pScripts, const char* szDirectory, bool bIsSubDir);
void BuildScriptDisplayNames(Scripts* pScripts);
void LoadScripts(Scripts* pScripts);
public:
Options(int argc, char* argv[]);
@@ -396,7 +419,8 @@ public:
bool LoadConfig(OptEntries* pOptEntries);
bool SaveConfig(OptEntries* pOptEntries);
bool LoadConfigTemplates(ConfigTemplates* pConfigTemplates);
void LoadScriptList(ScriptList* pScriptList);
Scripts* GetScripts() { return &m_Scripts; }
ConfigTemplates* GetConfigTemplates() { return &m_ConfigTemplates; }
// Options
OptEntries* LockOptEntries();
@@ -411,13 +435,13 @@ public:
const char* GetConfigTemplate() { return m_szConfigTemplate; }
const char* GetScriptDir() { return m_szScriptDir; }
bool GetCreateBrokenLog() const { return m_bCreateBrokenLog; }
bool GetResetLog() const { return m_bResetLog; }
EMessageTarget GetInfoTarget() const { return m_eInfoTarget; }
EMessageTarget GetWarningTarget() const { return m_eWarningTarget; }
EMessageTarget GetErrorTarget() const { return m_eErrorTarget; }
EMessageTarget GetDebugTarget() const { return m_eDebugTarget; }
EMessageTarget GetDetailTarget() const { return m_eDetailTarget; }
int GetConnectionTimeout() { return m_iConnectionTimeout; }
int GetArticleTimeout() { return m_iArticleTimeout; }
int GetUrlTimeout() { return m_iUrlTimeout; }
int GetTerminateTimeout() { return m_iTerminateTimeout; }
bool GetDecode() { return m_bDecode; };
bool GetAppendCategoryDir() { return m_bAppendCategoryDir; }
@@ -426,7 +450,7 @@ public:
int GetRetryInterval() { return m_iRetryInterval; }
bool GetSaveQueue() { return m_bSaveQueue; }
bool GetDupeCheck() { return m_bDupeCheck; }
const char* GetControlIP() { return m_szControlIP; }
const char* GetControlIP();
const char* GetControlUsername() { return m_szControlUsername; }
const char* GetControlPassword() { return m_szControlPassword; }
int GetControlPort() { return m_iControlPort; }
@@ -439,21 +463,23 @@ public:
const char* GetDaemonUsername() { return m_szDaemonUsername; }
EOutputMode GetOutputMode() { return m_eOutputMode; }
bool GetReloadQueue() { return m_bReloadQueue; }
bool GetReloadUrlQueue() { return m_bReloadUrlQueue; }
bool GetReloadPostQueue() { return m_bReloadPostQueue; }
int GetUrlConnections() { return m_iUrlConnections; }
int GetLogBufferSize() { return m_iLogBufferSize; }
bool GetCreateLog() { return m_bCreateLog; }
EWriteLog GetWriteLog() { return m_eWriteLog; }
const char* GetLogFile() { return m_szLogFile; }
int GetRotateLog() { return m_iRotateLog; }
EParCheck GetParCheck() { return m_eParCheck; }
bool GetParRepair() { return m_bParRepair; }
EParScan GetParScan() { return m_eParScan; }
bool GetParQuick() { return m_bParQuick; }
bool GetParRename() { return m_bParRename; }
int GetParBuffer() { return m_iParBuffer; }
int GetParThreads() { return m_iParThreads; }
EHealthCheck GetHealthCheck() { return m_eHealthCheck; }
const char* GetScriptOrder() { return m_szScriptOrder; }
const char* GetDefScript() { return m_szDefScript; }
const char* GetNZBProcess() { return m_szNZBProcess; }
const char* GetNZBAddedProcess() { return m_szNZBAddedProcess; }
const char* GetPostScript() { return m_szPostScript; }
const char* GetScanScript() { return m_szScanScript; }
const char* GetQueueScript() { return m_szQueueScript; }
int GetUMask() { return m_iUMask; }
int GetUpdateInterval() {return m_iUpdateInterval; }
bool GetCursesNZBName() { return m_bCursesNZBName; }
@@ -461,7 +487,7 @@ public:
bool GetCursesGroup() { return m_bCursesGroup; }
bool GetCrcCheck() { return m_bCrcCheck; }
bool GetDirectWrite() { return m_bDirectWrite; }
int GetWriteBufferSize() { return m_iWriteBufferSize; }
int GetWriteBuffer() { return m_iWriteBuffer; }
int GetNzbDirInterval() { return m_iNzbDirInterval; }
int GetNzbDirFileAge() { return m_iNzbDirFileAge; }
bool GetParCleanupQueue() { return m_bParCleanupQueue; }
@@ -481,9 +507,13 @@ public:
const char* GetSevenZipCmd() { return m_szSevenZipCmd; }
bool GetUnpackPauseQueue() { return m_bUnpackPauseQueue; }
const char* GetExtCleanupDisk() { return m_szExtCleanupDisk; }
const char* GetParIgnoreExt() { return m_szParIgnoreExt; }
int GetFeedHistory() { return m_iFeedHistory; }
bool GetUrlForce() { return m_bUrlForce; }
int GetTimeCorrection() { return m_iTimeCorrection; }
int GetPropagationDelay() { return m_iPropagationDelay; }
int GetArticleCache() { return m_iArticleCache; }
int GetEventInterval() { return m_iEventInterval; }
Category* FindCategory(const char* szName, bool bSearchAliases) { return m_Categories.FindCategory(szName, bSearchAliases); }
@@ -514,16 +544,18 @@ public:
// Current state
void SetPauseDownload(bool bPauseDownload) { m_bPauseDownload = bPauseDownload; }
bool GetPauseDownload() const { return m_bPauseDownload; }
void SetPauseDownload2(bool bPauseDownload2) { m_bPauseDownload2 = bPauseDownload2; }
bool GetPauseDownload2() const { return m_bPauseDownload2; }
void SetPausePostProcess(bool bPausePostProcess) { m_bPausePostProcess = bPausePostProcess; }
bool GetPausePostProcess() const { return m_bPausePostProcess; }
void SetPauseScan(bool bPauseScan) { m_bPauseScan = bPauseScan; }
bool GetPauseScan() const { return m_bPauseScan; }
void SetTempPauseDownload(bool bTempPauseDownload) { m_bTempPauseDownload = bTempPauseDownload; }
bool GetTempPauseDownload() const { return m_bTempPauseDownload; }
void SetDownloadRate(int iRate) { m_iDownloadRate = iRate; }
int GetDownloadRate() const { return m_iDownloadRate; }
void SetResumeTime(time_t tResumeTime) { m_tResumeTime = tResumeTime; }
time_t GetResumeTime() const { return m_tResumeTime; }
void SetLocalTimeOffset(int iLocalTimeOffset) { m_iLocalTimeOffset = iLocalTimeOffset; }
int GetLocalTimeOffset() { return m_iLocalTimeOffset; }
};
#endif

daemon/main/Scheduler.cpp (new file, 483 lines)
View File

@@ -0,0 +1,483 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2008-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef WIN32
#include "win32.h"
#else
#include <unistd.h>
#endif
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include "nzbget.h"
#include "Scheduler.h"
#include "Options.h"
#include "Log.h"
#include "NewsServer.h"
#include "ServerPool.h"
#include "FeedInfo.h"
#include "FeedCoordinator.h"
#include "QueueScript.h"
extern Options* g_pOptions;
extern ServerPool* g_pServerPool;
extern FeedCoordinator* g_pFeedCoordinator;
class SchedulerScriptController : public Thread, public NZBScriptController
{
private:
char* m_szScript;
bool m_bExternalProcess;
int m_iTaskID;
void PrepareParams(const char* szScriptName);
void ExecuteExternalProcess();
protected:
virtual void ExecuteScript(Options::Script* pScript);
public:
virtual ~SchedulerScriptController();
virtual void Run();
static void StartScript(const char* szParam, bool bExternalProcess, int iTaskID);
};
Scheduler::Task::Task(int iID, int iHours, int iMinutes, int iWeekDaysBits, ECommand eCommand, const char* szParam)
{
m_iID = iID;
m_iHours = iHours;
m_iMinutes = iMinutes;
m_iWeekDaysBits = iWeekDaysBits;
m_eCommand = eCommand;
m_szParam = szParam ? strdup(szParam) : NULL;
m_tLastExecuted = 0;
}
Scheduler::Task::~Task()
{
free(m_szParam);
}
Scheduler::Scheduler()
{
debug("Creating Scheduler");
m_tLastCheck = 0;
m_TaskList.clear();
}
Scheduler::~Scheduler()
{
debug("Destroying Scheduler");
for (TaskList::iterator it = m_TaskList.begin(); it != m_TaskList.end(); it++)
{
delete *it;
}
}
void Scheduler::AddTask(Task* pTask)
{
m_mutexTaskList.Lock();
m_TaskList.push_back(pTask);
m_mutexTaskList.Unlock();
}
bool Scheduler::CompareTasks(Scheduler::Task* pTask1, Scheduler::Task* pTask2)
{
return (pTask1->m_iHours < pTask2->m_iHours) ||
((pTask1->m_iHours == pTask2->m_iHours) && (pTask1->m_iMinutes < pTask2->m_iMinutes));
}
void Scheduler::FirstCheck()
{
m_mutexTaskList.Lock();
m_TaskList.sort(CompareTasks);
m_mutexTaskList.Unlock();
// check all tasks for the last week
CheckTasks();
}
void Scheduler::IntervalCheck()
{
m_bExecuteProcess = true;
CheckTasks();
CheckScheduledResume();
}
void Scheduler::CheckTasks()
{
PrepareLog();
m_mutexTaskList.Lock();
time_t tCurrent = time(NULL);
if (!m_TaskList.empty())
{
// Detect large step changes of system time
time_t tDiff = tCurrent - m_tLastCheck;
if (tDiff > 60*90 || tDiff < 0)
{
debug("Reset scheduled tasks (detected clock change greater than 90 minutes or negative)");
// check all tasks for the last week
m_tLastCheck = tCurrent - 60*60*24*7;
m_bExecuteProcess = false;
for (TaskList::iterator it = m_TaskList.begin(); it != m_TaskList.end(); it++)
{
Task* pTask = *it;
pTask->m_tLastExecuted = 0;
}
}
time_t tLocalCurrent = tCurrent + g_pOptions->GetLocalTimeOffset();
time_t tLocalLastCheck = m_tLastCheck + g_pOptions->GetLocalTimeOffset();
tm tmCurrent;
gmtime_r(&tLocalCurrent, &tmCurrent);
tm tmLastCheck;
gmtime_r(&tLocalLastCheck, &tmLastCheck);
tm tmLoop;
memcpy(&tmLoop, &tmLastCheck, sizeof(tmLastCheck));
tmLoop.tm_hour = tmCurrent.tm_hour;
tmLoop.tm_min = tmCurrent.tm_min;
tmLoop.tm_sec = tmCurrent.tm_sec;
time_t tLoop = Util::Timegm(&tmLoop);
while (tLoop <= tLocalCurrent)
{
for (TaskList::iterator it = m_TaskList.begin(); it != m_TaskList.end(); it++)
{
Task* pTask = *it;
if (pTask->m_tLastExecuted != tLoop)
{
tm tmAppoint;
memcpy(&tmAppoint, &tmLoop, sizeof(tmLoop));
tmAppoint.tm_hour = pTask->m_iHours;
tmAppoint.tm_min = pTask->m_iMinutes;
tmAppoint.tm_sec = 0;
time_t tAppoint = Util::Timegm(&tmAppoint);
int iWeekDay = tmAppoint.tm_wday;
if (iWeekDay == 0)
{
iWeekDay = 7;
}
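// WeekDaysBits holds one bit per weekday (bit 0 = Monday ... bit 6 = Sunday,
// matching the 1..7 numbering above); a value of 0 means the task runs every day.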
bool bWeekDayOK = pTask->m_iWeekDaysBits == 0 || (pTask->m_iWeekDaysBits & (1 << (iWeekDay - 1)));
bool bDoTask = bWeekDayOK && tLocalLastCheck < tAppoint && tAppoint <= tLocalCurrent;
//debug("TEMP: 1) m_tLastCheck=%i, tLocalCurrent=%i, tLoop=%i, tAppoint=%i, bWeekDayOK=%i, bDoTask=%i", m_tLastCheck, tLocalCurrent, tLoop, tAppoint, (int)bWeekDayOK, (int)bDoTask);
if (bDoTask)
{
ExecuteTask(pTask);
pTask->m_tLastExecuted = tLoop;
}
}
}
tLoop += 60*60*24; // inc day
gmtime_r(&tLoop, &tmLoop);
}
}
m_tLastCheck = tCurrent;
m_mutexTaskList.Unlock();
PrintLog();
}
void Scheduler::ExecuteTask(Task* pTask)
{
const char* szCommandName[] = { "Pause", "Unpause", "Pause Post-processing", "Unpause Post-processing",
"Set download rate", "Execute process", "Execute script",
"Pause Scan", "Unpause Scan", "Enable Server", "Disable Server", "Fetch Feed" };
debug("Executing scheduled command: %s", szCommandName[pTask->m_eCommand]);
switch (pTask->m_eCommand)
{
case scDownloadRate:
if (!Util::EmptyStr(pTask->m_szParam))
{
g_pOptions->SetDownloadRate(atoi(pTask->m_szParam) * 1024);
m_bDownloadRateChanged = true;
}
break;
case scPauseDownload:
case scUnpauseDownload:
g_pOptions->SetPauseDownload(pTask->m_eCommand == scPauseDownload);
m_bPauseDownloadChanged = true;
break;
case scPausePostProcess:
case scUnpausePostProcess:
g_pOptions->SetPausePostProcess(pTask->m_eCommand == scPausePostProcess);
m_bPausePostProcessChanged = true;
break;
case scPauseScan:
case scUnpauseScan:
g_pOptions->SetPauseScan(pTask->m_eCommand == scPauseScan);
m_bPauseScanChanged = true;
break;
case scScript:
case scProcess:
if (m_bExecuteProcess)
{
SchedulerScriptController::StartScript(pTask->m_szParam, pTask->m_eCommand == scProcess, pTask->m_iID);
}
break;
case scActivateServer:
case scDeactivateServer:
EditServer(pTask->m_eCommand == scActivateServer, pTask->m_szParam);
break;
case scFetchFeed:
if (m_bExecuteProcess)
{
FetchFeed(pTask->m_szParam);
break;
}
}
}
void Scheduler::PrepareLog()
{
m_bDownloadRateChanged = false;
m_bPauseDownloadChanged = false;
m_bPausePostProcessChanged = false;
m_bPauseScanChanged = false;
m_bServerChanged = false;
}
void Scheduler::PrintLog()
{
if (m_bDownloadRateChanged)
{
info("Scheduler: setting download rate to %i KB/s", g_pOptions->GetDownloadRate() / 1024);
}
if (m_bPauseDownloadChanged)
{
info("Scheduler: %s download", g_pOptions->GetPauseDownload() ? "pausing" : "unpausing");
}
if (m_bPausePostProcessChanged)
{
info("Scheduler: %s post-processing", g_pOptions->GetPausePostProcess() ? "pausing" : "unpausing");
}
if (m_bPauseScanChanged)
{
info("Scheduler: %s scan", g_pOptions->GetPauseScan() ? "pausing" : "unpausing");
}
if (m_bServerChanged)
{
int index = 0;
for (Servers::iterator it = g_pServerPool->GetServers()->begin(); it != g_pServerPool->GetServers()->end(); it++, index++)
{
NewsServer* pServer = *it;
if (pServer->GetActive() != m_ServerStatusList[index])
{
info("Scheduler: %s %s", pServer->GetActive() ? "activating" : "deactivating", pServer->GetName());
}
}
g_pServerPool->Changed();
}
}
void Scheduler::EditServer(bool bActive, const char* szServerList)
{
Tokenizer tok(szServerList, ",;");
while (const char* szServer = tok.Next())
{
int iID = atoi(szServer);
for (Servers::iterator it = g_pServerPool->GetServers()->begin(); it != g_pServerPool->GetServers()->end(); it++)
{
NewsServer* pServer = *it;
if ((iID > 0 && pServer->GetID() == iID) ||
!strcasecmp(pServer->GetName(), szServer))
{
if (!m_bServerChanged)
{
// store old server status for logging
m_ServerStatusList.clear();
m_ServerStatusList.reserve(g_pServerPool->GetServers()->size());
for (Servers::iterator it2 = g_pServerPool->GetServers()->begin(); it2 != g_pServerPool->GetServers()->end(); it2++)
{
NewsServer* pServer2 = *it2;
m_ServerStatusList.push_back(pServer2->GetActive());
}
}
m_bServerChanged = true;
pServer->SetActive(bActive);
break;
}
}
}
}
void Scheduler::FetchFeed(const char* szFeedList)
{
Tokenizer tok(szFeedList, ",;");
while (const char* szFeed = tok.Next())
{
int iID = atoi(szFeed);
for (Feeds::iterator it = g_pFeedCoordinator->GetFeeds()->begin(); it != g_pFeedCoordinator->GetFeeds()->end(); it++)
{
FeedInfo* pFeed = *it;
if (pFeed->GetID() == iID ||
!strcasecmp(pFeed->GetName(), szFeed) ||
!strcasecmp("0", szFeed))
{
g_pFeedCoordinator->FetchFeed(!strcasecmp("0", szFeed) ? 0 : pFeed->GetID());
break;
}
}
}
}
void Scheduler::CheckScheduledResume()
{
time_t tResumeTime = g_pOptions->GetResumeTime();
time_t tCurrentTime = time(NULL);
if (tResumeTime > 0 && tCurrentTime >= tResumeTime)
{
info("Autoresume");
g_pOptions->SetResumeTime(0);
g_pOptions->SetPauseDownload(false);
g_pOptions->SetPausePostProcess(false);
g_pOptions->SetPauseScan(false);
}
}
SchedulerScriptController::~SchedulerScriptController()
{
free(m_szScript);
}
void SchedulerScriptController::StartScript(const char* szParam, bool bExternalProcess, int iTaskID)
{
char** argv = NULL;
if (bExternalProcess && !Util::SplitCommandLine(szParam, &argv))
{
error("Could not execute scheduled process-script, failed to parse command line: %s", szParam);
return;
}
SchedulerScriptController* pScriptController = new SchedulerScriptController();
pScriptController->m_bExternalProcess = bExternalProcess;
pScriptController->m_szScript = strdup(szParam);
pScriptController->m_iTaskID = iTaskID;
if (bExternalProcess)
{
pScriptController->SetScript(argv[0]);
pScriptController->SetArgs((const char**)argv, true);
}
pScriptController->SetAutoDestroy(true);
pScriptController->Start();
}
void SchedulerScriptController::Run()
{
if (m_bExternalProcess)
{
ExecuteExternalProcess();
}
else
{
ExecuteScriptList(m_szScript);
}
}
void SchedulerScriptController::ExecuteScript(Options::Script* pScript)
{
if (!pScript->GetSchedulerScript())
{
return;
}
PrintMessage(Message::mkInfo, "Executing scheduler-script %s for Task%i", pScript->GetName(), m_iTaskID);
SetScript(pScript->GetLocation());
SetArgs(NULL, false);
char szInfoName[1024];
snprintf(szInfoName, 1024, "scheduler-script %s for Task%i", pScript->GetName(), m_iTaskID);
szInfoName[1024-1] = '\0';
SetInfoName(szInfoName);
SetLogPrefix(pScript->GetDisplayName());
PrepareParams(pScript->GetName());
Execute();
SetLogPrefix(NULL);
}
void SchedulerScriptController::PrepareParams(const char* szScriptName)
{
ResetEnv();
SetIntEnvVar("NZBSP_TASKID", m_iTaskID);
PrepareEnvScript(NULL, szScriptName);
}
void SchedulerScriptController::ExecuteExternalProcess()
{
info("Executing scheduled process-script %s for Task%i", Util::BaseFileName(GetScript()), m_iTaskID);
char szInfoName[1024];
snprintf(szInfoName, 1024, "scheduled process-script %s for Task%i", Util::BaseFileName(GetScript()), m_iTaskID);
szInfoName[1024-1] = '\0';
SetInfoName(szInfoName);
char szLogPrefix[1024];
strncpy(szLogPrefix, Util::BaseFileName(GetScript()), 1024);
szLogPrefix[1024-1] = '\0';
if (char* ext = strrchr(szLogPrefix, '.')) *ext = '\0'; // strip file extension
SetLogPrefix(szLogPrefix);
Execute();
}
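
For orientation, here is a minimal, hypothetical sketch of how a task could be registered with the scheduler, using only the new Task constructor from the header diff below and the AddTask() method above; in the real program the tasks are built from the scheduler options rather than hard-coded, and g_pScheduler is the global created in Run() further down.

// Hypothetical example (not part of the compare): pause downloading every day
// at 08:30. A weekday bitmask of 0 means "all days", as handled in CheckTasks().
Scheduler::Task* pPauseTask = new Scheduler::Task(1, 8, 30, 0, Scheduler::scPauseDownload, "");
g_pScheduler->AddTask(pPauseTask);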


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2008-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2008-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -39,7 +39,10 @@ public:
{
scPauseDownload,
scUnpauseDownload,
scPausePostProcess,
scUnpausePostProcess,
scDownloadRate,
scScript,
scProcess,
scPauseScan,
scUnpauseScan,
@@ -51,6 +54,7 @@ public:
class Task
{
private:
int m_iID;
int m_iHours;
int m_iMinutes;
int m_iWeekDaysBits;
@@ -59,7 +63,7 @@ public:
time_t m_tLastExecuted;
public:
Task(int iHours, int iMinutes, int iWeekDaysBits, ECommand eCommand,
Task(int iID, int iHours, int iMinutes, int iWeekDaysBits, ECommand eCommand,
const char* szParam);
~Task();
friend class Scheduler;
@@ -73,11 +77,10 @@ private:
TaskList m_TaskList;
Mutex m_mutexTaskList;
time_t m_tLastCheck;
bool m_bDetectClockChanges;
bool m_bDownloadRateChanged;
bool m_bExecuteProcess;
bool m_bPauseDownloadChanged;
bool m_bPauseDownload;
bool m_bPausePostProcessChanged;
bool m_bPauseScanChanged;
bool m_bServerChanged;
ServerStatusList m_ServerStatusList;
@@ -88,6 +91,7 @@ private:
void PrintLog();
void EditServer(bool bActive, const char* szServerList);
void FetchFeed(const char* szFeedList);
void CheckScheduledResume();
public:
Scheduler();
@@ -95,8 +99,6 @@ public:
void AddTask(Task* pTask);
void FirstCheck();
void IntervalCheck();
bool GetPauseDownloadChanged() { return m_bPauseDownloadChanged; }
bool GetPauseDownload() { return m_bPauseDownload; }
};
#endif

daemon/main/StackTrace.cpp Executable file

@@ -0,0 +1,321 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef WIN32
#include "win32.h"
#endif
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#ifdef WIN32
#include <dbghelp.h>
#else
#include <unistd.h>
#include <sys/resource.h>
#include <signal.h>
#endif
#ifdef HAVE_SYS_PRCTL_H
#include <sys/prctl.h>
#endif
#ifdef HAVE_BACKTRACE
#include <execinfo.h>
#endif
#include "nzbget.h"
#include "Log.h"
#include "Options.h"
#include "StackTrace.h"
extern Options* g_pOptions;
extern void ExitProc();
#ifdef WIN32
#ifdef DEBUG
void PrintBacktrace(PCONTEXT pContext)
{
HANDLE hProcess = GetCurrentProcess();
HANDLE hThread = GetCurrentThread();
char szAppDir[MAX_PATH + 1];
GetModuleFileName(NULL, szAppDir, sizeof(szAppDir));
char* end = strrchr(szAppDir, PATH_SEPARATOR);
if (end) *end = '\0';
SymSetOptions(SymGetOptions() | SYMOPT_LOAD_LINES | SYMOPT_FAIL_CRITICAL_ERRORS);
if (!SymInitialize(hProcess, szAppDir, TRUE))
{
warn("Could not obtain detailed exception information: SymInitialize failed");
return;
}
const int MAX_NAMELEN = 1024;
IMAGEHLP_SYMBOL64* pSym = (IMAGEHLP_SYMBOL64 *) malloc(sizeof(IMAGEHLP_SYMBOL64) + MAX_NAMELEN);
memset(pSym, 0, sizeof(IMAGEHLP_SYMBOL64) + MAX_NAMELEN);
pSym->SizeOfStruct = sizeof(IMAGEHLP_SYMBOL64);
pSym->MaxNameLength = MAX_NAMELEN;
IMAGEHLP_LINE64 ilLine;
memset(&ilLine, 0, sizeof(ilLine));
ilLine.SizeOfStruct = sizeof(ilLine);
STACKFRAME64 sfStackFrame;
memset(&sfStackFrame, 0, sizeof(sfStackFrame));
DWORD imageType;
#ifdef _M_IX86
imageType = IMAGE_FILE_MACHINE_I386;
sfStackFrame.AddrPC.Offset = pContext->Eip;
sfStackFrame.AddrPC.Mode = AddrModeFlat;
sfStackFrame.AddrFrame.Offset = pContext->Ebp;
sfStackFrame.AddrFrame.Mode = AddrModeFlat;
sfStackFrame.AddrStack.Offset = pContext->Esp;
sfStackFrame.AddrStack.Mode = AddrModeFlat;
#elif _M_X64
imageType = IMAGE_FILE_MACHINE_AMD64;
sfStackFrame.AddrPC.Offset = pContext->Rip;
sfStackFrame.AddrPC.Mode = AddrModeFlat;
sfStackFrame.AddrFrame.Offset = pContext->Rsp;
sfStackFrame.AddrFrame.Mode = AddrModeFlat;
sfStackFrame.AddrStack.Offset = pContext->Rsp;
sfStackFrame.AddrStack.Mode = AddrModeFlat;
#else
warn("Could not obtain detailed exception information: platform not supported");
return;
#endif
for (int frameNum = 0; ; frameNum++)
{
if (frameNum > 1000)
{
warn("Endless stack, abort tracing");
return;
}
if (!StackWalk64(imageType, hProcess, hThread, &sfStackFrame, pContext, NULL, SymFunctionTableAccess64, SymGetModuleBase64, NULL))
{
warn("Could not obtain detailed exception information: StackWalk64 failed");
return;
}
DWORD64 dwAddr = sfStackFrame.AddrPC.Offset;
char szSymName[1024];
char szSrcFileName[1024];
int iLineNumber = 0;
DWORD64 dwSymbolDisplacement;
if (SymGetSymFromAddr64(hProcess, dwAddr, &dwSymbolDisplacement, pSym))
{
UnDecorateSymbolName(pSym->Name, szSymName, sizeof(szSymName), UNDNAME_COMPLETE);
szSymName[sizeof(szSymName) - 1] = '\0';
}
else
{
strncpy(szSymName, "<symbol not available>", sizeof(szSymName));
}
DWORD dwLineDisplacement;
if (SymGetLineFromAddr64(hProcess, dwAddr, &dwLineDisplacement, &ilLine))
{
iLineNumber = ilLine.LineNumber;
char* szUseFileName = ilLine.FileName;
char* szRoot = strstr(szUseFileName, "\\daemon\\");
if (szRoot)
{
szUseFileName = szRoot;
}
strncpy(szSrcFileName, szUseFileName, sizeof(szSrcFileName));
szSrcFileName[sizeof(szSrcFileName) - 1] = '\0';
}
else
{
strncpy(szSrcFileName, "<filename not available>", sizeof(szSrcFileName));
}
info("%s (%i) : %s", szSrcFileName, iLineNumber, szSymName);
if (sfStackFrame.AddrReturn.Offset == 0)
{
break;
}
}
}
#endif
LONG __stdcall ExceptionFilter(EXCEPTION_POINTERS* pExPtrs)
{
error("Unhandled Exception: code: 0x%8.8X, flags: %d, address: 0x%8.8X",
pExPtrs->ExceptionRecord->ExceptionCode,
pExPtrs->ExceptionRecord->ExceptionFlags,
pExPtrs->ExceptionRecord->ExceptionAddress);
#ifdef DEBUG
PrintBacktrace(pExPtrs->ContextRecord);
#else
info("Detailed exception information can be printed by debug version of NZBGet (available from download page)");
#endif
ExitProcess(-1);
return EXCEPTION_CONTINUE_SEARCH;
}
void InstallErrorHandler()
{
SetUnhandledExceptionFilter(ExceptionFilter);
}
#else
#ifdef DEBUG
typedef void(*sighandler)(int);
std::vector<sighandler> SignalProcList;
#endif
#ifdef HAVE_SYS_PRCTL_H
/**
* activates the creation of core-files
*/
void EnableDumpCore()
{
rlimit rlim;
rlim.rlim_cur= RLIM_INFINITY;
rlim.rlim_max= RLIM_INFINITY;
setrlimit(RLIMIT_CORE, &rlim);
prctl(PR_SET_DUMPABLE, 1);
}
#endif
void PrintBacktrace()
{
#ifdef HAVE_BACKTRACE
printf("Segmentation fault, tracing...\n");
void *array[100];
size_t size;
char **strings;
size_t i;
size = backtrace(array, 100);
strings = backtrace_symbols(array, size);
// first trace to screen
printf("Obtained %zd stack frames\n", size);
for (i = 0; i < size; i++)
{
printf("%s\n", strings[i]);
}
// then trace to log
error("Segmentation fault, tracing...");
error("Obtained %zd stack frames", size);
for (i = 0; i < size; i++)
{
error("%s", strings[i]);
}
free(strings);
#else
error("Segmentation fault");
#endif
}
/*
* Signal handler
*/
void SignalProc(int iSignal)
{
switch (iSignal)
{
case SIGINT:
signal(SIGINT, SIG_DFL); // Reset the signal handler
ExitProc();
break;
case SIGTERM:
signal(SIGTERM, SIG_DFL); // Reset the signal handler
ExitProc();
break;
case SIGCHLD:
// ignoring
break;
#ifdef DEBUG
case SIGSEGV:
signal(SIGSEGV, SIG_DFL); // Reset the signal handler
PrintBacktrace();
break;
#endif
}
}
void InstallErrorHandler()
{
#ifdef HAVE_SYS_PRCTL_H
if (g_pOptions->GetDumpCore())
{
EnableDumpCore();
}
#endif
signal(SIGINT, SignalProc);
signal(SIGTERM, SignalProc);
signal(SIGPIPE, SIG_IGN);
#ifdef DEBUG
signal(SIGSEGV, SignalProc);
#endif
#ifdef SIGCHLD_HANDLER
// on some systems it could be necessary to activate a handler for SIGCHLD;
// however, it makes trouble on other systems and is therefore deactivated by default
signal(SIGCHLD, SignalProc);
#endif
}
#endif
#ifdef DEBUG
class SegFault
{
public:
void DoSegFault()
{
char* N = NULL;
strcpy(N, "");
}
};
void TestSegFault()
{
SegFault s;
s.DoSegFault();
}
#endif

daemon/main/StackTrace.h Executable file

@@ -0,0 +1,35 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef STACKTRACE_H
#define STACKTRACE_H
void InstallErrorHandler();
#ifdef DEBUG
void TestSegFault();
#endif
#endif


@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -40,7 +40,6 @@
#include <unistd.h>
#include <pwd.h>
#include <grp.h>
#include <sys/resource.h>
#ifdef HAVE_SYS_PRCTL_H
#include <sys/prctl.h>
#endif
@@ -53,9 +52,6 @@
#ifndef DISABLE_PARCHECK
#include <iostream>
#endif
#ifdef HAVE_BACKTRACE
#include <execinfo.h>
#endif
#include "nzbget.h"
#include "ServerPool.h"
@@ -72,12 +68,18 @@
#include "MessageBase.h"
#include "DiskState.h"
#include "PrePostProcessor.h"
#include "HistoryCoordinator.h"
#include "DupeCoordinator.h"
#include "ParChecker.h"
#include "Scheduler.h"
#include "Scanner.h"
#include "FeedCoordinator.h"
#include "Maintenance.h"
#include "ArticleWriter.h"
#include "StatMeter.h"
#include "QueueScript.h"
#include "Util.h"
#include "StackTrace.h"
#ifdef WIN32
#include "NTService.h"
#endif
@@ -89,15 +91,7 @@ void Reload();
void Cleanup();
void ProcessClientRequest();
#ifndef WIN32
void InstallSignalHandlers();
void Daemonize();
void PrintBacktrace();
#ifdef HAVE_SYS_PRCTL_H
void EnableDumpCore();
#endif
#ifdef DEBUG
void MakeSegFault();
#endif
#endif
#ifndef DISABLE_PARCHECK
void DisableCout();
@@ -110,15 +104,18 @@ QueueCoordinator* g_pQueueCoordinator = NULL;
UrlCoordinator* g_pUrlCoordinator = NULL;
RemoteServer* g_pRemoteServer = NULL;
RemoteServer* g_pRemoteSecureServer = NULL;
DownloadSpeedMeter* g_pDownloadSpeedMeter = NULL;
DownloadQueueHolder* g_pDownloadQueueHolder = NULL;
StatMeter* g_pStatMeter = NULL;
Log* g_pLog = NULL;
PrePostProcessor* g_pPrePostProcessor = NULL;
HistoryCoordinator* g_pHistoryCoordinator = NULL;
DupeCoordinator* g_pDupeCoordinator = NULL;
DiskState* g_pDiskState = NULL;
Scheduler* g_pScheduler = NULL;
Scanner* g_pScanner = NULL;
FeedCoordinator* g_pFeedCoordinator = NULL;
Maintenance* g_pMaintenance = NULL;
ArticleCache* g_pArticleCache = NULL;
QueueScriptCoordinator* g_pQueueScriptCoordinator = NULL;
int g_iArgumentCount;
char* (*g_szEnvironmentVariables)[] = NULL;
char* (*g_szArguments)[] = NULL;
@@ -133,7 +130,7 @@ int main(int argc, char *argv[], char *argp[])
#ifdef _DEBUG
_CrtSetReportMode(_CRT_WARN, _CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG);
_CrtSetReportFile(_CRT_WARN, _CRTDBG_FILE_STDERR);
_CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF
_CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF
#ifdef DEBUG_CRTMEMLEAKS
| _CRTDBG_CHECK_CRT_DF | _CRTDBG_CHECK_ALWAYS_DF
#endif
@@ -170,12 +167,6 @@ int main(int argc, char *argv[], char *argp[])
RunMain();
#ifdef WIN32
#ifdef _DEBUG
_CrtDumpMemoryLeaks();
#endif
#endif
return 0;
}
@@ -212,11 +203,16 @@ void Run(bool bReload)
g_pServerPool = new ServerPool();
g_pScheduler = new Scheduler();
g_pQueueCoordinator = new QueueCoordinator();
g_pDownloadSpeedMeter = g_pQueueCoordinator;
g_pDownloadQueueHolder = g_pQueueCoordinator;
g_pStatMeter = new StatMeter();
g_pScanner = new Scanner();
g_pPrePostProcessor = new PrePostProcessor();
g_pHistoryCoordinator = new HistoryCoordinator();
g_pDupeCoordinator = new DupeCoordinator();
g_pUrlCoordinator = new UrlCoordinator();
g_pFeedCoordinator = new FeedCoordinator();
g_pArticleCache = new ArticleCache();
g_pMaintenance = new Maintenance();
g_pQueueScriptCoordinator = new QueueScriptCoordinator();
debug("Reading options");
g_pOptions = new Options(g_iArgumentCount, *g_szArguments);
@@ -229,13 +225,9 @@ void Run(bool bReload)
}
#endif
if (g_pOptions->GetServerMode() && g_pOptions->GetCreateLog() && g_pOptions->GetResetLog())
{
debug("Deleting old log-file");
g_pLog->ResetLog();
}
g_pLog->InitOptions();
g_pScanner->InitOptions();
g_pQueueScriptCoordinator->InitOptions();
if (g_pOptions->GetDaemonMode())
{
@@ -266,25 +258,16 @@ void Run(bool bReload)
if (!g_pOptions->GetRemoteClientMode())
{
g_pServerPool->InitConnections();
g_pStatMeter->Init();
}
#ifndef WIN32
#ifdef HAVE_SYS_PRCTL_H
if (g_pOptions->GetDumpCore())
{
EnableDumpCore();
}
#endif
#endif
InstallErrorHandler();
#ifndef WIN32
InstallSignalHandlers();
#ifdef DEBUG
if (g_pOptions->GetTestBacktrace())
{
MakeSegFault();
TestSegFault();
}
#endif
#endif
// client request
@@ -308,13 +291,6 @@ void Run(bool bReload)
}
}
// Creating PrePostProcessor
if (!g_pOptions->GetRemoteClientMode())
{
g_pScanner = new Scanner();
g_pPrePostProcessor = new PrePostProcessor();
}
// Create the frontend
if (!g_pOptions->GetDaemonMode())
{
@@ -346,13 +322,15 @@ void Run(bool bReload)
// Standalone-mode
if (!g_pOptions->GetServerMode())
{
NZBFile* pNZBFile = NZBFile::Create(g_pOptions->GetArgFilename(), g_pOptions->GetAddCategory() ? g_pOptions->GetAddCategory() : "");
const char* szCategory = g_pOptions->GetAddCategory() ? g_pOptions->GetAddCategory() : "";
NZBFile* pNZBFile = NZBFile::Create(g_pOptions->GetArgFilename(), szCategory);
if (!pNZBFile)
{
abort("FATAL ERROR: Parsing NZB-document %s failed\n\n", g_pOptions->GetArgFilename() ? g_pOptions->GetArgFilename() : "N/A");
return;
}
g_pQueueCoordinator->AddNZBFileToQueue(pNZBFile, false);
g_pScanner->InitPPParameters(szCategory, pNZBFile->GetNZBInfo()->GetParameters(), false);
g_pQueueCoordinator->AddNZBFileToQueue(pNZBFile, NULL, false);
delete pNZBFile;
}
@@ -365,12 +343,17 @@ void Run(bool bReload)
g_pUrlCoordinator->Start();
g_pPrePostProcessor->Start();
g_pFeedCoordinator->Start();
if (g_pOptions->GetArticleCache() > 0)
{
g_pArticleCache->Start();
}
// enter main program-loop
while (g_pQueueCoordinator->IsRunning() ||
g_pUrlCoordinator->IsRunning() ||
g_pPrePostProcessor->IsRunning() ||
g_pFeedCoordinator->IsRunning())
g_pFeedCoordinator->IsRunning() ||
g_pArticleCache->IsRunning())
{
if (!g_pOptions->GetServerMode() &&
!g_pQueueCoordinator->HasMoreJobs() &&
@@ -394,6 +377,10 @@ void Run(bool bReload)
{
g_pFeedCoordinator->Stop();
}
if (!g_pArticleCache->IsStopped())
{
g_pArticleCache->Stop();
}
}
usleep(100 * 1000);
}
@@ -403,8 +390,11 @@ void Run(bool bReload)
debug("UrlCoordinator stopped");
debug("PrePostProcessor stopped");
debug("FeedCoordinator stopped");
debug("ArticleCache stopped");
}
ScriptController::TerminateAll();
// Stop network-server
if (g_pRemoteServer)
{
@@ -486,14 +476,6 @@ void ProcessClientRequest()
Client->RequestServerPauseUnpause(false, eRemotePauseUnpauseActionDownload);
break;
case Options::opClientRequestDownload2Pause:
Client->RequestServerPauseUnpause(true, eRemotePauseUnpauseActionDownload2);
break;
case Options::opClientRequestDownload2Unpause:
Client->RequestServerPauseUnpause(false, eRemotePauseUnpauseActionDownload2);
break;
case Options::opClientRequestSetRate:
Client->RequestServerSetDownloadRate(g_pOptions->GetSetRate());
break;
@@ -503,9 +485,10 @@ void ProcessClientRequest()
break;
case Options::opClientRequestEditQueue:
Client->RequestServerEditQueue((eRemoteEditAction)g_pOptions->GetEditQueueAction(), g_pOptions->GetEditQueueOffset(),
g_pOptions->GetEditQueueText(), g_pOptions->GetEditQueueIDList(), g_pOptions->GetEditQueueIDCount(),
g_pOptions->GetEditQueueNameList(), (eRemoteMatchMode)g_pOptions->GetMatchMode(), true);
Client->RequestServerEditQueue((DownloadQueue::EEditAction)g_pOptions->GetEditQueueAction(),
g_pOptions->GetEditQueueOffset(), g_pOptions->GetEditQueueText(),
g_pOptions->GetEditQueueIDList(), g_pOptions->GetEditQueueIDCount(),
g_pOptions->GetEditQueueNameList(), (eRemoteMatchMode)g_pOptions->GetMatchMode());
break;
case Options::opClientRequestLog:
@@ -568,10 +551,6 @@ void ProcessClientRequest()
Client->RequestServerDownloadUrl(g_pOptions->GetLastArg(), g_pOptions->GetAddNZBFilename(), g_pOptions->GetAddCategory(), g_pOptions->GetAddTop(), g_pOptions->GetAddPaused(), g_pOptions->GetAddPriority());
break;
case Options::opClientRequestUrlQueue:
Client->RequestUrlQueue();
break;
case Options::opClientNoOperation:
break;
}
@@ -602,6 +581,7 @@ void ExitProc()
g_pUrlCoordinator->Stop();
g_pPrePostProcessor->Stop();
g_pFeedCoordinator->Stop();
g_pArticleCache->Stop();
}
}
}
@@ -613,114 +593,6 @@ void Reload()
ExitProc();
}
#ifndef WIN32
#ifdef DEBUG
typedef void(*sighandler)(int);
std::vector<sighandler> SignalProcList;
#endif
/*
* Signal handler
*/
void SignalProc(int iSignal)
{
switch (iSignal)
{
case SIGINT:
signal(SIGINT, SIG_DFL); // Reset the signal handler
ExitProc();
break;
case SIGTERM:
signal(SIGTERM, SIG_DFL); // Reset the signal handler
ExitProc();
break;
case SIGCHLD:
// ignoring
break;
#ifdef DEBUG
case SIGSEGV:
signal(SIGSEGV, SIG_DFL); // Reset the signal handler
PrintBacktrace();
break;
#endif
}
}
void InstallSignalHandlers()
{
signal(SIGINT, SignalProc);
signal(SIGTERM, SignalProc);
signal(SIGPIPE, SIG_IGN);
#ifdef DEBUG
signal(SIGSEGV, SignalProc);
#endif
#ifdef SIGCHLD_HANDLER
// it could be necessary on some systems to activate a handler for SIGCHLD
// however it make troubles on other systems and is deactivated by default
signal(SIGCHLD, SignalProc);
#endif
}
void PrintBacktrace()
{
#ifdef HAVE_BACKTRACE
printf("Segmentation fault, tracing...\n");
void *array[100];
size_t size;
char **strings;
size_t i;
size = backtrace(array, 100);
strings = backtrace_symbols(array, size);
// first trace to screen
printf("Obtained %zd stack frames\n", size);
for (i = 0; i < size; i++)
{
printf("%s\n", strings[i]);
}
// then trace to log
error("Segmentation fault, tracing...");
error("Obtained %zd stack frames", size);
for (i = 0; i < size; i++)
{
error("%s", strings[i]);
}
free(strings);
#else
error("Segmentation fault");
#endif
}
#ifdef DEBUG
void MakeSegFault()
{
char* N = NULL;
strcpy(N, "");
}
#endif
#ifdef HAVE_SYS_PRCTL_H
/**
* activates the creation of core-files
*/
void EnableDumpCore()
{
rlimit rlim;
rlim.rlim_cur= RLIM_INFINITY;
rlim.rlim_max= RLIM_INFINITY;
setrlimit(RLIMIT_CORE, &rlim);
prctl(PR_SET_DUMPABLE, 1);
}
#endif
#endif
void Cleanup()
{
debug("Cleaning up global objects");
@@ -747,6 +619,16 @@ void Cleanup()
g_pScanner = NULL;
debug("PrePostProcessor deleted");
debug("Deleting HistoryCoordinator");
delete g_pHistoryCoordinator;
g_pHistoryCoordinator = NULL;
debug("HistoryCoordinator deleted");
debug("Deleting DupeCoordinator");
delete g_pDupeCoordinator;
g_pDupeCoordinator = NULL;
debug("DupeCoordinator deleted");
debug("Deleting Frontend");
delete g_pFrontend;
g_pFrontend = NULL;
@@ -790,11 +672,26 @@ void Cleanup()
g_pFeedCoordinator = NULL;
debug("FeedCoordinator deleted");
debug("Deleting ArticleCache");
delete g_pArticleCache;
g_pArticleCache = NULL;
debug("ArticleCache deleted");
debug("Deleting QueueScriptCoordinator");
delete g_pQueueScriptCoordinator;
g_pQueueScriptCoordinator = NULL;
debug("QueueScriptCoordinator deleted");
debug("Deleting Maintenance");
delete g_pMaintenance;
g_pMaintenance = NULL;
debug("Maintenance deleted");
debug("Deleting StatMeter");
delete g_pStatMeter;
g_pStatMeter = NULL;
debug("StatMeter deleted");
if (!g_bReloading)
{
Connection::Final();


@@ -36,7 +36,7 @@
#endif
#define fdopen _fdopen
#define ctime_r(timep, buf, bufsize) ctime_s(buf, bufsize, timep)
#define localtime_r(time, tm) localtime_s(tm, time)
#define gmtime_r(time, tm) gmtime_s(tm, time)
#define strtok_r(str, delim, saveptr) strtok_s(str, delim, saveptr)
#define strerror_r(errnum, buffer, size) strerror_s(buffer, size, errnum)
#define int32_t __int32
@@ -50,7 +50,6 @@
#define S_ISREG(mode) __S_ISTYPE((mode), _S_IFREG)
#define S_DIRMODE NULL
#define usleep(usec) Sleep((usec) / 1000)
#define gettimeofday(tm, ignore) _ftime(tm)
#define socklen_t int
#define SHUT_WR 0x01
#define SHUT_RDWR 0x02
@@ -59,9 +58,17 @@
#define LINE_ENDING "\r\n"
#define pid_t int
#define atoll _atoi64
#define fseek _fseeki64
#define ftell _ftelli64
#ifndef FSCTL_SET_SPARSE
#define FSCTL_SET_SPARSE 590020
#endif
#define FOPEN_RB "rbN"
#define FOPEN_RBP "rb+N"
#define FOPEN_WB "wbN"
#define FOPEN_WBP "wb+N"
#define FOPEN_AB "abN"
#define FOPEN_ABP "ab+N"
#pragma warning(disable:4800) // 'type' : forcing value to bool 'true' or 'false' (performance warning)
#pragma warning(disable:4267) // 'var' : conversion from 'size_t' to 'type', possible loss of data
@@ -78,6 +85,12 @@
#define MAX_PATH 1024
#define S_DIRMODE (S_IRWXU | S_IRWXG | S_IRWXO)
#define LINE_ENDING "\n"
#define FOPEN_RB "rb"
#define FOPEN_RBP "rb+"
#define FOPEN_WB "wb"
#define FOPEN_WBP "wb+"
#define FOPEN_AB "ab"
#define FOPEN_ABP "ab+"
#endif
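
The FOPEN_* macros added above give a single portable spelling for fopen() modes; a hedged example of the intended use (the file name is made up):

// On Windows the trailing "N" in "rbN" asks the CRT to open the file as
// non-inheritable by child processes; on other platforms the macro is plain "rb".
FILE* pFile = fopen("example.dat", FOPEN_RB);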


@@ -0,0 +1,723 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef WIN32
#include "win32.h"
#endif
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#ifdef WIN32
#include <direct.h>
#else
#include <unistd.h>
#include <sys/time.h>
#endif
#include <sys/stat.h>
#include <errno.h>
#include "nzbget.h"
#include "ArticleDownloader.h"
#include "ArticleWriter.h"
#include "Decoder.h"
#include "Log.h"
#include "Options.h"
#include "ServerPool.h"
#include "StatMeter.h"
#include "Util.h"
extern Options* g_pOptions;
extern ServerPool* g_pServerPool;
extern StatMeter* g_pStatMeter;
ArticleDownloader::ArticleDownloader()
{
debug("Creating ArticleDownloader");
m_szInfoName = NULL;
m_szConnectionName[0] = '\0';
m_pConnection = NULL;
m_eStatus = adUndefined;
m_eFormat = Decoder::efUnknown;
m_szArticleFilename = NULL;
m_iDownloadedSize = 0;
m_ArticleWriter.SetOwner(this);
SetLastUpdateTimeNow();
}
ArticleDownloader::~ArticleDownloader()
{
debug("Destroying ArticleDownloader");
free(m_szInfoName);
free(m_szArticleFilename);
}
void ArticleDownloader::SetInfoName(const char* szInfoName)
{
m_szInfoName = strdup(szInfoName);
m_ArticleWriter.SetInfoName(m_szInfoName);
}
/*
* How server management (for one particular article) works:
- there is a list of failed servers which is initially empty;
- level is initially 0;
<loop>
- request a connection from server pool for current level;
Exception: this step is skipped for the very first download attempt, because a
level-0 connection is initially passed from queue manager;
- try to download from server;
- if the connection to the server cannot be established or the download fails due to an interrupted connection,
retry the same server (as many times as needed, without limit) until the connection is OK;
- if download fails with error "Not-Found" (article or group not found) or with CRC error,
add the server to failed server list;
- if download fails with general failure error (article incomplete, other unknown error
codes), try the same server again as many times as defined by option <Retries>; if all attempts
fail, add the server to failed server list;
- if all servers from current level were tried, increase level;
- if all servers from all levels were tried, break the loop with failure status.
<end-loop>
*/
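/*
 * A concrete, hypothetical walk-through of the rules above, assuming servers
 * S1 and S2 on level 0 and S3 on level 1: a "430 no such article" from S1 puts
 * S1 on the failed-server list and the article is requested from S2; if S2
 * reports a general failure it is retried up to <Retries> times before being
 * added to the failed list as well; with no level-0 server left the level is
 * increased and S3 is tried; only when S3 also fails does the loop end with a
 * failure status for this article.
 */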
void ArticleDownloader::Run()
{
debug("Entering ArticleDownloader-loop");
SetStatus(adRunning);
m_ArticleWriter.SetFileInfo(m_pFileInfo);
m_ArticleWriter.SetArticleInfo(m_pArticleInfo);
m_ArticleWriter.Prepare();
EStatus Status = adFailed;
int iRetries = g_pOptions->GetRetries() > 0 ? g_pOptions->GetRetries() : 1;
int iRemainedRetries = iRetries;
Servers failedServers;
failedServers.reserve(g_pServerPool->GetServers()->size());
NewsServer* pWantServer = NULL;
NewsServer* pLastServer = NULL;
int iLevel = 0;
int iServerConfigGeneration = g_pServerPool->GetGeneration();
bool bForce = m_pFileInfo->GetNZBInfo()->GetForcePriority();
while (!IsStopped())
{
Status = adFailed;
SetStatus(adWaiting);
while (!m_pConnection && !(IsStopped() || iServerConfigGeneration != g_pServerPool->GetGeneration()))
{
m_pConnection = g_pServerPool->GetConnection(iLevel, pWantServer, &failedServers);
usleep(5 * 1000);
}
SetLastUpdateTimeNow();
SetStatus(adRunning);
if (IsStopped() || (g_pOptions->GetPauseDownload() && !bForce) ||
(g_pOptions->GetTempPauseDownload() && !m_pFileInfo->GetExtraPriority()) ||
iServerConfigGeneration != g_pServerPool->GetGeneration())
{
Status = adRetry;
break;
}
pLastServer = m_pConnection->GetNewsServer();
m_pConnection->SetSuppressErrors(false);
snprintf(m_szConnectionName, sizeof(m_szConnectionName), "%s (%s)",
m_pConnection->GetNewsServer()->GetName(), m_pConnection->GetHost());
m_szConnectionName[sizeof(m_szConnectionName) - 1] = '\0';
// test connection
bool bConnected = m_pConnection && m_pConnection->Connect();
if (bConnected && !IsStopped())
{
NewsServer* pNewsServer = m_pConnection->GetNewsServer();
detail("Downloading %s @ %s", m_szInfoName, m_szConnectionName);
Status = Download();
if (Status == adFinished || Status == adFailed || Status == adNotFound || Status == adCrcError)
{
m_ServerStats.StatOp(pNewsServer->GetID(), Status == adFinished ? 1 : 0, Status == adFinished ? 0 : 1, ServerStatList::soSet);
}
}
if (bConnected)
{
if (Status == adConnectError)
{
m_pConnection->Disconnect();
bConnected = false;
Status = adFailed;
}
else
{
// Freeing the connection allows other threads to start.
// We do this only if the problem was with the article or group.
// If the problem occurred during connecting or authorization we do not
// free the connection, to prevent the starting of thousands of threads
// (because each of them would also free its connection after the
// same connect-error).
FreeConnection(Status == adFinished || Status == adNotFound);
}
}
if (m_pConnection)
{
AddServerData();
}
if (Status == adFinished || Status == adFatalError)
{
break;
}
pWantServer = NULL;
if (bConnected && Status == adFailed)
{
iRemainedRetries--;
}
if (!bConnected || (Status == adFailed && iRemainedRetries > 0))
{
pWantServer = pLastServer;
}
if (pWantServer &&
!(IsStopped() || (g_pOptions->GetPauseDownload() && !bForce) ||
(g_pOptions->GetTempPauseDownload() && !m_pFileInfo->GetExtraPriority()) ||
iServerConfigGeneration != g_pServerPool->GetGeneration()))
{
detail("Waiting %i sec to retry", g_pOptions->GetRetryInterval());
SetStatus(adWaiting);
int msec = 0;
while (!(IsStopped() || (g_pOptions->GetPauseDownload() && !bForce) ||
(g_pOptions->GetTempPauseDownload() && !m_pFileInfo->GetExtraPriority()) ||
iServerConfigGeneration != g_pServerPool->GetGeneration()) &&
msec < g_pOptions->GetRetryInterval() * 1000)
{
usleep(100 * 1000);
msec += 100;
}
SetLastUpdateTimeNow();
SetStatus(adRunning);
}
if (IsStopped() || (g_pOptions->GetPauseDownload() && !bForce) ||
(g_pOptions->GetTempPauseDownload() && !m_pFileInfo->GetExtraPriority()) ||
iServerConfigGeneration != g_pServerPool->GetGeneration())
{
Status = adRetry;
break;
}
if (!pWantServer)
{
failedServers.push_back(pLastServer);
// if all servers from current level were tried, increase level
// if all servers from all levels were tried, break the loop with failure status
bool bAllServersOnLevelFailed = true;
for (Servers::iterator it = g_pServerPool->GetServers()->begin(); it != g_pServerPool->GetServers()->end(); it++)
{
NewsServer* pCandidateServer = *it;
if (pCandidateServer->GetNormLevel() == iLevel)
{
bool bServerFailed = !pCandidateServer->GetActive() || pCandidateServer->GetMaxConnections() == 0;
if (!bServerFailed)
{
for (Servers::iterator it = failedServers.begin(); it != failedServers.end(); it++)
{
NewsServer* pIgnoreServer = *it;
if (pIgnoreServer == pCandidateServer ||
(pIgnoreServer->GetGroup() > 0 && pIgnoreServer->GetGroup() == pCandidateServer->GetGroup() &&
pIgnoreServer->GetNormLevel() == pCandidateServer->GetNormLevel()))
{
bServerFailed = true;
break;
}
}
}
if (!bServerFailed)
{
bAllServersOnLevelFailed = false;
break;
}
}
}
if (bAllServersOnLevelFailed)
{
if (iLevel < g_pServerPool->GetMaxNormLevel())
{
detail("Article %s @ all level %i servers failed, increasing level", m_szInfoName, iLevel);
iLevel++;
}
else
{
detail("Article %s @ all servers failed", m_szInfoName);
Status = adFailed;
break;
}
}
iRemainedRetries = iRetries;
}
}
FreeConnection(Status == adFinished);
if (m_ArticleWriter.GetDuplicate())
{
Status = adFinished;
}
if (Status != adFinished && Status != adRetry)
{
Status = adFailed;
}
if (IsStopped())
{
detail("Download %s cancelled", m_szInfoName);
Status = adRetry;
}
if (Status == adFailed)
{
detail("Download %s failed", m_szInfoName);
}
SetStatus(Status);
Notify(NULL);
debug("Exiting ArticleDownloader-loop");
}
ArticleDownloader::EStatus ArticleDownloader::Download()
{
const char* szResponse = NULL;
EStatus Status = adRunning;
m_bWritingStarted = false;
m_pArticleInfo->SetCrc(0);
if (m_pConnection->GetNewsServer()->GetJoinGroup())
{
// change group
for (FileInfo::Groups::iterator it = m_pFileInfo->GetGroups()->begin(); it != m_pFileInfo->GetGroups()->end(); it++)
{
szResponse = m_pConnection->JoinGroup(*it);
if (szResponse && !strncmp(szResponse, "2", 1))
{
break;
}
}
Status = CheckResponse(szResponse, "could not join group");
if (Status != adFinished)
{
return Status;
}
}
// retrieve article
char tmp[1024];
snprintf(tmp, 1024, "ARTICLE %s\r\n", m_pArticleInfo->GetMessageID());
tmp[1024-1] = '\0';
for (int retry = 3; retry > 0; retry--)
{
szResponse = m_pConnection->Request(tmp);
if ((szResponse && !strncmp(szResponse, "2", 1)) || m_pConnection->GetAuthError())
{
break;
}
}
Status = CheckResponse(szResponse, "could not fetch article");
if (Status != adFinished)
{
return Status;
}
if (g_pOptions->GetDecode())
{
m_YDecoder.Clear();
m_YDecoder.SetCrcCheck(g_pOptions->GetCrcCheck());
m_UDecoder.Clear();
}
bool bBody = false;
bool bEnd = false;
const int LineBufSize = 1024*10;
char* szLineBuf = (char*)malloc(LineBufSize);
Status = adRunning;
while (!IsStopped())
{
time_t tOldTime = m_tLastUpdateTime;
SetLastUpdateTimeNow();
if (tOldTime != m_tLastUpdateTime)
{
AddServerData();
}
// Throttle the bandwidth
while (!IsStopped() && (g_pOptions->GetDownloadRate() > 0.0f) &&
(g_pStatMeter->CalcCurrentDownloadSpeed() > g_pOptions->GetDownloadRate()))
{
SetLastUpdateTimeNow();
usleep(10 * 1000);
}
int iLen = 0;
char* line = m_pConnection->ReadLine(szLineBuf, LineBufSize, &iLen);
g_pStatMeter->AddSpeedReading(iLen);
if (g_pOptions->GetAccurateRate())
{
AddServerData();
}
// Have we encountered a timeout?
if (!line)
{
if (!IsStopped())
{
detail("Article %s @ %s failed: Unexpected end of article", m_szInfoName, m_szConnectionName);
}
Status = adFailed;
break;
}
//detect end of article
if (!strcmp(line, ".\r\n") || !strcmp(line, ".\n"))
{
bEnd = true;
break;
}
//detect lines starting with "." (marked as "..")
if (!strncmp(line, "..", 2))
{
line++;
iLen--;
}
if (!bBody)
{
// detect body of article
if (*line == '\r' || *line == '\n')
{
bBody = true;
}
// check id of returned article
else if (!strncmp(line, "Message-ID: ", 12))
{
char* p = line + 12;
if (strncmp(p, m_pArticleInfo->GetMessageID(), strlen(m_pArticleInfo->GetMessageID())))
{
if (char* e = strrchr(p, '\r')) *e = '\0'; // remove trailing CR-character
detail("Article %s @ %s failed: Wrong message-id, expected %s, returned %s", m_szInfoName,
m_szConnectionName, m_pArticleInfo->GetMessageID(), p);
Status = adFailed;
break;
}
}
}
else if (m_eFormat == Decoder::efUnknown && g_pOptions->GetDecode())
{
m_eFormat = Decoder::DetectFormat(line, iLen);
}
// write to output file
if (((bBody && m_eFormat != Decoder::efUnknown) || !g_pOptions->GetDecode()) && !Write(line, iLen))
{
Status = adFatalError;
break;
}
}
free(szLineBuf);
if (!bEnd && Status == adRunning && !IsStopped())
{
detail("Article %s @ %s failed: article incomplete", m_szInfoName, m_szConnectionName);
Status = adFailed;
}
if (IsStopped())
{
Status = adFailed;
}
if (Status == adRunning)
{
FreeConnection(true);
Status = DecodeCheck();
}
if (m_bWritingStarted)
{
m_ArticleWriter.Finish(Status == adFinished);
}
if (Status == adFinished)
{
detail("Successfully downloaded %s", m_szInfoName);
}
return Status;
}
ArticleDownloader::EStatus ArticleDownloader::CheckResponse(const char* szResponse, const char* szComment)
{
if (!szResponse)
{
if (!IsStopped())
{
detail("Article %s @ %s failed, %s: Connection closed by remote host",
m_szInfoName, m_szConnectionName, szComment);
}
return adConnectError;
}
else if (m_pConnection->GetAuthError() || !strncmp(szResponse, "400", 3) || !strncmp(szResponse, "499", 3))
{
detail("Article %s @ %s failed, %s: %s", m_szInfoName, m_szConnectionName, szComment, szResponse);
return adConnectError;
}
else if (!strncmp(szResponse, "41", 2) || !strncmp(szResponse, "42", 2) || !strncmp(szResponse, "43", 2))
{
detail("Article %s @ %s failed, %s: %s", m_szInfoName, m_szConnectionName, szComment, szResponse);
return adNotFound;
}
else if (!strncmp(szResponse, "2", 1))
{
// OK
return adFinished;
}
else
{
// unknown error, no special handling
detail("Article %s @ %s failed, %s: %s", m_szInfoName, m_szConnectionName, szComment, szResponse);
return adFailed;
}
}
bool ArticleDownloader::Write(char* szLine, int iLen)
{
const char* szArticleFilename = NULL;
long long iArticleFileSize = 0;
long long iArticleOffset = 0;
int iArticleSize = 0;
if (g_pOptions->GetDecode())
{
if (m_eFormat == Decoder::efYenc)
{
iLen = m_YDecoder.DecodeBuffer(szLine, iLen);
szArticleFilename = m_YDecoder.GetArticleFilename();
iArticleFileSize = m_YDecoder.GetSize();
}
else if (m_eFormat == Decoder::efUx)
{
iLen = m_UDecoder.DecodeBuffer(szLine, iLen);
szArticleFilename = m_UDecoder.GetArticleFilename();
}
else
{
detail("Decoding %s failed: unsupported encoding", m_szInfoName);
return false;
}
if (iLen > 0 && m_eFormat == Decoder::efYenc)
{
if (m_YDecoder.GetBegin() == 0 || m_YDecoder.GetEnd() == 0)
{
return false;
}
iArticleOffset = m_YDecoder.GetBegin() - 1;
iArticleSize = (int)(m_YDecoder.GetEnd() - m_YDecoder.GetBegin() + 1);
}
}
if (!m_bWritingStarted && iLen > 0)
{
if (!m_ArticleWriter.Start(m_eFormat, szArticleFilename, iArticleFileSize, iArticleOffset, iArticleSize))
{
return false;
}
m_bWritingStarted = true;
}
bool bOK = iLen == 0 || m_ArticleWriter.Write(szLine, iLen);
return bOK;
}
ArticleDownloader::EStatus ArticleDownloader::DecodeCheck()
{
if (g_pOptions->GetDecode())
{
Decoder* pDecoder = NULL;
if (m_eFormat == Decoder::efYenc)
{
pDecoder = &m_YDecoder;
}
else if (m_eFormat == Decoder::efUx)
{
pDecoder = &m_UDecoder;
}
else
{
detail("Decoding %s failed: no binary data or unsupported encoding format", m_szInfoName);
return adFailed;
}
Decoder::EStatus eStatus = pDecoder->Check();
if (eStatus == Decoder::eFinished)
{
if (pDecoder->GetArticleFilename())
{
free(m_szArticleFilename);
m_szArticleFilename = strdup(pDecoder->GetArticleFilename());
}
if (m_eFormat == Decoder::efYenc)
{
m_pArticleInfo->SetCrc(g_pOptions->GetCrcCheck() ?
m_YDecoder.GetCalculatedCrc() : m_YDecoder.GetExpectedCrc());
}
return adFinished;
}
else if (eStatus == Decoder::eCrcError)
{
detail("Decoding %s failed: CRC-Error", m_szInfoName);
return adCrcError;
}
else if (eStatus == Decoder::eArticleIncomplete)
{
detail("Decoding %s failed: article incomplete", m_szInfoName);
return adFailed;
}
else if (eStatus == Decoder::eInvalidSize)
{
detail("Decoding %s failed: size mismatch", m_szInfoName);
return adFailed;
}
else if (eStatus == Decoder::eNoBinaryData)
{
detail("Decoding %s failed: no binary data found", m_szInfoName);
return adFailed;
}
else
{
detail("Decoding %s failed", m_szInfoName);
return adFailed;
}
}
else
{
return adFinished;
}
}
void ArticleDownloader::LogDebugInfo()
{
char szTime[50];
#ifdef HAVE_CTIME_R_3
ctime_r(&m_tLastUpdateTime, szTime, 50);
#else
ctime_r(&m_tLastUpdateTime, szTime);
#endif
info(" Download: Status=%i, LastUpdateTime=%s, InfoName=%s", m_eStatus, szTime, m_szInfoName);
}
void ArticleDownloader::Stop()
{
debug("Trying to stop ArticleDownloader");
Thread::Stop();
m_mutexConnection.Lock();
if (m_pConnection)
{
m_pConnection->SetSuppressErrors(true);
m_pConnection->Cancel();
}
m_mutexConnection.Unlock();
debug("ArticleDownloader stopped successfully");
}
bool ArticleDownloader::Terminate()
{
NNTPConnection* pConnection = m_pConnection;
bool terminated = Kill();
if (terminated && pConnection)
{
debug("Terminating connection");
pConnection->SetSuppressErrors(true);
pConnection->Cancel();
pConnection->Disconnect();
g_pStatMeter->AddServerData(pConnection->FetchTotalBytesRead(), pConnection->GetNewsServer()->GetID());
g_pServerPool->FreeConnection(pConnection, true);
}
return terminated;
}
void ArticleDownloader::FreeConnection(bool bKeepConnected)
{
if (m_pConnection)
{
debug("Releasing connection");
m_mutexConnection.Lock();
if (!bKeepConnected || m_pConnection->GetStatus() == Connection::csCancelled)
{
m_pConnection->Disconnect();
}
AddServerData();
g_pServerPool->FreeConnection(m_pConnection, true);
m_pConnection = NULL;
m_mutexConnection.Unlock();
}
}
void ArticleDownloader::AddServerData()
{
int iBytesRead = m_pConnection->FetchTotalBytesRead();
g_pStatMeter->AddServerData(iBytesRead, m_pConnection->GetNewsServer()->GetID());
m_iDownloadedSize += iBytesRead;
}
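
Below is a hedged sketch of how a caller is expected to wire up a downloader, using only the setters declared in ArticleDownloader.h further down; the helper name is hypothetical and the real wiring lives in the queue code.

// Hypothetical helper, for illustration only.
void StartArticleDownload(FileInfo* pFileInfo, ArticleInfo* pArticleInfo, NNTPConnection* pConnection)
{
    ArticleDownloader* pArticleDownloader = new ArticleDownloader();
    pArticleDownloader->SetFileInfo(pFileInfo);
    pArticleDownloader->SetArticleInfo(pArticleInfo);
    pArticleDownloader->SetInfoName("example-article");  // the real code builds a descriptive name here
    pArticleDownloader->SetConnection(pConnection);      // initial level-0 connection; Run() asks the ServerPool for further ones
    pArticleDownloader->Start();                         // Thread::Start() eventually invokes Run() above
}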


@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -34,6 +34,7 @@
#include "Thread.h"
#include "NNTPConnection.h"
#include "Decoder.h"
#include "ArticleWriter.h"
class ArticleDownloader : public Thread, public Subject
{
@@ -47,13 +48,20 @@ public:
adFailed,
adRetry,
adCrcError,
adDecoding,
adJoining,
adJoined,
adNotFound,
adConnectError,
adFatalError
};
class ArticleWriterImpl : public ArticleWriter
{
private:
ArticleDownloader* m_pOwner;
protected:
virtual void SetLastUpdateTimeNow() { m_pOwner->SetLastUpdateTimeNow(); }
public:
void SetOwner(ArticleDownloader* pOwner) { m_pOwner = pOwner; }
};
private:
FileInfo* m_pFileInfo;
@@ -61,35 +69,29 @@ private:
NNTPConnection* m_pConnection;
EStatus m_eStatus;
Mutex m_mutexConnection;
const char* m_szResultFilename;
char* m_szTempFilename;
char* m_szArticleFilename;
char* m_szInfoName;
char* m_szOutputFilename;
char m_szConnectionName[250];
char* m_szArticleFilename;
time_t m_tLastUpdateTime;
Decoder::EFormat m_eFormat;
YDecoder m_YDecoder;
UDecoder m_UDecoder;
FILE* m_pOutFile;
bool m_bDuplicate;
ArticleWriterImpl m_ArticleWriter;
ServerStatList m_ServerStats;
bool m_bWritingStarted;
int m_iDownloadedSize;
EStatus Download();
bool Write(char* szLine, int iLen);
bool PrepareFile(char* szLine);
bool CreateOutputFile(int iSize);
void BuildOutputFilename();
EStatus DecodeCheck();
void FreeConnection(bool bKeepConnected);
EStatus CheckResponse(const char* szResponse, const char* szComment);
void SetStatus(EStatus eStatus) { m_eStatus = eStatus; }
const char* GetTempFilename() { return m_szTempFilename; }
void SetTempFilename(const char* v);
void SetOutputFilename(const char* v);
bool Write(char* szLine, int iLen);
void AddServerData();
public:
ArticleDownloader();
~ArticleDownloader();
virtual ~ArticleDownloader();
void SetFileInfo(FileInfo* pFileInfo) { m_pFileInfo = pFileInfo; }
FileInfo* GetFileInfo() { return m_pFileInfo; }
void SetArticleInfo(ArticleInfo* pArticleInfo) { m_pArticleInfo = pArticleInfo; }
@@ -102,21 +104,14 @@ public:
time_t GetLastUpdateTime() { return m_tLastUpdateTime; }
void SetLastUpdateTimeNow() { m_tLastUpdateTime = ::time(NULL); }
const char* GetArticleFilename() { return m_szArticleFilename; }
void SetInfoName(const char* v);
void SetInfoName(const char* szInfoName);
const char* GetInfoName() { return m_szInfoName; }
void CompleteFileParts();
static bool MoveCompletedFiles(NZBInfo* pNZBInfo, const char* szOldDestDir);
const char* GetConnectionName() { return m_szConnectionName; }
void SetConnection(NNTPConnection* pConnection) { m_pConnection = pConnection; }
void CompleteFileParts() { m_ArticleWriter.CompleteFileParts(); }
int GetDownloadedSize() { return m_iDownloadedSize; }
void LogDebugInfo();
};
class DownloadSpeedMeter
{
public:
virtual ~DownloadSpeedMeter() {};
virtual int CalcCurrentDownloadSpeed() = 0;
virtual void AddSpeedReading(int iBytes) = 0;
};
#endif

File diff suppressed because it is too large

daemon/nntp/ArticleWriter.h Normal file

@@ -0,0 +1,102 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef ARTICLEWRITER_H
#define ARTICLEWRITER_H
#include "DownloadInfo.h"
#include "Decoder.h"
class ArticleWriter
{
private:
FileInfo* m_pFileInfo;
ArticleInfo* m_pArticleInfo;
FILE* m_pOutFile;
char* m_szTempFilename;
char* m_szOutputFilename;
const char* m_szResultFilename;
Decoder::EFormat m_eFormat;
char* m_pArticleData;
long long m_iArticleOffset;
int m_iArticleSize;
int m_iArticlePtr;
bool m_bFlushing;
bool m_bDuplicate;
char* m_szInfoName;
bool PrepareFile(char* szLine);
bool CreateOutputFile(long long iSize);
void BuildOutputFilename();
bool IsFileCached();
void SetWriteBuffer(FILE* pOutFile, int iRecSize);
protected:
virtual void SetLastUpdateTimeNow() {}
public:
ArticleWriter();
~ArticleWriter();
void SetInfoName(const char* szInfoName);
void SetFileInfo(FileInfo* pFileInfo) { m_pFileInfo = pFileInfo; }
void SetArticleInfo(ArticleInfo* pArticleInfo) { m_pArticleInfo = pArticleInfo; }
void Prepare();
bool Start(Decoder::EFormat eFormat, const char* szFilename, long long iFileSize, long long iArticleOffset, int iArticleSize);
bool Write(char* szBuffer, int iLen);
void Finish(bool bSuccess);
bool GetDuplicate() { return m_bDuplicate; }
void CompleteFileParts();
static bool MoveCompletedFiles(NZBInfo* pNZBInfo, const char* szOldDestDir);
void FlushCache();
};
class ArticleCache : public Thread
{
private:
size_t m_iAllocated;
bool m_bFlushing;
Mutex m_mutexAlloc;
Mutex m_mutexFlush;
Mutex m_mutexContent;
FileInfo* m_pFileInfo;
bool CheckFlush(bool bFlushEverything);
public:
ArticleCache();
virtual void Run();
void* Alloc(int iSize);
void* Realloc(void* buf, int iOldSize, int iNewSize);
void Free(int iSize);
void LockFlush();
void UnlockFlush();
void LockContent() { m_mutexContent.Lock(); }
void UnlockContent() { m_mutexContent.Unlock(); }
bool GetFlushing() { return m_bFlushing; }
size_t GetAllocated() { return m_iAllocated; }
bool FileBusy(FileInfo* pFileInfo) { return pFileInfo == m_pFileInfo; }
};
#endif
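
A hedged sketch of the write path this class exposes, mirroring the calls ArticleDownloader makes above; the helper and its parameter values are illustrative only.

// Hypothetical helper showing the expected call order: Prepare(), then Start()
// with the values reported by the decoder, then Write() per decoded line, then
// Finish() with the final success flag.
void WriteDecodedArticle(ArticleWriter& writer, FileInfo* pFileInfo, ArticleInfo* pArticleInfo,
    const char* szYencName, long long iFileSize, long long iOffset, int iSize,
    char* szDecodedData, int iDecodedLen)
{
    writer.SetFileInfo(pFileInfo);
    writer.SetArticleInfo(pArticleInfo);
    writer.Prepare();
    if (writer.Start(Decoder::efYenc, szYencName, iFileSize, iOffset, iSize))
    {
        writer.Write(szDecodedData, iDecodedLen);
        writer.Finish(true);
    }
}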


@@ -1,8 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -45,14 +44,11 @@
#include "Util.h"
const char* Decoder::FormatNames[] = { "Unknown", "yEnc", "UU" };
unsigned int YDecoder::crc_tab[256];
Decoder::Decoder()
{
debug("Creating Decoder");
m_szSrcFilename = NULL;
m_szDestFilename = NULL;
m_szArticleFilename = NULL;
}
@@ -107,17 +103,6 @@ Decoder::EFormat Decoder::DetectFormat(const char* buffer, int len)
* YDecoder: fast implementation of yEnc-Decoder
*/
void YDecoder::Init()
{
debug("Initializing global decoder");
crc32gentab();
}
void YDecoder::Final()
{
debug("Finalizing global Decoder");
}
YDecoder::YDecoder()
{
Clear();
@@ -135,69 +120,13 @@ void YDecoder::Clear()
m_lExpectedCRC = 0;
m_lCalculatedCRC = 0xFFFFFFFF;
m_iBegin = 0;
m_iEnd = 0xFFFFFFFF;
m_iEnd = 0;
m_iSize = 0;
m_iEndSize = 0;
m_bAutoSeek = false;
m_bNeedSetPos = false;
m_bCrcCheck = false;
}
/* from crc32.c (http://www.koders.com/c/fid699AFE0A656F0022C9D6B9D1743E697B69CE5815.aspx)
*
* (c) 1999,2000 Krzysztof Dabrowski
* (c) 1999,2000 ElysiuM deeZine
* Released under GPL (thanks)
*
* chksum_crc32gentab() -- to a global crc_tab[256], this one will
* calculate the crcTable for crc32-checksums.
* it is generated to the polynom [..]
*/
void YDecoder::crc32gentab()
{
unsigned long crc, poly;
int i, j;
poly = 0xEDB88320L;
for (i = 0; i < 256; i++)
{
crc = i;
for (j = 8; j > 0; j--)
{
if (crc & 1)
{
crc = (crc >> 1) ^ poly;
}
else
{
crc >>= 1;
}
}
crc_tab[i] = crc;
}
}
/* This is modified version of chksum_crc() from
* crc32.c (http://www.koders.com/c/fid699AFE0A656F0022C9D6B9D1743E697B69CE5815.aspx)
* (c) 1999,2000 Krzysztof Dabrowski
* (c) 1999,2000 ElysiuM deeZine
*
* chksum_crc() -- to a given block, this one calculates the
* crc32-checksum until the length is
* reached. the crc32-checksum will be
* the result.
*/
unsigned long YDecoder::crc32m(unsigned long startCrc, unsigned char *block, unsigned int length)
{
register unsigned long crc = startCrc;
for (unsigned long i = 0; i < length; i++)
{
crc = ((crc >> 8) & 0x00FFFFFF) ^ crc_tab[(crc ^ *block++) & 0xFF];
}
return crc;
}
unsigned int YDecoder::DecodeBuffer(char* buffer)
int YDecoder::DecodeBuffer(char* buffer, int len)
{
if (m_bBody && !m_bEnd)
{
@@ -215,7 +144,7 @@ unsigned int YDecoder::DecodeBuffer(char* buffer)
if (pb)
{
pb += 6; //=strlen(" size=")
m_iEndSize = (int)atoi(pb);
m_iEndSize = (long long)atoll(pb);
}
return 0;
}
@@ -247,9 +176,9 @@ BreakLoop:
if (m_bCrcCheck)
{
m_lCalculatedCRC = crc32m(m_lCalculatedCRC, (unsigned char *)buffer, (unsigned int)(optr - buffer));
m_lCalculatedCRC = Util::Crc32m(m_lCalculatedCRC, (unsigned char *)buffer, (unsigned int)(optr - buffer));
}
return (unsigned int)(optr - buffer);
return optr - buffer;
}
else
{
@@ -271,7 +200,7 @@ BreakLoop:
if (pb)
{
pb += 6; //=strlen(" size=")
m_iSize = (int)atoi(pb);
m_iSize = (long long)atoll(pb);
}
m_bPart = strstr(buffer, " part=");
if (!m_bPart)
@@ -289,13 +218,13 @@ BreakLoop:
if (pb)
{
pb += 7; //=strlen(" begin=")
m_iBegin = (int)atoi(pb);
m_iBegin = (long long)atoll(pb);
}
pb = strstr(buffer, " end=");
if (pb)
{
pb += 5; //=strlen(" end=")
m_iEnd = (int)atoi(pb);
m_iEnd = (long long)atoll(pb);
}
}
}
@@ -303,28 +232,6 @@ BreakLoop:
return 0;
}
bool YDecoder::Write(char* buffer, int len, FILE* outfile)
{
unsigned int wcnt = DecodeBuffer(buffer);
if (wcnt > 0)
{
if (m_bNeedSetPos)
{
if (m_iBegin == 0 || m_iEnd == 0xFFFFFFFF || !outfile)
{
return false;
}
if (fseek(outfile, m_iBegin - 1, SEEK_SET))
{
return false;
}
m_bNeedSetPos = false;
}
fwrite(buffer, 1, wcnt, outfile);
}
return true;
}
Decoder::EStatus YDecoder::Check()
{
m_lCalculatedCRC ^= 0xFFFFFFFF;
@@ -379,7 +286,7 @@ void UDecoder::Clear()
#define UU_DECODE_CHAR(c) (c == '`' ? 0 : (((c) - ' ') & 077))
unsigned int UDecoder::DecodeBuffer(char* buffer, int len)
int UDecoder::DecodeBuffer(char* buffer, int len)
{
if (!m_bBody)
{
@@ -446,22 +353,12 @@ unsigned int UDecoder::DecodeBuffer(char* buffer, int len)
}
}
return (unsigned int)(optr - buffer);
return optr - buffer;
}
return 0;
}
bool UDecoder::Write(char* buffer, int len, FILE* outfile)
{
unsigned int wcnt = DecodeBuffer(buffer, len);
if (wcnt > 0)
{
fwrite(buffer, 1, wcnt, outfile);
}
return true;
}
Decoder::EStatus UDecoder::Check()
{
if (!m_bBody)


@@ -1,8 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2008 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -50,8 +49,6 @@ public:
static const char* FormatNames[];
protected:
const char* m_szSrcFilename;
const char* m_szDestFilename;
char* m_szArticleFilename;
public:
@@ -59,9 +56,7 @@ public:
virtual ~Decoder();
virtual EStatus Check() = 0;
virtual void Clear();
virtual bool Write(char* buffer, int len, FILE* outfile) = 0;
void SetSrcFilename(const char* szSrcFilename) { m_szSrcFilename = szSrcFilename; }
void SetDestFilename(const char* szDestFilename) { m_szDestFilename = szDestFilename; }
virtual int DecodeBuffer(char* buffer, int len) = 0;
const char* GetArticleFilename() { return m_szArticleFilename; }
static EFormat DetectFormat(const char* buffer, int len);
};
@@ -69,7 +64,6 @@ public:
class YDecoder: public Decoder
{
protected:
static unsigned int crc_tab[256];
bool m_bBegin;
bool m_bPart;
bool m_bBody;
@@ -77,28 +71,23 @@ protected:
bool m_bCrc;
unsigned long m_lExpectedCRC;
unsigned long m_lCalculatedCRC;
unsigned long m_iBegin;
unsigned long m_iEnd;
unsigned long m_iSize;
unsigned long m_iEndSize;
bool m_bAutoSeek;
bool m_bNeedSetPos;
long long m_iBegin;
long long m_iEnd;
long long m_iSize;
long long m_iEndSize;
bool m_bCrcCheck;
unsigned int DecodeBuffer(char* buffer);
static void crc32gentab();
unsigned long crc32m(unsigned long startCrc, unsigned char *block, unsigned int length);
public:
YDecoder();
virtual EStatus Check();
virtual void Clear();
virtual bool Write(char* buffer, int len, FILE* outfile);
void SetAutoSeek(bool bAutoSeek) { m_bAutoSeek = m_bNeedSetPos = bAutoSeek; }
virtual int DecodeBuffer(char* buffer, int len);
void SetCrcCheck(bool bCrcCheck) { m_bCrcCheck = bCrcCheck; }
static void Init();
static void Final();
long long GetBegin() { return m_iBegin; }
long long GetEnd() { return m_iEnd; }
long long GetSize() { return m_iSize; }
unsigned long GetExpectedCrc() { return m_lExpectedCRC; }
unsigned long GetCalculatedCrc() { return m_lCalculatedCRC; }
};
class UDecoder: public Decoder
@@ -107,13 +96,11 @@ private:
bool m_bBody;
bool m_bEnd;
unsigned int DecodeBuffer(char* buffer, int len);
public:
UDecoder();
virtual EStatus Check();
virtual void Clear();
virtual bool Write(char* buffer, int len, FILE* outfile);
virtual int DecodeBuffer(char* buffer, int len);
};
#endif
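A minimal usage sketch of the reworked decoder interface (illustrative only; the caller shown here is assumed and not part of this diff). DecodeBuffer() now decodes in place and returns the number of decoded bytes, while writing is left to the caller, since the Write() methods were removed:

    // Assumes the Decoder.h declared above; error handling omitted.
    bool DecodeAndWrite(YDecoder& decoder, char* lineBuf, int lineLen, FILE* outFile)
    {
        int decodedLen = decoder.DecodeBuffer(lineBuf, lineLen);
        if (decodedLen > 0)
        {
            // header/trailer lines decode to zero bytes and are simply skipped
            return fwrite(lineBuf, 1, decodedLen, outFile) == (size_t)decodedLen;
        }
        return true;
    }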


@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -42,7 +42,6 @@
#include "nzbget.h"
#include "ServerPool.h"
#include "Log.h"
static const int CONNECTION_HOLD_SECODNS = 5;
@@ -59,12 +58,16 @@ ServerPool::ServerPool()
m_iMaxNormLevel = 0;
m_iTimeout = 60;
m_iGeneration = 0;
g_pLog->RegisterDebuggable(this);
}
ServerPool::~ ServerPool()
{
debug("Destroying ServerPool");
g_pLog->UnregisterDebuggable(this);
m_Levels.clear();
for (Servers::iterator it = m_Servers.begin(); it != m_Servers.end(); it++)
@@ -327,34 +330,33 @@ void ServerPool::Changed()
void ServerPool::LogDebugInfo()
{
debug(" ServerPool");
debug(" ----------------");
info(" ---------- ServerPool");
debug(" Max-Level: %i", m_iMaxNormLevel);
info(" Max-Level: %i", m_iMaxNormLevel);
m_mutexConnections.Lock();
debug(" Servers: %i", m_Servers.size());
info(" Servers: %i", m_Servers.size());
for (Servers::iterator it = m_Servers.begin(); it != m_Servers.end(); it++)
{
NewsServer* pNewsServer = *it;
debug(" %i) %s (%s): Level=%i, NormLevel=%i", pNewsServer->GetID(), pNewsServer->GetName(),
info(" %i) %s (%s): Level=%i, NormLevel=%i", pNewsServer->GetID(), pNewsServer->GetName(),
pNewsServer->GetHost(), pNewsServer->GetLevel(), pNewsServer->GetNormLevel());
}
debug(" Levels: %i", m_Levels.size());
info(" Levels: %i", m_Levels.size());
int index = 0;
for (Levels::iterator it = m_Levels.begin(); it != m_Levels.end(); it++, index++)
{
int iSize = *it;
debug(" %i: Size=%i", index, iSize);
info(" %i: Free connections=%i", index, iSize);
}
debug(" Connections: %i", m_Connections.size());
info(" Connections: %i", m_Connections.size());
for (Connections::iterator it = m_Connections.begin(); it != m_Connections.end(); it++)
{
PooledConnection* pConnection = *it;
debug(" %i) %s (%s): Level=%i, NormLevel=%i, InUse:%i", pConnection->GetNewsServer()->GetID(),
info(" %i) %s (%s): Level=%i, NormLevel=%i, InUse:%i", pConnection->GetNewsServer()->GetID(),
pConnection->GetNewsServer()->GetName(), pConnection->GetNewsServer()->GetHost(),
pConnection->GetNewsServer()->GetLevel(), pConnection->GetNewsServer()->GetNormLevel(),
(int)pConnection->GetInUse());


@@ -30,11 +30,12 @@
#include <vector>
#include <time.h>
#include "Log.h"
#include "Thread.h"
#include "NewsServer.h"
#include "NNTPConnection.h"
class ServerPool
class ServerPool : public Debuggable
{
private:
class PooledConnection : public NNTPConnection
@@ -65,6 +66,9 @@ private:
void NormalizeLevels();
static bool CompareServers(NewsServer* pServer1, NewsServer* pServer2);
protected:
virtual void LogDebugInfo();
public:
ServerPool();
~ServerPool();
@@ -78,8 +82,6 @@ public:
void CloseUnusedConnections();
void Changed();
int GetGeneration() { return m_iGeneration; }
void LogDebugInfo();
};
#endif

daemon/nntp/StatMeter.cpp (new file, 546 lines)

@@ -0,0 +1,546 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef WIN32
#include "win32.h"
#endif
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include "nzbget.h"
#include "StatMeter.h"
#include "Options.h"
#include "ServerPool.h"
#include "DiskState.h"
extern ServerPool* g_pServerPool;
extern Options* g_pOptions;
extern DiskState* g_pDiskState;
static const int DAYS_UP_TO_2013_JAN_1 = 15706;
static const int DAYS_IN_TWENTY_YEARS = 366*20;
ServerVolume::ServerVolume()
{
m_BytesPerSeconds.resize(60);
m_BytesPerMinutes.resize(60);
m_BytesPerHours.resize(24);
m_BytesPerDays.resize(0);
m_iFirstDay = 0;
m_tDataTime = 0;
m_lTotalBytes = 0;
m_lCustomBytes = 0;
m_tCustomTime = time(NULL);
m_iSecSlot = 0;
m_iMinSlot = 0;
m_iHourSlot = 0;
m_iDaySlot = 0;
}
void ServerVolume::CalcSlots(time_t tLocCurTime)
{
m_iSecSlot = (int)tLocCurTime % 60;
m_iMinSlot = ((int)tLocCurTime / 60) % 60;
m_iHourSlot = ((int)tLocCurTime % 86400) / 3600;
int iDaysSince1970 = (int)tLocCurTime / 86400;
m_iDaySlot = iDaysSince1970 - DAYS_UP_TO_2013_JAN_1 + 1;
if (0 <= m_iDaySlot && m_iDaySlot < DAYS_IN_TWENTY_YEARS)
{
int iCurDay = iDaysSince1970;
if (m_iFirstDay == 0 || m_iFirstDay > iCurDay)
{
m_iFirstDay = iCurDay;
}
m_iDaySlot = iCurDay - m_iFirstDay;
if (m_iDaySlot + 1 > (int)m_BytesPerDays.size())
{
m_BytesPerDays.resize(m_iDaySlot + 1);
}
}
else
{
m_iDaySlot = -1;
}
}
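// Worked example (illustrative, not part of the original file): for
// tLocCurTime = 1400000000 (2014-05-13 16:53:20 UTC) CalcSlots() computes
//   m_iSecSlot  = 1400000000 % 60             = 20
//   m_iMinSlot  = (1400000000 / 60) % 60      = 53
//   m_iHourSlot = (1400000000 % 86400) / 3600 = 16
//   iDaysSince1970 = 1400000000 / 86400       = 16203 (past DAYS_UP_TO_2013_JAN_1)
// On the first call m_iFirstDay becomes 16203, so m_iDaySlot ends up as 0 and
// m_BytesPerDays is resized to hold exactly one entry for the current day.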
void ServerVolume::AddData(int iBytes)
{
time_t tCurTime = time(NULL);
time_t tLocCurTime = tCurTime + g_pOptions->GetLocalTimeOffset();
time_t tLocDataTime = m_tDataTime + g_pOptions->GetLocalTimeOffset();
int iLastMinSlot = m_iMinSlot;
int iLastHourSlot = m_iHourSlot;
CalcSlots(tLocCurTime);
if (tLocCurTime != tLocDataTime)
{
// clear seconds/minutes/hours slots if necessary
// also handle the backwards changes of system clock
int iTotalDelta = (int)(tLocCurTime - tLocDataTime);
int iDeltaSign = iTotalDelta >= 0 ? 1 : -1;
iTotalDelta = abs(iTotalDelta);
int iSecDelta = iTotalDelta;
if (iDeltaSign < 0) iSecDelta++;
if (iSecDelta >= 60) iSecDelta = 60;
for (int i = 0; i < iSecDelta; i++)
{
int iNulSlot = m_iSecSlot - i * iDeltaSign;
if (iNulSlot < 0) iNulSlot += 60;
if (iNulSlot >= 60) iNulSlot -= 60;
m_BytesPerSeconds[iNulSlot] = 0;
}
int iMinDelta = iTotalDelta / 60;
if (iDeltaSign < 0) iMinDelta++;
if (abs(iMinDelta) >= 60) iMinDelta = 60;
if (iMinDelta == 0 && m_iMinSlot != iLastMinSlot) iMinDelta = 1;
for (int i = 0; i < iMinDelta; i++)
{
int iNulSlot = m_iMinSlot - i * iDeltaSign;
if (iNulSlot < 0) iNulSlot += 60;
if (iNulSlot >= 60) iNulSlot -= 60;
m_BytesPerMinutes[iNulSlot] = 0;
}
int iHourDelta = iTotalDelta / (60 * 60);
if (iDeltaSign < 0) iHourDelta++;
if (iHourDelta >= 24) iHourDelta = 24;
if (iHourDelta == 0 && m_iHourSlot != iLastHourSlot) iHourDelta = 1;
for (int i = 0; i < iHourDelta; i++)
{
int iNulSlot = m_iHourSlot - i * iDeltaSign;
if (iNulSlot < 0) iNulSlot += 24;
if (iNulSlot >= 24) iNulSlot -= 24;
m_BytesPerHours[iNulSlot] = 0;
}
}
// add bytes to every slot
m_BytesPerSeconds[m_iSecSlot] += iBytes;
m_BytesPerMinutes[m_iMinSlot] += iBytes;
m_BytesPerHours[m_iHourSlot] += iBytes;
if (m_iDaySlot >= 0)
{
m_BytesPerDays[m_iDaySlot] += iBytes;
}
m_lTotalBytes += iBytes;
m_lCustomBytes += iBytes;
m_tDataTime = tCurTime;
}
void ServerVolume::ResetCustom()
{
m_lCustomBytes = 0;
m_tCustomTime = time(NULL);
}
void ServerVolume::LogDebugInfo()
{
info(" ---------- ServerVolume");
char szSec[4000];
szSec[0] = '\0';
for (int i = 0; i < 60; i++)
{
char szNum[20];
snprintf(szNum, 20, "[%i]=%lli ", i, m_BytesPerSeconds[i]);
strncat(szSec, szNum, 4000);
}
info("Secs: %s", szSec);
szSec[0] = '\0';
for (int i = 0; i < 60; i++)
{
char szNum[20];
snprintf(szNum, 20, "[%i]=%lli ", i, m_BytesPerMinutes[i]);
strncat(szSec, szNum, 4000);
}
info("Mins: %s", szSec);
szSec[0] = '\0';
for (int i = 0; i < 24; i++)
{
char szNum[20];
snprintf(szNum, 20, "[%i]=%lli ", i, m_BytesPerHours[i]);
strncat(szSec, szNum, 4000);
}
info("Hours: %s", szSec);
szSec[0] = '\0';
for (int i = 0; i < (int)m_BytesPerDays.size(); i++)
{
char szNum[20];
snprintf(szNum, 20, "[%i]=%lli ", m_iFirstDay + i, m_BytesPerDays[i]);
strncat(szSec, szNum, 4000);
}
info("Days: %s", szSec);
}
StatMeter::StatMeter()
{
debug("Creating StatMeter");
ResetSpeedStat();
m_iAllBytes = 0;
m_tStartDownload = 0;
m_tPausedFrom = 0;
m_bStandBy = true;
m_tStartServer = 0;
m_tLastCheck = 0;
m_tLastTimeOffset = 0;
m_bStatChanged = false;
g_pLog->RegisterDebuggable(this);
}
StatMeter::~StatMeter()
{
debug("Destroying StatMeter");
// Cleanup
g_pLog->UnregisterDebuggable(this);
for (ServerVolumes::iterator it = m_ServerVolumes.begin(); it != m_ServerVolumes.end(); it++)
{
delete *it;
}
debug("StatMeter destroyed");
}
void StatMeter::Init()
{
m_tStartServer = time(NULL);
m_tLastCheck = m_tStartServer;
AdjustTimeOffset();
m_ServerVolumes.resize(1 + g_pServerPool->GetServers()->size());
m_ServerVolumes[0] = new ServerVolume();
for (Servers::iterator it = g_pServerPool->GetServers()->begin(); it != g_pServerPool->GetServers()->end(); it++)
{
NewsServer* pServer = *it;
m_ServerVolumes[pServer->GetID()] = new ServerVolume();
}
}
void StatMeter::AdjustTimeOffset()
{
time_t tUtcTime = time(NULL);
tm tmSplittedTime;
gmtime_r(&tUtcTime, &tmSplittedTime);
tmSplittedTime.tm_isdst = -1;
time_t tLocTime = mktime(&tmSplittedTime);
time_t tLocalTimeDelta = tUtcTime - tLocTime;
g_pOptions->SetLocalTimeOffset((int)tLocalTimeDelta + g_pOptions->GetTimeCorrection());
m_tLastTimeOffset = tUtcTime;
debug("UTC delta: %i (%i+%i)", g_pOptions->GetLocalTimeOffset(), (int)tLocalTimeDelta, g_pOptions->GetTimeCorrection());
}
/*
* Called once per second.
* - detect large step changes of system time and adjust statistics;
* - save volume stats (if changed).
*/
void StatMeter::IntervalCheck()
{
time_t m_tCurTime = time(NULL);
time_t tDiff = m_tCurTime - m_tLastCheck;
if (tDiff > 60 || tDiff < 0)
{
m_tStartServer += tDiff + 1; // "1" because the method is called once per second
if (m_tStartDownload != 0 && !m_bStandBy)
{
m_tStartDownload += tDiff + 1;
}
AdjustTimeOffset();
}
else if (m_tLastTimeOffset > m_tCurTime ||
m_tCurTime - m_tLastTimeOffset > 60 * 60 * 3 ||
(m_tCurTime - m_tLastTimeOffset > 60 && !m_bStandBy))
{
// checking time zone settings may prevent the device from entering sleep/hibernate mode
// check every minute if not in standby
// check at least every 3 hours even in standby
AdjustTimeOffset();
}
m_tLastCheck = m_tCurTime;
if (m_bStatChanged)
{
Save();
}
}
void StatMeter::EnterLeaveStandBy(bool bEnter)
{
m_mutexStat.Lock();
m_bStandBy = bEnter;
if (bEnter)
{
m_tPausedFrom = time(NULL);
}
else
{
if (m_tStartDownload == 0)
{
m_tStartDownload = time(NULL);
}
else
{
m_tStartDownload += time(NULL) - m_tPausedFrom;
}
m_tPausedFrom = 0;
ResetSpeedStat();
}
m_mutexStat.Unlock();
}
void StatMeter::CalcTotalStat(int* iUpTimeSec, int* iDnTimeSec, long long* iAllBytes, bool* bStandBy)
{
m_mutexStat.Lock();
if (m_tStartServer > 0)
{
*iUpTimeSec = (int)(time(NULL) - m_tStartServer);
}
else
{
*iUpTimeSec = 0;
}
*bStandBy = m_bStandBy;
if (m_bStandBy)
{
*iDnTimeSec = (int)(m_tPausedFrom - m_tStartDownload);
}
else
{
*iDnTimeSec = (int)(time(NULL) - m_tStartDownload);
}
*iAllBytes = m_iAllBytes;
m_mutexStat.Unlock();
}
/*
* NOTE: see note to "AddSpeedReading"
*/
int StatMeter::CalcCurrentDownloadSpeed()
{
if (m_bStandBy)
{
return 0;
}
int iTimeDiff = (int)time(NULL) - m_iSpeedStartTime * SPEEDMETER_SLOTSIZE;
if (iTimeDiff == 0)
{
return 0;
}
return (int)(m_iSpeedTotalBytes / iTimeDiff);
}
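// Worked example (illustrative, not part of the original file): with
// SPEEDMETER_SLOTSIZE = 1 each slot covers one second. If m_iSpeedStartTime
// lies 10 seconds in the past and m_iSpeedTotalBytes is 25000000, the method
// reports 25000000 / 10 = 2500000 bytes/s, i.e. an average over a sliding
// window of at most SPEEDMETER_SLOTS (30) seconds.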
void StatMeter::AddSpeedReading(int iBytes)
{
time_t tCurTime = time(NULL);
int iNowSlot = (int)tCurTime / SPEEDMETER_SLOTSIZE;
if (g_pOptions->GetAccurateRate())
{
#ifdef HAVE_SPINLOCK
m_spinlockSpeed.Lock();
#else
m_mutexSpeed.Lock();
#endif
}
while (iNowSlot > m_iSpeedTime[m_iSpeedBytesIndex])
{
//record bytes in next slot
m_iSpeedBytesIndex++;
if (m_iSpeedBytesIndex >= SPEEDMETER_SLOTS)
{
m_iSpeedBytesIndex = 0;
}
//Adjust counters with outgoing information.
m_iSpeedTotalBytes = m_iSpeedTotalBytes - (long long)m_iSpeedBytes[m_iSpeedBytesIndex];
//Note we should really use the start time of the next slot
//but it's easier to just use the outgoing slot time. This
//will result in a small error.
m_iSpeedStartTime = m_iSpeedTime[m_iSpeedBytesIndex];
//Now reset.
m_iSpeedBytes[m_iSpeedBytesIndex] = 0;
m_iSpeedTime[m_iSpeedBytesIndex] = iNowSlot;
}
// Once per second recalculate summary field "m_iSpeedTotalBytes" to recover from possible synchronisation errors
if (tCurTime > m_tSpeedCorrection)
{
long long iSpeedTotalBytes = 0;
for (int i = 0; i < SPEEDMETER_SLOTS; i++)
{
iSpeedTotalBytes += m_iSpeedBytes[i];
}
m_iSpeedTotalBytes = iSpeedTotalBytes;
m_tSpeedCorrection = tCurTime;
}
if (m_iSpeedTotalBytes == 0)
{
m_iSpeedStartTime = iNowSlot;
}
m_iSpeedBytes[m_iSpeedBytesIndex] += iBytes;
m_iSpeedTotalBytes += iBytes;
m_iAllBytes += iBytes;
if (g_pOptions->GetAccurateRate())
{
#ifdef HAVE_SPINLOCK
m_spinlockSpeed.Unlock();
#else
m_mutexSpeed.Unlock();
#endif
}
}
void StatMeter::ResetSpeedStat()
{
time_t tCurTime = time(NULL);
m_iSpeedStartTime = (int)tCurTime / SPEEDMETER_SLOTSIZE;
for (int i = 0; i < SPEEDMETER_SLOTS; i++)
{
m_iSpeedBytes[i] = 0;
m_iSpeedTime[i] = m_iSpeedStartTime;
}
m_iSpeedBytesIndex = 0;
m_iSpeedTotalBytes = 0;
m_tSpeedCorrection = tCurTime;
}
void StatMeter::LogDebugInfo()
{
info(" ---------- SpeedMeter");
float fSpeed = (float)(CalcCurrentDownloadSpeed() / 1024.0);
int iTimeDiff = (int)time(NULL) - m_iSpeedStartTime * SPEEDMETER_SLOTSIZE;
info(" Speed: %f", fSpeed);
info(" SpeedStartTime: %i", m_iSpeedStartTime);
info(" SpeedTotalBytes: %i", m_iSpeedTotalBytes);
info(" SpeedBytesIndex: %i", m_iSpeedBytesIndex);
info(" AllBytes: %i", m_iAllBytes);
info(" Time: %i", (int)time(NULL));
info(" TimeDiff: %i", iTimeDiff);
for (int i=0; i < SPEEDMETER_SLOTS; i++)
{
info(" Bytes[%i]: %i, Time[%i]: %i", i, m_iSpeedBytes[i], i, m_iSpeedTime[i]);
}
m_mutexVolume.Lock();
int index = 0;
for (ServerVolumes::iterator it = m_ServerVolumes.begin(); it != m_ServerVolumes.end(); it++, index++)
{
ServerVolume* pServerVolume = *it;
info(" ServerVolume %i", index);
pServerVolume->LogDebugInfo();
}
m_mutexVolume.Unlock();
}
void StatMeter::AddServerData(int iBytes, int iServerID)
{
if (iBytes == 0)
{
return;
}
m_mutexVolume.Lock();
m_ServerVolumes[0]->AddData(iBytes);
m_ServerVolumes[iServerID]->AddData(iBytes);
m_bStatChanged = true;
m_mutexVolume.Unlock();
}
ServerVolumes* StatMeter::LockServerVolumes()
{
m_mutexVolume.Lock();
// update slots
for (ServerVolumes::iterator it = m_ServerVolumes.begin(); it != m_ServerVolumes.end(); it++)
{
ServerVolume* pServerVolume = *it;
pServerVolume->AddData(0);
}
return &m_ServerVolumes;
}
void StatMeter::UnlockServerVolumes()
{
m_mutexVolume.Unlock();
}
void StatMeter::Save()
{
if (!g_pOptions->GetServerMode())
{
return;
}
m_mutexVolume.Lock();
g_pDiskState->SaveStats(g_pServerPool->GetServers(), &m_ServerVolumes);
m_bStatChanged = false;
m_mutexVolume.Unlock();
}
bool StatMeter::Load(bool* pPerfectServerMatch)
{
m_mutexVolume.Lock();
bool bOK = g_pDiskState->LoadStats(g_pServerPool->GetServers(), &m_ServerVolumes, pPerfectServerMatch);
for (ServerVolumes::iterator it = m_ServerVolumes.begin(); it != m_ServerVolumes.end(); it++)
{
ServerVolume* pServerVolume = *it;
pServerVolume->CalcSlots(pServerVolume->GetDataTime() + g_pOptions->GetLocalTimeOffset());
}
m_mutexVolume.Unlock();
return bOK;
}

daemon/nntp/StatMeter.h (new file, 140 lines)

@@ -0,0 +1,140 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef STATMETER_H
#define STATMETER_H
#include <vector>
#include <time.h>
#include "Log.h"
#include "Thread.h"
class ServerVolume
{
public:
typedef std::vector<long long> VolumeArray;
private:
VolumeArray m_BytesPerSeconds;
VolumeArray m_BytesPerMinutes;
VolumeArray m_BytesPerHours;
VolumeArray m_BytesPerDays;
int m_iFirstDay;
long long m_lTotalBytes;
long long m_lCustomBytes;
time_t m_tDataTime;
time_t m_tCustomTime;
int m_iSecSlot;
int m_iMinSlot;
int m_iHourSlot;
int m_iDaySlot;
public:
ServerVolume();
VolumeArray* BytesPerSeconds() { return &m_BytesPerSeconds; }
VolumeArray* BytesPerMinutes() { return &m_BytesPerMinutes; }
VolumeArray* BytesPerHours() { return &m_BytesPerHours; }
VolumeArray* BytesPerDays() { return &m_BytesPerDays; }
void SetFirstDay(int iFirstDay) { m_iFirstDay = iFirstDay; }
int GetFirstDay() { return m_iFirstDay; }
void SetTotalBytes(long long lTotalBytes) { m_lTotalBytes = lTotalBytes; }
long long GetTotalBytes() { return m_lTotalBytes; }
void SetCustomBytes(long long lCustomBytes) { m_lCustomBytes = lCustomBytes; }
long long GetCustomBytes() { return m_lCustomBytes; }
int GetSecSlot() { return m_iSecSlot; }
int GetMinSlot() { return m_iMinSlot; }
int GetHourSlot() { return m_iHourSlot; }
int GetDaySlot() { return m_iDaySlot; }
time_t GetDataTime() { return m_tDataTime; }
void SetDataTime(time_t tDataTime) { m_tDataTime = tDataTime; }
time_t GetCustomTime() { return m_tCustomTime; }
void SetCustomTime(time_t tCustomTime) { m_tCustomTime = tCustomTime; }
void AddData(int iBytes);
void CalcSlots(time_t tLocCurTime);
void ResetCustom();
void LogDebugInfo();
};
typedef std::vector<ServerVolume*> ServerVolumes;
class StatMeter : public Debuggable
{
private:
// speed meter
static const int SPEEDMETER_SLOTS = 30;
static const int SPEEDMETER_SLOTSIZE = 1; //Split elapsed time into this number of secs.
int m_iSpeedBytes[SPEEDMETER_SLOTS];
long long m_iSpeedTotalBytes;
int m_iSpeedTime[SPEEDMETER_SLOTS];
int m_iSpeedStartTime;
time_t m_tSpeedCorrection;
int m_iSpeedBytesIndex;
#ifdef HAVE_SPINLOCK
SpinLock m_spinlockSpeed;
#else
Mutex m_mutexSpeed;
#endif
// time
long long m_iAllBytes;
time_t m_tStartServer;
time_t m_tLastCheck;
time_t m_tLastTimeOffset;
time_t m_tStartDownload;
time_t m_tPausedFrom;
bool m_bStandBy;
Mutex m_mutexStat;
// data volume
bool m_bStatChanged;
ServerVolumes m_ServerVolumes;
Mutex m_mutexVolume;
void ResetSpeedStat();
void AdjustTimeOffset();
protected:
virtual void LogDebugInfo();
public:
StatMeter();
~StatMeter();
void Init();
int CalcCurrentDownloadSpeed();
void AddSpeedReading(int iBytes);
void AddServerData(int iBytes, int iServerID);
void CalcTotalStat(int* iUpTimeSec, int* iDnTimeSec, long long* iAllBytes, bool* bStandBy);
bool GetStandBy() { return m_bStandBy; }
void IntervalCheck();
void EnterLeaveStandBy(bool bEnter);
ServerVolumes* LockServerVolumes();
void UnlockServerVolumes();
void Save();
bool Load(bool* pPerfectServerMatch);
};
#endif
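A short usage sketch of the new StatMeter (illustrative; the calling code and the g_pStatMeter global are assumptions, not part of this diff):

    // Assumes the StatMeter.h declared above.
    extern StatMeter* g_pStatMeter; // hypothetical global, mirroring the other g_p* singletons

    void OnArticleData(int len, int serverID)
    {
        g_pStatMeter->AddSpeedReading(len);         // feeds the per-second speed window
        g_pStatMeter->AddServerData(len, serverID); // accumulates per-server volume counters
    }

    void OncePerSecond()
    {
        g_pStatMeter->IntervalCheck(); // detects clock jumps and saves changed stats
        int speedBytesPerSec = g_pStatMeter->CalcCurrentDownloadSpeed();
        (void)speedBytesPerSec;
    }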


File diff suppressed because it is too large.


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -29,6 +29,7 @@
#ifndef DISABLE_PARCHECK
#include <deque>
#include <vector>
#include <string>
#include "Thread.h"
@@ -53,8 +54,43 @@ public:
ptVerifyingRepaired,
};
enum EFileStatus
{
fsUnknown,
fsSuccess,
fsPartial,
fsFailure
};
class Segment
{
private:
bool m_bSuccess;
long long m_iOffset;
int m_iSize;
unsigned long m_lCrc;
public:
Segment(bool bSuccess, long long iOffset, int iSize, unsigned long lCrc);
bool GetSuccess() { return m_bSuccess; }
long long GetOffset() { return m_iOffset; }
int GetSize() { return m_iSize; }
unsigned long GetCrc() { return m_lCrc; }
};
typedef std::deque<Segment*> SegmentListBase;
class SegmentList : public SegmentListBase
{
public:
~SegmentList();
};
typedef std::deque<char*> FileList;
typedef std::deque<void*> SourceList;
typedef std::vector<bool> ValidBlocks;
friend class Repairer;
private:
char* m_szInfoName;
@@ -63,7 +99,8 @@ private:
const char* m_szParFilename;
EStatus m_eStatus;
EStage m_eStage;
void* m_pRepairer; // declared as void* to prevent the including of libpar2-headers into this header-file
// declared as void* to prevent the including of libpar2-headers into this header-file
void* m_pRepairer;
char* m_szErrMsg;
FileList m_QueuedParFiles;
Mutex m_mutexQueuedParFiles;
@@ -78,22 +115,35 @@ private:
int m_iStageProgress;
bool m_bCancelled;
SourceList m_sourceFiles;
std::string m_lastFilename;
bool m_bHasDamagedFiles;
bool m_bParQuick;
bool m_bForceRepair;
void Cleanup();
EStatus RunParCheckAll();
EStatus RunParCheck(const char* szParFilename);
int PreProcessPar();
bool LoadMainParBak();
int ProcessMorePars();
bool LoadMorePars();
bool CheckSplittedFragments();
bool AddSplittedFragments(const char* szFilename);
bool AddSplittedFragments();
bool AddMissingFiles();
bool IsProcessedFile(const char* szFilename);
void WriteBrokenLog(EStatus eStatus);
void SaveSourceList();
void DeleteLeftovers();
void signal_filename(std::string str);
void signal_progress(double progress);
void signal_done(std::string str, int available, int total);
// declared as void* to prevent the including of libpar2-headers into this header-file
// DiskFile* pDiskfile, Par2RepairerSourceFile* pSourcefile
EFileStatus VerifyDataFile(void* pDiskfile, void* pSourcefile, int* pAvailableBlocks);
bool VerifySuccessDataFile(void* pDiskfile, void* pSourcefile, unsigned long lDownloadCrc);
bool VerifyPartialDataFile(void* pDiskfile, void* pSourcefile, SegmentList* pSegments, ValidBlocks* pValidBlocks);
bool SmartCalcFileRangeCrc(FILE* pFile, long long lStart, long long lEnd, SegmentList* pSegments,
unsigned long* pDownloadCrc);
bool DumbCalcFileRangeCrc(FILE* pFile, long long lStart, long long lEnd, unsigned long* pDownloadCrc);
protected:
/**
@@ -105,6 +155,9 @@ protected:
virtual void UpdateProgress() {}
virtual void Completed() {}
virtual void PrintMessage(Message::EKind eKind, const char* szFormat, ...) {}
virtual void RegisterParredFile(const char* szFilename) {}
virtual bool IsParredFile(const char* szFilename) { return false; }
virtual EFileStatus FindFileCrc(const char* szFilename, unsigned long* lCrc, SegmentList* pSegments) { return fsUnknown; }
EStage GetStage() { return m_eStage; }
const char* GetProgressLabel() { return m_szProgressLabel; }
int GetFileProgress() { return m_iFileProgress; }
@@ -119,6 +172,10 @@ public:
const char* GetInfoName() { return m_szInfoName; }
void SetInfoName(const char* szInfoName);
void SetNZBName(const char* szNZBName);
void SetParQuick(bool bParQuick) { m_bParQuick = bParQuick; }
bool GetParQuick() { return m_bParQuick; }
void SetForceRepair(bool bForceRepair) { m_bForceRepair = bForceRepair; }
bool GetForceRepair() { return m_bForceRepair; }
EStatus GetStatus() { return m_eStatus; }
void AddParFile(const char* szParFilename);
void QueueChanged();


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -45,12 +45,10 @@
#include "nzbget.h"
#include "ParCoordinator.h"
#include "Options.h"
#include "DiskState.h"
#include "Log.h"
#include "Util.h"
#include "QueueCoordinator.h"
#include "DiskState.h"
extern QueueCoordinator* g_pQueueCoordinator;
extern Options* g_pOptions;
extern DiskState* g_pDiskState;
@@ -77,6 +75,77 @@ void ParCoordinator::PostParChecker::PrintMessage(Message::EKind eKind, const ch
m_pOwner->PrintMessage(m_pPostInfo, eKind, "%s", szText);
}
void ParCoordinator::PostParChecker::RegisterParredFile(const char* szFilename)
{
m_pPostInfo->GetParredFiles()->push_back(strdup(szFilename));
}
bool ParCoordinator::PostParChecker::IsParredFile(const char* szFilename)
{
for (PostInfo::ParredFiles::iterator it = m_pPostInfo->GetParredFiles()->begin(); it != m_pPostInfo->GetParredFiles()->end(); it++)
{
const char* szParredFile = *it;
if (!strcasecmp(szParredFile, szFilename))
{
return true;
}
}
return false;
}
ParChecker::EFileStatus ParCoordinator::PostParChecker::FindFileCrc(const char* szFilename,
unsigned long* lCrc, SegmentList* pSegments)
{
CompletedFile* pCompletedFile = NULL;
for (CompletedFiles::iterator it = m_pPostInfo->GetNZBInfo()->GetCompletedFiles()->begin(); it != m_pPostInfo->GetNZBInfo()->GetCompletedFiles()->end(); it++)
{
CompletedFile* pCompletedFile2 = *it;
if (!strcasecmp(pCompletedFile2->GetFileName(), szFilename))
{
pCompletedFile = pCompletedFile2;
break;
}
}
if (!pCompletedFile)
{
return ParChecker::fsUnknown;
}
debug("Found completed file: %s, CRC: %.8x, Status: %i", Util::BaseFileName(pCompletedFile->GetFileName()), pCompletedFile->GetCrc(), (int)pCompletedFile->GetStatus());
*lCrc = pCompletedFile->GetCrc();
if (pCompletedFile->GetStatus() == CompletedFile::cfPartial && pCompletedFile->GetID() > 0 &&
!m_pPostInfo->GetNZBInfo()->GetReprocess())
{
FileInfo* pTmpFileInfo = new FileInfo(pCompletedFile->GetID());
if (!g_pDiskState->LoadFileState(pTmpFileInfo, NULL, true))
{
delete pTmpFileInfo;
return ParChecker::fsUnknown;
}
for (FileInfo::Articles::iterator it = pTmpFileInfo->GetArticles()->begin(); it != pTmpFileInfo->GetArticles()->end(); it++)
{
ArticleInfo* pa = *it;
ParChecker::Segment* pSegment = new Segment(pa->GetStatus() == ArticleInfo::aiFinished,
pa->GetSegmentOffset(), pa->GetSegmentSize(), pa->GetCrc());
pSegments->push_back(pSegment);
}
delete pTmpFileInfo;
}
return pCompletedFile->GetStatus() == CompletedFile::cfSuccess ? ParChecker::fsSuccess :
pCompletedFile->GetStatus() == CompletedFile::cfFailure &&
!m_pPostInfo->GetNZBInfo()->GetReprocess() ? ParChecker::fsFailure :
pCompletedFile->GetStatus() == CompletedFile::cfPartial && pSegments->size() > 0 &&
!m_pPostInfo->GetNZBInfo()->GetReprocess()? ParChecker::fsPartial :
ParChecker::fsUnknown;
}
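// Equivalent, easier to read form of the chained conditional above
// (illustrative only, behaviour unchanged):
//   if (status == CompletedFile::cfSuccess)
//       return ParChecker::fsSuccess;
//   if (status == CompletedFile::cfFailure && !reprocess)
//       return ParChecker::fsFailure;
//   if (status == CompletedFile::cfPartial && !pSegments->empty() && !reprocess)
//       return ParChecker::fsPartial;
//   return ParChecker::fsUnknown;
// where status = pCompletedFile->GetStatus() and
// reprocess = m_pPostInfo->GetNZBInfo()->GetReprocess().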
void ParCoordinator::PostParRenamer::UpdateProgress()
{
m_pOwner->UpdateParRenameProgress();
@@ -93,6 +162,28 @@ void ParCoordinator::PostParRenamer::PrintMessage(Message::EKind eKind, const ch
m_pOwner->PrintMessage(m_pPostInfo, eKind, "%s", szText);
}
void ParCoordinator::PostParRenamer::RegisterParredFile(const char* szFilename)
{
m_pPostInfo->GetParredFiles()->push_back(strdup(szFilename));
}
/**
* Update file name in the CompletedFiles-list of NZBInfo
*/
void ParCoordinator::PostParRenamer::RegisterRenamedFile(const char* szOldFilename, const char* szNewFileName)
{
for (CompletedFiles::iterator it = m_pPostInfo->GetNZBInfo()->GetCompletedFiles()->begin(); it != m_pPostInfo->GetNZBInfo()->GetCompletedFiles()->end(); it++)
{
CompletedFile* pCompletedFile = *it;
if (!strcasecmp(pCompletedFile->GetFileName(), szOldFilename))
{
pCompletedFile->SetFileName(szNewFileName);
break;
}
}
}
#endif
ParCoordinator::ParCoordinator()
@@ -139,20 +230,12 @@ void ParCoordinator::Stop()
void ParCoordinator::PausePars(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
{
debug("ParCoordinator: Pausing pars");
for (FileQueue::iterator it = pDownloadQueue->GetFileQueue()->begin(); it != pDownloadQueue->GetFileQueue()->end(); it++)
{
FileInfo* pFileInfo = *it;
if (pFileInfo->GetNZBInfo() == pNZBInfo)
{
g_pQueueCoordinator->GetQueueEditor()->LockedEditEntry(pDownloadQueue, pFileInfo->GetID(), false,
QueueEditor::eaGroupPauseExtraPars, 0, NULL);
break;
}
}
pDownloadQueue->EditEntry(pNZBInfo->GetID(),
DownloadQueue::eaGroupPauseExtraPars, 0, NULL);
}
bool ParCoordinator::FindMainPars(const char* szPath, FileList* pFileList)
bool ParCoordinator::FindMainPars(const char* szPath, ParFileList* pFileList)
{
if (pFileList)
{
@@ -172,7 +255,7 @@ bool ParCoordinator::FindMainPars(const char* szPath, FileList* pFileList)
// check if the base file already added to list
bool exists = false;
for (FileList::iterator it = pFileList->begin(); it != pFileList->end(); it++)
for (ParFileList::iterator it = pFileList->begin(); it != pFileList->end(); it++)
{
const char* filename2 = *it;
exists = SameParCollection(filename, filename2);
@@ -268,7 +351,11 @@ void ParCoordinator::StartParCheckJob(PostInfo* pPostInfo)
m_ParChecker.SetPostInfo(pPostInfo);
m_ParChecker.SetDestDir(pPostInfo->GetNZBInfo()->GetDestDir());
m_ParChecker.SetNZBName(pPostInfo->GetNZBInfo()->GetName());
m_ParChecker.PrintMessage(Message::mkInfo, "Checking pars for %s", pPostInfo->GetInfoName());
m_ParChecker.SetParTime(time(NULL));
m_ParChecker.SetDownloadSec(pPostInfo->GetNZBInfo()->GetDownloadSec());
m_ParChecker.SetParQuick(g_pOptions->GetParQuick() && !pPostInfo->GetForceParFull());
m_ParChecker.SetForceRepair(pPostInfo->GetForceRepair());
m_ParChecker.PrintMessage(Message::mkInfo, "Checking pars for %s", pPostInfo->GetNZBInfo()->GetName());
pPostInfo->SetWorking(true);
m_ParChecker.Start();
}
@@ -292,6 +379,7 @@ void ParCoordinator::StartParRenameJob(PostInfo* pPostInfo)
m_ParRenamer.SetPostInfo(pPostInfo);
m_ParRenamer.SetDestDir(szDestDir);
m_ParRenamer.SetInfoName(pPostInfo->GetNZBInfo()->GetName());
m_ParRenamer.SetDetectMissing(pPostInfo->GetNZBInfo()->GetUnpackStatus() == NZBInfo::usNone);
m_ParRenamer.PrintMessage(Message::mkInfo, "Checking renamed files for %s", pPostInfo->GetNZBInfo()->GetName());
pPostInfo->SetWorking(true);
m_ParRenamer.Start();
@@ -301,16 +389,12 @@ bool ParCoordinator::Cancel()
{
if (m_eCurrentJob == jkParCheck)
{
#ifdef HAVE_PAR2_CANCEL
if (!m_ParChecker.GetCancelled())
{
debug("Cancelling par-repair for %s", m_ParChecker.GetInfoName());
m_ParChecker.Cancel();
return true;
}
#else
warn("Cannot cancel par-repair for %s, used version of libpar2 does not support cancelling", m_ParChecker.GetInfoName());
#endif
}
else if (m_eCurrentJob == jkParRename)
{
@@ -330,19 +414,13 @@ bool ParCoordinator::Cancel()
bool ParCoordinator::AddPar(FileInfo* pFileInfo, bool bDeleted)
{
bool bSameCollection = m_ParChecker.IsRunning() &&
pFileInfo->GetNZBInfo() == m_ParChecker.GetPostInfo()->GetNZBInfo() &&
SameParCollection(pFileInfo->GetFilename(), Util::BaseFileName(m_ParChecker.GetParFilename()));
pFileInfo->GetNZBInfo() == m_ParChecker.GetPostInfo()->GetNZBInfo();
if (bSameCollection && !bDeleted)
{
char szFullFilename[1024];
snprintf(szFullFilename, 1024, "%s%c%s", pFileInfo->GetNZBInfo()->GetDestDir(), (int)PATH_SEPARATOR, pFileInfo->GetFilename());
szFullFilename[1024-1] = '\0';
m_ParChecker.AddParFile(szFullFilename);
if (g_pOptions->GetParPauseQueue())
{
PauseDownload();
}
}
else
{
@@ -353,7 +431,7 @@ bool ParCoordinator::AddPar(FileInfo* pFileInfo, bool bDeleted)
void ParCoordinator::ParCheckCompleted()
{
DownloadQueue* pDownloadQueue = g_pQueueCoordinator->LockQueue();
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
PostInfo* pPostInfo = m_ParChecker.GetPostInfo();
@@ -374,15 +452,19 @@ void ParCoordinator::ParCheckCompleted()
pPostInfo->GetNZBInfo()->SetParStatus(NZBInfo::psFailure);
}
int iWaitTime = pPostInfo->GetNZBInfo()->GetDownloadSec() - m_ParChecker.GetDownloadSec();
pPostInfo->SetStartTime(pPostInfo->GetStartTime() + (time_t)iWaitTime);
int iParSec = (int)(time(NULL) - m_ParChecker.GetParTime()) - iWaitTime;
pPostInfo->GetNZBInfo()->SetParSec(pPostInfo->GetNZBInfo()->GetParSec() + iParSec);
pPostInfo->GetNZBInfo()->SetParFull(!m_ParChecker.GetParQuick());
pPostInfo->SetWorking(false);
pPostInfo->SetStage(PostInfo::ptQueued);
if (g_pOptions->GetSaveQueue() && g_pOptions->GetServerMode())
{
g_pDiskState->SaveDownloadQueue(pDownloadQueue);
}
pDownloadQueue->Save();
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
}
/**
@@ -392,7 +474,7 @@ void ParCoordinator::ParCheckCompleted()
*/
bool ParCoordinator::RequestMorePars(NZBInfo* pNZBInfo, const char* szParFilename, int iBlockNeeded, int* pBlockFound)
{
DownloadQueue* pDownloadQueue = g_pQueueCoordinator->LockQueue();
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
Blocks blocks;
blocks.clear();
@@ -466,7 +548,7 @@ bool ParCoordinator::RequestMorePars(NZBInfo* pNZBInfo, const char* szParFilenam
}
}
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
if (pBlockFound)
{
@@ -481,11 +563,6 @@ bool ParCoordinator::RequestMorePars(NZBInfo* pNZBInfo, const char* szParFilenam
bool bOK = iBlockNeeded <= 0;
if (bOK && g_pOptions->GetParPauseQueue())
{
UnpauseDownload();
}
return bOK;
}
@@ -509,12 +586,11 @@ void ParCoordinator::FindPars(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo,
szMainBaseFilename[maxlen] = '\0';
for (char* p = szMainBaseFilename; *p; p++) *p = tolower(*p); // convert string to lowercase
for (FileQueue::iterator it = pDownloadQueue->GetFileQueue()->begin(); it != pDownloadQueue->GetFileQueue()->end(); it++)
for (FileList::iterator it = pNZBInfo->GetFileList()->begin(); it != pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
int iBlocks = 0;
if (pFileInfo->GetNZBInfo() == pNZBInfo &&
ParseParFilename(pFileInfo->GetFilename(), NULL, &iBlocks) &&
if (ParseParFilename(pFileInfo->GetFilename(), NULL, &iBlocks) &&
iBlocks > 0)
{
bool bUseFile = true;
@@ -576,7 +652,7 @@ void ParCoordinator::FindPars(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo,
void ParCoordinator::UpdateParCheckProgress()
{
g_pQueueCoordinator->LockQueue();
DownloadQueue::Lock();
PostInfo* pPostInfo = m_ParChecker.GetPostInfo();
if (m_ParChecker.GetFileProgress() == 0)
@@ -589,19 +665,22 @@ void ParCoordinator::UpdateParCheckProgress()
PostInfo::EStage eStage = StageKind[m_ParChecker.GetStage()];
time_t tCurrent = time(NULL);
if (!pPostInfo->GetStartTime())
{
pPostInfo->SetStartTime(tCurrent);
}
if (pPostInfo->GetStage() != eStage)
{
pPostInfo->SetStage(eStage);
pPostInfo->SetStageTime(tCurrent);
if (pPostInfo->GetStage() == PostInfo::ptRepairing)
{
m_ParChecker.SetRepairTime(tCurrent);
}
else if (pPostInfo->GetStage() == PostInfo::ptVerifyingRepaired)
{
int iRepairSec = (int)(tCurrent - m_ParChecker.GetRepairTime());
pPostInfo->GetNZBInfo()->SetRepairSec(pPostInfo->GetNZBInfo()->GetRepairSec() + iRepairSec);
}
}
bool bParCancel = false;
#ifdef HAVE_PAR2_CANCEL
if (!m_ParChecker.GetCancelled())
{
if ((g_pOptions->GetParTimeLimit() > 0) &&
@@ -620,30 +699,31 @@ void ParCoordinator::UpdateParCheckProgress()
}
}
}
#endif
if (bParCancel)
{
m_ParChecker.Cancel();
}
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
CheckPauseState(pPostInfo);
}
void ParCoordinator::CheckPauseState(PostInfo* pPostInfo)
{
if (g_pOptions->GetPausePostProcess())
if (g_pOptions->GetPausePostProcess() && !pPostInfo->GetNZBInfo()->GetForcePriority())
{
time_t tStageTime = pPostInfo->GetStageTime();
time_t tStartTime = pPostInfo->GetStartTime();
time_t tParTime = m_ParChecker.GetParTime();
time_t tRepairTime = m_ParChecker.GetRepairTime();
time_t tWaitTime = time(NULL);
// wait until Post-processor is unpaused
while (g_pOptions->GetPausePostProcess() && !m_bStopped)
while (g_pOptions->GetPausePostProcess() && !pPostInfo->GetNZBInfo()->GetForcePriority() && !m_bStopped)
{
usleep(100 * 1000);
usleep(50 * 1000);
// update time stamps
@@ -653,53 +733,59 @@ void ParCoordinator::CheckPauseState(PostInfo* pPostInfo)
{
pPostInfo->SetStageTime(tStageTime + tDelta);
}
if (tStartTime > 0)
{
pPostInfo->SetStartTime(tStartTime + tDelta);
}
if (tParTime > 0)
{
m_ParChecker.SetParTime(tParTime + tDelta);
}
if (tRepairTime > 0)
{
m_ParChecker.SetRepairTime(tRepairTime + tDelta);
}
}
}
}
void ParCoordinator::ParRenameCompleted()
{
DownloadQueue* pDownloadQueue = g_pQueueCoordinator->LockQueue();
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
PostInfo* pPostInfo = m_ParRenamer.GetPostInfo();
pPostInfo->GetNZBInfo()->SetRenameStatus(m_ParRenamer.GetStatus() == ParRenamer::psSuccess ? NZBInfo::rsSuccess : NZBInfo::rsFailure);
if (m_ParRenamer.HasMissedFiles() && pPostInfo->GetNZBInfo()->GetParStatus() <= NZBInfo::psSkipped)
{
PrintMessage(pPostInfo, Message::mkInfo, "Requesting par-check/repair for %s to restore missing files ", m_ParRenamer.GetInfoName());
pPostInfo->SetRequestParCheck(true);
}
pPostInfo->SetWorking(false);
pPostInfo->SetStage(PostInfo::ptQueued);
if (g_pOptions->GetSaveQueue() && g_pOptions->GetServerMode())
{
g_pDiskState->SaveDownloadQueue(pDownloadQueue);
}
g_pQueueCoordinator->UnlockQueue();
pDownloadQueue->Save();
DownloadQueue::Unlock();
}
void ParCoordinator::UpdateParRenameProgress()
{
g_pQueueCoordinator->LockQueue();
DownloadQueue::Lock();
PostInfo* pPostInfo = m_ParRenamer.GetPostInfo();
pPostInfo->SetProgressLabel(m_ParRenamer.GetProgressLabel());
pPostInfo->SetStageProgress(m_ParRenamer.GetStageProgress());
time_t tCurrent = time(NULL);
if (!pPostInfo->GetStartTime())
{
pPostInfo->SetStartTime(tCurrent);
}
if (pPostInfo->GetStage() != PostInfo::ptRenaming)
{
pPostInfo->SetStage(PostInfo::ptRenaming);
pPostInfo->SetStageTime(tCurrent);
}
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
CheckPauseState(pPostInfo);
}


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -45,14 +45,26 @@ private:
private:
ParCoordinator* m_pOwner;
PostInfo* m_pPostInfo;
time_t m_tParTime;
time_t m_tRepairTime;
int m_iDownloadSec;
protected:
virtual bool RequestMorePars(int iBlockNeeded, int* pBlockFound);
virtual void UpdateProgress();
virtual void Completed() { m_pOwner->ParCheckCompleted(); }
virtual void PrintMessage(Message::EKind eKind, const char* szFormat, ...);
virtual void RegisterParredFile(const char* szFilename);
virtual bool IsParredFile(const char* szFilename);
virtual EFileStatus FindFileCrc(const char* szFilename, unsigned long* lCrc, SegmentList* pSegments);
public:
PostInfo* GetPostInfo() { return m_pPostInfo; }
void SetPostInfo(PostInfo* pPostInfo) { m_pPostInfo = pPostInfo; }
time_t GetParTime() { return m_tParTime; }
void SetParTime(time_t tParTime) { m_tParTime = tParTime; }
time_t GetRepairTime() { return m_tRepairTime; }
void SetRepairTime(time_t tRepairTime) { m_tRepairTime = tRepairTime; }
int GetDownloadSec() { return m_iDownloadSec; }
void SetDownloadSec(int iDownloadSec) { m_iDownloadSec = iDownloadSec; }
friend class ParCoordinator;
};
@@ -66,6 +78,8 @@ private:
virtual void UpdateProgress();
virtual void Completed() { m_pOwner->ParRenameCompleted(); }
virtual void PrintMessage(Message::EKind eKind, const char* szFormat, ...);
virtual void RegisterParredFile(const char* szFilename);
virtual void RegisterRenamedFile(const char* szOldFilename, const char* szNewFileName);
public:
PostInfo* GetPostInfo() { return m_pPostInfo; }
void SetPostInfo(PostInfo* pPostInfo) { m_pPostInfo = pPostInfo; }
@@ -94,8 +108,6 @@ private:
EJobKind m_eCurrentJob;
protected:
virtual bool PauseDownload() = 0;
virtual bool UnpauseDownload() = 0;
void UpdateParCheckProgress();
void UpdateParRenameProgress();
void ParCheckCompleted();
@@ -106,12 +118,12 @@ protected:
#endif
public:
typedef std::deque<char*> FileList;
typedef std::deque<char*> ParFileList;
public:
ParCoordinator();
virtual ~ParCoordinator();
static bool FindMainPars(const char* szPath, FileList* pFileList);
static bool FindMainPars(const char* szPath, ParFileList* pFileList);
static bool ParseParFilename(const char* szParFilename, int* iBaseNameLen, int* iBlocks);
static bool SameParCollection(const char* szFilename1, const char* szFilename2);
void PausePars(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -37,17 +37,14 @@
#include <string.h>
#include <stdio.h>
#include <ctype.h>
#ifdef WIN32
#include <par2cmdline.h>
#include <par2repairer.h>
#include <md5.h>
#else
#ifndef WIN32
#include <unistd.h>
#include <libpar2/par2cmdline.h>
#include <libpar2/par2repairer.h>
#include <libpar2/md5.h>
#endif
#include "par2cmdline.h"
#include "par2repairer.h"
#include "md5.h"
#include "nzbget.h"
#include "ParRenamer.h"
#include "ParCoordinator.h"
@@ -67,6 +64,7 @@ ParRenamer::FileHash::FileHash(const char* szFilename, const char* szHash)
{
m_szFilename = strdup(szFilename);
m_szHash = strdup(szHash);
m_bFileExists = false;
}
ParRenamer::FileHash::~FileHash()
@@ -77,7 +75,7 @@ ParRenamer::FileHash::~FileHash()
ParRenamer::ParRenamer()
{
debug("Creating ParRenamer");
debug("Creating ParRenamer");
m_eStatus = psFailed;
m_szDestDir = NULL;
@@ -85,11 +83,13 @@ ParRenamer::ParRenamer()
m_szProgressLabel = (char*)malloc(1024);
m_iStageProgress = 0;
m_bCancelled = false;
m_bHasMissedFiles = false;
m_bDetectMissing = false;
}
ParRenamer::~ParRenamer()
{
debug("Destroying ParRenamer");
debug("Destroying ParRenamer");
free(m_szDestDir);
free(m_szInfoName);
@@ -142,6 +142,7 @@ void ParRenamer::Run()
m_iFileCount = 0;
m_iCurFile = 0;
m_iRenamedCount = 0;
m_bHasMissedFiles = false;
m_eStatus = psFailed;
snprintf(m_szProgressLabel, 1024, "Checking renamed files for %s", m_szInfoName);
@@ -157,9 +158,23 @@ void ParRenamer::Run()
debug("Checking %s", szDestDir);
ClearHashList();
LoadParFiles(szDestDir);
CheckFiles(szDestDir);
if (m_FileHashList.empty())
{
int iSavedCurFile = m_iCurFile;
CheckFiles(szDestDir, true);
m_iCurFile = iSavedCurFile; // restore progress indicator
LoadParFiles(szDestDir);
}
CheckFiles(szDestDir, false);
if (m_bDetectMissing)
{
CheckMissing();
}
}
if (m_bCancelled)
{
PrintMessage(Message::mkWarning, "Renaming cancelled for %s", m_szInfoName);
@@ -209,10 +224,10 @@ void ParRenamer::BuildDirList(const char* szDestDir)
void ParRenamer::LoadParFiles(const char* szDestDir)
{
ParCoordinator::FileList parFileList;
ParCoordinator::ParFileList parFileList;
ParCoordinator::FindMainPars(szDestDir, &parFileList);
for (ParCoordinator::FileList::iterator it = parFileList.begin(); it != parFileList.end(); it++)
for (ParCoordinator::ParFileList::iterator it = parFileList.begin(); it != parFileList.end(); it++)
{
char* szParFilename = *it;
@@ -245,14 +260,20 @@ void ParRenamer::LoadParFile(const char* szParFilename)
}
Par2RepairerSourceFile* sourceFile = (*it).second;
if (!sourceFile || !sourceFile->GetDescriptionPacket())
{
warn("Damaged par2-file detected: %s", szParFilename);
continue;
}
m_FileHashList.push_back(new FileHash(sourceFile->GetDescriptionPacket()->FileName().c_str(),
sourceFile->GetDescriptionPacket()->Hash16k().print().c_str()));
RegisterParredFile(sourceFile->GetDescriptionPacket()->FileName().c_str());
}
delete pRepairer;
}
void ParRenamer::CheckFiles(const char* szDestDir)
void ParRenamer::CheckFiles(const char* szDestDir, bool bRenamePars)
{
DirBrowser dir(szDestDir);
while (const char* filename = dir.Next())
@@ -271,30 +292,78 @@ void ParRenamer::CheckFiles(const char* szDestDir)
UpdateProgress();
m_iCurFile++;
CheckFile(szDestDir, szFullFilename);
if (bRenamePars)
{
CheckParFile(szDestDir, szFullFilename);
}
else
{
CheckRegularFile(szDestDir, szFullFilename);
}
}
}
}
}
void ParRenamer::CheckFile(const char* szDestDir, const char* szFilename)
void ParRenamer::CheckMissing()
{
for (FileHashList::iterator it = m_FileHashList.begin(); it != m_FileHashList.end(); it++)
{
FileHash* pFileHash = *it;
if (!pFileHash->GetFileExists())
{
if (Util::MatchFileExt(pFileHash->GetFilename(), g_pOptions->GetParIgnoreExt(), ",;") ||
Util::MatchFileExt(pFileHash->GetFilename(), g_pOptions->GetExtCleanupDisk(), ",;"))
{
info("File %s is missing, ignoring", pFileHash->GetFilename());
}
else
{
info("File %s is missing", pFileHash->GetFilename());
m_bHasMissedFiles = true;
}
}
}
}
bool ParRenamer::IsSplittedFragment(const char* szFilename, const char* szCorrectName)
{
bool bSplittedFragement = false;
const char* szDiskBasename = Util::BaseFileName(szFilename);
const char* szExtension = strrchr(szDiskBasename, '.');
int iBaseLen = strlen(szCorrectName);
if (szExtension && !strncasecmp(szDiskBasename, szCorrectName, iBaseLen))
{
const char* p = szDiskBasename + iBaseLen;
if (*p == '.')
{
for (p++; *p && strchr("0123456789", *p); p++) ;
bSplittedFragement = !*p;
bSplittedFragement = bSplittedFragement && atoi(szDiskBasename + iBaseLen + 1) <= 1; // .000 or .001
}
}
return bSplittedFragement;
}
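// Worked example (illustrative, not part of the original file): with the
// correct name "movie.part01.rar" the checks above give
//   IsSplittedFragment("movie.part01.rar.000", "movie.part01.rar") -> true
//   IsSplittedFragment("movie.part01.rar.001", "movie.part01.rar") -> true
//   IsSplittedFragment("movie.part01.rar.002", "movie.part01.rar") -> false (number > 1)
//   IsSplittedFragment("movie.part01.rar.bak", "movie.part01.rar") -> false (suffix not numeric)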
void ParRenamer::CheckRegularFile(const char* szDestDir, const char* szFilename)
{
debug("Computing hash for %s", szFilename);
const int iBlockSize = 16*1024;
FILE* pFile = fopen(szFilename, "rb");
if (!pFile)
{
FILE* pFile = fopen(szFilename, FOPEN_RB);
if (!pFile)
{
PrintMessage(Message::mkError, "Could not open file %s", szFilename);
return;
}
return;
}
// load first 16K of the file into buffer
void* pBuffer = malloc(iBlockSize);
int iReadBytes = fread(pBuffer, 1, iBlockSize, pFile);
int iReadBytes = fread(pBuffer, 1, iBlockSize, pFile);
int iError = ferror(pFile);
if (iReadBytes != iBlockSize && iError)
{
@@ -302,7 +371,7 @@ void ParRenamer::CheckFile(const char* szDestDir, const char* szFilename)
return;
}
fclose(pFile);
fclose(pFile);
MD5Hash hash16k;
MD5Context context;
@@ -319,22 +388,15 @@ void ParRenamer::CheckFile(const char* szDestDir, const char* szFilename)
if (!strcmp(pFileHash->GetHash(), hash16k.print().c_str()))
{
debug("Found correct filename: %s", pFileHash->GetFilename());
pFileHash->SetFileExists(true);
char szDstFilename[1024];
snprintf(szDstFilename, 1024, "%s%c%s", szDestDir, PATH_SEPARATOR, pFileHash->GetFilename());
szDstFilename[1024-1] = '\0';
if (!Util::FileExists(szDstFilename))
if (!Util::FileExists(szDstFilename) && !IsSplittedFragment(szFilename, pFileHash->GetFilename()))
{
PrintMessage(Message::mkInfo, "Renaming %s to %s", Util::BaseFileName(szFilename), pFileHash->GetFilename());
if (Util::MoveFile(szFilename, szDstFilename))
{
m_iRenamedCount++;
}
else
{
PrintMessage(Message::mkError, "Could not rename %s to %s", szFilename, szDstFilename);
}
RenameFile(szFilename, szDstFilename);
}
break;
@@ -342,4 +404,86 @@ void ParRenamer::CheckFile(const char* szDestDir, const char* szFilename)
}
}
/*
* For files not having par2-extensions: checks if the file is a par2-file and renames
* it according to its set-id.
*/
void ParRenamer::CheckParFile(const char* szDestDir, const char* szFilename)
{
debug("Checking par2-header for %s", szFilename);
const char* szBasename = Util::BaseFileName(szFilename);
const char* szExtension = strrchr(szBasename, '.');
if (szExtension && !strcasecmp(szExtension, ".par2"))
{
// do not process files already having par2-extension
return;
}
FILE* pFile = fopen(szFilename, FOPEN_RB);
if (!pFile)
{
PrintMessage(Message::mkError, "Could not open file %s", szFilename);
return;
}
// load par2-header
PACKET_HEADER header;
int iReadBytes = fread(&header, 1, sizeof(header), pFile);
int iError = ferror(pFile);
if (iReadBytes != sizeof(header) && iError)
{
PrintMessage(Message::mkError, "Could not read file %s", szFilename);
return;
}
fclose(pFile);
// Check the packet header
if (packet_magic != header.magic || // not par2-file
sizeof(PACKET_HEADER) > header.length || // packet length is too small
0 != (header.length & 3) || // packet length is not a multiple of 4
Util::FileSize(szFilename) < (int)header.length) // packet would extend beyond the end of the file
{
// not par2-file or damaged header, ignoring the file
return;
}
char szSetId[33];
strncpy(szSetId, header.setid.print().c_str(), sizeof(szSetId));
szSetId[33-1] = '\0';
for (char* p = szSetId; *p; p++) *p = tolower(*p); // convert string to lowercase
debug("Renaming: %s; setid: %s", Util::BaseFileName(szFilename), szSetId);
char szDestFileName[1024];
int iNum = 1;
while (iNum == 1 || Util::FileExists(szDestFileName))
{
snprintf(szDestFileName, 1024, "%s%c%s.vol%03i+01.PAR2", szDestDir, PATH_SEPARATOR, szSetId, iNum);
szDestFileName[1024-1] = '\0';
iNum++;
}
RenameFile(szFilename, szDestFileName);
}
void ParRenamer::RenameFile(const char* szSrcFilename, const char* szDestFileName)
{
PrintMessage(Message::mkInfo, "Renaming %s to %s", Util::BaseFileName(szSrcFilename), Util::BaseFileName(szDestFileName));
if (!Util::MoveFile(szSrcFilename, szDestFileName))
{
char szErrBuf[256];
PrintMessage(Message::mkError, "Could not rename %s to %s: %s", szSrcFilename, szDestFileName,
Util::GetLastErrorMessage(szErrBuf, sizeof(szErrBuf)));
return;
}
m_iRenamedCount++;
// notify about new file name
RegisterRenamedFile(Util::BaseFileName(szSrcFilename), Util::BaseFileName(szDestFileName));
}
#endif


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -47,16 +47,18 @@ public:
private:
char* m_szFilename;
char* m_szHash;
bool m_bFileExists;
public:
FileHash(const char* szFilename, const char* szHash);
~FileHash();
const char* GetFilename() { return m_szFilename; }
const char* GetHash() { return m_szHash; }
bool GetFileExists() { return m_bFileExists; }
void SetFileExists(bool bFileExists) { m_bFileExists = bFileExists; }
};
typedef std::deque<FileHash*> FileHashList;
typedef std::deque<char*> DirList;
private:
@@ -71,6 +73,8 @@ private:
int m_iFileCount;
int m_iCurFile;
int m_iRenamedCount;
bool m_bHasMissedFiles;
bool m_bDetectMissing;
void Cleanup();
void ClearHashList();
@@ -78,13 +82,19 @@ private:
void CheckDir(const char* szDestDir);
void LoadParFiles(const char* szDestDir);
void LoadParFile(const char* szParFilename);
void CheckFiles(const char* szDestDir);
void CheckFile(const char* szDestDir, const char* szFilename);
void CheckFiles(const char* szDestDir, bool bRenamePars);
void CheckRegularFile(const char* szDestDir, const char* szFilename);
void CheckParFile(const char* szDestDir, const char* szFilename);
bool IsSplittedFragment(const char* szFilename, const char* szCorrectName);
void CheckMissing();
void RenameFile(const char* szSrcFilename, const char* szDestFileName);
protected:
virtual void UpdateProgress() {}
virtual void Completed() {}
virtual void PrintMessage(Message::EKind eKind, const char* szFormat, ...) {}
virtual void RegisterParredFile(const char* szFilename) {}
virtual void RegisterRenamedFile(const char* szOldFilename, const char* szNewFileName) {}
const char* GetProgressLabel() { return m_szProgressLabel; }
int GetStageProgress() { return m_iStageProgress; }
@@ -99,6 +109,8 @@ public:
EStatus GetStatus() { return m_eStatus; }
void Cancel();
bool GetCancelled() { return m_bCancelled; }
bool HasMissedFiles() { return m_bHasMissedFiles; }
void SetDetectMissing(bool bDetectMissing) { m_bDetectMissing = bDetectMissing; }
};
#endif


@@ -0,0 +1,339 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef WIN32
#include "win32.h"
#endif
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#ifndef WIN32
#include <unistd.h>
#endif
#include <stdio.h>
#include "nzbget.h"
#include "PostScript.h"
#include "Log.h"
#include "Util.h"
#include "Options.h"
extern Options* g_pOptions;
static const int POSTPROCESS_PARCHECK = 92;
static const int POSTPROCESS_SUCCESS = 93;
static const int POSTPROCESS_ERROR = 94;
static const int POSTPROCESS_NONE = 95;
void PostScriptController::StartJob(PostInfo* pPostInfo)
{
PostScriptController* pScriptController = new PostScriptController();
pScriptController->m_pPostInfo = pPostInfo;
pScriptController->SetWorkingDir(g_pOptions->GetDestDir());
pScriptController->SetAutoDestroy(false);
pScriptController->m_iPrefixLen = 0;
pPostInfo->SetPostThread(pScriptController);
pScriptController->Start();
}
void PostScriptController::Run()
{
StringBuilder scriptCommaList;
// the locking is needed for accessing the members of NZBInfo
DownloadQueue::Lock();
for (NZBParameterList::iterator it = m_pPostInfo->GetNZBInfo()->GetParameters()->begin(); it != m_pPostInfo->GetNZBInfo()->GetParameters()->end(); it++)
{
NZBParameter* pParameter = *it;
const char* szVarname = pParameter->GetName();
if (strlen(szVarname) > 0 && szVarname[0] != '*' && szVarname[strlen(szVarname)-1] == ':' &&
(!strcasecmp(pParameter->GetValue(), "yes") || !strcasecmp(pParameter->GetValue(), "on") || !strcasecmp(pParameter->GetValue(), "1")))
{
char* szScriptName = strdup(szVarname);
szScriptName[strlen(szScriptName)-1] = '\0'; // remove trailing ':'
scriptCommaList.Append(szScriptName);
scriptCommaList.Append(",");
free(szScriptName);
}
}
m_pPostInfo->GetNZBInfo()->GetScriptStatuses()->Clear();
DownloadQueue::Unlock();
ExecuteScriptList(scriptCommaList.GetBuffer());
m_pPostInfo->SetStage(PostInfo::ptFinished);
m_pPostInfo->SetWorking(false);
}
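// Worked example (illustrative, not part of the original file): an NZB carrying
// the parameters "MyScript.py:" = "yes" and "Other.sh:" = "no" yields the comma
// list "MyScript.py," here, so only MyScript.py is handed to ExecuteScriptList();
// parameter names starting with '*' or not ending with ':' are skipped entirely.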
void PostScriptController::ExecuteScript(Options::Script* pScript)
{
// if any script has requested par-check, do not execute other scripts
if (!pScript->GetPostScript() || m_pPostInfo->GetRequestParCheck())
{
return;
}
PrintMessage(Message::mkInfo, "Executing post-process-script %s for %s", pScript->GetName(), m_pPostInfo->GetNZBInfo()->GetName());
SetScript(pScript->GetLocation());
SetArgs(NULL, false);
char szInfoName[1024];
snprintf(szInfoName, 1024, "post-process-script %s for %s", pScript->GetName(), m_pPostInfo->GetNZBInfo()->GetName());
szInfoName[1024-1] = '\0';
SetInfoName(szInfoName);
m_pScript = pScript;
SetLogPrefix(pScript->GetDisplayName());
m_iPrefixLen = strlen(pScript->GetDisplayName()) + 2; // 2 = strlen(": ");
PrepareParams(pScript->GetName());
int iExitCode = Execute();
szInfoName[0] = 'P'; // uppercase
SetLogPrefix(NULL);
ScriptStatus::EStatus eStatus = AnalyseExitCode(iExitCode);
// the locking is needed for accessing the members of NZBInfo
DownloadQueue::Lock();
m_pPostInfo->GetNZBInfo()->GetScriptStatuses()->Add(pScript->GetName(), eStatus);
DownloadQueue::Unlock();
}
void PostScriptController::PrepareParams(const char* szScriptName)
{
// the locking is needed for accessing the members of NZBInfo
DownloadQueue::Lock();
ResetEnv();
SetIntEnvVar("NZBPP_NZBID", m_pPostInfo->GetNZBInfo()->GetID());
SetEnvVar("NZBPP_NZBNAME", m_pPostInfo->GetNZBInfo()->GetName());
SetEnvVar("NZBPP_DIRECTORY", m_pPostInfo->GetNZBInfo()->GetDestDir());
SetEnvVar("NZBPP_NZBFILENAME", m_pPostInfo->GetNZBInfo()->GetFilename());
SetEnvVar("NZBPP_URL", m_pPostInfo->GetNZBInfo()->GetURL());
SetEnvVar("NZBPP_FINALDIR", m_pPostInfo->GetNZBInfo()->GetFinalDir());
SetEnvVar("NZBPP_CATEGORY", m_pPostInfo->GetNZBInfo()->GetCategory());
SetIntEnvVar("NZBPP_HEALTH", m_pPostInfo->GetNZBInfo()->CalcHealth());
SetIntEnvVar("NZBPP_CRITICALHEALTH", m_pPostInfo->GetNZBInfo()->CalcCriticalHealth(false));
char szStatus[256];
strncpy(szStatus, m_pPostInfo->GetNZBInfo()->MakeTextStatus(true), sizeof(szStatus));
szStatus[256-1] = '\0';
SetEnvVar("NZBPP_STATUS", szStatus);
char* szDetail = strchr(szStatus, '/');
if (szDetail) *szDetail = '\0';
SetEnvVar("NZBPP_TOTALSTATUS", szStatus);
const char* szScriptStatusName[] = { "NONE", "FAILURE", "SUCCESS" };
SetEnvVar("NZBPP_SCRIPTSTATUS", szScriptStatusName[m_pPostInfo->GetNZBInfo()->GetScriptStatuses()->CalcTotalStatus()]);
// deprecated
int iParStatus[] = { 0, 0, 1, 2, 3, 4 };
NZBInfo::EParStatus eParStatus = m_pPostInfo->GetNZBInfo()->GetParStatus();
// for downloads marked as bad and for deleted downloads pass par status "Failure"
// for compatibility with older scripts which don't check "NZBPP_TOTALSTATUS"
if (m_pPostInfo->GetNZBInfo()->GetDeleteStatus() != NZBInfo::dsNone ||
m_pPostInfo->GetNZBInfo()->GetMarkStatus() == NZBInfo::ksBad)
{
eParStatus = NZBInfo::psFailure;
}
SetIntEnvVar("NZBPP_PARSTATUS", iParStatus[eParStatus]);
// deprecated
int iUnpackStatus[] = { 0, 0, 1, 2, 3, 4 };
SetIntEnvVar("NZBPP_UNPACKSTATUS", iUnpackStatus[m_pPostInfo->GetNZBInfo()->GetUnpackStatus()]);
// deprecated
SetIntEnvVar("NZBPP_HEALTHDELETED", (int)m_pPostInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsHealth);
SetIntEnvVar("NZBPP_TOTALARTICLES", (int)m_pPostInfo->GetNZBInfo()->GetTotalArticles());
SetIntEnvVar("NZBPP_SUCCESSARTICLES", (int)m_pPostInfo->GetNZBInfo()->GetSuccessArticles());
SetIntEnvVar("NZBPP_FAILEDARTICLES", (int)m_pPostInfo->GetNZBInfo()->GetFailedArticles());
for (ServerStatList::iterator it = m_pPostInfo->GetNZBInfo()->GetServerStats()->begin(); it != m_pPostInfo->GetNZBInfo()->GetServerStats()->end(); it++)
{
ServerStat* pServerStat = *it;
char szName[50];
snprintf(szName, 50, "NZBPP_SERVER%i_SUCCESSARTICLES", pServerStat->GetServerID());
szName[50-1] = '\0';
SetIntEnvVar(szName, pServerStat->GetSuccessArticles());
snprintf(szName, 50, "NZBPP_SERVER%i_FAILEDARTICLES", pServerStat->GetServerID());
szName[50-1] = '\0';
SetIntEnvVar(szName, pServerStat->GetFailedArticles());
}
PrepareEnvScript(m_pPostInfo->GetNZBInfo()->GetParameters(), szScriptName);
DownloadQueue::Unlock();
}
ScriptStatus::EStatus PostScriptController::AnalyseExitCode(int iExitCode)
{
// The ScriptStatus is accumulated for all scripts:
// If any script has failed the status is "failure", etc.
switch (iExitCode)
{
case POSTPROCESS_SUCCESS:
PrintMessage(Message::mkInfo, "%s successful", GetInfoName());
return ScriptStatus::srSuccess;
case POSTPROCESS_ERROR:
case -1: // Execute() returns -1 if the process could not be started (file not found or other problem)
PrintMessage(Message::mkError, "%s failed", GetInfoName());
return ScriptStatus::srFailure;
case POSTPROCESS_NONE:
PrintMessage(Message::mkInfo, "%s skipped", GetInfoName());
return ScriptStatus::srNone;
#ifndef DISABLE_PARCHECK
case POSTPROCESS_PARCHECK:
if (m_pPostInfo->GetNZBInfo()->GetParStatus() > NZBInfo::psSkipped)
{
PrintMessage(Message::mkError, "%s requested par-check/repair, but the collection was already checked", GetInfoName());
return ScriptStatus::srFailure;
}
else
{
PrintMessage(Message::mkInfo, "%s requested par-check/repair", GetInfoName());
m_pPostInfo->SetRequestParCheck(true);
m_pPostInfo->SetForceRepair(true);
return ScriptStatus::srSuccess;
}
break;
#endif
default:
PrintMessage(Message::mkError, "%s failed (terminated with unknown status)", GetInfoName());
return ScriptStatus::srFailure;
}
}
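// Processes script output: "[NZB] " lines carry commands back to the program (FINALDIR, DIRECTORY,
// NZBPR_, MARK=BAD), "[HISTORY] " lines go to the NZB log, all other lines are logged normally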
void PostScriptController::AddMessage(Message::EKind eKind, const char* szText)
{
const char* szMsgText = szText + m_iPrefixLen;
if (!strncmp(szMsgText, "[NZB] ", 6))
{
debug("Command %s detected", szMsgText + 6);
if (!strncmp(szMsgText + 6, "FINALDIR=", 9))
{
DownloadQueue::Lock();
m_pPostInfo->GetNZBInfo()->SetFinalDir(szMsgText + 6 + 9);
DownloadQueue::Unlock();
}
else if (!strncmp(szMsgText + 6, "DIRECTORY=", 10))
{
DownloadQueue::Lock();
m_pPostInfo->GetNZBInfo()->SetDestDir(szMsgText + 6 + 10);
DownloadQueue::Unlock();
}
else if (!strncmp(szMsgText + 6, "NZBPR_", 6))
{
char* szParam = strdup(szMsgText + 6 + 6);
char* szValue = strchr(szParam, '=');
if (szValue)
{
*szValue = '\0';
DownloadQueue::Lock();
m_pPostInfo->GetNZBInfo()->GetParameters()->SetParameter(szParam, szValue + 1);
DownloadQueue::Unlock();
}
else
{
error("Invalid command \"%s\" received from %s", szMsgText, GetInfoName());
}
free(szParam);
}
else if (!strncmp(szMsgText + 6, "MARK=BAD", 8))
{
SetLogPrefix(NULL);
PrintMessage(Message::mkWarning, "Marking %s as bad", m_pPostInfo->GetNZBInfo()->GetName());
SetLogPrefix(m_pScript->GetDisplayName());
m_pPostInfo->GetNZBInfo()->SetMarkStatus(NZBInfo::ksBad);
}
else
{
error("Invalid command \"%s\" received from %s", szMsgText, GetInfoName());
}
}
else if (!strncmp(szMsgText, "[HISTORY] ", 10))
{
m_pPostInfo->GetNZBInfo()->AppendMessage(eKind, 0, szMsgText);
}
else
{
ScriptController::AddMessage(eKind, szText);
m_pPostInfo->AppendMessage(eKind, szText);
}
if (g_pOptions->GetPausePostProcess() && !m_pPostInfo->GetNZBInfo()->GetForcePriority())
{
time_t tStageTime = m_pPostInfo->GetStageTime();
time_t tStartTime = m_pPostInfo->GetStartTime();
time_t tWaitTime = time(NULL);
// wait until Post-processor is unpaused
while (g_pOptions->GetPausePostProcess() && !m_pPostInfo->GetNZBInfo()->GetForcePriority() && !IsStopped())
{
usleep(100 * 1000);
// update time stamps
time_t tDelta = time(NULL) - tWaitTime;
if (tStageTime > 0)
{
m_pPostInfo->SetStageTime(tStageTime + tDelta);
}
if (tStartTime > 0)
{
m_pPostInfo->SetStartTime(tStartTime + tDelta);
}
}
}
}
void PostScriptController::Stop()
{
debug("Stopping post-process-script");
Thread::Stop();
Terminate();
}

View File

@@ -0,0 +1,55 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef POSTSCRIPT_H
#define POSTSCRIPT_H
#include "Thread.h"
#include "Log.h"
#include "QueueScript.h"
#include "DownloadInfo.h"
#include "Options.h"
class PostScriptController : public Thread, public NZBScriptController
{
private:
PostInfo* m_pPostInfo;
int m_iPrefixLen;
Options::Script* m_pScript;
void PrepareParams(const char* szScriptName);
ScriptStatus::EStatus AnalyseExitCode(int iExitCode);
protected:
virtual void ExecuteScript(Options::Script* pScript);
virtual void AddMessage(Message::EKind eKind, const char* szText);
public:
virtual void Run();
virtual void Stop();
static void StartJob(PostInfo* pPostInfo);
};
#endif

View File

@@ -0,0 +1,847 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef WIN32
#include "win32.h"
#endif
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#ifdef WIN32
#include <direct.h>
#else
#include <unistd.h>
#endif
#include <set>
#include <algorithm>
#include "nzbget.h"
#include "PrePostProcessor.h"
#include "Options.h"
#include "Log.h"
#include "HistoryCoordinator.h"
#include "DupeCoordinator.h"
#include "PostScript.h"
#include "Util.h"
#include "Scheduler.h"
#include "Scanner.h"
#include "Unpack.h"
#include "NZBFile.h"
#include "StatMeter.h"
#include "QueueScript.h"
extern HistoryCoordinator* g_pHistoryCoordinator;
extern DupeCoordinator* g_pDupeCoordinator;
extern Options* g_pOptions;
extern Scheduler* g_pScheduler;
extern Scanner* g_pScanner;
extern StatMeter* g_pStatMeter;
extern QueueScriptCoordinator* g_pQueueScriptCoordinator;
PrePostProcessor::PrePostProcessor()
{
debug("Creating PrePostProcessor");
m_iJobCount = 0;
m_pCurJob = NULL;
m_szPauseReason = NULL;
m_DownloadQueueObserver.m_pOwner = this;
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
pDownloadQueue->Attach(&m_DownloadQueueObserver);
DownloadQueue::Unlock();
}
PrePostProcessor::~PrePostProcessor()
{
debug("Destroying PrePostProcessor");
}
void PrePostProcessor::Run()
{
debug("Entering PrePostProcessor-loop");
while (!DownloadQueue::IsLoaded())
{
usleep(20 * 1000);
}
if (g_pOptions->GetServerMode() && g_pOptions->GetSaveQueue() && g_pOptions->GetReloadQueue())
{
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
SanitisePostQueue(pDownloadQueue);
DownloadQueue::Unlock();
}
g_pScheduler->FirstCheck();
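// interval counters in milliseconds; initialised to their thresholds so that all checks run on the first loop iteration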
int iDiskSpaceInterval = 1000;
int iSchedulerInterval = 1000;
int iHistoryInterval = 600000;
const int iStepMSec = 200;
while (!IsStopped())
{
// check incoming nzb directory
g_pScanner->Check();
if (!g_pOptions->GetPauseDownload() &&
g_pOptions->GetDiskSpace() > 0 && !g_pStatMeter->GetStandBy() &&
iDiskSpaceInterval >= 1000)
{
// check free disk space every 1 second
CheckDiskSpace();
iDiskSpaceInterval = 0;
}
iDiskSpaceInterval += iStepMSec;
// check post-queue every 200 msec
CheckPostQueue();
if (iSchedulerInterval >= 1000)
{
// check scheduler tasks every 1 second
g_pScheduler->IntervalCheck();
iSchedulerInterval = 0;
}
iSchedulerInterval += iStepMSec;
if (iHistoryInterval >= 600000)
{
// check history (remove old entries) every 10 minutes
g_pHistoryCoordinator->IntervalCheck();
iHistoryInterval = 0;
}
iHistoryInterval += iStepMSec;
Util::SetStandByMode(!m_pCurJob);
usleep(iStepMSec * 1000);
}
g_pHistoryCoordinator->Cleanup();
debug("Exiting PrePostProcessor-loop");
}
void PrePostProcessor::Stop()
{
Thread::Stop();
DownloadQueue::Lock();
#ifndef DISABLE_PARCHECK
m_ParCoordinator.Stop();
#endif
if (m_pCurJob && m_pCurJob->GetPostInfo() &&
(m_pCurJob->GetPostInfo()->GetStage() == PostInfo::ptUnpacking ||
m_pCurJob->GetPostInfo()->GetStage() == PostInfo::ptExecutingScript) &&
m_pCurJob->GetPostInfo()->GetPostThread())
{
Thread* pPostThread = m_pCurJob->GetPostInfo()->GetPostThread();
m_pCurJob->GetPostInfo()->SetPostThread(NULL);
pPostThread->SetAutoDestroy(true);
pPostThread->Stop();
}
DownloadQueue::Unlock();
}
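// Observer callback for download queue events; dispatches to the NZBFound/NZBAdded/NZBDeleted/NZBDownloaded handlers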
void PrePostProcessor::DownloadQueueUpdate(Subject* Caller, void* Aspect)
{
if (IsStopped())
{
return;
}
DownloadQueue::Aspect* pQueueAspect = (DownloadQueue::Aspect*)Aspect;
if (pQueueAspect->eAction == DownloadQueue::eaNzbFound)
{
NZBFound(pQueueAspect->pDownloadQueue, pQueueAspect->pNZBInfo);
}
else if (pQueueAspect->eAction == DownloadQueue::eaNzbAdded)
{
NZBAdded(pQueueAspect->pDownloadQueue, pQueueAspect->pNZBInfo);
}
else if (pQueueAspect->eAction == DownloadQueue::eaNzbDeleted &&
pQueueAspect->pNZBInfo->GetDeleting() &&
!pQueueAspect->pNZBInfo->GetPostInfo() &&
!pQueueAspect->pNZBInfo->GetParCleanup() &&
pQueueAspect->pNZBInfo->GetFileList()->empty())
{
// deletion of NZBs is usually handled via the eaFileDeleted event, but when an NZB is deleted
// without any files left, the eaFileDeleted event is not fired and the eaNzbDeleted event must be processed instead
info("Collection %s deleted from queue", pQueueAspect->pNZBInfo->GetName());
NZBDeleted(pQueueAspect->pDownloadQueue, pQueueAspect->pNZBInfo);
}
else if ((pQueueAspect->eAction == DownloadQueue::eaFileCompleted ||
pQueueAspect->eAction == DownloadQueue::eaFileDeleted))
{
if (pQueueAspect->eAction == DownloadQueue::eaFileCompleted && !pQueueAspect->pNZBInfo->GetPostInfo())
{
g_pQueueScriptCoordinator->EnqueueScript(pQueueAspect->pNZBInfo, QueueScriptCoordinator::qeFileDownloaded);
}
if (
#ifndef DISABLE_PARCHECK
!m_ParCoordinator.AddPar(pQueueAspect->pFileInfo, pQueueAspect->eAction == DownloadQueue::eaFileDeleted) &&
#endif
IsNZBFileCompleted(pQueueAspect->pNZBInfo, true, false) &&
!pQueueAspect->pNZBInfo->GetPostInfo() &&
(!pQueueAspect->pFileInfo->GetPaused() || IsNZBFileCompleted(pQueueAspect->pNZBInfo, false, false)))
{
if ((pQueueAspect->eAction == DownloadQueue::eaFileCompleted ||
(pQueueAspect->pFileInfo->GetAutoDeleted() &&
IsNZBFileCompleted(pQueueAspect->pNZBInfo, false, true))) &&
pQueueAspect->pFileInfo->GetNZBInfo()->GetDeleteStatus() != NZBInfo::dsHealth)
{
info("Collection %s completely downloaded", pQueueAspect->pNZBInfo->GetName());
g_pQueueScriptCoordinator->EnqueueScript(pQueueAspect->pNZBInfo, QueueScriptCoordinator::qeNzbDownloaded);
NZBDownloaded(pQueueAspect->pDownloadQueue, pQueueAspect->pNZBInfo);
}
else if ((pQueueAspect->eAction == DownloadQueue::eaFileDeleted ||
(pQueueAspect->eAction == DownloadQueue::eaFileCompleted &&
pQueueAspect->pFileInfo->GetNZBInfo()->GetDeleteStatus() > NZBInfo::dsNone)) &&
!pQueueAspect->pNZBInfo->GetParCleanup() &&
IsNZBFileCompleted(pQueueAspect->pNZBInfo, false, true))
{
info("Collection %s deleted from queue", pQueueAspect->pNZBInfo->GetName());
NZBDeleted(pQueueAspect->pDownloadQueue, pQueueAspect->pNZBInfo);
}
}
}
}
void PrePostProcessor::NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
{
if (g_pOptions->GetDupeCheck() && pNZBInfo->GetDupeMode() != dmForce)
{
g_pDupeCoordinator->NZBFound(pDownloadQueue, pNZBInfo);
}
}
void PrePostProcessor::NZBAdded(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
{
if (g_pOptions->GetParCheck() != Options::pcForce)
{
m_ParCoordinator.PausePars(pDownloadQueue, pNZBInfo);
}
if (g_pOptions->GetDupeCheck() && pNZBInfo->GetDupeMode() != dmForce &&
pNZBInfo->GetDeleteStatus() == NZBInfo::dsDupe)
{
NZBCompleted(pDownloadQueue, pNZBInfo, false);
}
else
{
g_pQueueScriptCoordinator->EnqueueScript(pNZBInfo, QueueScriptCoordinator::qeNzbAdded);
}
}
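// All files of the NZB are downloaded: queue it for post-processing (or complete it directly if decoding is disabled)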
void PrePostProcessor::NZBDownloaded(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
{
if (!pNZBInfo->GetPostInfo() && g_pOptions->GetDecode())
{
info("Queueing %s for post-processing", pNZBInfo->GetName());
pNZBInfo->EnterPostProcess();
m_iJobCount++;
if (pNZBInfo->GetParStatus() == NZBInfo::psNone &&
g_pOptions->GetParCheck() != Options::pcAlways &&
g_pOptions->GetParCheck() != Options::pcForce)
{
pNZBInfo->SetParStatus(NZBInfo::psSkipped);
}
if (pNZBInfo->GetRenameStatus() == NZBInfo::rsNone && !g_pOptions->GetParRename())
{
pNZBInfo->SetRenameStatus(NZBInfo::rsSkipped);
}
pDownloadQueue->Save();
}
else
{
NZBCompleted(pDownloadQueue, pNZBInfo, true);
}
}
void PrePostProcessor::NZBDeleted(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
{
if (pNZBInfo->GetDeleteStatus() == NZBInfo::dsNone)
{
pNZBInfo->SetDeleteStatus(NZBInfo::dsManual);
}
pNZBInfo->SetDeleting(false);
DeleteCleanup(pNZBInfo);
if (pNZBInfo->GetDeleteStatus() == NZBInfo::dsHealth ||
pNZBInfo->GetDeleteStatus() == NZBInfo::dsBad)
{
NZBDownloaded(pDownloadQueue, pNZBInfo);
}
else
{
NZBCompleted(pDownloadQueue, pNZBInfo, true);
}
}
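// Final step for an NZB: moves it to history (if history is kept), runs duplicate handling and otherwise removes it from the queue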
void PrePostProcessor::NZBCompleted(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo, bool bSaveQueue)
{
bool bAddToHistory = g_pOptions->GetKeepHistory() > 0 && !pNZBInfo->GetAvoidHistory();
if (bAddToHistory)
{
g_pHistoryCoordinator->AddToHistory(pDownloadQueue, pNZBInfo);
}
pNZBInfo->SetAvoidHistory(false);
bool bNeedSave = bAddToHistory;
if (g_pOptions->GetDupeCheck() && pNZBInfo->GetDupeMode() != dmForce &&
(pNZBInfo->GetDeleteStatus() == NZBInfo::dsNone ||
pNZBInfo->GetDeleteStatus() == NZBInfo::dsHealth ||
pNZBInfo->GetDeleteStatus() == NZBInfo::dsBad))
{
g_pDupeCoordinator->NZBCompleted(pDownloadQueue, pNZBInfo);
bNeedSave = true;
}
if (!bAddToHistory)
{
g_pHistoryCoordinator->DeleteDiskFiles(pNZBInfo);
pDownloadQueue->GetQueue()->Remove(pNZBInfo);
delete pNZBInfo;
}
if (bSaveQueue && bNeedSave)
{
pDownloadQueue->Save();
}
}
void PrePostProcessor::DeleteCleanup(NZBInfo* pNZBInfo)
{
if ((g_pOptions->GetDeleteCleanupDisk() && pNZBInfo->GetCleanupDisk()) ||
pNZBInfo->GetDeleteStatus() == NZBInfo::dsDupe)
{
// download was cancelled, deleting already downloaded files from disk
for (CompletedFiles::reverse_iterator it = pNZBInfo->GetCompletedFiles()->rbegin(); it != pNZBInfo->GetCompletedFiles()->rend(); it++)
{
CompletedFile* pCompletedFile = *it;
char szFullFileName[1024];
snprintf(szFullFileName, 1024, "%s%c%s", pNZBInfo->GetDestDir(), (int)PATH_SEPARATOR, pCompletedFile->GetFileName());
szFullFileName[1024-1] = '\0';
if (Util::FileExists(szFullFileName))
{
detail("Deleting file %s", pCompletedFile->GetFileName());
remove(szFullFileName);
}
}
// delete .out.tmp-files and _brokenlog.txt
DirBrowser dir(pNZBInfo->GetDestDir());
while (const char* szFilename = dir.Next())
{
int iLen = strlen(szFilename);
if ((iLen > 8 && !strcmp(szFilename + iLen - 8, ".out.tmp")) || !strcmp(szFilename, "_brokenlog.txt"))
{
char szFullFilename[1024];
snprintf(szFullFilename, 1024, "%s%c%s", pNZBInfo->GetDestDir(), PATH_SEPARATOR, szFilename);
szFullFilename[1024-1] = '\0';
detail("Deleting file %s", szFilename);
remove(szFullFilename);
}
}
// delete old directory (if empty)
if (Util::DirEmpty(pNZBInfo->GetDestDir()))
{
rmdir(pNZBInfo->GetDestDir());
}
}
}
void PrePostProcessor::CheckDiskSpace()
{
long long lFreeSpace = Util::FreeDiskSize(g_pOptions->GetDestDir());
if (lFreeSpace > -1 && lFreeSpace / 1024 / 1024 < g_pOptions->GetDiskSpace())
{
warn("Low disk space on %s. Pausing download", g_pOptions->GetDestDir());
g_pOptions->SetPauseDownload(true);
}
if (!Util::EmptyStr(g_pOptions->GetInterDir()))
{
lFreeSpace = Util::FreeDiskSize(g_pOptions->GetInterDir());
if (lFreeSpace > -1 && lFreeSpace / 1024 / 1024 < g_pOptions->GetDiskSpace())
{
warn("Low disk space on %s. Pausing download", g_pOptions->GetInterDir());
g_pOptions->SetPauseDownload(true);
}
}
}
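// Called from the main loop every 200 ms: picks the next post-job and advances it through its stages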
void PrePostProcessor::CheckPostQueue()
{
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
if (!m_pCurJob && m_iJobCount > 0)
{
m_pCurJob = GetNextJob(pDownloadQueue);
}
if (m_pCurJob)
{
PostInfo* pPostInfo = m_pCurJob->GetPostInfo();
if (!pPostInfo->GetWorking() && !IsNZBFileDownloading(m_pCurJob))
{
#ifndef DISABLE_PARCHECK
if (pPostInfo->GetRequestParCheck() &&
(pPostInfo->GetNZBInfo()->GetParStatus() <= NZBInfo::psSkipped ||
(pPostInfo->GetForceRepair() && !pPostInfo->GetNZBInfo()->GetParFull())) &&
g_pOptions->GetParCheck() != Options::pcManual)
{
pPostInfo->SetForceParFull(pPostInfo->GetNZBInfo()->GetParStatus() > NZBInfo::psSkipped);
pPostInfo->GetNZBInfo()->SetParStatus(NZBInfo::psNone);
pPostInfo->SetRequestParCheck(false);
pPostInfo->SetStage(PostInfo::ptQueued);
pPostInfo->GetNZBInfo()->GetScriptStatuses()->Clear();
DeletePostThread(pPostInfo);
}
else if (pPostInfo->GetRequestParCheck() && pPostInfo->GetNZBInfo()->GetParStatus() <= NZBInfo::psSkipped &&
g_pOptions->GetParCheck() == Options::pcManual)
{
pPostInfo->SetRequestParCheck(false);
pPostInfo->GetNZBInfo()->SetParStatus(NZBInfo::psManual);
DeletePostThread(pPostInfo);
if (!pPostInfo->GetNZBInfo()->GetFileList()->empty())
{
info("Downloading all remaining files for manual par-check for %s", pPostInfo->GetNZBInfo()->GetName());
pDownloadQueue->EditEntry(pPostInfo->GetNZBInfo()->GetID(), DownloadQueue::eaGroupResume, 0, NULL);
pPostInfo->SetStage(PostInfo::ptFinished);
}
else
{
info("There are no par-files remain for download for %s", pPostInfo->GetNZBInfo()->GetName());
pPostInfo->SetStage(PostInfo::ptQueued);
}
}
#endif
if (pPostInfo->GetDeleted())
{
pPostInfo->SetStage(PostInfo::ptFinished);
}
if (pPostInfo->GetStage() == PostInfo::ptQueued &&
(!g_pOptions->GetPausePostProcess() || pPostInfo->GetNZBInfo()->GetForcePriority()))
{
DeletePostThread(pPostInfo);
StartJob(pDownloadQueue, pPostInfo);
}
else if (pPostInfo->GetStage() == PostInfo::ptFinished)
{
UpdatePauseState(false, NULL);
JobCompleted(pDownloadQueue, pPostInfo);
}
else if (!g_pOptions->GetPausePostProcess())
{
error("Internal error: invalid state in post-processor");
// TODO: cancel (delete) current job
}
}
}
DownloadQueue::Unlock();
}
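// Selects the queued post-job with the highest priority that is not blocked by a queue-script;
// force-priority jobs are picked even while post-processing is paused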
NZBInfo* PrePostProcessor::GetNextJob(DownloadQueue* pDownloadQueue)
{
NZBInfo* pNZBInfo = NULL;
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
NZBInfo* pNZBInfo1 = *it;
if (pNZBInfo1->GetPostInfo() && !g_pQueueScriptCoordinator->HasJob(pNZBInfo1->GetID()) &&
(!pNZBInfo || pNZBInfo1->GetPriority() > pNZBInfo->GetPriority()) &&
(!g_pOptions->GetPausePostProcess() || pNZBInfo1->GetForcePriority()))
{
pNZBInfo = pNZBInfo1;
}
}
return pNZBInfo;
}
/**
* Reset the state of items after reloading from disk and
* delete items which could not be resumed.
* Also count the number of post-jobs.
*/
void PrePostProcessor::SanitisePostQueue(DownloadQueue* pDownloadQueue)
{
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
NZBInfo* pNZBInfo = *it;
PostInfo* pPostInfo = pNZBInfo->GetPostInfo();
if (pPostInfo)
{
m_iJobCount++;
if (pPostInfo->GetStage() == PostInfo::ptExecutingScript ||
!Util::DirectoryExists(pNZBInfo->GetDestDir()))
{
pPostInfo->SetStage(PostInfo::ptFinished);
}
else
{
pPostInfo->SetStage(PostInfo::ptQueued);
}
}
}
}
void PrePostProcessor::DeletePostThread(PostInfo* pPostInfo)
{
delete pPostInfo->GetPostThread();
pPostInfo->SetPostThread(NULL);
}
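// Decides which post-processing step comes next for the job: par-rename, par-check, unpack,
// cleanup, move to the destination directory or post-processing scripts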
void PrePostProcessor::StartJob(DownloadQueue* pDownloadQueue, PostInfo* pPostInfo)
{
if (!pPostInfo->GetStartTime())
{
pPostInfo->SetStartTime(time(NULL));
}
#ifndef DISABLE_PARCHECK
if (pPostInfo->GetNZBInfo()->GetRenameStatus() == NZBInfo::rsNone &&
pPostInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsNone)
{
UpdatePauseState(g_pOptions->GetParPauseQueue(), "par-rename");
m_ParCoordinator.StartParRenameJob(pPostInfo);
return;
}
else if (pPostInfo->GetNZBInfo()->GetParStatus() == NZBInfo::psNone &&
pPostInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsNone)
{
if (m_ParCoordinator.FindMainPars(pPostInfo->GetNZBInfo()->GetDestDir(), NULL))
{
UpdatePauseState(g_pOptions->GetParPauseQueue(), "par-check");
m_ParCoordinator.StartParCheckJob(pPostInfo);
}
else
{
info("Nothing to par-check for %s", pPostInfo->GetNZBInfo()->GetName());
pPostInfo->GetNZBInfo()->SetParStatus(NZBInfo::psSkipped);
pPostInfo->SetWorking(false);
pPostInfo->SetStage(PostInfo::ptQueued);
}
return;
}
else if (pPostInfo->GetNZBInfo()->GetParStatus() == NZBInfo::psSkipped &&
pPostInfo->GetNZBInfo()->CalcHealth() < pPostInfo->GetNZBInfo()->CalcCriticalHealth(false) &&
pPostInfo->GetNZBInfo()->CalcCriticalHealth(false) < 1000 &&
m_ParCoordinator.FindMainPars(pPostInfo->GetNZBInfo()->GetDestDir(), NULL))
{
warn("Skipping par-check for %s due to health %.1f%% below critical %.1f%%", pPostInfo->GetNZBInfo()->GetName(),
pPostInfo->GetNZBInfo()->CalcHealth() / 10.0, pPostInfo->GetNZBInfo()->CalcCriticalHealth(false) / 10.0);
pPostInfo->GetNZBInfo()->SetParStatus(NZBInfo::psFailure);
return;
}
else if (pPostInfo->GetNZBInfo()->GetParStatus() == NZBInfo::psSkipped &&
pPostInfo->GetNZBInfo()->GetFailedSize() - pPostInfo->GetNZBInfo()->GetParFailedSize() > 0 &&
m_ParCoordinator.FindMainPars(pPostInfo->GetNZBInfo()->GetDestDir(), NULL))
{
info("Collection %s with health %.1f%% needs par-check",
pPostInfo->GetNZBInfo()->GetName(), pPostInfo->GetNZBInfo()->CalcHealth() / 10.0);
pPostInfo->SetRequestParCheck(true);
return;
}
#endif
NZBParameter* pUnpackParameter = pPostInfo->GetNZBInfo()->GetParameters()->Find("*Unpack:", false);
bool bUnpackParam = !(pUnpackParameter && !strcasecmp(pUnpackParameter->GetValue(), "no"));
bool bUnpack = bUnpackParam && pPostInfo->GetNZBInfo()->GetUnpackStatus() == NZBInfo::usNone &&
pPostInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsNone;
bool bParFailed = pPostInfo->GetNZBInfo()->GetParStatus() == NZBInfo::psFailure ||
pPostInfo->GetNZBInfo()->GetParStatus() == NZBInfo::psRepairPossible ||
pPostInfo->GetNZBInfo()->GetParStatus() == NZBInfo::psManual;
bool bCleanup = !bUnpack &&
pPostInfo->GetNZBInfo()->GetCleanupStatus() == NZBInfo::csNone &&
((pPostInfo->GetNZBInfo()->GetParStatus() == NZBInfo::psSuccess &&
pPostInfo->GetNZBInfo()->GetUnpackStatus() != NZBInfo::usFailure &&
pPostInfo->GetNZBInfo()->GetUnpackStatus() != NZBInfo::usSpace &&
pPostInfo->GetNZBInfo()->GetUnpackStatus() != NZBInfo::usPassword) ||
(pPostInfo->GetNZBInfo()->GetUnpackStatus() == NZBInfo::usSuccess &&
pPostInfo->GetNZBInfo()->GetParStatus() != NZBInfo::psFailure)) &&
!Util::EmptyStr(g_pOptions->GetExtCleanupDisk());
bool bMoveInter = !bUnpack &&
pPostInfo->GetNZBInfo()->GetMoveStatus() == NZBInfo::msNone &&
pPostInfo->GetNZBInfo()->GetUnpackStatus() != NZBInfo::usFailure &&
pPostInfo->GetNZBInfo()->GetUnpackStatus() != NZBInfo::usSpace &&
pPostInfo->GetNZBInfo()->GetUnpackStatus() != NZBInfo::usPassword &&
pPostInfo->GetNZBInfo()->GetParStatus() != NZBInfo::psFailure &&
pPostInfo->GetNZBInfo()->GetParStatus() != NZBInfo::psManual &&
pPostInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsNone &&
!Util::EmptyStr(g_pOptions->GetInterDir()) &&
!strncmp(pPostInfo->GetNZBInfo()->GetDestDir(), g_pOptions->GetInterDir(), strlen(g_pOptions->GetInterDir()));
bool bPostScript = true;
if (bUnpack && bParFailed)
{
warn("Skipping unpack for %s due to %s", pPostInfo->GetNZBInfo()->GetName(),
pPostInfo->GetNZBInfo()->GetParStatus() == NZBInfo::psManual ? "required par-repair" : "par-failure");
pPostInfo->GetNZBInfo()->SetUnpackStatus(NZBInfo::usSkipped);
bUnpack = false;
}
if (!bUnpack && !bMoveInter && !bPostScript)
{
pPostInfo->SetStage(PostInfo::ptFinished);
return;
}
pPostInfo->SetProgressLabel(bUnpack ? "Unpacking" : bMoveInter ? "Moving" : "Executing post-process-script");
pPostInfo->SetWorking(true);
pPostInfo->SetStage(bUnpack ? PostInfo::ptUnpacking : bMoveInter ? PostInfo::ptMoving : PostInfo::ptExecutingScript);
pPostInfo->SetFileProgress(0);
pPostInfo->SetStageProgress(0);
pDownloadQueue->Save();
pPostInfo->SetStageTime(time(NULL));
if (bUnpack)
{
UpdatePauseState(g_pOptions->GetUnpackPauseQueue(), "unpack");
UnpackController::StartJob(pPostInfo);
}
else if (bCleanup)
{
UpdatePauseState(g_pOptions->GetUnpackPauseQueue() || g_pOptions->GetScriptPauseQueue(), "cleanup");
CleanupController::StartJob(pPostInfo);
}
else if (bMoveInter)
{
UpdatePauseState(g_pOptions->GetUnpackPauseQueue() || g_pOptions->GetScriptPauseQueue(), "move");
MoveController::StartJob(pPostInfo);
}
else
{
UpdatePauseState(g_pOptions->GetScriptPauseQueue(), "post-process-script");
PostScriptController::StartJob(pPostInfo);
}
}
void PrePostProcessor::JobCompleted(DownloadQueue* pDownloadQueue, PostInfo* pPostInfo)
{
NZBInfo* pNZBInfo = pPostInfo->GetNZBInfo();
if (pPostInfo->GetStartTime() > 0)
{
pNZBInfo->SetPostTotalSec((int)(time(NULL) - pPostInfo->GetStartTime()));
pPostInfo->SetStartTime(0);
}
DeletePostThread(pPostInfo);
pNZBInfo->LeavePostProcess();
if (IsNZBFileCompleted(pNZBInfo, true, false))
{
// Cleaning up queue if par-check was successful or unpack was successful or
// health is 100% (if unpack and par-check were not performed)
// or health is below critical health
bool bCanCleanupQueue =
((pNZBInfo->GetParStatus() == NZBInfo::psSuccess ||
pNZBInfo->GetParStatus() == NZBInfo::psRepairPossible) &&
pNZBInfo->GetUnpackStatus() != NZBInfo::usFailure &&
pNZBInfo->GetUnpackStatus() != NZBInfo::usSpace &&
pNZBInfo->GetUnpackStatus() != NZBInfo::usPassword) ||
(pNZBInfo->GetUnpackStatus() == NZBInfo::usSuccess &&
pNZBInfo->GetParStatus() != NZBInfo::psFailure) ||
(pNZBInfo->GetUnpackStatus() <= NZBInfo::usSkipped &&
pNZBInfo->GetParStatus() != NZBInfo::psFailure &&
pNZBInfo->GetFailedSize() - pNZBInfo->GetParFailedSize() == 0) ||
(pNZBInfo->CalcHealth() < pNZBInfo->CalcCriticalHealth(false) &&
pNZBInfo->CalcCriticalHealth(false) < 1000);
if (g_pOptions->GetParCleanupQueue() && bCanCleanupQueue && !pNZBInfo->GetFileList()->empty())
{
info("Cleaning up download queue for %s", pNZBInfo->GetName());
pNZBInfo->SetParCleanup(true);
pDownloadQueue->EditEntry(pNZBInfo->GetID(), DownloadQueue::eaGroupDelete, 0, NULL);
}
if (pNZBInfo->GetUnpackCleanedUpDisk())
{
pNZBInfo->ClearCompletedFiles();
}
NZBCompleted(pDownloadQueue, pNZBInfo, false);
}
if (pNZBInfo == m_pCurJob)
{
m_pCurJob = NULL;
}
m_iJobCount--;
pDownloadQueue->Save();
}
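// Returns true if the NZB has no files left to download; paused par-files can be ignored and
// optionally at most one deleted file is tolerated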
bool PrePostProcessor::IsNZBFileCompleted(NZBInfo* pNZBInfo, bool bIgnorePausedPars, bool bAllowOnlyOneDeleted)
{
int iDeleted = 0;
for (FileList::iterator it = pNZBInfo->GetFileList()->begin(); it != pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
if (pFileInfo->GetDeleted())
{
iDeleted++;
}
if (((!pFileInfo->GetPaused() || !bIgnorePausedPars || !pFileInfo->GetParFile()) &&
!pFileInfo->GetDeleted()) ||
(bAllowOnlyOneDeleted && iDeleted > 1))
{
return false;
}
}
return true;
}
bool PrePostProcessor::IsNZBFileDownloading(NZBInfo* pNZBInfo)
{
if (pNZBInfo->GetActiveDownloads())
{
return true;
}
for (FileList::iterator it = pNZBInfo->GetFileList()->begin(); it != pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
if (!pFileInfo->GetPaused())
{
return true;
}
}
return false;
}
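// Temporarily pauses or unpauses downloading while a post-processing stage is running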
void PrePostProcessor::UpdatePauseState(bool bNeedPause, const char* szReason)
{
if (bNeedPause && !g_pOptions->GetTempPauseDownload())
{
info("Pausing download before %s", szReason);
}
else if (!bNeedPause && g_pOptions->GetTempPauseDownload())
{
info("Unpausing download after %s", m_szPauseReason);
}
g_pOptions->SetTempPauseDownload(bNeedPause);
m_szPauseReason = szReason;
}
bool PrePostProcessor::EditList(DownloadQueue* pDownloadQueue, IDList* pIDList, DownloadQueue::EEditAction eAction, int iOffset, const char* szText)
{
debug("Edit-command for post-processor received");
switch (eAction)
{
case DownloadQueue::eaPostDelete:
return PostQueueDelete(pDownloadQueue, pIDList);
default:
return false;
}
}
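// Deletes the post-jobs with the given IDs: active par-check, unpack or script threads are stopped,
// queued jobs are completed immediately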
bool PrePostProcessor::PostQueueDelete(DownloadQueue* pDownloadQueue, IDList* pIDList)
{
bool bOK = false;
for (IDList::iterator itID = pIDList->begin(); itID != pIDList->end(); itID++)
{
int iID = *itID;
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
NZBInfo* pNZBInfo = *it;
PostInfo* pPostInfo = pNZBInfo->GetPostInfo();
if (pPostInfo && pNZBInfo->GetID() == iID)
{
if (pPostInfo->GetWorking())
{
info("Deleting active post-job %s", pPostInfo->GetNZBInfo()->GetName());
pPostInfo->SetDeleted(true);
#ifndef DISABLE_PARCHECK
if (PostInfo::ptLoadingPars <= pPostInfo->GetStage() && pPostInfo->GetStage() <= PostInfo::ptRenaming)
{
if (m_ParCoordinator.Cancel())
{
bOK = true;
}
}
else
#endif
if (pPostInfo->GetPostThread())
{
debug("Terminating %s for %s", (pPostInfo->GetStage() == PostInfo::ptUnpacking ? "unpack" : "post-process-script"), pPostInfo->GetNZBInfo()->GetName());
pPostInfo->GetPostThread()->Stop();
bOK = true;
}
else
{
error("Internal error in PrePostProcessor::QueueDelete");
}
}
else
{
info("Deleting queued post-job %s", pPostInfo->GetNZBInfo()->GetName());
JobCompleted(pDownloadQueue, pPostInfo);
bOK = true;
}
break;
}
}
}
return bOK;
}

View File

@@ -0,0 +1,83 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef PREPOSTPROCESSOR_H
#define PREPOSTPROCESSOR_H
#include <deque>
#include "Thread.h"
#include "Observer.h"
#include "DownloadInfo.h"
#include "ParCoordinator.h"
class PrePostProcessor : public Thread
{
private:
class DownloadQueueObserver: public Observer
{
public:
PrePostProcessor* m_pOwner;
virtual void Update(Subject* Caller, void* Aspect) { m_pOwner->DownloadQueueUpdate(Caller, Aspect); }
};
private:
ParCoordinator m_ParCoordinator;
DownloadQueueObserver m_DownloadQueueObserver;
int m_iJobCount;
NZBInfo* m_pCurJob;
const char* m_szPauseReason;
bool IsNZBFileCompleted(NZBInfo* pNZBInfo, bool bIgnorePausedPars, bool bAllowOnlyOneDeleted);
bool IsNZBFileDownloading(NZBInfo* pNZBInfo);
void CheckPostQueue();
void JobCompleted(DownloadQueue* pDownloadQueue, PostInfo* pPostInfo);
void StartJob(DownloadQueue* pDownloadQueue, PostInfo* pPostInfo);
void SaveQueue(DownloadQueue* pDownloadQueue);
void SanitisePostQueue(DownloadQueue* pDownloadQueue);
void CheckDiskSpace();
void UpdatePauseState(bool bNeedPause, const char* szReason);
void NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void NZBDeleted(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void NZBCompleted(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo, bool bSaveQueue);
bool PostQueueDelete(DownloadQueue* pDownloadQueue, IDList* pIDList);
void DeletePostThread(PostInfo* pPostInfo);
NZBInfo* GetNextJob(DownloadQueue* pDownloadQueue);
void DownloadQueueUpdate(Subject* Caller, void* Aspect);
void DeleteCleanup(NZBInfo* pNZBInfo);
public:
PrePostProcessor();
virtual ~PrePostProcessor();
virtual void Run();
virtual void Stop();
bool HasMoreJobs() { return m_iJobCount > 0; }
int GetJobCount() { return m_iJobCount; }
bool EditList(DownloadQueue* pDownloadQueue, IDList* pIDList, DownloadQueue::EEditAction eAction, int iOffset, const char* szText);
void NZBAdded(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void NZBDownloaded(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
};
#endif

View File

@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -45,9 +45,9 @@
#include "Log.h"
#include "Util.h"
#include "ParCoordinator.h"
#include "Options.h"
extern Options* g_pOptions;
extern DownloadQueueHolder* g_pDownloadQueueHolder;
void UnpackController::FileList::Clear()
{
@@ -85,8 +85,10 @@ void UnpackController::StartJob(PostInfo* pPostInfo)
void UnpackController::Run()
{
time_t tStart = time(NULL);
// the locking is needed for accessing the members of NZBInfo
g_pDownloadQueueHolder->LockQueue();
DownloadQueue::Lock();
strncpy(m_szDestDir, m_pPostInfo->GetNZBInfo()->GetDestDir(), 1024);
m_szDestDir[1024-1] = '\0';
@@ -98,7 +100,7 @@ void UnpackController::Run()
m_szPassword[0] = '\0';
m_szFinalDir[0] = '\0';
m_bFinalDirCreated = false;
NZBParameter* pParameter = m_pPostInfo->GetNZBInfo()->GetParameters()->Find("*Unpack:", false);
bool bUnpack = !(pParameter && !strcasecmp(pParameter->GetValue(), "no"));
@@ -109,7 +111,7 @@ void UnpackController::Run()
m_szPassword[1024-1] = '\0';
}
g_pDownloadQueueHolder->UnlockQueue();
DownloadQueue::Unlock();
snprintf(m_szInfoName, 1024, "unpack for %s", m_szName);
m_szInfoName[1024-1] = '\0';
@@ -127,20 +129,24 @@ void UnpackController::Run()
CheckArchiveFiles(bScanNonStdFiles);
}
if (bUnpack && (m_bHasRarFiles || m_bHasNonStdRarFiles || m_bHasSevenZipFiles || m_bHasSevenZipMultiFiles))
SetInfoName(m_szInfoName);
SetWorkingDir(m_szDestDir);
bool bHasFiles = m_bHasRarFiles || m_bHasNonStdRarFiles || m_bHasSevenZipFiles || m_bHasSevenZipMultiFiles || m_bHasSplittedFiles;
if (bUnpack && bHasFiles)
{
SetInfoName(m_szInfoName);
SetWorkingDir(m_szDestDir);
m_bUnpackOK = true;
m_bUnpackStartError = false;
m_bUnpackSpaceError = false;
m_bUnpackPasswordError4 = false;
m_bUnpackPasswordError5 = false;
m_bAutoTerminated = false;
PrintMessage(Message::mkInfo, "Unpacking %s", m_szName);
CreateUnpackDir();
m_bUnpackOK = true;
m_bUnpackStartError = false;
m_bUnpackSpaceError = false;
m_bUnpackPasswordError = false;
if (m_bHasRarFiles || m_bHasNonStdRarFiles)
{
ExecuteUnrar();
@@ -156,7 +162,14 @@ void UnpackController::Run()
ExecuteSevenZip(true);
}
if (m_bHasSplittedFiles && m_bUnpackOK)
{
JoinSplittedFiles();
}
Completed();
m_JoinedFiles.Clear();
}
else
{
@@ -166,7 +179,7 @@ void UnpackController::Run()
if (bUnpack && m_pPostInfo->GetNZBInfo()->GetParStatus() <= NZBInfo::psSkipped &&
m_pPostInfo->GetNZBInfo()->GetRenameStatus() <= NZBInfo::rsSkipped && m_bHasParFiles)
{
RequestParCheck();
RequestParCheck(false);
}
else
#endif
@@ -176,6 +189,9 @@ void UnpackController::Run()
}
}
int iUnpackSec = (int)(time(NULL) - tStart);
m_pPostInfo->GetNZBInfo()->SetUnpackSec(m_pPostInfo->GetNZBInfo()->GetUnpackSec() + iUnpackSec);
m_pPostInfo->SetWorking(false);
}
@@ -193,11 +209,17 @@ void UnpackController::ExecuteUnrar()
if (strlen(m_szPassword) > 0)
{
snprintf(szPasswordParam, 1024, "-p%s", m_szPassword);
szPasswordParam[1024-1] = '\0';
szArgs[3] = szPasswordParam;
}
szArgs[4] = "-o+";
szArgs[5] = m_bHasNonStdRarFiles ? "*.*" : "*.rar";
szArgs[6] = m_szUnpackDir;
char szUnpackDirParam[1024];
snprintf(szUnpackDirParam, 1024, "%s%c", m_szUnpackDir, PATH_SEPARATOR);
szUnpackDirParam[1024-1] = '\0';
szArgs[6] = szUnpackDirParam;
szArgs[7] = NULL;
SetArgs(szArgs, false);
@@ -206,7 +228,7 @@ void UnpackController::ExecuteUnrar()
m_bAllOKMessageReceived = false;
m_eUnpacker = upUnrar;
SetProgressLabel("");
int iExitCode = Execute();
SetLogPrefix(NULL);
@@ -215,7 +237,7 @@ void UnpackController::ExecuteUnrar()
m_bUnpackOK = iExitCode == 0 && m_bAllOKMessageReceived && !GetTerminated();
m_bUnpackStartError = iExitCode == -1;
m_bUnpackSpaceError = iExitCode == 5;
m_bUnpackPasswordError = iExitCode == 11; // only for rar5-archives
m_bUnpackPasswordError5 |= iExitCode == 11; // only for rar5-archives
if (!m_bUnpackOK && iExitCode > 0)
{
@@ -240,11 +262,13 @@ void UnpackController::ExecuteSevenZip(bool bMultiVolumes)
if (strlen(m_szPassword) > 0)
{
snprintf(szPasswordParam, 1024, "-p%s", m_szPassword);
szPasswordParam[1024-1] = '\0';
szArgs[3] = szPasswordParam;
}
char szUnpackDirParam[1024];
snprintf(szUnpackDirParam, 1024, "-o%s", m_szUnpackDir);
szUnpackDirParam[1024-1] = '\0';
szArgs[4] = szUnpackDirParam;
szArgs[5] = bMultiVolumes ? "*.7z.001" : "*.7z";
@@ -272,6 +296,179 @@ void UnpackController::ExecuteSevenZip(bool bMultiVolumes)
}
}
void UnpackController::JoinSplittedFiles()
{
SetLogPrefix("Join");
SetProgressLabel("");
m_pPostInfo->SetStageProgress(0);
// determine groups
FileList groups;
RegEx regExSplitExt(".*\\.[a-z,0-9]{3}\\.001$");
DirBrowser dir(m_szDestDir);
while (const char* filename = dir.Next())
{
char szFullFilename[1024];
snprintf(szFullFilename, 1024, "%s%c%s", m_szDestDir, PATH_SEPARATOR, filename);
szFullFilename[1024-1] = '\0';
if (strcmp(filename, ".") && strcmp(filename, "..") && !Util::DirectoryExists(szFullFilename))
{
if (regExSplitExt.Match(filename) && !FileHasRarSignature(szFullFilename))
{
if (!JoinFile(filename))
{
m_bUnpackOK = false;
break;
}
}
}
}
SetLogPrefix(NULL);
SetProgressLabel("");
}
bool UnpackController::JoinFile(const char* szFragBaseName)
{
char szDestBaseName[1024];
strncpy(szDestBaseName, szFragBaseName, 1024);
szDestBaseName[1024-1] = '\0';
// trim extension
char* szExtension = strrchr(szDestBaseName, '.');
*szExtension = '\0';
char szFullFilename[1024];
snprintf(szFullFilename, 1024, "%s%c%s", m_szDestDir, PATH_SEPARATOR, szFragBaseName);
szFullFilename[1024-1] = '\0';
long long lFirstSegmentSize = Util::FileSize(szFullFilename);
long long lDifSegmentSize = 0;
// Validate the joinable file:
// - fragments have continuous numbers (no holes);
// - fragments have the same size (except for the last fragment);
// - the last fragment must be smaller than the other fragments;
// if it has the same size, it is probably not the last one and fragments are missing.
RegEx regExSplitExt(".*\\.[a-z,0-9]{3}\\.[0-9]{3}$");
int iCount = 0;
int iMin = -1;
int iMax = -1;
int iDifSizeCount = 0;
int iDifSizeMin = 999999;
DirBrowser dir(m_szDestDir);
while (const char* filename = dir.Next())
{
snprintf(szFullFilename, 1024, "%s%c%s", m_szDestDir, PATH_SEPARATOR, filename);
szFullFilename[1024-1] = '\0';
if (strcmp(filename, ".") && strcmp(filename, "..") && !Util::DirectoryExists(szFullFilename) &&
regExSplitExt.Match(filename))
{
const char* szSegExt = strrchr(filename, '.');
int iSegNum = atoi(szSegExt + 1);
iCount++;
iMin = iSegNum < iMin || iMin == -1 ? iSegNum : iMin;
iMax = iSegNum > iMax ? iSegNum : iMax;
long long lSegmentSize = Util::FileSize(szFullFilename);
if (lSegmentSize != lFirstSegmentSize)
{
iDifSizeCount++;
iDifSizeMin = iSegNum < iDifSizeMin ? iSegNum : iDifSizeMin;
lDifSegmentSize = lSegmentSize;
}
}
}
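// fragment numbering may start at ".000" or ".001"; adjust the count so it can be compared with the highest fragment number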
int iCorrectedCount = iCount - (iMin == 0 ? 1 : 0);
if ((iMin > 1) || iCorrectedCount != iMax ||
((iDifSizeMin != iCorrectedCount || iDifSizeMin > iMax) &&
m_pPostInfo->GetNZBInfo()->GetParStatus() != NZBInfo::psSuccess))
{
PrintMessage(Message::mkWarning, "Could not join splitted file %s: missing fragments detected", szDestBaseName);
return false;
}
// all fragments are present, join them now
PrintMessage(Message::mkInfo, "Joining split file %s", szDestBaseName);
m_pPostInfo->SetStageProgress(0);
char szErrBuf[256];
char szDestFilename[1024];
snprintf(szDestFilename, 1024, "%s%c%s", m_szUnpackDir, PATH_SEPARATOR, szDestBaseName);
szDestFilename[1024-1] = '\0';
FILE* pOutFile = fopen(szDestFilename, FOPEN_WBP);
if (!pOutFile)
{
error("Could not create file %s: %s", szDestFilename, Util::GetLastErrorMessage(szErrBuf, sizeof(szErrBuf)));
return false;
}
if (g_pOptions->GetWriteBuffer() > 0)
{
setvbuf(pOutFile, NULL, _IOFBF, g_pOptions->GetWriteBuffer() * 1024);
}
long long lTotalSize = lFirstSegmentSize * (iCount - 1) + lDifSegmentSize;
long long lWritten = 0;
static const int BUFFER_SIZE = 1024 * 50;
char* buffer = (char*)malloc(BUFFER_SIZE);
bool bOK = true;
for (int i = iMin; i <= iMax; i++)
{
PrintMessage(Message::mkInfo, "Joining from %s.%.3i", szDestBaseName, i);
char szMessage[1024];
snprintf(szMessage, 1024, "Joining from %s.%.3i", szDestBaseName, i);
szMessage[1024-1] = '\0';
SetProgressLabel(szMessage);
char szFragFilename[1024];
snprintf(szFragFilename, 1024, "%s%c%s.%.3i", m_szDestDir, PATH_SEPARATOR, szDestBaseName, i);
szFragFilename[1024-1] = '\0';
if (!Util::FileExists(szFragFilename))
{
break;
}
FILE* pInFile = fopen(szFragFilename, FOPEN_RB);
if (pInFile)
{
int cnt = BUFFER_SIZE;
while (cnt == BUFFER_SIZE)
{
cnt = (int)fread(buffer, 1, BUFFER_SIZE, pInFile);
fwrite(buffer, 1, cnt, pOutFile);
lWritten += cnt;
m_pPostInfo->SetStageProgress(int(lWritten * 1000 / lTotalSize));
}
fclose(pInFile);
char szFragFilename[1024];
snprintf(szFragFilename, 1024, "%s.%.3i", szDestBaseName, i);
szFragFilename[1024-1] = '\0';
m_JoinedFiles.push_back(strdup(szFragFilename));
}
else
{
error("Could not open file %s", szFragFilename);
bOK = false;
break;
}
}
fclose(pOutFile);
free(buffer);
return bOK;
}
void UnpackController::Completed()
{
bool bCleanupSuccess = Cleanup();
@@ -291,11 +488,14 @@ void UnpackController::Completed()
else
{
#ifndef DISABLE_PARCHECK
if (!m_bUnpackOK && m_pPostInfo->GetNZBInfo()->GetParStatus() <= NZBInfo::psSkipped &&
!m_bUnpackStartError && !m_bUnpackSpaceError && !m_bUnpackPasswordError &&
!GetTerminated() && m_bHasParFiles)
if (!m_bUnpackOK &&
(m_pPostInfo->GetNZBInfo()->GetParStatus() <= NZBInfo::psSkipped ||
!m_pPostInfo->GetNZBInfo()->GetParFull()) &&
!m_bUnpackStartError && !m_bUnpackSpaceError &&
(!m_bUnpackPasswordError5 || m_bUnpackPasswordError4) &&
(!GetTerminated() || m_bAutoTerminated) && m_bHasParFiles)
{
RequestParCheck();
RequestParCheck(true);
}
else
#endif
@@ -303,7 +503,8 @@ void UnpackController::Completed()
PrintMessage(Message::mkError, "%s failed", m_szInfoNameUp);
m_pPostInfo->GetNZBInfo()->SetUnpackStatus(
m_bUnpackSpaceError ? NZBInfo::usSpace :
m_bUnpackPasswordError ? NZBInfo::usPassword :
m_bUnpackPasswordError5 || (m_bUnpackPasswordError4 &&
m_pPostInfo->GetNZBInfo()->GetParStatus() == NZBInfo::psSuccess) ? NZBInfo::usPassword :
NZBInfo::usFailure);
m_pPostInfo->SetStage(PostInfo::ptQueued);
}
@@ -311,10 +512,11 @@ void UnpackController::Completed()
}
#ifndef DISABLE_PARCHECK
void UnpackController::RequestParCheck()
void UnpackController::RequestParCheck(bool bForceRepair)
{
PrintMessage(Message::mkInfo, "%s requested par-check/repair", m_szInfoNameUp);
m_pPostInfo->SetRequestParCheck(true);
m_pPostInfo->SetForceRepair(bForceRepair);
m_pPostInfo->SetStage(PostInfo::ptFinished);
}
#endif
@@ -350,12 +552,14 @@ void UnpackController::CheckArchiveFiles(bool bScanNonStdFiles)
m_bHasNonStdRarFiles = false;
m_bHasSevenZipFiles = false;
m_bHasSevenZipMultiFiles = false;
m_bHasSplittedFiles = false;
RegEx regExRar(".*\\.rar$");
RegEx regExRarMultiSeq(".*\\.(r|s)[0-9][0-9]$");
RegEx regExSevenZip(".*\\.7z$");
RegEx regExSevenZipMulti(".*\\.7z\\.[0-9]+$");
RegEx regExNumExt(".*\\.[0-9]+$");
RegEx regExSplitExt(".*\\.[a-z,0-9]{3}\\.[0-9]{3}$");
DirBrowser dir(m_szDestDir);
while (const char* filename = dir.Next())
@@ -366,6 +570,9 @@ void UnpackController::CheckArchiveFiles(bool bScanNonStdFiles)
if (strcmp(filename, ".") && strcmp(filename, "..") && !Util::DirectoryExists(szFullFilename))
{
const char* szExt = strchr(filename, '.');
int iExtNum = szExt ? atoi(szExt + 1) : -1;
if (regExRar.Match(filename))
{
m_bHasRarFiles = true;
@@ -378,12 +585,16 @@ void UnpackController::CheckArchiveFiles(bool bScanNonStdFiles)
{
m_bHasSevenZipMultiFiles = true;
}
else if (bScanNonStdFiles && !m_bHasNonStdRarFiles &&
else if (bScanNonStdFiles && !m_bHasNonStdRarFiles && iExtNum > 1 &&
!regExRarMultiSeq.Match(filename) && regExNumExt.Match(filename) &&
FileHasRarSignature(szFullFilename))
{
m_bHasNonStdRarFiles = true;
}
else if (regExSplitExt.Match(filename) && (iExtNum == 0 || iExtNum == 1))
{
m_bHasSplittedFiles = true;
}
}
}
}
@@ -397,7 +608,7 @@ bool UnpackController::FileHasRarSignature(const char* szFilename)
int cnt = 0;
FILE* infile;
infile = fopen(szFilename, "rb");
infile = fopen(szFilename, FOPEN_RB);
if (infile)
{
cnt = (int)fread(fileSignature, 1, sizeof(fileSignature), infile);
@@ -444,7 +655,8 @@ bool UnpackController::Cleanup()
if (!Util::MoveFile(szSrcFile, szDstFile))
{
PrintMessage(Message::mkError, "Could not move file %s to %s", szSrcFile, szDstFile);
char szErrBuf[256];
PrintMessage(Message::mkError, "Could not move file %s to %s: %s", szSrcFile, szDstFile, Util::GetLastErrorMessage(szErrBuf, sizeof(szErrBuf)));
bOK = false;
}
@@ -453,9 +665,10 @@ bool UnpackController::Cleanup()
}
}
if (bOK && !Util::DeleteDirectoryWithContent(m_szUnpackDir))
char szErrBuf[256];
if (bOK && !Util::DeleteDirectoryWithContent(m_szUnpackDir, szErrBuf, sizeof(szErrBuf)))
{
PrintMessage(Message::mkError, "Could not remove temporary directory %s", m_szUnpackDir);
PrintMessage(Message::mkError, "Could not delete temporary directory %s: %s", m_szUnpackDir, szErrBuf);
}
if (!m_bUnpackOK && m_bFinalDirCreated)
@@ -471,6 +684,7 @@ bool UnpackController::Cleanup()
RegEx regExRarMultiSeq(".*\\.[r-z][0-9][0-9]$");
RegEx regExSevenZip(".*\\.7z$|.*\\.7z\\.[0-9]+$");
RegEx regExNumExt(".*\\.[0-9]+$");
RegEx regExSplitExt(".*\\.[a-z,0-9]{3}\\.[0-9]{3}$");
DirBrowser dir(m_szDestDir);
while (const char* filename = dir.Next())
@@ -483,14 +697,16 @@ bool UnpackController::Cleanup()
!Util::DirectoryExists(szFullFilename) &&
(m_bInterDir || !extractedFiles.Exists(filename)) &&
(regExRar.Match(filename) || regExSevenZip.Match(filename) ||
(regExRarMultiSeq.Match(filename) && FileHasRarSignature(szFullFilename)) ||
(m_bHasNonStdRarFiles && regExNumExt.Match(filename) && FileHasRarSignature(szFullFilename))))
(regExRarMultiSeq.Match(filename) && FileHasRarSignature(szFullFilename)) ||
(m_bHasNonStdRarFiles && regExNumExt.Match(filename) && FileHasRarSignature(szFullFilename)) ||
(m_bHasSplittedFiles && regExSplitExt.Match(filename) && m_JoinedFiles.Exists(filename))))
{
PrintMessage(Message::mkInfo, "Deleting file %s", filename);
if (remove(szFullFilename) != 0)
{
PrintMessage(Message::mkError, "Could not delete file %s", szFullFilename);
char szErrBuf[256];
PrintMessage(Message::mkError, "Could not delete file %s: %s", szFullFilename, Util::GetLastErrorMessage(szErrBuf, sizeof(szErrBuf)));
}
}
}
@@ -604,6 +820,35 @@ void UnpackController::AddMessage(Message::EKind eKind, const char* szText)
SetProgressLabel(szText + 7);
}
if (m_eUnpacker == upUnrar &&
(!strncmp(szText, "Unrar: Checksum error in the encrypted file", 42) ||
!strncmp(szText, "Unrar: CRC failed in the encrypted file", 39)))
{
m_bUnpackPasswordError4 = true;
}
if (m_eUnpacker == upUnrar && !strncmp(szText, "Unrar: The specified password is incorrect.'", 43))
{
m_bUnpackPasswordError5 = true;
}
int iLen = strlen(szText);
if (m_eUnpacker == upUnrar && !IsStopped() && (m_bUnpackPasswordError4 || m_bUnpackPasswordError5 ||
strstr(szText, " : packed data CRC failed in volume") ||
strstr(szText, " : packed data checksum error in volume") ||
(iLen > 13 && !strncmp(szText + iLen - 13, " - CRC failed", 13)) ||
(iLen > 18 && !strncmp(szText + iLen - 18, " - checksum failed", 18)) ||
!strncmp(szText, "Unrar: WARNING: You need to start extraction from a previous volume", 67)))
{
char szMsgText[1024];
snprintf(szMsgText, 1024, "Cancelling %s due to errors", m_szInfoName);
szMsgText[1024-1] = '\0';
ScriptController::AddMessage(Message::mkWarning, szMsgText);
m_pPostInfo->AppendMessage(Message::mkWarning, szMsgText);
m_bAutoTerminated = true;
Stop();
}
if ((m_eUnpacker == upUnrar && !strncmp(szText, "Unrar: All OK", 13)) ||
(m_eUnpacker == upSevenZip && !strncmp(szText, "7-Zip: Everything is Ok", 23)))
{
@@ -620,9 +865,9 @@ void UnpackController::Stop()
void UnpackController::SetProgressLabel(const char* szProgressLabel)
{
g_pDownloadQueueHolder->LockQueue();
DownloadQueue::Lock();
m_pPostInfo->SetProgressLabel(szProgressLabel);
g_pDownloadQueueHolder->UnlockQueue();
DownloadQueue::Unlock();
}
@@ -640,7 +885,7 @@ void MoveController::StartJob(PostInfo* pPostInfo)
void MoveController::Run()
{
// the locking is needed for accessing the members of NZBInfo
g_pDownloadQueueHolder->LockQueue();
DownloadQueue::Lock();
char szNZBName[1024];
strncpy(szNZBName, m_pPostInfo->GetNZBInfo()->GetName(), 1024);
@@ -657,7 +902,7 @@ void MoveController::Run()
m_pPostInfo->GetNZBInfo()->BuildFinalDirName(m_szDestDir, 1024);
m_szDestDir[1024-1] = '\0';
g_pDownloadQueueHolder->UnlockQueue();
DownloadQueue::Unlock();
info("Moving completed files for %s", szNZBName);
@@ -669,10 +914,10 @@ void MoveController::Run()
{
info("%s successful", szInfoName);
// save new dest dir
g_pDownloadQueueHolder->LockQueue();
DownloadQueue::Lock();
m_pPostInfo->GetNZBInfo()->SetDestDir(m_szDestDir);
m_pPostInfo->GetNZBInfo()->SetMoveStatus(NZBInfo::msSuccess);
g_pDownloadQueueHolder->UnlockQueue();
DownloadQueue::Unlock();
}
else
{
@@ -710,15 +955,15 @@ bool MoveController::MoveFiles()
PrintMessage(Message::mkInfo, "Moving file %s to %s", Util::BaseFileName(szSrcFile), m_szDestDir);
if (!Util::MoveFile(szSrcFile, szDstFile))
{
PrintMessage(Message::mkError, "Could not move file %s to %s! Errcode: %i", szSrcFile, szDstFile, errno);
PrintMessage(Message::mkError, "Could not move file %s to %s: %s", szSrcFile, szDstFile, Util::GetLastErrorMessage(szErrBuf, sizeof(szErrBuf)));
bOK = false;
}
}
}
if (bOK && !Util::DeleteDirectoryWithContent(m_szInterDir))
if (bOK && !Util::DeleteDirectoryWithContent(m_szInterDir, szErrBuf, sizeof(szErrBuf)))
{
PrintMessage(Message::mkError, "Could not remove intermediate directory %s", m_szInterDir);
PrintMessage(Message::mkError, "Could not delete intermediate directory %s: %s", m_szInterDir, szErrBuf);
}
return bOK;
@@ -739,7 +984,7 @@ void CleanupController::StartJob(PostInfo* pPostInfo)
void CleanupController::Run()
{
// the locking is needed for accessing the members of NZBInfo
g_pDownloadQueueHolder->LockQueue();
DownloadQueue::Lock();
char szNZBName[1024];
strncpy(szNZBName, m_pPostInfo->GetNZBInfo()->GetName(), 1024);
@@ -765,7 +1010,7 @@ void CleanupController::Run()
m_szFinalDir[0] = '\0';
}
g_pDownloadQueueHolder->UnlockQueue();
DownloadQueue::Unlock();
info("Cleaning up %s", szNZBName);
@@ -806,36 +1051,11 @@ bool CleanupController::Cleanup(const char* szDestDir, bool *bDeleted)
*bDeleted = false;
bool bOK = true;
ExtList extList;
// split ExtCleanupDisk into tokens and create a list
char* szExtCleanupDisk = strdup(g_pOptions->GetExtCleanupDisk());
char* saveptr;
char* szExt = strtok_r(szExtCleanupDisk, ",; ", &saveptr);
while (szExt)
{
extList.push_back(szExt);
szExt = strtok_r(NULL, ",; ", &saveptr);
}
DirBrowser dir(szDestDir);
while (const char* filename = dir.Next())
{
// check file extension
int iFilenameLen = strlen(filename);
bool bDeleteIt = false;
for (ExtList::iterator it = extList.begin(); it != extList.end(); it++)
{
const char* szExt = *it;
int iExtLen = strlen(szExt);
if (iFilenameLen >= iExtLen && !strcasecmp(szExt, filename + iFilenameLen - iExtLen))
{
bDeleteIt = true;
break;
}
}
bool bDeleteIt = Util::MatchFileExt(filename, g_pOptions->GetExtCleanupDisk(), ",;");
if (bDeleteIt)
{
@@ -846,7 +1066,8 @@ bool CleanupController::Cleanup(const char* szDestDir, bool *bDeleted)
PrintMessage(Message::mkInfo, "Deleting file %s", filename);
if (remove(szFullFilename) != 0)
{
PrintMessage(Message::mkError, "Could not delete file %s! Errcode: %i", szFullFilename, errno);
char szErrBuf[256];
PrintMessage(Message::mkError, "Could not delete file %s: %s", szFullFilename, Util::GetLastErrorMessage(szErrBuf, sizeof(szErrBuf)));
bOK = false;
}
@@ -854,7 +1075,5 @@ bool CleanupController::Cleanup(const char* szDestDir, bool *bDeleted)
}
}
free(szExtCleanupDisk);
return bOK;
}

View File

@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2013-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -31,7 +31,7 @@
#include "Log.h"
#include "Thread.h"
#include "DownloadInfo.h"
#include "ScriptController.h"
#include "Script.h"
class UnpackController : public Thread, public ScriptController
{
@@ -67,26 +67,32 @@ private:
bool m_bHasNonStdRarFiles;
bool m_bHasSevenZipFiles;
bool m_bHasSevenZipMultiFiles;
bool m_bHasSplittedFiles;
bool m_bUnpackOK;
bool m_bUnpackStartError;
bool m_bUnpackSpaceError;
bool m_bUnpackPasswordError;
bool m_bUnpackPasswordError4;
bool m_bUnpackPasswordError5;
bool m_bCleanedUpDisk;
bool m_bAutoTerminated;
EUnpacker m_eUnpacker;
bool m_bFinalDirCreated;
FileList m_JoinedFiles;
protected:
virtual bool ReadLine(char* szBuf, int iBufSize, FILE* pStream);
virtual void AddMessage(Message::EKind eKind, const char* szText);
void ExecuteUnrar();
void ExecuteSevenZip(bool bMultiVolumes);
void JoinSplittedFiles();
bool JoinFile(const char* szFragBaseName);
void Completed();
void CreateUnpackDir();
bool Cleanup();
void CheckArchiveFiles(bool bScanNonStdFiles);
void SetProgressLabel(const char* szProgressLabel);
#ifndef DISABLE_PARCHECK
void RequestParCheck();
void RequestParCheck(bool bForceRepair);
#endif
bool FileHasRarSignature(const char* szFilename);
@@ -119,8 +125,6 @@ private:
bool Cleanup(const char* szDestDir, bool *bDeleted);
typedef std::deque<char*> ExtList;
public:
virtual void Run();
static void StartJob(PostInfo* pPostInfo);

2886
daemon/queue/DiskState.cpp Normal file
View File

File diff suppressed because it is too large

daemon/queue/DiskState.h (new file, 97 lines)
@@ -0,0 +1,97 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef DISKSTATE_H
#define DISKSTATE_H
#include "DownloadInfo.h"
#include "FeedInfo.h"
#include "NewsServer.h"
#include "StatMeter.h"
class DiskState
{
private:
int fscanf(FILE* infile, const char* Format, ...);
int ParseFormatVersion(const char* szFormatSignature);
bool SaveFileInfo(FileInfo* pFileInfo, const char* szFilename);
bool LoadFileInfo(FileInfo* pFileInfo, const char* szFilename, bool bFileSummary, bool bArticles);
void SaveNZBQueue(DownloadQueue* pDownloadQueue, FILE* outfile);
bool LoadNZBList(NZBList* pNZBList, Servers* pServers, FILE* infile, int iFormatVersion);
void SaveNZBInfo(NZBInfo* pNZBInfo, FILE* outfile);
bool LoadNZBInfo(NZBInfo* pNZBInfo, Servers* pServers, FILE* infile, int iFormatVersion);
void SavePostQueue(DownloadQueue* pDownloadQueue, FILE* outfile);
void SaveDupInfo(DupInfo* pDupInfo, FILE* outfile);
bool LoadDupInfo(DupInfo* pDupInfo, FILE* infile, int iFormatVersion);
void SaveHistory(DownloadQueue* pDownloadQueue, FILE* outfile);
bool LoadHistory(DownloadQueue* pDownloadQueue, NZBList* pNZBList, Servers* pServers, FILE* infile, int iFormatVersion);
NZBInfo* FindNZBInfo(DownloadQueue* pDownloadQueue, int iID);
bool SaveFeedStatus(Feeds* pFeeds, FILE* outfile);
bool LoadFeedStatus(Feeds* pFeeds, FILE* infile, int iFormatVersion);
bool SaveFeedHistory(FeedHistory* pFeedHistory, FILE* outfile);
bool LoadFeedHistory(FeedHistory* pFeedHistory, FILE* infile, int iFormatVersion);
bool SaveServerInfo(Servers* pServers, FILE* outfile);
bool LoadServerInfo(Servers* pServers, FILE* infile, int iFormatVersion, bool* pPerfectMatch);
bool SaveVolumeStat(ServerVolumes* pServerVolumes, FILE* outfile);
bool LoadVolumeStat(Servers* pServers, ServerVolumes* pServerVolumes, FILE* infile, int iFormatVersion);
void CalcFileStats(DownloadQueue* pDownloadQueue, int iFormatVersion);
void CalcNZBFileStats(NZBInfo* pNZBInfo, int iFormatVersion);
bool LoadAllFileStates(DownloadQueue* pDownloadQueue, Servers* pServers);
void SaveServerStats(ServerStatList* pServerStatList, FILE* outfile);
bool LoadServerStats(ServerStatList* pServerStatList, Servers* pServers, FILE* infile);
// backward compatibility functions (conversions from older formats)
bool LoadPostQueue12(DownloadQueue* pDownloadQueue, NZBList* pNZBList, FILE* infile, int iFormatVersion);
bool LoadPostQueue5(DownloadQueue* pDownloadQueue, NZBList* pNZBList);
bool LoadUrlQueue12(DownloadQueue* pDownloadQueue, FILE* infile, int iFormatVersion);
bool LoadUrlInfo12(NZBInfo* pNZBInfo, FILE* infile, int iFormatVersion);
int FindNZBInfoIndex(NZBList* pNZBList, NZBInfo* pNZBInfo);
void ConvertDupeKey(char* buf, int bufsize);
bool LoadFileQueue12(NZBList* pNZBList, NZBList* pSortList, FILE* infile, int iFormatVersion);
void CompleteNZBList12(DownloadQueue* pDownloadQueue, NZBList* pNZBList, int iFormatVersion);
void CompleteDupList12(DownloadQueue* pDownloadQueue, int iFormatVersion);
void CalcCriticalHealth(NZBList* pNZBList);
public:
bool DownloadQueueExists();
bool SaveDownloadQueue(DownloadQueue* pDownloadQueue);
bool LoadDownloadQueue(DownloadQueue* pDownloadQueue, Servers* pServers);
bool SaveFile(FileInfo* pFileInfo);
bool SaveFileState(FileInfo* pFileInfo, bool bCompleted);
bool LoadFileState(FileInfo* pFileInfo, Servers* pServers, bool bCompleted);
bool LoadArticles(FileInfo* pFileInfo);
void DiscardDownloadQueue();
bool DiscardFile(FileInfo* pFileInfo, bool bDeleteData, bool bDeletePartialState, bool bDeleteCompletedState);
void DiscardFiles(NZBInfo* pNZBInfo);
bool SaveFeeds(Feeds* pFeeds, FeedHistory* pFeedHistory);
bool LoadFeeds(Feeds* pFeeds, FeedHistory* pFeedHistory);
bool SaveStats(Servers* pServers, ServerVolumes* pServerVolumes);
bool LoadStats(Servers* pServers, ServerVolumes* pServerVolumes, bool* pPerfectMatch);
void CleanupTempDir(DownloadQueue* pDownloadQueue);
void WriteCacheFlag();
void DeleteCacheFlag();
};
#endif
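
DiskState concentrates all on-disk persistence of the queue, history, feeds and per-server statistics in one class. A minimal usage sketch follows, assuming the headers above are on the include path and the DiskState, DownloadQueue and Servers objects are created by the caller:

#include "DiskState.h"

// Illustrative only: restore a previously saved queue on startup.
void RestoreQueue(DiskState* pDiskState, DownloadQueue* pDownloadQueue, Servers* pServers)
{
    if (pDiskState->DownloadQueueExists())
    {
        // restores the queue and history written by a previous run
        pDiskState->LoadDownloadQueue(pDownloadQueue, pServers);
    }
}

// Illustrative only: persist the current state (for example before shutdown).
void PersistQueue(DiskState* pDiskState, DownloadQueue* pDownloadQueue)
{
    pDiskState->SaveDownloadQueue(pDownloadQueue);
}
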

File diff suppressed because it is too large.


@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -31,11 +31,48 @@
#include <deque>
#include <time.h>
#include "Observer.h"
#include "Log.h"
#include "Thread.h"
class NZBInfo;
class DownloadQueue;
class PostInfo;
class ServerStat
{
private:
int m_iServerID;
int m_iSuccessArticles;
int m_iFailedArticles;
public:
ServerStat(int iServerID);
int GetServerID() { return m_iServerID; }
int GetSuccessArticles() { return m_iSuccessArticles; }
void SetSuccessArticles(int iSuccessArticles) { m_iSuccessArticles = iSuccessArticles; }
int GetFailedArticles() { return m_iFailedArticles; }
void SetFailedArticles(int iFailedArticles) { m_iFailedArticles = iFailedArticles; }
};
typedef std::vector<ServerStat*> ServerStatListBase;
class ServerStatList : public ServerStatListBase
{
public:
enum EStatOperation
{
soSet,
soAdd,
soSubtract
};
public:
~ServerStatList();
void StatOp(int iServerID, int iSuccessArticles, int iFailedArticles, EStatOperation eStatOperation);
void ListOp(ServerStatList* pServerStats, EStatOperation eStatOperation);
void Clear();
};
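
The per-server counters are now updated through a single StatOp call whose EStatOperation argument selects set, add or subtract, and whole lists can be merged with ListOp. A hedged sketch of the intended call pattern, with the FileInfo/NZBInfo objects and iServerID assumed to come from the caller:

#include "DownloadInfo.h"

// Illustrative only: count one finished article for the server it was downloaded from.
void RecordArticleResult(FileInfo* pFileInfo, int iServerID, bool bSuccess)
{
    pFileInfo->GetServerStats()->StatOp(iServerID,
        bSuccess ? 1 : 0, bSuccess ? 0 : 1, ServerStatList::soAdd);
}

// Illustrative only: roll the per-file counters up into the owning nzb.
void RollUpServerStats(NZBInfo* pNZBInfo, FileInfo* pFileInfo)
{
    pNZBInfo->GetServerStats()->ListOp(pFileInfo->GetServerStats(), ServerStatList::soAdd);
}
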
class ArticleInfo
{
@@ -52,8 +89,12 @@ private:
int m_iPartNumber;
char* m_szMessageID;
int m_iSize;
char* m_pSegmentContent;
long long m_iSegmentOffset;
int m_iSegmentSize;
EStatus m_eStatus;
char* m_szResultFilename;
unsigned long m_lCrc;
public:
ArticleInfo();
@@ -62,12 +103,21 @@ public:
int GetPartNumber() { return m_iPartNumber; }
const char* GetMessageID() { return m_szMessageID; }
void SetMessageID(const char* szMessageID);
void SetSize(int s) { m_iSize = s; }
void SetSize(int iSize) { m_iSize = iSize; }
int GetSize() { return m_iSize; }
void AttachSegment(char* pContent, long long iOffset, int iSize);
void DiscardSegment();
const char* GetSegmentContent() { return m_pSegmentContent; }
void SetSegmentOffset(long long iSegmentOffset) { m_iSegmentOffset = iSegmentOffset; }
long long GetSegmentOffset() { return m_iSegmentOffset; }
void SetSegmentSize(int iSegmentSize) { m_iSegmentSize = iSegmentSize; }
int GetSegmentSize() { return m_iSegmentSize; }
EStatus GetStatus() { return m_eStatus; }
void SetStatus(EStatus Status) { m_eStatus = Status; }
const char* GetResultFilename() { return m_szResultFilename; }
void SetResultFilename(const char* v);
unsigned long GetCrc() { return m_lCrc; }
void SetCrc(unsigned long lCrc) { m_lCrc = lCrc; }
};
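
The new segment members carry the article cache: decoded article data can be kept in memory via AttachSegment and released with DiscardSegment once it has been flushed to the output file. A rough sketch of that life cycle follows; whether AttachSegment takes ownership of the buffer is an assumption made here for illustration:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "DownloadInfo.h"

// Illustrative only: keep a decoded segment in memory (assumes ArticleInfo frees the buffer).
void CacheSegment(ArticleInfo* pArticle, const char* pData, long long iOffset, int iLen)
{
    char* pCopy = (char*)malloc(iLen);
    memcpy(pCopy, pData, iLen);
    pArticle->AttachSegment(pCopy, iOffset, iLen);
}

// Illustrative only: write the cached segment at its offset and release the memory.
void FlushSegment(ArticleInfo* pArticle, FILE* pOutFile)
{
    fseek(pOutFile, (long)pArticle->GetSegmentOffset(), SEEK_SET);
    fwrite(pArticle->GetSegmentContent(), 1, pArticle->GetSegmentSize(), pOutFile);
    pArticle->DiscardSegment();
}
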
class FileInfo
@@ -81,6 +131,7 @@ private:
NZBInfo* m_pNZBInfo;
Articles m_Articles;
Groups m_Groups;
ServerStatList m_ServerStats;
char* m_szSubject;
char* m_szFilename;
long long m_lSize;
@@ -97,26 +148,29 @@ private:
bool m_bDeleted;
bool m_bFilenameConfirmed;
bool m_bParFile;
int m_iCompleted;
int m_iCompletedArticles;
bool m_bOutputInitialized;
char* m_szOutputFilename;
Mutex* m_pMutexOutputFile;
int m_iPriority;
bool m_bExtraPriority;
int m_iActiveDownloads;
bool m_bAutoDeleted;
int m_iCachedArticles;
bool m_bPartialChanged;
static int m_iIDGen;
static int m_iIDMax;
friend class CompletedFile;
public:
FileInfo();
FileInfo(int iID = 0);
~FileInfo();
int GetID() { return m_iID; }
void SetID(int iID);
static void ResetGenID(bool bMax);
NZBInfo* GetNZBInfo() { return m_pNZBInfo; }
void SetNZBInfo(NZBInfo* pNZBInfo);
void SetNZBInfo(NZBInfo* pNZBInfo) { m_pNZBInfo = pNZBInfo; }
Articles* GetArticles() { return &m_Articles; }
Groups* GetGroups() { return &m_Groups; }
const char* GetSubject() { return m_szSubject; }
@@ -147,11 +201,11 @@ public:
time_t GetTime() { return m_tTime; }
void SetTime(time_t tTime) { m_tTime = tTime; }
bool GetPaused() { return m_bPaused; }
void SetPaused(bool Paused) { m_bPaused = Paused; }
void SetPaused(bool bPaused);
bool GetDeleted() { return m_bDeleted; }
void SetDeleted(bool Deleted) { m_bDeleted = Deleted; }
int GetCompleted() { return m_iCompleted; }
void SetCompleted(int iCompleted) { m_iCompleted = iCompleted; }
int GetCompletedArticles() { return m_iCompletedArticles; }
void SetCompletedArticles(int iCompletedArticles) { m_iCompletedArticles = iCompletedArticles; }
bool GetParFile() { return m_bParFile; }
void SetParFile(bool bParFile) { m_bParFile = bParFile; }
void ClearArticles();
@@ -161,64 +215,60 @@ public:
void SetOutputFilename(const char* szOutputFilename);
bool GetOutputInitialized() { return m_bOutputInitialized; }
void SetOutputInitialized(bool bOutputInitialized) { m_bOutputInitialized = bOutputInitialized; }
int GetPriority() { return m_iPriority; }
void SetPriority(int iPriority) { m_iPriority = iPriority; }
bool GetExtraPriority() { return m_bExtraPriority; }
void SetExtraPriority(bool bExtraPriority) { m_bExtraPriority = bExtraPriority; };
void SetExtraPriority(bool bExtraPriority) { m_bExtraPriority = bExtraPriority; }
int GetActiveDownloads() { return m_iActiveDownloads; }
void SetActiveDownloads(int iActiveDownloads);
bool GetAutoDeleted() { return m_bAutoDeleted; }
void SetAutoDeleted(bool bAutoDeleted) { m_bAutoDeleted = bAutoDeleted; }
int GetCachedArticles() { return m_iCachedArticles; }
void SetCachedArticles(int iCachedArticles) { m_iCachedArticles = iCachedArticles; }
bool GetPartialChanged() { return m_bPartialChanged; }
void SetPartialChanged(bool bPartialChanged) { m_bPartialChanged = bPartialChanged; }
ServerStatList* GetServerStats() { return &m_ServerStats; }
};
typedef std::deque<FileInfo*> FileQueue;
typedef std::deque<FileInfo*> FileListBase;
class GroupInfo
class FileList : public FileListBase
{
private:
NZBInfo* m_pNZBInfo;
int m_iFirstID;
int m_iLastID;
int m_iRemainingFileCount;
int m_iPausedFileCount;
long long m_lRemainingSize;
long long m_lPausedSize;
int m_iRemainingParCount;
time_t m_tMinTime;
time_t m_tMaxTime;
int m_iMinPriority;
int m_iMaxPriority;
int m_iActiveDownloads;
friend class DownloadQueue;
bool m_bOwnObjects;
public:
GroupInfo();
~GroupInfo();
NZBInfo* GetNZBInfo() { return m_pNZBInfo; }
int GetFirstID() { return m_iFirstID; }
int GetLastID() { return m_iLastID; }
long long GetRemainingSize() { return m_lRemainingSize; }
long long GetPausedSize() { return m_lPausedSize; }
int GetRemainingFileCount() { return m_iRemainingFileCount; }
int GetPausedFileCount() { return m_iPausedFileCount; }
int GetRemainingParCount() { return m_iRemainingParCount; }
time_t GetMinTime() { return m_tMinTime; }
time_t GetMaxTime() { return m_tMaxTime; }
int GetMinPriority() { return m_iMinPriority; }
int GetMaxPriority() { return m_iMaxPriority; }
int GetActiveDownloads() { return m_iActiveDownloads; }
FileList(bool bOwnObjects = false) { m_bOwnObjects = bOwnObjects; }
~FileList();
void Clear();
void Remove(FileInfo* pFileInfo);
};
typedef std::deque<GroupInfo*> GroupQueueBase;
class GroupQueue : public GroupQueueBase
class CompletedFile
{
public:
~GroupQueue();
void Clear();
enum EStatus
{
cfUnknown,
cfSuccess,
cfPartial,
cfFailure
};
private:
int m_iID;
char* m_szFileName;
EStatus m_eStatus;
unsigned long m_lCrc;
public:
CompletedFile(int iID, const char* szFileName, EStatus eStatus, unsigned long lCrc);
~CompletedFile();
int GetID() { return m_iID; }
void SetFileName(const char* szFileName);
const char* GetFileName() { return m_szFileName; }
EStatus GetStatus() { return m_eStatus; }
unsigned long GetCrc() { return m_lCrc; }
};
typedef std::deque<CompletedFile*> CompletedFiles;
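
Completed files used to be tracked as plain name strings; each CompletedFile now also carries an ID, a status and the CRC, so later stages such as history and verification can reuse them. A small hedged example of recording one; the parameters are placeholders supplied by the caller, and access to the list needs the usual queue locking:

// Illustrative only: record a successfully written file for its nzb.
void RecordCompletedFile(NZBInfo* pNZBInfo, int iFileID, const char* szFileName, unsigned long lCrc)
{
    CompletedFile* pCompleted = new CompletedFile(iFileID, szFileName, CompletedFile::cfSuccess, lCrc);
    pNZBInfo->GetCompletedFiles()->push_back(pCompleted);
}
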
class NZBParameter
{
@@ -262,7 +312,7 @@ public:
private:
char* m_szName;
EStatus m_eStatus;
friend class ScriptStatusList;
public:
@@ -283,33 +333,6 @@ public:
ScriptStatus::EStatus CalcTotalStatus();
};
class ServerStat
{
private:
int m_iServerID;
int m_iSuccessArticles;
int m_iFailedArticles;
public:
ServerStat(int iServerID);
int GetServerID() { return m_iServerID; }
int GetSuccessArticles() { return m_iSuccessArticles; }
void SetSuccessArticles(int iSuccessArticles) { m_iSuccessArticles = iSuccessArticles; }
int GetFailedArticles() { return m_iFailedArticles; }
void SetFailedArticles(int iFailedArticles) { m_iFailedArticles = iFailedArticles; }
};
typedef std::vector<ServerStat*> ServerStatListBase;
class ServerStatList : public ServerStatListBase
{
public:
~ServerStatList();
void SetStat(int iServerID, int iSuccessArticles, int iFailedArticles, bool bAdd);
void Add(ServerStatList* pServerStats);
void Clear();
};
enum EDupeMode
{
dmScore,
@@ -317,8 +340,6 @@ enum EDupeMode
dmForce
};
class NZBInfoList;
class NZBInfo
{
public:
@@ -369,7 +390,8 @@ public:
dsNone,
dsManual,
dsHealth,
dsDupe
dsDupe,
dsBad
};
enum EMarkStatus
@@ -379,12 +401,33 @@ public:
ksGood
};
typedef std::vector<char*> Files;
enum EUrlStatus
{
lsNone,
lsRunning,
lsFinished,
lsFailed,
lsRetry,
lsScanSkipped,
lsScanFailed
};
enum EKind
{
nkNzb,
nkUrl
};
typedef std::deque<Message*> Messages;
static const int FORCE_PRIORITY = 900;
friend class DupInfo;
private:
int m_iID;
int m_iRefCount;
EKind m_eKind;
char* m_szURL;
char* m_szFilename;
char* m_szName;
char* m_szDestDir;
@@ -393,6 +436,11 @@ private:
int m_iFileCount;
int m_iParkedFileCount;
long long m_lSize;
long long m_lRemainingSize;
int m_iPausedFileCount;
long long m_lPausedSize;
int m_iRemainingParCount;
int m_iActiveDownloads;
long long m_lSuccessSize;
long long m_lFailedSize;
long long m_lCurrentSuccessSize;
@@ -405,8 +453,12 @@ private:
int m_iTotalArticles;
int m_iSuccessArticles;
int m_iFailedArticles;
Files m_completedFiles;
bool m_bPostProcess;
int m_iCurrentSuccessArticles;
int m_iCurrentFailedArticles;
time_t m_tMinTime;
time_t m_tMaxTime;
int m_iPriority;
CompletedFiles m_completedFiles;
ERenameStatus m_eRenameStatus;
EParStatus m_eParStatus;
EUnpackStatus m_eUnpackStatus;
@@ -414,6 +466,8 @@ private:
EMoveStatus m_eMoveStatus;
EDeleteStatus m_eDeleteStatus;
EMarkStatus m_eMarkStatus;
EUrlStatus m_eUrlStatus;
bool m_bAddUrlPaused;
bool m_bDeletePaused;
bool m_bManyDupeFiles;
char* m_szQueuedFilename;
@@ -429,30 +483,44 @@ private:
EDupeMode m_eDupeMode;
unsigned int m_iFullContentHash;
unsigned int m_iFilteredContentHash;
NZBInfoList* m_Owner;
FileList m_FileList;
NZBParameterList m_ppParameters;
ScriptStatusList m_scriptStatuses;
ServerStatList m_ServerStats;
ServerStatList m_CurrentServerStats;
Mutex m_mutexLog;
Messages m_Messages;
int m_iIDMessageGen;
PostInfo* m_pPostInfo;
long long m_lDownloadedSize;
time_t m_tDownloadStartTime;
int m_iDownloadSec;
int m_iPostTotalSec;
int m_iParSec;
int m_iRepairSec;
int m_iUnpackSec;
bool m_bReprocess;
time_t m_tQueueScriptTime;
bool m_bParFull;
static int m_iIDGen;
static int m_iIDMax;
friend class NZBInfoList;
public:
NZBInfo(bool bPersistent = true);
NZBInfo();
~NZBInfo();
void Retain();
void Release();
int GetID() { return m_iID; }
void SetID(int iID);
static void ResetGenID(bool bMax);
static int GenerateID();
EKind GetKind() { return m_eKind; }
void SetKind(EKind eKind) { m_eKind = eKind; }
const char* GetURL() { return m_szURL; } // needs locking (for shared objects)
void SetURL(const char* szURL); // needs locking (for shared objects)
const char* GetFilename() { return m_szFilename; }
void SetFilename(const char* szFilename);
static void MakeNiceNZBName(const char* szNZBFilename, char* szBuffer, int iSize, bool bRemoveExt);
static void MakeNiceUrlName(const char* szURL, const char* szNZBFilename, char* szBuffer, int iSize);
const char* GetDestDir() { return m_szDestDir; } // needs locking (for shared objects)
void SetDestDir(const char* szDestDir); // needs locking (for shared objects)
const char* GetFinalDir() { return m_szFinalDir; } // needs locking (for shared objects)
@@ -467,6 +535,16 @@ public:
void SetParkedFileCount(int iParkedFileCount) { m_iParkedFileCount = iParkedFileCount; }
long long GetSize() { return m_lSize; }
void SetSize(long long lSize) { m_lSize = lSize; }
long long GetRemainingSize() { return m_lRemainingSize; }
void SetRemainingSize(long long lRemainingSize) { m_lRemainingSize = lRemainingSize; }
long long GetPausedSize() { return m_lPausedSize; }
void SetPausedSize(long long lPausedSize) { m_lPausedSize = lPausedSize; }
int GetPausedFileCount() { return m_iPausedFileCount; }
void SetPausedFileCount(int iPausedFileCount) { m_iPausedFileCount = iPausedFileCount; }
int GetRemainingParCount() { return m_iRemainingParCount; }
void SetRemainingParCount(int iRemainingParCount) { m_iRemainingParCount = iRemainingParCount; }
int GetActiveDownloads() { return m_iActiveDownloads; }
void SetActiveDownloads(int iActiveDownloads);
long long GetSuccessSize() { return m_lSuccessSize; }
void SetSuccessSize(long long lSuccessSize) { m_lSuccessSize = lSuccessSize; }
long long GetFailedSize() { return m_lFailedSize; }
@@ -491,12 +569,21 @@ public:
void SetSuccessArticles(int iSuccessArticles) { m_iSuccessArticles = iSuccessArticles; }
int GetFailedArticles() { return m_iFailedArticles; }
void SetFailedArticles(int iFailedArticles) { m_iFailedArticles = iFailedArticles; }
int GetCurrentSuccessArticles() { return m_iCurrentSuccessArticles; }
void SetCurrentSuccessArticles(int iCurrentSuccessArticles) { m_iCurrentSuccessArticles = iCurrentSuccessArticles; }
int GetCurrentFailedArticles() { return m_iCurrentFailedArticles; }
void SetCurrentFailedArticles(int iCurrentFailedArticles) { m_iCurrentFailedArticles = iCurrentFailedArticles; }
int GetPriority() { return m_iPriority; }
void SetPriority(int iPriority) { m_iPriority = iPriority; }
bool GetForcePriority() { return m_iPriority >= FORCE_PRIORITY; }
time_t GetMinTime() { return m_tMinTime; }
void SetMinTime(time_t tMinTime) { m_tMinTime = tMinTime; }
time_t GetMaxTime() { return m_tMaxTime; }
void SetMaxTime(time_t tMaxTime) { m_tMaxTime = tMaxTime; }
void BuildDestDirName();
void BuildFinalDirName(char* szFinalDirBuf, int iBufSize);
Files* GetCompletedFiles() { return &m_completedFiles; } // needs locking (for shared objects)
CompletedFiles* GetCompletedFiles() { return &m_completedFiles; } // needs locking (for shared objects)
void ClearCompletedFiles();
bool GetPostProcess() { return m_bPostProcess; }
void SetPostProcess(bool bPostProcess) { m_bPostProcess = bPostProcess; }
ERenameStatus GetRenameStatus() { return m_eRenameStatus; }
void SetRenameStatus(ERenameStatus eRenameStatus) { m_eRenameStatus = eRenameStatus; }
EParStatus GetParStatus() { return m_eParStatus; }
@@ -511,6 +598,8 @@ public:
void SetDeleteStatus(EDeleteStatus eDeleteStatus) { m_eDeleteStatus = eDeleteStatus; }
EMarkStatus GetMarkStatus() { return m_eMarkStatus; }
void SetMarkStatus(EMarkStatus eMarkStatus) { m_eMarkStatus = eMarkStatus; }
EUrlStatus GetUrlStatus() { return m_eUrlStatus; }
void SetUrlStatus(EUrlStatus eUrlStatus) { m_eUrlStatus = eUrlStatus; }
const char* GetQueuedFilename() { return m_szQueuedFilename; }
void SetQueuedFilename(const char* szQueuedFilename);
bool GetDeleting() { return m_bDeleting; }
@@ -529,11 +618,15 @@ public:
void SetCleanupDisk(bool bCleanupDisk) { m_bCleanupDisk = bCleanupDisk; }
bool GetUnpackCleanedUpDisk() { return m_bUnpackCleanedUpDisk; }
void SetUnpackCleanedUpDisk(bool bUnpackCleanedUpDisk) { m_bUnpackCleanedUpDisk = bUnpackCleanedUpDisk; }
bool GetAddUrlPaused() { return m_bAddUrlPaused; }
void SetAddUrlPaused(bool bAddUrlPaused) { m_bAddUrlPaused = bAddUrlPaused; }
FileList* GetFileList() { return &m_FileList; } // needs locking (for shared objects)
NZBParameterList* GetParameters() { return &m_ppParameters; } // needs locking (for shared objects)
ScriptStatusList* GetScriptStatuses() { return &m_scriptStatuses; } // needs locking (for shared objects)
ServerStatList* GetServerStats() { return &m_ServerStats; }
ServerStatList* GetCurrentServerStats() { return &m_CurrentServerStats; }
int CalcHealth();
int CalcCriticalHealth();
int CalcCriticalHealth(bool bAllowEstimation);
const char* GetDupeKey() { return m_szDupeKey; } // needs locking (for shared objects)
void SetDupeKey(const char* szDupeKey); // needs locking (for shared objects)
int GetDupeScore() { return m_iDupeScore; }
@@ -544,19 +637,53 @@ public:
void SetFullContentHash(unsigned int iFullContentHash) { m_iFullContentHash = iFullContentHash; }
unsigned int GetFilteredContentHash() { return m_iFilteredContentHash; }
void SetFilteredContentHash(unsigned int iFilteredContentHash) { m_iFilteredContentHash = iFilteredContentHash; }
long long GetDownloadedSize() { return m_lDownloadedSize; }
void SetDownloadedSize(long long lDownloadedSize) { m_lDownloadedSize = lDownloadedSize; }
int GetDownloadSec() { return m_iDownloadSec; }
void SetDownloadSec(int iDownloadSec) { m_iDownloadSec = iDownloadSec; }
int GetPostTotalSec() { return m_iPostTotalSec; }
void SetPostTotalSec(int iPostTotalSec) { m_iPostTotalSec = iPostTotalSec; }
int GetParSec() { return m_iParSec; }
void SetParSec(int iParSec) { m_iParSec = iParSec; }
int GetRepairSec() { return m_iRepairSec; }
void SetRepairSec(int iRepairSec) { m_iRepairSec = iRepairSec; }
int GetUnpackSec() { return m_iUnpackSec; }
void SetUnpackSec(int iUnpackSec) { m_iUnpackSec = iUnpackSec; }
time_t GetDownloadStartTime() { return m_tDownloadStartTime; }
void SetDownloadStartTime(time_t tDownloadStartTime) { m_tDownloadStartTime = tDownloadStartTime; }
void SetReprocess(bool bReprocess) { m_bReprocess = bReprocess; }
bool GetReprocess() { return m_bReprocess; }
time_t GetQueueScriptTime() { return m_tQueueScriptTime; }
void SetQueueScriptTime(time_t tQueueScriptTime) { m_tQueueScriptTime = tQueueScriptTime; }
void SetParFull(bool bParFull) { m_bParFull = bParFull; }
bool GetParFull() { return m_bParFull; }
void CopyFileList(NZBInfo* pSrcNZBInfo);
void UpdateMinMaxTime();
PostInfo* GetPostInfo() { return m_pPostInfo; }
void EnterPostProcess();
void LeavePostProcess();
bool IsDupeSuccess();
const char* MakeTextStatus(bool bIgnoreScriptStatus);
void AppendMessage(Message::EKind eKind, time_t tTime, const char* szText);
Messages* LockMessages();
void UnlockMessages();
};
typedef std::deque<NZBInfo*> NZBInfoListBase;
typedef std::deque<NZBInfo*> NZBQueueBase;
class NZBInfoList : public NZBInfoListBase
class NZBList : public NZBQueueBase
{
private:
bool m_bOwnObjects;
public:
void Add(NZBInfo* pNZBInfo);
NZBList(bool bOwnObjects = false) { m_bOwnObjects = bOwnObjects; }
~NZBList();
void Clear();
void Add(NZBInfo* pNZBInfo, bool bAddTop);
void Remove(NZBInfo* pNZBInfo);
void ReleaseAll();
NZBInfo* Find(int iID);
};
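
NZBList replaces the old NZBInfoList and is used for both the main queue and internal lists; the constructor flag decides whether the list owns its elements, and Add can insert at the top. A hedged sketch of the basic operations; the ownership behaviour implied by bOwnObjects and Remove is inferred from the flag name only:

#include "DownloadInfo.h"

// Illustrative only: basic NZBList operations.
void ListDemo(NZBInfo* pFirst, NZBInfo* pSecond)
{
    NZBList list(false);                 // non-owning: caller remains responsible for the objects
    list.Add(pFirst, false);             // append at the end
    list.Add(pSecond, true);             // bAddTop=true: insert at the head of the list
    NZBInfo* pFound = list.Find(pFirst->GetID());    // NULL when no item has this ID
    if (pFound)
    {
        list.Remove(pFound);             // drops the entry; the object itself is untouched here
    }
}
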
class PostInfo
@@ -577,14 +704,15 @@ public:
};
typedef std::deque<Message*> Messages;
typedef std::vector<char*> ParredFiles;
private:
int m_iID;
NZBInfo* m_pNZBInfo;
char* m_szInfoName;
bool m_bWorking;
bool m_bDeleted;
bool m_bRequestParCheck;
bool m_bForceParFull;
bool m_bForceRepair;
EStage m_eStage;
char* m_szProgressLabel;
int m_iFileProgress;
@@ -596,18 +724,13 @@ private:
Mutex m_mutexLog;
Messages m_Messages;
int m_iIDMessageGen;
static int m_iIDGen;
static int m_iIDMax;
ParredFiles m_ParredFiles;
public:
PostInfo();
~PostInfo();
int GetID() { return m_iID; }
NZBInfo* GetNZBInfo() { return m_pNZBInfo; }
void SetNZBInfo(NZBInfo* pNZBInfo);
const char* GetInfoName() { return m_szInfoName; }
void SetInfoName(const char* szInfoName);
void SetNZBInfo(NZBInfo* pNZBInfo) { m_pNZBInfo = pNZBInfo; }
EStage GetStage() { return m_eStage; }
void SetStage(EStage eStage) { m_eStage = eStage; }
void SetProgressLabel(const char* szProgressLabel);
@@ -626,84 +749,22 @@ public:
void SetDeleted(bool bDeleted) { m_bDeleted = bDeleted; }
bool GetRequestParCheck() { return m_bRequestParCheck; }
void SetRequestParCheck(bool bRequestParCheck) { m_bRequestParCheck = bRequestParCheck; }
void AppendMessage(Message::EKind eKind, const char* szText);
bool GetForceParFull() { return m_bForceParFull; }
void SetForceParFull(bool bForceParFull) { m_bForceParFull = bForceParFull; }
bool GetForceRepair() { return m_bForceRepair; }
void SetForceRepair(bool bForceRepair) { m_bForceRepair = bForceRepair; }
Thread* GetPostThread() { return m_pPostThread; }
void SetPostThread(Thread* pPostThread) { m_pPostThread = pPostThread; }
void AppendMessage(Message::EKind eKind, const char* szText);
Messages* LockMessages();
void UnlockMessages();
ParredFiles* GetParredFiles() { return &m_ParredFiles; }
};
typedef std::deque<PostInfo*> PostQueue;
typedef std::vector<int> IDList;
typedef std::vector<char*> NameList;
class UrlInfo
{
public:
enum EStatus
{
aiUndefined,
aiRunning,
aiFinished,
aiFailed,
aiRetry,
aiScanSkipped,
aiScanFailed
};
private:
int m_iID;
char* m_szURL;
char* m_szNZBFilename;
char* m_szCategory;
int m_iPriority;
char* m_szDupeKey;
int m_iDupeScore;
EDupeMode m_eDupeMode;
bool m_bAddTop;
bool m_bAddPaused;
bool m_bForce;
EStatus m_eStatus;
static int m_iIDGen;
static int m_iIDMax;
public:
UrlInfo();
~UrlInfo();
int GetID() { return m_iID; }
void SetID(int iID);
static void ResetGenID(bool bMax);
const char* GetURL() { return m_szURL; } // needs locking (for shared objects)
void SetURL(const char* szURL); // needs locking (for shared objects)
const char* GetNZBFilename() { return m_szNZBFilename; } // needs locking (for shared objects)
void SetNZBFilename(const char* szNZBFilename); // needs locking (for shared objects)
const char* GetCategory() { return m_szCategory; } // needs locking (for shared objects)
void SetCategory(const char* szCategory); // needs locking (for shared objects)
int GetPriority() { return m_iPriority; }
void SetPriority(int iPriority) { m_iPriority = iPriority; }
const char* GetDupeKey() { return m_szDupeKey; }
void SetDupeKey(const char* szDupeKey);
int GetDupeScore() { return m_iDupeScore; }
void SetDupeScore(int iDupeScore) { m_iDupeScore = iDupeScore; }
EDupeMode GetDupeMode() { return m_eDupeMode; }
void SetDupeMode(EDupeMode eDupeMode) { m_eDupeMode = eDupeMode; }
bool GetAddTop() { return m_bAddTop; }
void SetAddTop(bool bAddTop) { m_bAddTop = bAddTop; }
bool GetAddPaused() { return m_bAddPaused; }
void SetAddPaused(bool bAddPaused) { m_bAddPaused = bAddPaused; }
void GetName(char* szBuffer, int iSize); // needs locking (for shared objects)
static void MakeNiceName(const char* szURL, const char* szNZBFilename, char* szBuffer, int iSize);
bool GetForce() { return m_bForce; }
void SetForce(bool bForce) { m_bForce = bForce; }
EStatus GetStatus() { return m_eStatus; }
void SetStatus(EStatus Status) { m_eStatus = Status; }
};
typedef std::deque<UrlInfo*> UrlQueue;
class DupInfo
{
public:
@@ -719,6 +780,7 @@ public:
};
private:
int m_iID;
char* m_szName;
char* m_szDupeKey;
int m_iDupeScore;
@@ -731,6 +793,8 @@ private:
public:
DupInfo();
~DupInfo();
int GetID() { return m_iID; }
void SetID(int iID);
const char* GetName() { return m_szName; } // needs locking (for shared objects)
void SetName(const char* szName); // needs locking (for shared objects)
const char* GetDupeKey() { return m_szDupeKey; } // needs locking (for shared objects)
@@ -755,33 +819,25 @@ public:
enum EKind
{
hkUnknown,
hkNZBInfo,
hkUrlInfo,
hkDupInfo
hkNzb,
hkUrl,
hkDup
};
private:
int m_iID;
EKind m_eKind;
void* m_pInfo;
time_t m_tTime;
static int m_iIDGen;
static int m_iIDMax;
public:
HistoryInfo(NZBInfo* pNZBInfo);
HistoryInfo(UrlInfo* pUrlInfo);
HistoryInfo(DupInfo* pDupInfo);
~HistoryInfo();
int GetID() { return m_iID; }
void SetID(int iID);
static void ResetGenID(bool bMax);
EKind GetKind() { return m_eKind; }
int GetID();
NZBInfo* GetNZBInfo() { return (NZBInfo*)m_pInfo; }
UrlInfo* GetUrlInfo() { return (UrlInfo*)m_pInfo; }
DupInfo* GetDupInfo() { return (DupInfo*)m_pInfo; }
void DiscardUrlInfo() { m_pInfo = NULL; }
void DiscardNZBInfo() { m_pInfo = NULL; }
time_t GetTime() { return m_tTime; }
void SetTime(time_t tTime) { m_tTime = tTime; }
void GetName(char* szBuffer, int iSize); // needs locking (for shared objects)
@@ -789,32 +845,105 @@ public:
typedef std::deque<HistoryInfo*> HistoryList;
class DownloadQueue
class DownloadQueue : public Subject
{
public:
enum EAspectAction
{
eaNzbFound,
eaNzbAdded,
eaNzbDeleted,
eaFileCompleted,
eaFileDeleted,
eaUrlCompleted
};
struct Aspect
{
EAspectAction eAction;
DownloadQueue* pDownloadQueue;
NZBInfo* pNZBInfo;
FileInfo* pFileInfo;
};
enum EEditAction
{
eaFileMoveOffset = 1, // move files to m_iOffset relative to the current position in download-queue
eaFileMoveTop, // move files to the top of download-queue
eaFileMoveBottom, // move files to the bottom of download-queue
eaFilePause, // pause files
eaFileResume, // resume (unpause) files
eaFileDelete, // delete files
eaFilePauseAllPars, // pause only (all) pars (does not affect other files)
eaFilePauseExtraPars, // pause only (almost all) pars, except main par-file (does not affect other files)
eaFileReorder, // set file order
eaFileSplit, // split - create new group from selected files
eaGroupMoveOffset, // move group to m_iOffset relative to the current position in download-queue
eaGroupMoveTop, // move group to the top of download-queue
eaGroupMoveBottom, // move group to the bottom of download-queue
eaGroupPause, // pause group
eaGroupResume, // resume (unpause) group
eaGroupDelete, // delete group and put to history
eaGroupDupeDelete, // delete group, put to history and mark as duplicate
eaGroupFinalDelete, // delete group without adding to history
eaGroupPauseAllPars, // pause only (all) pars (does not affect other files) in group
eaGroupPauseExtraPars, // pause only (almost all) pars in group, except main par-file (does not affect other files)
eaGroupSetPriority, // set priority for groups
eaGroupSetCategory, // set or change category for a group
eaGroupApplyCategory, // set or change category for a group and reassign pp-params according to category settings
eaGroupMerge, // merge groups
eaGroupSetParameter, // set post-process parameter for group
eaGroupSetName, // set group name (rename group)
eaGroupSetDupeKey, // set duplicate key
eaGroupSetDupeScore, // set duplicate score
eaGroupSetDupeMode, // set duplicate mode
eaPostDelete, // cancel post-processing
eaHistoryDelete, // hide history-item
eaHistoryFinalDelete, // delete history-item
eaHistoryReturn, // move history-item back to download queue
eaHistoryProcess, // move history-item back to download queue and start postprocessing
eaHistoryRedownload, // move history-item back to download queue for redownload
eaHistorySetParameter, // set post-process parameter for history-item
eaHistorySetDupeKey, // set duplicate key
eaHistorySetDupeScore, // set duplicate score
eaHistorySetDupeMode, // set duplicate mode
eaHistorySetDupeBackup, // set duplicate backup flag
eaHistoryMarkBad, // mark history-item as bad (and download other duplicate)
eaHistoryMarkGood // mark history-item as good (and push it into dup-history)
};
enum EMatchMode
{
mmID = 1,
mmName,
mmRegEx
};
private:
NZBList m_Queue;
HistoryList m_History;
Mutex m_LockMutex;
static DownloadQueue* g_pDownloadQueue;
static bool g_bLoaded;
protected:
NZBInfoList m_NZBInfoList;
FileQueue m_FileQueue;
PostQueue m_PostQueue;
HistoryList m_HistoryList;
FileQueue m_ParkedFiles;
UrlQueue m_UrlQueue;
DownloadQueue() : m_Queue(true) {}
static void Init(DownloadQueue* pGlobalInstance) { g_pDownloadQueue = pGlobalInstance; }
static void Final() { g_pDownloadQueue = NULL; }
static void Loaded() { g_bLoaded = true; }
public:
NZBInfoList* GetNZBInfoList() { return &m_NZBInfoList; }
FileQueue* GetFileQueue() { return &m_FileQueue; }
PostQueue* GetPostQueue() { return &m_PostQueue; }
HistoryList* GetHistoryList() { return &m_HistoryList; }
FileQueue* GetParkedFiles() { return &m_ParkedFiles; }
UrlQueue* GetUrlQueue() { return &m_UrlQueue; }
void BuildGroups(GroupQueue* pGroupQueue);
};
class DownloadQueueHolder
{
public:
virtual ~DownloadQueueHolder() {};
virtual DownloadQueue* LockQueue() = 0;
virtual void UnlockQueue() = 0;
virtual ~DownloadQueue() {}
static bool IsLoaded() { return g_bLoaded; }
static DownloadQueue* Lock();
static void Unlock();
NZBList* GetQueue() { return &m_Queue; }
HistoryList* GetHistory() { return &m_History; }
virtual bool EditEntry(int ID, EEditAction eAction, int iOffset, const char* szText) = 0;
virtual bool EditList(IDList* pIDList, NameList* pNameList, EMatchMode eMatchMode, EEditAction eAction, int iOffset, const char* szText) = 0;
virtual void Save() = 0;
void CalcRemainingSize(long long* pRemaining, long long* pRemainingForced);
};
#endif
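
DownloadQueue itself is now the globally lockable object: the static Lock() returns the single instance and Unlock() releases it, replacing the former DownloadQueueHolder interface, while queue manipulation goes through EditEntry/EditList. A minimal usage sketch; eaGroupPause is picked only as an example action:

#include "DownloadInfo.h"

// Illustrative only: pause one group identified by its nzb ID.
void PauseGroup(int iNZBID)
{
    DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
    pDownloadQueue->EditEntry(iNZBID, DownloadQueue::eaGroupPause, 0, NULL);
    DownloadQueue::Unlock();
}

// Illustrative only: read something from the queue under the lock.
int CountQueuedItems()
{
    DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
    int iCount = (int)pDownloadQueue->GetQueue()->size();
    DownloadQueue::Unlock();
    return iCount;
}
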


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -46,28 +46,12 @@
#include "Options.h"
#include "Log.h"
#include "Util.h"
#include "DiskState.h"
#include "NZBFile.h"
#include "QueueCoordinator.h"
#include "HistoryCoordinator.h"
#include "DupeCoordinator.h"
extern QueueCoordinator* g_pQueueCoordinator;
extern HistoryCoordinator* g_pHistoryCoordinator;
extern Options* g_pOptions;
extern DiskState* g_pDiskState;
bool DupeCoordinator::IsDupeSuccess(NZBInfo* pNZBInfo)
{
bool bFailure =
pNZBInfo->GetDeleteStatus() != NZBInfo::dsNone ||
pNZBInfo->GetMarkStatus() == NZBInfo::ksBad ||
pNZBInfo->GetParStatus() == NZBInfo::psFailure ||
pNZBInfo->GetUnpackStatus() == NZBInfo::usFailure ||
pNZBInfo->GetUnpackStatus() == NZBInfo::usPassword ||
(pNZBInfo->GetParStatus() == NZBInfo::psSkipped &&
pNZBInfo->GetUnpackStatus() == NZBInfo::usSkipped &&
pNZBInfo->CalcHealth() < pNZBInfo->CalcCriticalHealth());
return !bFailure;
}
bool DupeCoordinator::SameNameOrKey(const char* szName1, const char* szDupeKey1,
const char* szName2, const char* szDupeKey2)
@@ -95,65 +79,79 @@ void DupeCoordinator::NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
{
debug("Checking duplicates for %s", pNZBInfo->GetName());
GroupQueue groupQueue;
pDownloadQueue->BuildGroups(&groupQueue);
// find duplicates in download queue with exactly same content
for (GroupQueue::iterator it = groupQueue.begin(); it != groupQueue.end(); it++)
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
GroupInfo* pGroupInfo = *it;
NZBInfo* pGroupNZBInfo = pGroupInfo->GetNZBInfo();
NZBInfo* pQueuedNZBInfo = *it;
bool bSameContent = (pNZBInfo->GetFullContentHash() > 0 &&
pNZBInfo->GetFullContentHash() == pGroupNZBInfo->GetFullContentHash()) ||
pNZBInfo->GetFullContentHash() == pQueuedNZBInfo->GetFullContentHash()) ||
(pNZBInfo->GetFilteredContentHash() > 0 &&
pNZBInfo->GetFilteredContentHash() == pGroupNZBInfo->GetFilteredContentHash());
pNZBInfo->GetFilteredContentHash() == pQueuedNZBInfo->GetFilteredContentHash());
// if there is a duplicate with exactly same content (via hash-check)
// in queue - the new item is skipped
if (pGroupNZBInfo != pNZBInfo && bSameContent)
if (pQueuedNZBInfo != pNZBInfo && bSameContent && pNZBInfo->GetKind() == NZBInfo::nkNzb)
{
if (!strcmp(pNZBInfo->GetName(), pGroupNZBInfo->GetName()))
if (!strcmp(pNZBInfo->GetName(), pQueuedNZBInfo->GetName()))
{
warn("Skipping duplicate %s, already queued", pNZBInfo->GetName());
}
else
{
warn("Skipping duplicate %s, already queued as %s",
pNZBInfo->GetName(), pGroupNZBInfo->GetName());
pNZBInfo->GetName(), pQueuedNZBInfo->GetName());
}
// Flag saying QueueCoordinator to skip nzb-file
pNZBInfo->SetDeleteStatus(NZBInfo::dsManual);
DeleteQueuedFile(pNZBInfo->GetQueuedFilename());
g_pHistoryCoordinator->DeleteDiskFiles(pNZBInfo);
return;
}
}
// find duplicates in post queue with exactly same content
for (PostQueue::iterator it = pDownloadQueue->GetPostQueue()->begin(); it != pDownloadQueue->GetPostQueue()->end(); it++)
// if download has empty dupekey and empty dupescore - check if download queue
// or history have an item with the same name and non empty dupekey or dupescore and
// take these properties from this item
if (Util::EmptyStr(pNZBInfo->GetDupeKey()) && pNZBInfo->GetDupeScore() == 0)
{
PostInfo* pPostInfo = *it;
bool bSameContent = (pNZBInfo->GetFullContentHash() > 0 &&
pNZBInfo->GetFullContentHash() == pPostInfo->GetNZBInfo()->GetFullContentHash()) ||
(pNZBInfo->GetFilteredContentHash() > 0 &&
pNZBInfo->GetFilteredContentHash() == pPostInfo->GetNZBInfo()->GetFilteredContentHash());
// if there is a duplicate with exactly same content (via hash-check)
// in queue - the new item is skipped;
if (bSameContent)
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
if (!strcmp(pNZBInfo->GetName(), pPostInfo->GetNZBInfo()->GetName()))
NZBInfo* pQueuedNZBInfo = *it;
if (!strcmp(pQueuedNZBInfo->GetName(), pNZBInfo->GetName()) &&
(!Util::EmptyStr(pQueuedNZBInfo->GetDupeKey()) || pQueuedNZBInfo->GetDupeScore() != 0))
{
warn("Skipping duplicate %s, already queued", pNZBInfo->GetName());
pNZBInfo->SetDupeKey(pQueuedNZBInfo->GetDupeKey());
pNZBInfo->SetDupeScore(pQueuedNZBInfo->GetDupeScore());
info("Assigning dupekey %s and dupescore %i to %s from existing queue item with the same name",
pNZBInfo->GetDupeKey(), pNZBInfo->GetDupeScore(), pNZBInfo->GetName());
break;
}
else
}
}
if (Util::EmptyStr(pNZBInfo->GetDupeKey()) && pNZBInfo->GetDupeScore() == 0)
{
for (HistoryList::iterator it = pDownloadQueue->GetHistory()->begin(); it != pDownloadQueue->GetHistory()->end(); it++)
{
HistoryInfo* pHistoryInfo = *it;
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb &&
!strcmp(pHistoryInfo->GetNZBInfo()->GetName(), pNZBInfo->GetName()) &&
(!Util::EmptyStr(pHistoryInfo->GetNZBInfo()->GetDupeKey()) || pHistoryInfo->GetNZBInfo()->GetDupeScore() != 0))
{
warn("Skipping duplicate %s, already queued as %s",
pNZBInfo->GetName(), pPostInfo->GetNZBInfo()->GetName());
pNZBInfo->SetDupeKey(pHistoryInfo->GetNZBInfo()->GetDupeKey());
pNZBInfo->SetDupeScore(pHistoryInfo->GetNZBInfo()->GetDupeScore());
info("Assigning dupekey %s and dupescore %i to %s from existing history item with the same name",
pNZBInfo->GetDupeKey(), pNZBInfo->GetDupeScore(), pNZBInfo->GetName());
break;
}
if (pHistoryInfo->GetKind() == HistoryInfo::hkDup &&
!strcmp(pHistoryInfo->GetDupInfo()->GetName(), pNZBInfo->GetName()) &&
(!Util::EmptyStr(pHistoryInfo->GetDupInfo()->GetDupeKey()) || pHistoryInfo->GetDupInfo()->GetDupeScore() != 0))
{
pNZBInfo->SetDupeKey(pHistoryInfo->GetDupInfo()->GetDupeKey());
pNZBInfo->SetDupeScore(pHistoryInfo->GetDupInfo()->GetDupeScore());
info("Assigning dupekey %s and dupescore %i to %s from existing history item with the same name",
pNZBInfo->GetDupeKey(), pNZBInfo->GetDupeScore(), pNZBInfo->GetName());
break;
}
// Flag saying QueueCoordinator to skip nzb-file
pNZBInfo->SetDeleteStatus(NZBInfo::dsManual);
DeleteQueuedFile(pNZBInfo->GetQueuedFilename());
return;
}
}
@@ -167,11 +165,11 @@ void DupeCoordinator::NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
// find duplicates in queue having exactly same content
// also: nzb-files having duplicates marked as good are skipped
// also (only in score mode): nzb-files having success-duplicates in dup-history but don't having duplicates in recent history are skipped
for (HistoryList::iterator it = pDownloadQueue->GetHistoryList()->begin(); it != pDownloadQueue->GetHistoryList()->end(); it++)
for (HistoryList::iterator it = pDownloadQueue->GetHistory()->begin(); it != pDownloadQueue->GetHistory()->end(); it++)
{
HistoryInfo* pHistoryInfo = *it;
if (pHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo &&
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb &&
((pNZBInfo->GetFullContentHash() > 0 &&
pNZBInfo->GetFullContentHash() == pHistoryInfo->GetNZBInfo()->GetFullContentHash()) ||
(pNZBInfo->GetFilteredContentHash() > 0 &&
@@ -183,7 +181,7 @@ void DupeCoordinator::NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
break;
}
if (pHistoryInfo->GetKind() == HistoryInfo::hkDupInfo &&
if (pHistoryInfo->GetKind() == HistoryInfo::hkDup &&
((pNZBInfo->GetFullContentHash() > 0 &&
pNZBInfo->GetFullContentHash() == pHistoryInfo->GetDupInfo()->GetFullContentHash()) ||
(pNZBInfo->GetFilteredContentHash() > 0 &&
@@ -195,7 +193,7 @@ void DupeCoordinator::NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
break;
}
if (pHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo &&
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb &&
pHistoryInfo->GetNZBInfo()->GetDupeMode() != dmForce &&
pHistoryInfo->GetNZBInfo()->GetMarkStatus() == NZBInfo::ksGood &&
SameNameOrKey(pHistoryInfo->GetNZBInfo()->GetName(), pHistoryInfo->GetNZBInfo()->GetDupeKey(),
@@ -207,7 +205,7 @@ void DupeCoordinator::NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
break;
}
if (pHistoryInfo->GetKind() == HistoryInfo::hkDupInfo &&
if (pHistoryInfo->GetKind() == HistoryInfo::hkDup &&
pHistoryInfo->GetDupInfo()->GetDupeMode() != dmForce &&
(pHistoryInfo->GetDupInfo()->GetStatus() == DupInfo::dsGood ||
(pNZBInfo->GetDupeMode() == dmScore &&
@@ -226,15 +224,15 @@ void DupeCoordinator::NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
if (!bSameContent && !bGood && pNZBInfo->GetDupeMode() == dmScore)
{
// nzb-files having success-duplicates in recent history (with different content) are added to history for backup
for (HistoryList::iterator it = pDownloadQueue->GetHistoryList()->begin(); it != pDownloadQueue->GetHistoryList()->end(); it++)
for (HistoryList::iterator it = pDownloadQueue->GetHistory()->begin(); it != pDownloadQueue->GetHistory()->end(); it++)
{
HistoryInfo* pHistoryInfo = *it;
if (pHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo &&
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb &&
pHistoryInfo->GetNZBInfo()->GetDupeMode() != dmForce &&
SameNameOrKey(pHistoryInfo->GetNZBInfo()->GetName(), pHistoryInfo->GetNZBInfo()->GetDupeKey(),
pNZBInfo->GetName(), pNZBInfo->GetDupeKey()) &&
pNZBInfo->GetDupeScore() <= pHistoryInfo->GetNZBInfo()->GetDupeScore() &&
IsDupeSuccess(pHistoryInfo->GetNZBInfo()))
pHistoryInfo->GetNZBInfo()->IsDupeSuccess())
{
// Flag saying QueueCoordinator to skip nzb-file
pNZBInfo->SetDeleteStatus(NZBInfo::dsDupe);
@@ -260,7 +258,7 @@ void DupeCoordinator::NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
// Flag saying QueueCoordinator to skip nzb-file
pNZBInfo->SetDeleteStatus(NZBInfo::dsManual);
DeleteQueuedFile(pNZBInfo->GetQueuedFilename());
g_pHistoryCoordinator->DeleteDiskFiles(pNZBInfo);
return;
}
@@ -269,82 +267,53 @@ void DupeCoordinator::NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
if (pNZBInfo->GetDupeMode() == dmScore)
{
// find duplicates in download queue
for (GroupQueue::iterator it = groupQueue.begin(); it != groupQueue.end(); it++)
int index = 0;
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); index++)
{
GroupInfo* pGroupInfo = *it;
NZBInfo* pGroupNZBInfo = pGroupInfo->GetNZBInfo();
if (pGroupNZBInfo != pNZBInfo &&
pGroupNZBInfo->GetDupeMode() != dmForce &&
SameNameOrKey(pGroupNZBInfo->GetName(), pGroupNZBInfo->GetDupeKey(),
NZBInfo* pQueuedNZBInfo = *it++;
if (pQueuedNZBInfo != pNZBInfo &&
pQueuedNZBInfo->GetKind() == NZBInfo::nkNzb &&
pQueuedNZBInfo->GetDupeMode() != dmForce &&
SameNameOrKey(pQueuedNZBInfo->GetName(), pQueuedNZBInfo->GetDupeKey(),
pNZBInfo->GetName(), pNZBInfo->GetDupeKey()))
{
// if queue has a duplicate with the same or higher score - the new item
// is moved to history as dupe-backup
if (pNZBInfo->GetDupeScore() <= pGroupNZBInfo->GetDupeScore())
if (pNZBInfo->GetDupeScore() <= pQueuedNZBInfo->GetDupeScore())
{
// Flag saying QueueCoordinator to skip nzb-file
pNZBInfo->SetDeleteStatus(NZBInfo::dsDupe);
info("Collection %s is duplicate to %s", pNZBInfo->GetName(), pGroupNZBInfo->GetName());
info("Collection %s is duplicate to %s", pNZBInfo->GetName(), pQueuedNZBInfo->GetName());
return;
}
// if queue has a duplicate with lower score - the existing item is moved
// to history as dupe-backup (unless it is in post-processing stage) and
// the new item is added to queue
else
// the new item is added to queue (unless it is in post-processing stage)
if (!pQueuedNZBInfo->GetPostInfo())
{
// unless it is in post-processing stage
bool bPostProcess = false;
for (PostQueue::iterator it = pDownloadQueue->GetPostQueue()->begin(); it != pDownloadQueue->GetPostQueue()->end(); it++)
{
PostInfo* pPostInfo = *it;
if (pPostInfo->GetNZBInfo() == pGroupNZBInfo)
{
bPostProcess = true;
break;
}
}
if (!bPostProcess)
{
// the existing queue item is moved to history as dupe-backup
info("Moving collection %s with lower duplicate score to history", pGroupNZBInfo->GetName());
pGroupNZBInfo->SetDeleteStatus(NZBInfo::dsDupe);
g_pQueueCoordinator->GetQueueEditor()->LockedEditEntry(pDownloadQueue, pGroupInfo->GetLastID(), false, QueueEditor::eaGroupDelete, 0, NULL);
}
// the existing queue item is moved to history as dupe-backup
info("Moving collection %s with lower duplicate score to history", pQueuedNZBInfo->GetName());
pQueuedNZBInfo->SetDeleteStatus(NZBInfo::dsDupe);
pDownloadQueue->EditEntry(pQueuedNZBInfo->GetID(),
DownloadQueue::eaGroupDelete, 0, NULL);
it = pDownloadQueue->GetQueue()->begin() + index;
}
}
}
// find duplicates in post queue
for (PostQueue::iterator it = pDownloadQueue->GetPostQueue()->begin(); it != pDownloadQueue->GetPostQueue()->end(); it++)
{
PostInfo* pPostInfo = *it;
// if queue has a duplicate with the same or higher score - the new item
// is moved to history as dupe-backup;
if (pPostInfo->GetNZBInfo()->GetDupeMode() != dmForce &&
pNZBInfo->GetDupeScore() <= pPostInfo->GetNZBInfo()->GetDupeScore() &&
SameNameOrKey(pPostInfo->GetNZBInfo()->GetName(), pPostInfo->GetNZBInfo()->GetDupeKey(),
pNZBInfo->GetName(), pNZBInfo->GetDupeKey()))
{
// Flag saying QueueCoordinator to skip nzb-file
pNZBInfo->SetDeleteStatus(NZBInfo::dsDupe);
info("Collection %s is duplicate to %s", pNZBInfo->GetName(), pPostInfo->GetNZBInfo()->GetName());
return;
}
}
}
}
/**
- if download of an item fails and there are duplicates in history -
return the best duplicate from historyto queue for download;
return the best duplicate from history to queue for download;
- if download of an item completes successfully - nothing extra needs to be done;
*/
void DupeCoordinator::NZBCompleted(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
{
debug("Processing duplicates for %s", pNZBInfo->GetName());
if (pNZBInfo->GetDupeMode() == dmScore && !IsDupeSuccess(pNZBInfo))
if (pNZBInfo->GetDupeMode() == dmScore && !pNZBInfo->IsDupeSuccess())
{
ReturnBestDupe(pDownloadQueue, pNZBInfo, pNZBInfo->GetName(), pNZBInfo->GetDupeKey());
}
@@ -358,14 +327,14 @@ void DupeCoordinator::ReturnBestDupe(DownloadQueue* pDownloadQueue, NZBInfo* pNZ
// check if history (recent or dup) has other success-duplicates or good-duplicates
bool bHistoryDupe = false;
int iHistoryScore = 0;
for (HistoryList::iterator it = pDownloadQueue->GetHistoryList()->begin(); it != pDownloadQueue->GetHistoryList()->end(); it++)
for (HistoryList::iterator it = pDownloadQueue->GetHistory()->begin(); it != pDownloadQueue->GetHistory()->end(); it++)
{
HistoryInfo* pHistoryInfo = *it;
bool bGoodDupe = false;
if (pHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo &&
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb &&
pHistoryInfo->GetNZBInfo()->GetDupeMode() != dmForce &&
(IsDupeSuccess(pHistoryInfo->GetNZBInfo()) ||
(pHistoryInfo->GetNZBInfo()->IsDupeSuccess() ||
pHistoryInfo->GetNZBInfo()->GetMarkStatus() == NZBInfo::ksGood) &&
SameNameOrKey(pHistoryInfo->GetNZBInfo()->GetName(), pHistoryInfo->GetNZBInfo()->GetDupeKey(), szNZBName, szDupeKey))
{
@@ -377,7 +346,7 @@ void DupeCoordinator::ReturnBestDupe(DownloadQueue* pDownloadQueue, NZBInfo* pNZ
bGoodDupe = pHistoryInfo->GetNZBInfo()->GetMarkStatus() == NZBInfo::ksGood;
}
if (pHistoryInfo->GetKind() == HistoryInfo::hkDupInfo &&
if (pHistoryInfo->GetKind() == HistoryInfo::hkDup &&
pHistoryInfo->GetDupInfo()->GetDupeMode() != dmForce &&
(pHistoryInfo->GetDupInfo()->GetStatus() == DupInfo::dsSuccess ||
pHistoryInfo->GetDupInfo()->GetStatus() == DupInfo::dsGood) &&
@@ -398,37 +367,19 @@ void DupeCoordinator::ReturnBestDupe(DownloadQueue* pDownloadQueue, NZBInfo* pNZ
}
}
// check if duplicates exist in post-processing queue
bool bPostDupe = false;
int iPostScore = 0;
for (PostQueue::iterator it = pDownloadQueue->GetPostQueue()->begin(); it != pDownloadQueue->GetPostQueue()->end(); it++)
{
PostInfo* pPostInfo = *it;
if (pPostInfo->GetNZBInfo() != pNZBInfo &&
pPostInfo->GetNZBInfo()->GetDupeMode() != dmForce &&
SameNameOrKey(pPostInfo->GetNZBInfo()->GetName(), pPostInfo->GetNZBInfo()->GetDupeKey(), szNZBName, szDupeKey) &&
(!bPostDupe || pPostInfo->GetNZBInfo()->GetDupeScore() > iPostScore))
{
iPostScore = pPostInfo->GetNZBInfo()->GetDupeScore();
bPostDupe = true;
}
}
// check if duplicates exist in download queue
GroupQueue groupQueue;
pDownloadQueue->BuildGroups(&groupQueue);
bool bQueueDupe = false;
int iQueueScore = 0;
for (GroupQueue::iterator it = groupQueue.begin(); it != groupQueue.end(); it++)
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
GroupInfo* pGroupInfo = *it;
NZBInfo* pGroupNZBInfo = pGroupInfo->GetNZBInfo();
if (pGroupNZBInfo != pNZBInfo &&
pGroupNZBInfo->GetDupeMode() != dmForce &&
SameNameOrKey(pGroupNZBInfo->GetName(), pGroupNZBInfo->GetDupeKey(), szNZBName, szDupeKey) &&
(!bQueueDupe || pGroupNZBInfo->GetDupeScore() > iQueueScore))
NZBInfo* pQueuedNZBInfo = *it;
if (pQueuedNZBInfo != pNZBInfo &&
pQueuedNZBInfo->GetKind() == NZBInfo::nkNzb &&
pQueuedNZBInfo->GetDupeMode() != dmForce &&
SameNameOrKey(pQueuedNZBInfo->GetName(), pQueuedNZBInfo->GetDupeKey(), szNZBName, szDupeKey) &&
(!bQueueDupe || pQueuedNZBInfo->GetDupeScore() > iQueueScore))
{
iQueueScore = pGroupNZBInfo->GetDupeScore();
iQueueScore = pQueuedNZBInfo->GetDupeScore();
bQueueDupe = true;
}
}
@@ -436,16 +387,15 @@ void DupeCoordinator::ReturnBestDupe(DownloadQueue* pDownloadQueue, NZBInfo* pNZ
// find dupe-backup with highest score, whose score is also higher than other
// success-duplicates and higher than already queued items
HistoryInfo* pHistoryDupe = NULL;
for (HistoryList::iterator it = pDownloadQueue->GetHistoryList()->begin(); it != pDownloadQueue->GetHistoryList()->end(); it++)
for (HistoryList::iterator it = pDownloadQueue->GetHistory()->begin(); it != pDownloadQueue->GetHistory()->end(); it++)
{
HistoryInfo* pHistoryInfo = *it;
if (pHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo &&
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb &&
pHistoryInfo->GetNZBInfo()->GetDupeMode() != dmForce &&
pHistoryInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsDupe &&
pHistoryInfo->GetNZBInfo()->CalcHealth() >= pHistoryInfo->GetNZBInfo()->CalcCriticalHealth() &&
pHistoryInfo->GetNZBInfo()->CalcHealth() >= pHistoryInfo->GetNZBInfo()->CalcCriticalHealth(true) &&
pHistoryInfo->GetNZBInfo()->GetMarkStatus() != NZBInfo::ksBad &&
(!bHistoryDupe || pHistoryInfo->GetNZBInfo()->GetDupeScore() > iHistoryScore) &&
(!bPostDupe || pHistoryInfo->GetNZBInfo()->GetDupeScore() > iPostScore) &&
(!bQueueDupe || pHistoryInfo->GetNZBInfo()->GetDupeScore() > iQueueScore) &&
(!pHistoryDupe || pHistoryInfo->GetNZBInfo()->GetDupeScore() > pHistoryDupe->GetNZBInfo()->GetDupeScore()) &&
SameNameOrKey(pHistoryInfo->GetNZBInfo()->GetName(), pHistoryInfo->GetNZBInfo()->GetDupeKey(), szNZBName, szDupeKey))
@@ -458,7 +408,7 @@ void DupeCoordinator::ReturnBestDupe(DownloadQueue* pDownloadQueue, NZBInfo* pNZ
if (pHistoryDupe)
{
info("Found duplicate %s for %s", pHistoryDupe->GetNZBInfo()->GetName(), szNZBName);
HistoryRedownload(pDownloadQueue, pHistoryDupe);
g_pHistoryCoordinator->Redownload(pDownloadQueue, pHistoryDupe);
}
}
@@ -469,11 +419,11 @@ void DupeCoordinator::HistoryMark(DownloadQueue* pDownloadQueue, HistoryInfo* pH
info("Marking %s as %s", szNZBName, (bGood ? "good" : "bad"));
if (pHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo)
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb)
{
pHistoryInfo->GetNZBInfo()->SetMarkStatus(bGood ? NZBInfo::ksGood : NZBInfo::ksBad);
}
else if (pHistoryInfo->GetKind() == HistoryInfo::hkDupInfo)
else if (pHistoryInfo->GetKind() == HistoryInfo::hkDup)
{
pHistoryInfo->GetDupInfo()->SetStatus(bGood ? DupInfo::dsGood : DupInfo::dsBad);
}
@@ -484,9 +434,9 @@ void DupeCoordinator::HistoryMark(DownloadQueue* pDownloadQueue, HistoryInfo* pH
}
if (!g_pOptions->GetDupeCheck() ||
(pHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo &&
(pHistoryInfo->GetKind() == HistoryInfo::hkNzb &&
pHistoryInfo->GetNZBInfo()->GetDupeMode() == dmForce) ||
(pHistoryInfo->GetKind() == HistoryInfo::hkDupInfo &&
(pHistoryInfo->GetKind() == HistoryInfo::hkDup &&
pHistoryInfo->GetDupInfo()->GetDupeMode() == dmForce))
{
return;
@@ -501,8 +451,8 @@ void DupeCoordinator::HistoryMark(DownloadQueue* pDownloadQueue, HistoryInfo* pH
else
{
// mark as bad
const char* szDupeKey = pHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo ? pHistoryInfo->GetNZBInfo()->GetDupeKey() :
pHistoryInfo->GetKind() == HistoryInfo::hkDupInfo ? pHistoryInfo->GetDupInfo()->GetDupeKey() :
const char* szDupeKey = pHistoryInfo->GetKind() == HistoryInfo::hkNzb ? pHistoryInfo->GetNZBInfo()->GetDupeKey() :
pHistoryInfo->GetKind() == HistoryInfo::hkDup ? pHistoryInfo->GetDupInfo()->GetDupeKey() :
NULL;
ReturnBestDupe(pDownloadQueue, NULL, szNZBName, szDupeKey);
}
@@ -510,30 +460,30 @@ void DupeCoordinator::HistoryMark(DownloadQueue* pDownloadQueue, HistoryInfo* pH
void DupeCoordinator::HistoryCleanup(DownloadQueue* pDownloadQueue, HistoryInfo* pMarkHistoryInfo)
{
const char* szDupeKey = pMarkHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo ? pMarkHistoryInfo->GetNZBInfo()->GetDupeKey() :
pMarkHistoryInfo->GetKind() == HistoryInfo::hkDupInfo ? pMarkHistoryInfo->GetDupInfo()->GetDupeKey() :
const char* szDupeKey = pMarkHistoryInfo->GetKind() == HistoryInfo::hkNzb ? pMarkHistoryInfo->GetNZBInfo()->GetDupeKey() :
pMarkHistoryInfo->GetKind() == HistoryInfo::hkDup ? pMarkHistoryInfo->GetDupInfo()->GetDupeKey() :
NULL;
const char* szNZBName = pMarkHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo ? pMarkHistoryInfo->GetNZBInfo()->GetName() :
pMarkHistoryInfo->GetKind() == HistoryInfo::hkDupInfo ? pMarkHistoryInfo->GetDupInfo()->GetName() :
const char* szNZBName = pMarkHistoryInfo->GetKind() == HistoryInfo::hkNzb ? pMarkHistoryInfo->GetNZBInfo()->GetName() :
pMarkHistoryInfo->GetKind() == HistoryInfo::hkDup ? pMarkHistoryInfo->GetDupInfo()->GetName() :
NULL;
bool bChanged = false;
int index = 0;
// traversing in a reverse order to delete items in order they were added to history
// (just to produce the log-messages in a more logical order)
for (HistoryList::reverse_iterator it = pDownloadQueue->GetHistoryList()->rbegin(); it != pDownloadQueue->GetHistoryList()->rend(); )
for (HistoryList::reverse_iterator it = pDownloadQueue->GetHistory()->rbegin(); it != pDownloadQueue->GetHistory()->rend(); )
{
HistoryInfo* pHistoryInfo = *it;
if (pHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo &&
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb &&
pHistoryInfo->GetNZBInfo()->GetDupeMode() != dmForce &&
pHistoryInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsDupe &&
pHistoryInfo != pMarkHistoryInfo &&
SameNameOrKey(pHistoryInfo->GetNZBInfo()->GetName(), pHistoryInfo->GetNZBInfo()->GetDupeKey(), szNZBName, szDupeKey))
{
HistoryTransformToDup(pDownloadQueue, pHistoryInfo, index);
g_pHistoryCoordinator->HistoryHide(pDownloadQueue, pHistoryInfo, index);
index++;
it = pDownloadQueue->GetHistoryList()->rbegin() + index;
it = pDownloadQueue->GetHistory()->rbegin() + index;
bChanged = true;
}
else
@@ -543,41 +493,72 @@ void DupeCoordinator::HistoryCleanup(DownloadQueue* pDownloadQueue, HistoryInfo*
}
}
if (bChanged && g_pOptions->GetSaveQueue() && g_pOptions->GetServerMode())
if (bChanged)
{
g_pDiskState->SaveDownloadQueue(pDownloadQueue);
pDownloadQueue->Save();
}
}
void DupeCoordinator::HistoryTransformToDup(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo, int rindex)
DupeCoordinator::EDupeStatus DupeCoordinator::GetDupeStatus(DownloadQueue* pDownloadQueue,
const char* szName, const char* szDupeKey)
{
char szNiceName[1024];
pHistoryInfo->GetName(szNiceName, 1024);
EDupeStatus eStatuses = dsNone;
// replace history element
DupInfo* pDupInfo = new DupInfo();
pDupInfo->SetName(pHistoryInfo->GetNZBInfo()->GetName());
pDupInfo->SetDupeKey(pHistoryInfo->GetNZBInfo()->GetDupeKey());
pDupInfo->SetDupeScore(pHistoryInfo->GetNZBInfo()->GetDupeScore());
pDupInfo->SetDupeMode(pHistoryInfo->GetNZBInfo()->GetDupeMode());
pDupInfo->SetSize(pHistoryInfo->GetNZBInfo()->GetSize());
pDupInfo->SetFullContentHash(pHistoryInfo->GetNZBInfo()->GetFullContentHash());
pDupInfo->SetFilteredContentHash(pHistoryInfo->GetNZBInfo()->GetFilteredContentHash());
// find duplicates in download queue
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
NZBInfo* pNZBInfo = *it;
if (SameNameOrKey(szName, szDupeKey, pNZBInfo->GetName(), pNZBInfo->GetDupeKey()))
{
if (pNZBInfo->GetSuccessArticles() + pNZBInfo->GetFailedArticles() > 0)
{
eStatuses = (EDupeStatus)(eStatuses | dsDownloading);
}
else
{
eStatuses = (EDupeStatus)(eStatuses | dsQueued);
}
}
}
pDupInfo->SetStatus(
pHistoryInfo->GetNZBInfo()->GetMarkStatus() == NZBInfo::ksGood ? DupInfo::dsGood :
pHistoryInfo->GetNZBInfo()->GetMarkStatus() == NZBInfo::ksBad ? DupInfo::dsBad :
pHistoryInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsDupe ? DupInfo::dsDupe :
pHistoryInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsManual ? DupInfo::dsDeleted :
IsDupeSuccess(pHistoryInfo->GetNZBInfo()) ? DupInfo::dsSuccess :
DupInfo::dsFailed);
// find duplicates in history
for (HistoryList::iterator it = pDownloadQueue->GetHistory()->begin(); it != pDownloadQueue->GetHistory()->end(); it++)
{
HistoryInfo* pHistoryInfo = *it;
HistoryInfo* pNewHistoryInfo = new HistoryInfo(pDupInfo);
pNewHistoryInfo->SetTime(pHistoryInfo->GetTime());
(*pDownloadQueue->GetHistoryList())[pDownloadQueue->GetHistoryList()->size() - 1 - rindex] = pNewHistoryInfo;
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb &&
SameNameOrKey(szName, szDupeKey, pHistoryInfo->GetNZBInfo()->GetName(), pHistoryInfo->GetNZBInfo()->GetDupeKey()))
{
const char* szTextStatus = pHistoryInfo->GetNZBInfo()->MakeTextStatus(true);
if (!strncasecmp(szTextStatus, "SUCCESS", 7))
{
eStatuses = (EDupeStatus)(eStatuses | dsSuccess);
}
else if (!strncasecmp(szTextStatus, "FAILURE", 7))
{
eStatuses = (EDupeStatus)(eStatuses | dsFailure);
}
else if (!strncasecmp(szTextStatus, "WARNING", 7))
{
eStatuses = (EDupeStatus)(eStatuses | dsWarning);
}
}
DeleteQueuedFile(pHistoryInfo->GetNZBInfo()->GetQueuedFilename());
if (pHistoryInfo->GetKind() == HistoryInfo::hkDup &&
SameNameOrKey(szName, szDupeKey, pHistoryInfo->GetDupInfo()->GetName(), pHistoryInfo->GetDupInfo()->GetDupeKey()))
{
if (pHistoryInfo->GetDupInfo()->GetStatus() == DupInfo::dsSuccess ||
pHistoryInfo->GetDupInfo()->GetStatus() == DupInfo::dsGood)
{
eStatuses = (EDupeStatus)(eStatuses | dsSuccess);
}
else if (pHistoryInfo->GetDupInfo()->GetStatus() == DupInfo::dsFailed ||
pHistoryInfo->GetDupInfo()->GetStatus() == DupInfo::dsBad)
{
eStatuses = (EDupeStatus)(eStatuses | dsFailure);
}
}
}
delete pHistoryInfo;
info("Collection %s removed from history", szNiceName);
return eStatuses;
}
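The new GetDupeStatus() reports all matching states at once, since the EDupeStatus values are power-of-two flags. A minimal caller sketch (hypothetical helper, not part of this diff; it only assumes the declarations shown in these files):

// Hypothetical caller: true if a duplicate of the given name/key is already
// queued, downloading, or finished successfully.
bool IsDupeAlreadyKnown(DownloadQueue* pDownloadQueue, const char* szName, const char* szDupeKey)
{
	DupeCoordinator::EDupeStatus eStatus =
		g_pDupeCoordinator->GetDupeStatus(pDownloadQueue, szName, szDupeKey);
	// several flags can be set simultaneously, so test them as a bitmask
	return (eStatus & (DupeCoordinator::dsQueued |
		DupeCoordinator::dsDownloading |
		DupeCoordinator::dsSuccess)) != 0;
}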

View File

@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -26,28 +26,31 @@
#ifndef DUPECOORDINATOR_H
#define DUPECOORDINATOR_H
#include <deque>
#include "DownloadInfo.h"
class DupeCoordinator
{
public:
enum EDupeStatus
{
dsNone = 0,
dsQueued = 1,
dsDownloading = 2,
dsSuccess = 4,
dsWarning = 8,
dsFailure = 16
};
private:
bool IsDupeSuccess(NZBInfo* pNZBInfo);
void ReturnBestDupe(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo, const char* szNZBName, const char* szDupeKey);
void HistoryReturnDupe(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo);
void HistoryCleanup(DownloadQueue* pDownloadQueue, HistoryInfo* pMarkHistoryInfo);
bool SameNameOrKey(const char* szName1, const char* szDupeKey1, const char* szName2, const char* szDupeKey2);
protected:
virtual void HistoryRedownload(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo) = 0;
virtual void DeleteQueuedFile(const char* szQueuedFile) = 0;
public:
void NZBCompleted(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void NZBFound(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
void HistoryMark(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo, bool bGood);
void HistoryTransformToDup(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo, int rindex);
EDupeStatus GetDupeStatus(DownloadQueue* pDownloadQueue, const char* szName, const char* szDupeKey);
};
#endif

View File

@@ -0,0 +1,667 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision: 951 $
* $Date$
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef WIN32
#include "win32.h"
#endif
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#ifdef WIN32
#include <direct.h>
#else
#include <unistd.h>
#endif
#include <set>
#include <algorithm>
#include "nzbget.h"
#include "HistoryCoordinator.h"
#include "Options.h"
#include "Log.h"
#include "QueueCoordinator.h"
#include "DiskState.h"
#include "Util.h"
#include "NZBFile.h"
#include "DupeCoordinator.h"
#include "ParCoordinator.h"
#include "PrePostProcessor.h"
#include "DupeCoordinator.h"
extern QueueCoordinator* g_pQueueCoordinator;
extern PrePostProcessor* g_pPrePostProcessor;
extern DupeCoordinator* g_pDupeCoordinator;
extern Options* g_pOptions;
extern DiskState* g_pDiskState;
HistoryCoordinator::HistoryCoordinator()
{
debug("Creating HistoryCoordinator");
}
HistoryCoordinator::~HistoryCoordinator()
{
debug("Destroying HistoryCoordinator");
}
void HistoryCoordinator::Cleanup()
{
debug("Cleaning up HistoryCoordinator");
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
for (HistoryList::iterator it = pDownloadQueue->GetHistory()->begin(); it != pDownloadQueue->GetHistory()->end(); it++)
{
delete *it;
}
pDownloadQueue->GetHistory()->clear();
DownloadQueue::Unlock();
}
/**
* Removes old entries from (recent) history
*/
void HistoryCoordinator::IntervalCheck()
{
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
time_t tMinTime = time(NULL) - g_pOptions->GetKeepHistory() * 60*60*24;
bool bChanged = false;
int index = 0;
// traverse the history in reverse order so that items are deleted in the order they were added
// (this only makes the log messages appear in a more logical order)
for (HistoryList::reverse_iterator it = pDownloadQueue->GetHistory()->rbegin(); it != pDownloadQueue->GetHistory()->rend(); )
{
HistoryInfo* pHistoryInfo = *it;
if (pHistoryInfo->GetKind() != HistoryInfo::hkDup && pHistoryInfo->GetTime() < tMinTime)
{
if (g_pOptions->GetDupeCheck() && pHistoryInfo->GetKind() == HistoryInfo::hkNzb)
{
// replace history element
HistoryHide(pDownloadQueue, pHistoryInfo, index);
index++;
}
else
{
char szNiceName[1024];
pHistoryInfo->GetName(szNiceName, 1024);
pDownloadQueue->GetHistory()->erase(pDownloadQueue->GetHistory()->end() - 1 - index);
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb)
{
DeleteDiskFiles(pHistoryInfo->GetNZBInfo());
}
info("Collection %s removed from history", szNiceName);
delete pHistoryInfo;
}
it = pDownloadQueue->GetHistory()->rbegin() + index;
bChanged = true;
}
else
{
it++;
index++;
}
}
if (bChanged)
{
pDownloadQueue->Save();
}
DownloadQueue::Unlock();
}
void HistoryCoordinator::DeleteDiskFiles(NZBInfo* pNZBInfo)
{
if (g_pOptions->GetSaveQueue() && g_pOptions->GetServerMode())
{
// delete parked files
g_pDiskState->DiscardFiles(pNZBInfo);
}
pNZBInfo->GetFileList()->Clear();
// delete nzb-file
if (!g_pOptions->GetNzbCleanupDisk())
{
return;
}
// QueuedFile may contain a single filename or several filenames separated
// by the "|" character (for merged groups)
char* szFilename = strdup(pNZBInfo->GetQueuedFilename());
char* szEnd = szFilename - 1;
while (szEnd)
{
char* szName1 = szEnd + 1;
szEnd = strchr(szName1, '|');
if (szEnd) *szEnd = '\0';
if (Util::FileExists(szName1))
{
info("Deleting file %s", szName1);
remove(szName1);
}
}
free(szFilename);
}
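As the comment above notes, GetQueuedFilename() may hold several paths joined with "|". A standalone sketch of the same walk, on made-up sample paths and written without the pointer-before-start trick used above (plain C++, not part of the diff):

// Standalone sketch: split a "|"-separated filename list in place and
// act on each token (here it only prints the hypothetical paths).
#include <cstdio>
#include <cstdlib>
#include <cstring>

int main()
{
	char* szFilename = strdup("/downloads/nzb/a.nzb|/downloads/nzb/b.nzb");
	char* szName = szFilename;
	while (szName)
	{
		char* szEnd = strchr(szName, '|');
		if (szEnd) *szEnd = '\0';          // terminate the current token in place
		printf("would delete %s\n", szName);
		szName = szEnd ? szEnd + 1 : NULL; // advance past the separator, if any
	}
	free(szFilename);
	return 0;
}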
void HistoryCoordinator::AddToHistory(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo)
{
// remove the old history item for the same NZB
for (HistoryList::iterator it = pDownloadQueue->GetHistory()->begin(); it != pDownloadQueue->GetHistory()->end(); it++)
{
HistoryInfo* pHistoryInfo = *it;
if (pHistoryInfo->GetNZBInfo() == pNZBInfo)
{
delete pHistoryInfo;
pDownloadQueue->GetHistory()->erase(it);
break;
}
}
HistoryInfo* pHistoryInfo = new HistoryInfo(pNZBInfo);
pHistoryInfo->SetTime(time(NULL));
pDownloadQueue->GetHistory()->push_front(pHistoryInfo);
pDownloadQueue->GetQueue()->Remove(pNZBInfo);
if (pNZBInfo->GetDeleteStatus() == NZBInfo::dsNone)
{
// park files and delete files marked for deletion
int iParkedFiles = 0;
for (FileList::iterator it = pNZBInfo->GetFileList()->begin(); it != pNZBInfo->GetFileList()->end(); )
{
FileInfo* pFileInfo = *it;
if (!pFileInfo->GetDeleted())
{
detail("Parking file %s", pFileInfo->GetFilename());
g_pQueueCoordinator->DiscardDiskFile(pFileInfo);
iParkedFiles++;
it++;
}
else
{
// since we removed pNZBInfo from the queue, we also need to remove file infos marked for deletion
pNZBInfo->GetFileList()->erase(it);
delete pFileInfo;
it = pNZBInfo->GetFileList()->begin() + iParkedFiles;
}
}
pNZBInfo->SetParkedFileCount(iParkedFiles);
}
else
{
pNZBInfo->GetFileList()->Clear();
}
info("Collection %s added to history", pNZBInfo->GetName());
}
void HistoryCoordinator::HistoryHide(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo, int rindex)
{
char szNiceName[1024];
pHistoryInfo->GetName(szNiceName, 1024);
// replace history element
DupInfo* pDupInfo = new DupInfo();
pDupInfo->SetID(pHistoryInfo->GetNZBInfo()->GetID());
pDupInfo->SetName(pHistoryInfo->GetNZBInfo()->GetName());
pDupInfo->SetDupeKey(pHistoryInfo->GetNZBInfo()->GetDupeKey());
pDupInfo->SetDupeScore(pHistoryInfo->GetNZBInfo()->GetDupeScore());
pDupInfo->SetDupeMode(pHistoryInfo->GetNZBInfo()->GetDupeMode());
pDupInfo->SetSize(pHistoryInfo->GetNZBInfo()->GetSize());
pDupInfo->SetFullContentHash(pHistoryInfo->GetNZBInfo()->GetFullContentHash());
pDupInfo->SetFilteredContentHash(pHistoryInfo->GetNZBInfo()->GetFilteredContentHash());
pDupInfo->SetStatus(
pHistoryInfo->GetNZBInfo()->GetMarkStatus() == NZBInfo::ksGood ? DupInfo::dsGood :
pHistoryInfo->GetNZBInfo()->GetMarkStatus() == NZBInfo::ksBad ? DupInfo::dsBad :
pHistoryInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsDupe ? DupInfo::dsDupe :
pHistoryInfo->GetNZBInfo()->GetDeleteStatus() == NZBInfo::dsManual ? DupInfo::dsDeleted :
pHistoryInfo->GetNZBInfo()->IsDupeSuccess() ? DupInfo::dsSuccess :
DupInfo::dsFailed);
HistoryInfo* pNewHistoryInfo = new HistoryInfo(pDupInfo);
pNewHistoryInfo->SetTime(pHistoryInfo->GetTime());
(*pDownloadQueue->GetHistory())[pDownloadQueue->GetHistory()->size() - 1 - rindex] = pNewHistoryInfo;
DeleteDiskFiles(pHistoryInfo->GetNZBInfo());
delete pHistoryInfo;
info("Collection %s removed from history", szNiceName);
}
bool HistoryCoordinator::EditList(DownloadQueue* pDownloadQueue, IDList* pIDList, DownloadQueue::EEditAction eAction, int iOffset, const char* szText)
{
bool bOK = false;
for (IDList::iterator itID = pIDList->begin(); itID != pIDList->end(); itID++)
{
int iID = *itID;
for (HistoryList::iterator itHistory = pDownloadQueue->GetHistory()->begin(); itHistory != pDownloadQueue->GetHistory()->end(); itHistory++)
{
HistoryInfo* pHistoryInfo = *itHistory;
if (pHistoryInfo->GetID() == iID)
{
switch (eAction)
{
case DownloadQueue::eaHistoryDelete:
case DownloadQueue::eaHistoryFinalDelete:
HistoryDelete(pDownloadQueue, itHistory, pHistoryInfo, eAction == DownloadQueue::eaHistoryFinalDelete);
break;
case DownloadQueue::eaHistoryReturn:
case DownloadQueue::eaHistoryProcess:
HistoryReturn(pDownloadQueue, itHistory, pHistoryInfo, eAction == DownloadQueue::eaHistoryProcess);
break;
case DownloadQueue::eaHistoryRedownload:
HistoryRedownload(pDownloadQueue, itHistory, pHistoryInfo, false);
break;
case DownloadQueue::eaHistorySetParameter:
HistorySetParameter(pHistoryInfo, szText);
break;
case DownloadQueue::eaHistorySetDupeKey:
case DownloadQueue::eaHistorySetDupeScore:
case DownloadQueue::eaHistorySetDupeMode:
case DownloadQueue::eaHistorySetDupeBackup:
HistorySetDupeParam(pHistoryInfo, eAction, szText);
break;
case DownloadQueue::eaHistoryMarkBad:
case DownloadQueue::eaHistoryMarkGood:
g_pDupeCoordinator->HistoryMark(pDownloadQueue, pHistoryInfo, eAction == DownloadQueue::eaHistoryMarkGood);
break;
default:
// nothing, just to avoid compiler warning
break;
}
bOK = true;
break;
}
}
}
if (bOK)
{
pDownloadQueue->Save();
}
return bOK;
}
void HistoryCoordinator::HistoryDelete(DownloadQueue* pDownloadQueue, HistoryList::iterator itHistory,
HistoryInfo* pHistoryInfo, bool bFinal)
{
char szNiceName[1024];
pHistoryInfo->GetName(szNiceName, 1024);
info("Deleting %s from history", szNiceName);
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb)
{
DeleteDiskFiles(pHistoryInfo->GetNZBInfo());
}
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb &&
g_pOptions->GetDeleteCleanupDisk() &&
(pHistoryInfo->GetNZBInfo()->GetDeleteStatus() != NZBInfo::dsNone ||
pHistoryInfo->GetNZBInfo()->GetParStatus() == NZBInfo::psFailure ||
pHistoryInfo->GetNZBInfo()->GetUnpackStatus() == NZBInfo::usFailure ||
pHistoryInfo->GetNZBInfo()->GetUnpackStatus() == NZBInfo::usPassword) &&
Util::DirectoryExists(pHistoryInfo->GetNZBInfo()->GetDestDir()))
{
info("Deleting %s", pHistoryInfo->GetNZBInfo()->GetDestDir());
char szErrBuf[256];
if (!Util::DeleteDirectoryWithContent(pHistoryInfo->GetNZBInfo()->GetDestDir(), szErrBuf, sizeof(szErrBuf)))
{
error("Could not delete directory %s: %s", pHistoryInfo->GetNZBInfo()->GetDestDir(), szErrBuf);
}
}
if (bFinal || !g_pOptions->GetDupeCheck() || pHistoryInfo->GetKind() == HistoryInfo::hkUrl)
{
pDownloadQueue->GetHistory()->erase(itHistory);
delete pHistoryInfo;
}
else
{
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb)
{
// replace history element
int rindex = pDownloadQueue->GetHistory()->size() - 1 - (itHistory - pDownloadQueue->GetHistory()->begin());
HistoryHide(pDownloadQueue, pHistoryInfo, rindex);
}
}
}
void HistoryCoordinator::HistoryReturn(DownloadQueue* pDownloadQueue, HistoryList::iterator itHistory, HistoryInfo* pHistoryInfo, bool bReprocess)
{
char szNiceName[1024];
pHistoryInfo->GetName(szNiceName, 1024);
debug("Returning %s from history back to download queue", szNiceName);
NZBInfo* pNZBInfo = NULL;
if (bReprocess && pHistoryInfo->GetKind() != HistoryInfo::hkNzb)
{
error("Could not restart postprocessing for %s: history item has wrong type", szNiceName);
return;
}
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb)
{
pNZBInfo = pHistoryInfo->GetNZBInfo();
// unpark files
bool bUnparked = false;
for (FileList::iterator it = pNZBInfo->GetFileList()->begin(); it != pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
detail("Unpark file %s", pFileInfo->GetFilename());
bUnparked = true;
}
if (!(bUnparked || bReprocess))
{
warn("Could not return %s back from history to download queue: history item does not have any files left for download", szNiceName);
return;
}
pDownloadQueue->GetQueue()->push_front(pNZBInfo);
pHistoryInfo->DiscardNZBInfo();
// reset postprocessing status variables
pNZBInfo->SetParCleanup(false);
if (!pNZBInfo->GetUnpackCleanedUpDisk())
{
pNZBInfo->SetUnpackStatus(NZBInfo::usNone);
pNZBInfo->SetCleanupStatus(NZBInfo::csNone);
pNZBInfo->SetRenameStatus(NZBInfo::rsNone);
pNZBInfo->SetPostTotalSec(pNZBInfo->GetPostTotalSec() - pNZBInfo->GetUnpackSec());
pNZBInfo->SetUnpackSec(0);
if (ParCoordinator::FindMainPars(pNZBInfo->GetDestDir(), NULL))
{
pNZBInfo->SetParStatus(NZBInfo::psNone);
pNZBInfo->SetPostTotalSec(pNZBInfo->GetPostTotalSec() - pNZBInfo->GetParSec());
pNZBInfo->SetParSec(0);
pNZBInfo->SetRepairSec(0);
pNZBInfo->SetParFull(false);
}
}
pNZBInfo->SetDeleteStatus(NZBInfo::dsNone);
pNZBInfo->SetDeletePaused(false);
pNZBInfo->SetMarkStatus(NZBInfo::ksNone);
pNZBInfo->GetScriptStatuses()->Clear();
pNZBInfo->SetParkedFileCount(0);
if (pNZBInfo->GetMoveStatus() == NZBInfo::msFailure)
{
pNZBInfo->SetMoveStatus(NZBInfo::msNone);
}
pNZBInfo->SetReprocess(bReprocess);
}
if (pHistoryInfo->GetKind() == HistoryInfo::hkUrl)
{
NZBInfo* pNZBInfo = pHistoryInfo->GetNZBInfo();
pHistoryInfo->DiscardNZBInfo();
pNZBInfo->SetUrlStatus(NZBInfo::lsNone);
pNZBInfo->SetDeleteStatus(NZBInfo::dsNone);
pDownloadQueue->GetQueue()->push_front(pNZBInfo);
}
pDownloadQueue->GetHistory()->erase(itHistory);
// the object "pHistoryInfo" is released few lines later, after the call to "NZBDownloaded"
info("%s returned from history back to download queue", szNiceName);
if (bReprocess)
{
// start postprocessing
debug("Restarting postprocessing for %s", szNiceName);
g_pPrePostProcessor->NZBDownloaded(pDownloadQueue, pNZBInfo);
}
delete pHistoryInfo;
}
void HistoryCoordinator::HistoryRedownload(DownloadQueue* pDownloadQueue, HistoryList::iterator itHistory,
HistoryInfo* pHistoryInfo, bool bRestorePauseState)
{
NZBInfo* pNZBInfo = pHistoryInfo->GetNZBInfo();
bool bPaused = bRestorePauseState && pNZBInfo->GetDeletePaused();
if (!Util::FileExists(pNZBInfo->GetQueuedFilename()))
{
error("Could not return collection %s from history back to queue: could not find source nzb-file %s",
pNZBInfo->GetName(), pNZBInfo->GetQueuedFilename());
return;
}
NZBFile* pNZBFile = NZBFile::Create(pNZBInfo->GetQueuedFilename(), "");
if (pNZBFile == NULL)
{
error("Could not return collection %s from history back to queue: could not parse nzb-file",
pNZBInfo->GetName());
return;
}
info("Returning collection %s from history back to queue", pNZBInfo->GetName());
for (FileList::iterator it = pNZBFile->GetNZBInfo()->GetFileList()->begin(); it != pNZBFile->GetNZBInfo()->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
pFileInfo->SetPaused(bPaused);
}
if (Util::DirectoryExists(pNZBInfo->GetDestDir()))
{
detail("Deleting %s", pNZBInfo->GetDestDir());
char szErrBuf[256];
if (!Util::DeleteDirectoryWithContent(pNZBInfo->GetDestDir(), szErrBuf, sizeof(szErrBuf)))
{
error("Could not delete directory %s: %s", pNZBInfo->GetDestDir(), szErrBuf);
}
}
pNZBInfo->BuildDestDirName();
if (Util::DirectoryExists(pNZBInfo->GetDestDir()))
{
detail("Deleting %s", pNZBInfo->GetDestDir());
char szErrBuf[256];
if (!Util::DeleteDirectoryWithContent(pNZBInfo->GetDestDir(), szErrBuf, sizeof(szErrBuf)))
{
error("Could not delete directory %s: %s", pNZBInfo->GetDestDir(), szErrBuf);
}
}
g_pDiskState->DiscardFiles(pNZBInfo);
// reset status fields (which are not reset by "HistoryReturn")
pNZBInfo->SetMoveStatus(NZBInfo::msNone);
pNZBInfo->SetUnpackCleanedUpDisk(false);
pNZBInfo->SetParStatus(NZBInfo::psNone);
pNZBInfo->SetRenameStatus(NZBInfo::rsNone);
pNZBInfo->SetDownloadedSize(0);
pNZBInfo->SetDownloadSec(0);
pNZBInfo->SetPostTotalSec(0);
pNZBInfo->SetParSec(0);
pNZBInfo->SetRepairSec(0);
pNZBInfo->SetUnpackSec(0);
pNZBInfo->ClearCompletedFiles();
pNZBInfo->GetServerStats()->Clear();
pNZBInfo->GetCurrentServerStats()->Clear();
pNZBInfo->CopyFileList(pNZBFile->GetNZBInfo());
g_pQueueCoordinator->CheckDupeFileInfos(pNZBInfo);
delete pNZBFile;
HistoryReturn(pDownloadQueue, itHistory, pHistoryInfo, false);
g_pPrePostProcessor->NZBAdded(pDownloadQueue, pNZBInfo);
}
void HistoryCoordinator::HistorySetParameter(HistoryInfo* pHistoryInfo, const char* szText)
{
char szNiceName[1024];
pHistoryInfo->GetName(szNiceName, 1024);
debug("Setting post-process-parameter '%s' for '%s'", szText, szNiceName);
if (!(pHistoryInfo->GetKind() == HistoryInfo::hkNzb || pHistoryInfo->GetKind() == HistoryInfo::hkUrl))
{
error("Could not set post-process-parameter for %s: history item has wrong type", szNiceName);
return;
}
char* szStr = strdup(szText);
char* szValue = strchr(szStr, '=');
if (szValue)
{
*szValue = '\0';
szValue++;
pHistoryInfo->GetNZBInfo()->GetParameters()->SetParameter(szStr, szValue);
}
else
{
error("Could not set post-process-parameter for %s: invalid argument: %s", pHistoryInfo->GetNZBInfo()->GetName(), szText);
}
free(szStr);
}
void HistoryCoordinator::HistorySetDupeParam(HistoryInfo* pHistoryInfo, DownloadQueue::EEditAction eAction, const char* szText)
{
char szNiceName[1024];
pHistoryInfo->GetName(szNiceName, 1024);
debug("Setting dupe-parameter '%i'='%s' for '%s'", (int)eAction, szText, szNiceName);
EDupeMode eMode = dmScore;
if (eAction == DownloadQueue::eaHistorySetDupeMode)
{
if (!strcasecmp(szText, "SCORE"))
{
eMode = dmScore;
}
else if (!strcasecmp(szText, "ALL"))
{
eMode = dmAll;
}
else if (!strcasecmp(szText, "FORCE"))
{
eMode = dmForce;
}
else
{
error("Could not set duplicate mode for %s: incorrect mode (%s)", szNiceName, szText);
return;
}
}
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb || pHistoryInfo->GetKind() == HistoryInfo::hkUrl)
{
switch (eAction)
{
case DownloadQueue::eaHistorySetDupeKey:
pHistoryInfo->GetNZBInfo()->SetDupeKey(szText);
break;
case DownloadQueue::eaHistorySetDupeScore:
pHistoryInfo->GetNZBInfo()->SetDupeScore(atoi(szText));
break;
case DownloadQueue::eaHistorySetDupeMode:
pHistoryInfo->GetNZBInfo()->SetDupeMode(eMode);
break;
case DownloadQueue::eaHistorySetDupeBackup:
if (pHistoryInfo->GetKind() == HistoryInfo::hkUrl)
{
error("Could not set duplicate parameter for %s: history item has wrong type", szNiceName);
return;
}
else if (pHistoryInfo->GetNZBInfo()->GetDeleteStatus() != NZBInfo::dsDupe &&
pHistoryInfo->GetNZBInfo()->GetDeleteStatus() != NZBInfo::dsManual)
{
error("Could not set duplicate parameter for %s: history item has wrong delete status", szNiceName);
return;
}
pHistoryInfo->GetNZBInfo()->SetDeleteStatus(!strcasecmp(szText, "YES") ||
!strcasecmp(szText, "TRUE") || !strcasecmp(szText, "1") ? NZBInfo::dsDupe : NZBInfo::dsManual);
break;
default:
// suppress compiler warning
break;
}
}
else if (pHistoryInfo->GetKind() == HistoryInfo::hkDup)
{
switch (eAction)
{
case DownloadQueue::eaHistorySetDupeKey:
pHistoryInfo->GetDupInfo()->SetDupeKey(szText);
break;
case DownloadQueue::eaHistorySetDupeScore:
pHistoryInfo->GetDupInfo()->SetDupeScore(atoi(szText));
break;
case DownloadQueue::eaHistorySetDupeMode:
pHistoryInfo->GetDupInfo()->SetDupeMode(eMode);
break;
case DownloadQueue::eaHistorySetDupeBackup:
error("Could not set duplicate parameter for %s: history item has wrong type", szNiceName);
return;
default:
// suppress compiler warning
break;
}
}
}
void HistoryCoordinator::Redownload(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo)
{
HistoryList::iterator it = std::find(pDownloadQueue->GetHistory()->begin(),
pDownloadQueue->GetHistory()->end(), pHistoryInfo);
HistoryRedownload(pDownloadQueue, it, pHistoryInfo, true);
}

View File

@@ -0,0 +1,54 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision: 951 $
* $Date$
*
*/
#ifndef HISTORYCOORDINATOR_H
#define HISTORYCOORDINATOR_H
#include "DownloadInfo.h"
class HistoryCoordinator
{
private:
void HistoryDelete(DownloadQueue* pDownloadQueue, HistoryList::iterator itHistory, HistoryInfo* pHistoryInfo, bool bFinal);
void HistoryReturn(DownloadQueue* pDownloadQueue, HistoryList::iterator itHistory, HistoryInfo* pHistoryInfo, bool bReprocess);
void HistoryRedownload(DownloadQueue* pDownloadQueue, HistoryList::iterator itHistory, HistoryInfo* pHistoryInfo, bool bRestorePauseState);
void HistorySetParameter(HistoryInfo* pHistoryInfo, const char* szText);
void HistorySetDupeParam(HistoryInfo* pHistoryInfo, DownloadQueue::EEditAction eAction, const char* szText);
void HistoryTransformToDup(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo, int rindex);
void SaveQueue(DownloadQueue* pDownloadQueue);
public:
HistoryCoordinator();
virtual ~HistoryCoordinator();
void AddToHistory(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo);
bool EditList(DownloadQueue* pDownloadQueue, IDList* pIDList, DownloadQueue::EEditAction eAction, int iOffset, const char* szText);
void DeleteDiskFiles(NZBInfo* pNZBInfo);
void HistoryHide(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo, int rindex);
void Redownload(DownloadQueue* pDownloadQueue, HistoryInfo* pHistoryInfo);
void IntervalCheck();
void Cleanup();
};
#endif

View File

@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -64,7 +64,6 @@ NZBFile::NZBFile(const char* szFileName, const char* szCategory)
m_szFileName = strdup(szFileName);
m_szPassword = NULL;
m_pNZBInfo = new NZBInfo();
m_pNZBInfo->Retain();
m_pNZBInfo->SetFilename(szFileName);
m_pNZBInfo->SetCategory(szCategory);
m_pNZBInfo->BuildDestDirName();
@@ -76,8 +75,6 @@ NZBFile::NZBFile(const char* szFileName, const char* szCategory)
m_szTagContent = NULL;
m_iTagContentLen = 0;
#endif
m_FileInfos.clear();
}
NZBFile::~NZBFile()
@@ -88,31 +85,17 @@ NZBFile::~NZBFile()
free(m_szFileName);
free(m_szPassword);
for (FileInfos::iterator it = m_FileInfos.begin(); it != m_FileInfos.end(); it++)
{
delete *it;
}
m_FileInfos.clear();
if (m_pNZBInfo)
{
m_pNZBInfo->Release();
}
#ifndef WIN32
delete m_pFileInfo;
free(m_szTagContent);
#endif
delete m_pNZBInfo;
}
void NZBFile::LogDebugInfo()
{
debug(" NZBFile %s", m_szFileName);
}
void NZBFile::DetachFileInfos()
{
m_FileInfos.clear();
info(" NZBFile %s", m_szFileName);
}
void NZBFile::AddArticle(FileInfo* pFileInfo, ArticleInfo* pArticleInfo)
@@ -178,7 +161,7 @@ void NZBFile::AddFileInfo(FileInfo* pFileInfo)
lMissedSize += iUncountedArticles * lOneSize;
lSize += lMissedSize;
m_FileInfos.push_back(pFileInfo);
m_pNZBInfo->GetFileList()->push_back(pFileInfo);
pFileInfo->SetNZBInfo(m_pNZBInfo);
pFileInfo->SetSize(lSize);
pFileInfo->SetRemainingSize(lSize - lMissedSize);
@@ -301,11 +284,11 @@ void NZBFile::ParseSubject(FileInfo* pFileInfo, bool TryQuotes)
bool NZBFile::HasDuplicateFilenames()
{
for (FileInfos::iterator it = m_FileInfos.begin(); it != m_FileInfos.end(); it++)
for (FileList::iterator it = m_pNZBInfo->GetFileList()->begin(); it != m_pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo1 = *it;
int iDupe = 1;
for (FileInfos::iterator it2 = it + 1; it2 != m_FileInfos.end(); it2++)
for (FileList::iterator it2 = it + 1; it2 != m_pNZBInfo->GetFileList()->end(); it2++)
{
FileInfo* pFileInfo2 = *it2;
if (!strcmp(pFileInfo1->GetFilename(), pFileInfo2->GetFilename()) &&
@@ -321,7 +304,7 @@ bool NZBFile::HasDuplicateFilenames()
// false "duplicate files"-alarm.
// It's OK for just two files to have the same filename; this is
// a common case caused by posting errors, where bad files are reposted
if (iDupe > 2 || (iDupe == 2 && m_FileInfos.size() == 2))
if (iDupe > 2 || (iDupe == 2 && m_pNZBInfo->GetFileList()->size() == 2))
{
return true;
}
@@ -335,7 +318,7 @@ bool NZBFile::HasDuplicateFilenames()
*/
void NZBFile::BuildFilenames()
{
for (FileInfos::iterator it = m_FileInfos.begin(); it != m_FileInfos.end(); it++)
for (FileList::iterator it = m_pNZBInfo->GetFileList()->begin(); it != m_pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
ParseSubject(pFileInfo, true);
@@ -343,7 +326,7 @@ void NZBFile::BuildFilenames()
if (HasDuplicateFilenames())
{
for (FileInfos::iterator it = m_FileInfos.begin(); it != m_FileInfos.end(); it++)
for (FileList::iterator it = m_pNZBInfo->GetFileList()->begin(); it != m_pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
ParseSubject(pFileInfo, false);
@@ -353,7 +336,7 @@ void NZBFile::BuildFilenames()
if (HasDuplicateFilenames())
{
m_pNZBInfo->SetManyDupeFiles(true);
for (FileInfos::iterator it = m_FileInfos.begin(); it != m_FileInfos.end(); it++)
for (FileList::iterator it = m_pNZBInfo->GetFileList()->begin(); it != m_pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
pFileInfo->SetFilename(pFileInfo->GetSubject());
@@ -368,49 +351,26 @@ bool CompareFileInfo(FileInfo* pFirst, FileInfo* pSecond)
void NZBFile::CalcHashes()
{
FileInfoList fileList;
TempFileList fileList;
for (FileInfos::iterator it = m_FileInfos.begin(); it != m_FileInfos.end(); it++)
for (FileList::iterator it = m_pNZBInfo->GetFileList()->begin(); it != m_pNZBInfo->GetFileList()->end(); it++)
{
fileList.push_back(*it);
}
fileList.sort(CompareFileInfo);
// split ExtCleanupDisk into tokens and create a list
ExtList extList;
char* szExtCleanupDisk = strdup(g_pOptions->GetExtCleanupDisk());
char* saveptr;
char* szExt = strtok_r(szExtCleanupDisk, ",; ", &saveptr);
while (szExt)
{
extList.push_back(szExt);
szExt = strtok_r(NULL, ",; ", &saveptr);
}
unsigned int iFullContentHash = 0;
unsigned int iFilteredContentHash = 0;
int iUseForFilteredCount = 0;
for (FileInfoList::iterator it = fileList.begin(); it != fileList.end(); it++)
for (TempFileList::iterator it = fileList.begin(); it != fileList.end(); it++)
{
FileInfo* pFileInfo = *it;
// check file extension
int iFilenameLen = strlen(pFileInfo->GetFilename());
bool bSkip = false;
for (ExtList::iterator it = extList.begin(); it != extList.end(); it++)
{
const char* szExt = *it;
int iExtLen = strlen(szExt);
if (iFilenameLen >= iExtLen && !strcasecmp(szExt, pFileInfo->GetFilename() + iFilenameLen - iExtLen))
{
bSkip = true;
break;
}
}
bSkip = bSkip && !pFileInfo->GetParFile();
bool bSkip = !pFileInfo->GetParFile() &&
Util::MatchFileExt(pFileInfo->GetFilename(), g_pOptions->GetExtCleanupDisk(), ",;");
for (FileInfo::Articles::iterator it = pFileInfo->GetArticles()->begin(); it != pFileInfo->GetArticles()->end(); it++)
{
@@ -425,8 +385,6 @@ void NZBFile::CalcHashes()
}
}
free(szExtCleanupDisk);
// if filtered hash is based on less than a half of files - do not use filtered hash at all
if (iUseForFilteredCount < (int)fileList.size() / 2)
{
@@ -441,7 +399,7 @@ void NZBFile::ProcessFiles()
{
BuildFilenames();
for (FileInfos::iterator it = m_FileInfos.begin(); it != m_FileInfos.end(); it++)
for (FileList::iterator it = m_pNZBInfo->GetFileList()->begin(); it != m_pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
pFileInfo->MakeValidFilename();
@@ -455,6 +413,7 @@ void NZBFile::ProcessFiles()
m_pNZBInfo->SetFileCount(m_pNZBInfo->GetFileCount() + 1);
m_pNZBInfo->SetTotalArticles(m_pNZBInfo->GetTotalArticles() + pFileInfo->GetTotalArticles());
m_pNZBInfo->SetSize(m_pNZBInfo->GetSize() + pFileInfo->GetSize());
m_pNZBInfo->SetRemainingSize(m_pNZBInfo->GetRemainingSize() + pFileInfo->GetRemainingSize());
m_pNZBInfo->SetFailedSize(m_pNZBInfo->GetFailedSize() + pFileInfo->GetMissedSize());
m_pNZBInfo->SetCurrentFailedSize(m_pNZBInfo->GetFailedSize());
@@ -464,14 +423,17 @@ void NZBFile::ProcessFiles()
m_pNZBInfo->SetParSize(m_pNZBInfo->GetParSize() + pFileInfo->GetSize());
m_pNZBInfo->SetParFailedSize(m_pNZBInfo->GetParFailedSize() + pFileInfo->GetMissedSize());
m_pNZBInfo->SetParCurrentFailedSize(m_pNZBInfo->GetParFailedSize());
m_pNZBInfo->SetRemainingParCount(m_pNZBInfo->GetRemainingParCount() + 1);
}
}
m_pNZBInfo->UpdateMinMaxTime();
CalcHashes();
if (g_pOptions->GetSaveQueue() && g_pOptions->GetServerMode())
{
for (FileInfos::iterator it = m_FileInfos.begin(); it != m_FileInfos.end(); it++)
for (FileList::iterator it = m_pNZBInfo->GetFileList()->begin(); it != m_pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
g_pDiskState->SaveFile(pFileInfo);
@@ -491,7 +453,7 @@ void NZBFile::ProcessFiles()
*/
void NZBFile::ReadPassword()
{
FILE* pFile = fopen(m_szFileName, "rb");
FILE* pFile = fopen(m_szFileName, FOPEN_RB);
if (!pFile)
{
return;
@@ -499,7 +461,7 @@ void NZBFile::ReadPassword()
// obtain file size.
fseek(pFile , 0 , SEEK_END);
int iSize = ftell(pFile);
int iSize = (int)ftell(pFile);
rewind(pFile);
// reading first 4KB of the file
@@ -720,6 +682,12 @@ void NZBFile::Parse_StartElement(const char *name, const char **atts)
m_pFileInfo = new FileInfo();
m_pFileInfo->SetFilename(m_szFileName);
if (!atts)
{
warn("Malformed nzb-file, tag <%s> must have attributes", name);
return;
}
for (int i = 0; atts[i]; i += 2)
{
const char* attrname = atts[i];
@@ -738,10 +706,16 @@ void NZBFile::Parse_StartElement(const char *name, const char **atts)
{
if (!m_pFileInfo)
{
// error: bad nzb-file
warn("Malformed nzb-file, tag <segment> without tag <file>");
return;
}
if (!atts)
{
warn("Malformed nzb-file, tag <%s> must have attributes", name);
return;
}
long long lsize = -1;
int partNumber = -1;
@@ -770,6 +744,11 @@ void NZBFile::Parse_StartElement(const char *name, const char **atts)
}
else if (!strcmp("meta", name))
{
if (!atts)
{
warn("Malformed nzb-file, tag <%s> must have attributes", name);
return;
}
m_bPassword = atts[0] && atts[1] && !strcmp("type", atts[0]) && !strcmp("password", atts[1]);
}
}

View File

@@ -27,7 +27,6 @@
#ifndef NZBFILE_H
#define NZBFILE_H
#include <vector>
#include <list>
#include "DownloadInfo.h"
@@ -35,12 +34,9 @@
class NZBFile
{
public:
typedef std::vector<FileInfo*> FileInfos;
typedef std::list<FileInfo*> FileInfoList;
typedef std::list<char*> ExtList;
typedef std::list<FileInfo*> TempFileList;
private:
FileInfos m_FileInfos;
NZBInfo* m_pNZBInfo;
char* m_szFileName;
char* m_szPassword;
@@ -79,10 +75,9 @@ public:
virtual ~NZBFile();
static NZBFile* Create(const char* szFileName, const char* szCategory);
const char* GetFileName() const { return m_szFileName; }
FileInfos* GetFileInfos() { return &m_FileInfos; }
NZBInfo* GetNZBInfo() { return m_pNZBInfo; }
const char* GetPassword() { return m_szPassword; }
void DetachFileInfos();
void DetachNZBInfo() { m_pNZBInfo = NULL; }
void LogDebugInfo();
};

View File

File diff suppressed because it is too large

View File

@@ -0,0 +1,100 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2004 Sven Henkel <sidddy@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef QUEUECOORDINATOR_H
#define QUEUECOORDINATOR_H
#include <deque>
#include <list>
#include "Log.h"
#include "Thread.h"
#include "NZBFile.h"
#include "ArticleDownloader.h"
#include "DownloadInfo.h"
#include "Observer.h"
#include "QueueEditor.h"
#include "NNTPConnection.h"
class QueueCoordinator : public Thread, public Observer, public Debuggable
{
public:
typedef std::list<ArticleDownloader*> ActiveDownloads;
private:
class CoordinatorDownloadQueue : public DownloadQueue
{
private:
QueueCoordinator* m_pOwner;
friend class QueueCoordinator;
public:
virtual bool EditEntry(int ID, EEditAction eAction, int iOffset, const char* szText);
virtual bool EditList(IDList* pIDList, NameList* pNameList, EMatchMode eMatchMode, EEditAction eAction, int iOffset, const char* szText);
virtual void Save();
};
private:
CoordinatorDownloadQueue m_DownloadQueue;
ActiveDownloads m_ActiveDownloads;
QueueEditor m_QueueEditor;
bool m_bHasMoreJobs;
int m_iDownloadsLimit;
int m_iServerConfigGeneration;
bool GetNextArticle(DownloadQueue* pDownloadQueue, FileInfo* &pFileInfo, ArticleInfo* &pArticleInfo);
void StartArticleDownload(FileInfo* pFileInfo, ArticleInfo* pArticleInfo, NNTPConnection* pConnection);
void ArticleCompleted(ArticleDownloader* pArticleDownloader);
void DeleteFileInfo(DownloadQueue* pDownloadQueue, FileInfo* pFileInfo, bool bCompleted);
void StatFileInfo(FileInfo* pFileInfo, bool bCompleted);
void CheckHealth(DownloadQueue* pDownloadQueue, FileInfo* pFileInfo);
void ResetHangingDownloads();
void AdjustDownloadsLimit();
void Load();
void SavePartialState();
protected:
virtual void LogDebugInfo();
public:
QueueCoordinator();
virtual ~QueueCoordinator();
virtual void Run();
virtual void Stop();
void Update(Subject* Caller, void* Aspect);
// editing queue
void AddNZBFileToQueue(NZBFile* pNZBFile, NZBInfo* pUrlInfo, bool bAddFirst);
void CheckDupeFileInfos(NZBInfo* pNZBInfo);
bool HasMoreJobs() { return m_bHasMoreJobs; }
void DiscardDiskFile(FileInfo* pFileInfo);
bool DeleteQueueEntry(DownloadQueue* pDownloadQueue, FileInfo* pFileInfo);
bool SetQueueEntryCategory(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo, const char* szCategory);
bool SetQueueEntryName(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo, const char* szName);
bool MergeQueueEntries(DownloadQueue* pDownloadQueue, NZBInfo* pDestNZBInfo, NZBInfo* pSrcNZBInfo);
bool SplitQueueEntries(DownloadQueue* pDownloadQueue, FileList* pFileList, const char* szName, NZBInfo** pNewNZBInfo);
};
#endif

daemon/queue/QueueEditor.cpp Normal file (1085 lines)
View File

File diff suppressed because it is too large

View File

@@ -0,0 +1,80 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef QUEUEEDITOR_H
#define QUEUEEDITOR_H
#include <vector>
#include "DownloadInfo.h"
class QueueEditor
{
private:
class EditItem
{
public:
int m_iOffset;
FileInfo* m_pFileInfo;
NZBInfo* m_pNZBInfo;
EditItem(FileInfo* pFileInfo, NZBInfo* pNZBInfo, int iOffset);
};
typedef std::vector<EditItem*> ItemList;
DownloadQueue* m_pDownloadQueue;
private:
FileInfo* FindFileInfo(int iID);
bool InternEditList(ItemList* pItemList, IDList* pIDList, DownloadQueue::EEditAction eAction, int iOffset, const char* szText);
void PrepareList(ItemList* pItemList, IDList* pIDList, DownloadQueue::EEditAction eAction, int iOffset);
bool BuildIDListFromNameList(IDList* pIDList, NameList* pNameList, DownloadQueue::EMatchMode eMatchMode, DownloadQueue::EEditAction eAction);
bool EditGroup(NZBInfo* pNZBInfo, DownloadQueue::EEditAction eAction, int iOffset, const char* szText);
void PauseParsInGroups(ItemList* pItemList, bool bExtraParsOnly);
void PausePars(FileList* pFileList, bool bExtraParsOnly);
void SetNZBPriority(NZBInfo* pNZBInfo, const char* szPriority);
void SetNZBCategory(NZBInfo* pNZBInfo, const char* szCategory, bool bApplyParams);
void SetNZBName(NZBInfo* pNZBInfo, const char* szName);
bool CanCleanupDisk(NZBInfo* pNZBInfo);
bool MergeGroups(ItemList* pItemList);
bool SplitGroup(ItemList* pItemList, const char* szName);
bool DeleteUrl(NZBInfo* pNZBInfo, DownloadQueue::EEditAction eAction);
void ReorderFiles(ItemList* pItemList);
void SetNZBParameter(NZBInfo* pNZBInfo, const char* szParamString);
void SetNZBDupeParam(NZBInfo* pNZBInfo, DownloadQueue::EEditAction eAction, const char* szText);
void PauseUnpauseEntry(FileInfo* pFileInfo, bool bPause);
void DeleteEntry(FileInfo* pFileInfo);
void MoveEntry(FileInfo* pFileInfo, int iOffset);
void MoveGroup(NZBInfo* pNZBInfo, int iOffset);
public:
QueueEditor();
~QueueEditor();
bool EditEntry(DownloadQueue* pDownloadQueue, int ID, DownloadQueue::EEditAction eAction, int iOffset, const char* szText);
bool EditList(DownloadQueue* pDownloadQueue, IDList* pIDList, NameList* pNameList, DownloadQueue::EMatchMode eMatchMode, DownloadQueue::EEditAction eAction, int iOffset, const char* szText);
};
#endif

View File

@@ -0,0 +1,539 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#ifdef WIN32
#include "win32.h"
#endif
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#ifndef WIN32
#include <unistd.h>
#endif
#include <sys/stat.h>
#include <stdio.h>
#include <algorithm>
#include "nzbget.h"
#include "QueueScript.h"
#include "Options.h"
#include "Log.h"
#include "Util.h"
extern Options* g_pOptions;
extern QueueScriptCoordinator* g_pQueueScriptCoordinator;
static const char* QUEUE_EVENT_NAMES[] = { "FILE_DOWNLOADED", "NZB_ADDED", "NZB_DOWNLOADED" };
class QueueScriptController : public Thread, public NZBScriptController
{
private:
char* m_szNZBName;
char* m_szNZBFilename;
char* m_szUrl;
char* m_szCategory;
char* m_szDestDir;
int m_iID;
int m_iPriority;
NZBParameterList m_Parameters;
int m_iPrefixLen;
Options::Script* m_pScript;
QueueScriptCoordinator::EEvent m_eEvent;
bool m_bMarkBad;
void PrepareParams(const char* szScriptName);
protected:
virtual void ExecuteScript(Options::Script* pScript);
virtual void AddMessage(Message::EKind eKind, const char* szText);
public:
virtual ~QueueScriptController();
virtual void Run();
static void StartScript(NZBInfo* pNZBInfo, Options::Script* pScript, QueueScriptCoordinator::EEvent eEvent);
};
/**
* If szStripPrefix is not NULL, only pp-parameters whose names start with the prefix
* are processed. The prefix is then stripped from the names.
* If szStripPrefix is NULL, all pp-parameters are processed without stripping.
*/
void NZBScriptController::PrepareEnvParameters(NZBParameterList* pParameters, const char* szStripPrefix)
{
int iPrefixLen = szStripPrefix ? strlen(szStripPrefix) : 0;
for (NZBParameterList::iterator it = pParameters->begin(); it != pParameters->end(); it++)
{
NZBParameter* pParameter = *it;
const char* szValue = pParameter->GetValue();
#ifdef WIN32
char* szAnsiValue = strdup(szValue);
WebUtil::Utf8ToAnsi(szAnsiValue, strlen(szAnsiValue) + 1);
szValue = szAnsiValue;
#endif
if (szStripPrefix && !strncmp(pParameter->GetName(), szStripPrefix, iPrefixLen) && (int)strlen(pParameter->GetName()) > iPrefixLen)
{
SetEnvVarSpecial("NZBPR", pParameter->GetName() + iPrefixLen, szValue);
}
else if (!szStripPrefix)
{
SetEnvVarSpecial("NZBPR", pParameter->GetName(), szValue);
}
#ifdef WIN32
free(szAnsiValue);
#endif
}
}
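A concrete reading of the stripping rule above, as a self-contained sketch (hypothetical parameter names; it only assumes that SetEnvVarSpecial("NZBPR", name, value) ends up as an environment variable named after the parameter, like NZBPR_<name>):

// Standalone illustration of which parameter names survive the prefix filter
// and what they are shortened to (sample data only, not nzbget code).
#include <cstdio>
#include <cstring>

static void ShowEnvName(const char* szName, const char* szStripPrefix)
{
	int iPrefixLen = szStripPrefix ? (int)strlen(szStripPrefix) : 0;
	if (szStripPrefix && !strncmp(szName, szStripPrefix, iPrefixLen) && (int)strlen(szName) > iPrefixLen)
	{
		printf("NZBPR_%s\n", szName + iPrefixLen);  // prefix stripped
	}
	else if (!szStripPrefix)
	{
		printf("NZBPR_%s\n", szName);               // no prefix given: keep full name
	}
	// otherwise the parameter is skipped for this script
}

int main()
{
	ShowEnvName("MyScript:Option1", "MyScript:");   // -> NZBPR_Option1
	ShowEnvName("Category", "MyScript:");           // skipped
	ShowEnvName("MyScript:Option1", NULL);          // -> NZBPR_MyScript:Option1 (modulo character sanitizing)
	return 0;
}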
void NZBScriptController::PrepareEnvScript(NZBParameterList* pParameters, const char* szScriptName)
{
if (pParameters)
{
PrepareEnvParameters(pParameters, NULL);
}
char szParamPrefix[1024];
snprintf(szParamPrefix, 1024, "%s:", szScriptName);
szParamPrefix[1024-1] = '\0';
if (pParameters)
{
PrepareEnvParameters(pParameters, szParamPrefix);
}
PrepareEnvOptions(szParamPrefix);
}
void NZBScriptController::ExecuteScriptList(const char* szScriptList)
{
for (Options::Scripts::iterator it = g_pOptions->GetScripts()->begin(); it != g_pOptions->GetScripts()->end(); it++)
{
Options::Script* pScript = *it;
if (szScriptList && *szScriptList)
{
// split szScriptList into tokens
Tokenizer tok(szScriptList, ",;");
while (const char* szScriptName = tok.Next())
{
if (Util::SameFilename(szScriptName, pScript->GetName()))
{
ExecuteScript(pScript);
break;
}
}
}
}
}
QueueScriptController::~QueueScriptController()
{
free(m_szNZBName);
free(m_szNZBFilename);
free(m_szUrl);
free(m_szCategory);
free(m_szDestDir);
}
void QueueScriptController::StartScript(NZBInfo* pNZBInfo, Options::Script* pScript, QueueScriptCoordinator::EEvent eEvent)
{
QueueScriptController* pScriptController = new QueueScriptController();
pScriptController->m_szNZBName = strdup(pNZBInfo->GetName());
pScriptController->m_szNZBFilename = strdup(pNZBInfo->GetFilename());
pScriptController->m_szUrl = strdup(pNZBInfo->GetURL());
pScriptController->m_szCategory = strdup(pNZBInfo->GetCategory());
pScriptController->m_szDestDir = strdup(pNZBInfo->GetDestDir());
pScriptController->m_iID = pNZBInfo->GetID();
pScriptController->m_iPriority = pNZBInfo->GetPriority();
pScriptController->m_Parameters.CopyFrom(pNZBInfo->GetParameters());
pScriptController->m_pScript = pScript;
pScriptController->m_eEvent = eEvent;
pScriptController->m_iPrefixLen = 0;
pScriptController->m_bMarkBad = false;
pScriptController->SetAutoDestroy(true);
pScriptController->Start();
}
void QueueScriptController::Run()
{
ExecuteScript(m_pScript);
SetLogPrefix(NULL);
if (m_bMarkBad)
{
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
NZBInfo* pNZBInfo = pDownloadQueue->GetQueue()->Find(m_iID);
if (pNZBInfo)
{
PrintMessage(Message::mkWarning, "Cancelling download and deleting %s", m_szNZBName);
pNZBInfo->SetDeleteStatus(NZBInfo::dsBad);
pDownloadQueue->EditEntry(m_iID, DownloadQueue::eaGroupDelete, 0, NULL);
}
DownloadQueue::Unlock();
}
g_pQueueScriptCoordinator->CheckQueue();
}
void QueueScriptController::ExecuteScript(Options::Script* pScript)
{
PrintMessage(m_eEvent == QueueScriptCoordinator::qeFileDownloaded ? Message::mkDetail : Message::mkInfo,
"Executing queue-script %s for %s", pScript->GetName(), Util::BaseFileName(m_szNZBName));
SetScript(pScript->GetLocation());
SetArgs(NULL, false);
char szInfoName[1024];
snprintf(szInfoName, 1024, "queue-script %s for %s", pScript->GetName(), Util::BaseFileName(m_szNZBName));
szInfoName[1024-1] = '\0';
SetInfoName(szInfoName);
SetLogPrefix(pScript->GetDisplayName());
m_iPrefixLen = strlen(pScript->GetDisplayName()) + 2; // 2 = strlen(": ");
PrepareParams(pScript->GetName());
Execute();
SetLogPrefix(NULL);
}
void QueueScriptController::PrepareParams(const char* szScriptName)
{
ResetEnv();
SetEnvVar("NZBNA_NZBNAME", m_szNZBName);
SetIntEnvVar("NZBNA_NZBID", m_iID);
SetEnvVar("NZBNA_FILENAME", m_szNZBFilename);
SetEnvVar("NZBNA_DIRECTORY", m_szDestDir);
SetEnvVar("NZBNA_URL", m_szUrl);
SetEnvVar("NZBNA_CATEGORY", m_szCategory);
SetIntEnvVar("NZBNA_PRIORITY", m_iPriority);
SetIntEnvVar("NZBNA_LASTID", m_iID); // deprecated
SetEnvVar("NZBNA_EVENT", QUEUE_EVENT_NAMES[m_eEvent]);
PrepareEnvScript(&m_Parameters, szScriptName);
}
void QueueScriptController::AddMessage(Message::EKind eKind, const char* szText)
{
const char* szMsgText = szText + m_iPrefixLen;
if (!strncmp(szMsgText, "[NZB] ", 6))
{
debug("Command %s detected", szMsgText + 6);
if (!strncmp(szMsgText + 6, "NZBPR_", 6))
{
char* szParam = strdup(szMsgText + 6 + 6);
char* szValue = strchr(szParam, '=');
if (szValue)
{
*szValue = '\0';
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
NZBInfo* pNZBInfo = pDownloadQueue->GetQueue()->Find(m_iID);
if (pNZBInfo)
{
pNZBInfo->GetParameters()->SetParameter(szParam, szValue + 1);
}
DownloadQueue::Unlock();
}
else
{
error("Invalid command \"%s\" received from %s", szMsgText, GetInfoName());
}
free(szParam);
}
else if (!strncmp(szMsgText + 6, "MARK=BAD", 8))
{
m_bMarkBad = true;
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
NZBInfo* pNZBInfo = pDownloadQueue->GetQueue()->Find(m_iID);
if (pNZBInfo)
{
SetLogPrefix(NULL);
PrintMessage(Message::mkWarning, "Marking %s as bad", m_szNZBName);
SetLogPrefix(m_pScript->GetDisplayName());
pNZBInfo->SetMarkStatus(NZBInfo::ksBad);
}
DownloadQueue::Unlock();
}
else
{
error("Invalid command \"%s\" received from %s", szMsgText, GetInfoName());
}
}
else
{
ScriptController::AddMessage(eKind, szText);
}
}
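AddMessage() above turns specially formatted stdout lines from a queue-script into actions. A minimal hypothetical script that exercises both branches, written in C++ here only for consistency with the rest of the diff (real queue-scripts are typically shell or Python):

// Hypothetical queue-script: sets a post-processing parameter and marks
// the nzb as bad via the "[NZB] ..." stdout protocol parsed above.
#include <cstdio>

int main()
{
	printf("[NZB] NZBPR_MyCheck=failed\n");  // handled by the NZBPR_ branch
	printf("[NZB] MARK=BAD\n");              // handled by the MARK=BAD branch
	return 0;
}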
QueueScriptCoordinator::QueueItem::QueueItem(int iNZBID, Options::Script* pScript, EEvent eEvent)
{
m_iNZBID = iNZBID;
m_pScript = pScript;
m_eEvent = eEvent;
}
QueueScriptCoordinator::QueueScriptCoordinator()
{
m_pCurItem = NULL;
}
QueueScriptCoordinator::~QueueScriptCoordinator()
{
delete m_pCurItem;
for (Queue::iterator it = m_Queue.begin(); it != m_Queue.end(); )
{
delete *it;
}
}
void QueueScriptCoordinator::InitOptions()
{
m_bHasQueueScripts = false;
for (Options::Scripts::iterator it = g_pOptions->GetScripts()->begin(); it != g_pOptions->GetScripts()->end(); it++)
{
Options::Script* pScript = *it;
if (pScript->GetQueueScript())
{
m_bHasQueueScripts = true;
break;
}
}
}
void QueueScriptCoordinator::EnqueueScript(NZBInfo* pNZBInfo, EEvent eEvent)
{
if (!m_bHasQueueScripts)
{
return;
}
m_mutexQueue.Lock();
if (eEvent == qeNzbDownloaded)
{
// delete all other queued scripts for this nzb
for (Queue::iterator it = m_Queue.begin(); it != m_Queue.end(); )
{
QueueItem* pQueueItem = *it;
if (pQueueItem->GetNZBID() == pNZBInfo->GetID())
{
delete pQueueItem;
it = m_Queue.erase(it);
continue;
}
it++;
}
}
// respect option "EventInterval"
time_t tCurTime = time(NULL);
if (eEvent == qeFileDownloaded &&
(g_pOptions->GetEventInterval() == -1 ||
(g_pOptions->GetEventInterval() > 0 && tCurTime - pNZBInfo->GetQueueScriptTime() > 0 &&
(int)(tCurTime - pNZBInfo->GetQueueScriptTime()) < g_pOptions->GetEventInterval())))
{
m_mutexQueue.Unlock();
return;
}
for (Options::Scripts::iterator it = g_pOptions->GetScripts()->begin(); it != g_pOptions->GetScripts()->end(); it++)
{
Options::Script* pScript = *it;
if (!pScript->GetQueueScript())
{
continue;
}
bool bUseScript = false;
// check queue-scripts
const char* szQueueScript = g_pOptions->GetQueueScript();
if (!Util::EmptyStr(szQueueScript))
{
// split szQueueScript into tokens
Tokenizer tok(szQueueScript, ",;");
while (const char* szScriptName = tok.Next())
{
if (Util::SameFilename(szScriptName, pScript->GetName()))
{
bUseScript = true;
break;
}
}
}
// check post-processing-scripts
if (!bUseScript)
{
for (NZBParameterList::iterator it = pNZBInfo->GetParameters()->begin(); it != pNZBInfo->GetParameters()->end(); it++)
{
NZBParameter* pParameter = *it;
const char* szVarname = pParameter->GetName();
if (strlen(szVarname) > 0 && szVarname[0] != '*' && szVarname[strlen(szVarname)-1] == ':' &&
(!strcasecmp(pParameter->GetValue(), "yes") ||
!strcasecmp(pParameter->GetValue(), "on") ||
!strcasecmp(pParameter->GetValue(), "1")))
{
char szScriptName[1024];
strncpy(szScriptName, szVarname, 1024);
szScriptName[1024-1] = '\0';
szScriptName[strlen(szScriptName)-1] = '\0'; // remove trailing ':'
if (Util::SameFilename(szScriptName, pScript->GetName()))
{
bUseScript = true;
break;
}
}
}
}
bUseScript &= Util::EmptyStr(pScript->GetQueueEvents()) || strstr(pScript->GetQueueEvents(), QUEUE_EVENT_NAMES[eEvent]);
if (bUseScript)
{
bool bAlreadyQueued = false;
if (eEvent == qeFileDownloaded)
{
// check if this script is already queued for this nzb
for (Queue::iterator it2 = m_Queue.begin(); it2 != m_Queue.end(); it2++)
{
QueueItem* pQueueItem = *it2;
if (pQueueItem->GetNZBID() == pNZBInfo->GetID() && pQueueItem->GetScript() == pScript)
{
bAlreadyQueued = true;
break;
}
}
}
if (!bAlreadyQueued)
{
QueueItem* pQueueItem = new QueueItem(pNZBInfo->GetID(), pScript, eEvent);
if (m_pCurItem)
{
m_Queue.push_back(pQueueItem);
}
else
{
StartScript(pNZBInfo, pQueueItem);
}
}
pNZBInfo->SetQueueScriptTime(time(NULL));
}
}
m_mutexQueue.Unlock();
}
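The "EventInterval" test near the top of EnqueueScript() packs several conditions into one expression. Spelled out with named booleans, the same logic reads as follows (a sketch only, mirroring the condition in the diff):

// Equivalent form of the EventInterval check: returns true when a
// FILE_DOWNLOADED event should not enqueue any script this time.
#include <ctime>

bool ShouldSuppressFileEvent(int iEventInterval, time_t tCurTime, time_t tQueueScriptTime)
{
	bool bDisabled = iEventInterval == -1;                    // -1 turns per-file events off entirely
	bool bTooSoon = iEventInterval > 0 &&
		tCurTime - tQueueScriptTime > 0 &&
		(int)(tCurTime - tQueueScriptTime) < iEventInterval;  // last script ran less than the interval ago
	return bDisabled || bTooSoon;
}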
void QueueScriptCoordinator::CheckQueue()
{
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
m_mutexQueue.Lock();
delete m_pCurItem;
m_pCurItem = NULL;
NZBInfo* pCurNZBInfo = NULL;
Queue::iterator itCurItem = m_Queue.end();
for (Queue::iterator it = m_Queue.begin(); it != m_Queue.end(); )
{
QueueItem* pQueueItem = *it;
NZBInfo* pNZBInfo = pDownloadQueue->GetQueue()->Find(pQueueItem->GetNZBID());
// if this nzb must not be processed further, remove its queue-script item from the queue
if (!pNZBInfo || pNZBInfo->GetDeleteStatus() != NZBInfo::dsNone ||
pNZBInfo->GetMarkStatus() == NZBInfo::ksBad)
{
delete pQueueItem;
it = m_Queue.erase(it);
continue;
}
if (!m_pCurItem || pQueueItem->GetEvent() > m_pCurItem->GetEvent())
{
m_pCurItem = pQueueItem;
itCurItem = it;
pCurNZBInfo = pNZBInfo;
}
it++;
}
if (m_pCurItem)
{
m_Queue.erase(itCurItem);
StartScript(pCurNZBInfo, m_pCurItem);
}
m_mutexQueue.Unlock();
DownloadQueue::Unlock();
}
void QueueScriptCoordinator::StartScript(NZBInfo* pNZBInfo, QueueItem* pQueueItem)
{
m_pCurItem = pQueueItem;
QueueScriptController::StartScript(pNZBInfo, pQueueItem->GetScript(), pQueueItem->GetEvent());
}
bool QueueScriptCoordinator::HasJob(int iNZBID)
{
m_mutexQueue.Lock();
bool bWorking = m_pCurItem && m_pCurItem->GetNZBID() == iNZBID;
if (!bWorking)
{
for (Queue::iterator it = m_Queue.begin(); it != m_Queue.end(); it++)
{
QueueItem* pQueueItem = *it;
bWorking = pQueueItem->GetNZBID() == iNZBID;
if (bWorking)
{
break;
}
}
}
m_mutexQueue.Unlock();
return bWorking;
}

View File

@@ -0,0 +1,87 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* $Revision$
* $Date$
*
*/
#ifndef QUEUESCRIPT_H
#define QUEUESCRIPT_H
#include <list>
#include "Script.h"
#include "Thread.h"
#include "DownloadInfo.h"
#include "Options.h"
class NZBScriptController : public ScriptController
{
protected:
void PrepareEnvParameters(NZBParameterList* pParameters, const char* szStripPrefix);
void PrepareEnvScript(NZBParameterList* pParameters, const char* szScriptName);
void ExecuteScriptList(const char* szScriptList);
virtual void ExecuteScript(Options::Script* pScript) = 0;
};
class QueueScriptCoordinator
{
public:
enum EEvent
{
qeFileDownloaded, // lowest priority
qeNzbAdded,
qeNzbDownloaded // highest priority
};
private:
class QueueItem
{
private:
int m_iNZBID;
Options::Script* m_pScript;
EEvent m_eEvent;
public:
QueueItem(int iNZBID, Options::Script* pScript, EEvent eEvent);
int GetNZBID() { return m_iNZBID; }
Options::Script* GetScript() { return m_pScript; }
EEvent GetEvent() { return m_eEvent; }
};
typedef std::list<QueueItem*> Queue;
Queue m_Queue;
Mutex m_mutexQueue;
QueueItem* m_pCurItem;
bool m_bHasQueueScripts;
void StartScript(NZBInfo* pNZBInfo, QueueItem* pQueueItem);
public:
QueueScriptCoordinator();
~QueueScriptCoordinator();
void InitOptions();
void EnqueueScript(NZBInfo* pNZBInfo, EEvent eEvent);
void CheckQueue();
bool HasJob(int iNZBID);
};
#endif


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -46,13 +46,203 @@
#include "Options.h"
#include "Log.h"
#include "QueueCoordinator.h"
#include "ScriptController.h"
#include "DiskState.h"
#include "QueueScript.h"
#include "Util.h"
extern QueueCoordinator* g_pQueueCoordinator;
extern Options* g_pOptions;
extern DiskState* g_pDiskState;
extern Scanner* g_pScanner;
class ScanScriptController : public NZBScriptController
{
private:
const char* m_szNZBFilename;
const char* m_szUrl;
const char* m_szDirectory;
char** m_pNZBName;
char** m_pCategory;
int* m_iPriority;
NZBParameterList* m_pParameters;
bool* m_bAddTop;
bool* m_bAddPaused;
char** m_pDupeKey;
int* m_iDupeScore;
EDupeMode* m_eDupeMode;
int m_iPrefixLen;
void PrepareParams(const char* szScriptName);
protected:
virtual void ExecuteScript(Options::Script* pScript);
virtual void AddMessage(Message::EKind eKind, const char* szText);
public:
static void ExecuteScripts(const char* szNZBFilename, const char* szUrl,
const char* szDirectory, char** pNZBName, char** pCategory, int* iPriority,
NZBParameterList* pParameters, bool* bAddTop, bool* bAddPaused,
char** pDupeKey, int* iDupeScore, EDupeMode* eDupeMode);
};
void ScanScriptController::ExecuteScripts(const char* szNZBFilename,
const char* szUrl, const char* szDirectory, char** pNZBName, char** pCategory,
int* iPriority, NZBParameterList* pParameters, bool* bAddTop, bool* bAddPaused,
char** pDupeKey, int* iDupeScore, EDupeMode* eDupeMode)
{
ScanScriptController* pScriptController = new ScanScriptController();
pScriptController->m_szNZBFilename = szNZBFilename;
pScriptController->m_szUrl = szUrl;
pScriptController->m_szDirectory = szDirectory;
pScriptController->m_pNZBName = pNZBName;
pScriptController->m_pCategory = pCategory;
pScriptController->m_pParameters = pParameters;
pScriptController->m_iPriority = iPriority;
pScriptController->m_bAddTop = bAddTop;
pScriptController->m_bAddPaused = bAddPaused;
pScriptController->m_pDupeKey = pDupeKey;
pScriptController->m_iDupeScore = iDupeScore;
pScriptController->m_eDupeMode = eDupeMode;
pScriptController->m_iPrefixLen = 0;
pScriptController->ExecuteScriptList(g_pOptions->GetScanScript());
delete pScriptController;
}
void ScanScriptController::ExecuteScript(Options::Script* pScript)
{
if (!pScript->GetScanScript() || !Util::FileExists(m_szNZBFilename))
{
return;
}
PrintMessage(Message::mkInfo, "Executing scan-script %s for %s", pScript->GetName(), Util::BaseFileName(m_szNZBFilename));
SetScript(pScript->GetLocation());
SetArgs(NULL, false);
char szInfoName[1024];
snprintf(szInfoName, 1024, "scan-script %s for %s", pScript->GetName(), Util::BaseFileName(m_szNZBFilename));
szInfoName[1024-1] = '\0';
SetInfoName(szInfoName);
SetLogPrefix(pScript->GetDisplayName());
m_iPrefixLen = strlen(pScript->GetDisplayName()) + 2; // 2 = strlen(": ");
PrepareParams(pScript->GetName());
Execute();
SetLogPrefix(NULL);
}
void ScanScriptController::PrepareParams(const char* szScriptName)
{
ResetEnv();
SetEnvVar("NZBNP_FILENAME", m_szNZBFilename);
SetEnvVar("NZBNP_URL", m_szUrl);
SetEnvVar("NZBNP_NZBNAME", strlen(*m_pNZBName) > 0 ? *m_pNZBName : Util::BaseFileName(m_szNZBFilename));
SetEnvVar("NZBNP_CATEGORY", *m_pCategory);
SetIntEnvVar("NZBNP_PRIORITY", *m_iPriority);
SetIntEnvVar("NZBNP_TOP", *m_bAddTop ? 1 : 0);
SetIntEnvVar("NZBNP_PAUSED", *m_bAddPaused ? 1 : 0);
SetEnvVar("NZBNP_DUPEKEY", *m_pDupeKey);
SetIntEnvVar("NZBNP_DUPESCORE", *m_iDupeScore);
const char* szDupeModeName[] = { "SCORE", "ALL", "FORCE" };
SetEnvVar("NZBNP_DUPEMODE", szDupeModeName[*m_eDupeMode]);
// remove trailing slash
char szDir[1024];
strncpy(szDir, m_szDirectory, 1024);
szDir[1024-1] = '\0';
int iLen = strlen(szDir);
if (szDir[iLen-1] == PATH_SEPARATOR)
{
szDir[iLen-1] = '\0';
}
SetEnvVar("NZBNP_DIRECTORY", szDir);
PrepareEnvScript(m_pParameters, szScriptName);
}
void ScanScriptController::AddMessage(Message::EKind eKind, const char* szText)
{
const char* szMsgText = szText + m_iPrefixLen;
if (!strncmp(szMsgText, "[NZB] ", 6))
{
debug("Command %s detected", szMsgText + 6);
if (!strncmp(szMsgText + 6, "NZBNAME=", 8))
{
free(*m_pNZBName);
*m_pNZBName = strdup(szMsgText + 6 + 8);
}
else if (!strncmp(szMsgText + 6, "CATEGORY=", 9))
{
free(*m_pCategory);
*m_pCategory = strdup(szMsgText + 6 + 9);
g_pScanner->InitPPParameters(*m_pCategory, m_pParameters, true);
}
else if (!strncmp(szMsgText + 6, "NZBPR_", 6))
{
char* szParam = strdup(szMsgText + 6 + 6);
char* szValue = strchr(szParam, '=');
if (szValue)
{
*szValue = '\0';
m_pParameters->SetParameter(szParam, szValue + 1);
}
else
{
error("Invalid command \"%s\" received from %s", szMsgText, GetInfoName());
}
free(szParam);
}
else if (!strncmp(szMsgText + 6, "PRIORITY=", 9))
{
*m_iPriority = atoi(szMsgText + 6 + 9);
}
else if (!strncmp(szMsgText + 6, "TOP=", 4))
{
*m_bAddTop = atoi(szMsgText + 6 + 4) != 0;
}
else if (!strncmp(szMsgText + 6, "PAUSED=", 7))
{
*m_bAddPaused = atoi(szMsgText + 6 + 7) != 0;
}
else if (!strncmp(szMsgText + 6, "DUPEKEY=", 8))
{
free(*m_pDupeKey);
*m_pDupeKey = strdup(szMsgText + 6 + 8);
}
else if (!strncmp(szMsgText + 6, "DUPESCORE=", 10))
{
*m_iDupeScore = atoi(szMsgText + 6 + 10);
}
else if (!strncmp(szMsgText + 6, "DUPEMODE=", 9))
{
const char* szDupeMode = szMsgText + 6 + 9;
if (strcasecmp(szDupeMode, "score") && strcasecmp(szDupeMode, "all") && strcasecmp(szDupeMode, "force"))
{
error("Invalid value \"%s\" for command \"DUPEMODE\" received from %s", szDupeMode, GetInfoName());
return;
}
*m_eDupeMode = !strcasecmp(szDupeMode, "all") ? dmAll :
!strcasecmp(szDupeMode, "force") ? dmForce : dmScore;
}
else
{
error("Invalid command \"%s\" received from %s", szMsgText, GetInfoName());
}
}
else
{
ScriptController::AddMessage(eKind, szText);
}
}
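As a reading aid for the dispatcher above: scan scripts talk back to the program by printing lines of the form "[NZB] COMMAND=value" to stdout, and AddMessage() picks them apart by prefix. A simplified standalone parser for that convention (illustrative only, no nzbget types and no per-command dispatch) might be:

#include <cstdio>
#include <cstring>
#include <string>

// Split one script output line of the form "[NZB] KEY=value" into key and value.
// Returns false for ordinary log output or malformed commands.
bool ParseNzbCommand(const char* line, std::string& key, std::string& value)
{
	if (strncmp(line, "[NZB] ", 6) != 0)
	{
		return false; // not a command, treat as a normal log message
	}
	const char* eq = strchr(line + 6, '=');
	if (!eq || eq == line + 6)
	{
		return false; // AddMessage() reports such lines as invalid commands
	}
	key.assign(line + 6, eq - (line + 6));
	value.assign(eq + 1);
	return true;
}

int main()
{
	std::string key, value;
	if (ParseNzbCommand("[NZB] CATEGORY=TV", key, value)) // hypothetical sample line
	{
		std::printf("%s -> %s\n", key.c_str(), value.c_str());
	}
	return 0;
}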
Scanner::FileData::FileData(const char* szFilename)
{
@@ -69,7 +259,8 @@ Scanner::FileData::~FileData()
Scanner::QueueData::QueueData(const char* szFilename, const char* szNZBName, const char* szCategory,
int iPriority, const char* szDupeKey, int iDupeScore, EDupeMode eDupeMode,
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused, EAddStatus* pAddStatus)
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused, NZBInfo* pUrlInfo,
EAddStatus* pAddStatus, int* pNZBID)
{
m_szFilename = strdup(szFilename);
m_szNZBName = strdup(szNZBName);
@@ -80,7 +271,9 @@ Scanner::QueueData::QueueData(const char* szFilename, const char* szNZBName, con
m_eDupeMode = eDupeMode;
m_bAddTop = bAddTop;
m_bAddPaused = bAddPaused;
m_pUrlInfo = pUrlInfo;
m_pAddStatus = pAddStatus;
m_pNZBID = pNZBID;
if (pParameters)
{
@@ -104,6 +297,14 @@ void Scanner::QueueData::SetAddStatus(EAddStatus eAddStatus)
}
}
void Scanner::QueueData::SetNZBID(int iNZBID)
{
if (m_pNZBID)
{
*m_pNZBID = iNZBID;
}
}
Scanner::Scanner()
{
@@ -111,11 +312,9 @@ Scanner::Scanner()
m_bRequestedNZBDirScan = false;
m_bScanning = false;
m_iNZBDirInterval = g_pOptions->GetNzbDirInterval() * 1000;
m_iNZBDirInterval = 0;
m_iPass = 0;
const char* szNZBScript = g_pOptions->GetNZBProcess();
m_bNZBScript = szNZBScript && strlen(szNZBScript) > 0;
m_bScanScript = false;
}
Scanner::~Scanner()
@@ -131,6 +330,13 @@ Scanner::~Scanner()
ClearQueueList();
}
void Scanner::InitOptions()
{
m_iNZBDirInterval = g_pOptions->GetNzbDirInterval() * 1000;
const char* szScanScript = g_pOptions->GetScanScript();
m_bScanScript = szScanScript && strlen(szScanScript) > 0;
}
void Scanner::ClearQueueList()
{
for (QueueList::iterator it = m_QueueList.begin(); it != m_QueueList.end(); it++)
@@ -153,7 +359,7 @@ void Scanner::Check()
m_bRequestedNZBDirScan = false;
m_bScanning = true;
CheckIncomingNZBs(g_pOptions->GetNzbDir(), "", bCheckStat);
if (!bCheckStat && m_bNZBScript)
if (!bCheckStat && m_bScanScript)
{
// if an immediate scan was requested, a second scan is needed to process files extracted by the NzbProcess-script
CheckIncomingNZBs(g_pOptions->GetNzbDir(), "", bCheckStat);
@@ -168,7 +374,7 @@ void Scanner::Check()
// - third scan is needed to check sizes of extracted files.
if (g_pOptions->GetNzbDirInterval() > 0 && g_pOptions->GetNzbDirFileAge() < g_pOptions->GetNzbDirInterval())
{
int iMaxPass = m_bNZBScript ? 3 : 1;
int iMaxPass = m_bScanScript ? 3 : 1;
if (m_iPass < iMaxPass)
{
// scheduling another scan of incoming directory in NzbDirFileAge seconds.
@@ -198,31 +404,28 @@ void Scanner::CheckIncomingNZBs(const char* szDirectory, const char* szCategory,
DirBrowser dir(szDirectory);
while (const char* filename = dir.Next())
{
struct stat buffer;
char fullfilename[1023 + 1]; // one char reserved for the trailing slash (if needed)
snprintf(fullfilename, 1023, "%s%s", szDirectory, filename);
fullfilename[1023 - 1] = '\0';
if (!stat(fullfilename, &buffer))
bool bIsDirectory = Util::DirectoryExists(fullfilename);
// check subfolders
if (bIsDirectory && strcmp(filename, ".") && strcmp(filename, ".."))
{
// check subfolders
if ((buffer.st_mode & S_IFDIR) != 0 && strcmp(filename, ".") && strcmp(filename, ".."))
fullfilename[strlen(fullfilename) + 1] = '\0';
fullfilename[strlen(fullfilename)] = PATH_SEPARATOR;
const char* szUseCategory = filename;
char szSubCategory[1024];
if (strlen(szCategory) > 0)
{
fullfilename[strlen(fullfilename) + 1] = '\0';
fullfilename[strlen(fullfilename)] = PATH_SEPARATOR;
const char* szUseCategory = filename;
char szSubCategory[1024];
if (strlen(szCategory) > 0)
{
snprintf(szSubCategory, 1023, "%s%c%s", szCategory, PATH_SEPARATOR, filename);
szSubCategory[1024 - 1] = '\0';
szUseCategory = szSubCategory;
}
CheckIncomingNZBs(fullfilename, szUseCategory, bCheckStat);
}
else if ((buffer.st_mode & S_IFDIR) == 0 && CanProcessFile(fullfilename, bCheckStat))
{
ProcessIncomingFile(szDirectory, filename, fullfilename, szCategory);
snprintf(szSubCategory, 1023, "%s%c%s", szCategory, PATH_SEPARATOR, filename);
szSubCategory[1024 - 1] = '\0';
szUseCategory = szSubCategory;
}
CheckIncomingNZBs(fullfilename, szUseCategory, bCheckStat);
}
else if (!bIsDirectory && CanProcessFile(fullfilename, bCheckStat))
{
ProcessIncomingFile(szDirectory, filename, fullfilename, szCategory);
}
}
}
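The rewritten loop above replaces the stat()-based check with Util::DirectoryExists and recurses into subfolders, whose names become (nested) categories. A rough standalone equivalent using std::filesystem instead of nzbget's DirBrowser, simplified to treat every regular file as processable, could read:

#include <filesystem>
#include <iostream>
#include <string>

namespace fs = std::filesystem;

// Walk an incoming directory the way CheckIncomingNZBs() does: subfolder names
// become categories ("parent/child" when nested), files are candidates for processing.
void ScanIncoming(const fs::path& dir, const std::string& category)
{
	if (!fs::is_directory(dir))
	{
		return;
	}
	for (const fs::directory_entry& entry : fs::directory_iterator(dir))
	{
		if (entry.is_directory())
		{
			std::string subCategory = category.empty()
				? entry.path().filename().string()
				: category + "/" + entry.path().filename().string();
			ScanIncoming(entry.path(), subCategory);
		}
		else if (entry.is_regular_file())
		{
			std::cout << "process " << entry.path() << " (category: " << category << ")\n";
		}
	}
}

int main()
{
	ScanIncoming("incoming", ""); // "incoming" is a placeholder for the NzbDir option
	return 0;
}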
@@ -338,12 +541,14 @@ void Scanner::ProcessIncomingFile(const char* szDirectory, const char* szBaseFil
int iPriority = 0;
bool bAddTop = false;
bool bAddPaused = false;
const char* szDupeKey = NULL;
char* szDupeKey = strdup("");
int iDupeScore = 0;
EDupeMode eDupeMode = dmScore;
EAddStatus eAddStatus = asSkipped;
bool bAdded = false;
QueueData* pQueueData = NULL;
NZBInfo* pUrlInfo = NULL;
int iNZBID = 0;
for (QueueList::iterator it = m_QueueList.begin(); it != m_QueueList.end(); it++)
{
@@ -356,23 +561,27 @@ void Scanner::ProcessIncomingFile(const char* szDirectory, const char* szBaseFil
free(szNZBCategory);
szNZBCategory = strdup(pQueueData->GetCategory());
iPriority = pQueueData->GetPriority();
szDupeKey = pQueueData->GetDupeKey();
free(szDupeKey);
szDupeKey = strdup(pQueueData->GetDupeKey());
iDupeScore = pQueueData->GetDupeScore();
eDupeMode = pQueueData->GetDupeMode();
bAddTop = pQueueData->GetAddTop();
bAddPaused = pQueueData->GetAddPaused();
pParameters->CopyFrom(pQueueData->GetParameters());
pUrlInfo = pQueueData->GetUrlInfo();
}
}
InitPPParameters(szNZBCategory, pParameters);
InitPPParameters(szNZBCategory, pParameters, false);
bool bExists = true;
if (m_bNZBScript && strcasecmp(szExtension, ".nzb_processed"))
if (m_bScanScript && strcasecmp(szExtension, ".nzb_processed"))
{
NZBScriptController::ExecuteScript(g_pOptions->GetNZBProcess(), szFullFilename, szDirectory,
&szNZBName, &szNZBCategory, &iPriority, pParameters, &bAddTop, &bAddPaused);
ScanScriptController::ExecuteScripts(szFullFilename,
pUrlInfo ? pUrlInfo->GetURL() : "", szDirectory,
&szNZBName, &szNZBCategory, &iPriority, pParameters, &bAddTop,
&bAddPaused, &szDupeKey, &iDupeScore, &eDupeMode);
bExists = Util::FileExists(szFullFilename);
if (bExists && strcasecmp(szExtension, ".nzb"))
{
@@ -393,7 +602,7 @@ void Scanner::ProcessIncomingFile(const char* szDirectory, const char* szBaseFil
if (bRenameOK)
{
bAdded = AddFileToQueue(szRenamedName, szNZBName, szNZBCategory, iPriority,
szDupeKey, iDupeScore, eDupeMode, pParameters, bAddTop, bAddPaused);
szDupeKey, iDupeScore, eDupeMode, pParameters, bAddTop, bAddPaused, pUrlInfo, &iNZBID);
}
else
{
@@ -405,69 +614,75 @@ void Scanner::ProcessIncomingFile(const char* szDirectory, const char* szBaseFil
else if (bExists && !strcasecmp(szExtension, ".nzb"))
{
bAdded = AddFileToQueue(szFullFilename, szNZBName, szNZBCategory, iPriority,
szDupeKey, iDupeScore, eDupeMode, pParameters, bAddTop, bAddPaused);
szDupeKey, iDupeScore, eDupeMode, pParameters, bAddTop, bAddPaused, pUrlInfo, &iNZBID);
}
delete pParameters;
free(szNZBName);
free(szNZBCategory);
free(szDupeKey);
if (pQueueData)
{
pQueueData->SetAddStatus(eAddStatus == asFailed ? asFailed : bAdded ? asSuccess : asSkipped);
pQueueData->SetNZBID(iNZBID);
}
}
void Scanner::InitPPParameters(const char* szCategory, NZBParameterList* pParameters)
void Scanner::InitPPParameters(const char* szCategory, NZBParameterList* pParameters, bool bReset)
{
bool bUnpack = g_pOptions->GetUnpack();
const char* szDefScript = g_pOptions->GetDefScript();
const char* szPostScript = g_pOptions->GetPostScript();
if (szCategory && *szCategory)
if (!Util::EmptyStr(szCategory))
{
Options::Category* pCategory = g_pOptions->FindCategory(szCategory, false);
if (pCategory)
{
bUnpack = pCategory->GetUnpack();
if (pCategory->GetDefScript() && *pCategory->GetDefScript())
if (!Util::EmptyStr(pCategory->GetPostScript()))
{
szDefScript = pCategory->GetDefScript();
szPostScript = pCategory->GetPostScript();
}
}
}
if (bReset)
{
for (Options::Scripts::iterator it = g_pOptions->GetScripts()->begin(); it != g_pOptions->GetScripts()->end(); it++)
{
Options::Script* pScript = *it;
char szParam[1024];
snprintf(szParam, 1024, "%s:", pScript->GetName());
szParam[1024-1] = '\0';
pParameters->SetParameter(szParam, NULL);
}
}
pParameters->SetParameter("*Unpack:", bUnpack ? "yes" : "no");
if (szDefScript && *szDefScript)
if (!Util::EmptyStr(szPostScript))
{
// split szDefScript into tokens and create pp-parameter for each token
char* szDefScript2 = strdup(szDefScript);
char* saveptr;
char* szScriptName = strtok_r(szDefScript2, ",;", &saveptr);
while (szScriptName)
// split szPostScript into tokens and create pp-parameter for each token
Tokenizer tok(szPostScript, ",;");
while (const char* szScriptName = tok.Next())
{
szScriptName = Util::Trim(szScriptName);
if (szScriptName[0] != '\0')
{
char szParam[1024];
snprintf(szParam, 1024, "%s:", szScriptName);
szParam[1024-1] = '\0';
pParameters->SetParameter(szParam, "yes");
}
szScriptName = strtok_r(NULL, ",;", &saveptr);
char szParam[1024];
snprintf(szParam, 1024, "%s:", szScriptName);
szParam[1024-1] = '\0';
pParameters->SetParameter(szParam, "yes");
}
free(szDefScript2);
}
}
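The Tokenizer loop above turns the PostScript option (a comma- or semicolon-separated script list) into one "ScriptName:" = "yes" pp-parameter per entry, after the bReset pass has cleared the parameters of all known scripts. A standalone approximation of the splitting step, using plain std::string instead of nzbget's Tokenizer and hypothetical script names, could be:

#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Build the pp-parameter map for a PostScript value such as "Cleanup.sh, Notify.py",
// approximating the splitting done in Scanner::InitPPParameters().
std::map<std::string, std::string> BuildScriptParameters(const std::string& postScript)
{
	std::map<std::string, std::string> params;
	std::string token;
	std::stringstream stream(postScript);
	while (std::getline(stream, token, ',')) // nzbget's Tokenizer also accepts ';'
	{
		size_t begin = token.find_first_not_of(" \t");
		size_t end = token.find_last_not_of(" \t");
		if (begin == std::string::npos)
		{
			continue; // skip empty entries
		}
		params[token.substr(begin, end - begin + 1) + ":"] = "yes";
	}
	return params;
}

int main()
{
	for (const auto& param : BuildScriptParameters("Cleanup.sh, Notify.py"))
	{
		std::cout << param.first << " = " << param.second << "\n";
	}
	return 0;
}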
bool Scanner::AddFileToQueue(const char* szFilename, const char* szNZBName, const char* szCategory,
int iPriority, const char* szDupeKey, int iDupeScore, EDupeMode eDupeMode,
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused)
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused, NZBInfo* pUrlInfo, int* pNZBID)
{
const char* szBasename = Util::BaseFileName(szFilename);
info("Collection %s found", szBasename);
info("Adding collection %s to queue", szBasename);
NZBFile* pNZBFile = NZBFile::Create(szFilename, szCategory);
bool bOK = pNZBFile != NULL;
@@ -486,41 +701,52 @@ bool Scanner::AddFileToQueue(const char* szFilename, const char* szNZBName, cons
if (bOK)
{
pNZBFile->GetNZBInfo()->SetQueuedFilename(bakname2);
NZBInfo* pNZBInfo = pNZBFile->GetNZBInfo();
pNZBInfo->SetQueuedFilename(bakname2);
if (szNZBName && strlen(szNZBName) > 0)
{
pNZBFile->GetNZBInfo()->SetName(NULL);
pNZBInfo->SetName(NULL);
#ifdef WIN32
char* szAnsiFilename = strdup(szNZBName);
WebUtil::Utf8ToAnsi(szAnsiFilename, strlen(szAnsiFilename) + 1);
pNZBFile->GetNZBInfo()->SetFilename(szAnsiFilename);
pNZBInfo->SetFilename(szAnsiFilename);
free(szAnsiFilename);
#else
pNZBFile->GetNZBInfo()->SetFilename(szNZBName);
pNZBInfo->SetFilename(szNZBName);
#endif
pNZBFile->GetNZBInfo()->BuildDestDirName();
pNZBInfo->BuildDestDirName();
}
pNZBFile->GetNZBInfo()->SetDupeKey(szDupeKey);
pNZBFile->GetNZBInfo()->SetDupeScore(iDupeScore);
pNZBFile->GetNZBInfo()->SetDupeMode(eDupeMode);
pNZBInfo->SetDupeKey(szDupeKey);
pNZBInfo->SetDupeScore(iDupeScore);
pNZBInfo->SetDupeMode(eDupeMode);
pNZBInfo->SetPriority(iPriority);
if (pUrlInfo)
{
pNZBInfo->SetURL(pUrlInfo->GetURL());
pNZBInfo->SetUrlStatus(pUrlInfo->GetUrlStatus());
}
if (pNZBFile->GetPassword())
{
pNZBFile->GetNZBInfo()->GetParameters()->SetParameter("*Unpack:Password", pNZBFile->GetPassword());
pNZBInfo->GetParameters()->SetParameter("*Unpack:Password", pNZBFile->GetPassword());
}
pNZBFile->GetNZBInfo()->GetParameters()->CopyFrom(pParameters);
pNZBInfo->GetParameters()->CopyFrom(pParameters);
for (NZBFile::FileInfos::iterator it = pNZBFile->GetFileInfos()->begin(); it != pNZBFile->GetFileInfos()->end(); it++)
for (::FileList::iterator it = pNZBInfo->GetFileList()->begin(); it != pNZBInfo->GetFileList()->end(); it++)
{
FileInfo* pFileInfo = *it;
pFileInfo->SetPriority(iPriority);
pFileInfo->SetPaused(bAddPaused);
}
g_pQueueCoordinator->AddNZBFileToQueue(pNZBFile, bAddTop);
g_pQueueCoordinator->AddNZBFileToQueue(pNZBFile, pUrlInfo, bAddTop);
if (pNZBID)
{
*pNZBID = pNZBInfo->GetID();
}
}
delete pNZBFile;
@@ -543,8 +769,8 @@ void Scanner::ScanNZBDir(bool bSyncMode)
Scanner::EAddStatus Scanner::AddExternalFile(const char* szNZBName, const char* szCategory,
int iPriority, const char* szDupeKey, int iDupeScore, EDupeMode eDupeMode,
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused,
const char* szFileName, const char* szBuffer, int iBufSize)
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused, NZBInfo* pUrlInfo,
const char* szFileName, const char* szBuffer, int iBufSize, int* pNZBID)
{
bool bNZB = false;
char szTempFileName[1024];
@@ -629,8 +855,8 @@ Scanner::EAddStatus Scanner::AddExternalFile(const char* szNZBName, const char*
}
char* szUseCategory = strdup(szCategory ? szCategory : "");
Options::Category *pCategory = g_pOptions->FindCategory(szCategory, true);
if (pCategory && strcmp(szCategory, pCategory->GetName()))
Options::Category *pCategory = g_pOptions->FindCategory(szUseCategory, true);
if (pCategory && strcmp(szUseCategory, pCategory->GetName()))
{
free(szUseCategory);
szUseCategory = strdup(pCategory->GetName());
@@ -638,8 +864,9 @@ Scanner::EAddStatus Scanner::AddExternalFile(const char* szNZBName, const char*
}
EAddStatus eAddStatus = asSkipped;
QueueData* pQueueData = new QueueData(szScanFileName, szNZBName, szUseCategory,
iPriority, szDupeKey, iDupeScore, eDupeMode, pParameters, bAddTop, bAddPaused, &eAddStatus);
QueueData* pQueueData = new QueueData(szScanFileName, szNZBName, szUseCategory, iPriority,
szDupeKey, iDupeScore, eDupeMode, pParameters, bAddTop, bAddPaused, pUrlInfo,
&eAddStatus, pNZBID);
free(szUseCategory);
m_QueueList.push_back(pQueueData);


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -74,12 +74,15 @@ private:
NZBParameterList m_Parameters;
bool m_bAddTop;
bool m_bAddPaused;
NZBInfo* m_pUrlInfo;
EAddStatus* m_pAddStatus;
int* m_pNZBID;
public:
QueueData(const char* szFilename, const char* szNZBName, const char* szCategory,
int iPriority, const char* szDupeKey, int iDupeScore, EDupeMode eDupeMode,
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused, EAddStatus* pAddStatus);
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused, NZBInfo* pUrlInfo,
EAddStatus* pAddStatus, int* pNZBID);
~QueueData();
const char* GetFilename() { return m_szFilename; }
const char* GetNZBName() { return m_szNZBName; }
@@ -91,14 +94,16 @@ private:
NZBParameterList* GetParameters() { return &m_Parameters; }
bool GetAddTop() { return m_bAddTop; }
bool GetAddPaused() { return m_bAddPaused; }
NZBInfo* GetUrlInfo() { return m_pUrlInfo; }
void SetAddStatus(EAddStatus eAddStatus);
void SetNZBID(int iNZBID);
};
typedef std::deque<QueueData*> QueueList;
bool m_bRequestedNZBDirScan;
int m_iNZBDirInterval;
bool m_bNZBScript;
bool m_bScanScript;
int m_iPass;
FileList m_FileList;
QueueList m_QueueList;
@@ -108,23 +113,24 @@ private:
void CheckIncomingNZBs(const char* szDirectory, const char* szCategory, bool bCheckStat);
bool AddFileToQueue(const char* szFilename, const char* szNZBName, const char* szCategory,
int iPriority, const char* szDupeKey, int iDupeScore, EDupeMode eDupeMode,
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused);
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused, NZBInfo* pUrlInfo, int* pNZBID);
void ProcessIncomingFile(const char* szDirectory, const char* szBaseFilename,
const char* szFullFilename, const char* szCategory);
bool CanProcessFile(const char* szFullFilename, bool bCheckStat);
void InitPPParameters(const char* szCategory, NZBParameterList* pParameters);
void DropOldFiles();
void ClearQueueList();
public:
Scanner();
~Scanner();
void InitOptions();
void ScanNZBDir(bool bSyncMode);
void Check();
EAddStatus AddExternalFile(const char* szNZBName, const char* szCategory, int iPriority,
const char* szDupeKey, int iDupeScore, EDupeMode eDupeMode,
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused,
const char* szFileName, const char* szBuffer, int iBufSize);
NZBParameterList* pParameters, bool bAddTop, bool bAddPaused, NZBInfo* pUrlInfo,
const char* szFileName, const char* szBuffer, int iBufSize, int* pNZBID);
void InitPPParameters(const char* szCategory, NZBParameterList* pParameters, bool bReset);
};
#endif


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2012-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2012-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -44,19 +44,13 @@
#include "UrlCoordinator.h"
#include "Options.h"
#include "WebDownloader.h"
#include "DiskState.h"
#include "Log.h"
#include "Util.h"
#include "NZBFile.h"
#include "QueueCoordinator.h"
#include "Scanner.h"
extern Options* g_pOptions;
extern DiskState* g_pDiskState;
extern QueueCoordinator* g_pQueueCoordinator;
extern Scanner* g_pScanner;
UrlDownloader::UrlDownloader() : WebDownloader()
{
m_szCategory = NULL;
@@ -86,7 +80,7 @@ void UrlDownloader::ProcessHeader(const char* szLine)
char* szValue = strchr(szModLine, ':');
if (szValue)
{
*szValue = NULL;
*szValue = '\0';
szValue++;
while (*szValue == ' ') szValue++;
Util::Trim(szValue);
@@ -99,7 +93,7 @@ void UrlDownloader::ProcessHeader(const char* szLine)
szParamName[100-1] = '\0';
char* szVal = WebUtil::Latin1ToUtf8(szValue);
m_ppParameters.SetParameter(szParamName, szVal);
m_pNZBInfo->GetParameters()->SetParameter(szParamName, szVal);
free(szVal);
}
free(szModLine);
@@ -111,7 +105,8 @@ UrlCoordinator::UrlCoordinator()
debug("Creating UrlCoordinator");
m_bHasMoreJobs = true;
m_bForce = false;
g_pLog->RegisterDebuggable(this);
}
UrlCoordinator::~UrlCoordinator()
@@ -119,6 +114,8 @@ UrlCoordinator::~UrlCoordinator()
debug("Destroying UrlCoordinator");
// Cleanup
g_pLog->UnregisterDebuggable(this);
debug("Deleting UrlDownloaders");
for (ActiveDownloads::iterator it = m_ActiveDownloads.begin(); it != m_ActiveDownloads.end(); it++)
{
@@ -126,14 +123,6 @@ UrlCoordinator::~UrlCoordinator()
}
m_ActiveDownloads.clear();
DownloadQueue* pDownloadQueue = g_pQueueCoordinator->LockQueue();
for (UrlQueue::iterator it = pDownloadQueue->GetUrlQueue()->begin(); it != pDownloadQueue->GetUrlQueue()->end(); it++)
{
delete *it;
}
pDownloadQueue->GetUrlQueue()->clear();
g_pQueueCoordinator->UnlockQueue();
debug("UrlCoordinator destroyed");
}
@@ -141,34 +130,36 @@ void UrlCoordinator::Run()
{
debug("Entering UrlCoordinator-loop");
while (!DownloadQueue::IsLoaded())
{
usleep(20 * 1000);
}
int iResetCounter = 0;
while (!IsStopped())
{
if (!(g_pOptions->GetPauseDownload() || g_pOptions->GetPauseDownload2()) || m_bForce || g_pOptions->GetUrlForce())
bool bDownloadStarted = false;
if (!g_pOptions->GetPauseDownload() || g_pOptions->GetUrlForce())
{
// start download for next URL
DownloadQueue* pDownloadQueue = g_pQueueCoordinator->LockQueue();
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
if ((int)m_ActiveDownloads.size() < g_pOptions->GetUrlConnections())
{
UrlInfo* pUrlInfo;
bool bHasMoreUrls = GetNextUrl(pDownloadQueue, pUrlInfo);
NZBInfo* pNZBInfo = GetNextUrl(pDownloadQueue);
bool bHasMoreUrls = pNZBInfo != NULL;
bool bUrlDownloadsRunning = !m_ActiveDownloads.empty();
m_bHasMoreJobs = bHasMoreUrls || bUrlDownloadsRunning;
if (bHasMoreUrls && !IsStopped())
{
StartUrlDownload(pUrlInfo);
}
if (!bHasMoreUrls)
{
m_bForce = false;
StartUrlDownload(pNZBInfo);
bDownloadStarted = true;
}
}
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
}
int iSleepInterval = 100;
int iSleepInterval = bDownloadStarted ? 0 : 100;
usleep(iSleepInterval * 1000);
iResetCounter += iSleepInterval;
@@ -185,9 +176,9 @@ void UrlCoordinator::Run()
bool completed = false;
while (!completed)
{
g_pQueueCoordinator->LockQueue();
DownloadQueue::Lock();
completed = m_ActiveDownloads.size() == 0;
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
usleep(100 * 1000);
ResetHangingDownloads();
}
@@ -201,12 +192,12 @@ void UrlCoordinator::Stop()
Thread::Stop();
debug("Stopping UrlDownloads");
g_pQueueCoordinator->LockQueue();
DownloadQueue::Lock();
for (ActiveDownloads::iterator it = m_ActiveDownloads.begin(); it != m_ActiveDownloads.end(); it++)
{
(*it)->Stop();
}
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
debug("UrlDownloads are notified");
}
@@ -218,8 +209,8 @@ void UrlCoordinator::ResetHangingDownloads()
return;
}
g_pQueueCoordinator->LockQueue();
time_t tm = ::time(NULL);
DownloadQueue::Lock();
time_t tm = time(NULL);
for (ActiveDownloads::iterator it = m_ActiveDownloads.begin(); it != m_ActiveDownloads.end();)
{
@@ -227,12 +218,12 @@ void UrlCoordinator::ResetHangingDownloads()
if (tm - pUrlDownloader->GetLastUpdateTime() > TimeOut &&
pUrlDownloader->GetStatus() == UrlDownloader::adRunning)
{
UrlInfo* pUrlInfo = pUrlDownloader->GetUrlInfo();
NZBInfo* pNZBInfo = pUrlDownloader->GetNZBInfo();
debug("Terminating hanging download %s", pUrlDownloader->GetInfoName());
if (pUrlDownloader->Terminate())
{
error("Terminated hanging download %s", pUrlDownloader->GetInfoName());
pUrlInfo->SetStatus(UrlInfo::aiUndefined);
pNZBInfo->SetUrlStatus(NZBInfo::lsNone);
}
else
{
@@ -247,86 +238,87 @@ void UrlCoordinator::ResetHangingDownloads()
it++;
}
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
}
void UrlCoordinator::LogDebugInfo()
{
debug(" UrlCoordinator");
debug(" ----------------");
info(" ---------- UrlCoordinator");
g_pQueueCoordinator->LockQueue();
debug(" Active Downloads: %i", m_ActiveDownloads.size());
DownloadQueue::Lock();
info(" Active Downloads: %i", m_ActiveDownloads.size());
for (ActiveDownloads::iterator it = m_ActiveDownloads.begin(); it != m_ActiveDownloads.end(); it++)
{
UrlDownloader* pUrlDownloader = *it;
pUrlDownloader->LogDebugInfo();
}
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
}
void UrlCoordinator::AddUrlToQueue(UrlInfo* pUrlInfo, bool AddFirst)
void UrlCoordinator::AddUrlToQueue(NZBInfo* pNZBInfo, bool bAddTop)
{
debug("Adding NZB-URL to queue");
DownloadQueue* pDownloadQueue = g_pQueueCoordinator->LockQueue();
pDownloadQueue->GetUrlQueue()->push_back(pUrlInfo);
if (g_pOptions->GetSaveQueue() && g_pOptions->GetServerMode())
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
if (bAddTop)
{
g_pDiskState->SaveDownloadQueue(pDownloadQueue);
pDownloadQueue->GetQueue()->push_front(pNZBInfo);
}
if (pUrlInfo->GetForce())
else
{
m_bForce = true;
pDownloadQueue->GetQueue()->push_back(pNZBInfo);
}
g_pQueueCoordinator->UnlockQueue();
pDownloadQueue->Save();
DownloadQueue::Unlock();
}
/*
* Returns next URL for download.
*/
bool UrlCoordinator::GetNextUrl(DownloadQueue* pDownloadQueue, UrlInfo* &pUrlInfo)
NZBInfo* UrlCoordinator::GetNextUrl(DownloadQueue* pDownloadQueue)
{
bool bPauseDownload = g_pOptions->GetPauseDownload() || g_pOptions->GetPauseDownload2();
bool bPauseDownload = g_pOptions->GetPauseDownload();
for (UrlQueue::iterator at = pDownloadQueue->GetUrlQueue()->begin(); at != pDownloadQueue->GetUrlQueue()->end(); at++)
NZBInfo* pNZBInfo = NULL;
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
pUrlInfo = *at;
if (pUrlInfo->GetStatus() == 0 && (!bPauseDownload || pUrlInfo->GetForce() || g_pOptions->GetUrlForce()))
NZBInfo* pNZBInfo1 = *it;
if (pNZBInfo1->GetKind() == NZBInfo::nkUrl &&
pNZBInfo1->GetUrlStatus() == NZBInfo::lsNone &&
pNZBInfo1->GetDeleteStatus() == NZBInfo::dsNone &&
(!bPauseDownload || g_pOptions->GetUrlForce()) &&
(!pNZBInfo || pNZBInfo1->GetPriority() > pNZBInfo->GetPriority()))
{
return true;
break;
pNZBInfo = pNZBInfo1;
}
}
return false;
return pNZBInfo;
}
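Unlike the old variant, which returned the first pending UrlInfo through an out-parameter, the new GetNextUrl() walks the unified queue and keeps the URL-kind entry with the highest priority. A minimal standalone version of that rule, with a hypothetical UrlEntry struct standing in for NZBInfo, might be:

#include <cstdio>
#include <list>

// Hypothetical stand-in for an URL-kind NZBInfo in the unified download queue.
struct UrlEntry { int id; int priority; bool pending; };

// Return the pending entry with the highest priority, or nullptr if none,
// mirroring the selection in UrlCoordinator::GetNextUrl().
UrlEntry* NextUrl(std::list<UrlEntry>& queue)
{
	UrlEntry* best = nullptr;
	for (UrlEntry& entry : queue)
	{
		if (entry.pending && (!best || entry.priority > best->priority))
		{
			best = &entry;
		}
	}
	return best;
}

int main()
{
	std::list<UrlEntry> queue = { {1, 0, true}, {2, 100, true}, {3, 50, false} };
	if (UrlEntry* next = NextUrl(queue))
	{
		std::printf("download URL entry %i (priority %i)\n", next->id, next->priority);
	}
	return 0;
}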
void UrlCoordinator::StartUrlDownload(UrlInfo* pUrlInfo)
void UrlCoordinator::StartUrlDownload(NZBInfo* pNZBInfo)
{
debug("Starting new UrlDownloader");
UrlDownloader* pUrlDownloader = new UrlDownloader();
pUrlDownloader->SetAutoDestroy(true);
pUrlDownloader->Attach(this);
pUrlDownloader->SetUrlInfo(pUrlInfo);
pUrlDownloader->SetURL(pUrlInfo->GetURL());
pUrlDownloader->SetForce(pUrlInfo->GetForce() || g_pOptions->GetUrlForce());
pUrlDownloader->SetNZBInfo(pNZBInfo);
pUrlDownloader->SetURL(pNZBInfo->GetURL());
pUrlDownloader->SetForce(g_pOptions->GetUrlForce());
pNZBInfo->SetActiveDownloads(1);
char tmp[1024];
pUrlInfo->GetName(tmp, 1024);
pNZBInfo->MakeNiceUrlName(pNZBInfo->GetURL(), pNZBInfo->GetFilename(), tmp, 1024);
pUrlDownloader->SetInfoName(tmp);
snprintf(tmp, 1024, "%surl-%i.tmp", g_pOptions->GetTempDir(), pUrlInfo->GetID());
snprintf(tmp, 1024, "%surl-%i.tmp", g_pOptions->GetTempDir(), pNZBInfo->GetID());
tmp[1024-1] = '\0';
pUrlDownloader->SetOutputFilename(tmp);
pUrlInfo->SetStatus(UrlInfo::aiRunning);
pNZBInfo->SetUrlStatus(NZBInfo::lsRunning);
m_ActiveDownloads.push_back(pUrlDownloader);
pUrlDownloader->Start();
@@ -349,20 +341,8 @@ void UrlCoordinator::UrlCompleted(UrlDownloader* pUrlDownloader)
{
debug("URL downloaded");
UrlInfo* pUrlInfo = pUrlDownloader->GetUrlInfo();
if (pUrlDownloader->GetStatus() == WebDownloader::adFinished)
{
pUrlInfo->SetStatus(UrlInfo::aiFinished);
}
else if (pUrlDownloader->GetStatus() == WebDownloader::adFailed)
{
pUrlInfo->SetStatus(UrlInfo::aiFailed);
}
else if (pUrlDownloader->GetStatus() == WebDownloader::adRetry)
{
pUrlInfo->SetStatus(UrlInfo::aiUndefined);
}
bool bRetry = pUrlDownloader->GetStatus() == WebDownloader::adRetry;
NZBInfo* pNZBInfo = pUrlDownloader->GetNZBInfo();
char filename[1024];
if (pUrlDownloader->GetOriginalFilename())
@@ -372,7 +352,7 @@ void UrlCoordinator::UrlCompleted(UrlDownloader* pUrlDownloader)
}
else
{
strncpy(filename, Util::BaseFileName(pUrlInfo->GetURL()), 1024);
strncpy(filename, Util::BaseFileName(pNZBInfo->GetURL()), 1024);
filename[1024-1] = '\0';
// TODO: decode URL escaping
@@ -382,8 +362,9 @@ void UrlCoordinator::UrlCompleted(UrlDownloader* pUrlDownloader)
debug("Filename: [%s]", filename);
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
// delete Download from active jobs
g_pQueueCoordinator->LockQueue();
for (ActiveDownloads::iterator it = m_ActiveDownloads.begin(); it != m_ActiveDownloads.end(); it++)
{
UrlDownloader* pa = *it;
@@ -393,62 +374,124 @@ void UrlCoordinator::UrlCompleted(UrlDownloader* pUrlDownloader)
break;
}
}
g_pQueueCoordinator->UnlockQueue();
pNZBInfo->SetActiveDownloads(0);
Aspect aspect = { eaUrlCompleted, pUrlInfo };
Notify(&aspect);
if (pNZBInfo->GetDeleting())
{
pNZBInfo->SetDeleteStatus(NZBInfo::dsManual);
pNZBInfo->SetUrlStatus(NZBInfo::lsNone);
pNZBInfo->SetDeleting(false);
}
else if (pUrlDownloader->GetStatus() == WebDownloader::adFinished)
{
pNZBInfo->SetUrlStatus(NZBInfo::lsFinished);
}
else if (pUrlDownloader->GetStatus() == WebDownloader::adFailed)
{
pNZBInfo->SetUrlStatus(NZBInfo::lsFailed);
}
else if (pUrlDownloader->GetStatus() == WebDownloader::adRetry)
{
pNZBInfo->SetUrlStatus(NZBInfo::lsNone);
}
if (pUrlInfo->GetStatus() == UrlInfo::aiFinished)
if (!bRetry)
{
DownloadQueue::Aspect aspect = { DownloadQueue::eaUrlCompleted, pDownloadQueue, pNZBInfo, NULL };
pDownloadQueue->Notify(&aspect);
}
DownloadQueue::Unlock();
if (bRetry)
{
return;
}
if (pNZBInfo->GetUrlStatus() == NZBInfo::lsFinished)
{
// add nzb-file to download queue
Scanner::EAddStatus eAddStatus = g_pScanner->AddExternalFile(
pUrlInfo->GetNZBFilename() && strlen(pUrlInfo->GetNZBFilename()) > 0 ? pUrlInfo->GetNZBFilename() : filename,
strlen(pUrlInfo->GetCategory()) > 0 ? pUrlInfo->GetCategory() : pUrlDownloader->GetCategory(),
pUrlInfo->GetPriority(), pUrlInfo->GetDupeKey(), pUrlInfo->GetDupeScore(), pUrlInfo->GetDupeMode(),
pUrlDownloader->GetParameters(), pUrlInfo->GetAddTop(), pUrlInfo->GetAddPaused(),
pUrlDownloader->GetOutputFilename(), NULL, 0);
!Util::EmptyStr(pNZBInfo->GetFilename()) ? pNZBInfo->GetFilename() : filename,
!Util::EmptyStr(pNZBInfo->GetCategory()) ? pNZBInfo->GetCategory() : pUrlDownloader->GetCategory(),
pNZBInfo->GetPriority(), pNZBInfo->GetDupeKey(), pNZBInfo->GetDupeScore(), pNZBInfo->GetDupeMode(),
pNZBInfo->GetParameters(), false, pNZBInfo->GetAddUrlPaused(), pNZBInfo,
pUrlDownloader->GetOutputFilename(), NULL, 0, NULL);
if (eAddStatus != Scanner::asSuccess)
if (eAddStatus == Scanner::asSuccess)
{
pUrlInfo->SetStatus(eAddStatus == Scanner::asFailed ? UrlInfo::aiScanFailed : UrlInfo::aiScanSkipped);
// if scanner has successfully added nzb-file to queue, our pNZBInfo is
// already removed from queue and destroyed
return;
}
pNZBInfo->SetUrlStatus(eAddStatus == Scanner::asFailed ? NZBInfo::lsScanFailed : NZBInfo::lsScanSkipped);
}
// delete Download from Url Queue
if (pUrlInfo->GetStatus() != UrlInfo::aiRetry)
// the rest of the function handles only failed URLs or failed scans
pDownloadQueue = DownloadQueue::Lock();
// delete URL from queue
pDownloadQueue->GetQueue()->Remove(pNZBInfo);
bool bDeleteObj = true;
// add failed URL to history
if (g_pOptions->GetKeepHistory() > 0 &&
pNZBInfo->GetUrlStatus() != NZBInfo::lsFinished &&
!pNZBInfo->GetAvoidHistory())
{
DownloadQueue* pDownloadQueue = g_pQueueCoordinator->LockQueue();
HistoryInfo* pHistoryInfo = new HistoryInfo(pNZBInfo);
pHistoryInfo->SetTime(time(NULL));
pDownloadQueue->GetHistory()->push_front(pHistoryInfo);
bDeleteObj = false;
}
pDownloadQueue->Save();
for (UrlQueue::iterator it = pDownloadQueue->GetUrlQueue()->begin(); it != pDownloadQueue->GetUrlQueue()->end(); it++)
{
UrlInfo* pa = *it;
if (pa == pUrlInfo)
{
pDownloadQueue->GetUrlQueue()->erase(it);
break;
}
}
DownloadQueue::Unlock();
bool bDeleteObj = true;
if (g_pOptions->GetKeepHistory() > 0 && pUrlInfo->GetStatus() != UrlInfo::aiFinished)
{
HistoryInfo* pHistoryInfo = new HistoryInfo(pUrlInfo);
pHistoryInfo->SetTime(time(NULL));
pDownloadQueue->GetHistoryList()->push_front(pHistoryInfo);
bDeleteObj = false;
}
if (g_pOptions->GetSaveQueue() && g_pOptions->GetServerMode())
{
g_pDiskState->SaveDownloadQueue(pDownloadQueue);
}
g_pQueueCoordinator->UnlockQueue();
if (bDeleteObj)
{
delete pUrlInfo;
}
if (bDeleteObj)
{
delete pNZBInfo;
}
}
bool UrlCoordinator::DeleteQueueEntry(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo, bool bAvoidHistory)
{
if (pNZBInfo->GetActiveDownloads() > 0)
{
info("Deleting active URL %s", pNZBInfo->GetName());
pNZBInfo->SetDeleting(true);
pNZBInfo->SetAvoidHistory(bAvoidHistory);
for (ActiveDownloads::iterator it = m_ActiveDownloads.begin(); it != m_ActiveDownloads.end(); it++)
{
UrlDownloader* pUrlDownloader = *it;
if (pUrlDownloader->GetNZBInfo() == pNZBInfo)
{
pUrlDownloader->Stop();
return true;
}
}
}
info("Deleting URL %s", pNZBInfo->GetName());
pNZBInfo->SetDeleteStatus(NZBInfo::dsManual);
pNZBInfo->SetUrlStatus(NZBInfo::lsNone);
pDownloadQueue->GetQueue()->Remove(pNZBInfo);
if (g_pOptions->GetKeepHistory() > 0 && !bAvoidHistory)
{
HistoryInfo* pHistoryInfo = new HistoryInfo(pNZBInfo);
pHistoryInfo->SetTime(time(NULL));
pDownloadQueue->GetHistory()->push_front(pHistoryInfo);
}
else
{
delete pNZBInfo;
}
return true;
}
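DeleteQueueEntry() above shows the deferred-delete pattern used for URL entries: an entry with an active download is only flagged and its downloader asked to stop; the actual removal happens later in UrlCompleted(). A toy model of that flow (hypothetical Entry struct, no threading or locking) could look like this:

#include <cstdio>

// Minimal model of the deferred-delete pattern in UrlCoordinator::DeleteQueueEntry().
struct Entry
{
	int id;
	bool activeDownload;
	bool deleting;
};

void RequestDelete(Entry& entry)
{
	if (entry.activeDownload)
	{
		entry.deleting = true; // defer: the downloader is asked to stop first
		return;
	}
	std::printf("entry %i removed immediately\n", entry.id);
}

void DownloadCompleted(Entry& entry)
{
	entry.activeDownload = false;
	if (entry.deleting)
	{
		std::printf("entry %i removed after its download stopped\n", entry.id);
	}
}

int main()
{
	Entry entry = { 1, true, false };
	RequestDelete(entry);     // only flags the entry
	DownloadCompleted(entry); // now it is actually removed
	return 0;
}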


@@ -1,7 +1,7 @@
/*
* This file is part of nzbget
*
* Copyright (C) 2012-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2012-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -30,6 +30,7 @@
#include <list>
#include <time.h>
#include "Log.h"
#include "Thread.h"
#include "WebDownloader.h"
#include "DownloadInfo.h"
@@ -37,31 +38,24 @@
class UrlDownloader;
class UrlCoordinator : public Thread, public Observer, public Subject
class UrlCoordinator : public Thread, public Observer, public Debuggable
{
public:
private:
typedef std::list<UrlDownloader*> ActiveDownloads;
enum EAspectAction
{
eaUrlAdded,
eaUrlCompleted
};
struct Aspect
{
EAspectAction eAction;
UrlInfo* pUrlInfo;
};
private:
ActiveDownloads m_ActiveDownloads;
bool m_bHasMoreJobs;
bool m_bForce;
bool GetNextUrl(DownloadQueue* pDownloadQueue, UrlInfo* &pUrlInfo);
void StartUrlDownload(UrlInfo* pUrlInfo);
NZBInfo* GetNextUrl(DownloadQueue* pDownloadQueue);
void StartUrlDownload(NZBInfo* pNZBInfo);
void UrlCompleted(UrlDownloader* pUrlDownloader);
void ResetHangingDownloads();
protected:
virtual void LogDebugInfo();
public:
UrlCoordinator();
virtual ~UrlCoordinator();
@@ -70,18 +64,16 @@ public:
void Update(Subject* pCaller, void* pAspect);
// Editing the queue
void AddUrlToQueue(UrlInfo* pUrlInfo, bool AddFirst);
void AddUrlToQueue(NZBInfo* pNZBInfo, bool bAddTop);
bool HasMoreJobs() { return m_bHasMoreJobs; }
void LogDebugInfo();
bool DeleteQueueEntry(DownloadQueue* pDownloadQueue, NZBInfo* pNZBInfo, bool bAvoidHistory);
};
class UrlDownloader : public WebDownloader
{
private:
UrlInfo* m_pUrlInfo;
NZBInfo* m_pNZBInfo;
char* m_szCategory;
NZBParameterList m_ppParameters;
protected:
virtual void ProcessHeader(const char* szLine);
@@ -89,10 +81,9 @@ protected:
public:
UrlDownloader();
~UrlDownloader();
void SetUrlInfo(UrlInfo* pUrlInfo) { m_pUrlInfo = pUrlInfo; }
UrlInfo* GetUrlInfo() { return m_pUrlInfo; }
void SetNZBInfo(NZBInfo* pNZBInfo) { m_pNZBInfo = pNZBInfo; }
NZBInfo* GetNZBInfo() { return m_pNZBInfo; }
const char* GetCategory() { return m_szCategory; }
NZBParameterList* GetParameters() { return &m_ppParameters; }
};
#endif


@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2005 Bo Cordes Petersen <placebodk@sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -46,26 +46,22 @@
#include "BinRpc.h"
#include "Log.h"
#include "Options.h"
#include "QueueCoordinator.h"
#include "UrlCoordinator.h"
#include "QueueEditor.h"
#include "PrePostProcessor.h"
#include "Util.h"
#include "DownloadInfo.h"
#include "Scanner.h"
#include "StatMeter.h"
extern Options* g_pOptions;
extern QueueCoordinator* g_pQueueCoordinator;
extern UrlCoordinator* g_pUrlCoordinator;
extern PrePostProcessor* g_pPrePostProcessor;
extern Scanner* g_pScanner;
extern StatMeter* g_pStatMeter;
extern void ExitProc();
extern void Reload();
const char* g_szMessageRequestNames[] =
{ "N/A", "Download", "Pause/Unpause", "List", "Set download rate", "Dump debug",
"Edit queue", "Log", "Quit", "Reload", "Version", "Post-queue", "Write log", "Scan",
"Pause/Unpause postprocessor", "History", "Download URL", "URL-queue" };
"Pause/Unpause postprocessor", "History", "Download URL" };
const unsigned int g_iMessageRequestSizes[] =
{ 0,
@@ -83,8 +79,7 @@ const unsigned int g_iMessageRequestSizes[] =
sizeof(SNZBWriteLogRequest),
sizeof(SNZBScanRequest),
sizeof(SNZBHistoryRequest),
sizeof(SNZBDownloadUrlRequest),
sizeof(SNZBUrlQueueRequest)
sizeof(SNZBDownloadUrlRequest)
};
//*****************************************************************
@@ -191,10 +186,6 @@ void BinRpcProcessor::Dispatch()
command = new DownloadUrlBinCommand();
break;
case eRemoteRequestUrlQueue:
command = new UrlQueueBinCommand();
break;
default:
error("Received unsupported request %i", ntohl(m_MessageBase.m_iType));
break;
@@ -259,10 +250,6 @@ void PauseUnpauseBinCommand::Execute()
g_pOptions->SetPauseDownload(ntohl(PauseUnpauseRequest.m_bPause));
break;
case eRemotePauseUnpauseActionDownload2:
g_pOptions->SetPauseDownload2(ntohl(PauseUnpauseRequest.m_bPause));
break;
case eRemotePauseUnpauseActionPostProcess:
g_pOptions->SetPausePostProcess(ntohl(PauseUnpauseRequest.m_bPause));
break;
@@ -295,8 +282,7 @@ void DumpDebugBinCommand::Execute()
return;
}
g_pQueueCoordinator->LogDebugInfo();
g_pUrlCoordinator->LogDebugInfo();
g_pLog->LogDebugInfo();
SendBoolResponse(true, "Debug-Command completed successfully");
}
@@ -358,7 +344,7 @@ void DownloadBinCommand::Execute()
bool bAddTop = ntohl(DownloadRequest.m_bAddFirst);
bool bOK = g_pScanner->AddExternalFile(DownloadRequest.m_szFilename, DownloadRequest.m_szCategory,
iPriority, NULL, 0, dmScore, NULL, bAddTop, bAddPaused, NULL, pRecvBuffer, iBufLen) != Scanner::asFailed;
iPriority, NULL, 0, dmScore, NULL, bAddTop, bAddPaused, NULL, NULL, pRecvBuffer, iBufLen, NULL) != Scanner::asFailed;
char tmp[1024];
snprintf(tmp, 1024, bOK ? "Collection %s added to queue" : "Download Request failed for %s",
@@ -402,13 +388,13 @@ void ListBinCommand::Execute()
}
// Make a data structure and copy all the elements of the list into it
DownloadQueue* pDownloadQueue = g_pQueueCoordinator->LockQueue();
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
// calculate required buffer size for nzbs
int iNrNZBEntries = pDownloadQueue->GetNZBInfoList()->size();
int iNrNZBEntries = pDownloadQueue->GetQueue()->size();
int iNrPPPEntries = 0;
bufsize += iNrNZBEntries * sizeof(SNZBListResponseNZBEntry);
for (NZBInfoList::iterator it = pDownloadQueue->GetNZBInfoList()->begin(); it != pDownloadQueue->GetNZBInfoList()->end(); it++)
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
NZBInfo* pNZBInfo = *it;
bufsize += strlen(pNZBInfo->GetFilename()) + 1;
@@ -433,29 +419,48 @@ void ListBinCommand::Execute()
}
// calculate required buffer size for files
int iNrFileEntries = pDownloadQueue->GetFileQueue()->size();
bufsize += iNrFileEntries * sizeof(SNZBListResponseFileEntry);
for (FileQueue::iterator it = pDownloadQueue->GetFileQueue()->begin(); it != pDownloadQueue->GetFileQueue()->end(); it++)
int iNrFileEntries = 0;
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
FileInfo* pFileInfo = *it;
bufsize += strlen(pFileInfo->GetSubject()) + 1;
bufsize += strlen(pFileInfo->GetFilename()) + 1;
// align struct to 4-bytes, needed by ARM-processor (and may be others)
bufsize += bufsize % 4 > 0 ? 4 - bufsize % 4 : 0;
NZBInfo* pNZBInfo = *it;
for (FileList::iterator it2 = pNZBInfo->GetFileList()->begin(); it2 != pNZBInfo->GetFileList()->end(); it2++)
{
FileInfo* pFileInfo = *it2;
iNrFileEntries++;
bufsize += sizeof(SNZBListResponseFileEntry);
bufsize += strlen(pFileInfo->GetSubject()) + 1;
bufsize += strlen(pFileInfo->GetFilename()) + 1;
// align struct to 4-bytes, needed by ARM-processor (and may be others)
bufsize += bufsize % 4 > 0 ? 4 - bufsize % 4 : 0;
}
}
buf = (char*) malloc(bufsize);
char* bufptr = buf;
// write nzb entries
for (NZBInfoList::iterator it = pDownloadQueue->GetNZBInfoList()->begin(); it != pDownloadQueue->GetNZBInfoList()->end(); it++)
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
unsigned long iSizeHi, iSizeLo;
NZBInfo* pNZBInfo = *it;
SNZBListResponseNZBEntry* pListAnswer = (SNZBListResponseNZBEntry*) bufptr;
unsigned long iSizeHi, iSizeLo, iRemainingSizeHi, iRemainingSizeLo, iPausedSizeHi, iPausedSizeLo;
Util::SplitInt64(pNZBInfo->GetSize(), &iSizeHi, &iSizeLo);
Util::SplitInt64(pNZBInfo->GetRemainingSize(), &iRemainingSizeHi, &iRemainingSizeLo);
Util::SplitInt64(pNZBInfo->GetPausedSize(), &iPausedSizeHi, &iPausedSizeLo);
pListAnswer->m_iID = htonl(pNZBInfo->GetID());
pListAnswer->m_iKind = htonl(pNZBInfo->GetKind());
pListAnswer->m_iSizeLo = htonl(iSizeLo);
pListAnswer->m_iSizeHi = htonl(iSizeHi);
pListAnswer->m_iRemainingSizeLo = htonl(iRemainingSizeLo);
pListAnswer->m_iRemainingSizeHi = htonl(iRemainingSizeHi);
pListAnswer->m_iPausedSizeLo = htonl(iPausedSizeLo);
pListAnswer->m_iPausedSizeHi = htonl(iPausedSizeHi);
pListAnswer->m_iPausedCount = htonl(pNZBInfo->GetPausedFileCount());
pListAnswer->m_iRemainingParCount = htonl(pNZBInfo->GetRemainingParCount());
pListAnswer->m_iPriority = htonl(pNZBInfo->GetPriority());
pListAnswer->m_bMatch = htonl(bMatchGroup && (!pRegEx || pRegEx->Match(pNZBInfo->GetName())));
pListAnswer->m_iFilenameLen = htonl(strlen(pNZBInfo->GetFilename()) + 1);
pListAnswer->m_iNameLen = htonl(strlen(pNZBInfo->GetName()) + 1);
@@ -484,7 +489,7 @@ void ListBinCommand::Execute()
// write ppp entries
int iNZBIndex = 1;
for (NZBInfoList::iterator it = pDownloadQueue->GetNZBInfoList()->begin(); it != pDownloadQueue->GetNZBInfoList()->end(); it++, iNZBIndex++)
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++, iNZBIndex++)
{
NZBInfo* pNZBInfo = *it;
for (NZBParameterList::iterator it = pNZBInfo->GetParameters()->begin(); it != pNZBInfo->GetParameters()->end(); it++)
@@ -510,58 +515,62 @@ void ListBinCommand::Execute()
}
// write file entries
for (FileQueue::iterator it = pDownloadQueue->GetFileQueue()->begin(); it != pDownloadQueue->GetFileQueue()->end(); it++)
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
unsigned long iSizeHi, iSizeLo;
FileInfo* pFileInfo = *it;
SNZBListResponseFileEntry* pListAnswer = (SNZBListResponseFileEntry*) bufptr;
pListAnswer->m_iID = htonl(pFileInfo->GetID());
int iNZBIndex = 0;
for (unsigned int i = 0; i < pDownloadQueue->GetNZBInfoList()->size(); i++)
NZBInfo* pNZBInfo = *it;
for (FileList::iterator it2 = pNZBInfo->GetFileList()->begin(); it2 != pNZBInfo->GetFileList()->end(); it2++)
{
iNZBIndex++;
if (pDownloadQueue->GetNZBInfoList()->at(i) == pFileInfo->GetNZBInfo())
FileInfo* pFileInfo = *it2;
unsigned long iSizeHi, iSizeLo;
SNZBListResponseFileEntry* pListAnswer = (SNZBListResponseFileEntry*) bufptr;
pListAnswer->m_iID = htonl(pFileInfo->GetID());
int iNZBIndex = 0;
for (unsigned int i = 0; i < pDownloadQueue->GetQueue()->size(); i++)
{
break;
iNZBIndex++;
if (pDownloadQueue->GetQueue()->at(i) == pFileInfo->GetNZBInfo())
{
break;
}
}
}
pListAnswer->m_iNZBIndex = htonl(iNZBIndex);
pListAnswer->m_iNZBIndex = htonl(iNZBIndex);
if (pRegEx && !bMatchGroup)
{
char szFilename[MAX_PATH];
snprintf(szFilename, sizeof(szFilename) - 1, "%s/%s", pFileInfo->GetNZBInfo()->GetName(), Util::BaseFileName(pFileInfo->GetFilename()));
pListAnswer->m_bMatch = htonl(pRegEx->Match(szFilename));
}
if (pRegEx && !bMatchGroup)
{
char szFilename[MAX_PATH];
snprintf(szFilename, sizeof(szFilename) - 1, "%s/%s", pFileInfo->GetNZBInfo()->GetName(), Util::BaseFileName(pFileInfo->GetFilename()));
pListAnswer->m_bMatch = htonl(pRegEx->Match(szFilename));
}
Util::SplitInt64(pFileInfo->GetSize(), &iSizeHi, &iSizeLo);
pListAnswer->m_iFileSizeLo = htonl(iSizeLo);
pListAnswer->m_iFileSizeHi = htonl(iSizeHi);
Util::SplitInt64(pFileInfo->GetRemainingSize(), &iSizeHi, &iSizeLo);
pListAnswer->m_iRemainingSizeLo = htonl(iSizeLo);
pListAnswer->m_iRemainingSizeHi = htonl(iSizeHi);
pListAnswer->m_bFilenameConfirmed = htonl(pFileInfo->GetFilenameConfirmed());
pListAnswer->m_bPaused = htonl(pFileInfo->GetPaused());
pListAnswer->m_iActiveDownloads = htonl(pFileInfo->GetActiveDownloads());
pListAnswer->m_iPriority = htonl(pFileInfo->GetPriority());
pListAnswer->m_iSubjectLen = htonl(strlen(pFileInfo->GetSubject()) + 1);
pListAnswer->m_iFilenameLen = htonl(strlen(pFileInfo->GetFilename()) + 1);
bufptr += sizeof(SNZBListResponseFileEntry);
strcpy(bufptr, pFileInfo->GetSubject());
bufptr += ntohl(pListAnswer->m_iSubjectLen);
strcpy(bufptr, pFileInfo->GetFilename());
bufptr += ntohl(pListAnswer->m_iFilenameLen);
// align struct to 4-bytes, needed by ARM-processor (and may be others)
if ((size_t)bufptr % 4 > 0)
{
pListAnswer->m_iFilenameLen = htonl(ntohl(pListAnswer->m_iFilenameLen) + 4 - (size_t)bufptr % 4);
memset(bufptr, 0, 4 - (size_t)bufptr % 4); //suppress valgrind warning "uninitialized data"
bufptr += 4 - (size_t)bufptr % 4;
Util::SplitInt64(pFileInfo->GetSize(), &iSizeHi, &iSizeLo);
pListAnswer->m_iFileSizeLo = htonl(iSizeLo);
pListAnswer->m_iFileSizeHi = htonl(iSizeHi);
Util::SplitInt64(pFileInfo->GetRemainingSize(), &iSizeHi, &iSizeLo);
pListAnswer->m_iRemainingSizeLo = htonl(iSizeLo);
pListAnswer->m_iRemainingSizeHi = htonl(iSizeHi);
pListAnswer->m_bFilenameConfirmed = htonl(pFileInfo->GetFilenameConfirmed());
pListAnswer->m_bPaused = htonl(pFileInfo->GetPaused());
pListAnswer->m_iActiveDownloads = htonl(pFileInfo->GetActiveDownloads());
pListAnswer->m_iSubjectLen = htonl(strlen(pFileInfo->GetSubject()) + 1);
pListAnswer->m_iFilenameLen = htonl(strlen(pFileInfo->GetFilename()) + 1);
bufptr += sizeof(SNZBListResponseFileEntry);
strcpy(bufptr, pFileInfo->GetSubject());
bufptr += ntohl(pListAnswer->m_iSubjectLen);
strcpy(bufptr, pFileInfo->GetFilename());
bufptr += ntohl(pListAnswer->m_iFilenameLen);
// align struct to 4-bytes, needed by ARM-processor (and may be others)
if ((size_t)bufptr % 4 > 0)
{
pListAnswer->m_iFilenameLen = htonl(ntohl(pListAnswer->m_iFilenameLen) + 4 - (size_t)bufptr % 4);
memset(bufptr, 0, 4 - (size_t)bufptr % 4); //suppress valgrind warning "uninitialized data"
bufptr += 4 - (size_t)bufptr % 4;
}
}
}
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
delete pRegEx;
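The repeated padding logic above keeps each variable-length response entry 4-byte aligned, which the ARM targets mentioned in the source comments require. The adjustment can be read in isolation as a small helper; a sketch, not the project's code:

#include <cstdio>
#include <cstring>

// Advance a write pointer to the next 4-byte boundary, zero-filling the gap,
// the same adjustment ListBinCommand::Execute() applies after each file entry.
char* AlignTo4(char* bufptr)
{
	size_t misalign = (size_t)bufptr % 4;
	if (misalign > 0)
	{
		memset(bufptr, 0, 4 - misalign); // keep the padding deterministic
		bufptr += 4 - misalign;
	}
	return bufptr;
}

int main()
{
	char buffer[16];
	char* ptr = buffer + 3; // simulate an unaligned write position
	ptr = AlignTo4(ptr);
	std::printf("padding inserted: %ld bytes\n", (long)(ptr - (buffer + 3)));
	return 0;
}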
@@ -573,25 +582,33 @@ void ListBinCommand::Execute()
if (htonl(ListRequest.m_bServerState))
{
DownloadQueue *pDownloadQueue = DownloadQueue::Lock();
int iPostJobCount = 0;
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
NZBInfo* pNZBInfo = *it;
iPostJobCount += pNZBInfo->GetPostInfo() ? 1 : 0;
}
long long lRemainingSize;
pDownloadQueue->CalcRemainingSize(&lRemainingSize, NULL);
DownloadQueue::Unlock();
unsigned long iSizeHi, iSizeLo;
ListResponse.m_iDownloadRate = htonl(g_pQueueCoordinator->CalcCurrentDownloadSpeed());
Util::SplitInt64(g_pQueueCoordinator->CalcRemainingSize(), &iSizeHi, &iSizeLo);
ListResponse.m_iDownloadRate = htonl(g_pStatMeter->CalcCurrentDownloadSpeed());
Util::SplitInt64(lRemainingSize, &iSizeHi, &iSizeLo);
ListResponse.m_iRemainingSizeHi = htonl(iSizeHi);
ListResponse.m_iRemainingSizeLo = htonl(iSizeLo);
ListResponse.m_iDownloadLimit = htonl(g_pOptions->GetDownloadRate());
ListResponse.m_bDownloadPaused = htonl(g_pOptions->GetPauseDownload());
ListResponse.m_bDownload2Paused = htonl(g_pOptions->GetPauseDownload2());
ListResponse.m_bPostPaused = htonl(g_pOptions->GetPausePostProcess());
ListResponse.m_bScanPaused = htonl(g_pOptions->GetPauseScan());
ListResponse.m_iThreadCount = htonl(Thread::GetThreadCount() - 1); // not counting itself
PostQueue* pPostQueue = g_pQueueCoordinator->LockQueue()->GetPostQueue();
ListResponse.m_iPostJobCount = htonl(pPostQueue->size());
g_pQueueCoordinator->UnlockQueue();
ListResponse.m_iPostJobCount = htonl(iPostJobCount);
int iUpTimeSec, iDnTimeSec;
long long iAllBytes;
bool bStandBy;
g_pQueueCoordinator->CalcStat(&iUpTimeSec, &iDnTimeSec, &iAllBytes, &bStandBy);
g_pStatMeter->CalcTotalStat(&iUpTimeSec, &iDnTimeSec, &iAllBytes, &bStandBy);
ListResponse.m_iUpTimeSec = htonl(iUpTimeSec);
ListResponse.m_iDownloadTimeSec = htonl(iDnTimeSec);
ListResponse.m_bDownloadStandBy = htonl(bStandBy);
@@ -715,7 +732,6 @@ void EditQueueBinCommand::Execute()
int iMatchMode = ntohl(EditQueueRequest.m_iMatchMode);
int iOffset = ntohl(EditQueueRequest.m_iOffset);
int iTextLen = ntohl(EditQueueRequest.m_iTextLen);
bool bSmartOrder = ntohl(EditQueueRequest.m_bSmartOrder);
unsigned int iBufLength = ntohl(EditQueueRequest.m_iTrailingDataLength);
if (iNrIDEntries * sizeof(int32_t) + iTextLen + iNameEntriesLen != iBufLength)
@@ -765,19 +781,12 @@ void EditQueueBinCommand::Execute()
}
}
bool bOK = false;
if (iAction < eRemoteEditActionPostMoveOffset)
{
bOK = g_pQueueCoordinator->GetQueueEditor()->EditList(
iNrIDEntries > 0 ? &cIDList : NULL,
iNrNameEntries > 0 ? &cNameList : NULL,
(QueueEditor::EMatchMode)iMatchMode, bSmartOrder, (QueueEditor::EEditAction)iAction, iOffset, szText);
}
else
{
bOK = g_pPrePostProcessor->QueueEditList(&cIDList, (PrePostProcessor::EEditAction)iAction, iOffset, szText);
}
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
bool bOK = pDownloadQueue->EditList(
iNrIDEntries > 0 ? &cIDList : NULL,
iNrNameEntries > 0 ? &cNameList : NULL,
(DownloadQueue::EMatchMode)iMatchMode, (DownloadQueue::EEditAction)iAction, iOffset, szText);
DownloadQueue::Unlock();
free(pBuf);
@@ -816,17 +825,23 @@ void PostQueueBinCommand::Execute()
int bufsize = 0;
// Make a data structure and copy all the elements of the list into it
PostQueue* pPostQueue = g_pQueueCoordinator->LockQueue()->GetPostQueue();
int NrEntries = pPostQueue->size();
NZBList* pNZBList = DownloadQueue::Lock()->GetQueue();
// calculate required buffer size
bufsize = NrEntries * sizeof(SNZBPostQueueResponseEntry);
for (PostQueue::iterator it = pPostQueue->begin(); it != pPostQueue->end(); it++)
int NrEntries = 0;
for (NZBList::iterator it = pNZBList->begin(); it != pNZBList->end(); it++)
{
PostInfo* pPostInfo = *it;
NZBInfo* pNZBInfo = *it;
PostInfo* pPostInfo = pNZBInfo->GetPostInfo();
if (!pPostInfo)
{
continue;
}
NrEntries++;
bufsize += sizeof(SNZBPostQueueResponseEntry);
bufsize += strlen(pPostInfo->GetNZBInfo()->GetFilename()) + 1;
bufsize += strlen(pPostInfo->GetInfoName()) + 1;
bufsize += strlen(pPostInfo->GetNZBInfo()->GetName()) + 1;
bufsize += strlen(pPostInfo->GetNZBInfo()->GetDestDir()) + 1;
bufsize += strlen(pPostInfo->GetProgressLabel()) + 1;
// align struct to 4-bytes, needed by ARM-processor (and may be others)
@@ -837,24 +852,30 @@ void PostQueueBinCommand::Execute()
buf = (char*) malloc(bufsize);
char* bufptr = buf;
for (PostQueue::iterator it = pPostQueue->begin(); it != pPostQueue->end(); it++)
for (NZBList::iterator it = pNZBList->begin(); it != pNZBList->end(); it++)
{
PostInfo* pPostInfo = *it;
NZBInfo* pNZBInfo = *it;
PostInfo* pPostInfo = pNZBInfo->GetPostInfo();
if (!pPostInfo)
{
continue;
}
SNZBPostQueueResponseEntry* pPostQueueAnswer = (SNZBPostQueueResponseEntry*) bufptr;
pPostQueueAnswer->m_iID = htonl(pPostInfo->GetID());
pPostQueueAnswer->m_iID = htonl(pNZBInfo->GetID());
pPostQueueAnswer->m_iStage = htonl(pPostInfo->GetStage());
pPostQueueAnswer->m_iStageProgress = htonl(pPostInfo->GetStageProgress());
pPostQueueAnswer->m_iFileProgress = htonl(pPostInfo->GetFileProgress());
pPostQueueAnswer->m_iTotalTimeSec = htonl((int)(pPostInfo->GetStartTime() ? tCurTime - pPostInfo->GetStartTime() : 0));
pPostQueueAnswer->m_iStageTimeSec = htonl((int)(pPostInfo->GetStageTime() ? tCurTime - pPostInfo->GetStageTime() : 0));
pPostQueueAnswer->m_iNZBFilenameLen = htonl(strlen(pPostInfo->GetNZBInfo()->GetFilename()) + 1);
pPostQueueAnswer->m_iInfoNameLen = htonl(strlen(pPostInfo->GetInfoName()) + 1);
pPostQueueAnswer->m_iInfoNameLen = htonl(strlen(pPostInfo->GetNZBInfo()->GetName()) + 1);
pPostQueueAnswer->m_iDestDirLen = htonl(strlen(pPostInfo->GetNZBInfo()->GetDestDir()) + 1);
pPostQueueAnswer->m_iProgressLabelLen = htonl(strlen(pPostInfo->GetProgressLabel()) + 1);
bufptr += sizeof(SNZBPostQueueResponseEntry);
strcpy(bufptr, pPostInfo->GetNZBInfo()->GetFilename());
bufptr += ntohl(pPostQueueAnswer->m_iNZBFilenameLen);
strcpy(bufptr, pPostInfo->GetInfoName());
strcpy(bufptr, pPostInfo->GetNZBInfo()->GetName());
bufptr += ntohl(pPostQueueAnswer->m_iInfoNameLen);
strcpy(bufptr, pPostInfo->GetNZBInfo()->GetDestDir());
bufptr += ntohl(pPostQueueAnswer->m_iDestDirLen);
@@ -869,7 +890,7 @@ void PostQueueBinCommand::Execute()
}
}
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
PostQueueResponse.m_iNrTrailingEntries = htonl(NrEntries);
PostQueueResponse.m_iTrailingDataLength = htonl(bufsize);
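The sizing and serialization loops above pad every variable-sized entry to a 4-byte boundary (the "align struct to 4-bytes, needed by ARM-processor" comments), because each entry struct is read in place through an int32_t pointer cast, which some processors require to be aligned. A minimal sketch of that padding arithmetic, factored into hypothetical helpers purely for illustration:

#include <cstddef>
#include <cstring>

// Round a running buffer size up to the next 4-byte boundary, mirroring
// "bufsize += bufsize % 4 > 0 ? 4 - bufsize % 4 : 0;" in the loops above.
static size_t AlignSize(size_t iSize)
{
	return iSize % 4 > 0 ? iSize + 4 - iSize % 4 : iSize;
}

// Advance a write pointer to the next 4-byte boundary, zero-filling the gap
// so the receiver (and valgrind) never sees uninitialized bytes.
static char* AlignPtr(char* szBufPtr)
{
	size_t iRem = (size_t)szBufPtr % 4;
	if (iRem > 0)
	{
		memset(szBufPtr, 0, 4 - iRem);
		szBufPtr += 4 - iRem;
	}
	return szBufPtr;
}

In the actual loops the padding is also added to the last string-length field of each entry, so a reader can skip the filler bytes together with that string.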
@@ -961,12 +982,12 @@ void HistoryBinCommand::Execute()
int bufsize = 0;
// Make a data structure and copy all the elements of the list into it
DownloadQueue* pDownloadQueue = g_pQueueCoordinator->LockQueue();
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
// calculate required buffer size for nzbs
int iNrEntries = pDownloadQueue->GetHistoryList()->size();
int iNrEntries = pDownloadQueue->GetHistory()->size();
bufsize += iNrEntries * sizeof(SNZBHistoryResponseEntry);
for (HistoryList::iterator it = pDownloadQueue->GetHistoryList()->begin(); it != pDownloadQueue->GetHistoryList()->end(); it++)
for (HistoryList::iterator it = pDownloadQueue->GetHistory()->begin(); it != pDownloadQueue->GetHistory()->end(); it++)
{
HistoryInfo* pHistoryInfo = *it;
char szNicename[1024];
@@ -980,7 +1001,7 @@ void HistoryBinCommand::Execute()
char* bufptr = buf;
// write nzb entries
for (HistoryList::iterator it = pDownloadQueue->GetHistoryList()->begin(); it != pDownloadQueue->GetHistoryList()->end(); it++)
for (HistoryList::iterator it = pDownloadQueue->GetHistory()->begin(); it != pDownloadQueue->GetHistory()->end(); it++)
{
HistoryInfo* pHistoryInfo = *it;
SNZBHistoryResponseEntry* pListAnswer = (SNZBHistoryResponseEntry*) bufptr;
@@ -992,7 +1013,7 @@ void HistoryBinCommand::Execute()
pHistoryInfo->GetName(szNicename, sizeof(szNicename));
pListAnswer->m_iNicenameLen = htonl(strlen(szNicename) + 1);
if (pHistoryInfo->GetKind() == HistoryInfo::hkNZBInfo)
if (pHistoryInfo->GetKind() == HistoryInfo::hkNzb)
{
NZBInfo* pNZBInfo = pHistoryInfo->GetNZBInfo();
unsigned long iSizeHi, iSizeLo;
@@ -1003,10 +1024,10 @@ void HistoryBinCommand::Execute()
pListAnswer->m_iParStatus = htonl(pNZBInfo->GetParStatus());
pListAnswer->m_iScriptStatus = htonl(pNZBInfo->GetScriptStatuses()->CalcTotalStatus());
}
else if (pHistoryInfo->GetKind() == HistoryInfo::hkUrlInfo)
else if (pHistoryInfo->GetKind() == HistoryInfo::hkUrl)
{
UrlInfo* pUrlInfo = pHistoryInfo->GetUrlInfo();
pListAnswer->m_iUrlStatus = htonl(pUrlInfo->GetStatus());
NZBInfo* pNZBInfo = pHistoryInfo->GetNZBInfo();
pListAnswer->m_iUrlStatus = htonl(pNZBInfo->GetUrlStatus());
}
bufptr += sizeof(SNZBHistoryResponseEntry);
@@ -1021,7 +1042,7 @@ void HistoryBinCommand::Execute()
}
}
g_pQueueCoordinator->UnlockQueue();
DownloadQueue::Unlock();
HistoryResponse.m_iNrTrailingEntries = htonl(iNrEntries);
HistoryResponse.m_iTrailingDataLength = htonl(bufsize);
@@ -1056,15 +1077,18 @@ void DownloadUrlBinCommand::Execute()
return;
}
UrlInfo* pUrlInfo = new UrlInfo();
pUrlInfo->SetURL(DownloadUrlRequest.m_szURL);
pUrlInfo->SetNZBFilename(DownloadUrlRequest.m_szNZBFilename);
pUrlInfo->SetCategory(DownloadUrlRequest.m_szCategory);
pUrlInfo->SetPriority(ntohl(DownloadUrlRequest.m_iPriority));
pUrlInfo->SetAddTop(ntohl(DownloadUrlRequest.m_bAddFirst));
pUrlInfo->SetAddPaused(ntohl(DownloadUrlRequest.m_bAddPaused));
NZBInfo* pNZBInfo = new NZBInfo();
pNZBInfo->SetKind(NZBInfo::nkUrl);
pNZBInfo->SetURL(DownloadUrlRequest.m_szURL);
pNZBInfo->SetFilename(DownloadUrlRequest.m_szNZBFilename);
pNZBInfo->SetCategory(DownloadUrlRequest.m_szCategory);
pNZBInfo->SetPriority(ntohl(DownloadUrlRequest.m_iPriority));
pNZBInfo->SetAddUrlPaused(ntohl(DownloadUrlRequest.m_bAddPaused));
g_pUrlCoordinator->AddUrlToQueue(pUrlInfo, ntohl(DownloadUrlRequest.m_bAddFirst));
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
pDownloadQueue->GetQueue()->Add(pNZBInfo, ntohl(DownloadUrlRequest.m_bAddFirst));
pDownloadQueue->Save();
DownloadQueue::Unlock();
info("Request: Queue url %s", DownloadUrlRequest.m_szURL);
@@ -1073,77 +1097,3 @@ void DownloadUrlBinCommand::Execute()
tmp[1024-1] = '\0';
SendBoolResponse(true, tmp);
}
void UrlQueueBinCommand::Execute()
{
SNZBUrlQueueRequest UrlQueueRequest;
if (!ReceiveRequest(&UrlQueueRequest, sizeof(UrlQueueRequest)))
{
return;
}
SNZBUrlQueueResponse UrlQueueResponse;
memset(&UrlQueueResponse, 0, sizeof(UrlQueueResponse));
UrlQueueResponse.m_MessageBase.m_iSignature = htonl(NZBMESSAGE_SIGNATURE);
UrlQueueResponse.m_MessageBase.m_iStructSize = htonl(sizeof(UrlQueueResponse));
UrlQueueResponse.m_iEntrySize = htonl(sizeof(SNZBUrlQueueResponseEntry));
char* buf = NULL;
int bufsize = 0;
// Make a data structure and copy all the elements of the list into it
UrlQueue* pUrlQueue = g_pQueueCoordinator->LockQueue()->GetUrlQueue();
int NrEntries = pUrlQueue->size();
// calculate required buffer size
bufsize = NrEntries * sizeof(SNZBUrlQueueResponseEntry);
for (UrlQueue::iterator it = pUrlQueue->begin(); it != pUrlQueue->end(); it++)
{
UrlInfo* pUrlInfo = *it;
bufsize += strlen(pUrlInfo->GetURL()) + 1;
bufsize += strlen(pUrlInfo->GetNZBFilename()) + 1;
// align struct to 4-bytes, needed by ARM-processor (and may be others)
bufsize += bufsize % 4 > 0 ? 4 - bufsize % 4 : 0;
}
buf = (char*) malloc(bufsize);
char* bufptr = buf;
for (UrlQueue::iterator it = pUrlQueue->begin(); it != pUrlQueue->end(); it++)
{
UrlInfo* pUrlInfo = *it;
SNZBUrlQueueResponseEntry* pUrlQueueAnswer = (SNZBUrlQueueResponseEntry*) bufptr;
pUrlQueueAnswer->m_iID = htonl(pUrlInfo->GetID());
pUrlQueueAnswer->m_iURLLen = htonl(strlen(pUrlInfo->GetURL()) + 1);
pUrlQueueAnswer->m_iNZBFilenameLen = htonl(strlen(pUrlInfo->GetNZBFilename()) + 1);
bufptr += sizeof(SNZBUrlQueueResponseEntry);
strcpy(bufptr, pUrlInfo->GetURL());
bufptr += ntohl(pUrlQueueAnswer->m_iURLLen);
strcpy(bufptr, pUrlInfo->GetNZBFilename());
bufptr += ntohl(pUrlQueueAnswer->m_iNZBFilenameLen);
// align struct to 4-bytes, needed by ARM-processor (and may be others)
if ((size_t)bufptr % 4 > 0)
{
pUrlQueueAnswer->m_iNZBFilenameLen = htonl(ntohl(pUrlQueueAnswer->m_iNZBFilenameLen) + 4 - (size_t)bufptr % 4);
memset(bufptr, 0, 4 - (size_t)bufptr % 4); //suppress valgrind warning "uninitialized data"
bufptr += 4 - (size_t)bufptr % 4;
}
}
g_pQueueCoordinator->UnlockQueue();
UrlQueueResponse.m_iNrTrailingEntries = htonl(NrEntries);
UrlQueueResponse.m_iTrailingDataLength = htonl(bufsize);
// Send the request answer
m_pConnection->Send((char*) &UrlQueueResponse, sizeof(UrlQueueResponse));
// Send the data
if (bufsize > 0)
{
m_pConnection->Send(buf, bufsize);
}
free(buf);
}
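With the separate URL queue removed, the DownloadUrlBinCommand change above stores URL downloads in the regular download queue as NZBInfo objects of kind NZBInfo::nkUrl instead of handing UrlInfo objects to g_pUrlCoordinator. A hedged sketch of how such entries could then be enumerated; NZBInfo::GetKind() and GetURL() are assumed here as the getters matching the SetKind()/SetURL() calls in the diff:

// Sketch only: list queued URL items from the unified download queue.
// GetKind()/GetURL() are assumed getters matching SetKind()/SetURL() above.
DownloadQueue* pDownloadQueue = DownloadQueue::Lock();
for (NZBList::iterator it = pDownloadQueue->GetQueue()->begin(); it != pDownloadQueue->GetQueue()->end(); it++)
{
	NZBInfo* pNZBInfo = *it;
	if (pNZBInfo->GetKind() == NZBInfo::nkUrl)
	{
		info("Queued URL: %s", pNZBInfo->GetURL());
	}
}
DownloadQueue::Unlock();

This is also why UrlQueueBinCommand could be deleted outright: clients now see URL items through the ordinary list response, which gains an m_iKind field in SNZBListResponseNZBEntry further below.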


@@ -2,7 +2,7 @@
* This file is part of nzbget
*
* Copyright (C) 2005 Bo Cordes Petersen <placebodk@users.sourceforge.net>
* Copyright (C) 2007-2013 Andrey Prygunkov <hugbug@users.sourceforge.net>
* Copyright (C) 2007-2014 Andrey Prygunkov <hugbug@users.sourceforge.net>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -27,7 +27,7 @@
#ifndef MESSAGEBASE_H
#define MESSAGEBASE_H
static const int32_t NZBMESSAGE_SIGNATURE = 0x6E7A621B; // = "nzb-XX" (protocol version)
static const int32_t NZBMESSAGE_SIGNATURE = 0x6E7A6225; // = "nzb-XX" (protocol version)
static const int NZBREQUESTFILENAMESIZE = 512;
static const int NZBREQUESTPASSWORDSIZE = 32;
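The signature keeps the "nzb-XX" convention from the comment: the upper three bytes are the ASCII codes for 'n', 'z' and 'b', and the low byte appears to carry the protocol version, which this change bumps from 0x1B (27) to 0x25 (37). A quick decode, for illustration only:

#include <cstdint>
#include <cstdio>

int main()
{
	const int32_t iSignature = 0x6E7A6225;
	printf("%c%c%c protocol version %d\n",
		(iSignature >> 24) & 0xFF,  // 0x6E = 'n'
		(iSignature >> 16) & 0xFF,  // 0x7A = 'z'
		(iSignature >> 8) & 0xFF,   // 0x62 = 'b'
		iSignature & 0xFF);         // 0x25 = 37 (previously 0x1B = 27)
	return 0;
}

Because the version byte is part of the signature itself, a client and a server built against different protocol versions disagree on the whole constant and can be rejected up front instead of misparsing the structs that changed in this release.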
@@ -62,67 +62,13 @@ enum eRemoteRequest
eRemoteRequestWriteLog,
eRemoteRequestScan,
eRemoteRequestHistory,
eRemoteRequestDownloadUrl,
eRemoteRequestUrlQueue
};
// Possible values for field "m_iAction" of struct "SNZBEditQueueRequest":
// File-Actions affect one file, Group-Actions affect all files in group.
// Group is a list of files, added to queue from one NZB-File.
enum eRemoteEditAction
{
eRemoteEditActionFileMoveOffset = 1, // move files to m_iOffset relative to the current position in download-queue
eRemoteEditActionFileMoveTop, // move files to the top of download-queue
eRemoteEditActionFileMoveBottom, // move files to the bottom of download-queue
eRemoteEditActionFilePause, // pause files
eRemoteEditActionFileResume, // resume (unpause) files
eRemoteEditActionFileDelete, // delete files
eRemoteEditActionFilePauseAllPars, // pause only (all) pars (does not affect other files)
eRemoteEditActionFilePauseExtraPars, // pause only (almost all) pars, except main par-file (does not affect other files)
eRemoteEditActionFileSetPriority, // set priority for files
eRemoteEditActionFileReorder, // (not supported)
eRemoteEditActionFileSplit, // split - create new group from selected files
eRemoteEditActionGroupMoveOffset, // move group to m_iOffset relative to the current position in download-queue
eRemoteEditActionGroupMoveTop, // move group to the top of download-queue
eRemoteEditActionGroupMoveBottom, // move group to the bottom of download-queue
eRemoteEditActionGroupPause, // pause group
eRemoteEditActionGroupResume, // resume (unpause) group
eRemoteEditActionGroupDelete, // delete group
eRemoteEditActionGroupDupeDelete, // delete group
eRemoteEditActionGroupFinalDelete, // delete group
eRemoteEditActionGroupPauseAllPars, // pause only (all) pars (does not affect other files) in group
eRemoteEditActionGroupPauseExtraPars, // pause only (almost all) pars in group, except main par-file (does not affect other files)
eRemoteEditActionGroupSetPriority, // set priority for groups
eRemoteEditActionGroupSetCategory, // set or change category for a group
eRemoteEditActionGroupMerge, // merge group
eRemoteEditActionGroupSetParameter, // set post-process parameter for group
eRemoteEditActionGroupSetName, // set group name (rename group)
eRemoteEditActionGroupSetDupeKey, // (reserved)
eRemoteEditActionGroupSetDupeScore, // (reserved)
eRemoteEditActionGroupSetDupeMode, // (reserved)
eRemoteEditActionPostMoveOffset = 51, // move post-job to m_iOffset relative to the current position in post-queue
eRemoteEditActionPostMoveTop, // move post-job to the top of post-queue
eRemoteEditActionPostMoveBottom, // move post-job to the bottom of post-queue
eRemoteEditActionPostDelete, // delete post-job
eRemoteEditActionHistoryDelete, // hide history-item
eRemoteEditActionHistoryFinalDelete, // delete history-item
eRemoteEditActionHistoryReturn, // move history-item back to download queue
eRemoteEditActionHistoryProcess, // move history-item back to download queue and start postprocessing
eRemoteEditActionHistoryRedownload, // move history-item back to download queue for redownload
eRemoteEditActionHistorySetParameter, // set post-process parameter for history-item
eRemoteEditActionHistorySetDupeKey, // (reserved)
eRemoteEditActionHistorySetDupeScore, // (reserved)
eRemoteEditActionHistorySetDupeMode, // (reserved)
eRemoteEditActionHistorySetDupeBackup, // (reserved)
eRemoteEditActionHistoryMarkBad, // mark history-item as bad (and download other duplicate)
eRemoteEditActionHistoryMarkGood // mark history-item as good (and push it into dup-history)
eRemoteRequestDownloadUrl
};
// Possible values for field "m_iAction" of struct "SNZBPauseUnpauseRequest":
enum eRemotePauseUnpauseAction
{
eRemotePauseUnpauseActionDownload = 1, // pause/unpause download queue
eRemotePauseUnpauseActionDownload2, // pause/unpause download queue (second pause-register)
eRemotePauseUnpauseActionPostProcess, // pause/unpause post-processor queue
eRemotePauseUnpauseActionScan // pause/unpause scan of incoming nzb-directory
};
@@ -218,8 +164,17 @@ struct SNZBListResponse
// A list response nzb entry
struct SNZBListResponseNZBEntry
{
int32_t m_iID; // NZB-ID
int32_t m_iKind; // Item Kind (see NZBInfo::Kind)
int32_t m_iSizeLo; // Size of all files in bytes, Low 32-bits of 64-bit value
int32_t m_iSizeHi; // Size of all files in bytes, High 32-bits of 64-bit value
int32_t m_iRemainingSizeLo; // Size of remaining (unpaused) files in bytes, Low 32-bits of 64-bit value
int32_t m_iRemainingSizeHi; // Size of remaining (unpaused) files in bytes, High 32-bits of 64-bit value
int32_t m_iPausedSizeLo; // Size of paused files in bytes, Low 32-bits of 64-bit value
int32_t m_iPausedSizeHi; // Size of paused files in bytes, High 32-bits of 64-bit value
int32_t m_iPausedCount; // Number of paused files
int32_t m_iRemainingParCount; // Number of remaining par-files
int32_t m_iPriority; // Download priority
int32_t m_bMatch; // 1 - group matches the pattern (only when Request has eRemoteMatchModeRegEx)
int32_t m_iFilenameLen; // Length of Filename-string (m_szFilename), following to this record
int32_t m_iNameLen; // Length of Name-string (m_szName), following to this record
@@ -254,7 +209,6 @@ struct SNZBListResponseFileEntry
int32_t m_iRemainingSizeHi; // Remaining size in bytes, High 32-bits of 64-bit value
int32_t m_bPaused; // 1 - file is paused
int32_t m_bFilenameConfirmed; // 1 - Filename confirmed (read from article body), 0 - Filename parsed from subject (can be changed after reading of article)
int32_t m_iPriority; // Download priority
int32_t m_iActiveDownloads; // Number of active downloads for this file
int32_t m_bMatch; // 1 - file matches the pattern (only when Request has eRemoteMatchModeRegEx)
int32_t m_iSubjectLen; // Length of Subject-string (m_szSubject), following to this record
@@ -328,10 +282,8 @@ struct SNZBSetDownloadRateResponse
struct SNZBEditQueueRequest
{
SNZBRequestBase m_MessageBase; // Must be the first in the struct
int32_t m_iAction; // Action to be executed, see enum eRemoteEditAction
int32_t m_iAction; // Action to be executed, see enum DownloadQueue::EEditAction
int32_t m_iOffset; // Offset to move (for m_iAction = 0)
int32_t m_bSmartOrder; // For Move-Actions: 0 - execute the action for each ID in the order they are placed in the array;
// 1 - smart execution to ensure that the relative order of all affected IDs is not changed.
int32_t m_iMatchMode; // File/Group match mode, see enum eRemoteMatchMode
int32_t m_iNrTrailingIDEntries; // Number of ID-entries, following to this structure
int32_t m_iNrTrailingNameEntries; // Number of Name-entries, following to this structure
@@ -511,7 +463,7 @@ struct SNZBHistoryResponseEntry
int32_t m_iParStatus; // See NZBInfo::EParStatus
int32_t m_iScriptStatus; // See NZBInfo::EScriptStatus
// for URL items (m_iKind = 2)
int32_t m_iUrlStatus; // See UrlInfo::EStatus
int32_t m_iUrlStatus; // See NZBInfo::EUrlStatus
// trailing data
//char m_szNicename[m_iNicenameLen]; // variable sized
};
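Like the other trailing-entry responses assembled in the server loops above, each history record is a fixed-size struct followed by its variable-length nicename, with every integer in network byte order and the nicename length including the terminating zero (the strlen(...) + 1 on the server side). A hedged client-side sketch of walking one entry; it assumes this header, <cstdio> and <arpa/inet.h> are included and omits bounds checking:

// Sketch only: read one SNZBHistoryResponseEntry plus its trailing name.
// 'szBufPtr' points into a fully received trailing-data buffer.
char* ReadHistoryEntry(char* szBufPtr)
{
	SNZBHistoryResponseEntry* pEntry = (SNZBHistoryResponseEntry*)szBufPtr;
	int iKind = ntohl(pEntry->m_iKind);               // 2 = URL item, per the comment above
	int iNicenameLen = ntohl(pEntry->m_iNicenameLen); // strlen + 1, plus any alignment padding
	szBufPtr += sizeof(SNZBHistoryResponseEntry);
	const char* szNicename = szBufPtr;                // the variable-sized m_szNicename
	szBufPtr += iNicenameLen;
	printf("history item: %s (kind %i)\n", szNicename, iKind);
	return szBufPtr;                                  // start of the next entry
}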
@@ -537,30 +489,4 @@ struct SNZBDownloadUrlResponse
//char m_szText[m_iTrailingDataLength]; // variable sized
};
// UrlQueue request
struct SNZBUrlQueueRequest
{
SNZBRequestBase m_MessageBase; // Must be the first in the struct
};
// UrlQueue response
struct SNZBUrlQueueResponse
{
SNZBResponseBase m_MessageBase; // Must be the first in the struct
int32_t m_iEntrySize; // Size of the SNZBUrlQueueResponseEntry-struct
int32_t m_iNrTrailingEntries; // Number of UrlQueue-entries, following to this structure
int32_t m_iTrailingDataLength; // Length of all UrlQueue-entries, following to this structure
// SNZBUrlQueueResponseEntry m_Entries[m_iNrTrailingEntries] // variable sized
};
// UrlQueue response entry
struct SNZBUrlQueueResponseEntry
{
int32_t m_iID; // ID of Url-entry
int32_t m_iURLLen; // Length of URL-string (m_szURL), following to this record
int32_t m_iNZBFilenameLen; // Length of NZBFilename-string (m_szNZBFilename), following to this record
//char m_szURL[m_iURLLen]; // variable sized
//char m_szNZBFilename[m_iNZBFilenameLen]; // variable sized
};
#endif
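Every message in this header starts with the same base struct whose signature and struct size the server fills in above. A hedged sketch of the sanity checks a client would typically run before touching any trailing data (an illustration assuming this header is included, not the actual client implementation):

#include <arpa/inet.h>
#include <cstdint>

// Sketch only: validate a response header before reading trailing entries.
// Field names follow the SNZBResponseBase usage in the server code above.
bool CheckResponseBase(const SNZBResponseBase* pBase, uint32_t iExpectedStructSize)
{
	// Wrong signature: a different program, or an nzbget speaking another
	// protocol version (the version lives in the low signature byte).
	if (ntohl(pBase->m_iSignature) != (uint32_t)NZBMESSAGE_SIGNATURE)
	{
		return false;
	}
	// Struct size mismatch: client and server headers are out of sync.
	return ntohl(pBase->m_iStructSize) == iExpectedStructSize;
}

Only after both checks pass would a client read m_iNrTrailingEntries and m_iTrailingDataLength from the concrete response struct and receive that many trailing bytes.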

Some files were not shown because too many files have changed in this diff.