Improved archive management for subscription downloads

Downloads that fail because the video already exists in the archive now appear as errors in the download manager

Fixed an issue where subscription videos would not be redownloaded if the previous failed download had not been cleared from the download manager
This commit is contained in:
Isaac Abadi
2021-09-25 22:33:22 -06:00
parent db53a12635
commit 5dd48035fb
2 changed files with 21 additions and 15 deletions

View File

@@ -279,7 +279,9 @@ async function downloadQueuedFile(download_uid) {
} else if (output) { } else if (output) {
if (output.length === 0 || output[0].length === 0) { if (output.length === 0 || output[0].length === 0) {
// ERROR! // ERROR!
logger.warn(`No output received for video download, check if it exists in your archive.`) const error_message = `No output received for video download, check if it exists in your archive.`;
await handleDownloadError(download_uid, error_message);
logger.warn(error_message);
resolve(false); resolve(false);
return; return;
} }
@@ -344,9 +346,10 @@ async function downloadQueuedFile(download_uid) {
} }
if (options.merged_string !== null && options.merged_string !== undefined) { if (options.merged_string !== null && options.merged_string !== undefined) {
let current_merged_archive = fs.readFileSync(path.join(fileFolderPath, `merged_${type}.txt`), 'utf8'); const archive_folder = getArchiveFolder(fileFolderPath, options, download['user_uid']);
let diff = current_merged_archive.replace(options.merged_string, ''); const current_merged_archive = fs.readFileSync(path.join(archive_folder, `merged_${type}.txt`), 'utf8');
const archive_path = download['user_uid'] ? path.join(fileFolderPath, 'archives', `archive_${type}.txt`) : path.join(archivePath, `archive_${type}.txt`); const diff = current_merged_archive.replace(options.merged_string, '');
const archive_path = path.join(archive_folder, `archive_${type}.txt`);
fs.appendFileSync(archive_path, diff); fs.appendFileSync(archive_path, diff);
} }
@@ -455,23 +458,16 @@ exports.generateArgs = async (url, type, options, user_uid = null, simulated = f
let useYoutubeDLArchive = config_api.getConfigItem('ytdl_use_youtubedl_archive'); let useYoutubeDLArchive = config_api.getConfigItem('ytdl_use_youtubedl_archive');
if (useYoutubeDLArchive) { if (useYoutubeDLArchive) {
let archive_folder = null; const archive_folder = getArchiveFolder(fileFolderPath, options, user_uid);
if (options.customArchivePath) {
archive_folder = path.join(options.customArchivePath);
} else if (user_uid) {
archive_folder = path.join(fileFolderPath, 'archives');
} else {
archive_folder = path.join(archivePath);
}
const archive_path = path.join(archive_folder, `archive_${type}.txt`); const archive_path = path.join(archive_folder, `archive_${type}.txt`);
await fs.ensureDir(archive_folder); await fs.ensureDir(archive_folder);
await fs.ensureFile(archive_path); await fs.ensureFile(archive_path);
let blacklist_path = path.join(archive_folder, `blacklist_${type}.txt`); const blacklist_path = path.join(archive_folder, `blacklist_${type}.txt`);
await fs.ensureFile(blacklist_path); await fs.ensureFile(blacklist_path);
let merged_path = path.join(fileFolderPath, `merged_${type}.txt`); const merged_path = path.join(archive_folder, `merged_${type}.txt`);
await fs.ensureFile(merged_path); await fs.ensureFile(merged_path);
// merges blacklist and regular archive // merges blacklist and regular archive
let inputPathList = [archive_path, blacklist_path]; let inputPathList = [archive_path, blacklist_path];
@@ -624,3 +620,13 @@ exports.generateNFOFile = (info, output_path) => {
const xml = doc.end({ prettyPrint: true }); const xml = doc.end({ prettyPrint: true });
fs.writeFileSync(output_path, xml); fs.writeFileSync(output_path, xml);
} }
/**
 * Resolve the folder that holds the youtube-dl archive files for a download.
 *
 * Precedence: an explicit custom archive path always wins; downloads tied to a
 * user keep per-user archives under the download folder's `archives` subdir;
 * anonymous downloads fall back to the module-level global `archivePath`.
 *
 * @param {string} fileFolderPath - Base folder of the download's files.
 * @param {Object} options - Download options; `customArchivePath` overrides everything.
 * @param {string|null} user_uid - Owning user's UID, or null/undefined for anonymous downloads.
 * @returns {string} Normalized path to the archive folder.
 */
function getArchiveFolder(fileFolderPath, options, user_uid) {
    if (options.customArchivePath) return path.join(options.customArchivePath);
    return user_uid ? path.join(fileFolderPath, 'archives') : path.join(archivePath);
}

View File

@@ -431,7 +431,7 @@ async function getFilesToDownload(sub, output_jsons) {
const files_to_download = []; const files_to_download = [];
for (let i = 0; i < output_jsons.length; i++) { for (let i = 0; i < output_jsons.length; i++) {
const output_json = output_jsons[i]; const output_json = output_jsons[i];
const file_missing = !(await db_api.getRecord('files', {sub_id: sub.id, url: output_json['webpage_url']})) && !(await db_api.getRecord('download_queue', {sub_id: sub.id, url: output_json['webpage_url'], error: null})); const file_missing = !(await db_api.getRecord('files', {sub_id: sub.id, url: output_json['webpage_url']})) && !(await db_api.getRecord('download_queue', {sub_id: sub.id, url: output_json['webpage_url'], error: null, finished: false}));
if (file_missing) { if (file_missing) {
const file_with_path_exists = await db_api.getRecord('files', {sub_id: sub.id, path: output_json['_filename']}); const file_with_path_exists = await db_api.getRecord('files', {sub_id: sub.id, path: output_json['_filename']});
if (file_with_path_exists) { if (file_with_path_exists) {