author     dec05eba <dec05eba@protonmail.com>  2024-07-16 20:27:53 +0200
committer  dec05eba <dec05eba@protonmail.com>  2024-07-16 20:27:53 +0200
commit     cfc9ef1cade470f6d2a7ba67c625eb8c11ff2743 (patch)
tree       cab15e6b04b8773d98ed5165ab3efca3775e5f4f /src
parent     b3ef5d250c3cc4e35e82812bfd29fb19eb9bba83 (diff)
Add fallback urls for rss (right now, fall back from nyaa.si to nyaa.land)
Diffstat (limited to 'src')
-rw-r--r--  src/buffer.h             2
-rw-r--r--  src/download.c          75
-rw-r--r--  src/download.h           8
-rw-r--r--  src/fallback.c         193
-rw-r--r--  src/fallback.h          37
-rw-r--r--  src/html.c               7
-rw-r--r--  src/html.h               1
-rw-r--r--  src/main.c              47
-rw-r--r--  src/rss.c               32
-rw-r--r--  src/rss.h                5
-rw-r--r--  src/rss_html_common.c   19
-rw-r--r--  src/rss_html_common.h    6
12 files changed, 378 insertions(+), 54 deletions(-)
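
For reference, a minimal fallback.json matching what fallback_load_from_file() below parses: a json array of objects, each with a "source" string and a "fallbacks" array of strings. Only the nyaa.si -> nyaa.land mapping is implied by the commit message; the exact file contents, and the fact that entries carry their own scheme, are assumptions (the fallback string is used verbatim as the new url prefix, so a scheme is assumed here):

[
    {
        "source": "https://nyaa.si",
        "fallbacks": [
            "https://nyaa.land"
        ]
    }
]

main.c below loads this file from /usr/share/automedia/fallback.json.
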
diff --git a/src/buffer.h b/src/buffer.h
index f95e874..2a67fe0 100644
--- a/src/buffer.h
+++ b/src/buffer.h
@@ -4,7 +4,7 @@
#include <stddef.h>
/*
- TODO: Optimize small size buffers by using data and size members (16 bytes on x86)
+ TODO: Optimize small size buffers by using data and size members (16 bytes on x86-64)
instead of heap allocation
*/
diff --git a/src/download.c b/src/download.c
index 4154dcb..3b3692c 100644
--- a/src/download.c
+++ b/src/download.c
@@ -1,32 +1,91 @@
#include "download.h"
#include "buffer.h"
#include "program.h"
+#include "fallback.h"
+#include <stdio.h>
+
+int download_to_buffer(const char *url, Buffer *buffer, fallback *fall) {
+ char new_url[2048];
+ snprintf(new_url, sizeof(new_url), "%s", url);
+
+ fallback_item *fall_item = NULL;
+ if(fall) {
+ fall_item = fallback_get_from_url(fall, url);
+ if(fall_item && fall_item->source_to_use)
+ fallback_item_replace_url_with_fallback_url(fall_item, url, new_url, sizeof(new_url));
+ }
-int download_to_buffer(const char *url, Buffer *buffer) {
const char *args[] = {
"curl", "-s", "-L", "-f",
"-H", "user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36",
"-H", "Accept-Language: en-US,en;q=0.5",
"--compressed",
"--",
- url,
+ new_url,
NULL
};
+
int result = program_exec(args, program_buffer_write_callback, buffer);
- if(result != 0)
+ if(result == 0) {
+ buffer_append(buffer, "\0", 1);
+ return 0;
+ }
+
+ if(!fall || !fall_item || fall_item->source_to_use)
return result;
- buffer_append(buffer, "\0", 1);
- return result;
+
+ const char **fallbacks_it = buffer_begin(&fall_item->fallbacks);
+ const char **fallbacks_end = buffer_end(&fall_item->fallbacks);
+ for(; fallbacks_it != fallbacks_end; ++fallbacks_it) {
+ buffer_clear(buffer);
+ fallback_replace_url_with_fallback_url(url, *fallbacks_it, new_url, sizeof(new_url));
+ if(download_to_buffer(new_url, buffer, NULL) == 0) {
+ fprintf(stderr, "Download failed for url %s, replacing domain with %s for this sync session\n", url, *fallbacks_it);
+ fall_item->source_to_use = *fallbacks_it;
+ return 0;
+ }
+ }
+
+ return -1;
}
-int is_header_response_ok(const char *url) {
+int is_header_response_ok(const char *url, fallback *fall) {
+ char new_url[2048];
+ snprintf(new_url, sizeof(new_url), "%s", url);
+
+ fallback_item *fall_item = NULL;
+ if(fall) {
+ fall_item = fallback_get_from_url(fall, url);
+ if(fall_item && fall_item->source_to_use)
+ fallback_item_replace_url_with_fallback_url(fall_item, url, new_url, sizeof(new_url));
+ }
+
const char *args[] = {
"curl", "-s", "-L", "-f", "-I",
"-H", "user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36",
"-H", "Accept-Language: en-US,en;q=0.5",
"--",
- url,
+ new_url,
NULL
};
- return program_exec(args, NULL, NULL);
+
+ int result = program_exec(args, NULL, NULL);
+ if(result == 0)
+ return 0;
+
+ if(!fall || !fall_item || fall_item->source_to_use)
+ return result;
+
+ const char **fallbacks_it = buffer_begin(&fall_item->fallbacks);
+ const char **fallbacks_end = buffer_end(&fall_item->fallbacks);
+ for(; fallbacks_it != fallbacks_end; ++fallbacks_it) {
+ fallback_replace_url_with_fallback_url(url, *fallbacks_it, new_url, sizeof(new_url));
+ if(is_header_response_ok(new_url, NULL) == 0) {
+ fprintf(stderr, "Download failed for url %s, replacing domain with %s for this sync session\n", url, *fallbacks_it);
+ fall_item->source_to_use = *fallbacks_it;
+ return 0;
+ }
+ }
+
+ return -1;
}
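
A sketch of the retry flow the two functions above implement, assuming a fallback entry mapping nyaa.si to nyaa.land and illustrative urls. Note that the retry loop passes NULL as the fallback, so the recursive call cannot start another fallback loop of its own:

/*
  First download of a nyaa.si url in a sync session (urls are illustrative):

    download_to_buffer("https://nyaa.si/?page=rss&q=show", &buf, &fall);
      curl https://nyaa.si/?page=rss&q=show     -> fails
      curl https://nyaa.land/?page=rss&q=show   -> succeeds
      fall_item->source_to_use = "https://nyaa.land"

  Later downloads in the same session skip the failing domain up front:

    download_to_buffer("https://nyaa.si/?page=rss&q=other", &buf, &fall);
      source_to_use is set, so the url is rewritten to
      https://nyaa.land/?page=rss&q=other before curl runs
*/
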
diff --git a/src/download.h b/src/download.h
index 3c7f0aa..ff188cc 100644
--- a/src/download.h
+++ b/src/download.h
@@ -2,7 +2,11 @@
#define DOWNLOAD_H
struct Buffer;
-int download_to_buffer(const char *url, struct Buffer *buffer);
-int is_header_response_ok(const char *url);
+typedef struct fallback fallback;
+
+/* |fall| can be NULL */
+int download_to_buffer(const char *url, struct Buffer *buffer, fallback *fall);
+/* |fall| can be NULL */
+int is_header_response_ok(const char *url, fallback *fall);
#endif
diff --git a/src/fallback.c b/src/fallback.c
new file mode 100644
index 0000000..058b23a
--- /dev/null
+++ b/src/fallback.c
@@ -0,0 +1,193 @@
+#include "fallback.h"
+#include "fileutils.h"
+#include "../depends/cJSON.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+bool fallback_load_from_file(fallback *self, const char *filepath) {
+ memset(self, 0, sizeof(*self));
+
+ char *file_data;
+ long file_size;
+ if(file_get_content(filepath, &file_data, &file_size) != 0) {
+ fprintf(stderr, "Error: fallback_load_from_file: failed to read %s\n", filepath);
+ return false;
+ }
+
+ cJSON *json_root = cJSON_ParseWithLength(file_data, file_size);
+ if(!json_root) {
+ fprintf(stderr, "Error: fallback_load_from_file: failed to parse file %s as json\n", filepath);
+ goto error;
+ }
+ free(file_data);
+ file_data = NULL;
+
+ if(!cJSON_IsArray(json_root)) {
+ fprintf(stderr, "File %s contains malformed json. Expected json root element to be an array\n", filepath);
+ goto error;
+ }
+
+ buffer_init(&self->fallbacks);
+
+ const cJSON *fallback_item_json = NULL;
+ cJSON_ArrayForEach(fallback_item_json, json_root) {
+ if(!cJSON_IsObject(fallback_item_json))
+ continue;
+
+ const cJSON *source_json = cJSON_GetObjectItemCaseSensitive(fallback_item_json, "source");
+ if(!cJSON_IsString(source_json))
+ continue;
+
+ const cJSON *fallbacks_json = cJSON_GetObjectItemCaseSensitive(fallback_item_json, "fallbacks");
+ if(!cJSON_IsArray(fallbacks_json))
+ continue;
+
+ char *source_str = strdup(source_json->valuestring);
+ if(!source_str) {
+ fprintf(stderr, "Error: failed to clone string: %s\n", source_str);
+ abort();
+ }
+
+ fallback_item item;
+ item.source = source_str;
+ item.source_to_use = NULL;
+ buffer_init(&item.fallbacks);
+
+ const cJSON *fallback_str_json = NULL;
+ cJSON_ArrayForEach(fallback_str_json, fallbacks_json) {
+ if(!cJSON_IsString(fallback_str_json))
+ continue;
+
+ char *fallback = strdup(fallback_str_json->valuestring);
+ if(!fallback) {
+ fprintf(stderr, "Error: failed to clone string: %s\n", fallback);
+ abort();
+ }
+
+ buffer_append(&item.fallbacks, &fallback, sizeof(fallback));
+ }
+
+ buffer_append(&self->fallbacks, &item, sizeof(item));
+ }
+
+ cJSON_Delete(json_root);
+ free(file_data);
+ return true;
+
+ error:
+ cJSON_Delete(json_root);
+ free(file_data);
+ return false;
+}
+
+void fallback_deinit(fallback *self) {
+ fallback_item *items_it = buffer_begin(&self->fallbacks);
+ fallback_item *items_end = buffer_end(&self->fallbacks);
+ for(; items_it != items_end; ++items_it) {
+ if(items_it->source) {
+ free(items_it->source);
+ items_it->source = NULL;
+ }
+
+ char **fallbacks_it = buffer_begin(&items_it->fallbacks);
+ char **fallbacks_end = buffer_end(&items_it->fallbacks);
+ for(; fallbacks_it != fallbacks_end; ++fallbacks_it) {
+ if(*fallbacks_it) {
+ free(*fallbacks_it);
+ *fallbacks_it = NULL;
+ }
+ }
+ buffer_deinit(&items_it->fallbacks);
+ }
+ buffer_deinit(&self->fallbacks);
+}
+
+static void url_extract_domain(const char **url, int *len) {
+ if(*len >= 7 && strncmp("http://", *url, 7) == 0) {
+ *url += 7;
+ *len -= 7;
+ } else if(*len >= 8 && strncmp("https://", *url, 8) == 0) {
+ *url += 8;
+ *len -= 8;
+ }
+
+ const char *end = strchr(*url, '/');
+ if(end)
+ *len = end - *url;
+}
+
+fallback_item* fallback_get_from_url(fallback *self, const char *url) {
+ int url_len = strlen(url);
+ url_extract_domain(&url, &url_len);
+
+ fallback_item *items_it = buffer_begin(&self->fallbacks);
+ fallback_item *items_end = buffer_end(&self->fallbacks);
+ for(; items_it != items_end; ++items_it) {
+ int source_len = strlen(items_it->source);
+ const char *source = items_it->source;
+ url_extract_domain(&source, &source_len);
+
+ if(url_len == source_len && memcmp(url, source, url_len) == 0)
+ return items_it;
+
+ if(!items_it->source_to_use)
+ continue;
+
+ int source_to_use_len = strlen(items_it->source_to_use);
+ const char *source_to_use = items_it->source_to_use;
+ url_extract_domain(&source_to_use, &source_to_use_len);
+
+ if(url_len == source_to_use_len && memcmp(url, source_to_use, url_len) == 0)
+ return items_it;
+ }
+
+ return NULL;
+}
+
+void fallback_clear_sources_to_use(fallback *self) {
+ fallback_item *items_it = buffer_begin(&self->fallbacks);
+ fallback_item *items_end = buffer_end(&self->fallbacks);
+ for(; items_it != items_end; ++items_it) {
+ items_it->source_to_use = NULL;
+ }
+}
+
+static const char* get_url_part_after_domain(const char *url) {
+ int len = strlen(url);
+ if(len >= 7 && strncmp(url, "http://", 7) == 0) {
+ url += 7;
+ len -= 7;
+ } else if(len >= 8 && strncmp(url, "https://", 8) == 0) {
+ url += 8;
+ len -= 8;
+ }
+
+ const char *after_domain = strchr(url, '/');
+ if(after_domain)
+ return after_domain;
+ else
+ return url + len;
+}
+
+void fallback_replace_url_with_fallback_url(const char *url, const char *fallback_url, char *new_url, size_t new_url_len) {
+ const char *url_part_after_domain = get_url_part_after_domain(url);
+ snprintf(new_url, new_url_len, "%s%s", fallback_url, url_part_after_domain);
+}
+
+void fallback_replace_active_fallback_url_with_source_url(fallback *self, const char *url, char *new_url, size_t new_url_len) {
+ fallback_item *fall_item = fallback_get_from_url(self, url);
+ if(!fall_item || !fall_item->source_to_use) {
+ snprintf(new_url, new_url_len, "%s", url);
+ return;
+ }
+ fallback_replace_url_with_fallback_url(url, fall_item->source, new_url, new_url_len);
+}
+
+void fallback_item_replace_url_with_fallback_url(fallback_item *self, const char *url, char *new_url, size_t new_url_len) {
+ if(!self->source_to_use) {
+ snprintf(new_url, new_url_len, "%s", url);
+ return;
+ }
+ fallback_replace_url_with_fallback_url(url, self->source_to_use, new_url, new_url_len);
+}
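
A minimal sketch of the rewriting done by fallback_replace_url_with_fallback_url() above (urls are illustrative; the fallback string becomes the new prefix verbatim, so it is assumed to carry its own scheme):

char out[2048];

/* everything after scheme + domain is kept, the fallback becomes the new prefix */
fallback_replace_url_with_fallback_url("https://nyaa.si/?page=rss&q=show",
                                       "https://nyaa.land", out, sizeof(out));
/* out == "https://nyaa.land/?page=rss&q=show" */

/* an url with no path is just a domain swap */
fallback_replace_url_with_fallback_url("https://nyaa.si",
                                       "https://nyaa.land", out, sizeof(out));
/* out == "https://nyaa.land" */
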
diff --git a/src/fallback.h b/src/fallback.h
new file mode 100644
index 0000000..cfca233
--- /dev/null
+++ b/src/fallback.h
@@ -0,0 +1,37 @@
+#ifndef FALLBACK_H
+#define FALLBACK_H
+
+/*
+ Fallback reads fallback.json to get fallback urls for downloads (currently only for rss).
+ If the regular download fails, the download is retried with each fallback until one succeeds.
+ That fallback is then used for all downloads for that domain during the current sync.
+*/
+
+#include "buffer.h"
+#include <stdbool.h>
+
+typedef struct {
+ char *source;
+ /* If this is NULL (default) then |source| is used. This is set to the first fallback that succeeds if |source| fails and is reset to NULL the next sync. This is a reference. */
+ const char *source_to_use;
+ /* list of char* (malloced) */
+ Buffer fallbacks;
+} fallback_item;
+
+typedef struct fallback fallback;
+struct fallback {
+ /* list of fallback_item */
+ Buffer fallbacks;
+};
+
+bool fallback_load_from_file(fallback *self, const char *filepath);
+void fallback_deinit(fallback *self);
+
+/* Given an url, look for a fallback item that has a matching source or source_to_use (matching by host). If no match found, return NULL */
+fallback_item* fallback_get_from_url(fallback *self, const char *url);
+void fallback_clear_sources_to_use(fallback *self);
+void fallback_replace_url_with_fallback_url(const char *url, const char *fallback_url, char *new_url, size_t new_url_len);
+void fallback_replace_active_fallback_url_with_source_url(fallback *self, const char *url, char *new_url, size_t new_url_len);
+void fallback_item_replace_url_with_fallback_url(fallback_item *self, const char *url, char *new_url, size_t new_url_len);
+
+#endif /* FALLBACK_H */
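
A minimal usage sketch of this api, mirroring what main.c and download.c below do (the url is illustrative; the config path is the one main.c uses):

#include "fallback.h"
#include "download.h"
#include "buffer.h"

static int fetch_rss_with_fallback(void) {
    fallback fall;
    if(!fallback_load_from_file(&fall, "/usr/share/automedia/fallback.json"))
        return -1;

    Buffer buffer;
    buffer_init(&buffer);

    /* tries the url as-is first, then each configured fallback domain on failure */
    int result = download_to_buffer("https://nyaa.si/?page=rss&q=show", &buffer, &fall);

    buffer_deinit(&buffer);
    fallback_clear_sources_to_use(&fall); /* main.c resets this once per sync cycle */
    fallback_deinit(&fall);
    return result;
}
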
diff --git a/src/html.c b/src/html.c
index a83d33f..a0a5e73 100644
--- a/src/html.c
+++ b/src/html.c
@@ -364,7 +364,7 @@ int add_html(const char *name, const char *url, char *html_config_dir, char *pro
}
size_t num_download_items = download_items_start ? (((DownloadItemsData*)buffer_end(&download_items_buffer)) - download_items_start) : 0;
- result = write_plugin_json_to_file(html_tracked_dir, "data", url, updated, download_items_start, num_download_items, plugin_name);
+ result = write_plugin_json_to_file(html_tracked_dir, "data", url, updated, download_items_start, num_download_items, plugin_name, NULL);
if(result != 0) {
fprintf(stderr, "Failed to create %s/data\n", html_tracked_dir);
remove_recursive(html_tracked_dir);
@@ -426,7 +426,7 @@ static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *d
if(result != 0)
fprintf(stderr, "Failed while downloading html, url: %s\n", download_items_it->link);
- const char *notify_args[] = { "notify-send", "-a", "automedia", "-u", result == 0 ? "low" : "critical", "-t", "3000", "--", result == 0 ? "Download finished" : "Download failed", notify_msg, NULL };
+ const char *notify_args[] = { "notify-send", "-a", "automedia", "-u", result == 0 ? "low" : "critical", "-t", "5000", "--", result == 0 ? "Download finished" : "Download failed", notify_msg, NULL };
program_exec(notify_args, NULL, NULL);
if(result != 0)
@@ -447,9 +447,8 @@ static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *d
TrackedItem tracked_item;
tracked_item.title = tracked_html->title;
- tracked_item.link = tracked_html->link;
tracked_item.json_data = tracked_html->json_data;
- result = tracked_item_update_latest(&tracked_item, html_tracked_dir, added_download_items, NULL, timestamps, download_item_index);
+ result = tracked_item_update_latest(&tracked_item, html_tracked_dir, added_download_items, NULL, timestamps, download_item_index, NULL);
buffer_deinit(&json_element_buffer);
return result;
diff --git a/src/html.h b/src/html.h
index b604d48..5ce2809 100644
--- a/src/html.h
+++ b/src/html.h
@@ -2,6 +2,7 @@
#define HTML_H
typedef struct cJSON cJSON;
+typedef struct fallback fallback;
typedef struct {
char *plugin;
diff --git a/src/main.c b/src/main.c
index 2669545..de7bebe 100644
--- a/src/main.c
+++ b/src/main.c
@@ -10,6 +10,7 @@
#include "rss_html_common.h"
#include "html.h"
#include "track_remove_parser.h"
+#include "fallback.h"
#include "../depends/cJSON.h"
#include "alloc.h"
@@ -322,6 +323,12 @@ static void command_add(int argc, char **argv, char *rss_config_dir, char *html_
media_url = strip(media_url);
+ fallback fall;
+ if(!fallback_load_from_file(&fall, "/usr/share/automedia/fallback.json")) {
+ fprintf(stderr, "Error: command_add: failed to load fallbacks file (/usr/share/automedia/fallback.json)\n");
+ exit(1);
+ }
+
if(strcmp(media_type, "rss") == 0) {
int res = create_directory_recursive(rss_config_dir);
if(res != 0) {
@@ -329,7 +336,7 @@ static void command_add(int argc, char **argv, char *rss_config_dir, char *html_
exit(1);
}
- if(add_rss(media_name, media_url, rss_config_dir, start_after) != 0)
+ if(add_rss(media_name, media_url, rss_config_dir, start_after, &fall) != 0)
exit(1);
} else if(strcmp(media_type, "html") == 0) {
int res = create_directory_recursive(html_config_dir);
@@ -344,6 +351,8 @@ static void command_add(int argc, char **argv, char *rss_config_dir, char *html_
fprintf(stderr, "type should be either rss or html\n");
usage_add();
}
+
+ fallback_deinit(&fall);
}
sig_atomic_t automedia_running = 0;
@@ -357,8 +366,8 @@ int is_program_running() {
}
/* plugin is NULL for rss */
-typedef int (*IterateTrackedItemCallback)(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, void *userdata);
-static void iterate_tracked_items(char *config_dir, IterateTrackedItemCallback iterate_callback, void *userdata) {
+typedef int (*IterateTrackedItemCallback)(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, fallback *fall, void *userdata);
+static void iterate_tracked_items(char *config_dir, IterateTrackedItemCallback iterate_callback, fallback *fall, void *userdata) {
/*
TODO: Only iterate updated items. To make that work, sync_rss and sync_html need to be updated to update
the json_object with new downloaded items (and not only temporary downloaded items) and then also only
@@ -431,7 +440,7 @@ static void iterate_tracked_items(char *config_dir, IterateTrackedItemCallback i
free(data_file_content);
data_file_content = NULL;
- if(iterate_callback(dir->d_name, link_file_content, plugin_file_content, config_dir, json_data, userdata) != 0)
+ if(iterate_callback(dir->d_name, link_file_content, plugin_file_content, config_dir, json_data, fall, userdata) != 0)
fprintf(stderr, "Failed to sync %s\n", dir->d_name);
cleanup_item:
@@ -444,14 +453,14 @@ static void iterate_tracked_items(char *config_dir, IterateTrackedItemCallback i
closedir(d);
}
-static int iterate_tracked_item_rss_callback(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, void *userdata) {
+static int iterate_tracked_item_rss_callback(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, fallback *fall, void *userdata) {
(void)plugin;
TransmissionSession *transmission_session = userdata;
TrackedRss tracked_rss;
tracked_rss.title = title;
tracked_rss.link = link;
tracked_rss.json_data = json_data;
- return sync_rss(&tracked_rss, transmission_session, config_dir);
+ return sync_rss(&tracked_rss, transmission_session, config_dir, fall);
}
typedef struct {
@@ -459,7 +468,8 @@ typedef struct {
const char *download_dir;
} IterateHtmlItemUserdata;
-static int iterate_tracked_item_html_callback(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, void *userdata) {
+static int iterate_tracked_item_html_callback(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, fallback *fall, void *userdata) {
+ (void)fall;
if(!plugin) {
fprintf(stderr, "Missing plugin name for html item: %s\n", title);
return -1;
@@ -474,15 +484,15 @@ static int iterate_tracked_item_html_callback(char *title, char *link, char *plu
return sync_html(&tracked_html, iterate_html_item_userdata->program_dir, iterate_html_item_userdata->download_dir, config_dir);
}
-static void sync_tracked_rss(TransmissionSession *transmission_session, char *rss_config_dir) {
- iterate_tracked_items(rss_config_dir, iterate_tracked_item_rss_callback, transmission_session);
+static void sync_tracked_rss(TransmissionSession *transmission_session, char *rss_config_dir, fallback *fall) {
+ iterate_tracked_items(rss_config_dir, iterate_tracked_item_rss_callback, fall, transmission_session);
}
static void sync_tracked_html(char *html_config_dir, char *program_dir, const char *download_dir) {
IterateHtmlItemUserdata iterate_html_item_userdata;
iterate_html_item_userdata.program_dir = program_dir;
iterate_html_item_userdata.download_dir = download_dir;
- iterate_tracked_items(html_config_dir, iterate_tracked_item_html_callback, &iterate_html_item_userdata);
+ iterate_tracked_items(html_config_dir, iterate_tracked_item_html_callback, NULL, &iterate_html_item_userdata);
}
typedef struct {
@@ -511,7 +521,7 @@ static void torrent_list_check_new_downloads_callback(int id, const char *name,
if(is_finished) {
if(id < unfinished_torrents->size && unfinished_torrents->items[id] == 1) {
unfinished_torrents->items[id] = 0;
- const char *notify_args[] = { "notify-send", "-u", "low", "-a", "automedia", "-t", "3000", "--", "Download finished", name, NULL };
+ const char *notify_args[] = { "notify-send", "-u", "low", "-a", "automedia", "-t", "5000", "--", "Download finished", name, NULL };
program_exec(notify_args, NULL, NULL);
char item_path[PATH_MAX];
@@ -533,7 +543,7 @@ static void torrent_list_check_new_downloads_callback(int id, const char *name,
}
}
-static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *program_dir, const char *download_dir, int sync_rate_sec) {
+static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *program_dir, const char *download_dir, int sync_rate_sec, fallback *fall) {
if(transmission_is_daemon_running() != 0) {
if(transmission_start_daemon(download_dir) != 0) {
fprintf(stderr, "Failed to start torrent daemon\n");
@@ -560,7 +570,7 @@ static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *pro
transmission_list_torrents(&transmission_session, torrent_list_check_new_downloads_callback, &unfinished_torrents);
/* running is set to 0 in SIGINT signal handler (ctrl+c) */
while(automedia_running) {
- sync_tracked_rss(&transmission_session, rss_config_dir);
+ sync_tracked_rss(&transmission_session, rss_config_dir, fall);
sync_tracked_html(html_config_dir, program_dir, download_dir);
fprintf(stderr, "Finished syncing rss and html. Syncing again in %d minutes\n", sync_rate_sec / 60);
@@ -570,6 +580,8 @@ static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *pro
sleep(check_torrent_status_rate_sec);
++check_count;
}
+
+ fallback_clear_sources_to_use(fall);
}
free(unfinished_torrents.items);
@@ -627,6 +639,12 @@ static void command_sync(int argc, char **argv, char *rss_config_dir, char *html
fprintf(stderr, "Failed to create download directory %s\n", download_dir);
exit(1);
}
+
+ fallback fall;
+ if(!fallback_load_from_file(&fall, "/usr/share/automedia/fallback.json")) {
+ fprintf(stderr, "Error: command_sync: failed to load fallbacks file (/usr/share/automedia/fallback.json)\n");
+ exit(1);
+ }
/* Create a pid file because we only want to allow one instance of automedia to run at once */
int pid_file = open(automedia_pid_path, O_CREAT | O_EXCL | O_SYNC | O_RDWR, 0666);
@@ -677,8 +695,9 @@ static void command_sync(int argc, char **argv, char *rss_config_dir, char *html
close(pid_file);
const int sync_rate_sec = 15 * 60; /* every 15 min */
- sync_rss_html(rss_config_dir, html_config_dir, program_dir, download_dir, sync_rate_sec);
+ sync_rss_html(rss_config_dir, html_config_dir, program_dir, download_dir, sync_rate_sec, &fall);
unlink(automedia_pid_path);
+ fallback_deinit(&fall);
}
static void command_downloaded(int argc, char **argv, const char *rss_config_dir, const char *html_config_dir) {
diff --git a/src/rss.c b/src/rss.c
index c8bdd2d..780deee 100644
--- a/src/rss.c
+++ b/src/rss.c
@@ -220,7 +220,7 @@ static void url_escape(const char *str, char *output) {
}
}
-static int get_rss_url_from_episode_info(const char *episode_name, EpisodeInfo *episode_info, char *rss_url) {
+static int get_rss_url_from_episode_info(const char *episode_name, EpisodeInfo *episode_info, char *rss_url, fallback *fall) {
char *selected_submitter = NULL;
char response[512];
char group_name_escaped[1536];
@@ -247,7 +247,7 @@ static int get_rss_url_from_episode_info(const char *episode_name, EpisodeInfo *
return -1;
}
- if(is_header_response_ok(url) == 0) {
+ if(is_header_response_ok(url, fall) == 0) {
selected_submitter = response_str;
break;
} else {
@@ -296,13 +296,17 @@ static int get_rss_url_from_episode_info(const char *episode_name, EpisodeInfo *
return 0;
}
-int add_rss(const char *name, char *url, char *rss_config_dir, const char *start_after) {
+int add_rss(const char *name, char *url, char *rss_config_dir, const char *start_after, fallback *fall) {
int result = 0;
char rss_url[4096];
Buffer buffer;
buffer_init(&buffer);
- result = download_to_buffer(url, &buffer);
+
+ Buffer download_items_buffer;
+ buffer_init(&download_items_buffer);
+
+ result = download_to_buffer(url, &buffer, fall);
if(result != 0) {
EpisodeInfo episode_info;
if(episode_info_create_from_episode_name(&episode_info, url) != 0) {
@@ -310,7 +314,7 @@ int add_rss(const char *name, char *url, char *rss_config_dir, const char *start
goto cleanup;
}
- if(get_rss_url_from_episode_info(url, &episode_info, rss_url) != 0)
+ if(get_rss_url_from_episode_info(url, &episode_info, rss_url, fall) != 0)
goto cleanup;
/* User didn't want to track rss */
@@ -325,16 +329,13 @@ int add_rss(const char *name, char *url, char *rss_config_dir, const char *start
url = rss_url;
buffer_clear(&buffer);
- result = download_to_buffer(url, &buffer);
+ result = download_to_buffer(url, &buffer, fall);
if(result != 0) {
fprintf(stderr, "Failed to download rss: %s\n", url);
goto cleanup;
}
}
- Buffer download_items_buffer;
- buffer_init(&download_items_buffer);
-
char *rss_title = NULL;
result = parse_rss(buffer.data, &rss_title, rss_parse_add_callback, &download_items_buffer);
if(result != 0) {
@@ -427,7 +428,7 @@ int add_rss(const char *name, char *url, char *rss_config_dir, const char *start
}
size_t num_download_items = download_items_start ? (((DownloadItemsData*)buffer_end(&download_items_buffer)) - download_items_start) : 0;
- result = write_plugin_json_to_file(rss_tracked_dir, "data", url, updated, download_items_start, num_download_items, NULL);
+ result = write_plugin_json_to_file(rss_tracked_dir, "data", url, updated, download_items_start, num_download_items, NULL, fall);
if(result != 0) {
fprintf(stderr, "Failed to create %s/data\n", rss_tracked_dir);
remove_recursive(rss_tracked_dir);
@@ -484,7 +485,7 @@ static int int_min(int a, int b) {
return a < b ? a : b;
}
-static int add_torrents_in_reverse(TransmissionSession *transmission_session, Buffer *download_items_buffer, TrackedRss *tracked_rss, char *rss_tracked_dir) {
+static int add_torrents_in_reverse(TransmissionSession *transmission_session, Buffer *download_items_buffer, TrackedRss *tracked_rss, char *rss_tracked_dir, fallback *fall) {
int result = 0;
char *torrent_names[MAX_UPDATE_ITEMS];
DownloadItemsData *added_download_items[MAX_UPDATE_ITEMS];
@@ -514,9 +515,8 @@ static int add_torrents_in_reverse(TransmissionSession *transmission_session, Bu
TrackedItem tracked_item;
tracked_item.title = tracked_rss->title;
- tracked_item.link = tracked_rss->link;
tracked_item.json_data = tracked_rss->json_data;
- result = tracked_item_update_latest(&tracked_item, rss_tracked_dir, added_download_items, torrent_names, timestamps, torrent_name_index);
+ result = tracked_item_update_latest(&tracked_item, rss_tracked_dir, added_download_items, torrent_names, timestamps, torrent_name_index, fall);
for(int i = 0; i < torrent_name_index; ++i) {
free(torrent_names[i]);
@@ -526,7 +526,7 @@ static int add_torrents_in_reverse(TransmissionSession *transmission_session, Bu
return result;
}
-int sync_rss(TrackedRss *tracked_rss, TransmissionSession *transmission_session, char *rss_config_dir) {
+int sync_rss(TrackedRss *tracked_rss, TransmissionSession *transmission_session, char *rss_config_dir, fallback *fall) {
/* TODO: This can be cached */
int rss_config_dir_len = strlen(rss_config_dir);
@@ -538,7 +538,7 @@ int sync_rss(TrackedRss *tracked_rss, TransmissionSession *transmission_session,
Buffer rss_data_buffer;
buffer_init(&rss_data_buffer);
- result = download_to_buffer(tracked_rss->link, &rss_data_buffer);
+ result = download_to_buffer(tracked_rss->link, &rss_data_buffer, fall);
if(result != 0) {
fprintf(stderr, "Failed to download rss: %s\n", tracked_rss->link);
goto cleanup;
@@ -557,7 +557,7 @@ int sync_rss(TrackedRss *tracked_rss, TransmissionSession *transmission_session,
char *rss_tracked_dir = rss_config_dir;
strcat(rss_tracked_dir, "/tracked/");
- result = add_torrents_in_reverse(transmission_session, &download_items_buffer, tracked_rss, rss_tracked_dir);
+ result = add_torrents_in_reverse(transmission_session, &download_items_buffer, tracked_rss, rss_tracked_dir, fall);
if(result != 0) {
fprintf(stderr, "Failed while adding torrents for url: %s\n", tracked_rss->link);
goto cleanup;
diff --git a/src/rss.h b/src/rss.h
index a9cd65b..2508660 100644
--- a/src/rss.h
+++ b/src/rss.h
@@ -3,6 +3,7 @@
typedef struct cJSON cJSON;
struct TransmissionSession;
+typedef struct fallback fallback;
typedef struct {
char *title;
@@ -11,7 +12,7 @@ typedef struct {
} TrackedRss;
/* Modifies @rss_config_dir */
-int add_rss(const char *name, char *url, char *rss_config_dir, const char *start_after);
-int sync_rss(TrackedRss *tracked_rss, struct TransmissionSession *transmission_session, char *rss_config_dir);
+int add_rss(const char *name, char *url, char *rss_config_dir, const char *start_after, fallback *fall);
+int sync_rss(TrackedRss *tracked_rss, struct TransmissionSession *transmission_session, char *rss_config_dir, fallback *fall);
#endif
diff --git a/src/rss_html_common.c b/src/rss_html_common.c
index e5e44ab..c210452 100644
--- a/src/rss_html_common.c
+++ b/src/rss_html_common.c
@@ -1,4 +1,5 @@
#include "rss_html_common.h"
+#include "fallback.h"
#include "../depends/cJSON.h"
#include "fileutils.h"
#include <string.h>
@@ -7,7 +8,7 @@
#include <time.h>
#include <assert.h>
-int write_plugin_json_to_file(const char *dir, const char *filename, const char *url, const char *updated, DownloadItemsData *prev_download_items, size_t num_prev_download_items, const char *plugin_name) {
+int write_plugin_json_to_file(const char *dir, const char *filename, const char *url, const char *updated, DownloadItemsData *prev_download_items, size_t num_prev_download_items, const char *plugin_name, fallback *fall) {
int result = 0;
cJSON *json_body = cJSON_CreateObject();
@@ -35,12 +36,17 @@ int write_plugin_json_to_file(const char *dir, const char *filename, const char
goto cleanup;
}
+ char new_url[2048];
+ snprintf(new_url, sizeof(new_url), "%s", prev_download_items[i].link);
+ if(fall)
+ fallback_replace_active_fallback_url_with_source_url(fall, prev_download_items[i].link, new_url, sizeof(new_url));
+
char item_created_timestamp_fake[32];
snprintf(item_created_timestamp_fake, sizeof(item_created_timestamp_fake), "%ld", time_now - i);
cJSON_AddStringToObject(downloaded_item_json, "title", prev_download_items[i].title);
cJSON_AddStringToObject(downloaded_item_json, "time", item_created_timestamp_fake);
- cJSON_AddStringToObject(downloaded_item_json, "url", prev_download_items[i].link);
+ cJSON_AddStringToObject(downloaded_item_json, "url", new_url);
cJSON_AddBoolToObject(downloaded_item_json, "filler", 1);
cJSON_AddItemToArray(downloaded_json, downloaded_item_json);
@@ -72,7 +78,7 @@ static long timestamps_get_max(long *timestamps, size_t num_timestamps) {
}
/* TODO: If this fails in the middle, recover and update the next time somehow */
-int tracked_item_update_latest(TrackedItem *tracked_item, char *tracked_dir, DownloadItemsData **download_items, char **filenames, long *timestamps, int num_download_items) {
+int tracked_item_update_latest(TrackedItem *tracked_item, char *tracked_dir, DownloadItemsData **download_items, char **filenames, long *timestamps, int num_download_items, fallback *fall) {
if(num_download_items == 0)
return 0;
@@ -116,12 +122,17 @@ int tracked_item_update_latest(TrackedItem *tracked_item, char *tracked_dir, Dow
goto cleanup;
}
+ char new_url[2048];
+ snprintf(new_url, sizeof(new_url), "%s", download_items[i]->link);
+ if(fall)
+ fallback_replace_active_fallback_url_with_source_url(fall, download_items[i]->link, new_url, sizeof(new_url));
+
char timestamp[32];
snprintf(timestamp, sizeof(timestamp), "%ld", timestamps[i]);
cJSON_AddStringToObject(downloaded_item_json, "title", download_items[i]->title);
cJSON_AddStringToObject(downloaded_item_json, "time", timestamp);
- cJSON_AddStringToObject(downloaded_item_json, "url", download_items[i]->link);
+ cJSON_AddStringToObject(downloaded_item_json, "url", new_url);
if(filenames)
cJSON_AddStringToObject(downloaded_item_json, "filename", filenames[i]);
diff --git a/src/rss_html_common.h b/src/rss_html_common.h
index cc22b24..88b8c3e 100644
--- a/src/rss_html_common.h
+++ b/src/rss_html_common.h
@@ -6,6 +6,7 @@
#define MAX_UPDATE_ITEMS 10
typedef struct cJSON cJSON;
+typedef struct fallback fallback;
typedef struct {
const char *title;
@@ -14,11 +15,10 @@ typedef struct {
typedef struct {
const char *title;
- const char *link;
cJSON *json_data;
} TrackedItem;
-int write_plugin_json_to_file(const char *dir, const char *filename, const char *url, const char *updated, DownloadItemsData *prev_download_items, size_t num_prev_download_items, const char *plugin_name);
+int write_plugin_json_to_file(const char *dir, const char *filename, const char *url, const char *updated, DownloadItemsData *prev_download_items, size_t num_prev_download_items, const char *plugin_name, fallback *fall);
/*
Note: tracked_item.json_data becomes invalid after this call.
@@ -26,6 +26,6 @@ int write_plugin_json_to_file(const char *dir, const char *filename, const char
@tracked_dir is also modified and then restored at the end.
@download_items and @timestamps both need to be @num_download_items long. If @filenames is not NULL, then it also has to be @num_download_items long.
*/
-int tracked_item_update_latest(TrackedItem *tracked_item, char *tracked_dir, DownloadItemsData **download_items, char **filenames, long *timestamps, int num_download_items);
+int tracked_item_update_latest(TrackedItem *tracked_item, char *tracked_dir, DownloadItemsData **download_items, char **filenames, long *timestamps, int num_download_items, fallback *fall);
#endif
\ No newline at end of file