author    dec05eba <dec05eba@protonmail.com>    2021-06-17 02:03:11 +0200
committer dec05eba <dec05eba@protonmail.com>    2021-06-17 02:03:11 +0200
commit    d9d1363ee5a06eb6632e15a14afe64ec80c3df56 (patch)
tree      2c2fb02d290530d96e3f389588b272c0a410c65f
parent    f869fff9e9b27562837e90400bb3cca098a0376a (diff)
Do not show start after items in the downloaded list
-rw-r--r--  TODO                   3
-rwxr-xr-x  automedia              bin 120760 -> 120760 bytes
-rw-r--r--  src/html.c             1
-rw-r--r--  src/main.c             10
-rw-r--r--  src/rss_html_common.c  1
5 files changed, 8 insertions, 7 deletions
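
The hunks below tag the "start after" entries, which are written into an item's data file as already-downloaded placeholders so syncing resumes after them, with a "filler" boolean and then skip those entries when the downloaded list is built. A minimal standalone sketch of what such an entry looks like when constructed with cJSON; the field names come from the rss_html_common.c hunk below, while the values and the include path are assumptions:

    #include <stdio.h>
    #include <cjson/cJSON.h> /* include path is an assumption; the project may bundle cJSON differently */

    int main(void) {
        /* Hypothetical "start after" entry; field names are taken from the
           rss_html_common.c hunk below, the values are made up. */
        cJSON *downloaded_json = cJSON_CreateArray();
        cJSON *downloaded_item_json = cJSON_CreateObject();
        cJSON_AddStringToObject(downloaded_item_json, "title", "Previously released item");
        cJSON_AddStringToObject(downloaded_item_json, "time", "2021-06-17 00:00:00");
        cJSON_AddStringToObject(downloaded_item_json, "url", "https://example.com/item");
        cJSON_AddBoolToObject(downloaded_item_json, "filler", 1); /* marks the entry as a placeholder */
        cJSON_AddItemToArray(downloaded_json, downloaded_item_json);

        /* Print the resulting JSON so the on-disk shape is visible. */
        char *json_str = cJSON_Print(downloaded_json);
        if(json_str) {
            puts(json_str);
            cJSON_free(json_str);
        }
        cJSON_Delete(downloaded_json);
        return 0;
    }
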
diff --git a/TODO b/TODO
index 648b9aa..7f0e11b 100644
--- a/TODO
+++ b/TODO
@@ -6,5 +6,4 @@ Add rate limiting for downloading manga.
Verify path lengths. Currently there is limit to 255 characters for remote names, but not local + remote names.
Deal with replacing of / with _.
Handle strdup failure.
-Make downloading manga asynchronous, just like torrents. And have timeout for download.
-Start after items will be missing from downloaded command for rss but not manga (because of filenames). This is ok but it looks weird.
\ No newline at end of file
+Make downloading manga asynchronous, just like torrents. And have timeout for download.
\ No newline at end of file
diff --git a/automedia b/automedia
index 09a04b7..5fe737d 100755
--- a/automedia
+++ b/automedia
Binary files differ
diff --git a/src/html.c b/src/html.c
index 6e27d13..c163ec8 100644
--- a/src/html.c
+++ b/src/html.c
@@ -80,6 +80,7 @@ static int url_extract_domain(const char *url, char *domain, int domain_len) {
}
]
+ Note: there might be other fields in the input but those should be ignored by the plugin.
TODO: Rename input "title" to "url", to make input and output match (easier to test with).
*/
typedef int (*PluginListCallback)(const char *name, const char *url, void *userdata);
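
The Note added above says the plugin should ignore any input fields it does not recognize. A minimal C sketch of that behavior using cJSON, the parser used elsewhere in this diff; the input object, its values, and the include path are assumptions, and the real plugins may be implemented differently:

    #include <stdio.h>
    #include <cjson/cJSON.h> /* include path is an assumption */

    int main(void) {
        /* Hypothetical input item: "title" is the field the TODO above wants renamed
           to "url"; "extra" stands in for any field the plugin does not recognize. */
        const char *input = "{ \"title\": \"https://example.com/item\", \"extra\": 123 }";

        cJSON *item = cJSON_Parse(input);
        if(!item)
            return 1;

        /* Looking up only the known key leaves every other member of the object untouched. */
        const cJSON *title_json = cJSON_GetObjectItemCaseSensitive(item, "title");
        if(cJSON_IsString(title_json))
            printf("url: %s\n", title_json->valuestring);

        cJSON_Delete(item);
        return 0;
    }
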
diff --git a/src/main.c b/src/main.c
index b47e24a..28130f6 100644
--- a/src/main.c
+++ b/src/main.c
@@ -108,13 +108,15 @@ static void data_file_get_downloaded(const char *dir_name, const char *data_file
goto cleanup;
}
- size_t dir_name_len = strlen(dir_name);
-
const cJSON *downloaded_item_json = NULL;
cJSON_ArrayForEach(downloaded_item_json, downloaded_json) {
if(!cJSON_IsObject(downloaded_item_json))
continue;
+ const cJSON *filler_json = cJSON_GetObjectItemCaseSensitive(downloaded_item_json, "filler");
+ if(cJSON_IsTrue(filler_json))
+ continue;
+
const cJSON *time_json = cJSON_GetObjectItemCaseSensitive(downloaded_item_json, "time");
if(!time_json || !cJSON_IsString(time_json))
continue;
@@ -128,9 +130,7 @@ static void data_file_get_downloaded(const char *dir_name, const char *data_file
strcpy(title, filename_json->valuestring);
} else if(title_json && cJSON_IsString(title_json)) {
if(is_html) {
- strcpy(title, dir_name);
- title[dir_name_len] = '/';
- strcpy(title + dir_name_len + 1, title_json->valuestring);
+ snprintf(title, sizeof(title), "%s/%s", dir_name, title_json->valuestring);
} else {
strcpy(title, title_json->valuestring);
}
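
The second part of this hunk replaces the manual strcpy/offset construction of "<dir_name>/<title>" with a single snprintf call, which writes at most sizeof(title) bytes and always NUL-terminates, silently truncating if the result does not fit, whereas the strcpy version relied on the caller to guarantee enough space. A minimal sketch of the pattern with hypothetical buffer size and values:

    #include <stdio.h>

    int main(void) {
        /* Hypothetical values; in data_file_get_downloaded they come from the
           tracked item's directory and the data file JSON. */
        const char *dir_name = "example-dir";
        const char *item_title = "Chapter 1";

        char title[512]; /* hypothetical size; the real buffer is declared earlier in the function */

        /* Writes at most sizeof(title) bytes including the NUL terminator,
           truncating the result instead of overflowing the buffer. */
        snprintf(title, sizeof(title), "%s/%s", dir_name, item_title);
        printf("%s\n", title);
        return 0;
    }
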
diff --git a/src/rss_html_common.c b/src/rss_html_common.c
index 567671e..e5e44ab 100644
--- a/src/rss_html_common.c
+++ b/src/rss_html_common.c
@@ -41,6 +41,7 @@ int write_plugin_json_to_file(const char *dir, const char *filename, const char
cJSON_AddStringToObject(downloaded_item_json, "title", prev_download_items[i].title);
cJSON_AddStringToObject(downloaded_item_json, "time", item_created_timestamp_fake);
cJSON_AddStringToObject(downloaded_item_json, "url", prev_download_items[i].link);
+ cJSON_AddBoolToObject(downloaded_item_json, "filler", 1);
cJSON_AddItemToArray(downloaded_json, downloaded_item_json);
}
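
One property of the "filler" round trip worth noting: cJSON_GetObjectItemCaseSensitive returns NULL when a key is missing and cJSON_IsTrue(NULL) is false, so entries in data files written before this change (which have no "filler" field) keep showing up in the downloaded list as before. A minimal sketch with a made-up old-format entry; the include path is an assumption:

    #include <stdio.h>
    #include <cjson/cJSON.h> /* include path is an assumption */

    int main(void) {
        /* A made-up entry in the pre-change format: no "filler" field. */
        cJSON *old_entry = cJSON_Parse("{ \"title\": \"old item\", \"time\": \"2021-01-01 00:00:00\" }");
        if(!old_entry)
            return 1;

        /* Missing key -> NULL, and cJSON_IsTrue(NULL) is false, so the entry is not skipped. */
        const cJSON *filler_json = cJSON_GetObjectItemCaseSensitive(old_entry, "filler");
        printf("skip entry: %s\n", cJSON_IsTrue(filler_json) ? "yes" : "no");

        cJSON_Delete(old_entry);
        return 0;
    }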