#include "html.h"
#include "fileutils.h"
#include "program.h"
#include "buffer.h"
#include "stringutils.h"
#include "rss_html_common.h"
#include "main.h"
#include "../depends/cJSON.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <signal.h>
#include <libgen.h>
#include <limits.h>
#include <assert.h>
#include <time.h>
#include <sys/types.h>
static int str_starts_with(const char *str, int len, const char *substr, int substr_len) {
return len >= substr_len && memcmp(str, substr, substr_len) == 0;
}
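/*
Extract the second-level domain from @url; the result is also used as the plugin name.
Illustrative examples: "https://www.mangadex.org/title/..." -> "mangadex",
"https://another.url.com" -> "another" (only the part before the first '.' is kept).
Returns 0 on success, -1 if the result does not fit in @domain.
*/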
static int url_extract_domain(const char *url, char *domain, int domain_len) {
int url_len = strlen(url);
if(str_starts_with(url, url_len, "http://", 7)) {
url += 7;
url_len -= 7;
} else if(str_starts_with(url, url_len, "https://", 8)) {
url += 8;
url_len -= 8;
}
if(str_starts_with(url, url_len, "www.", 4)) {
url += 4;
url_len -= 4;
}
char *end = strchr(url, '.');
if(end) {
int len = end - url;
if(len >= domain_len)
return -1;
memcpy(domain, url, len);
domain[len] = '\0';
} else {
if(url_len >= domain_len)
return -1;
memcpy(domain, url, url_len);
domain[url_len] = '\0';
}
return 0;
}
/*
The plugin should print the name and url of each item (a chapter, in the case of manga), and the
output list should stop when an item matches any item in the input. The output should be sorted
from newest to oldest.
The input should be in this format:
[
{
"title": "Example name",
"url": "https://example.com"
},
{
"title": "Another item",
"url": "https://another.url.com"
}
]
And the output should be in this format:
[
{
"name": "Example name",
"url": "https://example.com"
},
{
"name": "Another item",
"url": "https://another.url.com"
}
]
Note: there might be other fields in the input but those should be ignored by the plugin.
TODO: Rename input "title" to "name", to make the input and output formats match (easier to test with).
*/
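/*
Rough invocation sketch (plugin name, url and item are illustrative, not taken from this file):
  echo '[{"title": "Chapter 10", "url": "https://example.com/chapter/10"}]' \
    | /usr/share/automedia/plugins/example list "https://example.com/manga/example-manga"
The already-downloaded items are written to the plugin's stdin and the plugin prints the new
items, newest first, as the json array described above.
*/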
typedef int (*PluginListCallback)(const char *name, const char *url, void *userdata);
static cJSON* plugin_list(char *plugin_filepath, const char *url, cJSON *downloaded_items, PluginListCallback callback, void *userdata) {
int result;
Buffer buffer;
buffer_init(&buffer);
const char *args[] = { plugin_filepath, "list", url, NULL };
int process_id = -1;
int stdin_file = -1;
int stdout_file = -1;
result = program_exec_async(args, &process_id, &stdin_file, &stdout_file);
if(result != 0) {
fprintf(stderr, "Failed to launch plugin list for plugin %s\n", basename(plugin_filepath));
goto err_cleanup;
}
if(cJSON_IsArray(downloaded_items)) {
char *json_body_str = cJSON_PrintUnformatted(downloaded_items);
if(!json_body_str) {
fprintf(stderr, "Failed to convert downloaded items to json\n");
if(process_id != -1)
kill(process_id, SIGKILL);
close(stdin_file);
close(stdout_file);
goto err_cleanup;
}
size_t json_output_len = strlen(json_body_str);
if(write(stdin_file, json_body_str, json_output_len) != (ssize_t)json_output_len) {
free(json_body_str);
fprintf(stderr, "Failed to write all bytes to plugin list\n");
if(process_id != -1)
kill(process_id, SIGKILL);
close(stdin_file);
close(stdout_file);
goto err_cleanup;
}
free(json_body_str);
}
result = program_wait_until_exit(process_id, stdin_file, stdout_file, program_buffer_write_callback, &buffer);
if(result != 0) {
fprintf(stderr, "Failed to launch plugin list for plugin %s\n", basename(plugin_filepath));
goto err_cleanup;
}
cJSON *json_root = cJSON_ParseWithLength(buffer.data, buffer.size);
if(!json_root) {
fprintf(stderr, "Failed to load plugin %s list output as json\n", basename(plugin_filepath));
goto err_cleanup;
}
buffer_deinit(&buffer);
if(!cJSON_IsArray(json_root)) {
fprintf(stderr, "Failed to load plugin %s list output as json\n", basename(plugin_filepath));
cJSON_Delete(json_root);
goto err_cleanup;
}
cJSON *array_item = NULL;
cJSON_ArrayForEach(array_item, json_root) {
if(!cJSON_IsObject(array_item))
continue;
cJSON *name_json = cJSON_GetObjectItemCaseSensitive(array_item, "name");
cJSON *url_json = cJSON_GetObjectItemCaseSensitive(array_item, "url");
if(!cJSON_IsString(name_json) || !cJSON_IsString(url_json))
continue;
/* cJSON's valuestring is mutable, so the name can be modified in place before it is passed to the callback */
char *name = name_json->valuestring;
string_replace(name, '/', '_');
name = strip(name);
if(callback(name, url_json->valuestring, userdata) != 0)
break;
}
return json_root;
err_cleanup:
buffer_deinit(&buffer);
return NULL;
}
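/*
Note: the name/url pointers passed to the callback point into the cJSON tree returned by
plugin_list, so anything collected from them (such as the DownloadItemsData entries below)
is only valid while that cJSON tree is kept alive.
*/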
static int plugin_list_append_item_callback(const char *name, const char *url, void *userdata) {
Buffer *download_items_buffer = userdata;
DownloadItemsData download_items_data;
download_items_data.title = name;
download_items_data.link = url;
buffer_append(download_items_buffer, &download_items_data, sizeof(download_items_data));
return 0;
}
/* Store result in @plugin_filepath */
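/*
For example (hypothetical plugin name): get_plugin_filepath(program_dir, "mangadex", out)
resolves to "/usr/share/automedia/plugins/mangadex", or to
"/usr/share/automedia/plugins/mangadex.py" if only the python script exists.
*/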
static int get_plugin_filepath(const char *program_dir, const char *plugin_name, char *plugin_filepath) {
(void)program_dir;
strcpy(plugin_filepath, "/usr/share/automedia/plugins");
if(file_exists(plugin_filepath) != 0) {
fprintf(stderr, "Failed to find plugins directory\n");
return -1;
}
strcat(plugin_filepath, "/");
strcat(plugin_filepath, plugin_name);
if(file_exists(plugin_filepath) != 0) {
strcat(plugin_filepath, ".py");
if(file_exists(plugin_filepath) != 0) {
fprintf(stderr, "Plugin doesn't exist: %s\n", plugin_name);
return -1;
}
}
return 0;
}
typedef struct {
const char *str;
size_t size;
} string_view;
static int is_hex_num(char c) {
return (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F');
}
/* mangadex ids are in this format: aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee */
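/* e.g. (made-up id) "0123abcd-4567-89ef-0123-456789abcdef" passes, "not-a-mangadex-id" does not */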
static int looks_like_mangadex_id(const char *str) {
const int len = strlen(str);
if(len != 36)
return 0;
string_view parts[] = {
{ str, 8 },
{ str + 9, 4 },
{ str + 9 + 5, 4 },
{ str + 9 + 5 + 5, 4 },
{ str + 9 + 5 + 5 + 5, 12 }
};
for(size_t i = 0; i < 5; ++i) {
for(size_t j = 0; j < parts[i].size; ++j) {
const char c = parts[i].str[j];
if(!is_hex_num(c))
return 0;
}
}
for(size_t i = 0; i < 4; ++i) {
if(parts[i].str[parts[i].size] != '-')
return 0;
}
return 1;
}
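/*
add_html creates a tracked entry under <html_config_dir>/tracked/<name>/ containing "link"
(the url), "plugin" (the plugin filename), "updated" (a unix timestamp) and "data" (the plugin
json state). A ".in_progress" lock file marks the entry as incomplete while it is being written.
Layout sketch (filenames follow the code below):
  tracked/<name>/.in_progress
  tracked/<name>/link
  tracked/<name>/plugin
  tracked/<name>/updated
  tracked/<name>/data
*/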
int add_html(const char *name, const char *url, char *html_config_dir, char *program_dir, const char *start_after) {
int result = 0;
if(!name || name[0] == '\0') {
fprintf(stderr, "Name not provided or empty\n");
return -1;
}
char domain[2086];
if(looks_like_mangadex_id(url)) {
strcpy(domain, "mangadex");
} else {
if(url_extract_domain(url, domain, sizeof(domain)) != 0) {
fprintf(stderr, "Url %s is too long\n", url);
return -1;
}
if(domain[0] == '\0') {
fprintf(stderr, "Invalid url: %s\n", url);
return -1;
}
}
char domain_plugin_path[PATH_MAX];
if(get_plugin_filepath(program_dir, domain, domain_plugin_path) != 0)
return -1;
char *html_tracked_dir = html_config_dir;
strcat(html_tracked_dir, "/tracked/");
strcat(html_tracked_dir, name);
char in_progress_filepath[PATH_MAX];
strcpy(in_progress_filepath, html_tracked_dir);
strcat(in_progress_filepath, "/.in_progress");
Buffer download_items_buffer;
buffer_init(&download_items_buffer);
cJSON *json_root = NULL;
if(file_exists(html_tracked_dir) == 0 && file_exists(in_progress_filepath) != 0) {
fprintf(stderr, "You are already tracking %s\n", url);
result = -1;
goto cleanup;
}
json_root = plugin_list(domain_plugin_path, url, NULL, plugin_list_append_item_callback, &download_items_buffer);
if(!json_root) {
result = -1;
goto cleanup;
}
DownloadItemsData *download_items_start = NULL;
if(start_after) {
DownloadItemsData *download_items_it = buffer_begin(&download_items_buffer);
DownloadItemsData *download_items_end = buffer_end(&download_items_buffer);
for(; download_items_it != download_items_end; ++download_items_it) {
if(strcmp(start_after, download_items_it->title) == 0) {
download_items_start = download_items_it;
break;
}
}
if(!download_items_start) {
fprintf(stderr, "Failed to find %s in html %s\n", start_after, url);
result = -1;
goto cleanup;
}
}
result = create_directory_recursive(html_tracked_dir);
if(result != 0) {
fprintf(stderr, "Failed to create %s, error: %s\n", html_tracked_dir, strerror(result));
goto cleanup;
}
/*
Create an ".in_progress" file to prevent periodic sync from reading rss data
before we have finished adding all the data.
*/
remove(in_progress_filepath);
result = create_lock_file(in_progress_filepath);
if(result != 0) {
fprintf(stderr, "Failed to create %s/.in_progress\n", html_tracked_dir);
remove_recursive(html_tracked_dir);
goto cleanup;
}
result = file_overwrite_in_dir(html_tracked_dir, "link", url, strlen(url));
if(result != 0) {
fprintf(stderr, "Failed to create %s/link\n", html_tracked_dir);
remove_recursive(html_tracked_dir);
goto cleanup;
}
char *plugin_name = basename(domain_plugin_path);
result = file_overwrite_in_dir(html_tracked_dir, "plugin", plugin_name, strlen(plugin_name));
if(result != 0) {
fprintf(stderr, "Failed to create %s/link\n", html_tracked_dir);
remove_recursive(html_tracked_dir);
goto cleanup;
}
char updated[32];
assert(sizeof(time_t) == sizeof(long));
snprintf(updated, sizeof(updated), "%ld", (long)time(NULL));
result = file_overwrite_in_dir(html_tracked_dir, "updated", updated, strlen(updated));
if(result != 0) {
fprintf(stderr, "Failed to create %s/updated\n", html_tracked_dir);
remove_recursive(html_tracked_dir);
goto cleanup;
}
size_t num_download_items = download_items_start ? (((DownloadItemsData*)buffer_end(&download_items_buffer)) - download_items_start) : 0;
result = write_plugin_json_to_file(html_tracked_dir, "data", url, updated, download_items_start, num_download_items, plugin_name);
if(result != 0) {
fprintf(stderr, "Failed to create %s/data\n", html_tracked_dir);
remove_recursive(html_tracked_dir);
goto cleanup;
}
cleanup:
remove(in_progress_filepath);
buffer_deinit(&download_items_buffer);
cJSON_Delete(json_root);
return result;
}
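/*
The plugin lists items newest first, so this walks @download_items_buffer from the back to
download the oldest missing item first. At most MAX_UPDATE_ITEMS items are downloaded per run,
and the successfully downloaded items are recorded with tracked_item_update_latest.
*/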
static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *download_items_buffer, TrackedHtml *tracked_html, char *html_tracked_dir, const char *download_dir) {
int result = 0;
DownloadItemsData *added_download_items[MAX_UPDATE_ITEMS];
long timestamps[MAX_UPDATE_ITEMS];
char item_dir[PATH_MAX];
const char *path_components[] = { download_dir, tracked_html->title };
int item_dir_len = path_join(item_dir, path_components, 2);
Buffer json_element_buffer;
buffer_init(&json_element_buffer);
DownloadItemsData *download_items_it = buffer_end(download_items_buffer);
DownloadItemsData *download_items_end = buffer_begin(download_items_buffer);
download_items_it--;
download_items_end--;
int download_item_index = 0;
for(; download_items_it != download_items_end && download_item_index < MAX_UPDATE_ITEMS && is_program_running(); --download_items_it) {
char notify_msg[PATH_MAX];
const char *path_components[] = { tracked_html->title, download_items_it->title };
path_join(notify_msg, path_components, 2);
item_dir[item_dir_len] = '/';
strcpy(item_dir + item_dir_len + 1, download_items_it->title);
fprintf(stderr, "Starting download of html item: %s (title: %s)\n", download_items_it->link, notify_msg);
if(create_directory_recursive(item_dir) != 0) {
fprintf(stderr, "Failed to create directory for html item: %s\n", download_items_it->title);
result = -1;
break;
}
/* TODO: Make asynchronous */
const char *args[] = { plugin_filepath, "download", download_items_it->link, item_dir, NULL };
result = program_exec(args, NULL, NULL);
if(result != 0)
fprintf(stderr, "Failed while downloading html, url: %s\n", download_items_it->link);
const char *notify_args[] = { "notify-send", "-u", result == 0 ? "normal" : "critical", "-t", "10000", "--", result == 0 ? "Download finished" : "Download failed", notify_msg, NULL };
program_exec(notify_args, NULL, NULL);
if(result != 0)
break;
fprintf(stderr, "Download finished for html item: %s (title: %s)\n", download_items_it->link, notify_msg);
added_download_items[download_item_index] = download_items_it;
// TODO: What if downloads are so fast that two items get the same timestamp? Maybe subtract
// MAX_UPDATE_ITEMS from the current time and add 1 each loop, or use millisecond/microsecond timestamps?
timestamps[download_item_index] = time(NULL);
++download_item_index;
}
TrackedItem tracked_item;
tracked_item.title = tracked_html->title;
tracked_item.link = tracked_html->link;
tracked_item.json_data = tracked_html->json_data;
result = tracked_item_update_latest(&tracked_item, html_tracked_dir, added_download_items, NULL, timestamps, download_item_index);
buffer_deinit(&json_element_buffer);
return result;
}
/* TODO: Make asynchronous. Right now this will only complete when the whole chapter download completes */
int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download_dir, char *html_config_dir) {
/* TODO: This can be cached */
int html_config_dir_len = strlen(html_config_dir);
fprintf(stderr, "Syncing %s\n", tracked_html->title);
char plugin_filepath[PATH_MAX];
/* This will check with ${tracked_html->plugin}.py as well, but that is fine */
if(get_plugin_filepath(program_dir, tracked_html->plugin, plugin_filepath) != 0)
return -1;
cJSON *downloaded_items = cJSON_GetObjectItemCaseSensitive(tracked_html->json_data, "downloaded");
if(!cJSON_IsArray(downloaded_items)) {
fprintf(stderr, "Corrupt json for html item: %s\n", tracked_html->title);
return -1;
}
Buffer download_items_buffer;
buffer_init(&download_items_buffer);
int result = 0;
cJSON *json_root = plugin_list(plugin_filepath, tracked_html->link, downloaded_items, plugin_list_append_item_callback, &download_items_buffer);
if(!json_root) {
result = -1;
goto cleanup;
}
char *html_tracked_dir = html_config_dir;
strcat(html_tracked_dir, "/tracked/");
result = download_html_items_in_reverse(plugin_filepath, &download_items_buffer, tracked_html, html_tracked_dir, download_dir);
if(result != 0) {
fprintf(stderr, "Failed while download html item for url: %s\n", tracked_html->link);
goto cleanup;
}
char updated[32];
snprintf(updated, sizeof(updated), "%ld", (long)time(NULL));
strcat(html_tracked_dir, tracked_html->title);
result = file_overwrite_in_dir(html_tracked_dir, "synced", updated, strlen(updated));
if(result != 0) {
fprintf(stderr, "Failed to update %s/synced\n", html_tracked_dir);
goto cleanup;
}
cleanup:
cJSON_Delete(json_root);
buffer_deinit(&download_items_buffer);
html_config_dir[html_config_dir_len] = '\0';
return result;
}