-rw-r--r--  TODO                   |  4
-rwxr-xr-x  plugins/mangakatana.py |  2
-rwxr-xr-x  plugins/manganelo.py   | 32
-rw-r--r--  src/episode.c          |  4
-rw-r--r--  src/html.c             | 12
-rw-r--r--  src/html.h             |  4
-rw-r--r--  src/main.c             | 40
7 files changed, 67 insertions(+), 31 deletions(-)
diff --git a/TODO b/TODO
index 375c795..898e7f4 100644
--- a/TODO
+++ b/TODO
@@ -16,3 +16,7 @@ Use fallbacks for manga download too.
Put anime and manga downloads into separate subdirectories in the download directory.
Automatically cleanup downloaded (and seeded) torrents in transmission.
+
+Add command to migrate from manganelo/mangakakalot to mangakatana/mangadex.
+
+Instead of doing multiple requests for rss, do one request per sub group and keep only the entries that match the track filter.
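
The migrate command proposed above would mostly be a bulk rewrite of tracked links. A minimal sketch in Python, assuming tracked items are stored as one json file per item with a "link" field (a hypothetical layout; automedia's actual tracked format is not shown here, and mangakatana/mangadex use different series slugs, so a plain domain swap is only a starting point):

    import json
    import os

    # Hypothetical domain mapping for the proposed migrate command.
    DOMAIN_MAP = {
        "mangakakalot.com": "mangakatana.com",
        "manganelo.com": "mangakatana.com",
    }

    def migrate_tracked_dir(tracked_dir):
        for name in os.listdir(tracked_dir):
            path = os.path.join(tracked_dir, name)
            with open(path) as f:
                item = json.load(f)
            for old_domain, new_domain in DOMAIN_MAP.items():
                item["link"] = item.get("link", "").replace(old_domain, new_domain)
            with open(path, "w") as f:
                json.dump(item, f)
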
diff --git a/plugins/mangakatana.py b/plugins/mangakatana.py
index 85fd8d0..586c4ee 100755
--- a/plugins/mangakatana.py
+++ b/plugins/mangakatana.py
@@ -110,7 +110,7 @@ def get_javascript_string_arrays(js_source):
return arrays
arr = js_source[start:end].replace("'", "").split(",")
- arrays.extend(list(filter(None, arr)))
+ arrays.extend(list(filter(lambda x: x is not None and ".com" in x, arr)))
start = end + 1
def uniq_str_arr(arr):
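
The stricter filter keeps only url-like entries instead of every non-empty string, which drops unrelated string arrays from the obfuscated javascript. Roughly, with a made-up example array:

    arr = ["", "token", "https://i1.example.com/01.jpg", "https://i1.example.com/02.jpg"]

    # Old behaviour: only empty strings were dropped.
    list(filter(None, arr))
    # -> ['token', 'https://i1.example.com/01.jpg', 'https://i1.example.com/02.jpg']

    # New behaviour: only entries containing ".com" survive.
    list(filter(lambda x: x is not None and ".com" in x, arr))
    # -> ['https://i1.example.com/01.jpg', 'https://i1.example.com/02.jpg']
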
diff --git a/plugins/manganelo.py b/plugins/manganelo.py
index bbbe856..0391eec 100755
--- a/plugins/manganelo.py
+++ b/plugins/manganelo.py
@@ -9,7 +9,8 @@ import json
from lxml import etree
headers = {
- 'User-Agent': "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36"
+ 'User-Agent': "Mozilla/5.0 (X11; Linux x86_64; rv:136.0) Gecko/20100101 Firefox/136.0",
+ 'Referer': "https://www.nelomanga.com/"
}
def usage():
@@ -36,15 +37,15 @@ if len(sys.argv) < 2:
def download_file(url, save_path):
file_size = 0
- headers = {
- "accept-language": "en-US,en;q=0.9",
- "accept": "image/webp,image/apng,image/*,*/*;q=0.8",
- "sec-fetch-site": "cross-site",
- "sec-fetch-mode": "no-cors",
- "sec-fetch-dest": "image",
- "referer": "https://manganelo.com/",
- "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36"
- }
+ # headers = {
+ # "accept-language": "en-US,en;q=0.9",
+ # "accept": "image/webp,image/apng,image/*,*/*;q=0.8",
+ # "sec-fetch-site": "cross-site",
+ # "sec-fetch-mode": "no-cors",
+ # "sec-fetch-dest": "image",
+ # "referer": "https://manganelo.com/",
+ # "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36"
+ # }
with requests.get(url, stream=True, headers=headers, timeout=30) as response:
if not response.ok:
return 0
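
With the per-request header dict commented out, the requests.get call falls through to the module-level headers, so image downloads now send the same Firefox user agent and nelomanga referer as the page requests. A condensed sketch of the resulting function (the streaming body is assumed; only the request line is visible in this hunk):

    import requests

    headers = {  # module-level headers from the top of the plugin
        'User-Agent': "Mozilla/5.0 (X11; Linux x86_64; rv:136.0) Gecko/20100101 Firefox/136.0",
        'Referer': "https://www.nelomanga.com/",
    }

    def download_file(url, save_path):
        file_size = 0
        with requests.get(url, stream=True, headers=headers, timeout=30) as response:
            if not response.ok:
                return 0
            with open(save_path, "wb") as f:
                for chunk in response.iter_content(chunk_size=8192):
                    f.write(chunk)
                    file_size += len(chunk)
        return file_size
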
@@ -99,7 +100,11 @@ def redirect_migrated_url(url, tree, is_chapter):
return tree
def list_chapters(url, chapter_list_input):
+ url = url.replace("mangakakalot", "manganelo").replace("manganelo", "nelomanga")
response = requests.get(url, timeout=30, headers=headers)
+ if not response.ok:
+ url = url.replace("_", "-")
+ response = requests.get(url, timeout=30, headers=headers)
response.raise_for_status()
seen_titles = set()
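
The chained replace maps both mangakakalot and manganelo urls onto the nelomanga domain before fetching, and on a failed response retries once with underscores swapped for hyphens, presumably because some series slugs changed format in the domain move. The same normalization pulled out as a helper (hypothetical name, for illustration):

    import requests

    def fetch_series_page(url, headers):
        # mangakakalot -> manganelo -> nelomanga, so both old domains
        # end up on nelomanga.
        url = url.replace("mangakakalot", "manganelo").replace("manganelo", "nelomanga")
        response = requests.get(url, timeout=30, headers=headers)
        if not response.ok:
            # Retry with the slug rewritten from underscores to hyphens.
            response = requests.get(url.replace("_", "-"), timeout=30, headers=headers)
        response.raise_for_status()
        return response
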
@@ -112,7 +117,7 @@ def list_chapters(url, chapter_list_input):
for item in chapter_list_input:
chapter_url = item.get("url")
if chapter_url and len(chapter_url) > 0:
- seen_urls.add(chapter_url.replace("mangakakalot", "manganelo"))
+ seen_urls.add(chapter_url.replace("mangakakalot", "manganelo").replace("manganelo", "nelomanga"))
tree = etree.HTML(response.text)
tree = redirect_migrated_url(url, tree, False)
@@ -136,11 +141,12 @@ def list_chapters(url, chapter_list_input):
print(json.dumps(chapters))
def download_chapter_images(url, download_dir, use_backup_server):
+ url = url.replace("mangakakalot", "manganelo").replace("manganelo", "nelomanga")
cookies = {}
try:
new_headers = headers.copy()
new_headers['referer'] = url
- response = requests.get('https://mangakakalot.com/change_content_s2' if use_backup_server else 'https://mangakakalot.com/change_content_s1', headers=new_headers, allow_redirects=False)
+ response = requests.get('https://nelomanga.com/change_content_s2' if use_backup_server else 'https://nelomanga.com/change_content_s1', headers=new_headers, allow_redirects=False)
response.raise_for_status()
cookies = response.cookies
except requests.HTTPError:
@@ -161,7 +167,7 @@ def download_chapter_images(url, download_dir, use_backup_server):
image_path = os.path.join(download_dir, image_name)
print("Downloading {} to {}".format(image_source, image_path))
file_size = download_file(image_source, image_path)
- if file_size < 255:
+ if file_size < 100:
print("resource temporary unavailable: %s" % image_source)
return False
img_number += 1
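
Lowering the sanity threshold from 255 to 100 bytes means small but valid images are no longer mistaken for rate-limit placeholders; only near-empty responses abort the chapter. Whether the caller then retries on the backup server is not shown in this diff; a hypothetical wrapper would look like:

    def download_chapter(url, download_dir):
        # Try the primary image server, then the s2 backup server.
        for use_backup_server in (False, True):
            if download_chapter_images(url, download_dir, use_backup_server):
                return True
        return False
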
diff --git a/src/episode.c b/src/episode.c
index b74417a..cf747d6 100644
--- a/src/episode.c
+++ b/src/episode.c
@@ -126,12 +126,12 @@ int episode_info_get_generic_name(EpisodeInfo *self, char *output_buffer, int ou
char res_start_symbol = (self->resolution_in_brackets ? '[' : '(');
char res_end_symbol = (self->resolution_in_brackets ? ']' : ')');
if(self->extension)
- bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s %c%s%c%s", self->group_name, self->anime_name, res_start_symbol, self->resolution, res_end_symbol, self->extension);
+ bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s %c%s%c %s", self->group_name, self->anime_name, res_start_symbol, self->resolution, res_end_symbol, self->extension);
else
bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s %c%s%c", self->group_name, self->anime_name, res_start_symbol, self->resolution, res_end_symbol);
} else {
if(self->extension)
- bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s%s", self->group_name, self->anime_name, self->extension);
+ bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s %s", self->group_name, self->anime_name, self->extension);
else
bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s", self->group_name, self->anime_name);
}
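
Both snprintf fixes only add a missing space before self->extension in the generated generic name; what extension actually holds (with or without a leading dot) is not visible here, so the values below are hypothetical. The spacing difference, shown with Python's printf-style formatting:

    fmt_before = "[%s] %s %c%s%c%s"   # extension glued to the closing bracket
    fmt_after  = "[%s] %s %c%s%c %s"  # space before the extension

    args = ("SubsPlease", "Show Name", "(", "1080p", ")", "ext")
    print(fmt_before % args)  # [SubsPlease] Show Name (1080p)ext
    print(fmt_after % args)   # [SubsPlease] Show Name (1080p) ext
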
diff --git a/src/html.c b/src/html.c
index a0a5e73..40cc1eb 100644
--- a/src/html.c
+++ b/src/html.c
@@ -382,7 +382,7 @@ static int int_min(int a, int b) {
return a < b ? a : b;
}
-static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *download_items_buffer, TrackedHtml *tracked_html, char *html_tracked_dir, const char *download_dir) {
+static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *download_items_buffer, TrackedHtml *tracked_html, char *html_tracked_dir, const char *download_dir, bool show_error_notifications) {
const char *home_dir = get_home_dir();
char download_finished_script[PATH_MAX];
@@ -426,8 +426,10 @@ static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *d
if(result != 0)
fprintf(stderr, "Failed while downloading html, url: %s\n", download_items_it->link);
- const char *notify_args[] = { "notify-send", "-a", "automedia", "-u", result == 0 ? "low" : "critical", "-t", "5000", "--", result == 0 ? "Download finished" : "Download failed", notify_msg, NULL };
- program_exec(notify_args, NULL, NULL);
+ if(result == 0 || show_error_notifications) {
+ const char *notify_args[] = { "notify-send", "-a", "automedia", "-u", result == 0 ? "low" : "critical", "-t", "5000", "--", result == 0 ? "Download finished" : "Download failed", notify_msg, NULL };
+ program_exec(notify_args, NULL, NULL);
+ }
if(result != 0)
break;
@@ -455,7 +457,7 @@ static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *d
}
/* TODO: Make asynchronous. Right now this will only complete when the whole chapter download completes */
-int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download_dir, char *html_config_dir) {
+int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download_dir, char *html_config_dir, bool show_error_notifications) {
/* TODO: This can be cached */
int html_config_dir_len = strlen(html_config_dir);
@@ -486,7 +488,7 @@ int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download
char *html_tracked_dir = html_config_dir;
strcat(html_tracked_dir, "/tracked/");
- result = download_html_items_in_reverse(plugin_filepath, &download_items_buffer, tracked_html, html_tracked_dir, download_dir);
+ result = download_html_items_in_reverse(plugin_filepath, &download_items_buffer, tracked_html, html_tracked_dir, download_dir, show_error_notifications);
if(result != 0) {
fprintf(stderr, "Failed while download html item for url: %s\n", tracked_html->link);
goto cleanup;
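
The new guard means success notifications are always shown while failure notifications can be suppressed. The gating logic restated in Python (subprocess stands in for automedia's program_exec helper):

    import subprocess

    def notify_download_result(result_ok, msg, show_error_notifications):
        # Failures are only reported when error notifications are enabled,
        # i.e. when --no-error-notification was not passed to sync.
        if not (result_ok or show_error_notifications):
            return
        subprocess.run([
            "notify-send", "-a", "automedia",
            "-u", "low" if result_ok else "critical",
            "-t", "5000", "--",
            "Download finished" if result_ok else "Download failed",
            msg,
        ], check=False)
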
diff --git a/src/html.h b/src/html.h
index 5ce2809..bfa8e08 100644
--- a/src/html.h
+++ b/src/html.h
@@ -1,6 +1,8 @@
#ifndef HTML_H
#define HTML_H
+#include <stdbool.h>
+
typedef struct cJSON cJSON;
typedef struct fallback fallback;
@@ -13,6 +15,6 @@ typedef struct {
/* Modifies @html_config_dir */
int add_html(const char *name, const char *url, char *html_config_dir, char *program_dir, const char *start_after);
-int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download_dir, char *html_config_dir);
+int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download_dir, char *html_config_dir, bool show_error_notifications);
#endif
diff --git a/src/main.c b/src/main.c
index 52d68ec..16cc482 100644
--- a/src/main.c
+++ b/src/main.c
@@ -60,9 +60,10 @@ static void usage_add(void) {
}
static void usage_sync(void) {
- fprintf(stderr, "usage: automedia sync <download_dir>\n");
+ fprintf(stderr, "usage: automedia sync [--no-error-notification] <download_dir>\n");
fprintf(stderr, "OPTIONS\n");
- fprintf(stderr, " download_dir The path where media should be downloaded to\n");
+ fprintf(stderr, " --no-error-notification Disable error notifications\n");
+ fprintf(stderr, " download_dir The path where media should be downloaded to\n");
fprintf(stderr, "EXAMPLES\n");
fprintf(stderr, " automedia sync /home/user/Downloads/automedia\n");
exit(1);
@@ -82,7 +83,7 @@ static void usage_cleanup(void) {
fprintf(stderr, "usage: automedia cleanup [-d <days>] [search_term]\n");
fprintf(stderr, "OPTIONS\n");
fprintf(stderr, " -d <days> Media that haven't received any updates in the specified amount of days will be shown. If not specified then all media will be included\n");
- fprintf(stderr, " search_term The name of the media to find. If not inclued then all media (within -d days) will be included. Note this is case insensitive\n");
+ fprintf(stderr, " search_term The name of the media to find. If not specified then all media (within -d days) will be included. Note this is case insensitive\n");
fprintf(stderr, "EXAMPLES\n");
fprintf(stderr, " automedia cleanup -d 100\n");
fprintf(stderr, " automedia cleanup baki\n");
@@ -466,6 +467,7 @@ static int iterate_tracked_item_rss_callback(char *title, char *link, char *plug
typedef struct {
char *program_dir;
const char *download_dir;
+ bool show_error_notifications;
} IterateHtmlItemUserdata;
static int iterate_tracked_item_html_callback(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, fallback *fall, void *userdata) {
@@ -481,17 +483,18 @@ static int iterate_tracked_item_html_callback(char *title, char *link, char *plu
tracked_html.title = title;
tracked_html.link = link;
tracked_html.json_data = json_data;
- return sync_html(&tracked_html, iterate_html_item_userdata->program_dir, iterate_html_item_userdata->download_dir, config_dir);
+ return sync_html(&tracked_html, iterate_html_item_userdata->program_dir, iterate_html_item_userdata->download_dir, config_dir, iterate_html_item_userdata->show_error_notifications);
}
static void sync_tracked_rss(TransmissionSession *transmission_session, char *rss_config_dir, fallback *fall) {
iterate_tracked_items(rss_config_dir, iterate_tracked_item_rss_callback, fall, transmission_session);
}
-static void sync_tracked_html(char *html_config_dir, char *program_dir, const char *download_dir) {
+static void sync_tracked_html(char *html_config_dir, char *program_dir, const char *download_dir, bool show_error_notifications) {
IterateHtmlItemUserdata iterate_html_item_userdata;
iterate_html_item_userdata.program_dir = program_dir;
iterate_html_item_userdata.download_dir = download_dir;
+ iterate_html_item_userdata.show_error_notifications = show_error_notifications;
iterate_tracked_items(html_config_dir, iterate_tracked_item_html_callback, NULL, &iterate_html_item_userdata);
}
@@ -543,7 +546,7 @@ static void torrent_list_check_new_downloads_callback(int id, const char *name,
}
}
-static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *program_dir, const char *download_dir, int sync_rate_sec, fallback *fall) {
+static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *program_dir, const char *download_dir, int sync_rate_sec, fallback *fall, bool show_error_notifications) {
if(transmission_is_daemon_running() != 0) {
if(transmission_start_daemon(download_dir) != 0) {
fprintf(stderr, "Failed to start torrent daemon\n");
@@ -571,7 +574,7 @@ static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *pro
/* running is set to 0 in SIGINT signal handler (ctrl+c) */
while(automedia_running) {
sync_tracked_rss(&transmission_session, rss_config_dir, fall);
- sync_tracked_html(html_config_dir, program_dir, download_dir);
+ sync_tracked_html(html_config_dir, program_dir, download_dir, show_error_notifications);
fprintf(stderr, "Finished syncing rss and html. Syncing again in %d minutes\n", sync_rate_sec / 60);
int check_count = 0;
@@ -632,7 +635,26 @@ static void command_sync(int argc, char **argv, char *rss_config_dir, char *html
if(argc < 1)
usage_sync();
- char *download_dir = argv[0];
+ char *download_dir = NULL;
+ bool show_error_notifications = true;
+ for(int i = 0; i < argc; ++i) {
+ if(strncmp(argv[i], "--", 2) == 0) {
+ if(strcmp(argv[i], "--no-error-notification") == 0) {
+ show_error_notifications = false;
+ } else {
+ fprintf(stderr, "Error: invalid option to sync '%s'\n", argv[i]);
+ usage_sync();
+ }
+ } else {
+ download_dir = argv[i];
+ }
+ }
+
+ if(!download_dir) {
+ fprintf(stderr, "Error: download_dir option not specified to sync command\n");
+ usage_sync();
+ }
+
const char automedia_pid_path[] = "/tmp/automedia.pid";
if(create_directory_recursive(download_dir) != 0) {
@@ -695,7 +717,7 @@ static void command_sync(int argc, char **argv, char *rss_config_dir, char *html
close(pid_file);
const int sync_rate_sec = 15 * 60; /* every 15 min */
- sync_rss_html(rss_config_dir, html_config_dir, program_dir, download_dir, sync_rate_sec, &fall);
+ sync_rss_html(rss_config_dir, html_config_dir, program_dir, download_dir, sync_rate_sec, &fall, show_error_notifications);
unlink(automedia_pid_path);
fallback_deinit(&fall);
}
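
The new argument loop accepts --no-error-notification before or after download_dir, with the last non-option argument winning as the download directory. The same parse restated in Python for reference:

    import sys

    def parse_sync_args(argv):
        download_dir = None
        show_error_notifications = True
        for arg in argv:
            if arg.startswith("--"):
                if arg == "--no-error-notification":
                    show_error_notifications = False
                else:
                    sys.exit("Error: invalid option to sync '%s'" % arg)
            else:
                download_dir = arg
        if download_dir is None:
            sys.exit("Error: download_dir argument not specified to sync command")
        return download_dir, show_error_notifications

With this, automedia sync --no-error-notification /home/user/Downloads/automedia and automedia sync /home/user/Downloads/automedia --no-error-notification behave identically.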