-rw-r--r-- | README.md | 3
-rw-r--r-- | TODO | 11
-rwxr-xr-x | build.sh | 9
-rw-r--r-- | depends/cJSON.c | 59
-rw-r--r-- | depends/cJSON.h | 13
-rw-r--r-- | fallback.json | 8
-rwxr-xr-x | install.sh | 3
-rwxr-xr-x | plugins/mangakatana.py | 2
-rwxr-xr-x | plugins/manganelo.py | 78
-rwxr-xr-x | release.sh | 3
-rw-r--r-- | src/buffer.h | 2
-rw-r--r-- | src/download.c | 75
-rw-r--r-- | src/download.h | 8
-rw-r--r-- | src/episode.c | 4
-rw-r--r-- | src/fallback.c | 193
-rw-r--r-- | src/fallback.h | 37
-rw-r--r-- | src/html.c | 19
-rw-r--r-- | src/html.h | 5
-rw-r--r-- | src/main.c | 87
-rw-r--r-- | src/rss.c | 36
-rw-r--r-- | src/rss.h | 5
-rw-r--r-- | src/rss_html_common.c | 19
-rw-r--r-- | src/rss_html_common.h | 6
-rwxr-xr-x | tools/list-missing-unwatched.py | 2
-rwxr-xr-x | tools/list-unwatched.py | 2
-rwxr-xr-x | tools/remove-watched.py | 63
-rwxr-xr-x | tools/track.py | 2 |
27 files changed, 626 insertions, 128 deletions
@@ -3,8 +3,7 @@ Automatically track new releases of media and download them. Currently works wit A notification is shown on the screen when a download finishes (if notify-send is installed).\ AutoMedia checks and downloads updates every 15 minutes. Torrents stop seeding after a ratio of 2.0.\ ## Installation -Run `sudo ./install.sh` to install AutoMedia. It will build automedia with musl if installed, otherwise it will use gcc/clang (whichever you have configured as the default). If you are running Arch Linux, then you can find AutoMedia on aur under the name automedia-git (`yay -S automedia-git`).\ -There is also a prebuilt binary of the latest commit for x86_64 systems available at https://dec05eba.com/files/automedia. +Run `sudo ./install.sh` to install AutoMedia. It will build automedia with musl if installed, otherwise it will use gcc/clang (whichever you have configured as the default). If you are running Arch Linux, then you can find AutoMedia on aur under the name automedia (`yay -S automedia`). ## Usage Run automedia with `sync` option and keep it running to track media. You can then use `add` option to add new media to track.\ Removing media from being synced can be done by removing the tracked directory in `~/.config/automedia/rss/tracked` or `~/.config/automedia/html/tracked` or by using `automedia cleanup`.\ @@ -10,4 +10,13 @@ Make downloading manga asynchronous, just like torrents. And have timeout for do Detect if a website is very slow (timeout?) and ignore sync for that website for the current sync. This is to prevent a slow website from preventing all syncs.. Cleanup command should remove torrents from transmission. Remove dirname because it breaks automedia.pid because it modified /proc/.../cmdline. Should also do readlink on the first arg of cmdline (which doesn't work if automedia is in /usr/bin???). Use socket! look at quickmedia. -Some mangadex chapters redirect to mangaplus. Those should redirect to the mangaplus plugin. Right now they are simply skipped.
\ No newline at end of file +Some mangadex chapters redirect to mangaplus. Those should redirect to the mangaplus plugin. Right now they are simply skipped. +Remove torrents when running automedia cleanup. This can be done by removing torrents by anime config data "filename" (in the json file). +Use fallbacks for manga download too. + +Put anime and manga downloads into separate subdirectories in the download directory. +Automatically cleanup downloaded (and seeded) torrents in transmission. + +Add command to migrate from manganelo/mangakakalot to mangakatana/mangadex. + +Instead of doing multiple requests for rss, do one for each sub group and filter out the ones that match the track filter. @@ -1,10 +1,13 @@ #!/bin/sh -CFLAGS="-O3 -s -flto -Wall -Wextra -Werror -DNDEBUG" -[ -z "$RELEASE" ] && CFLAGS="-O0 -g3 -Wall -Wextra -Werror"; +script_dir=$(dirname "$0") +cd "$script_dir" + +CFLAGS="-O3 -s -flto -Wall -Wextra -DNDEBUG" +[ -z "$RELEASE" ] && CFLAGS="-O0 -g3 -Wall -Wextra"; CC=cc if [ $(which musl-gcc 2> /dev/null) ]; then CC="musl-gcc" CFLAGS+=" -static" fi -$CC src/main.c src/program.c src/alloc.c src/buffer.c src/fileutils.c src/transmission.c src/rss.c src/html.c src/rss_html_common.c src/download.c src/stringutils.c src/episode.c src/track_remove_parser.c depends/cJSON.c -o automedia $CFLAGS +$CC src/main.c src/program.c src/alloc.c src/buffer.c src/fileutils.c src/transmission.c src/rss.c src/html.c src/rss_html_common.c src/download.c src/stringutils.c src/episode.c src/track_remove_parser.c src/fallback.c depends/cJSON.c -o automedia $CFLAGS diff --git a/depends/cJSON.c b/depends/cJSON.c index 030311c..48a401a 100644 --- a/depends/cJSON.c +++ b/depends/cJSON.c @@ -96,9 +96,9 @@ CJSON_PUBLIC(const char *) cJSON_GetErrorPtr(void) return (const char*) (global_error.json + global_error.position); } -CJSON_PUBLIC(char *) cJSON_GetStringValue(const cJSON * const item) +CJSON_PUBLIC(char *) cJSON_GetStringValue(const cJSON * const item) { - if (!cJSON_IsString(item)) + if (!cJSON_IsString(item)) { return NULL; } @@ -106,9 +106,9 @@ CJSON_PUBLIC(char *) cJSON_GetStringValue(const cJSON * const item) return item->valuestring; } -CJSON_PUBLIC(double) cJSON_GetNumberValue(const cJSON * const item) +CJSON_PUBLIC(double) cJSON_GetNumberValue(const cJSON * const item) { - if (!cJSON_IsNumber(item)) + if (!cJSON_IsNumber(item)) { return (double) NAN; } @@ -117,7 +117,7 @@ CJSON_PUBLIC(double) cJSON_GetNumberValue(const cJSON * const item) } /* This is a safeguard to prevent copy-pasters from using incompatible C and header files */ -#if (CJSON_VERSION_MAJOR != 1) || (CJSON_VERSION_MINOR != 7) || (CJSON_VERSION_PATCH != 14) +#if (CJSON_VERSION_MAJOR != 1) || (CJSON_VERSION_MINOR != 7) || (CJSON_VERSION_PATCH != 18) #error cJSON.h and cJSON.c have different versions. Make sure that both have the same. 
#endif @@ -263,10 +263,12 @@ CJSON_PUBLIC(void) cJSON_Delete(cJSON *item) if (!(item->type & cJSON_IsReference) && (item->valuestring != NULL)) { global_hooks.deallocate(item->valuestring); + item->valuestring = NULL; } if (!(item->type & cJSON_StringIsConst) && (item->string != NULL)) { global_hooks.deallocate(item->string); + item->string = NULL; } global_hooks.deallocate(item); item = next; @@ -397,11 +399,17 @@ CJSON_PUBLIC(double) cJSON_SetNumberHelper(cJSON *object, double number) return object->valuedouble = number; } +/* Note: when passing a NULL valuestring, cJSON_SetValuestring treats this as an error and return NULL */ CJSON_PUBLIC(char*) cJSON_SetValuestring(cJSON *object, const char *valuestring) { char *copy = NULL; /* if object's type is not cJSON_String or is cJSON_IsReference, it should not set valuestring */ - if (!(object->type & cJSON_String) || (object->type & cJSON_IsReference)) + if ((object == NULL) || !(object->type & cJSON_String) || (object->type & cJSON_IsReference)) + { + return NULL; + } + /* return NULL if the object is corrupted or valuestring is NULL */ + if (object->valuestring == NULL || valuestring == NULL) { return NULL; } @@ -511,7 +519,7 @@ static unsigned char* ensure(printbuffer * const p, size_t needed) return NULL; } - + memcpy(newbuffer, p->buffer, p->offset + 1); p->hooks.deallocate(p->buffer); } @@ -562,6 +570,10 @@ static cJSON_bool print_number(const cJSON * const item, printbuffer * const out { length = sprintf((char*)number_buffer, "null"); } + else if(d == (double)item->valueint) + { + length = sprintf((char*)number_buffer, "%d", item->valueint); + } else { /* Try 15 decimal places of precision to avoid nonsignificant nonzero digits */ @@ -884,6 +896,7 @@ fail: if (output != NULL) { input_buffer->hooks.deallocate(output); + output = NULL; } if (input_pointer != NULL) @@ -1103,7 +1116,7 @@ CJSON_PUBLIC(cJSON *) cJSON_ParseWithLengthOpts(const char *value, size_t buffer } buffer.content = (const unsigned char*)value; - buffer.length = buffer_length; + buffer.length = buffer_length; buffer.offset = 0; buffer.hooks = global_hooks; @@ -1226,6 +1239,7 @@ static unsigned char *print(const cJSON * const item, cJSON_bool format, const i /* free the buffer */ hooks->deallocate(buffer->buffer); + buffer->buffer = NULL; } return printed; @@ -1234,11 +1248,13 @@ fail: if (buffer->buffer != NULL) { hooks->deallocate(buffer->buffer); + buffer->buffer = NULL; } if (printed != NULL) { hooks->deallocate(printed); + printed = NULL; } return NULL; @@ -1279,6 +1295,7 @@ CJSON_PUBLIC(char *) cJSON_PrintBuffered(const cJSON *item, int prebuffer, cJSON if (!print_value(item, &p)) { global_hooks.deallocate(p.buffer); + p.buffer = NULL; return NULL; } @@ -1650,6 +1667,11 @@ static cJSON_bool parse_object(cJSON * const item, parse_buffer * const input_bu current_item = new_item; } + if (cannot_access_at_index(input_buffer, 1)) + { + goto fail; /* nothing comes after the comma */ + } + /* parse the name of the child */ input_buffer->offset++; buffer_skip_whitespace(input_buffer); @@ -2260,7 +2282,7 @@ CJSON_PUBLIC(cJSON_bool) cJSON_InsertItemInArray(cJSON *array, int which, cJSON { cJSON *after_inserted = NULL; - if (which < 0) + if (which < 0 || newitem == NULL) { return false; } @@ -2271,6 +2293,11 @@ CJSON_PUBLIC(cJSON_bool) cJSON_InsertItemInArray(cJSON *array, int which, cJSON return add_item_to_array(array, newitem); } + if (after_inserted != array->child && after_inserted->prev == NULL) { + /* return false if after_inserted is a corrupted array item */ + 
return false; + } + newitem->next = after_inserted; newitem->prev = after_inserted->prev; after_inserted->prev = newitem; @@ -2287,7 +2314,7 @@ CJSON_PUBLIC(cJSON_bool) cJSON_InsertItemInArray(cJSON *array, int which, cJSON CJSON_PUBLIC(cJSON_bool) cJSON_ReplaceItemViaPointer(cJSON * const parent, cJSON * const item, cJSON * replacement) { - if ((parent == NULL) || (replacement == NULL) || (item == NULL)) + if ((parent == NULL) || (parent->child == NULL) || (replacement == NULL) || (item == NULL)) { return false; } @@ -2357,6 +2384,11 @@ static cJSON_bool replace_item_in_object(cJSON *object, const char *string, cJSO cJSON_free(replacement->string); } replacement->string = (char*)cJSON_strdup((const unsigned char*)string, &global_hooks); + if (replacement->string == NULL) + { + return false; + } + replacement->type &= ~cJSON_StringIsConst; return cJSON_ReplaceItemViaPointer(object, get_object_item(object, string, case_sensitive), replacement); @@ -2689,7 +2721,7 @@ CJSON_PUBLIC(cJSON *) cJSON_CreateStringArray(const char *const *strings, int co if (a && a->child) { a->child->prev = n; } - + return a; } @@ -2976,7 +3008,7 @@ CJSON_PUBLIC(cJSON_bool) cJSON_IsRaw(const cJSON * const item) CJSON_PUBLIC(cJSON_bool) cJSON_Compare(const cJSON * const a, const cJSON * const b, const cJSON_bool case_sensitive) { - if ((a == NULL) || (b == NULL) || ((a->type & 0xFF) != (b->type & 0xFF)) || cJSON_IsInvalid(a)) + if ((a == NULL) || (b == NULL) || ((a->type & 0xFF) != (b->type & 0xFF))) { return false; } @@ -3107,4 +3139,5 @@ CJSON_PUBLIC(void *) cJSON_malloc(size_t size) CJSON_PUBLIC(void) cJSON_free(void *object) { global_hooks.deallocate(object); -} + object = NULL; +}
\ No newline at end of file diff --git a/depends/cJSON.h b/depends/cJSON.h index e97e5f4..a37d69e 100644 --- a/depends/cJSON.h +++ b/depends/cJSON.h @@ -81,7 +81,7 @@ then using the CJSON_API_VISIBILITY flag to "export" the same symbols the way CJ /* project version */ #define CJSON_VERSION_MAJOR 1 #define CJSON_VERSION_MINOR 7 -#define CJSON_VERSION_PATCH 14 +#define CJSON_VERSION_PATCH 18 #include <stddef.h> @@ -256,7 +256,7 @@ CJSON_PUBLIC(cJSON_bool) cJSON_Compare(const cJSON * const a, const cJSON * cons /* Minify a strings, remove blank characters(such as ' ', '\t', '\r', '\n') from strings. * The input pointer json cannot point to a read-only address area, such as a string constant, - * but should point to a readable and writable adress area. */ + * but should point to a readable and writable address area. */ CJSON_PUBLIC(void) cJSON_Minify(char *json); /* Helper functions for creating and adding items to an object at the same time. @@ -279,6 +279,13 @@ CJSON_PUBLIC(double) cJSON_SetNumberHelper(cJSON *object, double number); /* Change the valuestring of a cJSON_String object, only takes effect when type of object is cJSON_String */ CJSON_PUBLIC(char*) cJSON_SetValuestring(cJSON *object, const char *valuestring); +/* If the object is not a boolean type this does nothing and returns cJSON_Invalid else it returns the new type*/ +#define cJSON_SetBoolValue(object, boolValue) ( \ + (object != NULL && ((object)->type & (cJSON_False|cJSON_True))) ? \ + (object)->type=((object)->type &(~(cJSON_False|cJSON_True)))|((boolValue)?cJSON_True:cJSON_False) : \ + cJSON_Invalid\ +) + /* Macro for iterating over an array or object */ #define cJSON_ArrayForEach(element, array) for(element = (array != NULL) ? (array)->child : NULL; element != NULL; element = element->next) @@ -290,4 +297,4 @@ CJSON_PUBLIC(void) cJSON_free(void *object); } #endif -#endif +#endif
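The cJSON update above (1.7.14 to 1.7.18) also introduces the cJSON_SetBoolValue helper macro. Nothing in this commit calls it yet; the following is a minimal illustrative sketch of a caller, not code from this repository:

/* Illustrative only: exercises the cJSON_SetBoolValue macro added in the
   header diff above. Per its comment, the macro only changes items that are
   already cJSON_True/cJSON_False and returns cJSON_Invalid otherwise. */
#include <stdio.h>
#include "cJSON.h"

int main(void) {
    cJSON *root = cJSON_CreateObject();
    cJSON *flag = cJSON_AddBoolToObject(root, "filler", 1); /* starts out true */

    if(cJSON_SetBoolValue(flag, 0) == cJSON_Invalid) /* flip it to false */
        fprintf(stderr, "not a boolean item\n");

    char *text = cJSON_PrintUnformatted(root); /* {"filler":false} */
    printf("%s\n", text);
    cJSON_free(text);
    cJSON_Delete(root);
    return 0;
}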
\ No newline at end of file diff --git a/fallback.json b/fallback.json new file mode 100644 index 0000000..e6b277d --- /dev/null +++ b/fallback.json @@ -0,0 +1,8 @@ +[ + { + "source": "https://nyaa.si", + "fallbacks": [ + "https://nyaa.land" + ] + } +]
\ No newline at end of file @@ -3,7 +3,10 @@ script_dir=$(dirname "$0") cd "$script_dir" +RELEASE=1 ./build.sh + install -Dm755 "automedia" "/usr/bin/automedia" for file in plugins/*; do install -Dm755 "$file" "/usr/share/automedia/$file" done +install -Dm755 "fallback.json" "/usr/share/automedia/fallback.json"
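fallback.json pairs a source domain with one or more mirror domains, and install.sh now ships it to /usr/share/automedia/fallback.json. A minimal sketch of how the new fallback API (declared in src/fallback.h further down in this diff) consumes that file; the URLs come from the fallback.json above, error handling is trimmed, and the usage is illustrative rather than code from this commit:

/* Sketch: load fallback.json and resolve a mirror URL with the new API
   from src/fallback.h. */
#include <stdio.h>
#include "fallback.h"

int main(void) {
    fallback fall;
    if(!fallback_load_from_file(&fall, "/usr/share/automedia/fallback.json"))
        return 1;

    /* Lookup matches by host, so any path under nyaa.si finds the item */
    fallback_item *item = fallback_get_from_url(&fall, "https://nyaa.si/?page=rss&q=test");
    if(item)
        printf("source: %s\n", item->source); /* https://nyaa.si */

    /* Rebuild the same URL against the nyaa.land mirror */
    char new_url[2048];
    fallback_replace_url_with_fallback_url("https://nyaa.si/?page=rss&q=test",
                                           "https://nyaa.land", new_url, sizeof(new_url));
    printf("%s\n", new_url); /* https://nyaa.land/?page=rss&q=test */

    fallback_deinit(&fall);
    return 0;
}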
\ No newline at end of file diff --git a/plugins/mangakatana.py b/plugins/mangakatana.py index 85fd8d0..586c4ee 100755 --- a/plugins/mangakatana.py +++ b/plugins/mangakatana.py @@ -110,7 +110,7 @@ def get_javascript_string_arrays(js_source): return arrays arr = js_source[start:end].replace("'", "").split(",") - arrays.extend(list(filter(None, arr))) + arrays.extend(list(filter(lambda x: x is not None and ".com" in x, arr))) start = end + 1 def uniq_str_arr(arr): diff --git a/plugins/manganelo.py b/plugins/manganelo.py index 5593fd4..0391eec 100755 --- a/plugins/manganelo.py +++ b/plugins/manganelo.py @@ -9,7 +9,8 @@ import json from lxml import etree headers = { - 'User-Agent': "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36" + 'User-Agent': "Mozilla/5.0 (X11; Linux x86_64; rv:136.0) Gecko/20100101 Firefox/136.0", + 'Referer': "https://www.nelomanga.com/" } def usage(): @@ -36,18 +37,15 @@ if len(sys.argv) < 2: def download_file(url, save_path): file_size = 0 - headers = { - "accept-language": "en-US,en;q=0.9", - "accept": "image/webp,image/apng,image/*,*/*;q=0.8", - "sec-fetch-site": "cross-site", - "sec-fetch-mode": "no-cors", - "sec-fetch-dest": "image", - "referer": "https://manganelo.com/", - "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36" - } - #cookies = { - # "content_server": "server2" - #} + # headers = { + # "accept-language": "en-US,en;q=0.9", + # "accept": "image/webp,image/apng,image/*,*/*;q=0.8", + # "sec-fetch-site": "cross-site", + # "sec-fetch-mode": "no-cors", + # "sec-fetch-dest": "image", + # "referer": "https://manganelo.com/", + # "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36" + # } with requests.get(url, stream=True, headers=headers, timeout=30) as response: if not response.ok: return 0 @@ -102,7 +100,11 @@ def redirect_migrated_url(url, tree, is_chapter): return tree def list_chapters(url, chapter_list_input): + url = url.replace("mangakakalot", "manganelo").replace("manganelo", "nelomanga") response = requests.get(url, timeout=30, headers=headers) + if not response.ok: + url = url.replace("_", "-") + response = requests.get(url, timeout=30, headers=headers) response.raise_for_status() seen_titles = set() @@ -115,7 +117,7 @@ def list_chapters(url, chapter_list_input): for item in chapter_list_input: chapter_url = item.get("url") if chapter_url and len(chapter_url) > 0: - seen_urls.add(chapter_url.replace("mangakakalot", "manganelo")) + seen_urls.add(chapter_url.replace("mangakakalot", "manganelo").replace("manganelo", "nelomanga")) tree = etree.HTML(response.text) tree = redirect_migrated_url(url, tree, False) @@ -138,19 +140,25 @@ def list_chapters(url, chapter_list_input): print(json.dumps(chapters)) -def download_chapter(url, download_dir): - response = requests.get(url, timeout=30) - response.raise_for_status() +def download_chapter_images(url, download_dir, use_backup_server): + url = url.replace("mangakakalot", "manganelo").replace("manganelo", "nelomanga") + cookies = {} + try: + new_headers = headers.copy() + new_headers['referer'] = url + response = requests.get('https://nelomanga.com/change_content_s2' if use_backup_server else 'https://nelomanga.com/change_content_s1', headers=new_headers, allow_redirects=False) + response.raise_for_status() + cookies = response.cookies + except requests.HTTPError: + return False - in_progress_filepath = 
os.path.join(download_dir, ".in_progress") - with open(in_progress_filepath, "w") as file: - file.write(url) + response = requests.get(url, timeout=30, headers=headers, cookies=cookies) + response.raise_for_status() tree = etree.HTML(response.text) tree = redirect_migrated_url(url, tree, True) if tree is None: - os.remove(in_progress_filepath) - exit(2) + return False img_number = 1 for image_source in tree.xpath('//div[@class="container-chapter-reader"]/img/@src'): @@ -159,16 +167,26 @@ def download_chapter(url, download_dir): image_path = os.path.join(download_dir, image_name) print("Downloading {} to {}".format(image_source, image_path)) file_size = download_file(image_source, image_path) - if file_size < 255: + if file_size < 100: print("resource temporary unavailable: %s" % image_source) - os.remove(in_progress_filepath) - exit(2) + return False img_number += 1 if img_number == 1: print("Failed to find images for chapter") - os.remove(in_progress_filepath) - exit(2) + return False + + return True + +def download_chapter(url, download_dir): + in_progress_filepath = os.path.join(download_dir, ".in_progress") + with open(in_progress_filepath, "w") as file: + file.write(url) + + if not download_chapter_images(url, download_dir, False): + if not download_chapter_images(url, download_dir, True): + os.remove(in_progress_filepath) + exit(2) with open(os.path.join(download_dir, ".finished"), "w") as file: file.write("1") @@ -180,7 +198,8 @@ if command == "list": if len(sys.argv) < 3: usage_list() - url = sys.argv[2].replace("mangakakalot", "manganelo") + #url = sys.argv[2].replace("mangakakalot", "manganelo") + url = sys.argv[2] chapter_list_input = sys.stdin.read() if len(chapter_list_input) == 0: chapter_list_input = [] @@ -191,7 +210,8 @@ elif command == "download": if len(sys.argv) < 4: usage_download() - url = sys.argv[2].replace("mangakakalot", "manganelo") + #url = sys.argv[2].replace("mangakakalot", "manganelo") + url = sys.argv[2] download_dir = sys.argv[3] download_chapter(url, download_dir) else: @@ -1,3 +1,6 @@ #!/bin/sh +script_dir=$(dirname "$0") +cd "$script_dir" + RELEASE=1 ./build.sh diff --git a/src/buffer.h b/src/buffer.h index f95e874..2a67fe0 100644 --- a/src/buffer.h +++ b/src/buffer.h @@ -4,7 +4,7 @@ #include <stddef.h> /* - TODO: Optimize small size buffers by using data and size members (16 bytes on x86) + TODO: Optimize small size buffers by using data and size members (16 bytes on x86-64) instead of heap allocation */ diff --git a/src/download.c b/src/download.c index 4154dcb..3b3692c 100644 --- a/src/download.c +++ b/src/download.c @@ -1,32 +1,91 @@ #include "download.h" #include "buffer.h" #include "program.h" +#include "fallback.h" +#include <stdio.h> + +int download_to_buffer(const char *url, Buffer *buffer, fallback *fall) { + char new_url[2048]; + snprintf(new_url, sizeof(new_url), "%s", url); + + fallback_item *fall_item = NULL; + if(fall) { + fall_item = fallback_get_from_url(fall, url); + if(fall_item && fall_item->source_to_use) + fallback_item_replace_url_with_fallback_url(fall_item, url, new_url, sizeof(new_url)); + } -int download_to_buffer(const char *url, Buffer *buffer) { const char *args[] = { "curl", "-s", "-L", "-f", "-H", "user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36", "-H", "Accept-Language: en-US,en;q=0.5", "--compressed", "--", - url, + new_url, NULL }; + int result = program_exec(args, program_buffer_write_callback, buffer); - if(result != 0) + if(result == 0) { + 
buffer_append(buffer, "\0", 1); + return 0; + } + + if(!fall || !fall_item || fall_item->source_to_use) return result; - buffer_append(buffer, "\0", 1); - return result; + + const char **fallbacks_it = buffer_begin(&fall_item->fallbacks); + const char **fallbacks_end = buffer_end(&fall_item->fallbacks); + for(; fallbacks_it != fallbacks_end; ++fallbacks_it) { + buffer_clear(buffer); + fallback_replace_url_with_fallback_url(url, *fallbacks_it, new_url, sizeof(new_url)); + if(download_to_buffer(new_url, buffer, NULL) == 0) { + fprintf(stderr, "Download failed for url %s, replacing domain with %s for this sync session\n", url, *fallbacks_it); + fall_item->source_to_use = *fallbacks_it; + return 0; + } + } + + return -1; } -int is_header_response_ok(const char *url) { +int is_header_response_ok(const char *url, fallback *fall) { + char new_url[2048]; + snprintf(new_url, sizeof(new_url), "%s", url); + + fallback_item *fall_item = NULL; + if(fall) { + fall_item = fallback_get_from_url(fall, url); + if(fall_item && fall_item->source_to_use) + fallback_item_replace_url_with_fallback_url(fall_item, url, new_url, sizeof(new_url)); + } + const char *args[] = { "curl", "-s", "-L", "-f", "-I", "-H", "user-agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36", "-H", "Accept-Language: en-US,en;q=0.5", "--", - url, + new_url, NULL }; - return program_exec(args, NULL, NULL); + + int result = program_exec(args, NULL, NULL); + if(result == 0) + return 0; + + if(!fall || !fall_item || fall_item->source_to_use) + return result; + + const char **fallbacks_it = buffer_begin(&fall_item->fallbacks); + const char **fallbacks_end = buffer_end(&fall_item->fallbacks); + for(; fallbacks_it != fallbacks_end; ++fallbacks_it) { + fallback_replace_url_with_fallback_url(url, *fallbacks_it, new_url, sizeof(new_url)); + if(is_header_response_ok(new_url, NULL) == 0) { + fprintf(stderr, "Download failed for url %s, replacing domain with %s for this sync session\n", url, *fallbacks_it); + fall_item->source_to_use = *fallbacks_it; + return 0; + } + } + + return -1; } diff --git a/src/download.h b/src/download.h index 3c7f0aa..ff188cc 100644 --- a/src/download.h +++ b/src/download.h @@ -2,7 +2,11 @@ #define DOWNLOAD_H struct Buffer; -int download_to_buffer(const char *url, struct Buffer *buffer); -int is_header_response_ok(const char *url); +typedef struct fallback fallback; + +/* |fall| can be NULL */ +int download_to_buffer(const char *url, struct Buffer *buffer, fallback *fall); +/* |fall| can be NULL */ +int is_header_response_ok(const char *url, fallback *fall); #endif diff --git a/src/episode.c b/src/episode.c index b74417a..cf747d6 100644 --- a/src/episode.c +++ b/src/episode.c @@ -126,12 +126,12 @@ int episode_info_get_generic_name(EpisodeInfo *self, char *output_buffer, int ou char res_start_symbol = (self->resolution_in_brackets ? '[' : '('); char res_end_symbol = (self->resolution_in_brackets ? 
']' : ')'); if(self->extension) - bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s %c%s%c%s", self->group_name, self->anime_name, res_start_symbol, self->resolution, res_end_symbol, self->extension); + bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s %c%s%c %s", self->group_name, self->anime_name, res_start_symbol, self->resolution, res_end_symbol, self->extension); else bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s %c%s%c", self->group_name, self->anime_name, res_start_symbol, self->resolution, res_end_symbol); } else { if(self->extension) - bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s%s", self->group_name, self->anime_name, self->extension); + bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s %s", self->group_name, self->anime_name, self->extension); else bytes_written = snprintf(output_buffer, output_buffer_size, "[%s] %s", self->group_name, self->anime_name); } diff --git a/src/fallback.c b/src/fallback.c new file mode 100644 index 0000000..058b23a --- /dev/null +++ b/src/fallback.c @@ -0,0 +1,193 @@ +#include "fallback.h" +#include "fileutils.h" +#include "../depends/cJSON.h" +#include <stdio.h> +#include <stdlib.h> +#include <string.h> + +bool fallback_load_from_file(fallback *self, const char *filepath) { + memset(self, 0, sizeof(*self)); + + char *file_data; + long file_size; + if(file_get_content(filepath, &file_data, &file_size) != 0) { + fprintf(stderr, "Error: fallback_load_from_file: failed to read %s\n", filepath); + return false; + } + + cJSON *json_root = cJSON_ParseWithLength(file_data, file_size); + if(!json_root) { + fprintf(stderr, "Error: fallback_load_from_file: failed to parse file %s as json\n", filepath); + goto error; + } + free(file_data); + file_data = NULL; + + if(!cJSON_IsArray(json_root)) { + fprintf(stderr, "File %s contains malformed json. 
Expected json root element to be an array\n", filepath); + goto error; + } + + buffer_init(&self->fallbacks); + + const cJSON *fallback_item_json = NULL; + cJSON_ArrayForEach(fallback_item_json, json_root) { + if(!cJSON_IsObject(fallback_item_json)) + continue; + + const cJSON *source_json = cJSON_GetObjectItemCaseSensitive(fallback_item_json, "source"); + if(!cJSON_IsString(source_json)) + continue; + + const cJSON *fallbacks_json = cJSON_GetObjectItemCaseSensitive(fallback_item_json, "fallbacks"); + if(!cJSON_IsArray(fallbacks_json)) + continue; + + char *source_str = strdup(source_json->valuestring); + if(!source_str) { + fprintf(stderr, "Error: failed to clone string: %s\n", source_json->valuestring); + abort(); + } + + fallback_item item; + item.source = source_str; + item.source_to_use = NULL; + buffer_init(&item.fallbacks); + + const cJSON *fallback_str_json = NULL; + cJSON_ArrayForEach(fallback_str_json, fallbacks_json) { + if(!cJSON_IsString(fallback_str_json)) + continue; + + char *fallback = strdup(fallback_str_json->valuestring); + if(!fallback) { + fprintf(stderr, "Error: failed to clone string: %s\n", fallback_str_json->valuestring); + abort(); + } + + buffer_append(&item.fallbacks, &fallback, sizeof(fallback)); + } + + buffer_append(&self->fallbacks, &item, sizeof(item)); + } + + cJSON_Delete(json_root); + free(file_data); + return true; + + error: + cJSON_Delete(json_root); + free(file_data); + return false; +} + +void fallback_deinit(fallback *self) { + fallback_item *items_it = buffer_begin(&self->fallbacks); + fallback_item *items_end = buffer_end(&self->fallbacks); + for(; items_it != items_end; ++items_it) { + if(items_it->source) { + free(items_it->source); + items_it->source = NULL; + } + + char **fallbacks_it = buffer_begin(&items_it->fallbacks); + char **fallbacks_end = buffer_end(&items_it->fallbacks); + for(; fallbacks_it != fallbacks_end; ++fallbacks_it) { + if(*fallbacks_it) { + free(*fallbacks_it); + *fallbacks_it = NULL; + } + } + buffer_deinit(&items_it->fallbacks); + } + buffer_deinit(&self->fallbacks); +} + +static void url_extract_domain(const char **url, int *len) { + if(*len >= 7 && strncmp("http://", *url, 7) == 0) { + *url += 7; + *len -= 7; + } else if(*len >= 8 && strncmp("https://", *url, 8) == 0) { + *url += 8; + *len -= 8; + } + + const char *end = strchr(*url, '/'); + if(end) + *len = end - *url; +} + +fallback_item* fallback_get_from_url(fallback *self, const char *url) { + int url_len = strlen(url); + url_extract_domain(&url, &url_len); + + fallback_item *items_it = buffer_begin(&self->fallbacks); + fallback_item *items_end = buffer_end(&self->fallbacks); + for(; items_it != items_end; ++items_it) { + int source_len = strlen(items_it->source); + const char *source = items_it->source; + url_extract_domain(&source, &source_len); + + if(url_len == source_len && memcmp(url, source, url_len) == 0) + return items_it; + + if(!items_it->source_to_use) + continue; + + int source_to_use_len = strlen(items_it->source_to_use); + const char *source_to_use = items_it->source_to_use; + url_extract_domain(&source_to_use, &source_to_use_len); + + if(url_len == source_to_use_len && memcmp(url, source_to_use, url_len) == 0) + return items_it; + } + + return NULL; +} + +void fallback_clear_sources_to_use(fallback *self) { + fallback_item *items_it = buffer_begin(&self->fallbacks); + fallback_item *items_end = buffer_end(&self->fallbacks); + for(; items_it != items_end; ++items_it) { + items_it->source_to_use = NULL; + } +} + +static const char* get_url_part_after_domain(const char *url) { + int len
= strlen(url); + if(len >= 7 && strncmp(url, "http://", 7) == 0) { + url += 7; + len -= 7; + } else if(len >= 8 && strncmp(url, "https://", 8) == 0) { + url += 8; + len -= 8; + } + + const char *after_domain = strchr(url, '/'); + if(after_domain) + return after_domain; + else + return url + len; +} + +void fallback_replace_url_with_fallback_url(const char *url, const char *fallback_url, char *new_url, size_t new_url_len) { + const char *url_part_after_domain = get_url_part_after_domain(url); + snprintf(new_url, new_url_len, "%s%s", fallback_url, url_part_after_domain); +} + +void fallback_replace_active_fallback_url_with_source_url(fallback *self, const char *url, char *new_url, size_t new_url_len) { + fallback_item *fall_item = fallback_get_from_url(self, url); + if(!fall_item || !fall_item->source_to_use) { + snprintf(new_url, new_url_len, "%s", url); + return; + } + fallback_replace_url_with_fallback_url(url, fall_item->source, new_url, new_url_len); +} + +void fallback_item_replace_url_with_fallback_url(fallback_item *self, const char *url, char *new_url, size_t new_url_len) { + if(!self->source_to_use) { + snprintf(new_url, new_url_len, "%s", url); + return; + } + fallback_replace_url_with_fallback_url(url, self->source_to_use, new_url, new_url_len); +} diff --git a/src/fallback.h b/src/fallback.h new file mode 100644 index 0000000..cfca233 --- /dev/null +++ b/src/fallback.h @@ -0,0 +1,37 @@ +#ifndef FALLBACK_H +#define FALLBACK_H + +/* + Fallback reads the fallback.json to get fallbacks for downloads (currently only for rss). + If the regular download fails then the download is retried with fallbacks until one succeeds. + That fallback is then used for all downloads for the current sync for that domain. +*/ + +#include "buffer.h" +#include <stdbool.h> + +typedef struct { + char *source; + /* If this is NULL (default) then |source| is used. This is set to the first fallback that succeeds if |source| fails and is reset to NULL the next sync. This is a reference. */ + const char *source_to_use; + /* list of char* (malloced) */ + Buffer fallbacks; +} fallback_item; + +typedef struct fallback fallback; +struct fallback { + /* list of fallback_item */ + Buffer fallbacks; +}; + +bool fallback_load_from_file(fallback *self, const char *filepath); +void fallback_deinit(fallback *self); + +/* Given an url, look for a fallback item that has a matching source or source_to_use (matching by host). If no match found, return NULL */ +fallback_item* fallback_get_from_url(fallback *self, const char *url); +void fallback_clear_sources_to_use(fallback *self); +void fallback_replace_url_with_fallback_url(const char *url, const char *fallback_url, char *new_url, size_t new_url_len); +void fallback_replace_active_fallback_url_with_source_url(fallback *self, const char *url, char *new_url, size_t new_url_len); +void fallback_item_replace_url_with_fallback_url(fallback_item *self, const char *url, char *new_url, size_t new_url_len); + +#endif /* FALLBACK_H */ @@ -327,7 +327,7 @@ int add_html(const char *name, const char *url, char *html_config_dir, char *pro } /* - Create an ".in_progress" file to prevent periodic sync from reading rss data + Create an ".in_progress" file to prevent periodic sync from reading html data before we have finished adding all the data. */ remove(in_progress_filepath); @@ -364,7 +364,7 @@ int add_html(const char *name, const char *url, char *html_config_dir, char *pro } size_t num_download_items = download_items_start ? 
(((DownloadItemsData*)buffer_end(&download_items_buffer)) - download_items_start) : 0; - result = write_plugin_json_to_file(html_tracked_dir, "data", url, updated, download_items_start, num_download_items, plugin_name); + result = write_plugin_json_to_file(html_tracked_dir, "data", url, updated, download_items_start, num_download_items, plugin_name, NULL); if(result != 0) { fprintf(stderr, "Failed to create %s/data\n", html_tracked_dir); remove_recursive(html_tracked_dir); @@ -382,7 +382,7 @@ static int int_min(int a, int b) { return a < b ? a : b; } -static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *download_items_buffer, TrackedHtml *tracked_html, char *html_tracked_dir, const char *download_dir) { +static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *download_items_buffer, TrackedHtml *tracked_html, char *html_tracked_dir, const char *download_dir, bool show_error_notifications) { const char *home_dir = get_home_dir(); char download_finished_script[PATH_MAX]; @@ -426,8 +426,10 @@ static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *d if(result != 0) fprintf(stderr, "Failed while downloading html, url: %s\n", download_items_it->link); - const char *notify_args[] = { "notify-send", "-a", "automedia", "-u", result == 0 ? "normal" : "critical", "-t", "10000", "--", result == 0 ? "Download finished" : "Download failed", notify_msg, NULL }; - program_exec(notify_args, NULL, NULL); + if(result == 0 || show_error_notifications) { + const char *notify_args[] = { "notify-send", "-a", "automedia", "-u", result == 0 ? "low" : "critical", "-t", "5000", "--", result == 0 ? "Download finished" : "Download failed", notify_msg, NULL }; + program_exec(notify_args, NULL, NULL); + } if(result != 0) break; @@ -447,16 +449,15 @@ static int download_html_items_in_reverse(const char *plugin_filepath, Buffer *d TrackedItem tracked_item; tracked_item.title = tracked_html->title; - tracked_item.link = tracked_html->link; tracked_item.json_data = tracked_html->json_data; - result = tracked_item_update_latest(&tracked_item, html_tracked_dir, added_download_items, NULL, timestamps, download_item_index); + result = tracked_item_update_latest(&tracked_item, html_tracked_dir, added_download_items, NULL, timestamps, download_item_index, NULL); buffer_deinit(&json_element_buffer); return result; } /* TODO: Make asynchronous. 
Right now this will only complete when the whole chapter download completes */ -int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download_dir, char *html_config_dir) { +int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download_dir, char *html_config_dir, bool show_error_notifications) { /* TODO: This can be cached */ int html_config_dir_len = strlen(html_config_dir); @@ -487,7 +488,7 @@ int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download char *html_tracked_dir = html_config_dir; strcat(html_tracked_dir, "/tracked/"); - result = download_html_items_in_reverse(plugin_filepath, &download_items_buffer, tracked_html, html_tracked_dir, download_dir); + result = download_html_items_in_reverse(plugin_filepath, &download_items_buffer, tracked_html, html_tracked_dir, download_dir, show_error_notifications); if(result != 0) { fprintf(stderr, "Failed while download html item for url: %s\n", tracked_html->link); goto cleanup; @@ -1,7 +1,10 @@ #ifndef HTML_H #define HTML_H +#include <stdbool.h> + typedef struct cJSON cJSON; +typedef struct fallback fallback; typedef struct { char *plugin; @@ -12,6 +15,6 @@ typedef struct { /* Modifies @html_config_dir */ int add_html(const char *name, const char *url, char *html_config_dir, char *program_dir, const char *start_after); -int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download_dir, char *html_config_dir); +int sync_html(TrackedHtml *tracked_html, char *program_dir, const char *download_dir, char *html_config_dir, bool show_error_notifications); #endif @@ -10,6 +10,7 @@ #include "rss_html_common.h" #include "html.h" #include "track_remove_parser.h" +#include "fallback.h" #include "../depends/cJSON.h" #include "alloc.h" @@ -59,11 +60,12 @@ static void usage_add(void) { } static void usage_sync(void) { - fprintf(stderr, "usage: automedia sync <download_dir>\n"); + fprintf(stderr, "usage: automedia sync [--no-error-notification] <download_dir>\n"); fprintf(stderr, "OPTIONS\n"); - fprintf(stderr, " download_dir The path where media should be downloaded to\n"); + fprintf(stderr, " --no-error-notification Disable error notifications\n"); + fprintf(stderr, " download_dir The path where media should be downloaded to\n"); fprintf(stderr, "EXAMPLES\n"); - fprintf(stderr, " automedia sync /home/adam/Downloads/automedia\n"); + fprintf(stderr, " automedia sync /home/user/Downloads/automedia\n"); exit(1); } @@ -81,7 +83,7 @@ static void usage_cleanup(void) { fprintf(stderr, "usage: automedia cleanup [-d <days>] [search_term]\n"); fprintf(stderr, "OPTIONS\n"); fprintf(stderr, " -d <days> Media that haven't received any updates in the specified amount of days will be shown. If not specified then all media will be included\n"); - fprintf(stderr, " search_term The name of the media to find. If not inclued then all media (within -d days) will be included. Note this is case insensitive\n"); + fprintf(stderr, " search_term The name of the media to find. If not specified then all media (within -d days) will be included. 
Note this is case insensitive\n"); fprintf(stderr, "EXAMPLES\n"); fprintf(stderr, " automedia cleanup -d 100\n"); fprintf(stderr, " automedia cleanup baki\n"); @@ -322,6 +324,12 @@ static void command_add(int argc, char **argv, char *rss_config_dir, char *html_ media_url = strip(media_url); + fallback fall; + if(!fallback_load_from_file(&fall, "/usr/share/automedia/fallback.json")) { + fprintf(stderr, "Error: command_add: failed to load fallbacks file (/usr/share/automedia/fallback.json)\n"); + exit(1); + } + if(strcmp(media_type, "rss") == 0) { int res = create_directory_recursive(rss_config_dir); if(res != 0) { @@ -329,7 +337,7 @@ static void command_add(int argc, char **argv, char *rss_config_dir, char *html_ exit(1); } - if(add_rss(media_name, media_url, rss_config_dir, start_after) != 0) + if(add_rss(media_name, media_url, rss_config_dir, start_after, &fall) != 0) exit(1); } else if(strcmp(media_type, "html") == 0) { int res = create_directory_recursive(html_config_dir); @@ -344,6 +352,8 @@ static void command_add(int argc, char **argv, char *rss_config_dir, char *html_ fprintf(stderr, "type should be either rss or html\n"); usage_add(); } + + fallback_deinit(&fall); } sig_atomic_t automedia_running = 0; @@ -357,8 +367,8 @@ int is_program_running() { } /* plugin is NULL for rss */ -typedef int (*IterateTrackedItemCallback)(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, void *userdata); -static void iterate_tracked_items(char *config_dir, IterateTrackedItemCallback iterate_callback, void *userdata) { +typedef int (*IterateTrackedItemCallback)(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, fallback *fall, void *userdata); +static void iterate_tracked_items(char *config_dir, IterateTrackedItemCallback iterate_callback, fallback *fall, void *userdata) { /* TODO: Only iterate updated items. 
To make that work, sync_rss and sync_html need to be updated to update the json_object with new downloaded items (and not only temporary downloaded items) and then also only @@ -431,7 +441,7 @@ static void iterate_tracked_items(char *config_dir, IterateTrackedItemCallback i free(data_file_content); data_file_content = NULL; - if(iterate_callback(dir->d_name, link_file_content, plugin_file_content, config_dir, json_data, userdata) != 0) + if(iterate_callback(dir->d_name, link_file_content, plugin_file_content, config_dir, json_data, fall, userdata) != 0) fprintf(stderr, "Failed to sync %s\n", dir->d_name); cleanup_item: @@ -444,22 +454,24 @@ static void iterate_tracked_items(char *config_dir, IterateTrackedItemCallback i closedir(d); } -static int iterate_tracked_item_rss_callback(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, void *userdata) { +static int iterate_tracked_item_rss_callback(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, fallback *fall, void *userdata) { (void)plugin; TransmissionSession *transmission_session = userdata; TrackedRss tracked_rss; tracked_rss.title = title; tracked_rss.link = link; tracked_rss.json_data = json_data; - return sync_rss(&tracked_rss, transmission_session, config_dir); + return sync_rss(&tracked_rss, transmission_session, config_dir, fall); } typedef struct { char *program_dir; const char *download_dir; + bool show_error_notifications; } IterateHtmlItemUserdata; -static int iterate_tracked_item_html_callback(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, void *userdata) { +static int iterate_tracked_item_html_callback(char *title, char *link, char *plugin, char *config_dir, cJSON *json_data, fallback *fall, void *userdata) { + (void)fall; if(!plugin) { fprintf(stderr, "Missing plugin name for html item: %s\n", title); return -1; @@ -471,18 +483,19 @@ static int iterate_tracked_item_html_callback(char *title, char *link, char *plu tracked_html.title = title; tracked_html.link = link; tracked_html.json_data = json_data; - return sync_html(&tracked_html, iterate_html_item_userdata->program_dir, iterate_html_item_userdata->download_dir, config_dir); + return sync_html(&tracked_html, iterate_html_item_userdata->program_dir, iterate_html_item_userdata->download_dir, config_dir, iterate_html_item_userdata->show_error_notifications); } -static void sync_tracked_rss(TransmissionSession *transmission_session, char *rss_config_dir) { - iterate_tracked_items(rss_config_dir, iterate_tracked_item_rss_callback, transmission_session); +static void sync_tracked_rss(TransmissionSession *transmission_session, char *rss_config_dir, fallback *fall) { + iterate_tracked_items(rss_config_dir, iterate_tracked_item_rss_callback, fall, transmission_session); } -static void sync_tracked_html(char *html_config_dir, char *program_dir, const char *download_dir) { +static void sync_tracked_html(char *html_config_dir, char *program_dir, const char *download_dir, bool show_error_notifications) { IterateHtmlItemUserdata iterate_html_item_userdata; iterate_html_item_userdata.program_dir = program_dir; iterate_html_item_userdata.download_dir = download_dir; - iterate_tracked_items(html_config_dir, iterate_tracked_item_html_callback, &iterate_html_item_userdata); + iterate_html_item_userdata.show_error_notifications = show_error_notifications; + iterate_tracked_items(html_config_dir, iterate_tracked_item_html_callback, NULL, &iterate_html_item_userdata); } typedef struct { @@ -511,7 +524,7 @@ static void 
torrent_list_check_new_downloads_callback(int id, const char *name, if(is_finished) { if(id < unfinished_torrents->size && unfinished_torrents->items[id] == 1) { unfinished_torrents->items[id] = 0; - const char *notify_args[] = { "notify-send", "-u", "normal", "-a", "automedia", "-t", "10000", "--", "Download finished", name, NULL }; + const char *notify_args[] = { "notify-send", "-u", "low", "-a", "automedia", "-t", "5000", "--", "Download finished", name, NULL }; program_exec(notify_args, NULL, NULL); char item_path[PATH_MAX]; @@ -533,7 +546,7 @@ static void torrent_list_check_new_downloads_callback(int id, const char *name, } } -static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *program_dir, const char *download_dir, int sync_rate_sec) { +static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *program_dir, const char *download_dir, int sync_rate_sec, fallback *fall, bool show_error_notifications) { if(transmission_is_daemon_running() != 0) { if(transmission_start_daemon(download_dir) != 0) { fprintf(stderr, "Failed to start torrent daemon\n"); @@ -560,9 +573,9 @@ static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *pro transmission_list_torrents(&transmission_session, torrent_list_check_new_downloads_callback, &unfinished_torrents); /* running is set to 0 in SIGINT signal handler (ctrl+c) */ while(automedia_running) { - sync_tracked_rss(&transmission_session, rss_config_dir); - sync_tracked_html(html_config_dir, program_dir, download_dir); - fprintf(stderr, "Finished syncing rss and html. Syncing again in 15 minutes\n"); + sync_tracked_rss(&transmission_session, rss_config_dir, fall); + sync_tracked_html(html_config_dir, program_dir, download_dir, show_error_notifications); + fprintf(stderr, "Finished syncing rss and html. 
Syncing again in %d minutes\n", sync_rate_sec / 60); int check_count = 0; while(automedia_running && check_count < sync_rate_sec/check_torrent_status_rate_sec) { @@ -570,6 +583,8 @@ static void sync_rss_html(char *rss_config_dir, char *html_config_dir, char *pro sleep(check_torrent_status_rate_sec); ++check_count; } + + fallback_clear_sources_to_use(fall); } free(unfinished_torrents.items); @@ -620,13 +635,38 @@ static void command_sync(int argc, char **argv, char *rss_config_dir, char *html if(argc < 1) usage_sync(); - char *download_dir = argv[0]; + char *download_dir = NULL; + bool show_error_notifications = true; + for(int i = 0; i < argc; ++i) { + if(strncmp(argv[i], "--", 2) == 0) { + if(strcmp(argv[i], "--no-error-notification") == 0) { + show_error_notifications = false; + } else { + fprintf(stderr, "Error: invalid option to sync '%s'\n", argv[i]); + usage_sync(); + } + } else { + download_dir = argv[i]; + } + } + + if(!download_dir) { + fprintf(stderr, "Error: download_dir option not specified to sync command\n"); + usage_sync(); + } + const char automedia_pid_path[] = "/tmp/automedia.pid"; if(create_directory_recursive(download_dir) != 0) { fprintf(stderr, "Failed to create download directory %s\n", download_dir); exit(1); } + + fallback fall; + if(!fallback_load_from_file(&fall, "/usr/share/automedia/fallback.json")) { + fprintf(stderr, "Error: command_sync: failed to load fallbacks file (/usr/share/automedia/fallback.json)\n"); + exit(1); + } /* Create a pid file because we only want to allow one instance of automedia to run at once */ int pid_file = open(automedia_pid_path, O_CREAT | O_EXCL | O_SYNC | O_RDWR, 0666); @@ -677,8 +717,9 @@ static void command_sync(int argc, char **argv, char *rss_config_dir, char *html close(pid_file); const int sync_rate_sec = 15 * 60; /* every 15 min */ - sync_rss_html(rss_config_dir, html_config_dir, program_dir, download_dir, sync_rate_sec); + sync_rss_html(rss_config_dir, html_config_dir, program_dir, download_dir, sync_rate_sec, &fall, show_error_notifications); unlink(automedia_pid_path); + fallback_deinit(&fall); } static void command_downloaded(int argc, char **argv, const char *rss_config_dir, const char *html_config_dir) { @@ -220,7 +220,7 @@ static void url_escape(const char *str, char *output) { } } -static int get_rss_url_from_episode_info(const char *episode_name, EpisodeInfo *episode_info, char *rss_url) { +static int get_rss_url_from_episode_info(const char *episode_name, EpisodeInfo *episode_info, char *rss_url, fallback *fall) { char *selected_submitter = NULL; char response[512]; char group_name_escaped[1536]; @@ -247,7 +247,7 @@ static int get_rss_url_from_episode_info(const char *episode_name, EpisodeInfo * return -1; } - if(is_header_response_ok(url) == 0) { + if(is_header_response_ok(url, fall) == 0) { selected_submitter = response_str; break; } else { @@ -289,20 +289,24 @@ static int get_rss_url_from_episode_info(const char *episode_name, EpisodeInfo * url_escape(generic_name, generic_name_escaped); if(selected_submitter) - sprintf(rss_url, "https://nyaa.si/?page=rss&q=%s&c=0_0&f=0&u=%s", generic_name_escaped, group_name_escaped); + sprintf(rss_url, "https://nyaa.si/?page=rss&q=%s&u=%s", generic_name_escaped, group_name_escaped); else - sprintf(rss_url, "https://nyaa.si/?page=rss&q=%s&c=0_0&f=0", generic_name_escaped); + sprintf(rss_url, "https://nyaa.si/?page=rss&q=%s", generic_name_escaped); return 0; } -int add_rss(const char *name, char *url, char *rss_config_dir, const char *start_after) { +int add_rss(const char 
*name, char *url, char *rss_config_dir, const char *start_after, fallback *fall) { int result = 0; char rss_url[4096]; Buffer buffer; buffer_init(&buffer); - result = download_to_buffer(url, &buffer); + + Buffer download_items_buffer; + buffer_init(&download_items_buffer); + + result = download_to_buffer(url, &buffer, fall); if(result != 0) { EpisodeInfo episode_info; if(episode_info_create_from_episode_name(&episode_info, url) != 0) { @@ -310,7 +314,7 @@ int add_rss(const char *name, char *url, char *rss_config_dir, const char *start goto cleanup; } - if(get_rss_url_from_episode_info(url, &episode_info, rss_url) != 0) + if(get_rss_url_from_episode_info(url, &episode_info, rss_url, fall) != 0) goto cleanup; /* User didn't want to track rss */ @@ -325,16 +329,13 @@ int add_rss(const char *name, char *url, char *rss_config_dir, const char *start url = rss_url; buffer_clear(&buffer); - result = download_to_buffer(url, &buffer); + result = download_to_buffer(url, &buffer, fall); if(result != 0) { fprintf(stderr, "Failed to download rss: %s\n", url); goto cleanup; } } - Buffer download_items_buffer; - buffer_init(&download_items_buffer); - char *rss_title = NULL; result = parse_rss(buffer.data, &rss_title, rss_parse_add_callback, &download_items_buffer); if(result != 0) { @@ -427,7 +428,7 @@ int add_rss(const char *name, char *url, char *rss_config_dir, const char *start } size_t num_download_items = download_items_start ? (((DownloadItemsData*)buffer_end(&download_items_buffer)) - download_items_start) : 0; - result = write_plugin_json_to_file(rss_tracked_dir, "data", url, updated, download_items_start, num_download_items, NULL); + result = write_plugin_json_to_file(rss_tracked_dir, "data", url, updated, download_items_start, num_download_items, NULL, fall); if(result != 0) { fprintf(stderr, "Failed to create %s/data\n", rss_tracked_dir); remove_recursive(rss_tracked_dir); @@ -484,7 +485,7 @@ static int int_min(int a, int b) { return a < b ? 
a : b; } -static int add_torrents_in_reverse(TransmissionSession *transmission_session, Buffer *download_items_buffer, TrackedRss *tracked_rss, char *rss_tracked_dir) { +static int add_torrents_in_reverse(TransmissionSession *transmission_session, Buffer *download_items_buffer, TrackedRss *tracked_rss, char *rss_tracked_dir, fallback *fall) { int result = 0; char *torrent_names[MAX_UPDATE_ITEMS]; DownloadItemsData *added_download_items[MAX_UPDATE_ITEMS]; @@ -514,9 +515,8 @@ static int add_torrents_in_reverse(TransmissionSession *transmission_session, Bu TrackedItem tracked_item; tracked_item.title = tracked_rss->title; - tracked_item.link = tracked_rss->link; tracked_item.json_data = tracked_rss->json_data; - result = tracked_item_update_latest(&tracked_item, rss_tracked_dir, added_download_items, torrent_names, timestamps, torrent_name_index); + result = tracked_item_update_latest(&tracked_item, rss_tracked_dir, added_download_items, torrent_names, timestamps, torrent_name_index, fall); for(int i = 0; i < torrent_name_index; ++i) { free(torrent_names[i]); @@ -526,7 +526,7 @@ static int add_torrents_in_reverse(TransmissionSession *transmission_session, Bu return result; } -int sync_rss(TrackedRss *tracked_rss, TransmissionSession *transmission_session, char *rss_config_dir) { +int sync_rss(TrackedRss *tracked_rss, TransmissionSession *transmission_session, char *rss_config_dir, fallback *fall) { /* TODO: This can be cached */ int rss_config_dir_len = strlen(rss_config_dir); @@ -538,7 +538,7 @@ int sync_rss(TrackedRss *tracked_rss, TransmissionSession *transmission_session, Buffer rss_data_buffer; buffer_init(&rss_data_buffer); - result = download_to_buffer(tracked_rss->link, &rss_data_buffer); + result = download_to_buffer(tracked_rss->link, &rss_data_buffer, fall); if(result != 0) { fprintf(stderr, "Failed to download rss: %s\n", tracked_rss->link); goto cleanup; @@ -557,7 +557,7 @@ int sync_rss(TrackedRss *tracked_rss, TransmissionSession *transmission_session, char *rss_tracked_dir = rss_config_dir; strcat(rss_tracked_dir, "/tracked/"); - result = add_torrents_in_reverse(transmission_session, &download_items_buffer, tracked_rss, rss_tracked_dir); + result = add_torrents_in_reverse(transmission_session, &download_items_buffer, tracked_rss, rss_tracked_dir, fall); if(result != 0) { fprintf(stderr, "Failed while adding torrents for url: %s\n", tracked_rss->link); goto cleanup; @@ -3,6 +3,7 @@ typedef struct cJSON cJSON; struct TransmissionSession; +typedef struct fallback fallback; typedef struct { char *title; @@ -11,7 +12,7 @@ typedef struct { } TrackedRss; /* Modifies @rss_config_dir */ -int add_rss(const char *name, char *url, char *rss_config_dir, const char *start_after); -int sync_rss(TrackedRss *tracked_rss, struct TransmissionSession *transmission_session, char *rss_config_dir); +int add_rss(const char *name, char *url, char *rss_config_dir, const char *start_after, fallback *fall); +int sync_rss(TrackedRss *tracked_rss, struct TransmissionSession *transmission_session, char *rss_config_dir, fallback *fall); #endif diff --git a/src/rss_html_common.c b/src/rss_html_common.c index e5e44ab..c210452 100644 --- a/src/rss_html_common.c +++ b/src/rss_html_common.c @@ -1,4 +1,5 @@ #include "rss_html_common.h" +#include "fallback.h" #include "../depends/cJSON.h" #include "fileutils.h" #include <string.h> @@ -7,7 +8,7 @@ #include <time.h> #include <assert.h> -int write_plugin_json_to_file(const char *dir, const char *filename, const char *url, const char *updated, DownloadItemsData 
*prev_download_items, size_t num_prev_download_items, const char *plugin_name) { +int write_plugin_json_to_file(const char *dir, const char *filename, const char *url, const char *updated, DownloadItemsData *prev_download_items, size_t num_prev_download_items, const char *plugin_name, fallback *fall) { int result = 0; cJSON *json_body = cJSON_CreateObject(); @@ -35,12 +36,17 @@ int write_plugin_json_to_file(const char *dir, const char *filename, const char goto cleanup; } + char new_url[2048]; + snprintf(new_url, sizeof(new_url), "%s", prev_download_items[i].link); + if(fall) + fallback_replace_active_fallback_url_with_source_url(fall, prev_download_items[i].link, new_url, sizeof(new_url)); + char item_created_timestamp_fake[32]; snprintf(item_created_timestamp_fake, sizeof(item_created_timestamp_fake), "%ld", time_now - i); cJSON_AddStringToObject(downloaded_item_json, "title", prev_download_items[i].title); cJSON_AddStringToObject(downloaded_item_json, "time", item_created_timestamp_fake); - cJSON_AddStringToObject(downloaded_item_json, "url", prev_download_items[i].link); + cJSON_AddStringToObject(downloaded_item_json, "url", new_url); cJSON_AddBoolToObject(downloaded_item_json, "filler", 1); cJSON_AddItemToArray(downloaded_json, downloaded_item_json); @@ -72,7 +78,7 @@ static long timestamps_get_max(long *timestamps, size_t num_timestamps) { } /* TODO: If this fails in the middle, recover and update the next time somehow */ -int tracked_item_update_latest(TrackedItem *tracked_item, char *tracked_dir, DownloadItemsData **download_items, char **filenames, long *timestamps, int num_download_items) { +int tracked_item_update_latest(TrackedItem *tracked_item, char *tracked_dir, DownloadItemsData **download_items, char **filenames, long *timestamps, int num_download_items, fallback *fall) { if(num_download_items == 0) return 0; @@ -116,12 +122,17 @@ int tracked_item_update_latest(TrackedItem *tracked_item, char *tracked_dir, Dow goto cleanup; } + char new_url[2048]; + snprintf(new_url, sizeof(new_url), "%s", download_items[i]->link); + if(fall) + fallback_replace_active_fallback_url_with_source_url(fall, download_items[i]->link, new_url, sizeof(new_url)); + char timestamp[32]; snprintf(timestamp, sizeof(timestamp), "%ld", timestamps[i]); cJSON_AddStringToObject(downloaded_item_json, "title", download_items[i]->title); cJSON_AddStringToObject(downloaded_item_json, "time", timestamp); - cJSON_AddStringToObject(downloaded_item_json, "url", download_items[i]->link); + cJSON_AddStringToObject(downloaded_item_json, "url", new_url); if(filenames) cJSON_AddStringToObject(downloaded_item_json, "filename", filenames[i]); diff --git a/src/rss_html_common.h b/src/rss_html_common.h index cc22b24..88b8c3e 100644 --- a/src/rss_html_common.h +++ b/src/rss_html_common.h @@ -6,6 +6,7 @@ #define MAX_UPDATE_ITEMS 10 typedef struct cJSON cJSON; +typedef struct fallback fallback; typedef struct { const char *title; @@ -14,11 +15,10 @@ typedef struct { typedef struct { const char *title; - const char *link; cJSON *json_data; } TrackedItem; -int write_plugin_json_to_file(const char *dir, const char *filename, const char *url, const char *updated, DownloadItemsData *prev_download_items, size_t num_prev_download_items, const char *plugin_name); +int write_plugin_json_to_file(const char *dir, const char *filename, const char *url, const char *updated, DownloadItemsData *prev_download_items, size_t num_prev_download_items, const char *plugin_name, fallback *fall); /* Note: tracked_item.json_data becomes invalid after 
this call. @@ -26,6 +26,6 @@ int write_plugin_json_to_file(const char *dir, const char *filename, const char @tracked_dir is also modified and then restored at the end. @download_items and @timestamps both need to be @num_download_items long. If @filenames is not NULL, then it also has to be @num_download_items long. */ -int tracked_item_update_latest(TrackedItem *tracked_item, char *tracked_dir, DownloadItemsData **download_items, char **filenames, long *timestamps, int num_download_items); +int tracked_item_update_latest(TrackedItem *tracked_item, char *tracked_dir, DownloadItemsData **download_items, char **filenames, long *timestamps, int num_download_items, fallback *fall); #endif
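The fall parameter threaded through write_plugin_json_to_file and tracked_item_update_latest above has one purpose: URLs written to the tracked json data are mapped back to the source domain even when a mirror served this sync, so tracked items keep pointing at the canonical site. A small illustrative sketch of that round trip (the item URL is hypothetical):

/* Sketch: assumes a fallback item with source https://nyaa.si whose
   source_to_use was switched to https://nyaa.land earlier in this sync. */
#include <stdio.h>
#include "fallback.h"

void store_canonical_url(fallback *fall) {
    /* An item that was downloaded via the mirror during this sync... */
    const char *mirror_url = "https://nyaa.land/view/123"; /* hypothetical */
    char stored_url[2048];
    /* ...is written back under the source domain, so the tracked data
       still works once nyaa.si recovers */
    fallback_replace_active_fallback_url_with_source_url(fall, mirror_url,
                                                         stored_url, sizeof(stored_url));
    printf("%s\n", stored_url); /* https://nyaa.si/view/123 */
}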
\ No newline at end of file diff --git a/tools/list-missing-unwatched.py b/tools/list-missing-unwatched.py index 980c500..431054a 100755 --- a/tools/list-missing-unwatched.py +++ b/tools/list-missing-unwatched.py @@ -6,7 +6,7 @@ import sys def show_notification(title, description, urgency): print("Notification: title: %s, description: %s" % (title, description)) - process = subprocess.Popen(["notify-send", "-a", "automedia", "-t", "10000", "-u", urgency, "--", title, description]) + process = subprocess.Popen(["notify-send", "-a", "automedia", "-t", "5000", "-u", urgency, "--", title, description]) stdout, stderr = process.communicate() return process.returncode == 0 diff --git a/tools/list-unwatched.py b/tools/list-unwatched.py index 874df7b..3582745 100755 --- a/tools/list-unwatched.py +++ b/tools/list-unwatched.py @@ -5,7 +5,7 @@ import subprocess def show_notification(title, description, urgency): print("Notification: title: %s, description: %s" % (title, description)) - process = subprocess.Popen(["notify-send", "-a", "automedia", "-t", "10000", "-u", urgency, "--", title, description]) + process = subprocess.Popen(["notify-send", "-a", "automedia", "-t", "5000", "-u", urgency, "--", title, description]) stdout, stderr = process.communicate() return process.returncode == 0 diff --git a/tools/remove-watched.py b/tools/remove-watched.py new file mode 100755 index 0000000..a2a5b00 --- /dev/null +++ b/tools/remove-watched.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 + +import json +import os +import sys + +def file_get_content_json(filepath): + with open(filepath) as f: + return json.load(f) + +def get_watch_progress(time, duration): + if duration == 0: + return 0 + return time / duration + +def get_quickmedia_watched_episodes(downloads_dir): + watched_episodes = [] + local_anime_watch_progress_json = file_get_content_json(os.path.expanduser("~/.config/quickmedia/watch-progress/local-anime")) + for filename, anime_item in local_anime_watch_progress_json.items(): + episode_filepath = os.path.join(downloads_dir, filename) + time = int(anime_item["time"]) + duration = int(anime_item["duration"]) + watch_progress = get_watch_progress(time, duration) + if watch_progress >= 0.9: + watched_episodes.append(episode_filepath) + return watched_episodes + +def main(args): + if len(args) != 2: + print("usage: remove-watched.py <downloads-dir>") + print("") + print("This script removes anime episodes that you have finished watching with QuickMedia") + exit(1) + + downloads_dir = args[1] + watched_episodes = get_quickmedia_watched_episodes(downloads_dir) + + print("Files to remove:") + removable_files = [] + for watched_episode in watched_episodes: + if os.path.exists(watched_episode): + print(" %s" % watched_episode) + removable_files.append(watched_episode) + + while True: + response = input("Are you sure you want to remove the above %d episode(s)? (y/n): " % len(removable_files)) + if len(response) < 1: + continue + + if response[0] == 'Y' or response[0] == 'y': + for filepath in removable_files: + try: + os.remove(filepath) + print("Removed %s" % filepath) + except OSError: + pass + return + + if response[0] == 'N' or response[0] == 'n': + print("Cancelled deletion of episodes") + return + +main(sys.argv)
\ No newline at end of file diff --git a/tools/track.py b/tools/track.py index d56d8b3..09d94cf 100755 --- a/tools/track.py +++ b/tools/track.py @@ -7,7 +7,7 @@ import sys def show_notification(title, description, urgency): print("Notification: title: %s, description: %s" % (title, description)) - process = subprocess.Popen(["notify-send", "-a", "automedia", "-t", "10000", "-u", urgency, "--", title, description]) + process = subprocess.Popen(["notify-send", "-a", "automedia", "-t", "5000", "-u", urgency, "--", title, description]) stdout, stderr = process.communicate() return process.returncode == 0 |