added wget as an external download manager (#460)
* added wget

* unquote user agent, and create download path for wget

* autopep8

* remove unnecessary logging

Co-authored-by: AbdullahM0hamed <[email protected]>
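
The "unquote user agent" bullet refers to percent-decoding the user-agent string before it is substituted into the download command; an encoded UA would otherwise reach wget with literal %xx escapes. A minimal sketch of the idea — the value here is hypothetical, and the actual change sits in code elided from the diff below:

from urllib.parse import unquote

ua = 'Mozilla%2F5.0%20(X11%3B%20Linux%20x86_64)'  # hypothetical percent-encoded UA
print(unquote(ua))  # Mozilla/5.0 (X11; Linux x86_64)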
ArjixWasTaken and AbdullahM0hamed authored Oct 19, 2020
1 parent 1dd3c3a commit 3aea048
Showing 1 changed file with 30 additions and 11 deletions.
anime_downloader/util.py (30 additions, 11 deletions)
@@ -136,7 +136,8 @@ def primitive_search(search_results):
     click.echo(table, err=True)

     while True:
-        val = click.prompt('Enter the anime no: ', type=int, default=1, err=True)
+        val = click.prompt('Enter the anime no: ',
+                           type=int, default=1, err=True)
         try:
             return search_results[val - 1]
         except IndexError:
@@ -207,7 +208,8 @@ def print_episodeurl(episode):
     #     print(episode.source().stream_url + "?referer=" + episode.source().referer)
     # else:
     # Currently I don't know of a way to specify referer in url itself so leaving it here.
-    url = episode.url if episode.url.startswith("magnet") else episode.source().stream_url
+    url = episode.url if episode.url.startswith(
+        "magnet") else episode.source().stream_url
     print(unquote(url))


@@ -269,7 +271,8 @@ def format_command(cmd, episode, file_format, speed_limit, path):
     log_levels = ['debug', 'info', 'notice', 'warn', 'error']
     log_level = Config['dl']['aria2c_log_level'].lower()
     if log_level not in log_levels:
-        logger.warn('Invalid logging level "{}", defaulting to "error".'.format(log_level))
+        logger.warn(
+            'Invalid logging level "{}", defaulting to "error".'.format(log_level))
         logger.debug('Possible levels: {}.'.format(log_levels))
         log_level = 'error'

@@ -279,7 +282,8 @@ def format_command(cmd, episode, file_format, speed_limit, path):
         '--stream-piece-selector=inorder --min-split-size=5M --referer={referer} '
         '--check-certificate=false --user-agent={useragent} --max-overall-download-limit={speed_limit} '
         '--console-log-level={log_level}',
-        '{idm}': 'idman.exe /n /d {stream_url} /p {download_dir} /f {file_format}.mp4'
+        '{idm}': 'idman.exe /n /d {stream_url} /p {download_dir} /f {file_format}.mp4',
+        '{wget}': 'wget {stream_url} --referer={referer} --user-agent={useragent} -O {download_dir}/{file_format}.mp4 -c'
     }

     # Allows for passing the user agent with self.headers in the site.
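
These templates are plain str.format strings: format_command splits them on spaces and then formats each piece from rep_dict (see the hunk at line 308 below). A sketch of how the new {wget} entry expands, with all values hypothetical:

template = ('wget {stream_url} --referer={referer} --user-agent={useragent} '
            '-O {download_dir}/{file_format}.mp4 -c')

rep_dict = {  # all values hypothetical
    'stream_url': 'https://cdn.example.com/ep01.mp4',
    'referer': 'https://example.com/watch/1',
    'useragent': 'Mozilla/5.0',
    'download_dir': '/home/user/anime/Naruto',
    'file_format': 'Naruto_01',
}

cmd = [part.format(**rep_dict) for part in template.split(' ')]
# ['wget', 'https://cdn.example.com/ep01.mp4',
#  '--referer=https://example.com/watch/1', '--user-agent=Mozilla/5.0',
#  '-O', '/home/user/anime/Naruto/Naruto_01.mp4', '-c']

Because the split happens before formatting, a substituted value may itself contain spaces (a full user-agent string, say) and still land in a single argv entry.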
@@ -299,6 +303,15 @@ def format_command(cmd, episode, file_format, speed_limit, path):
         'log_level': log_level
     }

+    if cmd == "{wget}":
+        # Create the directory if it doesn't exist
+        make_dir(
+            f"{rep_dict['download_dir']}/{os.path.dirname(format_filename(rep_dict['file_format'], episode))}")
+        path_string = file_format.replace('\\', '/').split('/')
+        rep_dict['file_format'] = path_string.pop(-1)
+        path_string = '/'.join(path_string)
+        rep_dict['download_dir'] = os.path.join(path, path_string)
+
     if cmd == "{idm}":
         rep_dict['file_format'] = rep_dict['file_format'].replace('/', '\\')

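The wget branch above splits any subdirectory out of file_format, so the directory wget writes into exists and the -O target is a bare filename under it. A dry run of that path logic, with hypothetical values:

import os

path = '/home/user/anime'         # hypothetical base download path
file_format = 'Naruto/Naruto_01'  # the format string may contain a subdirectory

path_string = file_format.replace('\\', '/').split('/')  # ['Naruto', 'Naruto_01']
file_format = path_string.pop(-1)                         # 'Naruto_01'
download_dir = os.path.join(path, '/'.join(path_string))  # '/home/user/anime/Naruto'

# The {wget} template then downloads to {download_dir}/{file_format}.mp4,
# i.e. /home/user/anime/Naruto/Naruto_01.mp4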
@@ -308,6 +321,7 @@ def format_command(cmd, episode, file_format, speed_limit, path):
     cmd = cmd.split(' ')
     cmd = [c.format(**rep_dict) for c in cmd]
     cmd = [format_filename(c, episode) for c in cmd]
+
     return cmd


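format_command returns an argv-style list, ready to be handed to a process launcher without shell quoting. A usage sketch — the real call site is outside this diff, so everything here other than format_command's signature is an assumption:

import subprocess

# episode, file_format, speed_limit and path as bound elsewhere in util.py
cmd = format_command('{wget}', episode, file_format, speed_limit, path)
subprocess.call(cmd)  # sketch; the actual caller may differ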
@@ -364,8 +378,10 @@ def make_dir(path):
 def get_filler_episodes(query):
     def search_filler_episodes(query, page):
         url = 'https://animefillerlist.com/search/node/'
-        search_results = helpers.soupify(helpers.get(url + query, params={'page': page})).select('h3.title > a')
-        urls = [a.get('href') for a in search_results if a.get('href').split('/')[-2] == 'shows']
+        search_results = helpers.soupify(helpers.get(
+            url + query, params={'page': page})).select('h3.title > a')
+        urls = [a.get('href') for a in search_results if a.get(
+            'href').split('/')[-2] == 'shows']
         search_results = [
             [
                 search_results[a].text]
@@ -378,7 +394,8 @@ def search_filler_episodes(query, page):

     for a in range(5):  # Max 5 pages, could be done using the pager element
         search_results, urls = search_filler_episodes(query, a)
-        if urls == prev and not (len(urls) == 0 or a == 0):  # stops the loop if the same site is visited twice
+        # stops the loop if the same site is visited twice
+        if urls == prev and not (len(urls) == 0 or a == 0):
             break
         prev = urls[:]
@@ -387,14 +404,16 @@ def search_filler_episodes(query, page):
     for c in urls:
         urls_list.append(c)

-    [results_list[a].insert(0, a + 1)for a in range(len(results_list))]  # inserts numbers
+    [results_list[a].insert(0, a + 1)
+     for a in range(len(results_list))]  # inserts numbers

     headers = ["SlNo", "Title"]
     table = tabulate(results_list, headers, tablefmt='psql')
     table = '\n'.join(table.split('\n')[::-1])

     click.echo(table)
-    val = click.prompt('Enter the filler-anime no (0 to cancel): ', type=int, default=1, err=True)
+    val = click.prompt(
+        'Enter the filler-anime no (0 to cancel): ', type=int, default=1, err=True)
     if val == 0:
         return False

@@ -421,7 +440,8 @@ def search_filler_episodes(query, page):
         return episodes

     except:
-        logger.warn("Can't get filler episodes. Will download all specified episodes.")
+        logger.warn(
+            "Can't get filler episodes. Will download all specified episodes.")
         return False


@@ -434,4 +454,3 @@ def type_cast_value(self, ctx, value):
             return ast.literal_eval(value)
         except:
             raise click.BadParameter(value)
-