mirror of https://github.com/ksyasuda/aniwrapper.git
synced 2024-11-22 03:19:53 -08:00
cleanup code and update download function
This commit is contained in:
parent 98cddd04a0
commit 3cc43a145a
ani-cli (90 changed lines)
@@ -74,51 +74,11 @@ check_input() {
# $1: dpage_link | $2: video_url | $3: anime_id | $4: episode | $5: download_dir
download() {
    case $2 in
        *manifest*m3u8*)
            ffmpeg -loglevel error -stats -referer "$1" -i "$2" -c copy "$3/$4.mp4"
            ;;
        *m3u8*)
            progress "Fetching Metadata.."
            [ -d "$TMPDIR" ] || mkdir "$TMPDIR"
            m3u8_data="$(curl -s "$2")"
            key_uri="$(printf "%s" "$m3u8_data" | sed -nE 's/^#EXT-X-KEY.*URI="([^"]*)"/\1/p')"
            m3u8_data="$(printf "%s" "$m3u8_data" | sed "/#/d")"
            printf "%s" "$m3u8_data" | grep -q "http" || relative_url=$(printf "%s" "$2" | sed "s|[^/]*$||")
            range=$(printf "%s\n" "$m3u8_data" | wc -l)
            # Getting and 'decrypting' encryption key
            if [ -n "$key_uri" ]; then
                key=$(curl -s "$key_uri" | od -A n -t x1 | tr -d ' |\n')
                iv=$(openssl rand -hex 16)
            fi
            # Asynchronously downloading pieces to temporary directory
            inf "pieces : $range"
            for i in $(seq "$range"); do
                curl -s "${relative_url}$(printf "%s" "$m3u8_data" | sed -n "${i}p")" > "$TMPDIR/$(printf "%04d" "$i").ts" && printf "\033[2K\r \033[1;32m✓ %s / %s done" "$i" "$range" &
                jobs -p > "$JOBFILE"
                while [ "$(wc -w "$JOBFILE" | cut -d' ' -f1)" -ge 35 ]; do
                    jobs > "$JOBFILE"
                    sleep 0.05
                done
            done
            wait
            # Decrypting and concatenating the pieces
            if [ -n "$key_uri" ]; then
                progress "Decrypting and Concatenating pieces into single file.."
                for i in "$TMPDIR"/*; do
                    openssl enc -aes128 -d -K "$key" -iv "$iv" -nopad >> video.ts < "$i"
                done
            else
                progress "Concatenating pieces into single file.."
                cat "$TMPDIR"/* >> video.ts
            fi
            # cleanup and encoding
            rm -rdf "$TMPDIR" "$JOBFILE"
            inf "Encoding file to mp4 video.."
            ffmpeg -loglevel error -stats -i "video.ts" -c copy "$3/$4.mp4"
            rm -f video.ts
            ffmpeg -loglevel error -stats -referer "$1" -i "$2" -c copy "$5/$4.mp4"
            ;;
        *)
            axel -a -k -n 30 --header=Referer:"$1" "$2" -o "$5/$4.mp4"
            axel -a -k -n 10 --header=Referer:"$1" "$2" -o "$5/$4.mp4"
            ;;
    esac
}
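The commit drops the manual HLS pipeline above (curl each .ts piece through a throttled pool of background jobs, openssl-decrypt, concatenate, re-encode) in favour of a single ffmpeg call, since ffmpeg's HLS demuxer already follows the playlist, fetches any AES-128 key, and remuxes the segments. Below is a minimal sketch of that simplified flow under the same argument convention as download(); the helper name download_hls and the example URLs are illustrative, not names used by the script.

#!/bin/sh
# Minimal sketch (assumed helper name): download an HLS stream the way the
# updated download() does -- hand the m3u8 URL to ffmpeg with a Referer
# header and stream-copy the result into a single mp4.
# $1: referer page | $2: m3u8 url | $3: download dir | $4: episode number
download_hls() {
    mkdir -p "$3" || return 1
    ffmpeg -loglevel error -stats \
        -referer "$1" \
        -i "$2" \
        -c copy \
        "$3/$4.mp4"
}

# Hypothetical usage:
# download_hls "https://goload.pro/streaming.php?id=XXXX" \
#     "https://example.com/hls/master.m3u8" "$HOME/Videos" 1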
@@ -206,8 +166,6 @@ get_video_quality_m3u8() {
get_video_link() {
    dpage_url="$1"
    id=$(printf "%s" "$dpage_url" | sed -nE 's/.*id=(.*)&title.*/\1/p')
    al_links=$(printf "%s" "$al_data" | sed -e 's_:\[_\n_g' -e 's_:"_\n"_g' | sed -e 's/].*//g' -e '1,2d' | sed -n "${episode}p" | tr -d '"' | tr "," "\n")
    [ -z "$id" ] && id=$(printf "%s" "$al_links" | sed -nE 's/.*id=(.*)&title.*/\1/p')
    # multiple sed are used (regex separated by ';') for extracting only required data from response of embed url
    resp="$(curl -A "$AGENT" -s "https://goload.pro/streaming.php?id=$id" |
        sed -nE 's/.*class="container-(.*)">/\1/p ;
@@ -215,23 +173,17 @@ get_video_link() {
            s/.*class=".*videocontent-(.*)">/\1/p ;
            s/.*data-value="(.*)">.*/\1/p ;
            s/.*data-status="1".*data-video="(.*)">.*/\1/p')"
    # providers: Doodstream for default, mp4upload for downloading. For best quality use okru, for fallback use goload. Then it's a round robin of which links are returned.
    provider=2
    uname -a | grep -qE '[Aa]ndroid' && provider=3
    [ "$is_download" -eq 1 ] && provider=1
    [ "$quality" != "best" ] && provider=4
    provider=1
    [ -n "$select_provider" ] && provider="$select_provider"
    i=0
    while [ "$i" -lt 7 ] && [ -z "$result_links" ]; do
    while [ "$i" -lt 3 ] && [ -z "$result_links" ]; do
        generate_link "$provider"
        provider=$((provider % 7 + 1))
        provider=$((provider % 3 + 1))
        : $((i += 1))
    done
    if printf '%s' "$result_links" | grep -q "m3u8"; then
        IS_MP4=0
        get_video_quality_m3u8 "$result_links"
    else
        IS_MP4=1
        video_url=$(get_video_quality_mp4 "$result_links")
    fi
    unset result_links
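The provider selection above shrinks from a seven-way rotation to three providers, but the mechanism stays the same: start from a chosen index and advance with modulo so the index wraps 1 -> 2 -> 3 -> 1, stopping as soon as one provider returns links. A self-contained sketch of that round robin follows; try_provider is a hypothetical stand-in for the script's generate_link, and the URL is made up.

#!/bin/sh
# Round-robin sketch: try_provider stands in for generate_link; assume only
# provider 3 answers, to show the wrap-around.
try_provider() {
    [ "$1" -eq 3 ] && printf 'https://example.com/stream/master.m3u8\n'
}

provider=1                             # or $select_provider when it is set
i=0
result_links=""
while [ "$i" -lt 3 ] && [ -z "$result_links" ]; do
    result_links=$(try_provider "$provider")
    provider=$((provider % 3 + 1))     # 1 -> 2 -> 3 -> 1
    : $((i += 1))
done
printf 'picked links: %s\n' "$result_links"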
@@ -269,37 +221,11 @@ process_search() {
    printf "%s\n" "$search_results"
}

# searches on gogoanime (instead of gogoplay) because they index english titles
extended_search() {
    indexing_url=$(curl -s -L -o /dev/null -w "%{url_effective}\n" https://gogoanime.cm)
    search=$(printf '%s' "$1" | tr ' ' '-')
    curl -s "$indexing_url//search.html" -G -d "keyword=$search" |
        sed -n -E 's_^[[:space:]]*<a href="/category/([^"]*)" title="([^"]*)".*_\1_p'
}

episode_list() {
    data=$(curl -A "$AGENT" -s "$BASE_URL/v1/$1" | sed -nE "s/.*malid = '(.*)';/\1/p ; s_.*epslistplace.*>(.*)</div>_\1_p" | tr -d '\r')
    # extract all embed links of all episodes from data
    select_ep_result=$(printf "%s" "$data" | head -1 | tr "," "\n" | sed '/extra/d' | sed -nE 's_".*":"(.*)".*_\1_p')
    lg "Episode List: $select_ep_result"
    select_ep_result=$(curl -A "$AGENT" -s "$BASE_URL/v1/$1" | sed -nE "s_.*epslistplace.*>(.*)</div>_\1_p" | tr "," "\n" | sed -e '/extra/d' -e '/PV/d' | sed -nE 's_".*":"(.*)".*_\1_p')
    FIRST_EP_NUMBER=1
    [ -z "$select_ep_result" ] && LAST_EP_NUMBER=0 || LAST_EP_NUMBER=$(printf "%s\n" "$select_ep_result" | wc -l)
    lg "First Ep: $FIRST_EP_NUMBER, Last Ep: $LAST_EP_NUMBER"
}

# from allanime server
al_episode_list() {
    ext_id=$(printf "%s" "$data" | tail -1)
    al_server_link=$(curl -s -H "x-requested-with:XMLHttpRequest" -X POST "https://animixplay.to/api/search" -d "recomended=$ext_id" -A "$AGENT" |
        sed -nE 's_.*"AL","items":\[(.*)\]\},.*_\1_p' | tr '{|}' '\n' | sed -nE 's_"url":"(.*)",.*title.*_\1_p')
    [ -z "$al_server_link" ] && return 0
    progress "(Allanime) Searching Episodes.."
    if printf "%s" "$selection_id" | grep -q "dub"; then
        al_server_link=$(printf "%s" "$al_server_link" | grep "dub" | head -1)
    else
        al_server_link=$(printf "%s" "$al_server_link" | sed 's/-dub//' | head -1)
    fi
    al_data=$(curl -s "${BASE_URL}${al_server_link}" -A "$AGENT" | sed -nE 's_.*epslistplace.*>(.*)</div>_\1_p')
    lg "First Ep #: $FIRST_EP_NUMBER | Last Ep #: $LAST_EP_NUMBER"
}

open_episode() {
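The reworked episode_list() pulls the epslistplace blob in a single request, reduces it to one embed link per line, and derives the episode range from a line count. A small sketch of that tr/sed/wc pipeline on a made-up payload; the sample string here is illustrative, not real API output.

#!/bin/sh
# Sketch of the episode-list reduction: split the blob on commas, drop
# extra/PV entries, keep only the quoted link values, then count lines.
sample='{"1":"//goload.pro/streaming.php?id=ep1","2":"//goload.pro/streaming.php?id=ep2","extra":"//goload.pro/streaming.php?id=pv"}'

select_ep_result=$(printf "%s" "$sample" | tr "," "\n" |
    sed -e '/extra/d' -e '/PV/d' |
    sed -nE 's_".*":"(.*)".*_\1_p')

FIRST_EP_NUMBER=1
[ -z "$select_ep_result" ] && LAST_EP_NUMBER=0 ||
    LAST_EP_NUMBER=$(printf "%s\n" "$select_ep_result" | wc -l)

printf '%s\n' "$select_ep_result"                               # one link per line
printf 'episodes: %s-%s\n' "$FIRST_EP_NUMBER" "$LAST_EP_NUMBER" # episodes: 1-2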
@@ -389,7 +315,6 @@ stream() {
        selection_id="$anime_id"
        insert_history "search" "$anime_id" &
        episode_list "$anime_id"
        al_episode_list
    else
        search_results=$(process_search $query) # want word splitting to account for both input cases
        [ -z "$search_results" ] && die
@@ -673,7 +598,6 @@ main() {
            # get everything after -episode-
            ep_choice_start="${selection##*/ep}"
            episode_list "$selection_id"
            al_episode_list
            ;;
    esac

@@ -93,7 +93,6 @@ anime_selection() {
    lg "Selection: $selection_id"
    progress "(Gogoanime) Searching Episodes.."
    episode_list "$selection_id"
    al_episode_list
    return 0
}

@@ -112,7 +112,6 @@ anime_selection() {
    lg "Selection: $selection_id"
    progress "(Gogoanime) Searching Episodes.."
    episode_list "$selection_id"
    al_episode_list
    return 0
}
