apply fixes for scraping/downloading

ksyasuda 2022-01-03 22:12:24 -08:00
parent 9d7af52ad0
commit f814497f44

ani-cli

@@ -95,62 +95,54 @@ check_input() {
     fi
 }
 
-get_embedded_video_link() {
+get_dpage_link() {
     # get the download page url
     anime_id=$1
     ep_no=$2
     # credits to fork: https://github.com/Dink4n/ani-cli for the fix
     # dub prefix takes the value "-dub" when dub is needed else is empty
-    req_link=$(curl -s "$BASE_URL/$anime_id${dub_prefix}-$ep_no" | grep -o "404")
-    if [ "$req_link" ]; then
-        curl -s "$BASE_URL/$anime_id${dub_prefix}-episode-$ep_no" |
-            sed -n -E '
-                /^[[:space:]]*<a href="#" rel="100"/{
-                s/.*data-video="([^"]*)".*/https:\1/p
-                q
-                }'
-    else
-        curl -s "$BASE_URL/$anime_id${dub_prefix}-$ep_no" |
-            sed -n -E '
-                /^[[:space:]]*<a href="#" rel="100"/{
-                s/.*data-video="([^"]*)".*/https:\1/p
-                q
-                }'
+    anime_page=$(curl -s "$BASE_URL/$anime_id${dub_prefix}-$ep_no")
+    if [ -n "$(printf '%s' "$anime_page" | grep -o "404")" ]; then
+        anime_page=$(curl -s "$BASE_URL/$anime_id${dub_prefix}-episode-$ep_no")
     fi
-}
-
-get_links() {
-    embedded_video_url=$(get_embedded_video_link "$anime_id" "$episode")
-    episode_id=$(echo "$embedded_video_url" | grep -oE "id.+?&")
-    video_url="https://gogoplay1.com/download?${episode_id}"
-    log 'GET_LINKS() VARS:'
-    log "embedded_video_url: $embedded_video_url"
-    log "episode_id: $episode_id"
-    log "video_url: $video_url"
+
+    printf '%s' "$anime_page" |
+        sed -n -E '
+            /^[[:space:]]*<li class="dowloads">/{
+            s/.*href="([^"]*)".*/\1/p
+            q
+            }'
 }
 
 get_video_quality() {
-    get_links
-    video_quality=$(curl -s "$video_url" | grep -oE "(http|https):\/\/.*com\/cdn.*expiry=[0-9]*" | sort -V | sed 's/amp;//')
-    log "VIDEO QUALITY: $video_quality"
+    dpage_url=$1
+    video_links=$(curl -s "$dpage_url" | sed -n -E 's/.*href="([^"]*)" download>Download.*/\1/p' | sed 's/amp;//')
     case $quality in
         best)
-            play_link=$(echo "$video_quality" | sort -V | tail -n 1)
+            video_link=$(printf '%s' "$video_links" | tail -n 1)
             ;;
         worst)
-            play_link=$(echo "$video_quality" | sort -V | head -n 1)
+            video_link=$(printf '%s' "$video_links" | head -n 1)
             ;;
         *)
-            play_link=$(echo "$video_quality" | grep -oE "(http|https):\/\/.*com\/cdn.*"${quality}".*expiry=[0-9]*")
-            if [ -z "$play_link" ]; then
-                printf "$c_red%s$c_reset\n" "Current video quality is not available (defaulting to highest quality)" >&2
+            video_link=$(printf '%s' "$video_links" | grep -i "${quality}p")
+            if [ -z "$video_link" ]; then
+                err "Current video quality is not available (defaulting to highest quality)"
                 quality=best
-                play_link=$(echo "$video_quality" | sort -V | tail -n 1)
+                video_link=$(printf '%s' "$video_links" | tail -n 1)
             fi
             ;;
     esac
+    printf '%s' "$video_link"
+}
+
+get_links() {
+    dpage_url="$1"
+    video_url=$(get_video_quality "$dpage_url")
+    printf '%s' "$video_url"
 }
 
 dep_ch() {
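
Taken together, this hunk splits scraping into three small functions: get_dpage_link() pulls the download-page URL out of the episode page, get_video_quality() scrapes that page for one direct link per quality and picks one, and get_links() chains the two and prints the result. A minimal standalone sketch of the same flow follows; BASE_URL, the anime slug, and the page markup are placeholder assumptions for illustration, not values taken from the script.

#!/bin/sh
# sketch only: BASE_URL and the slug below are made-up placeholders
BASE_URL="https://gogoanime.example"
anime_id="some-show"
episode=1

# 1) episode page -> download-page link (the <li class="dowloads"> element)
dpage_link=$(curl -s "$BASE_URL/${anime_id}-episode-${episode}" |
    sed -n -E '/<li class="dowloads">/s/.*href="([^"]*)".*/\1/p' | head -n 1)

# 2) download page -> one direct link per available quality
video_links=$(curl -s "$dpage_link" |
    sed -n -E 's/.*href="([^"]*)" download>Download.*/\1/p' | sed 's/amp;//')

# 3) "best" quality is simply the last link in the list
printf '%s\n' "$video_links" | tail -n 1
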
@@ -523,10 +515,8 @@ open_episode() {
         insert_history "$anime_id" "$episode"
     fi
 
-    get_video_quality
-    log "Play link: $play_link"
-    status_code=$(curl -s -I "$play_link" | head -n 1 | cut -d ' ' -f2)
-    log "Status code: $status_code"
+    dpage_link=$(get_dpage_link "$anime_id" "$episode")
+    video_url=$(get_links "$dpage_link")
 
     if [ $half_ep -eq 1 ]; then
         episode=$temp_ep
@@ -534,19 +524,15 @@ open_episode() {
     fi
 
     if [ "$is_download" -eq 0 ]; then
-        if echo "$status_code" | grep -vE "^2.*"; then
-            printf "${c_red}\nCannot reach servers!"
-        else
-            if ps "$PID" &> /dev/null; then
-                kill "$PID"
-            fi
-            nohup $player_fn "$play_link" > /dev/null 2>&1 &
-            PID="$!"
-            printf "${c_green}\nVideo playing"
-        fi
+        kill $PID > /dev/null 2>&1
+        log "PLAYING VIDEO: $video_url"
+        log "REFERRER: $dpage_link"
+        nohup $player_fn --http-header-fields="Referer: $dpage_link" "$video_url" > /dev/null 2>&1 &
+        PID=$!
+        printf "${c_green}\nVideo playing"
     else
         log "Downloading episode $episode ..."
-        log "$play_link"
+        log "$video_url"
         # add 0 padding to the episode name
         episode=$(printf "%03d" "$episode")
         {
@@ -555,12 +541,12 @@ open_episode() {
             cd "$anime_id" || die "Could not enter subdirectory $ddir/$anime_id"
             if command -v "notify-send" > /dev/null; then
                 # ffmpeg -i "$play_link" -c copy "${episode}.mkv" > /dev/null 2>&1 &&
-                curl -L -# -C - "$play_link" -o "${anime_id}-${episode}.mp4" &&
+                curl -L -# -e "$dpage_link" -C - "$video_url" -o "${anime_id}-${episode}.mp4" &&
                     notify-send -i "$ANIWRAPPER_ICON_PATH" "Download complete for ${anime_id//-/ } - Episode: $episode" ||
                     notify-send -i "$MAISAN_ICON_PATH" "Download failed for ${anime_id//-/ } - Episode: $episode. Please retry or check your internet connection"
             else
                 # ffmpeg -i "$play_link" -c copy "${episode}.mkv" > /dev/null 2>&1 &&
-                curl -L -# -C - "$play_link" -o "${anime_id}-${episode}.mp4" &&
+                curl -L -# -e "$dpage_link" -C - "$video_url" -o "${anime_id}-${episode}.mp4" &&
                     printf "${c_green}Downloaded complete for %s - Episode: %s${c_reset}\n" "${anime_id//-/ }" "$episode" ||
                     printf "${c_red}Download failed for %s - Episode: %s, please retry or check your internet connection${c_reset}\n" "${anime_id//-/ }" "$episode"
             fi
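
Both the playback and download paths now pass the download page as the HTTP Referer (mpv's --http-header-fields flag and curl's -e flag), presumably because the host's CDN refuses direct-link requests without it. A small usage sketch outside the script, with hypothetical URLs:

# hypothetical values for illustration only
dpage_link="https://gogoplay1.com/download?id=XXXX"     # download page, used as referer
video_url="https://example-cdn.net/videos/episode.mp4"  # resolved direct link

# play with mpv, sending the Referer header
mpv --http-header-fields="Referer: $dpage_link" "$video_url"

# or download with curl: -e sets the Referer, -C - resumes, -L follows redirects
curl -L -# -e "$dpage_link" -C - "$video_url" -o episode.mp4
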