Browse Source

first commit

Your Name 3 years ago
commit
42bec97ebb
2 changed files with 222 additions and 0 deletions
  1. 41 0
      README.md
  2. 181 0
      megadown

+ 41 - 0
README.md

@@ -0,0 +1,41 @@
+# badown
+The name is a short for bash-downloader.
+This script can download files from mediafire, zippyshare & mega (file & folder).
+It fully supports folder downloads on mega.
+Furthermore, you can limit the download bandwidth with the option `-s 70K` or `--speed 1M`.
+
+### dependencies
+It requires:
+* bash (tested with 4.4), 
+* wget (tested with 1.19), 
+* gzip (tested with 1.8),
+* awk  (tested with 4.1),
+* openssl aes-128-(cbc, ecb & ctr)
+* (coreutils).
+
+### usage
+To execute the script, give it execute permission.  
+`chmod +x badown`
+  
+To download a file:  
+`./badown 'https://mega.nz/#F!NogxFaIK!PavsMkUPQSXJ_o5zwCs5Ew'`  
+`./badown 'https://mega.nz/folder/NogxFaIK#PavsMkUPQSXJ_o5zwCs5Ew'`  
+`./badown 'https://mega.nz/#!RnQFkTYS!rFIJp7MBKxcS-Po8okSSoykR17KpIGV7xcXNZvpx38I'`  
+`./badown 'https://mega.nz/file/RnQFkTYS#rFIJp7MBKxcS-Po8okSSoykR17KpIGV7xcXNZvpx38I'`  
+`./badown 'https://www.mediafire.com/file/jbbbncd27n5mukh/test.zip'`  
+`./badown 'https://www74.zippyshare.com/v/WjE4KUUF/file.html'`  
+It is unzipped for the first link.  
+Those links refer to the same zipped test folder.  
+And might be dead for inactivity, however they will stay here for the syntax.  
+  
+To download from a file with a lot of urls just use a loop for:  
+`for i in $(cat urls); do ./badown $i && sleep .5; done`  
+
+### todo
+- Find a more stable solution for Zippyshare.   
+(There are ~5 lines of JavaScript code that need to be interpreted in order to produce a number.  
+This number protects the download link and needs to be computed.  
+The formula to get this number changes quite often.)
+- Add more sites and resume paused download.  
+- Extend mega function with specific file in folder download.  
+- Add proxy support, else as an option or as a function with automatic grabber.  

+ 181 - 0
megadown

@@ -0,0 +1,181 @@
+#!/bin/bash
+
function url_str {
   # Convert a URL-safe base64 string to standard base64 alphabet:
   # '-' -> '+', '_' -> '/', and drop any stray ','.
   local s=${1//-/+}
   s=${s//_//}
   echo "${s//,/}"
}
function json_req {
   # POST a JSON payload ($1) to the mega API and print the raw
   # response on stdout; $2 is the query string appended to the URL.
   local payload=$1 query=$2
   wget --quiet \
      --header='Content-Type:application/json' \
      --post-data="$payload" \
      --output-document=- \
      "https://g.api.mega.co.nz/cs$query"
}
function key_solver {
   # Decode a base64 key ($1) and print it as one continuous hex string.
   printf '%s' "$1" \
      | base64 --decode --ignore-garbage 2> /dev/null \
      | xxd -p \
      | tr -d '\n'
}
function json_post {
   # Extract the value of field $1 from quote-delimited JSON text.
   #
   # Callers frequently pass the JSON unquoted (json_post 'k' $response),
   # so it arrives word-split across $2..$N. The original implementation
   # echoed only $2, silently dropping everything after the first
   # whitespace; re-join all remaining arguments instead.
   #
   # Numeric fields (t, s, ts) follow their name inside the same
   # quote-split token (e.g. `"t":1,`), so strip punctuation from the
   # next field; string values sit two tokens later (between quotes).
   local field=$1
   shift
   echo "$*" \
      | awk -v c="$field" -F'"' '{
	   for (i = 1; i <= NF; i++)
	      if ($i == c) {
		 if (c == "t" || c == "s" || c == "ts") {
		    gsub(/[[:punct:]]/, "", $(i+1))
		    print $(i+1)
		 } else {
		    print $(i+2)
		 }
	      }
	}'
}
function key_dec {
   # Decrypt a node key ($1) with AES-128-ECB using the shared folder
   # key held in the global $key; print the result re-encoded as base64.
   local hexkey
   hexkey=$(key_solver "$(url_str $key)")
   url_str $1 \
      | openssl enc -a -d -A -aes-128-ecb -K "$hexkey" -iv "00000000000000000000000000000000" -nopad \
      2> /dev/null \
      | base64
}
function size {
   # Despite the name, this pads a base64 string ($1) with '='
   # characters until its length is a multiple of 4 (mega strips
   # base64 padding from keys/attributes in its URLs and responses).
   local s=$1
   local missing=$(( (4 - ${#s} % 4) % 4 ))
   while (( missing > 0 )); do
      s+="="
      (( missing-- ))
   done
   echo "$s"
}
function meta_dec_key {
   # Derive the attribute-decryption material from a 64-hex-char node
   # key ($1). Sets globals:
   #   meta_key - XOR of the two 128-bit halves, as 32 hex chars
   #   meta_iv  - third 64-bit word followed by 16 zero hex chars
   local hi lo
   hi=$(( 0x${1:0:16} ^ 0x${1:32:16} ))
   lo=$(( 0x${1:16:16} ^ 0x${1:48:16} ))
   meta_key=$(printf '%016x%016x' "$hi" "$lo")
   meta_iv="${1:32:16}0000000000000000"
}
function meta_dec {
   # Decrypt base64-encoded attribute data ($2) with AES-128-CBC using
   # hex key $1 and an all-zero IV, then strip NUL padding bytes.
   #
   # Fix: the stderr redirect was attached to the `tr` stage, so
   # openssl's bad-decrypt/padding noise leaked to the terminal; move
   # it onto openssl (matching key_dec's pattern).
   echo -n "$2" \
      | openssl enc -a -A -d -aes-128-cbc -K "$1" -iv "00000000000000000000000000000000" -nopad \
      2> /dev/null \
      | tr -d '\0'
}
function mega_link_vars {
   # Parse a mega URL ($1) into globals:
   #   fld - leading part of the link (used to tell folder from file)
   #   id  - node/folder handle
   #   key - decryption key segment
   #   fid - file handle inside a folder link (only when present)
   local link=$1
   if [[ "$link" == *"/#"* ]]; then
      # Legacy format: https://mega.nz/#!id!key  or  /#F!id!key
      fld=${link%%!*}
      id=$(echo $link | awk -F'!' '{print $2}')
      key=$(echo $link | awk -F'!' '{print $3}')
   elif [[ "$link" == *"/folder"*"/file"* ]]; then
      # New format pointing at one file inside a folder.
      local node
      fld=$(echo $link | awk '{gsub(/(folder\/).*/,"folder/");print}')
      node=$(echo $link | awk -F'/' '{print $(NF-2)}')
      id=${node%%#*}
      key=$(echo $node | awk -F'#' '{print $2}')
      fid=${link##*/}
   else
      # New format: https://mega.nz/file/id#key  or  /folder/id#key
      local tail=${link##*/}
      fld=$(echo $link | awk '{gsub(/[^\/]*$/,"");print}')
      id=${tail%%#*}
      key=$(echo $tail | awk -F'#' '{print $2}')
   fi
}
function file_downdec {
   # Download $1 into "$2" via a temp file, then decrypt it in place
   # with AES-128-CTR ($3 = hex key, $4 = hex IV).
   #
   # Fix: removed four leftover debug `echo`s that printed the URL and
   # the AES key/IV to stdout (leaking key material into logs), and
   # replaced the useless `cat | openssl` with a plain redirect.
   # $speed is intentionally unquoted: it is empty or expands to the
   # two words "--limit-rate <N>" for wget.
   wget -O "$2".tmp $speed -q --show-progress "$1"
   openssl enc -d -aes-128-ctr -K "$3" -iv "$4" \
      < "$2".tmp \
      > "$2"
   rm -f "$2".tmp
}
function file_down {
   # Download $1 into "$2", staging through a ".tmp" file so an
   # interrupted transfer never leaves a half-written target behind.
   # $speed is intentionally unquoted (empty or "--limit-rate <N>").
   local target=$2
   wget -O "$target".tmp $speed -q --show-progress "$1"
   mv "$target".tmp "$target"
}
function tree_gen {
   # Recursively rebuild the decrypted path for a folder node.
   # Arguments: $1 = node handle to locate, $2 = upper bound for the scan.
   # Reads the parallel-array globals filled in by mega(): names,
   # parents, keys, attrs. Writes globals: file_name, path (each
   # decrypted ancestor name is appended to $path, root first).
   local i=0
   # Linear scan for the entry whose handle matches $1.
   while [[ $i -lt $2 ]] && ! [[ ${names[i]} == "$1" ]]; do
      let i++
   done
   if ! [[ $i == $2 ]]; then
      # Found: recurse to the parent first so ancestors are appended
      # before this node, then decrypt this node's display name.
      tree_gen ${parents[i]} $2
      meta_dec_key "$(key_solver $(key_dec $(size ${keys[i]})))"
      file_name="$(json_post 'n' "$(meta_dec $meta_key $(size $(url_str ${attrs[i]})))")"
      path=$path/$file_name
   fi
}
function error {
   # Print "$1" to stderr as a red "error: ..." line and abort with
   # status 1.
   printf '\033[31merror\033[0m: %s\n' "$1" >&2
   exit 1
}
function mega {
   # Download a mega link ($1).
   # Folder links: fetch the node listing, rebuild the directory tree,
   # and download+decrypt every file — or only $fid when the link names
   # a single file inside a folder. Plain file links: fetch attributes,
   # then download+decrypt the one file.
   # Relies on globals set by mega_link_vars: fld, id, key, fid.
   mega_link_vars $1
   if [ "${fld: -1}" == "F" ] || [[ "$fld" == *"folder"* ]];then
      # Folder: request the complete node listing for handle $id.
      json_req '[{"a":"f","c":1,"ca":1,"r":1}]' "?id=&n=$id" > .badown.tmp
      # The API may answer gzip-compressed; sniff with file(1).
      [[ $(file .badown.tmp) == *"gzip"* ]] && response1=$(cat .badown.tmp | gunzip) || response1=$(cat .badown.tmp)
      # Parallel arrays, one slot per node in the folder listing.
      keys=($(json_post 'k' $response1 | awk -F':' '{print $2}'))
      names=($(json_post 'h' $response1 ))
      types=($(json_post 't' $response1 ))
      attrs=($(json_post 'a' $response1 ))
      sizes=($(json_post 's' $response1 ))
      parents=($(json_post 'p' $response1 ))
      for i in $(seq 0 $((${#types[@]}-1)));do
	 unset path
	 # Rebuild this node's decrypted path, then its own name.
	 tree_gen ${parents[i]} $((${#types[@]}-1))
	 meta_dec_key "$(key_solver $(key_dec $(size ${keys[i]})))"
	 file_name="$(json_post 'n' "$(meta_dec $meta_key $(size $(url_str ${attrs[i]})))")"
	 path=$path/$file_name
	 #echo -e "===\n${keys[i]}\n${names[i]}\n${types[i]}\n${attrs[i]}\n${sizes[i]}\n${parents[i]}\n"
	 #hint if specific folder is specified in names and parents hold value
	 #probably modify the init phase of mega function to carry new argument of specific folder to download
	 #maybe add if condition to test how path relate to folder we want to download
	 # No $fid: mirror the whole folder (type 1 = dir, type 0 = file).
	 if [ -z $fid ]; then
	    if [ ${types[i]} == 1 ];then
	       sleep .5;mkdir -p "$PWD$path"
	    elif [ ${types[i]} == 0 ];then
	       file_url=$(json_post 'g' $(json_req "[{\"a\":\"g\",\"g\":1,\"n\":\"${names[i]}\"}]" "?id=&n=$id"))
	       file_downdec $file_url "$file_name" $meta_key $meta_iv
	       sleep .5;mv "$file_name" "$PWD$path"
	    fi
	 else
	    # $fid given: only download the node whose handle matches.
	    [ $fid == ${names[i]} ] &&\
	       file_url=$(json_post 'g' $(json_req "[{\"a\":\"g\",\"g\":1,\"n\":\"${names[i]}\"}]" "?id=&n=$id")) &&\
	       file_downdec $file_url "$file_name" $meta_key $meta_iv
	 fi
      done
   elif [ "${fld: -1}" == "#" ] || [[ "$fld" == *"file"* ]];then
      # Single file: derive key/iv from the link key, fetch attributes
      # for the decrypted name, then fetch the download URL.
      meta_dec_key $(key_solver $(url_str $key))
      name_key=$(url_str $(json_post 'at' $(json_req "[{\"a\":\"g\", \"p\":\"$id\"}]" '?id=&ak=')))
      file_name="$(json_post 'n' "$(meta_dec $meta_key $(size $name_key))")"
      file_url=$(json_post 'g' $(json_req "[{\"a\":\"g\",\"g\":1,\"p\":\"$id\"}]" '?'))
      file_downdec $file_url "$file_name" $meta_key $meta_iv
   fi
   # Clean up the folder-listing scratch file, if it was created.
   if [ -f .badown.tmp ]; then rm .badown.tmp;fi
}
function switch {
   # Dispatch the URL ($1) to the matching downloader; anything this
   # light edition does not support falls through to the help text.
   case "$1" in
      *mega*)
	 mega "$1"
	 ;;
      *)
	 showhelp
	 exit 1
	 ;;
   esac
}
+function showhelp {
+   echo -e "megadown 0.4b"
+   echo -e "bash downloader for hostsite mega.io light edition"
+   echo -e "https://github.com/stck-lzm/badown modded by wareck"
+   echo -e "megadown [OPTION] ['URL']"
+   echo -e "\tOptions:"
+   echo -e "\t-s,\t--speed SPEED Download speed limit (integer values: 500B, 70K, 2M)."
+   echo -e "\t-h,\t--help  	 Display this help."
+   echo -e ""
+   echo -e ""
+}
# --- entry point: option parsing and dispatch -----------------------
# -s/--speed SPEED  forwarded to wget as "--limit-rate SPEED"
# -h/--help         print usage
TEMP=$(getopt -o "s:h" --long "speed:,help" -n badown -- "$@") || {
   echo "Incorrect options provided"
   exit 1
}
eval set -- "$TEMP"
while true; do
   case "$1" in
      -s|--speed) speed=" --limit-rate $2"; shift 2;;
      -h|--help) showhelp; exit 0;;   # requested help is success, not failure
      --) shift; break;;
      *) showhelp; exit 1;;
   esac
done
# A URL argument is required; previously an empty $1 fell through to switch.
if [ $# -lt 1 ]; then
   showhelp
   exit 1
fi
# Quote the URL so it survives word-splitting/globbing intact.
switch "$1"