dropbox_uploader.sh [ NEEDED FOR DROPBOX SHELL ]
Posted by v1ral_ITS on Jun 15th, 2018 (Bash)
  1. #!/usr/bin/env bash
  2. #
  3. # Dropbox Uploader
  4. #
  5. # Copyright (C) 2010-2017 Andrea Fabrizi <andrea.fabrizi@gmail.com>
  6. #
  7. # This program is free software; you can redistribute it and/or modify
  8. # it under the terms of the GNU General Public License as published by
  9. # the Free Software Foundation; either version 2 of the License, or
  10. # (at your option) any later version.
  11. #
  12. # This program is distributed in the hope that it will be useful,
  13. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  15. # GNU General Public License for more details.
  16. #
  17. # You should have received a copy of the GNU General Public License
  18. # along with this program; if not, write to the Free Software
  19. # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
  20. #
  21.  
  22. #Default configuration file
  23. CONFIG_FILE=~/.dropbox_uploader
  24.  
  25. #Default chunk size in MB for the upload process
  26. #It is recommended to increase this value only if you have enough free space on your /tmp partition
  27. #Lower values may increase the number of HTTP requests
  28. CHUNK_SIZE=50
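#A quick sanity check of how CHUNK_SIZE maps to requests (illustrative numbers,
#not taken from a real transfer): with CHUNK_SIZE=50, a 1 GB file is uploaded in
#ceil(1024/50) = 21 chunks, i.e. 21 append calls plus one upload_session start
#and one finish call; CHUNK_SIZE=150 would cut that to 7 chunks, but needs up to
#150 MB of free space in TMP_DIR for the temporary chunk file.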
  29.  
  30. #Curl location
  31. #If not set, curl will be searched for in the $PATH
  32. #CURL_BIN="/usr/bin/curl"
  33.  
  34. #Default values
  35. TMP_DIR="./tmp"
  36. DEBUG=0
  37. QUIET=0
  38. SHOW_PROGRESSBAR=0
  39. SKIP_EXISTING_FILES=0
  40. ERROR_STATUS=0
  41. EXCLUDE=()
  42.  
  43. #Don't edit these...
  44. API_LONGPOLL_FOLDER="https://notify.dropboxapi.com/2/files/list_folder/longpoll"
  45. API_CHUNKED_UPLOAD_START_URL="https://content.dropboxapi.com/2/files/upload_session/start"
  46. API_CHUNKED_UPLOAD_FINISH_URL="https://content.dropboxapi.com/2/files/upload_session/finish"
  47. API_CHUNKED_UPLOAD_APPEND_URL="https://content.dropboxapi.com/2/files/upload_session/append_v2"
  48. API_UPLOAD_URL="https://content.dropboxapi.com/2/files/upload"
  49. API_DOWNLOAD_URL="https://content.dropboxapi.com/2/files/download"
  50. API_DELETE_URL="https://api.dropboxapi.com/2/files/delete"
  51. API_MOVE_URL="https://api.dropboxapi.com/2/files/move"
  52. API_COPY_URL="https://api.dropboxapi.com/2/files/copy"
  53. API_METADATA_URL="https://api.dropboxapi.com/2/files/get_metadata"
  54. API_LIST_FOLDER_URL="https://api.dropboxapi.com/2/files/list_folder"
  55. API_LIST_FOLDER_CONTINUE_URL="https://api.dropboxapi.com/2/files/list_folder/continue"
  56. API_ACCOUNT_INFO_URL="https://api.dropboxapi.com/2/users/get_current_account"
  57. API_ACCOUNT_SPACE_URL="https://api.dropboxapi.com/2/users/get_space_usage"
  58. API_MKDIR_URL="https://api.dropboxapi.com/2/files/create_folder"
  59. API_SHARE_URL="https://api.dropboxapi.com/2/sharing/create_shared_link_with_settings"
  60. API_SHARE_LIST="https://api.dropboxapi.com/2/sharing/list_shared_links"
  61. API_SAVEURL_URL="https://api.dropboxapi.com/2/files/save_url"
  62. API_SAVEURL_JOBSTATUS_URL="https://api.dropboxapi.com/2/files/save_url/check_job_status"
  63. API_SEARCH_URL="https://api.dropboxapi.com/2/files/search"
  64. APP_CREATE_URL="https://www.dropbox.com/developers/apps"
  65. RESPONSE_FILE="$TMP_DIR/du_resp_$RANDOM"
  66. CHUNK_FILE="$TMP_DIR/du_chunk_$RANDOM"
  67. TEMP_FILE="$TMP_DIR/du_tmp_$RANDOM"
  68. BIN_DEPS="sed basename date grep stat dd mkdir"
  69. VERSION="1.0"
  70.  
  71. umask 077
  72.  
  73. #Check the shell
  74. if [ -z "$BASH_VERSION" ]; then
  75.     echo -e "Error: this script requires the BASH shell!"
  76.     exit 1
  77. fi
  78.  
  79. shopt -s nullglob #Bash allows filename patterns which match no files to expand to a null string, rather than themselves
  80. shopt -s dotglob  #Bash includes filenames beginning with a "." in the results of filename expansion
  81.  
  82. #Check temp folder
  83. if [[ ! -d "$TMP_DIR" ]]; then
  84.     echo -e "Error: the temporary folder $TMP_DIR doesn't exist!"
  85.     echo -e "Please edit this script and set the TMP_DIR variable to a valid temporary folder."
  86.     exit 1
  87. fi
  88.  
  89. #Look for optional config file parameter
  90. while getopts ":qpskdhf:x:" opt; do
  91.     case $opt in
  92.  
  93.     f)
  94.       CONFIG_FILE=$OPTARG
  95.     ;;
  96.  
  97.     d)
  98.       DEBUG=1
  99.     ;;
  100.  
  101.     q)
  102.       QUIET=1
  103.     ;;
  104.  
  105.     p)
  106.       SHOW_PROGRESSBAR=1
  107.     ;;
  108.  
  109.     k)
  110.       CURL_ACCEPT_CERTIFICATES="-k"
  111.     ;;
  112.  
  113.     s)
  114.       SKIP_EXISTING_FILES=1
  115.     ;;
  116.  
  117.     h)
  118.       HUMAN_READABLE_SIZE=1
  119.     ;;
  120.  
  121.     x)
  122.       EXCLUDE+=( "$OPTARG" )
  123.     ;;
  124.  
  125.     \?)
  126.       echo "Invalid option: -$OPTARG" >&2
  127.       exit 1
  128.     ;;
  129.  
  130.     :)
  131.       echo "Option -$OPTARG requires an argument." >&2
  132.       exit 1
  133.     ;;
  134.  
  135.   esac
  136. done
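#Illustrative invocations (file and folder names are hypothetical); options must
#come before the command:
#  ./dropbox_uploader.sh -p upload ./backup.tar.gz /backups/
#  ./dropbox_uploader.sh -q -f /etc/dropbox_uploader.conf list /
#  ./dropbox_uploader.sh -s -x .git upload ./project /projects/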
  137.  
  138. if [[ $DEBUG != 0 ]]; then
  139.     echo $VERSION
  140.     uname -a 2> /dev/null
  141.     cat /etc/issue 2> /dev/null
  142.     set -x
  143.     RESPONSE_FILE="$TMP_DIR/du_resp_debug"
  144. fi
  145.  
  146. if [[ $CURL_BIN == "" ]]; then
  147.     BIN_DEPS="$BIN_DEPS curl"
  148.     CURL_BIN="curl"
  149. fi
  150.  
  151. #Dependencies check
  152. which $BIN_DEPS > /dev/null
  153. if [[ $? != 0 ]]; then
  154.     for i in $BIN_DEPS; do
  155.         which $i > /dev/null ||
  156.             NOT_FOUND="$i $NOT_FOUND"
  157.     done
  158.     echo -e "Error: Required program(s) could not be found: $NOT_FOUND"
  159.     exit 1
  160. fi
  161.  
  162. #Check if readlink is installed and supports the -m option
  163. #It's not necessary, so no problem if it's not installed
  164. which readlink > /dev/null
  165. if [[ $? == 0 && $(readlink -m "//test" 2> /dev/null) == "/test" ]]; then
  166.     HAVE_READLINK=1
  167. else
  168.     HAVE_READLINK=0
  169. fi
  170.  
  171. #Force the use of the builtin printf when it's available, because it behaves better;
  172. #otherwise the external printf program will be used.
  173. #Note that the external printf command can cause character encoding issues!
  174. builtin printf "" 2> /dev/null
  175. if [[ $? == 0 ]]; then
  176.     PRINTF="builtin printf"
  177.     PRINTF_OPT="-v o"
  178. else
  179.     PRINTF=$(which printf)
  180.     if [[ $? != 0 ]]; then
  181.         echo -e "Error: Required program could not be found: printf"
  182.     fi
  183.     PRINTF_OPT=""
  184. fi
  185.  
  186. #Print the message based on $QUIET variable
  187. function print
  188. {
  189.     if [[ $QUIET == 0 ]]; then
  190.         echo -ne "$1";
  191.     fi
  192. }
  193.  
  194. #Returns unix timestamp
  195. function utime
  196. {
  197.     date '+%s'
  198. }
  199.  
  200. #Remove temporary files
  201. function remove_temp_files
  202. {
  203.     if [[ $DEBUG == 0 ]]; then
  204.         rm -fr "$RESPONSE_FILE"
  205.         rm -fr "$CHUNK_FILE"
  206.         rm -fr "$TEMP_FILE"
  207.     fi
  208. }
  209.  
  210. #Converts bytes to human readable format
  211. function convert_bytes
  212. {
  213.     if [[ $HUMAN_READABLE_SIZE == 1 && "$1" != "" ]]; then
  214.         if (($1 > 1073741824));then
  215.             echo $(($1/1073741824)).$(($1%1073741824/100000000))"G";
  216.         elif (($1 > 1048576));then
  217.             echo $(($1/1048576)).$(($1%1048576/100000))"M";
  218.         elif (($1 > 1024));then
  219.             echo $(($1/1024)).$(($1%1024/100))"K";
  220.         else
  221.             echo $1;
  222.         fi
  223.     else
  224.         echo $1;
  225.     fi
  226. }
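#Worked example (assuming the -h flag was given): convert_bytes 1500000 prints
#"1.4M", since 1500000/1048576 = 1 and (1500000%1048576)/100000 = 4; without -h
#the raw byte count is echoed unchanged.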
  227.  
  228. #Returns the file size in bytes
  229. function file_size
  230. {
  231.     #Generic GNU
  232.     SIZE=$(stat --format="%s" "$1" 2> /dev/null)
  233.     if [ $? -eq 0 ]; then
  234.         echo $SIZE
  235.         return
  236.     fi
  237.  
  238.     #Some embedded linux devices
  239.     SIZE=$(stat -c "%s" "$1" 2> /dev/null)
  240.     if [ $? -eq 0 ]; then
  241.         echo $SIZE
  242.         return
  243.     fi
  244.  
  245.     #BSD, OSX and other OSs
  246.     SIZE=$(stat -f "%z" "$1" 2> /dev/null)
  247.     if [ $? -eq 0 ]; then
  248.         echo $SIZE
  249.         return
  250.     fi
  251.  
  252.     echo "0"
  253. }
  254.  
  255.  
  256. #Usage
  257. function usage
  258. {
  259.     echo -e "Dropbox Uploader v$VERSION"
  260.     echo -e "Andrea Fabrizi - andrea.fabrizi@gmail.com\n"
  261.     echo -e "Usage: $0 [PARAMETERS] COMMAND..."
  262.     echo -e "\nCommands:"
  263.  
  264.     echo -e "\t upload   <LOCAL_FILE/DIR ...>  <REMOTE_FILE/DIR>"
  265.     echo -e "\t download <REMOTE_FILE/DIR> [LOCAL_FILE/DIR]"
  266.     echo -e "\t delete   <REMOTE_FILE/DIR>"
  267.     echo -e "\t move     <REMOTE_FILE/DIR> <REMOTE_FILE/DIR>"
  268.     echo -e "\t copy     <REMOTE_FILE/DIR> <REMOTE_FILE/DIR>"
  269.     echo -e "\t mkdir    <REMOTE_DIR>"
  270.     echo -e "\t list     [REMOTE_DIR]"
  271.     echo -e "\t monitor  [REMOTE_DIR] [TIMEOUT]"
  272.     echo -e "\t share    <REMOTE_FILE>"
  273.     echo -e "\t saveurl  <URL> <REMOTE_DIR>"
  274.     echo -e "\t search   <QUERY>"
  275.     echo -e "\t info"
  276.     echo -e "\t space"
  277.     echo -e "\t unlink"
  278.  
  279.     echo -e "\nOptional parameters:"
  280.     echo -e "\t-f <FILENAME> Load the configuration file from a specific file"
  281.     echo -e "\t-s            Skip files that already exist when downloading/uploading. Default: overwrite"
  282.     echo -e "\t-d            Enable DEBUG mode"
  283.     echo -e "\t-q            Quiet mode. Don't show messages"
  284.     echo -e "\t-h            Show file sizes in human readable format"
  285.     echo -e "\t-p            Show cURL progress meter"
  286.     echo -e "\t-k            Don't verify SSL certificates (insecure)"
  287.     echo -e "\t-x            Exclude files or directories from syncing; can be repeated, e.g. -x filename -x directoryname (example: -x .git)"
  288.  
  289.     echo -en "\nFor more info and examples, please see the README file.\n\n"
  290.     remove_temp_files
  291.     exit 1
  292. }
  293.  
  294. #Check the curl exit code
  295. function check_http_response
  296. {
  297.     CODE=$?
  298.  
  299.     #Checking curl exit code
  300.     case $CODE in
  301.  
  302.         #OK
  303.         0)
  304.  
  305.         ;;
  306.  
  307.         #Proxy error
  308.         5)
  309.             print "\nError: Couldn't resolve proxy. The given proxy host could not be resolved.\n"
  310.  
  311.             remove_temp_files
  312.             exit 1
  313.         ;;
  314.  
  315.         #Missing CA certificates
  316.         60|58|77)
  317.             print "\nError: cURL is not able to perform peer SSL certificate verification.\n"
  318.             print "Please install the default ca-certificates bundle.\n"
  319.             print "To do this in a Debian/Ubuntu based system, try:\n"
  320.             print "  sudo apt-get install ca-certificates\n\n"
  321.             print "If the problem persists, try to use the -k option (insecure).\n"
  322.  
  323.             remove_temp_files
  324.             exit 1
  325.         ;;
  326.  
  327.         6)
  328.             print "\nError: Couldn't resolve host.\n"
  329.  
  330.             remove_temp_files
  331.             exit 1
  332.         ;;
  333.  
  334.         7)
  335.             print "\nError: Couldn't connect to host.\n"
  336.  
  337.             remove_temp_files
  338.             exit 1
  339.         ;;
  340.  
  341.     esac
  342.  
  343.     #Checking response file for generic errors
  344.     if grep -q "HTTP/1.1 400" "$RESPONSE_FILE"; then
  345.         ERROR_MSG=$(sed -n -e 's/{"error": "\([^"]*\)"}/\1/p' "$RESPONSE_FILE")
  346.  
  347.         case $ERROR_MSG in
  348.              *access?attempt?failed?because?this?app?is?not?configured?to?have*)
  349.                 echo -e "\nError: The Permission type/Access level configured doesn't match the DropBox App settings!\nPlease run \"$0 unlink\" and try again."
  350.                 exit 1
  351.             ;;
  352.         esac
  353.  
  354.     fi
  355.  
  356. }
  357.  
  358. #Urlencode
  359. function urlencode
  360. {
  361.     #The printf is necessary to correctly decode unicode sequences
  362.     local string=$($PRINTF "${1}")
  363.     local strlen=${#string}
  364.     local encoded=""
  365.  
  366.     for (( pos=0 ; pos<strlen ; pos++ )); do
  367.         c=${string:$pos:1}
  368.         case "$c" in
  369.             [-_.~a-zA-Z0-9] ) o="${c}" ;;
  370.             * ) $PRINTF $PRINTF_OPT '%%%02x' "'$c"
  371.         esac
  372.         encoded="${encoded}${o}"
  373.     done
  374.  
  375.     echo "$encoded"
  376. }
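#Worked example (hypothetical input): urlencode "my file+1.txt" yields
#"my%20file%2b1.txt": unreserved characters [-_.~a-zA-Z0-9] pass through
#unchanged, everything else becomes a %XX escape of its character code.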
  377.  
  378. function normalize_path
  379. {
  380.     #The printf is necessary to correctly decode unicode sequences
  381.     path=$($PRINTF "${1//\/\///}")
  382.     if [[ $HAVE_READLINK == 1 ]]; then
  383.         new_path=$(readlink -m "$path")
  384.  
  385.         #Adding back the final slash, if present in the source
  386.         if [[ ${path: -1} == "/" && ${#path} -gt 1 ]]; then
  387.             new_path="$new_path/"
  388.         fi
  389.  
  390.         echo "$new_path"
  391.     else
  392.         echo "$path"
  393.     fi
  394. }
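#Worked example (hypothetical path, assuming readlink -m is available):
#normalize_path "/foo//bar/../baz/" first collapses "//" to "/", readlink -m
#resolves it to "/foo/baz", and the trailing slash is restored, giving
#"/foo/baz/".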
  395.  
  396. #Check if it's a file or directory
  397. #Returns FILE/DIR/ERR
  398. function db_stat
  399. {
  400.     local FILE=$(normalize_path "$1")
  401.  
  402.     if [[ $FILE == "/" ]]; then
  403.         echo "DIR"
  404.         return
  405.     fi
  406.  
  407.     #Checking if it's a file or a directory
  408.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE\"}" "$API_METADATA_URL" 2> /dev/null
  409.     check_http_response
  410.  
  411.     local TYPE=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
  412.  
  413.     case $TYPE in
  414.  
  415.         file)
  416.             echo "FILE"
  417.         ;;
  418.  
  419.         folder)
  420.             echo "DIR"
  421.         ;;
  422.  
  423.         deleted)
  424.             echo "ERR"
  425.         ;;
  426.  
  427.         *)
  428.             echo "ERR"
  429.         ;;
  430.  
  431.     esac
  432. }
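#Illustrative mapping (response bodies are hypothetical): a get_metadata reply
#beginning with {".tag": "file", ...} prints FILE, one beginning with
#{".tag": "folder", ...} prints DIR, and anything else (including deleted
#entries or an error) prints ERR.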
  433.  
  434. #Generic upload wrapper around db_upload_file and db_upload_dir functions
  435. #$1 = Local source file/dir
  436. #$2 = Remote destination file/dir
  437. function db_upload
  438. {
  439.     local SRC=$(normalize_path "$1")
  440.     local DST=$(normalize_path "$2")
  441.  
  442.     for j in "${EXCLUDE[@]}"
  443.         do :
  444.             if [[ $(echo "$SRC" | grep "$j" | wc -l) -gt 0 ]]; then
  445.                 print "Skipping excluded file/dir: $j\n"
  446.                 return
  447.             fi
  448.     done
  449.  
  450.     #Checking if the file/dir exists
  451.     if [[ ! -e $SRC && ! -d $SRC ]]; then
  452.         print " > No such file or directory: $SRC\n"
  453.         ERROR_STATUS=1
  454.         return
  455.     fi
  456.  
  457.     #Checking if the file/dir has read permissions
  458.     if [[ ! -r $SRC ]]; then
  459.         print " > Error reading file $SRC: permission denied\n"
  460.         ERROR_STATUS=1
  461.         return
  462.     fi
  463.  
  464.     TYPE=$(db_stat "$DST")
  465.  
  466.     #If DST is a file, leave it as it is: that's the default behaviour
  467.     if [[ $TYPE == "FILE" ]]; then
  468.         DST="$DST"
  469.  
  470.     #If DST doesn't exist and doesn't end with a /, it will be the destination file name
  471.     elif [[ $TYPE == "ERR" && "${DST: -1}" != "/" ]]; then
  472.         DST="$DST"
  473.  
  474.     #If DST doesn't exist and ends with a /, it will be the destination folder
  475.     elif [[ $TYPE == "ERR" && "${DST: -1}" == "/" ]]; then
  476.         local filename=$(basename "$SRC")
  477.         DST="$DST/$filename"
  478.  
  479.     #If DST is a directory, it will be the destination folder
  480.     elif [[ $TYPE == "DIR" ]]; then
  481.         local filename=$(basename "$SRC")
  482.         DST="$DST/$filename"
  483.     fi
  484.  
  485.     #It's a directory
  486.     if [[ -d $SRC ]]; then
  487.         db_upload_dir "$SRC" "$DST"
  488.  
  489.     #It's a file
  490.     elif [[ -e $SRC ]]; then
  491.         db_upload_file "$SRC" "$DST"
  492.  
  493.     #Unsupported object...
  494.     else
  495.         print " > Skipping non-regular file \"$SRC\"\n"
  496.     fi
  497. }
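#Destination resolution, by example (paths are hypothetical):
#  db_upload ./photo.jpg /pics      -> /pics/photo.jpg  (remote folder exists)
#  db_upload ./photo.jpg /pics/     -> /pics/photo.jpg  (missing path ending in /)
#  db_upload ./photo.jpg /new.jpg   -> /new.jpg         (missing path, no trailing /)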
  498.  
  499. #Generic upload wrapper around db_chunked_upload_file and db_simple_upload_file
  500. #The final upload function will be chosen based on the file size
  501. #$1 = Local source file
  502. #$2 = Remote destination file
  503. function db_upload_file
  504. {
  505.     local FILE_SRC=$(normalize_path "$1")
  506.     local FILE_DST=$(normalize_path "$2")
  507.  
  508.     shopt -s nocasematch
  509.  
  510.     #Checking for disallowed file names
  511.     basefile_dst=$(basename "$FILE_DST")
  512.     if [[ $basefile_dst == "thumbs.db" || \
  513.           $basefile_dst == "desktop.ini" || \
  514.           $basefile_dst == ".ds_store" || \
  515.           $basefile_dst == "icon\r" || \
  516.           $basefile_dst == ".dropbox" || \
  517.           $basefile_dst == ".dropbox.attr" \
  518.        ]]; then
  519.         print " > Skipping disallowed file name \"$FILE_DST\"\n"
  520.         return
  521.     fi
  522.  
  523.     shopt -u nocasematch
  524.  
  525.     #Checking file size
  526.     FILE_SIZE=$(file_size "$FILE_SRC")
  527.  
  528.     #Checking if the file already exists
  529.     TYPE=$(db_stat "$FILE_DST")
  530.     if [[ $TYPE != "ERR" && $SKIP_EXISTING_FILES == 1 ]]; then
  531.         print " > Skipping already existing file \"$FILE_DST\"\n"
  532.         return
  533.     fi
  534.  
  535.     # Checking whether the remote file already has the same checksum
  536.     if [[ $TYPE != "ERR" ]]; then
  537.         sha_src=$(db_sha_local "$FILE_SRC")
  538.         sha_dst=$(db_sha "$FILE_DST")
  539.         if [[ $sha_src == $sha_dst && $sha_src != "ERR" ]]; then
  540.             print "> Skipping file \"$FILE_SRC\", file exists with the same hash\n"
  541.             return
  542.         fi
  543.     fi
  544.  
  545.     if [[ $FILE_SIZE -gt 157286000 ]]; then
  546.         #If the file is larger than ~150 MB, the chunked upload API will be used
  547.         db_chunked_upload_file "$FILE_SRC" "$FILE_DST"
  548.     else
  549.         db_simple_upload_file "$FILE_SRC" "$FILE_DST"
  550.     fi
  551.  
  552. }
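#Example of the size split (illustrative): a 10 MB file is sent with a single
#/files/upload request, while a 200 MB file exceeds the ~150 MB threshold and
#goes through the upload_session start/append_v2/finish endpoints instead.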
  553.  
  554. #Simple file upload
  555. #$1 = Local source file
  556. #$2 = Remote destination file
  557. function db_simple_upload_file
  558. {
  559.     local FILE_SRC=$(normalize_path "$1")
  560.     local FILE_DST=$(normalize_path "$2")
  561.  
  562.     if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then
  563.         CURL_PARAMETERS="--progress-bar"
  564.         LINE_CR="\n"
  565.     else
  566.         CURL_PARAMETERS="-L -s"
  567.         LINE_CR=""
  568.     fi
  569.  
  570.     print " > Uploading \"$FILE_SRC\" to \"$FILE_DST\"... $LINE_CR"
  571.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES $CURL_PARAMETERS -X POST -i --globoff -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"path\": \"$FILE_DST\",\"mode\": \"overwrite\",\"autorename\": true,\"mute\": false}" --header "Content-Type: application/octet-stream" --data-binary @"$FILE_SRC" "$API_UPLOAD_URL"
  572.     check_http_response
  573.  
  574.     #Check
  575.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  576.         print "DONE\n"
  577.     else
  578.         print "FAILED\n"
  579.         print "An error occurred requesting /upload\n"
  580.         ERROR_STATUS=1
  581.     fi
  582. }
  583.  
  584. #Chunked file upload
  585. #$1 = Local source file
  586. #$2 = Remote destination file
  587. function db_chunked_upload_file
  588. {
  589.     local FILE_SRC=$(normalize_path "$1")
  590.     local FILE_DST=$(normalize_path "$2")
  591.  
  592.  
  593.     if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then
  594.         VERBOSE=1
  595.         CURL_PARAMETERS="--progress-bar"
  596.     else
  597.         VERBOSE=0
  598.         CURL_PARAMETERS="-L -s"
  599.     fi
  600.  
  601.  
  602.  
  603.     local FILE_SIZE=$(file_size "$FILE_SRC")
  604.     local OFFSET=0
  605.     local UPLOAD_ID=""
  606.     local UPLOAD_ERROR=0
  607.     local CHUNK_PARAMS=""
  608.  
  609.     ## Ceil division
  610.     let NUMBEROFCHUNK=($FILE_SIZE/1024/1024+$CHUNK_SIZE-1)/$CHUNK_SIZE
  611.  
  612.     if [[ $VERBOSE == 1 ]]; then
  613.         print " > Uploading \"$FILE_SRC\" to \"$FILE_DST\" by $NUMBEROFCHUNK chunks ...\n"
  614.     else
  615.         print " > Uploading \"$FILE_SRC\" to \"$FILE_DST\" by $NUMBEROFCHUNK chunks "
  616.     fi
  617.  
  618.     #Starting a new upload session
  619.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"close\": false}" --header "Content-Type: application/octet-stream" --data-binary @/dev/null "$API_CHUNKED_UPLOAD_START_URL" 2> /dev/null
  620.     check_http_response
  621.  
  622.     SESSION_ID=$(sed -n 's/{"session_id": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
  623.  
  624.     chunkNumber=1
  625.     #Uploading chunks...
  626.     while ([[ $OFFSET != "$FILE_SIZE" ]]); do
  627.  
  628.         let OFFSET_MB=$OFFSET/1024/1024
  629.  
  630.         #Create the chunk
  631.         dd if="$FILE_SRC" of="$CHUNK_FILE" bs=1048576 skip=$OFFSET_MB count=$CHUNK_SIZE 2> /dev/null
  632.         local CHUNK_REAL_SIZE=$(file_size "$CHUNK_FILE")
  633.  
  634.         if [[ $VERBOSE == 1 ]]; then
  635.             print " >> Uploading chunk $chunkNumber of $NUMBEROFCHUNK\n"
  636.         fi
  637.  
  638.         #Uploading the chunk...
  639.         echo > "$RESPONSE_FILE"
  640.         $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST $CURL_PARAMETERS --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"cursor\": {\"session_id\": \"$SESSION_ID\",\"offset\": $OFFSET},\"close\": false}" --header "Content-Type: application/octet-stream" --data-binary @"$CHUNK_FILE" "$API_CHUNKED_UPLOAD_APPEND_URL"
  641.         #check_http_response not needed, because we have to retry the request in case of error
  642.  
  643.         #Check
  644.         if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  645.             let OFFSET=$OFFSET+$CHUNK_REAL_SIZE
  646.             UPLOAD_ERROR=0
  647.             if [[ $VERBOSE != 1 ]]; then
  648.                 print "."
  649.             fi
  650.             ((chunkNumber=chunkNumber+1))
  651.         else
  652.             if [[ $VERBOSE != 1 ]]; then
  653.                 print "*"
  654.             fi
  655.             let UPLOAD_ERROR=$UPLOAD_ERROR+1
  656.  
  657.             #On error, the upload is retried for max 3 times
  658.             if [[ $UPLOAD_ERROR -gt 2 ]]; then
  659.                 print " FAILED\n"
  660.                 print "An error occurred requesting /chunked_upload\n"
  661.                 ERROR_STATUS=1
  662.                 return
  663.             fi
  664.         fi
  665.  
  666.     done
  667.  
  668.     UPLOAD_ERROR=0
  669.  
  670.     #Commit the upload
  671.     while (true); do
  672.  
  673.         echo > "$RESPONSE_FILE"
  674.         $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"cursor\": {\"session_id\": \"$SESSION_ID\",\"offset\": $OFFSET},\"commit\": {\"path\": \"$FILE_DST\",\"mode\": \"overwrite\",\"autorename\": true,\"mute\": false}}" --header "Content-Type: application/octet-stream" --data-binary @/dev/null "$API_CHUNKED_UPLOAD_FINISH_URL" 2> /dev/null
  675.         #check_http_response not needed, because we have to retry the request in case of error
  676.  
  677.         #Check
  678.         if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  679.             UPLOAD_ERROR=0
  680.             break
  681.         else
  682.             print "*"
  683.             let UPLOAD_ERROR=$UPLOAD_ERROR+1
  684.  
  685.             #On error, the commit is retried for max 3 times
  686.             if [[ $UPLOAD_ERROR -gt 2 ]]; then
  687.                 print " FAILED\n"
  688.                 print "An error occurred requesting /commit_chunked_upload\n"
  689.                 ERROR_STATUS=1
  690.                 return
  691.             fi
  692.         fi
  693.  
  694.     done
  695.  
  696.     print " DONE\n"
  697. }
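#Worked example of the chunk math (illustrative sizes): for a 250 MB file with
#CHUNK_SIZE=50, NUMBEROFCHUNK = (250 + 50 - 1) / 50 = 5; each iteration dd-copies
#up to 50 MiB into CHUNK_FILE, OFFSET advances by the chunk's real size, and a
#failing append or finish call is retried, giving up after 3 consecutive errors.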
  698.  
  699. #Directory upload
  700. #$1 = Local source dir
  701. #$2 = Remote destination dir
  702. function db_upload_dir
  703. {
  704.     local DIR_SRC=$(normalize_path "$1")
  705.     local DIR_DST=$(normalize_path "$2")
  706.  
  707.     #Creating the remote directory
  708.     db_mkdir "$DIR_DST"
  709.  
  710.     for file in "$DIR_SRC/"*; do
  711.         db_upload "$file" "$DIR_DST"
  712.     done
  713. }
  714.  
  715. #Generic download wrapper
  716. #$1 = Remote source file/dir
  717. #$2 = Local destination file/dir
  718. function db_download
  719. {
  720.     local SRC=$(normalize_path "$1")
  721.     local DST=$(normalize_path "$2")
  722.  
  723.     TYPE=$(db_stat "$SRC")
  724.  
  725.     #It's a directory
  726.     if [[ $TYPE == "DIR" ]]; then
  727.  
  728.         #If the DST folder is not specified, assume the current directory
  729.         if [[ $DST == "" ]]; then
  730.             DST="."
  731.         fi
  732.  
  733.         #Checking if the destination directory exists
  734.         if [[ ! -d $DST ]]; then
  735.             local basedir=""
  736.         else
  737.             local basedir=$(basename "$SRC")
  738.         fi
  739.  
  740.         local DEST_DIR=$(normalize_path "$DST/$basedir")
  741.         print " > Downloading folder \"$SRC\" to \"$DEST_DIR\"... \n"
  742.  
  743.         if [[ ! -d "$DEST_DIR" ]]; then
  744.             print " > Creating local directory \"$DEST_DIR\"... "
  745.             mkdir -p "$DEST_DIR"
  746.  
  747.             #Check
  748.             if [[ $? == 0 ]]; then
  749.                 print "DONE\n"
  750.             else
  751.                 print "FAILED\n"
  752.                 ERROR_STATUS=1
  753.                 return
  754.             fi
  755.         fi
  756.  
  757.         if [[ $SRC == "/" ]]; then
  758.             SRC_REQ=""
  759.         else
  760.             SRC_REQ="$SRC"
  761.         fi
  762.  
  763.         OUT_FILE=$(db_list_outfile "$SRC_REQ")
  764.         if [ $? -ne 0 ]; then
  765.             # When db_list_outfile fails, OUT_FILE contains the error message
  766.             print "$OUT_FILE\n"
  767.             ERROR_STATUS=1
  768.             return
  769.         fi
  770.  
  771.         #For each entry...
  772.         while read -r line; do
  773.  
  774.             local FILE=${line%:*}
  775.             local META=${line##*:}
  776.             local TYPE=${META%;*}
  777.             local SIZE=${META#*;}
  778.  
  779.             #Removing unneeded /
  780.             FILE=${FILE##*/}
  781.  
  782.             if [[ $TYPE == "file" ]]; then
  783.                 db_download_file "$SRC/$FILE" "$DEST_DIR/$FILE"
  784.             elif [[ $TYPE == "folder" ]]; then
  785.                 db_download "$SRC/$FILE" "$DEST_DIR"
  786.             fi
  787.  
  788.         done < $OUT_FILE
  789.  
  790.         rm -fr $OUT_FILE
  791.  
  792.     #It's a file
  793.     elif [[ $TYPE == "FILE" ]]; then
  794.  
  795.         #Checking DST
  796.         if [[ $DST == "" ]]; then
  797.             DST=$(basename "$SRC")
  798.         fi
  799.  
  800.         #If the destination is a directory, the file will be downloaded into it
  801.         if [[ -d $DST ]]; then
  802.             DST="$DST/$SRC"
  803.         fi
  804.  
  805.         db_download_file "$SRC" "$DST"
  806.  
  807.     #Doesn't exist
  808.     else
  809.         print " > No such file or directory: $SRC\n"
  810.         ERROR_STATUS=1
  811.         return
  812.     fi
  813. }
  814.  
  815. #Simple file download
  816. #$1 = Remote source file
  817. #$2 = Local destination file
  818. function db_download_file
  819. {
  820.     local FILE_SRC=$(normalize_path "$1")
  821.     local FILE_DST=$(normalize_path "$2")
  822.  
  823.     if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then
  824.         CURL_PARAMETERS="-L --progress-bar"
  825.         LINE_CR="\n"
  826.     else
  827.         CURL_PARAMETERS="-L -s"
  828.         LINE_CR=""
  829.     fi
  830.  
  831.     #Checking if the file already exists
  832.     if [[ -e $FILE_DST && $SKIP_EXISTING_FILES == 1 ]]; then
  833.         print " > Skipping already existing file \"$FILE_DST\"\n"
  834.         return
  835.     fi
  836.  
  837.     # Checking if the file has the correct check sum
  838.     if [[ $TYPE != "ERR" ]]; then
  839.         sha_src=$(db_sha "$FILE_SRC")
  840.         sha_dst=$(db_sha_local "$FILE_DST")
  841.         if [[ $sha_src == $sha_dst && $sha_src != "ERR" ]]; then
  842.             print "> Skipping file \"$FILE_SRC\", file exists with the same hash\n"
  843.             return
  844.         fi
  845.     fi
  846.  
  847.     #Creating an empty file first, for two reasons:
  848.     #1) It lets us check whether the destination file is writable
  849.     #2) curl doesn't automatically create files of 0 byte size
  850.     dd if=/dev/zero of="$FILE_DST" count=0 2> /dev/null
  851.     if [[ $? != 0 ]]; then
  852.         print " > Error writing file $FILE_DST: permission denied\n"
  853.         ERROR_STATUS=1
  854.         return
  855.     fi
  856.  
  857.     print " > Downloading \"$FILE_SRC\" to \"$FILE_DST\"... $LINE_CR"
  858.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES $CURL_PARAMETERS -X POST --globoff -D "$RESPONSE_FILE" -o "$FILE_DST" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"path\": \"$FILE_SRC\"}" "$API_DOWNLOAD_URL"
  859.     check_http_response
  860.  
  861.     #Check
  862.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  863.         print "DONE\n"
  864.     else
  865.         print "FAILED\n"
  866.         rm -fr "$FILE_DST"
  867.         ERROR_STATUS=1
  868.         return
  869.     fi
  870. }
  871.  
  872. #Saveurl
  873. #$1 = URL
  874. #$2 = Remote file destination
  875. function db_saveurl
  876. {
  877.     local URL="$1"
  878.     local FILE_DST=$(normalize_path "$2")
  879.     local FILE_NAME=$(basename "$URL")
  880.  
  881.     print " > Downloading \"$URL\" to \"$FILE_DST\"..."
  882.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST/$FILE_NAME\", \"url\": \"$URL\"}" "$API_SAVEURL_URL" 2> /dev/null
  883.     check_http_response
  884.  
  885.     JOB_ID=$(sed -n 's/.*"async_job_id": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
  886.     if [[ $JOB_ID == "" ]]; then
  887.         print " > Error getting the job id\n"
  888.         return
  889.     fi
  890.  
  891.     #Checking the status
  892.     while (true); do
  893.  
  894.         $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"async_job_id\": \"$JOB_ID\"}" "$API_SAVEURL_JOBSTATUS_URL" 2> /dev/null
  895.         check_http_response
  896.  
  897.         STATUS=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
  898.         case $STATUS in
  899.  
  900.             in_progress)
  901.                 print "+"
  902.             ;;
  903.  
  904.             complete)
  905.                 print " DONE\n"
  906.                 break
  907.             ;;
  908.  
  909.             failed)
  910.                 print " ERROR\n"
  911.                 MESSAGE=$(sed -n 's/.*"error_summary": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
  912.                 print " > Error: $MESSAGE\n"
  913.                 break
  914.             ;;
  915.  
  916.         esac
  917.  
  918.         sleep 2
  919.  
  920.     done
  921. }
  922.  
  923. #Prints account info
  924. function db_account_info
  925. {
  926.     print "Dropbox Uploader v$VERSION\n\n"
  927.     print " > Getting info... "
  928.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" "$API_ACCOUNT_INFO_URL" 2> /dev/null
  929.     check_http_response
  930.  
  931.     #Check
  932.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  933.  
  934.         name=$(sed -n 's/.*"display_name": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
  935.         echo -e "\n\nName:\t\t$name"
  936.  
  937.         uid=$(sed -n 's/.*"account_id": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
  938.         echo -e "UID:\t\t$uid"
  939.  
  940.         email=$(sed -n 's/.*"email": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
  941.         echo -e "Email:\t\t$email"
  942.  
  943.         country=$(sed -n 's/.*"country": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
  944.         echo -e "Country:\t$country"
  945.  
  946.         echo ""
  947.  
  948.     else
  949.         print "FAILED\n"
  950.         ERROR_STATUS=1
  951.     fi
  952. }
  953.  
  954. #Prints account space usage info
  955. function db_account_space
  956. {
  957.     print "Dropbox Uploader v$VERSION\n\n"
  958.     print " > Getting space usage info... "
  959.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" "$API_ACCOUNT_SPACE_URL" 2> /dev/null
  960.     check_http_response
  961.  
  962.     #Check
  963.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  964.  
  965.         quota=$(sed -n 's/.*"allocated": \([0-9]*\).*/\1/p' "$RESPONSE_FILE")
  966.         let quota_mb=$quota/1024/1024
  967.         echo -e "\n\nQuota:\t$quota_mb Mb"
  968.  
  969.         used=$(sed -n 's/.*"used": \([0-9]*\).*/\1/p' "$RESPONSE_FILE")
  970.         let used_mb=$used/1024/1024
  971.         echo -e "Used:\t$used_mb Mb"
  972.  
  973.         let free_mb=$((quota-used))/1024/1024
  974.         echo -e "Free:\t$free_mb Mb"
  975.  
  976.         echo ""
  977.  
  978.     else
  979.         print "FAILED\n"
  980.         ERROR_STATUS=1
  981.     fi
  982. }
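#Worked example (hypothetical quota): allocated=2147483648 and used=536870912
#print "Quota: 2048 Mb", "Used: 512 Mb" and "Free: 1536 Mb", since each value is
#integer-divided by 1024*1024.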
  983.  
  984. #Account unlink
  985. function db_unlink
  986. {
  987.     echo -ne "Are you sure you want to unlink this script from your Dropbox account? [y/n]"
  988.     read -r answer
  989.     if [[ $answer == "y" ]]; then
  990.         rm -fr "$CONFIG_FILE"
  991.         echo -ne "DONE\n"
  992.     fi
  993. }
  994.  
  995. #Delete a remote file
  996. #$1 = Remote file to delete
  997. function db_delete
  998. {
  999.     local FILE_DST=$(normalize_path "$1")
  1000.  
  1001.     print " > Deleting \"$FILE_DST\"... "
  1002.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST\"}" "$API_DELETE_URL" 2> /dev/null
  1003.     check_http_response
  1004.  
  1005.     #Check
  1006.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  1007.         print "DONE\n"
  1008.     else
  1009.         print "FAILED\n"
  1010.         ERROR_STATUS=1
  1011.     fi
  1012. }
  1013.  
  1014. #Move/Rename a remote file
  1015. #$1 = Remote file to rename or move
  1016. #$2 = New file name or location
  1017. function db_move
  1018. {
  1019.     local FILE_SRC=$(normalize_path "$1")
  1020.     local FILE_DST=$(normalize_path "$2")
  1021.  
  1022.     TYPE=$(db_stat "$FILE_DST")
  1023.  
  1024.     #If the destination is a directory, the source will be moved into it
  1025.     if [[ $TYPE == "DIR" ]]; then
  1026.         local filename=$(basename "$FILE_SRC")
  1027.         FILE_DST=$(normalize_path "$FILE_DST/$filename")
  1028.     fi
  1029.  
  1030.     print " > Moving \"$FILE_SRC\" to \"$FILE_DST\" ... "
  1031.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"from_path\": \"$FILE_SRC\", \"to_path\": \"$FILE_DST\"}" "$API_MOVE_URL" 2> /dev/null
  1032.     check_http_response
  1033.  
  1034.     #Check
  1035.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  1036.         print "DONE\n"
  1037.     else
  1038.         print "FAILED\n"
  1039.         ERROR_STATUS=1
  1040.     fi
  1041. }
  1042.  
  1043. #Copy a remote file to a remote location
  1044. #$1 = Remote file to rename or move
  1045. #$2 = New file name or location
  1046. function db_copy
  1047. {
  1048.     local FILE_SRC=$(normalize_path "$1")
  1049.     local FILE_DST=$(normalize_path "$2")
  1050.  
  1051.     TYPE=$(db_stat "$FILE_DST")
  1052.  
  1053.     #If the destination is a directory, the source will be copied into it
  1054.     if [[ $TYPE == "DIR" ]]; then
  1055.         local filename=$(basename "$FILE_SRC")
  1056.         FILE_DST=$(normalize_path "$FILE_DST/$filename")
  1057.     fi
  1058.  
  1059.     print " > Copying \"$FILE_SRC\" to \"$FILE_DST\" ... "
  1060.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"from_path\": \"$FILE_SRC\", \"to_path\": \"$FILE_DST\"}" "$API_COPY_URL" 2> /dev/null
  1061.     check_http_response
  1062.  
  1063.     #Check
  1064.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  1065.         print "DONE\n"
  1066.     else
  1067.         print "FAILED\n"
  1068.         ERROR_STATUS=1
  1069.     fi
  1070. }
  1071.  
  1072. #Create a new directory
  1073. #$1 = Remote directory to create
  1074. function db_mkdir
  1075. {
  1076.     local DIR_DST=$(normalize_path "$1")
  1077.  
  1078.     print " > Creating Directory \"$DIR_DST\"... "
  1079.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$DIR_DST\"}" "$API_MKDIR_URL" 2> /dev/null
  1080.     check_http_response
  1081.  
  1082.     #Check
  1083.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  1084.         print "DONE\n"
  1085.     elif grep -q "^HTTP/1.1 403 Forbidden" "$RESPONSE_FILE"; then
  1086.         print "ALREADY EXISTS\n"
  1087.     else
  1088.         print "FAILED\n"
  1089.         ERROR_STATUS=1
  1090.     fi
  1091. }
  1092.  
  1093. #List a remote folder and returns the path to the file containing the output
  1094. #$1 = Remote directory
  1095. #$2 = Cursor (Optional)
  1096. function db_list_outfile
  1097. {
  1098.  
  1099.     local DIR_DST="$1"
  1100.     local HAS_MORE="false"
  1101.     local CURSOR=""
  1102.  
  1103.     if [[ -n "$2" ]]; then
  1104.         CURSOR="$2"
  1105.         HAS_MORE="true"
  1106.     fi
  1107.  
  1108.     OUT_FILE="$TMP_DIR/du_tmp_out_$RANDOM"
  1109.  
  1110.     while (true); do
  1111.  
  1112.         if [[ $HAS_MORE == "true" ]]; then
  1113.             $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"cursor\": \"$CURSOR\"}" "$API_LIST_FOLDER_CONTINUE_URL" 2> /dev/null
  1114.         else
  1115.             $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$DIR_DST\",\"include_media_info\": false,\"include_deleted\": false,\"include_has_explicit_shared_members\": false}" "$API_LIST_FOLDER_URL" 2> /dev/null
  1116.         fi
  1117.  
  1118.         check_http_response
  1119.  
  1120.         HAS_MORE=$(sed -n 's/.*"has_more": *\([a-z]*\).*/\1/p' "$RESPONSE_FILE")
  1121.         CURSOR=$(sed -n 's/.*"cursor": *"\([^"]*\)".*/\1/p' "$RESPONSE_FILE")
  1122.  
  1123.         #Check
  1124.         if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  1125.  
  1126.             #Extracting directory content [...]
  1127.             #and replacing "}, {" with "}\n{"
  1128.             #Not the prettiest piece of code, but it seems to be the only portable way to do this with sed
  1129.             local DIR_CONTENT=$(sed -n 's/.*: \[{\(.*\)/\1/p' "$RESPONSE_FILE" | sed 's/}, *{/}\
  1130.    {/g')
  1131.  
  1132.             #Converting escaped quotes to unicode format
  1133.             echo "$DIR_CONTENT" | sed 's/\\"/\\u0022/' > "$TEMP_FILE"
  1134.  
  1135.             #Extracting files and subfolders
  1136.             while read -r line; do
  1137.  
  1138.                 local FILE=$(echo "$line" | sed -n 's/.*"path_display": *"\([^"]*\)".*/\1/p')
  1139.                 local TYPE=$(echo "$line" | sed -n 's/.*".tag": *"\([^"]*\).*/\1/p')
  1140.                 local SIZE=$(convert_bytes $(echo "$line" | sed -n 's/.*"size": *\([0-9]*\).*/\1/p'))
  1141.  
  1142.                 echo -e "$FILE:$TYPE;$SIZE" >> "$OUT_FILE"
  1143.  
  1144.             done < "$TEMP_FILE"
  1145.  
  1146.             if [[ $HAS_MORE == "false" ]]; then
  1147.                 break
  1148.             fi
  1149.  
  1150.         else
  1151.             return
  1152.         fi
  1153.  
  1154.     done
  1155.  
  1156.     echo $OUT_FILE
  1157. }
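#Each line written to OUT_FILE has the form <path>:<tag>;<size>, for example
#(hypothetical entries) "/Docs/report.pdf:file;12345" or "/Photos:folder;".
#db_list, db_download and db_monitor_nonblock parse it back with ${line%:*},
#${line##*:}, ${META%;*} and ${META#*;}.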
  1158.  
  1159. #List remote directory
  1160. #$1 = Remote directory
  1161. function db_list
  1162. {
  1163.     local DIR_DST=$(normalize_path "$1")
  1164.  
  1165.     print " > Listing \"$DIR_DST\"... "
  1166.  
  1167.     if [[ "$DIR_DST" == "/" ]]; then
  1168.         DIR_DST=""
  1169.     fi
  1170.  
  1171.     OUT_FILE=$(db_list_outfile "$DIR_DST")
  1172.     if [ -z "$OUT_FILE" ]; then
  1173.         print "FAILED\n"
  1174.         ERROR_STATUS=1
  1175.         return
  1176.     else
  1177.         print "DONE\n"
  1178.     fi
  1179.  
  1180.     #Looking for the biggest file size
  1181.     #to calculate the padding to use
  1182.     local padding=0
  1183.     while read -r line; do
  1184.         local FILE=${line%:*}
  1185.         local META=${line##*:}
  1186.         local SIZE=${META#*;}
  1187.  
  1188.         if [[ ${#SIZE} -gt $padding ]]; then
  1189.             padding=${#SIZE}
  1190.         fi
  1191.     done < "$OUT_FILE"
  1192.  
  1193.     #For each entry, printing directories...
  1194.     while read -r line; do
  1195.  
  1196.         local FILE=${line%:*}
  1197.         local META=${line##*:}
  1198.         local TYPE=${META%;*}
  1199.         local SIZE=${META#*;}
  1200.  
  1201.         #Removing unneeded /
  1202.         FILE=${FILE##*/}
  1203.  
  1204.         if [[ $TYPE == "folder" ]]; then
  1205.             FILE=$(echo -e "$FILE")
  1206.             $PRINTF " [D] %-${padding}s %s\n" "$SIZE" "$FILE"
  1207.         fi
  1208.  
  1209.     done < "$OUT_FILE"
  1210.  
  1211.     #For each entry, printing files...
  1212.     while read -r line; do
  1213.  
  1214.         local FILE=${line%:*}
  1215.         local META=${line##*:}
  1216.         local TYPE=${META%;*}
  1217.         local SIZE=${META#*;}
  1218.  
  1219.         #Removing unneeded /
  1220.         FILE=${FILE##*/}
  1221.  
  1222.         if [[ $TYPE == "file" ]]; then
  1223.             FILE=$(echo -e "$FILE")
  1224.             $PRINTF " [F] %-${padding}s %s\n" "$SIZE" "$FILE"
  1225.         fi
  1226.  
  1227.     done < "$OUT_FILE"
  1228.  
  1229.     rm -fr "$OUT_FILE"
  1230. }
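#Illustrative "list" output (names and sizes are hypothetical); folders are
#printed first, then files, with sizes padded to the widest entry:
#   [D]       Photos
#   [F] 12345 report.pdf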
  1231.  
  1232. #Longpoll remote directory only once
  1233. #$1 = Timeout
  1234. #$2 = Remote directory
  1235. function db_monitor_nonblock
  1236. {
  1237.     local TIMEOUT=$1
  1238.     local DIR_DST=$(normalize_path "$2")
  1239.  
  1240.     if [[ "$DIR_DST" == "/" ]]; then
  1241.         DIR_DST=""
  1242.     fi
  1243.  
  1244.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$DIR_DST\",\"include_media_info\": false,\"include_deleted\": false,\"include_has_explicit_shared_members\": false}" "$API_LIST_FOLDER_URL" 2> /dev/null
  1245.     check_http_response
  1246.  
  1247.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  1248.  
  1249.         local CURSOR=$(sed -n 's/.*"cursor": *"\([^"]*\)".*/\1/p' "$RESPONSE_FILE")
  1250.  
  1251.         $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Content-Type: application/json" --data "{\"cursor\": \"$CURSOR\",\"timeout\": ${TIMEOUT}}" "$API_LONGPOLL_FOLDER" 2> /dev/null
  1252.         check_http_response
  1253.  
  1254.         if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  1255.             local CHANGES=$(sed -n 's/.*"changes" *: *\([a-z]*\).*/\1/p' "$RESPONSE_FILE")
  1256.         else
  1257.             ERROR_MSG=$(grep "Error in call" "$RESPONSE_FILE")
  1258.             print "FAILED to longpoll (http error): $ERROR_MSG\n"
  1259.             ERROR_STATUS=1
  1260.             return 1
  1261.         fi
  1262.  
  1263.         if [[ -z "$CHANGES" ]]; then
  1264.             print "FAILED to longpoll (unexpected response)\n"
  1265.             ERROR_STATUS=1
  1266.             return 1
  1267.         fi
  1268.  
  1269.         if [ "$CHANGES" == "true" ]; then
  1270.  
  1271.             OUT_FILE=$(db_list_outfile "$DIR_DST" "$CURSOR")
  1272.  
  1273.             if [ -z "$OUT_FILE" ]; then
  1274.                 print "FAILED to list changes\n"
  1275.                 ERROR_STATUS=1
  1276.                 return
  1277.             fi
  1278.  
  1279.             #For each entry, printing directories...
  1280.             while read -r line; do
  1281.  
  1282.                 local FILE=${line%:*}
  1283.                 local META=${line##*:}
  1284.                 local TYPE=${META%;*}
  1285.                 local SIZE=${META#*;}
  1286.  
  1287.                 #Removing unneeded /
  1288.                 FILE=${FILE##*/}
  1289.  
  1290.                 if [[ $TYPE == "folder" ]]; then
  1291.                     FILE=$(echo -e "$FILE")
  1292.                     $PRINTF " [D] %s\n" "$FILE"
  1293.                 elif [[ $TYPE == "file" ]]; then
  1294.                     FILE=$(echo -e "$FILE")
  1295.                     $PRINTF " [F] %s %s\n" "$SIZE" "$FILE"
  1296.                 elif [[ $TYPE == "deleted" ]]; then
  1297.                     FILE=$(echo -e "$FILE")
  1298.                     $PRINTF " [-] %s\n" "$FILE"
  1299.                 fi
  1300.  
  1301.             done < "$OUT_FILE"
  1302.  
  1303.             rm -fr "$OUT_FILE"
  1304.         fi
  1305.  
  1306.     else
  1307.         ERROR_STATUS=1
  1308.         return 1
  1309.     fi
  1310.  
  1311. }
  1312.  
  1313. #Longpoll continuously remote directory
  1314. #$1 = Timeout
  1315. #$2 = Remote directory
  1316. function db_monitor
  1317. {
  1318.     local TIMEOUT=$1
  1319.     local DIR_DST=$(normalize_path "$2")
  1320.  
  1321.     while (true); do
  1322.         db_monitor_nonblock "$TIMEOUT" "$2"
  1323.     done
  1324. }
  1325.  
  1326. #Share remote file
  1327. #$1 = Remote file
  1328. function db_share
  1329. {
  1330.     local FILE_DST=$(normalize_path "$1")
  1331.  
  1332.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST\",\"settings\": {\"requested_visibility\": \"public\"}}" "$API_SHARE_URL" 2> /dev/null
  1333.     check_http_response
  1334.  
  1335.     #Check
  1336.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  1337.         print " > Share link: "
  1338.         SHARE_LINK=$(sed -n 's/.*"url": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
  1339.         echo "$SHARE_LINK"
  1340.     else
  1341.         get_Share "$FILE_DST"
  1342.     fi
  1343. }
  1344.  
  1345. #Query existing shared link
  1346. #$1 = Remote file
  1347. function get_Share
  1348. {
  1349.     local FILE_DST=$(normalize_path "$1")
  1350.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST\",\"direct_only\": true}" "$API_SHARE_LIST"
  1351.     check_http_response
  1352.  
  1353.     #Check
  1354.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  1355.         print " > Share link: "
  1356.         SHARE_LINK=$(sed -n 's/.*"url": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
  1357.         echo "$SHARE_LINK"
  1358.     else
  1359.         print "FAILED\n"
  1360.         MESSAGE=$(sed -n 's/.*"error_summary": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
  1361.         print " > Error: $MESSAGE\n"
  1362.         ERROR_STATUS=1
  1363.     fi
  1364. }
  1365.  
  1366. #Search on Dropbox
  1367. #$1 = query
  1368. function db_search
  1369. {
  1370.     local QUERY="$1"
  1371.  
  1372.     print " > Searching for \"$QUERY\"... "
  1373.  
  1374.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"\",\"query\": \"$QUERY\",\"start\": 0,\"max_results\": 1000,\"mode\": \"filename\"}" "$API_SEARCH_URL" 2> /dev/null
  1375.     check_http_response
  1376.  
  1377.     #Check
  1378.     if grep -q "^HTTP/1.1 200 OK" "$RESPONSE_FILE"; then
  1379.         print "DONE\n"
  1380.     else
  1381.         print "FAILED\n"
  1382.         ERROR_STATUS=1
  1383.     fi
  1384.  
  1385.     #Extracting directory content [...]
  1386.     #and replacing "}, {" with "}\n{"
  1387.     #Not the prettiest piece of code, but it seems to be the only portable way to do this with sed
  1388.     local DIR_CONTENT=$(sed 's/}, *{/}\
  1389. {/g' "$RESPONSE_FILE")
  1390.  
  1391.     #Converting escaped quotes to unicode format
  1392.     echo "$DIR_CONTENT" | sed 's/\\"/\\u0022/' > "$TEMP_FILE"
  1393.  
  1394.     #Extracting files and subfolders
  1395.     rm -fr "$RESPONSE_FILE"
  1396.     while read -r line; do
  1397.  
  1398.         local FILE=$(echo "$line" | sed -n 's/.*"path_display": *"\([^"]*\)".*/\1/p')
  1399.         local TYPE=$(echo "$line" | sed -n 's/.*".tag": *"\([^"]*\).*/\1/p')
  1400.         local SIZE=$(convert_bytes $(echo "$line" | sed -n 's/.*"size": *\([0-9]*\).*/\1/p'))
  1401.  
  1402.         echo -e "$FILE:$TYPE;$SIZE" >> "$RESPONSE_FILE"
  1403.  
  1404.     done < "$TEMP_FILE"
  1405.  
  1406.     #Looking for the biggest file size
  1407.     #to calculate the padding to use
  1408.     local padding=0
  1409.     while read -r line; do
  1410.         local FILE=${line%:*}
  1411.         local META=${line##*:}
  1412.         local SIZE=${META#*;}
  1413.  
  1414.         if [[ ${#SIZE} -gt $padding ]]; then
  1415.             padding=${#SIZE}
  1416.         fi
  1417.     done < "$RESPONSE_FILE"
  1418.  
  1419.     #For each entry, printing directories...
  1420.     while read -r line; do
  1421.  
  1422.         local FILE=${line%:*}
  1423.         local META=${line##*:}
  1424.         local TYPE=${META%;*}
  1425.         local SIZE=${META#*;}
  1426.  
  1427.         if [[ $TYPE == "folder" ]]; then
  1428.             FILE=$(echo -e "$FILE")
  1429.             $PRINTF " [D] %-${padding}s %s\n" "$SIZE" "$FILE"
  1430.         fi
  1431.  
  1432.     done < "$RESPONSE_FILE"
  1433.  
  1434.     #For each entry, printing files...
  1435.     while read -r line; do
  1436.  
  1437.         local FILE=${line%:*}
  1438.         local META=${line##*:}
  1439.         local TYPE=${META%;*}
  1440.         local SIZE=${META#*;}
  1441.  
  1442.         if [[ $TYPE == "file" ]]; then
  1443.             FILE=$(echo -e "$FILE")
  1444.             $PRINTF " [F] %-${padding}s %s\n" "$SIZE" "$FILE"
  1445.         fi
  1446.  
  1447.     done < "$RESPONSE_FILE"
  1448.  
  1449. }
  1450.  
  1451. #Query the sha256-dropbox-sum of a remote file
  1452. #see https://www.dropbox.com/developers/reference/content-hash for more information
  1453. #$1 = Remote file
  1454. function db_sha
  1455. {
  1456.     local FILE=$(normalize_path "$1")
  1457.  
  1458.     if [[ $FILE == "/" ]]; then
  1459.         echo "ERR"
  1460.         return
  1461.     fi
  1462.  
  1463.     #Checking if it's a file or a directory and get the sha-sum
  1464.     $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE\"}" "$API_METADATA_URL" 2> /dev/null
  1465.     check_http_response
  1466.  
  1467.     local TYPE=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE")
  1468.     if [[ $TYPE == "folder" ]]; then
  1469.         echo "ERR"
  1470.         return
  1471.     fi
  1472.  
  1473.     local SHA256=$(sed -n 's/.*"content_hash": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
  1474.     echo "$SHA256"
  1475. }
  1476.  
  1477. #Query the sha256-dropbox-sum of a local file
  1478. #see https://www.dropbox.com/developers/reference/content-hash for more information
  1479. #$1 = Local file
  1480. function db_sha_local
  1481. {
  1482.     local FILE=$(normalize_path "$1")
  1483.     local FILE_SIZE=$(file_size "$FILE")
  1484.     local OFFSET=0
  1485.     local SKIP=0
  1486.     local SHA_CONCAT=""
  1487.  
  1488.     which shasum > /dev/null
  1489.     if [[ $? != 0 ]]; then
  1490.         echo "ERR"
  1491.         return
  1492.     fi
  1493.  
  1494.     while ([[ $OFFSET -lt "$FILE_SIZE" ]]); do
  1495.         dd if="$FILE" of="$CHUNK_FILE" bs=4194304 skip=$SKIP count=1 2> /dev/null
  1496.         local SHA=$(shasum -a 256 "$CHUNK_FILE" | awk '{print $1}')
  1497.         SHA_CONCAT="${SHA_CONCAT}${SHA}"
  1498.  
  1499.         let OFFSET=$OFFSET+4194304
  1500.         let SKIP=$SKIP+1
  1501.     done
  1502.  
  1503.     shaHex=$(echo $SHA_CONCAT | sed 's/\([0-9A-F]\{2\}\)/\\x\1/gI')
  1504.     echo -ne $shaHex | shasum -a 256 | awk '{print $1}'
  1505. }
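#How the local hash is built (mirrors the Dropbox content-hash scheme referenced
#above): the file is read in 4 MiB blocks, each block is hashed with SHA-256,
#the hex digests are concatenated, converted back to raw bytes and hashed once
#more with SHA-256. E.g. a 10 MiB file yields 3 block hashes (4 + 4 + 2 MiB)
#whose combined digest is compared against the remote "content_hash" field.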
  1506.  
  1507. ################
  1508. #### SETUP  ####
  1509. ################
  1510.  
  1511. #CHECKING FOR AUTH FILE
  1512. if [[ -e $CONFIG_FILE ]]; then
  1513.  
  1514.     #Loading data... and converting the old config format if necessary.
  1515.     source "$CONFIG_FILE" 2>/dev/null || {
  1516.         sed -i'' 's/:/=/' "$CONFIG_FILE" && source "$CONFIG_FILE" 2>/dev/null
  1517.     }
  1518.  
  1519.     #Checking if it's still a v1 API configuration file
  1520.     if [[ $APPKEY != "" || $APPSECRET != "" ]]; then
  1521.         echo -ne "The config file contains the old, deprecated v1 OAuth tokens.\n"
  1522.         echo -ne "Please run the script again and follow the configuration wizard. The old configuration file has been backed up to $CONFIG_FILE.old\n"
  1523.         mv "$CONFIG_FILE" "$CONFIG_FILE".old
  1524.         exit 1
  1525.     fi
  1526.  
  1527.     #Checking loaded data
  1528.     if [[ $OAUTH_ACCESS_TOKEN = "" ]]; then
  1529.         echo -ne "Error loading data from $CONFIG_FILE...\n"
  1530.         echo -ne "It is recommended to run $0 unlink\n"
  1531.         remove_temp_files
  1532.         exit 1
  1533.     fi
  1534.  
  1535. #NEW SETUP...
  1536. else
  1537.  
  1538.     echo -ne "\n This is the first time you have run this script, so please follow these instructions:\n\n"
  1539.     echo -ne " 1) Open the following URL in your browser and log in with your account: $APP_CREATE_URL\n"
  1540.     echo -ne " 2) Click on \"Create App\", then select \"Dropbox API app\"\n"
  1541.     echo -ne " 3) Continue with the configuration, choosing the app permissions and access restrictions for your Dropbox folder\n"
  1542.     echo -ne " 4) Enter whatever \"App Name\" you prefer (e.g. MyUploader$RANDOM$RANDOM$RANDOM)\n\n"
  1543.  
  1544.     echo -ne " Now, click on the \"Create App\" button.\n\n"
  1545.  
  1546.     echo -ne " When your new App is successfully created, please click on the Generate button\n"
  1547.     echo -ne " under the 'Generated access token' section, then copy and paste the new access token here:\n\n"
  1548.  
  1549.     echo -ne " # Access token: "
  1550.     read -r OAUTH_ACCESS_TOKEN
  1551.  
  1552.     echo -ne "\n > The access token is $OAUTH_ACCESS_TOKEN. Looks ok? [y/N]: "
  1553.     read -r answer
  1554.     if [[ $answer != "y" ]]; then
  1555.         remove_temp_files
  1556.         exit 1
  1557.     fi
  1558.  
  1559.     echo "OAUTH_ACCESS_TOKEN=$OAUTH_ACCESS_TOKEN" > "$CONFIG_FILE"
  1560.     echo "   The configuration has been saved."
  1561.  
  1562.     remove_temp_files
  1563.     exit 0
  1564. fi
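#After a successful setup, $CONFIG_FILE holds a single assignment of the form
#OAUTH_ACCESS_TOKEN=<token> (value elided here). It is sourced on every later
#run, so keep its permissions restrictive; the umask 077 set above already makes
#the newly created config file private.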
  1565.  
  1566. ################
  1567. #### START  ####
  1568. ################
  1569.  
  1570. COMMAND="${*:$OPTIND:1}"
  1571. ARG1="${*:$OPTIND+1:1}"
  1572. ARG2="${*:$OPTIND+2:1}"
  1573.  
  1574. let argnum=$#-$OPTIND
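#Example of how the positional arguments are resolved (hypothetical command
#line): "./dropbox_uploader.sh -q upload a.txt b.txt /backup" gives
#COMMAND="upload"; for the upload command the last argument "/backup" becomes
#the remote destination and every argument in between (a.txt, b.txt) is treated
#as a local source.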
  1575.  
  1576. #CHECKING PARAMS VALUES
  1577. case $COMMAND in
  1578.  
  1579.     upload)
  1580.  
  1581.         if [[ $argnum -lt 2 ]]; then
  1582.             usage
  1583.         fi
  1584.  
  1585.         FILE_DST="${*:$#:1}"
  1586.  
  1587.         for (( i=OPTIND+1; i<$#; i++ )); do
  1588.             FILE_SRC="${*:$i:1}"
  1589.             db_upload "$FILE_SRC" "/$FILE_DST"
  1590.         done
  1591.  
  1592.     ;;
  1593.  
  1594.     download)
  1595.  
  1596.         if [[ $argnum -lt 1 ]]; then
  1597.             usage
  1598.         fi
  1599.  
  1600.         FILE_SRC="$ARG1"
  1601.         FILE_DST="$ARG2"
  1602.  
  1603.         db_download "/$FILE_SRC" "$FILE_DST"
  1604.  
  1605.     ;;
  1606.  
  1607.     saveurl)
  1608.  
  1609.         if [[ $argnum -lt 1 ]]; then
  1610.             usage
  1611.         fi
  1612.  
  1613.         URL=$ARG1
  1614.         FILE_DST="$ARG2"
  1615.  
  1616.         db_saveurl "$URL" "/$FILE_DST"
  1617.  
  1618.     ;;
  1619.  
  1620.     share)
  1621.  
  1622.         if [[ $argnum -lt 1 ]]; then
  1623.             usage
  1624.         fi
  1625.  
  1626.         FILE_DST="$ARG1"
  1627.  
  1628.         db_share "/$FILE_DST"
  1629.  
  1630.     ;;
  1631.  
  1632.     info)
  1633.  
  1634.         db_account_info
  1635.  
  1636.     ;;
  1637.  
  1638.     space)
  1639.  
  1640.         db_account_space
  1641.  
  1642.     ;;
  1643.  
  1644.     delete|remove)
  1645.  
  1646.         if [[ $argnum -lt 1 ]]; then
  1647.             usage
  1648.         fi
  1649.  
  1650.         FILE_DST="$ARG1"
  1651.  
  1652.         db_delete "/$FILE_DST"
  1653.  
  1654.     ;;
  1655.  
  1656.     move|rename)
  1657.  
  1658.         if [[ $argnum -lt 2 ]]; then
  1659.             usage
  1660.         fi
  1661.  
  1662.         FILE_SRC="$ARG1"
  1663.         FILE_DST="$ARG2"
  1664.  
  1665.         db_move "/$FILE_SRC" "/$FILE_DST"
  1666.  
  1667.     ;;
  1668.  
  1669.     copy)
  1670.  
  1671.         if [[ $argnum -lt 2 ]]; then
  1672.             usage
  1673.         fi
  1674.  
  1675.         FILE_SRC="$ARG1"
  1676.         FILE_DST="$ARG2"
  1677.  
  1678.         db_copy "/$FILE_SRC" "/$FILE_DST"
  1679.  
  1680.     ;;
  1681.  
  1682.     mkdir)
  1683.  
  1684.         if [[ $argnum -lt 1 ]]; then
  1685.             usage
  1686.         fi
  1687.  
  1688.         DIR_DST="$ARG1"
  1689.  
  1690.         db_mkdir "/$DIR_DST"
  1691.  
  1692.     ;;
  1693.  
  1694.     search)
  1695.  
  1696.         if [[ $argnum -lt 1 ]]; then
  1697.             usage
  1698.         fi
  1699.  
  1700.         QUERY=$ARG1
  1701.  
  1702.         db_search "$QUERY"
  1703.  
  1704.     ;;
  1705.  
  1706.     list)
  1707.  
  1708.         DIR_DST="$ARG1"
  1709.  
  1710.         #Checking DIR_DST
  1711.         if [[ $DIR_DST == "" ]]; then
  1712.             DIR_DST="/"
  1713.         fi
  1714.  
  1715.         db_list "/$DIR_DST"
  1716.  
  1717.     ;;
  1718.  
  1719.     monitor)
  1720.  
  1721.         DIR_DST="$ARG1"
  1722.         TIMEOUT=$ARG2
  1723.  
  1724.         #Checking DIR_DST
  1725.         if [[ $DIR_DST == "" ]]; then
  1726.             DIR_DST="/"
  1727.         fi
  1728.  
  1729.         print " > Monitoring \"$DIR_DST\" for changes...\n"
  1730.  
  1731.         if [[ -n $TIMEOUT ]]; then
  1732.             db_monitor_nonblock $TIMEOUT "/$DIR_DST"
  1733.         else
  1734.             db_monitor 60 "/$DIR_DST"
  1735.         fi
  1736.  
  1737.     ;;
  1738.  
  1739.     unlink)
  1740.  
  1741.         db_unlink
  1742.  
  1743.     ;;
  1744.  
  1745.     *)
  1746.  
  1747.         if [[ $COMMAND != "" ]]; then
  1748.             print "Error: Unknown command: $COMMAND\n\n"
  1749.             ERROR_STATUS=1
  1750.         fi
  1751.         usage
  1752.  
  1753.     ;;
  1754.  
  1755. esac
  1756.  
  1757. remove_temp_files
  1758.  
  1759. if [[ $ERROR_STATUS -ne 0 ]]; then
  1760.     echo "Some error occurred. Please check the log."
  1761. fi
  1762.  
  1763. exit $ERROR_STATUS