#!/bin/sh
: ; exec klone $0 "$@"
; The above line lets us avoid embedding the exact path of the klone executable
;;Skeleton of a typical klone script
;; TODO: 
;; option to implement retries for -r & -l
;; in perpetual mode, don't try to get files if we cannot go into dir
;; -a for accelerating transfers: we can file-position after end, and
;;    a RETR for too far away just returns a nul string
;;    new arg for giving supposed size
;;    new arg to get a chunk
;;    first kget -x checks for REST implementation, then if ok
;;    runs as many chunks as we can, like the parallel case
;;    stop when we get a successful retrieve of size 0 meaning we are past end
;; recursive listing in lr form
;; print file/dir names with %-quoting only on non-printing chars, and
;;    spaces before and after
;; recursive listing creating dirs and dirs with size appended to name,
;;     set to date, and with contents the full URL of it
;; Optionally convert chars to %xx for local paths?

;; TEMPORARY: use our lib
;; Prepend the private klone library dir to the load path so that the
;; (require) below finds our own copy of klone2 first.
(insert *load-pathname* 0 (expand-filename "~colas/Klone/src/kl"))

(require 'klone2) ; This uses klone v2

(setq *progname* *arguments*.0)		;argv[0]: reused to re-exec ourself for -x children
;; Option table syntax: ("flag" arg-name-or-() variable "help text" [:hidden t])
;; (fixed three typos in the quoted help strings: recursive/retrieves/recursion)
(setq args (getopts USAGE: %0 [options] ftp-URL-of-a-directory
Connects to the ftp server, and stays connected, listing recursively the
directory every 2 minutes, and mirroring new files in parallel.
It avoids downloading files being uploaded by getting only files whose size 
hasnt changed in the last 2 minutes.
URL can be given in global var KGET
URL has the syntax: [ftp://][user:password@]hostname[:port]/path

You can exclude file names to retrieve in ~/.kget/exclude (reread every 2mn):
  # text...                          comments
  -x[ifp] regexp  # comment          exclude files matching regexp
         with options: i  ignore case
                       f  exact match, no regexp
                       p  regexp must match full path
There must be a space before the # for comments at end of line

    ("-l" () only-list "only lists directory, then exit")
    ("-L" () only-list-recursive "like -l, but recursive listing")
    ("-x" () only-get "only get this URL, which must be a single file,
(recreates the local hierarchy) then exit")
    ("-g" () no-local-dir "like -x, but store file in current dir, do not
recreate path")
    ("-r" () only-rget "retrieves recursively the directory, then exit")
    ("-%" () quote-localpaths "quote non-URL chars in local filenames a la
wget: a~b becomes a%7Eb")
    ("-q" () quiet "quiet mode")
    ("-p" N max-gets "maximum number of parallel gets at any time (def. 20)")
    ("-1" () max-gets-1 "same as -p 1. -1 ... -9 same as -p 1...9")
    ("-n" () nothing "do not get files, only list them")
    ("-t" hours r-timeout "Timeout in hours for -r retrieval. default 24 hours")
    ("-v" () verbose "verbose operation")
    ("-V" () Verbose "very verbose operation, lists FTP traffic")
    ("-w" seconds wait-time "time between dir probes. default to 2mn")
    ("-W" seconds poll-time "time between thread polling. default to 5s")
    ("-u" URL url-option "give URL as option instead of as argument")
    ("-a" () absolute-path "use absolute paths to retrieve remote files")
    ("-i" () incremental "do not mirror present state, only future files")
    ("-LL" level max-dir-recursive-level "maximum recursion level for -L")
    ("-nk" () ignore-kgetrc "ignore ~/.kget/exclude")
    ("-start" bytes get-file:from "for -r/-x: only get file from offset")
    ("-len" bytes get-file:size "for -r/-x: only get these bytes of file")
    ("-gt" seconds get-timeout "timeout on gets. defaults to 15mn")
    ("-@" () use-joe "use joe@ as passwd")
;; --- Hidden Options ---
    ("-C" () crypted-url "URL argument is crypted" :hidden t)
    ("-enc" () only-encrypt "Only encrypt URL and print it" :hidden t)
    ("-dec" () only-decrypt "Only decrypt URL and print it" :hidden t)
    ("-_" () dummy "placeholder" :hidden t)
    ("-debug" () enter-debugger-on-error "enter klone debugger on error"
 :hidden t)
    ("-stackdump" () stackdump-on-error "verbose stack dump on error"
 :hidden t)
    ("-2" () max-gets-2 "same as -p 2")
    ("-3" () max-gets-3 "same as -p 3")
    ("-4" () max-gets-4 "same as -p 4")
    ("-5" () max-gets-5 "same as -p 5")
    ("-6" () max-gets-6 "same as -p 6")
    ("-7" () max-gets-7 "same as -p 7")
    ("-8" () max-gets-8 "same as -p 8")
    ("-9" () max-gets-9 "same as -p 9")
))

;; Options
;; Parses the getopts-populated option variables into the program's
;; globals, applying defaults, and initializes the shared bookkeeping
;; lists used by the mirroring machinery.
(defun parse-options ()
  (if enter-debugger-on-error (kdb t))
  (if stackdump-on-error (stack-dump-on-error t))
  (if Verbose (setq verbose t))		;-V implies -v
  (stack-dump-on-error t)	;NOTE(review): unconditional -- makes -stackdump redundant; leftover debug aid?

  ;; URL precedence: -u option, then first positional argument, then $KGET
  URL = (if url-option url-option (if args (0 args) (getenv "KGET")))
  
  (if (or (not URL) (= "" URL)) (getopts :usage))
  (if crypted-url (setq URL (decrypt URL)))	;-C: URL was encrypted by to-get-run

  ;; -enc/-dec: pure encode/decode helpers, print and exit immediately
  (when only-encrypt (PF "%0\n" (encrypt URL)) (exit 0))
  (when only-decrypt (PF "%0\n" (decrypt URL)) (exit 0))
  
  ;; numeric options, with their documented defaults
  wait-time = (Int (if wait-time wait-time 120))	;2mn between dir probes
  max-gets  = (Int (if max-gets max-gets 20))
  ;; -1 .. -9 are shorthands that override -p
  (if max-gets-1 max-gets = 1)
  (if max-gets-2 max-gets = 2)
  (if max-gets-3 max-gets = 3)
  (if max-gets-4 max-gets = 4)
  (if max-gets-5 max-gets = 5)
  (if max-gets-6 max-gets = 6)
  (if max-gets-7 max-gets = 7)
  (if max-gets-8 max-gets = 8)
  (if max-gets-9 max-gets = 9)
  poll-time = (Int (if poll-time poll-time 5))		;5s between child polls
  r-timeout = (Number (if r-timeout r-timeout 24))	;-r timeout, hours
  (if no-local-dir only-get = t)	;-g implies -x behavior
  (if get-file:size get-file:size = (Int get-file:size))
  (if get-file:from get-file:from = (Int get-file:from))
  default-pass = (if use-joe "joe@" "ftp@anonymous.com")
  ftp:anonymous-pass = default-pass
  default-username = "anonymous"
  get-timeout = (if get-timeout (* 60000 (Int get-timeout)) 900000)	;ms; default 15mn
  (if max-dir-recursive-level 
    max-dir-recursive-level = (Int max-dir-recursive-level)
    max-dir-recursive-level = *maxint*	;no -LL: unlimited recursion
  )

  ;; globals
  running-wgets = (list)		;plist paths pids
  already-got = (list)		;plist paths size (may grow...)
  to-get = (list)			;a-list (path size)
  being-get = (list)		;a-list (path size)
  excluded-regexps = (list)		;list of excluded files
  excluded-urls = (list)		;list of excluded host/paths
  
  ;; settings
  list-ftp-dir-indent = 2			;spaces to indent rec lists
)

;; Entry point: parses options, loads the exclude config, then dispatches
;; on the mode flags -- one-shot listing (-L/-l) or single get (-x/-g),
;; else the perpetual monitor/mirror loop.
;; Note: klone "if" is cond-like (condition/result pairs, last form = else).
(defun main (&aux
  )
  (parse-options)
  (fix-kget)   ;; check .kgetrc syntax
  (parse-kgetrc)
  (if 
    only-list-recursive (list-ftp-dir URL :recursive t)
    only-list (list-ftp-dir URL)
    only-get (get-file URL) 
    ;; default mode: reconnect forever; 'EOF = connection lost, retry
    (catch 'End (while t 
	(catch 'EOF (ftp-monitor URL)))
    )
  )
)

;; Perpetual mirror loop for one FTP site: connects, then forever re-lists
;; the whole tree, detects files whose size stopped changing between two
;; probes (i.e. uploads that finished), and schedules those for parallel
;; download via child kget -x processes.
;; NOTE(review): helpers called from here (to-get-run, collect-wgets-results,
;; ftp-rget, recursive-ls) read c/host/files from this frame -- presumably
;; klone &aux vars are dynamically scoped; confirm before refactoring.
(defun ftp-monitor (URL &aux host root port user password
    c dir old-f stats localpath
    files old-files stable-files	;plists: complete-path ftp:F
  )
  (setq c (ftp:open URL :nocd t))
  ;; TODO we may retry
  (if (not c) (fatal-error 1 "Could not connect to site: %0\n" URL))
  (setqn host c.host root c.root port c.port user c.user password c.passwd)
  (catch 'ftp:ERROR				;catch all errors
    (if only-rget (ftp-rget))		;-r mode: one-shot recursive get, exits
    (while t
      ;; get list of current files
      old-files = files
      files = (list)
      (recursive-ls c root files)
      (update-already-got host)		;merge with on-disk already-got cache
      (remove-excluded)			;apply ~/.kget/exclude regexps
      ;; find the ones that are not growing anymore in size since last time
      stable-files = (list)
      (dohash (path f files)
	(if (and (setq old-f (getn old-files path))
	    (= old-f.size f.size)
	  )
	  (put stable-files path f)
      ))
      (collect-wgets-results running-wgets)
      ;; do not get existing state if incremental
      (when incremental
	(setq incremental ())		;only skip the very first snapshot
	(dohash (path f stable-files) (put already-got path f.size))
      )
      ;; get them in parallel, if not already here
      (dohash (path f stable-files)
	(if (and (getn already-got path)
	    (=  f.size (getn already-got path))
	  )
	  (if Verbose (verbose? "=== already got before: %0%1" host path))
	  
	  ;; a local file of the same size already exists: record, skip fetch
	  (and (setq stats (file-stats (setq localpath (if quote-localpaths
		    (html:quote-values (full-paths host path))
		    (full-paths host path)))
	    ))
	    (= stats.size f.size)
	  ) (progn
	    (verbose? "=== already here: %0%1" host path)
	    (put already-got path f.size)
	  )

	  ;; else queue it, unless -n or a child is already fetching it
	  (progn
	    (if (not nothing)
	      (if (not (getn running-wgets path))
		(to-get-add path f.size)
      )))))
      ;; wait wait-time, monitoring running parallel ftp gets
      (with (wakeup (+ wait-time (get-current-time))) ;when to do a ls
	(while (< (get-current-time) wakeup)
	  (if running-wgets (collect-wgets-results running-wgets))
	  (if to-get (to-get-run c))
	  (select () :timeout (* 1000 poll-time))	;sleep poll-time seconds
      ))
      (collect-wgets-results running-wgets)
      ;; clean already-got: forget entries no longer stable/present remotely
      (with (todel (list) f ())
	(dohash (path size already-got)
	  (if (not (and (setq f (getn stable-files path))
	      (= size f.size)
	    ))
	    (lappend todel path)
	))
	(dolist (path todel) (delete already-got path))
))))

;; Appends (path size) to the to-get queue, unless path is already queued.
(defun to-get-add (path size &aux)
  (catch 'Found
    (dolist (ps to-get)
      (if (= ps.0 path) (throw 'Found))	;already queued: do nothing
    )
    (lappend to-get (list path size))
  )
)

;; Comparator for (path size) pairs used by sort in to-get-run:
;; orders by size ascending, so smallest files are fetched first.
(defun to-get-sort (ps1 ps2)		;sort smallest files first
  (compare ps1.1 ps2.1)
)

;; Removes from alist the first (path size) pair whose path matches.
(defun delete-ps (alist path)
  (catch 'Done
    (dolist (ps alist)
      (if (= ps.0 path) (throw 'Done (delete-item-eq alist ps)))
  ))
)

;; Returns the first (path size) pair in alist whose path matches,
;; or () when the loop falls through without a match.
(defun get-ps (alist path)
  (catch 'Done
    (dolist (ps alist)
      (if (= ps.0 path) (throw 'Done ps))
)))

;; wait 2 s between each child-get launch, to avoid "burst congestion"
to-get-run:delay = 2				

;; Launches up to max-gets parallel child kget processes (re-exec of
;; ourself with -x -C) for the queued files in to-get, smallest first.
;; Launched entries move from to-get to being-get / running-wgets.
(defun to-get-run (c &aux (torun max-gets) 
    path quotedpath 
    (start? t)
    url
  )
  (sort to-get to-get-sort)		;smallest files first
  ;; running-wgets is a plist (path pid ...), so length/2 = running children
  (incf torun (- (/ (length running-wgets) 2)))
  (dolist (ps to-get)
    (when (> torun 0)
      (if start? (setq start? ())	;no delay before the very first launch
	(select () :timeout (* 1000 to-get-run:delay))
      )
      (setqn path ps.0 quotedpath (html:quote-values ps.0))
      ;; embed credentials in the URL only when they differ from defaults
      url = (if (or (/= default-pass c.passwd) (/= default-username c.user))
	(PF String "ftp://%0:%1@%2%3%4" c.user c.passwd c.host 
	  (if (= c.port 21) "" (+ ":" (String port)))	;NOTE(review): bare `port` -- presumably caller's dynamic binding; c.port used elsewhere -- confirm
	  quotedpath
	)
	(PF String "%2%3%4" () () c.host 
	  (if (= c.port 21) "" (+ ":" (String port)))
	  quotedpath
      ))
      (verbose? "=== GETTING: %0" url)
      ;; spawn the child with the encrypted URL; remember its pid by path
      (put running-wgets path
	(system (list *progname* "-x" "-C" (if quote-localpaths "-%" "-_")
	    (encrypt url :quote t
      ))))
      (lappend being-get ps)
    )
    (incf torun -1)
  )
  (dolist (ps being-get) (delete-item-eq to-get ps))	;dequeue launched ones
)

;; fills the files plist: (fullpath ftp:F)
;; Recursively walks dir `root` on connection c, adding regular files to
;; `files`; skips "."/"..", excluded URLs, and non file/dir entries; warns
;; (without aborting) on dirs it cannot enter.
;; NOTE(review): `host` is not a parameter -- presumably dynamically bound
;; by ftp-monitor/ftp-rget; confirm before calling from elsewhere.
(defun recursive-ls (c root files &aux fullpath)
  (if (not (ftp:cd c root))
    ;; fixed: warning was missing its terminating newline (cf. other
    ;; *stderr* messages, which all end in \n)
    (PF *stderr* "*** WARNING *** On host %1, Could not go into dir: %0\n"
      root host
    )
    (dolist (f (ftp:ls-noerr c))
      fullpath = (full-paths root f.name)
      (if 
	(is-excluded-url? (+ c.host fullpath))
	()

	;; recurse into real subdirectories
	(and (= #\d f.type) (/= "." f.name) (/= ".." f.name))
	(recursive-ls c fullpath files)
	(= #\- f.type)
	(put files fullpath f)
	;; ignore entries that are not dirs or regular files
    ))
))

;; Reaps finished child kget processes (non-blocking wait) and updates the
;; queues according to each exit code: 0 = done, 10 = disk full (abort),
;; >10 = permanent error (give up on file), else = retryable (requeue and
;; shrink max-gets to reduce server load).
;; NOTE(review): references `c` which is not a parameter -- presumably the
;; caller's dynamic binding (ftp-monitor/ftp-rget); confirm.
(defun collect-wgets-results (running-wgets &aux child path)
  (while (setq child (wait () :blocking ()))	;child = (pid status)
    ;; map the reaped pid back to its remote path
    (dohash (p pid running-wgets)
      (if (eq pid child.0) (setq path p))
    )
    (when path
      (if (= 0 child.1) (progn
	  (verbose? "ftp get of %1%0 terminated normally" (quote-names path) c.host)
	)
	(= 10 child.1) (ftp:fatal-error c 1 "Disk Full! *** ABORTING! ***")
	
	(> child.1 10) (progn
	  (PF *stderr* "*** ERROR ***: could not get file %0\n" 
	    (full-paths c.host path)
	))
	;; else: transient failure -- requeue and throttle parallelism
	(with (ps (get-ps being-get path)) ;retryable error, put back in list
	  (verbose? "Cannot get now, will retry, file: %0%1" c.host path)
	  (to-get-add ps.0 ps.1)
	  (with (pr (- (/ (length running-wgets) 2) 1))
	    (when (and (> pr 0) (< pr max-gets))
	      (verbose? "### Reducing maximum parallel gets to %0 for %1%2" pr
	      c.host c.root)
	      (setq max-gets pr)
      ))))
      (delete running-wgets path)
      (delete-ps being-get path)
  ))
)

;;=============================================================================
;;                    config files
;;=============================================================================
;; Ensures the ~/.kget config directory exists, creating it if needed;
;; aborts the program if it cannot be created.
(defun fix-kget ()
  (when (/= 'directory (file-type "~/.kget"))
    (if (/= 0 (wait (system (list "mkdir" "-p" (expand-filename "~/.kget")))))
      (fatal-error 1 "*** kget couldnt create a ~/.kget dir\n")
  ))
)

;; remove files excluded by the .kgetrc from the files list
(defun parse-kgetrc (&aux
    (fd (if ignore-kgetrc () (open "~/.kget/exclude" :error ())))
  )
  (setq excluded-regexps (list))
  (when fd
    (domatch (re fd)
      "^[ \t]*#" ()			;comments
      "^[ \t]*$" ()			;empty lines
      "^-x([ifp]*)[ \t]+(.*)$" ((excluded-regexps-add re)) ;exclude
      ".*" ((PE "**WARNING**: bad syntax in ~/.kget/exclude: \n %0\n" (re 0))))
  )
  fd = (open "~/.kget/excluded-urls" :error ())
  (setq excluded-urls (list (regcomp "\n"))) ; we pre-exclude newlines in files
  (when fd
    (domatch (re fd)
      "^[ \t]*#" ()			;comments
      "^[ \t]*$" ()			;empty lines
      "^(.*)$" ((excluded-urls-add re)) ;exclude
  ))
)

;; matches a trailing "<spaces>#..." end-of-line comment on an exclude line
(setq excluded-regexps:re-comment (regcomp "[ \t]*#.*$"))

;; Compiles one excluded-urls config line (match group 1) into a regexp
;; and appends it to excluded-urls, after stripping any trailing # comment.
(defun excluded-urls-add (re &aux (expr (re 1)))
  (catch 'ALL
    (if (excluded-regexps:re-comment expr)
      (setq expr (subseq expr 0 excluded-regexps:re-comment.0.0))	;cut at comment start
    )
    (lappend excluded-urls (regcomp expr))
  )
)

;; Returns t if string s (a host/path URL) matches any excluded-urls
;; regexp, logging the match in verbose mode; () otherwise.
(defun is-excluded-url? (s)
  (catch 'Done
    (dolist (re excluded-urls)
      (if (re s) (throw 'Done 
	  (verbose?  "URL EXCLUDED: %r0\n   by regexp: %r1" (quote-names s) re)
	  t
      ))
    )
    ()					;no regexp matched
))

;; Compiles one "-x[ifp] regexp" exclude line into a regexp appended to
;; excluded-regexps. Option letters (match group 1): i = ignore case,
;; f = fixed string (quote regexp metachars), p = match the full path
;; (otherwise the pattern is anchored to a path component).
(defun excluded-regexps-add (re &aux
    (options (re 1))
    (expr (re 2))
  )
  (catch 'ALL
    ;; we must do this to remove comments, we have not the power of perl regexps
    (if (excluded-regexps:re-comment expr)
      (setq expr (subseq expr 0 excluded-regexps:re-comment.0.0))
    )
    (if (seek options #\f)
      (setq expr (quote-string-for-regexp expr))	;f: treat as literal string
    )
    (if (seek options #\i)
      (setq expr (re-nocase expr))			;i: case-insensitive
    )
    (if (not (seek options #\p))
      (setq expr (+ "(^|[/])" expr "$"))		;default: match a whole basename
    )
    (lappend excluded-regexps (regcomp expr))
  )
)

;; Removes from the (dynamically bound) files plist every path matching
;; one of the excluded-regexps. Collects paths first, then deletes, to
;; avoid mutating files while iterating over it.
(defun remove-excluded (&aux
    (paths-to-remove (list))
  )
  (dohash (path f files)
    (catch 'Done
      (dolist (re excluded-regexps)
	(when (re path) (lappend paths-to-remove path) (throw 'Done))
  )))
  (dolist (path paths-to-remove)
    (verbose? "=== excluded: %0" path)
    (delete files path)
  )
)

;; caches already got files to remember states between invocations
(defun update-already-got (host &aux
    (filename (expand-filename (+ "~/.kget/already-got=" host)))
    fd
    saved-list
  )
  (file-lock filename :file t)
  (catch 'ALL
    (if (setq fd (open filename :error ())) ;read saved list
      (setq saved-list (read fd))
    )
  )
  (dohash (path size saved-list)	;merge saved and memory lists
    (if (not (getn already-got path))	;memory has precedence
      (put already-got path size)
  ))
  (catch 'ALL
    (if (/= saved-list already-got)
      (when (setq fd (open filename :error () ; write back whole things
	    :direction :output :if-exists :supersede)) 
	(PF fd "(\n")
	(dohash (path size already-got) (PF fd "  %r0 %r1\n" path size))
	(PF fd ")\n")
	(close fd)
  )))
  (file-unlock filename :file t)
)

;;=============================================================================
;;                    standalone funcs
;;=============================================================================
;; -r

;; -r mode: one-shot recursive retrieval of the whole remote tree via
;; parallel child gets, then exit. Uses the dynamically bound vars of its
;; caller ftp-monitor (c, root, files, to-get, being-get, running-wgets).
;; NOTE(review): on timeout, ftp:fatal-error closes c a second time (it was
;; already closed below) -- presumably harmless; confirm.
(defun ftp-rget (&aux
    (stop-time (+ (get-current-time) (* r-timeout 3600))) ;timeout
    stats localpath	;fixed: was declared `stat` but used as `stats` below, leaking a global
    (host c.host)
  )			;uses vars of ftp-monitor
  (setq files (list))
  (recursive-ls c root files)
  (ftp:close c)				; free the connection slot
  (dohash (path f files)
    ;; skip files already fully present locally, queue the rest
    (if (and (setq stats (file-stats (setq localpath (if quote-localpaths
		(html:quote-values (full-paths host path))
		(full-paths host path)))
	))
	(= stats.size f.size)
      )
      (verbose? "=== already here: %0%1" host path)
      (to-get-add path f.size)
  ))
  (while (or to-get being-get)		;wait till we got everything
    (if (>= (get-current-time) stop-time)
      (ftp:fatal-error c 1 "##### KGet for %1%2 aborted: timeout after %0 hours" r-timeout c.host c.root)
    )	
    (if running-wgets (collect-wgets-results running-wgets))
    (if to-get (to-get-run c))
    (select () :timeout (* 1000 poll-time))
  )
  (exit 0)
)


;; -l
;; -l / -L: lists the remote directory (dirs first, case-insensitive),
;; one "type date size name" line per entry; with :recursive, descends
;; into each subdirectory via list-ftp-dir-recursive.
(defun list-ftp-dir (URL &key recursive &aux
    c files root
  )
  (setq c (ftp:open URL))
  ;; TODO we may retry
  (if (not c) (fatal-error 1 "Could not connect to URL: %0\n  %1 %2"
      (quote-names URL) ftp:open-error ftp:open-errortext))
  (setq root c.root)
  (setq files (ftp:ls-noerr c))
  (sort files dirsfirst-nocase-fileorder)
  (dolist (f files)
    ;; %4 = left padding so sizes line up in an 8-char column
    (PF "%0 %1 %4%2 %3\n" (subseq f.flags 0 1) f.date f.size 
      (quote-names f.name)
      (make-string (- 8 (length (String f.size))))
  ))
  (when recursive
    (dolist (dir files)
      (if (and (= #\d dir.type) (/= "." dir.name) (/= ".." dir.name))
	(list-ftp-dir-recursive c dir root 0))
    )
))

;; -L
;; -L helper: recursively lists subdirectory `dir` of `pwd` at the given
;; indentation level, honoring excluded URLs and -LL max recursion depth;
;; cds back up afterwards (CDUP, falling back to an absolute cd).
(defun list-ftp-dir-recursive (c dir pwd level &aux 
    files 
    (new-pwd (full-paths pwd dir.name))
  )
  (if (is-excluded-url? (+ c.host new-pwd))
    ()
    (when (ftp:cd c dir.name)
      (PF "%0 :\n" (quote-names new-pwd))
      (incf level list-ftp-dir-indent)	;indent deeper levels
      (setq files (ftp:ls-noerr c))
      (sort files dirsfirst-nocase-fileorder)
      (dolist (f files)
	;; %5 = level indent, %4 = size column padding
	(PF "%5%0 %1 %4%2 %3\n" (subseq f.flags 0 1) f.date f.size 
	  (quote-names f.name)
	  (make-string (- 8 (length (String f.size))))
	  (make-string level)
      ))
      (dolist (subdir files)
	(if (and (= #\d subdir.type) 
	    (/= "." subdir.name) 
	    (/= ".." subdir.name)
	    (< level max-dir-recursive-level)	;-LL depth limit
	  )
	  (list-ftp-dir-recursive c subdir new-pwd level))
      )
      ;; go back up; if both CDUP and absolute cd fail, abort (code 12)
      (if (and (not (ftp:cdup c))
	  (not (ftp:cd c pwd))
	)
	(ftp:fatal-error c 12 
	  "On host %1, Could not go back up from dir:\n    %0\n    up to: %2\n"
	  (quote-names c.pwd) c.host (quote-names pwd))
      ))
)))

;; -x
;; returns error codes: 0=ok, <10 we should retry, >10 not there, 10 local prob
;; 0: ok, done
;; 1: could not connect to site (may be too many users)
;; 2: transmission error
;; 10: disk full
;; 11: file not found
;; 12: dir not found
;; 13: bad URL syntax
;;
;; understands the globals: 
;; get-file:from to get file from offset
;; get-file:size to get only this size

;; -x / -g: downloads the single file designated by URL into the mirrored
;; local hierarchy (or the current dir with -g), resuming with REST when a
;; partial local copy exists. Exits 0 on success; otherwise exits with the
;; codes documented above (1/2 retryable, 10 disk full, 11-13 permanent).
(defun get-file (URL &aux
    (c (ftp:open URL :nocd t))
    conn fd (curlen 0) (connection-error t) dir file localdir localpath
    (re (regcomp "^(.*)[/]([^/]*)$"))	;split URL path into dir / basename
    (blocksize 8192) (go-on t) 
    lfd size host
  )
  (if (not c) (fatal-error 1 "Could not connect to URL: %0\n  %1 %2"
      URL ftp:open-error ftp:open-errortext))
  (catch 'EOF
    (if (not (re c.root))
      (ftp:fatal-error c 13 "Bad URL syntax: %0" URL)
    )
    (setq dir (re 1)) (if (= "" dir) (setq dir "/"))
    (setq file (re 2))
    (setq host c.host)
    ;; local layout mirrors host/dir/file, unless -g (no-local-dir)
    (setq localdir (+ c.host dir))
    (setq localpath (if no-local-dir file (+ c.host dir "/" file)))
    (if quote-localpaths
      (setqn localpath (html:quote-values localpath)
	localdir (html:quote-values localdir)
    ))

    ;; resume offset: explicit -start, else size of any partial local file
    (if get-file:from 
      (setq size get-file:from)
      (setq size (getn (file-stats localpath) 'size))
    )
    (if (not (ftp:cd c dir))
      (ftp:fatal-error c 12 "On host %1, Could not go into dir: %0" dir host)
    )
    (ftp:trap-error (ftp:fatal-error c 2 "Connection to %0 cut!" host)
      (ftp:mode c "I")			;binary mode
      (catch 'ftp:ERROR
	(setq conn (ftp:transfercmd c (+ "RETR " 
	      (if absolute-path (full-paths dir file) file)
	    ) :from size))		;:from size => REST resume
      )
      (if (not conn)
	(if ;; server does not implement REST? restart from scratch
	  (and size
	    (or
	      ({regcomp "^5[0-9][0-9] REST "} c.errortext)
	      ({regcomp "^5[0-9][0-9] Reply marker must be 0"} c.errortext)
	  ))
	  (if get-file:from
	    (fatal-error 1 "Server %0 do not support REST, aborting\n" host)
	    (progn
	      (verbose? "Server do not support REST, re-getting all of %0" URL)
	      (setq size ())
	    (setq conn (ftp:transfercmd c (+ "RETR " file)))
	  ))
	  ;; file not found error (55x reply)
	  (match {regcomp "^55"} c.errortext)
	  (ftp:fatal-error c 11 "File not found: %0" localpath)
	  ;; else retry
	  (throw 'ftp:ERROR c.errortext)
      ))
      (catch 'EOF
	;; create the local directory hierarchy if needed
	(when (and (not no-local-dir) (/= 'directory (file-type localdir)))
	  (wait (system (list "mkdir" "-p" localdir) :error "/dev/null"))
	  (make-visible-tildas localdir)
	)
	(if size
	  ;; restart transfer at end
	  (progn
	    (setq fd (open localpath :direction :output :error ()
		:if-exists :overwrite))
	    (file-position fd size)
	  )
	  ;; else create new file
	  (setq fd (open localpath
	      :direction :output :if-exists :supersede :error ())
	  )
	)
	(if (not fd)
	  (ftp:fatal-error c 10 "could not create local file %0" localpath)
	)
	(if size
	  (if get-file:size
	    (verbose? "=== Getting %0 from %1 bytes, %2 bytes"
	      localpath size get-file:size
	    )
	    (verbose? "=== Getting %0 from %1 bytes to end" localpath size)
	  )
	  (verbose? "=== Getting %0" localpath)
	)
	;; copy loop: non-blocking reads with get-timeout, honoring -len
	(stream-mode conn :blocking ())
	(while (and go-on (or (not get-file:size) (> get-file:size 0)))
	  (if get-file:size 
	    (if (> blocksize get-file:size)
	      (setq blocksize get-file:size)	;do not read past -len
	  ))
	  (if (not (0 (0 (select :input conn :timeout get-timeout))))
	    (throw 'EOF (verbose? "### Timeout while getting %0" localpath))
	  )
	  ;; NOTE(review): `data` is not in &aux -- it leaks as a global
	  (setq data (read-chars blocksize conn))
	  (if get-file:size
	    (incf get-file:size (- (length data)))	;count down remaining -len
	  )
	  (if Verbose (verbose? "--- [%0] bytes read, (total %1) ---"
	      (length data) (+ curlen (length data))
	  ))
	  (if (not data) 
	    (setq go-on ())			;EOF from server
	    (unless (trap-error 'Errors:StreamError (write-chars data () fd))
	    ;; error: couldnt write local file
	      (close fd)
	      (close conn)
	      (ftp:fatal-error c 10 "No more room on local disk for %0!" 
		localpath)
	  ))
	  (incf curlen (length data))
	)
	(close fd)
      )
      (close conn)
      (ftp:voidresp c)			;consume the transfer-complete reply
    )
    (setq  connection-error ())
  )
  (if connection-error (ftp:fatal-error c 2 "Connection to %0 cut!" c.host)
    (exit 0)				;ok
  )
)

;; Closes the FTP connection then delegates to fatal-error with the
;; remaining (exit-code, format, args...) arguments.
(defun ftp:fatal-error (c &rest args)
  (ftp:close c)
  (apply fatal-error args)
)
  
;; guards ls against errors
;; Guards ftp:ls against errors: returns the file list on success, or ()
;; after printing a warning when the listing raised 'ftp:ERROR.
(defun ftp:ls-noerr (c &optional dir &aux files)
  (catch 'ls-ok
    (catch 'ftp:ERROR
      (setq files (if dir (ftp:ls c dir) (ftp:ls c)))
      (throw 'ls-ok files)
    )
    ;; error in ls
    ;; fixed: warning was missing its terminating newline (cf. other
    ;; *stderr* messages, which all end in \n)
    (PF *stderr* "*** WARNING *** On host %1, Could not list dir: %0\n"
      (quote-names (if dir (full-paths c.root dir) c.root)) c.host
    )
    ()					;error ==> returns no files
))

;;=============================================================================
;;                    Misc utils
;;=============================================================================
;; Sorts in place a ftp:F list of files in alphabetical order, dirs first
;; (comparison function to be used in sort)
;; Note: klone "if" is cond-like (condition/result pairs, last form = else).
(defun dirsfirst-nocase-fileorder (f1 f2 &aux res)
  (if (= 0 res = (compare f1.type f2.type))
    (compare-nocase f1.name f2.name)	;same type: case-insensitive by name
    (if (= #\d f1.type) -1		;f1 is the dir: it sorts first
       (= #\d f2.type) 1		;fixed: was -1, which sorted files BEFORE dirs
      res				;neither is a dir: fall back to type order
)))

;; concatenates 2 paths, adding "/" between them if necessary
;; Joins two path components, inserting "/" unless the first already ends
;; with "/" or the second already starts with one.
(defun full-paths (host path)
  (if (or (= #\/ (getn host -1)) (= #\/ (0 path)))
    (+ host path) (+ host "/" path))
)

;; Returns string with non-printing chars quoted HTML style
;; We quote also ' and \ for ease of handling under shell, and %
;; And we quote spaces at start and end

;; default char class quoted by quote-names: controls, % ' \ and 8-bit chars
(setq quote-names:re (regcomp "^[\x01-\x1f\x25\x27\x5c\x7f-\xff]"))
;; shifted-by-one char class used by encrypt (values are offset by -1 first)
(setq quote-names:re+1 (regcomp "^[]\x01-\x20\x26\x28\x80-\xff]"))
(setq quote-names:space (regcomp "^ "))		;a leading space
(setq quote-names:endspace (regcomp " *$"))	;run of trailing spaces
(setq hexdigits "0123456789ABCDEF")		;for building %XX escapes

;; Returns s with every char matching `re` replaced by its %XX escape,
;; and with leading/trailing spaces always escaped as %20 (so names are
;; safe to print and to paste into a shell).
(defun quote-names (s &optional (re quote-names:re) &aux
    (res (copy ""))
    (pos 0)
    endpos
  )
  (quote-names:endspace s)
  endpos = quote-names:endspace.0.0			;end of non-space chars
  ;; leading spaces -> %20
  (while (and (< pos endpos) (quote-names:space s pos))
    (nconc res "%20") (incf pos)
  )
  (while (< pos endpos)
    (if (re s pos) (progn
	;; char matches the quoted class: emit %XX (hi/lo hex nibbles)
	(put res -1 #\%)
	(put res -1 ((/ (pos s) 16) hexdigits))
	(put res -1 ((mod (pos s) 16) hexdigits))
      )
      (put res -1 (pos s))		;ordinary char: copy as-is
    )
    (incf pos)
  )
  ;; trailing spaces -> %20
  (dotimes (i (- (length s) endpos))
    (nconc res "%20")
  )
  res
)

;; if directories in path begin with ~, supplement them with a tilda-less dir
;; pointing to them
;; if directories in path begin with ~, supplement them with a tilda-less dir
;; pointing to them: for each "/~name" component, creates a sibling
;; symlink "%7Ename" -> "~name" so shells do not expand the tilde.
(defun make-visible-tildas (dir &aux
    parent (offset 0)
    (re (regcomp "[/]([~]([^/]*))"))	;group 1 = "~name", group 2 = "name"
  )
  (while (regexec re dir offset)
    (setq parent (subseq dir 0 (get (get re 0) 0)))	;path up to the match
    (setq offset (get (get re 0) 1))			;resume after the match
    (wait (system (list "ln" "-s" (re 1) (+ parent "/%7E" (re 2)))))
  )
)

;; Encrypt / decrypt strings: clean quoted values, to get the shortest possible
;; string, yet cut/pastable in shell if surrounded by '' (no ' or \ inside)
;; Obfuscates s for safe shell transport: (optionally) unquotes %XX
;; values, shifts every char down by 1, then %-quotes the shifted string
;; with the re+1 class. Inverse of decrypt.
(defun encrypt (s &key quote)
  (if quote () (setq s (html:unquote-values s)))	;:quote t => s is already raw
  (map String minus1 (quote-names s quote-names:re+1))
)

;; Inverse of encrypt: shift chars up by 1, unquote %XX values, then
;; re-quote with the default class for safe display/use.
(defun decrypt (s)
  (quote-names (html:unquote-values (map String add1 s)))
)
;; char-code helpers for encrypt/decrypt's +/-1 shift
(defun add1 (c) (+ c 1))
(defun minus1 (c) (- c 1))
(defun same (s) s)	;identity (kept for symmetry / debugging)

;; Change current dir to .., return t if ok, () if not
;; Change current dir to .., return t if ok, () if not
;; (swallows the 'ftp:ERROR throw raised by our ftp:error-hook).
(defun ftp:cdup (c &aux ok)
  (catch 'ftp:ERROR
    (ftp:voidcmd c "CDUP")
    (setq ok t)
  )
  ok
)

;;=============================================================================
;;                    debug funcs
;;=============================================================================

;; Library hook: turn every FTP protocol error into a 'ftp:ERROR throw,
;; caught at the various recovery points above.
(defun ftp:error-hook (c code text)
  (throw 'ftp:ERROR text)
)

(main)

;;; EMACS MODES
;;; Local Variables: ***
;;; mode:lisp ***
;;; End: ***


