Commits

Pierre Neidhardt committed 3fd1c4c

Doc fixes.

  • Parent commits 59639f2
  • Tags v1.2

Files changed (3)
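
The recurring change in this diff replaces $1 with ${1##*/} in the scripts'
help and error messages. That POSIX parameter expansion strips the longest
prefix matching '*/', so a script invoked by its full path reports only its
basename. A one-line illustration (the variable name here is ours):

  name="/usr/local/bin/wikiex"
  echo "${name##*/}"    # prints 'wikiex'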

-2013-02-09 1.3
-	* wikils: New script for listing category and prefix pages.
-
 2013-02-08 1.2
-	* Documentation fixes.
+	* wikils: New script for listing category and prefix pages.
 
 2013-02-08 1.1
 	* Typo fixes.
+	* Documentation fixes.
 
 2013-02-07 1.0
 	* wikiex: First public release.
 NAME="Wikiex"
 AUTHOR="Pierre Neidhardt"
 DATE="2013"
-VERSION="1.3"
-
-MAIN_SITE="http://en.wikibooks.org/wiki"
-EXPORT_PARAM="Special:Export"
-OUTPUT_FOLDER="wikipages"
-RES_PARAM="Special:FilePath"
-ERRORS=0
-OVERWRITE_PAGES=-N
-OVERWRITE_IMAGES=-nc
-REPLACE_SPACES=false
+VERSION="1.2"
 
 if [ "$command -v wget)" = "" ]; then
     echo "ERROR: wget not found."
     exit 1
 fi
 
+MAIN_SITE="http://en.wikibooks.org/wiki"
+EXPORT_PARAM="Special:Export"
+OUTPUT_FOLDER="wikipages"
+RES_PARAM="Special:FilePath"
+ERRORS=0
+OVERWRITE_PAGES=-N
+OVERWRITE_IMAGES=-nc
+REPLACE_SPACES=false
+
 printhelp()
 {
     cat<<EOF
-Usage: $1 [OPTION] PAGES
+Usage: ${1##*/} [OPTION] PAGES
 
 PAGES can be either complete links to MediaWiki pages, or just page titles
 (i.e. the last part of the URI). Everything gets downloaded to the default
 output folder, or the one given with '-o'. Page lists stored in files can be
 passed in a 'cat' call, surrounded by double-quotes. See examples. Lines
 beginning with '#' are skipped. Surrounding whitespace is ignored.
 
-Note that complete links are not reliable, since $1 needs to change the URI to
+Note that complete links are not reliable, since ${1##*/} needs to change the URI to
 fetch the page in the proper export format. You are always better off specifying
-a main site, then titles. In such a case $1 will insert '$EXPORT_PARAM' between
+a main site, then titles. In such a case ${1##*/} will insert '$EXPORT_PARAM' between
 the main site URI and the title.
 
 Options:
   -v         Print version.
 
 Examples:
-  $1 'http://en.wikipedia.org/wiki/Main_Page' 'http://en.wikipedia.org/wiki/LaTeX'
-  $1 -s 'http://en.wikipedia.org/wiki' 'Main Page' 'LaTeX'
-  $1 -o output-folder 'TeX'
-  $1 'TeX' "\$(cat pagelist1 pagelist2)"
+  ${1##*/} 'http://en.wikipedia.org/wiki/Main_Page' 'http://en.wikipedia.org/wiki/LaTeX'
+  ${1##*/} -s 'http://en.wikipedia.org/wiki' 'Main Page' 'LaTeX'
+  ${1##*/} -o output-folder 'TeX'
+  ${1##*/} 'TeX' "\$(cat pagelist1 pagelist2)"
 EOF
 }
 
 print_missing_arg ()
 {
     echo "Missing argument."
-    echo "Use $1 -h for help."
+    echo "Use ${1##*/} -h for help."
 }
 
 while getopts ":ihvo:prs:" opt; do
 NAME="Wikils"
 AUTHOR="Pierre Neidhardt"
 DATE="2013"
-VERSION="1.3"
+VERSION="1.2"
 
 if [ "$command -v wget)" = "" ]; then
     echo "ERROR: wget not found."
 printhelp()
 {
     cat<<EOF
-Usage: $1 [OPTION] NAME
+Usage: ${1##*/} [OPTION] NAME
 
 This script will request a special page from a wiki, like a category page or a
 prefix page. (A prefix means 'all pages that begin with'.) Currently this only
   -v         Print version.
 
 Examples:
-  $1 LaTeX
-  $1 -s 'http://en.wikipedia.org/wiki' 'Main Page' 
-  $1 -c 'A-level Mathematics'
+  ${1##*/} LaTeX
+  ${1##*/} -s 'http://en.wikipedia.org/wiki' 'Main Page' 
+  ${1##*/} -c 'A-level Mathematics'
 
 You can use it in combination with wikiex to download a bunch of pages:
 
 print_missing_arg ()
 {
     echo "Missing argument."
-    echo "Use $1 -h for help."
+    echo "Use ${1##*/} -h for help."
 }
 
 while getopts ":hvcps:" opt; do