Commits

Shlomi Fish committed c48e39c

POD encoding fixes.

Comments (0)

Files changed (13)

src/wml_aux/iselect/iselect.pod

 ##  iselect.pod -- manual page 
 ##
 
+=encoding utf8
+
 =head1 NAME
 
 iSelect -- Interactive Selection Tool

src/wml_aux/linklint/linklint.pod

+=encoding utf8
 
 =head1 NAME
 
 
 =head1 SYNOPSIS
 
-B<linklint> 
+B<linklint>
 [B<-cache I<directory>>]
 [B<-case>]
 [B<-checksum>]

src/wml_aux/linklint/linklint.src

 #  it under the terms of the GNU General Public License as published by
 #  the Free Software Foundation; either version 2 of the License, or
 #  (at your option) any later version.
-# 
+#
 #  This program is distributed in the hope that it will be useful,
 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 #  GNU General Public License for more details.
-# 
+#
 #  You should have received a copy of the GNU General Public License
 #  along with this program; if not, write to
-# 
+#
 #      Free Software Foundation, Inc.
 #      59 Temple Place - Suite 330
 #      Boston, MA  02111-1307, USA
-# 
+#
 #  Notice, that ``free software'' addresses the fact that this program
 #  is __distributed__ under the term of the GNU General Public License
 #  and because of this, it can be redistributed and modified under the
 #  conditions of this license, but the software remains __copyrghted__
 #  by the author. Don't intermix this with the general meaning of
-#  Public Domain software or such a derivated distribution label.     
+#  Public Domain software or such a derivated distribution label.
 #
 #  Linklint is a total rewrite of Rick Jansen's 4/15/96 version of webxref.
 #
 #
 # RECENT CHANGES (see CHANGES.txt for full list):
 #
-# Version 2.3.5 August 13, 2001 
+# Version 2.3.5 August 13, 2001
 # -----------------------------
 #   o added -no_anchors tag (for larger sites)
 #   o fixed bug that prevented site checks of
 #     some non port 80 sites. (Thanks Rick Perry).
 #
-# Version 2.3.4 August 8, 2001 
+# Version 2.3.4 August 8, 2001
 # ----------------------------
 #   o keep query string for http site checks
 #   o added no_query_string option to disable above
 #   o s!//+!/!g inside of UniqueUrl()
 #   o -http_header and -language options
 #
-# Version 2.3.3 July 6, 2001 
+# Version 2.3.3 July 6, 2001
 # ---------------------------
 #   o added 2nd argument to mkdir() on line 921
 #   o for creating url doc directory
 #
-# Version 2.3.2 June 22, 2001 
+# Version 2.3.2 June 22, 2001
 # ---------------------------
 #   o -no_warn_index for missing index file warnings
 #   o -concise_url flag to suppress output of valid remote links
 #     on STDOUT
 #
-# Version 2.3.1 June 21, 2001 
+# Version 2.3.1 June 21, 2001
 # ---------------------------
 #   o unified -proxy support (no conflict w/ virtual hosts now)
 #     and moved it to Request() so we should support proxies
 #     for site checking
 #
-# Version 2.3.0 June 3, 2001 
+# Version 2.3.0 June 3, 2001
 # --------------------------
 #   o moved home site and email address
 #   o added -help_all -version -license "@"
 #   o updated to GPL
-#    
+#
 #========================================================================
 
 $version = "2.3.5";
 
 $Usage3 = qq~
 Use $prog with no arguments for standard usage.
-Use "$prog -help_all" for a list of ALL options.   
+Use "$prog -help_all" for a list of ALL options.
 ~;
 
 $Version_Usage = <<VERSION_USAGE;
 This program is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-GNU General Public License for more details.          
+GNU General Public License for more details.
 
 VERSION_USAGE
 
 		       Version 2, June 1991
 
  Copyright (C) 1989, 1991 Free Software Foundation, Inc.
-      59 Temple Place, Suite 330, 
+      59 Temple Place, Suite 330,
       Boston, MA  02111-1307  USA
  Everyone is permitted to copy and distribute verbatim copies
  of this license document, but changing it is not allowed.
   -host name[:port] Use "name" as domain name of site.
   -http             Check site remotely via http (requires -host).
   -net              Check status of remote http links found in site.
-  -help             Show help. (Use "-help_all for complete usage).                          
+  -help             Show help. (Use "-help_all for complete usage).
   -version          Show version info.
 
 Multi File Output:
 linkset:
   home page only: /  (default)                   root dir only: /#
      entire site: /@                           and all subdirs: /#/#
-  specific links: /link1 /link2 ...        "sub" dir and below: /sub/@ 
+  specific links: /link1 /link2 ...        "sub" dir and below: /sub/@
 USAGE
 
 $Help2 = <<'HELP2';
 
   5) linklint -doc linkdoc -host host /@ -http
      Same as (2) but checks site using http instead of file system.
- 
+
   6) linklint -doc linkdoc @@linkdoc/remote.txt
      Check remote link status without rechecking entire site.
 EXAMPLES
     ignore
     index
     javascript
-    language 
+    language
     local
     map
     skip
-   
+
     /;
 
 $FullOpts = join "|", qw/
 
     $CacheNet && !$CheckNet && !@CheckUrls && do {
 
-        $Arg{'flush'} && 
+        $Arg{'flush'} &&
             print STDERR "ERROR: -flush requires -net or -netmod or -netset\n";
 
-        $Arg{'retry'} && 
+        $Arg{'retry'} &&
             print STDERR "ERROR: -retry requires -net or -netmod or -netset\n";
 
-        die($ErrUsage);      
+        die($ErrUsage);
     };
 
 
 
     $Use_QS = ($Arg{'http'} and not $Arg{no_query_string}) ? 1 : 0;
 
-    %LANGUAGE and        
+    %LANGUAGE and
         $HTTP_HEADER{"Accept-Language: " . join ", ", sort keys %LANGUAGE} = 1;
 
     $Arg{'http'} && do {
         &Progress( $Http ? "\nChecking links via http://$ServerHost"
                          : "\nChecking links locally in $ServerRoot");
         &Progress("that match: " . join(" ", keys %LinkSet));
-        &Progress(&Plural(scalar keys %Seeds, "%d seed%s: ") . 
+        &Progress(&Plural(scalar keys %Seeds, "%d seed%s: ") .
             join(" ", sort keys %Seeds));
 
         $Time = -time;
     $DocDir and $URL_PREFIX =~ m~/~ and do {
         $url_doc_dir = "$DocDir/$URL_PREFIX";
         $url_doc_dir =~ s~/[^/]+$~~;
-        -d $url_doc_dir or mkdir($url_doc_dir, 0777) or 
+        -d $url_doc_dir or mkdir($url_doc_dir, 0777) or
             Error("Could not mkdir($url_doc_dir)");
     };
 
     local($link, $referer) = @_;
 
 	$Parsed{$link} && return '1';
-     
+
     $Abort || $link !~ /$LinkSet/o || ($Skip && $link =~ /$Skip/o) ||
     ++$Parsed > $Limit || do {
         $Parsed{$link}++;
         $Mapped{$oldlink} = $link;
     };
 
-    $flag || return '';                 # new url was already cached 
+    $flag || return '';                 # new url was already cached
 
     local($msg) = &Http'ErrorMsg($flag);
 
         $att  = $2;
         $term = $4;
 
-        while ( ! $term  ) { 
+        while ( ! $term  ) {
              $att .= $_;
             ($term, $att) = &FixTag(*HANDLE, $att);
              $term eq 'next' && next TAG;
             &Warn("missing </script>", $link);
             last TAG;
         }
-        
+
         $att || next TAG;
 
         $DbP && $att && do {
     }
 
     $/ = "\n";          # reset line seperator to "\n"
-    $DbP or return;    
+    $DbP or return;
     print '=' x 60 , "\n\n";
     $ErrTag = '';
 }
     $BasePath   = '';
     $BaseFile   = $CurFile;
     ($CurPath = $CurFile) =~ s#(\?.*)$##;     # strip query off of path
-    $CurPath =~ s#([^/]+)$##;                 # strip file off of path  
+    $CurPath =~ s#([^/]+)$##;                 # strip file off of path
     $CurPath = $CurPath || "/";               # default to root
 }
 
     ($place = $ProtoJS{$1}) || return;
     grep( $place{$_}++, split("\n", $place));
     while ($link =~ s/^\s*("([^"]*)"|'([^']*)'|([^"'\)]+))\s*[,\)]//) {
-        $place{++$cnt} && $list{&UniqueUrl($+)}++;        
+        $place{++$cnt} && $list{&UniqueUrl($+)}++;
     }
 }
 
 
     ($erratt = substr($att, 0, 20)) =~ s/\s+/ /;   # for error msg
     $temp = $att;
-    $temp =~ s/"[^"]*"|'[^']*'//g;                 # strip leading ".." 
-
-    $DbP && print "tail  = [$temp]\n";       
+    $temp =~ s/"[^"]*"|'[^']*'//g;                 # strip leading ".."
+
+    $DbP && print "tail  = [$temp]\n";
 
     $temp = m/(['"])/ || do {                      # should have ' or "
 
 
     $DbP && print "append3 [$1]\n";
 
-    $term = $3;  
+    $term = $3;
     $DbP && print "term  = [$term]\n";
     ! $term && ($att .= $_);
     ($term, $att);
     local(@files, %files, $file, $link, $absdir, $reldir);
 
     foreach $dir (sort keys %dirlist) {
- 
+
         &Progress("checking $dir");
         &Progress($msg);
 
     #---- Resolve named anchors
 
     $Arg{no_anchors} or do {
-        &HashUnique(*WantAnch);    
+        &HashUnique(*WantAnch);
         &ResolveAnch(*WantAnch, *Anchor, *LostAnch);
     };
 
     #---- resolve named image maps
 
-    &HashUnique(*WantMap);    
+    &HashUnique(*WantMap);
     &ResolveAnch(*WantMap, *ImgMap, *LostMap);
-    
+
     &HashUnique(*FileList);   # pathinfo and dirlookup can cause extras.
     &HashUnique(*LostFile);   # pathinfo and dirlookup can cause extras.
     &HashUnique(*Action);
             &HtmlDoc($txt, $DOCFILES{$txt}, *DefDir, *Action, $DocBase );
             $Arg{'htmlonly'} && $txt !~ /^remote/ && unlink($txt);
         };
-    
+
         &Progress(&Plural(scalar keys %DOCFILES, "wrote %n html file%s"));
 
         &HtmlSummary("${indexfile}.txt", $indexhtml, $title, *DOCFILES) &&
 
     %ExtLink && print &Plural($UrlFailedF , "%N file%s had failed urls.\n");
 
-    $ErrF && $UrlFailF && print 
-        &Plural($ErrF + $UrlFailedF, 
+    $ErrF && $UrlFailF && print
+        &Plural($ErrF + $UrlFailedF,
             "There were %n file%s with broken links.\n");
 
     $CacheNet && print
 sub Abbrev
 {
     local($max, $str) = @_;
-    length($str) > $max &&  ($str =  substr($str, 0, $max - 4) . " ..."); 
+    length($str) > $max &&  ($str =  substr($str, 0, $max - 4) . " ...");
     $str;
 }
 
     $LogFile && close($LogFile);
     $LogFile = $name;
 
-    open($LogFile, ">$LogFile") || 
+    open($LogFile, ">$LogFile") ||
        &Error(qq~could not open file "$LogFile" for output~, 'sys');
     select($LogFile);
 }
     elsif ($file) {
         open($file, $file) || &Error(qq~could not open file-list "$file"~, 'sys');
     }
-    else { 
+    else {
         $file = \*STDIN;
     }
-    
+
     while (<$file>) {
         s/^#! ?// && do {print STDERR $_; next; };  # print comments
         m/^#/ && next;
     while ( @_ && ($_ = shift)) {
         s/^@@// && ( push(@HttpFiles, $_), next);
         s/^@//  && ( push(@out, &ReadFile($_)), next);
-        (m#^/# || m#\.html?$#i || s#^http://#http://#i ) && 
+        (m#^/# || m#\.html?$#i || s#^http://#http://#i ) &&
             (push(@out, $_), next);
 
         s/^-// || &Error( qq~at "$_"~ .
            qq~\nexpected: "-flag" or "/linkset" or "http: ..."\n~ . $ErrUsage);
-                       
+
         if ( /^($MiscFlags)$/o ) {
             $Arg{$1}++;
         }
             (@_ < 2 || $_[0] =~ /^-/ || $_[1] =~ /^-/ ) &&
                 &Error("expected 2 parameters after -$_\n" . $ErrUsage);
                 $_[1] =~ /:/ ||
-                    &Error(qq~expected username:password at "$_[1]"\n~ . 
+                    &Error(qq~expected username:password at "$_[1]"\n~ .
                         $ErrUsage);
                 $PASSWORD{$_[0]} = $_[1];
                 shift; shift;
     $Proxy = $Arg{'proxy'} || '';
     $Proxy_Port = ($Proxy and $Proxy  =~ s/:(\d*)$//)  ? $1 : '';
 
-    ($DB{7} || $DB{8}) && do { 
+    ($DB{7} || $DB{8}) && do {
         print "TimeOut: $TimeOut\n";
         print "User-Agent: $UserAgent\n";
         $Proxy && print "Proxy: $Proxy\n";
     $Init++ && return;
 
     $CRLF = $Arg{'DOS'} ? "\n" : pack("cc", 13, 10);
-    
+
     for my $header_line (keys %headers) {
         my ($name, $value) = $header_line =~ m/^([\w\-]+):\s*(.*)/;
         $name && $value or do {
         };
     }
 
-    else { 
+    else {
         $REQHEAD{'if-Modified-Since'} = $TIMESTR;
-        $flag = &Request($host, $port, $path, 'GET', "GetModified"); 
+        $flag = &Request($host, $port, $path, 'GET', "GetModified");
     }
 
     $csum = -1 if $flag == 304;  # -1: server obeys "if-mod"
     while( ($url, $cache) = each %StatCache ) {
         ($flag) = split(" ", $cache);
         push(@urls, $url) if
-            ($fail  && $FlagWarn{$flag}  ) ||          
+            ($fail  && $FlagWarn{$flag}  ) ||
             ($fail  && $FlagFail{$flag}  ) ||
             ($ok    && $FlagOk{$flag}    ) ||
             ($retry && $FlagRetry{$flag} );
 
     local($redir) = &main'ParseRedirect(@_);
     $redir || return $flag;
-    $HEADER{'location'} = $redir; 
+    $HEADER{'location'} = $redir;
     return -3003;
 }
 
     local($host, $port, $path, $method, $getmethod, @params) = @_;
     local($request, $ipaddr, $flag);
     $port = $port || 80;
-    
+
     %HEADER = ();             # global %HEADER holds http header info.
 
     $DB{9} && do {
     $@ = '';
 
     $TimeOut && alarm($TimeOut);
-   
+
     $SOCKETOPEN = 0;
 
     eval {
     $TimeOut && alarm(0);
 
     $SOCKETOPEN && close(S);
-    $SOCKETOPEN = 0;    
+    $SOCKETOPEN = 0;
     $@ || return $flag;
     $@ =~ /^timeout/ && return $ALARMFLAG;
     $@ =~ /^user interrupt/ || return -6000;
     return $flag;
 }
 
-sub AlarmHandler 
+sub AlarmHandler
 {
    die "timeout";
 }
 
-sub IntHandler 
+sub IntHandler
 {
    die "user interrupt";
 }
 
 1;                     # required packages must return true
 
+=encoding utf8
+
+=head1 NAME
+
+Linklint - fast link checker and website maintenance tool
+
+=head1 SYNOPSIS
+
+B<linklint>
+[B<-cache I<directory>>]
+[B<-case>]
+[B<-checksum>]
+[B<-concise_url>]
+[B<-db1..9>]
+[B<-delay I<d>>]
+[B<-doc>]
+[B<-docbase I<base>>]
+[B<-dont_output I<xxxx>>]
+[B<-error>]
+[B<-flush>]
+[B<-forward>]
+[B<-help>]
+[B<-help_all>]
+[B<-host I<hostname:port>>]
+[B<-host I<hostname>>]
+[B<-htmlonly>]
+[B<-http>]
+[B<-http_header I<name:value>>]
+[B<-ignore I<ignoreset>>]
+[B<-index I<file>>]
+[B<-language I<zz>>]
+[B<-limit I<n>>]
+[B<-list>]
+[B<-local I<linkset>>]
+[B<-map I</a=[/b]>>]
+[B<-net>]
+[B<-netmod>]
+[B<-netset>]
+[B<-no_anchors>]
+[B<-no_query_string>]
+[B<-no_warn_index>]
+[B<-orphan>]
+[B<-out I<file>>]
+[B<-output_frames>]
+[B<-output_index I<filename>>]
+[B<-password I<realm user:password>>]
+[B<-proxy I<hostname[:port]>>]
+[B<-quiet>]
+[B<-redirect>]
+[B<-retry>]
+[B<-silent>]
+[B<-skip I<skipset>>]
+[B<-textonly>]
+[B<-timeout I<t>>]
+[B<-url_doc_prefix I<url/>>]
+[B<-version>]
+[B<-warn>]
+[B<-xref>]
+I<linkset>
+
+=head1 VERSION
+
+2.3.5 August 13, 2001
+
+=head1 DESCRIPTION
+
+This manual page documents briefly the Linklint program, which is an
+Open Source Perl program that checks local and remote HTML links.
+
+This manual page was written for the Debian distribution because the
+original program does not have a manual page.  Instead, it has
+documentation in the HTML format; see below.
+
+=head1 OPTIONS
+
+=head2 Input File Selection
+
+Whether you are doing a local site check or an HTTP site check, you
+specify which directories (presumably containing HTML files) to check
+with one or more linksets. A linkset uses two wildcard characters
+@ and #.
+Each linkset specifies one or more directories much like the standard
+* and ? wildcard characters are used to specify the characters in the
+names of files in one directory.
+
+The @ character matches any string of characters (this kind of acts like
+"*"), and the # character (which is kind of like "?") matches any string
+of characters except "/" . The best way to understand how @ and # work
+is to look at a few examples:
+
+                          the entire site /@
+              the homepage only (default) /
+         files in the root directory only /#
+             . . . and one directory down /#/#
+          files in the sub directory only /sub/#
+     files in the sub directory and below /sub/@
+                           specific files /file1 /file2 ...
+                  specific subdirectories /sub1/@ /sub2/@ ...
+
+If you specify more than one linkset, files matching any of the linksets
+will be checked. HTML files that don't match any of the linksets will be
+skipped. Linklint will see if they exist but won't check any of their
+links.
+
+=head2 Other File Selection Options
+
+=over 4
+
+=item B<-skip> I<skipset>
+
+Skips HTML files that match I<skipset>.  C<Linklint> will make sure
+these files exist but won't add any of their links to the list of files
+to check.  Multiple I<skipsets> are allowed, but each must be preceded
+with B<-skip> on the command line. Skipsets use the same wildcard
+characters as linksets.
+
+=item B<-ignore> I<ignoreset>
+
+Ignores files matching I<ignoreset>.  C<Linklint> doesn't even check to
+see if these files exist.  Multiple I<ignoresets> are allowed, but each
+must be preceded with B<-ignore> on the command line.  Ignoresets use
+the same wildcard characters as linksets.
+
+=item B<-limit> I<n>
+
+Limits checking to I<n> HTML files (default 500).  All HTML files after
+the first I<n> are skipped.
+
+=back
+
+=head2 Local Site Checking
+
+If you are developing HTML pages on a computer that does not have an
+http server, or if you are developing a simple site that does not use
+Server Redirection or extensive CGI, you should use local site checking.
+
+     linklint /@
+
+Checks all HTML files in the current directory and below. Assumes that
+the current directory is the server root directory so links starting
+with "/" default to this directory. You must specify B</@> to check the
+entire site. See Which Files to Check for details.
+
+     linklint -root dir /@
+
+Checks all HTML files in dir and below. This is useful if you want to
+check several sites on the same machine or if you don't want to run
+Linklint in your public HTML directory.
+
+=head2 Other Local Site Options
+
+=over 4
+
+=item B<-host> I<hostname>
+
+By default C<Linklint> assumes all links on your site that start with
+C<http://> are remote links to other sites.  If you have absolute links
+to your own site, give C<Linklint> your hostname and links starting with
+C<http://hostname> will be treated as local files.  If you specify
+B<-host hostname:port,> only http links to this hostname and port will
+be treated as local files.
+
+=item B<-case>
+
+Makes sure that the filename (upper/lower) case used links inside of
+html tags matches the case used by the file system.  This is for
+Windows only and is very handy if you are porting a site to a Unix
+host.
+
+=item B<-orphan>
+
+Checks all directories that contain files used on the site for unused
+(orphan) files.
+
+=item B<-index> I<file>
+
+Uses I<file> as the default index file instead of the default list used
+by C<Linklint>. You can specify more than one file but each one must be
+preceded by B<-index> on the command line.  If a default index file is
+not found, C<Linklint> uses a listing of the entire directory. See the
+Default File section for details.
+
+=item B<-map> I</a=[/b]>
+
+Substitutes leading I</a> with I</b>.  For server-side image maps or to
+simulate Server Redirection.
+
+=item B<-no_warn_index>
+
+Turns off the "index file not found" warning.  Applies to local site
+checking only.
+
+=item B<-no_anchors>
+
+Tells C<Linklint> to ignore named anchors.  This could ease memory
+problems for people with large sites who are primarily interested in
+missing pages and not missing named anchors.  This option works for
+both HTTP and local site checks.
+
+=back
+
+=head2 HTTP Site Checking
+
+If you have a complicated site that uses lots of CGI or Server
+Redirection, you should use HTTP site checking. Even though an HTTP
+site check reads pages via your HTTP server, you will get the best
+performance if you do your checking on a machine that has a high speed
+connection to your server.
+
+     linklint -http -host www.site.com /@
+
+The B<-http> flag tells C<Linklint> to check HTML files on the site
+www.site.com via a remote http connection. You must specify a -host
+whenever you do an HTTP site check (otherwise Linklint won't where to
+get your pages). You can specify B</@> to check the entire site.  See Which
+Files to Check for details.
+
+=head2 HTTP Site Check Options
+
+=over 4
+
+=item B<-http>
+
+This flag tells Linklint to perform an HTTP site check instead of a
+local site check.  All files (except server side image maps) will be
+read via the HTTP protocol from your web server.
+
+=item B<-host> I<hostname:port>
+
+If you include I<:port> at the end of your
+hostname, Linklint uses this port for the HTTP site check.
+
+=item B<-password> I<realm user:password>
+
+Uses I<user> and I<password> as authorization to enter password
+protected I<realm.> Realms are named areas of a site that share a common
+set of usernames and passwords.  If passwords are needed to check your
+site, Linklint will tell you which realms need passwords in warning
+messages.  Enclose the realm in double quotes if it contains spaces.  If
+no password is given for a specific realm, Linklint will try using the
+password for the "C<DEFAULT>" realm if it was provided.
+
+=item B<-timeout> I<t>
+
+Times out after I<t> seconds (default 15) when
+getting files via http.  Once data is received, an additional
+I<t> seconds is allowed.  The timeout is
+disabled on Windows machines since the Windows port of Perl does not
+support the C<alarm()> function.
+
+=item B<-delay> I<d>
+
+Delays I<d> seconds between requests to the same
+host (default 0).  This is a friendly thing to do especially if you are
+checking many links on the same host.
+
+=item B<-local> I<linkset>
+
+Gets files that match I<linkset> locally.  The default B<-local>
+I<linkset> is B<@.map> (which matches any link ending in F<.map>).  This
+allows Linklint to follow links through server-side image maps.  The
+default is ignored if you specify your own B<-local> expressions.  You
+need to specify the B<-root> directory for this option to work properly.
+
+=item B<-map> I</a=[/b]>
+
+Substitutes leading I</a> with I</b>.  For server-side image maps or to
+simulate Server Redirection.
+
+=item B<-no_anchors>
+
+Tells C<Linklint> to ignore named anchors.
+
+=item B<-no_query_string>
+
+Up until version 2.3.4, Linklint did not use query strings while doing
+HTTP site checks.  Query strings were removed before making HTTP
+requests.  As of 2.3.4 query strings in links are used in the
+requests.  Use the B<-no_query_string> flag to get back the "old"
+behavior.
+
+=item B<-http_header> I<Name:value>
+
+Adds the HTTP header I<Name: value> to all HTTP requests
+generated by Linklint.  You will need to use quotation marks to hide
+spaces in the header line from the command line interpreter. Linklint
+will automatically add a space after the first colon if there is not
+one there already.  Multiple (unique) header lines are allowed.
+
+=item B<-language> I<zz>
+
+This option is only useful if you are checking a site that uses
+content negotiation to present the same URL in different languages.
+
+Creates an HTTP Request header of the form
+B<Accept-Language: zz> that is included as part of all HTTP
+requests generated by Linklint.  Multiple B<-language>
+specifications are allowed.  This will result in a single
+B<Accept-Language:> header that lists all of the languages you
+have specified in alphabetical order.  Some web sites can use this
+information to return pages to you in a specific language.
+
+If you need to get more complicated than this, use the more general
+purpose B<-http_header> to create your own header.  There is a partial
+list of language abbreviations (taken from Debian) included as part of
+the Linklint documentation.
+
+=back
+
+=head2 Remote URL Checking
+
+A remote URL check is used to see if a remote URL exists (or has been
+recently modified). Links in the remote pages are not checked nor does
+Linklint look for named anchors in remote URLs.
+     
+Remote URL checking can be used to check all of the "remote" links on
+your site (those that link to pages on other sites) or it can check a
+list of URLs. There are several ways to specify which remote URLs to
+check:
+     
+     linklint http://somehost/file.html
+
+Checks to see if F</file.html> exists on somehost. Multiple URLs can be
+entered on the command line, in an I<@commandfile>, or in an
+I<@@httpfile>.  Every URL to be checked must begin with C<http://>. This
+will disable site checking.
+     
+     linklint @@httpfile
+
+Checks all the remote http URLs found in httpfile. Anything in the file
+starting with C<http://> is considered to be a URL. If the file looks
+like a F<remoteX.txt> file generated by Linklint then all failed URLs will
+be cross referenced.
+     
+     linklint @@ -doc linkdoc
+
+Assuming you have already done a site check and used B<-doc linkdoc> to
+put all of your output files in the linkdoc directory, Linklint will
+check all the remote links that were found on your site and cross
+reference all failed URLs without doing a site check. You can use the
+B<-netmod> or B<-netset> flags to enable the status-cache.
+     
+     linklint -net [site check options]
+
+The B<-net> flag tells Linklint to check all remote links after doing
+either a local or HTTP site check site. If you are having memory
+problems, don't use the B<-net> option, instead use one of the B<@@>
+options above.
+
+=head2 Other Remote URL Options
+
+=over 4
+
+=item B<-timeout> I<t>
+
+Times out after I<t> seconds (default 15) when getting files via http.
+Once data is received, an additional I<t> seconds is allowed.  The
+timeout is disabled on Windows machines since the Windows port of Perl
+does not support the C<alarm()> function.
+
+=item B<-delay> I<d>
+
+Delays I<d> seconds between requests to the same host (default 0).  This
+is a friendly thing to do especially if you are checking many links on
+the same host.
+
+=item B<-redirect>
+
+Checks for E<lt>metaE<gt> redirects in the headers of remote  URLs that
+are html files.  If a redirect is found it is followed.  This feature is
+disabled if the status cache is used.
+
+=item B<-proxy> I<hostname[:port]>
+
+Sends all remote HTTP requests through the proxy server I<hostname> and
+the optional I<port>.  This allows you to check remote URLs or (new with
+version 2.3.1) your entire site from within a firewall that has an http
+proxy server.  Some error messages (relating to host errors) may not be
+available through a proxy server.
+
+=item B<-concise_url>
+
+Turns off printing successful URLs to STDOUT during remote link
+checking.
+
+=back
+
+=head2 Status Cache Options
+
+The Status Cache is a very powerful feature. It allows you to keep track
+of recent changes in all of the remote (off-site) pages you link to. You
+can then use the Linklint output files to quickly check changed pages to
+see if they still meet your needs.
+     
+The flags below make use of the status cache file linklint.url (kept in
+your HOME or LINKLINT directory). This file keeps track of the
+modification dates of all the remote URLs that you check.
+
+=over 4
+
+=item B<-netmod>
+
+Operates just like B<-net> but makes use of the status cache.  Newly
+checked URLs will be entered in the cache.  Linklint will tell you which
+(previously cached) URLs have been modified since the last B<-netset>.
+
+=item B<-netset>
+
+Like B<-netmod> but also resets the last modified status in the cache
+for all URLs that checked ok.  If you always use B<-netset>, modified
+URLs will be reported just once.
+
+=item B<-retry>
+
+Only checks URLs that have a host fail status in the cache.  Sometimes a
+URL fails because its host is temporarily down.  This flag enables you
+to recheck just those links.  An easy way to recheck all the cached URLs
+with host failures is C<linklint @@ -retry>.  Use
+C<linklint @@linkdoc/remoteX.txt -retry> if you want failed URLs to be
+cross referenced.
+
+=item B<-flush>
+
+Removes all URLs from the cache that are not currently being checked.
+The B<-retry> flag has no effect on which URLs are flushed.
+
+=item B<-checksum>
+
+Ensures that every URL that has been modified is reported as such.  This
+flag can make the remote checking take longer.  Many of the pages that
+require a checksum are dynamically generated and will always be reported
+as modified.
+
+=item B<-cache> I<directory>
+
+Reads and writes the F<linklint.url> cache file in this directory.  The
+default directory is set by your LINKLINT or HOME environment variables.
+
+=back
+
+=head2 Output Options
+
+No output files are generated by default, only progress and a brief
+summary of the results are printed to the screen. You can produce
+complete documentation (split up into separate files) in a B<-doc>
+directory or put selected output in a single B<-out> file or by
+redirecting the standard output to a file. See the Output File
+Specification section for a detailed description of all output files.
+
+=head2 Multi File Output
+
+=over 4
+
+=item B<-doc> I<linkdoc>
+
+Sends all output to the I<linkdoc> directory.  The output is divided
+into separate F<.txt> and F<.html> files.  Complete documentation is
+always produced regardless of the single file flags.
+
+The file F<index.txt> contains an index to all the other files;
+F<index.html> is an HTML version of the index.  The index files for
+remote URL checking are F<url_index.txt> and F<url_index.html>.
+
+=item B<-textonly>
+
+Prevents any HTML files from being created in the B<-doc> directory.
+
+=item B<-htmlonly>
+
+Erases redundant text files in the B<-doc> directory after they have
+been used to create the HTML output files.  The files F<remote.txt> and
+F<remoteX.txt> are not erased since they can be used by Linklint to
+recheck remote URLs.
+
+=item B<-docbase> I<base>
+
+Overrides the default I<base> expression used for directing a browser to
+the resources listed in the output HTML files.  The base is prepended to
+local links in the output HTML files.  This only affects the links in
+HTML output files, it has no effect on what is displayed in these files.
+Ordinarily this flag would only be used during a local site check to set
+the base to C<http://host>.
+
+=item B<-output_frames>
+
+All HTML output data files are linked to from F<index.html>.  If you use
+this flag then the data files will be opened up in a new frame
+(window) which can be handy in some cases since it always leaves the
+F<index.html> file open in its own window.
+
+=item B<-output_index> I<filename>
+
+The output index files were previously named F<linklint.txt> and
+F<linklint.html>.  These have now been changed to F<index.txt> and
+F<index.html>.  You can use the B<-output_index> option to change this
+name back to C<linklint> or to something else.
+
+=item B<-url_doc_prefix> I<url/>
+
+By default, the output files associated with remote URL checking all
+start with "url".  You can change this with the B<-url_doc_prefix>
+option.  If the url_doc_prefix contains a "/" character then the
+appropriate directory will be created (as a subdirectory of the -doc
+directory).
+
+=item B<-dont_output> I<xxxx>
+
+Don't create output files that contain "xxxx".  Can be repeated.
+Example:
+
+        -dont_output "X$"
+
+will suppress the output of all cross reference files.
+
+=back
+
+=head2 Single File Output
+
+=over 4
+
+=item B<-error>
+
+Lists missing files and other errors.
+
+=item B<-out> I<file>
+
+Sends list output and summary information to I<file>.
+
+=item B<-list>
+
+Lists all found files, links, directories etc.
+
+=item B<-warn>
+
+Lists all warnings.
+
+=item B<-xref>
+
+Adds cross references to the lists.
+
+=item B<-forward>
+
+Sorts lists by referring file.
+
+=back
+
+=head2 Debug and other Flags
+
+=over 4
+
+=item B<-db1>
+
+Debugs command line input and linkset expressions.
+
+=item B<-db2>
+
+Prints the name of every file that gets checked (not just HTML files).
+
+=item B<-db3>
+
+Debugs HTML parser, prints out tags and resulting links.
+
+=item B<-db4>
+
+Debugs socket connection (kind of).
+
+=item B<-db5>
+
+Not used.
+
+=item B<-db6>
+
+Details last-modified status for remote URLs (requires B<-netset> or
+B<-netmod>).
+
+=item B<-db7>
+
+Prints brief debug information while checking remote URLs.
+
+=item B<-db8>
+
+Prints all http headers while checking remote URLs.
+
+=item B<-db9>
+
+Generates random http errors.
+
+=item B<-version>
+
+Gives version information.
+
+=item B<-help>
+
+Lists a few simple examples of how to use Linklint.
+
+=item B<-help_all>
+
+Lists all help (contained in program) including every input option.
+
+=item B<-quiet>
+
+Disables printing progress to the screen.
+
+=item B<-silent>
+
 Disables printing summaries to the screen.
+
+=back
+
+=head1 AUTHOR
+
+Linklint is written by James B. Bowlin E<lt>jbowlin@linklint.orgE<gt>.
+This manual page was written by Denis Barbier
+E<lt>barbier@debian.orgE<gt> for the Debian system (but may be used by
+others) by cut'n'paste from original documentation written in HTML.
+
+=cut
+
 #==========================================================================
 # End of linkhttp.pl
 #==========================================================================

src/wml_backend/p2_mp4h/mp4h-config.src

 ##EOF##
 __END__
 
+=encoding utf8
+
 =head1 NAME
 
 mp4h-config - Displays configuration suitable to build mp4h modules

src/wml_backend/p3_eperl/eperl.proto.pod

 ##
-##        ____           _ 
+##        ____           _
 ##    ___|  _ \ ___ _ __| |
 ##   / _ \ |_) / _ \ '__| |
 ##  |  __/  __/  __/ |  | |
 ##   \___|_|   \___|_|  |_|
-## 
+##
 ##  ePerl -- Embedded Perl 5 Language
 ##
 ##  ePerl interprets an ASCII file bristled with Perl 5 program
 ##  eperl.pod -- ePerl Documentation in Plain Old Document (POD) Format
 ##
 
+=encoding utf8
+
 =head1 NAME
 
 ePerl - Embedded Perl 5 Language
 To achieve this, ePerl translates all plain code into (escaped) Perl 5 strings
 placed into F<print> constructs while passing through all embedded native Perl
 5 code. As you can see, ePerl itself does exactly the same internally, a silly
-programmer had to do when writing a plain Perl generation script. 
+programmer had to do when writing a plain Perl generation script.
 
 Due to the nature of such bristled code, ePerl is really the better attempt
 when the generated ASCII data contains really more static as dynamic data. Or
    <: cmd; ...; cmd; :>
 
 But when the last semicolon is missing it is automatically added
-by ePerl, i.e. 
+by ePerl, i.e.
 
    <: cmd; ...; cmd :>
 
    <: if (...) { _:>
    foo
    <: } else { _:>
-   bar 
+   bar
    <: } :>
 
 where you want to spread a Perl directive over more ePerl blocks.
 the result is
 
   foo
-  
+
   quux
 
 because ePerl always preserves code around ePerl blocks, even
   <: $x = 1; :>//
   quux
 
-the result is 
+the result is
 
   foo
   quux
 ePerl blocks itself. It entirely relies on the Perl interpreter library,
 because it is the only instance which can do this without errors.  But the
 problem is that ePerl at least has to recognize the begin and end positions of
-those ePerl blocks. 
+those ePerl blocks.
 
 There are two ways: It can either look for the end delimiter while parsing but
 at least recognize quoted strings (where the end delimiter gets treated as
 even number of quotes.
 
 So, whenever your end delimiter also occurs inside Perl constructs you have to
-escape it in any way. 
+escape it in any way.
 
 =item I<8. HTML entity conversion.>
 
 provide a complete HTTP response itself. The advantage is that the program can
 generate arbitrary HTTP headers or MIME-encoded multi-block messages.
 
-So, 
+So,
 above we have renamed the file to F<file.cgi> which restricted us a little
 bit. When we alternatively rename F<file.html> to F<nph-file.cgi> and force
 the NPH-CGI/1.1 interface mode via option B<-mn> then this file becomes a
   ...
 
 As you expect this can be also used with the implicit Server-Side Scripting
-Language technique. Put  
+Language technique. Put
 
   AddType      application/x-httpd-eperl  .phtml .eperl .epl
   Action       application/x-httpd-eperl  /internal/cgi/nph-eperl
      .html, .phtml, .ephtml, .epl, .pl, .cgi
   2. The UID of the calling process has to be a valid UID,
      i.e. it has to be found in the systems password file
-  3. The UID of the calling process has to match the 
+  3. The UID of the calling process has to match the
      following users: root, nobody
   4. The UID of the script owner has to be a valid UID,
      i.e. it has to be found in the systems password file
 
 =over 4
 
-=item C<#include path> 
+=item C<#include path>
 
 This directive is an include directive which can be used to include really any
 stuff, but was actually designed to be used to include other ePerl source
 In case of the absolute path the file is directly accessed on the filesystem,
 while the relative path is first searched in the current working directory and
 then in all directories specified via option B<-I>. In the third case
-(HTTP URL) the file is retrieves via a HTTP/1.0 request on the network. 
+(HTTP URL) the file is retrieved via an HTTP/1.0 request on the network.
 Here HTTP redirects (response codes 301 and 302) are supported, too.
 
 Notice: While ePerl strictly preserves the line numbers when translating the
 I<NEVER USE #INCLUDE FOR ANY DATA WHICH IS NOT UNDER YOUR OWN CONTROL>.
 Instead always use C<#sinclude> for such situations.
 
-=item C<#sinclude path> 
+=item C<#sinclude path>
 
 This is the secure variant of C<#include> where after reading the data from
 I<path> all ePerl begin and end delimiters are removed. So risky ePerl blocks
 http://www.perl.com/perl/CPAN and grab your favorite packages which can make
 your life easier (both from within plain Perl scripts I<and> ePerl scripts)
 and just use the construct ``C<use name;>'' in any ePerl block to use them
-from within ePerl. 
+from within ePerl.
 
 When using ePerl as a Server-Side-Scripting-Language I really recommend you to
 install at least the packages F<CGI.pm> (currently vers.  2.36),
 via C<$name> or more explicitly via C<$main::name>. The command
 
   eperl -d name=value ..
-  
+
 is actually equivalent to having
 
   <? $name = value; !>
 inside the Perl blocks. The command
 
   eperl -D name=value ..
-  
-is actually equivalent to 
+
+is actually equivalent to
 
   export name=value; eperl ...
 
 =item B<-C>
 
 This enables the HTML entity conversion for ePerl blocks. This option is
-automatically forced in CGI modes. 
+automatically forced in CGI modes.
 
 The solved problem here is the following: When you use ePerl as a
 Server-Side-Scripting-Language for HTML pages and you edit your ePerl source
 
 The filename part of C<SCRIPT_SRC_URL>. Use this one when you need the name of
 the script, for instance for relative self-references through URLs.  Actually
-the same as C<SCRIPT_SRC_PATH_FILE>, but provided for consistency. 
+the same as C<SCRIPT_SRC_PATH_FILE>, but provided for consistency.
 
 =item C<SCRIPT_SRC_SIZE>
 

src/wml_backend/p6_asubst/asubst.src

 ##EOF##
 __END__
 
+=encoding utf8
+
 =head1 NAME
 
 asubst - Area Substitution

src/wml_backend/p9_slice/slice.pod

+=encoding utf8
 
 =head1 NAME
 

src/wml_docs/wml_faq.pod

 ##
 ##  WML FAQ
-##  Copyright (c) 1997 Ralf S. Engelschall, All Rights Reserved. 
+##  Copyright (c) 1997 Ralf S. Engelschall, All Rights Reserved.
 ##  Copyright (c) 1999 Denis Barbier
 ##
 
+=encoding utf8
+
 =head1 NAME
 
 WML FAQ - Frequently Asked Questions on WML
 
 =item B<G04:> Which tools are included in the WML distribution?
 
-=item B<G05:> WML is as powerful as it can, but are there 
+=item B<G05:> WML is as powerful as it can, but are there
               even any restrictions?
 
 =item B<G06:> Where do I find documentation?
 
 =back
 
-=item B<G05: WML is as powerful as it can, but are there 
+=item B<G05: WML is as powerful as it can, but are there
              even any restrictions?>
 
 Sure, WML has some restrictions which cannot be eliminated easily.
 
 =item B<G07: Is there any official support for WML?>
 
-Yes, there is. First there is an official support mailing list for WML users: 
+Yes, there is. First there is an official support mailing list for WML users:
 
   sw-wml@engelschall.com
 
 =item B<G09: I really want to use WML for textprocessing because it's such
              powerful, isn't it?>
 
-Yes, WML is, but only for HTML generation. 
+Yes, WML is, but only for HTML generation.
 
 Although WML provides a lot of hot features (like Slicing or Diversion) one
 often also wants for textprocessing, WML is not a general textprocessing
 =item B<P01: How can I avoid conflicts with a particular WML pass?>
 
 When you are sure you don't need this particular pass, you can use WML option
-B<-p> and just leave out the number of the pass. 
+B<-p> and just leave out the number of the pass.
 
 =item B<P02: How can I avoid conflicts with a particular WML pass
              for only some areas of the input file?>
 Just put the C<E<lt>gfontE<gt>> tag directly into the C<name> attribute
 of C<E<lt>hrefE<gt>>:
 
-  <href name="<gfont>The Hyperlink Text</gfont>" 
+  <href name="<gfont>The Hyperlink Text</gfont>"
         url="http://...">
 
 =item B<I04: How can I combine the gfont- with the rollover-tag?>

src/wml_docs/wml_intro.pod

 ##
 ##  WML Introduction
-##  Copyright (c) 1997 Ralf S. Engelschall, All Rights Reserved. 
+##  Copyright (c) 1997 Ralf S. Engelschall, All Rights Reserved.
 ##
 
+=encoding utf8
+
 =head1 NAME
 
 WML Introduction - An introduction to WML's basic concepts

src/wml_docs/wml_macros.pod

 ##
 ##  WML Macros
-##  Copyright (c) 2000-2001 Denis Barbier, All Rights Reserved. 
+##  Copyright (c) 2000-2001 Denis Barbier, All Rights Reserved.
 ##
 
+=encoding utf8
+
 =head1 NAME
 
 WML Macros - Writing powerful WML macros
 =item *
 
 An I<end> tag is a tag which ends an I<element> (see below).  It
-consists of a left angle bracket, a slash, the element name, and a 
+consists of a left angle bracket, a slash, the element name, and a
 right angle bracket, like in
 
      </table>
 
   1| <define-tag foo endtag=required>bar</define-tag>
   2| <foo>baz</foo>
- 
+
 Output:
 
   1|
 
 Output:
 
-  1| 
-  2| 
+  1|
+  2|
   3| Macro name:          foo
   4| Number of arguments: 3
   5| First argument:      Here
   6| Second argument:     are
   7| All arguments:       Here are attributes
-  8| Body macro:          
+  8| Body macro:
   9| And the body
  10| goes here.
- 11| 
- 12| 
+ 11|
+ 12|
 
 These special strings may also be altered by modifiers, which are a set
 of letters (one or more) put after the percent sign.  These modifiers,
 
 Output:
 
-  1| 
-  2| 
+  1|
+  2|
   3| First argument:      Here
   4| All arguments:       Here
   5| are
   6| attributes
-  7| Body macro:          
+  7| Body macro:
   8| And the body
   9| goes here.
- 10| 
- 11| 
+ 10|
+ 11|
 
 =back
 
 
 Output:
 
-  1| 
+  1|
   2| <a href="http://www.w3.org/"><tt>http://www.w3.org/</tt></a>
 
 The C<E<lt>preserveE<gt>> tag pushes the variable passed in argument in
 Output:
 
   1| ePerl:Error: Perl parsing error (interpreter rc=255)
-  2| 
+  2|
   3| ---- Contents of STDERR channel: ---------
   4| Backslash found where operator expected at /tmp/wml.1183.tmp1.wml line
   5| 10, near ""attrs:<: print attrs:0; print "\"
 Output:
 
   1| ePerl:Error: Perl parsing error (interpreter rc=255)
-  2| 
+  2|
   3| ---- Contents of STDERR channel: ---------
   4| Bareword found where operator expected at /tmp/wml.1198.tmp1.wml
   5| line 10, near "q|$string = q|Hello"
 Input:
 
   1| #use wml::std::tags
-  2| 
+  2|
   3| <define-tag remove-letter endtag=required whitespace=delete>
   4| <perl>
   5| <perl:assign $string>%body</perl:assign>
   7| $string =~ s|$letter||g;
   8| <perl:print: $string />
   9| </perl>
- 10| </define-tag>\ 
+ 10| </define-tag>\
  11| <remove-letter s><remove-letter e>\
  12| Hello this is a test\
  13| </remove-letter></remove-letter>
   <perl:assign $var1>%body</perl:assign>
   <perl:assign:sq $var2>%body</perl:assign:sq>
 
-and second rule 
+and second rule
 
   print $string;
   print "<img src=\"$src\" alt=\"$alt\">";

src/wml_docs/wml_tutorial.pod

 ##
 ##  WML Tutorial
-##  Copyright (c) 1997 Ralf S. Engelschall, All Rights Reserved. 
+##  Copyright (c) 1997 Ralf S. Engelschall, All Rights Reserved.
 ##
 
+=encoding utf8
+
 =head1 NAME
 
 WML Tutorial - Understanding WML step-by-step
 
 Sometimes situations can occur where some of your markup code or page contents
 conflicts with WML due to overlapping tagnames, etc. Here WML interprets some
-stuff you actually don't want to be interpreted. 
+stuff you actually don't want to be interpreted.
 
 Input:
 
 Input:
 
  1| <body>
- 2| 
+ 2|
  3| <img src = "file.gif"    alt="  test  " >
  4| <pre>
  5|
- 6|     Preformatted          Text  
+ 6|     Preformatted          Text
  7| </pre>
  8|     Not    Preformatted   Text
  9| </body>
  2| <img src="file.gif" alt=" test ">
  4| <pre>
  5|
- 6|     Preformatted          Text  
+ 6|     Preformatted          Text
  7| </pre>
  8|     Not Preformatted Text
  9| </body>
 
 =head1 STRUCTURING THE MARKUP CODE
 
-=head2 LESSON: Using Include Files 
+=head2 LESSON: Using Include Files
 
 One of the most useful features of WML is the ability to move commonly used
 stuff into include files which can be selectively read in at later steps.
  1| bar
  2| The value of bar is: $(bar:-unknown)
 
-...and the following input file: 
+...and the following input file:
 
  1| foo
  3| #include 'bar.wml' bar="FooBar"
   3| <html>
   4| <pod notypo>
   5| =head1 Headline1
-  6| 
+  6|
   7| Foo
-  8| 
+  8|
   9| =head2 Headline1.1
- 10| 
+ 10|
  11| Bar
  12| </pod>
- 13| 
+ 13|
  14| <sdf notypo>
  15| H1: Headline1
- 16| 
+ 16|
  17| Foo
- 18| 
+ 18|
  19| H2: Headline 1.1
- 20| 
+ 20|
  21| Bar
  22|   * Baz
  23|     - Foobar
- 24|     - Quux 
+ 24|     - Quux
  25|   * Foo
  26| </sdf>
  27| </html>
   4|   (*, 2) align=right
   5|   (1, *) valign=top
   6|   (2|3, *) valign=bottom
-  7|   (1,1) 
-  8|   Header 1 
+  7|   (1,1)
+  8|   Header 1
   9|   (1,2)
  10|   Header 2
- 11|   (2,1) 
- 12|   Cell-A   
+ 11|   (2,1)
+ 12|   Cell-A
  13|   (2,2)
  14|   Cell-B
- 15|   (3,1) 
- 16|   Cell-C   
- 17|   (3,2) 
+ 15|   (3,1)
+ 16|   Cell-C
+ 17|   (3,2)
  18|   Cell-D
  19| </xtable>
 
   1| <define-tag red endtag=required whitespace=delete>
   2| <font color="#cc3333">%body</font>
   3| </define-tag>
-  4| 
+  4|
   5| This is <red>very important</red>.
 
 Output:
   6| <ifeq "%0" "freebsd"     "rse@freebsd.org">
   7| <ifeq "%0" "sdm"         "rse@sdm.de">
   8| </define-tag>
-  9| 
+  9|
  10| This is <me> and <me apache>.
 
 Output:
  10| <ifeq "<get-var at>" "sdm"         "rse@sdm.de">
  11| <restore at>
  12| </define-tag>
- 13| 
+ 13|
  14| This is <me> and <me at=apache>.
 
 Output:
    5|
    6| Some Text<br>
    7| Some more Text
-    
+
 Output:
 
    1| Some Text<br><br>
   2| <define-tag me whitespace=delete>
   3| <preserve at>
   4| <set-var %attributes>
-  5| <:{ 
+  5| <:{
   6|     my $at = qq/<get-var at>/;
   7|     my $addr;
   8|     $addr = "rse\@engelschall.com" if $at eq '';
  15| }:>
  16| <restore at>
  17| </define-tag>
- 18| 
+ 18|
  19| This is <me> and <me at=apache>.
 
 Output:
 feature to create templates.  Assume we have the following template defined in
 the file F<template.wml>.
 
-  1| #   the template itself 
+  1| #   the template itself
   2| <html>
   3| <head>
   4| <title>{#SUBJECT_LOC#}</title>
  10| </blockquote>
  11| </body>
  12| </html>
- 13| 
+ 13|
  14| #   way to insert the subject
  15| <define-tag subject>
  16| {#SUBJECT_LOC#:%0:##}
  17| </define-tag>
- 18| 
+ 18|
  19| #   per default we are in body
  20| {#BODY#:
 
 Input:
 
   1| #include 'template.wml'
-  2| 
+  2|
   3| <subject "Foo, Bar and Quux">
   4|
   5| This is about Foo, Bar and Quux...
 
   1| #!wml -o (ALL-LANG_*)+LANG_EN:index.en.html \
   2|       -o (ALL-LANG_*)+LANG_DE:index.de.html
-  3| 
+  3|
   4| #use wml::std::page
   5| #use wml::std::lang
-  6| 
+  6|
   7| <lang:new id=en short>
   8| <lang:new id=de short>
-  9| 
+  9|
  10| <page>
- 11| 
+ 11|
  12| <h1><en: Welcome><de: Willkommen>!</h1>
- 13| 
+ 13|
  14| <a href="<lang:star: index2.*.html>">Index 2</a>
- 15| 
+ 15|
  16| <lang:area>
  17| (en)This is a test page
  18| (de)Dies ist eine Testseite
   5| <h1>Welcome!</h1>
   6| <a href="index2.en.html">Index 2</a>
   7| This is a test page
-  8| 
+  8|
   9| </body>
  10| </html>
 

src/wml_include/des/typography.src

 ##EOF##
 __END__
 
+=encoding utf8
+
 =head1 NAME
 
 wml::des::typography - Typography

src/wml_include/fmt/isolatin.src

 ##EOF##
 __END__
 
+=encoding utf8
+
 =head1 NAME
 
 wml::fmt::isolatin - ISO-Latin-1 to HTML Entity Conversion