# sample resource file with default values
# see linkchecker -h for help on these options
# commandline options override these settings!
[output]
# turn on/off debug messages
debug=0
# use the text logger
log=text
# turn on/off --verbose
verbose=0
# turn on/off --warnings
warnings=0
# turn on/off --quiet
quiet=0
# additional file output
fileoutput=
#fileoutput = text, colored, html, gml, sql
# field names:
# all (for all fields)
# realurl (the full url link)
# result (valid or invalid, with messages)
# extern (1 or 0, only in some logger types reported)
# base (base href=...)
# name (<a href=...>name</a> and <img alt="name">)
# parenturl (if any)
# info (some additional info, e.g. FTP welcome messages)
# warning (warnings)
# dltime (download time)
# checktime (check time)
# url (the original url name, can be relative)
# intro (the blurb at the beginning, "starting at ...")
# outro (the blurb at the end, "found x errors ...")
# each Logger can have separate configuration parameters

# standard text logger
[text]
filename=linkchecker-out.txt
fields=all

# GML logger
[gml]
filename=linkchecker-out.gml
fields=all

# CSV logger
[csv]
filename=linkchecker-out.csv
separator=;
fields=all

# SQL logger
[sql]
filename=linkchecker-out.sql
dbname=linksdb
commandsep=;
fields=all

# HTML logger
[html]
filename=linkchecker-out.html
# colors for the various parts
colorbackground="#fff7e5"
colorurl="#dcd5cf"
colorborder="#000000"
colorlink="#191c83"
tablewarning=<td bgcolor="#e0954e">
tableerror=<td bgcolor="#db4930">
tableok=<td bgcolor="#3ba557">
fields=all

# ANSI color logger
[colored]
filename=linkchecker-out.colored
# [Pixel] removed colors as it's broken
#
# colors for the various parts
# colorparent=37
# colorurl=0
# colorname=0
# colorreal=36
# colorbase=35
# colorvalid=1;32
# colorinvalid=1;31
# colorinfo=0
# colorwarning=1;33
# colordltime=0
# colorreset=0
fields=all

# blacklist logger
[blacklist]
filename=~/.blacklist

# checking options
[checking]
# number of threads
threads=5
# check anchors?
anchors=0
recursionlevel=1
# obey robots.txt exclusion?
robotstxt=1
# overall strict checking. You can specify for each extern URL
# separately if it's strict or not. See the [filtering] section
strict=0
# supply a regular expression for which warnings are printed if found
# in any HTML files.
warningregex=
# Basic NNTP server. Overrides NNTP_SERVER environment variable.
nntpserver=
# interactive operation
interactive=0

# filtering options (see FAQ)
# for each extern link we can specify if it is strict or not
[filtering]
# everything with 'lconline' in the URL name is extern
# extern1=lconline 0
# everything with 'bookmark' in the URL name is strict
# extern2=bookmark 1
# links to our domain are intern
# internlinks=calvinsplayground\.de
# check only syntax of all mail addresses
# extern3=^mailto: 1
denyallow=0

# You can provide different user/password pairs for different link types.
# Entries are a triple (link regular expression, username, password),
# separated by whitespace.
# If the regular expression matches, the given user/password pair is used
# for authentication. The commandline options -u,-p match every link
# and therefore override the entries given here. The first match wins.
# At the moment, authentication is used/needed for http[s] and ftp links.
[authentication]
#entry1=^http://treasure\.calvinsplayground\.de/~calvin/isnichmehr/ lebowski lebowski
#entry2=^ftp://void.cs.uni-sb.de calvin hutzli