# Makefile.in generated by automake 1.16.5 from Makefile.am.
# docs/Makefile.  Generated from Makefile.in by configure.

# Copyright (C) 1994-2021 Free Software Foundation, Inc.

# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.



# Network UPS Tools: main docs

# FIXME: There is a lot of shell-scripting below which gets processed by
# whatever system shell there is. While bash handles expressions like
#   VAL="`cmd "some params"`"
# in a way that "some params" are a single token passed to "cmd" as an
# argument, and the stdout of such command execution is collected into a
# single-token string as "VAL" (even if it contains white-space). Some
# other shells, e.g. "ksh", seem to actively dislike unbalanced
# double-quotes inside the backticks (e.g. those matched in some
# grep/sed regexes below),
# although generally the approach works for such "VAL" assignments too.
# In any case, it is portable and preferable to instead write that like
#   VAL="`cmd \"some params\"`"
# Note that the newer "$(...)" syntax is not portable, older shells
# have no idea about it, and it is cumbersome with `make` substitution.
# Keep a lookout with multi-platform NUT CI jobs, and try to use single
# quotes where possible (e.g. where pre-expanded `make` variables are
# involved - so shell should not process them again anyway).


# Automake boilerplate: shell snippet that succeeds (exits 0) only when
# the make program currently running is GNU make.  GNU make exports
# MAKELEVEL; the MAKE_HOST and MAKE_VERSION+CURDIR probes distinguish it
# from other implementations that may also set MAKELEVEL.
am__is_gnu_make = { \
  if test -z '$(MAKELEVEL)'; then \
    false; \
  elif test -n '$(MAKE_HOST)'; then \
    true; \
  elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
    true; \
  else \
    false; \
  fi; \
}
# Automake boilerplate: shell snippet that succeeds only when the
# single-letter option named by $target_option (must be exactly one
# character, e.g. "n" or "k"; otherwise the leading case aborts with an
# internal error) appears among the flags make was invoked with.
# Under GNU make the pre-split MFLAGS value is scanned; for other makes,
# backslash-escaped whitespace runs are first stripped out of MAKEFLAGS.
# Option forms that consume a following word as their argument
# (separate-argument -I/-O/-l, and -[dEDm]/-[JT]) cause that word to be
# skipped so it is not misread as a bundle of option letters.
am__make_running_with_option = \
  case $${target_option-} in \
      ?) ;; \
      *) echo "am__make_running_with_option: internal error: invalid" \
              "target option '$${target_option-}' specified" >&2; \
         exit 1;; \
  esac; \
  has_opt=no; \
  sane_makeflags=$$MAKEFLAGS; \
  if $(am__is_gnu_make); then \
    sane_makeflags=$$MFLAGS; \
  else \
    case $$MAKEFLAGS in \
      *\\[\ \	]*) \
        bs=\\; \
        sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
          | sed "s/$$bs$$bs[$$bs $$bs	]*//g"`;; \
    esac; \
  fi; \
  skip_next=no; \
  strip_trailopt () \
  { \
    flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
  }; \
  for flg in $$sane_makeflags; do \
    test $$skip_next = yes && { skip_next=no; continue; }; \
    case $$flg in \
      *=*|--*) continue;; \
        -*I) strip_trailopt 'I'; skip_next=yes;; \
      -*I?*) strip_trailopt 'I';; \
        -*O) strip_trailopt 'O'; skip_next=yes;; \
      -*O?*) strip_trailopt 'O';; \
        -*l) strip_trailopt 'l'; skip_next=yes;; \
      -*l?*) strip_trailopt 'l';; \
      -[dEDm]) skip_next=yes;; \
      -[JT]) skip_next=yes;; \
    esac; \
    case $$flg in \
      *$$target_option*) has_opt=yes; break;; \
    esac; \
  done; \
  test $$has_opt = yes
# Convenience predicates built on the helper above: true when make was
# invoked with -n (dry run) or -k (keep going), respectively.
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
# Per-package installation subdirectories:
pkgdatadir = $(datadir)/nut
pkgincludedir = $(includedir)/nut
pkglibdir = $(libdir)/nut
pkglibexecdir = $(libexecdir)/nut
# "cd" that neutralizes CDPATH (zsh gets a "." entry so the variable is
# never left empty there):
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
# Hooks run around (un)install rules; all default to the no-op ":":
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
# Build/host/target triplets detected by configure for this build:
build_triplet = aarch64-slackware-linux-gnu
host_triplet = aarch64-slackware-linux-gnu
target_triplet = aarch64-slackware-linux-gnu
# Automake conditional appends (am__append_N).  The commented-out ones
# belong to conditionals that were false at configure time -- here the
# ChangeLog documentation outputs, disabled via
# "--enable-docs-changelog=no" (see CONFIG_FLAGS below).
#am__append_1 = $(top_builddir)/docs/ChangeLog.html
#am__append_2 = $(top_builddir)/docs/ChangeLog.chunked
#am__append_3 = $(top_builddir)/docs/ChangeLog.pdf
#am__append_4 = $(top_builddir)/ChangeLog
#am__append_5 = $(top_builddir)/ChangeLog.adoc $(top_builddir)/ChangeLog.adoc-parsed $(top_builddir)/ChangeLog.adoc-parsed.latest
am__append_6 = spellcheck
am__append_7 = $(ASCIIDOC_HTML_SINGLE)
#am__append_8 = --filter-path="$(ASPELL_FILTER_TEX_PATH)"
am__append_9 = $(NUT_SPELL_DICT).bak-pre-sorting $(NUT_SPELL_DICT).bak-pre-interactive .$(NUT_SPELL_DICT).sorted $(NUT_SPELL_DICT).sorted
subdir = docs
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
# m4 macro files whose change should trigger regeneration of aclocal.m4:
am__aclocal_m4_deps = $(top_srcdir)/m4/ax_c___attribute__.m4 \
	$(top_srcdir)/m4/ax_c_pragmas.m4 \
	$(top_srcdir)/m4/ax_check_compile_flag.m4 \
	$(top_srcdir)/m4/ax_compare_version.m4 \
	$(top_srcdir)/m4/ax_realpath.m4 \
	$(top_srcdir)/m4/ax_realpath_lib.m4 \
	$(top_srcdir)/m4/ax_run_or_link_ifelse.m4 \
	$(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \
	$(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \
	$(top_srcdir)/m4/lt~obsolete.m4 \
	$(top_srcdir)/m4/nut_arg_with.m4 \
	$(top_srcdir)/m4/nut_check_asciidoc.m4 \
	$(top_srcdir)/m4/nut_check_aspell.m4 \
	$(top_srcdir)/m4/nut_check_bool.m4 \
	$(top_srcdir)/m4/nut_check_cppcheck.m4 \
	$(top_srcdir)/m4/nut_check_headers_windows.m4 \
	$(top_srcdir)/m4/nut_check_libavahi.m4 \
	$(top_srcdir)/m4/nut_check_libfreeipmi.m4 \
	$(top_srcdir)/m4/nut_check_libgd.m4 \
	$(top_srcdir)/m4/nut_check_libglib.m4 \
	$(top_srcdir)/m4/nut_check_libgpiod.m4 \
	$(top_srcdir)/m4/nut_check_libltdl.m4 \
	$(top_srcdir)/m4/nut_check_libmodbus.m4 \
	$(top_srcdir)/m4/nut_check_libneon.m4 \
	$(top_srcdir)/m4/nut_check_libnetsnmp.m4 \
	$(top_srcdir)/m4/nut_check_libnss.m4 \
	$(top_srcdir)/m4/nut_check_libopenssl.m4 \
	$(top_srcdir)/m4/nut_check_libpowerman.m4 \
	$(top_srcdir)/m4/nut_check_libregex.m4 \
	$(top_srcdir)/m4/nut_check_libsystemd.m4 \
	$(top_srcdir)/m4/nut_check_libusb.m4 \
	$(top_srcdir)/m4/nut_check_libwrap.m4 \
	$(top_srcdir)/m4/nut_check_os.m4 \
	$(top_srcdir)/m4/nut_check_pkgconfig.m4 \
	$(top_srcdir)/m4/nut_check_python.m4 \
	$(top_srcdir)/m4/nut_check_socketlib.m4 \
	$(top_srcdir)/m4/nut_compiler_family.m4 \
	$(top_srcdir)/m4/nut_func_getnameinfo_argtypes.m4 \
	$(top_srcdir)/m4/nut_report_feature.m4 \
	$(top_srcdir)/m4/nut_stash_warnings.m4 \
	$(top_srcdir)/m4/nut_type_socklen_t.m4 \
	$(top_srcdir)/configure.ac
# Changing any of these invalidates the generated configure/Makefiles:
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
	$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_HEADER = $(top_builddir)/include/config.h
# Files in this directory produced by config.status from *.in templates
# (removed on distclean):
CONFIG_CLEAN_FILES = asciidoc.conf docinfo.xml docinfo.xml.sh
CONFIG_CLEAN_VPATH_FILES =
# Silent-rules machinery: each AM_V_* expands via $(V), falling back to
# $(AM_DEFAULT_VERBOSITY) when V is unset.  AM_V_P is a shell predicate
# ("true" only when verbose), AM_V_GEN prints a "  GEN" progress tag in
# quiet mode, AM_V_at prefixes "@" to hide the command line.
AM_V_P = $(am__v_P_$(V))
am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY))
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
am__v_GEN_0 = @echo "  GEN     " $@;
am__v_GEN_1 = 
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
am__v_at_0 = @
am__v_at_1 = 
SOURCES =
DIST_SOURCES =
# Goals that recurse into $(SUBDIRS):
RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \
	ctags-recursive dvi-recursive html-recursive info-recursive \
	install-data-recursive install-dvi-recursive \
	install-exec-recursive install-html-recursive \
	install-info-recursive install-pdf-recursive \
	install-ps-recursive install-recursive installcheck-recursive \
	installdirs-recursive pdf-recursive ps-recursive \
	tags-recursive uninstall-recursive
# Shell snippet: true when install-info may be run (honors the
# AM_UPDATE_INFO_DIR environment override and install-info presence):
am__can_run_installinfo = \
  case $$AM_UPDATE_INFO_DIR in \
    n|no|NO) false;; \
    *) (install-info --version) >/dev/null 2>&1;; \
  esac
# Shell helpers to strip the $(srcdir)/ prefix off a path held in $$p
# (result in $$f) for VPATH builds:
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
    $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
    *) f=$$p;; \
  esac;
# Keep only the basename of $$p (result in $$f):
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
# Batch size used by am__nobase_list below:
am__install_max = 40
# Regex-escape $(srcdir) so it can safely appear inside sed patterns:
am__nobase_strip_setup = \
  srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
  for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
# Group the files in $$list by destination subdirectory, emitting at
# most $(am__install_max) files per output line:
am__nobase_list = $(am__nobase_strip_setup); \
  for p in $$list; do echo "$$p $$p"; done | \
  sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
  $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
    if (++n[$$2] == $(am__install_max)) \
      { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
    END { for (dir in files) print dir, files[dir] }'
# Join stdin lines into batches (8 then 5 joins = up to 40 per line):
am__base_list = \
  sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
  sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
# Remove $$files from $$dir, tolerating an absent/unreadable directory
# and an empty file list:
am__uninstall_files_from_dir = { \
  test -z "$$files" \
    || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
    || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
         $(am__cd) "$$dir" && rm -f $$files; }; \
  }
# Directories this Makefile installs into:
am__installdirs = "$(DESTDIR)$(htmldocdir)" "$(DESTDIR)$(pdfdir)"
DATA = $(htmldoc_DATA) $(pdf_DATA)
RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive	\
  distclean-recursive maintainer-clean-recursive
am__recursive_targets = \
  $(RECURSIVE_TARGETS) \
  $(RECURSIVE_CLEAN_TARGETS) \
  $(am__extra_recursive_targets)
AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \
	distdir distdir-am
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates.  Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
  BEGIN { nonempty = 0; } \
  { items[$$0] = 1; nonempty = 1; } \
  END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique.  This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
  list='$(am__tagged_files)'; \
  unique=`for i in $$list; do \
    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
  done | $(am__uniquify_input)`
DIST_SUBDIRS = $(SUBDIRS)
am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/asciidoc.conf.in \
	$(srcdir)/docinfo.xml.in $(srcdir)/docinfo.xml.sh.in
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
# Automake boilerplate: walk the relative path in $$dir1 component by
# component, adjusting $$dir0 (initialized to `pwd`) and accumulating
# into $$dir2; the computed relative path is left in $$reldir.  The
# sed_* expressions split off / drop the first or last path component.
am__relativize = \
  dir0=`pwd`; \
  sed_first='s,^\([^/]*\)/.*$$,\1,'; \
  sed_rest='s,^[^/]*/*,,'; \
  sed_last='s,^.*/\([^/]*\)$$,\1,'; \
  sed_butlast='s,/*[^/]*$$,,'; \
  while test -n "$$dir1"; do \
    first=`echo "$$dir1" | sed -e "$$sed_first"`; \
    if test "$$first" != "."; then \
      if test "$$first" = ".."; then \
        dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \
        dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \
      else \
        first2=`echo "$$dir2" | sed -e "$$sed_first"`; \
        if test "$$first2" = "$$first"; then \
          dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \
        else \
          dir2="../$$dir2"; \
        fi; \
        dir0="$$dir0"/"$$first"; \
      fi; \
    fi; \
    dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \
  done; \
  reldir="$$dir2"
# ---------------------------------------------------------------------
# Values below were substituted by ./configure (config.status) for this
# particular build (aarch64-slackware-linux-gnu, NUT 2.8.5).  They are
# regenerated on every reconfigure -- do not edit them by hand.
# ---------------------------------------------------------------------
A2X = /usr/bin/a2x
ABS_TOP_BUILDDIR = /dev/shm/buildtmp/build-nut/nut-2.8.5
ABS_TOP_SRCDIR = /dev/shm/buildtmp/build-nut/nut-2.8.5
ACLOCAL = ${SHELL} '/dev/shm/buildtmp/build-nut/nut-2.8.5/missing' aclocal-1.16
ALTPIDPATH = /run/nut
AMTAR = $${TAR-tar}
AM_DEFAULT_VERBOSITY = 0
AR = /usr/bin/ar
ASCIIDOC = /usr/bin/asciidoc
ASCIIDOC_LINKMANEXT_SECTION_REWRITE = yes
ASCIIDOC_TEMPLATE_LINKMANEXT = https://linux.die.net/man/{0}/{target}
ASCIIDOC_TEMPLATE_LINKMANEXT2 = https://linux.die.net/man/{2}/{target}
ASPELL = /usr/bin/aspell
ASPELL_FILTER_LIB_PATH = none
ASPELL_FILTER_SHARE_PATH = none
ASPELL_FILTER_TEX_PATH = none
AUGPARSE = none
AUTOCONF = ${SHELL} '/dev/shm/buildtmp/build-nut/nut-2.8.5/missing' autoconf
AUTOHEADER = ${SHELL} '/dev/shm/buildtmp/build-nut/nut-2.8.5/missing' autoheader
AUTOMAKE = ${SHELL} '/dev/shm/buildtmp/build-nut/nut-2.8.5/missing' automake-1.16
AWK = gawk
BINDIR = /usr/bin
BSDKVMPROCLIBS = 
CC = aarch64-slackware-linux-gnu-gcc
CCACHE_BASEDIR = 
CCACHE_DIR = 
CCACHE_NAMESPACE = nut:aarch64-slackware-linux-gnu
CCACHE_PATH = 
CCDEPMODE = depmode=gcc3
CFLAGS = -isystem /usr/local/include  -O2 -fPIC -Wno-reserved-identifier -Wno-unknown-warning-option -std=gnu99 -Wno-system-headers -Wall -Wextra -Wsign-compare -pedantic -Wno-error
CGIDATAPATH = /usr/share/nut/html
CGIEXECPATH = /usr/share/nut/cgi-bin
CONFIG_CFLAGS =  -O2 -fPIC
CONFIG_CPPFLAGS = 
CONFIG_CXXFLAGS =  -O2 -fPIC
CONFIG_FLAGS = --prefix=/usr --libdir=/usr/lib64 --libexecdir=/usr/lib64/nut --sbindir=/usr/sbin --sysconfdir=/etc/nut --localstatedir=/var --datadir=/usr/share/nut --mandir=/usr/man --docdir=/usr/doc/nut-2.8.5 --disable-static --without-python2 --with-dev --with-serial --with-usb --with-snmp --with-neon --with-wrap --with-cgi --with-openssl --with-cgipath=/usr/share/nut/cgi-bin --with-htmlpath=/usr/share/nut/html --with-drvpath=/usr/lib64/nut --with-statepath=/run/nut --with-pidpath=/run/nut --with-altpidpath=/run/nut --with-user=nut --with-group=nut --enable-docs-changelog=no --host=aarch64-slackware-linux-gnu --build=aarch64-slackware-linux-gnu
CONFIG_LDFLAGS = 
CONFPATH = /etc/nut
CONFPATH_EXAMPLES = /etc/nut
CPP = aarch64-slackware-linux-gnu-gcc -E
CPPCHECK = 
CPPFLAGS = 
CPPUNIT_CFLAGS = 
CPPUNIT_LIBS = 
CPPUNIT_NUT_CXXFLAGS = 
CSCOPE = cscope
CTAGS = ctags
CXX = aarch64-slackware-linux-gnu-g++
CXXCPP = aarch64-slackware-linux-gnu-g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -isystem /usr/local/include  -O2 -fPIC -Wno-reserved-identifier -Wno-unknown-warning-option -std=gnu++11 -Wno-system-headers -Wall -Wextra -Wno-error
CYGPATH = 
CYGPATH_W = echo
DBLATEX = 
DEFS = -DHAVE_CONFIG_H
DEPDIR = .deps
DEPLOYED_DUMMYUPS = 
DEPLOYED_UPSC = 
DEPLOYED_UPSD = 
DEPLOYED_UPSIMAGE = 
DLLTOOL = dlltool
DOC_BUILD_LIST =  man-man html-single html-chunked
DOC_CHECK_LIST =  check-man check-html-single check-html-chunked
DRIVER_BUILD_LIST = all
DRIVER_INSTALL_TARGET = 
DRIVER_MAN_LIST = all
DRIVER_MAN_LIST_PAGES = 
DRVPATH = /usr/lib64/nut
DSYMUTIL = 
DUMPBIN = 
ECHO_C = 
ECHO_N = -n
ECHO_T = 
EGREP = /usr/bin/grep -E
# NOTE(review): deliberately holds the literal "@VERSION@" token (the
# name says "escaped template") -- presumably expanded later by a
# consumer of this value; confirm before changing.
ESCAPED_TEMPLATE_VERSION = @VERSION@
ETAGS = etags
EXEEXT = 
FGREP = /usr/bin/grep -F
FORCE_NUT_VERSION = FORCE
GDLIB_CONFIG = 
GETENT = getent
GITLOG_END_POINT = HEAD
GITLOG_START_POINT = v2.6.0
GREP = /usr/bin/grep
HAVE_SYS_SOCKET_H = 1
HAVE_WINSOCK2_H = 0
HAVE_WS2TCPIP_H = 0
ID = id
INSTALL = /usr/bin/ginstall -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
LD = /usr/bin/ld
LDD = /usr/bin/ldd
LDFLAGS = 
LDFLAGS_NUT_RPATH = 
LDFLAGS_NUT_RPATH_CXX = 
LIBAVAHI_CFLAGS = -D_REENTRANT
LIBAVAHI_LIBS = -lavahi-core -lavahi-common -lavahi-client
LIBDIR = /usr/lib64
LIBGD_CFLAGS = -I/usr/include/freetype2 -I/usr/include/libpng16 -I/usr/include/webp
LIBGD_LDFLAGS =  -lgd
LIBGIO_CFLAGS = -pthread -isystem /usr/include/glib-2.0 -I/usr/include/glib-2.0 -isystem /usr/lib64/glib-2.0/include -I/usr/lib64/glib-2.0/include -isystem /usr/include/libmount -I/usr/include/libmount -isystem /usr/include/blkid -I/usr/include/blkid
LIBGIO_LIBS = -lgio-2.0 -lgobject-2.0 -lglib-2.0
LIBGLIB_CFLAGS = -isystem /usr/include/glib-2.0 -I/usr/include/glib-2.0 -isystem /usr/lib64/glib-2.0/include -I/usr/lib64/glib-2.0/include
LIBGLIB_LIBS = -lglib-2.0
LIBGPIO_CFLAGS = 
LIBGPIO_LIBS = -lgpiod
LIBI2C_LIBS = -li2c 
LIBIPMI_CFLAGS = 
LIBIPMI_LIBS = 
LIBLTDL_CFLAGS = 
LIBLTDL_LIBS = -lltdl  
LIBMODBUS_CFLAGS = 
LIBMODBUS_LIBS = 
LIBNEON_CFLAGS = -I/usr/include/neon
LIBNEON_LIBS = -lneon
LIBNETSNMP_CFLAGS = 
LIBNETSNMP_LIBS = -lnetsnmp
LIBOBJS = 
LIBPOWERMAN_CFLAGS = 
LIBPOWERMAN_LIBS = 
LIBREGEX_LIBS = 
LIBS = 
LIBSSL_CFLAGS = 
LIBSSL_CXXFLAGS = 
LIBSSL_LDFLAGS_RPATH = 
LIBSSL_LIBS = -lssl -lcrypto
LIBSSL_REQUIRES = openssl
LIBSYSTEMD_CFLAGS = 
LIBSYSTEMD_LIBS = 
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIBTOOL_DEPS = ././/ltmain.sh
LIBUSB_CFLAGS = -I/usr/include/libusb-1.0
LIBUSB_CONFIG = /usr/bin/libusb-config
LIBUSB_LIBS = -lusb-1.0
LIBWRAP_CFLAGS = 
LIBWRAP_LIBS = -lnsl  -lwrap
LIPO = 
LN_S = ln -s
LN_S_R = ln -s -r
LTLIBOBJS = 
LT_SYS_LIBRARY_PATH = 
MAINT = #
MAKEINFO = ${SHELL} '/dev/shm/buildtmp/build-nut/nut-2.8.5/missing' makeinfo
MANIFEST_TOOL = :
MAN_DIR_AS_BASE = yes
MAN_SECTION_API = 3

# Possible man page section remapping in some distros:
MAN_SECTION_API_BASE = 3
MAN_SECTION_CFG = 5
MAN_SECTION_CFG_BASE = 5
MAN_SECTION_CMD_SYS = 8
MAN_SECTION_CMD_SYS_BASE = 8
MAN_SECTION_CMD_USR = 1
MAN_SECTION_CMD_USR_BASE = 1
MAN_SECTION_MISC = 7
MAN_SECTION_MISC_BASE = 7
MKDIR_P = /usr/bin/mkdir -p
MKTEMP = mktemp
MSGFMT = /usr/bin/msgfmt
NETLIBS = 
NETLIBS_GETADDRS = 
NET_SNMP_CONFIG = /usr/bin/net-snmp-config
NM = /usr/bin/nm -B
NMEDIT = 
NUT_AM_EXPORT_CCACHE_BASEDIR = #
NUT_AM_EXPORT_CCACHE_DIR = #
NUT_AM_EXPORT_CCACHE_NAMESPACE = 
NUT_AM_EXPORT_CCACHE_PATH = #
NUT_AM_MAKE_CAN_EXPORT = 
NUT_CGI_URI = /cgi-bin/nut
NUT_CONFIG_CFLAGS = -O2 -fPIC
NUT_CONFIG_CPPFLAGS = 
NUT_CONFIG_CXXFLAGS = -O2 -fPIC
NUT_CONFIG_LDFLAGS = 
NUT_DATADIR = /usr/share/nut
NUT_LIBEXECDIR = /usr/lib64/nut
NUT_MANDIR = /usr/man
NUT_NETVERSION = 1.3
NUT_PORT = 3493
NUT_SOURCE_GITREV = 2.8.5
NUT_SOURCE_GITREV_IS_PRERELEASE = false
NUT_SOURCE_GITREV_IS_RELEASE = true
NUT_SOURCE_GITREV_NUMERIC = 2.8.5
NUT_SOURCE_GITREV_NUMERIC_COLONS = 2:8:5
NUT_SOURCE_GITREV_NUMERIC_UNDERSCORES = 2_8_5
NUT_SOURCE_GITREV_SEMVER = 2.8.5
NUT_SOURCE_GITREV_SEMVER_COLONS = 2:8:5
NUT_SOURCE_GITREV_SEMVER_UNDERSCORES = 2_8_5
# Other rewritable properties:
NUT_WEBSITE_BASE = https://www.networkupstools.org/historic/v2.8.5
OBJDUMP = objdump
OBJEXT = o
OS_NAME = 
OTOOL = 
OTOOL64 = 
PACKAGE = nut
PACKAGE_BUGREPORT = https://github.com/networkupstools/nut/issues
PACKAGE_NAME = nut
PACKAGE_STRING = nut 2.8.5
PACKAGE_TARNAME = nut
PACKAGE_URL = https://www.networkupstools.org/historic/v2.8.5/index.html
PACKAGE_VERSION = 2.8.5
PATH_DURING_CONFIGURE = /tmp/DISTCC:/usr/lib64/go1.25.7/go/bin:/usr/local/sbin:/usr/sbin:/sbin:/usr/local/bin:/usr/bin:/bin:/usr/games:/usr/lib64/libexec/kf5:/usr/lib64/qt5/bin:/usr/lib64/qt6/bin
PATH_SEPARATOR = :
PIDPATH = /run/nut
PKGCONFIGDIR = /usr/lib64/pkgconfig
PKG_CONFIG = /usr/bin/pkg-config
PKG_CONFIG_LIBDIR = 
PKG_CONFIG_PATH = /usr/local/lib64/pkgconfig:/usr/local/share/pkgconfig:/usr/lib64/pkgconfig:/usr/share/pkgconfig
POWERDOWNFLAG = /etc/killpower
PREFIX = /usr
PWDTOOL = 
PYTHON = 
PYTHON2 = 
PYTHON2_SITE_PACKAGES = 
PYTHON3 = /usr/bin/python3.12
PYTHON3_SITE_PACKAGES = /usr/lib/python3.12/site-packages
PYTHON_DEFAULT = /usr/bin/python3.12
PYTHON_SITE_PACKAGES = 
RANLIB = ranlib
REALPATH = realpath
RUN_AS_GROUP = nut
RUN_AS_USER = nut
SBINDIR = /usr/sbin
SED = /usr/bin/sed
SEMLIBS = 
SERLIBS = 
SET_MAKE = 
SHELL = /bin/sh
SOURCE_HIGHLIGHT = 
STATEPATH = /run/nut
STRIP = strip
SUN_LIBUSB = 
SYSTEMCTL = none
SYSTEMD_ANALYZE_PROGRAM = /usr/bin/systemd-analyze
SYSTEMD_AVAILABLE_EXEC_CONDITION = ### Since systemd v243 ### 
SYSTEMD_DAEMON_ARGS_DRIVER = 
SYSTEMD_DAEMON_ARGS_UPSD = -F
SYSTEMD_DAEMON_ARGS_UPSLOG = -F
SYSTEMD_DAEMON_ARGS_UPSMON = -F
SYSTEMD_DAEMON_NOTIFYACCESS_DRIVER = 
SYSTEMD_DAEMON_NOTIFYACCESS_UPSD = 
SYSTEMD_DAEMON_NOTIFYACCESS_UPSLOG = 
SYSTEMD_DAEMON_NOTIFYACCESS_UPSMON = 
SYSTEMD_DAEMON_TIMEOUT_STOP_UPSMON = TimeoutStopSec=infinity
SYSTEMD_DAEMON_TYPE_DRIVER = forking
SYSTEMD_DAEMON_TYPE_UPSD = simple
SYSTEMD_DAEMON_TYPE_UPSLOG = simple
SYSTEMD_DAEMON_TYPE_UPSMON = simple
SYSTEMD_DAEMON_WATCHDOG_DRIVER = #WatchdogSec=240s
SYSTEMD_DAEMON_WATCHDOG_UPSD = #WatchdogSec=240s
SYSTEMD_DAEMON_WATCHDOG_UPSLOG = #WatchdogSec=240s
SYSTEMD_DAEMON_WATCHDOG_UPSMON = #WatchdogSec=240s
SYSTEMD_SYSTEMCTL_PROGRAM = /usr/bin/systemctl
SYSTEMD_SYSUSERS_PROGRAM = /usr/bin/systemd-sysusers
SYSTEMD_TMPFILES_PROGRAM = /usr/bin/systemd-tmpfiles
TAIL = /usr/bin/tail
TAIL_ARGS_FROM_NTH_LINE = 
TREE_VERSION = 2.8
VALGRIND = /usr/bin/valgrind
VERSION = 2.8.5
WINDMC = windmc
WINDRES = none
WORDS_BIGENDIAN = 
XMLLINT = /usr/bin/xmllint
XSLTPROC = /usr/bin/xsltproc
# Directory layout resolved by configure for this build tree:
abs_builddir = /dev/shm/buildtmp/build-nut/nut-2.8.5/docs
abs_srcdir = /dev/shm/buildtmp/build-nut/nut-2.8.5/docs
abs_top_builddir = /dev/shm/buildtmp/build-nut/nut-2.8.5
abs_top_srcdir = /dev/shm/buildtmp/build-nut/nut-2.8.5
ac_ct_AR = 
ac_ct_CC = 
ac_ct_CXX = 
ac_ct_DLLTOOL = dlltool
ac_ct_DUMPBIN = 
ac_ct_LD = 
ac_ct_OBJDUMP = objdump
am__include = include
am__leading_dot = .
am__quote = 
am__tar = tar --format=posix -chf - "$$tardir"
am__untar = tar -xf -
auglensdir = 
auglenstestsdir = 
bindir = ${exec_prefix}/bin
build = aarch64-slackware-linux-gnu
build_alias = aarch64-slackware-linux-gnu
build_cpu = aarch64
build_os = linux-gnu
build_vendor = slackware
builddir = .
cgiexecdir = /usr/share/nut/cgi-bin
datadir = /usr/share/nut
datarootdir = ${prefix}/share
devddir = 
docdir = /usr/doc/nut-2.8.5
dotMAKE = 
driverexecdir = /usr/lib64/nut
dummy_PKG_CONFIG = 
dummy_PKG_CONFIG_CFLAGS = 
dummy_PKG_CONFIG_LIBS = 
dvidir = ${docdir}
exec_prefix = ${prefix}
freebsdquirksdir = 
host = aarch64-slackware-linux-gnu
host_alias = aarch64-slackware-linux-gnu
host_cpu = aarch64
host_os = linux-gnu
host_vendor = slackware
hotplugdir = 
htmlcgidir = /usr/share/nut/html
htmldir = ${docdir}
htmldocdir = ${docdir}/html-doc
htmlmandir = ${docdir}/html-man
includedir = ${prefix}/include
infodir = ${datarootdir}/info
install_sh = ${SHELL} /dev/shm/buildtmp/build-nut/nut-2.8.5/install-sh
libdir = /usr/lib64
libexecdir = /usr/lib64/nut
localedir = ${datarootdir}/locale
localstatedir = /var
mandir = /usr/man
mkdir_p = $(MKDIR_P)
now = 2026-04-09
nut_with_nut_monitor = yes
nut_with_nut_monitor_desktop = desktop-file-install
nut_with_nut_monitor_dir = ${prefix}/share/nut-monitor
nut_with_nut_monitor_py2gtk2 = 
nut_with_nut_monitor_py3qt5 = yes
nut_with_nut_monitor_py3qt6 = 
nut_with_pynut = yes
nut_with_pynut_py = 
nut_with_pynut_py2 = 
nut_with_pynut_py3 = yes
oldincludedir = /usr/include
pdfdir = ${docdir}
pkgconfigdir = ${libdir}/pkgconfig
prefix = /usr
program_transform_name = s,x,x,
psdir = ${docdir}
runstatedir = ${localstatedir}/run
sbindir = /usr/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = /etc/nut
systemdshutdowndir = 
systemdsystempresetdir = 
systemdsystemunitdir = 
systemdsysusersdir = 
systemdtmpfilesdir = 
target = aarch64-slackware-linux-gnu
target_alias = 
target_cpu = aarch64
target_os = linux-gnu
target_vendor = slackware
top_build_prefix = ../
top_builddir = ..
top_srcdir = ..
udevdir = /lib/udev
# Files only maintainers regenerate; removed by "make maintainer-clean":
MAINTAINERCLEANFILES = Makefile.in .dirstamp \
	$(NUT_SPELL_DICT).usage-report

# NOTE: ALL_TXT_SRC does not include sms-brazil-protocol.txt because it
# primarily includes samples of configuration files with a lot of Spanish
# (Brazilian) words which confuse the spell-checker. Adding them to common
# NUT_SPELL_DICT would compromise its usefulness for purely English documents.
# FIXME: Add support for custom additional dictionaries for specific document
# files, e.g. "something.txt.dict" if present? There is precedent and code in
# nut-website recipes by now...
# Files shipped in "make dist" tarballs:
EXTRA_DIST = $(ALL_TXT_SRC) $(SHARED_DEPS) $(IMAGE_FILES) \
	$(IMAGE_LOGO_FILES) $(IMAGE_LOGO_FILES_JENKINS_NUT) \
	$(CABLES_IMAGES) $(NUT_SPELL_DICT) docinfo.xml common.xsl \
	xhtml.xsl chunked.xsl asciidoc.txt asciidoc-vars.conf \
	sms-brazil-protocol.txt
# Illustrations referenced by the documents below:
IMAGE_FILES = \
	images/asciidoc.png \
	images/hostedby.png \
	images/nut_layering.png \
	images/nut-logo.svg \
	images/nut-logo-60x60.png \
	images/nut-logo-256x256.png \
	images/note.png \
	images/warning.png \
	images/blue-arrow.png \
	images/simple.png \
	images/advanced.png \
	images/bigbox.png \
	images/bizarre.png \
	images/old-cgi.png


# Logos which pop up in README.adoc acknowledgements and maybe other places:
IMAGE_LOGO_FILES = \
	images/ci/AppVeyor_logo-2x.png \
	images/ci/AppVeyor_logo-ar21.png \
	images/ci/CircleCI_vertical_black_logo.png \
	images/ci/DO_Powered_by_Badge_blue.png \
	images/ci/DO_Powered_by_Badge_blue_140pxW.png \
	images/ci/fosshost_org_Host_Dark_56px.png \
	images/ci/fosshost_org_Host_Light_309px.png \
	images/ci/fosshost_org_Host_Light_38px.png \
	images/ci/gandi-ar21.png \
	images/ci/gandi-ar21.svg \
	images/ci/GitHub-Mark-140pxW.png \
	images/ci/GitHub-Mark-ea2971cee799.png \
	images/ci/obs-logo.png \
	images/ci/obs-logo.svg \
	images/ci/openSUSEBuildService.png \
	images/ci/OC_logotype.png \
	images/ci/OC_logo-watercolor-256.png \
	images/ci/OC_logo_merged_171x32.png \
	images/ci/OC_logo_merged_140x26.png

# Artwork/styles for the NUT CI (Jenkins) instance:
IMAGE_LOGO_FILES_JENKINS_NUT = \
	images/ci/ci-root.css \
	images/ci/jenkins-nut-large-256px.png \
	images/ci/jenkins-nut-large-squared.png \
	images/ci/jenkins-nut-large.pdn \
	images/ci/jenkins-nut-large.png \
	images/ci/jenkins-nut-small-256px.png \
	images/ci/jenkins-nut-small.pdn \
	images/ci/jenkins-nut-small.png \
	images/ci/jenkins-nut-squared.png \
	images/ci/jenkins-nut-transparent-bg-140pxW.png \
	images/ci/jenkins-nut-transparent-bg-40px.png \
	images/ci/jenkins-nut-transparent-bg.png \
	images/ci/jenkins-nut.css \
	images/ci/jenkins-nut.png \
	images/ci/jenkins-nut.txt


# Only track here the local deps
SHARED_DEPS = nut-names.txt daisychain.txt asciidoc.conf asciidoc.txt

# See also conversions included via FULL_USER_MANUAL_DEPS
USER_MANUAL_DEPS = acknowledgements.txt cables.txt config-notes.txt	\
 configure.txt download.txt documentation.txt features.txt history.txt	\
 outlets.txt scheduling.txt security.txt support.txt user-manual.txt


# See also conversions included via FULL_DEVELOPER_GUIDE_DEPS
DEVELOPER_GUIDE_DEPS = contact-closure.txt design.txt developers.txt	\
 developer-guide.txt hid-subdrivers.txt macros.txt new-clients.txt	\
 new-drivers.txt net-protocol.txt nutdrv_qx-subdrivers.txt	\
 nut-versioning.adoc snmp-subdrivers.txt sock-protocol.txt


# See also conversions included via FULL_QA_GUIDE_DEPS
QA_GUIDE_DEPS = nut-qa.txt config-prereqs.txt ci-farm-do-setup.adoc	\
 ci-farm-lxc-setup.txt qa-guide.adoc

CABLES_DEPS = cables/apc-rs500-serial.txt	\
 cables/apc.txt cables/ge-imv-victron.txt cables/imv.txt		\
 cables/mgeups.txt cables/powerware.txt cables/repotec.txt		\
 cables/sms.txt

CABLES_IMAGES = images/cables/73-0724.png images/cables/940-0024C.jpg \
 images/cables/belkin-f6cx-rkm-xu-cable.jpg images/cables/Lansafecable.jpg \
 images/cables/mac-940-0024C.png images/cables/mge-66049.png \
 images/cables/mge-db9-rj12.jpg images/cables/mge-db9-rj45.jpg \
 images/cables/mge-usb-rj45.jpg \
 images/cables/SOLA-330.png

# Every asciidoc source that goes through the spell-checker:
ALL_TXT_SRC = nut-names.txt daisychain.txt \
 $(USER_MANUAL_DEPS) $(DEVELOPER_GUIDE_DEPS) \
 $(CABLES_DEPS) $(QA_GUIDE_DEPS) FAQ.txt packager-guide.txt snmp.txt \
 release-notes.txt ChangeLog.txt solaris-usb.txt

# NOTE(review): "@ASPELL_FILTER_PATH@" looks like an UNSUBSTITUTED
# configure placeholder -- every other "@...@" token in this generated
# file was replaced by config.status.  Confirm that configure actually
# AC_SUBSTs ASPELL_FILTER_PATH (compare ASPELL_FILTER_LIB_PATH et al.,
# which were substituted to "none" above).
ASPELL_FILTER_PATH = @ASPELL_FILTER_PATH@
# NOTE: This can be set by caller such as nut-website builder:
NUT_SPELL_DICT = nut.dict
# Documents rendered as single-page HTML / chunked HTML / PDF; the
# am__append_N tails add the ChangeLog variants when enabled:
ASCIIDOC_HTML_SINGLE = user-manual.html developer-guide.html \
	packager-guide.html qa-guide.html release-notes.html \
	solaris-usb.html cables.html FAQ.html $(am__append_1)
ASCIIDOC_HTML_CHUNKED = user-manual.chunked developer-guide.chunked \
	packager-guide.chunked qa-guide.chunked release-notes.chunked \
	solaris-usb.chunked cables.chunked FAQ.chunked $(am__append_2)
ASCIIDOC_PDF = user-manual.pdf developer-guide.pdf packager-guide.pdf \
	qa-guide.pdf release-notes.pdf solaris-usb.pdf cables.pdf \
	FAQ.pdf $(am__append_3)
DOC_BUILD_CHANGELOG_TEXT = $(am__append_4)
DOC_BUILD_CHANGELOG_ADOC = $(am__append_5)

# Note: "man" subdir is handled separately via all-local and check-local
# for a few goals, and among SUBDIRs directly from the top-level Makefile
# due to some potential dependency collisions with parallel builds.
SUBDIRS = cables
SUFFIXES = .txt .html .pdf .txt-spellchecked-auto .txt-spellchecked \
	.txt-prepped .adoc-spellchecked-auto .adoc-spellchecked \
	.adoc-prepped .in-spellchecked-auto .in-spellchecked \
	.in-prepped .sample-spellchecked-auto .sample-spellchecked \
	.sample-prepped .conf-spellchecked-auto .conf-spellchecked \
	.conf-prepped

# This list is defined by configure script choices and options:
CHECK_LOCAL_TARGETS =  check-man check-html-single check-html-chunked $(am__append_6)

# htmldocdir and pdfdir are set by autoconf/automake
htmldoc_DATA = $(am__append_7)
# FIXME: Install tools refuse to work with directories in this context
# and html-chunked documentation has a separate tree per document.
# Maybe an "(un)install-data-local" or "install-data-hook" for this?
#if WITH_HTML_CHUNKED
#htmldoc_DATA += $(ASCIIDOC_HTML_CHUNKED)
#endif WITH_HTML_CHUNKED
#pdf_DATA = $(ASCIIDOC_PDF)
# Build by-products removed by "make clean":
CLEANFILES = *.xml *.html *.pdf *-spellchecked* *-contentchecked* \
	.check-* docbook-xsl.css docinfo.xml.in.tmp \
	docinfo-since-v*.xml* *-docinfo.xml* docinfo.xml.sh \
	$(top_builddir)/INSTALL.nut $(top_builddir)/UPGRADING \
	$(top_builddir)/ChangeLog.adoc $(top_builddir)/*.adoc-parsed \
	*.adoc-parsed .ChangeLog.adoc-parsed.latest \
	$(NUT_SPELL_DICT).usage-report.tmp
# These two "must be" there per autotools standards, so a "make clean"
# should not compromise a rebuild:
DISTCLEANFILES = $(top_builddir)/NEWS $(top_builddir)/README \
	$(am__append_9)

### TODO: general automatic dependency generation
# Toggle (yes/no) — name suggests verbose tracing for the ChangeLog.adoc
# maintenance rules; confirm usage in those recipes.
MAINTAINER_ASCIIDOCS_CHANGELOG_DEBUG = no
# NOTE(review): value looks like an asciidoc attribute line (":ascii-ids:")
# injected into document sources for predictable ASCII-based section IDs —
# confirm against the rules that consume it.
A2X_ASCII_IDS = ":ascii-ids:\n"

# BSD Make dislikes the path resolution here and does not always populate '$<'
# (and claims why: "Using $< in a non-suffix rule context is a GNUmake idiom"),
# but it has a '$?' for "list of dependencies that are newer than the target".
# For more details see the BSD make(1) manual page, e.g.
# https://man.freebsd.org/cgi/man.cgi?make(1)
#A2X_ASCII_IDS = 

# Timeout for ChangeLog.adoc rule to wait for competing threads to maybe create
# (or start creating) the file first - then this "make" thread just waits for
# it to be done, and exits, as the goal is fulfilled.
MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY = 10

# Add other directory deps (not for local EXTRA_DIST) and generated contents
# Chapters included into the user manual from other directories, plus
# configure-parsed top-level documents:
FULL_USER_MANUAL_DEPS = $(USER_MANUAL_DEPS) $(SHARED_DEPS) \
	$(top_builddir)/README.adoc-parsed \
	$(top_builddir)/INSTALL.nut.adoc-parsed \
	$(top_builddir)/UPGRADING.adoc-parsed \
	../TODO.adoc ../scripts/ufw/README.adoc

# Chapters included into the developer guide from other directories:
FULL_DEVELOPER_GUIDE_DEPS = $(DEVELOPER_GUIDE_DEPS) $(SHARED_DEPS) \
	../scripts/augeas/README.adoc ../TODO.adoc \
	../lib/README.adoc \
	../tools/nut-scanner/README.adoc

# Chapters included into the QA guide:
FULL_QA_GUIDE_DEPS = $(QA_GUIDE_DEPS) $(SHARED_DEPS) \
	$(top_builddir)/ci_build.adoc-parsed


# Note: without the "-v", asciidoc (circa 8.6.2) sometimes hangs when
# generating the chunked HTML. In this case, export the environment
# variable ASCIIDOC_VERBOSE to "-v", ie:
#   $ ASCIIDOC_VERBOSE=-v make
# Note: `(top_)srcdir` and `(top_)builddir` must end with a path
# separator, or be empty -- so in all cases letting the resulting
# string resolve meaningfully in the filesystem during docs build.
# Default location of docinfo.xml; PDF recipes may re-point the shell
# variable A2X_DOCINFO_DIR at their temporary output directory.
A2X_DOCINFO_DIR = $(builddir)
# Options shared by every a2x invocation below. Note the shell-level
# A2X_OUTDIR and A2X_DOCINFO_DIR references ($${...}) are expanded by the
# recipe's shell, not by make, so each recipe sets them first.
A2X_COMMON_OPTS = $(ASCIIDOC_VERBOSE) \
    --attribute=icons \
    --xsltproc-opts="--nonet" \
    --xsltproc-opts="--stringparam nut.localdate \"`TZ=UTC date +%Y-%m-%d`\"" \
    --xsltproc-opts="--stringparam nut.localtime \"`TZ=UTC date +%H:%M:%S`\"" \
    --xsltproc-opts="--stringparam nut.nutversion \"$(PACKAGE_VERSION)\"" \
    --attribute=docinfodir="$${A2X_DOCINFO_DIR}" \
    --attribute=iconsdir="$(srcdir)/images" \
    --attribute=badges \
    --attribute=external_title \
    --attribute=tree_version="$(TREE_VERSION)" \
    --attribute=srcdir="$(abs_srcdir)/" \
    --attribute=builddir="$(abs_builddir)/" \
    --attribute=top_srcdir="$(abs_top_srcdir)/" \
    --attribute=top_builddir="$(abs_top_builddir)/" \
    --attribute=MAN_SECTION_API_BASE='$(MAN_SECTION_API_BASE)' \
    --attribute=MAN_SECTION_CFG_BASE='$(MAN_SECTION_CFG_BASE)' \
    --attribute=MAN_SECTION_CMD_SYS_BASE='$(MAN_SECTION_CMD_SYS_BASE)' \
    --attribute=MAN_SECTION_CMD_USR_BASE='$(MAN_SECTION_CMD_USR_BASE)' \
    --attribute=MAN_SECTION_MISC_BASE='$(MAN_SECTION_MISC_BASE)' \
    --attribute=NUT_WEBSITE_BASE='$(NUT_WEBSITE_BASE)' \
    -a toc -a numbered --destination-dir=$${A2X_OUTDIR}

# NOTE: a2x newer than 8.6.8 says "--destination-dir" is only valid for HTML.
# As of version 8.6.9 it lies, and the argument is required for our distcheck
# (and does affect PDF builds, as found during work on collision-avoidance -
# true with at least asciidoc/a2x versions 9.0.0rc2).
# For more details see issues https://web.archive.org/web/20201207082352/https://github.com/asciidoc/asciidoc/issues/44
# and https://github.com/networkupstools/nut/pull/281 (in short, attempts
# to "fix" this warning broke NUT build). If this is to be retried later, see
# https://github.com/networkupstools/nut/pull/281/commits/fe17861c4ea12679b3ebfefa8a6d692d79d99f2d
# and do not forget to fix up docs/man/Makefile.am too ;)

# NOTE: a2x tends to copy some files into its working area, preserving original
# permissions. If those files are read-only in origin (e.g. packaged stylesheet
# or our resources coming from EXTRA_DIST) the next a2x can not overwrite it.
# Also note that such hoarding of files has potential to break parallel builds
# (or cause them to produce undefined results if some bad timing happens).
# As a brutal workaround for the former problem, we chmod. For the second one
# we might try magic with .NOTPARALLEL (gmake) or .WAIT (BSD make) recipe
# hints, but those are make-implementation-dependent.

# Note that empirically it treats "destination-dir" as the source root for
# PDF generation (even though it claims the argument is ignored for non-HTML
# targets) so we have to provide the "images/" in this case. ONLY for PDF!

# Note we only remove the original target (if present), if it is a directory -
# e.g. created by "html-chunked" targets.

# NOTE: MKDIR_P may be defined via expanded $(top_builddir)/install-sh
# so should be run from $(abs_builddir) to be safe, as we jump around
# the build workspace
#
# Shared prologue for a2x recipes. Expects the caller's shell to have set
# A2X_OUTDIR (a per-target, per-PID temp dir). Steps, in order:
#  1. wipe any stale temp dir and any stale directory-shaped target ($@);
#  2. create the fresh temp dir via MKDIR_P, run from $(abs_builddir);
#  3. for PDF builds (tmp/pdf.*), symlink images/ into the temp dir, and for
#     qa-guide.pdf also symlink its since-v2.8.3 docinfo under both names a2x
#     may look for;
#  4. make the packaged docbook-xsl.css writable if a previous a2x run left a
#     read-only copy, and chmod the temp dir recursively (a2x copies files
#     preserving read-only permissions, which would break the next run).
DOCBUILD_BEGIN = { \
    if test -n "$${A2X_OUTDIR}" && test "$${A2X_OUTDIR}" != '.' ; then \
        rm -rf "./$${A2X_OUTDIR}" || true ; \
        test -d '$@' && rm -rf '$@' || true ; \
        _CWD="`pwd`" && (cd '$(abs_builddir)' && $(MKDIR_P) "$${_CWD}/$${A2X_OUTDIR}") || exit ; \
        case "$${A2X_OUTDIR}" in \
            tmp/pdf.*) ln -s ../../images "./$${A2X_OUTDIR}" ; \
                case "$(@F)" in \
                    qa-guide.pdf) \
                        ln -s ../../docinfo-since-v2.8.3.xml "./$${A2X_OUTDIR}/docinfo.xml" ; \
                        ln -s ../../docinfo-since-v2.8.3.xml "./$${A2X_OUTDIR}/qa-guide-docinfo.xml" ;; \
                esac \
                ;; \
        esac; \
    else A2X_OUTDIR='.' ; fi; \
    if test -s "${builddir}/docbook-xsl.css" \
    && test -r "${builddir}/docbook-xsl.css" \
    && test ! -w "${builddir}/docbook-xsl.css" \
    ; then chmod u+w "${builddir}/docbook-xsl.css" ; fi ; \
    chmod -R u+w "./$${A2X_OUTDIR}" || true; \
}


# When moving "*" hope a2x did not make any "hidden" files
# like ".*" that would be required for resulting documents.
# Leave the "images/" dir there, though.
# Otherwise, we would have to `find` them all.
#
# Shared epilogue for a2x recipes, paired with DOCBUILD_BEGIN. Steps:
# make the temp dir writable, drop any stale directory-shaped target,
# delete the docinfo copies/symlinks (they must not be installed), move
# the built target $(@F) and any other dotted files into place, then
# remove the temp dir. No-op when A2X_OUTDIR is '.' or unset.
DOCBUILD_END = { \
    if test -n "$${A2X_OUTDIR}" && test "$${A2X_OUTDIR}" != '.' ; then \
        chmod -R u+w "./$${A2X_OUTDIR}" || true; \
        test -d '$@' && rm -rf '$@' || true ; \
        rm -f "./$${A2X_OUTDIR}/"*docinfo*.xml* || exit ; \
        mv -f "./$${A2X_OUTDIR}/$(@F)" ./ || exit ; \
        mv -f "./$${A2X_OUTDIR}/"*.* ./ 2>/dev/null || true ; \
        rm -rf "./$${A2X_OUTDIR}" ; \
    fi ; \
}


# PORTABILITY NOTE: POSIX Make forbids the suffix rule definitions with
# prerequisites like done below, and GNU Make of some versions complains;
# https://www.gnu.org/software/make/manual/html_node/Error-Messages.html
# says the prerequisites were ignored while a suffix rule was created;
# eventually the POSIX stance would be taken to define a rule for a weird
# verbatim target file name with prerequisites:
# ../docs/Makefile:936: warning: ignoring prerequisites on suffix rule definition
# Changes from ".txt.pdf: docinfo.xml" to "*.pdf: docinfo.xml" = ".txt.pdf:"
# as done below may be pointless in the end (with regard to a portable way
# to trigger builds by a changed dependency), but at least predictable and
# not toxic.
###.txt.txt-prepped: $(abs_top_builddir)/docs/.prep-src-docs
###.adoc.adoc-prepped: $(abs_top_builddir)/docs/.prep-src-docs

# NOTE: inference rules can have only one target before the colon (POSIX),
# so we use helper snippets to share code for *.adoc and *.txt sources
#
# Render '$<' into a single-page XHTML '$@' via a2x, using a per-PID temp
# output dir. For ChangeLog documents (which are huge and where asciidoc
# is known to give up early), run the *-contentchecked sanity target; if
# that fails, regenerate once into a "-retry" temp dir and re-check.
# RES carries the first failing exit code through to the final "exit".
GENERATE_HTML_SINGLE = ( \
	A2X_OUTDIR="tmp/html-single.$(@F).$$$$" ; \
	echo "  DOC-HTML	Generating $@"; \
	$(DOCBUILD_BEGIN) ; RES=0; \
	$(A2X) $(A2X_COMMON_OPTS) --attribute=xhtml11_format --format=xhtml --xsl-file=$(srcdir)/xhtml.xsl '$<' || RES=$$? ; \
	$(DOCBUILD_END) ; \
	case "$(@F)" in \
	    *ChangeLog*) \
	        if [ -s '$(top_builddir)/ChangeLog' ] ; then \
	            $(MAKE) $(AM_MAKEFLAGS) "`basename '$(@F)'`"-contentchecked || RES=$$? ; \
	            if [ "$$RES" != 0 ] ; then \
	                echo "  DOC-HTML	Generating $@ (retry once)" >&2; \
	                rm -f '$@'; \
	                A2X_OUTDIR="tmp/html-single.$(@F).$$$$-retry" ; \
	                $(DOCBUILD_BEGIN) ; RES=0; rm -f "`basename '$(@F)'`"-contentchecked || true ; \
	                $(A2X) $(A2X_COMMON_OPTS) --attribute=xhtml11_format --format=xhtml --xsl-file=$(srcdir)/xhtml.xsl '$<' || RES=$$? ; \
	                $(DOCBUILD_END) ; \
	                $(MAKE) $(AM_MAKEFLAGS) "`basename '$(@F)'`"-contentchecked || RES=$$? ; \
	            fi ; \
	        fi ;; \
	esac ; \
	exit $$RES ; \
)


# Note: extra age check here because *.chunked is a directory and not all
# "make" implementations check its age vs. source files, just always build:
#
# Render '$<' into a chunked-HTML directory '$@' via a2x (per-PID temp dir,
# wrapped by DOCBUILD_BEGIN/END). Skips the build when the existing output
# directory already contains something newer than the source.
GENERATE_HTML_CHUNKED = ( \
	if [ -d '$@' ] && [ x"`find '$@' -newer '$<' 2>/dev/null`" != x ] ; then \
	    echo "  DOC-HTML-CHUNKED	SKIP: keep existing $@"; \
	    exit 0 ; \
	fi ; \
	A2X_OUTDIR="tmp/html-chunked.$(@F).$$$$" ; \
	echo "  DOC-HTML-CHUNKED	Generating $@"; \
	$(DOCBUILD_BEGIN) ; RES=0; \
	$(A2X) $(A2X_COMMON_OPTS) --attribute=chunked_format --format=chunked --xsl-file=$(srcdir)/chunked.xsl '$<' || RES=$$? ; \
	$(DOCBUILD_END) ; exit $$RES ; \
)


# Note: non-HTML a2x modes may ignore the destination directory
#
# Render '$<' into a PDF '$@' via a2x (per-PID temp dir, wrapped by
# DOCBUILD_BEGIN/END). If DOCBUILD_BEGIN planted a docinfo.xml into the
# temp dir (qa-guide case), point A2X_DOCINFO_DIR there so the
# --attribute=docinfodir in A2X_COMMON_OPTS picks it up.
GENERATE_PDF = ( \
	A2X_OUTDIR="tmp/pdf.$(@F).$$$$" ; \
	echo "  DOC-PDF	Generating $@"; \
	$(DOCBUILD_BEGIN) ; RES=0; \
	if [ -s "$${A2X_OUTDIR}/docinfo.xml" ] ; then A2X_DOCINFO_DIR="$${A2X_OUTDIR}"; fi ; \
	$(A2X) $(A2X_COMMON_OPTS) --attribute=pdf_format --format=pdf -a docinfo1 '$<' || RES=$$? ; \
	$(DOCBUILD_END) ; exit $$RES ; \
)


# Used below for spellcheck and for .prep-src-docs
# List most-frequently edited files first, to hit typos there sooner when
# developing. Some files from sub-directories are spell-checked here and
# not in their own makefiles, because they are included as chapters in
# some larger documents anchored here.
# Note that in builds with enabled parallel fanout mode (even if done
# sequentially, e.g. without NUT_MAKE_SKIP_FANOUT=true explicitly),
# files located in the current directory are processed first anyway,
# as one (possibly parallelized) sub-make call.
SPELLCHECK_SRC_DEFAULT = \
	../NEWS.adoc ../UPGRADING.adoc \
	asciidoc-vars.conf \
	../ci_build.adoc ../README.adoc \
	../INSTALL.nut.adoc \
	../TODO.adoc ../scripts/ufw/README.adoc \
	../scripts/augeas/README.adoc ../lib/README.adoc \
	../tools/nut-scanner/README.adoc \
	../AUTHORS ../COPYING ../LICENSE-GPL2 ../LICENSE-GPL3 ../LICENSE-DCO \
	$(ALL_TXT_SRC)


# Non-interactively spell check all documentation source files.
# This is useful for Buildbot and automated QA processing
# FIXME: how to present output (std{out,err}, single file or per target)?
# NOTE: ../ChangeLog is nowadays generated from commit messages, so
# its spelling (or errors in that) are not fixable and thus irrelevant.
# Similarly for the ../INSTALL file that is prepared by autoconf and not
# tracked as a source file by NUT Git repository.
# Note that `docs/asciidoc-vars.conf` is included into docs and so impacts
# their resulting spellcheck verdicts.
SPELLCHECK_SRC = $(SPELLCHECK_SRC_DEFAULT)

# Directory SPELLCHECK_SRC files are relative to. Overridden by other Makefiles.
SPELLCHECK_SRCDIR = $(srcdir)
SPELLCHECK_BUILDDIR = $(builddir)

# Note: de-facto our documentation is beyond ASCII (at least in names of
# international committers). The grep tests below look if the aspell output
# contained something other than the OK lines (tagged with asterisk) and
# aspell's version (tagged with @) and if it did - those lines must be the
# spellcheck complaints. Empty OUT is ok.
# We also must indent the input, because certain piped-in characters are
# interpreted as commands, and seems this feature can not be turned off.
# See also http://aspell.net/man-html/Through-A-Pipe.html
# TODO: Is "grep -a" or "grep -b" (treat input as ascii/bin) portable enough?
# Set SPELLCHECK_ERROR_FATAL=no if there are some unavoidable issues
# due to spellchecking, to temporarily not fail builds due to this.
# For Travis CI in particular, see ci_build.sh in NUT codebase root.
SPELLCHECK_ERROR_FATAL = yes
SPELLCHECK_ENV_DEBUG = no
# Common aspell options: NUT's custom dictionary on top of English:
ASPELL_NUT_COMMON_ARGS = -p \
	$(abs_srcdir)/$(NUT_SPELL_DICT) -d en \
	--lang=en --ignore-accents --encoding=utf-8
# Note: If there is a need to use filter path (e.g. in mingw/msys2 builds),
# it must be before --mode=tex (-t) option!
ASPELL_NUT_TEXMODE_ARGS = $(am__append_8) -t
ASPELL_ENV_LANG = en.UTF-8
# Regex for aspell output lines that are NOT complaints: "*"-tagged OK
# words, "@"-tagged version banner, and empty lines (see note above):
ASPELL_OUT_NOTERRORS = (^[ \t]*[\*\@]|^$$)

# WARNING: The percent wildcard is a GNU extension; otherwise we need
# a ".txt.txt-spellchecked" type of rule and files like "README" all
# renamed to *.txt, or lots of rules for files without the extensions.
# Maybe this will get simplified with renaming to *.adoc though ;)
#
# Other Makefiles have a relatively simple life, dealing with just a
# few texts and name/extension patterns in their directories.
#?#.txt.txt-spellchecked: Makefile.am $(abs_srcdir)/$(NUT_SPELL_DICT)
#%-spellchecked: % Makefile.am $(abs_srcdir)/$(NUT_SPELL_DICT)
#*-spellchecked */*-spellchecked: $(@:-spellchecked=) $(top_srcdir)/docs/Makefile.am $(abs_srcdir)/$(NUT_SPELL_DICT)
#
# NOTE: For some reason, at least GNU make insists on bogus calls:
#   update target 'asciidoc-vars.conf' due to: asciidoc-vars.conf-spellchecked
# when we e.g. `make dist` after a `make spellcheck` and ended up
# with removed and touched (emptied) file, only this one so far.
#
# NOTE: This portable rule RELIES on just one SPELLCHECK_SRC defined
# at a time, with an outer Makefile caller ensuring the looping:
# Where to send spellcheck recipe debug chatter (a file path for debugging):
SPELLCHECK_RECIPE_DEBUG_STREAM = /dev/null

# If NUT_MAKE_SKIP_FANOUT!=true we have a hack to spellcheck certain files more
# quickly (in parallel). This approach is constrained to files with a known
# extension and located in same directory as the Makefile that called them
# (extension-less docs and relative links are built sequentially).
#
# Re-invokes the docs Makefile for a single file (newest prerequisite '$?'),
# building its "-spellchecked" touch-file; leaves a '$@.failed' marker on
# error so the -k outer loop can report failures at the end.
SPELLCHECK_AUTO_ONE = ( \
	SPELLCHECK_SRC_ONE="`basename '$?'`" ; \
	rm -f '$@'.failed ; \
	$(MAKE) $(AM_MAKEFLAGS) -k -s -f "$(abs_top_builddir)/docs/Makefile" SPELLCHECK_SRC="" SPELLCHECK_SRC_ONE="$${SPELLCHECK_SRC_ONE}" SPELLCHECK_BUILDDIR="$(SPELLCHECK_BUILDDIR)" SPELLCHECK_SRCDIR="$(SPELLCHECK_SRCDIR)" VPATH="$(SPELLCHECK_SRCDIR):$(SPELLCHECK_BUILDDIR):$(VPATH)" "$(SPELLCHECK_BUILDDIR)/$${SPELLCHECK_SRC_ONE}-spellchecked" \
	|| { RES=$$? ; touch '$@'.failed; exit $$RES; } \
)


# NOTE: In "make SPELLCHECK_INTERACTIVE=true ${docsrc}-spellchecked",
# after an interactive "aspell check" we follow-up by a run of usual
# non-interactive spell-checker to verify that the developer actually
# has fixed all of the files that the tool had concerns about, and
# that the touch-file is updated if the file is okay (to speed up
# any future re-runs). We also must update all relevant *-spellchecked
# touch-files after "make spellcheck-sortdict" which updates "nut.dict"
# file which is a prerequisite for docs checks.
# After the (possibly SUBDIR-based) run we may report to the developer
# that their dictionary was updated and may need a Git recommit - either
# if it did change, or if caller's SPELLCHECK_REPORT_MAYBE_UPDATED_DICT=yes.
SPELLCHECK_REPORT_MAYBE_UPDATED_DICT = no

# When building out-of-tree, be sure to have all asciidoc resources
# under the same dir structure (tool limitation)
PREP_SRC = $(EXTRA_DIST) $(SPELLCHECK_SRC_DEFAULT)
# Automake default entry point (a second "all:" rule with a follow-up
# recipe exists further below):
all: all-recursive

# Reset built-in suffix rules, then declare ours (see SUFFIXES above):
.SUFFIXES:
.SUFFIXES: .txt .html .pdf .txt-spellchecked-auto .txt-spellchecked .txt-prepped .adoc-spellchecked-auto .adoc-spellchecked .adoc-prepped .in-spellchecked-auto .in-spellchecked .in-prepped .sample-spellchecked-auto .sample-spellchecked .sample-prepped .conf-spellchecked-auto .conf-spellchecked .conf-prepped .adoc .adoc-parsed .chunked .conf .in .sample
# Automake housekeeping: regenerate Makefile.in / Makefile / config.status
# and the configure-substituted helper files when their templates change.
$(srcdir)/Makefile.in: # $(srcdir)/Makefile.am  $(am__configure_deps)
	@for dep in $?; do \
	  case '$(am__configure_deps)' in \
	    *$$dep*) \
	      ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
	        && { if test -f $@; then exit 0; else break; fi; }; \
	      exit 1;; \
	  esac; \
	done; \
	echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu docs/Makefile'; \
	$(am__cd) $(top_srcdir) && \
	  $(AUTOMAKE) --gnu docs/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
	@case '$?' in \
	  *config.status*) \
	    cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
	  *) \
	    echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \
	    cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \
	esac;

$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh

$(top_srcdir)/configure: # $(am__configure_deps)
	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): # $(am__aclocal_m4_deps)
	cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
# Configure-substituted documentation helpers (from their *.in templates):
asciidoc.conf: $(top_builddir)/config.status $(srcdir)/asciidoc.conf.in
	cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
docinfo.xml: $(top_builddir)/config.status $(srcdir)/docinfo.xml.in
	cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
docinfo.xml.sh: $(top_builddir)/config.status $(srcdir)/docinfo.xml.sh.in
	cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@

# Standard libtool cleanup stubs:
mostlyclean-libtool:
	-rm -f *.lo

clean-libtool:
	-rm -rf .libs _libs
# Install the configure-selected single-HTML documents into htmldocdir
# (standard automake _DATA machinery):
install-htmldocDATA: $(htmldoc_DATA)
	@$(NORMAL_INSTALL)
	@list='$(htmldoc_DATA)'; test -n "$(htmldocdir)" || list=; \
	if test -n "$$list"; then \
	  echo " $(MKDIR_P) '$(DESTDIR)$(htmldocdir)'"; \
	  $(MKDIR_P) "$(DESTDIR)$(htmldocdir)" || exit 1; \
	fi; \
	for p in $$list; do \
	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
	  echo "$$d$$p"; \
	done | $(am__base_list) | \
	while read files; do \
	  echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(htmldocdir)'"; \
	  $(INSTALL_DATA) $$files "$(DESTDIR)$(htmldocdir)" || exit $$?; \
	done

uninstall-htmldocDATA:
	@$(NORMAL_UNINSTALL)
	@list='$(htmldoc_DATA)'; test -n "$(htmldocdir)" || list=; \
	files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
	dir='$(DESTDIR)$(htmldocdir)'; $(am__uninstall_files_from_dir)
# Install/uninstall PDF documents into pdfdir (standard automake _DATA
# machinery; note pdf_DATA is currently commented out above, so these
# are effectively no-ops unless it gets defined):
install-pdfDATA: $(pdf_DATA)
	@$(NORMAL_INSTALL)
	@list='$(pdf_DATA)'; test -n "$(pdfdir)" || list=; \
	if test -n "$$list"; then \
	  echo " $(MKDIR_P) '$(DESTDIR)$(pdfdir)'"; \
	  $(MKDIR_P) "$(DESTDIR)$(pdfdir)" || exit 1; \
	fi; \
	for p in $$list; do \
	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
	  echo "$$d$$p"; \
	done | $(am__base_list) | \
	while read files; do \
	  echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pdfdir)'"; \
	  $(INSTALL_DATA) $$files "$(DESTDIR)$(pdfdir)" || exit $$?; \
	done

uninstall-pdfDATA:
	@$(NORMAL_UNINSTALL)
	@list='$(pdf_DATA)'; test -n "$(pdfdir)" || list=; \
	files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
	dir='$(DESTDIR)$(pdfdir)'; $(am__uninstall_files_from_dir)

# This directory's subdirectories are mostly independent; you can cd
# into them and run 'make' without going through this Makefile.
# To change the values of 'make' variables: instead of editing Makefiles,
# (1) if the variable is set in 'config.status', edit 'config.status'
#     (which will cause the Makefiles to be regenerated when you run 'make');
# (2) otherwise, pass the desired values on the 'make' command line.
# Standard automake recursion driver: strip "-recursive" from the goal,
# run it in each SUBDIR (DIST_SUBDIRS for distclean/maintainer-clean),
# honoring "make -k" via $$failcom, then run the "-am" variant here.
$(am__recursive_targets):
	@fail=; \
	if $(am__make_keepgoing); then \
	  failcom='fail=yes'; \
	else \
	  failcom='exit 1'; \
	fi; \
	dot_seen=no; \
	target=`echo $@ | sed s/-recursive//`; \
	case "$@" in \
	  distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
	  *) list='$(SUBDIRS)' ;; \
	esac; \
	for subdir in $$list; do \
	  echo "Making $$target in $$subdir"; \
	  if test "$$subdir" = "."; then \
	    dot_seen=yes; \
	    local_target="$$target-am"; \
	  else \
	    local_target="$$target"; \
	  fi; \
	  ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
	  || eval $$failcom; \
	done; \
	if test "$$dot_seen" = "no"; then \
	  $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
	fi; test -z "$$fail"

# Standard automake source-navigation index targets (etags/ctags/gtags/
# cscope), generated boilerplate:
ID: $(am__tagged_files)
	$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-recursive
TAGS: tags

tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
	set x; \
	here=`pwd`; \
	if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \
	  include_option=--etags-include; \
	  empty_fix=.; \
	else \
	  include_option=--include; \
	  empty_fix=; \
	fi; \
	list='$(SUBDIRS)'; for subdir in $$list; do \
	  if test "$$subdir" = .; then :; else \
	    test ! -f $$subdir/TAGS || \
	      set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \
	  fi; \
	done; \
	$(am__define_uniq_tagged_files); \
	shift; \
	if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
	  test -n "$$unique" || unique=$$empty_fix; \
	  if test $$# -gt 0; then \
	    $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
	      "$$@" $$unique; \
	  else \
	    $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
	      $$unique; \
	  fi; \
	fi
ctags: ctags-recursive

CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
	$(am__define_uniq_tagged_files); \
	test -z "$(CTAGS_ARGS)$$unique" \
	  || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
	     $$unique

GTAGS:
	here=`$(am__cd) $(top_builddir) && pwd` \
	  && $(am__cd) $(top_srcdir) \
	  && gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-recursive

cscopelist-am: $(am__tagged_files)
	list='$(am__tagged_files)'; \
	case "$(srcdir)" in \
	  [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
	  *) sdir=$(subdir)/$(srcdir) ;; \
	esac; \
	for i in $$list; do \
	  if test -f "$$i"; then \
	    echo "$(subdir)/$$i"; \
	  else \
	    echo "$$sdir/$$i"; \
	  fi; \
	done >> $(top_builddir)/cscope.files

distclean-tags:
	-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
# Standard automake "make dist" staging: copy DISTFILES into $(distdir)
# (normalizing srcdir prefixes, creating subdirectories, fixing perms),
# then recurse into DIST_SUBDIRS with relativized distdir paths:
distdir: $(BUILT_SOURCES)
	$(MAKE) $(AM_MAKEFLAGS) distdir-am

distdir-am: $(DISTFILES)
	@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
	topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
	list='$(DISTFILES)'; \
	  dist_files=`for file in $$list; do echo $$file; done | \
	  sed -e "s|^$$srcdirstrip/||;t" \
	      -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
	case $$dist_files in \
	  */*) $(MKDIR_P) `echo "$$dist_files" | \
			   sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
			   sort -u` ;; \
	esac; \
	for file in $$dist_files; do \
	  if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
	  if test -d $$d/$$file; then \
	    dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
	    if test -d "$(distdir)/$$file"; then \
	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
	    fi; \
	    if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
	      cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
	      find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
	    fi; \
	    cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
	  else \
	    test -f "$(distdir)/$$file" \
	    || cp -p $$d/$$file "$(distdir)/$$file" \
	    || exit 1; \
	  fi; \
	done
	@list='$(DIST_SUBDIRS)'; for subdir in $$list; do \
	  if test "$$subdir" = .; then :; else \
	    $(am__make_dryrun) \
	      || test -d "$(distdir)/$$subdir" \
	      || $(MKDIR_P) "$(distdir)/$$subdir" \
	      || exit 1; \
	    dir1=$$subdir; dir2="$(distdir)/$$subdir"; \
	    $(am__relativize); \
	    new_distdir=$$reldir; \
	    dir1=$$subdir; dir2="$(top_distdir)"; \
	    $(am__relativize); \
	    new_top_distdir=$$reldir; \
	    echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \
	    echo "     am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \
	    ($(am__cd) $$subdir && \
	      $(MAKE) $(AM_MAKEFLAGS) \
	        top_distdir="$$new_top_distdir" \
	        distdir="$$new_distdir" \
		am__remove_distdir=: \
		am__skip_length_check=: \
		am__skip_mode_fix=: \
	        distdir) \
	      || exit 1; \
	  fi; \
	done
# Standard automake check/install driver targets:
check-am: all-am
	$(MAKE) $(AM_MAKEFLAGS) check-local
check: check-recursive
all-am: Makefile $(DATA) all-local
installdirs: installdirs-recursive
installdirs-am:
	for dir in "$(DESTDIR)$(htmldocdir)" "$(DESTDIR)$(pdfdir)"; do \
	  test -z "$$dir" || $(MKDIR_P) "$$dir"; \
	done
install: install-recursive
install-exec: install-exec-recursive
install-data: install-data-recursive
uninstall: uninstall-recursive

install-am: all-am
	@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am

installcheck: installcheck-recursive
# Like "install", but strip installed programs (none here; kept for
# automake uniformity):
install-strip:
	if test -z '$(STRIP)'; then \
	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
	      install; \
	else \
	  $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
	    install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
	    "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
	fi
# Standard automake cleanup ladder (mostlyclean < clean < distclean <
# maintainer-clean), driven by the *FILES variables defined above:
mostlyclean-generic:

clean-generic:
	-test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)

distclean-generic:
	-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
	-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
	-test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES)

maintainer-clean-generic:
	@echo "This command is intended for maintainers to use"
	@echo "it deletes files that may require special tools to rebuild."
	-test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES)
clean: clean-recursive

clean-am: clean-generic clean-libtool clean-local mostlyclean-am

distclean: distclean-recursive
	-rm -f Makefile
distclean-am: clean-am distclean-generic distclean-tags

# Standard automake forwarding stubs: each documentation/install goal
# recurses into SUBDIRS; the "-am" variants are the (mostly empty)
# local parts. Followed by the .MAKE/.PHONY/.PRECIOUS declarations.
dvi: dvi-recursive

dvi-am:

html: html-recursive

html-am:

info: info-recursive

info-am:

install-data-am: install-htmldocDATA install-pdfDATA

install-dvi: install-dvi-recursive

install-dvi-am:

install-exec-am:

install-html: install-html-recursive

install-html-am:

install-info: install-info-recursive

install-info-am:

install-man:

install-pdf: install-pdf-recursive

install-pdf-am:

install-ps: install-ps-recursive

install-ps-am:

installcheck-am:

maintainer-clean: maintainer-clean-recursive
	-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic

mostlyclean: mostlyclean-recursive

mostlyclean-am: mostlyclean-generic mostlyclean-libtool

pdf-am:

ps: ps-recursive

ps-am:

uninstall-am: uninstall-htmldocDATA uninstall-pdfDATA

.MAKE: $(am__recursive_targets) check-am install-am install-strip

.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am all-local \
	check check-am check-local clean clean-generic clean-libtool \
	clean-local cscopelist-am ctags ctags-am distclean \
	distclean-generic distclean-libtool distclean-tags distdir dvi \
	dvi-am html html-am info info-am install install-am \
	install-data install-data-am install-dvi install-dvi-am \
	install-exec install-exec-am install-html install-html-am \
	install-htmldocDATA install-info install-info-am install-man \
	install-pdf install-pdf-am install-pdfDATA install-ps \
	install-ps-am install-strip installcheck installcheck-am \
	installdirs installdirs-am maintainer-clean \
	maintainer-clean-generic mostlyclean mostlyclean-generic \
	mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \
	uninstall-am uninstall-htmldocDATA uninstall-pdfDATA

.PRECIOUS: Makefile


# Note: "doc" ensures the `configure`-specified list of documents we actually
# want, while the default generated "all: all-am" target historically causes
# some but not all of these targets to get built (e.g. ChangeLog html/pdf is
# usually not made there). Post-processing "doc" as part of "all" helps
# ensure that we do not rebuild stuff in vain during parallel builds (where
# "all-am" and "doc" would be unordered parallel goals of the "all" target)
# while getting those further goals achieved eventually in the default build.
# Crucially, this allows to make sure "ChangeLog(.adoc*)" files have been
# generated once (can take a looong while), settled into place, and only then
# we revisit them for html/pdf rendering (another long while) without randomly
# confusing the system with new timestamps and needless regenerations later on.
all: 
	@echo "  DOC-FOLLOW-UP	Basic 'make $@' in `pwd` is done, following up with 'make doc' to ensure complex document types"
	+@$(MAKE) $(AM_MAKEFLAGS) doc
# Run the configure-selected doc checks, then delegate to man checks:
check-local: $(CHECK_LOCAL_TARGETS) 
	+@cd $(builddir)/man && $(MAKE) $(AM_MAKEFLAGS) check

# Make sure sources are there for out-of-tree builds:
all-local all-am-local \
$(DOC_BUILD_CHANGELOG_TEXT) $(DOC_BUILD_CHANGELOG_ADOC) \
 man-man html-single html-chunked $(ASCIIDOC_PDF) $(ASCIIDOC_HTML_SINGLE) $(ASCIIDOC_HTML_CHUNKED): $(abs_top_builddir)/docs/.prep-src-docs

# Also build the optional man pages as part of "all" in this directory:
all-local: 
	+@cd $(builddir)/man && $(MAKE) $(AM_MAKEFLAGS) all-optional

# This list is defined by configure script choices and options:
doc: $(abs_top_builddir)/docs/.prep-src-docs $(DOC_BUILD_CHANGELOG_TEXT) $(DOC_BUILD_CHANGELOG_ADOC)  man-man html-single html-chunked

# This target can be called by developers to go around the configure
# script choices at their risk (e.g. missing tools are possible) and
# try to build all documentation file types:
docs: $(DOC_BUILD_CHANGELOG_TEXT) $(DOC_BUILD_CHANGELOG_ADOC) pdf html-single html-chunked man-man html-man

all-docs: docs

# Sanity-check every document type (regardless of configure choices):
check-docs: check-pdf check-html-single check-html-chunked check-man

# Not called by default, but handy for maintainers to check which words
# in the custom dictionary are used or not by the current NUT codebase.
# Note that historically many words were added to facilitate rendition
# of the nut-website (long ago splintered from main nut repository),
# but since recently it has a way to track its own additions to the
# dictionary file. This code should help populate it as well, and keep
# only relevant entries in the appropriate corner of the sources.
# Note this can take 5-10 minutes!
spellcheck-report-dict-usage: $(NUT_SPELL_DICT).usage-report

# Umbrella targets for the configure-selected document lists:
pdf: $(ASCIIDOC_PDF)
# also build the HTML manpages with these targets
html-single: $(ASCIIDOC_HTML_SINGLE)
html-chunked: $(ASCIIDOC_HTML_CHUNKED)

# the "for" loops might better use $^ but it might be not portable
#
# Sanity check: every built PDF must be non-empty and identified by
# file(1) as a "PDF document"; list all failures before erroring out.
# The .check-pdf touch-file caches a passing run.
check-pdf: .check-pdf
.check-pdf: $(ASCIIDOC_PDF) Makefile
	@FAILED=""; LANG=C; LC_ALL=C; export LANG; export LC_ALL; \
	for F in $(ASCIIDOC_PDF) ; do \
	    test -s "$$F" && { file "$$F" | $(EGREP) -i 'PDF document' > /dev/null ; } || FAILED="$$FAILED $$F" ; \
	done; if test -n "$$FAILED" ; then \
	    echo "FAILED PDF sanity check for:$$FAILED" >&2 ; file $$FAILED >&2 ; exit 1; \
	fi; echo "PASSED PDF sanity check"; exit 0
	@touch $@

# Regarding ChangeLog check: sometimes asciidoc gives up early
# (we have megabytes of text and thousands of sections here).
# In some cases, the intermediate XML is broken and a2x=>xmllint
# notices it. In others, it is truncated at just the right place
# structurally and leads to a short HTML with only part of the
# expected contents. We should no longer have several processes
# trying to create the files involved (or rather do so atomically
# and rename into final path, in case we still have competition
# here); earlier when several generators appended to the same
# file we could have several copies overlaid, with one of the
# document's copies starting mid-sentence of another.
# The two expected mentions are in the table of contents and
# in the eventual section. Checking for first/second entries,
# and not exactly two mentions, should allow to catch the case
# of overlapping documents. Checking for the last entry allows
# to catch incomplete parses, where asciidoc gave up early.
# NOTE: Technically it may be more than two, if the author and
# date were used several times in the original ChangeLog file
# (either with different e-mails, or if different author's work
# is interleaved during the day, e.g. many PRs merged, and no
# CHANGELOG_REQUIRE_GROUP_BY_DATE_AUTHOR=true setting was in place).
# NOTE: No dependencies, avoids (re-)generation and log messages
# but causes re-run of the check every time.
# Recipe logic: build regex-safe patterns for the first, second and last
# dated entries of the plain-text ChangeLog; count occurrences of each in
# the original (O) and in the rendered ChangeLog.html (N); require N to be
# within [O+1 .. 2*O] (roughly one TOC anchor plus one section title per
# original mention). On mismatch, record a FAILure with size listings --
# unless ChangeLog.adoc turns out to be the known "Failed to generate" stub,
# in which case the failure is forgiven. With no inputs at all, SKIP.
ChangeLog.html-contentchecked:
	@FAILED=""; \
	 entry_filter() { sed -e 's/ *[\"<].*//' -e 's/\([][(){}|+?.*]\)/\\\1/g' ; } ; \
	 if [ -s '$(top_builddir)/ChangeLog' ] && [ -s ChangeLog.html ] ; then \
	    SECOND_ENTRY="`$(EGREP) '^[0-9]' '$(top_builddir)/ChangeLog' | head -2 | tail -1 | entry_filter`" || SECOND_ENTRY="" ; \
	    FIRST_ENTRY="`$(EGREP) '^[0-9]' '$(top_builddir)/ChangeLog' | head -1 | entry_filter`" || FIRST_ENTRY="" ; \
	    LAST_ENTRY="`$(EGREP) '^[0-9]' '$(top_builddir)/ChangeLog' | tail -1 | entry_filter`" || LAST_ENTRY="" ; \
	    if [ -n "$${FIRST_ENTRY}" ] ; then \
	        O="`$(EGREP) -c \"^$${FIRST_ENTRY}\" '$(top_builddir)/ChangeLog'`" ; \
	        N="`$(EGREP) -c \"title.*$${FIRST_ENTRY}\" 'ChangeLog.html'`" ; \
	        MIN="`expr $${O} + 1`"     && [ "$${MIN}" -gt 0 ] 2>/dev/null || MIN=1 ; \
	        MAX="`expr $${O} + $${O}`" && [ "$${MAX}" -gt 2 ] 2>/dev/null || MAX=2 ; \
	        if [ "$${N}" -lt "$${MIN}" ] || [ "$${N}" -gt "$${MAX}" ]; then \
	            echo "FAILED ChangeLog.html check: does not contain expected first entry the right amount of times (huge doc, must have got aborted mid-way): $${FIRST_ENTRY} (seen $${N} times, expected between $${MIN} and $${MAX})" >&2 ; \
	            if [ -z "$$FAILED" ] ; then \
	                echo "Expected size over 3MB (for common builds):" >&2 ; \
	                ls -la "ChangeLog.html" '$(top_builddir)/ChangeLog'* >&2 ; \
	                FAILED="ChangeLog.html" ; \
	            fi ; \
	        fi ; \
	    fi; \
	    if [ -n "$${SECOND_ENTRY}" ] ; then \
	        O="`$(EGREP) -c \"^$${SECOND_ENTRY}\" '$(top_builddir)/ChangeLog'`" ; \
	        N="`$(EGREP) -c \"title.*$${SECOND_ENTRY}\" 'ChangeLog.html'`" ; \
	        MIN="`expr $${O} + 1`"     && [ "$${MIN}" -gt 0 ] 2>/dev/null || MIN=1 ; \
	        MAX="`expr $${O} + $${O}`" && [ "$${MAX}" -gt 2 ] 2>/dev/null || MAX=2 ; \
	        if [ "$${N}" -lt "$${MIN}" ] || [ "$${N}" -gt "$${MAX}" ]; then \
	            echo "FAILED ChangeLog.html check: does not contain expected second entry the right amount of times (huge doc, must have got aborted mid-way): $${SECOND_ENTRY} (seen $${N} times, expected between $${MIN} and $${MAX})" >&2 ; \
	            if [ -z "$$FAILED" ] ; then \
	                echo "Expected size over 3MB (for common builds):" >&2 ; \
	                ls -la "ChangeLog.html" '$(top_builddir)/ChangeLog'* >&2 ; \
	                FAILED="ChangeLog.html" ; \
	            fi ; \
	        fi ; \
	    fi; \
	    if [ -n "$${LAST_ENTRY}" ] ; then \
	        O="`$(EGREP) -c \"^$${LAST_ENTRY}\" '$(top_builddir)/ChangeLog'`" ; \
	        N="`$(EGREP) -c \"title.*$${LAST_ENTRY}\" 'ChangeLog.html'`" ; \
	        MIN="`expr $${O} + 1`"     && [ "$${MIN}" -gt 0 ] 2>/dev/null || MIN=1 ; \
	        MAX="`expr $${O} + $${O}`" && [ "$${MAX}" -gt 2 ] 2>/dev/null || MAX=2 ; \
	        if [ "$${N}" -lt "$${MIN}" ] || [ "$${N}" -gt "$${MAX}" ]; then \
	            echo "FAILED ChangeLog.html check: does not contain expected last entry the right amount of times (huge doc, must have got aborted mid-way): $${LAST_ENTRY} (seen $${N} times, expected between $${MIN} and $${MAX})" >&2 ; \
	            if [ -z "$$FAILED" ] ; then \
	                echo "Expected size over 3MB (for common builds):" >&2 ; \
	                ls -la "ChangeLog.html" '$(top_builddir)/ChangeLog'* >&2 ; \
	                FAILED="ChangeLog.html" ; \
	            fi ; \
	        fi ; \
	    fi; \
	    if [ x"$$FAILED" = x ] ; then \
	        echo "PASSED $@" >&2 ; \
	        exit 0 ; \
	    fi ; \
	    if [ x"$$FAILED" != x ] && [ -s '$(top_builddir)/ChangeLog.adoc' ] \
	    && [ "`head -1 $(top_builddir)/ChangeLog.adoc`" = "=== Failed to generate the ChangeLog" ] \
	    ; then \
	        FAILED="" ; \
	    fi; \
	 fi; \
	 if [ x"$$FAILED" = x ] ; then \
	    echo "  SKIP	$@ : because input files were not available" >&2 ; \
	    exit 0 ; \
	 fi ; \
	 exit 1

# Sanity-check single-page HTML outputs: each must be non-empty and be
# recognized by file(1) as an XML/HTML document. A rendered ChangeLog file
# additionally gets the content check above (entry counts), but only when
# the plain-text ChangeLog exists. ".check-html-single" is the stamp file;
# "check-html-single" is its phony-style alias.
check-html-single: .check-html-single
.check-html-single: $(ASCIIDOC_HTML_SINGLE) Makefile 
	+@FAILED=""; LANG=C; LC_ALL=C; export LANG; export LC_ALL; \
	 for F in $(ASCIIDOC_HTML_SINGLE) ; do \
	    test -s "$$F" && { file "$$F" | $(EGREP) -i '(XML|HTML.*document)' > /dev/null ; } || FAILED="$$FAILED $$F" ; \
	    case "$$F" in \
	        *ChangeLog*) if [ -s '$(top_builddir)/ChangeLog' ] ; then \
	                $(MAKE) $(AM_MAKEFLAGS) "`basename \"$$F\"`"-contentchecked || FAILED="$$FAILED $$F" ; \
	            fi ;; \
	    esac ; \
	 done; if test -n "$$FAILED" ; then \
	    echo "FAILED HTML-single sanity check for:$$FAILED" >&2 ; file $$FAILED >&2 ; exit 1; \
	 fi; echo "PASSED HTML-single sanity check"; exit 0
	@touch $@

# Sanity-check chunked-HTML output directories: every *.html chunk must be
# a non-empty XML/HTML document per file(1), and every *.css must contain
# the string "CSS stylesheet" (presumably present in the generated
# stylesheet header). ".check-html-chunked" is the stamp file.
check-html-chunked: .check-html-chunked
.check-html-chunked: $(ASCIIDOC_HTML_CHUNKED) Makefile
	@FAILED=""; LANG=C; LC_ALL=C; export LANG; export LC_ALL; \
	for D in $(ASCIIDOC_HTML_CHUNKED); do \
	    for F in "$$D"/*.html ; do \
	        test -s "$$F" && { file "$$F" | $(EGREP) -i '(XML|HTML.*document)' > /dev/null ; } || FAILED="$$FAILED $$F" ; \
	    done; \
	    for F in "$$D"/*.css ; do \
	        test -s "$$F" && { $(EGREP) -i 'CSS stylesheet' "$$F" > /dev/null ; } || FAILED="$$FAILED $$F" ; \
	    done; \
	done; if test -n "$$FAILED" ; then \
	    echo "FAILED HTML-chunked sanity check for:$$FAILED" >&2 ; file $$FAILED >&2 ; exit 1; \
	fi; echo "PASSED HTML-chunked sanity check"; exit 0
	@touch $@

# Note: usually the results from man-page check will be reported twice:
# once as a SUBDIRS child makefile, and once via DOC_CHECK_LIST expansion
# Note: default `make all` in the man directory caters to drivers etc.
# chosen during configure script execution. The "all-man" and "all-html"
# rules build everything documented.
# NOTE: we rig it up with a DOCS_NO_MAN option to simplify parallel work
# from top-level Makefile, while allowing legacy "cd docs && make" to
# still do the right thing by default :)
# Delegate the man-page related targets to docs/man/Makefile, unless the
# caller set DOCS_NO_MAN=true (top-level Makefile uses that to parallelize
# man-page builds separately from the rest of the docs).
check-man check-man-man all-man man-man all-html html-man:
	+@if [ x"$(DOCS_NO_MAN)" = xtrue ] ; then \
		echo "  DOC-NOT-MAN	SKIP: $@ called in docs/Makefile" ; \
	 else \
		cd $(abs_top_builddir)/docs/man/ && $(MAKE) $(AM_MAKEFLAGS) -f Makefile $@ ; \
	 fi

# Same delegation as above, but mapping the conventional "man" target to
# "make all" inside docs/man/ (which builds the configure-selected subset).
man:
	+@if [ x"$(DOCS_NO_MAN)" = xtrue ] ; then \
		echo "  DOC-NOT-MAN	SKIP: $@ called in docs/Makefile" ; \
	 else \
		cd $(abs_top_builddir)/docs/man/ && $(MAKE) $(AM_MAKEFLAGS) -f Makefile all ; \
	 fi

# Prepare text files (currently a manually tracked list)
# with known presence of GitHub links to convert them from
# short <hash><number> notation into asciidoc link markup
# before rendering into HTML/PDF.
# Work around some documents that have originally included
# the asciidoc markup (use double-hash to avoid conversion).
# The $< is okay here, it is used in a suffix rule below
# Recipe details: if the input is missing, try to (re-)generate it first;
# skip when the output already exists and is newer than the input; otherwise
# run the sed pipeline (GitHub "#NNN" and "org/repo#NNN" references become
# asciidoc link: markup; "##" escapes pre-linked text; trailing "+" in URLs
# is percent-encoded) and rename the PID-suffixed temp file into place
# atomically. The final stanza refreshes the
# docs/.ChangeLog.adoc-parsed.latest touch-file when the top-level
# ChangeLog.adoc-parsed was the file just (re-)built.
.adoc.adoc-parsed: 
	+@if [ ! -s '$<' ] ; then \
		echo "  DOC-ASCIIDOC-GITHUB-LINKS	STRANGE: input $< does not exist or is empty" >&2 ; \
		$(MAKE) $(AM_MAKEFLAGS) '$<' ; \
	 fi
	@if [ -s '$@' ] && [ -s '$<' ] && [ x"`find '$@' -newer '$<'`" != x ] ; then \
		echo "  DOC-ASCIIDOC-GITHUB-LINKS	SKIP: $@ already exists and is newer than $<" ; \
		if [ x"$(MAINTAINER_ASCIIDOCS_CHANGELOG_DEBUG)" != xno ] ; then \
			ls -lad $@ $< || true ; \
			stat $@ $< || true ; \
		fi ; \
		exit 0 ; \
	 fi ; \
	 echo "  DOC-ASCIIDOC-GITHUB-LINKS	Parsing GitHub link patterns $< => $@"; \
	 if [ x"$(MAINTAINER_ASCIIDOCS_CHANGELOG_DEBUG)" != xno ] ; then \
		ls -lad $@ $< || true ; \
		stat $@ $< || true ; \
	 fi ; \
	 cat '$<' | { $(SED) \
		-e 's%\(link:https*://github.com/networkupstools/[a-zA-Z0-9./-]*/[1-9][0-9]*/*\[[^]]*\)\#\([1-9][0-9]*\)%\1\#\#\2%g' \
		-e 's%\(issue\) *\#\([1-9][0-9]*\)\([^0-9]\|$$\)%link:https://github.com/networkupstools/nut/issues/\2[\1 \#\#\2]\3%g' \
		-e 's%\(PR\|pull request\) *\#\([1-9][0-9]*\)\([^0-9]\|$$\)%link:https://github.com/networkupstools/nut/pull/\2[\1 \#\#\2]\3%g' \
		-e 's%\([[ ,]\)\#\([1-9][0-9]*\)\([^0-9]\|$$\)%\1link:https://github.com/networkupstools/nut/issues/\2[\#\#\2]\3%g' \
		-e 's%\(issue\) networkupstools/\([^ ][^ ]*\)\#\([1-9][0-9]*\)\([^0-9]\|$$\)%link:https://github.com/networkupstools/\2/issues/\3[\1 \2\#\#\3]\4%g' \
		-e 's%\(PR\|pull request\) *networkupstools/\([^ ][^ ]*\)\#\([1-9][0-9]*\)\([^0-9]\|$$\)%link:https://github.com/networkupstools/\2/pull/\3[\1 \2\#\#\3]\4%g' \
		-e 's%\([[ ,]\)networkupstools/\([^ ][^ ]*\)\#\([1-9][0-9]*\)\([^0-9]\|$$\)%\1link:https://github.com/networkupstools/\2/issues/\3[\2\#\#\3]\4%g' \
		-e 's%\#\(\#[1-9][0-9]*\)%\1%g' \
		-e 's,\(https*://[^ \+]*\)[\]*[+],\1%2B,g' \
	 ; } > "$@.tmp.$$$$" \
	 && test -s "$@.tmp.$$$$" \
	 && mv -f "$@.tmp.$$$$" '$@' \
	 || { RES="$$?" ; rm -f "$@.tmp.$$$$" ; exit $$RES ; }
	@if [ x'$@' = x'$(top_builddir)/ChangeLog.adoc-parsed' ] ; then \
		touch -r '$@' '$(top_builddir)/docs/.ChangeLog.adoc-parsed.latest' || touch '$(top_builddir)/docs/.ChangeLog.adoc-parsed.latest' ; \
	 fi

# Dependency-only rule: the top-level ChangeLog.adoc-parsed is produced
# from ChangeLog.adoc via the .adoc.adoc-parsed suffix rule above.
$(top_builddir)/ChangeLog.adoc-parsed: $(top_builddir)/ChangeLog.adoc

# Local touch-file mirroring the timestamp of the (re-)parsed top-level
# ChangeLog.adoc-parsed; falls back to a plain touch when '$?' is empty
# or the reference-timestamp copy fails.
.ChangeLog.adoc-parsed.latest: $(top_builddir)/ChangeLog.adoc-parsed
	@if [ x'$?' != x ] && touch -r '$?' '$@' ; then : ; else touch '$@' ; fi

# "dummy" is an always-out-of-date placeholder prerequisite: it forces the
# wrapper below to run on every make invocation, so the parent Makefile can
# decide whether the plain-text ChangeLog actually needs (re-)generation.
dummy:
$(top_builddir)/ChangeLog: dummy 
	@+echo "  DOC-CHANGELOG-GENERATE-WRAPPER	$@ : call parent Makefile to decide if (re-)generation is needed" \
	 && cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) $(@F)

# Probably due to the web of makefiles and an overwhelmed job server in some
# implementations, during parallel builds we can end up scheduling several
# threads creating this asciidoc (and adoc-parsed later). This step only
# costs a few seconds, however the updated timestamp may cause new HTML/PDF
# builds which cost a lot more. Below we try a few ways to detect a build
# already running and bail out early if the file exists. Otherwise we bite
# the bullet and spend a few seconds, and then re-check if another thread
# did exist and finished first.
# Weird ways about "INPUT": in some cases the '$?' came in empty, in others
# it has several files listed, e.g. full paths to "ChangeLog .prep-src-docs".
# The "$<" may only be used for suffix rules (and nothing forbids it from
# reporting the two changed files noted above, either). The approach below
# allows us to get a most-reasonably populated path to the original (from
# tarball or freshly generated) plain-text ChangeLog file across a lot
# of "make" implementations.
# FIXME: Fallback URL should refer not to "master" but current commit/tag?
# Recipe outline: resolve INPUT from '$?' (falling back to the expected
# builddir path); if inputs cannot be resolved, write a stub document with a
# fallback URL and bail out; optionally block for up to
# MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY seconds in case a competing parallel
# generator finishes first; then sed-massage the plain-text ChangeLog into
# asciidoc sections ("=== " headers, escaped +/~/|/^ markup, {empty}
# guards) via a PID-suffixed temp file, still deferring to a competitor's
# newer result before the final rename.
$(top_builddir)/ChangeLog.adoc: $(top_builddir)/ChangeLog
	@INPUT="`for F in $? ; do case \"$$F\" in */ChangeLog) [ -s \"$$F\" ] && echo \"$$F\"; exit 0 ;; esac; done`"; \
	 test -n "$${INPUT}" || INPUT="$(top_builddir)/ChangeLog" ; \
	 test -n "$${INPUT}" && test -n '$@' && test -s "$${INPUT}" \
	 || { \
	     MSG="FAILED to resolve input or output filename with this make implementation, or input was not generated!"; \
	     echo "  DOC-CHANGELOG-ASCIIDOC	SKIP: $${MSG}" >&2; \
	     test -n '$@' && { printf '=== Failed to generate the ChangeLog\n\n%s\n\nNOTE: See https://github.com/networkupstools/nut/commits/master for change history.\n\n' "$${MSG}" > '$@' ; } ; \
	     exit ; \
	 } ; \
	 if [ -s '$@' ] && [ x"`find '$@' -newer \"$${INPUT}\" 2>/dev/null`" != x ] ; then echo "  DOC-CHANGELOG-ASCIIDOC	$${INPUT} => $@ : SKIP (keep existing)"; rm -f "$@.tmp.$$$$"; exit 0 ; fi ; \
	 W=0 ; \
	 if [ "$(MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY)" -gt 0 ] 2>/dev/null ; then \
	        echo "  DOC-CHANGELOG-ASCIIDOC	$${INPUT} => $@ : Block for up to $(MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY) sec, maybe another thread will make the file first" ; \
	        while [ "$${W}" -lt "$(MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY)" ] && [ x"`find '$@.tmp.'* '$@' -newer \"$${INPUT}\" 2>/dev/null`" = x ] ; do sleep 1 ; W="`expr $$W + 1`"; done ; touch "$@.tmp.$$$$"; \
	        if [ x"`find '$@' -newer \"$${INPUT}\" 2>/dev/null`" != x ] ; then echo "  DOC-CHANGELOG-ASCIIDOC	$${INPUT} => $@ : SKIP (keep existing)"; rm -f "$@.tmp.$$$$"; exit 0 ; fi ; \
	 fi ; \
	 if [ -s '$@' ] ; then echo "  DOC-CHANGELOG-ASCIIDOC	$${INPUT} => $@ : RE-GENERATE" ; else echo "  DOC-CHANGELOG-ASCIIDOC	$${INPUT} => $@ : GENERATE" ; fi ; \
	 { if [ x"$(MAINTAINER_ASCIIDOCS_CHANGELOG_DEBUG)" != xno ] ; then ls -la "$${INPUT}" '$@' || true ; stat "$${INPUT}" '$@' || true ; fi ; } ; \
	 if [ "$(MAINTAINER_ASCIIDOCS_CHANGELOG_DELAY)" -gt 0 ] 2>/dev/null ; then \
	        echo "  DOC-CHANGELOG-ASCIIDOC	$${INPUT} => $@ : PROCEED, waited for $${W} sec" ; \
	 fi ; \
	 printf "ifdef::txt[]\n== Very detailed Change Log\n"$(A2X_ASCII_IDS)"endif::txt[]\n\n" > "$@.tmp.$$$$" \
	 && TABCHAR="`printf '\t'`" \
	 && $(SED) \
	        -e 's,^\([0-9a-zA-Z]\),=== \1,' \
	        -e 's,^=== \(NOTE: \),\1,' \
	        -e 's,/[+],/\\\\\+,g' \
	        -e 's,[+][+],\\\+\\\+,g' \
	        -e 's,^\([ '"$${TABCHAR}"'][^+]*\)\([^+/\]\)[+],\1\2\\\+,g' \
	        -e 's,^\([ '"$${TABCHAR}"'].*\)\([~|^]\),\1\\\2,g' \
	        -e 's,\[\[\([^]]*\)\]\],[\1],g' \
	        -e 's,^\([ '"$${TABCHAR}"'][ '"$${TABCHAR}"']*\)\([0-9]\),\1{empty}\2,g' \
	   < "$${INPUT}" >> "$@.tmp.$$$$" \
	 && if [ x"`find '$@' -newer \"$${INPUT}\" 2>/dev/null`" != x ] ; then \
	        echo "  DOC-CHANGELOG-ASCIIDOC	$${INPUT} => $@ : SKIP (keep recently born competitor)"; rm -f "$@.tmp.$$$$"; \
	    else \
	        test -s "$@.tmp.$$$$" && mv -f "$@.tmp.$$$$" '$@' \
	        || { RES="$$?" ; rm -f "$@.tmp.$$$$" ; exit $$RES ; } ; \
	    fi

# Dependency-only rules: tie each rendered document (single-page HTML,
# chunked HTML, PDF) to its source text and shared asciidoc configuration.
user-manual.html user-manual.chunked user-manual.pdf: $(FULL_USER_MANUAL_DEPS)
developer-guide.html developer-guide.chunked developer-guide.pdf: $(FULL_DEVELOPER_GUIDE_DEPS)
packager-guide.html packager-guide.chunked packager-guide.pdf: packager-guide.txt asciidoc.conf
qa-guide.html qa-guide.chunked qa-guide.pdf: $(FULL_QA_GUIDE_DEPS)
release-notes.html release-notes.chunked release-notes.pdf: release-notes.txt $(top_builddir)/NEWS.adoc-parsed $(top_builddir)/UPGRADING.adoc-parsed asciidoc.conf
solaris-usb.html solaris-usb.chunked solaris-usb.pdf: solaris-usb.txt asciidoc.conf

# We intentionally evaluate that the original generated ChangeLog file is
# up to date (from dist or against git) every time we look at it. However
# we do want to skip re-generation of heavy file formats afterwards if it
# is still valid (for make, a need for re-evaluation without timestamps
# to look at is cause to run a recipe always). We define recipes outside
# the suffix-based handling and require *them* for default target builds.
ChangeLog.html ChangeLog.chunked ChangeLog.pdf: ChangeLog.txt .ChangeLog.adoc-parsed.latest asciidoc.conf
# Wrapper recipe for full-path targets: skip if the rendered file already
# exists and is newer than the parsed asciidoc source; otherwise delegate
# to a sub-make for the basename target (timed, and with extra file
# listings, when MAINTAINER_ASCIIDOCS_CHANGELOG_DEBUG is enabled).
$(top_builddir)/docs/ChangeLog.html $(top_builddir)/docs/ChangeLog.chunked $(top_builddir)/docs/ChangeLog.pdf: ChangeLog.txt $(top_builddir)/ChangeLog.adoc-parsed asciidoc.conf 
	@+if [ -s '$(@F)' ] && [ x"`find '$(@F)' -newer '$(top_builddir)/ChangeLog.adoc-parsed'`" != x ] ; then \
		echo "  DOC-CHANGELOG-RENDER-WRAPPER	SKIP: `pwd`/$(@F) already exists and is newer than ChangeLog.adoc-parsed" ; \
		if [ x"$(MAINTAINER_ASCIIDOCS_CHANGELOG_DEBUG)" != xno ] ; then \
			ls -lad `pwd` $@ ChangeLog.txt $(top_builddir)/ChangeLog.adoc-parsed asciidoc.conf $(top_builddir)/ChangeLog $(top_builddir)/ChangeLog.adoc .ChangeLog.adoc-parsed.latest || true ; \
			stat `pwd` $@ ChangeLog.txt $(top_builddir)/ChangeLog.adoc-parsed asciidoc.conf $(top_builddir)/ChangeLog $(top_builddir)/ChangeLog.adoc .ChangeLog.adoc-parsed.latest || true ; \
		fi ; \
		exit 0 ; \
	  else \
		echo "  DOC-CHANGELOG-RENDER-WRAPPER	`pwd`/$(@F) does not already exist or is older than ChangeLog.adoc-parsed" ; \
		if [ x"$(MAINTAINER_ASCIIDOCS_CHANGELOG_DEBUG)" != xno ] ; then \
			ls -lad `pwd` $@ ChangeLog.txt $(top_builddir)/ChangeLog.adoc-parsed asciidoc.conf $(top_builddir)/ChangeLog $(top_builddir)/ChangeLog.adoc .ChangeLog.adoc-parsed.latest || true ; \
			stat `pwd` $@ ChangeLog.txt $(top_builddir)/ChangeLog.adoc-parsed asciidoc.conf $(top_builddir)/ChangeLog $(top_builddir)/ChangeLog.adoc .ChangeLog.adoc-parsed.latest || true ; \
			time $(MAKE) $(AM_MAKEFLAGS) $(@F) || exit ; \
		else \
			$(MAKE) $(AM_MAKEFLAGS) $(@F) || exit ; \
		fi ; \
		echo "  DOC-CHANGELOG-RENDER-WRAPPER	`pwd`/$(@F): SUCCESS" ; \
	  fi

# Chop docinfo.xml down to the revision history since OLDEST_REL.
# Assume revisions are sorted in the file, from newest to oldest.
# Echoing of a '<revision>' opener is deferred until its '<revnumber>' is
# read, so revisions older than OLDEST_REL (seen after it) can be dropped
# wholesale while keeping the XML well-formed; '</revhistory>' always
# passes through.
# Fixes vs. earlier revision: use 'IFS= read -r' so backslashes and leading
# indentation in docinfo.xml lines are preserved verbatim (plain 'read'
# mangles both); write via a PID-suffixed temp file with cleanup on
# failure, matching the atomic-rename convention of the other recipes here
# (a fixed "$@.tmp" name was a parallel-build race).
# FIXME: If we ever get more than one file to chomp this way, make it a snippet
docinfo-since-v2.8.3.xml: docinfo.xml
	@OLDEST_REL="2.8.3"; \
	 IN_REVISION=false ; OLDEST_SEEN=false ; \
	 while IFS= read -r LINE ; do \
		case "$${LINE}" in \
			*'<revision>'*) if $$OLDEST_SEEN ; then true ; else IN_REVISION=true ; fi ;; \
			*'<revnumber>'"$${OLDEST_REL}"'</revnumber>'*) if $$IN_REVISION ; then echo '<revision>'; OLDEST_SEEN=true ; fi ; echo "$$LINE" ;; \
			*'<revnumber>'*'</revnumber>'*) if $$OLDEST_SEEN ; then true ; else if $$IN_REVISION ; then echo '<revision>'; fi ; echo "$$LINE" ; fi ;; \
			*'</revision>'*) if $$IN_REVISION ; then echo "$$LINE" ; fi ; IN_REVISION=false ;; \
			*'</revhistory>'*) echo "$$LINE" ;; \
			*) if test x"$$OLDEST_SEEN" = xfalse || $$IN_REVISION ; then echo "$$LINE" ; fi ;; \
		esac ; \
	 done < '$?' > "$@.tmp.$$$$" \
	 && mv -f "$@.tmp.$$$$" '$@' \
	 || { RES="$$?" ; rm -f "$@.tmp.$$$$" ; exit $$RES ; }

# Some versions of asciidoc ignore the argument of :docinfo: tag
# and require (TXT_BASE_NAME)-docinfo.xml to exist:
# NOTE: $(LN_S) comes from configure and may be a plain copy ("cp -p") on
# platforms without symlink support; '$?' is the single prerequisite here.
qa-guide-docinfo.xml: docinfo-since-v2.8.3.xml
	@rm -f '$@' || true
	@$(LN_S) '$?' '$@'

### Call the prep step consistently to create symlinks (out-of-tree)
### or just touch-files for peace of mind (in-tree builds). Then we
### use these path names (truncated "-prepped") now surely located
### in the builddir as the sources for rendered docs.
*.txt-prepped *.adoc-prepped: $(abs_top_builddir)/docs/.prep-src-docs

# The wildcard targets below attach extra prerequisites (stylesheets,
# docinfo) to matching files -- presumably files already existing in the
# build directory; behavior of glob targets varies per make implementation,
# TODO confirm on non-GNU makes.
*.html: common.xsl xhtml.xsl
# Suffix rules: render a prepped source into a single-page HTML document
# via the shared GENERATE_HTML_SINGLE recipe macro.
.txt-prepped.html: 
	+@$(GENERATE_HTML_SINGLE)
.adoc-prepped.html: 
	+@$(GENERATE_HTML_SINGLE)

*.chunked: common.xsl chunked.xsl
# Suffix rules: render a prepped source into a chunked-HTML directory.
.txt-prepped.chunked:
	@$(GENERATE_HTML_CHUNKED)
.adoc-prepped.chunked:
	@$(GENERATE_HTML_CHUNKED)

*.pdf: docinfo.xml
# Technically only needed for PDF generation, but some other recipes still complained
qa-guide.adoc-prepped \
qa-guide.pdf: docinfo-since-v2.8.3.xml qa-guide-docinfo.xml
# Suffix rules: render a prepped source into a PDF document.
.txt-prepped.pdf:
	@$(GENERATE_PDF)
.adoc-prepped.pdf:
	@$(GENERATE_PDF)
#SPELLCHECK_RECIPE_DEBUG_STREAM = &2

# Note: if we do an interactive spell-check, it updates "nut.dict"
# timestamp even if contents remain. If the caller left a copy of
# the file as "$(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-sorting",
# and/or "$(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-interactive",
# and the dictionary was NOT in fact modified, restore the timestamp.
# Spell check one source file (SPELLCHECK_SRC_ONE) and create the
# "-spellchecked" stamp on success. Recipe outline: guard against bogus or
# empty target names; compute REPORT_* strings (source paths pretty-printed
# relative to the NUT source root) for log messages; in interactive mode
# run "aspell check" and restore the dictionary timestamp when its content
# did not really change; otherwise pipe the source (each line prefixed with
# a space) through "aspell -a", filter out non-error output per
# ASPELL_OUT_NOTERRORS, and fail with a dump of the remainder. Finally
# reject admonition markers (NOTE:/TIP:/etc.) followed by a lower-case
# letter.
# NOTE(review): tmpREPORT_PREFIX is computed but never assigned into
# REPORT_PREFIX, so the "NUT source root :: ..." prefix appears unused --
# confirm whether that promotion was dropped intentionally.
$(SPELLCHECK_BUILDDIR)/$(SPELLCHECK_SRC_ONE)-spellchecked: $(SPELLCHECK_SRCDIR)/$(SPELLCHECK_SRC_ONE) $(abs_top_srcdir)/docs/Makefile.am $(abs_srcdir)/$(NUT_SPELL_DICT)
	@LANG=C; LC_ALL=C; export LANG; export LC_ALL; \
	 if test x"$(SPELLCHECK_SRC_ONE)" = x ; then echo "  SKIP	Bogus spellcheck call for empty target filename (with make target $@ from `pwd`)" >&2 ; exit 0; fi; \
	 case '$@' in *-spellchecked) ;; *) echo "  SKIP	Bogus spellcheck call for non '*-spellchecked' target filename (with make target $@ from `pwd`)" >&2 ; exit 0;; esac; \
	 rm -f '$@' || true ; \
	 $(MKDIR_P) "$(@D)" || exit ; \
	 REPORT_SRCDIR="$(SPELLCHECK_SRCDIR)"; \
	 REPORT_SRC_ONE="$(SPELLCHECK_SRC_ONE)"; \
	 REPORT_PREFIX="" ; \
	 case "$(SPELLCHECK_SRC_ONE)" in \
		/*) ;; \
		*/*)	if [ x"$${REPORT_SRCDIR}" = x ] ; then \
				echo EMPTY >$(SPELLCHECK_RECIPE_DEBUG_STREAM) ; \
				REPORT_SRCDIR="`dirname '$(SPELLCHECK_SRC_ONE)'`"; \
			else \
				echo "APPEND: SPELLCHECK_SRCDIR='$(SPELLCHECK_SRCDIR)' SPELLCHECK_SRC_ONE='$(SPELLCHECK_SRC_ONE)' dirname='`dirname '$(SPELLCHECK_SRC_ONE)'`'" >$(SPELLCHECK_RECIPE_DEBUG_STREAM) ; \
				REPORT_SRCDIR="$${REPORT_SRCDIR}/`dirname '$(SPELLCHECK_SRC_ONE)'`"; \
			fi ; \
			REPORT_SRC_ONE="`basename '$(SPELLCHECK_SRC_ONE)'`"; \
			;; \
		*) ;; \
	 esac; \
	 if [ x"$${REPORT_SRCDIR}" != x ] ; then \
		tmpREPORT_PREFIX="NUT source root :: $${REPORT_SRCDIR} :: " ; \
		REPORT_SRCDIR="`cd \"$${REPORT_SRCDIR}\" && { pwd >$(SPELLCHECK_RECIPE_DEBUG_STREAM) ; pwd | sed 's|^'\"$(abs_top_srcdir)\"'/*||' ; }`" \
		|| { REPORT_SRCDIR="$(SPELLCHECK_SRCDIR)" ; REPORT_SRC_ONE="$(SPELLCHECK_SRC_ONE)" ; REPORT_PREFIX="" ; } ; \
	 fi ; \
	 echo "=== Got REPORT_SRCDIR='$${REPORT_SRCDIR}'" >$(SPELLCHECK_RECIPE_DEBUG_STREAM) ; \
	 case "$${REPORT_SRCDIR}" in \
		"") ;; \
		*/) ;; \
		*)  REPORT_SRCDIR="$${REPORT_SRCDIR}/" ;; \
	 esac ; \
	 if [ x"$(SPELLCHECK_INTERACTIVE)" = xtrue ] ; then \
		echo "  ASPELL	Spell checking (interactively) on $${REPORT_PREFIX}$${REPORT_SRCDIR}$${REPORT_SRC_ONE}"; \
		LANG=$(ASPELL_ENV_LANG) LC_ALL=$(ASPELL_ENV_LANG) $(ASPELL) check $(ASPELL_NUT_COMMON_ARGS) '$(SPELLCHECK_SRCDIR)/$(SPELLCHECK_SRC_ONE)' || exit ; \
		if [ -s $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-interactive ] && [ -s $(abs_srcdir)/$(NUT_SPELL_DICT) ] \
		&& ( [ x"$(SPELLCHECK_QUICK)" = xtrue ] || diff $(abs_srcdir)/$(NUT_SPELL_DICT) $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-interactive >/dev/null ) \
		; then \
			touch -r $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-interactive $(abs_srcdir)/$(NUT_SPELL_DICT) ; \
		else \
		if [ -s $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-sorting ] && [ -s $(abs_srcdir)/$(NUT_SPELL_DICT) ] \
		&& ( [ x"$(SPELLCHECK_QUICK)" = xtrue ] || diff $(abs_srcdir)/$(NUT_SPELL_DICT) $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-sorting >/dev/null ) \
		; then \
			touch -r $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-sorting $(abs_srcdir)/$(NUT_SPELL_DICT) ; \
		fi ; fi ; \
	 else \
		echo "  ASPELL	Spell checking on $${REPORT_PREFIX}$${REPORT_SRCDIR}$${REPORT_SRC_ONE}"; \
	 fi ; \
	 OUT="`(sed 's,^\(.*\)$$, \1,' | $(ASPELL) -a $(ASPELL_NUT_TEXMODE_ARGS) $(ASPELL_NUT_COMMON_ARGS) 2>&1) < '$(SPELLCHECK_SRCDIR)/$(SPELLCHECK_SRC_ONE)'`" \
		&& { if test -n "$$OUT" ; then OUT="`echo \"$$OUT\" | $(EGREP) -b -v '$(ASPELL_OUT_NOTERRORS)' `" ; fi; \
		     test -z "$$OUT" ; } \
		|| { RES=$$? ; \
		     echo "FAILED : Aspell reported errors here:" >&2 \
		     && echo "----- vvv" >&2 \
		     && echo "$$OUT" >&2 \
		     && echo "----- ^^^" >&2 ; \
		     exit $$RES; } ; \
	 if $(EGREP) '^[ *.-]*(NOTE|TIP|IMPORTANT|CAUTION|WARNING):[ \t]*[a-z]' '$(SPELLCHECK_SRCDIR)/$(SPELLCHECK_SRC_ONE)' ; then \
		echo "FAILED: Admonition block should contain a sentence, not starting with a lower-case letter" >&2 ; \
		exit 2 ; \
	 fi ; \
	 touch '$@'

# Suffix rules: automatic (batched) spellcheck of one source file per known
# extension, all delegating to the shared SPELLCHECK_AUTO_ONE recipe macro.
.txt.txt-spellchecked-auto: 
	+@$(SPELLCHECK_AUTO_ONE)

.adoc.adoc-spellchecked-auto: 
	+@$(SPELLCHECK_AUTO_ONE)

.in.in-spellchecked-auto: 
	+@$(SPELLCHECK_AUTO_ONE)

.sample.sample-spellchecked-auto: 
	+@$(SPELLCHECK_AUTO_ONE)

.conf.conf-spellchecked-auto: 
	+@$(SPELLCHECK_AUTO_ONE)

# Automatic spellcheck driver. First optionally dumps aspell setup details
# (SPELLCHECK_ENV_DEBUG=detailed). Then either fans out one sub-make per
# source file (NUT_MAKE_SKIP_FANOUT=true), or splits the sources into three
# batches to reduce sub-make invocations: ../NEWS.adoc and ../UPGRADING.adoc
# first, then names handled by the "-spellchecked-auto" suffix rules in a
# single sub-make (failures signalled via "*.failed" marker files), then
# the rest one by one. Failures are collected and reported with remediation
# hints; exits non-zero unless SPELLCHECK_ERROR_FATAL == no.
spellcheck: 
	@if test "$(SPELLCHECK_ENV_DEBUG)" = detailed ; then \
		echo "ASPELL DEBUG : information about the setup follows:"; \
		LANG=$(ASPELL_ENV_LANG); LC_ALL=$(ASPELL_ENV_LANG); export LANG; export LC_ALL; \
		$(ASPELL) --help || true; \
		(command -v dpkg) && ( dpkg -l | $(GREP) -i aspell ) || true ; \
		echo "ASPELL automatic execution line is : ( sed 's,^\(.*\)$$, \1,' < docfile.txt | $(ASPELL) -a $(ASPELL_NUT_TEXMODE_ARGS) $(ASPELL_NUT_COMMON_ARGS) | $(EGREP) -b -v '$(ASPELL_OUT_NOTERRORS)' )" ; \
		echo "SPELLCHECK_SRCDIR: $(SPELLCHECK_SRCDIR)" ; \
		echo "SPELLCHECK_SRC: $(SPELLCHECK_SRC)" ; \
		echo "ASPELL proceeding to spellchecking job..."; \
	 else true; fi
	+@FAILED="" ; LANG=C; LC_ALL=C; export LANG; export LC_ALL; \
	 if [ x"$(NUT_MAKE_SKIP_FANOUT)" = xtrue ] ; then \
		for docsrc in $(SPELLCHECK_SRC); do \
			if test "$(SPELLCHECK_ENV_DEBUG)" != no ; then \
				echo "ASPELL MAKEFILE DEBUG: Will see from `pwd` if '$(SPELLCHECK_SRCDIR)/$${docsrc}-spellchecked' is up to date" >&2; \
			else true ; fi ; \
			$(MAKE) $(AM_MAKEFLAGS) -s -f "$(abs_top_builddir)/docs/Makefile" SPELLCHECK_SRC="" SPELLCHECK_SRC_ONE="$${docsrc}" SPELLCHECK_BUILDDIR="$(SPELLCHECK_BUILDDIR)" SPELLCHECK_SRCDIR="$(SPELLCHECK_SRCDIR)" VPATH="$(SPELLCHECK_SRCDIR):$(SPELLCHECK_BUILDDIR):$(VPATH)" "$(SPELLCHECK_BUILDDIR)/$${docsrc}-spellchecked" \
			|| FAILED="$$FAILED $(SPELLCHECK_SRCDIR)/$$docsrc"; \
		done ; \
	 else \
		SPELLCHECK_NOEXT_DOCS_FIRST="`for docsrc in $(SPELLCHECK_SRC); do case \"$${docsrc}\" in ../NEWS.adoc|../UPGRADING.adoc) printf '%s ' \"$${docsrc}\" ;; *) ;; esac ; done`" ; \
		SPELLCHECK_AUTO_TGT="`for docsrc in $(SPELLCHECK_SRC); do case \"$${docsrc}\" in */*) ;; *.adoc|*.txt|*.in|*.conf|*.sample) printf '%s ' \"$${docsrc}-spellchecked-auto\" ;; esac ; done`" ; \
		SPELLCHECK_NOEXT_DOCS="`for docsrc in $(SPELLCHECK_SRC); do case \"$${docsrc}\" in ../NEWS.adoc|../UPGRADING.adoc) ;; */*) printf '%s ' \"$${docsrc}\" ;; *.adoc|*.txt|*.in|*.conf|*.sample) ;; *) printf '%s ' \"$${docsrc}\" ;; esac ; done`" ; \
		if test "$(SPELLCHECK_ENV_DEBUG)" != no ; then \
			echo "ASPELL MAKEFILE DEBUG: from `pwd`: SPELLCHECK_NOEXT_DOCS_FIRST='$${SPELLCHECK_NOEXT_DOCS_FIRST}'    SPELLCHECK_AUTO_TGT='$${SPELLCHECK_AUTO_TGT}'    SPELLCHECK_NOEXT_DOCS='$${SPELLCHECK_NOEXT_DOCS}'" ; \
		else true ; fi ; \
		if [ x"$${SPELLCHECK_NOEXT_DOCS_FIRST}" != x ] ; then \
			for docsrc in $${SPELLCHECK_NOEXT_DOCS_FIRST} ; do \
				$(MAKE) $(AM_MAKEFLAGS) -k -s -f "$(abs_top_builddir)/docs/Makefile" SPELLCHECK_SRC="" SPELLCHECK_SRC_ONE="$${docsrc}" SPELLCHECK_BUILDDIR="$(SPELLCHECK_BUILDDIR)" SPELLCHECK_SRCDIR="$(SPELLCHECK_SRCDIR)" VPATH="$(SPELLCHECK_SRCDIR):$(SPELLCHECK_BUILDDIR):$(VPATH)" "$(SPELLCHECK_BUILDDIR)/$${docsrc}-spellchecked" \
				|| FAILED="$$FAILED $(SPELLCHECK_SRCDIR)/$$docsrc"; \
			done ; \
		fi ; \
		if [ x"$${SPELLCHECK_AUTO_TGT}" != x ] ; then \
			$(MAKE) $(AM_MAKEFLAGS) -k -s -f "$(abs_top_builddir)/docs/Makefile" SPELLCHECK_SRC="" SPELLCHECK_BUILDDIR="$(SPELLCHECK_BUILDDIR)" SPELLCHECK_SRCDIR="$(SPELLCHECK_SRCDIR)" VPATH="$(SPELLCHECK_SRCDIR):$(SPELLCHECK_BUILDDIR):$(VPATH)" $${SPELLCHECK_AUTO_TGT} ; \
			FAILED_AUTO_TGT="`for docsrc in $(SPELLCHECK_SRC); do if [ -f \"$(SPELLCHECK_BUILDDIR)/$${docsrc}-spellchecked-auto.failed\" ] ; then printf '%s ' \"$(SPELLCHECK_SRCDIR)/$${docsrc}\" ; rm -f \"$(SPELLCHECK_BUILDDIR)/$${docsrc}-spellchecked-auto.failed\" ; fi ; done`" ; \
			if test -n "$$FAILED" ; then FAILED="$$FAILED $$FAILED_AUTO_TGT" ; else FAILED="$$FAILED_AUTO_TGT" ; fi ; \
		fi ; \
		if [ x"$${SPELLCHECK_NOEXT_DOCS}" != x ] ; then \
			for docsrc in $${SPELLCHECK_NOEXT_DOCS} ; do \
				$(MAKE) $(AM_MAKEFLAGS) -k -s -f "$(abs_top_builddir)/docs/Makefile" SPELLCHECK_SRC="" SPELLCHECK_SRC_ONE="$${docsrc}" SPELLCHECK_BUILDDIR="$(SPELLCHECK_BUILDDIR)" SPELLCHECK_SRCDIR="$(SPELLCHECK_SRCDIR)" VPATH="$(SPELLCHECK_SRCDIR):$(SPELLCHECK_BUILDDIR):$(VPATH)" "$(SPELLCHECK_BUILDDIR)/$${docsrc}-spellchecked" \
				|| FAILED="$$FAILED $(SPELLCHECK_SRCDIR)/$$docsrc"; \
			done ; \
		fi ; \
	 fi ; \
	 if test -n "$$FAILED" ; then \
		echo "=====================================================================" ; \
		echo "FAILED automatic spellcheck for the following sources (relative to `pwd`) using custom dictionary file '$(NUT_SPELL_DICT)': $$FAILED" ; \
		echo "=====================================================================" ; \
		echo "Please 'cd $(abs_top_builddir) && make spellcheck-interactive'"; \
		echo "to either fix document sources or update the dictionary of accepted"; \
		echo "words and spellings listed in the '$(NUT_SPELL_DICT)' file there."; \
		echo "Either way, please follow up by posting a pull request or a patch"; \
		echo "to integrate your fixes into the common NUT codebase."; \
		echo "=====================================================================" ; \
		test x"$(SPELLCHECK_ERROR_FATAL)" = xno || exit 1; \
		echo "NOTE: SPELLCHECK_ERROR_FATAL == no so this make does not break the build!"; \
		echo "=====================================================================" ; \
	 fi >&2 ; exit 0

# Interactively spell check all documentation source files below (so a human
# can edit the documentation errors and/or add words to custom dictionary).
# Note that here we do not restrain reported issues, so this might catch more
# than the automated test above.
# Sorting the custom dictionary is delegated to the ".sorted" file rule below.
spellcheck-sortdict: $(abs_builddir)/$(NUT_SPELL_DICT).sorted

# Note that the source file may be not overwritable (distcheck, cdrom, ...),
# so we'd ignore that failure. But the practical use-case is a developer's
# in-tree workspace, so we want the working copy of the dictionary fixed up
# for easy `git diff`ing if possible.
# Note also that "$(<F)" is not POSIX portable, so we spell out the name var :(
# Re-sort the aspell personal dictionary: keep the first ("personal_ws...")
# header line with its word-count field refreshed, then emit the unique
# sorted word list. A pre-sorting backup is kept for timestamp restoration
# by the interactive recipe; copies go back to srcdir (best-effort) when
# building out-of-tree so `git diff` shows the sorted result.
# Fixes vs. earlier revision: with an empty word list, `echo "" | wc -l`
# reported a bogus count of 1 and a stray blank line was emitted -- now the
# count is 0 and nothing follows the header; also strip blanks that some
# `wc -l` implementations pad the count with, so they cannot leak into the
# header line.
$(abs_builddir)/$(NUT_SPELL_DICT).sorted: $(abs_srcdir)/$(NUT_SPELL_DICT)
	@cp -pf $(abs_srcdir)/$(NUT_SPELL_DICT) $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-sorting
	@LANG=$(ASPELL_ENV_LANG); LC_ALL=$(ASPELL_ENV_LANG); export LANG; export LC_ALL; ( \
	    WORDLIST="`tail -n +2 < '$(?)' | sort | uniq`"; \
	    if [ -n "$$WORDLIST" ] ; then \
	        WORDCOUNT="`echo \"$$WORDLIST\" | wc -l | tr -d '[:blank:]'`"; \
	    else WORDCOUNT=0 ; fi; \
	    head -1 < '$?' | while read P L C E ; do echo "$$P $$L $$WORDCOUNT $$E"; break; done ; \
	    if [ -n "$$WORDLIST" ] ; then echo "$$WORDLIST" ; fi; \
	 ) > '$@'
	@cp -f '$@' "$(abs_builddir)/$(NUT_SPELL_DICT)"
	@if [ "$(abs_builddir)" != "$(abs_srcdir)" ] ; then \
	    cp -f '$@' '$?' || true ; \
	    cp -f "$(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-sorting" "$(abs_srcdir)/" || true ; \
	 fi
# Interactive spellcheck: back up the dictionary, run each source through
# the interactive checker via per-file sub-makes (SPELLCHECK_INTERACTIVE
# mode of the rule above), sort the dictionary afterwards, refresh the
# "-spellchecked" stamps, and finally advise the user when the custom
# dictionary appears to have been modified.
spellcheck-interactive: 
	@rm -f $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-interactive || true
	@cp -pf $(abs_srcdir)/$(NUT_SPELL_DICT) $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-interactive
	+@FAILED="" ; for docsrc in $(SPELLCHECK_SRC); do \
		if test "$(SPELLCHECK_ENV_DEBUG)" != no ; then \
			echo "ASPELL (INTERACTIVE) MAKEFILE DEBUG: Will see from `pwd` if '$(SPELLCHECK_SRCDIR)/$${docsrc}-spellchecked' is up to date" >&2; \
		else true ; fi ; \
		$(MAKE) $(AM_MAKEFLAGS) -s -f "$(abs_top_builddir)/docs/Makefile" SPELLCHECK_INTERACTIVE="true" SPELLCHECK_SRC="" SPELLCHECK_SRC_ONE="$${docsrc}" SPELLCHECK_BUILDDIR="$(SPELLCHECK_BUILDDIR)" SPELLCHECK_SRCDIR="$(SPELLCHECK_SRCDIR)" VPATH="$(SPELLCHECK_SRCDIR):$(SPELLCHECK_BUILDDIR):$(VPATH)" "$(SPELLCHECK_BUILDDIR)/$${docsrc}-spellchecked" \
		|| FAILED="$$FAILED $(SPELLCHECK_SRCDIR)/$$docsrc"; \
	 done ; \
	 if test -n "$$FAILED" ; then \
		echo "FAILED interactive spellcheck for the following sources (relative to `pwd`) using custom dictionary file '$(NUT_SPELL_DICT)': $$FAILED" >&2 ; \
		exit 1; \
	 fi ; \
	 $(MAKE) $(AM_MAKEFLAGS) spellcheck-sortdict || exit ; \
	 for docsrc in $(SPELLCHECK_SRC); do \
		if test -f "$(SPELLCHECK_BUILDDIR)/$${docsrc}-spellchecked" ; then \
			touch "$(SPELLCHECK_BUILDDIR)/$${docsrc}-spellchecked" ; \
		fi ; \
	 done ; \
	 if [ x"$(SPELLCHECK_QUICK)" = xtrue ] ; then \
		touch -r $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-interactive $(abs_srcdir)/$(NUT_SPELL_DICT) ; \
	 fi
	@DO_REPORT=false ; \
	 if [ "$(SPELLCHECK_REPORT_MAYBE_UPDATED_DICT)" != no ] ; then \
		DO_REPORT=true ; \
	 else if [ -s $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-interactive ] && [ -s $(abs_srcdir)/$(NUT_SPELL_DICT) ] ; then \
	      if diff $(abs_srcdir)/$(NUT_SPELL_DICT) $(abs_builddir)/$(NUT_SPELL_DICT).bak-pre-interactive >/dev/null ; then \
		true ; else DO_REPORT=true ; \
	      fi; fi; \
	 fi; \
	 if $${DO_REPORT}; then \
		echo "------------------------------------------------------------------------"; \
		echo "Custom dictionary file $(NUT_SPELL_DICT) may have been updated now."; \
		echo "Use e.g. 'git add -p docs/$(NUT_SPELL_DICT) && git checkout -- docs/$(NUT_SPELL_DICT) && make spellcheck-sortdict && git add -p docs/$(NUT_SPELL_DICT)'"; \
		echo "to review changes (please DO NOT REMOVE LINES that aspell chose to drop,"; \
		echo "because other systems might not know these words in their system dictionaries)"; \
		echo "------------------------------------------------------------------------" ; \
	 fi

# This rule would probably just fail; normally with no ASPELL there are no callers for it
#*/*-spellchecked *-spellchecked: Makefile.am $(abs_srcdir)/$(NUT_SPELL_DICT)
#	@echo "  SKIP-ASPELL	$@ : Documentation spell check not available since 'aspell' was not found (or missing its English dictionary)." >&2
#spellcheck:
#	@echo "Documentation spell check not available since 'aspell' was not found (or missing its English dictionary)."
#spellcheck-interactive:
#	@echo "Documentation spell check not available since 'aspell' was not found (or missing its English dictionary)."

# Note that NUT_SPELL_DICT may be an include snippet without the header line.
# To exclude files like `docs/nut.dict` or `nut-website.dict(.addons)` from
# the usage lookups, we assume that a `*.dict*` pattern fits any used names.
# Entries prefixed with '+++' mean something used in NUT sources in context
# that aspell is likely to treat as a word (standalone or surrounded by certain
# chars); otherwise in entries prefixed with '---' we print hit counts and
# contents (if any, ending with '^^^') for the character pattern across the
# whole Git-tracked codebase (case-insensitively for good measure).
# Note this can take 5-10 minutes!
# TOTHINK: Constrain to (caller-specified or default) SPELLCHECK_SRC?
# Generate a usage report for every word in the custom aspell dictionary
# ($? = $(NUT_SPELL_DICT)), to help spot obsolete entries:
#  * '+++ WORD' -- the word (bare, or bracketed by common separator chars)
#    is found verbatim in Git-tracked sources, excluding any *.dict* files;
#  * '--- WORD (C case-sensitive/I case-insensitive)' -- not found as a
#    word; the case-sensitive (or, failing that, case-insensitive)
#    substring hits are then printed, terminated by a '^^^' marker.
# The report is staged in "$@.tmp.$$" and only renamed into place when
# the pipeline succeeds and produced non-empty output, so an interrupted
# run does not leave behind a fresh-but-truncated "$@".
# The second recipe command summarizes: it lists '---' entries with zero
# case-sensitive hits, or prints SUCCESS if there are none.
# NOTE: each word spawns several `git grep` calls over the whole repo --
# this is the 5-10 minute cost warned about in the comment above.
$(NUT_SPELL_DICT).usage-report: $(NUT_SPELL_DICT)
	@echo "Preparing $@"; \
	 LANG=C; LC_ALL=C; export LANG; export LC_ALL; \
	 $(EGREP) -v '^personal_ws' < $? \
	 | while read W ; do ( \
		cd "$(abs_top_srcdir)" || exit ; \
		git grep -q "$$W" -- ':!*.dict*' || git grep -qE "[0-9_,./\ -]$$W[0-9_,./\ -]" -- ':!*.dict*' ) \
		&& echo "+++ $$W" \
		|| ( \
			HITS_CS="`git grep \"$$W\" -- ':!*.dict*'`" || true; \
			HITS_CI="`git grep -i \"$$W\" -- ':!*.dict*'`" || true; \
			if [ -n "$$HITS_CS" ] ; then HITC_CS="`echo \"$$HITS_CS\" | wc -l`" ; else HITC_CS=0; fi; \
			if [ -n "$$HITS_CI" ] ; then HITC_CI="`echo \"$$HITS_CI\" | wc -l`" ; else HITC_CI=0; fi; \
			printf '%s (%d case-sensitive/%d case-insensitive)\n' "--- $$W" "$$HITC_CS" "$$HITC_CI"; \
			if [ "$$HITC_CS" != 0 ] ; then echo "$$HITS_CS" ; echo "^^^"; else \
				if [ "$$HITC_CI" != 0 ] ; then echo "$$HITS_CI" ; echo "^^^"; fi; \
			fi; \
		); \
	 done > "$@.tmp.$$$$" \
	 && test -s "$@.tmp.$$$$" \
	 && mv -f "$@.tmp.$$$$" '$@' \
	 || { RES="$$?" ; rm -f "$@.tmp.$$$$" ; exit $$RES ; }
	@echo "Reporting words from $? possibly not used in current inspected code base revision under $(abs_top_srcdir)" >&2 ; \
	 $(EGREP) '^--- ' < '$@' | $(GREP) '(0 ' || echo "SUCCESS: None found"

# NOTE: Some "make" implementations prefix a relative or absent path to
# the filenames in PREP_SRC, others (e.g. Sun make) prepend the absolute
# path to locate the sources, so we end up with bogus trees under docs/.
# Code below tries to detect and truncate this mess, including possible
# source texts located in/under parent dirs.
# We also handle man page links (section-aware) for platforms where they
# differ from common defaults.
# NOTE: MKDIR_P may be defined via expanded $(top_builddir)/install-sh
# so should be run from $(abs_builddir) to be safe, as we jump around
# the build workspace
# Convenience alias: callers say "make prep-src-docs"; the real work and
# up-to-date tracking live in the stamp file below.
prep-src-docs: $(abs_top_builddir)/docs/.prep-src-docs
# Rewrite each PREP_SRC document into a sibling "*-prepped" copy with the
# NUT website home-page URL pinned, and (unless the configured man-page
# sections match the common defaults "35817") linkman:/linkmanext: section
# numbers rewritten to the platform's sections. Two branches:
#  * in-tree build (srcdir == builddir): rewrite sources in place next to
#    the originals;
#  * VPATH build: first symlink each source from srcdir into the builddir
#    tree (creating directories via MKDIR_P from abs_builddir, since
#    MKDIR_P may expand to a relative install-sh path), then rewrite.
#    A crude lock ("$@.working" plus up-to-30s wait loop) and a freshness
#    probe ("$@.$$" compared against "$@" via find -newer) let concurrent
#    or repeated invocations bail out early instead of re-linking.
# NOTE(review): the `cd` calls for absolute/top-level PREP_SRC entries
# persist for subsequent loop iterations in the same shell -- apparently
# intentional (linkroot/linksrcroot are switched in lockstep), but keep
# that in mind when editing. The stamp is only touched when something was
# processed (COUNT > 0) or the stamp did not exist yet.
$(abs_top_builddir)/docs/.prep-src-docs: $(PREP_SRC) Makefile
	@cd "$(@D)" || exit ; \
	 linkroot="$(abs_builddir)" ; \
	 MAN_SECTIONS_DEFAULT=false ; \
	 if [ x"$(MAN_SECTION_API)$(MAN_SECTION_CFG)$(MAN_SECTION_CMD_SYS)$(MAN_SECTION_CMD_USR)$(MAN_SECTION_MISC)" = x35817 ] ; then \
	    MAN_SECTIONS_DEFAULT=true ; \
	 fi ; \
	 if test x"$(abs_srcdir)" = x"$(abs_builddir)" ; then \
	    COUNT=0; \
	    for F in $(PREP_SRC) ; do \
	        case "$$F" in \
	            /*) F="`echo \"$$F\" | sed 's#^$(abs_top_srcdir)/*#./#'`"; \
	                if test x"$${linkroot}" = x"$(abs_builddir)" ; then \
	                    linkroot="$(abs_top_builddir)" ; \
	                    cd "$(abs_top_builddir)" ; \
	                fi ;; \
	        esac ; \
	        case "$$F" in \
	        *.xml|*.xsl|*.css|*.jpg|*.png|*.pdn|*.svg) ;; \
	        *.txt|*.adoc|*.in|*.sample|*.conf|*) \
	        if $$MAN_SECTIONS_DEFAULT ; then \
	            sed \
	                -e 's,\(home page:\) https://www.networkupstools.org/*$$,\1 $(NUT_WEBSITE_BASE)/,' ; \
	        else \
	            sed \
	                -e 's,\(home page:\) https://www.networkupstools.org/*$$,\1 $(NUT_WEBSITE_BASE)/,' \
	                -e 's,\(linkman:[^ []*\[\)3\],\1$(MAN_SECTION_API)],g' \
	                -e 's,\(linkman:[^ []*\[\)5\],\1$(MAN_SECTION_CFG)],g' \
	                -e 's,\(linkman:[^ []*\[\)8\],\1$(MAN_SECTION_CMD_SYS)],g' \
	                -e 's,\(linkman:[^ []*\[\)1\],\1$(MAN_SECTION_CMD_USR)],g' \
	                -e 's,\(linkman:[^ []*\[\)7\],\1$(MAN_SECTION_MISC)],g' | \
	            if [ x"$(ASCIIDOC_LINKMANEXT_SECTION_REWRITE)" = xyes ] ; then \
	                sed \
	                -e 's,\(linkmanext2?:[^ []*\[\)3\],\1$(MAN_SECTION_API)],g' \
	                -e 's,\(linkmanext2?:[^ []*\[\)5\],\1$(MAN_SECTION_CFG)],g' \
	                -e 's,\(linkmanext2?:[^ []*\[\)8\],\1$(MAN_SECTION_CMD_SYS)],g' \
	                -e 's,\(linkmanext2?:[^ []*\[\)1\],\1$(MAN_SECTION_CMD_USR)],g' \
	                -e 's,\(linkmanext2?:[^ []*\[\)7\],\1$(MAN_SECTION_MISC)],g' ; \
	            else cat ; fi; \
	        fi < "$${F}" > "$${F}-prepped" || exit ;; \
	        esac; \
	        COUNT="`expr $$COUNT + 1`" ; \
	    done ; \
	    if test ! -f '$@' ; then touch '$@' ; fi ; \
	 else \
	    COUNT=30 ; \
	    touch "$@.$$$$" ; \
	    while test -f "$@.working" -a "$$COUNT" -gt 0 ; do sleep 1; COUNT="`expr $$COUNT - 1`"; done ; \
	    touch "$@.working" ; \
	    if test -n "`find '$@' -newer \"$@.$$$$\" 2>/dev/null`" ; then \
	        rm -f "$@.$$$$" "$@.working" ; \
	        exit 0; \
	    fi ; \
	    rm -f "$@.$$$$" ; \
	    COUNT=0; \
	    linksrcroot="$(abs_srcdir)" ; \
	    for F in `echo $(PREP_SRC) | tr ' ' '\n' | sort -n | uniq` ; do \
	        case "$$F" in \
	            /*) F="`echo \"$$F\" | sed 's#^$(abs_top_srcdir)/*#./#'`"; \
	                if test x"$${linkroot}" = x"$(abs_builddir)" ; then \
	                    linkroot="$(abs_top_builddir)" ; \
	                    linksrcroot="$(abs_top_srcdir)" ; \
	                    cd "$(abs_top_builddir)" ; \
	                fi ;; \
	            "$(srcdir)"/*) F="`echo \"$$F\" | sed 's#^$(srcdir)/*#./#'`" ;; \
	            */*) ;; \
	            *) \
	                linkroot="$(abs_builddir)" ; \
	                linksrcroot="$(abs_srcdir)" ; \
	                cd "$(abs_top_builddir)" ;; \
	        esac ; \
	        D="`dirname \"$$F\"`" ; \
	        (cd '$(abs_builddir)' && $(MKDIR_P) "$${linkroot}/$$D") || { rm -f "$@.working" ; exit 1 ; } ; \
	        if test ! -s "$${linkroot}/$$F" && test -s "$${linksrcroot}/$$F" ; then \
	            echo "  LN	'$${linksrcroot}/$$F' => '$${linkroot}/$$F' (PWD = '`pwd`')" ; \
	            ln -fs "$${linksrcroot}/$$F" "$${linkroot}/$$F" || { rm -f "$@.working" ; exit 1 ; } ; \
	            COUNT="`expr $$COUNT + 1`" ; \
	        fi ; \
	        case "$$F" in \
	            *.txt|*.adoc) IS_TEXT=true ;; \
	            *.*) IS_TEXT=false ;; \
	            *) IS_TEXT=true ;; \
	        esac; \
	        if $$IS_TEXT ; then \
	            $(GREP) -w linkman "$${linkroot}/$${F}" > /dev/null || IS_TEXT=false ; \
	        fi ; \
	        case "$$F" in \
	        *.xml|*.xsl|*.css|*.jpg|*.png|*.pdn|*.svg) ;; \
	        *.txt|*.adoc|*.in|*.sample|*.conf|*) \
	        if $$MAN_SECTIONS_DEFAULT || test $$IS_TEXT = false ; then \
	            sed \
	                -e 's,\(home page:\) https://www.networkupstools.org/*$$,\1 $(NUT_WEBSITE_BASE)/,' ; \
	        else \
	            sed \
	                -e 's,\(home page:\) https://www.networkupstools.org/*$$,\1 $(NUT_WEBSITE_BASE)/,' \
	                -e 's,\(linkman:[^ []*\[\)3\],\1$(MAN_SECTION_API)],g' \
	                -e 's,\(linkman:[^ []*\[\)5\],\1$(MAN_SECTION_CFG)],g' \
	                -e 's,\(linkman:[^ []*\[\)8\],\1$(MAN_SECTION_CMD_SYS)],g' \
	                -e 's,\(linkman:[^ []*\[\)1\],\1$(MAN_SECTION_CMD_USR)],g' \
	                -e 's,\(linkman:[^ []*\[\)7\],\1$(MAN_SECTION_MISC)],g' | \
	            if [ x"$(ASCIIDOC_LINKMANEXT_SECTION_REWRITE)" = xyes ] ; then \
	                sed \
	                -e 's,\(linkmanext2?:[^ []*\[\)3\],\1$(MAN_SECTION_API)],g' \
	                -e 's,\(linkmanext2?:[^ []*\[\)5\],\1$(MAN_SECTION_CFG)],g' \
	                -e 's,\(linkmanext2?:[^ []*\[\)8\],\1$(MAN_SECTION_CMD_SYS)],g' \
	                -e 's,\(linkmanext2?:[^ []*\[\)1\],\1$(MAN_SECTION_CMD_USR)],g' \
	                -e 's,\(linkmanext2?:[^ []*\[\)7\],\1$(MAN_SECTION_MISC)],g' ; \
	            else cat ; fi; \
	        fi < "$${linkroot}/$${F}" > "$${linkroot}/$${F}-prepped" \
	        || { rm -f "$@.working" ; exit 1 ; } ;; \
	        esac ; \
	        COUNT="`expr $$COUNT + 1`" ; \
	    done ; \
	 fi ; \
	 if test "$$COUNT" -gt 0 -o ! -f '$@' ; then touch '$@' ; fi
	@rm -f "$@.working"

# Dirs to clean, etc.
# Remove generated doc artefacts and every "*-prepped" rewrite; in VPATH
# (out-of-tree) builds also drop the symlinks that the prep-src-docs rule
# planted into the build tree (real sources in srcdir are never removed --
# only symlinks pass the test -L / test -h guard). Finally discard the
# .prep-src-docs stamp plus any leftover lock/probe files ("*" glob).
# NOTE(review): the `cd "$(abs_top_builddir)"` for absolute PREP_SRC
# entries persists for subsequent loop iterations, mirroring the path
# handling in the prep-src-docs recipe above.
clean-local:
	$(AM_V_at)rm -rf *.chunked *.bak tmp
	$(AM_V_at)for F in $(PREP_SRC) ; do \
	        case "$$F" in \
	            /*) F="`echo \"$$F\" | sed 's#^$(abs_top_srcdir)/*#./#'`"; cd "$(abs_top_builddir)" ;; \
	        esac ; \
	        if test x"$(abs_srcdir)" != x"$(abs_builddir)" ; then \
	            if test -L "$$F" || test -h "$$F" ; then \
	                rm -f "$$F" ; \
	            fi ; \
	        fi ; \
	        rm -f "$${F}-prepped" ; \
	    done ; \
	    rm -f "$(abs_top_builddir)/docs/.prep-src-docs"*

# Declare the per-format documentation entry points as phony so a stray
# file named e.g. "pdf" or "man" cannot shadow the corresponding rule.
.PHONY: html html-chunked html-single pdf man

# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
