#
# $FreeBSD$
#
# This include file <doc.docbook.mk> handles building and installing of
# DocBook documentation in the FreeBSD Documentation Project.
#
# Documentation using DOCFORMAT=docbook is expected to be marked up
# according to the DocBook DTD
#

# ------------------------------------------------------------------------
#
# Document-specific variables
#
#	DOC		This should be set to the name of the DocBook
#			marked-up file, without the .xml suffix.
#
#			It also determines the name of the output files -
#			${DOC}.html.
#
#	SRCS		The names of all the files that are needed to
#			build this document - This is useful if any of
#			them need to be generated.  Changing any file in
#			SRCS causes the documents to be rebuilt.
#

# ------------------------------------------------------------------------
#
# Variables used by both users and documents:
#
#	EXTRA_CATALOGS	Additional catalog files that should be used by
#			any XML processing applications.
#
#	CSS_SHEET	Full path to a CSS stylesheet suitable for DocBook.
#			Default is ${DOC_PREFIX}/share/misc/docbook.css
#
# Package building options:
#
#	BZIP2_PACKAGE	Use the bzip2(1) utility to compress the package
#			tarball instead of gzip(1).  This causes packages
#			to have the suffix .tbz instead of .tgz.  Using
#			bzip2(1) provides better compression, but requires
#			more time and CPU resources than gzip(1).
|
# Either dblatex or fop
RENDERENGINE?=	fop

#
# Documents should use the += format to access these.
#

MASTERDOC?=	${.CURDIR}/${DOC}.xml

DB5RNC?=	${DOC_PREFIX}/share/xml/freebsd50.rnc

XSLPROF?=	http://docbook.sourceforge.net/release/xsl-ns/current/profiling/profile.xsl
XSLXHTML?=	http://www.FreeBSD.org/XML/share/xml/freebsd-xhtml.xsl
XSLXHTMLCHUNK?=	http://www.FreeBSD.org/XML/share/xml/freebsd-xhtml-chunk.xsl
XSLEPUB?=	http://www.FreeBSD.org/XML/share/xml/freebsd-epub.xsl
XSLFO?=		http://www.FreeBSD.org/XML/share/xml/freebsd-fo.xsl

XSLSCH?=	/usr/local/share/xsl/iso-schematron/xslt1/iso_schematron_skeleton_for_xslt1.xsl

IMAGES_LIB?=

SCHEMATRONS?=	${DOC_PREFIX}/share/xml/freebsd.sch
XSLTPROCOPTS?=	--nonet

IMGDIR?=	${IMAGES_EN_DIR}/${DOC}s/${.CURDIR:T}
CALLOUTDIR=	${.CURDIR}/imagelib/callouts
XSLDBLATEX=	${DOC_PREFIX}/share/xml/freebsd-dblatex.xsl
DBLATEXOPTS?=	-I ${IMGDIR} -p ${XSLDBLATEX} -b xetex -d
FOPJAVAOPTS?=	-Xss1024k -Xmx1431552k
FOPOPTS?=	-c ${DOC_PREFIX}/share/misc/fop.xconf

KNOWN_FORMATS=	html html.tar html-split html-split.tar \
		epub txt rtf ps pdf tex dvi tar pdb

CSS_SHEET?=	${DOC_PREFIX}/share/misc/docbook.css

#
# Tell bsd.subdir.mk not to process the SUBDIR directive.  That is not
# necessary since doc.docbook.mk does it too.
#
NO_SUBDIR=	YES

#
# Determine latest revision
#
# This needs to contain all of:
#  --stringparam latestrevision.timestamp "timestamp"
#  --stringparam latestrevision.committer "committer"
#  --stringparam latestrevision.number "revision id"

# If using git, use git log.  The revision won't work with the generated links,
# because it is a hash, and git log doesn't know about git svn find-rev.
.if exists(${DOC_PREFIX}/.git) && exists(${GIT})
LATESTREVISION!=cd ${.CURDIR} && ${GIT} log -1 --pretty=format:'\
	--stringparam latestrevision.timestamp "%ci" \
	--stringparam latestrevision.committer "%cn" \
	--stringparam latestrevision.number "%h"' ${SRCS}
.else
# if using Subversion, get information from metadata
# rather than embedded version strings
.if exists(${DOC_PREFIX}/.svn) && exists(${SVN})
LATESTREVISION!=cd ${.CURDIR} && ${SVN} info ${SRCS} 2>/dev/null | \
	${AWK} 'BEGIN { genfmt="--stringparam latestrevision."; \
		timefmt=genfmt"timestamp \"%s %s\""; \
		comtfmt=genfmt"committer \"%s\""; \
		revnfmt=genfmt"number \"%u\""; \
		fmt=timefmt" "comtfmt"\t\t"revnfmt"\n"; } \
	/^Last Changed Author:/ { committer=$$4 } \
	/^Last Changed Rev:/ { number=$$4 } \
	/^Last Changed Date:/ { date=$$4; time=$$5; \
		printf(fmt, date, time, committer, number) }' \
	| ${SORT} | ${TAIL} -n1
.endif
.endif

# if neither Subversion nor Git revision numbers are available, use
# the revision date from the timestamp of the most recent file and
# set the revision number to "filedate"
.if empty(LATESTREVISION)
LATESTREVISION!=${STAT} 2>/dev/null -t '%F %T %Z' -f '\
	--stringparam latestrevision.timestamp "%Sc" \
	--stringparam latestrevision.committer "%Su" \
	--stringparam latestrevision.number "filedate"' \
	${SRCS} | ${SORT} -r | ${TAIL} -n1
.endif

XSLTPROCOPTS+=	${LATESTREVISION}
|
# ------------------------------------------------------------------------
#
# Look at ${FORMATS} and work out which documents need to be generated.
# It is assumed that the HTML transformation will always create a file
# called index.html, and that for every other transformation the name
# of the generated file is ${DOC}.format.
#
# ${_docs} will be set to a list of all documents that must be made
# up to date.
#
# ${CLEANFILES} is a list of files that should be removed by the "clean"
# target.  ${COMPRESS_EXT:S/^/${DOC}.${_cf}.&/} takes the COMPRESS_EXT
# var, and prepends the filename to each listed extension, building a
# second list of files with the compressed extensions added.
#

# Note: ".for _curformat in ${KNOWN_FORMATS}" is used several times in
# this file.  I know they could have been rolled together into one, much
# larger, loop.  However, that would have made things more complicated
# for a newcomer to this file to unravel and understand, and a syntax
# error in the loop would have affected the entire
# build/compress/install process, instead of just one of them, making it
# more difficult to debug.
#

# Note: It is the aim of this file that *all* the targets be available,
# not just those appropriate to the current ${FORMATS} and
# ${INSTALL_COMPRESSED} values.
#
# For example, if FORMATS=html and INSTALL_COMPRESSED=gz you could still
# type
#
#     make book.rtf.bz2
#
# and it will do the right thing.  Or
#
#     make install-rtf.bz2
#
# for that matter.  But don't expect "make clean" to work if the FORMATS
# and INSTALL_COMPRESSED variables are wrong.
#
# Local copy of the CSS stylesheet lives in the object directory when
# building out of tree.
.if ${.OBJDIR} != ${.CURDIR}
LOCAL_CSS_SHEET=	${.OBJDIR}/${CSS_SHEET:T}
.else
LOCAL_CSS_SHEET=	${CSS_SHEET:T}
.endif

CLEANFILES+=	${DOC}.parsed.xml ${DOC}.parsed.print.xml

# When both html and html-split are requested, turn on the format
# navigation links in the generated pages.
.if ${FORMATS:R:Mhtml-split} && ${FORMATS:R:Mhtml}
XSLTPROCOPTS+=	--param docformatnav "'1'"
.endif

# Accumulate, per requested format, the output files (_docs) and the
# files/dirs to remove on "make clean" (CLEANFILES/CLEANDIRS).
.for _curformat in ${FORMATS}
_cf=${_curformat}

.if ${_cf} == "html-split"
_docs+= index.html HTML.manifest ln*.html
CLEANFILES+= $$([ -f HTML.manifest ] && ${XARGS} < HTML.manifest) \
	HTML.manifest ln*.html
CLEANFILES+= PLIST.${_curformat}

.else
_docs+= ${DOC}.${_curformat}
CLEANFILES+= ${DOC}.${_curformat}
CLEANFILES+= PLIST.${_curformat}

.if ${_cf} == "html-split.tar"
CLEANFILES+= $$([ -f HTML.manifest ] && ${XARGS} < HTML.manifest) \
	HTML.manifest ln*.html

.elif ${_cf} == "epub"
CLEANFILES+= ${DOC}.epub mimetype
CLEANDIRS+= META-INF OEBPS

.elif ${_cf} == "html.tar"
CLEANFILES+= ${DOC}.html

.elif ${_cf} == "txt"
CLEANFILES+= ${DOC}.html-text

.elif ${_cf} == "dvi"
CLEANFILES+= ${DOC}.aux ${DOC}.log ${DOC}.out ${DOC}.tex ${DOC}.tex-tmp

.elif ${_cf} == "rtf"
CLEANFILES+= ${DOC}.rtf-nopng

.elif ${_cf} == "tex"
CLEANFILES+= ${DOC}.aux ${DOC}.log

.elif ${_cf} == "ps"
CLEANFILES+= ${DOC}.aux ${DOC}.dvi ${DOC}.log ${DOC}.out ${DOC}.tex-ps \
	${DOC}.tex ${DOC}.tex-tmp ${DOC}.fo
.for _curimage in ${LOCAL_IMAGES_EPS:M*share*}
CLEANFILES+= ${_curimage:T} ${_curimage:H:T}/${_curimage:T}
.endfor

.elif ${_cf} == "pdf"
CLEANFILES+= ${DOC}.aux ${DOC}.dvi ${DOC}.log ${DOC}.out ${DOC}.tex-pdf ${DOC}.tex-pdf-tmp \
	${DOC}.tex ${DOC}.fo
.if ${RENDERENGINE} == "fop"
XSLTPROCOPTS+=	--param img.src.path "'${IMGDIR}/'"
XSLTPROCOPTS+=	--param callout.graphics.path "'${CALLOUTDIR}/'"
XSLTPROCOPTS+=	--maxdepth 6000
.endif
.for _curimage in ${LOCAL_IMAGES_EPS:M*share*}
CLEANFILES+= ${_curimage:T} ${_curimage:H:T}/${_curimage:T}
.endfor

.elif ${_cf} == "pdb"
_docs+= ${.CURDIR:T}.pdb
CLEANFILES+= ${.CURDIR:T}.pdb

.endif
.endif

# A locally generated stylesheet must be cleaned for the HTML-ish formats.
.if (${LOCAL_CSS_SHEET} != ${CSS_SHEET}) && \
    (${_cf} == "html-split" || ${_cf} == "html-split.tar" || \
     ${_cf} == "html" || ${_cf} == "html.tar" || ${_cf} == "txt")
CLEANFILES+= ${LOCAL_CSS_SHEET}
.endif

# NOTE(review): placeholder conditional with an intentionally empty body;
# kept for historical reasons.  Beware that bmake gives && higher
# precedence than ||, so the !defined() term alone satisfies this test.
.if !defined(WITH_INLINE_LEGALNOTICE) || empty(WITH_INLINE_LEGALNOTICE) && \
    (${_cf} == "html-split" || ${_cf} == "html-split.tar" || \
     ${_cf} == "html" || ${_cf} == "html.tar" || ${_cf} == "txt")
.endif

.endfor # _curformat in ${FORMATS} #
|
#
# Build a list of install-${format}.${compress_format} targets to be run
# by "make install".  Also, add ${DOC}.${format}.${compress_format} to
# ${_docs} and ${CLEANFILES} so they get built/cleaned by "all" and
# "clean".
#

.if defined(INSTALL_COMPRESSED) && !empty(INSTALL_COMPRESSED)
.for _curformat in ${FORMATS}
_cf=${_curformat}
.for _curcomp in ${INSTALL_COMPRESSED}

.if ${_cf} != "html-split" && ${_cf} != "html" && ${_cf} != "epub"
_curinst+= install-${_curformat}.${_curcomp}
_docs+= ${DOC}.${_curformat}.${_curcomp}
CLEANFILES+= ${DOC}.${_curformat}.${_curcomp}

# pdb also gets a hard link named after the directory; track it too.
.if ${_cf} == "pdb"
_docs+= ${.CURDIR:T}.${_curformat}.${_curcomp}
CLEANFILES+= ${.CURDIR:T}.${_curformat}.${_curcomp}

.endif
.endif
.endfor
.endfor
.endif

.MAIN: all

all: ${SRCS} ${_docs}

# Compile each Schematron schema into an XSLT stylesheet used later by
# the "lint"/"validate" targets.
.if defined(SCHEMATRONS)
.for sch in ${SCHEMATRONS}
schxslts+= ${sch:T}.xsl
CLEANFILES+= ${sch:T}.xsl

${sch:T}.xsl: ${sch}
	${XSLTPROC} --param allow-foreign "true" ${XSLSCH} ${.ALLSRC} > ${.TARGET}
.endfor
.endif
|
# Parsed XML -------------------------------------------------------

# Normalize the master document into a single standalone file: resolve
# entities, validate, drop the DTD, and pull in XIncludes.  Two copies
# are produced: ${DOC}.parsed.xml (relative @@URL_RELPREFIX@@ links for
# the web tree) and ${DOC}.parsed.print.xml (absolute links for print).
${DOC}.parsed.xml: ${SRCS} ${XML_INCLUDES}
	${XMLLINT} --nonet --noent --valid --dropdtd --xinclude ${MASTERDOC} > ${.TARGET}.tmp
.if defined(PROFILING)
	@${ECHO} "==> Profiling"
	${XSLTPROC} ${PROFILING} ${XSLPROF} ${.TARGET}.tmp > ${.TARGET}
	${RM} ${.TARGET}.tmp
.else
	${MV} ${.TARGET}.tmp ${.TARGET}
.endif
	${SED} 's|@@URL_RELPREFIX@@|http://www.FreeBSD.org|g' < ${.TARGET} > ${DOC}.parsed.print.xml
	${SED} -i '' -e 's|@@URL_RELPREFIX@@|../../../..|g' ${.TARGET}
|
# translation -------------------------------------------------------

# Master English document
MASTERDOC_EN?=	${MASTERDOC:S/${LANGCODE}/en_US.ISO8859-1/}
TRAN_DIR?=	${MASTERDOC:H}
EN_DIR?=	${TRAN_DIR:S/${LANGCODE}/en_US.ISO8859-1/}
PO_LANG?=	${LANGCODE:C/\..*$//}
PO_CHARSET?=	${LANGCODE:tl:C/^.*\.//:S/^iso/iso-/:S/utf-8/UTF-8/}
CLEANFILES+=	${DOC}.translate.xml ${PO_LANG}.mo

PO_CATALOG_FILES=	file://${EN_DIR}/catalog-cwd.xml \
		file://${EN_DIR:H:H}/share/xml/catalog.xml \
		file://${DOC_PREFIX}/share/xml/catalog.xml \
		file://${LOCALBASE}/share/xml/catalog
.if defined(EXTRA_CATALOGS)
PO_CATALOG_FILES+=	${EXTRA_CATALOGS}
.endif
PO_XMLLINT=	env XML_CATALOG_FILES="${PO_CATALOG_FILES}" ${PREFIX}/bin/xmllint

# fix settings in PO file
# IDSTR1/IDSTR2 are split so the literal "$FreeBSD$" keyword is not
# expanded by version control in this file itself.
IDSTR1=	$$Free
IDSTR2=	BSD$$
POSET_CMD=	${SED} -i '' -e '1s,^,\#${IDSTR1}${IDSTR2}\${.newline},' \
	-e 's,^\(\"Language-Team:.*\\n\"\),\1\${.newline}\"Language: ${PO_LANG}\\n\",' \
	-e 's,^\"Content-Type: text/plain; charset=.*\\n,\"Content-Type: text/plain; charset=${PO_CHARSET}\\n,'

.if ${.TARGETS:Mpo} || ${.TARGETS:Mtran} || ${.TARGETS:M${DOC}.translate.xml}

MASTER_SRCS!=	${MAKE} -C ${EN_DIR} -V SRCS

${DOC}.translate.xml:
	@if [ "${TRAN_DIR}" = "${EN_DIR}" ]; then \
		${ECHO} "build PO file in a non-English dir" ; \
		exit 1 ; \
	fi
# some SRCS files might need to be generated, make sure they exist
	@${MAKE} -C ${EN_DIR} ${MASTER_SRCS} > /dev/null
# normalize the English original into a single file
	@${PO_XMLLINT} --nonet --noent --valid --xinclude ${MASTERDOC_EN} > ${.TARGET}.tmp
# remove redundant namespace attributes
	@${PO_XMLLINT} --nsclean ${.TARGET}.tmp > ${.TARGET}
	@${RM} ${.TARGET}.tmp
	@${MAKE} -C ${EN_DIR} clean > /dev/null

po: ${PO_LANG}.po
.PHONY: po
# NOTE(review): when merging, msgattrib is run on the *old* .po, which
# overwrites the msgmerge result in ${PO_LANG}.po.new -- confirm whether
# it should operate on ${PO_LANG}.po.new instead.
${PO_LANG}.po: ${DOC}.translate.xml
	@${ITSTOOL} -o ${PO_LANG}.po.tmp ${DOC}.translate.xml
	@( if [ -f "${PO_LANG}.po" ]; then \
		echo "${PO_LANG}.po exists, merging" ; \
		${MSGMERGE} -o ${PO_LANG}.po.new ${PO_LANG}.po ${PO_LANG}.po.tmp ; \
		${MSGATTRIB} --no-obsolete -o ${PO_LANG}.po.new ${PO_LANG}.po ; \
		${MV} ${PO_LANG}.po.new ${PO_LANG}.po ; \
		${RM} ${PO_LANG}.po.tmp ${DOC}.translate.xml ; \
	else \
		${ECHO} "${PO_LANG}.po created, please check and correct the settings in the header" ; \
		${MV} ${PO_LANG}.po.tmp ${PO_LANG}.po ; \
		${POSET_CMD} ${.TARGET} ; \
	fi )

${PO_LANG}.mo: ${PO_LANG}.po
	@${MSGFMT} -o ${.TARGET} ${.ALLSRC}

tran ${DOC}.xml: ${DOC}.translate.xml ${PO_LANG}.mo
	@if [ "${TRAN_DIR}" = "${EN_DIR}" ]; then \
		${ECHO} "build translation in a non-English dir" ; \
		exit 1 ; \
	fi
	${ITSTOOL} -l ${PO_LANG} -m ${PO_LANG}.mo -o ${DOC}.xml ${DOC}.translate.xml
.endif
|
# XHTML -------------------------------------------------------------

# Chunked HTML: the chunking stylesheet writes index.html plus the
# per-chapter files listed in HTML.manifest.
index.html: ${DOC}.parsed.xml ${LOCAL_IMAGES_LIB} ${LOCAL_IMAGES_PNG} \
	${HTML_SPLIT_INDEX} ${LOCAL_CSS_SHEET} ${XML_INCLUDES}
	${XSLTPROC} ${XSLTPROCOPTS} ${XSLXHTMLCHUNK} ${DOC}.parsed.xml

# Single-file HTML.
${DOC}.html: ${DOC}.parsed.xml ${LOCAL_IMAGES_LIB} ${LOCAL_IMAGES_PNG} \
	${LOCAL_CSS_SHEET} ${XML_INCLUDES}
	${XSLTPROC} ${XSLTPROCOPTS} ${XSLXHTML} ${DOC}.parsed.xml > ${.TARGET}

# Tarball of the chunked output; shared images are appended with their
# path rewritten relative to the document directory.
${DOC}.html-split.tar: HTML.manifest ${LOCAL_IMAGES_LIB} \
	${LOCAL_IMAGES_PNG} ${LOCAL_CSS_SHEET}
	${TAR} cf ${.TARGET} $$(${XARGS} < HTML.manifest) \
		${LOCAL_IMAGES_LIB} ${IMAGES_PNG:N*share*} ${CSS_SHEET:T}
.for _curimage in ${IMAGES_PNG:M*share*}
	${TAR} rf ${.TARGET} -C ${IMAGES_EN_DIR}/${DOC}s/${.CURDIR:T} ${_curimage:S|${IMAGES_EN_DIR}/${DOC}s/${.CURDIR:T}/||}
.endfor

# Tarball of the single-file output, same image handling as above.
${DOC}.html.tar: ${DOC}.html ${LOCAL_IMAGES_LIB} \
	${LOCAL_IMAGES_PNG} ${LOCAL_CSS_SHEET}
	${TAR} cf ${.TARGET} ${DOC}.html \
		${LOCAL_IMAGES_LIB} ${IMAGES_PNG:N*share*} ${CSS_SHEET:T}
.for _curimage in ${IMAGES_PNG:M*share*}
	${TAR} rf ${.TARGET} -C ${IMAGES_EN_DIR}/${DOC}s/${.CURDIR:T} ${_curimage:S|${IMAGES_EN_DIR}/${DOC}s/${.CURDIR:T}/||}
.endfor

# EPUB -------------------------------------------------------------

# The epub stylesheet produces mimetype, META-INF and OEBPS; images are
# copied into OEBPS before the pieces are zipped into the .epub.
${DOC}.epub: ${DOC}.parsed.xml ${LOCAL_IMAGES_LIB} ${LOCAL_IMAGES_PNG} \
	${CSS_SHEET} ${XML_INCLUDES}
	${XSLTPROC} ${XSLTPROCOPTS} ${XSLEPUB} ${DOC}.parsed.xml
.if defined(LOCAL_IMAGES_LIB)
.for f in ${LOCAL_IMAGES_LIB}
	[ -d "OEBPS/${f:H}" ] || ${MKDIR} -pv "OEBPS/${f:H}"
	${CP} ${f} OEBPS/${f}
.endfor
.endif
.if defined(LOCAL_IMAGES_PNG)
.for f in ${LOCAL_IMAGES_PNG}
	[ -d "OEBPS/${f:H:T}" ] || ${MKDIR} -pv "OEBPS/${f:H:T}"
	${CP} -v ${f} OEBPS/${f:H:T}/${f:T}
.endfor
.endif
	${ZIP} ${ZIPOPTS} -r -X ${DOC}.epub mimetype OEBPS META-INF
|
# TXT --------------------------------------------------------------------

# Plain text is derived from the single-file HTML output; with
# NO_PLAINTEXT an empty placeholder is created instead.
.if !target(${DOC}.txt)
.if !defined(NO_PLAINTEXT)
${DOC}.txt: ${DOC}.html
	${HTML2TXT} ${HTML2TXTOPTS} ${.ALLSRC} > ${.TARGET}
.else
${DOC}.txt:
	${TOUCH} ${.TARGET}
.endif
.endif

# PDB --------------------------------------------------------------------

${DOC}.pdb: ${DOC}.html ${LOCAL_IMAGES_LIB} ${LOCAL_IMAGES_PNG}
	${HTML2PDB} ${HTML2PDBOPTS} ${DOC}.html ${.TARGET}

# Hard link named after the directory, for installation convenience.
${.CURDIR:T}.pdb: ${DOC}.pdb
	${LN} -f ${.ALLSRC} ${.TARGET}

.if defined(INSTALL_COMPRESSED) && !empty(INSTALL_COMPRESSED)
.for _curcomp in ${INSTALL_COMPRESSED}
${.CURDIR:T}.pdb.${_curcomp}: ${DOC}.pdb.${_curcomp}
	${LN} -f ${.ALLSRC} ${.TARGET}
.endfor
.endif
|
# PS/PDF/RTF -----------------------------------------------------------------

# XSL-FO intermediate, rendered from the print-flavoured parsed copy.
${DOC}.fo: ${DOC}.xml ${LOCAL_IMAGES_LIB} ${LOCAL_IMAGES_PNG} ${DOC}.parsed.xml ${XML_INCLUDES}
	${XSLTPROC} ${XSLTPROCOPTS} ${XSLFO} ${DOC}.parsed.print.xml > ${.TARGET}

.if ${RENDERENGINE} == "fop"
${DOC}.pdf: ${DOC}.fo ${LOCAL_IMAGES_LIB} ${LOCAL_IMAGES_PNG}
	${SETENV} FOP_OPTS="${FOPJAVAOPTS}" ${FOP} ${FOPOPTS} ${DOC}.fo -pdf ${.TARGET}

${DOC}.ps: ${DOC}.fo ${LOCAL_IMAGES_LIB} ${LOCAL_IMAGES_PNG}
	${SETENV} FOP_OPTS="${FOPJAVAOPTS}" ${FOP} ${FOPOPTS} ${DOC}.fo -ps ${.TARGET}

${DOC}.rtf: ${DOC}.fo ${LOCAL_IMAGES_LIB} ${LOCAL_IMAGES_PNG}
	${SETENV} FOP_OPTS="${FOPJAVAOPTS}" ${FOP} ${FOPOPTS} ${DOC}.fo -rtf ${.TARGET}
.else
# Default is dblatex
${DOC}.pdf: ${DOC}.parsed.xml ${LOCAL_IMAGES_LIB} ${LOCAL_IMAGES_PNG}
	${DBLATEX} ${DOC}.parsed.print.xml ${DBLATEXOPTS} -tpdf -o ${.TARGET}

${DOC}.ps: ${DOC}.parsed.xml ${LOCAL_IMAGES_LIB} ${LOCAL_IMAGES_PNG}
	${DBLATEX} ${DOC}.parsed.print.xml ${DBLATEXOPTS} -tps -o ${.TARGET}
.endif


# Source tarball: sources from the source dir, generated images and the
# stylesheet from the object dir.
${DOC}.tar: ${SRCS} ${LOCAL_IMAGES} ${LOCAL_CSS_SHEET}
	${TAR} cf ${.TARGET} -C ${.CURDIR} ${SRCS} \
		-C ${.OBJDIR} ${IMAGES} ${CSS_SHEET:T}

#
# Build targets for any formats we've missed that we don't handle.
#
.for _curformat in ${ALL_FORMATS}
.if !target(${DOC}.${_curformat})
${DOC}.${_curformat}:
	@${ECHO_CMD} \"${_curformat}\" is not a valid output format for this document.
.endif
.endfor
|
# ------------------------------------------------------------------------
#
# Validation targets
#

#
# Lets you quickly check that the document conforms to the DTD without
# having to convert it to any other formats
#

#
# XXX: There is duplicated code below.  In general, we want to see what
# is actually run but when validation is executed, it is better to
# silence the command invocation so that only error messages appear.
#

lint validate: ${SRCS} ${schxslts}
	@${ECHO} "==> Basic validation"
	@${XMLLINT} --nonet --noent --valid --dropdtd --xinclude ${MASTERDOC} > ${DOC}.parsed.xml
.if defined(schxslts)
	@${ECHO} "==> Validating with Schematron constraints"
.for sch in ${schxslts}
	@( out=`${XSLTPROC} ${sch} ${DOC}.parsed.xml`; \
	if [ -n "$${out}" ]; then \
		echo "$${out}" | ${GREP} -v '^<?xml'; \
		false; \
	fi )
.endfor
.endif
.if exists(${JING})
	@${ECHO} "==> Validating with RELAX NG"
	@${JING} -c ${DB5RNC} ${DOC}.parsed.xml
.endif
	@${RM} -rf ${CLEANFILES} ${CLEANDIRS} ${DOC}.parsed.xml
|
# ------------------------------------------------------------------------
#
# Compress targets
#

#
# The list of compression extensions this Makefile knows about.  If you
# add new compression schemes, add to this list (which is a list of
# extensions, hence bz2, *not* bzip2) and extend the _PROG_COMPRESS_*
# targets.
#

KNOWN_COMPRESS=	gz bz2 zip

#
# You can't build suffix rules to do compression, since you can't
# wildcard the source suffix.  So these are defined .USE, to be tacked on
# as dependencies of the compress-* targets.
#

_PROG_COMPRESS_gz: .USE
	${GZIP} ${GZIPOPTS} < ${.ALLSRC} > ${.TARGET}

_PROG_COMPRESS_bz2: .USE
	${BZIP2} ${BZIP2OPTS} < ${.ALLSRC} > ${.TARGET}

_PROG_COMPRESS_zip: .USE
	${ZIP} ${ZIPOPTS} ${.TARGET} ${.ALLSRC}

#
# Build a list of targets for each compression scheme and output format.
# Don't compress the html-split or html output format (because they need
# to be rolled into tar files first).
#
.for _curformat in ${KNOWN_FORMATS}
_cf=${_curformat}
.for _curcompress in ${KNOWN_COMPRESS}
.if ${_cf} == "html-split" || ${_cf} == "html"
${DOC}.${_cf}.tar.${_curcompress}: ${DOC}.${_cf}.tar \
	_PROG_COMPRESS_${_curcompress}
.else
${DOC}.${_cf}.${_curcompress}: ${DOC}.${_cf} _PROG_COMPRESS_${_curcompress}
.endif
.endfor
.endfor

#
# Build targets for any formats we've missed that we don't handle.
#
.for _curformat in ${ALL_FORMATS}
.for _curcompress in ${KNOWN_COMPRESS}
.if !target(${DOC}.${_curformat}.${_curcompress})
${DOC}.${_curformat}.${_curcompress}:
	@${ECHO_CMD} \"${_curformat}.${_curcompress}\" is not a valid output format for this document.
.endif
.endfor
.endfor
|
| 594 |
|
# ------------------------------------------------------------------------
#
# Install targets
#
# Build install-* targets, one per allowed value in FORMATS.  Need to
# build two specific targets;
#
# install-html-split	- Handles multiple .html files being generated
#			  from one source.  Uses the HTML.manifest file
#			  created by the stylesheets, which should list
#			  each .html file that's been created.
#
# install-*		- Every other format.  The wildcard expands to
#			  the other allowed formats, all of which should
#			  generate just one file.
#
# "beforeinstall" and "afterinstall" are hooks in to this process.
# Redefine them to do things before and after the files are installed,
# respectively.

# NOTE(review): the != assignment below runs at makefile parse time, not
# when populate_html_docs is invoked -- confirm that is the intent.
populate_html_docs:
.if exists(HTML.manifest)
_html_docs!=${CAT} HTML.manifest
.endif

spellcheck-html-split: populate_html_docs
.for _html_file in ${_html_docs}
	@echo "Spellcheck ${_html_file}"
	@${HTML2TXT} ${HTML2TXTOPTS} ${.CURDIR}/${_html_file} | ${ISPELL} ${ISPELLOPTS}
.endfor
spellcheck-html:
.for _entry in ${_docs}
	@echo "Spellcheck ${_entry}"
	@${HTML2TXT} ${HTML2TXTOPTS} ${.CURDIR}/${_entry} | ${ISPELL} ${ISPELLOPTS}
.endfor
spellcheck-txt:
.for _entry in ${_docs:M*.txt}
	@echo "Spellcheck ${_entry}"
	@ < ${.CURDIR}/${_entry} ${ISPELL} ${ISPELLOPTS}
.endfor
# Fallback targets for formats that have no spellcheck support.
.for _curformat in ${FORMATS}
.if !target(spellcheck-${_curformat})
spellcheck-${_curformat}:
	@echo "Spellcheck is not currently supported for the ${_curformat} format."
.endif
.endfor

spellcheck: ${FORMATS:C/^/spellcheck-/}
|
#
# Build a list of install-format targets to be installed.  These will be
# dependencies for the "realinstall" target.
#

.if !defined(INSTALL_ONLY_COMPRESSED) || empty(INSTALL_ONLY_COMPRESSED)
_curinst+= ${FORMATS:S/^/install-/g}
.endif

# Collect :N exclusion modifiers for format families the user disabled.
.if defined(NO_TEX)
_curinst_filter+=N*dvi* N*tex* N*ps* N*pdf*
.endif
.if defined(NO_RTF)
_curinst_filter+=N*rtf*
.endif
.if defined(NO_PLAINTEXT)
_curinst_filter+=N*txt*
.endif

# Join the filter words into a single colon-separated modifier chain
# suitable for use as ${_curinst:$i} below.
_cff!=${ECHO_CMD} "${_curinst_filter}" | ${SED} 's, ,:,g'

.if !defined(_cff) || empty(_cff)
realinstall: ${_curinst}
.else
.for i in ${_cff}
realinstall: ${_curinst:$i}
.endfor
.endif
|
# Generate install-<format> (and install-<format>.<compressext>) targets
# for every known format, installing the document, stylesheet and images
# into ${DESTDIR}.
.for _curformat in ${KNOWN_FORMATS}
_cf=${_curformat}
.if !target(install-${_cf})
.if ${_cf} == "html-split"
install-${_curformat}: index.html
.else
install-${_curformat}: ${DOC}.${_curformat}
.endif
	@[ -d ${DESTDIR} ] || ${MKDIR} -p ${DESTDIR}
.if ${_cf} == "html-split"
	${INSTALL_DOCS} $$(${XARGS} < HTML.manifest) ${DESTDIR}
.else
	${INSTALL_DOCS} ${.ALLSRC} ${DESTDIR}
.endif
.if (${_cf} == "html-split" || ${_cf} == "html") && !empty(LOCAL_CSS_SHEET)
	${INSTALL_DOCS} ${LOCAL_CSS_SHEET} ${DESTDIR}
.if ${_cf} == "html-split"
	@if [ -f ln*.html ]; then \
		${INSTALL_DOCS} ln*.html ${DESTDIR}; \
	fi
	@if [ -f LEGALNOTICE.html ]; then \
		${INSTALL_DOCS} LEGALNOTICE.html ${DESTDIR}; \
	fi
	@if [ -f trademarks.html ]; then \
		${INSTALL_DOCS} trademarks.html ${DESTDIR}; \
	fi
	@if [ -f ${.OBJDIR}/${DOC}.ln ]; then \
		cd ${DESTDIR}; sh ${.OBJDIR}/${DOC}.ln; \
	fi
.endif
.for _curimage in ${IMAGES_LIB}
	@[ -d ${DESTDIR}/${LOCAL_IMAGES_LIB_DIR}/${_curimage:H} ] || \
		${MKDIR} -p ${DESTDIR}/${LOCAL_IMAGES_LIB_DIR}/${_curimage:H}
	${INSTALL_DOCS} ${LOCAL_IMAGES_LIB_DIR}/${_curimage} \
		${DESTDIR}/${LOCAL_IMAGES_LIB_DIR}/${_curimage:H}
.endfor
# Install the images.  First, loop over all the image names that contain a
# directory separator, make the subdirectories, and install.  Then loop over
# the ones that don't contain a directory separator, and install them in the
# top level.
# Install at first images from /usr/share/images then localized ones
# cause of a different origin path.
.for _curimage in ${IMAGES_PNG:M*/*:M*share*}
	${MKDIR} -p ${DESTDIR:H:H}/${_curimage:H:S|${IMAGES_EN_DIR}/||:S|${.CURDIR}||}
	${INSTALL_DOCS} ${_curimage} ${DESTDIR:H:H}/${_curimage:H:S|${IMAGES_EN_DIR}/||:S|${.CURDIR}||}
.endfor
.for _curimage in ${IMAGES_PNG:M*/*:N*share*}
	${MKDIR} -p ${DESTDIR}/${_curimage:H}
	${INSTALL_DOCS} ${_curimage} ${DESTDIR}/${_curimage:H}
.endfor
.for _curimage in ${IMAGES_PNG:N*/*}
	${INSTALL_DOCS} ${_curimage} ${DESTDIR}/${_curimage}
.endfor
.elif ${_cf} == "tex" || ${_cf} == "dvi"
.for _curimage in ${IMAGES_EPS:M*/*}
	${MKDIR} -p ${DESTDIR}/${_curimage:H:S|${IMAGES_EN_DIR}/||:S|${.CURDIR:T}/||}
	${INSTALL_DOCS} ${_curimage} ${DESTDIR}/${_curimage:H:S|${IMAGES_EN_DIR}/||:S|${.CURDIR:T}/||}
.endfor
.for _curimage in ${IMAGES_EPS:N*/*}
	${INSTALL_DOCS} ${_curimage} ${DESTDIR}
.endfor
.elif ${_cf} == "pdb"
	${LN} -f ${DESTDIR}/${.ALLSRC} ${DESTDIR}/${.CURDIR:T}.${_curformat}
.endif

.if ${_cf} == "html-split"
.for _compressext in ${KNOWN_COMPRESS}
install-${_curformat}.tar.${_compressext}: ${DOC}.${_curformat}.tar.${_compressext}
	@[ -d ${DESTDIR} ] || ${MKDIR} -p ${DESTDIR}
	${INSTALL_DOCS} ${.ALLSRC} ${DESTDIR}
.endfor
.else
.for _compressext in ${KNOWN_COMPRESS}
.if !target(install-${_curformat}.${_compressext})
install-${_curformat}.${_compressext}: ${DOC}.${_curformat}.${_compressext}
	@[ -d ${DESTDIR} ] || ${MKDIR} -p ${DESTDIR}
	${INSTALL_DOCS} ${.ALLSRC} ${DESTDIR}
.if ${_cf} == "pdb"
	${LN} -f ${DESTDIR}/${.ALLSRC} \
		${DESTDIR}/${.CURDIR:T}.${_curformat}.${_compressext}
.endif
.endif
.endfor
.endif
.endif
.endfor
|
#
# Build install- targets for any formats we've missed that we don't handle.
#

.for _curformat in ${ALL_FORMATS}
.if !target(install-${_curformat})
install-${_curformat}:
	@${ECHO_CMD} \"${_curformat}\" is not a valid output format for this document.

.for _compressext in ${KNOWN_COMPRESS}
install-${_curformat}.${_compressext}:
	@${ECHO_CMD} \"${_curformat}.${_compressext}\" is not a valid output format for this document.
.endfor
.endif
.endfor
|
# ------------------------------------------------------------------------
#
# Package building
#

#
# realpackage is what is called in each subdirectory when a package
# target is called, or, rather, package calls realpackage in each
# subdirectory as it goes.
#
# packagelist returns the list of targets that would be called during
# package building.
#

realpackage: ${FORMATS:S/^/package-/}
packagelist:
	@${ECHO_CMD} ${FORMATS:S/^/package-/}

#
# Build a list of package targets for each output target.  Each package
# target depends on the corresponding install target running.
#

.if defined(BZIP2_PACKAGE)
PKG_SUFFIX=	tbz
.else
PKG_SUFFIX=	tgz
.endif

PKGDOCPFX!=	realpath ${DOC_PREFIX}

.for _curformat in ${KNOWN_FORMATS}
__curformat=${_curformat}

# Install the single format into a scratch tree, generate a packing
# list from it, then roll a package; the scratch tree is removed even
# when pkg_create fails.
${PACKAGES}/${.CURDIR:T}.${LANGCODE}.${_curformat}.${PKG_SUFFIX}:
	${MKDIR} -p ${.OBJDIR}/pkg; \
	(cd ${.CURDIR} && \
		${MAKE} FORMATS=${_curformat} \
			DOCDIR=${.OBJDIR}/pkg \
			${PKGMAKEFLAGS} \
			install); \
	PKGSRCDIR=${.OBJDIR}/pkg/${.CURDIR:S/${PKGDOCPFX}\///}; \
	/bin/ls -1 $$PKGSRCDIR > ${.OBJDIR}/PLIST.${_curformat}; \
	${PKG_CREATE} -v -f ${.OBJDIR}/PLIST.${_curformat} \
		-p ${DESTDIR} -s $$PKGSRCDIR \
		-c -"FDP ${.CURDIR:T} ${_curformat} package" \
		-d -"FDP ${.CURDIR:T} ${_curformat} package" ${.TARGET} || \
	(${RM} -fr ${.TARGET} PLIST.${_curformat} && false); \
	${RM} -rf ${.OBJDIR}/pkg

# Skip packaging of formats excluded by the NO_* filters (_cff).
.if !defined(_cff) || empty(_cff)
package-${_curformat}: ${PACKAGES}/${.CURDIR:T}.${LANGCODE}.${_curformat}.${PKG_SUFFIX}
.else
.for i in ${_cff}
.if !empty(__curformat:$i)
package-${_curformat}: ${PACKAGES}/${.CURDIR:T}.${LANGCODE}.${_curformat}.${PKG_SUFFIX}
.else
package-${_curformat}:
.endif
.endfor
.endif

.endfor
|
# Copy the master CSS stylesheet into the object directory, appending
# any document-local additions.
.if ${LOCAL_CSS_SHEET} != ${CSS_SHEET}
${LOCAL_CSS_SHEET}: ${CSS_SHEET}
	${RM} -f ${.TARGET}
	${CAT} ${.ALLSRC} > ${.TARGET}
.if defined(CSS_SHEET_ADDITIONS)
	${CAT} ${.CURDIR}/${CSS_SHEET_ADDITIONS} >> ${.TARGET}
.endif
.endif

# HTML.manifest is produced as a side effect of building index.html.
HTML.manifest: index.html
|