mirror of https://git.savannah.gnu.org/git/guile.git (synced 2025-04-29 19:30:36 +02:00)
Update Gnulib to v0.0-7695-g26c0590.

* gnulib-local/m4/canonicalize.m4.diff: Remove.
* Makefile.am (EXTRA_DIST): Adjust accordingly.
parent 44cd55752a
commit 7ae4e75af5
34 changed files with 1116 additions and 796 deletions
@@ -2,10 +2,10 @@
 # gendocs.sh -- generate a GNU manual in many formats. This script is
 # mentioned in maintain.texi. See the help message below for usage details.

-scriptversion=2011-04-08.14
+scriptversion=2012-10-27.11

-# Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
-# Foundation, Inc.
+# Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
+# Free Software Foundation, Inc.
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -30,6 +30,12 @@ scriptversion=2011-04-08.14
 #
 # An up-to-date copy is also maintained in Gnulib (gnu.org/software/gnulib).

+# TODO:
+# - image importation was only implemented for HTML generated by
+# makeinfo. But it should be simple enough to adjust.
+# - images are not imported in the source tarball. All the needed
+# formats (PDF, PNG, etc.) should be included.
+
 prog=`basename "$0"`
 srcdir=`pwd`

@@ -39,35 +45,37 @@ templateurl="http://savannah.gnu.org/cgi-bin/viewcvs/~checkout~/texinfo/texinfo/
 : ${SETLANG="env LANG= LC_MESSAGES= LC_ALL= LANGUAGE="}
 : ${MAKEINFO="makeinfo"}
 : ${TEXI2DVI="texi2dvi -t @finalout"}
-: ${DVIPS="dvips"}
 : ${DOCBOOK2HTML="docbook2html"}
 : ${DOCBOOK2PDF="docbook2pdf"}
-: ${DOCBOOK2PS="docbook2ps"}
 : ${DOCBOOK2TXT="docbook2txt"}
 : ${GENDOCS_TEMPLATE_DIR="."}
+: ${PERL='perl'}
 : ${TEXI2HTML="texi2html"}
 unset CDPATH
 unset use_texi2html

 version="gendocs.sh $scriptversion

-Copyright 2010 Free Software Foundation, Inc.
+Copyright 2012 Free Software Foundation, Inc.
 There is NO warranty. You may redistribute this software
 under the terms of the GNU General Public License.
 For more information about these matters, see the files named COPYING."

 usage="Usage: $prog [OPTION]... PACKAGE MANUAL-TITLE

-Generate various output formats from PACKAGE.texinfo (or .texi or .txi) source.
-See the GNU Maintainers document for a more extensive discussion:
+Generate output in various formats from PACKAGE.texinfo (or .texi or
+.txi) source. See the GNU Maintainers document for a more extensive
+discussion:
 http://www.gnu.org/prep/maintain_toc.html

 Options:
 -s SRCFILE read Texinfo from SRCFILE, instead of PACKAGE.{texinfo|texi|txi}
 -o OUTDIR write files into OUTDIR, instead of manual/.
+-I DIR append DIR to the Texinfo search path.
 --email ADR use ADR as contact in generated web pages.
--docbook convert to DocBook too (xml, txt, html, pdf and ps).
+--docbook convert through DocBook too (xml, txt, html, pdf).
 --html ARG pass indicated ARG to makeinfo or texi2html for HTML targets.
+--info ARG pass indicated ARG to makeinfo for Info, instead of --no-split.
 --texi2html use texi2html to generate HTML targets.
 --help display this help and exit successfully.
 --version display version information and exit successfully.
@@ -80,11 +88,11 @@ Typical sequence:
 wget \"$templateurl\"
 $prog --email BUGLIST MANUAL \"GNU MANUAL - One-line description\"

-Output will be in a new subdirectory \"manual\" (by default, use -o OUTDIR
-to override). Move all the new files into your web CVS tree, as
-explained in the Web Pages node of maintain.texi.
+Output will be in a new subdirectory \"manual\" (by default;
+use -o OUTDIR to override). Move all the new files into your web CVS
+tree, as explained in the Web Pages node of maintain.texi.

-Please use the --email ADDRESS option to specify your bug-reporting
+Please do use the --email ADDRESS option to specify your bug-reporting
 address in the generated HTML pages.

 MANUAL-TITLE is included as part of the HTML <title> of the overall
@@ -102,11 +110,14 @@ If a manual's Texinfo sources are spread across several directories,
 first copy or symlink all Texinfo sources into a single directory.
 (Part of the script's work is to make a tar.gz of the sources.)

-You can set the environment variables MAKEINFO, TEXI2DVI, TEXI2HTML, and
-DVIPS to control the programs that get executed, and
+As implied above, by default monolithic Info files are generated.
+If you want split Info, or other Info options, use --info to override.
+
+You can set the environment variables MAKEINFO, TEXI2DVI, TEXI2HTML,
+and PERL to control the programs that get executed, and
 GENDOCS_TEMPLATE_DIR to control where the gendocs_template file is
 looked for. With --docbook, the environment variables DOCBOOK2HTML,
-DOCBOOK2PDF, DOCBOOK2PS, and DOCBOOK2TXT are also respected.
+DOCBOOK2PDF, and DOCBOOK2TXT are also respected.

 By default, makeinfo and texi2dvi are run in the default (English)
 locale, since that's the language of most Texinfo manuals. If you
@@ -116,16 +127,13 @@ SETLANG setting in the source.
 Email bug reports or enhancement requests to bug-texinfo@gnu.org.
 "

-calcsize()
-{
-size=`ls -ksl $1 | awk '{print $1}'`
-echo $size
-}
-
 MANUAL_TITLE=
 PACKAGE=
 EMAIL=webmasters@gnu.org # please override with --email
+commonarg= # Options passed to all the tools (-I dir).
+dirs= # -I's directories.
 htmlarg=
+infoarg=--no-split
 outdir=manual
 srcfile=

@@ -136,8 +144,10 @@ while test $# -gt 0; do
 --version) echo "$version"; exit 0;;
 -s) shift; srcfile=$1;;
 -o) shift; outdir=$1;;
+-I) shift; commonarg="$commonarg -I '$1'"; dirs="$dirs $1";;
 --docbook) docbook=yes;;
 --html) shift; htmlarg=$1;;
+--info) shift; infoarg=$1;;
 --texi2html) use_texi2html=1;;
 -*)
 echo "$0: Unknown option \`$1'." >&2
@@ -183,15 +193,64 @@ if test ! -r $GENDOCS_TEMPLATE_DIR/gendocs_template; then
 exit 1
 fi

+# Function to return size of $1 in something resembling kilobytes.
+calcsize()
+{
+size=`ls -ksl $1 | awk '{print $1}'`
+echo $size
+}
+
+# copy_images OUTDIR HTML-FILE...
+# -------------------------------
+# Copy all the images needed by the HTML-FILEs into OUTDIR. Look
+# for them in the -I directories.
+copy_images()
+{
+local odir
+odir=$1
+shift
+$PERL -n -e "
+BEGIN {
+\$me = '$prog';
+\$odir = '$odir';
+@dirs = qw($dirs);
+}
+" -e '
+/<img src="(.*?)"/g && ++$need{$1};
+
+END {
+#print "$me: @{[keys %need]}\n"; # for debugging, show images found.
+FILE: for my $f (keys %need) {
+for my $d (@dirs) {
+if (-f "$d/$f") {
+use File::Basename;
+my $dest = dirname ("$odir/$f");
+#
+use File::Path;
+-d $dest || mkpath ($dest)
+|| die "$me: cannot mkdir $dest: $!\n";
+#
+use File::Copy;
+copy ("$d/$f", $dest)
+|| die "$me: cannot copy $d/$f to $dest: $!\n";
+next FILE;
+}
+}
+die "$me: $ARGV: cannot find image $f\n";
+}
+}
+' -- "$@" || exit 1
+}
+
 case $outdir in
 /*) abs_outdir=$outdir;;
 *) abs_outdir=$srcdir/$outdir;;
 esac

-echo Generating output formats for $srcfile
+echo "Generating output formats for $srcfile"

-cmd="$SETLANG $MAKEINFO -o $PACKAGE.info \"$srcfile\""
-echo "Generating info files... ($cmd)"
+cmd="$SETLANG $MAKEINFO -o $PACKAGE.info $commonarg $infoarg \"$srcfile\""
+echo "Generating info file(s)... ($cmd)"
 eval "$cmd"
 mkdir -p "$outdir/"
 tar czf "$outdir/$PACKAGE.info.tar.gz" $PACKAGE.info*
@@ -199,29 +258,23 @@ info_tgz_size=`calcsize "$outdir/$PACKAGE.info.tar.gz"`
 # do not mv the info files, there's no point in having them available
 # separately on the web.

-cmd="$SETLANG ${TEXI2DVI} \"$srcfile\""
+cmd="$SETLANG $TEXI2DVI $commonarg \"$srcfile\""
 echo "Generating dvi ... ($cmd)"
 eval "$cmd"

-# now, before we compress dvi:
-echo Generating postscript...
-${DVIPS} $PACKAGE -o
-gzip -f -9 $PACKAGE.ps
-ps_gz_size=`calcsize $PACKAGE.ps.gz`
-mv $PACKAGE.ps.gz "$outdir/"
-
 # compress/finish dvi:
 gzip -f -9 $PACKAGE.dvi
 dvi_gz_size=`calcsize $PACKAGE.dvi.gz`
 mv $PACKAGE.dvi.gz "$outdir/"

-cmd="$SETLANG ${TEXI2DVI} --pdf \"$srcfile\""
+cmd="$SETLANG $TEXI2DVI --pdf $commonarg \"$srcfile\""
 echo "Generating pdf ... ($cmd)"
 eval "$cmd"
 pdf_size=`calcsize $PACKAGE.pdf`
 mv $PACKAGE.pdf "$outdir/"

-cmd="$SETLANG $MAKEINFO -o $PACKAGE.txt --no-split --no-headers \"$srcfile\""
+opt="-o $PACKAGE.txt --no-split --no-headers $commonarg"
+cmd="$SETLANG $MAKEINFO $opt \"$srcfile\""
 echo "Generating ASCII... ($cmd)"
 eval "$cmd"
 ascii_size=`calcsize $PACKAGE.txt`
@@ -231,7 +284,7 @@ mv $PACKAGE.txt "$outdir/"

 html_split()
 {
-opt="--split=$1 $htmlarg --node-files"
+opt="--split=$1 $commonarg $htmlarg --node-files"
 cmd="$SETLANG $TEXI2HTML --output $PACKAGE.html $opt \"$srcfile\""
 echo "Generating html by $1... ($cmd)"
 eval "$cmd"
@@ -249,7 +302,7 @@ html_split()
 }

 if test -z "$use_texi2html"; then
-opt="--no-split --html -o $PACKAGE.html $htmlarg"
+opt="--no-split --html -o $PACKAGE.html $commonarg $htmlarg"
 cmd="$SETLANG $MAKEINFO $opt \"$srcfile\""
 echo "Generating monolithic html... ($cmd)"
 rm -rf $PACKAGE.html # in case a directory is left over
@@ -257,23 +310,25 @@ if test -z "$use_texi2html"; then
 html_mono_size=`calcsize $PACKAGE.html`
 gzip -f -9 -c $PACKAGE.html >"$outdir/$PACKAGE.html.gz"
 html_mono_gz_size=`calcsize "$outdir/$PACKAGE.html.gz"`
+copy_images "$outdir/" $PACKAGE.html
 mv $PACKAGE.html "$outdir/"

-cmd="$SETLANG $MAKEINFO --html -o $PACKAGE.html $htmlarg \"$srcfile\""
+opt="--html -o $PACKAGE.html $commonarg $htmlarg"
+cmd="$SETLANG $MAKEINFO $opt \"$srcfile\""
 echo "Generating html by node... ($cmd)"
 eval "$cmd"
 split_html_dir=$PACKAGE.html
+copy_images $split_html_dir/ $split_html_dir/*.html
 (
-cd ${split_html_dir} || exit 1
-tar -czf "$abs_outdir/${PACKAGE}.html_node.tar.gz" -- *.html
+cd $split_html_dir || exit 1
+tar -czf "$abs_outdir/$PACKAGE.html_node.tar.gz" -- *
 )
-html_node_tgz_size=`calcsize "$outdir/${PACKAGE}.html_node.tar.gz"`
-rm -f "$outdir"/html_node/*.html
-mkdir -p "$outdir/html_node/"
-mv ${split_html_dir}/*.html "$outdir/html_node/"
-rmdir ${split_html_dir}
+html_node_tgz_size=`calcsize "$outdir/$PACKAGE.html_node.tar.gz"`
+rm -rf "$outdir/html_node/"
+mv $split_html_dir "$outdir/html_node/"
 else
-cmd="$SETLANG $TEXI2HTML --output $PACKAGE.html $htmlarg \"$srcfile\""
+opt="--output $PACKAGE.html $commonarg $htmlarg"
+cmd="$SETLANG $TEXI2HTML $opt \"$srcfile\""
 echo "Generating monolithic html... ($cmd)"
 rm -rf $PACKAGE.html # in case a directory is left over
 eval "$cmd"
@@ -297,7 +352,8 @@ d=`dirname $srcfile`
 texi_tgz_size=`calcsize "$outdir/$PACKAGE.texi.tar.gz"`

 if test -n "$docbook"; then
-cmd="$SETLANG $MAKEINFO -o - --docbook \"$srcfile\" > ${srcdir}/$PACKAGE-db.xml"
+opt="-o - --docbook $commonarg"
+cmd="$SETLANG $MAKEINFO $opt \"$srcfile\" >${srcdir}/$PACKAGE-db.xml"
 echo "Generating docbook XML... ($cmd)"
 eval "$cmd"
 docbook_xml_size=`calcsize $PACKAGE-db.xml`
@@ -306,7 +362,8 @@ if test -n "$docbook"; then
 mv $PACKAGE-db.xml "$outdir/"

 split_html_db_dir=html_node_db
-cmd="${DOCBOOK2HTML} -o $split_html_db_dir \"${outdir}/$PACKAGE-db.xml\""
+opt="$commonarg -o $split_html_db_dir"
+cmd="$DOCBOOK2HTML $opt \"${outdir}/$PACKAGE-db.xml\""
 echo "Generating docbook HTML... ($cmd)"
 eval "$cmd"
 (
@@ -319,20 +376,13 @@ if test -n "$docbook"; then
 mv ${split_html_db_dir}/*.html "$outdir/html_node_db/"
 rmdir ${split_html_db_dir}

-cmd="${DOCBOOK2TXT} \"${outdir}/$PACKAGE-db.xml\""
+cmd="$DOCBOOK2TXT \"${outdir}/$PACKAGE-db.xml\""
 echo "Generating docbook ASCII... ($cmd)"
 eval "$cmd"
 docbook_ascii_size=`calcsize $PACKAGE-db.txt`
 mv $PACKAGE-db.txt "$outdir/"

-cmd="${DOCBOOK2PS} \"${outdir}/$PACKAGE-db.xml\""
-echo "Generating docbook PS... ($cmd)"
-eval "$cmd"
-gzip -f -9 -c $PACKAGE-db.ps >"$outdir/$PACKAGE-db.ps.gz"
-docbook_ps_gz_size=`calcsize "$outdir/$PACKAGE-db.ps.gz"`
-mv $PACKAGE-db.ps "$outdir/"
-
-cmd="${DOCBOOK2PDF} \"${outdir}/$PACKAGE-db.xml\""
+cmd="$DOCBOOK2PDF \"${outdir}/$PACKAGE-db.xml\""
 echo "Generating docbook PDF... ($cmd)"
 eval "$cmd"
 docbook_pdf_size=`calcsize $PACKAGE-db.pdf`
@@ -346,6 +396,7 @@ if test -z "$use_texi2html"; then
 else
 CONDS="/%%ENDIF.*%%/d;/%%IF *HTML_SECTION%%/d;/%%IF *HTML_CHAPTER%%/d"
 fi

+curdate=`$SETLANG date '+%B %d, %Y'`
 sed \
 -e "s!%%TITLE%%!$MANUAL_TITLE!g" \
@@ -360,13 +411,11 @@ sed \
 -e "s!%%INFO_TGZ_SIZE%%!$info_tgz_size!g" \
 -e "s!%%DVI_GZ_SIZE%%!$dvi_gz_size!g" \
 -e "s!%%PDF_SIZE%%!$pdf_size!g" \
--e "s!%%PS_GZ_SIZE%%!$ps_gz_size!g" \
 -e "s!%%ASCII_SIZE%%!$ascii_size!g" \
 -e "s!%%ASCII_GZ_SIZE%%!$ascii_gz_size!g" \
 -e "s!%%TEXI_TGZ_SIZE%%!$texi_tgz_size!g" \
 -e "s!%%DOCBOOK_HTML_NODE_TGZ_SIZE%%!$html_node_db_tgz_size!g" \
 -e "s!%%DOCBOOK_ASCII_SIZE%%!$docbook_ascii_size!g" \
--e "s!%%DOCBOOK_PS_GZ_SIZE%%!$docbook_ps_gz_size!g" \
 -e "s!%%DOCBOOK_PDF_SIZE%%!$docbook_pdf_size!g" \
 -e "s!%%DOCBOOK_XML_SIZE%%!$docbook_xml_size!g" \
 -e "s!%%DOCBOOK_XML_GZ_SIZE%%!$docbook_xml_gz_size!g" \

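Example (not from this commit): a minimal sketch of driving the updated gendocs.sh with the newly added -I and --info options. The package name "foo", the bug address, and the images directory are placeholders; the template URL is whatever $templateurl is set to near the top of the script.

  # Illustrative only; run from the directory holding foo.texi.
  wget "$templateurl"    # fetch gendocs_template first, as the usage text suggests
  ./gendocs.sh -I images --email bug-foo@gnu.org foo "GNU Foo - The Foo Manual"
  # Pass something other than the default --no-split to makeinfo for Info output:
  ./gendocs.sh --info "--split-size=500000" --email bug-foo@gnu.org foo "GNU Foo - The Foo Manual"

Output lands in ./manual/ by default; -o OUTDIR overrides that, as described in the usage text above.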
@@ -3,7 +3,7 @@ eval '(exit $?0)' && eval 'exec perl -wS "$0" ${1+"$@"}'
 if 0;
 # Convert git log output to ChangeLog format.

-my $VERSION = '2012-05-22 09:40'; # UTC
+my $VERSION = '2012-07-29 06:11'; # UTC
 # The definition above must lie within the first 8 lines in order
 # for the Emacs time-stamp write hook (at end) to update it.
 # If you change this file with Emacs, please let the write hook
@@ -68,6 +68,8 @@ OPTIONS:
 header; the default is to cluster adjacent commit messages
 if their headers are the same and neither commit message
 contains multiple paragraphs.
+--srcdir=DIR the root of the source tree, from which the .git/
+directory can be derived.
 --since=DATE convert only the logs since DATE;
 the default is to convert all log entries.
 --format=FMT set format string for commit subject and body;
@@ -192,6 +194,30 @@ sub parse_amend_file($)
 return $h;
 }

+# git_dir_option $SRCDIR
+#
+# From $SRCDIR, the --git-dir option to pass to git (none if $SRCDIR
+# is undef). Return as a list (0 or 1 element).
+sub git_dir_option($)
+{
+my ($srcdir) = @_;
+my @res = ();
+if (defined $srcdir)
+{
+my $qdir = shell_quote $srcdir;
+my $cmd = "cd $qdir && git rev-parse --show-toplevel";
+my $qcmd = shell_quote $cmd;
+my $git_dir = qx($cmd);
+defined $git_dir
+or die "$ME: cannot run $qcmd: $!\n";
+$? == 0
+or die "$ME: $qcmd had unexpected exit code or signal ($?)\n";
+chomp $git_dir;
+push @res, "--git-dir=$git_dir/.git";
+}
+@res;
+}
+
 {
 my $since_date;
 my $format_string = '%s%n%b%n';
@@ -200,6 +226,7 @@ sub parse_amend_file($)
 my $cluster = 1;
 my $strip_tab = 0;
 my $strip_cherry_pick = 0;
+my $srcdir;
 GetOptions
 (
 help => sub { usage 0 },
@@ -211,9 +238,9 @@ sub parse_amend_file($)
 'cluster!' => \$cluster,
 'strip-tab' => \$strip_tab,
 'strip-cherry-pick' => \$strip_cherry_pick,
+'srcdir=s' => \$srcdir,
 ) or usage 1;

-
 defined $since_date
 and unshift @ARGV, "--since=$since_date";

@@ -221,7 +248,9 @@ sub parse_amend_file($)
 # that makes a correction in the log or attribution of that commit.
 my $amend_code = defined $amend_file ? parse_amend_file $amend_file : {};

-my @cmd = (qw (git log --log-size),
+my @cmd = ('git',
+git_dir_option $srcdir,
+qw(log --log-size),
 '--pretty=format:%H:%ct %an <%ae>%n%n'.$format_string, @ARGV);
 open PIPE, '-|', @cmd
 or die ("$ME: failed to run '". quoted_cmd (@cmd) ."': $!\n"

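Example (not from this commit): an illustrative use of the --srcdir option added to gitlog-to-changelog above, for the case where the script is run outside the tree that holds the .git/ directory. The path and date are made up.

  # Generate ChangeLog entries for commits since 2012-01-01, taking the
  # git history from ../guile rather than from the current directory.
  ./gitlog-to-changelog --srcdir=../guile --since=2012-01-01 > ChangeLog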
@@ -24,9 +24,6 @@ VERSION=2009-07-21.16; # UTC
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.

-# Requirements: everything required to bootstrap your package,
-# plus these: git, cvs, cvsu, rsync, mktemp
-
 ME=$(basename "$0")
 warn() { printf '%s: %s\n' "$ME" "$*" >&2; }
 die() { warn "$*"; exit 1; }
@@ -36,10 +33,9 @@ help()
 cat <<EOF
 Usage: $ME

-Run this script from top_srcdir (no options or arguments) after each
-non-alpha release, to update the web documentation at
-http://www.gnu.org/software/\$pkg/manual/ Run it from your project's
-the top-level directory.
+Run this script from top_srcdir (no arguments) after each non-alpha
+release, to update the web documentation at
+http://www.gnu.org/software/\$pkg/manual/

 Options:
 -C, --builddir=DIR location of (configured) Makefile (default: .)
@@ -64,6 +60,51 @@ EOF
 exit
 }

+# find_tool ENVVAR NAMES...
+# -------------------------
+# Search for a required program. Use the value of ENVVAR, if set,
+# otherwise find the first of the NAMES that can be run (i.e.,
+# supports --version). If found, set ENVVAR to the program name,
+# die otherwise.
+#
+# FIXME: code duplication, see also bootstrap.
+find_tool ()
+{
+find_tool_envvar=$1
+shift
+find_tool_names=$@
+eval "find_tool_res=\$$find_tool_envvar"
+if test x"$find_tool_res" = x; then
+for i
+do
+if ($i --version </dev/null) >/dev/null 2>&1; then
+find_tool_res=$i
+break
+fi
+done
+else
+find_tool_error_prefix="\$$find_tool_envvar: "
+fi
+test x"$find_tool_res" != x \
+|| die "one of these is required: $find_tool_names"
+($find_tool_res --version </dev/null) >/dev/null 2>&1 \
+|| die "${find_tool_error_prefix}cannot run $find_tool_res --version"
+eval "$find_tool_envvar=\$find_tool_res"
+eval "export $find_tool_envvar"
+}
+
+## ------ ##
+## Main. ##
+## ------ ##
+
+# Requirements: everything required to bootstrap your package, plus
+# these.
+find_tool CVS cvs
+find_tool CVSU cvsu
+find_tool GIT git
+find_tool RSYNC rsync
+find_tool XARGS gxargs xargs
+
 builddir=.
 while test $# != 0
 do
@@ -86,22 +127,22 @@ do
 done

 test $# = 0 \
-|| die "$ME: too many arguments"
+|| die "too many arguments"

 prev=.prev-version
-version=$(cat $prev) || die "$ME: no $prev file?"
+version=$(cat $prev) || die "no $prev file?"
 pkg=$(sed -n 's/^PACKAGE = \(.*\)/\1/p' $builddir/Makefile) \
-|| die "$ME: no Makefile?"
+|| die "no Makefile?"
 tmp_branch=web-doc-$version-$$
-current_branch=$(git branch | sed -ne '/^\* /{s///;p;q;}')
+current_branch=$($GIT branch | sed -ne '/^\* /{s///;p;q;}')

 cleanup()
 {
 __st=$?
 rm -rf "$tmp"
-git checkout "$current_branch"
-git submodule update --recursive
-git branch -d $tmp_branch
+$GIT checkout "$current_branch"
+$GIT submodule update --recursive
+$GIT branch -d $tmp_branch
 exit $__st
 }
 trap cleanup 0
@@ -111,8 +152,8 @@ trap 'exit $?' 1 2 13 15
 # just-released version number, not some string like 7.6.18-20761.
 # That version string propagates into all documentation.
 set -e
-git checkout -b $tmp_branch v$version
-git submodule update --recursive
+$GIT checkout -b $tmp_branch v$version
+$GIT submodule update --recursive
 ./bootstrap
 srcdir=$(pwd)
 cd "$builddir"
@@ -125,16 +166,18 @@ set +e

 tmp=$(mktemp -d web-doc-update.XXXXXX) || exit 1
 ( cd $tmp \
-&& cvs -d $USER@cvs.sv.gnu.org:/webcvs/$pkg co $pkg )
-rsync -avP "$builddir"/doc/manual/ $tmp/$pkg/manual
+&& $CVS -d $USER@cvs.sv.gnu.org:/webcvs/$pkg co $pkg )
+$RSYNC -avP "$builddir"/doc/manual/ $tmp/$pkg/manual

 (
 cd $tmp/$pkg/manual

 # Add any new files:
-cvsu --types='?'|sed s/..// | xargs --no-run-if-empty -- cvs add -ko
+$CVSU --types='?' \
+| sed s/..// \
+| $XARGS --no-run-if-empty -- $CVS add -ko

-cvs ci -m $version
+$CVS ci -m $version
 )

 # Local variables:
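Example (not from this commit): a condensed illustration of how the find_tool helper added above behaves. The path /opt/gnu/bin/gxargs is a made-up example of an override.

  # With no XARGS in the environment, the first candidate that answers
  # --version is chosen, assigned to XARGS, and exported:
  find_tool XARGS gxargs xargs
  # A preset environment variable wins, provided it supports --version;
  # otherwise find_tool dies with an error naming the variable:
  XARGS=/opt/gnu/bin/gxargs; export XARGS
  find_tool XARGS gxargs xargs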