#!/bin/sh -x

# Generate HTML documentation from GCC Texinfo docs.
# This version is for GCC 3.1 and later releases.

set -e

# Run this from /tmp.
SVNROOT=${SVNROOT:-"file:///svn/gcc"}
export SVNROOT

PATH=/usr/local/bin:$PATH

WWWBASE=/www/gcc/htdocs
WWWBASE_PREFORMATTED=/www/gcc/htdocs-preformatted
WWWPREPROCESS='/www/gcc/bin/preprocess -r'

# Process options -rrelease and -ddirectory
RELEASE=""
SUBDIR=""

while [ $# -gt 0 ]; do
  case $1 in
    -r*)
      if [ -n "$RELEASE" ]; then
        echo "Multiple releases specified" >&2
        exit 1
      fi
      RELEASE="${1#-r}"
      if [ -z "$RELEASE" ]; then
        shift
        RELEASE="$1"
        if [ -z "$RELEASE" ]; then
          echo "No release specified with -r" >&2
          exit 1
        fi
      fi
      ;;
    -d*)
      if [ -n "$SUBDIR" ]; then
        echo "Multiple subdirectories specified" >&2
        exit 1
      fi
      SUBDIR="${1#-d}"
      if [ -z "$SUBDIR" ]; then
        shift
        SUBDIR="$1"
        if [ -z "$SUBDIR" ]; then
          echo "No subdirectory specified with -d" >&2
          exit 1
        fi
      fi
      ;;
    *)
      echo "Unknown argument \"$1\"" >&2
      exit 1
      ;;
  esac
  shift
done

if [ -n "$RELEASE" ] && [ -z "$SUBDIR" ]; then
  echo "Release specified without subdirectory" >&2
  exit 1
fi
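
# Example invocation, run from /tmp (the release tag and subdirectory
# below are purely illustrative):
#   update_web_docs_svn -r gcc_4_3_0_release -d 4.3.0
# With no options, the trunk documentation is regenerated directly into
# $WWWBASE/onlinedocs.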

if [ -z "$SUBDIR" ]; then
  DOCSDIR=$WWWBASE/onlinedocs
else
  DOCSDIR=$WWWBASE/onlinedocs/$SUBDIR
fi

if [ ! -d $DOCSDIR ]; then
  mkdir $DOCSDIR
fi

if [ -z "$RELEASE" ]; then
  RELEASE=trunk
fi

WORKDIR=/tmp/gcc-doc-update.$$

rm -rf $WORKDIR
mkdir $WORKDIR
cd $WORKDIR
if [ "$RELEASE" = "trunk" ]; then
  svn -q export $SVNROOT/$RELEASE gcc
else
  svn -q export $SVNROOT/tags/$RELEASE gcc
fi

# Remove all unwanted files.  This is needed (a) so that the Ada
# generator programs are built against the installed library rather than
# the newly exported sources, and (b) so that the sources tarball built
# below contains only the documentation sources, not the whole tree.
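# (The expression below relies on find's short-circuiting -o: a file that
# matches one of the "keep" patterns never reaches -print0, so only the
# remaining files are handed to xargs rm.)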
find gcc -type f \( -name '*.texi' \
  -o -path gcc/gcc/doc/install.texi2html \
  -o -path gcc/gcc/doc/include/texinfo.tex \
  -o -path gcc/gcc/ada/xgnatugn.adb \
  -o -path gcc/gcc/ada/ug_words \
  -o -path gcc/gcc/BASE-VER \
  -o -path gcc/gcc/DEV-PHASE \
  -o -print0 \) | xargs -0 rm -f

# Build a tarball of the sources.
tar cf docs-sources.tar gcc

# The directory to pass to -I; this is the one with texinfo.tex
# and fdl.texi.
includedir=gcc/gcc/doc/include

MANUALS="cpp cppinternals fastjar gcc gccint gcj g77 gfortran gnat_ug_unx gnat_ug_vms gnat_ug_vxw gnat_ug_wnt gnat_ugn_unw gnat-style gnat_rm libiberty porting"

# Generate gnat_ugn_unw
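# (Roughly: xgnatugn expands the "unw" (Unix/Windows) variant of the
# guide from gnat_ugn.texi, using the substitutions listed in ug_words;
# see gcc/gcc/ada/xgnatugn.adb for the authoritative description.)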

if [ -f gcc/gcc/ada/xgnatugn.adb ]; then
   gnatmake -q gcc/gcc/ada/xgnatugn
   ./xgnatugn unw gcc/gcc/ada/gnat_ugn.texi \
     gcc/gcc/ada/ug_words gnat_ugn_unw.texi
fi

# Generate gcc-vers.texi.
(
   echo "@set version-GCC $(cat gcc/gcc/BASE-VER)"
   if [ "$(cat gcc/gcc/DEV-PHASE)" = "experimental" ]; then
      echo "@set DEVELOPMENT"
   else
      echo "@clear DEVELOPMENT"
   fi
   echo "@set srcdir $WORKDIR/gcc/gcc"
) > $includedir/gcc-vers.texi
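
# The generated gcc-vers.texi will look something like this (the version
# number and work directory are made up for illustration):
#   @set version-GCC 4.3.0
#   @set DEVELOPMENT
#   @set srcdir /tmp/gcc-doc-update.12345/gcc/gcc
# For a release, @clear DEVELOPMENT is emitted instead of @set DEVELOPMENT.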

# Now convert the relevant files from texi to HTML, PDF and PostScript.
for file in $MANUALS; do
  filename=`find . -name ${file}.texi`
  if [ "${filename}" ]; then
    makeinfo --html -I ${includedir} -I `dirname ${filename}` ${filename}
    tar cf ${file}-html.tar ${file}/*.html
    texi2dvi -I ${includedir} ${filename} </dev/null && dvips -o ${file}.ps ${file}.dvi
    texi2pdf -I ${includedir} ${filename} </dev/null
    mkdir -p $DOCSDIR/$file
  fi
done
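
# At this point each manual that was found has produced:
#   ${file}/*.html  - split HTML from makeinfo, also packed into ${file}-html.tar
#   ${file}.ps      - via texi2dvi and dvips
#   ${file}.pdf     - via texi2pdf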

# Then build a gzipped copy of each of the resulting .html, .ps, .pdf
# and .tar files.
for file in */*.html *.ps *.pdf *.tar; do
  gzip --best < $file > $file.gz
done
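# (The .gz copies are uploaded alongside the originals below, presumably so
# the web server can offer pre-compressed versions of each document.)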

# On the 15th of the month, wipe all the old files from the
# web server.
today=`date +%d`
if test $today = 15; then
  find $DOCSDIR -maxdepth 1 -type f -print | grep -v index.html | xargs rm
  for m in $MANUALS; do
    rm -f $DOCSDIR/$m/*.html $DOCSDIR/$m/*.html.gz
  done
fi

# And copy the resulting files to the web server
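# The sed filters below strip the makeinfo generator tag and the
# %DVIPSSource: stamp before comparing, so a file is only re-copied when
# its actual content has changed, not merely because it was regenerated
# by a different tool version.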
for file in */*.html *.ps *.pdf *.tar; do
  # Start each comparison from a clean slate so a file left over from a
  # previous iteration cannot be reused by mistake.
  rm -f file1 file2
  if [ -f $DOCSDIR/$file ]; then
    sed -e '/^<meta name=generator/d' \
        -e '/^%DVIPSSource:/d' $DOCSDIR/$file > file1
  fi
  sed -e '/^<meta name=generator/d' \
      -e '/^%DVIPSSource:/d' $file > file2
  if cmp -s file1 file2; then
    :
  else
    cp $file $DOCSDIR/$file
    cp $file.gz $DOCSDIR/$file.gz
  fi
done

cd $DOCSDIR

# Finally, generate the installation documentation
if [ "$RELEASE" = "trunk" ]; then
  SOURCEDIR=$WORKDIR/gcc/gcc/doc
  DESTDIR=$WWWBASE_PREFORMATTED/install
  export SOURCEDIR
  export DESTDIR
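  # install.texi2html reads SOURCEDIR and DESTDIR from the environment,
  # which is why they are exported above.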
  $WORKDIR/gcc/gcc/doc/install.texi2html

  # Preprocess the entire web site, not just the install docs!
  echo "Invoking $WWWPREPROCESS"
  $WWWPREPROCESS |grep -v '^  Warning: Keeping'
fi

# Clean up behind us.

rm -rf $WORKDIR