cvs commit: www/scripts styleswitcher.js update-website.sh

jeroen at linuxfromscratch.org jeroen at linuxfromscratch.org
Sat Nov 22 05:34:29 PST 2003


jeroen      03/11/22 06:34:29

  Added:       scripts  styleswitcher.js update-website.sh
  Log:
  Add update-website script and styleswitcher.js
  
  Revision  Changes    Path
  1.1                  www/scripts/styleswitcher.js
  
  Index: styleswitcher.js
  ===================================================================
  // Enable the stylesheet <link> whose title matches `title` and disable
  // every other titled stylesheet link. Links without a title attribute
  // (persistent styles) are left untouched.
  function setActiveStyleSheet(title) {
    var links = document.getElementsByTagName("link");
    for (var idx = 0; idx < links.length; idx++) {
      var link = links[idx];
      if (link.getAttribute("rel").indexOf("style") == -1) continue;
      if (!link.getAttribute("title")) continue;
      // Active when the title matches, disabled otherwise.
      link.disabled = (link.getAttribute("title") != title);
    }
  }
  
  // Return the title of the first enabled, titled stylesheet <link>,
  // or null when no titled stylesheet is currently active.
  function getActiveStyleSheet() {
    var links = document.getElementsByTagName("link");
    for (var idx = 0; idx < links.length; idx++) {
      var link = links[idx];
      var isStyle = link.getAttribute("rel").indexOf("style") != -1;
      if (isStyle && link.getAttribute("title") && !link.disabled) {
        return link.getAttribute("title");
      }
    }
    return null;
  }
  
  // Return the title of the document's preferred stylesheet: the first
  // titled <link> whose rel contains "style" but not "alt" (i.e. not an
  // alternate stylesheet). Returns null when none exists.
  function getPreferredStyleSheet() {
    var links = document.getElementsByTagName("link");
    for (var idx = 0; idx < links.length; idx++) {
      var link = links[idx];
      var rel = link.getAttribute("rel");
      if (rel.indexOf("style") != -1 && rel.indexOf("alt") == -1 && link.getAttribute("title")) {
        return link.getAttribute("title");
      }
    }
    return null;
  }
  
  // Store `value` under cookie `name`, expiring after `days` days.
  // When `days` is falsy, no expiry is set (session cookie). The path is
  // fixed to "/" so the cookie is visible site-wide.
  function createCookie(name,value,days) {
    // Declare up front: the original declared `expires` inside the if
    // branch and assigned it bare in the else branch, relying on var
    // hoisting — legal, but easy to misread.
    var expires = "";
    if (days) {
      var date = new Date();
      date.setTime(date.getTime()+(days*24*60*60*1000));
      // toUTCString() is the standardized name for the deprecated
      // toGMTString(); output format is identical.
      expires = "; expires="+date.toUTCString();
    }
    document.cookie = name+"="+value+expires+"; path=/";
  }
  
  // Look up cookie `name` in document.cookie and return its value,
  // or null when the cookie is not present.
  function readCookie(name) {
    var prefix = name + "=";
    var crumbs = document.cookie.split(';');
    for (var idx = 0; idx < crumbs.length; idx++) {
      var crumb = crumbs[idx];
      // Strip the leading spaces left behind by the "; " separators.
      while (crumb.charAt(0) == ' ') crumb = crumb.substring(1, crumb.length);
      if (crumb.indexOf(prefix) == 0) {
        return crumb.substring(prefix.length, crumb.length);
      }
    }
    return null;
  }
  
  // On page load, re-apply the visitor's saved style choice; fall back to
  // the document's preferred stylesheet when no cookie is set.
  window.onload = function(e) {
    var saved = readCookie("style");
    setActiveStyleSheet(saved ? saved : getPreferredStyleSheet());
  }
  
  // On page unload, persist the currently active style for a year so the
  // next visit restores it.
  window.onunload = function(e) {
    var chosen = getActiveStyleSheet();
    createCookie("style", chosen, 365);
  }
  
  // Also apply the saved style immediately, while the page is still being
  // parsed — NOTE(review): presumably to avoid a flash of the default
  // style before the onload handler above fires; confirm against the
  // pages that include this script.
  var cookie = readCookie("style");
  var title = cookie ? cookie : getPreferredStyleSheet();
  setActiveStyleSheet(title);
  
  
  1.1                  www/scripts/update-website.sh
  
  Index: update-website.sh
  ===================================================================
  #!/bin/bash
  # update-website.sh — rebuild the LFS website from CVS and swap it into
  # place.  Trace every command (set -x) so scheduled-run output shows
  # exactly what happened.
  set -x

  ##########################################################
  # Ensure there's only 1 instance of this script running  #

  LOCKFILE="/tmp/update-website-in-progress"
  CVSCLEANLOCK="/tmp/cvs_being_cleaned.lock"

  # Acquire the lock atomically: with noclobber (set -C) the redirection
  # fails when the file already exists, so there is no window between the
  # "is it locked?" test and the lock creation.  (The original
  # test-then-touch sequence let two instances both pass the test and
  # proceed concurrently.)
  while ! (set -C; : > "$LOCKFILE") 2>/dev/null; do
          echo "Lock file present, sleeping for 5 secs and trying again"
          sleep 5
  done

  # Wait until any repository maintenance (signalled by this flag file)
  # has finished before touching CVS.
  while [ -f "$CVSCLEANLOCK" ]; do
          echo "CVS being cleaned, sleeping for 5 secs and trying again"
          sleep 5
  done
  
  ##########################################
  # Set variables, create working dirs     #
  
  # All cvs invocations below read the repository location from CVSROOT.
  export CVSROOT="/home/cvsroot"
  
  # Temporary dir for assembling the site
  export TMPDIR=`mktemp -d` && cd $TMPDIR
  # Export (checkout without CVS/ metadata) the current www module and
  # rename it: $NEWSITE is the tree we assemble and later move into place.
  cvs -Q export -D now www
  mv www NEWSITE && export NEWSITE="$TMPDIR/NEWSITE"
  
  # Separate tree of full checkouts — NOTE(review): presumably kept as
  # working copies (with CVS/ metadata) because cvs2cl.pl is run inside
  # them below; confirm against cvs2cl's requirements.
  export TMPCVS="$TMPDIR/TMPCVS"
  mkdir $TMPCVS
  cd $TMPCVS
  
  # Check out each project and record its path for the changelog and
  # news-page steps that follow.
  cvs -Q co LFS/BOOK && export LFS="$TMPCVS/LFS/BOOK"
  cvs -Q co BLFS/BOOK && export BLFS="$TMPCVS/BLFS/BOOK"
  cvs -Q co ALFS && export ALFS="$TMPCVS/ALFS"
  cvs -Q co hints && export HINTS="$TMPCVS/hints"
  cvs -Q co patches && export PATCHES="$TMPCVS/patches"
  cvs -Q co www && export WWW="$TMPCVS/www"
  
  # Helper scripts and page templates ship inside the exported site tree.
  export SCRIPTS="$NEWSITE/scripts"
  export CL2HTML="$SCRIPTS/cl2html.pl"
  export MANAGE_NEWS="$SCRIPTS/manage_news.pl"
  export TEMPLATES="$NEWSITE/templates"
  export YEAR=`date +%Y`
  
  ##############################################
  # Generate all changelogs for the news pages #

  # Run cvs2cl in each working copy to produce an XML ChangeLog covering
  # the last month.  Skip a tree whose cd fails (e.g. failed checkout)
  # instead of running cvs2cl in whatever directory we were left in.
  for i in "$LFS/" "$BLFS/" "$ALFS" "$HINTS/" "$PATCHES/" "$WWW"
  	do cd "$i" || continue
  	$SCRIPTS/cvs2cl.pl -P -U $WWW/usermap --xml -l "-d'1 month ago<today'"
  done

  cd $NEWSITE

  # Generate all archives
  for i in . lfs alfs blfs hints; do
      # "LC_ALL=C" guarantees a sane localization setting.
      # Use $YEAR instead of the hard-coded 2003 so the script keeps
      # working after the year rolls over.
      LC_ALL=C $MANAGE_NEWS -a news/ -t templates/$i/archive-top.html -b templates/$i/archive-bottom.html -i $i/news-$YEAR.txt
  done
  
  #############################################
  # Now assemble all newspages                #

  # append_changelog CHANGELOG OUTFILE [EXTRA_OPT]
  #   Append the HTML rendering of CHANGELOG to OUTFILE, or a placeholder
  #   paragraph when no ChangeLog was generated (no commits last month).
  #   EXTRA_OPT is passed through to cl2html (used for --with-filename).
  append_changelog() {
      if [ -f "$1" ]; then
          $CL2HTML ${3:+"$3"} --infile "$1" >> "$2"
      else
          echo "<p>No changes were made recently.</p>" >> "$2"
      fi
  }

  # Website.html (cwd is still $NEWSITE at this point)
  cp $TEMPLATES/website-top.html website.html
  append_changelog $WWW/ChangeLog website.html
  cat $TEMPLATES/website-bottom.html >> website.html

  # Patches/news.html
  cd $NEWSITE/patches
  cp $TEMPLATES/patches/news-top.html news.html
  append_changelog $PATCHES/ChangeLog news.html
  cat $TEMPLATES/patches/news-bottom.html >> news.html

  # LFS/news.html: section news items first, then the ChangeLog
  cd $NEWSITE/lfs
  cp $TEMPLATES/lfs/news-top.html news.html
  $MANAGE_NEWS -i news-$YEAR.txt >> news.html
  $MANAGE_NEWS -i ../news-$YEAR.txt >> news.html
  append_changelog $LFS/ChangeLog news.html
  cat $TEMPLATES/lfs/news-bottom.html >> news.html

  # BLFS/news.html
  cd $NEWSITE/blfs
  cp $TEMPLATES/blfs/news-top.html news.html
  $MANAGE_NEWS -i news-$YEAR.txt >> news.html
  $MANAGE_NEWS -i ../news-$YEAR.txt >> news.html
  append_changelog $BLFS/ChangeLog news.html
  cat $TEMPLATES/blfs/news-bottom.html >> news.html

  # Hints/news.html (--with-filename: include the file name per entry)
  cd $NEWSITE/hints
  cp $TEMPLATES/hints/news-top.html news.html
  append_changelog $HINTS/ChangeLog news.html --with-filename
  cat $TEMPLATES/hints/news-bottom.html >> news.html

  # ALFS/news.html
  cd $NEWSITE/alfs
  cp $TEMPLATES/alfs/news-top.html news.html
  $MANAGE_NEWS -i news-$YEAR.txt >> news.html
  $MANAGE_NEWS -i ../news-$YEAR.txt >> news.html
  append_changelog $ALFS/ChangeLog news.html
  cat $TEMPLATES/alfs/news-bottom.html >> news.html
  
  ##########################################
  # Generate all RSS feeds                 #

  cd $NEWSITE/lfs
  $SCRIPTS/lfs2rss.pl -n news.html -r feed.rss
  cd $NEWSITE/blfs
  $SCRIPTS/lfs2rss.pl -n news.html -r feed.rss

  ##########################################
  # Create mirrors list for each section   #

  cd $NEWSITE
  cat $TEMPLATES/index-top.html > index.html
  cat mirrorlist.html >> index.html
  cat $TEMPLATES/index-bottom.html >> index.html
  for i in alfs blfs hints lfs patches; do
      cat $TEMPLATES/$i/index-top.html > $i/index.html
      # Rewrite the news link so each section's index page points at its
      # own news page.  (The mailing-list archive mangled the trailing
      # "@g" substitution flag into " at g"; restored here.)
      sed "s@/lfs/news.html@/$i/news.html@g" mirrorlist.html >> $i/index.html
      cat $TEMPLATES/$i/index-bottom.html >> $i/index.html
  done
  rm mirrorlist.html

  # Copy these to the httpd error dirs
  cp $NEWSITE/{403,404}.html /home/httpd/error/
  
  #######################################################
  # Add all other content 
  
  export TARGETDIR="/home/httpd/www.linuxfromscratch.org"
  [ ! -d $TARGETDIR ] && mkdir -p $TARGETDIR
  cd $NEWSITE
  # Carry content over from the currently deployed tree — NOTE(review):
  # presumably these subdirs (rendered books, artwork, downloads) exist
  # only in the live site and not in the www CVS module; confirm.
  cp -a $TARGETDIR/lfs/{view,downloads} lfs/
  cp -a $TARGETDIR/blfs/{view,artwork,edguide,downloads} blfs/
  cp -a $TARGETDIR/alfs/{downloads,view} alfs/
  cp -a $TARGETDIR/patches/{lfs,blfs} patches/
  
  # Fetch latest versions from patches 
  cd $NEWSITE/patches/ &&
  cvs -Q export -D now patches && mv patches downloads
  
  # Fetch current hints and create the hints tarball
  [ ! -d $NEWSITE/hints/downloads ] && mkdir -p $NEWSITE/hints/downloads/
  cd $NEWSITE/hints/downloads/ && cvs -Q export -D now hints 
  tar cjf hints.tar.bz2 hints/
  # Split the export: attachments and plain hint files are served from
  # separate directories.
  mv hints/ATTACHMENTS attachments
  mv hints/ files/
  # Generate list.html
  cd $NEWSITE/hints
  $SCRIPTS/genlist.sh
  # Symlink each old-format hint into files/ so existing URLs keep working.
  cd $NEWSITE/hints/downloads/files &&
  for i in $(ls PREVIOUS_FORMAT/*.txt); do ln -sf $i; done
  
  # Remove all templates
  rm -rf $TEMPLATES
  
  # Validate all XHTML files 

  # Abort publication if any generated page fails DTD validation.
  # find -print0 with read -d '' keeps filenames intact even with
  # whitespace (the original for-in-$(find) loop word-split the output),
  # and "if ! cmd" replaces the [ $? != 0 ] anti-pattern.
  while IFS= read -r -d '' file; do
  	if ! xmllint --valid --noout "$file"; then
  		# Something is wrong, going to clean up our files in /tmp
  		# so you can re-run the script after you fix the errors
  		rm -r $TMPDIR $LOCKFILE
  		exit 1
  	fi
  done < <(find $NEWSITE/ -name \*.html -print0)
  
  #################################################
  # Everything is in place now, so replace TARGETDIR with NEWSITE
  
  # Swap the new tree into place.  The previous site is kept as a backup
  # under /var/tmp; the site is briefly absent between the two mv's.
  cd /
  rm -rf /var/tmp/website-backup
  mv $TARGETDIR /var/tmp/website-backup
  mv $NEWSITE $TARGETDIR
  # Record the deployment time as seconds since the epoch —
  # NOTE(review): presumably consumed by mirror freshness checks; verify.
  mkdir -p $TARGETDIR/timestamp
  date +%s > $TARGETDIR/timestamp/timestamp
  rm -rf $TMPDIR
  
  # Add some compatibility symlinks
  cd $TARGETDIR
  ln -s lfs/view
  ln -s images/favicon.ico
  
  # Set proper permissions
  chmod -R g+w $TARGETDIR/
  chgrp -R lfswww $TARGETDIR
  
  # Remove lock file
  rm -f $LOCKFILE
  
  
  



More information about the website mailing list