
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Source: master
Target: hpcag-admin/blast_latest, applied-bioinformatics/blast_latest (master)
Commits on Source (40)
@@ -6,6 +2,8 @@
# Modules
use strict;
use Getopt::Long;
# Module to shuffle a list
use List::Util qw(shuffle);
# Let unknown options pass through to @ARGV (so they can be forwarded to rsync)
&Getopt::Long::Configure( 'pass_through');
# Use a signal handler to reap the finished children
@@ -19,15 +21,18 @@ my $sync_dir = '';
my @errors;
my @seeds;
my @sinks;
my @input_sinks;
my %processes;
my $rsync_settings = '--archive';
my $source_setting = 'master';
# Retrieve data from commandline
GetOptions (
'directory=s' => \$sync_dir,
'settings:s' => \$rsync_settings,
'source:s' => \$source_setting,
);
if ($sync_dir eq ""){ # If no directory was given.
print "Usage: $0 --directory directory/to/synchronize [$rsync_settings]\n";
print "Usage: $0 [--source=$source_setting] --directory directory/to/synchronize [$rsync_settings]\n";
exit;
}
# Put the rest of the options into rsync_settings
@@ -35,11 +40,21 @@ GetOptions (
$rsync_settings .= " ".$option;
}
# Fill arrays
# First set the frontend as seed
push (@seeds, `qconf -ss`);
# Then the nodes as sinks
push (@sinks, `qconf -sel`);
if ($source_setting eq 'master'){
# First set the frontend as seed
push (@seeds, `qconf -ss`);
# Then the nodes as sinks
push (@input_sinks, `qconf -sel`);
@sinks=shuffle(@input_sinks);
}else{
# First set the given source host as seed
push (@seeds, $source_setting);
# Then the rest of the nodes as sinks
push (@input_sinks, `qconf -sel | grep -v $source_setting`);
@sinks=shuffle(@input_sinks);
}
# Put the size of the sinks array in a variable
$number_of_sinks=scalar(@sinks);
# Clear newlines from names
......
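The change to the copy script above swaps in a shuffled sink list and lets a node other than the master act as the seed. Its effect can be sketched in shell terms, assuming, as the script does, that `qconf -ss` prints the frontend and `qconf -sel` prints one execution host per line; the host name below is hypothetical, and `shuf` stands in for Perl's List::Util shuffle:

    # Minimal sketch, not the script itself: choose the seed and shuffle the sinks.
    source_setting="node001"                      # hypothetical --source value; the default is "master"
    if [ "$source_setting" = "master" ]; then
        seed=$(qconf -ss)                         # the frontend seeds the transfer
        sinks=$(qconf -sel | shuf)                # all execution hosts, in random order
    else
        seed="$source_setting"                    # a chosen node seeds the transfer
        sinks=$(qconf -sel | grep -v "$source_setting" | shuf)
    fi
    echo "seed: $seed"
    printf 'sink: %s\n' $sinks

Shuffling the sinks means repeated runs do not always hammer the same node first, which is the point of the List::Util change.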
@@ -2,20 +2,26 @@
# This script fetches blast executables from ncbi
# written by Jan van Haarst, PRI
# needed : wget, grep, tar & gzip
# latest update 8 March 2006
# latest update 1 July 2010
# Sleep for a random time, so we have less chance of a race condition
sleep $[ ( $RANDOM % 60 ) + 1 ]s
# variables
data_dir="/home/jvh/data/blast_latest/executables.nobackup"
data_dir=${1-"/mnt/nexenta/reference/blast_latest/executables.nobackup"}
log_dir=$data_dir"/logs"
now=`date '+%Y_%m_%d_%A_%Hh%M'`
logfile=$log_dir"/blast_executable_log_"$now
lockfile=$data_dir"/executable_update_is_running.lock"
reject_list="*.html"
wget_options="--proxy=off --mirror --no-parent --no-directories --level=1 --timestamping --passive-ftp -o "$logfile" --reject "$reject_list
site_location="ftp://ftp.ncbi.nlm.nih.gov/blast/executables/LATEST/"
reject_list="*.html,*ia64*,ncbi.tar.gz"
wget_options="--proxy=off --mirror --no-parent --no-directories --level=1 --timestamping --passive-ftp --retr-symlinks --reject "$reject_list" -a "$logfile
site_locations="ftp://ftp.ncbi.nlm.nih.gov/blast/executables/release/LATEST/ ftp://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/LATEST/ ftp://ftp.ncbi.nlm.nih.gov/blast/executables/rmblast/LATEST/ ftp://ftp.ncbi.nlm.nih.gov/blast/executables/magicblast/LATEST/"
# make the needed directories
mkdir -p $data_dir
@@ -42,17 +48,20 @@ trap remove_lock SIGINT
# fetch the data
if test -e $lockfile
then
# update is running, so print a message and stop.
echo "Update is already running."
exit
# update is running, so print a message and stop.
echo "Update is already running."
exit
else
# lockfile does NOT exist, so we create the lockfile, run the update and remove the file.
touch $lockfile
# get the data
/home/jvh/bin/wget $wget_options $site_location
# remove the lockfile.
rm $lockfile
# done
exit
# lockfile does NOT exist, so we create the lockfile, run the update and remove the file.
touch $lockfile
# get the data
for site_location in $site_locations
do
wget $wget_options $site_location
done
# remove the lockfile.
rm $lockfile
# done
exit
fi
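Every fetch script in this set follows the same guard pattern: a random sleep to stagger cron starts, a lock file so only one update runs at a time, and a trap that removes the lock when the script is interrupted. A stripped-down sketch of that pattern, with a hypothetical lock file path rather than the per-script ones:

    #!/bin/bash
    # Sketch of the lock/trap guard shared by the fetch scripts (hypothetical path).
    lockfile=/tmp/update_is_running.lock
    # Sleep for a random time, so concurrent cron jobs do not race for the lock.
    sleep $(( ( RANDOM % 60 ) + 1 ))s
    remove_lock()
    {
        test -e "$lockfile" && rm "$lockfile"
        exit
    }
    trap remove_lock SIGINT SIGQUIT SIGTERM ERR
    if test -e "$lockfile"
    then
        echo "Update is already running."
        exit
    fi
    touch "$lockfile"
    # ... fetch and unpack the data here ...
    rm "$lockfile"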
#!/bin/bash
# This script fetches the RefSeq mitochondrion sequences from ncbi
# written by Jan van Haarst, PRI
# needed : wget, curl & perl
# Sleep for a random time, so we have less chance of a race condition
sleep $[ ( $RANDOM % 60 ) + 1 ]s
# Variables
# Get location of this script, so we can call the last_file_time script
DIRNAME=`dirname $0`
SCRIPT_LOCATION=`perl -e 'END {use Cwd qw(realpath);print realpath($ARGV[0]);}' $DIRNAME`
site_location="ftp://ftp.ncbi.nih.gov/refseq/release/mitochondrion/"
data_dir=${1-"/mnt/nexenta/reference/blast_latest/mito.nobackup"}
extracted_dir=$data_dir"/extracted_latest.nobackup"
# Remove secondary checkfile
rm -f $data_dir"/newest_date.txt"
WGET="wget"
CURL="curl -s"
TAR="tar"
PRIMARY="true"
now=`date '+%Y_%m_%d_%A_%Hh%M'`
log_dir=$data_dir"/logs"
logfile=$log_dir"/mitolog_"$now
lockfile=$data_dir"/update_is_running.lock"
accept_list="*.f*a.gz"
reject_list=""
wget_options="--proxy=off --mirror --no-parent --no-directories --level=1 --timestamping --passive-ftp --accept="$accept_list" -o "$logfile" --reject="$reject_list
tar_options="--use-compress-program=gzip --keep-newer-files --directory=${extracted_dir} -xvf"
# make the needed directories
mkdir -p $data_dir
mkdir -p $log_dir
mkdir -p $extracted_dir
# change to datadir
cd $data_dir
# catch signals and remove lockfile
remove_lock()
{
if test -e $lockfile
then
# remove the update file.
rm $lockfile
exit
fi
}
trap remove_lock SIGINT SIGQUIT SIGTERM ERR
# fetch the data
if test -e $lockfile
then
# update is running, so print a message and stop.
echo "Update is already running."
exit
else
# lockfile does NOT exist, so we create the lockfile, run the update and remove the file.
touch $lockfile
# get the data
$WGET $wget_options $site_location
# Add return value to log file
echo "Return Value="$? >> $logfile
# Generate a timestamp, so we know what date the latest file was added
perl $SCRIPT_LOCATION"/last_file_time.pl" > $data_dir"/newest_date.txt"
# Unpack the retrieved set
for NAME in *.gz
do
FILE=$(readlink -f $NAME)
SHORTFILE=$(basename $FILE .gz)
cd $extracted_dir
pigz -k -d --to-stdout $FILE > $SHORTFILE
cd - > /dev/null
done
# remove the lockfile.
rm $lockfile
# done
exit
fi
#!/bin/bash
# This script fetches the RefSeq plastid sequences from ncbi
# written by Jan van Haarst, PRI
# needed : wget, curl & perl
# Sleep for a random time, so we have less chance of a race condition
sleep $[ ( $RANDOM % 60 ) + 1 ]s
# Variables
# Get location of this script, so we can call the last_file_time script
DIRNAME=`dirname $0`
SCRIPT_LOCATION=`perl -e 'END {use Cwd qw(realpath);print realpath($ARGV[0]);}' $DIRNAME`
site_location="ftp://ftp.ncbi.nih.gov/refseq/release/plastid/"
data_dir=${1-"/mnt/nexenta/reference/blast_latest/plastid.nobackup"}
extracted_dir=$data_dir"/extracted_latest.nobackup"
# Remove secondary checkfile
rm -f $data_dir"/newest_date.txt"
WGET="wget"
CURL="curl -s"
TAR="tar"
PRIMARY="true"
now=`date '+%Y_%m_%d_%A_%Hh%M'`
log_dir=$data_dir"/logs"
logfile=$log_dir"/plastid.log_"$now
lockfile=$data_dir"/update_is_running.lock"
accept_list="*.gz"
reject_list=""
wget_options="--proxy=off --mirror --no-parent --no-directories --level=1 --timestamping --passive-ftp --accept="$accept_list" -o "$logfile" --reject="$reject_list
tar_options="--use-compress-program=gzip --keep-newer-files --directory=${extracted_dir} -xvf"
# make the needed directories
mkdir -p $data_dir
mkdir -p $log_dir
mkdir -p $extracted_dir
# change to datadir
cd $data_dir
# catch signals and remove lockfile
remove_lock()
{
if test -e $lockfile
then
# remove the update file.
rm $lockfile
exit
fi
}
trap remove_lock SIGINT SIGQUIT SIGTERM ERR
# fetch the data
if test -e $lockfile
then
# update is running, so print a message and stop.
echo "Update is already running."
exit
else
# lockfile does NOT exist, so we create the lockfile, run the update and remove the file.
touch $lockfile
# get the data
$WGET $wget_options $site_location
# Add return value to log file
echo "Return Value="$? >> $logfile
# Generate a timestamp, so we know what date the latest file was added
perl $SCRIPT_LOCATION"/last_file_time.pl" > $data_dir"/newest_date.txt"
# Unpack the retrieved set
for NAME in *.gz
do
FILE=$(readlink -f $NAME)
SHORTFILE=$(basename $FILE .gz)
cd $extracted_dir
pigz -k -d --to-stdout $FILE > $SHORTFILE
cd - > /dev/null
done
# remove the lockfile.
rm $lockfile
# done
exit
fi
#!/bin/bash
# This script fetches nr & nt blast databases from ncbi
# This script fetches taxonomy databases from ncbi
# written by Jan van Haarst, PRI
# needed : wget, curl & perl
# latest update 28 February 2007
# variables
# Get location of this script, so we can call the other script
DIRNAME=`dirname $0`
SCRIPT_LOCATION=`perl -e 'END {use Cwd qw(realpath);print realpath($ARGV[0]);}' $DIRNAME`
# Set site and data locations depending on where we are
if [ $HOSTNAME == "dev1" ] # Primary site
then
site_location="ftp://ftp.ncbi.nih.gov/pub/taxonomy/"
data_dir="/home/jvh/data/blast_latest/taxonomy.nobackup"
extracted_dir=$data_dir"/extracted"
# Remove secondary checkfile
rm $data_dir"/newest_date.txt"
WGET="/home/jvh/bin/wget"
CURL="/usr/bin/curl -s"
TAR="/bin/tar"
PRIMARY="true"
else # Secondary sites
site_location="http://dev1.ab.wurnet.nl/~jvh/databases.nobackup/"
PRIMARY="false"
if [ $HOSTNAME == "kwatta" ]
then
target_dir="/hwraid4/data/blast/databases"
data_dir=$target_dir"/updates"
extracted_dir=$data_dir"/extracted"
WGET="/usr/local/bin/wget"
CURL="/opt/sfw/bin/curl"
TAR="/usr/local/bin/tar"
else
target_dir="/state/partition1/blast/db/"
data_dir=$target_dir"/updates"
extracted_dir=$data_dir"/extracted"
WGET="/usr/bin/wget"
CURL="/usr/bin/curl"
TAR="/bin/tar"
fi
# Sleep for a random time, so we have less chance of a race condition
sleep $[ ( $RANDOM % 60 ) + 1 ]s
# Only secondary sites need to check the date
newest_date_local=`cat $data_dir"/newest_date.txt"`
if [ $? != "0" ]
then
echo Problem checking local file !
#exit
fi
# Variables
# Get location of this script, so we can call the last_file_time script
DIRNAME=`dirname $0`
SCRIPT_LOCATION=`perl -e 'END {use Cwd qw(realpath);print realpath($ARGV[0]);}' $DIRNAME`
newest_date_remote=`$CURL -s -f $site_location"newest_date.txt"`
if [ $? != "0" ]
then
# Primary site is updating or offline
echo Primary site is updating or offline
exit
fi
if [ $newest_date_remote == $newest_date_local ]
then
# No update needed
echo No update needed
exit
fi
fi
site_location="ftp://ftp.ncbi.nih.gov/pub/taxonomy/"
data_dir=${1-"/mnt/nexenta/reference/blast_latest/taxonomy.nobackup"}
extracted_dir=$data_dir"/extracted_latest.nobackup"
# Remove secondary checkfile
rm -f $data_dir"/newest_date.txt"
WGET="wget"
CURL="curl -s"
TAR="tar"
PRIMARY="true"
now=`date '+%Y_%m_%d_%A_%Hh%M'`
log_dir=$data_dir"/logs"
@@ -71,6 +29,7 @@ lockfile=$data_dir"/update_is_running.lock"
accept_list=""
reject_list="*.zip,*.Z,newest_date.txt"
wget_options="--proxy=off --mirror --no-parent --no-directories --level=1 --timestamping --passive-ftp --accept="$accept_list" -o "$logfile" --reject="$reject_list
tar_options="--no-same-owner --use-compress-program=gzip --keep-newer-files --directory=${extracted_dir} -xvf"
# make the needed directories
mkdir -p $data_dir
@@ -92,67 +51,32 @@ if test -e $lockfile
fi
}
trap remove_lock SIGINT
trap remove_lock SIGINT SIGQUIT SIGTERM ERR
# fetch the data
if test -e $lockfile
then
# update is running, so print a message and stop.
echo "Update is already running."
exit
# update is running, so print a message and stop.
echo "Update is already running."
exit
else
# lockfile does NOT exist, so we create the lockfile, run the update and remove the file.
touch $lockfile
# get the data
$WGET $wget_options $site_location
# Add return value to log file
echo "Return Value="$? >> $logfile
# Generate a timestamp, so we know what date the latest file was added
perl $SCRIPT_LOCATION"/last_file_time.pl" > $data_dir"/newest_date.txt"
# For secondary sites :
# Unpack the retrieved set, and remove the original archive
if [ $PRIMARY == "false" ]
then
for NAME in *.tar.gz
do
$TAR --directory=$extracted_dir -xzf $NAME
if [ $? != "0" ]
then
echo Problem unpacking $NAME
else
rm $NAME
fi
done
if [ $HOSTNAME == "apbioinf100.wurnet.nl" ]
then
perl "$DIRNAME"/fastcopy.pl --directory $extracted_dir
cluster-fork rsync --archive --include="/*" -e ssh `hostname`:$extracted_dir $extracted_dir
fi
if [ $HOSTNAME == "kwatta" ]
then
# Check if blast is running
while [ `/usr/bin/pgrep blastall > /dev/null;echo $?` != "1" ]
do
echo "Blast is running"
# Try again in an hour
sleep 3600
done
# Double check before move
/usr/bin/pgrep blastall
# If not, move the data to the target location
if [ $? == "1" ]
then
pushd $extracted_dir > /dev/null
mv * $target_dir
popd > /dev/null
else
echo "Blast was running"
fi
fi
fi
# remove the lockfile.
rm $lockfile
# done
exit
# lockfile does NOT exist, so we create the lockfile, run the update and remove the file.
touch $lockfile
# get the data
$WGET $wget_options $site_location
# Add return value to log file
echo "Return Value="$? >> $logfile
# Generate a timestamp, so we know what date the latest file was added
perl $SCRIPT_LOCATION"/last_file_time.pl" > $data_dir"/newest_date.txt"
# Unpack the retrieved set
for NAME in *.tar.gz
do
${TAR} ${tar_options} ${NAME} &>> $logfile
done
# remove the lockfile.
rm $lockfile
# done
exit
fi
#!/bin/bash
# This script fetches the UniVec database from ncbi
# written by Jan van Haarst, PRI
# needed : wget, curl & perl
# Sleep for a random time, so we have less chance of a race condition
sleep $[ ( $RANDOM % 60 ) + 1 ]s
# Variables
# Get location of this script, so we can call the last_file_time script
DIRNAME=`dirname $0`
SCRIPT_LOCATION=`perl -e 'END {use Cwd qw(realpath);print realpath($ARGV[0]);}' $DIRNAME`
site_location="ftp://ftp.ncbi.nih.gov/pub/UniVec/"
data_dir=${1-"/mnt/nexenta/reference/blast_latest/univec.nobackup"}
extracted_dir=$data_dir"/extracted_latest.nobackup"
# Remove secondary checkfile
rm -f $data_dir"/newest_date.txt"
WGET="wget"
CURL="curl -s"
TAR="tar"
PRIMARY="true"
now=`date '+%Y_%m_%d_%A_%Hh%M'`
log_dir=$data_dir"/logs"
logfile=$log_dir"/univeclog_"$now
lockfile=$data_dir"/update_is_running.lock"
accept_list=""
reject_list="*.zip,*.Z,newest_date.txt"
wget_options="--proxy=off --mirror --no-parent --no-directories --level=1 --timestamping --passive-ftp --accept="$accept_list" -o "$logfile" --reject="$reject_list
tar_options="--use-compress-program=gzip --keep-newer-files --directory=${extracted_dir} -xvf"
# make the needed directories
mkdir -p $data_dir
mkdir -p $log_dir
mkdir -p $extracted_dir
# change to datadir
cd $data_dir
# catch signals and remove lockfile
remove_lock()
{
if test -e $lockfile
then
# remove the update file.
rm $lockfile
exit
fi
}
trap remove_lock SIGINT SIGQUIT SIGTERM ERR
# fetch the data
if test -e $lockfile
then
# update is running, so print a message and stop.
echo "Update is already running."
exit
else
# lockfile does NOT exist, so we create the lockfile, run the update and remove the file.
touch $lockfile
# get the data
$WGET $wget_options $site_location
# Add return value to log file
echo "Return Value="$? >> $logfile
# Generate a timestamp, so we know what date the latest file was added
perl $SCRIPT_LOCATION"/last_file_time.pl" > $data_dir"/newest_date.txt"
# remove the lockfile.
rm $lockfile
# done
exit
fi
@@ -3,6 +3,10 @@
# written by Jan van Haarst, PRI
# needed : wget, curl & perl
# latest update 28 February 2007
# Sleep for a random time, so we have less chance of a race condition
sleep $[ ( $RANDOM % 60 ) + 1 ]s
# variables
# Get location of this script, so we can call the other script
DIRNAME=`dirname $0`
@@ -11,57 +15,58 @@ SCRIPT_LOCATION=`perl -e 'END {use Cwd qw(realpath);print realpath($ARGV[0]);}'
# Set site and data locations depending on where we are
if [ $HOSTNAME == "dev1" ] # Primary site
then
site_location="ftp://ftp.ncbi.nlm.nih.gov/blast/db/"
data_dir="/home/jvh/data/blast_latest/databases.nobackup"
extracted_dir=$data_dir"/extracted"
# Remove secondary checkfile
rm $data_dir"/newest_date.txt"
WGET="/home/jvh/bin/wget"
CURL="/usr/bin/curl -s"
TAR="/bin/tar"
PRIMARY="true"
site_location="ftp://ftp.ncbi.nlm.nih.gov/blast/db/"
data_dir="/home/jvh/data/blast_latest/databases.nobackup"
extracted_dir=$data_dir"/extracted"
# Remove secondary checkfile
rm $data_dir"/newest_date.txt"
WGET="/home/jvh/bin/wget"
CURL="/usr/bin/curl -s"
TAR="/bin/tar"
PRIMARY="true"
else # Secondary sites
site_location="http://dev1.ab.wurnet.nl/~jvh/databases.nobackup/"
PRIMARY="false"
if [ $HOSTNAME == "kwatta" ]
then
target_dir="/hwraid4/data/blast/databases"
data_dir=$target_dir"/updates"
extracted_dir=$data_dir"/extracted"
WGET="/usr/local/bin/wget"
CURL="/opt/sfw/bin/curl"
TAR="/usr/local/bin/tar"
else
target_dir="/state/partition1/blast/db/"
data_dir=$target_dir"/updates"
extracted_dir=$data_dir"/extracted"
WGET="/usr/bin/wget"
CURL="/usr/bin/curl"
TAR="/bin/tar"
fi
site_location="http://dev1.ab.wurnet.nl/~jvh/databases.nobackup/"
PRIMARY="false"
if [ $HOSTNAME == "kwatta" ]
then
target_dir="/hwraid4/data/blast/databases"
data_dir=$target_dir"/updates"
extracted_dir=$data_dir"/extracted"
WGET="/usr/local/bin/wget"
CURL="/opt/sfw/bin/curl"
TAR="/usr/local/bin/tar"
else
target_dir="/state/partition1/blast/db/"
data_dir=$target_dir"/updates"
extracted_dir=$data_dir"/extracted"
WGET="/usr/bin/wget"
CURL="/usr/bin/curl"
TAR="/bin/tar"
fi
# Only secondary sites need to check the date
newest_date_local=`cat $data_dir"/newest_date.txt"`
if [ $? != "0" ]
then
echo Problem checking local file !
#exit
fi
# Only secondary sites need to check the date
newest_date_remote=`$CURL -s -f $site_location"newest_date.txt"`
if [ $? != "0" ]
then
# Primary site is updating or offline
echo Primary site is updating or offline
exit
fi
newest_date_local=`cat $data_dir"/newest_date.txt"`
if [ $? != "0" ]
then
echo Problem checking local file !
newest_date_local=0
#exit
fi
newest_date_remote=`$CURL -s -f $site_location"newest_date.txt"`
if [ $? != "0" ]
then
# Primary site is updating or offline
#echo Primary site is updating or offline
exit
fi
if [ $newest_date_remote == $newest_date_local ]
then
# No update needed
echo No update needed
exit
fi
if [ $newest_date_remote == $newest_date_local ]
then
# No update needed
exit
fi
fi
now=`date '+%Y_%m_%d_%A_%Hh%M'`
@@ -97,62 +102,61 @@ trap remove_lock SIGINT
# fetch the data
if test -e $lockfile
then
# update is running, so print a message and stop.
echo "Update is already running."
exit
# update is running, so print a message and stop.
echo "Update is already running."
exit
else
# lockfile does NOT exist, so we create the lockfile, run the update and remove the file.
touch $lockfile
# get the data
$WGET $wget_options $site_location
# Add return value to log file
echo "Return Value="$? >> $logfile
# Generate a timestamp, so we know what date the latest file was added
perl $SCRIPT_LOCATION"/last_file_time.pl" > $data_dir"/newest_date.txt"
# For secondary sites :
# Unpack the retrieved set, and remove the original archive
if [ $PRIMARY == "false" ]
then
for NAME in *.tar.gz
do
$TAR --directory=$extracted_dir -xzf $NAME
if [ $? != "0" ]
then
echo Problem unpacking $NAME
else
rm $NAME
fi
done
if [ $HOSTNAME == "apbioinf100.wurnet.nl" ]
then
perl "$DIRNAME"/fastcopy.pl --directory $extracted_dir
cluster-fork rsync --archive --include="/*" -e ssh `hostname`:$extracted_dir $extracted_dir
fi
if [ $HOSTNAME == "kwatta" ]
then
# Check if blast is running
while [ `/usr/bin/pgrep blastall > /dev/null;echo $?` != "1" ]
do
echo "Blast is running"
# Try again in an hour
sleep 3600
done
# Double check before move
/usr/bin/pgrep blastall
# If not, move the data to the target location
if [ $? == "1" ]
then
pushd $extracted_dir > /dev/null
mv * $target_dir
popd > /dev/null
else
echo "Blast was running"
fi
fi
fi
# remove the lockfile.
rm $lockfile
# done
exit
# lockfile does NOT exist, so we create the lockfile, run the update and remove the file.
touch $lockfile
# get the data
$WGET $wget_options $site_location
# Add return value to log file
echo "Return Value="$? >> $logfile
# Generate a timestamp, so we know what date the latest file was added
perl $SCRIPT_LOCATION"/last_file_time.pl" > $data_dir"/newest_date.txt"
# For secondary sites :
# Unpack the retrieved set, and remove the original archive
if [ $PRIMARY == "false" ]
then
for NAME in *.tar.gz
do
$TAR --directory=$extracted_dir -xzf $NAME
if [ $? != "0" ]
then
echo Problem unpacking $NAME
else
rm $NAME
fi
done
if [ $HOSTNAME == "apbioinf100.wurnet.nl" ]
then
perl "$DIRNAME"/fastcopy.pl --directory $extracted_dir
cluster-fork rsync --archive --include="/*" -e ssh `hostname`:$extracted_dir $extracted_dir
fi
if [ $HOSTNAME == "kwatta" ]
then
# Check if blast is running
while [ `/usr/bin/pgrep blastall > /dev/null;echo $?` != "1" ]
do
echo "Blast is running"
# Try again in an hour
sleep 3600
done
# Double check before move
/usr/bin/pgrep blastall
# If not, move the data to the target location
if [ $? == "1" ]
then
pushd $extracted_dir > /dev/null
mv * $target_dir
popd > /dev/null
else
echo "Blast was running"
fi
fi
fi
# remove the lockfile.
rm $lockfile
# done
exit
fi
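The secondary-site branch above decides whether to fetch at all by comparing a newest_date.txt timestamp written on the primary (by last_file_time.pl) against its own copy. Reduced to its essentials, and with the variables assumed to be set as in the script, the check works like this:

    # Sketch of the primary/secondary freshness check used above.
    newest_date_local=$(cat "$data_dir/newest_date.txt" 2>/dev/null) || newest_date_local=0
    newest_date_remote=$($CURL -s -f "${site_location}newest_date.txt") || exit   # primary updating or offline
    if [ "$newest_date_remote" = "$newest_date_local" ]
    then
        exit   # nothing newer on the primary, so skip the download
    fi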
#!/bin/bash
# This script fetches nr & nt blast databases from ncbi
# written by Jan van Haarst, jan.vanhaarst@wur.nl
# needed : rsync, perl, tar, pigz
# Activate debugging from here
# set -o xtrace
# set -o verbose
# Safeguards
set -o nounset
set -o errexit
# Sleep for a random time, so we have less chance of a race condition
sleep $(( ( RANDOM % 60 ) + 1 ))s
# Variables
# Get location of this script, so we can call the last_file_time script
DIRNAME=$(dirname "$0")
SCRIPT_LOCATION="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
# Set site and data locations
site_location="ftp.ncbi.nlm.nih.gov::blast/db/*"
data_dir=${1-"/mnt/nexenta/reference/blast_latest/databases.nobackup/"}
extracted_dir=$data_dir"/extracted_latest.nobackup"
# Remove checkfile
rm -f "$data_dir""/newest_date.txt"
RSYNC="rsync"
TAR="tar"
now=$(date '+%Y_%m_%d_%A_%Hh%M')
log_dir=$data_dir"/logs"
logfile=$log_dir"/blastlog_"$now
lockfile=$data_dir"/update_is_running.lock"
accept_list=${SCRIPT_LOCATION}/rsync_include.txt
reject_list=${SCRIPT_LOCATION}/rsync_exclude.txt
rsync_options="--times --stats --copy-unsafe-links -z --compress-level=1 --log-file="${logfile}" --include-from="${accept_list}" --exclude-from="${reject_list}
tar_options="--use-compress-program=gzip --keep-newer-files --no-same-owner --directory=${extracted_dir} -xvf"
# make the needed directories
mkdir -vp "$data_dir"
mkdir -vp "$log_dir"
mkdir -vp "$extracted_dir"
# change to datadir
cd "$data_dir"
# catch signals and remove lockfile
remove_lock()
{
if test -e "$lockfile"
then
# remove the update file.
rm "$lockfile"
exit
fi
}
trap remove_lock SIGINT SIGQUIT SIGTERM ERR
# fetch the data
if test -e "$lockfile"
then
# update is running, so print a message and stop.
echo "Update is already running."
exit
else
# lockfile does NOT exist, so we create the lockfile, run the update and remove the file.
touch "$lockfile"
# Add hostname to log
hostname >> "$logfile"
# get the data
$RSYNC ${rsync_options} ${site_location} ${data_dir} >> $logfile
# Add return value to log file
echo "Return Value="$? >> "$logfile"
# Generate a timestamp, so we know what date the latest file was added
perl "$SCRIPT_LOCATION""/last_file_time.pl" > "$data_dir""/newest_date.txt"
# Unpack the retrieved set
for NAME in *.tar.gz
do
${TAR} ${tar_options} ${NAME} &>> $logfile
done
# remove the lockfile.
rm "$lockfile"
# done
exit
fi
#!/bin/bash
# Remove logs that are older than a week.
find /home/jvh/data/blast_latest -type f -name '*log*' -ctime +7 -exec rm {} \;
# Remove logs that are older than a month from the blast getter tree.
find /mnt/nexenta/reference/blast_latest -type f -wholename '*logs/blast*' -ctime +30 -exec rm -v {} \;
find /mnt/nexenta/reference/blast_latest -type f -wholename '*logs/taxlog*' -ctime +30 -exec rm -v {} \;
16SMicrobial*
cdd_delta*
env*
est*
gss*
htgs*
human*
mouse*
other*
pat*
pdb*
refseq*
sts*
swissprot*
tsa*
wgs*
*.html*
*.tZIK07
nt*
nr*
taxdb*
*.txt
*.htm*
16SMicrobial*
wgs*
\ No newline at end of file
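The pattern lists at the end appear to be the filter files the rsync-based fetcher points at via --include-from (nt*, nr*, taxdb*) and --exclude-from (everything else). Because the include rules are added to rsync's filter list before the exclude rules, a file matching nt* is transferred even if an exclude pattern would also match it. A hypothetical stand-alone invocation showing the same ordering (paths and the catch-all exclude are examples, not the repository's files):

    # Hypothetical demonstration of include-before-exclude ordering.
    printf '%s\n' 'nt*' 'nr*' 'taxdb*'  > /tmp/rsync_include.txt
    printf '%s\n' '*'                   > /tmp/rsync_exclude.txt   # drop everything not included above
    rsync --dry-run --times --stats --copy-unsafe-links \
        --include-from=/tmp/rsync_include.txt \
        --exclude-from=/tmp/rsync_exclude.txt \
        'ftp.ncbi.nlm.nih.gov::blast/db/*' /tmp/blast_db/

With --dry-run the command only lists what would be fetched, which makes it easy to check the filter files before a real transfer.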