#!/bin/bash
#===================================================================================================
# Submit a set of jobs to run over a given dataset, splitting the jobs according to the filesets.
#
# Version 1.0  November 14, 2008
#===================================================================================================
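#
# Arguments (names as assigned below; descriptions are inferred from how this script uses them):
#   $1 runMacro      ROOT macro to run, copied from $MIT_HGG_DIR/macros
#   $2 catalogDir    catalog directory containing <book>/<dataset>/Filesets
#   $3 book          dataset book
#   $4 dataset       dataset name
#   $5 skim          skim name, or "noskim" to use the unskimmed fileset list
#   $6 outputName    label used for the output and result directories
#   $7 outputDir     directory where the output ROOT files are written
#   $8 runTypeIndex  run-type index passed through to the job
#   $9 noStage       staging flag (read but not used further in this script)
#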
# Read the arguments
echo " "
echo "Starting data processing with arguments:"
echo " --> $*"

runMacro=$1
catalogDir=$2
book=$3
dataset=$4
skim=$5
outputName=$6
outputDir=$7
runTypeIndex=$8
noStage=$9

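# Job id (day-of-year, month, day, hour, minute, second) and the data directory taken from the
# last catalog line; neither value is used further down in this script.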
jobId=`date +%j%m%d%k%M%S`
dataDir=`tail -1 $catalogDir/$book/$dataset/Filesets | cut -d' ' -f2`

# Prepare environment
echo " "
echo " Process: dataset=$dataset, book=$book, catalog=$catalogDir"
#echo " "
workDir=/home/$USER/cms/condor
mkdir -p $workDir
cd $workDir
cp /home/$USER/cms/root/.rootlogon.C $workDir
cp $MIT_ANA_DIR/bin/run.sh $workDir
cp $MIT_HGG_DIR/macros/$runMacro $workDir
script=$workDir/run.sh
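# run.sh is the executable that condor will run for every job; it receives the macro name and
# the remaining arguments (see the Arguments line of submit.cmd below).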

# Create the directory for the results
mkdir -p $workDir/res/$outputName/$book/$dataset/

# Loop through each fileset and submit the condor jobs
echo " Submitting jobs to condor"

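# Pick the fileset catalog: each line of the Filesets file starts with the fileset name
# (first space-separated field); for a skim the list sits one directory level deeper.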
if [ "$skim" == "noskim" ]
then
  filesets=$catalogDir/$book/$dataset/Filesets
else
  filesets=$catalogDir/$book/$dataset/$skim/Filesets
fi

for fileset in `cat $filesets | cut -d' ' -f1 `
do
  # check if the output already exists
  rFile="$outputDir/$outputName/$book/$dataset"
  rFile=`ls $rFile/${outputName}_${dataset}_${skim}_${fileset}*.root 2> /dev/null`

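  # Skip filesets whose output ROOT file is already present (the check assumes at most one
  # matching file per fileset).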
  if [ -f "$rFile" ]
  then
    echo " File: $rFile exists already."
  else
    logFile=`echo $book/$dataset/$fileset | tr '/' '+'`
    logFile=/tmp/$USER/$logFile
    mkdir -p /tmp/$USER
    rm -f $logFile
    echo " $script $runMacro $catalogDir $book $dataset $skim $fileset $outputName $outputDir $runTypeIndex"

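    # Write the condor submit description for this fileset: vanilla universe on 64/32-bit Linux
    # machines with enough disk and memory, job stdout/stderr collected under $workDir/res/...,
    # and the condor log written to the per-fileset file in /tmp/$USER.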
    cat > submit.cmd <<EOF
Universe = vanilla
Requirements = ((Arch == "X86_64" || Arch == "INTEL") && (OpSys == "LINUX") && (Disk >= DiskUsage) && ((Memory * 1024) >= ImageSize) && (HasFileTransfer))
Notification = Error
Executable = $script
Arguments = $runMacro $catalogDir $book $dataset $skim $fileset $outputName $outputDir $runTypeIndex
Rank = Mips
GetEnv = True
Initialdir = $workDir
Input = /dev/null
Output = $workDir/res/$outputName/$book/$dataset/${skim}_${runTypeIndex}_${fileset}.out
Error = $workDir/res/$outputName/$book/$dataset/${skim}_${runTypeIndex}_${fileset}.err
Log = $logFile
should_transfer_files = YES
when_to_transfer_output = ON_EXIT
Queue
EOF

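    # The condor_submit output is suppressed; the echo above records what was submitted.
    # submit.cmd is rewritten for every fileset and removed after submission.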
    condor_submit submit.cmd >& /dev/null;
    rm submit.cmd
  fi
done

exit 0