root/cvsroot/UserCode/MitProd/Processing/bin/downloadSample.sh
Revision: 1.13
Committed: Thu Aug 9 21:16:03 2012 UTC by paus
Content type: application/x-sh
Branch: MAIN
CVS Tags: Mit_032, Mit_031, Mit_030, Mit_029c, Mit_029b, Mit_030_pre1, Mit_029a, Mit_029, HEAD
Changes since 1.12: +0 -2 lines
Log Message:
For version 029.

File Contents

# User Rev Content
1 paus 1.1 #!/bin/bash
2     #---------------------------------------------------------------------------------------------------
3     # Download a list of files
4     #---------------------------------------------------------------------------------------------------
5    
6     # Read the arguments
7     echo ""
8 paus 1.11 echo " downloadSample.sh $*"
9 paus 1.1 echo ""
10     dataDir=$1; shift
11     book=$1; shift
12     dataset=$1; shift
13     target=$1; shift
14     nCopyProcs=$1; shift
15     condorOutput=$1; shift
16 paus 1.3 onlyMissing=$1; shift
17    
18     DN=`grid-proxy-info -subject`
19 paus 1.1
20     # Prepare environment
21     echo " "
22     echo " Process dataset: $dataset of book: $book"
23     echo " in directory : $dataDir"
24     echo " to target : $target"
25     echo " n copy procs : $nCopyProcs"
26     echo " condor output: $condorOutput"
27 paus 1.3 echo " only missing : $onlyMissing"
28 paus 1.1
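#     For example, a hypothetical invocation with the seven positional arguments
#     read above (all paths, book, and dataset names are illustrative only):
#       ./downloadSample.sh /castor/cern.ch/user/paus filefi/029 r12a-pho-j22-v1 \
#                           /mnt/hadoop/cms/store/user/paus 4 /home/cmsprod/condor 1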
29 paus 1.8 mkdir -p $condorOutput/$book/$dataset
30 paus 1.9 makedir --exe $target
31     makedir --exe $target/$book
32 paus 1.8 makedir --exe $target/$book/$dataset
33 paus 1.1 script=`which downloadFiles.sh`
34    
35 paus 1.3 # clean up our lists and remake clean ones
36 paus 1.11 #echo "rm -f $condorOutput/$book/$dataset/fileList*.$$.txt*"
37     rm -f $condorOutput/$book/$dataset/fileList*.$$.txt*
38 paus 1.1
39 paus 1.3 # make list of all files in the source directory
40 paus 1.4 if [ "`echo $HOSTNAME | grep mit.edu`" != "" ] && \
41 paus 1.12 ( [ "`echo $dataDir | grep /castor/cern.ch`" != "" ] || \
42     [ "`echo $target | grep /castor/cern.ch`" != "" ] )
43 paus 1.1 then
44 paus 1.3 opt="--simple"
45 paus 1.1 else
46 paus 1.3 opt=""
47 paus 1.1 fi
48    
49 paus 1.11 list $opt $dataDir/$book/$dataset | sort > $condorOutput/$book/$dataset/fileList-all.$$.txt-bak
50 paus 1.3
51 paus 1.11 # Make sure there are kerberos and globus tickets available
52 paus 1.3 id=`id -u`
53 paus 1.11 mkdir -p ~/.krb5/
54 paus 1.3 cp /tmp/x509up_u${id} ~/.krb5/
55 paus 1.1 KRB5CCNAME=`klist -5 | grep 'Ticket cache:' | cut -d' ' -f 3`
56     if ! [ -z $KRB5CCNAME ]
57     then
58 paus 1.12 mkdir -p ~/.krb5/
59 paus 1.1 chmod 0 ~/.krb5
60     chmod u=rwx ~/.krb5
61     file=`echo $KRB5CCNAME | cut -d: -f2`
62     if [ -f "$file" ]
63     then
64     cp $file ~/.krb5/ticket
65     else
66     echo " ERROR -- missing kerberos ticket ($KRB5CCNAME)."
67     exit 1
68     fi
69     else
70     echo " ERROR -- no kerberos ticket cache found (klist -5 returned nothing)."
71     fi
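#     For reference: `klist -5` typically prints a line like
#     "Ticket cache: FILE:/tmp/krb5cc_5410", so KRB5CCNAME above becomes
#     "FILE:/tmp/krb5cc_5410" and the cut on ':' extracts /tmp/krb5cc_5410,
#     which is then copied to ~/.krb5/ticket (the uid 5410 is illustrative).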
72    
73     # make list of all remote files
74 paus 1.11 rm -f $condorOutput/$book/$dataset/fileList-all.$$.txt
75     touch $condorOutput/$book/$dataset/fileList-all.$$.txt
76    
77     cat $condorOutput/$book/$dataset/fileList-all.$$.txt-bak | grep root | sort | \
78 paus 1.1 while read line
79     do
80     size=`echo $line | tr -s ' ' | cut -d ' ' -f 1`
81     file=`echo $line | tr -s ' ' | cut -d ' ' -f 2`
82     file=`basename $file`
83 paus 1.11 echo "$size $file" >> $condorOutput/$book/$dataset/fileList-all.$$.txt
84 paus 1.1 done
85    
86     # make list of all files already at the target
87 paus 1.9 if [ "`echo $HOSTNAME | grep mit.edu`" != "" ] && \
88     ( [ "`echo $dataDir | grep /castor/cern.ch`" != "" ] || \
89     [ "`echo $target | grep /castor/cern.ch`" != "" ] )
90 paus 1.1 then
91 paus 1.3 opt="--simple"
92     else
93     opt=""
94 paus 1.1 fi
95 paus 1.3
96 paus 1.12 list $opt $target/$book/$dataset | grep root | sort \
97 paus 1.11 > $condorOutput/$book/$dataset/fileList-done.$$.txt
98    
99     diff -y $condorOutput/$book/$dataset/fileList-all.$$.txt \
100     $condorOutput/$book/$dataset/fileList-done.$$.txt > diff.$$
101     echo ""
102     echo " Files different in size: "
103     grep \| diff.$$
104     echo ""
105     echo " Files available in all and not done: "
106     grep \< diff.$$
107     echo ""
108     echo " Files done but not listed in all available: "
109     grep \> diff.$$
110     echo ""
111     rm diff.$$
112 paus 1.1
113     # make list of missing files
114 paus 1.11 rm -f $condorOutput/$book/$dataset/fileList.$$.txt
115     touch $condorOutput/$book/$dataset/fileList.$$.txt
116    
117     cat $condorOutput/$book/$dataset/fileList-all.$$.txt | grep root | \
118 paus 1.1 while read line
119     do
120     size=`echo $line | tr -s ' ' | cut -d ' ' -f 1`
121     file=`echo $line | tr -s ' ' | cut -d ' ' -f 2`
122 paus 1.11 exists=`grep "$file" $condorOutput/$book/$dataset/fileList-done.$$.txt`
123 paus 1.1 if [ "$exists" == "" ]
124     then
125 paus 1.3 echo " -missing-- $file with $size bytes"
126 paus 1.11 echo "$size $file" >> $condorOutput/$book/$dataset/fileList.$$.txt
127 paus 1.1 # else
128 paus 1.3 # echo " -exists--- $file with $size bytes - exists"
129     else
130     # now check that size matches
131 paus 1.11 test=`grep "$size $file" $condorOutput/$book/$dataset/fileList-done.$$.txt`
132 paus 1.3 if [ "$test" == "" ]
133     then
134     if [ "$onlyMissing" == "" ]
135     then
136     echo " -fileSize- $exists (remote: $size)"
137 paus 1.11 echo "$size $file" >> $condorOutput/$book/$dataset/fileList.$$.txt
138 paus 1.3 fi
139     fi
140 paus 1.1 fi
141     done
142 paus 1.11
143     nAll=`wc -l $condorOutput/$book/$dataset/fileList-all.$$.txt | cut -d ' ' -f1`
144     nMissing=`wc -l $condorOutput/$book/$dataset/fileList.$$.txt | cut -d ' ' -f1`
145     nDone=`wc -l $condorOutput/$book/$dataset/fileList-done.$$.txt | cut -d ' ' -f1`
146 paus 1.10 echo ""
147     echo " Download Summary "
148     echo " All $nAll"
149     echo " Done $nDone"
150     echo " Missing $nMissing"
151     echo ""
152 paus 1.1
153     # construct our job
154 paus 1.11 nFiles=`wc -l $condorOutput/$book/$dataset/fileList.$$.txt | cut -d ' ' -f1`
155 paus 1.1 if [ "$nFiles" == "" ] || [ "$nFiles" == "0" ]
156     then
157     echo " "
158     echo " No more files to download. EXIT."
159     exit 0
160     elif [ $nFiles -lt $nCopyProcs ]
161     then
162     nCopyProcs=$nFiles
163     fi
164 paus 1.4 # how many files per job?
165 paus 1.1 nFilesPerJob=$(( $nFiles/$nCopyProcs ))
166 paus 1.4 nFilesTmp=$(( $nFilesPerJob*$nCopyProcs ))
167     if [ $nFilesPerJob == 1 ] && [ $nFiles -gt $nCopyProcs ]
168     then
169     nFilesPerJob=2
170     elif [ $nFilesTmp -lt $nFiles ]
171     then
172     nFilesPerJob=$(( $nFilesPerJob+1 ))
173     fi
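#     Worked example: with nFiles=10 and nCopyProcs=4 the integer division gives
#     nFilesPerJob=2; since 2*4=8 < 10 it is bumped to 3, so the loop below
#     submits jobs for files 1-3, 4-6, 7-9, and the last job picks up file 10.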
174    
175 paus 1.10 echo " n files to copy: $nFiles"
176     echo " n files/proc : $nFilesPerJob"
177 paus 1.1
178     i=1
179     next=1
180     last=$nFilesPerJob
181    
182 paus 1.3 # make sure condor is properly set up for us
183     if ! [ -z $CONDOR_LOCATION ]
184     then
185     unset CONDOR_LOCATION
186     export CONDOR_CONFIG=/usr/local/condor/etc/condor_config
187     fi
188    
189 paus 1.12 # stage in the missing files if the source is on castor at CERN
190     if [ "`echo $dataDir | grep /castor/cern.ch`" != "" ]
191     then
192     echo " scp $condorOutput/$book/$dataset/fileList.$$.txt $TICKET_HOLDER@lxplus.cern.ch:"
193     scp $condorOutput/$book/$dataset/fileList.$$.txt $TICKET_HOLDER@lxplus.cern.ch:
194     echo " ssh $TICKET_HOLDER@lxplus.cern.ch ./stageSample.py --dataDir=$dataDir/$book/$dataset --fileList=fileList.$$.txt"
195     ssh $TICKET_HOLDER@lxplus.cern.ch ./stageSample.py --dataDir=$dataDir/$book/$dataset --fileList=fileList.$$.txt
196     echo " ssh $TICKET_HOLDER@lxplus.cern.ch rm fileList.$$.txt"
197     ssh $TICKET_HOLDER@lxplus.cern.ch rm fileList.$$.txt
198     fi
199    
200 paus 1.3 # loop over the condor jobs and submit them
201 paus 1.4 while [ $i -le $nCopyProcs ] && [ $next -le $nFiles ]
202 paus 1.1 do
203     if [ $i == $nCopyProcs ] || [ $last -gt $nFiles ]
204     then
205     last=$nFiles
206     fi
207    
208     # say what we are going to submit
209 paus 1.11 echo " downloadFiles.sh $dataDir $book $dataset $target $condorOutput $$ $next $last"
210 paus 1.1
211     logFile=`echo download:$book/$dataset/${next}-${last}.txt | tr '/' '+'`
212     logFile=/tmp/$logFile
213     rm -f $logFile
214    
215     # prepare the condor_submit files
216     cat > submit_$$.cmd <<EOF
217     Universe = vanilla
218 paus 1.7 Requirements = ( (Arch == "INTEL") && (Disk >= DiskUsage) && ((Memory * 1024) >= ImageSize) && (HasFileTransfer) )
219 paus 1.12 Notify_user = $TICKET_HOLDER@mit.edu
220 paus 1.1 Notification = Error
221     Executable = $script
222 paus 1.11 Arguments = $dataDir $book $dataset $target $condorOutput $$ $next $last
223 paus 1.1 Rank = Mips
224     GetEnv = True
225     Input = /dev/null
226     Output = $condorOutput/$book/$dataset/${next}-${last}.out
227     Error = $condorOutput/$book/$dataset/${next}-${last}.err
228     Log = $logFile
229     should_transfer_files = YES
230     when_to_transfer_output = ON_EXIT
231 paus 1.3
232     +AccountingGroup = "group_cmsuser.cmsu0284"
233    
234 paus 1.1 Queue
235     EOF
236    
237 paus 1.3 #+x509userproxysubject = $DN
238    
239 paus 1.1 # submit the jobs
240     condor_submit submit_$$.cmd >& /dev/null #>& lastSub
241     #cat submit_$$.cmd
242     rm submit_$$.cmd
243    
244     # update counters
245     next=$(( $next + $nFilesPerJob ))
246     last=$(( $last + $nFilesPerJob ))
247     i=$(( $i + 1 ))
248     done
249    
250     exit 0
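
Once the jobs are queued they can be followed with the standard condor tools; for example (the book/dataset path below is purely illustrative):

  condor_q
  ls /home/cmsprod/condor/filefi/029/r12a-pho-j22-v1/*.out

Each job writes its stdout and stderr to $condorOutput/$book/$dataset/<first>-<last>.out and .err, as specified in the submit file generated above.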