root/cvsroot/UserCode/MitProd/Processing/bin/downloadFiles.sh
Revision: 1.4
Committed: Sat Mar 19 01:49:13 2011 UTC (14 years, 1 month ago) by paus
Content type: application/x-sh
Branch: MAIN
CVS Tags: Mit_025c_branch1, Mit_025c_branch0, Mit_025c, Mit_025b, Mit_025a, Mit_025, Mit_025pre2, Mit_024b, Mit_025pre1, Mit_024a, Mit_024, Mit_023, Mit_022a, Mit_022, Mit_020d, TMit_020d, Mit_020c, Mit_021, Mit_021pre2, Mit_021pre1, Mit_020b, Mit_020a, Mit_020
Branch point for: Mit_025c_branch
Changes since 1.3: +1 -1 lines
Log Message:
Small updates here and there.

File Contents

#!/bin/bash
#---------------------------------------------------------------------------------------------------
# Download a list of files
#---------------------------------------------------------------------------------------------------

# read the arguments
echo ""
echo "downloadFiles.sh $*"
echo ""
dataDir=$1; shift
book=$1; shift
dataset=$1; shift
target=$1; shift
condorOutput=$1; shift
first=$1; shift
last=$1; shift

# prepare environment
echo " "
echo " Process dataset: $dataset of book: $book"
echo "   in directory : $dataDir"
echo "   to target    : $target"
echo "   condor output: $condorOutput"
echo "   file range   : $first -- $last"

mkdir -p $condorOutput/$book/$dataset
script=`which downloadFile.sh`

# make sure the request is good
nFiles=`wc -l $condorOutput/$book/$dataset/fileList.txt | cut -d ' ' -f 1`
if [ $first -gt $nFiles ] || [ $last -gt $nFiles ]
then
  echo "Request makes no sense: nFiles=$nFiles but first=$first and last=$last"
  exit 0
fi

# see how many we do in this job
nFilesPerJob=$(($last - $first + 1))
fList=`head -$last $condorOutput/$book/$dataset/fileList.txt | tail -$nFilesPerJob | cut -d' ' -f 2`

echo LIST $fList

# spread the jobs out by a bit
#sleep $first

# loop through our list now
for file in $fList
do
  file=`basename $file`
  # find the line for this file in the file list and do further analysis
  line=`grep $file $condorOutput/$book/$dataset/fileList.txt`
  # extract the file size (first column of the list entry)
  export size=`echo $line | tr -s ' ' | cut -d ' ' -f 1`
  # now run the download
  echo "$script $dataDir/$book/$dataset/$file $target/$book/$dataset/$file"
  $script $dataDir/$book/$dataset/$file $target/$book/$dataset/$file
done

exit 0
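
A minimal usage sketch, assuming the seven positional arguments read at the top of the script and a fileList.txt (size in column 1, file path in column 2, as the script reads it) already present under $condorOutput/$book/$dataset. All paths and names below are hypothetical placeholders, not values from the repository:

  # hypothetical values only: dataDir, book, dataset, target, condorOutput, first, last
  ./downloadFiles.sh /some/source/dir somebook somedataset /some/target/dir /some/condor/output 1 50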