
Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.3 by gutsche, Sun May 28 02:27:52 2006 UTC vs.
Revision 1.221 by fanzago, Wed Jun 18 14:02:42 2008 UTC

# Line 2 | Line 2 | from JobType import JobType
2   from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5 + from BlackWhiteListParser import BlackWhiteListParser
6   import common
6 import PsetManipulator  
7
8 import DBSInfo_EDM
9 #from DataDiscovery_EDM import DataDiscovery_EDM
10 import DataDiscovery_EDM
11 #from DataLocation_EDM import DataLocation_EDM
12 import DataLocation_EDM
7   import Scram
8 + from LFNBaseName import *
9  
10 < import os, string, re
10 > import os, string, glob
11  
12   class Cmssw(JobType):
13 <    def __init__(self, cfg_params):
13 >    def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
14          JobType.__init__(self, 'CMSSW')
15          common.logger.debug(3,'CMSSW::__init__')
16 +        self.skip_blocks = skip_blocks
17 +
18 +        self.argsList = []
19  
22        self.analisys_common_info = {}
23        # Marco.
20          self._params = {}
21          self.cfg_params = cfg_params
22 +        # init BlackWhiteListParser
23 +        self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
24 +
25 +        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
26 +
27 +        # number of jobs requested to be created, limit obj splitting
28 +        self.ncjobs = ncjobs
29  
30          log = common.logger
31 <        
31 >
32          self.scram = Scram.Scram(cfg_params)
30        scramArea = ''
33          self.additional_inbox_files = []
34          self.scriptExe = ''
35          self.executable = ''
36 +        self.executable_arch = self.scram.getArch()
37          self.tgz_name = 'default.tgz'
38 +        self.scriptName = 'CMSSW.sh'
39 +        self.pset = ''
40 +        self.datasetPath = ''
41  
42 +        # set FJR file name
43 +        self.fjrFileName = 'crab_fjr.xml'
44  
45          self.version = self.scram.getSWVersion()
46 <        common.analisys_common_info['sw_version'] = self.version
47 <        ### FEDE
48 <        common.analisys_common_info['copy_input_data'] = 0
49 <        common.analisys_common_info['events_management'] = 1
46 >        version_array = self.version.split('_')
47 >        self.CMSSW_major = 0
48 >        self.CMSSW_minor = 0
49 >        self.CMSSW_patch = 0
50 >        try:
51 >            self.CMSSW_major = int(version_array[1])
52 >            self.CMSSW_minor = int(version_array[2])
53 >            self.CMSSW_patch = int(version_array[3])
54 >        except:
55 >            msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
56 >            raise CrabException(msg)
57  
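For reference, a minimal sketch of the version parsing introduced above, assuming release names of the form CMSSW_<major>_<minor>_<patch>[...]; the helper name parse_cmssw_version is illustrative only (not CRAB code). The parsed numbers are what later drives choices such as pset.py vs. pset.cfg in wsSetupEnvironment.

    def parse_cmssw_version(version):
        # e.g. "CMSSW_2_0_12" -> ['CMSSW', '2', '0', '12']
        parts = version.split('_')
        try:
            return int(parts[1]), int(parts[2]), int(parts[3])
        except (IndexError, ValueError):
            raise ValueError("Cannot parse CMSSW version string: " + version)

    # parse_cmssw_version("CMSSW_2_0_12") == (2, 0, 12)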
58          ### collect Data cards
59 <        try:
60 <         #   self.owner = cfg_params['CMSSW.owner']
61 <         #   log.debug(6, "CMSSW::CMSSW(): owner = "+self.owner)
47 <         #   self.dataset = cfg_params['CMSSW.dataset']
48 <            self.datasetPath = cfg_params['CMSSW.datasetpath']
49 <            log.debug(6, "CMSSW::CMSSW(): datasetPath = "+self.datasetPath)
50 <        except KeyError:
51 <        #    msg = "Error: owner and/or dataset not defined "
52 <            msg = "Error: datasetpath not defined "  
59 >
60 >        if not cfg_params.has_key('CMSSW.datasetpath'):
61 >            msg = "Error: datasetpath not defined "
62              raise CrabException(msg)
63 <        self.dataTiers = []
64 < #       try:
65 < #           tmpDataTiers = string.split(cfg_params['CMSSW.data_tier'],',')
66 < #           for tmp in tmpDataTiers:
67 < #               tmp=string.strip(tmp)
68 < #               self.dataTiers.append(tmp)
69 < #               pass
70 < #           pass
71 < #       except KeyError:
72 < #           pass
73 < #       log.debug(6, "Cmssw::Cmssw(): dataTiers = "+str(self.dataTiers))
63 >        
64 >        ### Temporary: added to remove input file control in the case of PU
65 >        if not cfg_params.has_key('USER.dataset_pu'):
66 >            self.dataset_pu = 'NONE'
67 >        else:
68 >            self.dataset_pu = cfg_params['USER.dataset_pu']
69 >        ####    
70 >        
71 >        tmp =  cfg_params['CMSSW.datasetpath']
72 >        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
73 >        if string.lower(tmp)=='none':
74 >            self.datasetPath = None
75 >            self.selectNoInput = 1
76 >        else:
77 >            self.datasetPath = tmp
78 >            self.selectNoInput = 0
79  
80 +        self.dataTiers = []
81 +        self.debugWrap = ''
82 +        self.debug_wrapper = cfg_params.get('USER.debug_wrapper',False)
83 +        if self.debug_wrapper: self.debugWrap='--debug'
84          ## now the application
85 <        try:
86 <            self.executable = cfg_params['CMSSW.executable']
69 <            log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
70 <            msg = "Default executable cmsRun overridden. Switch to " + self.executable
71 <            log.debug(3,msg)
72 <        except KeyError:
73 <            self.executable = 'cmsRun'
74 <            msg = "User executable not defined. Use cmsRun"
75 <            log.debug(3,msg)
76 <            pass
85 >        self.executable = cfg_params.get('CMSSW.executable','cmsRun')
86 >        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
87  
88 <        try:
89 <            self.pset = cfg_params['CMSSW.pset']
90 <            log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
88 >        if not cfg_params.has_key('CMSSW.pset'):
89 >            raise CrabException("PSet file missing. Cannot run cmsRun ")
90 >        self.pset = cfg_params['CMSSW.pset']
91 >        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
92 >        if self.pset.lower() != 'none' :
93              if (not os.path.exists(self.pset)):
94                  raise CrabException("User defined PSet file "+self.pset+" does not exist")
95 <        except KeyError:
96 <            raise CrabException("PSet file missing. Cannot run cmsRun ")
95 >        else:
96 >            self.pset = None
97  
98          # output files
99 <        try:
100 <            self.output_file = []
99 >        ## stuff which must be returned always via sandbox
100 >        self.output_file_sandbox = []
101  
102 <            tmp = cfg_params['CMSSW.output_file']
103 <            if tmp != '':
92 <                tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
93 <                log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
94 <                for tmp in tmpOutFiles:
95 <                    tmp=string.strip(tmp)
96 <                    self.output_file.append(tmp)
97 <                    pass
98 <            else:
99 <                log.message("No output file defined: only stdout/err will be available")
100 <                pass
101 <            pass
102 <        except KeyError:
103 <            log.message("No output file defined: only stdout/err will be available")
104 <            pass
102 >        # add fjr report by default via sandbox
103 >        self.output_file_sandbox.append(self.fjrFileName)
104  
105 <        # script_exe file as additional file in inputSandbox
106 <        try:
107 <           self.scriptExe = cfg_params['USER.script_exe']
108 <           self.additional_inbox_files.append(self.scriptExe)
109 <        except KeyError:
110 <           pass
111 <        if self.scriptExe != '':
112 <           if os.path.isfile(self.scriptExe):
113 <              pass
115 <           else:
116 <              log.message("WARNING. file "+self.scriptExe+" not found")
117 <              sys.exit()
118 <                  
119 <        ## additional input files
120 <        try:
121 <            tmpAddFiles = string.split(cfg_params['CMSSW.additional_input_files'],',')
122 <            for tmp in tmpAddFiles:
123 <                if not os.path.exists(tmp):
124 <                    raise CrabException("Additional input file not found: "+tmp)
125 <                tmp=string.strip(tmp)
126 <                self.additional_inbox_files.append(tmp)
127 <                pass
128 <            pass
129 <        except KeyError:
130 <            pass
105 >        # other output files to be returned via sandbox or copied to SE
106 >        outfileflag = False
107 >        self.output_file = []
108 >        tmp = cfg_params.get('CMSSW.output_file',None)
109 >        if tmp :
110 >            self.output_file = [x.strip() for x in tmp.split(',')]
111 >            outfileflag = True #output found
112 >        #else:
113 >        #    log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
114  
115 <        try:
116 <            self.filesPerJob = int(cfg_params['CMSSW.files_per_jobs']) #Daniele
117 <        except KeyError:
118 <            self.filesPerJob = 1
115 >        # script_exe file as additional file in inputSandbox
116 >        self.scriptExe = cfg_params.get('USER.script_exe',None)
117 >        if self.scriptExe :
118 >            if not os.path.isfile(self.scriptExe):
119 >                msg ="ERROR. file "+self.scriptExe+" not found"
120 >                raise CrabException(msg)
121 >            self.additional_inbox_files.append(string.strip(self.scriptExe))
122  
123 <        ## Max event   will be total_number_of_events ???  Daniele
124 <        try:
139 <            self.maxEv = cfg_params['CMSSW.event_per_job']
140 <        except KeyError:
141 <            self.maxEv = "-1"
142 <        ##  
143 <        try:
144 <            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
145 <        except KeyError:
146 <            msg = 'Must define total_number_of_events'
123 >        if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
124 >            msg ="Error. script_exe  not defined"
125              raise CrabException(msg)
148        
149        CEBlackList = []
150        try:
151            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
152            for tmp in tmpBad:
153                tmp=string.strip(tmp)
154                CEBlackList.append(tmp)
155        except KeyError:
156            pass
157
158        self.reCEBlackList=[]
159        for bad in CEBlackList:
160            self.reCEBlackList.append(re.compile( bad ))
126  
127 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
127 >        # use parent files...
128 >        self.useParent = self.cfg_params.get('CMSSW.use_parent',False)
129  
130 <        CEWhiteList = []
131 <        try:
132 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
133 <            for tmp in tmpGood:
134 <                tmp=string.strip(tmp)
135 <                CEWhiteList.append(tmp)
136 <        except KeyError:
130 >        ## additional input files
131 >        if cfg_params.has_key('USER.additional_input_files'):
132 >            tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
133 >            for tmp in tmpAddFiles:
134 >                tmp = string.strip(tmp)
135 >                dirname = ''
136 >                if not tmp[0]=="/": dirname = "."
137 >                files = []
138 >                if string.find(tmp,"*")>-1:
139 >                    files = glob.glob(os.path.join(dirname, tmp))
140 >                    if len(files)==0:
141 >                        raise CrabException("No additional input file found with this pattern: "+tmp)
142 >                else:
143 >                    files.append(tmp)
144 >                for file in files:
145 >                    if not os.path.exists(file):
146 >                        raise CrabException("Additional input file not found: "+file)
147 >                    pass
148 >                    self.additional_inbox_files.append(string.strip(file))
149 >                pass
150              pass
151 +            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
152 +        pass
153  
154 <        #print 'CEWhiteList: ',CEWhiteList
155 <        self.reCEWhiteList=[]
156 <        for Good in CEWhiteList:
157 <            self.reCEWhiteList.append(re.compile( Good ))
154 >        ## Events per job
155 >        if cfg_params.has_key('CMSSW.events_per_job'):
156 >            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
157 >            self.selectEventsPerJob = 1
158 >        else:
159 >            self.eventsPerJob = -1
160 >            self.selectEventsPerJob = 0
161  
162 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
162 >        ## number of jobs
163 >        if cfg_params.has_key('CMSSW.number_of_jobs'):
164 >            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
165 >            self.selectNumberOfJobs = 1
166 >        else:
167 >            self.theNumberOfJobs = 0
168 >            self.selectNumberOfJobs = 0
169  
170 <        self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
170 >        if cfg_params.has_key('CMSSW.total_number_of_events'):
171 >            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
172 >            self.selectTotalNumberEvents = 1
173 >            if self.selectNumberOfJobs  == 1:
174 >                if (self.total_number_of_events != -1) and int(self.total_number_of_events) < int(self.theNumberOfJobs):
175 >                    msg = 'Must specify at least one event per job. total_number_of_events > number_of_jobs '
176 >                    raise CrabException(msg)
177 >        else:
178 >            self.total_number_of_events = 0
179 >            self.selectTotalNumberEvents = 0
180 >
181 >        if self.pset != None:
182 >             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
183 >                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
184 >                 raise CrabException(msg)
185 >        else:
186 >             if (self.selectNumberOfJobs == 0):
187 >                 msg = 'Must specify  number_of_jobs.'
188 >                 raise CrabException(msg)
189 >
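To make the splitting rule above concrete: when a pset is used, exactly two of total_number_of_events, events_per_job and number_of_jobs may be given. A small illustrative check (assumed helper, not CRAB code), using plain keyword arguments in place of the class attributes:

    def check_splitting(total_events=None, events_per_job=None, number_of_jobs=None):
        # exactly two of the three splitting parameters must be set
        n_set = sum(1 for v in (total_events, events_per_job, number_of_jobs) if v is not None)
        if n_set != 2:
            raise ValueError("Must define exactly two of total_number_of_events, "
                             "events_per_job, or number_of_jobs.")

    # check_splitting(total_events=1000, events_per_job=100)   # OK
    # check_splitting(total_events=1000)                        # raises ValueError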
190 >        ## New method of dealing with seeds
191 >        self.incrementSeeds = []
192 >        self.preserveSeeds = []
193 >        if cfg_params.has_key('CMSSW.preserve_seeds'):
194 >            tmpList = cfg_params['CMSSW.preserve_seeds'].split(',')
195 >            for tmp in tmpList:
196 >                tmp.strip()
197 >                self.preserveSeeds.append(tmp)
198 >        if cfg_params.has_key('CMSSW.increment_seeds'):
199 >            tmpList = cfg_params['CMSSW.increment_seeds'].split(',')
200 >            for tmp in tmpList:
201 >                tmp.strip()
202 >                self.incrementSeeds.append(tmp)
203 >
204 >        ## Old method of dealing with seeds
205 >        ## FUTURE: This is for old CMSSW and old CRAB. Can throw exceptions after a couple of CRAB releases and then
206 >        ## remove
207 >        self.sourceSeed = cfg_params.get('CMSSW.pythia_seed',None)
208 >        if self.sourceSeed:
209 >            print "pythia_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
210 >            self.incrementSeeds.append('sourceSeed')
211 >            self.incrementSeeds.append('theSource')
212 >
213 >        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
214 >        if self.sourceSeedVtx:
215 >            print "vtx_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
216 >            self.incrementSeeds.append('VtxSmeared')
217 >
218 >        self.sourceSeedG4 = cfg_params.get('CMSSW.g4_seed',None)
219 >        if self.sourceSeedG4:
220 >            print "g4_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
221 >            self.incrementSeeds.append('g4SimHits')
222 >
223 >        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
224 >        if self.sourceSeedMix:
225 >            print "mix_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
226 >            self.incrementSeeds.append('mix')
227 >
228 >        self.firstRun = cfg_params.get('CMSSW.first_run',None)
229 >
230 >
231 >        # Copy/return
232 >        self.copy_data = int(cfg_params.get('USER.copy_data',0))
233 >        self.return_data = int(cfg_params.get('USER.return_data',0))
234  
235          #DBSDLS-start
236 <        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
236 >        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
237          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
238          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
239 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
240          ## Perform the data location and discovery (based on DBS/DLS)
241 <        self.DataDiscoveryAndLocation(cfg_params)
242 <        #DBSDLS-end          
243 <
244 <        self.tgzNameWithPath = self.getTarBall(self.executable)
241 >        ## SL: Don't if NONE is specified as input (pythia use case)
242 >        blockSites = {}
243 >        if self.datasetPath:
244 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
245 >        #DBSDLS-end
246 >
247 >        ## Select Splitting
248 >        if self.selectNoInput:
249 >            if self.pset == None:
250 >                self.jobSplittingForScript()
251 >            else:
252 >                self.jobSplittingNoInput()
253 >        else:
254 >            self.jobSplittingByBlocks(blockSites)
255  
256 <        self.jobSplitting()  #Daniele job Splitting
257 <        self.PsetEdit.maxEvent(self.maxEv) #Daniele  
258 <        self.PsetEdit.inputModule("INPUT") #Daniele  
259 <        self.PsetEdit.psetWriter(self.configFilename())
256 >        # modify Pset only the first time
257 >        if isNew:
258 >            if self.pset != None:
259 >                import PsetManipulator as pp
260 >                PsetEdit = pp.PsetManipulator(self.pset)
261 >                try:
262 >                    # Add FrameworkJobReport to parameter-set, set max events.
263 >                    # Reset later for data jobs by writeCFG which does all modifications
264 >                    PsetEdit.addCrabFJR(self.fjrFileName) # FUTURE: Job report addition not needed by CMSSW>1.5
265 >                    PsetEdit.maxEvent(self.eventsPerJob)
266 >                    PsetEdit.psetWriter(self.configFilename())
267 >                    ## If present, add TFileService to output files
268 >                    if not int(cfg_params.get('CMSSW.skip_TFileService_output',0)):
269 >                        tfsOutput = PsetEdit.getTFileService()
270 >                        if tfsOutput:
271 >                            if tfsOutput in self.output_file:
272 >                                common.logger.debug(5,"Output from TFileService "+tfsOutput+" already in output files")
273 >                            else:
274 >                                outfileflag = True #output found
275 >                                self.output_file.append(tfsOutput)
276 >                                common.logger.message("Adding "+tfsOutput+" to output files (from TFileService)")
277 >                            pass
278 >                        pass
279 >                    ## If present and requested, add PoolOutputModule to output files
280 >                    if int(cfg_params.get('CMSSW.get_edm_output',0)):
281 >                        edmOutput = PsetEdit.getPoolOutputModule()
282 >                        if edmOutput:
283 >                            if edmOutput in self.output_file:
284 >                                common.logger.debug(5,"Output from PoolOutputModule "+edmOutput+" already in output files")
285 >                            else:
286 >                                self.output_file.append(edmOutput)
287 >                                common.logger.message("Adding "+edmOutput+" to output files (from PoolOutputModule)")
288 >                            pass
289 >                        pass
290 >                except CrabException:
291 >                    msg='Error while manipulating ParameterSet: exiting...'
292 >                    raise CrabException(msg)
293 >            ## Prepare inputSandbox TarBall (only the first time)  
294 >            self.tgzNameWithPath = self.getTarBall(self.executable)
295  
296      def DataDiscoveryAndLocation(self, cfg_params):
297  
298 +        import DataDiscovery
299 +        import DataLocation
300          common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
301  
201        #datasetPath = "/"+self.owner+"/"+self.dataTiers[0]+"/"+self.dataset
202        
302          datasetPath=self.datasetPath
303  
205        ## TODO
206        dataTiersList = ""
207        dataTiers = dataTiersList.split(',')
208
304          ## Contact the DBS
305 +        common.logger.message("Contacting Data Discovery Services ...")
306          try:
307 <            self.pubdata=DataDiscovery_EDM.DataDiscovery_EDM(datasetPath, dataTiers, dataTiers)
307 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
308              self.pubdata.fetchDBSInfo()
309  
310 <        except DataDiscovery_EDM.NotExistingDatasetError, ex :
310 >        except DataDiscovery.NotExistingDatasetError, ex :
311              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
312              raise CrabException(msg)
313 <
218 <        except DataDiscovery_EDM.NoDataTierinProvenanceError, ex :
313 >        except DataDiscovery.NoDataTierinProvenanceError, ex :
314              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
315              raise CrabException(msg)
316 <        except DataDiscovery_EDM.DataDiscoveryError, ex:
317 <            msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
316 >        except DataDiscovery.DataDiscoveryError, ex:
317 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
318              raise CrabException(msg)
319  
320 <        ## get list of all required data in the form of dbs paths  (dbs path = /dataset/datatier/owner)
321 <        ## self.DBSPaths=self.pubdata.getDBSPaths()
322 <        common.logger.message("Required data are :"+self.datasetPath)
323 <
229 <        filesbyblock=self.pubdata.getFiles()
230 <        self.AllInputFiles=filesbyblock.values()
231 <        self.files = self.AllInputFiles        
232 <
233 <        ## TEMP
234 <    #    self.filesTmp = filesbyblock.values()
235 <    #    self.files = []
236 <    #    locPath='rfio:cmsbose2.bo.infn.it:/flatfiles/SE00/cms/fanfani/ProdTest/'
237 <    #    locPath=''
238 <    #    tmp = []
239 <    #    for file in self.filesTmp[0]:
240 <    #        tmp.append(locPath+file)
241 <    #    self.files.append(tmp)
242 <        ## END TEMP
320 >        self.filesbyblock=self.pubdata.getFiles()
321 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
322 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
323 >        self.parentFiles=self.pubdata.getParent()
324  
325          ## get max number of events
326 <        #common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
246 <        self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
247 <        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
326 >        self.maxEvents=self.pubdata.getMaxEvents()
327  
328          ## Contact the DLS and build a list of sites hosting the fileblocks
329          try:
330 <            dataloc=DataLocation_EDM.DataLocation_EDM(filesbyblock.keys(),cfg_params)
330 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
331              dataloc.fetchDLSInfo()
332 <        except DataLocation_EDM.DataLocationError , ex:
332 >        except DataLocation.DataLocationError , ex:
333              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
334              raise CrabException(msg)
256        
257        allsites=dataloc.getSites()
258        common.logger.debug(5,"sites are %s"%allsites)
259        sites=self.checkBlackList(allsites)
260        common.logger.debug(5,"sites are (after black list) %s"%sites)
261        sites=self.checkWhiteList(sites)
262        common.logger.debug(5,"sites are (after white list) %s"%sites)
335  
336 <        if len(sites)==0:
337 <            msg = 'No sites hosting all the needed data! Exiting... '
336 >
337 >        sites = dataloc.getSites()
338 >        allSites = []
339 >        listSites = sites.values()
340 >        for listSite in listSites:
341 >            for oneSite in listSite:
342 >                allSites.append(oneSite)
343 >        allSites = self.uniquelist(allSites)
344 >
345 >        # screen output
346 >        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
347 >
348 >        return sites
349 >
350 >    def jobSplittingByBlocks(self, blockSites):
351 >        """
352 >        Perform job splitting. Jobs run over an integer number of files
353 >        and no more than one block.
354 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
355 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberOfJobs,
356 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
357 >                  self.maxEvents, self.filesbyblock
358 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
359 >              self.total_number_of_jobs - Total # of jobs
360 >              self.list_of_args - File(s) job will run on (a list of lists)
361 >        """
362 >
363 >        # ---- Handle the possible job splitting configurations ---- #
364 >        if (self.selectTotalNumberEvents):
365 >            totalEventsRequested = self.total_number_of_events
366 >        if (self.selectEventsPerJob):
367 >            eventsPerJobRequested = self.eventsPerJob
368 >            if (self.selectNumberOfJobs):
369 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
370 >
371 >        # If user requested all the events in the dataset
372 >        if (totalEventsRequested == -1):
373 >            eventsRemaining=self.maxEvents
374 >        # If user requested more events than are in the dataset
375 >        elif (totalEventsRequested > self.maxEvents):
376 >            eventsRemaining = self.maxEvents
377 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
378 >        # If user requested less events than are in the dataset
379 >        else:
380 >            eventsRemaining = totalEventsRequested
381 >
382 >        # If user requested more events per job than are in the dataset
383 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
384 >            eventsPerJobRequested = self.maxEvents
385 >
386 >        # For user info at end
387 >        totalEventCount = 0
388 >
389 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
390 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
391 >
392 >        if (self.selectNumberOfJobs):
393 >            common.logger.message("May not create the exact number_of_jobs requested.")
394 >
395 >        if ( self.ncjobs == 'all' ) :
396 >            totalNumberOfJobs = 999999999
397 >        else :
398 >            totalNumberOfJobs = self.ncjobs
399 >
400 >        blocks = blockSites.keys()
401 >        blockCount = 0
402 >        # Backup variable in case self.maxEvents counted events in a non-included block
403 >        numBlocksInDataset = len(blocks)
404 >
405 >        jobCount = 0
406 >        list_of_lists = []
407 >
408 >        # list tracking which jobs belong to which block
409 >        jobsOfBlock = {}
410 >
411 >        # ---- Iterate over the blocks in the dataset until ---- #
412 >        # ---- we've met the requested total # of events    ---- #
413 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
414 >            block = blocks[blockCount]
415 >            blockCount += 1
416 >            if block not in jobsOfBlock.keys() :
417 >                jobsOfBlock[block] = []
418 >
419 >            if self.eventsbyblock.has_key(block) :
420 >                numEventsInBlock = self.eventsbyblock[block]
421 >                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
422 >
423 >                files = self.filesbyblock[block]
424 >                numFilesInBlock = len(files)
425 >                if (numFilesInBlock <= 0):
426 >                    continue
427 >                fileCount = 0
428 >
429 >                # ---- New block => New job ---- #
430 >                parString = ""
431 >                # counter for number of events in files currently worked on
432 >                filesEventCount = 0
433 >                # flag if next while loop should touch new file
434 >                newFile = 1
435 >                # job event counter
436 >                jobSkipEventCount = 0
437 >
438 >                # ---- Iterate over the files in the block until we've met the requested ---- #
439 >                # ---- total # of events or we've gone over all the files in this block  ---- #
440 >                pString=''
441 >                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
442 >                    file = files[fileCount]
443 >                    if self.useParent:
444 >                        parent = self.parentFiles[file]
445 >                        for f in parent :
446 >                            pString += '\\\"' + f + '\\\"\,'
447 >                        common.logger.debug(6, "File "+str(file)+" has the following parents: "+str(parent))
448 >                        common.logger.write("File "+str(file)+" has the following parents: "+str(parent))
449 >                    if newFile :
450 >                        try:
451 >                            numEventsInFile = self.eventsbyfile[file]
452 >                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
453 >                            # increase filesEventCount
454 >                            filesEventCount += numEventsInFile
455 >                            # Add file to current job
456 >                            parString += '\\\"' + file + '\\\"\,'
457 >                            newFile = 0
458 >                        except KeyError:
459 >                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
460 >
461 >                    eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining)
462 >                    # if less events in file remain than eventsPerJobRequested
463 >                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested):
464 >                        # if last file in block
465 >                        if ( fileCount == numFilesInBlock-1 ) :
466 >                            # end job using last file, use remaining events in block
467 >                            # close job and touch new file
468 >                            fullString = parString[:-2]
469 >                            if self.useParent:
470 >                                fullParentString = pString[:-2]
471 >                                list_of_lists.append([fullString,fullParentString,str(-1),str(jobSkipEventCount)])
472 >                            else:
473 >                                list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
474 >                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
475 >                            self.jobDestination.append(blockSites[block])
476 >                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
477 >                            # fill jobs of block dictionary
478 >                            jobsOfBlock[block].append(jobCount+1)
479 >                            # reset counter
480 >                            jobCount = jobCount + 1
481 >                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
482 >                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
483 >                            jobSkipEventCount = 0
484 >                            # reset file
485 >                            pString = ""
486 >                            parString = ""
487 >                            filesEventCount = 0
488 >                            newFile = 1
489 >                            fileCount += 1
490 >                        else :
491 >                            # go to next file
492 >                            newFile = 1
493 >                            fileCount += 1
494 >                    # if events in file equal to eventsPerJobRequested
495 >                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
496 >                        # close job and touch new file
497 >                        fullString = parString[:-2]
498 >                        if self.useParent:
499 >                            fullParentString = pString[:-2]
500 >                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
501 >                        else:
502 >                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
503 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
504 >                        self.jobDestination.append(blockSites[block])
505 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
506 >                        jobsOfBlock[block].append(jobCount+1)
507 >                        # reset counter
508 >                        jobCount = jobCount + 1
509 >                        totalEventCount = totalEventCount + eventsPerJobRequested
510 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
511 >                        jobSkipEventCount = 0
512 >                        # reset file
513 >                        pString = ""
514 >                        parString = ""
515 >                        filesEventCount = 0
516 >                        newFile = 1
517 >                        fileCount += 1
518 >
519 >                    # if more events in file remain than eventsPerJobRequested
520 >                    else :
521 >                        # close job but don't touch new file
522 >                        fullString = parString[:-2]
523 >                        if self.useParent:
524 >                            fullParentString = pString[:-2]
525 >                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
526 >                        else:
527 >                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
528 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
529 >                        self.jobDestination.append(blockSites[block])
530 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
531 >                        jobsOfBlock[block].append(jobCount+1)
532 >                        # increase counter
533 >                        jobCount = jobCount + 1
534 >                        totalEventCount = totalEventCount + eventsPerJobRequested
535 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
536 >                        # calculate skip events for last file
537 >                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
538 >                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
539 >                        # remove all but the last file
540 >                        filesEventCount = self.eventsbyfile[file]
541 >                        if self.useParent:
542 >                            for f in parent : pString += '\\\"' + f + '\\\"\,'
543 >                        parString = '\\\"' + file + '\\\"\,'
544 >                    pass # END if
545 >                pass # END while (iterate over files in the block)
546 >        pass # END while (iterate over blocks in the dataset)
547 >        self.ncjobs = self.total_number_of_jobs = jobCount
548 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
549 >            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
550 >        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
551 >
552 >        # screen output
553 >        screenOutput = "List of jobs and available destination sites:\n\n"
554 >
555 >        # keep trace of block with no sites to print a warning at the end
556 >        noSiteBlock = []
557 >        bloskNoSite = []
558 >
559 >        blockCounter = 0
560 >        for block in blocks:
561 >            if block in jobsOfBlock.keys() :
562 >                blockCounter += 1
563 >                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),
564 >                    ','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
565 >                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
566 >                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
567 >                    bloskNoSite.append( blockCounter )
568 >
569 >        common.logger.message(screenOutput)
570 >        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
571 >            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
572 >            virgola = ""
573 >            if len(bloskNoSite) > 1:
574 >                virgola = ","
575 >            for block in bloskNoSite:
576 >                msg += ' ' + str(block) + virgola
577 >            msg += '\n               Related jobs:\n                 '
578 >            virgola = ""
579 >            if len(noSiteBlock) > 1:
580 >                virgola = ","
581 >            for range_jobs in noSiteBlock:
582 >                msg += str(range_jobs) + virgola
583 >            msg += '\n               will not be submitted and this block of data can not be analyzed!\n'
584 >            if self.cfg_params.has_key('EDG.se_white_list'):
585 >                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
586 >                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
587 >                msg += 'Please check if the dataset is available at this site!)\n'
588 >            if self.cfg_params.has_key('EDG.ce_white_list'):
589 >                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
590 >                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
591 >                msg += 'Please check if the dataset is available at this site!)\n'
592 >
593 >            common.logger.message(msg)
594 >
595 >        self.list_of_args = list_of_lists
596 >        return
597 >
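For orientation, each entry appended to list_of_lists above is one job's argument set: the file list, optionally the parent-file list, the maximum number of events for the job (-1 appears to mean "use the remaining events", used for the last job in a block), and the events to skip in the first file. A simplified sketch of that packing (assumed helper, plain comma-joined strings instead of the escaped shell quoting used in the real code):

    def make_job_args(files, max_events, skip_events, parents=None):
        # files / parents: lists of LFNs; max_events = -1 means
        # "run over the remaining events in the block"
        record = [','.join(files)]
        if parents:
            record.append(','.join(parents))
        record.append(str(max_events))
        record.append(str(skip_events))
        return record

    # make_job_args(['f1.root', 'f2.root'], 100, 30)
    #   -> ['f1.root,f2.root', '100', '30']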
598 >    def jobSplittingNoInput(self):
599 >        """
600 >        Perform job splitting based on number of events per job
601 >        """
602 >        common.logger.debug(5,'Splitting per events')
603 >
604 >        if (self.selectEventsPerJob):
605 >            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
606 >        if (self.selectNumberOfJobs):
607 >            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
608 >        if (self.selectTotalNumberEvents):
609 >            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
610 >
611 >        if (self.total_number_of_events < 0):
612 >            msg='Cannot split jobs per Events with "-1" as total number of events'
613              raise CrabException(msg)
614  
615 <        common.logger.message("List of Sites hosting the data : "+str(sites))
616 <        common.logger.debug(6, "List of Sites: "+str(sites))
617 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
615 >        if (self.selectEventsPerJob):
616 >            if (self.selectTotalNumberEvents):
617 >                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
618 >            elif(self.selectNumberOfJobs) :
619 >                self.total_number_of_jobs =self.theNumberOfJobs
620 >                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
621 >
622 >        elif (self.selectNumberOfJobs) :
623 >            self.total_number_of_jobs = self.theNumberOfJobs
624 >            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
625 >
626 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
627 >
628 >        # is there any remainder?
629 >        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
630 >
631 >        common.logger.debug(5,'Check  '+str(check))
632 >
633 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
634 >        if check > 0:
635 >            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
636 >
637 >        # argument is seed number.$i
638 >        self.list_of_args = []
639 >        for i in range(self.total_number_of_jobs):
640 >            ## Since there is no input, any site is good
641 >            self.jobDestination.append([""]) #must be empty to write correctly the xml
642 >            args=[]
643 >            if (self.firstRun):
644 >                ## pythia first run
645 >                args.append(str(self.firstRun)+str(i))
646 >            self.list_of_args.append(args)
647 >
648          return
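As a worked example of the event-based splitting above (numbers are illustrative): with total_number_of_events = 1000 and events_per_job = 300, the code creates int(1000/300) = 3 jobs of 300 events each, covering 900 events; the remainder check then warns that only 900 of the requested 1000 events can be done.

    # Illustrative arithmetic behind jobSplittingNoInput (not CRAB code)
    total_number_of_events = 1000
    events_per_job = 300
    total_number_of_jobs = int(total_number_of_events / events_per_job)      # 3 jobs
    check = total_number_of_events - total_number_of_jobs * events_per_job   # 100 events left over
    if check > 0:
        print("Warning: asked %d but can do only %d"
              % (total_number_of_events, total_number_of_jobs * events_per_job))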
649 <    
650 <    def jobSplitting(self):
649 >
650 >
651 >    def jobSplittingForScript(self):
652          """
653 <        first implemntation for job splitting  
654 <        """    
655 <      #  print 'eventi totali '+str(self.maxEvents)
656 <      #  print 'eventi totali richiesti dallo user '+str(self.total_number_of_events)
279 <        #print 'files per job '+str(self.filesPerJob)
280 <        common.logger.message('Required '+str(self.filesPerJob)+' files per job ')
281 <        common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
282 <
283 <        ## TODO: SL need to have (from DBS) a detailed list of how many events per each file
284 <        n_tot_files = (len(self.files[0]))
285 <        ## SL: this is wrong if the files have different number of events
286 <        evPerFile = int(self.maxEvents)/n_tot_files
287 <        
288 <        common.logger.debug(5,'Events per File '+str(evPerFile))
653 >        Perform job splitting based on the number of jobs
654 >        """
655 >        common.logger.debug(5,'Splitting per job')
656 >        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
657  
658 <        ## if asked to process all events, do it
291 <        if self.total_number_of_events == -1:
292 <            self.total_number_of_events=self.maxEvents
293 <            self.total_number_of_jobs = int(n_tot_files)*1/int(self.filesPerJob)
294 <            common.logger.message(str(self.total_number_of_jobs)+' jobs will be created for all available events '+str(self.total_number_of_events)+' events')
295 <        
296 <        else:
297 <            self.total_number_of_files = int(self.total_number_of_events/evPerFile)
298 <            ## SL: if ask for less event than what is computed to be available on a
299 <            ##     file, process the first file anyhow.
300 <            if self.total_number_of_files == 0:
301 <                self.total_number_of_files = self.total_number_of_files + 1
302 <
303 <            common.logger.debug(5,'N files  '+str(self.total_number_of_files))
304 <
305 <            check = 0
306 <            
307 <            ## Compute the number of jobs
308 <            #self.total_number_of_jobs = int(n_tot_files)*1/int(self.filesPerJob)
309 <            self.total_number_of_jobs = int(self.total_number_of_files/self.filesPerJob)
310 <            common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
311 <
312 <            ## is there any remainder?
313 <            check = int(self.total_number_of_files) - (int(self.total_number_of_jobs)*self.filesPerJob)
314 <
315 <            common.logger.debug(5,'Check  '+str(check))
316 <
317 <            if check > 0:
318 <                self.total_number_of_jobs =  self.total_number_of_jobs + 1
319 <                common.logger.message('Warning: last job will be created with '+str(check)+' files')
658 >        self.total_number_of_jobs = self.theNumberOfJobs
659  
660 <            common.logger.message(str(self.total_number_of_jobs)+' jobs will be created for a total of '+str((self.total_number_of_jobs-1)*self.filesPerJob*evPerFile + check*evPerFile)+' events')
322 <            pass
660 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
661  
662 <        list_of_lists = []
325 <        for i in xrange(0, int(n_tot_files), self.filesPerJob):
326 <            list_of_lists.append(self.files[0][i: i+self.filesPerJob])
662 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
663  
664 <        self.list_of_files = list_of_lists
665 <      
664 >        # argument is seed number.$i
665 >        self.list_of_args = []
666 >        for i in range(self.total_number_of_jobs):
667 >            self.jobDestination.append([""])
668 >            self.list_of_args.append([str(i)])
669          return
670  
671 <    def split(self, jobParams):
672 <
334 <        common.jobDB.load()
335 <        #### Fabio
671 >    def split(self, jobParams,firstJobID):
672 >
673          njobs = self.total_number_of_jobs
674 <        filelist = self.list_of_files
674 >        arglist = self.list_of_args
675          # create the empty structure
676          for i in range(njobs):
677              jobParams.append("")
341        
342        for job in range(njobs):
343            jobParams[job] = filelist[job]
344            common.jobDB.setArguments(job, jobParams[job])
678  
679 <        common.jobDB.save()
679 >        listID=[]
680 >        listField=[]
681 >        for id in range(njobs):
682 >            job = id + int(firstJobID)
683 >            jobParams[id] = arglist[id]
684 >            listID.append(job+1)
685 >            job_ToSave ={}
686 >            concString = ' '
687 >            argu=''
688 >            if len(jobParams[id]):
689 >                argu +=   concString.join(jobParams[id] )
690 >            job_ToSave['arguments']= str(job+1)+' '+argu
691 >            job_ToSave['dlsDestination']= self.jobDestination[id]
692 >            listField.append(job_ToSave)
693 >            msg="Job "+str(job)+" Arguments:   "+str(job+1)+" "+argu+"\n"  \
694 >            +"                     Destination: "+str(self.jobDestination[id])
695 >            common.logger.debug(5,msg)
696 >        common._db.updateJob_(listID,listField)
697 >        self.argsList = (len(jobParams[0])+1)
698 >
699          return
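To illustrate what split() stores per job above: each record passed to common._db.updateJob_ carries an 'arguments' string built from the (1-based) job number followed by the space-joined splitting arguments, plus the 'dlsDestination' site list. A hedged sketch with assumed values (firstJobID, file names and site are placeholders, not taken from the source):

    firstJobID = 0                                   # assumed starting job ID
    arglist = [['f1.root,f2.root', '100', '30']]     # illustrative splitting arguments
    jobDestination = [['T2_IT_Legnaro']]             # illustrative site list
    job_id = 0
    argu = ' '.join(arglist[job_id])
    job_record = {
        'arguments': str(job_id + firstJobID + 1) + ' ' + argu,   # "1 f1.root,f2.root 100 30"
        'dlsDestination': jobDestination[job_id],
    }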
348    
349    def getJobTypeArguments(self, nj, sched):
350        params = common.jobDB.arguments(nj)
351        #print params
352        parString = "\\{"
353        
354        for i in range(len(params) - 1):
355            parString += '\\\"' + params[i] + '\\\"\,'
356        
357        parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
358        return parString
359  
360    def numberOfJobs(self):
361        # Fabio
700  
701 +    def numberOfJobs(self):
702          return self.total_number_of_jobs
364
365
366
367    def checkBlackList(self, allSites):
368        if len(self.reCEBlackList)==0: return allSites
369        sites = []
370        for site in allSites:
371            common.logger.debug(10,'Site '+site)
372            good=1
373            for re in self.reCEBlackList:
374                if re.search(site):
375                    common.logger.message('CE in black list, skipping site '+site)
376                    good=0
377                pass
378            if good: sites.append(site)
379        if len(sites) == 0:
380            common.logger.debug(3,"No sites found after BlackList")
381        return sites
382
383    def checkWhiteList(self, allSites):
384
385        if len(self.reCEWhiteList)==0: return allSites
386        sites = []
387        for site in allSites:
388            good=0
389            for re in self.reCEWhiteList:
390                if re.search(site):
391                    common.logger.debug(5,'CE in white list, adding site '+site)
392                    good=1
393                if not good: continue
394                sites.append(site)
395        if len(sites) == 0:
396            common.logger.message("No sites found after WhiteList\n")
397        else:
398            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
399        return sites
703  
704      def getTarBall(self, exe):
705          """
706          Return the TarBall with lib and exe
707          """
708 <        
406 <        # if it exist, just return it
407 <        self.tgzNameWithPath = common.work_space.shareDir()+self.tgz_name
708 >        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
709          if os.path.exists(self.tgzNameWithPath):
710              return self.tgzNameWithPath
711  
# Line 417 | Line 718 | class Cmssw(JobType):
718  
719          # First of all declare the user Scram area
720          swArea = self.scram.getSWArea_()
420        #print "swArea = ", swArea
421        swVersion = self.scram.getSWVersion()
422        #print "swVersion = ", swVersion
721          swReleaseTop = self.scram.getReleaseTop_()
722 <        #print "swReleaseTop = ", swReleaseTop
425 <        
722 >
723          ## check if working area is release top
724          if swReleaseTop == '' or swArea == swReleaseTop:
725 +            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
726              return
727  
728 <        filesToBeTarred = []
729 <        ## First find the executable
730 <        if (self.executable != ''):
731 <            exeWithPath = self.scram.findFile_(executable)
732 < #           print exeWithPath
733 <            if ( not exeWithPath ):
734 <                raise CrabException('User executable '+executable+' not found')
735 <
736 <            ## then check if it's private or not
737 <            if exeWithPath.find(swReleaseTop) == -1:
738 <                # the exe is private, so we must ship
739 <                common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
740 <                path = swArea+'/'
741 <                exe = string.replace(exeWithPath, path,'')
742 <                filesToBeTarred.append(exe)
743 <                pass
744 <            else:
745 <                # the exe is from release, we'll find it on WN
728 >        import tarfile
729 >        try: # create tar ball
730 >            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
731 >            ## First find the executable
732 >            if (self.executable != ''):
733 >                exeWithPath = self.scram.findFile_(executable)
734 >                if ( not exeWithPath ):
735 >                    raise CrabException('User executable '+executable+' not found')
736 >
737 >                ## then check if it's private or not
738 >                if exeWithPath.find(swReleaseTop) == -1:
739 >                    # the exe is private, so we must ship
740 >                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
741 >                    path = swArea+'/'
742 >                    # distinguish case when script is in user project area or given by full path somewhere else
743 >                    if exeWithPath.find(path) >= 0 :
744 >                        exe = string.replace(exeWithPath, path,'')
745 >                        tar.add(path+exe,exe)
746 >                    else :
747 >                        tar.add(exeWithPath,os.path.basename(executable))
748 >                    pass
749 >                else:
750 >                    # the exe is from release, we'll find it on WN
751 >                    pass
752 >
753 >            ## Now get the libraries: only those in local working area
754 >            libDir = 'lib'
755 >            lib = swArea+'/' +libDir
756 >            common.logger.debug(5,"lib "+lib+" to be tarred")
757 >            if os.path.exists(lib):
758 >                tar.add(lib,libDir)
759 >
760 >            ## Now check if module dir is present
761 >            moduleDir = 'module'
762 >            module = swArea + '/' + moduleDir
763 >            if os.path.isdir(module):
764 >                tar.add(module,moduleDir)
765 >
766 >            ## Now check if any data dir(s) is present
767 >            self.dataExist = False
768 >            todo_list = [(i, i) for i in  os.listdir(swArea+"/src")]
769 >            while len(todo_list):
770 >                entry, name = todo_list.pop()
771 >                if name.startswith('crab_0_') or  name.startswith('.') or name == 'CVS':
772 >                    continue
773 >                if os.path.isdir(swArea+"/src/"+entry):
774 >                    entryPath = entry + '/'
775 >                    todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
776 >                    if name == 'data':
777 >                        self.dataExist=True
778 >                        common.logger.debug(5,"data "+entry+" to be tarred")
779 >                        tar.add(swArea+"/src/"+entry,"src/"+entry)
780 >                    pass
781                  pass
782 <
783 <        ## Now get the libraries: only those in local working area
784 <        libDir = 'lib'
785 <        lib = swArea+'/' +libDir
786 <        common.logger.debug(5,"lib "+lib+" to be tarred")
787 <        if os.path.exists(lib):
788 <            filesToBeTarred.append(libDir)
789 <
790 <        ## Now check if module dir is present
791 <        moduleDir = 'module'
792 <        if os.path.isdir(swArea+'/'+moduleDir):
793 <            filesToBeTarred.append(moduleDir)
794 <
795 <        ## Now check if the Data dir is present
796 <        dataDir = 'src/Data/'
797 <        if os.path.isdir(swArea+'/'+dataDir):
798 <            filesToBeTarred.append(dataDir)
799 <
800 <        ## Create the tar-ball
801 <        if len(filesToBeTarred)>0:
802 <            cwd = os.getcwd()
803 <            os.chdir(swArea)
804 <            tarcmd = 'tar zcvf ' + self.tgzNameWithPath + ' '
805 <            for line in filesToBeTarred:
806 <                tarcmd = tarcmd + line + ' '
807 <            cout = runCommand(tarcmd)
808 <            if not cout:
809 <                raise CrabException('Could not create tar-ball')
810 <            os.chdir(cwd)
811 <        else:
812 <            common.logger.debug(5,"No files to be to be tarred")
813 <        
814 <        return
815 <        
816 <    def wsSetupEnvironment(self, nj):
782 >
783 >            ### CMSSW ParameterSet
784 >            if not self.pset is None:
785 >                cfg_file = common.work_space.jobDir()+self.configFilename()
786 >                tar.add(cfg_file,self.configFilename())
787 >                common.logger.debug(5,"File added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
788 >
789 >
790 >            ## Add ProdCommon dir to tar
791 >            prodcommonDir = './'
792 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
793 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools','ProdCommon/Core','ProdCommon/MCPayloads', 'IMProv']
794 >            for file in neededStuff:
795 >                tar.add(prodcommonPath+file,prodcommonDir+file)
796 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
797 >
798 >            ##### ML stuff
799 >            ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
800 >            path=os.environ['CRABDIR'] + '/python/'
801 >            for file in ML_file_list:
802 >                tar.add(path+file,file)
803 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
804 >
805 >            ##### Utils
806 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py']
807 >            for file in Utils_file_list:
808 >                tar.add(path+file,file)
809 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
810 >
811 >            ##### AdditionalFiles
812 >            for file in self.additional_inbox_files:
813 >                tar.add(file,string.split(file,'/')[-1])
814 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
815 >
816 >            tar.close()
817 >        except IOError:
818 >            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
819 >        except tarfile.TarError:
820 >            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
821 >
822 >        ## check for tarball size
823 >        tarballinfo = os.stat(self.tgzNameWithPath)
824 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
825 >            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
826 >
827 >        ## create tar-ball with ML stuff
828 >
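A minimal sketch of the sandbox size check performed once the tarball is closed; tgzPath is a hypothetical path and RuntimeError stands in for CrabException:

import os

def check_sandbox_size(tgzPath, maxMB=9.5):
    """Refuse tarballs above the grid input sandbox limit (EDG.maxtarballsize, in MB)."""
    sizeMB = os.stat(tgzPath).st_size / 1024.0 / 1024.0
    if sizeMB > maxMB:
        raise RuntimeError('Input sandbox size of %.1f MB exceeds the %.1f MB limit' % (sizeMB, maxMB))
    return sizeMB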
829 >    def wsSetupEnvironment(self, nj=0):
830          """
831          Returns part of a job script which prepares
832          the execution environment for the job 'nj'.
833          """
834 +        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
835 +            psetName = 'pset.py'
836 +        else:
837 +            psetName = 'pset.cfg'
838          # Prepare JobType-independent part
839 <        txt = ''
840 <  
841 <        ## OLI_Daniele at this level  middleware already known
492 <
493 <        txt += 'if [ $middleware == LCG ]; then \n'
839 >        txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
840 >        txt += 'echo ">>> setup environment"\n'
841 >        txt += 'if [ $middleware == LCG ]; then \n'
842          txt += self.wsSetupCMSLCGEnvironment_()
843          txt += 'elif [ $middleware == OSG ]; then\n'
844 <        txt += '    time=`date -u +"%s"`\n'
845 <        txt += '    WORKING_DIR=$OSG_WN_TMP/cms_$time\n'
846 <        txt += '    echo "Creating working directory: $WORKING_DIR"\n'
847 <        txt += '    /bin/mkdir -p $WORKING_DIR\n'
848 <        txt += '    if [ ! -d $WORKING_DIR ] ;then\n'
501 <        txt += '        echo "OSG WORKING DIR ==> $WORKING_DIR could not be created on on WN `hostname`"\n'
502 <    
503 <        txt += '        echo "JOB_EXIT_STATUS = 1"\n'
504 <        txt += '        exit 1\n'
844 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
845 >        txt += '    if [ ! $? == 0 ] ;then\n'
846 >        txt += '        echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
847 >        txt += '        job_exit_code=10016\n'
848 >        txt += '        func_exit\n'
849          txt += '    fi\n'
850 +        txt += '    echo ">>> Created working directory: $WORKING_DIR"\n'
851          txt += '\n'
852          txt += '    echo "Change to working directory: $WORKING_DIR"\n'
853          txt += '    cd $WORKING_DIR\n'
854 <        txt += self.wsSetupCMSOSGEnvironment_()
854 >        txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
855 >        txt += self.wsSetupCMSOSGEnvironment_()
856          txt += 'fi\n'
857  
858          # Prepare JobType-specific part
859          scram = self.scram.commandName()
860          txt += '\n\n'
861 <        txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
861 >        txt += 'echo ">>> specific cmssw setup environment:"\n'
862 >        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
863          txt += scram+' project CMSSW '+self.version+'\n'
864          txt += 'status=$?\n'
865          txt += 'if [ $status != 0 ] ; then\n'
866 <        txt += '   echo "SET_EXE_ENV 1 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
867 <        txt += '   echo "JOB_EXIT_STATUS = 10034"\n'
868 <        txt += '   echo "SanityCheckCode = 10034" | tee -a $RUNTIME_AREA/$repo\n'
522 <        txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
523 <        ## OLI_Daniele
524 <        txt += '    if [ $middleware == OSG ]; then \n'
525 <        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
526 <        txt += '        cd $RUNTIME_AREA\n'
527 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
528 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
529 <        txt += '            echo "OSG WORKING DIR ==> $WORKING_DIR could not be deleted on on WN `hostname`"\n'
530 <        txt += '        fi\n'
531 <        txt += '    fi \n'
532 <        txt += '   exit 1 \n'
866 >        txt += '    echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n'
867 >        txt += '    job_exit_code=10034\n'
868 >        txt += '    func_exit\n'
869          txt += 'fi \n'
534        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
870          txt += 'cd '+self.version+'\n'
871 <        ### needed grep for bug in scramv1 ###
871 >        txt += 'SOFTWARE_DIR=`pwd`\n'
872 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
873          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
874 <
874 >        txt += 'if [ $? != 0 ] ; then\n'
875 >        txt += '    echo "ERROR ==> Problem with the command: "\n'
876 >        txt += '    echo "eval \`'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME \` at `hostname`"\n'
877 >        txt += '    job_exit_code=10034\n'
878 >        txt += '    func_exit\n'
879 >        txt += 'fi \n'
880          # Handle the arguments:
881          txt += "\n"
882 <        txt += "## ARGUMNETS: $1 Job Number\n"
542 <        # txt += "## ARGUMNETS: $2 First Event for this job\n"
543 <        # txt += "## ARGUMNETS: $3 Max Event for this job\n"
882 >        txt += "## number of arguments (first argument always jobnumber)\n"
883          txt += "\n"
884 <        txt += "narg=$#\n"
546 <        txt += "if [ $narg -lt 2 ]\n"
884 >        txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
885          txt += "then\n"
886 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$narg+ \n"
887 <        txt += '    echo "JOB_EXIT_STATUS = 50113"\n'
888 <        txt += '    echo "SanityCheckCode = 50113" | tee -a $RUNTIME_AREA/$repo\n'
551 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
552 <        ## OLI_Daniele
553 <        txt += '    if [ $middleware == OSG ]; then \n'
554 <        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
555 <        txt += '        cd $RUNTIME_AREA\n'
556 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
557 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
558 <        txt += '            echo "OSG WORKING DIR ==> $WORKING_DIR could not be deleted on on WN `hostname`"\n'
559 <        txt += '        fi\n'
560 <        txt += '    fi \n'
561 <        txt += "    exit 1\n"
886 >        txt += "    echo 'ERROR ==> Too few arguments' +$nargs+ \n"
887 >        txt += '    job_exit_code=50113\n'
888 >        txt += "    func_exit\n"
889          txt += "fi\n"
890          txt += "\n"
564        txt += "NJob=$1\n"
565        txt += "InputFiles=$2\n"
566        txt += "echo \"<$InputFiles>\"\n"
567        # txt += "Args = ` cat $2 |  sed -e \'s/\\\\//g\' -e \'s/\"/\\x27/g\' `"
568
569        ### OLI_DANIELE
570        txt += 'if [ $middleware == LCG ]; then \n'
571        txt += '    echo "MonitorJobID=`echo ${NJob}_$EDG_WL_JOBID`" | tee -a $RUNTIME_AREA/$repo\n'
572        txt += '    echo "SyncGridJobId=`echo $EDG_WL_JOBID`" | tee -a $RUNTIME_AREA/$repo\n'
573        txt += '    echo "SyncCE=`edg-brokerinfo getCE`" | tee -a $RUNTIME_AREA/$repo\n'
574        txt += 'elif [ $middleware == OSG ]; then\n'
575
576        # OLI: added monitoring for dashbord, use hash of crab.cfg
577        if common.scheduler.boss_scheduler_name == 'condor_g':
578            # create hash of cfg file
579            hash = makeCksum(common.work_space.cfgFileName())
580            txt += '    echo "MonitorJobID=`echo ${NJob}_'+hash+'_$GLOBUS_GRAM_JOB_CONTACT`" | tee -a $RUNTIME_AREA/$repo\n'
581            txt += '    echo "SyncGridJobId=`echo $GLOBUS_GRAM_JOB_CONTACT`" | tee -a $RUNTIME_AREA/$repo\n'
582            txt += '    echo "SyncCE=`echo $hostname`" | tee -a $RUNTIME_AREA/$repo\n'
583        else :
584            txt += '    echo "MonitorJobID=`echo ${NJob}_$EDG_WL_JOBID`" | tee -a $RUNTIME_AREA/$repo\n'
585            txt += '    echo "SyncGridJobId=`echo $EDG_WL_JOBID`" | tee -a $RUNTIME_AREA/$repo\n'
586            txt += '    echo "SyncCE=`$EDG_WL_LOG_DESTINATION`" | tee -a $RUNTIME_AREA/$repo\n'
587
588        txt += 'fi\n'
589        txt += 'dumpStatus $RUNTIME_AREA/$repo\n'
891  
892          # Prepare job-specific part
893          job = common.job_list[nj]
894 <        pset = os.path.basename(job.configFilename())
895 <        txt += '\n'
896 <        #txt += 'echo sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' \n'
596 <        txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset.cfg\n'
597 <        #txt += 'sed "s#{\'INPUT\'}#${InputFiles}#" $RUNTIME_AREA/'+pset+' > pset1.cfg\n'
894 >        if (self.datasetPath):
895 >            txt += '\n'
896 >            txt += 'DatasetPath='+self.datasetPath+'\n'
897  
898 <        if len(self.additional_inbox_files) > 0:
600 <            for file in self.additional_inbox_files:
601 <                txt += 'if [ -e $RUNTIME_AREA/'+file+' ] ; then\n'
602 <                txt += '   cp $RUNTIME_AREA/'+file+' .\n'
603 <                txt += '   chmod +x '+file+'\n'
604 <                txt += 'fi\n'
605 <            pass
898 >            datasetpath_split = self.datasetPath.split("/")
899  
900 <        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
900 >            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
901 >            txt += 'DataTier='+datasetpath_split[2]+'\n'
902 >            txt += 'ApplicationFamily=cmsRun\n'
903  
904 <        txt += '\n'
905 <        txt += 'echo "***** cat pset.cfg *********"\n'
906 <        txt += 'cat pset.cfg\n'
907 <        txt += 'echo "****** end pset.cfg ********"\n'
908 <        txt += '\n'
909 <        # txt += 'echo "***** cat pset1.cfg *********"\n'
910 <        # txt += 'cat pset1.cfg\n'
911 <        # txt += 'echo "****** end pset1.cfg ********"\n'
904 >        else:
905 >            txt += 'DatasetPath=MCDataTier\n'
906 >            txt += 'PrimaryDataset=null\n'
907 >            txt += 'DataTier=null\n'
908 >            txt += 'ApplicationFamily=MCDataTier\n'
909 >        if self.pset != None:
910 >            pset = os.path.basename(job.configFilename())
911 >            txt += '\n'
912 >            txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
913 >            if (self.datasetPath): # standard job
914 >                txt += 'InputFiles=${args[1]}; export InputFiles\n'
915 >                if (self.useParent):  
916 >                    txt += 'ParentFiles=${args[2]}; export ParentFiles\n'
917 >                    txt += 'MaxEvents=${args[3]}; export MaxEvents\n'
918 >                    txt += 'SkipEvents=${args[4]}; export SkipEvents\n'
919 >                else:
920 >                    txt += 'MaxEvents=${args[2]}; export MaxEvents\n'
921 >                    txt += 'SkipEvents=${args[3]}; export SkipEvents\n'
922 >                txt += 'echo "Inputfiles:<$InputFiles>"\n'
923 >                if (self.useParent): txt += 'echo "ParentFiles:<$ParentFiles>"\n'
924 >                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
925 >                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
926 >            else:  # pythia like job
927 >                txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
928 >                txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
929 >                txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
930 >                txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
931 >                if (self.firstRun):
932 >                    txt += 'FirstRun=${args[1]}; export FirstRun\n'
933 >                    txt += 'echo "FirstRun: <$FirstRun>"\n'
934 >
935 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
936 >
937 >
938 >        if self.pset != None:
939 >            # FUTURE: Can simplify for 2_1_x and higher
940 >            txt += '\n'
941 >            if self.debug_wrapper==True:
942 >                txt += 'echo "***** cat ' + psetName + ' *********"\n'
943 >                txt += 'cat ' + psetName + '\n'
944 >                txt += 'echo "****** end ' + psetName + ' ********"\n'
945 >                txt += '\n'
946 >            txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
947 >            txt += 'echo "PSETHASH = $PSETHASH" \n'
948 >            txt += '\n'
949          return txt
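For dataset jobs the wrapper fragment above reads its parameters from the args array, the job number coming first as noted in the argument-count check. A small sketch of that ordering (the exact position of NJob is assumed, not shown explicitly above):

def dataset_job_arg_names(use_parent=False):
    """Order of the per-job wrapper arguments for a dataset job."""
    names = ['NJob', 'InputFiles']
    if use_parent:
        names.append('ParentFiles')
    names += ['MaxEvents', 'SkipEvents']
    return names

# e.g. dataset_job_arg_names(True) -> ['NJob', 'InputFiles', 'ParentFiles', 'MaxEvents', 'SkipEvents']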
950  
951 <    def wsBuildExe(self, nj):
951 >    def wsUntarSoftware(self, nj=0):
952          """
953          Put in the script the commands to untar and set up
954          the software tarball shipped with the job.
955          """
956  
957 <        txt = ""
957 >        txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
958  
959          if os.path.isfile(self.tgzNameWithPath):
960 <            txt += 'echo "tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'"\n'
960 >            txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
961              txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
962 +            if  self.debug_wrapper:
963 +                txt += 'ls -Al \n'
964              txt += 'untar_status=$? \n'
965              txt += 'if [ $untar_status -ne 0 ]; then \n'
966 <            txt += '   echo "SET_EXE 1 ==> ERROR Untarring .tgz file failed"\n'
967 <            txt += '   echo "JOB_EXIT_STATUS = $untar_status" \n'
968 <            txt += '   echo "SanityCheckCode = $untar_status" | tee -a $repo\n'
635 <            txt += '   if [ $middleware == OSG ]; then \n'
636 <            txt += '       echo "Remove working directory: $WORKING_DIR"\n'
637 <            txt += '       cd $RUNTIME_AREA\n'
638 <            txt += '       /bin/rm -rf $WORKING_DIR\n'
639 <            txt += '       if [ -d $WORKING_DIR ] ;then\n'
640 <            txt += '           echo "OSG WORKING DIR ==> $WORKING_DIR could not be deleted on on WN `hostname`"\n'
641 <            txt += '       fi\n'
642 <            txt += '   fi \n'
643 <            txt += '   \n'
644 <            txt += '   exit $untar_status \n'
966 >            txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
967 >            txt += '   job_exit_code=$untar_status\n'
968 >            txt += '   func_exit\n'
969              txt += 'else \n'
970              txt += '   echo "Successful untar" \n'
971              txt += 'fi \n'
972 +            txt += '\n'
973 +            txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
974 +            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
975 +            txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
976 +            txt += 'else\n'
977 +            txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
978 +            txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
979 +            txt += 'fi\n'
980 +            txt += '\n'
981 +
982              pass
983 <        
983 >
984          return txt
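The PYTHONPATH handling above prepends $RUNTIME_AREA only in front of an already existing value; the same rule as a small sketch:

def runtime_pythonpath(runtime_area, current=''):
    """Prepend the runtime area, preserving any PYTHONPATH already set."""
    if not current:
        return runtime_area + '/'
    return runtime_area + '/:' + current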
985  
986 <    def modifySteeringCards(self, nj):
986 >    def wsBuildExe(self, nj=0):
987          """
988 <        modify the card provided by the user,
989 <        writing a new card into share dir
988 >        Put in the script the commands to build an executable
989 >        or a library.
990          """
991 <        
991 >
992 >        txt = '\n#Written by cms_cmssw::wsBuildExe\n'
993 >        txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
994 >
995 >        txt += 'rm -r lib/ module/ \n'
996 >        txt += 'mv $RUNTIME_AREA/lib/ . \n'
997 >        txt += 'mv $RUNTIME_AREA/module/ . \n'
998 >        if self.dataExist == True:
999 >            txt += 'rm -r src/ \n'
1000 >            txt += 'mv $RUNTIME_AREA/src/ . \n'
1001 >        if len(self.additional_inbox_files)>0:
1002 >            for file in self.additional_inbox_files:
1003 >                txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
1004 >        # txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
1005 >        # txt += 'mv $RUNTIME_AREA/IMProv/ . \n'
1006 >
1007 >        txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
1008 >        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
1009 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
1010 >        txt += 'else\n'
1011 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
1012 >        txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1013 >        txt += 'fi\n'
1014 >        txt += '\n'
1015 >
1016 >        return txt
1017 >
1018 >
1019      def executableName(self):
1020 <        return self.executable
1020 >        if self.scriptExe:
1021 >            return "sh "
1022 >        else:
1023 >            return self.executable
1024  
1025      def executableArgs(self):
1026 <        return " -p pset.cfg"
1026 >        # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
1027 >        if self.scriptExe: # CarlosDaniele
1028 >            return   self.scriptExe + " $NJob"
1029 >        else:
1030 >            ex_args = ""
1031 >            # FUTURE: This tests the CMSSW version. Can remove code as versions deprecated
1032 >            # Framework job report
1033 >            if (self.CMSSW_major >= 1 and self.CMSSW_minor >= 5) or (self.CMSSW_major >= 2):
1034 >                ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
1035 >            # Type of config file
1036 >            if self.CMSSW_major >= 2 :
1037 >                ex_args += " -p pset.py"
1038 >            else:
1039 >                ex_args += " -p pset.cfg"
1040 >            return ex_args
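A standalone sketch of the version-dependent cmsRun arguments assembled above, taking the parsed major and minor release numbers as plain integers:

def cmsrun_args(major, minor):
    """Framework job report option from 1_5_x on, python pset from 2_x on."""
    args = ''
    if (major >= 1 and minor >= 5) or (major >= 2):
        args += ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml'
    if major >= 2:
        args += ' -p pset.py'
    else:
        args += ' -p pset.cfg'
    return args

# e.g. cmsrun_args(1, 6) -> ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.cfg'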
1041  
1042      def inputSandbox(self, nj):
1043          """
1044          Returns a list of filenames to be put in JDL input sandbox.
1045          """
1046          inp_box = []
669        # dict added to delete duplicate from input sandbox file list
670        seen = {}
671        ## code
1047          if os.path.isfile(self.tgzNameWithPath):
1048              inp_box.append(self.tgzNameWithPath)
1049 <        ## config
1050 <        inp_box.append(common.job_list[nj].configFilename())
676 <        ## additional input files
677 <        #for file in self.additional_inbox_files:
678 <        #    inp_box.append(common.work_space.cwdDir()+file)
1049 >        wrapper = os.path.basename(str(common._db.queryTask('scriptName')))
1050 >        inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper)
1051          return inp_box
1052  
1053      def outputSandbox(self, nj):
# Line 684 | Line 1056 | class Cmssw(JobType):
1056          """
1057          out_box = []
1058  
687        stdout=common.job_list[nj].stdout()
688        stderr=common.job_list[nj].stderr()
689
1059          ## User Declared output files
1060 <        for out in self.output_file:
1061 <            n_out = nj + 1
1062 <            out_box.append(self.numberFile_(out,str(n_out)))
1060 >        for out in (self.output_file+self.output_file_sandbox):
1061 >            n_out = nj + 1
1062 >            out_box.append(numberFile(out,str(n_out)))
1063          return out_box
695        return []
1064  
697    def prepareSteeringCards(self):
698        """
699        Make initial modifications of the user's steering card file.
700        """
701        return
1065  
1066      def wsRenameOutput(self, nj):
1067          """
1068          Returns part of a job script which renames the produced files.
1069          """
1070  
1071 <        txt = '\n'
1072 <        file_list = ''
1073 <        check = len(self.output_file)
1074 <        i = 0
1075 <        for fileWithSuffix in self.output_file:
1076 <            i= i + 1
1077 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1078 <            file_list=file_list+output_file_num+''
1079 <            txt += '\n'
717 <            txt += 'ls \n'
1071 >        txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
1072 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1073 >        txt += 'echo ">>> current directory content:"\n'
1074 >        if self.debug_wrapper:
1075 >            txt += 'ls -Al\n'
1076 >        txt += '\n'
1077 >
1078 >        for fileWithSuffix in (self.output_file):
1079 >            output_file_num = numberFile(fileWithSuffix, '$NJob')
1080              txt += '\n'
1081 <            txt += 'ls '+fileWithSuffix+'\n'
1082 <            txt += 'exe_result=$?\n'
1083 <            txt += 'if [ $exe_result -ne 0 ] ; then\n'
1084 <            txt += '   echo "ERROR: No output file to manage"\n'
1085 <            ### OLI_DANIELE
1086 <            txt += '    if [ $middleware == OSG ]; then \n'
1087 <            txt += '        echo "prepare dummy output file"\n'
1088 <            txt += '        cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
727 <            txt += '    fi \n'
1081 >            txt += '# check output file\n'
1082 >            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1083 >            if (self.copy_data == 1):  # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA
1084 >                txt += '    mv '+fileWithSuffix+' '+output_file_num+'\n'
1085 >                txt += '    ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1086 >            else:
1087 >                txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1088 >                txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1089              txt += 'else\n'
1090 <            txt += '   cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1090 >            txt += '    job_exit_code=60302\n'
1091 >            txt += '    echo "WARNING: Output file '+fileWithSuffix+' not found"\n'
1092 >            if common.scheduler.name().upper() == 'CONDOR_G':
1093 >                txt += '    if [ $middleware == OSG ]; then \n'
1094 >                txt += '        echo "prepare dummy output file"\n'
1095 >                txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1096 >                txt += '    fi \n'
1097              txt += 'fi\n'
1098 <            if i == check:
1099 <                txt += 'cd $RUNTIME_AREA\n'
1100 <                pass      
1101 <            pass
1102 <      
736 <        file_list=file_list[:-1]
737 <        txt += 'file_list="'+file_list+'"\n'
738 <        ### OLI_DANIELE
739 <        txt += 'if [ $middleware == OSG ]; then\n'  
740 <        txt += '    cd $RUNTIME_AREA\n'
741 <        txt += '    echo "Remove working directory: $WORKING_DIR"\n'
742 <        txt += '    /bin/rm -rf $WORKING_DIR\n'
743 <        txt += '    if [ -d $WORKING_DIR ] ;then\n'
744 <        txt += '        echo "OSG WORKING DIR ==> $WORKING_DIR could not be deleted on on WN `hostname`"\n'
745 <        txt += '    fi\n'
746 <        txt += 'fi\n'
1098 >        file_list = []
1099 >        for fileWithSuffix in (self.output_file):
1100 >             file_list.append(numberFile(fileWithSuffix, '$NJob'))
1101 >
1102 >        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
1103          txt += '\n'
1104 +        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1105 +        txt += 'echo ">>> current directory content:"\n'
1106 +        if self.debug_wrapper:
1107 +            txt += 'ls -Al\n'
1108 +        txt += '\n'
1109 +        txt += 'cd $RUNTIME_AREA\n'
1110 +        txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
1111          return txt
1112  
1113 <    def numberFile_(self, file, txt):
751 <        """
752 <        append _'txt' before last extension of a file
753 <        """
754 <        p = string.split(file,".")
755 <        # take away last extension
756 <        name = p[0]
757 <        for x in p[1:-1]:
758 <           name=name+"."+x
759 <        # add "_txt"
760 <        if len(p)>1:
761 <          ext = p[len(p)-1]
762 <          #result = name + '_' + str(txt) + "." + ext
763 <          result = name + '_' + txt + "." + ext
764 <        else:
765 <          #result = name + '_' + str(txt)
766 <          result = name + '_' + txt
767 <        
768 <        return result
769 <
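The numberFile_ method removed above survives as the module-level numberFile used throughout this revision (presumably from crab_util). A minimal sketch of that behaviour, inserting '_<txt>' before the last extension:

def number_file(filename, txt):
    """e.g. number_file('output.root', '3') -> 'output_3.root'; without an extension, 'output' -> 'output_3'."""
    parts = filename.split('.')
    if len(parts) > 1:
        return '.'.join(parts[:-1]) + '_' + txt + '.' + parts[-1]
    return filename + '_' + txt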
770 <    def getRequirements(self):
1113 >    def getRequirements(self, nj=[]):
1114          """
1115 <        return job requirements to add to jdl files
1115 >        return job requirements to add to jdl files
1116          """
1117          req = ''
1118 <        if common.analisys_common_info['sites']:
1119 <            if common.analisys_common_info['sw_version']:
1120 <                req='Member("VO-cms-' + \
1121 <                     common.analisys_common_info['sw_version'] + \
1122 <                     '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1123 <            if len(common.analisys_common_info['sites'])>0:
1124 <                req = req + ' && ('
1125 <                for i in range(len(common.analisys_common_info['sites'])):
1126 <                    req = req + 'other.GlueCEInfoHostName == "' \
1127 <                         + common.analisys_common_info['sites'][i] + '"'
1128 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
1129 <                        req = req + ' || '
1130 <            req = req + ')'
788 <        #print "req = ", req
1118 >        if self.version:
1119 >            req='Member("VO-cms-' + \
1120 >                 self.version + \
1121 >                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1122 >        if self.executable_arch:
1123 >            req+=' && Member("VO-cms-' + \
1124 >                 self.executable_arch + \
1125 >                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1126 >
1127 >        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1128 >        if common.scheduler.name() == "glitecoll":
1129 >            req += ' && other.GlueCEStateStatus == "Production" '
1130 >
1131          return req
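A standalone sketch of the requirement string assembled above; the version, architecture and scheduler name passed in are hypothetical example values:

def glue_requirements(version, arch, scheduler_name='glite'):
    """Assemble the JDL Requirements expression the same way as above."""
    req = ''
    if version:
        req = ('Member("VO-cms-' + version +
               '", other.GlueHostApplicationSoftwareRunTimeEnvironment)')
    if arch:
        req += (' && Member("VO-cms-' + arch +
                '", other.GlueHostApplicationSoftwareRunTimeEnvironment)')
    req += ' && (other.GlueHostNetworkAdapterOutboundIP)'
    if scheduler_name == 'glitecoll':
        req += ' && other.GlueCEStateStatus == "Production" '
    return req

# e.g. glue_requirements('CMSSW_2_0_12', 'slc4_ia32_gcc345', 'glitecoll')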
1132  
1133      def configFilename(self):
1134          """ return the config filename """
1135 <        return self.name()+'.cfg'
1135 >        # FUTURE: Can remove cfg mode for CMSSW >= 2_1_x
1136 >        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
1137 >          return self.name()+'.py'
1138 >        else:
1139 >          return self.name()+'.cfg'
1140  
795    ### OLI_DANIELE
1141      def wsSetupCMSOSGEnvironment_(self):
1142          """
1143          Returns part of a job script which prepares
1144          the execution environment and which is common for all CMS jobs.
1145          """
1146 <        txt = '\n'
1147 <        txt += '   echo "### SETUP CMS OSG  ENVIRONMENT ###"\n'
1148 <        txt += '   if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
1149 <        txt += '      # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
1150 <        txt += '       source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1151 <        txt += '   elif [ -f $OSG_APP/cmssoft/cmsset_default.sh ] ;then\n'
1152 <        txt += '      # Use $OSG_APP/cmssoft/cmsset_default.sh to setup cms software\n'
1153 <        txt += '       source $OSG_APP/cmssoft/cmsset_default.sh '+self.version+'\n'
1154 <        txt += '   else\n'
1155 <        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1156 <        txt += '       echo "JOB_EXIT_STATUS = 10020"\n'
1157 <        txt += '       echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1158 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
814 <        txt += '       exit\n'
815 <        txt += '\n'
816 <        txt += '       echo "Remove working directory: $WORKING_DIR"\n'
817 <        txt += '       cd $RUNTIME_AREA\n'
818 <        txt += '       /bin/rm -rf $WORKING_DIR\n'
819 <        txt += '       if [ -d $WORKING_DIR ] ;then\n'
820 <        txt += '           echo "OSG WORKING DIR ==> $WORKING_DIR could not be deleted on on WN `hostname`"\n'
821 <        txt += '       fi\n'
822 <        txt += '\n'
823 <        txt += '       exit\n'
824 <        txt += '   fi\n'
1146 >        txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n'
1147 >        txt += '    echo ">>> setup CMS OSG environment:"\n'
1148 >        txt += '    echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
1149 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1150 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1151 >        txt += '    if [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1152 >        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1153 >        txt += '        source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1154 >        txt += '    else\n'
1155 >        txt += '        echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1156 >        txt += '        job_exit_code=10020\n'
1157 >        txt += '        func_exit\n'
1158 >        txt += '    fi\n'
1159          txt += '\n'
1160 <        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1161 <        txt += '   echo " END SETUP CMS OSG  ENVIRONMENT "\n'
1160 >        txt += '    echo "==> setup cms environment ok"\n'
1161 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1162  
1163          return txt
1164 <
831 <    ### OLI_DANIELE
1164 >
1165      def wsSetupCMSLCGEnvironment_(self):
1166          """
1167          Returns part of a job script which prepares
1168          the execution environment and which is common for all CMS jobs.
1169          """
1170 <        txt  = '   \n'
1171 <        txt += '   echo " ### SETUP CMS LCG  ENVIRONMENT ### "\n'
1172 <        txt += '      echo "JOB_EXIT_STATUS = 0"\n'
1173 <        txt += '   if [ ! $VO_CMS_SW_DIR ] ;then\n'
1174 <        txt += '       echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
1175 <        txt += '       echo "JOB_EXIT_STATUS = 10031" \n'
1176 <        txt += '       echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
1177 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1178 <        txt += '       exit\n'
1179 <        txt += '   else\n'
1180 <        txt += '       echo "Sourcing environment... "\n'
1181 <        txt += '       if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1182 <        txt += '           echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1183 <        txt += '           echo "JOB_EXIT_STATUS = 10020"\n'
1184 <        txt += '           echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1185 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1186 <        txt += '           exit\n'
1187 <        txt += '       fi\n'
1188 <        txt += '       echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1189 <        txt += '       source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1190 <        txt += '       result=$?\n'
1191 <        txt += '       if [ $result -ne 0 ]; then\n'
1192 <        txt += '           echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1193 <        txt += '           echo "JOB_EXIT_STATUS = 10032"\n'
1194 <        txt += '           echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1195 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1196 <        txt += '           exit\n'
1197 <        txt += '       fi\n'
1198 <        txt += '   fi\n'
1199 <        txt += '   \n'
1200 <        txt += '   string=`cat /etc/redhat-release`\n'
1201 <        txt += '   echo $string\n'
1202 <        txt += '   if [[ $string = *alhalla* ]]; then\n'
1203 <        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1204 <        txt += '   elif [[ $string = *Enterprise* ]] || [[ $string = *cientific* ]]; then\n'
1205 <        txt += '       export SCRAM_ARCH=slc3_ia32_gcc323\n'
1206 <        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1207 <        txt += '   else\n'
1208 <        txt += '       echo "SET_CMS_ENV 1 ==> ERROR OS unknown, LCG environment not initialized"\n'
1209 <        txt += '       echo "JOB_EXIT_STATUS = 10033"\n'
1210 <        txt += '       echo "JobExitCode=10033" | tee -a $RUNTIME_AREA/$repo\n'
1211 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1212 <        txt += '       exit\n'
1213 <        txt += '   fi\n'
1214 <        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1215 <        txt += '   echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
1170 >        txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n'
1171 >        txt += '    echo ">>> setup CMS LCG environment:"\n'
1172 >        txt += '    echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
1173 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1174 >        txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
1175 >        txt += '    if [ ! $VO_CMS_SW_DIR ] ;then\n'
1176 >        txt += '        echo "ERROR ==> CMS software dir not found on WN `hostname`"\n'
1177 >        txt += '        job_exit_code=10031\n'
1178 >        txt += '        func_exit\n'
1179 >        txt += '    else\n'
1180 >        txt += '        echo "Sourcing environment... "\n'
1181 >        txt += '        if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1182 >        txt += '            echo "ERROR ==> cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1183 >        txt += '            job_exit_code=10020\n'
1184 >        txt += '            func_exit\n'
1185 >        txt += '        fi\n'
1186 >        txt += '        echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1187 >        txt += '        source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1188 >        txt += '        result=$?\n'
1189 >        txt += '        if [ $result -ne 0 ]; then\n'
1190 >        txt += '            echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1191 >        txt += '            job_exit_code=10032\n'
1192 >        txt += '            func_exit\n'
1193 >        txt += '        fi\n'
1194 >        txt += '    fi\n'
1195 >        txt += '    \n'
1196 >        txt += '    echo "==> setup cms environment ok"\n'
1197 >        return txt
1198 >
1199 >    def modifyReport(self, nj):
1200 >        """
1201 >        insert the part of the script that modifies the FrameworkJob Report
1202 >        """
1203 >        txt = '\n#Written by cms_cmssw::modifyReport\n'
1204 >        publish_data = int(self.cfg_params.get('USER.publish_data',0))
1205 >        if (publish_data == 1):
1206 >            processedDataset = self.cfg_params['USER.publish_data_name']
1207 >            LFNBaseName = LFNBase(processedDataset)
1208 >
1209 >            txt += 'if [ $copy_exit_status -eq 0 ]; then\n'
1210 >            txt += '    FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName)
1211 >            txt += 'else\n'
1212 >            txt += '    FOR_LFN=/copy_problems/ \n'
1213 >            txt += '    SE=""\n'
1214 >            txt += '    SE_PATH=""\n'
1215 >            txt += 'fi\n'
1216 >
1217 >            txt += 'echo ">>> Modify Job Report:" \n'
1218 >            txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1219 >            txt += 'ProcessedDataset='+processedDataset+'\n'
1220 >            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1221 >            txt += 'echo "SE = $SE"\n'
1222 >            txt += 'echo "SE_PATH = $SE_PATH"\n'
1223 >            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1224 >            txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1225 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1226 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1227 >            txt += 'modifyReport_result=$?\n'
1228 >            txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1229 >            txt += '    modifyReport_result=70500\n'
1230 >            txt += '    job_exit_code=$modifyReport_result\n'
1231 >            txt += '    echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n'
1232 >            txt += '    echo "WARNING: Problem with ModifyJobReport"\n'
1233 >            txt += 'else\n'
1234 >            txt += '    mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1235 >            txt += 'fi\n'
1236 >        return txt
1237 >
1238 >    def wsParseFJR(self):
1239 >        """
1240 >        Parse the FrameworkJobReport to obtain useful info
1241 >        """
1242 >        txt = '\n#Written by cms_cmssw::wsParseFJR\n'
1243 >        txt += 'echo ">>> Parse FrameworkJobReport crab_fjr.xml"\n'
1244 >        txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1245 >        txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1246 >        txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1247 >        if self.debug_wrapper :
1248 >            txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1249 >        txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1250 >        txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1251 >        txt += '            echo ">>> crab_fjr.xml contents: "\n'
1252 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1253 >        txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1254 >        txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
1255 >        txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
1256 >        txt += '        else\n'
1257 >        txt += '            echo "Extracted ExitStatus from FrameworkJobReport parsing output: $executable_exit_status"\n'
1258 >        txt += '        fi\n'
1259 >        txt += '    else\n'
1260 >        txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1261 >        txt += '    fi\n'
1262 >          #### Patch to check input data reading for CMSSW 16x. Hopefully we'll remove it asap
1263 >
1264 >        if (self.datasetPath and self.dataset_pu == 'NONE'):
1265 >          # VERIFY PROCESSED DATA
1266 >            txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1267 >            txt += '      echo ">>> Verify list of processed files:"\n'
1268 >            txt += '      echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1269 >            txt += '      python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1270 >            txt += '      cat input-files.txt  | sort | uniq > tmp.txt\n'
1271 >            txt += '      mv tmp.txt input-files.txt\n'
1272 >            txt += '      echo "cat input-files.txt"\n'
1273 >            txt += '      echo "----------------------"\n'
1274 >            txt += '      cat input-files.txt\n'
1275 >            txt += '      cat processed-files.txt | sort | uniq > tmp.txt\n'
1276 >            txt += '      mv tmp.txt processed-files.txt\n'
1277 >            txt += '      echo "----------------------"\n'
1278 >            txt += '      echo "cat processed-files.txt"\n'
1279 >            txt += '      echo "----------------------"\n'
1280 >            txt += '      cat processed-files.txt\n'
1281 >            txt += '      echo "----------------------"\n'
1282 >            txt += '      diff -q input-files.txt processed-files.txt\n'
1283 >            txt += '      fileverify_status=$?\n'
1284 >            txt += '      if [ $fileverify_status -ne 0 ]; then\n'
1285 >            txt += '         executable_exit_status=30001\n'
1286 >            txt += '         echo "ERROR ==> not all input files processed"\n'
1287 >            txt += '         echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1288 >            txt += '         echo "      ==> diff input-files.txt processed-files.txt"\n'
1289 >            txt += '      fi\n'
1290 >            txt += '    fi\n'
1291 >            txt += '\n'
1292 >        txt += 'else\n'
1293 >        txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1294 >        txt += 'fi\n'
1295 >        txt += '\n'
1296 >        txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1297 >        txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1298 >        txt += 'job_exit_code=$executable_exit_status\n'
1299 >
1300 >        return txt
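The sort/uniq/diff sequence above just checks that the set of requested input files equals the set reported as processed in the framework job report; the same comparison in plain Python, with the two lists as hypothetical inputs:

def all_inputs_processed(input_lfns, processed_lfns):
    """True when every requested input file was reported as processed and vice versa."""
    return set(input_lfns) == set(processed_lfns)

# a mismatch corresponds to the wrapper setting executable_exit_status=30001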
1301 >
1302 >    def setParam_(self, param, value):
1303 >        self._params[param] = value
1304 >
1305 >    def getParams(self):
1306 >        return self._params
1307 >
1308 >    def uniquelist(self, old):
1309 >        """
1310 >        remove duplicates from a list
1311 >        """
1312 >        nd={}
1313 >        for e in old:
1314 >            nd[e]=0
1315 >        return nd.keys()
1316 >
1317 >    def outList(self):
1318 >        """
1319 >        build the list of output files whose size will be checked in the output sandbox
1320 >        """
1321 >        txt = ''
1322 >        txt += 'echo ">>> list of expected files on output sandbox"\n'
1323 >        listOutFiles = []
1324 >        stdout = 'CMSSW_$NJob.stdout'
1325 >        stderr = 'CMSSW_$NJob.stderr'
1326 >        if (self.return_data == 1):
1327 >            for file in (self.output_file+self.output_file_sandbox):
1328 >                listOutFiles.append(numberFile(file, '$NJob'))
1329 >            listOutFiles.append(stdout)
1330 >            listOutFiles.append(stderr)
1331 >        else:
1332 >            for file in (self.output_file_sandbox):
1333 >                listOutFiles.append(numberFile(file, '$NJob'))
1334 >            listOutFiles.append(stdout)
1335 >            listOutFiles.append(stderr)
1336 >        txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1337 >        txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1338 >        txt += 'export filesToCheck\n'
1339          return txt
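A small sketch of the expected-files list built above, before the per-job numbering (numberFile) is applied; output_file and output_file_sandbox are hypothetical lists:

def expected_output_files(output_file, output_file_sandbox, return_data):
    """Files the wrapper will look for: user outputs only when return_data is on,
    plus the sandbox files and the job stdout and stderr."""
    if return_data == 1:
        files = list(output_file) + list(output_file_sandbox)
    else:
        files = list(output_file_sandbox)
    return files + ['CMSSW_$NJob.stdout', 'CMSSW_$NJob.stderr']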
