root/cvsroot/COMP/CRAB/python/cms_cmssw.py

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.2 by slacapra, Wed Apr 26 15:31:06 2006 UTC vs.
Revision 1.226 by ewv, Thu Jul 3 19:30:07 2008 UTC

# Line 2 | Line 2 | from JobType import JobType
2   from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5 + from BlackWhiteListParser import BlackWhiteListParser
6   import common
6
7 import DataDiscovery
8 import DataLocation
7   import Scram
8 + from LFNBaseName import *
9  
10 < import os, string, re
10 > import os, string, glob
11  
12   class Cmssw(JobType):
13 <    def __init__(self, cfg_params):
13 >    def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
14          JobType.__init__(self, 'CMSSW')
15          common.logger.debug(3,'CMSSW::__init__')
16 +        self.skip_blocks = skip_blocks
17 +
18 +        self.argsList = []
19 +
20 +        self._params = {}
21 +        self.cfg_params = cfg_params
22 +        # init BlackWhiteListParser
23 +        self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
24 +
25 +        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
26  
27 <        self.analisys_common_info = {}
27 >        # number of jobs requested to be created, limit obj splitting
28 >        self.ncjobs = ncjobs
29  
30          log = common.logger
31 <        
31 >
32          self.scram = Scram.Scram(cfg_params)
23        scramArea = ''
33          self.additional_inbox_files = []
34          self.scriptExe = ''
35          self.executable = ''
36 +        self.executable_arch = self.scram.getArch()
37          self.tgz_name = 'default.tgz'
38 +        self.scriptName = 'CMSSW.sh'
39 +        self.pset = ''
40 +        self.datasetPath = ''
41 +
42 +        # set FJR file name
43 +        self.fjrFileName = 'crab_fjr.xml'
44  
45          self.version = self.scram.getSWVersion()
46 <        common.analisys_common_info['sw_version'] = self.version
46 >        version_array = self.version.split('_')
47 >        self.CMSSW_major = 0
48 >        self.CMSSW_minor = 0
49 >        self.CMSSW_patch = 0
50 >        try:
51 >            self.CMSSW_major = int(version_array[1])
52 >            self.CMSSW_minor = int(version_array[2])
53 >            self.CMSSW_patch = int(version_array[3])
54 >        except:
55 >            msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
56 >            raise CrabException(msg)
57  
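For reference, a minimal standalone sketch (helper name and sample release string are illustrative, not part of CRAB) of the version parsing done in the try/except block above:

    # Illustrative sketch: split a release name such as "CMSSW_2_1_8" into its
    # numeric components, mirroring the constructor code above.
    def parse_cmssw_version(version):
        parts = version.split('_')
        try:
            return int(parts[1]), int(parts[2]), int(parts[3])
        except (IndexError, ValueError):
            raise ValueError("Cannot parse CMSSW version string: " + version)

    print parse_cmssw_version("CMSSW_2_1_8")   # prints (2, 1, 8)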
58          ### collect Data cards
59 <        try:
60 <            self.owner = cfg_params['CMSSW.owner']
61 <            log.debug(6, "CMSSW::CMSSW(): owner = "+self.owner)
36 <            self.dataset = cfg_params['CMSSW.dataset']
37 <            log.debug(6, "CMSSW::CMSSW(): dataset = "+self.dataset)
38 <        except KeyError:
39 <            msg = "Error: owner and/or dataset not defined "
59 >
60 >        if not cfg_params.has_key('CMSSW.datasetpath'):
61 >            msg = "Error: datasetpath not defined "
62              raise CrabException(msg)
63  
64 <        self.dataTiers = []
65 <        try:
66 <            tmpDataTiers = string.split(cfg_params['CMSSW.data_tier'],',')
67 <            for tmp in tmpDataTiers:
68 <                tmp=string.strip(tmp)
69 <                self.dataTiers.append(tmp)
70 <                pass
71 <            pass
72 <        except KeyError:
73 <            pass
74 <        log.debug(6, "Cmssw::Cmssw(): dataTiers = "+str(self.dataTiers))
64 >        ### Temporary: added to remove input file control in the case of PU
65 >        self.dataset_pu = cfg_params.get('CMSSW.dataset_pu', None)
66 >
67 >        tmp =  cfg_params['CMSSW.datasetpath']
68 >        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
69 >        if string.lower(tmp)=='none':
70 >            self.datasetPath = None
71 >            self.selectNoInput = 1
72 >        else:
73 >            self.datasetPath = tmp
74 >            self.selectNoInput = 0
75  
76 +        self.dataTiers = []
77 +        self.debugWrap = ''
78 +        self.debug_wrapper = cfg_params.get('USER.debug_wrapper',False)
79 +        if self.debug_wrapper: self.debugWrap='--debug'
80          ## now the application
81 <        try:
82 <            self.executable = cfg_params['CMSSW.executable']
57 <            log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
58 <            msg = "Default executable cmsRun overridden. Switch to " + self.executable
59 <            log.debug(3,msg)
60 <        except KeyError:
61 <            self.executable = 'cmsRun'
62 <            msg = "User executable not defined. Use cmsRun"
63 <            log.debug(3,msg)
64 <            pass
81 >        self.executable = cfg_params.get('CMSSW.executable','cmsRun')
82 >        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
83  
84 <        try:
85 <            self.pset = cfg_params['CMSSW.pset']
86 <            log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
84 >        if not cfg_params.has_key('CMSSW.pset'):
85 >            raise CrabException("PSet file missing. Cannot run cmsRun ")
86 >        self.pset = cfg_params['CMSSW.pset']
87 >        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
88 >        if self.pset.lower() != 'none' :
89              if (not os.path.exists(self.pset)):
90                  raise CrabException("User defined PSet file "+self.pset+" does not exist")
91 <        except KeyError:
92 <            raise CrabException("PSet file missing. Cannot run cmsRun ")
91 >        else:
92 >            self.pset = None
93  
94          # output files
95 <        try:
96 <            self.output_file = []
95 >        ## stuff which must be returned always via sandbox
96 >        self.output_file_sandbox = []
97  
98 <            tmp = cfg_params['CMSSW.output_file']
99 <            if tmp != '':
80 <                tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
81 <                log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
82 <                for tmp in tmpOutFiles:
83 <                    tmp=string.strip(tmp)
84 <                    self.output_file.append(tmp)
85 <                    pass
98 >        # add fjr report by default via sandbox
99 >        self.output_file_sandbox.append(self.fjrFileName)
100  
101 <            else:
102 <                log.message("No output file defined: only stdout/err will be available")
103 <                pass
104 <            pass
105 <        except KeyError:
106 <            log.message("No output file defined: only stdout/err will be available")
107 <            pass
101 >        # other output files to be returned via sandbox or copied to SE
102 >        outfileflag = False
103 >        self.output_file = []
104 >        tmp = cfg_params.get('CMSSW.output_file',None)
105 >        if tmp :
106 >            self.output_file = [x.strip() for x in tmp.split(',')]
107 >            outfileflag = True #output found
108 >        #else:
109 >        #    log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
110  
111          # script_exe file as additional file in inputSandbox
112 <        try:
113 <           self.scriptExe = cfg_params['CMSSW.script_exe']
114 <           self.additional_inbox_files.append(self.scriptExe)
115 <        except KeyError:
116 <           pass
117 <        if self.scriptExe != '':
118 <           if os.path.isfile(self.scriptExe):
119 <              pass
120 <           else:
121 <              log.message("WARNING. file "+self.scriptExe+" not found")
122 <              sys.exit()
123 <                  
112 >        self.scriptExe = cfg_params.get('USER.script_exe',None)
113 >        if self.scriptExe :
114 >            if not os.path.isfile(self.scriptExe):
115 >                msg ="ERROR. file "+self.scriptExe+" not found"
116 >                raise CrabException(msg)
117 >            self.additional_inbox_files.append(string.strip(self.scriptExe))
118 >
119 >        if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
120 >            msg ="Error. script_exe  not defined"
121 >            raise CrabException(msg)
122 >
123 >        # use parent files...
124 >        self.useParent = self.cfg_params.get('CMSSW.use_parent',False)
125 >
126          ## additional input files
127 <        try:
128 <            tmpAddFiles = string.split(cfg_params['CMSSW.additional_input_files'],',')
127 >        if cfg_params.has_key('USER.additional_input_files'):
128 >            tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
129              for tmp in tmpAddFiles:
130 <                tmp=string.strip(tmp)
131 <                self.additional_inbox_files.append(tmp)
130 >                tmp = string.strip(tmp)
131 >                dirname = ''
132 >                if not tmp[0]=="/": dirname = "."
133 >                files = []
134 >                if string.find(tmp,"*")>-1:
135 >                    files = glob.glob(os.path.join(dirname, tmp))
136 >                    if len(files)==0:
137 >                        raise CrabException("No additional input file found with this pattern: "+tmp)
138 >                else:
139 >                    files.append(tmp)
140 >                for file in files:
141 >                    if not os.path.exists(file):
142 >                        raise CrabException("Additional input file not found: "+file)
143 >                    pass
144 >                    self.additional_inbox_files.append(string.strip(file))
145                  pass
146              pass
147 <        except KeyError:
148 <            pass
118 <
119 <        try:
120 <            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
121 <        except KeyError:
122 <            msg = 'Must define total_number_of_events and job_number_of_events'
123 <            raise CrabException(msg)
124 <            
125 < #Marco: FirstEvent is nolonger used inside PSet
126 < #        try:
127 < #            self.first = int(cfg_params['CMSSW.first_event'])
128 < #        except KeyError:
129 < #            self.first = 0
130 < #            pass
131 < #        log.debug(6, "Orca::Orca(): total number of events = "+`self.total_number_of_events`)
132 <        #log.debug(6, "Orca::Orca(): events per job = "+`self.job_number_of_events`)
133 < #        log.debug(6, "Orca::Orca(): first event = "+`self.first`)
134 <        
135 <        CEBlackList = []
136 <        try:
137 <            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
138 <            for tmp in tmpBad:
139 <                tmp=string.strip(tmp)
140 <                CEBlackList.append(tmp)
141 <        except KeyError:
142 <            pass
143 <
144 <        self.reCEBlackList=[]
145 <        for bad in CEBlackList:
146 <            self.reCEBlackList.append(re.compile( bad ))
147 >            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
148 >        pass
149  
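As a side note, a minimal standalone sketch (function name hypothetical, not part of CRAB) of how one additional_input_files entry is expanded by the loop above: wildcards are globbed relative to the current directory, plain names are taken literally, and a missing file is an error:

    import glob, os

    # Illustrative sketch of the expansion applied to each config entry.
    def expand_entry(entry):
        entry = entry.strip()
        dirname = ''
        if not entry.startswith('/'):
            dirname = '.'
        if '*' in entry:
            files = glob.glob(os.path.join(dirname, entry))
            if not files:
                raise RuntimeError("No additional input file found with this pattern: " + entry)
        else:
            files = [entry]
        for f in files:
            if not os.path.exists(f):
                raise RuntimeError("Additional input file not found: " + f)
        return files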
150 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
150 >        ## Events per job
151 >        if cfg_params.has_key('CMSSW.events_per_job'):
152 >            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
153 >            self.selectEventsPerJob = 1
154 >        else:
155 >            self.eventsPerJob = -1
156 >            self.selectEventsPerJob = 0
157  
158 <        CEWhiteList = []
159 <        try:
160 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
161 <            #tmpGood = ['cern']
162 <            for tmp in tmpGood:
163 <                tmp=string.strip(tmp)
164 <                #if (tmp == 'cnaf'): tmp = 'webserver' ########## warning: temp. patch
157 <                CEWhiteList.append(tmp)
158 <        except KeyError:
159 <            pass
158 >        ## number of jobs
159 >        if cfg_params.has_key('CMSSW.number_of_jobs'):
160 >            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
161 >            self.selectNumberOfJobs = 1
162 >        else:
163 >            self.theNumberOfJobs = 0
164 >            self.selectNumberOfJobs = 0
165  
166 <        #print 'CEWhiteList: ',CEWhiteList
167 <        self.reCEWhiteList=[]
168 <        for Good in CEWhiteList:
169 <            self.reCEWhiteList.append(re.compile( Good ))
166 >        if cfg_params.has_key('CMSSW.total_number_of_events'):
167 >            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
168 >            self.selectTotalNumberEvents = 1
169 >            if self.selectNumberOfJobs  == 1:
170 >                if (self.total_number_of_events != -1) and int(self.total_number_of_events) < int(self.theNumberOfJobs):
171 >                    msg = 'Must specify at least one event per job. total_number_of_events > number_of_jobs '
172 >                    raise CrabException(msg)
173 >        else:
174 >            self.total_number_of_events = 0
175 >            self.selectTotalNumberEvents = 0
176  
177 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
177 >        if self.pset != None:
178 >             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
179 >                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
180 >                 raise CrabException(msg)
181 >        else:
182 >             if (self.selectNumberOfJobs == 0):
183 >                 msg = 'Must specify  number_of_jobs.'
184 >                 raise CrabException(msg)
185 >
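To make the constraint above concrete, a minimal standalone sketch (helper name illustrative, not part of CRAB): when a pset is used, exactly two of the three splitting parameters must be supplied:

    # Illustrative sketch of the two-of-three rule enforced above.
    def splitting_params_ok(total_number_of_events=None,
                            events_per_job=None,
                            number_of_jobs=None):
        given = 0
        for p in (total_number_of_events, events_per_job, number_of_jobs):
            if p is not None:
                given += 1
        return given == 2

    print splitting_params_ok(total_number_of_events=1000, number_of_jobs=10)   # True
    print splitting_params_ok(total_number_of_events=1000)                      # False: too few
    print splitting_params_ok(1000, 100, 10)                                    # False: too many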
186 >        ## New method of dealing with seeds
187 >        self.incrementSeeds = []
188 >        self.preserveSeeds = []
189 >        if cfg_params.has_key('CMSSW.preserve_seeds'):
190 >            tmpList = cfg_params['CMSSW.preserve_seeds'].split(',')
191 >            for tmp in tmpList:
192 >                tmp = tmp.strip()

193 >                self.preserveSeeds.append(tmp)
194 >        if cfg_params.has_key('CMSSW.increment_seeds'):
195 >            tmpList = cfg_params['CMSSW.increment_seeds'].split(',')
196 >            for tmp in tmpList:
197 >                tmp = tmp.strip()
198 >                self.incrementSeeds.append(tmp)
199 >
200 >        ## Old method of dealing with seeds
201 >        ## FUTURE: This is for old CMSSW and old CRAB. Can throw exceptions after a couple of CRAB releases and then
202 >        ## remove
203 >        self.sourceSeed = cfg_params.get('CMSSW.pythia_seed',None)
204 >        if self.sourceSeed:
205 >            print "pythia_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
206 >            self.incrementSeeds.append('sourceSeed')
207 >            self.incrementSeeds.append('theSource')
208 >
209 >        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
210 >        if self.sourceSeedVtx:
211 >            print "vtx_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
212 >            self.incrementSeeds.append('VtxSmeared')
213 >
214 >        self.sourceSeedG4 = cfg_params.get('CMSSW.g4_seed',None)
215 >        if self.sourceSeedG4:
216 >            print "g4_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
217 >            self.incrementSeeds.append('g4SimHits')
218 >
219 >        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
220 >        if self.sourceSeedMix:
221 >            print "mix_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
222 >            self.incrementSeeds.append('mix')
223 >
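A minimal standalone sketch (helper name hypothetical, not part of CRAB) of how the seed lists above are filled: comma-separated config values feed preserve_seeds and increment_seeds, and the deprecated pythia_seed parameter is mapped onto increment_seeds:

    # Illustrative sketch of the new and deprecated seed parameters above.
    def build_seed_lists(cfg):
        preserve = [s.strip() for s in cfg.get('CMSSW.preserve_seeds', '').split(',') if s.strip()]
        increment = [s.strip() for s in cfg.get('CMSSW.increment_seeds', '').split(',') if s.strip()]
        if cfg.get('CMSSW.pythia_seed'):
            # deprecated spelling: translated into incremented seeds
            increment += ['sourceSeed', 'theSource']
        return preserve, increment

    print build_seed_lists({'CMSSW.increment_seeds': 'VtxSmeared, g4SimHits',
                            'CMSSW.pythia_seed': '12345'})
    # ([], ['VtxSmeared', 'g4SimHits', 'sourceSeed', 'theSource'])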
224 >        self.firstRun = cfg_params.get('CMSSW.first_run',None)
225 >
226 >
227 >        # Copy/return
228 >        self.copy_data = int(cfg_params.get('USER.copy_data',0))
229 >        self.return_data = int(cfg_params.get('USER.return_data',0))
230  
231          #DBSDLS-start
232 <        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
232 >        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
233          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
234          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
235 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
236          ## Perform the data location and discovery (based on DBS/DLS)
237 <        self.DataDiscoveryAndLocation(cfg_params)
238 <        #DBSDLS-end          
237 >        ## SL: Don't if NONE is specified as input (pythia use case)
238 >        blockSites = {}
239 >        if self.datasetPath:
240 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
241 >        #DBSDLS-end
242 >
243 >        ## Select Splitting
244 >        if self.selectNoInput:
245 >            if self.pset == None:
246 >                self.jobSplittingForScript()
247 >            else:
248 >                self.jobSplittingNoInput()
249 >        else:
250 >            self.jobSplittingByBlocks(blockSites)
251  
252 <        self.tgzNameWithPath = self.getTarBall(self.executable)
252 >        # modify Pset only the first time
253 >        if isNew:
254 >            if self.pset != None:
255 >                import PsetManipulator as pp
256 >                PsetEdit = pp.PsetManipulator(self.pset)
257 >                try:
258 >                    # Add FrameworkJobReport to parameter-set, set max events.
259 >                    # Reset later for data jobs by writeCFG which does all modifications
260 >                    PsetEdit.addCrabFJR(self.fjrFileName) # FUTURE: Job report addition not needed by CMSSW>1.5
261 >                    PsetEdit.maxEvent(self.eventsPerJob)
262 >                    PsetEdit.psetWriter(self.configFilename())
263 >                    ## If present, add TFileService to output files
264 >                    if not int(cfg_params.get('CMSSW.skip_TFileService_output',0)):
265 >                        tfsOutput = PsetEdit.getTFileService()
266 >                        if tfsOutput:
267 >                            if tfsOutput in self.output_file:
268 >                                common.logger.debug(5,"Output from TFileService "+tfsOutput+" already in output files")
269 >                            else:
270 >                                outfileflag = True #output found
271 >                                self.output_file.append(tfsOutput)
272 >                                common.logger.message("Adding "+tfsOutput+" to output files (from TFileService)")
273 >                            pass
274 >                        pass
275 >                    ## If present and requested, add PoolOutputModule to output files
276 >                    if int(cfg_params.get('CMSSW.get_edm_output',0)):
277 >                        edmOutput = PsetEdit.getPoolOutputModule()
278 >                        if edmOutput:
279 >                            if edmOutput in self.output_file:
280 >                                common.logger.debug(5,"Output from PoolOutputModule "+edmOutput+" already in output files")
281 >                            else:
282 >                                self.output_file.append(edmOutput)
283 >                                common.logger.message("Adding "+edmOutput+" to output files (from PoolOutputModule)")
284 >                            pass
285 >                        pass
286 >                except CrabException:
287 >                    msg='Error while manipulating ParameterSet: exiting...'
288 >                    raise CrabException(msg)
289 >            ## Prepare inputSandbox TarBall (only the first time)
290 >            self.tgzNameWithPath = self.getTarBall(self.executable)
291  
292      def DataDiscoveryAndLocation(self, cfg_params):
293  
294 <        fun = "CMSSW::DataDiscoveryAndLocation()"
294 >        import DataDiscovery
295 >        import DataLocation
296 >        common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
297 >
298 >        datasetPath=self.datasetPath
299  
300          ## Contact the DBS
301 +        common.logger.message("Contacting Data Discovery Services ...")
302          try:
303 <            self.pubdata=DataDiscovery.DataDiscovery(self.owner,
185 <                                                     self.dataset,
186 <                                                     self.dataTiers,
187 <                                                     cfg_params)
303 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
304              self.pubdata.fetchDBSInfo()
305  
306          except DataDiscovery.NotExistingDatasetError, ex :
307              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
308              raise CrabException(msg)
193
309          except DataDiscovery.NoDataTierinProvenanceError, ex :
310              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
311              raise CrabException(msg)
312          except DataDiscovery.DataDiscoveryError, ex:
313 <            msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
313 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
314              raise CrabException(msg)
315  
316 <        ## get list of all required data in the form of dbs paths  (dbs path = /dataset/datatier/owner)
317 <        self.DBSPaths=self.pubdata.getDBSPaths()
318 <        common.logger.message("Required data are : ")
319 <        for path in self.DBSPaths:
205 <            common.logger.message(" --> "+path )
316 >        self.filesbyblock=self.pubdata.getFiles()
317 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
318 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
319 >        self.parentFiles=self.pubdata.getParent()
320  
321          ## get max number of events
322 <        common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
209 <        self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
210 <        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
211 <
212 <        ## get fileblocks corresponding to the required data
213 <        fb=self.pubdata.getFileBlocks()
214 <        common.logger.debug(5,"fileblocks are %s"%fb)
322 >        self.maxEvents=self.pubdata.getMaxEvents()
323  
324          ## Contact the DLS and build a list of sites hosting the fileblocks
325          try:
326 <            dataloc=DataLocation.DataLocation(self.pubdata.getFileBlocks(),cfg_params)
326 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
327              dataloc.fetchDLSInfo()
328          except DataLocation.DataLocationError , ex:
329              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
330              raise CrabException(msg)
223        
224        allsites=dataloc.getSites()
225        common.logger.debug(5,"sites are %s"%allsites)
226        sites=self.checkBlackList(allsites)
227        common.logger.debug(5,"sites are (after black list) %s"%sites)
228        sites=self.checkWhiteList(sites)
229        common.logger.debug(5,"sites are (after white list) %s"%sites)
331  
332 <        if len(sites)==0:
333 <            msg = 'No sites hosting all the needed data! Exiting... '
334 <            raise CrabException(msg)
335 <        common.logger.message("List of Sites hosting the data : "+str(sites))
336 <        common.logger.debug(6, "List of Sites: "+str(sites))
337 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
338 <        return
339 <        
340 <    def checkBlackList(self, allSites):
341 <        if len(self.reCEBlackList)==0: return allSites
342 <        sites = []
343 <        for site in allSites:
243 <            common.logger.debug(10,'Site '+site)
244 <            good=1
245 <            for re in self.reCEBlackList:
246 <                if re.search(site):
247 <                    common.logger.message('CE in black list, skipping site '+site)
248 <                    good=0
249 <                pass
250 <            if good: sites.append(site)
251 <        if len(sites) == 0:
252 <            common.logger.debug(3,"No sites found after BlackList")
332 >
333 >        sites = dataloc.getSites()
334 >        allSites = []
335 >        listSites = sites.values()
336 >        for listSite in listSites:
337 >            for oneSite in listSite:
338 >                allSites.append(oneSite)
339 >        allSites = self.uniquelist(allSites)
340 >
341 >        # screen output
342 >        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
343 >
344          return sites
345  
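A minimal standalone sketch (not part of CRAB; site names are made up) of the flattening done above: the {block: [sites]} mapping returned by the DLS lookup is reduced to a duplicate-free list of sites:

    # Illustrative sketch of the allSites/uniquelist flattening above.
    def unique_sites(block_sites):
        seen = []
        for site_list in block_sites.values():
            for site in site_list:
                if site not in seen:
                    seen.append(site)
        return seen

    print unique_sites({'/Dataset#block1': ['T2_IT_Bari', 'T2_US_UCSD'],
                        '/Dataset#block2': ['T2_US_UCSD']})
    # e.g. ['T2_IT_Bari', 'T2_US_UCSD'] (order follows dict iteration)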
346 <    def checkWhiteList(self, allsites):
346 >    def jobSplittingByBlocks(self, blockSites):
347 >        """
348 >        Perform job splitting. Jobs run over an integer number of files
349 >        and no more than one block.
350 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
351 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
352 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
353 >                  self.maxEvents, self.filesbyblock
354 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
355 >              self.total_number_of_jobs - Total # of jobs
356 >              self.list_of_args - File(s) job will run on (a list of lists)
357 >        """
358  
359 <        if len(self.reCEWhiteList)==0: return pubDBUrls
360 <        sites = []
361 <        for site in allsites:
362 <            #print 'connecting to the URL ',url
363 <            good=0
364 <            for re in self.reCEWhiteList:
365 <                if re.search(site):
366 <                    common.logger.debug(5,'CE in white list, adding site '+site)
367 <                    good=1
368 <                if not good: continue
369 <                sites.append(site)
370 <        if len(sites) == 0:
371 <            common.logger.message("No sites found after WhiteList\n")
359 >        # ---- Handle the possible job splitting configurations ---- #
360 >        if (self.selectTotalNumberEvents):
361 >            totalEventsRequested = self.total_number_of_events
362 >        if (self.selectEventsPerJob):
363 >            eventsPerJobRequested = self.eventsPerJob
364 >            if (self.selectNumberOfJobs):
365 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
366 >
367 >        # If user requested all the events in the dataset
368 >        if (totalEventsRequested == -1):
369 >            eventsRemaining=self.maxEvents
370 >        # If user requested more events than are in the dataset
371 >        elif (totalEventsRequested > self.maxEvents):
372 >            eventsRemaining = self.maxEvents
373 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
374 >        # If user requested less events than are in the dataset
375          else:
376 <            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
377 <        return sites
376 >            eventsRemaining = totalEventsRequested
377 >
378 >        # If user requested more events per job than are in the dataset
379 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
380 >            eventsPerJobRequested = self.maxEvents
381 >
382 >        # For user info at end
383 >        totalEventCount = 0
384 >
385 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
386 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
387 >
388 >        if (self.selectNumberOfJobs):
389 >            common.logger.message("May not create the exact number_of_jobs requested.")
390 >
391 >        if ( self.ncjobs == 'all' ) :
392 >            totalNumberOfJobs = 999999999
393 >        else :
394 >            totalNumberOfJobs = self.ncjobs
395 >
396 >        blocks = blockSites.keys()
397 >        blockCount = 0
398 >        # Backup variable in case self.maxEvents counted events in a non-included block
399 >        numBlocksInDataset = len(blocks)
400 >
401 >        jobCount = 0
402 >        list_of_lists = []
403 >
404 >        # list tracking which jobs belong to which block
405 >        jobsOfBlock = {}
406 >
407 >        # ---- Iterate over the blocks in the dataset until ---- #
408 >        # ---- we've met the requested total # of events    ---- #
409 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
410 >            block = blocks[blockCount]
411 >            blockCount += 1
412 >            if block not in jobsOfBlock.keys() :
413 >                jobsOfBlock[block] = []
414 >
415 >            if self.eventsbyblock.has_key(block) :
416 >                numEventsInBlock = self.eventsbyblock[block]
417 >                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
418 >
419 >                files = self.filesbyblock[block]
420 >                numFilesInBlock = len(files)
421 >                if (numFilesInBlock <= 0):
422 >                    continue
423 >                fileCount = 0
424 >
425 >                # ---- New block => New job ---- #
426 >                parString = ""
427 >                # counter for number of events in files currently worked on
428 >                filesEventCount = 0
429 >                # flag if next while loop should touch new file
430 >                newFile = 1
431 >                # job event counter
432 >                jobSkipEventCount = 0
433 >
434 >                # ---- Iterate over the files in the block until we've met the requested ---- #
435 >                # ---- total # of events or we've gone over all the files in this block  ---- #
436 >                pString=''
437 >                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
438 >                    file = files[fileCount]
439 >                    if self.useParent:
440 >                        parent = self.parentFiles[file]
441 >                        for f in parent :
442 >                            pString += '\\\"' + f + '\\\"\,'
443 >                        common.logger.debug(6, "File "+str(file)+" has the following parents: "+str(parent))
444 >                        common.logger.write("File "+str(file)+" has the following parents: "+str(parent))
445 >                    if newFile :
446 >                        try:
447 >                            numEventsInFile = self.eventsbyfile[file]
448 >                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
449 >                            # increase filesEventCount
450 >                            filesEventCount += numEventsInFile
451 >                            # Add file to current job
452 >                            parString += '\\\"' + file + '\\\"\,'
453 >                            newFile = 0
454 >                        except KeyError:
455 >                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
456 >
457 >                    eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining)
458 >                    # if less events in file remain than eventsPerJobRequested
459 >                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested):
460 >                        # if last file in block
461 >                        if ( fileCount == numFilesInBlock-1 ) :
462 >                            # end job using last file, use remaining events in block
463 >                            # close job and touch new file
464 >                            fullString = parString[:-2]
465 >                            if self.useParent:
466 >                                fullParentString = pString[:-2]
467 >                                list_of_lists.append([fullString,fullParentString,str(-1),str(jobSkipEventCount)])
468 >                            else:
469 >                                list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
470 >                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
471 >                            self.jobDestination.append(blockSites[block])
472 >                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
473 >                            # fill jobs of block dictionary
474 >                            jobsOfBlock[block].append(jobCount+1)
475 >                            # reset counter
476 >                            jobCount = jobCount + 1
477 >                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
478 >                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
479 >                            jobSkipEventCount = 0
480 >                            # reset file
481 >                            pString = ""
482 >                            parString = ""
483 >                            filesEventCount = 0
484 >                            newFile = 1
485 >                            fileCount += 1
486 >                        else :
487 >                            # go to next file
488 >                            newFile = 1
489 >                            fileCount += 1
490 >                    # if events in file equal to eventsPerJobRequested
491 >                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
492 >                        # close job and touch new file
493 >                        fullString = parString[:-2]
494 >                        if self.useParent:
495 >                            fullParentString = pString[:-2]
496 >                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
497 >                        else:
498 >                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
499 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
500 >                        self.jobDestination.append(blockSites[block])
501 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
502 >                        jobsOfBlock[block].append(jobCount+1)
503 >                        # reset counter
504 >                        jobCount = jobCount + 1
505 >                        totalEventCount = totalEventCount + eventsPerJobRequested
506 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
507 >                        jobSkipEventCount = 0
508 >                        # reset file
509 >                        pString = ""
510 >                        parString = ""
511 >                        filesEventCount = 0
512 >                        newFile = 1
513 >                        fileCount += 1
514 >
515 >                    # if more events in file remain than eventsPerJobRequested
516 >                    else :
517 >                        # close job but don't touch new file
518 >                        fullString = parString[:-2]
519 >                        if self.useParent:
520 >                            fullParentString = pString[:-2]
521 >                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
522 >                        else:
523 >                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
524 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
525 >                        self.jobDestination.append(blockSites[block])
526 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
527 >                        jobsOfBlock[block].append(jobCount+1)
528 >                        # increase counter
529 >                        jobCount = jobCount + 1
530 >                        totalEventCount = totalEventCount + eventsPerJobRequested
531 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
532 >                        # calculate skip events for last file
533 >                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
534 >                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
535 >                        # remove all but the last file
536 >                        filesEventCount = self.eventsbyfile[file]
537 >                        if self.useParent:
538 >                            for f in parent : pString += '\\\"' + f + '\\\"\,'
539 >                        parString = '\\\"' + file + '\\\"\,'
540 >                    pass # END if
541 >                pass # END while (iterate over files in the block)
542 >        pass # END while (iterate over blocks in the dataset)
543 >        self.ncjobs = self.total_number_of_jobs = jobCount
544 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
545 >            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
546 >        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
547 >
548 >        # screen output
549 >        screenOutput = "List of jobs and available destination sites:\n\n"
550 >
551 >        # keep track of blocks with no sites, to print a warning at the end
552 >        noSiteBlock = []
553 >        bloskNoSite = []
554 >
555 >        blockCounter = 0
556 >        for block in blocks:
557 >            if block in jobsOfBlock.keys() :
558 >                blockCounter += 1
559 >                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),
560 >                    ','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
561 >                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
562 >                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
563 >                    bloskNoSite.append( blockCounter )
564 >
565 >        common.logger.message(screenOutput)
566 >        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
567 >            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
568 >            virgola = ""
569 >            if len(bloskNoSite) > 1:
570 >                virgola = ","
571 >            for block in bloskNoSite:
572 >                msg += ' ' + str(block) + virgola
573 >            msg += '\n               Related jobs:\n                 '
574 >            virgola = ""
575 >            if len(noSiteBlock) > 1:
576 >                virgola = ","
577 >            for range_jobs in noSiteBlock:
578 >                msg += str(range_jobs) + virgola
579 >            msg += '\n               will not be submitted and this block of data can not be analyzed!\n'
580 >            if self.cfg_params.has_key('EDG.se_white_list'):
581 >                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
582 >                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
583 >                msg += 'Please check if the dataset is available at this site!)\n'
584 >            if self.cfg_params.has_key('EDG.ce_white_list'):
585 >                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
586 >                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
587 >                msg += 'Please check if the dataset is available at this site!)\n'
588 >
589 >            common.logger.message(msg)
590 >
591 >        self.list_of_args = list_of_lists
592 >        return
593 >
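To illustrate the splitting logic above, a minimal standalone sketch (not part of CRAB) of the greedy per-block packing: files are accumulated into jobs of at most events_per_job events, a job may start partway through a file (the skip-events argument), and the last job of a block takes whatever is left (max events -1):

    # Illustrative sketch of the per-block splitting performed above.
    def split_block(files_with_events, events_per_job):
        # files_with_events: list of (filename, n_events) for one block
        # returns a list of (files, max_events, skip_events) tuples, one per job
        jobs = []
        current = []   # files of the job being built
        avail = 0      # events those files still offer to this job
        skip = 0       # events to skip at the start of the job's file list
        for i, (fname, n) in enumerate(files_with_events):
            current.append(fname)
            avail += n
            while avail >= events_per_job:       # close as many full jobs as possible
                jobs.append((list(current), events_per_job, skip))
                avail -= events_per_job
                if avail == 0:                   # next job starts on a fresh file
                    current, skip = [], 0
                else:                            # next job starts inside this file
                    current, skip = [fname], n - avail
            if i == len(files_with_events) - 1 and avail > 0:
                jobs.append((list(current), -1, skip))   # last job: remaining events
        return jobs

    print split_block([('f1.root', 300), ('f2.root', 100)], 250)
    # [(['f1.root'], 250, 0), (['f1.root', 'f2.root'], -1, 250)]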
594 >    def jobSplittingNoInput(self):
595 >        """
596 >        Perform job splitting based on number of event per job
597 >        """
598 >        common.logger.debug(5,'Splitting per events')
599 >
600 >        if (self.selectEventsPerJob):
601 >            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
602 >        if (self.selectNumberOfJobs):
603 >            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
604 >        if (self.selectTotalNumberEvents):
605 >            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
606 >
607 >        if (self.total_number_of_events < 0):
608 >            msg='Cannot split jobs per Events with "-1" as total number of events'
609 >            raise CrabException(msg)
610 >
611 >        if (self.selectEventsPerJob):
612 >            if (self.selectTotalNumberEvents):
613 >                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
614 >            elif(self.selectNumberOfJobs) :
615 >                self.total_number_of_jobs =self.theNumberOfJobs
616 >                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
617 >
618 >        elif (self.selectNumberOfJobs) :
619 >            self.total_number_of_jobs = self.theNumberOfJobs
620 >            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
621 >
622 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
623 >
624 >        # is there any remainder?
625 >        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
626 >
627 >        common.logger.debug(5,'Check  '+str(check))
628 >
629 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' events, for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
630 >        if check > 0:
631 >            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
632 >
633 >        # argument is seed number.$i
634 >        self.list_of_args = []
635 >        for i in range(self.total_number_of_jobs):
636 >            ## Since there is no input, any site is good
637 >            self.jobDestination.append([""]) #must be empty to write correctly the xml
638 >            args=[]
639 >            if (self.firstRun):
640 >                ## pythia first run
641 >                args.append(str(self.firstRun)+str(i))
642 >            self.list_of_args.append(args)
643 >
644 >        return
645 >
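The arithmetic above, as a minimal standalone sketch (not part of CRAB): with no input dataset the job count is simply the integer quotient, and any remainder of events is dropped with a warning:

    # Illustrative sketch of the event-driven splitting above.
    def no_input_split(total_events, events_per_job):
        n_jobs = int(total_events / events_per_job)
        remainder = total_events - n_jobs * events_per_job
        return n_jobs, remainder

    print no_input_split(1000, 300)   # (3, 100): 3 jobs of 300 events, 100 events left out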
646 >
647 >    def jobSplittingForScript(self):
648 >        """
649 >        Perform job splitting based on number of job
650 >        """
651 >        common.logger.debug(5,'Splitting per job')
652 >        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
653 >
654 >        self.total_number_of_jobs = self.theNumberOfJobs
655 >
656 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
657 >
658 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
659 >
660 >        # argument is seed number.$i
661 >        self.list_of_args = []
662 >        for i in range(self.total_number_of_jobs):
663 >            self.jobDestination.append([""])
664 >            self.list_of_args.append([str(i)])
665 >        return
666 >
667 >    def split(self, jobParams,firstJobID):
668 >
669 >        njobs = self.total_number_of_jobs
670 >        arglist = self.list_of_args
671 >        # create the empty structure
672 >        for i in range(njobs):
673 >            jobParams.append("")
674 >
675 >        listID=[]
676 >        listField=[]
677 >        for id in range(njobs):
678 >            job = id + int(firstJobID)
679 >            jobParams[id] = arglist[id]
680 >            listID.append(job+1)
681 >            job_ToSave ={}
682 >            concString = ' '
683 >            argu=''
684 >            if len(jobParams[id]):
685 >                argu +=   concString.join(jobParams[id] )
686 >            job_ToSave['arguments']= str(job+1)+' '+argu
687 >            job_ToSave['dlsDestination']= self.jobDestination[id]
688 >            listField.append(job_ToSave)
689 >            msg="Job "+str(job)+" Arguments:   "+str(job+1)+" "+argu+"\n"  \
690 >            +"                     Destination: "+str(self.jobDestination[id])
691 >            common.logger.debug(5,msg)
692 >        common._db.updateJob_(listID,listField)
693 >        self.argsList = (len(jobParams[0])+1)
694 >
695 >        return
696 >
697 >    def numberOfJobs(self):
698 >        return self.total_number_of_jobs
699  
700      def getTarBall(self, exe):
701          """
702          Return the TarBall with lib and exe
703          """
704 <        
279 <        # if it exist, just return it
280 <        self.tgzNameWithPath = common.work_space.shareDir()+self.tgz_name
704 >        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
705          if os.path.exists(self.tgzNameWithPath):
706              return self.tgzNameWithPath
707  
# Line 290 | Line 714 | class Cmssw(JobType):
714  
715          # First of all declare the user Scram area
716          swArea = self.scram.getSWArea_()
293        #print "swArea = ", swArea
294        swVersion = self.scram.getSWVersion()
295        #print "swVersion = ", swVersion
717          swReleaseTop = self.scram.getReleaseTop_()
718 <        #print "swReleaseTop = ", swReleaseTop
298 <        
718 >
719          ## check if working area is release top
720          if swReleaseTop == '' or swArea == swReleaseTop:
721 +            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
722              return
723  
724 <        filesToBeTarred = []
725 <        ## First find the executable
726 <        if (self.executable != ''):
727 <            exeWithPath = self.scram.findFile_(executable)
728 < #           print exeWithPath
729 <            if ( not exeWithPath ):
730 <                raise CrabException('User executable '+executable+' not found')
731 <
732 <            ## then check if it's private or not
733 <            if exeWithPath.find(swReleaseTop) == -1:
734 <                # the exe is private, so we must ship
735 <                common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
736 <                path = swArea+'/'
737 <                exe = string.replace(exeWithPath, path,'')
738 <                filesToBeTarred.append(exe)
739 <                pass
740 <            else:
741 <                # the exe is from release, we'll find it on WN
724 >        import tarfile
725 >        try: # create tar ball
726 >            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
727 >            ## First find the executable
728 >            if (self.executable != ''):
729 >                exeWithPath = self.scram.findFile_(executable)
730 >                if ( not exeWithPath ):
731 >                    raise CrabException('User executable '+executable+' not found')
732 >
733 >                ## then check if it's private or not
734 >                if exeWithPath.find(swReleaseTop) == -1:
735 >                    # the exe is private, so we must ship
736 >                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
737 >                    path = swArea+'/'
738 >                    # distinguish case when script is in user project area or given by full path somewhere else
739 >                    if exeWithPath.find(path) >= 0 :
740 >                        exe = string.replace(exeWithPath, path,'')
741 >                        tar.add(path+exe,exe)
742 >                    else :
743 >                        tar.add(exeWithPath,os.path.basename(executable))
744 >                    pass
745 >                else:
746 >                    # the exe is from release, we'll find it on WN
747 >                    pass
748 >
749 >            ## Now get the libraries: only those in local working area
750 >            libDir = 'lib'
751 >            lib = swArea+'/' +libDir
752 >            common.logger.debug(5,"lib "+lib+" to be tarred")
753 >            if os.path.exists(lib):
754 >                tar.add(lib,libDir)
755 >
756 >            ## Now check if module dir is present
757 >            moduleDir = 'module'
758 >            module = swArea + '/' + moduleDir
759 >            if os.path.isdir(module):
760 >                tar.add(module,moduleDir)
761 >
762 >            ## Now check if any data dir(s) is present
763 >            self.dataExist = False
764 >            todo_list = [(i, i) for i in  os.listdir(swArea+"/src")]
765 >            while len(todo_list):
766 >                entry, name = todo_list.pop()
767 >                if name.startswith('crab_0_') or  name.startswith('.') or name == 'CVS':
768 >                    continue
769 >                if os.path.isdir(swArea+"/src/"+entry):
770 >                    entryPath = entry + '/'
771 >                    todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
772 >                    if name == 'data':
773 >                        self.dataExist=True
774 >                        common.logger.debug(5,"data "+entry+" to be tarred")
775 >                        tar.add(swArea+"/src/"+entry,"src/"+entry)
776 >                    pass
777                  pass
778 <
779 <        ## Now get the libraries: only those in local working area
780 <        libDir = 'lib'
781 <        lib = swArea+'/' +libDir
782 <        common.logger.debug(5,"lib "+lib+" to be tarred")
783 <        if os.path.exists(lib):
784 <            filesToBeTarred.append(libDir)
785 <
786 <        ## Now check if the Data dir is present
787 <        dataDir = 'src/Data/'
788 <        if os.path.isdir(swArea+'/'+dataDir):
789 <            filesToBeTarred.append(dataDir)
790 <
791 <        ## Create the tar-ball
792 <        if len(filesToBeTarred)>0:
793 <            cwd = os.getcwd()
794 <            os.chdir(swArea)
795 <            tarcmd = 'tar zcvf ' + self.tgzNameWithPath + ' '
796 <            for line in filesToBeTarred:
797 <                tarcmd = tarcmd + line + ' '
798 <            cout = runCommand(tarcmd)
799 <            if not cout:
800 <                raise CrabException('Could not create tar-ball')
801 <            os.chdir(cwd)
802 <        else:
803 <            common.logger.debug(5,"No files to be to be tarred")
804 <        
805 <        return
806 <        
807 <    def wsSetupEnvironment(self, nj):
778 >
779 >            ### CMSSW ParameterSet
780 >            if not self.pset is None:
781 >                cfg_file = common.work_space.jobDir()+self.configFilename()
782 >                tar.add(cfg_file,self.configFilename())
783 >                common.logger.debug(5,"File added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
784 >
785 >
786 >            ## Add ProdCommon dir to tar
787 >            prodcommonDir = './'
788 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
789 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools','ProdCommon/Core','ProdCommon/MCPayloads', 'IMProv']
790 >            for file in neededStuff:
791 >                tar.add(prodcommonPath+file,prodcommonDir+file)
792 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
793 >
794 >            ##### ML stuff
795 >            ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
796 >            path=os.environ['CRABDIR'] + '/python/'
797 >            for file in ML_file_list:
798 >                tar.add(path+file,file)
799 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
800 >
801 >            ##### Utils
802 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py']
803 >            for file in Utils_file_list:
804 >                tar.add(path+file,file)
805 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
806 >
807 >            ##### AdditionalFiles
808 >            for file in self.additional_inbox_files:
809 >                tar.add(file,string.split(file,'/')[-1])
810 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
811 >
812 >            tar.close()
813 >        except IOError:
814 >            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
815 >        except tarfile.TarError:
816 >            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
817 >
818 >        ## check for tarball size
819 >        tarballinfo = os.stat(self.tgzNameWithPath)
820 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
821 >            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
822 >
823 >        ## create tar-ball with ML stuff
824 >
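A minimal standalone sketch (not part of CRAB) of the sandbox size guard applied above; the limit comes from EDG.maxtarballsize and defaults to 9.5 MB:

    import os

    # Illustrative sketch of the tarball size check above.
    def check_sandbox_size(tgz_path, max_mb=9.5):
        size_mb = os.stat(tgz_path).st_size / 1024.0 / 1024.0
        if size_mb > max_mb:
            raise RuntimeError('Input sandbox size of %.1f MB exceeds the %.1f MB limit'
                               % (size_mb, max_mb))
        return size_mb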
825 >    def wsSetupEnvironment(self, nj=0):
826          """
827          Returns part of a job script which prepares
828          the execution environment for the job 'nj'.
829          """
830 +        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
831 +            psetName = 'pset.py'
832 +        else:
833 +            psetName = 'pset.cfg'
834          # Prepare JobType-independent part
835 <        txt = self.wsSetupCMSEnvironment_()
835 >        txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
836 >        txt += 'echo ">>> setup environment"\n'
837 >        txt += 'if [ $middleware == LCG ]; then \n'
838 >        txt += self.wsSetupCMSLCGEnvironment_()
839 >        txt += 'elif [ $middleware == OSG ]; then\n'
840 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
841 >        txt += '    if [ ! $? == 0 ] ;then\n'
842 >        txt += '        echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
843 >        txt += '        job_exit_code=10016\n'
844 >        txt += '        func_exit\n'
845 >        txt += '    fi\n'
846 >        txt += '    echo ">>> Created working directory: $WORKING_DIR"\n'
847 >        txt += '\n'
848 >        txt += '    echo "Change to working directory: $WORKING_DIR"\n'
849 >        txt += '    cd $WORKING_DIR\n'
850 >        txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
851 >        txt += self.wsSetupCMSOSGEnvironment_()
852 >        txt += 'fi\n'
853  
854          # Prepare JobType-specific part
855          scram = self.scram.commandName()
856          txt += '\n\n'
857 <        txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
857 >        txt += 'echo ">>> specific cmssw setup environment:"\n'
858 >        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
859          txt += scram+' project CMSSW '+self.version+'\n'
860          txt += 'status=$?\n'
861          txt += 'if [ $status != 0 ] ; then\n'
862 <        txt += '   echo "SET_EXE_ENV 1 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
863 <        txt += '   echo "JOB_EXIT_STATUS = 5"\n'
864 <        txt += '   echo "SanityCheckCode = 5" | tee -a $RUNTIME_AREA/$repo\n'
369 <        txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
370 <        txt += '   exit 5 \n'
862 >        txt += '    echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n'
863 >        txt += '    job_exit_code=10034\n'
864 >        txt += '    func_exit\n'
865          txt += 'fi \n'
372        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
866          txt += 'cd '+self.version+'\n'
867 <        ### needed grep for bug in scramv1 ###
867 >        txt += 'SOFTWARE_DIR=`pwd`\n'
868 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
869          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
870 <
870 >        txt += 'if [ $? != 0 ] ; then\n'
871 >        txt += '    echo "ERROR ==> Problem with the command: "\n'
872 >        txt += '    echo "eval \`'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME \` at `hostname`"\n'
873 >        txt += '    job_exit_code=10034\n'
874 >        txt += '    func_exit\n'
875 >        txt += 'fi \n'
876          # Handle the arguments:
877          txt += "\n"
878 <        txt += "## ARGUMNETS: $1 Job Number\n"
380 <        # txt += "## ARGUMNETS: $2 First Event for this job\n"
381 <        # txt += "## ARGUMNETS: $3 Max Event for this job\n"
878 >        txt += "## number of arguments (first argument always jobnumber)\n"
879          txt += "\n"
880 <        txt += "narg=$#\n"
384 <        txt += "if [ $narg -lt 1 ]\n"
880 >        txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
881          txt += "then\n"
882 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$narg+ \n"
883 <        txt += '    echo "JOB_EXIT_STATUS = 1"\n'
884 <        txt += '    echo "SanityCheckCode = 1" | tee -a $RUNTIME_AREA/$repo\n'
389 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
390 <        txt += "    exit 1\n"
882 >        txt += "    echo 'ERROR ==> Too few arguments' +$nargs+ \n"
883 >        txt += '    job_exit_code=50113\n'
884 >        txt += "    func_exit\n"
885          txt += "fi\n"
886          txt += "\n"
393        txt += "NJob=$1\n"
394        # txt += "FirstEvent=$2\n"
395        # txt += "MaxEvents=$3\n"
887  
888          # Prepare job-specific part
889          job = common.job_list[nj]
890 <        pset = os.path.basename(job.configFilename())
891 <        txt += '\n'
892 <        txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
402 <        # txt += 'if [ -e $RUNTIME_AREA/orcarc_$CE ] ; then\n'
403 <        # txt += '  cat $RUNTIME_AREA/orcarc_$CE .orcarc >> .orcarc_tmp\n'
404 <        # txt += '  mv .orcarc_tmp .orcarc\n'
405 <        # txt += 'fi\n'
406 <        # txt += 'if [ -e $RUNTIME_AREA/init_$CE.sh ] ; then\n'
407 <        # txt += '  cp $RUNTIME_AREA/init_$CE.sh init.sh\n'
408 <        # txt += 'fi\n'
890 >        if (self.datasetPath):
891 >            txt += '\n'
892 >            txt += 'DatasetPath='+self.datasetPath+'\n'
893  
894 <        if len(self.additional_inbox_files) > 0:
411 <            for file in self.additional_inbox_files:
412 <                txt += 'if [ -e $RUNTIME_AREA/'+file+' ] ; then\n'
413 <                txt += '   cp $RUNTIME_AREA/'+file+' .\n'
414 <                txt += '   chmod +x '+file+'\n'
415 <                txt += 'fi\n'
416 <            pass
417 <
418 <        # txt += '\n'
419 <        # txt += 'chmod +x ./init.sh\n'
420 <        # txt += './init.sh\n'
421 <        # txt += 'exitStatus=$?\n'
422 <        # txt += 'if [ $exitStatus != 0 ] ; then\n'
423 <        # txt += '  echo "SET_EXE_ENV 1 ==> ERROR StageIn init script failed"\n'
424 <        # txt += '  echo "JOB_EXIT_STATUS = $exitStatus" \n'
425 <        # txt += '  echo "SanityCheckCode = $exitStatus" | tee -a $RUNTIME_AREA/$repo\n'
426 <        # txt += '  dumpStatus $RUNTIME_AREA/$repo\n'
427 <        # txt += '  exit $exitStatus\n'
428 <        # txt += 'fi\n'
429 <        # txt += "echo 'SET_EXE_ENV 0 ==> job setup ok'\n"
430 <        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
431 <
432 <        # txt += 'echo "FirstEvent=$FirstEvent" >> .orcarc\n'
433 <        # txt += 'echo "MaxEvents=$MaxEvents" >> .orcarc\n'
434 <        # if self.ML:
435 <        #     txt += 'echo "MonalisaJobId=$NJob" >> .orcarc\n'
894 >            datasetpath_split = self.datasetPath.split("/")
895  
896 <        txt += '\n'
897 <        txt += 'echo "***** cat pset.cfg *********"\n'
898 <        txt += 'cat pset.cfg\n'
899 <        txt += 'echo "****** end pset.cfg ********"\n'
896 >            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
897 >            txt += 'DataTier='+datasetpath_split[2]+'\n'
898 >            txt += 'ApplicationFamily=cmsRun\n'
899 >
900 >        else:
901 >            txt += 'DatasetPath=MCDataTier\n'
902 >            txt += 'PrimaryDataset=null\n'
903 >            txt += 'DataTier=null\n'
904 >            txt += 'ApplicationFamily=MCDataTier\n'
905 >        if self.pset != None:
906 >            pset = os.path.basename(job.configFilename())
907 >            txt += '\n'
908 >            txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
909 >            if (self.datasetPath): # standard job
910 >                txt += 'InputFiles=${args[1]}; export InputFiles\n'
911 >                if (self.useParent):
912 >                    txt += 'ParentFiles=${args[2]}; export ParentFiles\n'
913 >                    txt += 'MaxEvents=${args[3]}; export MaxEvents\n'
914 >                    txt += 'SkipEvents=${args[4]}; export SkipEvents\n'
915 >                else:
916 >                    txt += 'MaxEvents=${args[2]}; export MaxEvents\n'
917 >                    txt += 'SkipEvents=${args[3]}; export SkipEvents\n'
918 >                txt += 'echo "Inputfiles:<$InputFiles>"\n'
919 >                if (self.useParent): txt += 'echo "ParentFiles:<$ParentFiles>"\n'
920 >                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
921 >                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
922 >            else:  # pythia like job
923 >                txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
924 >                txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
925 >                txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
926 >                txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
927 >                if (self.firstRun):
928 >                    txt += 'FirstRun=${args[1]}; export FirstRun\n'
929 >                    txt += 'echo "FirstRun: <$FirstRun>"\n'
930 >
931 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
932 >
933 >
934 >        if self.pset != None:
935 >            # FUTURE: Can simplify for 2_1_x and higher
936 >            txt += '\n'
937 >            if self.debug_wrapper==True:
938 >                txt += 'echo "***** cat ' + psetName + ' *********"\n'
939 >                txt += 'cat ' + psetName + '\n'
940 >                txt += 'echo "****** end ' + psetName + ' ********"\n'
941 >                txt += '\n'
942 >            if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
943 >                txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
944 >            else:
945 >                txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
946 >            txt += 'echo "PSETHASH = $PSETHASH" \n'
947 >            txt += '\n'
948          return txt
949  
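For reference, a hedged sketch of the version switch wsSetupEnvironment applies above (python configs and `edmConfigHash <file>` from CMSSW 2_1_x / 3_x on, .cfg configs and `edmConfigHash < <file>` before); the helper name is illustrative and not part of the class:

def pset_name_and_hash_cmd(major, minor):
    # Same test as above: (major >= 2 and minor >= 1) or major >= 3
    if (major >= 2 and minor >= 1) or (major >= 3):
        return 'pset.py', 'PSETHASH=`edmConfigHash pset.py`'
    return 'pset.cfg', 'PSETHASH=`edmConfigHash < pset.cfg`'

# pset_name_and_hash_cmd(2, 1) -> ('pset.py',  'PSETHASH=`edmConfigHash pset.py`')
# pset_name_and_hash_cmd(1, 6) -> ('pset.cfg', 'PSETHASH=`edmConfigHash < pset.cfg`')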
950 <    def modifySteeringCards(self, nj):
950 >    def wsUntarSoftware(self, nj=0):
951          """
952 <        modify the card provided by the user,
953 <        writing a new card into share dir
952 >        Put in the script the commands to untar the software
953 >        tar-ball shipped in the input sandbox.
954          """
955 <        
955 >
956 >        txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
957 >
958 >        if os.path.isfile(self.tgzNameWithPath):
959 >            txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
960 >            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
961 >            if  self.debug_wrapper:
962 >                txt += 'ls -Al \n'
963 >            txt += 'untar_status=$? \n'
964 >            txt += 'if [ $untar_status -ne 0 ]; then \n'
965 >            txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
966 >            txt += '   job_exit_code=$untar_status\n'
967 >            txt += '   func_exit\n'
968 >            txt += 'else \n'
969 >            txt += '   echo "Successful untar" \n'
970 >            txt += 'fi \n'
971 >            txt += '\n'
972 >            txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
973 >            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
974 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
975 >            txt += 'else\n'
976 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
977 >            txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
978 >            txt += 'fi\n'
979 >            txt += '\n'
980 >
981 >            pass
982 >
983 >        return txt
984 >
985 >    def wsBuildExe(self, nj=0):
986 >        """
987 >        Put in the script the commands that move the pre-built libraries
988 >        and modules shipped in the input sandbox into the working area.
989 >        """
990 >
991 >        txt = '\n#Written by cms_cmssw::wsBuildExe\n'
992 >        txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
993 >
994 >        txt += 'rm -r lib/ module/ \n'
995 >        txt += 'mv $RUNTIME_AREA/lib/ . \n'
996 >        txt += 'mv $RUNTIME_AREA/module/ . \n'
997 >        if self.dataExist == True:
998 >            txt += 'rm -r src/ \n'
999 >            txt += 'mv $RUNTIME_AREA/src/ . \n'
1000 >        if len(self.additional_inbox_files)>0:
1001 >            for file in self.additional_inbox_files:
1002 >                txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
1003 >        # txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
1004 >        # txt += 'mv $RUNTIME_AREA/IMProv/ . \n'
1005 >
1006 >        txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
1007 >        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
1008 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
1009 >        txt += 'else\n'
1010 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
1011 >        txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1012 >        txt += 'fi\n'
1013 >        txt += '\n'
1014 >
1015 >        return txt
1016 >
1017 >
1018      def executableName(self):
1019 <        return self.executable
1019 >        if self.scriptExe:
1020 >            return "sh "
1021 >        else:
1022 >            return self.executable
1023  
1024      def executableArgs(self):
1025 <        return "-p pset.cfg"
1025 >        # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
1026 >        if self.scriptExe: #CarlosDaniele
1027 >            return self.scriptExe + " $NJob"
1028 >        else:
1029 >            ex_args = ""
1030 >            # FUTURE: This tests the CMSSW version. Can remove code as versions deprecated
1031 >            # Framework job report
1032 >            if (self.CMSSW_major >= 1 and self.CMSSW_minor >= 5) or (self.CMSSW_major >= 2):
1033 >                ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
1034 >            # Type of config file
1035 >            if self.CMSSW_major >= 2 :
1036 >                ex_args += " -p pset.py"
1037 >            else:
1038 >                ex_args += " -p pset.cfg"
1039 >            return ex_args
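A hedged sketch of the cmsRun argument string the version tests above produce (CMSSW_major/CMSSW_minor as parsed in __init__; the helper name is illustrative):

def example_cmsrun_args(major, minor):
    args = ''
    if (major >= 1 and minor >= 5) or (major >= 2):
        args += ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml'   # request a framework job report
    if major >= 2:
        args += ' -p pset.py'                            # python configuration
    else:
        args += ' -p pset.cfg'
    return args

# example_cmsrun_args(2, 0) -> ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py'
# example_cmsrun_args(1, 3) -> ' -p pset.cfg'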
1040  
1041      def inputSandbox(self, nj):
1042          """
1043          Returns a list of filenames to be put in JDL input sandbox.
1044          """
1045          inp_box = []
460        # dict added to delete duplicate from input sandbox file list
461        seen = {}
462        ## code
1046          if os.path.isfile(self.tgzNameWithPath):
1047              inp_box.append(self.tgzNameWithPath)
1048 <        ## config
1049 <        inp_box.append(common.job_list[nj].configFilename())
467 <        ## additional input files
468 <        for file in self.additional_inbox_files:
469 <            inp_box.append(common.work_space.cwdDir()+file)
470 <        #print "sono inputSandbox, inp_box = ", inp_box
1048 >        wrapper = os.path.basename(str(common._db.queryTask('scriptName')))
1049 >        inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper)
1050          return inp_box
1051  
1052      def outputSandbox(self, nj):
# Line 476 | Line 1055 | class Cmssw(JobType):
1055          """
1056          out_box = []
1057  
479        stdout=common.job_list[nj].stdout()
480        stderr=common.job_list[nj].stderr()
481
1058          ## User Declared output files
1059 <        for out in self.output_file:
1060 <            n_out = nj + 1
1061 <            out_box.append(self.numberFile_(out,str(n_out)))
1059 >        for out in (self.output_file+self.output_file_sandbox):
1060 >            n_out = nj + 1
1061 >            out_box.append(numberFile(out,str(n_out)))
1062          return out_box
487        return []
1063  
1064 <    def prepareSteeringCards(self):
1064 >
1065 >    def wsRenameOutput(self, nj):
1066          """
1067 <        Make initial modifications of the user's steering card file.
1067 >        Returns part of a job script which renames the produced files.
1068          """
493        infile = open(self.pset,'r')
494            
495        outfile = open(common.work_space.jobDir()+self.name()+'.cfg', 'w')
496          
497        outfile.write('\n\n##### The following cards have been created by CRAB: DO NOT TOUCH #####\n')
1069  
1070 <        outfile.write('InputCollections=/System/'+self.owner+'/'+self.dataset+'/'+self.dataset+'\n')
1070 >        txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
1071 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1072 >        txt += 'echo ">>> current directory content:"\n'
1073 >        if self.debug_wrapper:
1074 >            txt += 'ls -Al\n'
1075 >        txt += '\n'
1076  
1077 <        infile.close()
1078 <        outfile.close()
1079 <        return
1077 >        for fileWithSuffix in (self.output_file):
1078 >            output_file_num = numberFile(fileWithSuffix, '$NJob')
1079 >            txt += '\n'
1080 >            txt += '# check output file\n'
1081 >            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1082 >            if (self.copy_data == 1):  # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA
1083 >                txt += '    mv '+fileWithSuffix+' '+output_file_num+'\n'
1084 >                txt += '    ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1085 >            else:
1086 >                txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1087 >                txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1088 >            txt += 'else\n'
1089 >            txt += '    job_exit_code=60302\n'
1090 >            txt += '    echo "WARNING: Output file '+fileWithSuffix+' not found"\n'
1091 >            if common.scheduler.name().upper() == 'CONDOR_G':
1092 >                txt += '    if [ $middleware == OSG ]; then \n'
1093 >                txt += '        echo "prepare dummy output file"\n'
1094 >                txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1095 >                txt += '    fi \n'
1096 >            txt += 'fi\n'
1097 >        file_list = []
1098 >        for fileWithSuffix in (self.output_file):
1099 >             file_list.append(numberFile(fileWithSuffix, '$NJob'))
1100  
1101 <    def wsRenameOutput(self, nj):
1101 >        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
1102 >        txt += '\n'
1103 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1104 >        txt += 'echo ">>> current directory content:"\n'
1105 >        if self.debug_wrapper:
1106 >            txt += 'ls -Al\n'
1107 >        txt += '\n'
1108 >        txt += 'cd $RUNTIME_AREA\n'
1109 >        txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
1110 >        return txt
1111 >
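Both outputSandbox and wsRenameOutput tag outputs with the job number through crab_util's numberFile; a stand-alone sketch of that naming convention (illustrative only, mirroring the removed numberFile_ helper shown further down in this diff), which inserts _<suffix> before the last extension:

def number_file(name, suffix):
    parts = name.split('.')
    if len(parts) > 1:
        return '.'.join(parts[:-1]) + '_' + suffix + '.' + parts[-1]
    return name + '_' + suffix

# number_file('output.root', '3')    -> 'output_3.root'
# number_file('histograms', '$NJob') -> 'histograms_$NJob'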
1112 >    def getRequirements(self, nj=[]):
1113          """
1114 <        Returns part of a job script which renames the produced files.
1114 >        return job requirements to add to jdl files
1115          """
1116 +        req = ''
1117 +        if self.version:
1118 +            req='Member("VO-cms-' + \
1119 +                 self.version + \
1120 +                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1121 +        if self.executable_arch:
1122 +            req+=' && Member("VO-cms-' + \
1123 +                 self.executable_arch + \
1124 +                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1125 +
1126 +        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1127 +        if common.scheduler.name() == "glitecoll":
1128 +            req += ' && other.GlueCEStateStatus == "Production" '
1129  
1130 <        txt = '\n'
1131 <        file_list = ''
1132 <        for fileWithSuffix in self.output_file:
1133 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1134 <            file_list=file_list+output_file_num+' '
1135 <            txt += '\n'
1136 <            txt += 'ls \n'
1137 <            txt += '\n'
1138 <            txt += 'ls '+fileWithSuffix+'\n'
1139 <            txt += 'exe_result=$?\n'
1140 <            txt += 'if [ $exe_result -ne 0 ] ; then\n'
1141 <            txt += '   echo "ERROR: No output file to manage"\n'
1142 <            txt += '   echo "JOB_EXIT_STATUS = $exe_result"\n'
1143 <            txt += '   echo "SanityCheckCode = $exe_result" | tee -a $RUNTIME_AREA/$repo\n'
1144 <            txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
1145 <            txt += '   exit $exe_result \n'
1130 >        return req
1131 >
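As a hedged illustration of the Requirements expression getRequirements assembles above (omitting the glitecoll-specific clause), for example values CMSSW_2_0_9 and slc4_ia32_gcc345 chosen purely for illustration:

version = 'CMSSW_2_0_9'
arch = 'slc4_ia32_gcc345'
req = 'Member("VO-cms-%s", other.GlueHostApplicationSoftwareRunTimeEnvironment)' % version
req += ' && Member("VO-cms-%s", other.GlueHostApplicationSoftwareRunTimeEnvironment)' % arch
req += ' && (other.GlueHostNetworkAdapterOutboundIP)'
print req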
1132 >    def configFilename(self):
1133 >        """ return the config filename """
1134 >        # FUTURE: Can remove cfg mode for CMSSW >= 2_1_x
1135 >        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
1136 >          return self.name()+'.py'
1137 >        else:
1138 >          return self.name()+'.cfg'
1139 >
1140 >    def wsSetupCMSOSGEnvironment_(self):
1141 >        """
1142 >        Returns part of a job script which prepares
1143 >        the execution environment and which is common for all CMS jobs.
1144 >        """
1145 >        txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n'
1146 >        txt += '    echo ">>> setup CMS OSG environment:"\n'
1147 >        txt += '    echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
1148 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1149 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1150 >        txt += '    if [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1151 >        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1152 >        txt += '        source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1153 >        txt += '    else\n'
1154 >        txt += '        echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1155 >        txt += '        job_exit_code=10020\n'
1156 >        txt += '        func_exit\n'
1157 >        txt += '    fi\n'
1158 >        txt += '\n'
1159 >        txt += '    echo "==> setup cms environment ok"\n'
1160 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1161 >
1162 >        return txt
1163 >
1164 >    def wsSetupCMSLCGEnvironment_(self):
1165 >        """
1166 >        Returns part of a job script which prepares
1167 >        the execution environment and which is common for all CMS jobs.
1168 >        """
1169 >        txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n'
1170 >        txt += '    echo ">>> setup CMS LCG environment:"\n'
1171 >        txt += '    echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
1172 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1173 >        txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
1174 >        txt += '    if [ ! $VO_CMS_SW_DIR ] ;then\n'
1175 >        txt += '        echo "ERROR ==> CMS software dir not found on WN `hostname`"\n'
1176 >        txt += '        job_exit_code=10031\n'
1177 >        txt += '        func_exit\n'
1178 >        txt += '    else\n'
1179 >        txt += '        echo "Sourcing environment... "\n'
1180 >        txt += '        if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1181 >        txt += '            echo "ERROR ==> cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1182 >        txt += '            job_exit_code=10020\n'
1183 >        txt += '            func_exit\n'
1184 >        txt += '        fi\n'
1185 >        txt += '        echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1186 >        txt += '        source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1187 >        txt += '        result=$?\n'
1188 >        txt += '        if [ $result -ne 0 ]; then\n'
1189 >        txt += '            echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1190 >        txt += '            job_exit_code=10032\n'
1191 >        txt += '            func_exit\n'
1192 >        txt += '        fi\n'
1193 >        txt += '    fi\n'
1194 >        txt += '    \n'
1195 >        txt += '    echo "==> setup cms environment ok"\n'
1196 >        return txt
1197 >
1198 >    def modifyReport(self, nj):
1199 >        """
1200 >        insert the part of the script that modifies the FrameworkJob Report
1201 >        """
1202 >        txt = '\n#Written by cms_cmssw::modifyReport\n'
1203 >        publish_data = int(self.cfg_params.get('USER.publish_data',0))
1204 >        if (publish_data == 1):
1205 >            processedDataset = self.cfg_params['USER.publish_data_name']
1206 >            ### FEDE  for publication with LSF and CAF schedulers ####
1207 >            print "common.scheduler.name().upper() = ", common.scheduler.name().upper()
1208 >            if (common.scheduler.name().upper() == "CAF" or common.scheduler.name().upper() == "LSF"):
1209 >                print "calling LFNBaseName with LocalUser = True"
1210 >                LFNBaseName = LFNBase(processedDataset, LocalUser=True)
1211 >            else :    
1212 >                LFNBaseName = LFNBase(processedDataset)
1213 >            ####    
1214 >
1215 >            txt += 'if [ $copy_exit_status -eq 0 ]; then\n'
1216 >            txt += '    FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName)
1217              txt += 'else\n'
1218 <            txt += '   cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1218 >            txt += '    FOR_LFN=/copy_problems/ \n'
1219 >            txt += '    SE=""\n'
1220 >            txt += '    SE_PATH=""\n'
1221 >            txt += 'fi\n'
1222 >
1223 >            txt += 'echo ">>> Modify Job Report:" \n'
1224 >            txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1225 >            txt += 'ProcessedDataset='+processedDataset+'\n'
1226 >            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1227 >            txt += 'echo "SE = $SE"\n'
1228 >            txt += 'echo "SE_PATH = $SE_PATH"\n'
1229 >            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1230 >            txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1231 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1232 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1233 >            txt += 'modifyReport_result=$?\n'
1234 >            txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1235 >            txt += '    modifyReport_result=70500\n'
1236 >            txt += '    job_exit_code=$modifyReport_result\n'
1237 >            txt += '    echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n'
1238 >            txt += '    echo "WARNING: Problem with ModifyJobReport"\n'
1239 >            txt += 'else\n'
1240 >            txt += '    mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1241              txt += 'fi\n'
529            txt += 'cd $RUNTIME_AREA\n'
530                      
531            pass
532      
533        file_list=file_list[:-1]
534        txt += 'file_list="'+file_list+'"\n'
1242          return txt
1243  
1244 <    def numberFile_(self, file, txt):
1244 >    def wsParseFJR(self):
1245          """
1246 <        append _'txt' before last extension of a file
1246 >        Parse the FrameworkJobReport to obtain useful infos
1247          """
1248 <        p = string.split(file,".")
1249 <        # take away last extension
1250 <        name = p[0]
1251 <        for x in p[1:-1]:
1252 <           name=name+"."+x
1253 <        # add "_txt"
1254 <        if len(p)>1:
1255 <          ext = p[len(p)-1]
1256 <          #result = name + '_' + str(txt) + "." + ext
1257 <          result = name + '_' + txt + "." + ext
1258 <        else:
1259 <          #result = name + '_' + str(txt)
1260 <          result = name + '_' + txt
1261 <        
1262 <        return result
1248 >        txt = '\n#Written by cms_cmssw::wsParseFJR\n'
1249 >        txt += 'echo ">>> Parse FrameworkJobReport crab_fjr.xml"\n'
1250 >        txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1251 >        txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1252 >        txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1253 >        if self.debug_wrapper :
1254 >            txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1255 >        txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1256 >        txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1257 >        txt += '            echo ">>> crab_fjr.xml contents: "\n'
1258 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1259 >        txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1260 >        txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
1261 >        txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
1262 >        txt += '        else\n'
1263 >        txt += '            echo "Extracted ExitStatus from FrameworkJobReport parsing output: $executable_exit_status"\n'
1264 >        txt += '        fi\n'
1265 >        txt += '    else\n'
1266 >        txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1267 >        txt += '    fi\n'
1268 >        #### Patch to check input data reading for CMSSW16x. Hopefully we'll remove it asap.
1269 >
1270 >        if (self.datasetPath and not self.dataset_pu ):
1271 >          # VERIFY PROCESSED DATA
1272 >            txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1273 >            txt += '      echo ">>> Verify list of processed files:"\n'
1274 >            txt += '      echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1275 >            txt += '      python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1276 >            txt += '      cat input-files.txt  | sort | uniq > tmp.txt\n'
1277 >            txt += '      mv tmp.txt input-files.txt\n'
1278 >            txt += '      echo "cat input-files.txt"\n'
1279 >            txt += '      echo "----------------------"\n'
1280 >            txt += '      cat input-files.txt\n'
1281 >            txt += '      cat processed-files.txt | sort | uniq > tmp.txt\n'
1282 >            txt += '      mv tmp.txt processed-files.txt\n'
1283 >            txt += '      echo "----------------------"\n'
1284 >            txt += '      echo "cat processed-files.txt"\n'
1285 >            txt += '      echo "----------------------"\n'
1286 >            txt += '      cat processed-files.txt\n'
1287 >            txt += '      echo "----------------------"\n'
1288 >            txt += '      diff -q input-files.txt processed-files.txt\n'
1289 >            txt += '      fileverify_status=$?\n'
1290 >            txt += '      if [ $fileverify_status -ne 0 ]; then\n'
1291 >            txt += '         executable_exit_status=30001\n'
1292 >            txt += '         echo "ERROR ==> not all input files processed"\n'
1293 >            txt += '         echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1294 >            txt += '         echo "      ==> diff input-files.txt processed-files.txt"\n'
1295 >            txt += '      fi\n'
1296 >            txt += '    fi\n'
1297 >            txt += '\n'
1298 >        txt += 'else\n'
1299 >        txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1300 >        txt += 'fi\n'
1301 >        txt += '\n'
1302 >        txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1303 >        txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1304 >        txt += 'job_exit_code=$executable_exit_status\n'
1305 >
1306 >        return txt
1307  
1308 <    def getRequirements(self):
1308 >    def setParam_(self, param, value):
1309 >        self._params[param] = value
1310 >
1311 >    def getParams(self):
1312 >        return self._params
1313 >
1314 >    def uniquelist(self, old):
1315          """
1316 <        return job requirements to add to jdl files
1316 >        remove duplicates from a list
1317          """
1318 <        req = ''
1319 <        if common.analisys_common_info['sites']:
1320 <            if common.analisys_common_info['sw_version']:
1321 <                req='Member("VO-cms-' + \
1322 <                     common.analisys_common_info['sw_version'] + \
1323 <                     '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1324 <            if len(common.analisys_common_info['sites'])>0:
1325 <                req = req + ' && ('
1326 <                for i in range(len(common.analisys_common_info['sites'])):
1327 <                    req = req + 'other.GlueCEInfoHostName == "' \
1328 <                         + common.analisys_common_info['sites'][i] + '"'
1329 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
1330 <                        req = req + ' || '
1331 <            req = req + ')'
1332 <        #print "req = ", req
1333 <        return req
1318 >        nd={}
1319 >        for e in old:
1320 >            nd[e]=0
1321 >        return nd.keys()
1322 >
1323 >    def outList(self):
1324 >        """
1325 >        check the dimension of the output files
1326 >        """
1327 >        txt = ''
1328 >        txt += 'echo ">>> list of expected files on output sandbox"\n'
1329 >        listOutFiles = []
1330 >        stdout = 'CMSSW_$NJob.stdout'
1331 >        stderr = 'CMSSW_$NJob.stderr'
1332 >        if (self.return_data == 1):
1333 >            for file in (self.output_file+self.output_file_sandbox):
1334 >                listOutFiles.append(numberFile(file, '$NJob'))
1335 >            listOutFiles.append(stdout)
1336 >            listOutFiles.append(stderr)
1337 >        else:
1338 >            for file in (self.output_file_sandbox):
1339 >                listOutFiles.append(numberFile(file, '$NJob'))
1340 >            listOutFiles.append(stdout)
1341 >            listOutFiles.append(stderr)
1342 >        txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1343 >        txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1344 >        txt += 'export filesToCheck\n'
1345 >        return txt
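A hedged sketch of the list outList builds above (return_data toggles whether the numbered user outputs are expected back in addition to the sandbox files and the CMSSW_$NJob.stdout/.stderr logs); function names and example values are illustrative:

def expected_output_files(output_file, output_file_sandbox, return_data):
    def number_file(name, suffix):
        parts = name.split('.')
        if len(parts) > 1:
            return '.'.join(parts[:-1]) + '_' + suffix + '.' + parts[-1]
        return name + '_' + suffix
    if return_data == 1:
        to_number = output_file + output_file_sandbox
    else:
        to_number = list(output_file_sandbox)
    files = [number_file(f, '$NJob') for f in to_number]
    return files + ['CMSSW_$NJob.stdout', 'CMSSW_$NJob.stderr']

# expected_output_files(['histo.root'], ['crab_fjr.xml'], 1)
# -> ['histo_$NJob.root', 'crab_fjr_$NJob.xml', 'CMSSW_$NJob.stdout', 'CMSSW_$NJob.stderr']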

Diff Legend

Removed lines
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)