
Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.2 by slacapra, Wed Apr 26 15:31:06 2006 UTC vs.
Revision 1.235 by spiga, Fri Aug 29 15:06:41 2008 UTC

# Line 2 | Line 2 | from JobType import JobType
2   from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5 + from BlackWhiteListParser import SEBlackWhiteListParser
6   import common
6
7 import DataDiscovery
8 import DataLocation
7   import Scram
8 + from LFNBaseName import *
9  
10 < import os, string, re
10 > import os, string, glob
11  
12   class Cmssw(JobType):
13 <    def __init__(self, cfg_params):
13 >    def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
14          JobType.__init__(self, 'CMSSW')
15          common.logger.debug(3,'CMSSW::__init__')
16 +        self.skip_blocks = skip_blocks
17 +
18 +        self.argsList = []
19  
20 <        self.analisys_common_info = {}
20 >        self._params = {}
21 >        self.cfg_params = cfg_params
22 >        # init BlackWhiteListParser
23 >        self.blackWhiteListParser = SEBlackWhiteListParser(cfg_params)
24 >
25 >        ### Temporary patch to automatically skip the ISB size check:
26 >        server=self.cfg_params.get('CRAB.server_name',None)
27 >        size = 9.5
28 >        if server: size = 99999
29 >        ### D.S.
30 >        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',size))
31 >
32 >        # number of jobs requested to be created, limit obj splitting
33 >        self.ncjobs = ncjobs
34  
35          log = common.logger
36 <        
36 >
37          self.scram = Scram.Scram(cfg_params)
23        scramArea = ''
38          self.additional_inbox_files = []
39          self.scriptExe = ''
40          self.executable = ''
41 +        self.executable_arch = self.scram.getArch()
42          self.tgz_name = 'default.tgz'
43 +        self.scriptName = 'CMSSW.sh'
44 +        self.pset = ''
45 +        self.datasetPath = ''
46 +
47 +        # set FJR file name
48 +        self.fjrFileName = 'crab_fjr.xml'
49  
50          self.version = self.scram.getSWVersion()
51 <        common.analisys_common_info['sw_version'] = self.version
51 >        version_array = self.version.split('_')
52 >        self.CMSSW_major = 0
53 >        self.CMSSW_minor = 0
54 >        self.CMSSW_patch = 0
55 >        try:
56 >            self.CMSSW_major = int(version_array[1])
57 >            self.CMSSW_minor = int(version_array[2])
58 >            self.CMSSW_patch = int(version_array[3])
59 >        except:
60 >            msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
61 >            raise CrabException(msg)
62  
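A minimal sketch of the version parsing above, assuming a release string such as "CMSSW_2_1_8" (the string itself is only an example, not taken from the diff):

    version = "CMSSW_2_1_8"
    parts = version.split('_')            # ['CMSSW', '2', '1', '8']
    major, minor, patch = (int(parts[1]),   # 2
                           int(parts[2]),   # 1
                           int(parts[3]))   # 3rd field -> 8
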
63          ### collect Data cards
64 <        try:
65 <            self.owner = cfg_params['CMSSW.owner']
66 <            log.debug(6, "CMSSW::CMSSW(): owner = "+self.owner)
36 <            self.dataset = cfg_params['CMSSW.dataset']
37 <            log.debug(6, "CMSSW::CMSSW(): dataset = "+self.dataset)
38 <        except KeyError:
39 <            msg = "Error: owner and/or dataset not defined "
64 >
65 >        if not cfg_params.has_key('CMSSW.datasetpath'):
66 >            msg = "Error: datasetpath not defined "
67              raise CrabException(msg)
68  
69 <        self.dataTiers = []
70 <        try:
71 <            tmpDataTiers = string.split(cfg_params['CMSSW.data_tier'],',')
72 <            for tmp in tmpDataTiers:
73 <                tmp=string.strip(tmp)
74 <                self.dataTiers.append(tmp)
75 <                pass
76 <            pass
77 <        except KeyError:
78 <            pass
79 <        log.debug(6, "Cmssw::Cmssw(): dataTiers = "+str(self.dataTiers))
69 >        ### Temporary: added to remove input file control in the case of PU
70 >        self.dataset_pu = cfg_params.get('CMSSW.dataset_pu', None)
71 >
72 >        tmp =  cfg_params['CMSSW.datasetpath']
73 >        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
74 >        if string.lower(tmp)=='none':
75 >            self.datasetPath = None
76 >            self.selectNoInput = 1
77 >        else:
78 >            self.datasetPath = tmp
79 >            self.selectNoInput = 0
80  
81 +        self.dataTiers = []
82 +        self.debugWrap = ''
83 +        self.debug_wrapper = cfg_params.get('USER.debug_wrapper',False)
84 +        if self.debug_wrapper: self.debugWrap='--debug'
85          ## now the application
86 <        try:
87 <            self.executable = cfg_params['CMSSW.executable']
57 <            log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
58 <            msg = "Default executable cmsRun overridden. Switch to " + self.executable
59 <            log.debug(3,msg)
60 <        except KeyError:
61 <            self.executable = 'cmsRun'
62 <            msg = "User executable not defined. Use cmsRun"
63 <            log.debug(3,msg)
64 <            pass
86 >        self.executable = cfg_params.get('CMSSW.executable','cmsRun')
87 >        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
88  
89 <        try:
90 <            self.pset = cfg_params['CMSSW.pset']
91 <            log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
89 >        if not cfg_params.has_key('CMSSW.pset'):
90 >            raise CrabException("PSet file missing. Cannot run cmsRun ")
91 >        self.pset = cfg_params['CMSSW.pset']
92 >        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
93 >        if self.pset.lower() != 'none' :
94              if (not os.path.exists(self.pset)):
95                  raise CrabException("User defined PSet file "+self.pset+" does not exist")
96 <        except KeyError:
97 <            raise CrabException("PSet file missing. Cannot run cmsRun ")
96 >        else:
97 >            self.pset = None
98  
99          # output files
100 <        try:
101 <            self.output_file = []
100 >        ## stuff which must be returned always via sandbox
101 >        self.output_file_sandbox = []
102  
103 <            tmp = cfg_params['CMSSW.output_file']
104 <            if tmp != '':
80 <                tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
81 <                log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
82 <                for tmp in tmpOutFiles:
83 <                    tmp=string.strip(tmp)
84 <                    self.output_file.append(tmp)
85 <                    pass
103 >        # add fjr report by default via sandbox
104 >        self.output_file_sandbox.append(self.fjrFileName)
105  
106 <            else:
107 <                log.message("No output file defined: only stdout/err will be available")
108 <                pass
109 <            pass
110 <        except KeyError:
111 <            log.message("No output file defined: only stdout/err will be available")
112 <            pass
106 >        # other output files to be returned via sandbox or copied to SE
107 >        outfileflag = False
108 >        self.output_file = []
109 >        tmp = cfg_params.get('CMSSW.output_file',None)
110 >        if tmp :
111 >            self.output_file = [x.strip() for x in tmp.split(',')]
112 >            outfileflag = True #output found
113 >        #else:
114 >        #    log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
115  
116          # script_exe file as additional file in inputSandbox
117 <        try:
118 <           self.scriptExe = cfg_params['CMSSW.script_exe']
119 <           self.additional_inbox_files.append(self.scriptExe)
120 <        except KeyError:
121 <           pass
122 <        if self.scriptExe != '':
123 <           if os.path.isfile(self.scriptExe):
124 <              pass
125 <           else:
126 <              log.message("WARNING. file "+self.scriptExe+" not found")
127 <              sys.exit()
128 <                  
117 >        self.scriptExe = cfg_params.get('USER.script_exe',None)
118 >        if self.scriptExe :
119 >            if not os.path.isfile(self.scriptExe):
120 >                msg ="ERROR. file "+self.scriptExe+" not found"
121 >                raise CrabException(msg)
122 >            self.additional_inbox_files.append(string.strip(self.scriptExe))
123 >
 124 >        if self.datasetPath == None and self.pset == None and not self.scriptExe :
 125 >            msg = "Error. script_exe not defined"
126 >            raise CrabException(msg)
127 >
128 >        # use parent files...
129 >        self.useParent = self.cfg_params.get('CMSSW.use_parent',False)
130 >
131          ## additional input files
132 <        try:
133 <            tmpAddFiles = string.split(cfg_params['CMSSW.additional_input_files'],',')
132 >        if cfg_params.has_key('USER.additional_input_files'):
133 >            tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
134              for tmp in tmpAddFiles:
135 <                tmp=string.strip(tmp)
136 <                self.additional_inbox_files.append(tmp)
135 >                tmp = string.strip(tmp)
136 >                dirname = ''
137 >                if not tmp[0]=="/": dirname = "."
138 >                files = []
139 >                if string.find(tmp,"*")>-1:
140 >                    files = glob.glob(os.path.join(dirname, tmp))
141 >                    if len(files)==0:
142 >                        raise CrabException("No additional input file found with this pattern: "+tmp)
143 >                else:
144 >                    files.append(tmp)
145 >                for file in files:
146 >                    if not os.path.exists(file):
147 >                        raise CrabException("Additional input file not found: "+file)
148 >                    pass
149 >                    self.additional_inbox_files.append(string.strip(file))
150                  pass
151              pass
152 <        except KeyError:
153 <            pass
152 >            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
153 >        pass
154  
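A short sketch of the wildcard handling above, assuming a hypothetical entry such as "data/*.txt" in USER.additional_input_files:

    import glob, os
    pattern = "data/*.txt"                              # hypothetical config entry
    dirname = "" if pattern.startswith("/") else "."    # relative patterns are rooted at the working dir
    matches = glob.glob(os.path.join(dirname, pattern))
    if not matches:
        raise RuntimeError("No additional input file found with this pattern: " + pattern)
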
155 <        try:
156 <            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
157 <        except KeyError:
158 <            msg = 'Must define total_number_of_events and job_number_of_events'
159 <            raise CrabException(msg)
160 <            
161 < #Marco: FirstEvent is nolonger used inside PSet
126 < #        try:
127 < #            self.first = int(cfg_params['CMSSW.first_event'])
128 < #        except KeyError:
129 < #            self.first = 0
130 < #            pass
131 < #        log.debug(6, "Orca::Orca(): total number of events = "+`self.total_number_of_events`)
132 <        #log.debug(6, "Orca::Orca(): events per job = "+`self.job_number_of_events`)
133 < #        log.debug(6, "Orca::Orca(): first event = "+`self.first`)
134 <        
135 <        CEBlackList = []
136 <        try:
137 <            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
138 <            for tmp in tmpBad:
139 <                tmp=string.strip(tmp)
140 <                CEBlackList.append(tmp)
141 <        except KeyError:
142 <            pass
155 >        ## Events per job
156 >        if cfg_params.has_key('CMSSW.events_per_job'):
157 >            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
158 >            self.selectEventsPerJob = 1
159 >        else:
160 >            self.eventsPerJob = -1
161 >            self.selectEventsPerJob = 0
162  
163 <        self.reCEBlackList=[]
164 <        for bad in CEBlackList:
165 <            self.reCEBlackList.append(re.compile( bad ))
163 >        ## number of jobs
164 >        if cfg_params.has_key('CMSSW.number_of_jobs'):
165 >            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
166 >            self.selectNumberOfJobs = 1
167 >        else:
168 >            self.theNumberOfJobs = 0
169 >            self.selectNumberOfJobs = 0
170  
171 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
171 >        if cfg_params.has_key('CMSSW.total_number_of_events'):
172 >            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
173 >            self.selectTotalNumberEvents = 1
174 >            if self.selectNumberOfJobs  == 1:
175 >                if (self.total_number_of_events != -1) and int(self.total_number_of_events) < int(self.theNumberOfJobs):
176 >                    msg = 'Must specify at least one event per job. total_number_of_events > number_of_jobs '
177 >                    raise CrabException(msg)
178 >        else:
179 >            self.total_number_of_events = 0
180 >            self.selectTotalNumberEvents = 0
181  
182 <        CEWhiteList = []
183 <        try:
184 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
185 <            #tmpGood = ['cern']
186 <            for tmp in tmpGood:
187 <                tmp=string.strip(tmp)
188 <                #if (tmp == 'cnaf'): tmp = 'webserver' ########## warning: temp. patch
189 <                CEWhiteList.append(tmp)
190 <        except KeyError:
191 <            pass
182 >        if self.pset != None:
183 >             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
184 >                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
185 >                 raise CrabException(msg)
186 >        else:
187 >             if (self.selectNumberOfJobs == 0):
 188 >                 msg = 'Must specify number_of_jobs.'
189 >                 raise CrabException(msg)
190 >
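The splitting selection above reduces to an "exactly two of three" rule; a minimal sketch of the same check (the flag names here are illustrative, not the attributes used in the class):

    select_total   = 1   # CMSSW.total_number_of_events given
    select_per_job = 1   # CMSSW.events_per_job given
    select_njobs   = 0   # CMSSW.number_of_jobs not given
    if select_total + select_per_job + select_njobs != 2:
        raise ValueError("define exactly two of total_number_of_events, "
                         "events_per_job, number_of_jobs")
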
191 >        ## New method of dealing with seeds
192 >        self.incrementSeeds = []
193 >        self.preserveSeeds = []
194 >        if cfg_params.has_key('CMSSW.preserve_seeds'):
195 >            tmpList = cfg_params['CMSSW.preserve_seeds'].split(',')
196 >            for tmp in tmpList:
 197 >                tmp = tmp.strip()
198 >                self.preserveSeeds.append(tmp)
199 >        if cfg_params.has_key('CMSSW.increment_seeds'):
200 >            tmpList = cfg_params['CMSSW.increment_seeds'].split(',')
201 >            for tmp in tmpList:
 202 >                tmp = tmp.strip()
203 >                self.incrementSeeds.append(tmp)
204 >
205 >        ## FUTURE: Can remove in CRAB 2.4.0
206 >        self.sourceSeed    = cfg_params.get('CMSSW.pythia_seed',None)
207 >        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
208 >        self.sourceSeedG4  = cfg_params.get('CMSSW.g4_seed',None)
209 >        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
210 >        if self.sourceSeed or self.sourceSeedVtx or self.sourceSeedG4 or self.sourceSeedMix:
211 >            msg = 'pythia_seed, vtx_seed, g4_seed, and mix_seed are no longer valid settings. You must use increment_seeds or preserve_seeds'
212 >            raise CrabException(msg)
213  
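A small sketch of how the comma-separated seed lists above are read, assuming a hypothetical configuration with preserve_seeds and increment_seeds entries (the seed names are placeholders):

    cfg = {'CMSSW.preserve_seeds': 'sourceSeed, VtxSmeared',
           'CMSSW.increment_seeds': 'generator'}            # hypothetical values
    preserve  = [s.strip() for s in cfg.get('CMSSW.preserve_seeds', '').split(',') if s.strip()]
    increment = [s.strip() for s in cfg.get('CMSSW.increment_seeds', '').split(',') if s.strip()]
    # preserve  -> ['sourceSeed', 'VtxSmeared'];  increment -> ['generator']
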
214 <        #print 'CEWhiteList: ',CEWhiteList
162 <        self.reCEWhiteList=[]
163 <        for Good in CEWhiteList:
164 <            self.reCEWhiteList.append(re.compile( Good ))
214 >        self.firstRun = cfg_params.get('CMSSW.first_run',None)
215  
216 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
216 >        # Copy/return
217 >        self.copy_data = int(cfg_params.get('USER.copy_data',0))
218 >        self.return_data = int(cfg_params.get('USER.return_data',0))
219  
220          #DBSDLS-start
221 <        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
221 >        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
222          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
223          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
224 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
225          ## Perform the data location and discovery (based on DBS/DLS)
226 <        self.DataDiscoveryAndLocation(cfg_params)
227 <        #DBSDLS-end          
226 >        ## SL: Don't if NONE is specified as input (pythia use case)
227 >        blockSites = {}
228 >        if self.datasetPath:
229 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
230 >        #DBSDLS-end
231 >
232 >        ## Select Splitting
233 >        if self.selectNoInput:
234 >            if self.pset == None:
235 >                self.jobSplittingForScript()
236 >            else:
237 >                self.jobSplittingNoInput()
238 >        else:
239 >            self.jobSplittingByBlocks(blockSites)
240  
241 <        self.tgzNameWithPath = self.getTarBall(self.executable)
241 >        # modify Pset only the first time
242 >        if isNew:
243 >            if self.pset != None:
244 >                import PsetManipulator as pp
245 >                PsetEdit = pp.PsetManipulator(self.pset)
246 >                try:
247 >                    # Add FrameworkJobReport to parameter-set, set max events.
248 >                    # Reset later for data jobs by writeCFG which does all modifications
249 >                    PsetEdit.addCrabFJR(self.fjrFileName) # FUTURE: Job report addition not needed by CMSSW>1.5
250 >                    PsetEdit.maxEvent(self.eventsPerJob)
251 >                    PsetEdit.psetWriter(self.configFilename())
252 >                    ## If present, add TFileService to output files
253 >                    if not int(cfg_params.get('CMSSW.skip_TFileService_output',0)):
254 >                        tfsOutput = PsetEdit.getTFileService()
255 >                        if tfsOutput:
256 >                            if tfsOutput in self.output_file:
257 >                                common.logger.debug(5,"Output from TFileService "+tfsOutput+" already in output files")
258 >                            else:
259 >                                outfileflag = True #output found
260 >                                self.output_file.append(tfsOutput)
261 >                                common.logger.message("Adding "+tfsOutput+" to output files (from TFileService)")
262 >                            pass
263 >                        pass
264 >                    ## If present and requested, add PoolOutputModule to output files
265 >                    if int(cfg_params.get('CMSSW.get_edm_output',0)):
266 >                        edmOutput = PsetEdit.getPoolOutputModule()
267 >                        if edmOutput:
268 >                            if edmOutput in self.output_file:
269 >                                common.logger.debug(5,"Output from PoolOutputModule "+edmOutput+" already in output files")
270 >                            else:
271 >                                self.output_file.append(edmOutput)
272 >                                common.logger.message("Adding "+edmOutput+" to output files (from PoolOutputModule)")
273 >                            pass
274 >                        pass
275 >                except CrabException:
276 >                    msg='Error while manipulating ParameterSet: exiting...'
277 >                    raise CrabException(msg)
278 >            ## Prepare inputSandbox TarBall (only the first time)
279 >            self.tgzNameWithPath = self.getTarBall(self.executable)
280  
281      def DataDiscoveryAndLocation(self, cfg_params):
282  
283 <        fun = "CMSSW::DataDiscoveryAndLocation()"
283 >        import DataDiscovery
284 >        import DataLocation
285 >        common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
286 >
287 >        datasetPath=self.datasetPath
288  
289          ## Contact the DBS
290 +        common.logger.message("Contacting Data Discovery Services ...")
291          try:
292 <            self.pubdata=DataDiscovery.DataDiscovery(self.owner,
185 <                                                     self.dataset,
186 <                                                     self.dataTiers,
187 <                                                     cfg_params)
292 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
293              self.pubdata.fetchDBSInfo()
294  
295          except DataDiscovery.NotExistingDatasetError, ex :
296              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
297              raise CrabException(msg)
193
298          except DataDiscovery.NoDataTierinProvenanceError, ex :
299              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
300              raise CrabException(msg)
301          except DataDiscovery.DataDiscoveryError, ex:
302 <            msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
302 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
303              raise CrabException(msg)
304  
305 <        ## get list of all required data in the form of dbs paths  (dbs path = /dataset/datatier/owner)
306 <        self.DBSPaths=self.pubdata.getDBSPaths()
307 <        common.logger.message("Required data are : ")
308 <        for path in self.DBSPaths:
205 <            common.logger.message(" --> "+path )
305 >        self.filesbyblock=self.pubdata.getFiles()
306 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
307 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
308 >        self.parentFiles=self.pubdata.getParent()
309  
310          ## get max number of events
311 <        common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
209 <        self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
210 <        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
211 <
212 <        ## get fileblocks corresponding to the required data
213 <        fb=self.pubdata.getFileBlocks()
214 <        common.logger.debug(5,"fileblocks are %s"%fb)
311 >        self.maxEvents=self.pubdata.getMaxEvents()
312  
313          ## Contact the DLS and build a list of sites hosting the fileblocks
314          try:
315 <            dataloc=DataLocation.DataLocation(self.pubdata.getFileBlocks(),cfg_params)
315 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
316              dataloc.fetchDLSInfo()
317          except DataLocation.DataLocationError , ex:
318              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
319              raise CrabException(msg)
223        
224        allsites=dataloc.getSites()
225        common.logger.debug(5,"sites are %s"%allsites)
226        sites=self.checkBlackList(allsites)
227        common.logger.debug(5,"sites are (after black list) %s"%sites)
228        sites=self.checkWhiteList(sites)
229        common.logger.debug(5,"sites are (after white list) %s"%sites)
320  
321 <        if len(sites)==0:
322 <            msg = 'No sites hosting all the needed data! Exiting... '
323 <            raise CrabException(msg)
324 <        common.logger.message("List of Sites hosting the data : "+str(sites))
325 <        common.logger.debug(6, "List of Sites: "+str(sites))
326 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
327 <        return
328 <        
329 <    def checkBlackList(self, allSites):
330 <        if len(self.reCEBlackList)==0: return allSites
331 <        sites = []
332 <        for site in allSites:
243 <            common.logger.debug(10,'Site '+site)
244 <            good=1
245 <            for re in self.reCEBlackList:
246 <                if re.search(site):
247 <                    common.logger.message('CE in black list, skipping site '+site)
248 <                    good=0
249 <                pass
250 <            if good: sites.append(site)
251 <        if len(sites) == 0:
252 <            common.logger.debug(3,"No sites found after BlackList")
321 >
322 >        sites = dataloc.getSites()
323 >        allSites = []
324 >        listSites = sites.values()
325 >        for listSite in listSites:
326 >            for oneSite in listSite:
327 >                allSites.append(oneSite)
328 >        allSites = self.uniquelist(allSites)
329 >
330 >        # screen output
331 >        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
332 >
333          return sites
334  
335 <    def checkWhiteList(self, allsites):
335 >    def jobSplittingByBlocks(self, blockSites):
336 >        """
337 >        Perform job splitting. Jobs run over an integer number of files
338 >        and no more than one block.
339 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
340 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
341 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
342 >                  self.maxEvents, self.filesbyblock
343 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
344 >              self.total_number_of_jobs - Total # of jobs
345 >              self.list_of_args - File(s) job will run on (a list of lists)
346 >        """
347  
348 <        if len(self.reCEWhiteList)==0: return pubDBUrls
349 <        sites = []
350 <        for site in allsites:
351 <            #print 'connecting to the URL ',url
352 <            good=0
353 <            for re in self.reCEWhiteList:
354 <                if re.search(site):
355 <                    common.logger.debug(5,'CE in white list, adding site '+site)
356 <                    good=1
357 <                if not good: continue
358 <                sites.append(site)
359 <        if len(sites) == 0:
360 <            common.logger.message("No sites found after WhiteList\n")
348 >        # ---- Handle the possible job splitting configurations ---- #
349 >        if (self.selectTotalNumberEvents):
350 >            totalEventsRequested = self.total_number_of_events
351 >        if (self.selectEventsPerJob):
352 >            eventsPerJobRequested = self.eventsPerJob
353 >            if (self.selectNumberOfJobs):
354 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
355 >
356 >        # If user requested all the events in the dataset
357 >        if (totalEventsRequested == -1):
358 >            eventsRemaining=self.maxEvents
359 >        # If user requested more events than are in the dataset
360 >        elif (totalEventsRequested > self.maxEvents):
361 >            eventsRemaining = self.maxEvents
362 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
363 >        # If user requested less events than are in the dataset
364          else:
365 <            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
366 <        return sites
365 >            eventsRemaining = totalEventsRequested
366 >
367 >        # If user requested more events per job than are in the dataset
368 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
369 >            eventsPerJobRequested = self.maxEvents
370 >
371 >        # For user info at end
372 >        totalEventCount = 0
373 >
374 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
375 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
376 >
377 >        if (self.selectNumberOfJobs):
378 >            common.logger.message("May not create the exact number_of_jobs requested.")
379 >
380 >        if ( self.ncjobs == 'all' ) :
381 >            totalNumberOfJobs = 999999999
382 >        else :
383 >            totalNumberOfJobs = self.ncjobs
384 >
385 >        blocks = blockSites.keys()
386 >        blockCount = 0
387 >        # Backup variable in case self.maxEvents counted events in a non-included block
388 >        numBlocksInDataset = len(blocks)
389 >
390 >        jobCount = 0
391 >        list_of_lists = []
392 >
 393 >        # list tracking which jobs belong to which block
394 >        jobsOfBlock = {}
395 >
396 >        # ---- Iterate over the blocks in the dataset until ---- #
397 >        # ---- we've met the requested total # of events    ---- #
398 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
399 >            block = blocks[blockCount]
400 >            blockCount += 1
401 >            if block not in jobsOfBlock.keys() :
402 >                jobsOfBlock[block] = []
403 >
404 >            if self.eventsbyblock.has_key(block) :
405 >                numEventsInBlock = self.eventsbyblock[block]
406 >                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
407 >
408 >                files = self.filesbyblock[block]
409 >                numFilesInBlock = len(files)
410 >                if (numFilesInBlock <= 0):
411 >                    continue
412 >                fileCount = 0
413 >
414 >                # ---- New block => New job ---- #
415 >                parString = ""
416 >                # counter for number of events in files currently worked on
417 >                filesEventCount = 0
418 >                # flag if next while loop should touch new file
419 >                newFile = 1
420 >                # job event counter
421 >                jobSkipEventCount = 0
422 >
423 >                # ---- Iterate over the files in the block until we've met the requested ---- #
424 >                # ---- total # of events or we've gone over all the files in this block  ---- #
425 >                pString=''
426 >                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
427 >                    file = files[fileCount]
428 >                    if self.useParent:
429 >                        parent = self.parentFiles[file]
430 >                        for f in parent :
431 >                            pString += '\\\"' + f + '\\\"\,'
432 >                        common.logger.debug(6, "File "+str(file)+" has the following parents: "+str(parent))
433 >                        common.logger.write("File "+str(file)+" has the following parents: "+str(parent))
434 >                    if newFile :
435 >                        try:
436 >                            numEventsInFile = self.eventsbyfile[file]
437 >                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
438 >                            # increase filesEventCount
439 >                            filesEventCount += numEventsInFile
440 >                            # Add file to current job
441 >                            parString += '\\\"' + file + '\\\"\,'
442 >                            newFile = 0
443 >                        except KeyError:
444 >                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
445 >
446 >                    eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining)
447 >                    # if less events in file remain than eventsPerJobRequested
448 >                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested):
449 >                        # if last file in block
450 >                        if ( fileCount == numFilesInBlock-1 ) :
451 >                            # end job using last file, use remaining events in block
452 >                            # close job and touch new file
453 >                            fullString = parString[:-2]
454 >                            if self.useParent:
455 >                                fullParentString = pString[:-2]
456 >                                list_of_lists.append([fullString,fullParentString,str(-1),str(jobSkipEventCount)])
457 >                            else:
458 >                                list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
459 >                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
460 >                            self.jobDestination.append(blockSites[block])
461 >                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
462 >                            # fill jobs of block dictionary
463 >                            jobsOfBlock[block].append(jobCount+1)
464 >                            # reset counter
465 >                            jobCount = jobCount + 1
466 >                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
467 >                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
468 >                            jobSkipEventCount = 0
469 >                            # reset file
470 >                            pString = ""
471 >                            parString = ""
472 >                            filesEventCount = 0
473 >                            newFile = 1
474 >                            fileCount += 1
475 >                        else :
476 >                            # go to next file
477 >                            newFile = 1
478 >                            fileCount += 1
479 >                    # if events in file equal to eventsPerJobRequested
480 >                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
481 >                        # close job and touch new file
482 >                        fullString = parString[:-2]
483 >                        if self.useParent:
484 >                            fullParentString = pString[:-2]
485 >                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
486 >                        else:
487 >                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
488 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
489 >                        self.jobDestination.append(blockSites[block])
490 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
491 >                        jobsOfBlock[block].append(jobCount+1)
492 >                        # reset counter
493 >                        jobCount = jobCount + 1
494 >                        totalEventCount = totalEventCount + eventsPerJobRequested
495 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
496 >                        jobSkipEventCount = 0
497 >                        # reset file
498 >                        pString = ""
499 >                        parString = ""
500 >                        filesEventCount = 0
501 >                        newFile = 1
502 >                        fileCount += 1
503 >
504 >                    # if more events in file remain than eventsPerJobRequested
505 >                    else :
506 >                        # close job but don't touch new file
507 >                        fullString = parString[:-2]
508 >                        if self.useParent:
509 >                            fullParentString = pString[:-2]
510 >                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
511 >                        else:
512 >                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
513 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
514 >                        self.jobDestination.append(blockSites[block])
515 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
516 >                        jobsOfBlock[block].append(jobCount+1)
517 >                        # increase counter
518 >                        jobCount = jobCount + 1
519 >                        totalEventCount = totalEventCount + eventsPerJobRequested
520 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
521 >                        # calculate skip events for last file
522 >                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
523 >                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
524 >                        # remove all but the last file
525 >                        filesEventCount = self.eventsbyfile[file]
526 >                        if self.useParent:
527 >                            for f in parent : pString += '\\\"' + f + '\\\"\,'
528 >                        parString = '\\\"' + file + '\\\"\,'
529 >                    pass # END if
530 >                pass # END while (iterate over files in the block)
531 >        pass # END while (iterate over blocks in the dataset)
532 >        self.ncjobs = self.total_number_of_jobs = jobCount
533 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
534 >            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
535 >        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
536 >
537 >        # screen output
538 >        screenOutput = "List of jobs and available destination sites:\n\n"
539 >
540 >        # keep trace of block with no sites to print a warning at the end
541 >        noSiteBlock = []
542 >        bloskNoSite = []
543 >
544 >        blockCounter = 0
545 >        for block in blocks:
546 >            if block in jobsOfBlock.keys() :
547 >                blockCounter += 1
548 >                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),
549 >                    ','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
550 >                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
551 >                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
552 >                    bloskNoSite.append( blockCounter )
553 >
554 >        common.logger.message(screenOutput)
555 >        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
556 >            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
557 >            virgola = ""
558 >            if len(bloskNoSite) > 1:
559 >                virgola = ","
560 >            for block in bloskNoSite:
561 >                msg += ' ' + str(block) + virgola
562 >            msg += '\n               Related jobs:\n                 '
563 >            virgola = ""
564 >            if len(noSiteBlock) > 1:
565 >                virgola = ","
566 >            for range_jobs in noSiteBlock:
567 >                msg += str(range_jobs) + virgola
 568 >            msg += '\n               will not be submitted and this block of data cannot be analyzed!\n'
569 >            if self.cfg_params.has_key('EDG.se_white_list'):
570 >                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
571 >                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
572 >                msg += 'Please check if the dataset is available at this site!)\n'
573 >            if self.cfg_params.has_key('EDG.ce_white_list'):
574 >                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
575 >                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
576 >                msg += 'Please check if the dataset is available at this site!)\n'
577 >
578 >            common.logger.message(msg)
579 >
580 >        self.list_of_args = list_of_lists
581 >        return
582 >
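A worked example (values are assumptions, not taken from the diff) of the skip-event bookkeeping in jobSplittingByBlocks: a job that asked for 500 events absorbs file1 (300 events) and part of file2 (300 events), so the following job must skip the events of file2 already consumed:

    eventsPerJobRequested = 500
    filesEventCount = 300 + 300        # file1 + file2 accumulated so far
    prevSkip = 0                       # nothing was skipped at the start of file1
    eventsInLastFile = 300             # file2
    jobSkipEventCount = eventsPerJobRequested - (filesEventCount - prevSkip - eventsInLastFile)
    assert jobSkipEventCount == 200    # the next job starts 200 events into file2
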
583 >    def jobSplittingNoInput(self):
584 >        """
585 >        Perform job splitting based on number of event per job
586 >        """
587 >        common.logger.debug(5,'Splitting per events')
588 >
589 >        if (self.selectEventsPerJob):
590 >            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
591 >        if (self.selectNumberOfJobs):
592 >            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
593 >        if (self.selectTotalNumberEvents):
594 >            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
595 >
596 >        if (self.total_number_of_events < 0):
597 >            msg='Cannot split jobs per Events with "-1" as total number of events'
598 >            raise CrabException(msg)
599 >
600 >        if (self.selectEventsPerJob):
601 >            if (self.selectTotalNumberEvents):
602 >                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
603 >            elif(self.selectNumberOfJobs) :
604 >                self.total_number_of_jobs =self.theNumberOfJobs
605 >                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
606 >
607 >        elif (self.selectNumberOfJobs) :
608 >            self.total_number_of_jobs = self.theNumberOfJobs
609 >            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
610 >
611 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
612 >
613 >        # is there any remainder?
614 >        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
615 >
616 >        common.logger.debug(5,'Check  '+str(check))
617 >
 618 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' events, for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
619 >        if check > 0:
620 >            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
621 >
622 >        # argument is seed number.$i
623 >        self.list_of_args = []
624 >        for i in range(self.total_number_of_jobs):
625 >            ## Since there is no input, any site is good
626 >            self.jobDestination.append([""]) #must be empty to write correctly the xml
627 >            args=[]
628 >            if (self.firstRun):
629 >                ## pythia first run
630 >                args.append(str(self.firstRun)+str(i))
631 >            self.list_of_args.append(args)
632 >
633 >        return
634 >
635 >
636 >    def jobSplittingForScript(self):
637 >        """
638 >        Perform job splitting based on number of job
639 >        """
640 >        common.logger.debug(5,'Splitting per job')
641 >        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
642 >
643 >        self.total_number_of_jobs = self.theNumberOfJobs
644 >
645 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
646 >
647 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
648 >
649 >        # argument is seed number.$i
650 >        self.list_of_args = []
651 >        for i in range(self.total_number_of_jobs):
652 >            self.jobDestination.append([""])
653 >            self.list_of_args.append([str(i)])
654 >        return
655 >
656 >    def split(self, jobParams,firstJobID):
657 >
658 >        njobs = self.total_number_of_jobs
659 >        arglist = self.list_of_args
660 >        # create the empty structure
661 >        for i in range(njobs):
662 >            jobParams.append("")
663 >
664 >        listID=[]
665 >        listField=[]
666 >        for id in range(njobs):
667 >            job = id + int(firstJobID)
668 >            jobParams[id] = arglist[id]
669 >            listID.append(job+1)
670 >            job_ToSave ={}
671 >            concString = ' '
672 >            argu=''
673 >            if len(jobParams[id]):
674 >                argu +=   concString.join(jobParams[id] )
675 >            job_ToSave['arguments']= str(job+1)+' '+argu
676 >            job_ToSave['dlsDestination']= self.jobDestination[id]
677 >            listField.append(job_ToSave)
678 >            msg="Job "+str(job)+" Arguments:   "+str(job+1)+" "+argu+"\n"  \
679 >            +"                     Destination: "+str(self.jobDestination[id])
680 >            common.logger.debug(5,msg)
681 >        common._db.updateJob_(listID,listField)
682 >        self.argsList = (len(jobParams[0])+1)
683 >
684 >        return
685 >
686 >    def numberOfJobs(self):
687 >        return self.total_number_of_jobs
688  
689      def getTarBall(self, exe):
690          """
691          Return the TarBall with lib and exe
692          """
693 <        
279 <        # if it exist, just return it
280 <        self.tgzNameWithPath = common.work_space.shareDir()+self.tgz_name
693 >        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
694          if os.path.exists(self.tgzNameWithPath):
695              return self.tgzNameWithPath
696  
# Line 290 | Line 703 | class Cmssw(JobType):
703  
704          # First of all declare the user Scram area
705          swArea = self.scram.getSWArea_()
293        #print "swArea = ", swArea
294        swVersion = self.scram.getSWVersion()
295        #print "swVersion = ", swVersion
706          swReleaseTop = self.scram.getReleaseTop_()
707 <        #print "swReleaseTop = ", swReleaseTop
298 <        
707 >
708          ## check if working area is release top
709          if swReleaseTop == '' or swArea == swReleaseTop:
710 +            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
711              return
712  
713 <        filesToBeTarred = []
714 <        ## First find the executable
715 <        if (self.executable != ''):
716 <            exeWithPath = self.scram.findFile_(executable)
717 < #           print exeWithPath
718 <            if ( not exeWithPath ):
719 <                raise CrabException('User executable '+executable+' not found')
720 <
721 <            ## then check if it's private or not
722 <            if exeWithPath.find(swReleaseTop) == -1:
723 <                # the exe is private, so we must ship
724 <                common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
725 <                path = swArea+'/'
726 <                exe = string.replace(exeWithPath, path,'')
727 <                filesToBeTarred.append(exe)
728 <                pass
729 <            else:
730 <                # the exe is from release, we'll find it on WN
713 >        import tarfile
714 >        try: # create tar ball
715 >            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
716 >            ## First find the executable
717 >            if (self.executable != ''):
718 >                exeWithPath = self.scram.findFile_(executable)
719 >                if ( not exeWithPath ):
720 >                    raise CrabException('User executable '+executable+' not found')
721 >
722 >                ## then check if it's private or not
723 >                if exeWithPath.find(swReleaseTop) == -1:
724 >                    # the exe is private, so we must ship
725 >                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
726 >                    path = swArea+'/'
727 >                    # distinguish case when script is in user project area or given by full path somewhere else
728 >                    if exeWithPath.find(path) >= 0 :
729 >                        exe = string.replace(exeWithPath, path,'')
730 >                        tar.add(path+exe,exe)
731 >                    else :
732 >                        tar.add(exeWithPath,os.path.basename(executable))
733 >                    pass
734 >                else:
735 >                    # the exe is from release, we'll find it on WN
736 >                    pass
737 >
738 >            ## Now get the libraries: only those in local working area
739 >            libDir = 'lib'
740 >            lib = swArea+'/' +libDir
741 >            common.logger.debug(5,"lib "+lib+" to be tarred")
742 >            if os.path.exists(lib):
743 >                tar.add(lib,libDir)
744 >
745 >            ## Now check if module dir is present
746 >            moduleDir = 'module'
747 >            module = swArea + '/' + moduleDir
748 >            if os.path.isdir(module):
749 >                tar.add(module,moduleDir)
750 >
751 >            ## Now check if any data dir(s) is present
752 >            self.dataExist = False
753 >            todo_list = [(i, i) for i in  os.listdir(swArea+"/src")]
754 >            while len(todo_list):
755 >                entry, name = todo_list.pop()
756 >                if name.startswith('crab_0_') or  name.startswith('.') or name == 'CVS':
757 >                    continue
758 >                if os.path.isdir(swArea+"/src/"+entry):
759 >                    entryPath = entry + '/'
760 >                    todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
761 >                    if name == 'data':
762 >                        self.dataExist=True
763 >                        common.logger.debug(5,"data "+entry+" to be tarred")
764 >                        tar.add(swArea+"/src/"+entry,"src/"+entry)
765 >                    pass
766                  pass
767 <
768 <        ## Now get the libraries: only those in local working area
769 <        libDir = 'lib'
770 <        lib = swArea+'/' +libDir
771 <        common.logger.debug(5,"lib "+lib+" to be tarred")
772 <        if os.path.exists(lib):
773 <            filesToBeTarred.append(libDir)
774 <
775 <        ## Now check if the Data dir is present
776 <        dataDir = 'src/Data/'
777 <        if os.path.isdir(swArea+'/'+dataDir):
778 <            filesToBeTarred.append(dataDir)
779 <
780 <        ## Create the tar-ball
781 <        if len(filesToBeTarred)>0:
782 <            cwd = os.getcwd()
783 <            os.chdir(swArea)
784 <            tarcmd = 'tar zcvf ' + self.tgzNameWithPath + ' '
785 <            for line in filesToBeTarred:
786 <                tarcmd = tarcmd + line + ' '
787 <            cout = runCommand(tarcmd)
788 <            if not cout:
789 <                raise CrabException('Could not create tar-ball')
790 <            os.chdir(cwd)
791 <        else:
792 <            common.logger.debug(5,"No files to be to be tarred")
793 <        
794 <        return
795 <        
796 <    def wsSetupEnvironment(self, nj):
767 >
768 >            ### CMSSW ParameterSet
769 >            if not self.pset is None:
770 >                cfg_file = common.work_space.jobDir()+self.configFilename()
771 >                tar.add(cfg_file,self.configFilename())
772 >                common.logger.debug(5,"File added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
773 >
774 >
775 >            ## Add ProdCommon dir to tar
776 >            prodcommonDir = './'
777 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
778 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools','ProdCommon/Core','ProdCommon/MCPayloads', 'IMProv']
779 >            for file in neededStuff:
780 >                tar.add(prodcommonPath+file,prodcommonDir+file)
781 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
782 >
783 >            ##### ML stuff
784 >            ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
785 >            path=os.environ['CRABDIR'] + '/python/'
786 >            for file in ML_file_list:
787 >                tar.add(path+file,file)
788 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
789 >
790 >            ##### Utils
791 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py']
792 >            for file in Utils_file_list:
793 >                tar.add(path+file,file)
794 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
795 >
796 >            ##### AdditionalFiles
797 >            for file in self.additional_inbox_files:
798 >                tar.add(file,string.split(file,'/')[-1])
799 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
800 >
801 >            tar.close()
802 >        except IOError:
803 >            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
804 >        except tarfile.TarError:
805 >            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
806 >
807 >        ## check for tarball size
808 >        tarballinfo = os.stat(self.tgzNameWithPath)
809 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
810 >            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
811 >
812 >        ## create tar-ball with ML stuff
813 >
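A standalone sketch (an assumption mirroring the getTarBall logic above, not code from either revision) of the iterative walk that collects every "data" directory under src/ while skipping CRAB work areas, hidden entries and CVS directories:

    import os

    def find_data_dirs(src):
        found = []
        todo = [(name, name) for name in os.listdir(src)]
        while todo:
            entry, name = todo.pop()
            if name.startswith('crab_0_') or name.startswith('.') or name == 'CVS':
                continue
            if os.path.isdir(os.path.join(src, entry)):
                todo += [(entry + '/' + n, n) for n in os.listdir(os.path.join(src, entry))]
                if name == 'data':
                    found.append('src/' + entry)   # these paths are added to the tarball
        return found
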
814 >    def wsSetupEnvironment(self, nj=0):
815          """
816          Returns part of a job script which prepares
817          the execution environment for the job 'nj'.
818          """
819 +        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
820 +            psetName = 'pset.py'
821 +        else:
822 +            psetName = 'pset.cfg'
823          # Prepare JobType-independent part
824 <        txt = self.wsSetupCMSEnvironment_()
824 >        txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
825 >        txt += 'echo ">>> setup environment"\n'
826 >        txt += 'if [ $middleware == LCG ]; then \n'
827 >        txt += self.wsSetupCMSLCGEnvironment_()
828 >        txt += 'elif [ $middleware == OSG ]; then\n'
829 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
830 >        txt += '    if [ ! $? == 0 ] ;then\n'
831 >        txt += '        echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
832 >        txt += '        job_exit_code=10016\n'
833 >        txt += '        func_exit\n'
834 >        txt += '    fi\n'
835 >        txt += '    echo ">>> Created working directory: $WORKING_DIR"\n'
836 >        txt += '\n'
837 >        txt += '    echo "Change to working directory: $WORKING_DIR"\n'
838 >        txt += '    cd $WORKING_DIR\n'
839 >        txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
840 >        txt += self.wsSetupCMSOSGEnvironment_()
841 >        txt += 'fi\n'
842  
843          # Prepare JobType-specific part
844          scram = self.scram.commandName()
845          txt += '\n\n'
846 <        txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
846 >        txt += 'echo ">>> specific cmssw setup environment:"\n'
847 >        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
848          txt += scram+' project CMSSW '+self.version+'\n'
849          txt += 'status=$?\n'
850          txt += 'if [ $status != 0 ] ; then\n'
851 <        txt += '   echo "SET_EXE_ENV 1 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
852 <        txt += '   echo "JOB_EXIT_STATUS = 5"\n'
853 <        txt += '   echo "SanityCheckCode = 5" | tee -a $RUNTIME_AREA/$repo\n'
369 <        txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
370 <        txt += '   exit 5 \n'
851 >        txt += '    echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n'
852 >        txt += '    job_exit_code=10034\n'
853 >        txt += '    func_exit\n'
854          txt += 'fi \n'
372        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
855          txt += 'cd '+self.version+'\n'
856 <        ### needed grep for bug in scramv1 ###
856 >        txt += 'SOFTWARE_DIR=`pwd`\n'
857 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
858          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
859 <
859 >        txt += 'if [ $? != 0 ] ; then\n'
860 >        txt += '    echo "ERROR ==> Problem with the command: "\n'
861 >        txt += '    echo "eval \`'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME \` at `hostname`"\n'
862 >        txt += '    job_exit_code=10034\n'
863 >        txt += '    func_exit\n'
864 >        txt += 'fi \n'
865          # Handle the arguments:
866          txt += "\n"
867 <        txt += "## ARGUMNETS: $1 Job Number\n"
380 <        # txt += "## ARGUMNETS: $2 First Event for this job\n"
381 <        # txt += "## ARGUMNETS: $3 Max Event for this job\n"
867 >        txt += "## number of arguments (first argument always jobnumber)\n"
868          txt += "\n"
869 <        txt += "narg=$#\n"
384 <        txt += "if [ $narg -lt 1 ]\n"
869 >        txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
870          txt += "then\n"
871 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$narg+ \n"
872 <        txt += '    echo "JOB_EXIT_STATUS = 1"\n'
873 <        txt += '    echo "SanityCheckCode = 1" | tee -a $RUNTIME_AREA/$repo\n'
389 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
390 <        txt += "    exit 1\n"
871 >        txt += "    echo 'ERROR ==> Too few arguments' +$nargs+ \n"
872 >        txt += '    job_exit_code=50113\n'
873 >        txt += "    func_exit\n"
874          txt += "fi\n"
875          txt += "\n"
393        txt += "NJob=$1\n"
394        # txt += "FirstEvent=$2\n"
395        # txt += "MaxEvents=$3\n"
876  
877          # Prepare job-specific part
878          job = common.job_list[nj]
879 <        pset = os.path.basename(job.configFilename())
880 <        txt += '\n'
881 <        txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
402 <        # txt += 'if [ -e $RUNTIME_AREA/orcarc_$CE ] ; then\n'
403 <        # txt += '  cat $RUNTIME_AREA/orcarc_$CE .orcarc >> .orcarc_tmp\n'
404 <        # txt += '  mv .orcarc_tmp .orcarc\n'
405 <        # txt += 'fi\n'
406 <        # txt += 'if [ -e $RUNTIME_AREA/init_$CE.sh ] ; then\n'
407 <        # txt += '  cp $RUNTIME_AREA/init_$CE.sh init.sh\n'
408 <        # txt += 'fi\n'
879 >        if (self.datasetPath):
880 >            txt += '\n'
881 >            txt += 'DatasetPath='+self.datasetPath+'\n'
882  
883 <        if len(self.additional_inbox_files) > 0:
884 <            for file in self.additional_inbox_files:
885 <                txt += 'if [ -e $RUNTIME_AREA/'+file+' ] ; then\n'
886 <                txt += '   cp $RUNTIME_AREA/'+file+' .\n'
887 <                txt += '   chmod +x '+file+'\n'
888 <                txt += 'fi\n'
889 <            pass
890 <
891 <        # txt += '\n'
892 <        # txt += 'chmod +x ./init.sh\n'
893 <        # txt += './init.sh\n'
894 <        # txt += 'exitStatus=$?\n'
895 <        # txt += 'if [ $exitStatus != 0 ] ; then\n'
896 <        # txt += '  echo "SET_EXE_ENV 1 ==> ERROR StageIn init script failed"\n'
897 <        # txt += '  echo "JOB_EXIT_STATUS = $exitStatus" \n'
898 <        # txt += '  echo "SanityCheckCode = $exitStatus" | tee -a $RUNTIME_AREA/$repo\n'
899 <        # txt += '  dumpStatus $RUNTIME_AREA/$repo\n'
900 <        # txt += '  exit $exitStatus\n'
901 <        # txt += 'fi\n'
902 <        # txt += "echo 'SET_EXE_ENV 0 ==> job setup ok'\n"
903 <        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
904 <
905 <        # txt += 'echo "FirstEvent=$FirstEvent" >> .orcarc\n'
906 <        # txt += 'echo "MaxEvents=$MaxEvents" >> .orcarc\n'
907 <        # if self.ML:
908 <        #     txt += 'echo "MonalisaJobId=$NJob" >> .orcarc\n'
883 >            datasetpath_split = self.datasetPath.split("/")
884 >            ### FEDE FOR NEW LFN ###
885 >            self.primaryDataset = datasetpath_split[1]
886 >            ########################
887 >            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
888 >            txt += 'DataTier='+datasetpath_split[2]+'\n'
889 >            txt += 'ApplicationFamily=cmsRun\n'
890 >
891 >        else:
892 >            txt += 'DatasetPath=MCDataTier\n'
893 >            ### FEDE FOR NEW LFN ###
894 >            self.primaryDataset = 'null'
895 >            ########################
896 >            txt += 'PrimaryDataset=null\n'
897 >            txt += 'DataTier=null\n'
898 >            txt += 'ApplicationFamily=MCDataTier\n'
899 >        if self.pset != None:
900 >            pset = os.path.basename(job.configFilename())
901 >            txt += '\n'
902 >            txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
903 >            if (self.datasetPath): # standard job
904 >                txt += 'InputFiles=${args[1]}; export InputFiles\n'
905 >                if (self.useParent):
906 >                    txt += 'ParentFiles=${args[2]}; export ParentFiles\n'
907 >                    txt += 'MaxEvents=${args[3]}; export MaxEvents\n'
908 >                    txt += 'SkipEvents=${args[4]}; export SkipEvents\n'
909 >                else:
910 >                    txt += 'MaxEvents=${args[2]}; export MaxEvents\n'
911 >                    txt += 'SkipEvents=${args[3]}; export SkipEvents\n'
912 >                txt += 'echo "Inputfiles:<$InputFiles>"\n'
913 >                if (self.useParent): txt += 'echo "ParentFiles:<$ParentFiles>"\n'
914 >                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
915 >                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
916 >            else:  # pythia like job
917 >                txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
918 >                txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
919 >                txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
920 >                txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
921 >                if (self.firstRun):
922 >                    txt += 'FirstRun=${args[1]}; export FirstRun\n'
923 >                    txt += 'echo "FirstRun: <$FirstRun>"\n'
924 >
925 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
926 >
927 >
928 >        if self.pset != None:
929 >            # FUTURE: Can simplify for 2_1_x and higher
930 >            txt += '\n'
931 >            if self.debug_wrapper==True:
932 >                txt += 'echo "***** cat ' + psetName + ' *********"\n'
933 >                txt += 'cat ' + psetName + '\n'
934 >                txt += 'echo "****** end ' + psetName + ' ********"\n'
935 >                txt += '\n'
936 >            if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
937 >                txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
938 >            else:
939 >                txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
940 >            txt += 'echo "PSETHASH = $PSETHASH" \n'
941 >            txt += '\n'
942 >        return txt
943 >
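A minimal standalone sketch of the release test used above to pick the parameter-set name (the same test later selects how edmConfigHash is invoked); the helper name and the version numbers in the asserts are illustrative only:

    def pick_pset_name(major, minor):
        # CMSSW 2_1_x and later (and any 3_x release) ship Python configurations
        if (major >= 2 and minor >= 1) or (major >= 3):
            return 'pset.py'
        return 'pset.cfg'

    assert pick_pset_name(2, 1) == 'pset.py'
    assert pick_pset_name(1, 6) == 'pset.cfg'
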
944 >    def wsUntarSoftware(self, nj=0):
945 >        """
946 >        Put in the script the commands to untar and set up
947 >        the software tar-ball shipped with the job.
948 >        """
949 >
950 >        txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
951 >
952 >        if os.path.isfile(self.tgzNameWithPath):
953 >            txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
954 >            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
955 >            if  self.debug_wrapper:
956 >                txt += 'ls -Al \n'
957 >            txt += 'untar_status=$? \n'
958 >            txt += 'if [ $untar_status -ne 0 ]; then \n'
959 >            txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
960 >            txt += '   job_exit_code=$untar_status\n'
961 >            txt += '   func_exit\n'
962 >            txt += 'else \n'
963 >            txt += '   echo "Successful untar" \n'
964 >            txt += 'fi \n'
965 >            txt += '\n'
966 >            txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
967 >            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
968 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
969 >            txt += 'else\n'
970 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
971 >            txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
972 >            txt += 'fi\n'
973 >            txt += '\n'
974 >
975 >            pass
976  
437        txt += '\n'
438        txt += 'echo "***** cat pset.cfg *********"\n'
439        txt += 'cat pset.cfg\n'
440        txt += 'echo "****** end pset.cfg ********"\n'
977          return txt
978  
979 <    def modifySteeringCards(self, nj):
979 >    def wsBuildExe(self, nj=0):
980          """
981 <        modify the card provided by the user,
982 <        writing a new card into share dir
981 >        Put in the script the commands to build an executable
982 >        or a library.
983          """
984 <        
984 >
985 >        txt = '\n#Written by cms_cmssw::wsBuildExe\n'
986 >        txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
987 >
988 >        txt += 'rm -r lib/ module/ \n'
989 >        txt += 'mv $RUNTIME_AREA/lib/ . \n'
990 >        txt += 'mv $RUNTIME_AREA/module/ . \n'
991 >        if self.dataExist == True:
992 >            txt += 'rm -r src/ \n'
993 >            txt += 'mv $RUNTIME_AREA/src/ . \n'
994 >        if len(self.additional_inbox_files)>0:
995 >            for file in self.additional_inbox_files:
996 >                txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
997 >        # txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
998 >        # txt += 'mv $RUNTIME_AREA/IMProv/ . \n'
999 >
1000 >        txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
1001 >        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
1002 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
1003 >        txt += 'else\n'
1004 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
1005 >        txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1006 >        txt += 'fi\n'
1007 >        txt += '\n'
1008 >
1009 >        return txt
1010 >
1011 >
1012      def executableName(self):
1013 <        return self.executable
1013 >        if self.scriptExe:
1014 >            return "sh "
1015 >        else:
1016 >            return self.executable
1017  
1018      def executableArgs(self):
1019 <        return "-p pset.cfg"
1019 >        # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
1020 >        if self.scriptExe:#CarlosDaniele
1021 >            return   self.scriptExe + " $NJob"
1022 >        else:
1023 >            ex_args = ""
1024 >            # FUTURE: This tests the CMSSW version. Can remove code as versions deprecated
1025 >            # Framework job report
1026 >            if (self.CMSSW_major >= 1 and self.CMSSW_minor >= 5) or (self.CMSSW_major >= 2):
1027 >                ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
1028 >            # Type of config file
1029 >            if self.CMSSW_major >= 2 :
1030 >                ex_args += " -p pset.py"
1031 >            else:
1032 >                ex_args += " -p pset.cfg"
1033 >            return ex_args
1034  
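For a plain cmsRun job the argument string assembled above depends only on the release numbers; a standalone sketch (the helper name and the release used in the usage line are hypothetical):

    def executable_args(major, minor):
        # mirrors the version tests above for a non-scriptExe job
        args = ""
        if (major >= 1 and minor >= 5) or (major >= 2):
            args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"   # ask for a framework job report
        if major >= 2:
            args += " -p pset.py"
        else:
            args += " -p pset.cfg"
        return args

    print executable_args(2, 2)   # " -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py"
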
1035      def inputSandbox(self, nj):
1036          """
1037          Returns a list of filenames to be put in JDL input sandbox.
1038          """
1039          inp_box = []
460        # dict added to delete duplicate from input sandbox file list
461        seen = {}
462        ## code
1040          if os.path.isfile(self.tgzNameWithPath):
1041              inp_box.append(self.tgzNameWithPath)
1042 <        ## config
1043 <        inp_box.append(common.job_list[nj].configFilename())
467 <        ## additional input files
468 <        for file in self.additional_inbox_files:
469 <            inp_box.append(common.work_space.cwdDir()+file)
470 <        #print "sono inputSandbox, inp_box = ", inp_box
1042 >        wrapper = os.path.basename(str(common._db.queryTask('scriptName')))
1043 >        inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper)
1044          return inp_box
1045  
1046      def outputSandbox(self, nj):
# Line 476 | Line 1049 | class Cmssw(JobType):
1049          """
1050          out_box = []
1051  
479        stdout=common.job_list[nj].stdout()
480        stderr=common.job_list[nj].stderr()
481
1052          ## User Declared output files
1053 <        for out in self.output_file:
1054 <            n_out = nj + 1
1055 <            out_box.append(self.numberFile_(out,str(n_out)))
1053 >        for out in (self.output_file+self.output_file_sandbox):
1054 >            n_out = nj + 1
1055 >            out_box.append(numberFile(out,str(n_out)))
1056          return out_box
487        return []
1057  
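numberFile (imported from crab_util) tags each output name with the job index before the last extension, as the old numberFile_ helper further down still shows; an illustrative re-implementation with hypothetical file names:

    def number_file(name, tag):
        # append _<tag> before the last extension: histo.root -> histo_3.root
        parts = name.split(".")
        if len(parts) > 1:
            return ".".join(parts[:-1]) + "_" + tag + "." + parts[-1]
        return name + "_" + tag

    assert number_file("histo.root", "3") == "histo_3.root"
    assert number_file("summary", "3") == "summary_3"
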
1058 <    def prepareSteeringCards(self):
1058 >
1059 >    def wsRenameOutput(self, nj):
1060          """
1061 <        Make initial modifications of the user's steering card file.
1061 >        Returns part of a job script which renames the produced files.
1062          """
493        infile = open(self.pset,'r')
494            
495        outfile = open(common.work_space.jobDir()+self.name()+'.cfg', 'w')
496          
497        outfile.write('\n\n##### The following cards have been created by CRAB: DO NOT TOUCH #####\n')
1063  
1064 <        outfile.write('InputCollections=/System/'+self.owner+'/'+self.dataset+'/'+self.dataset+'\n')
1064 >        txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
1065 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1066 >        txt += 'echo ">>> current directory content:"\n'
1067 >        if self.debug_wrapper:
1068 >            txt += 'ls -Al\n'
1069 >        txt += '\n'
1070  
1071 <        infile.close()
1072 <        outfile.close()
1073 <        return
1071 >        for fileWithSuffix in (self.output_file):
1072 >            output_file_num = numberFile(fileWithSuffix, '$NJob')
1073 >            txt += '\n'
1074 >            txt += '# check output file\n'
1075 >            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1076 >            if (self.copy_data == 1):  # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA
1077 >                txt += '    mv '+fileWithSuffix+' '+output_file_num+'\n'
1078 >                txt += '    ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1079 >            else:
1080 >                txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1081 >                txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1082 >            txt += 'else\n'
1083 >            txt += '    job_exit_code=60302\n'
1084 >            txt += '    echo "WARNING: Output file '+fileWithSuffix+' not found"\n'
1085 >            if common.scheduler.name().upper() == 'CONDOR_G':
1086 >                txt += '    if [ $middleware == OSG ]; then \n'
1087 >                txt += '        echo "prepare dummy output file"\n'
1088 >                txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1089 >                txt += '    fi \n'
1090 >            txt += 'fi\n'
1091 >        file_list = []
1092 >        for fileWithSuffix in (self.output_file):
1093 >             file_list.append(numberFile(fileWithSuffix, '$NJob'))
1094  
1095 <    def wsRenameOutput(self, nj):
1095 >        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
1096 >        txt += '\n'
1097 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1098 >        txt += 'echo ">>> current directory content:"\n'
1099 >        if self.debug_wrapper:
1100 >            txt += 'ls -Al\n'
1101 >        txt += '\n'
1102 >        txt += 'cd $RUNTIME_AREA\n'
1103 >        txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
1104 >        return txt
1105 >
1106 >    def getRequirements(self, nj=[]):
1107          """
1108 <        Returns part of a job script which renames the produced files.
1108 >        return job requirements to add to jdl files
1109          """
1110 +        req = ''
1111 +        if self.version:
1112 +            req='Member("VO-cms-' + \
1113 +                 self.version + \
1114 +                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1115 +        if self.executable_arch:
1116 +            req+=' && Member("VO-cms-' + \
1117 +                 self.executable_arch + \
1118 +                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1119 +
1120 +        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1121 +        if ( common.scheduler.name() == "glitecoll" ) or ( common.scheduler.name() == "glite"):
1122 +            req += ' && other.GlueCEStateStatus == "Production" '
1123  
1124 <        txt = '\n'
1125 <        file_list = ''
1126 <        for fileWithSuffix in self.output_file:
1127 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1128 <            file_list=file_list+output_file_num+' '
1129 <            txt += '\n'
1130 <            txt += 'ls \n'
1131 <            txt += '\n'
1132 <            txt += 'ls '+fileWithSuffix+'\n'
1133 <            txt += 'exe_result=$?\n'
1134 <            txt += 'if [ $exe_result -ne 0 ] ; then\n'
1135 <            txt += '   echo "ERROR: No output file to manage"\n'
1136 <            txt += '   echo "JOB_EXIT_STATUS = $exe_result"\n'
1137 <            txt += '   echo "SanityCheckCode = $exe_result" | tee -a $RUNTIME_AREA/$repo\n'
1138 <            txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
1139 <            txt += '   exit $exe_result \n'
1124 >        return req
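
For illustration (version, architecture and scheduler are hypothetical values), with self.version = CMSSW_2_0_12, self.executable_arch = slc4_ia32_gcc345 and the glite scheduler, the string returned above reads:

    Member("VO-cms-CMSSW_2_0_12", other.GlueHostApplicationSoftwareRunTimeEnvironment)
     && Member("VO-cms-slc4_ia32_gcc345", other.GlueHostApplicationSoftwareRunTimeEnvironment)
     && (other.GlueHostNetworkAdapterOutboundIP)
     && other.GlueCEStateStatus == "Production"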
1125 >
1126 >    def configFilename(self):
1127 >        """ return the config filename """
1128 >        # FUTURE: Can remove cfg mode for CMSSW >= 2_1_x
1129 >        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
1130 >          return self.name()+'.py'
1131 >        else:
1132 >          return self.name()+'.cfg'
1133 >
1134 >    def wsSetupCMSOSGEnvironment_(self):
1135 >        """
1136 >        Returns part of a job script which prepares
1137 >        the execution environment and which is common for all CMS jobs.
1138 >        """
1139 >        txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n'
1140 >        txt += '    echo ">>> setup CMS OSG environment:"\n'
1141 >        txt += '    echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
1142 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1143 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1144 >        txt += '    if [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1145 >        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1146 >        txt += '        source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1147 >        txt += '    else\n'
1148 >        txt += '        echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1149 >        txt += '        job_exit_code=10020\n'
1150 >        txt += '        func_exit\n'
1151 >        txt += '    fi\n'
1152 >        txt += '\n'
1153 >        txt += '    echo "==> setup cms environment ok"\n'
1154 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1155 >
1156 >        return txt
1157 >
1158 >    def wsSetupCMSLCGEnvironment_(self):
1159 >        """
1160 >        Returns part of a job script which prepares
1161 >        the execution environment and which is common for all CMS jobs.
1162 >        """
1163 >        txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n'
1164 >        txt += '    echo ">>> setup CMS LCG environment:"\n'
1165 >        txt += '    echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
1166 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1167 >        txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
1168 >        txt += '    if [ ! $VO_CMS_SW_DIR ] ;then\n'
1169 >        txt += '        echo "ERROR ==> CMS software dir not found on WN `hostname`"\n'
1170 >        txt += '        job_exit_code=10031\n'
1171 >        txt += '        func_exit\n'
1172 >        txt += '    else\n'
1173 >        txt += '        echo "Sourcing environment... "\n'
1174 >        txt += '        if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1175 >        txt += '            echo "ERROR ==> cmsset_default.sh file not found in dir $VO_CMS_SW_DIR"\n'
1176 >        txt += '            job_exit_code=10020\n'
1177 >        txt += '            func_exit\n'
1178 >        txt += '        fi\n'
1179 >        txt += '        echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1180 >        txt += '        source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1181 >        txt += '        result=$?\n'
1182 >        txt += '        if [ $result -ne 0 ]; then\n'
1183 >        txt += '            echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1184 >        txt += '            job_exit_code=10032\n'
1185 >        txt += '            func_exit\n'
1186 >        txt += '        fi\n'
1187 >        txt += '    fi\n'
1188 >        txt += '    \n'
1189 >        txt += '    echo "==> setup cms environment ok"\n'
1190 >        return txt
1191 >
1192 >    def modifyReport(self, nj):
1193 >        """
1194 >        insert the part of the script that modifies the FrameworkJob Report
1195 >        """
1196 >        txt = '\n#Written by cms_cmssw::modifyReport\n'
1197 >        publish_data = int(self.cfg_params.get('USER.publish_data',0))
1198 >        if (publish_data == 1):
1199 >            processedDataset = self.cfg_params['USER.publish_data_name']
1200 >            if (self.primaryDataset == 'null'):
1201 >                 self.primaryDataset = processedDataset
1202 >            if (common.scheduler.name().upper() == "CAF" or common.scheduler.name().upper() == "LSF"):
1203 >                ### FEDE FOR NEW LFN ###
1204 >                LFNBaseName = LFNBase(self.primaryDataset, processedDataset, LocalUser=True)
1205 >                self.user = getUserName(LocalUser=True)
1206 >                ########################
1207 >            else :
1208 >                ### FEDE FOR NEW LFN ###
1209 >                LFNBaseName = LFNBase(self.primaryDataset, processedDataset)
1210 >                self.user = getUserName()
1211 >                ########################
1212 >
1213 >            txt += 'if [ $copy_exit_status -eq 0 ]; then\n'
1214 >            ### FEDE FOR NEW LFN ###
1215 >            #txt += '    FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName)
1216 >            txt += '    FOR_LFN=%s/${PSETHASH}/\n'%(LFNBaseName)
1217 >            ########################
1218              txt += 'else\n'
1219 <            txt += '   cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1219 >            txt += '    FOR_LFN=/copy_problems/ \n'
1220 >            txt += '    SE=""\n'
1221 >            txt += '    SE_PATH=""\n'
1222 >            txt += 'fi\n'
1223 >
1224 >            txt += 'echo ">>> Modify Job Report:" \n'
1225 >            txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1226 >            txt += 'ProcessedDataset='+processedDataset+'\n'
1227 >            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1228 >            txt += 'echo "SE = $SE"\n'
1229 >            txt += 'echo "SE_PATH = $SE_PATH"\n'
1230 >            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1231 >            txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1232 >            ### FEDE FOR NEW LFN ###
1233 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier ' + self.user + '-$ProcessedDataset-$PSETHASH $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1234 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier ' + self.user + '-$ProcessedDataset-$PSETHASH $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1235 >            ########################
1236 >            txt += 'modifyReport_result=$?\n'
1237 >            txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1238 >            txt += '    modifyReport_result=70500\n'
1239 >            txt += '    job_exit_code=$modifyReport_result\n'
1240 >            txt += '    echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n'
1241 >            txt += '    echo "WARNING: Problem with ModifyJobReport"\n'
1242 >            txt += 'else\n'
1243 >            txt += '    mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1244              txt += 'fi\n'
529            txt += 'cd $RUNTIME_AREA\n'
530                      
531            pass
532      
533        file_list=file_list[:-1]
534        txt += 'file_list="'+file_list+'"\n'
1245          return txt
1246  
1247 <    def numberFile_(self, file, txt):
1247 >    def wsParseFJR(self):
1248          """
1249 <        append _'txt' before last extension of a file
1249 >        Parse the FrameworkJobReport to obtain useful infos
1250          """
1251 <        p = string.split(file,".")
1252 <        # take away last extension
1253 <        name = p[0]
1254 <        for x in p[1:-1]:
1255 <           name=name+"."+x
1256 <        # add "_txt"
1257 <        if len(p)>1:
1258 <          ext = p[len(p)-1]
1259 <          #result = name + '_' + str(txt) + "." + ext
1260 <          result = name + '_' + txt + "." + ext
1261 <        else:
1262 <          #result = name + '_' + str(txt)
1263 <          result = name + '_' + txt
1264 <        
1265 <        return result
1251 >        txt = '\n#Written by cms_cmssw::wsParseFJR\n'
1252 >        txt += 'echo ">>> Parse FrameworkJobReport crab_fjr.xml"\n'
1253 >        txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1254 >        txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1255 >        txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1256 >        if self.debug_wrapper :
1257 >            txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1258 >        txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1259 >        txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1260 >        txt += '            echo ">>> crab_fjr.xml contents: "\n'
1261 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1262 >        txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1263 >        txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
1264 >        txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
1265 >        txt += '        else\n'
1266 >        txt += '            echo "Extracted ExitStatus from FrameworkJobReport parsing output: $executable_exit_status"\n'
1267 >        txt += '        fi\n'
1268 >        txt += '    else\n'
1269 >        txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1270 >        txt += '    fi\n'
1271 >          #### Patch to check input data reading for CMSSW16x; hopefully we'll remove it asap
1272 >
1273 >        txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1274 >        txt += '      echo ">>> Executable succeeded  $executable_exit_status"\n'
1275 >        if (self.datasetPath and not (self.dataset_pu or self.useParent)) :
1276 >          # VERIFY PROCESSED DATA
1277 >            txt += '      echo ">>> Verify list of processed files:"\n'
1278 >            txt += '      echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1279 >            txt += '      python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1280 >            txt += '      cat input-files.txt  | sort | uniq > tmp.txt\n'
1281 >            txt += '      mv tmp.txt input-files.txt\n'
1282 >            txt += '      echo "cat input-files.txt"\n'
1283 >            txt += '      echo "----------------------"\n'
1284 >            txt += '      cat input-files.txt\n'
1285 >            txt += '      cat processed-files.txt | sort | uniq > tmp.txt\n'
1286 >            txt += '      mv tmp.txt processed-files.txt\n'
1287 >            txt += '      echo "----------------------"\n'
1288 >            txt += '      echo "cat processed-files.txt"\n'
1289 >            txt += '      echo "----------------------"\n'
1290 >            txt += '      cat processed-files.txt\n'
1291 >            txt += '      echo "----------------------"\n'
1292 >            txt += '      diff -q input-files.txt processed-files.txt\n'
1293 >            txt += '      fileverify_status=$?\n'
1294 >            txt += '      if [ $fileverify_status -ne 0 ]; then\n'
1295 >            txt += '         executable_exit_status=30001\n'
1296 >            txt += '         echo "ERROR ==> not all input files processed"\n'
1297 >            txt += '         echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1298 >            txt += '         echo "      ==> diff input-files.txt processed-files.txt"\n'
1299 >            txt += '      fi\n'
1300 >        txt += '    elif [ $executable_exit_status -ne 0 ] && [ $executable_exit_status -ne 50015 ] && [ $executable_exit_status -ne 50017 ];then\n'
1301 >        txt += '      echo ">>> Executable failed  $executable_exit_status"\n'
1302 >        txt += '      func_exit\n'
1303 >        txt += '    fi\n'
1304 >        txt += '\n'
1305 >        txt += 'else\n'
1306 >        txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1307 >        txt += 'fi\n'
1308 >        txt += '\n'
1309 >        txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1310 >        txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1311 >        txt += 'job_exit_code=$executable_exit_status\n'
1312 >
1313 >        return txt
1314  
1315 <    def getRequirements(self):
1315 >    def setParam_(self, param, value):
1316 >        self._params[param] = value
1317 >
1318 >    def getParams(self):
1319 >        return self._params
1320 >
1321 >    def uniquelist(self, old):
1322          """
1323 <        return job requirements to add to jdl files
1323 >        remove duplicates from a list
1324          """
1325 <        req = ''
1326 <        if common.analisys_common_info['sites']:
1327 <            if common.analisys_common_info['sw_version']:
1328 <                req='Member("VO-cms-' + \
1329 <                     common.analisys_common_info['sw_version'] + \
1330 <                     '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1331 <            if len(common.analisys_common_info['sites'])>0:
1332 <                req = req + ' && ('
1333 <                for i in range(len(common.analisys_common_info['sites'])):
1334 <                    req = req + 'other.GlueCEInfoHostName == "' \
1335 <                         + common.analisys_common_info['sites'][i] + '"'
1336 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
1337 <                        req = req + ' || '
1338 <            req = req + ')'
1339 <        #print "req = ", req
1340 <        return req
1325 >        nd={}
1326 >        for e in old:
1327 >            nd[e]=0
1328 >        return nd.keys()
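
A usage sketch of the dict-based de-duplication above (values are illustrative; note that the original ordering of the list is not preserved):

    old = ['a.root', 'b.root', 'a.root']
    nd = {}
    for e in old:
        nd[e] = 0
    print nd.keys()   # ['a.root', 'b.root'], in arbitrary order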
1329 >
1330 >    def outList(self):
1331 >        """
1332 >        build the list of expected output files to be checked on the WN
1333 >        """
1334 >        txt = ''
1335 >        txt += 'echo ">>> list of expected files on output sandbox"\n'
1336 >        listOutFiles = []
1337 >        stdout = 'CMSSW_$NJob.stdout'
1338 >        stderr = 'CMSSW_$NJob.stderr'
1339 >        if (self.return_data == 1):
1340 >            for file in (self.output_file+self.output_file_sandbox):
1341 >                listOutFiles.append(numberFile(file, '$NJob'))
1342 >            listOutFiles.append(stdout)
1343 >            listOutFiles.append(stderr)
1344 >        else:
1345 >            for file in (self.output_file_sandbox):
1346 >                listOutFiles.append(numberFile(file, '$NJob'))
1347 >            listOutFiles.append(stdout)
1348 >            listOutFiles.append(stderr)
1349 >        txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1350 >        txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1351 >        txt += 'export filesToCheck\n'
1352 >        return txt
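
For illustration (file names hypothetical), with return_data = 1, output_file = ['histo.root'] and an empty output_file_sandbox, the wrapper fragment emitted above is:

    echo ">>> list of expected files on output sandbox"
    echo "output files: histo_$NJob.root CMSSW_$NJob.stdout CMSSW_$NJob.stderr"
    filesToCheck="histo_$NJob.root CMSSW_$NJob.stdout CMSSW_$NJob.stderr"
    export filesToCheck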

Diff Legend

Removed lines
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)