root/cvsroot/COMP/CRAB/python/cms_cmssw.py

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.27 by spiga, Thu Jul 6 17:50:33 2006 UTC vs.
Revision 1.235 by spiga, Fri Aug 29 15:06:41 2008 UTC

# Line 2 | Line 2 | from JobType import JobType
2   from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5 < import math
5 > from BlackWhiteListParser import SEBlackWhiteListParser
6   import common
7 import PsetManipulator  
8
9 import DBSInfo_EDM
10 import DataDiscovery_EDM
11 import DataLocation_EDM
7   import Scram
8 + from LFNBaseName import *
9  
10 < import os, string, re
10 > import os, string, glob
11  
12   class Cmssw(JobType):
13 <    def __init__(self, cfg_params):
13 >    def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
14          JobType.__init__(self, 'CMSSW')
15          common.logger.debug(3,'CMSSW::__init__')
16 +        self.skip_blocks = skip_blocks
17 +
18 +        self.argsList = []
19  
21        self.analisys_common_info = {}
22        # Marco.
20          self._params = {}
21          self.cfg_params = cfg_params
22 +        # init BlackWhiteListParser
23 +        self.blackWhiteListParser = SEBlackWhiteListParser(cfg_params)
24 +
25 +        ### Temporary patch to automatically skip the ISB size check:
26 +        server=self.cfg_params.get('CRAB.server_name',None)
27 +        size = 9.5
28 +        if server: size = 99999
29 +        ### D.S.
30 +        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',size))
31 +
32 +        # number of jobs requested to be created, limit obj splitting
33 +        self.ncjobs = ncjobs
34 +
35          log = common.logger
36 <        
36 >
37          self.scram = Scram.Scram(cfg_params)
28        scramArea = ''
38          self.additional_inbox_files = []
39          self.scriptExe = ''
40          self.executable = ''
41 +        self.executable_arch = self.scram.getArch()
42          self.tgz_name = 'default.tgz'
43 +        self.scriptName = 'CMSSW.sh'
44 +        self.pset = ''
45 +        self.datasetPath = ''
46  
47 +        # set FJR file name
48 +        self.fjrFileName = 'crab_fjr.xml'
49  
50          self.version = self.scram.getSWVersion()
51 <        self.setParam_('application', self.version)
52 <        common.analisys_common_info['sw_version'] = self.version
53 <        ### FEDE
54 <        common.analisys_common_info['copy_input_data'] = 0
55 <        common.analisys_common_info['events_management'] = 1
51 >        version_array = self.version.split('_')
52 >        self.CMSSW_major = 0
53 >        self.CMSSW_minor = 0
54 >        self.CMSSW_patch = 0
55 >        try:
56 >            self.CMSSW_major = int(version_array[1])
57 >            self.CMSSW_minor = int(version_array[2])
58 >            self.CMSSW_patch = int(version_array[3])
59 >        except:
60 >            msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
61 >            raise CrabException(msg)
62  
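For illustration, a minimal standalone sketch of the version parsing above, assuming release strings of the form CMSSW_<major>_<minor>_<patch>:

# Illustrative sketch of the parsing done in Cmssw.__init__ above.
version = "CMSSW_2_1_8"                 # e.g. value returned by scram.getSWVersion()
parts = version.split('_')              # -> ['CMSSW', '2', '1', '8']
try:
    major, minor, patch = int(parts[1]), int(parts[2]), int(parts[3])
except (IndexError, ValueError):
    raise ValueError("Cannot parse CMSSW version string: " + version)
print major, minor, patch               # -> 2 1 8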
63          ### collect Data cards
64 <        try:
65 <            tmp =  cfg_params['CMSSW.datasetpath']
66 <            log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
46 <            if string.lower(tmp)=='none':
47 <                self.datasetPath = None
48 <                self.selectNoInput = 1
49 <            else:
50 <                self.datasetPath = tmp
51 <                self.selectNoInput = 0
52 <        except KeyError:
53 <            msg = "Error: datasetpath not defined "  
64 >
65 >        if not cfg_params.has_key('CMSSW.datasetpath'):
66 >            msg = "Error: datasetpath not defined "
67              raise CrabException(msg)
68  
69 <        # ML monitoring
70 <        # split dataset path style: /PreProdR3Minbias/SIM/GEN-SIM
58 <        if not self.datasetPath:
59 <            self.setParam_('dataset', 'None')
60 <            self.setParam_('owner', 'None')
61 <        else:
62 <            datasetpath_split = self.datasetPath.split("/")
63 <            self.setParam_('dataset', datasetpath_split[1])
64 <            self.setParam_('owner', datasetpath_split[-1])
69 >        ### Temporary: added to remove input file control in the case of PU
70 >        self.dataset_pu = cfg_params.get('CMSSW.dataset_pu', None)
71  
72 <        self.setTaskid_()
73 <        self.setParam_('taskId', self.cfg_params['taskId'])
72 >        tmp =  cfg_params['CMSSW.datasetpath']
73 >        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
74 >        if string.lower(tmp)=='none':
75 >            self.datasetPath = None
76 >            self.selectNoInput = 1
77 >        else:
78 >            self.datasetPath = tmp
79 >            self.selectNoInput = 0
80  
81          self.dataTiers = []
82 <
82 >        self.debugWrap = ''
83 >        self.debug_wrapper = cfg_params.get('USER.debug_wrapper',False)
84 >        if self.debug_wrapper: self.debugWrap='--debug'
85          ## now the application
86 <        try:
87 <            self.executable = cfg_params['CMSSW.executable']
74 <            self.setParam_('exe', self.executable)
75 <            log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
76 <            msg = "Default executable cmsRun overridden. Switch to " + self.executable
77 <            log.debug(3,msg)
78 <        except KeyError:
79 <            self.executable = 'cmsRun'
80 <            self.setParam_('exe', self.executable)
81 <            msg = "User executable not defined. Use cmsRun"
82 <            log.debug(3,msg)
83 <            pass
86 >        self.executable = cfg_params.get('CMSSW.executable','cmsRun')
87 >        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
88  
89 <        try:
90 <            self.pset = cfg_params['CMSSW.pset']
91 <            log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
89 >        if not cfg_params.has_key('CMSSW.pset'):
90 >            raise CrabException("PSet file missing. Cannot run cmsRun ")
91 >        self.pset = cfg_params['CMSSW.pset']
92 >        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
93 >        if self.pset.lower() != 'none' :
94              if (not os.path.exists(self.pset)):
95                  raise CrabException("User defined PSet file "+self.pset+" does not exist")
96 <        except KeyError:
97 <            raise CrabException("PSet file missing. Cannot run cmsRun ")
96 >        else:
97 >            self.pset = None
98  
99          # output files
100 <        try:
101 <            self.output_file = []
100 >        ## stuff which must be returned always via sandbox
101 >        self.output_file_sandbox = []
102  
103 <            tmp = cfg_params['CMSSW.output_file']
104 <            if tmp != '':
105 <                tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
106 <                log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
107 <                for tmp in tmpOutFiles:
108 <                    tmp=string.strip(tmp)
109 <                    self.output_file.append(tmp)
110 <                    pass
111 <            else:
112 <                log.message("No output file defined: only stdout/err will be available")
113 <                pass
114 <            pass
109 <        except KeyError:
110 <            log.message("No output file defined: only stdout/err will be available")
111 <            pass
103 >        # add fjr report by default via sandbox
104 >        self.output_file_sandbox.append(self.fjrFileName)
105 >
106 >        # other output files to be returned via sandbox or copied to SE
107 >        outfileflag = False
108 >        self.output_file = []
109 >        tmp = cfg_params.get('CMSSW.output_file',None)
110 >        if tmp :
111 >            self.output_file = [x.strip() for x in tmp.split(',')]
112 >            outfileflag = True #output found
113 >        #else:
114 >        #    log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
115  
116          # script_exe file as additional file in inputSandbox
117 <        try:
118 <            self.scriptExe = cfg_params['USER.script_exe']
119 <            self.additional_inbox_files.append(self.scriptExe)
120 <            if self.scriptExe != '':
121 <               if not os.path.isfile(self.scriptExe):
122 <                  msg ="WARNING. file "+self.scriptExe+" not found"
123 <                  raise CrabException(msg)
124 <        except KeyError:
125 <           pass
126 <                  
117 >        self.scriptExe = cfg_params.get('USER.script_exe',None)
118 >        if self.scriptExe :
119 >            if not os.path.isfile(self.scriptExe):
120 >                msg ="ERROR. file "+self.scriptExe+" not found"
121 >                raise CrabException(msg)
122 >            self.additional_inbox_files.append(string.strip(self.scriptExe))
123 >
124 >        if self.datasetPath == None and self.pset == None and not self.scriptExe :
125 >            msg ="Error. script_exe not defined"
126 >            raise CrabException(msg)
127 >
128 >        # use parent files...
129 >        self.useParent = self.cfg_params.get('CMSSW.use_parent',False)
130 >
131          ## additional input files
132 <        try:
133 <            tmpAddFiles = string.split(cfg_params['CMSSW.additional_input_files'],',')
132 >        if cfg_params.has_key('USER.additional_input_files'):
133 >            tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
134              for tmp in tmpAddFiles:
135 <                if not os.path.exists(tmp):
136 <                    raise CrabException("Additional input file not found: "+tmp)
137 <                tmp=string.strip(tmp)
138 <                self.additional_inbox_files.append(tmp)
135 >                tmp = string.strip(tmp)
136 >                dirname = ''
137 >                if not tmp[0]=="/": dirname = "."
138 >                files = []
139 >                if string.find(tmp,"*")>-1:
140 >                    files = glob.glob(os.path.join(dirname, tmp))
141 >                    if len(files)==0:
142 >                        raise CrabException("No additional input file found with this pattern: "+tmp)
143 >                else:
144 >                    files.append(tmp)
145 >                for file in files:
146 >                    if not os.path.exists(file):
147 >                        raise CrabException("Additional input file not found: "+file)
148 >                    pass
149 >                    self.additional_inbox_files.append(string.strip(file))
150                  pass
151              pass
152 <        except KeyError:
153 <            pass
136 <
137 <        # files per job
138 <        try:
139 <            self.filesPerJob = int(cfg_params['CMSSW.files_per_jobs']) #Daniele
140 <            self.selectFilesPerJob = 1
141 <        except KeyError:
142 <            self.filesPerJob = 0
143 <            self.selectFilesPerJob = 0
152 >            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
153 >        pass
154  
155          ## Events per job
156 <        try:
156 >        if cfg_params.has_key('CMSSW.events_per_job'):
157              self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
158              self.selectEventsPerJob = 1
159 <        except KeyError:
159 >        else:
160              self.eventsPerJob = -1
161              self.selectEventsPerJob = 0
162 <    
162 >
163          ## number of jobs
164 <        try:
164 >        if cfg_params.has_key('CMSSW.number_of_jobs'):
165              self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
166              self.selectNumberOfJobs = 1
167 <        except KeyError:
167 >        else:
168              self.theNumberOfJobs = 0
169              self.selectNumberOfJobs = 0
170  
171 <        ## source seed for pythia
162 <        try:
163 <            self.sourceSeed = int(cfg_params['CMSSW.pythia_seed'])
164 <        except KeyError:
165 <            self.sourceSeed = None
166 <            common.logger.debug(5,"No seed given")
167 <
168 <        if not (self.selectFilesPerJob + self.selectEventsPerJob + self.selectNumberOfJobs == 1 ):
169 <            msg = 'Must define either files_per_jobs or events_per_job or number_of_jobs'
170 <            raise CrabException(msg)
171 <
172 <        try:
171 >        if cfg_params.has_key('CMSSW.total_number_of_events'):
172              self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
173 <        except KeyError:
174 <            msg = 'Must define total_number_of_events'
175 <            raise CrabException(msg)
176 <        
177 <        CEBlackList = []
178 <        try:
179 <            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
180 <            for tmp in tmpBad:
182 <                tmp=string.strip(tmp)
183 <                CEBlackList.append(tmp)
184 <        except KeyError:
185 <            pass
186 <
187 <        self.reCEBlackList=[]
188 <        for bad in CEBlackList:
189 <            self.reCEBlackList.append(re.compile( bad ))
190 <
191 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
192 <
193 <        CEWhiteList = []
194 <        try:
195 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
196 <            for tmp in tmpGood:
197 <                tmp=string.strip(tmp)
198 <                CEWhiteList.append(tmp)
199 <        except KeyError:
200 <            pass
173 >            self.selectTotalNumberEvents = 1
174 >            if self.selectNumberOfJobs  == 1:
175 >                if (self.total_number_of_events != -1) and int(self.total_number_of_events) < int(self.theNumberOfJobs):
176 >                    msg = 'Must specify at least one event per job: total_number_of_events must be >= number_of_jobs.'
177 >                    raise CrabException(msg)
178 >        else:
179 >            self.total_number_of_events = 0
180 >            self.selectTotalNumberEvents = 0
181  
182 <        #print 'CEWhiteList: ',CEWhiteList
183 <        self.reCEWhiteList=[]
184 <        for Good in CEWhiteList:
185 <            self.reCEWhiteList.append(re.compile( Good ))
182 >        if self.pset != None:
183 >             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
184 >                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
185 >                 raise CrabException(msg)
186 >        else:
187 >             if (self.selectNumberOfJobs == 0):
188 >                 msg = 'Must specify number_of_jobs.'
189 >                 raise CrabException(msg)
190 >
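The check above requires exactly two of the three splitting parameters; the missing one is derived later by the jobSplitting* methods. A minimal illustrative sketch of that derivation (hypothetical helper, integer division as in the code below):

# Illustrative sketch: derive the third splitting parameter from the two given,
# mirroring what jobSplittingNoInput/jobSplittingByBlocks do further down.
def derive_third(total_events=None, events_per_job=None, number_of_jobs=None):
    if [total_events, events_per_job, number_of_jobs].count(None) != 1:
        raise ValueError('Must define exactly two of the three splitting parameters')
    if total_events is None:
        total_events = number_of_jobs * events_per_job
    elif number_of_jobs is None:
        number_of_jobs = int(total_events / events_per_job)   # any remainder becomes a warning in CRAB
    else:
        events_per_job = int(total_events / number_of_jobs)
    return total_events, events_per_job, number_of_jobs

# e.g. derive_third(total_events=10000, events_per_job=500) -> (10000, 500, 20)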
191 >        ## New method of dealing with seeds
192 >        self.incrementSeeds = []
193 >        self.preserveSeeds = []
194 >        if cfg_params.has_key('CMSSW.preserve_seeds'):
195 >            tmpList = cfg_params['CMSSW.preserve_seeds'].split(',')
196 >            for tmp in tmpList:
197 >                tmp = tmp.strip()
198 >                self.preserveSeeds.append(tmp)
199 >        if cfg_params.has_key('CMSSW.increment_seeds'):
200 >            tmpList = cfg_params['CMSSW.increment_seeds'].split(',')
201 >            for tmp in tmpList:
202 >                tmp = tmp.strip()
203 >                self.incrementSeeds.append(tmp)
204 >
205 >        ## FUTURE: Can remove in CRAB 2.4.0
206 >        self.sourceSeed    = cfg_params.get('CMSSW.pythia_seed',None)
207 >        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
208 >        self.sourceSeedG4  = cfg_params.get('CMSSW.g4_seed',None)
209 >        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
210 >        if self.sourceSeed or self.sourceSeedVtx or self.sourceSeedG4 or self.sourceSeedMix:
211 >            msg = 'pythia_seed, vtx_seed, g4_seed, and mix_seed are no longer valid settings. You must use increment_seeds or preserve_seeds'
212 >            raise CrabException(msg)
213  
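For illustration, how the two seed lists above end up populated from comma-separated configuration values (the module labels used here are hypothetical examples, not names taken from the revision):

# Illustrative only: filling the seed lists above from cfg_params values.
cfg_params = {'CMSSW.increment_seeds': 'generator, VtxSmeared',   # hypothetical labels
              'CMSSW.preserve_seeds':  'mix'}
incrementSeeds = [s.strip() for s in cfg_params.get('CMSSW.increment_seeds', '').split(',') if s.strip()]
preserveSeeds  = [s.strip() for s in cfg_params.get('CMSSW.preserve_seeds', '').split(',') if s.strip()]
# incrementSeeds -> ['generator', 'VtxSmeared'];  preserveSeeds -> ['mix']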
214 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
214 >        self.firstRun = cfg_params.get('CMSSW.first_run',None)
215  
216 <        self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
216 >        # Copy/return
217 >        self.copy_data = int(cfg_params.get('USER.copy_data',0))
218 >        self.return_data = int(cfg_params.get('USER.return_data',0))
219  
220          #DBSDLS-start
221 <        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
221 >        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
222          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
223          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
224 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
225          ## Perform the data location and discovery (based on DBS/DLS)
226          ## SL: Don't if NONE is specified as input (pythia use case)
227 <        common.analisys_common_info['sites']=None
227 >        blockSites = {}
228          if self.datasetPath:
229 <            self.DataDiscoveryAndLocation(cfg_params)
230 <        #DBSDLS-end          
229 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
230 >        #DBSDLS-end
231  
222        self.tgzNameWithPath = self.getTarBall(self.executable)
223    
232          ## Select Splitting
233 <        if self.selectNoInput: self.jobSplittingNoInput()
234 <        elif self.selectFilesPerJob or self.selectEventsPerJob or self.selectNumberOfJobs: self.jobSplittingPerFiles()
233 >        if self.selectNoInput:
234 >            if self.pset == None:
235 >                self.jobSplittingForScript()
236 >            else:
237 >                self.jobSplittingNoInput()
238          else:
239 <            msg = 'Don\'t know how to split...'
229 <            raise CrabException(msg)
239 >            self.jobSplittingByBlocks(blockSites)
240  
241 <        # modify Pset
242 <        try:
243 <            if (self.datasetPath): # standard job
244 <                #self.PsetEdit.maxEvent(self.eventsPerJob)
245 <                # always process all events in a file
246 <                self.PsetEdit.maxEvent("-1")
247 <                self.PsetEdit.inputModule("INPUT")
248 <
249 <            else:  # pythia like job
250 <                self.PsetEdit.maxEvent(self.eventsPerJob)
251 <                if (self.sourceSeed) :
252 <                    self.PsetEdit.pythiaSeed("INPUT","INPUTVTX")
253 <            self.PsetEdit.psetWriter(self.configFilename())
254 <        except:
255 <            msg='Error while manipuliating ParameterSet: exiting...'
256 <            raise CrabException(msg)
241 >        # modify Pset only the first time
242 >        if isNew:
243 >            if self.pset != None:
244 >                import PsetManipulator as pp
245 >                PsetEdit = pp.PsetManipulator(self.pset)
246 >                try:
247 >                    # Add FrameworkJobReport to parameter-set, set max events.
248 >                    # Reset later for data jobs by writeCFG which does all modifications
249 >                    PsetEdit.addCrabFJR(self.fjrFileName) # FUTURE: Job report addition not needed by CMSSW>1.5
250 >                    PsetEdit.maxEvent(self.eventsPerJob)
251 >                    PsetEdit.psetWriter(self.configFilename())
252 >                    ## If present, add TFileService to output files
253 >                    if not int(cfg_params.get('CMSSW.skip_TFileService_output',0)):
254 >                        tfsOutput = PsetEdit.getTFileService()
255 >                        if tfsOutput:
256 >                            if tfsOutput in self.output_file:
257 >                                common.logger.debug(5,"Output from TFileService "+tfsOutput+" already in output files")
258 >                            else:
259 >                                outfileflag = True #output found
260 >                                self.output_file.append(tfsOutput)
261 >                                common.logger.message("Adding "+tfsOutput+" to output files (from TFileService)")
262 >                            pass
263 >                        pass
264 >                    ## If present and requested, add PoolOutputModule to output files
265 >                    if int(cfg_params.get('CMSSW.get_edm_output',0)):
266 >                        edmOutput = PsetEdit.getPoolOutputModule()
267 >                        if edmOutput:
268 >                            if edmOutput in self.output_file:
269 >                                common.logger.debug(5,"Output from PoolOutputModule "+edmOutput+" already in output files")
270 >                            else:
271 >                                self.output_file.append(edmOutput)
272 >                                common.logger.message("Adding "+edmOutput+" to output files (from PoolOutputModule)")
273 >                            pass
274 >                        pass
275 >                except CrabException:
276 >                    msg='Error while manipulating ParameterSet: exiting...'
277 >                    raise CrabException(msg)
278 >            ## Prepare inputSandbox TarBall (only the first time)
279 >            self.tgzNameWithPath = self.getTarBall(self.executable)
280  
281      def DataDiscoveryAndLocation(self, cfg_params):
282  
283 +        import DataDiscovery
284 +        import DataLocation
285          common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
286  
287          datasetPath=self.datasetPath
288  
254        ## TODO
255        dataTiersList = ""
256        dataTiers = dataTiersList.split(',')
257
289          ## Contact the DBS
290 +        common.logger.message("Contacting Data Discovery Services ...")
291          try:
292 <            self.pubdata=DataDiscovery_EDM.DataDiscovery_EDM(datasetPath, dataTiers, cfg_params)
292 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
293              self.pubdata.fetchDBSInfo()
294  
295 <        except DataDiscovery_EDM.NotExistingDatasetError, ex :
295 >        except DataDiscovery.NotExistingDatasetError, ex :
296              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
297              raise CrabException(msg)
298 <
267 <        except DataDiscovery_EDM.NoDataTierinProvenanceError, ex :
298 >        except DataDiscovery.NoDataTierinProvenanceError, ex :
299              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
300              raise CrabException(msg)
301 <        except DataDiscovery_EDM.DataDiscoveryError, ex:
302 <            msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
301 >        except DataDiscovery.DataDiscoveryError, ex:
302 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
303              raise CrabException(msg)
304  
305 <        ## get list of all required data in the form of dbs paths  (dbs path = /dataset/datatier/owner)
306 <        ## self.DBSPaths=self.pubdata.getDBSPaths()
307 <        common.logger.message("Required data are :"+self.datasetPath)
308 <
278 <        filesbyblock=self.pubdata.getFiles()
279 < #        print filesbyblock
280 <        self.AllInputFiles=filesbyblock.values()
281 <        self.files = self.AllInputFiles        
305 >        self.filesbyblock=self.pubdata.getFiles()
306 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
307 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
308 >        self.parentFiles=self.pubdata.getParent()
309  
310          ## get max number of events
311 <        #common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
285 <        self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
286 <        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
311 >        self.maxEvents=self.pubdata.getMaxEvents()
312  
313          ## Contact the DLS and build a list of sites hosting the fileblocks
314          try:
315 <            dataloc=DataLocation_EDM.DataLocation_EDM(filesbyblock.keys(),cfg_params)
315 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
316              dataloc.fetchDLSInfo()
317 <        except DataLocation_EDM.DataLocationError , ex:
317 >        except DataLocation.DataLocationError , ex:
318              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
319              raise CrabException(msg)
295        
296        allsites=dataloc.getSites()
297        common.logger.debug(5,"sites are %s"%allsites)
298        sites=self.checkBlackList(allsites)
299        common.logger.debug(5,"sites are (after black list) %s"%sites)
300        sites=self.checkWhiteList(sites)
301        common.logger.debug(5,"sites are (after white list) %s"%sites)
320  
303        if len(sites)==0:
304            msg = 'No sites hosting all the needed data! Exiting... '
305            raise CrabException(msg)
321  
322 <        common.logger.message("List of Sites ("+str(len(sites))+") hosting the data : "+str(sites))
323 <        common.logger.debug(6, "List of Sites: "+str(sites))
324 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
325 <        self.setParam_('TargetCE', ','.join(sites))
326 <        return
327 <    
328 <    def jobSplittingPerFiles(self):
329 <        """
330 <        Perform job splitting based on number of files to be accessed per job
331 <        """
332 <        common.logger.debug(5,'Splitting per input files')
333 <        common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
319 <        common.logger.message('Available '+str(self.maxEvents)+' events in total ')
320 <        common.logger.message('Required '+str(self.filesPerJob)+' files per job ')
321 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
322 <        common.logger.message('Required '+str(self.eventsPerJob)+' events per job')
322 >        sites = dataloc.getSites()
323 >        allSites = []
324 >        listSites = sites.values()
325 >        for listSite in listSites:
326 >            for oneSite in listSite:
327 >                allSites.append(oneSite)
328 >        allSites = self.uniquelist(allSites)
329 >
330 >        # screen output
331 >        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
332 >
333 >        return sites
334  
335 <        ## if asked to process all events, do it
336 <        if self.total_number_of_events == -1:
337 <            self.total_number_of_events=self.maxEvents
335 >    def jobSplittingByBlocks(self, blockSites):
336 >        """
337 >        Perform job splitting. Jobs run over an integer number of files
338 >        and no more than one block.
339 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
340 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
341 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
342 >                  self.maxEvents, self.filesbyblock
343 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
344 >              self.total_number_of_jobs - Total # of jobs
345 >              self.list_of_args - File(s) job will run on (a list of lists)
346 >        """
347 >
348 >        # ---- Handle the possible job splitting configurations ---- #
349 >        if (self.selectTotalNumberEvents):
350 >            totalEventsRequested = self.total_number_of_events
351 >        if (self.selectEventsPerJob):
352 >            eventsPerJobRequested = self.eventsPerJob
353 >            if (self.selectNumberOfJobs):
354 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
355 >
356 >        # If user requested all the events in the dataset
357 >        if (totalEventsRequested == -1):
358 >            eventsRemaining=self.maxEvents
359 >        # If user requested more events than are in the dataset
360 >        elif (totalEventsRequested > self.maxEvents):
361 >            eventsRemaining = self.maxEvents
362 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
363 >        # If user requested less events than are in the dataset
364          else:
365 <            if self.total_number_of_events>self.maxEvents:
329 <                common.logger.message("Asked "+str(self.total_number_of_events)+" but only "+str(self.maxEvents)+" available.")
330 <                self.total_number_of_events=self.maxEvents
331 <            pass
365 >            eventsRemaining = totalEventsRequested
366  
367 <        ## TODO: SL need to have (from DBS) a detailed list of how many events per each file
368 <        n_tot_files = (len(self.files[0]))
369 <        ## SL: this is wrong if the files have different number of events
336 <        evPerFile = int(self.maxEvents)/n_tot_files
337 <
338 <        common.logger.debug(5,'Events per File '+str(evPerFile))
339 <
340 <        ## compute job splitting parameters: filesPerJob, eventsPerJob and theNumberOfJobs
341 <        if self.selectFilesPerJob:
342 <            ## user define files per event.
343 <            filesPerJob = self.filesPerJob
344 <            eventsPerJob = filesPerJob*evPerFile
345 <            theNumberOfJobs = int(self.total_number_of_events*1./eventsPerJob)
346 <            check = int(self.total_number_of_events) - (theNumberOfJobs*eventsPerJob)
347 <            if check > 0:
348 <                theNumberOfJobs +=1
349 <                filesLastJob = int(check*1./evPerFile+0.5)
350 <                common.logger.message('Warning: last job will be created with '+str(check)+' files')
351 <            else:
352 <                filesLastJob = filesPerJob
367 >        # If user requested more events per job than are in the dataset
368 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
369 >            eventsPerJobRequested = self.maxEvents
370  
371 <        elif self.selectNumberOfJobs:
372 <            ## User select the number of jobs: last might be bigger to match request of events
356 <            theNumberOfJobs =  self.theNumberOfJobs
357 <
358 <            eventsPerJob = self.total_number_of_events/theNumberOfJobs
359 <            filesPerJob = int(eventsPerJob/evPerFile)
360 <            if (filesPerJob==0) : filesPerJob=1
361 <            check = int(self.total_number_of_events) - (int(theNumberOfJobs)*filesPerJob*evPerFile)
362 <            if not check == 0:
363 <                if check<0:
364 <                    missingFiles = int(check/evPerFile)
365 <                    additionalJobs = int(missingFiles/filesPerJob)
366 <                    #print missingFiles, additionalJobs
367 <                    theNumberOfJobs+=additionalJobs
368 <                    common.logger.message('Warning: will create only '+str(theNumberOfJobs)+' jobs')
369 <                    check = int(self.total_number_of_events) - (int(theNumberOfJobs)*filesPerJob*evPerFile)
370 <                    
371 <                if check >0 :
372 <                    filesLastJob = filesPerJob+int(check*1./evPerFile+0.5)
373 <                    common.logger.message('Warning: last job will be created with '+str(filesLastJob*evPerFile)+' events')
374 <                else:
375 <                    filesLastJob = filesPerJob
376 <            else:
377 <                filesLastJob = filesPerJob
378 <        elif self.selectEventsPerJob:
379 <            # SL case if asked events per job
380 <            ## estimate the number of files per job to match the user requirement
381 <            filesPerJob = int(float(self.eventsPerJob)/float(evPerFile))
382 <            if filesPerJob==0: filesPerJob=1
383 <            common.logger.debug(5,"filesPerJob "+str(filesPerJob))
384 <            if (filesPerJob==0): filesPerJob=1
385 <            eventsPerJob=filesPerJob*evPerFile
386 <            theNumberOfJobs = int(self.total_number_of_events)/int(eventsPerJob)
387 <            check = int(self.total_number_of_events) - (int(theNumberOfJobs)*eventsPerJob)
388 <            if not check == 0:
389 <                missingFiles = int(check/evPerFile)
390 <                additionalJobs = int(missingFiles/filesPerJob)
391 <                if ( additionalJobs>0) : theNumberOfJobs+=additionalJobs
392 <                check = int(self.total_number_of_events) - (int(theNumberOfJobs)*eventsPerJob)
393 <                if not check == 0:
394 <                    if (check <0 ):
395 <                        filesLastJob = filesPerJob+int(check*1./evPerFile-0.5)
396 <                    else:
397 <                        theNumberOfJobs+=1
398 <                        filesLastJob = int(check*1./evPerFile+0.5)
371 >        # For user info at end
372 >        totalEventCount = 0
373  
374 <                    common.logger.message('Warning: last job will be created with '+str(filesLastJob*evPerFile)+' events')
375 <                else:
402 <                    filesLastJob = filesPerJob
403 <            else:
404 <                filesLastJob = filesPerJob
405 <        
406 <        self.total_number_of_jobs = theNumberOfJobs
374 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
375 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
376  
377 <        totalEventsToBeUsed=theNumberOfJobs*filesPerJob*evPerFile
378 <        if not check == 0:
410 <        #    print (theNumberOfJobs-1)*filesPerJob*evPerFile,filesLastJob*evPerFile
411 <            totalEventsToBeUsed=(theNumberOfJobs-1)*filesPerJob*evPerFile+filesLastJob*evPerFile
377 >        if (self.selectNumberOfJobs):
378 >            common.logger.message("May not create the exact number_of_jobs requested.")
379  
380 <        common.logger.message(str(self.total_number_of_jobs)+' jobs will be created, each for '+str(filesPerJob*evPerFile)+' events, for a total of '+str(totalEventsToBeUsed)+' events')
380 >        if ( self.ncjobs == 'all' ) :
381 >            totalNumberOfJobs = 999999999
382 >        else :
383 >            totalNumberOfJobs = self.ncjobs
384  
385 <        totalFilesToBeUsed=filesPerJob*(theNumberOfJobs-1)+filesLastJob
385 >        blocks = blockSites.keys()
386 >        blockCount = 0
387 >        # Backup variable in case self.maxEvents counted events in a non-included block
388 >        numBlocksInDataset = len(blocks)
389  
390 <        ## set job arguments (files)
390 >        jobCount = 0
391          list_of_lists = []
419        lastFile=0
420        for i in range(0, int(totalFilesToBeUsed), filesPerJob)[:-1]:
421            parString = "\\{"
422            
423            lastFile=i+filesPerJob
424            params = self.files[0][i: lastFile]
425            for i in range(len(params) - 1):
426                parString += '\\\"' + params[i] + '\\\"\,'
427            
428            parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
429            list_of_lists.append([parString])
430            pass
392  
393 <        ## last job
394 <        parString = "\\{"
395 <        
396 <        params = self.files[0][lastFile: lastFile+filesLastJob]
397 <        for i in range(len(params) - 1):
398 <            parString += '\\\"' + params[i] + '\\\"\,'
399 <        
400 <        parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
401 <        list_of_lists.append([parString])
402 <        pass
393 >        # list tracking which jobs belong to which block
394 >        jobsOfBlock = {}
395 >
396 >        # ---- Iterate over the blocks in the dataset until ---- #
397 >        # ---- we've met the requested total # of events    ---- #
398 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
399 >            block = blocks[blockCount]
400 >            blockCount += 1
401 >            if block not in jobsOfBlock.keys() :
402 >                jobsOfBlock[block] = []
403 >
404 >            if self.eventsbyblock.has_key(block) :
405 >                numEventsInBlock = self.eventsbyblock[block]
406 >                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
407 >
408 >                files = self.filesbyblock[block]
409 >                numFilesInBlock = len(files)
410 >                if (numFilesInBlock <= 0):
411 >                    continue
412 >                fileCount = 0
413 >
414 >                # ---- New block => New job ---- #
415 >                parString = ""
416 >                # counter for number of events in files currently worked on
417 >                filesEventCount = 0
418 >                # flag if next while loop should touch new file
419 >                newFile = 1
420 >                # events to skip at the start of the job (within the collected files)
421 >                jobSkipEventCount = 0
422 >
423 >                # ---- Iterate over the files in the block until we've met the requested ---- #
424 >                # ---- total # of events or we've gone over all the files in this block  ---- #
425 >                pString=''
426 >                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
427 >                    file = files[fileCount]
428 >                    if self.useParent:
429 >                        parent = self.parentFiles[file]
430 >                        for f in parent :
431 >                            pString += '\\\"' + f + '\\\"\,'
432 >                        common.logger.debug(6, "File "+str(file)+" has the following parents: "+str(parent))
433 >                        common.logger.write("File "+str(file)+" has the following parents: "+str(parent))
434 >                    if newFile :
435 >                        try:
436 >                            numEventsInFile = self.eventsbyfile[file]
437 >                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
438 >                            # increase filesEventCount
439 >                            filesEventCount += numEventsInFile
440 >                            # Add file to current job
441 >                            parString += '\\\"' + file + '\\\"\,'
442 >                            newFile = 0
443 >                        except KeyError:
444 >                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
445 >
446 >                    eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining)
447 >                    # if fewer events remain in the collected files than eventsPerJobRequested
448 >                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested):
449 >                        # if last file in block
450 >                        if ( fileCount == numFilesInBlock-1 ) :
451 >                            # end job using last file, use remaining events in block
452 >                            # close job and touch new file
453 >                            fullString = parString[:-2]
454 >                            if self.useParent:
455 >                                fullParentString = pString[:-2]
456 >                                list_of_lists.append([fullString,fullParentString,str(-1),str(jobSkipEventCount)])
457 >                            else:
458 >                                list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
459 >                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
460 >                            self.jobDestination.append(blockSites[block])
461 >                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
462 >                            # fill jobs of block dictionary
463 >                            jobsOfBlock[block].append(jobCount+1)
464 >                            # reset counter
465 >                            jobCount = jobCount + 1
466 >                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
467 >                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
468 >                            jobSkipEventCount = 0
469 >                            # reset file
470 >                            pString = ""
471 >                            parString = ""
472 >                            filesEventCount = 0
473 >                            newFile = 1
474 >                            fileCount += 1
475 >                        else :
476 >                            # go to next file
477 >                            newFile = 1
478 >                            fileCount += 1
479 >                    # if the events collected so far exactly match eventsPerJobRequested
480 >                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
481 >                        # close job and touch new file
482 >                        fullString = parString[:-2]
483 >                        if self.useParent:
484 >                            fullParentString = pString[:-2]
485 >                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
486 >                        else:
487 >                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
488 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
489 >                        self.jobDestination.append(blockSites[block])
490 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
491 >                        jobsOfBlock[block].append(jobCount+1)
492 >                        # reset counter
493 >                        jobCount = jobCount + 1
494 >                        totalEventCount = totalEventCount + eventsPerJobRequested
495 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
496 >                        jobSkipEventCount = 0
497 >                        # reset file
498 >                        pString = ""
499 >                        parString = ""
500 >                        filesEventCount = 0
501 >                        newFile = 1
502 >                        fileCount += 1
503 >
504 >                    # if more events remain in the collected files than eventsPerJobRequested
505 >                    else :
506 >                        # close job but don't touch new file
507 >                        fullString = parString[:-2]
508 >                        if self.useParent:
509 >                            fullParentString = pString[:-2]
510 >                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
511 >                        else:
512 >                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
513 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
514 >                        self.jobDestination.append(blockSites[block])
515 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
516 >                        jobsOfBlock[block].append(jobCount+1)
517 >                        # increase counter
518 >                        jobCount = jobCount + 1
519 >                        totalEventCount = totalEventCount + eventsPerJobRequested
520 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
521 >                        # calculate skip events for last file
522 >                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
523 >                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
524 >                        # remove all but the last file
525 >                        filesEventCount = self.eventsbyfile[file]
526 >                        if self.useParent:
527 >                            for f in parent : pString += '\\\"' + f + '\\\"\,'
528 >                        parString = '\\\"' + file + '\\\"\,'
529 >                    pass # END if
530 >                pass # END while (iterate over files in the block)
531 >        pass # END while (iterate over blocks in the dataset)
532 >        self.ncjobs = self.total_number_of_jobs = jobCount
533 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
534 >            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
535 >        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
536 >
537 >        # screen output
538 >        screenOutput = "List of jobs and available destination sites:\n\n"
539 >
540 >        # keep track of blocks with no sites to print a warning at the end
541 >        noSiteBlock = []
542 >        bloskNoSite = []
543 >
544 >        blockCounter = 0
545 >        for block in blocks:
546 >            if block in jobsOfBlock.keys() :
547 >                blockCounter += 1
548 >                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),
549 >                    ','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
550 >                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
551 >                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
552 >                    bloskNoSite.append( blockCounter )
553 >
554 >        common.logger.message(screenOutput)
555 >        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
556 >            msg = 'WARNING: No sites are hosting any part of data for block(s):\n                '
557 >            virgola = ""
558 >            if len(bloskNoSite) > 1:
559 >                virgola = ","
560 >            for block in bloskNoSite:
561 >                msg += ' ' + str(block) + virgola
562 >            msg += '\n               Related jobs:\n                 '
563 >            virgola = ""
564 >            if len(noSiteBlock) > 1:
565 >                virgola = ","
566 >            for range_jobs in noSiteBlock:
567 >                msg += str(range_jobs) + virgola
568 >            msg += '\n               will not be submitted and the above block(s) of data cannot be analyzed!\n'
569 >            if self.cfg_params.has_key('EDG.se_white_list'):
570 >                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
571 >                msg += '(Hint: By whitelisting you force the jobs to run only at the listed site(s).\n'
572 >                msg += 'Please check that the dataset is available at those sites!)\n'
573 >            if self.cfg_params.has_key('EDG.ce_white_list'):
574 >                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
575 >                msg += '(Hint: By whitelisting you force the jobs to run only at the listed site(s).\n'
576 >                msg += 'Please check that the dataset is available at those sites!)\n'
577 >
578 >            common.logger.message(msg)
579  
580          self.list_of_args = list_of_lists
444        # print self.list_of_args[0]
581          return
582  
583      def jobSplittingNoInput(self):
# Line 449 | Line 585 | class Cmssw(JobType):
585          Perform job splitting based on number of event per job
586          """
587          common.logger.debug(5,'Splitting per events')
588 <        common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
589 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
590 <        common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
588 >
589 >        if (self.selectEventsPerJob):
590 >            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
591 >        if (self.selectNumberOfJobs):
592 >            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
593 >        if (self.selectTotalNumberEvents):
594 >            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
595  
596          if (self.total_number_of_events < 0):
597              msg='Cannot split jobs per Events with "-1" as total number of events'
598              raise CrabException(msg)
599  
600          if (self.selectEventsPerJob):
601 <            self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
601 >            if (self.selectTotalNumberEvents):
602 >                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
603 >            elif(self.selectNumberOfJobs) :
604 >                self.total_number_of_jobs =self.theNumberOfJobs
605 >                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
606 >
607          elif (self.selectNumberOfJobs) :
608              self.total_number_of_jobs = self.theNumberOfJobs
609              self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
# Line 470 | Line 615 | class Cmssw(JobType):
615  
616          common.logger.debug(5,'Check  '+str(check))
617  
618 <        common.logger.message(str(self.total_number_of_jobs)+' jobs will be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
618 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' events, for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
619          if check > 0:
620 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but will do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
476 <
620 >            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' events but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
621  
622          # argument is seed number.$i
623          self.list_of_args = []
624          for i in range(self.total_number_of_jobs):
625 <            if (self.sourceSeed):
626 <                self.list_of_args.append([(str(self.sourceSeed)+str(i))])
627 <            else:
628 <                self.list_of_args.append([str(i)])
629 <        #print self.list_of_args
625 >            ## Since there is no input, any site is good
626 >            self.jobDestination.append([""]) #must be empty to write correctly the xml
627 >            args=[]
628 >            if (self.firstRun):
629 >                ## pythia first run
630 >                args.append(str(self.firstRun)+str(i))
631 >            self.list_of_args.append(args)
632 >
633 >        return
634 >
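A quick worked example of the arithmetic in jobSplittingNoInput above, using made-up numbers:

# Worked example of the no-input splitting arithmetic above (hypothetical values).
total_number_of_events = 10500
eventsPerJob = 1000
total_number_of_jobs = int(total_number_of_events / eventsPerJob)           # -> 10
check = int(total_number_of_events) - (total_number_of_jobs * eventsPerJob) # -> 500
if check > 0:
    # corresponds to: "Warning: asked 10500 events but can do only 10000"
    print 'Warning: asked %d events but can do only %d' % (
        total_number_of_events, total_number_of_jobs * eventsPerJob)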
635 >
636 >    def jobSplittingForScript(self):
637 >        """
638 >        Perform job splitting based on number of job
639 >        """
640 >        common.logger.debug(5,'Splitting per job')
641 >        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
642  
643 +        self.total_number_of_jobs = self.theNumberOfJobs
644 +
645 +        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
646 +
647 +        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
648 +
649 +        # argument is seed number.$i
650 +        self.list_of_args = []
651 +        for i in range(self.total_number_of_jobs):
652 +            self.jobDestination.append([""])
653 +            self.list_of_args.append([str(i)])
654          return
655  
656 <    def split(self, jobParams):
657 <
491 <        common.jobDB.load()
492 <        #### Fabio
656 >    def split(self, jobParams,firstJobID):
657 >
658          njobs = self.total_number_of_jobs
659          arglist = self.list_of_args
660          # create the empty structure
661          for i in range(njobs):
662              jobParams.append("")
498        
499        for job in range(njobs):
500            jobParams[job] = arglist[job]
501            # print str(arglist[job])
502            # print jobParams[job]
503            common.jobDB.setArguments(job, jobParams[job])
663  
664 <        common.jobDB.save()
664 >        listID=[]
665 >        listField=[]
666 >        for id in range(njobs):
667 >            job = id + int(firstJobID)
668 >            jobParams[id] = arglist[id]
669 >            listID.append(job+1)
670 >            job_ToSave ={}
671 >            concString = ' '
672 >            argu=''
673 >            if len(jobParams[id]):
674 >                argu +=   concString.join(jobParams[id] )
675 >            job_ToSave['arguments']= str(job+1)+' '+argu
676 >            job_ToSave['dlsDestination']= self.jobDestination[id]
677 >            listField.append(job_ToSave)
678 >            msg="Job "+str(job)+" Arguments:   "+str(job+1)+" "+argu+"\n"  \
679 >            +"                     Destination: "+str(self.jobDestination[id])
680 >            common.logger.debug(5,msg)
681 >        common._db.updateJob_(listID,listField)
682 >        self.argsList = (len(jobParams[0])+1)
683 >
684          return
685 <    
508 <    def getJobTypeArguments(self, nj, sched):
509 <        result = ''
510 <        for i in common.jobDB.arguments(nj):
511 <            result=result+str(i)+" "
512 <        return result
513 <  
685 >
686      def numberOfJobs(self):
515        # Fabio
687          return self.total_number_of_jobs
688  
518    def checkBlackList(self, allSites):
519        if len(self.reCEBlackList)==0: return allSites
520        sites = []
521        for site in allSites:
522            common.logger.debug(10,'Site '+site)
523            good=1
524            for re in self.reCEBlackList:
525                if re.search(site):
526                    common.logger.message('CE in black list, skipping site '+site)
527                    good=0
528                pass
529            if good: sites.append(site)
530        if len(sites) == 0:
531            common.logger.debug(3,"No sites found after BlackList")
532        return sites
533
534    def checkWhiteList(self, allSites):
535
536        if len(self.reCEWhiteList)==0: return allSites
537        sites = []
538        for site in allSites:
539            good=0
540            for re in self.reCEWhiteList:
541                if re.search(site):
542                    common.logger.debug(5,'CE in white list, adding site '+site)
543                    good=1
544                if not good: continue
545                sites.append(site)
546        if len(sites) == 0:
547            common.logger.message("No sites found after WhiteList\n")
548        else:
549            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
550        return sites
551
689      def getTarBall(self, exe):
690          """
691          Return the TarBall with lib and exe
692          """
693 <        
557 <        # if it exist, just return it
558 <        self.tgzNameWithPath = common.work_space.shareDir()+self.tgz_name
693 >        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
694          if os.path.exists(self.tgzNameWithPath):
695              return self.tgzNameWithPath
696  
# Line 568 | Line 703 | class Cmssw(JobType):
703  
704          # First of all declare the user Scram area
705          swArea = self.scram.getSWArea_()
571        #print "swArea = ", swArea
572        swVersion = self.scram.getSWVersion()
573        #print "swVersion = ", swVersion
706          swReleaseTop = self.scram.getReleaseTop_()
707 <        #print "swReleaseTop = ", swReleaseTop
576 <        
707 >
708          ## check if working area is release top
709          if swReleaseTop == '' or swArea == swReleaseTop:
710 +            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
711              return
712  
713 <        filesToBeTarred = []
714 <        ## First find the executable
715 <        if (self.executable != ''):
716 <            exeWithPath = self.scram.findFile_(executable)
717 < #           print exeWithPath
718 <            if ( not exeWithPath ):
719 <                raise CrabException('User executable '+executable+' not found')
720 <
721 <            ## then check if it's private or not
722 <            if exeWithPath.find(swReleaseTop) == -1:
723 <                # the exe is private, so we must ship
724 <                common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
725 <                path = swArea+'/'
726 <                exe = string.replace(exeWithPath, path,'')
727 <                filesToBeTarred.append(exe)
728 <                pass
729 <            else:
730 <                # the exe is from release, we'll find it on WN
713 >        import tarfile
714 >        try: # create tar ball
715 >            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
716 >            ## First find the executable
717 >            if (self.executable != ''):
718 >                exeWithPath = self.scram.findFile_(executable)
719 >                if ( not exeWithPath ):
720 >                    raise CrabException('User executable '+executable+' not found')
721 >
722 >                ## then check if it's private or not
723 >                if exeWithPath.find(swReleaseTop) == -1:
724 >                    # the exe is private, so we must ship
725 >                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
726 >                    path = swArea+'/'
727 >                    # distinguish case when script is in user project area or given by full path somewhere else
728 >                    if exeWithPath.find(path) >= 0 :
729 >                        exe = string.replace(exeWithPath, path,'')
730 >                        tar.add(path+exe,exe)
731 >                    else :
732 >                        tar.add(exeWithPath,os.path.basename(executable))
733 >                    pass
734 >                else:
735 >                    # the exe is from release, we'll find it on WN
736 >                    pass
737 >
738 >            ## Now get the libraries: only those in local working area
739 >            libDir = 'lib'
740 >            lib = swArea+'/' +libDir
741 >            common.logger.debug(5,"lib "+lib+" to be tarred")
742 >            if os.path.exists(lib):
743 >                tar.add(lib,libDir)
744 >
745 >            ## Now check if module dir is present
746 >            moduleDir = 'module'
747 >            module = swArea + '/' + moduleDir
748 >            if os.path.isdir(module):
749 >                tar.add(module,moduleDir)
750 >
751 >            ## Now check if any data dir(s) is present
752 >            self.dataExist = False
753 >            todo_list = [(i, i) for i in  os.listdir(swArea+"/src")]
754 >            while len(todo_list):
755 >                entry, name = todo_list.pop()
756 >                if name.startswith('crab_0_') or  name.startswith('.') or name == 'CVS':
757 >                    continue
758 >                if os.path.isdir(swArea+"/src/"+entry):
759 >                    entryPath = entry + '/'
760 >                    todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
761 >                    if name == 'data':
762 >                        self.dataExist=True
763 >                        common.logger.debug(5,"data "+entry+" to be tarred")
764 >                        tar.add(swArea+"/src/"+entry,"src/"+entry)
765 >                    pass
766                  pass
767 <
768 <        ## Now get the libraries: only those in local working area
769 <        libDir = 'lib'
770 <        lib = swArea+'/' +libDir
771 <        common.logger.debug(5,"lib "+lib+" to be tarred")
772 <        if os.path.exists(lib):
773 <            filesToBeTarred.append(libDir)
774 <
775 <        ## Now check if module dir is present
776 <        moduleDir = 'module'
777 <        if os.path.isdir(swArea+'/'+moduleDir):
778 <            filesToBeTarred.append(moduleDir)
779 <
780 <        ## Now check if the Data dir is present
781 <        dataDir = 'src/Data/'
782 <        if os.path.isdir(swArea+'/'+dataDir):
783 <            filesToBeTarred.append(dataDir)
784 <
785 <        ## Create the tar-ball
786 <        if len(filesToBeTarred)>0:
787 <            cwd = os.getcwd()
788 <            os.chdir(swArea)
789 <            tarcmd = 'tar zcvf ' + self.tgzNameWithPath + ' '
790 <            for line in filesToBeTarred:
791 <                tarcmd = tarcmd + line + ' '
792 <            cout = runCommand(tarcmd)
793 <            if not cout:
794 <                raise CrabException('Could not create tar-ball')
795 <            os.chdir(cwd)
796 <        else:
797 <            common.logger.debug(5,"No files to be to be tarred")
798 <        
799 <        return
800 <        
801 <    def wsSetupEnvironment(self, nj):
767 >
768 >            ### CMSSW ParameterSet
769 >            if not self.pset is None:
770 >                cfg_file = common.work_space.jobDir()+self.configFilename()
771 >                tar.add(cfg_file,self.configFilename())
772 >                common.logger.debug(5,"File added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
773 >
774 >
775 >            ## Add ProdCommon dir to tar
776 >            prodcommonDir = './'
777 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
778 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools','ProdCommon/Core','ProdCommon/MCPayloads', 'IMProv']
779 >            for file in neededStuff:
780 >                tar.add(prodcommonPath+file,prodcommonDir+file)
781 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
782 >
783 >            ##### ML stuff
784 >            ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
785 >            path=os.environ['CRABDIR'] + '/python/'
786 >            for file in ML_file_list:
787 >                tar.add(path+file,file)
788 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
789 >
790 >            ##### Utils
791 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py']
792 >            for file in Utils_file_list:
793 >                tar.add(path+file,file)
794 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
795 >
796 >            ##### AdditionalFiles
797 >            for file in self.additional_inbox_files:
798 >                tar.add(file,string.split(file,'/')[-1])
799 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
800 >
801 >            tar.close()
802 >        except IOError:
803 >            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
804 >        except tarfile.TarError:
805 >            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
806 >
807 >        ## check for tarball size
808 >        tarballinfo = os.stat(self.tgzNameWithPath)
809 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
810 >            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit, which is not supported by the GRID submission system in use. Please make sure that no unnecessary files are left in the data directories of your local CMSSW project area, as they are automatically packed into the input sandbox.')
811 >
812 >        ## create tar-ball with ML stuff
813 >
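The guard above converts the tarball's on-disk size to MB and compares it with the configured EDG.maxtarballsize limit. A minimal standalone sketch of the same check (function name and error type are illustrative, not CRAB API):

    import os

    def check_tarball_size(tgz_path, max_mb):
        # Same arithmetic as above: st_size is in bytes, the limit in MB.
        size_mb = float(os.stat(tgz_path).st_size) / 1024.0 / 1024.0
        if size_mb > max_mb:
            raise RuntimeError('Input sandbox of %.1f MB exceeds the %.1f MB limit'
                               % (size_mb, max_mb))
        return size_mb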
814 >    def wsSetupEnvironment(self, nj=0):
815          """
816          Returns part of a job script which prepares
817          the execution environment for the job 'nj'.
818          """
819 +        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
820 +            psetName = 'pset.py'
821 +        else:
822 +            psetName = 'pset.cfg'
823          # Prepare JobType-independent part
824 <        txt = ''
825 <  
826 <        ## OLI_Daniele at this level  middleware already known
643 <
644 <        txt += 'if [ $middleware == LCG ]; then \n'
824 >        txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
825 >        txt += 'echo ">>> setup environment"\n'
826 >        txt += 'if [ $middleware == LCG ]; then \n'
827          txt += self.wsSetupCMSLCGEnvironment_()
828          txt += 'elif [ $middleware == OSG ]; then\n'
829 <        txt += '    time=`date -u +"%s"`\n'
830 <        txt += '    WORKING_DIR=$OSG_WN_TMP/cms_$time\n'
831 <        txt += '    echo "Creating working directory: $WORKING_DIR"\n'
832 <        txt += '    /bin/mkdir -p $WORKING_DIR\n'
833 <        txt += '    if [ ! -d $WORKING_DIR ] ;then\n'
652 <        txt += '        echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
653 <        txt += '        echo "JOB_EXIT_STATUS = 10016"\n'
654 <        txt += '        echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
655 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
656 <        txt += '        rm -f $RUNTIME_AREA/$repo \n'
657 <        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
658 <        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
659 <        txt += '        exit 1\n'
829 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
830 >        txt += '    if [ ! $? == 0 ] ;then\n'
831 >        txt += '        echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
832 >        txt += '        job_exit_code=10016\n'
833 >        txt += '        func_exit\n'
834          txt += '    fi\n'
835 +        txt += '    echo ">>> Created working directory: $WORKING_DIR"\n'
836          txt += '\n'
837          txt += '    echo "Change to working directory: $WORKING_DIR"\n'
838          txt += '    cd $WORKING_DIR\n'
839 <        txt += self.wsSetupCMSOSGEnvironment_()
839 >        txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
840 >        txt += self.wsSetupCMSOSGEnvironment_()
841          txt += 'fi\n'
842  
843          # Prepare JobType-specific part
844          scram = self.scram.commandName()
845          txt += '\n\n'
846 <        txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
846 >        txt += 'echo ">>> specific cmssw setup environment:"\n'
847 >        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
848          txt += scram+' project CMSSW '+self.version+'\n'
849          txt += 'status=$?\n'
850          txt += 'if [ $status != 0 ] ; then\n'
851 <        txt += '   echo "SET_EXE_ENV 10034 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
852 <        txt += '   echo "JOB_EXIT_STATUS = 10034"\n'
853 <        txt += '   echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n'
677 <        txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
678 <        txt += '   rm -f $RUNTIME_AREA/$repo \n'
679 <        txt += '   echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
680 <        txt += '   echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
681 <        ## OLI_Daniele
682 <        txt += '    if [ $middleware == OSG ]; then \n'
683 <        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
684 <        txt += '        cd $RUNTIME_AREA\n'
685 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
686 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
687 <        txt += '            echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
688 <        txt += '            echo "JOB_EXIT_STATUS = 10018"\n'
689 <        txt += '            echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
690 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
691 <        txt += '            rm -f $RUNTIME_AREA/$repo \n'
692 <        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
693 <        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
694 <        txt += '        fi\n'
695 <        txt += '    fi \n'
696 <        txt += '   exit 1 \n'
851 >        txt += '    echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n'
852 >        txt += '    job_exit_code=10034\n'
853 >        txt += '    func_exit\n'
854          txt += 'fi \n'
698        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
855          txt += 'cd '+self.version+'\n'
856 <        ### needed grep for bug in scramv1 ###
856 >        txt += 'SOFTWARE_DIR=`pwd`\n'
857 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
858          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
859 <
859 >        txt += 'if [ $? != 0 ] ; then\n'
860 >        txt += '    echo "ERROR ==> Problem with the command: "\n'
861 >        txt += '    echo "eval \`'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME \` at `hostname`"\n'
862 >        txt += '    job_exit_code=10034\n'
863 >        txt += '    func_exit\n'
864 >        txt += 'fi \n'
865          # Handle the arguments:
866          txt += "\n"
867          txt += "## number of arguments (first argument always jobnumber)\n"
868          txt += "\n"
869 <        txt += "narg=$#\n"
708 <        txt += "if [ $narg -lt 2 ]\n"
869 >        txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
870          txt += "then\n"
871 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$narg+ \n"
872 <        txt += '    echo "JOB_EXIT_STATUS = 50113"\n'
873 <        txt += '    echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n'
713 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
714 <        txt += '    rm -f $RUNTIME_AREA/$repo \n'
715 <        txt += '    echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
716 <        txt += '    echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
717 <        ## OLI_Daniele
718 <        txt += '    if [ $middleware == OSG ]; then \n'
719 <        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
720 <        txt += '        cd $RUNTIME_AREA\n'
721 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
722 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
723 <        txt += '            echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
724 <        txt += '            echo "JOB_EXIT_STATUS = 50114"\n'
725 <        txt += '            echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
726 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
727 <        txt += '            rm -f $RUNTIME_AREA/$repo \n'
728 <        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
729 <        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
730 <        txt += '        fi\n'
731 <        txt += '    fi \n'
732 <        txt += "    exit 1\n"
871 >        txt += "    echo 'ERROR ==> Too few arguments' +$nargs+ \n"
872 >        txt += '    job_exit_code=50113\n'
873 >        txt += "    func_exit\n"
874          txt += "fi\n"
875          txt += "\n"
876  
877          # Prepare job-specific part
878          job = common.job_list[nj]
879 <        pset = os.path.basename(job.configFilename())
880 <        txt += '\n'
881 <        if (self.datasetPath): # standard job
882 <            txt += 'InputFiles=$2\n'
883 <            txt += 'echo "Inputfiles:<$InputFiles>"\n'
884 <            txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset.cfg\n'
885 <        else:  # pythia like job
886 <            if (self.sourceSeed):
887 <                txt += 'Seed=$2\n'
888 <                txt += 'echo "Seed: <$Seed>"\n'
889 <                txt += 'sed "s#INPUT#$Seed#" $RUNTIME_AREA/'+pset+' > pset.cfg\n'
749 <            else:
750 <                txt += '# Copy untouched pset\n'
751 <                txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
879 >        if (self.datasetPath):
880 >            txt += '\n'
881 >            txt += 'DatasetPath='+self.datasetPath+'\n'
882 >
883 >            datasetpath_split = self.datasetPath.split("/")
884 >            ### FEDE FOR NEW LFN ###
885 >            self.primaryDataset = datasetpath_split[1]
886 >            ########################
887 >            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
888 >            txt += 'DataTier='+datasetpath_split[2]+'\n'
889 >            txt += 'ApplicationFamily=cmsRun\n'
890  
891 +        else:
892 +            txt += 'DatasetPath=MCDataTier\n'
893 +            ### FEDE FOR NEW LFN ###
894 +            self.primaryDataset = 'null'
895 +            ########################
896 +            txt += 'PrimaryDataset=null\n'
897 +            txt += 'DataTier=null\n'
898 +            txt += 'ApplicationFamily=MCDataTier\n'
899 +        if self.pset != None:
900 +            pset = os.path.basename(job.configFilename())
901 +            txt += '\n'
902 +            txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
903 +            if (self.datasetPath): # standard job
904 +                txt += 'InputFiles=${args[1]}; export InputFiles\n'
905 +                if (self.useParent):
906 +                    txt += 'ParentFiles=${args[2]}; export ParentFiles\n'
907 +                    txt += 'MaxEvents=${args[3]}; export MaxEvents\n'
908 +                    txt += 'SkipEvents=${args[4]}; export SkipEvents\n'
909 +                else:
910 +                    txt += 'MaxEvents=${args[2]}; export MaxEvents\n'
911 +                    txt += 'SkipEvents=${args[3]}; export SkipEvents\n'
912 +                txt += 'echo "Inputfiles:<$InputFiles>"\n'
913 +                if (self.useParent): txt += 'echo "ParentFiles:<$ParentFiles>"\n'
914 +                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
915 +                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
916 +            else:  # pythia like job
917 +                txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
918 +                txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
919 +                txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
920 +                txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
921 +                if (self.firstRun):
922 +                    txt += 'FirstRun=${args[1]}; export FirstRun\n'
923 +                    txt += 'echo "FirstRun: <$FirstRun>"\n'
924  
925 <        if len(self.additional_inbox_files) > 0:
755 <            for file in self.additional_inbox_files:
756 <                txt += 'if [ -e $RUNTIME_AREA/'+file+' ] ; then\n'
757 <                txt += '   cp $RUNTIME_AREA/'+file+' .\n'
758 <                txt += '   chmod +x '+file+'\n'
759 <                txt += 'fi\n'
760 <            pass
925 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
926  
762        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
927  
928 <        txt += '\n'
929 <        txt += 'echo "***** cat pset.cfg *********"\n'
930 <        txt += 'cat pset.cfg\n'
931 <        txt += 'echo "****** end pset.cfg ********"\n'
932 <        txt += '\n'
933 <        # txt += 'echo "***** cat pset1.cfg *********"\n'
934 <        # txt += 'cat pset1.cfg\n'
935 <        # txt += 'echo "****** end pset1.cfg ********"\n'
928 >        if self.pset != None:
929 >            # FUTURE: Can simplify for 2_1_x and higher
930 >            txt += '\n'
931 >            if self.debug_wrapper==True:
932 >                txt += 'echo "***** cat ' + psetName + ' *********"\n'
933 >                txt += 'cat ' + psetName + '\n'
934 >                txt += 'echo "****** end ' + psetName + ' ********"\n'
935 >                txt += '\n'
936 >            if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
937 >                txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
938 >            else:
939 >                txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
940 >            txt += 'echo "PSETHASH = $PSETHASH" \n'
941 >            txt += '\n'
942          return txt
943  
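The psetName choice at the top of wsSetupEnvironment() above gates on the parsed CMSSW_major/CMSSW_minor release numbers. The same test can be exercised on its own; a small sketch mirroring the condition used above (names are illustrative):

    def pset_name(major, minor):
        # CMSSW 2_1_x and later ship a python configuration, older releases a .cfg
        if (major >= 2 and minor >= 1) or (major >= 3):
            return 'pset.py'
        return 'pset.cfg'

    assert pset_name(1, 6) == 'pset.cfg'
    assert pset_name(2, 1) == 'pset.py'
    assert pset_name(3, 0) == 'pset.py'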
944 <    def wsBuildExe(self, nj):
944 >    def wsUntarSoftware(self, nj=0):
945          """
946          Put in the script the commands to untar the software tarball
947          shipped in the input sandbox.
948          """
949  
950 <        txt = ""
950 >        txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
951  
952          if os.path.isfile(self.tgzNameWithPath):
953 <            txt += 'echo "tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'"\n'
953 >            txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
954              txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
955 +            if  self.debug_wrapper:
956 +                txt += 'ls -Al \n'
957              txt += 'untar_status=$? \n'
958              txt += 'if [ $untar_status -ne 0 ]; then \n'
959 <            txt += '   echo "SET_EXE 1 ==> ERROR Untarring .tgz file failed"\n'
960 <            txt += '   echo "JOB_EXIT_STATUS = $untar_status" \n'
961 <            txt += '   echo "JobExitCode=$untar_status" | tee -a $RUNTIME_AREA/$repo\n'
790 <            txt += '   if [ $middleware == OSG ]; then \n'
791 <            txt += '       echo "Remove working directory: $WORKING_DIR"\n'
792 <            txt += '       cd $RUNTIME_AREA\n'
793 <            txt += '       /bin/rm -rf $WORKING_DIR\n'
794 <            txt += '       if [ -d $WORKING_DIR ] ;then\n'
795 <            txt += '           echo "SET_EXE 50999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Untarring .tgz file failed"\n'
796 <            txt += '           echo "JOB_EXIT_STATUS = 50999"\n'
797 <            txt += '           echo "JobExitCode=50999" | tee -a $RUNTIME_AREA/$repo\n'
798 <            txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
799 <            txt += '           rm -f $RUNTIME_AREA/$repo \n'
800 <            txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
801 <            txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
802 <            txt += '       fi\n'
803 <            txt += '   fi \n'
804 <            txt += '   \n'
805 <            txt += '   exit 1 \n'
959 >            txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
960 >            txt += '   job_exit_code=$untar_status\n'
961 >            txt += '   func_exit\n'
962              txt += 'else \n'
963              txt += '   echo "Successful untar" \n'
964              txt += 'fi \n'
965 +            txt += '\n'
966 +            txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
967 +            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
968 +            txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
969 +            txt += 'else\n'
970 +            txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
971 +            txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
972 +            txt += 'fi\n'
973 +            txt += '\n'
974 +
975              pass
976 <        
976 >
977          return txt
978  
979 <    def modifySteeringCards(self, nj):
979 >    def wsBuildExe(self, nj=0):
980          """
981 <        modify the card provided by the user,
982 <        writing a new card into share dir
981 >        Put in the script the commands to move the shipped software
982 >        (lib/, module/ and data/ directories, plus additional files) into the new CMSSW area.
983          """
984 <        
984 >
985 >        txt = '\n#Written by cms_cmssw::wsBuildExe\n'
986 >        txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
987 >
988 >        txt += 'rm -r lib/ module/ \n'
989 >        txt += 'mv $RUNTIME_AREA/lib/ . \n'
990 >        txt += 'mv $RUNTIME_AREA/module/ . \n'
991 >        if self.dataExist == True:
992 >            txt += 'rm -r src/ \n'
993 >            txt += 'mv $RUNTIME_AREA/src/ . \n'
994 >        if len(self.additional_inbox_files)>0:
995 >            for file in self.additional_inbox_files:
996 >                txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
997 >        # txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
998 >        # txt += 'mv $RUNTIME_AREA/IMProv/ . \n'
999 >
1000 >        txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
1001 >        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
1002 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
1003 >        txt += 'else\n'
1004 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
1005 >        txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1006 >        txt += 'fi\n'
1007 >        txt += '\n'
1008 >
1009 >        return txt
1010 >
1011 >
1012      def executableName(self):
1013 <        return self.executable
1013 >        if self.scriptExe:
1014 >            return "sh "
1015 >        else:
1016 >            return self.executable
1017  
1018      def executableArgs(self):
1019 <        return " -p pset.cfg"
1019 >        # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
1020 >        if self.scriptExe:#CarlosDaniele
1021 >            return   self.scriptExe + " $NJob"
1022 >        else:
1023 >            ex_args = ""
1024 >            # FUTURE: This tests the CMSSW version. Can remove code as versions deprecated
1025 >            # Framework job report
1026 >            if (self.CMSSW_major >= 1 and self.CMSSW_minor >= 5) or (self.CMSSW_major >= 2):
1027 >                ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
1028 >            # Type of config file
1029 >            if self.CMSSW_major >= 2 :
1030 >                ex_args += " -p pset.py"
1031 >            else:
1032 >                ex_args += " -p pset.cfg"
1033 >            return ex_args
1034  
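Combining executableName() and executableArgs() gives the command the wrapper eventually runs. A hedged illustration for a cmsRun job on a 2_x release (the concrete values are assumptions, not read from a real task):

    # Illustration only: how name and arguments combine for a 2_x cmsRun job.
    executable = 'cmsRun'
    args = ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py'
    print(executable + args)
    # prints: cmsRun -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py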
1035      def inputSandbox(self, nj):
1036          """
1037          Returns a list of filenames to be put in JDL input sandbox.
1038          """
1039          inp_box = []
830        # dict added to delete duplicate from input sandbox file list
831        seen = {}
832        ## code
1040          if os.path.isfile(self.tgzNameWithPath):
1041              inp_box.append(self.tgzNameWithPath)
1042 <        ## config
1043 <        inp_box.append(common.job_list[nj].configFilename())
837 <        ## additional input files
838 <        #for file in self.additional_inbox_files:
839 <        #    inp_box.append(common.work_space.cwdDir()+file)
1042 >        wrapper = os.path.basename(str(common._db.queryTask('scriptName')))
1043 >        inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper)
1044          return inp_box
1045  
1046      def outputSandbox(self, nj):
# Line 845 | Line 1049 | class Cmssw(JobType):
1049          """
1050          out_box = []
1051  
848        stdout=common.job_list[nj].stdout()
849        stderr=common.job_list[nj].stderr()
850
1052          ## User Declared output files
1053 <        for out in self.output_file:
1054 <            n_out = nj + 1
1055 <            out_box.append(self.numberFile_(out,str(n_out)))
1053 >        for out in (self.output_file+self.output_file_sandbox):
1054 >            n_out = nj + 1
1055 >            out_box.append(numberFile(out,str(n_out)))
1056          return out_box
856        return []
1057  
858    def prepareSteeringCards(self):
859        """
860        Make initial modifications of the user's steering card file.
861        """
862        return
1058  
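outputSandbox() above and wsRenameOutput() below both rely on the numberFile helper (provided elsewhere in CRAB; the in-class numberFile_ shown further down is dropped in this revision) to slot the job number in before the last extension. A minimal re-implementation sketch, not the actual helper:

    def number_file(filename, tag):
        # 'histo.root' + tag 3 -> 'histo_3.root'; no extension -> plain suffix
        parts = filename.split('.')
        if len(parts) > 1:
            return '.'.join(parts[:-1]) + '_' + str(tag) + '.' + parts[-1]
        return filename + '_' + str(tag)

    assert number_file('histo.root', 3) == 'histo_3.root'
    assert number_file('output', '$NJob') == 'output_$NJob'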
1059      def wsRenameOutput(self, nj):
1060          """
1061          Returns part of a job script which renames the produced files.
1062          """
1063  
1064 <        txt = '\n'
1065 <        txt += '# directory content\n'
1066 <        txt += 'ls \n'
1067 <        file_list = ''
1068 <        for fileWithSuffix in self.output_file:
1069 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1070 <            file_list=file_list+output_file_num+' '
1064 >        txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
1065 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1066 >        txt += 'echo ">>> current directory content:"\n'
1067 >        if self.debug_wrapper:
1068 >            txt += 'ls -Al\n'
1069 >        txt += '\n'
1070 >
1071 >        for fileWithSuffix in (self.output_file):
1072 >            output_file_num = numberFile(fileWithSuffix, '$NJob')
1073              txt += '\n'
1074              txt += '# check output file\n'
1075 <            txt += 'ls '+fileWithSuffix+'\n'
1076 <            txt += 'ls_result=$?\n'
1077 <            #txt += 'exe_result=$?\n'
1078 <            txt += 'if [ $ls_result -ne 0 ] ; then\n'
1079 <            txt += '   echo "ERROR: Problem with output file"\n'
1080 <            #txt += '   echo "JOB_EXIT_STATUS = $exe_result"\n'
1081 <            #txt += '   echo "JobExitCode=60302" | tee -a $RUNTIME_AREA/$repo\n'
1082 <            #txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
1083 <            ### OLI_DANIELE
1084 <            if common.scheduler.boss_scheduler_name == 'condor_g':
1075 >            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1076 >            if (self.copy_data == 1):  # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA
1077 >                txt += '    mv '+fileWithSuffix+' '+output_file_num+'\n'
1078 >                txt += '    ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1079 >            else:
1080 >                txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1081 >                txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1082 >            txt += 'else\n'
1083 >            txt += '    job_exit_code=60302\n'
1084 >            txt += '    echo "WARNING: Output file '+fileWithSuffix+' not found"\n'
1085 >            if common.scheduler.name().upper() == 'CONDOR_G':
1086                  txt += '    if [ $middleware == OSG ]; then \n'
1087                  txt += '        echo "prepare dummy output file"\n'
1088                  txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1089                  txt += '    fi \n'
892            txt += 'else\n'
893            txt += '   cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1090              txt += 'fi\n'
1091 <      
1092 <        txt += 'cd $RUNTIME_AREA\n'
1093 <        file_list=file_list[:-1]
1094 <        txt += 'file_list="'+file_list+'"\n'
1095 <        txt += 'cd $RUNTIME_AREA\n'
900 <        ### OLI_DANIELE
901 <        txt += 'if [ $middleware == OSG ]; then\n'  
902 <        txt += '    cd $RUNTIME_AREA\n'
903 <        txt += '    echo "Remove working directory: $WORKING_DIR"\n'
904 <        txt += '    /bin/rm -rf $WORKING_DIR\n'
905 <        txt += '    if [ -d $WORKING_DIR ] ;then\n'
906 <        txt += '        echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
907 <        txt += '        echo "JOB_EXIT_STATUS = 60999"\n'
908 <        txt += '        echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
909 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
910 <        txt += '        rm -f $RUNTIME_AREA/$repo \n'
911 <        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
912 <        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
913 <        txt += '    fi\n'
914 <        txt += 'fi\n'
1091 >        file_list = []
1092 >        for fileWithSuffix in (self.output_file):
1093 >             file_list.append(numberFile(fileWithSuffix, '$NJob'))
1094 >
1095 >        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
1096          txt += '\n'
1097 +        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1098 +        txt += 'echo ">>> current directory content:"\n'
1099 +        if self.debug_wrapper:
1100 +            txt += 'ls -Al\n'
1101 +        txt += '\n'
1102 +        txt += 'cd $RUNTIME_AREA\n'
1103 +        txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
1104          return txt
1105  
1106 <    def numberFile_(self, file, txt):
919 <        """
920 <        append _'txt' before last extension of a file
921 <        """
922 <        p = string.split(file,".")
923 <        # take away last extension
924 <        name = p[0]
925 <        for x in p[1:-1]:
926 <           name=name+"."+x
927 <        # add "_txt"
928 <        if len(p)>1:
929 <          ext = p[len(p)-1]
930 <          #result = name + '_' + str(txt) + "." + ext
931 <          result = name + '_' + txt + "." + ext
932 <        else:
933 <          #result = name + '_' + str(txt)
934 <          result = name + '_' + txt
935 <        
936 <        return result
937 <
938 <    def getRequirements(self):
1106 >    def getRequirements(self, nj=[]):
1107          """
1108 <        return job requirements to add to jdl files
1108 >        return job requirements to add to jdl files
1109          """
1110          req = ''
1111 <        if common.analisys_common_info['sw_version']:
1111 >        if self.version:
1112              req='Member("VO-cms-' + \
1113 <                 common.analisys_common_info['sw_version'] + \
1113 >                 self.version + \
1114                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1115 <        if common.analisys_common_info['sites']:
1116 <            if len(common.analisys_common_info['sites'])>0:
1117 <                req = req + ' && ('
1118 <                for i in range(len(common.analisys_common_info['sites'])):
1119 <                    req = req + 'other.GlueCEInfoHostName == "' \
1120 <                         + common.analisys_common_info['sites'][i] + '"'
1121 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
1122 <                        req = req + ' || '
1123 <            req = req + ')'
956 <        #print "req = ", req
1115 >        if self.executable_arch:
1116 >            req+=' && Member("VO-cms-' + \
1117 >                 self.executable_arch + \
1118 >                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1119 >
1120 >        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1121 >        if ( common.scheduler.name() == "glitecoll" ) or ( common.scheduler.name() == "glite"):
1122 >            req += ' && other.GlueCEStateStatus == "Production" '
1123 >
1124          return req
1125  
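For a concrete picture of what getRequirements() returns, here is the kind of JDL requirements string the code above assembles for a hypothetical release, architecture and glite scheduler (all values invented for illustration):

    # Hypothetical result for version 'CMSSW_2_0_9', arch 'slc4_ia32_gcc345', glite:
    req = ('Member("VO-cms-CMSSW_2_0_9", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
           ' && Member("VO-cms-slc4_ia32_gcc345", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
           ' && (other.GlueHostNetworkAdapterOutboundIP)'
           ' && other.GlueCEStateStatus == "Production" ')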
1126      def configFilename(self):
1127          """ return the config filename """
1128 <        return self.name()+'.cfg'
1128 >        # FUTURE: Can remove cfg mode for CMSSW >= 2_1_x
1129 >        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
1130 >          return self.name()+'.py'
1131 >        else:
1132 >          return self.name()+'.cfg'
1133  
963    ### OLI_DANIELE
1134      def wsSetupCMSOSGEnvironment_(self):
1135          """
1136          Returns part of a job script which prepares
1137          the execution environment and which is common for all CMS jobs.
1138          """
1139 <        txt = '\n'
1140 <        txt += '   echo "### SETUP CMS OSG  ENVIRONMENT ###"\n'
1141 <        txt += '   if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
1142 <        txt += '      # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
1143 <        txt += '       source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1144 <        txt += '   elif [ -f $OSG_APP/cmssoft/cmsset_default.sh ] ;then\n'
1145 <        txt += '      # Use $OSG_APP/cmssoft/cmsset_default.sh to setup cms software\n'
1146 <        txt += '       source $OSG_APP/cmssoft/cmsset_default.sh '+self.version+'\n'
1147 <        txt += '   else\n'
1148 <        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1149 <        txt += '       echo "JOB_EXIT_STATUS = 10020"\n'
1150 <        txt += '       echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1151 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
982 <        txt += '       rm -f $RUNTIME_AREA/$repo \n'
983 <        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
984 <        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
985 <        txt += '       exit 1\n'
986 <        txt += '\n'
987 <        txt += '       echo "Remove working directory: $WORKING_DIR"\n'
988 <        txt += '       cd $RUNTIME_AREA\n'
989 <        txt += '       /bin/rm -rf $WORKING_DIR\n'
990 <        txt += '       if [ -d $WORKING_DIR ] ;then\n'
991 <        txt += '            echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
992 <        txt += '            echo "JOB_EXIT_STATUS = 10017"\n'
993 <        txt += '            echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
994 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
995 <        txt += '            rm -f $RUNTIME_AREA/$repo \n'
996 <        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
997 <        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
998 <        txt += '       fi\n'
999 <        txt += '\n'
1000 <        txt += '       exit 1\n'
1001 <        txt += '   fi\n'
1139 >        txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n'
1140 >        txt += '    echo ">>> setup CMS OSG environment:"\n'
1141 >        txt += '    echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
1142 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1143 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1144 >        txt += '    if [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1145 >        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1146 >        txt += '        source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1147 >        txt += '    else\n'
1148 >        txt += '        echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1149 >        txt += '        job_exit_code=10020\n'
1150 >        txt += '        func_exit\n'
1151 >        txt += '    fi\n'
1152          txt += '\n'
1153 <        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1154 <        txt += '   echo " END SETUP CMS OSG  ENVIRONMENT "\n'
1153 >        txt += '    echo "==> setup cms environment ok"\n'
1154 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1155  
1156          return txt
1157 <
1008 <    ### OLI_DANIELE
1157 >
1158      def wsSetupCMSLCGEnvironment_(self):
1159          """
1160          Returns part of a job script which prepares
1161          the execution environment and which is common for all CMS jobs.
1162          """
1163 <        txt  = '   \n'
1164 <        txt += '   echo " ### SETUP CMS LCG  ENVIRONMENT ### "\n'
1165 <        txt += '   if [ ! $VO_CMS_SW_DIR ] ;then\n'
1166 <        txt += '       echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
1167 <        txt += '       echo "JOB_EXIT_STATUS = 10031" \n'
1168 <        txt += '       echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
1169 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1170 <        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1171 <        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1172 <        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1173 <        txt += '       exit 1\n'
1174 <        txt += '   else\n'
1175 <        txt += '       echo "Sourcing environment... "\n'
1176 <        txt += '       if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1177 <        txt += '           echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1178 <        txt += '           echo "JOB_EXIT_STATUS = 10020"\n'
1179 <        txt += '           echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1180 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1181 <        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1182 <        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1183 <        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1184 <        txt += '           exit 1\n'
1185 <        txt += '       fi\n'
1186 <        txt += '       echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1187 <        txt += '       source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1188 <        txt += '       result=$?\n'
1189 <        txt += '       if [ $result -ne 0 ]; then\n'
1190 <        txt += '           echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1191 <        txt += '           echo "JOB_EXIT_STATUS = 10032"\n'
1192 <        txt += '           echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1193 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1194 <        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1195 <        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1196 <        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1197 <        txt += '           exit 1\n'
1198 <        txt += '       fi\n'
1199 <        txt += '   fi\n'
1200 <        txt += '   \n'
1201 <        txt += '   string=`cat /etc/redhat-release`\n'
1202 <        txt += '   echo $string\n'
1203 <        txt += '   if [[ $string = *alhalla* ]]; then\n'
1204 <        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1205 <        txt += '   elif [[ $string = *Enterprise* ]] || [[ $string = *cientific* ]]; then\n'
1206 <        txt += '       export SCRAM_ARCH=slc3_ia32_gcc323\n'
1207 <        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1208 <        txt += '   else\n'
1209 <        txt += '       echo "SET_CMS_ENV 10033 ==> ERROR OS unknown, LCG environment not initialized"\n'
1210 <        txt += '       echo "JOB_EXIT_STATUS = 10033"\n'
1211 <        txt += '       echo "JobExitCode=10033" | tee -a $RUNTIME_AREA/$repo\n'
1212 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1213 <        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1214 <        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1215 <        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1216 <        txt += '       exit 1\n'
1217 <        txt += '   fi\n'
1218 <        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1219 <        txt += '   echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
1163 >        txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n'
1164 >        txt += '    echo ">>> setup CMS LCG environment:"\n'
1165 >        txt += '    echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
1166 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1167 >        txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
1168 >        txt += '    if [ ! $VO_CMS_SW_DIR ] ;then\n'
1169 >        txt += '        echo "ERROR ==> CMS software dir not found on WN `hostname`"\n'
1170 >        txt += '        job_exit_code=10031\n'
1171 >        txt += '        func_exit\n'
1172 >        txt += '    else\n'
1173 >        txt += '        echo "Sourcing environment... "\n'
1174 >        txt += '        if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1175 >        txt += '            echo "ERROR ==> cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1176 >        txt += '            job_exit_code=10020\n'
1177 >        txt += '            func_exit\n'
1178 >        txt += '        fi\n'
1179 >        txt += '        echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1180 >        txt += '        source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1181 >        txt += '        result=$?\n'
1182 >        txt += '        if [ $result -ne 0 ]; then\n'
1183 >        txt += '            echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1184 >        txt += '            job_exit_code=10032\n'
1185 >        txt += '            func_exit\n'
1186 >        txt += '        fi\n'
1187 >        txt += '    fi\n'
1188 >        txt += '    \n'
1189 >        txt += '    echo "==> setup cms environment ok"\n'
1190 >        return txt
1191 >
1192 >    def modifyReport(self, nj):
1193 >        """
1194 >        insert the part of the script that modifies the FrameworkJob Report
1195 >        """
1196 >        txt = '\n#Written by cms_cmssw::modifyReport\n'
1197 >        publish_data = int(self.cfg_params.get('USER.publish_data',0))
1198 >        if (publish_data == 1):
1199 >            processedDataset = self.cfg_params['USER.publish_data_name']
1200 >            if (self.primaryDataset == 'null'):
1201 >                 self.primaryDataset = processedDataset
1202 >            if (common.scheduler.name().upper() == "CAF" or common.scheduler.name().upper() == "LSF"):
1203 >                ### FEDE FOR NEW LFN ###
1204 >                LFNBaseName = LFNBase(self.primaryDataset, processedDataset, LocalUser=True)
1205 >                self.user = getUserName(LocalUser=True)
1206 >                ########################
1207 >            else :
1208 >                ### FEDE FOR NEW LFN ###
1209 >                LFNBaseName = LFNBase(self.primaryDataset, processedDataset)
1210 >                self.user = getUserName()
1211 >                ########################
1212 >
1213 >            txt += 'if [ $copy_exit_status -eq 0 ]; then\n'
1214 >            ### FEDE FOR NEW LFN ###
1215 >            #txt += '    FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName)
1216 >            txt += '    FOR_LFN=%s/${PSETHASH}/\n'%(LFNBaseName)
1217 >            ########################
1218 >            txt += 'else\n'
1219 >            txt += '    FOR_LFN=/copy_problems/ \n'
1220 >            txt += '    SE=""\n'
1221 >            txt += '    SE_PATH=""\n'
1222 >            txt += 'fi\n'
1223 >
1224 >            txt += 'echo ">>> Modify Job Report:" \n'
1225 >            txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1226 >            txt += 'ProcessedDataset='+processedDataset+'\n'
1227 >            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1228 >            txt += 'echo "SE = $SE"\n'
1229 >            txt += 'echo "SE_PATH = $SE_PATH"\n'
1230 >            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1231 >            txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1232 >            ### FEDE FOR NEW LFN ###
1233 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier ' + self.user + '-$ProcessedDataset-$PSETHASH $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1234 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier ' + self.user + '-$ProcessedDataset-$PSETHASH $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1235 >            ########################
1236 >            txt += 'modifyReport_result=$?\n'
1237 >            txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1238 >            txt += '    modifyReport_result=70500\n'
1239 >            txt += '    job_exit_code=$modifyReport_result\n'
1240 >            txt += '    echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n'
1241 >            txt += '    echo "WARNING: Problem with ModifyJobReport"\n'
1242 >            txt += 'else\n'
1243 >            txt += '    mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1244 >            txt += 'fi\n'
1245 >        return txt
1246 >
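The FOR_LFN exported above is simply the LFN base produced by LFNBase() with the pset hash appended on success, or a sentinel path when the copy failed. A small sketch of that decision (the base path shown is a placeholder, not a real LFN):

    def for_lfn(lfn_base, pset_hash, copy_exit_status=0):
        # Mirrors the shell branch above: publish path on success, sentinel otherwise.
        if copy_exit_status == 0:
            return '%s/%s/' % (lfn_base, pset_hash)
        return '/copy_problems/'

    print(for_lfn('<lfn-base-from-LFNBase>', 'abc123def456'))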
1247 >    def wsParseFJR(self):
1248 >        """
1249 >        Parse the FrameworkJobReport to obtain useful info
1250 >        """
1251 >        txt = '\n#Written by cms_cmssw::wsParseFJR\n'
1252 >        txt += 'echo ">>> Parse FrameworkJobReport crab_fjr.xml"\n'
1253 >        txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1254 >        txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1255 >        txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1256 >        if self.debug_wrapper :
1257 >            txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1258 >        txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1259 >        txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1260 >        txt += '            echo ">>> crab_fjr.xml contents: "\n'
1261 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1262 >        txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1263 >        txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
1264 >        txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
1265 >        txt += '        else\n'
1266 >        txt += '            echo "Extracted ExitStatus from FrameworkJobReport parsing output: $executable_exit_status"\n'
1267 >        txt += '        fi\n'
1268 >        txt += '    else\n'
1269 >        txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1270 >        txt += '    fi\n'
1271 >        #### Patch to check input data reading for CMSSW16x; hopefully we'll remove it ASAP
1272 >
1273 >        txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1274 >        txt += '      echo ">>> Executable succeeded $executable_exit_status"\n'
1275 >        if (self.datasetPath and not (self.dataset_pu or self.useParent)) :
1276 >          # VERIFY PROCESSED DATA
1277 >            txt += '      echo ">>> Verify list of processed files:"\n'
1278 >            txt += '      echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1279 >            txt += '      python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1280 >            txt += '      cat input-files.txt  | sort | uniq > tmp.txt\n'
1281 >            txt += '      mv tmp.txt input-files.txt\n'
1282 >            txt += '      echo "cat input-files.txt"\n'
1283 >            txt += '      echo "----------------------"\n'
1284 >            txt += '      cat input-files.txt\n'
1285 >            txt += '      cat processed-files.txt | sort | uniq > tmp.txt\n'
1286 >            txt += '      mv tmp.txt processed-files.txt\n'
1287 >            txt += '      echo "----------------------"\n'
1288 >            txt += '      echo "cat processed-files.txt"\n'
1289 >            txt += '      echo "----------------------"\n'
1290 >            txt += '      cat processed-files.txt\n'
1291 >            txt += '      echo "----------------------"\n'
1292 >            txt += '      diff -q input-files.txt processed-files.txt\n'
1293 >            txt += '      fileverify_status=$?\n'
1294 >            txt += '      if [ $fileverify_status -ne 0 ]; then\n'
1295 >            txt += '         executable_exit_status=30001\n'
1296 >            txt += '         echo "ERROR ==> not all input files processed"\n'
1297 >            txt += '         echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1298 >            txt += '         echo "      ==> diff input-files.txt processed-files.txt"\n'
1299 >            txt += '      fi\n'
1300 >        txt += '    elif [ $executable_exit_status -ne 0 ] && [ $executable_exit_status -ne 50015 ] && [ $executable_exit_status -ne 50017 ];then\n'
1301 >        txt += '      echo ">>> Executable failed  $executable_exit_status"\n'
1302 >        txt += '      func_exit\n'
1303 >        txt += '    fi\n'
1304 >        txt += '\n'
1305 >        txt += 'else\n'
1306 >        txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1307 >        txt += 'fi\n'
1308 >        txt += '\n'
1309 >        txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1310 >        txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1311 >        txt += 'job_exit_code=$executable_exit_status\n'
1312 >
1313          return txt
1314  
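The sort/uniq/diff sequence above just checks that the set of files reported in the FJR matches the set of requested input files. An equivalent standalone check in Python (illustrative; the file names are made up):

    def all_inputs_processed(input_files, processed_files):
        # True only when the two lists contain exactly the same set of LFNs,
        # which is what 'diff -q' on the sorted, uniq'ed lists verifies above.
        return set(input_files) == set(processed_files)

    assert all_inputs_processed(['a.root', 'b.root'], ['b.root', 'a.root'])
    assert not all_inputs_processed(['a.root', 'b.root'], ['a.root'])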
1315      def setParam_(self, param, value):
# Line 1076 | Line 1318 | class Cmssw(JobType):
1318      def getParams(self):
1319          return self._params
1320  
1321 <    def setTaskid_(self):
1322 <        self._taskId = self.cfg_params['taskId']
1323 <        
1324 <    def getTaskid(self):
1325 <        return self._taskId
1321 >    def uniquelist(self, old):
1322 >        """
1323 >        remove duplicates from a list
1324 >        """
1325 >        nd={}
1326 >        for e in old:
1327 >            nd[e]=0
1328 >        return nd.keys()
1329 >
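uniquelist() relies on dictionary keys being unique, so duplicates collapse (at the cost of losing the original order). A quick usage example with made-up storage element names:

    old = ['srm.example-t2.org', 'srm.example-t1.org', 'srm.example-t2.org']
    nd = {}
    for e in old:
        nd[e] = 0
    print(sorted(nd.keys()))
    # prints: ['srm.example-t1.org', 'srm.example-t2.org']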
1330 >    def outList(self):
1331 >        """
1332 >        build and export the list of output files (filesToCheck) whose size will be checked on the WN
1333 >        """
1334 >        txt = ''
1335 >        txt += 'echo ">>> list of expected files on output sandbox"\n'
1336 >        listOutFiles = []
1337 >        stdout = 'CMSSW_$NJob.stdout'
1338 >        stderr = 'CMSSW_$NJob.stderr'
1339 >        if (self.return_data == 1):
1340 >            for file in (self.output_file+self.output_file_sandbox):
1341 >                listOutFiles.append(numberFile(file, '$NJob'))
1342 >            listOutFiles.append(stdout)
1343 >            listOutFiles.append(stderr)
1344 >        else:
1345 >            for file in (self.output_file_sandbox):
1346 >                listOutFiles.append(numberFile(file, '$NJob'))
1347 >            listOutFiles.append(stdout)
1348 >            listOutFiles.append(stderr)
1349 >        txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1350 >        txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1351 >        txt += 'export filesToCheck\n'
1352 >        return txt
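The filesToCheck string exported above ends up holding every numbered output file plus the numbered stdout/stderr. A short example of the names produced for a hypothetical job 3 with return_data enabled (file names invented):

    output_file = ['histo.root']
    output_file_sandbox = ['crab_fjr.xml']
    njob = 3
    files = ['%s_%d.%s' % (f.rsplit('.', 1)[0], njob, f.rsplit('.', 1)[1])
             for f in output_file + output_file_sandbox]
    files += ['CMSSW_%d.stdout' % njob, 'CMSSW_%d.stderr' % njob]
    print(' '.join(files))
    # prints: histo_3.root crab_fjr_3.xml CMSSW_3.stdout CMSSW_3.stderr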
