ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/COMP/CRAB/python/cms_cmssw.py
(Generate patch)

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.33 by mkirn, Fri Jul 28 18:19:34 2006 UTC vs.
Revision 1.52 by slacapra, Tue Oct 17 11:54:02 2006 UTC

# Line 6 | Line 6 | import math
6   import common
7   import PsetManipulator  
8  
9 < import DBSInfo_EDM
10 < import DataDiscovery_EDM
11 < import DataLocation_EDM
9 > import DBSInfo
10 > import DataDiscovery
11 > import DataLocation
12   import Scram
13  
14 < import os, string, re
14 > import glob, os, string, re
15  
16   class Cmssw(JobType):
17 <    def __init__(self, cfg_params):
17 >    def __init__(self, cfg_params, ncjobs):
18          JobType.__init__(self, 'CMSSW')
19          common.logger.debug(3,'CMSSW::__init__')
20  
21        self.analisys_common_info = {}
21          # Marco.
22          self._params = {}
23          self.cfg_params = cfg_params
24 +
25 +        # number of jobs requested to be created, limit obj splitting
26 +        self.ncjobs = ncjobs
27 +
28          log = common.logger
29          
30          self.scram = Scram.Scram(cfg_params)
# Line 30 | Line 33 | class Cmssw(JobType):
33          self.scriptExe = ''
34          self.executable = ''
35          self.tgz_name = 'default.tgz'
36 +        self.pset = ''      #script use case Da  
37 +        self.datasetPath = '' #script use case Da
38  
39 +        # set FJR file name
40 +        self.fjrFileName = 'crab_fjr.xml'
41  
42          self.version = self.scram.getSWVersion()
43          self.setParam_('application', self.version)
37        common.analisys_common_info['sw_version'] = self.version
38        ### FEDE
39        common.analisys_common_info['copy_input_data'] = 0
40        common.analisys_common_info['events_management'] = 1
44  
45          ### collect Data cards
46          try:
# Line 85 | Line 88 | class Cmssw(JobType):
88          try:
89              self.pset = cfg_params['CMSSW.pset']
90              log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
91 <            if (not os.path.exists(self.pset)):
92 <                raise CrabException("User defined PSet file "+self.pset+" does not exist")
91 >            if self.pset.lower() != 'none' :
92 >                if (not os.path.exists(self.pset)):
93 >                    raise CrabException("User defined PSet file "+self.pset+" does not exist")
94 >            else:
95 >                self.pset = None
96          except KeyError:
97              raise CrabException("PSet file missing. Cannot run cmsRun ")
98  
# Line 94 | Line 100 | class Cmssw(JobType):
100          try:
101              self.output_file = []
102  
103 +            # add fjr report by default
104 +            self.output_file.append(self.fjrFileName)
105 +
106              tmp = cfg_params['CMSSW.output_file']
107              if tmp != '':
108                  tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
# Line 103 | Line 112 | class Cmssw(JobType):
112                      self.output_file.append(tmp)
113                      pass
114              else:
115 <                log.message("No output file defined: only stdout/err will be available")
115 >                log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available")
116                  pass
117              pass
118          except KeyError:
119 <            log.message("No output file defined: only stdout/err will be available")
119 >            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available")
120              pass
121  
122          # script_exe file as additional file in inputSandbox
123          try:
124              self.scriptExe = cfg_params['USER.script_exe']
116            self.additional_inbox_files.append(self.scriptExe)
125              if self.scriptExe != '':
126                 if not os.path.isfile(self.scriptExe):
127                    msg ="WARNING. file "+self.scriptExe+" not found"
128                    raise CrabException(msg)
129 +               self.additional_inbox_files.append(string.strip(self.scriptExe))
130          except KeyError:
131 <           pass
132 <                  
131 >            self.scriptExe = ''
132 >        #CarlosDaniele
133 >        if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
134 >           msg ="WARNING. script_exe  not defined"
135 >           raise CrabException(msg)
136 >
137          ## additional input files
138          try:
139              tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
140              for tmp in tmpAddFiles:
141 <                if not os.path.exists(tmp):
142 <                    raise CrabException("Additional input file not found: "+tmp)
143 <                self.additional_inbox_files.append(string.strip(tmp))
141 >                tmp = string.strip(tmp)
142 >                dirname = ''
143 >                if not tmp[0]=="/": dirname = "."
144 >                files = glob.glob(os.path.join(dirname, tmp))
145 >                for file in files:
146 >                    if not os.path.exists(file):
147 >                        raise CrabException("Additional input file not found: "+file)
148 >                    pass
149 >                    self.additional_inbox_files.append(string.strip(file))
150                  pass
151              pass
152 +            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
153          except KeyError:
154              pass
155  
156          # files per job
157          try:
158 <            self.filesPerJob = int(cfg_params['CMSSW.files_per_jobs']) #Daniele
159 <            self.selectFilesPerJob = 1
158 >            if (cfg_params['CMSSW.files_per_jobs']):
159 >                raise CrabException("files_per_jobs no longer supported.  Quitting.")
160          except KeyError:
161 <            self.filesPerJob = 0
142 <            self.selectFilesPerJob = 0
161 >            pass
162  
163          ## Events per job
164          try:
# Line 157 | Line 176 | class Cmssw(JobType):
176              self.theNumberOfJobs = 0
177              self.selectNumberOfJobs = 0
178  
179 +        try:
180 +            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
181 +            self.selectTotalNumberEvents = 1
182 +        except KeyError:
183 +            self.total_number_of_events = 0
184 +            self.selectTotalNumberEvents = 0
185 +
186 +        if self.pset != None: #CarlosDaniele
187 +             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
188 +                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
189 +                 raise CrabException(msg)
190 +        else:
191 +             if (self.selectNumberOfJobs == 0):
192 +                 msg = 'Must specify  number_of_jobs.'
193 +                 raise CrabException(msg)
194 +
195          ## source seed for pythia
196          try:
197              self.sourceSeed = int(cfg_params['CMSSW.pythia_seed'])
# Line 169 | Line 204 | class Cmssw(JobType):
204          except KeyError:
205              self.sourceSeedVtx = None
206              common.logger.debug(5,"No vertex seed given")
207 <
208 <        if not (self.selectFilesPerJob + self.selectEventsPerJob + self.selectNumberOfJobs == 1 ):
174 <            msg = 'Must define either files_per_jobs or events_per_job or number_of_jobs'
175 <            raise CrabException(msg)
176 <
177 <        try:
178 <            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
179 <        except KeyError:
180 <            msg = 'Must define total_number_of_events'
181 <            raise CrabException(msg)
182 <        
183 <        CEBlackList = []
184 <        try:
185 <            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
186 <            for tmp in tmpBad:
187 <                tmp=string.strip(tmp)
188 <                CEBlackList.append(tmp)
189 <        except KeyError:
190 <            pass
191 <
192 <        self.reCEBlackList=[]
193 <        for bad in CEBlackList:
194 <            self.reCEBlackList.append(re.compile( bad ))
195 <
196 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
197 <
198 <        CEWhiteList = []
199 <        try:
200 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
201 <            for tmp in tmpGood:
202 <                tmp=string.strip(tmp)
203 <                CEWhiteList.append(tmp)
204 <        except KeyError:
205 <            pass
206 <
207 <        #print 'CEWhiteList: ',CEWhiteList
208 <        self.reCEWhiteList=[]
209 <        for Good in CEWhiteList:
210 <            self.reCEWhiteList.append(re.compile( Good ))
211 <
212 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
213 <
214 <        self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
207 >        if self.pset != None: #CarlosDaniele
208 >            self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
209  
210          #DBSDLS-start
211          ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
212          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
213          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
214 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
215          ## Perform the data location and discovery (based on DBS/DLS)
216          ## SL: Don't if NONE is specified as input (pythia use case)
217 <        common.analisys_common_info['sites']=None
217 >        blockSites = {}
218          if self.datasetPath:
219 <            self.DataDiscoveryAndLocation(cfg_params)
219 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
220          #DBSDLS-end          
221  
222          self.tgzNameWithPath = self.getTarBall(self.executable)
223      
224          ## Select Splitting
225 <        if self.selectNoInput: self.jobSplittingNoInput()
226 <        elif self.selectFilesPerJob or self.selectEventsPerJob or self.selectNumberOfJobs: self.jobSplittingPerFiles()
227 <        else:
228 <            msg = 'Don\'t know how to split...'
229 <            raise CrabException(msg)
225 >        if self.selectNoInput:
226 >            if self.pset == None: #CarlosDaniele
227 >                self.jobSplittingForScript()
228 >            else:
229 >                self.jobSplittingNoInput()
230 >        else: self.jobSplittingByBlocks(blockSites)
231  
232          # modify Pset
233 <        try:
234 <            if (self.datasetPath): # standard job
235 <                #self.PsetEdit.maxEvent(self.eventsPerJob)
236 <                # always process all events in a file
237 <                self.PsetEdit.maxEvent("-1")
238 <                self.PsetEdit.inputModule("INPUT")
239 <
240 <            else:  # pythia like job
241 <                self.PsetEdit.maxEvent(self.eventsPerJob)
242 <                if (self.sourceSeed) :
243 <                    self.PsetEdit.pythiaSeed("INPUT")
244 <                    if (self.sourceSeedVtx) :
245 <                        self.PsetEdit.pythiaSeedVtx("INPUTVTX")
246 <            self.PsetEdit.psetWriter(self.configFilename())
247 <        except:
248 <            msg='Error while manipuliating ParameterSet: exiting...'
249 <            raise CrabException(msg)
233 >        if self.pset != None: #CarlosDaniele
234 >            try:
235 >                if (self.datasetPath): # standard job
236 >                    # allow to process a fraction of events in a file
237 >                    self.PsetEdit.inputModule("INPUT")
238 >                    self.PsetEdit.maxEvent("INPUTMAXEVENTS")
239 >                    self.PsetEdit.skipEvent("INPUTSKIPEVENTS")
240 >                else:  # pythia like job
241 >                    self.PsetEdit.maxEvent(self.eventsPerJob)
242 >                    if (self.sourceSeed) :
243 >                        self.PsetEdit.pythiaSeed("INPUT")
244 >                        if (self.sourceSeedVtx) :
245 >                            self.PsetEdit.pythiaSeedVtx("INPUTVTX")
246 >                # add FrameworkJobReport to parameter-set
247 >                self.PsetEdit.addCrabFJR(self.fjrFileName)
248 >                self.PsetEdit.psetWriter(self.configFilename())
249 >            except:
250 >                msg='Error while manipuliating ParameterSet: exiting...'
251 >                raise CrabException(msg)
252  
253      def DataDiscoveryAndLocation(self, cfg_params):
254  
# Line 263 | Line 261 | class Cmssw(JobType):
261          dataTiers = dataTiersList.split(',')
262  
263          ## Contact the DBS
264 +        common.logger.message("Contacting DBS...")
265          try:
266 <            self.pubdata=DataDiscovery_EDM.DataDiscovery_EDM(datasetPath, dataTiers, cfg_params)
266 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, dataTiers, cfg_params)
267              self.pubdata.fetchDBSInfo()
268  
269 <        except DataDiscovery_EDM.NotExistingDatasetError, ex :
269 >        except DataDiscovery.NotExistingDatasetError, ex :
270              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
271              raise CrabException(msg)
272  
273 <        except DataDiscovery_EDM.NoDataTierinProvenanceError, ex :
273 >        except DataDiscovery.NoDataTierinProvenanceError, ex :
274              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
275              raise CrabException(msg)
276 <        except DataDiscovery_EDM.DataDiscoveryError, ex:
276 >        except DataDiscovery.DataDiscoveryError, ex:
277              msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
278              raise CrabException(msg)
279  
# Line 282 | Line 281 | class Cmssw(JobType):
281          ## self.DBSPaths=self.pubdata.getDBSPaths()
282          common.logger.message("Required data are :"+self.datasetPath)
283  
284 <        filesbyblock=self.pubdata.getFiles()
285 < #        print filesbyblock
286 <        self.AllInputFiles=filesbyblock.values()
287 <        self.files = self.AllInputFiles        
284 >        self.filesbyblock=self.pubdata.getFiles()
285 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
286 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
287 >        # print str(self.filesbyblock)
288 >        # print 'self.eventsbyfile',len(self.eventsbyfile)
289 >        # print str(self.eventsbyfile)
290  
291          ## get max number of events
291        #common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
292          self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
293 <        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
293 >        common.logger.message("The number of available events is %s\n"%self.maxEvents)
294  
295 +        common.logger.message("Contacting DLS...")
296          ## Contact the DLS and build a list of sites hosting the fileblocks
297          try:
298 <            dataloc=DataLocation_EDM.DataLocation_EDM(filesbyblock.keys(),cfg_params)
298 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
299              dataloc.fetchDLSInfo()
300 <        except DataLocation_EDM.DataLocationError , ex:
300 >        except DataLocation.DataLocationError , ex:
301              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
302              raise CrabException(msg)
303          
303        allsites=dataloc.getSites()
304        common.logger.debug(5,"sites are %s"%allsites)
305        sites=self.checkBlackList(allsites)
306        common.logger.debug(5,"sites are (after black list) %s"%sites)
307        sites=self.checkWhiteList(sites)
308        common.logger.debug(5,"sites are (after white list) %s"%sites)
304  
305 <        if len(sites)==0:
306 <            msg = 'No sites hosting all the needed data! Exiting... '
307 <            raise CrabException(msg)
305 >        sites = dataloc.getSites()
306 >        allSites = []
307 >        listSites = sites.values()
308 >        for list in listSites:
309 >            for oneSite in list:
310 >                allSites.append(oneSite)
311 >        allSites = self.uniquelist(allSites)
312  
313 <        common.logger.message("List of Sites ("+str(len(sites))+") hosting the data : "+str(sites))
314 <        common.logger.debug(6, "List of Sites: "+str(sites))
315 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
317 <        self.setParam_('TargetCE', ','.join(sites))
318 <        return
313 >        common.logger.message("Sites ("+str(len(allSites))+") hosting part/all of dataset: "+str(allSites))
314 >        common.logger.debug(6, "List of Sites: "+str(allSites))
315 >        return sites
316      
317 <    def jobSplittingPerFiles(self):
321 <        """
322 <        Perform job splitting based on number of files to be accessed per job
317 >    def jobSplittingByBlocks(self, blockSites):
318          """
319 <        common.logger.debug(5,'Splitting per input files')
320 <        common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
321 <        common.logger.message('Available '+str(self.maxEvents)+' events in total ')
322 <        common.logger.message('Required '+str(self.filesPerJob)+' files per job ')
323 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
324 <        common.logger.message('Required '+str(self.eventsPerJob)+' events per job')
325 <
326 <        ## if asked to process all events, do it
327 <        if self.total_number_of_events == -1:
328 <            self.total_number_of_events=self.maxEvents
319 >        Perform job splitting. Jobs run over an integer number of files
320 >        and no more than one block.
321 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
322 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberOfJobs,
323 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
324 >                  self.maxEvents, self.filesbyblock
325 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
326 >              self.total_number_of_jobs - Total # of jobs
327 >              self.list_of_args - File(s) job will run on (a list of lists)
328 >        """
329 >
330 >        # ---- Handle the possible job splitting configurations ---- #
331 >        if (self.selectTotalNumberEvents):
332 >            totalEventsRequested = self.total_number_of_events
333 >        if (self.selectEventsPerJob):
334 >            eventsPerJobRequested = self.eventsPerJob
335 >            if (self.selectNumberOfJobs):
336 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
337 >
338 >        # If user requested all the events in the dataset
339 >        if (totalEventsRequested == -1):
340 >            eventsRemaining=self.maxEvents
341 >        # If user requested more events than are in the dataset
342 >        elif (totalEventsRequested > self.maxEvents):
343 >            eventsRemaining = self.maxEvents
344 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
345 >        # If user requested less events than are in the dataset
346          else:
347 <            if self.total_number_of_events>self.maxEvents:
336 <                common.logger.message("Asked "+str(self.total_number_of_events)+" but only "+str(self.maxEvents)+" available.")
337 <                self.total_number_of_events=self.maxEvents
338 <            pass
347 >            eventsRemaining = totalEventsRequested
348  
349 <        ## TODO: SL need to have (from DBS) a detailed list of how many events per each file
350 <        n_tot_files = (len(self.files[0]))
351 <        ## SL: this is wrong if the files have different number of events
343 <        evPerFile = int(self.maxEvents)/n_tot_files
344 <
345 <        common.logger.debug(5,'Events per File '+str(evPerFile))
346 <
347 <        ## compute job splitting parameters: filesPerJob, eventsPerJob and theNumberOfJobs
348 <        if self.selectFilesPerJob:
349 <            ## user define files per event.
350 <            filesPerJob = self.filesPerJob
351 <            eventsPerJob = filesPerJob*evPerFile
352 <            theNumberOfJobs = int(self.total_number_of_events*1./eventsPerJob)
353 <            check = int(self.total_number_of_events) - (theNumberOfJobs*eventsPerJob)
354 <            if check > 0:
355 <                theNumberOfJobs +=1
356 <                filesLastJob = int(check*1./evPerFile+0.5)
357 <                common.logger.message('Warning: last job will be created with '+str(check)+' files')
358 <            else:
359 <                filesLastJob = filesPerJob
349 >        # If user requested more events per job than are in the dataset
350 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
351 >            eventsPerJobRequested = self.maxEvents
352  
353 <        elif self.selectNumberOfJobs:
354 <            ## User select the number of jobs: last might be bigger to match request of events
363 <            theNumberOfJobs =  self.theNumberOfJobs
364 <
365 <            eventsPerJob = self.total_number_of_events/theNumberOfJobs
366 <            filesPerJob = int(eventsPerJob/evPerFile)
367 <            if (filesPerJob==0) : filesPerJob=1
368 <            check = int(self.total_number_of_events) - (int(theNumberOfJobs)*filesPerJob*evPerFile)
369 <            if not check == 0:
370 <                if check<0:
371 <                    missingFiles = int(check/evPerFile)
372 <                    additionalJobs = int(missingFiles/filesPerJob)
373 <                    #print missingFiles, additionalJobs
374 <                    theNumberOfJobs+=additionalJobs
375 <                    common.logger.message('Warning: will create only '+str(theNumberOfJobs)+' jobs')
376 <                    check = int(self.total_number_of_events) - (int(theNumberOfJobs)*filesPerJob*evPerFile)
377 <                    
378 <                if check >0 :
379 <                    filesLastJob = filesPerJob+int(check*1./evPerFile+0.5)
380 <                    common.logger.message('Warning: last job will be created with '+str(filesLastJob*evPerFile)+' events')
381 <                else:
382 <                    filesLastJob = filesPerJob
383 <            else:
384 <                filesLastJob = filesPerJob
385 <        elif self.selectEventsPerJob:
386 <            # SL case if asked events per job
387 <            ## estimate the number of files per job to match the user requirement
388 <            filesPerJob = int(float(self.eventsPerJob)/float(evPerFile))
389 <            if filesPerJob==0: filesPerJob=1
390 <            common.logger.debug(5,"filesPerJob "+str(filesPerJob))
391 <            if (filesPerJob==0): filesPerJob=1
392 <            eventsPerJob=filesPerJob*evPerFile
393 <            theNumberOfJobs = int(self.total_number_of_events)/int(eventsPerJob)
394 <            check = int(self.total_number_of_events) - (int(theNumberOfJobs)*eventsPerJob)
395 <            if not check == 0:
396 <                missingFiles = int(check/evPerFile)
397 <                additionalJobs = int(missingFiles/filesPerJob)
398 <                if ( additionalJobs>0) : theNumberOfJobs+=additionalJobs
399 <                check = int(self.total_number_of_events) - (int(theNumberOfJobs)*eventsPerJob)
400 <                if not check == 0:
401 <                    if (check <0 ):
402 <                        filesLastJob = filesPerJob+int(check*1./evPerFile-0.5)
403 <                    else:
404 <                        theNumberOfJobs+=1
405 <                        filesLastJob = int(check*1./evPerFile+0.5)
353 >        # For user info at end
354 >        totalEventCount = 0
355  
356 <                    common.logger.message('Warning: last job will be created with '+str(filesLastJob*evPerFile)+' events')
357 <                else:
409 <                    filesLastJob = filesPerJob
410 <            else:
411 <                filesLastJob = filesPerJob
412 <        
413 <        self.total_number_of_jobs = theNumberOfJobs
356 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
357 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
358  
359 <        totalEventsToBeUsed=theNumberOfJobs*filesPerJob*evPerFile
360 <        if not check == 0:
417 <        #    print (theNumberOfJobs-1)*filesPerJob*evPerFile,filesLastJob*evPerFile
418 <            totalEventsToBeUsed=(theNumberOfJobs-1)*filesPerJob*evPerFile+filesLastJob*evPerFile
359 >        if (self.selectNumberOfJobs):
360 >            common.logger.message("May not create the exact number_of_jobs requested.")
361  
362 <        common.logger.message(str(self.total_number_of_jobs)+' jobs will be created, each for '+str(filesPerJob*evPerFile)+' events, for a total of '+str(totalEventsToBeUsed)+' events')
362 >        if ( self.ncjobs == 'all' ) :
363 >            totalNumberOfJobs = 999999999
364 >        else :
365 >            totalNumberOfJobs = self.ncjobs
366 >            
367  
368 <        totalFilesToBeUsed=filesPerJob*(theNumberOfJobs-1)+filesLastJob
368 >        blocks = blockSites.keys()
369 >        blockCount = 0
370 >        # Backup variable in case self.maxEvents counted events in a non-included block
371 >        numBlocksInDataset = len(blocks)
372  
373 <        ## set job arguments (files)
373 >        jobCount = 0
374          list_of_lists = []
375 <        lastFile=0
376 <        for i in range(0, int(totalFilesToBeUsed), filesPerJob)[:-1]:
377 <            parString = "\\{"
378 <            
379 <            lastFile=i+filesPerJob
380 <            params = self.files[0][i: lastFile]
432 <            for i in range(len(params) - 1):
433 <                parString += '\\\"' + params[i] + '\\\"\,'
375 >
376 >        # ---- Iterate over the blocks in the dataset until ---- #
377 >        # ---- we've met the requested total # of events    ---- #
378 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
379 >            block = blocks[blockCount]
380 >            blockCount += 1
381              
435            parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
436            list_of_lists.append([parString])
437            pass
382  
383 <        ## last job
384 <        parString = "\\{"
385 <        
386 <        params = self.files[0][lastFile: lastFile+filesLastJob]
387 <        for i in range(len(params) - 1):
388 <            parString += '\\\"' + params[i] + '\\\"\,'
383 >            numEventsInBlock = self.eventsbyblock[block]
384 >            common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
385 >            
386 >            files = self.filesbyblock[block]
387 >            numFilesInBlock = len(files)
388 >            if (numFilesInBlock <= 0):
389 >                continue
390 >            fileCount = 0
391 >
392 >            # ---- New block => New job ---- #
393 >            parString = "\\{"
394 >            # counter for number of events in files currently worked on
395 >            filesEventCount = 0
396 >            # flag if next while loop should touch new file
397 >            newFile = 1
398 >            # job event counter
399 >            jobSkipEventCount = 0
400 >            
401 >            # ---- Iterate over the files in the block until we've met the requested ---- #
402 >            # ---- total # of events or we've gone over all the files in this block  ---- #
403 >            while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
404 >                file = files[fileCount]
405 >                if newFile :
406 >                    try:
407 >                        numEventsInFile = self.eventsbyfile[file]
408 >                        common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
409 >                        # increase filesEventCount
410 >                        filesEventCount += numEventsInFile
411 >                        # Add file to current job
412 >                        parString += '\\\"' + file + '\\\"\,'
413 >                        newFile = 0
414 >                    except KeyError:
415 >                        common.logger.message("File "+str(file)+" has unknown number of events: skipping")
416 >                        
417 >
418 >                # if less events in file remain than eventsPerJobRequested
419 >                if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) :
420 >                    # if last file in block
421 >                    if ( fileCount == numFilesInBlock-1 ) :
422 >                        # end job using last file, use remaining events in block
423 >                        # close job and touch new file
424 >                        fullString = parString[:-2]
425 >                        fullString += '\\}'
426 >                        list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
427 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
428 >                        self.jobDestination.append(blockSites[block])
429 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
430 >                        # reset counter
431 >                        jobCount = jobCount + 1
432 >                        totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
433 >                        eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
434 >                        jobSkipEventCount = 0
435 >                        # reset file
436 >                        parString = "\\{"
437 >                        filesEventCount = 0
438 >                        newFile = 1
439 >                        fileCount += 1
440 >                    else :
441 >                        # go to next file
442 >                        newFile = 1
443 >                        fileCount += 1
444 >                # if events in file equal to eventsPerJobRequested
445 >                elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
446 >                    # close job and touch new file
447 >                    fullString = parString[:-2]
448 >                    fullString += '\\}'
449 >                    list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
450 >                    common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
451 >                    self.jobDestination.append(blockSites[block])
452 >                    common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
453 >                    # reset counter
454 >                    jobCount = jobCount + 1
455 >                    totalEventCount = totalEventCount + eventsPerJobRequested
456 >                    eventsRemaining = eventsRemaining - eventsPerJobRequested
457 >                    jobSkipEventCount = 0
458 >                    # reset file
459 >                    parString = "\\{"
460 >                    filesEventCount = 0
461 >                    newFile = 1
462 >                    fileCount += 1
463 >                    
464 >                # if more events in file remain than eventsPerJobRequested
465 >                else :
466 >                    # close job but don't touch new file
467 >                    fullString = parString[:-2]
468 >                    fullString += '\\}'
469 >                    list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
470 >                    common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
471 >                    self.jobDestination.append(blockSites[block])
472 >                    common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
473 >                    # increase counter
474 >                    jobCount = jobCount + 1
475 >                    totalEventCount = totalEventCount + eventsPerJobRequested
476 >                    eventsRemaining = eventsRemaining - eventsPerJobRequested
477 >                    # calculate skip events for last file
478 >                    # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
479 >                    jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
480 >                    # remove all but the last file
481 >                    filesEventCount = self.eventsbyfile[file]
482 >                    parString = "\\{"
483 >                    parString += '\\\"' + file + '\\\"\,'
484 >                pass # END if
485 >            pass # END while (iterate over files in the block)
486 >        pass # END while (iterate over blocks in the dataset)
487 >        self.ncjobs = self.total_number_of_jobs = jobCount
488 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
489 >            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
490 >        common.logger.message("\n"+str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
491          
446        parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
447        list_of_lists.append([parString])
448        pass
449
492          self.list_of_args = list_of_lists
451        # print self.list_of_args[0]
493          return
494  
495      def jobSplittingNoInput(self):
# Line 477 | Line 518 | class Cmssw(JobType):
518  
519          common.logger.debug(5,'Check  '+str(check))
520  
521 <        common.logger.message(str(self.total_number_of_jobs)+' jobs will be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
521 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
522          if check > 0:
523 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but will do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
483 <
523 >            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
524  
525          # argument is seed number.$i
526          self.list_of_args = []
527          for i in range(self.total_number_of_jobs):
528 +            ## Since there is no input, any site is good
529 +           # self.jobDestination.append(["Any"])
530 +            self.jobDestination.append([""]) #must be empty to write correctly the xml
531              if (self.sourceSeed):
532                  if (self.sourceSeedVtx):
533                      ## pythia + vtx random seed
# Line 502 | Line 545 | class Cmssw(JobType):
545  
546          return
547  
548 +
549 +    def jobSplittingForScript(self):#CarlosDaniele
550 +        """
551 +        Perform job splitting based on number of job
552 +        """
553 +        common.logger.debug(5,'Splitting per job')
554 +        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
555 +
556 +        self.total_number_of_jobs = self.theNumberOfJobs
557 +
558 +        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
559 +
560 +        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
561 +
562 +        # argument is seed number.$i
563 +        self.list_of_args = []
564 +        for i in range(self.total_number_of_jobs):
565 +            ## Since there is no input, any site is good
566 +           # self.jobDestination.append(["Any"])
567 +            self.jobDestination.append([""])
568 +            ## no random seed
569 +            self.list_of_args.append([str(i)])
570 +        return
571 +
572      def split(self, jobParams):
573  
574          common.jobDB.load()
# Line 517 | Line 584 | class Cmssw(JobType):
584              # print str(arglist[job])
585              # print jobParams[job]
586              common.jobDB.setArguments(job, jobParams[job])
587 +            common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job]))
588 +            common.jobDB.setDestination(job, self.jobDestination[job])
589  
590          common.jobDB.save()
591          return
# Line 531 | Line 600 | class Cmssw(JobType):
600          # Fabio
601          return self.total_number_of_jobs
602  
534    def checkBlackList(self, allSites):
535        if len(self.reCEBlackList)==0: return allSites
536        sites = []
537        for site in allSites:
538            common.logger.debug(10,'Site '+site)
539            good=1
540            for re in self.reCEBlackList:
541                if re.search(site):
542                    common.logger.message('CE in black list, skipping site '+site)
543                    good=0
544                pass
545            if good: sites.append(site)
546        if len(sites) == 0:
547            common.logger.debug(3,"No sites found after BlackList")
548        return sites
549
550    def checkWhiteList(self, allSites):
551
552        if len(self.reCEWhiteList)==0: return allSites
553        sites = []
554        for site in allSites:
555            good=0
556            for re in self.reCEWhiteList:
557                if re.search(site):
558                    common.logger.debug(5,'CE in white list, adding site '+site)
559                    good=1
560                if not good: continue
561                sites.append(site)
562        if len(sites) == 0:
563            common.logger.message("No sites found after WhiteList\n")
564        else:
565            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
566        return sites
567
603      def getTarBall(self, exe):
604          """
605          Return the TarBall with lib and exe
# Line 630 | Line 665 | class Cmssw(JobType):
665          dataDir = 'src/Data/'
666          if os.path.isdir(swArea+'/'+dataDir):
667              filesToBeTarred.append(dataDir)
668 <
668 >
669 >        ## copy ProdAgent dir to swArea
670 >        cmd = '\cp -rf ' + os.environ['CRABDIR'] + '/ProdAgentApi ' + swArea
671 >        cmd_out = runCommand(cmd)
672 >        if cmd_out != '':
673 >            common.logger.message('ProdAgentApi directory could not be copied to local CMSSW project directory.')
674 >            common.logger.message('No FrameworkJobreport parsing is possible on the WorkerNode.')
675 >
676 >        ## Now check if the Data dir is present
677 >        paDir = 'ProdAgentApi'
678 >        if os.path.isdir(swArea+'/'+paDir):
679 >            filesToBeTarred.append(paDir)
680 >
681          ## Create the tar-ball
682          if len(filesToBeTarred)>0:
683              cwd = os.getcwd()
# Line 660 | Line 707 | class Cmssw(JobType):
707          txt += 'if [ $middleware == LCG ]; then \n'
708          txt += self.wsSetupCMSLCGEnvironment_()
709          txt += 'elif [ $middleware == OSG ]; then\n'
710 <        txt += '    time=`date -u +"%s"`\n'
711 <        txt += '    WORKING_DIR=$OSG_WN_TMP/cms_$time\n'
665 <        txt += '    echo "Creating working directory: $WORKING_DIR"\n'
666 <        txt += '    /bin/mkdir -p $WORKING_DIR\n'
710 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
711 >        txt += '    echo "Created working directory: $WORKING_DIR"\n'
712          txt += '    if [ ! -d $WORKING_DIR ] ;then\n'
713          txt += '        echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
714          txt += '        echo "JOB_EXIT_STATUS = 10016"\n'
# Line 751 | Line 796 | class Cmssw(JobType):
796  
797          # Prepare job-specific part
798          job = common.job_list[nj]
799 <        pset = os.path.basename(job.configFilename())
800 <        txt += '\n'
801 <        if (self.datasetPath): # standard job
802 <            #txt += 'InputFiles=$2\n'
803 <            txt += 'InputFiles=${args[1]}\n'
804 <            txt += 'echo "Inputfiles:<$InputFiles>"\n'
805 <            txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset.cfg\n'
806 <        else:  # pythia like job
807 <            if (self.sourceSeed):
808 <                txt += 'Seed=$2\n'
809 <                txt += 'echo "Seed: <$Seed>"\n'
810 <                txt += 'sed "s#\<INPUT\>#$Seed#" $RUNTIME_AREA/'+pset+' > tmp.cfg\n'
811 <                if (self.sourceSeedVtx):
812 <                    txt += 'VtxSeed=$3\n'
813 <                    txt += 'echo "VtxSeed: <$VtxSeed>"\n'
814 <                    txt += 'sed "s#INPUTVTX#$VtxSeed#" tmp.cfg > pset.cfg\n'
799 >        if self.pset != None: #CarlosDaniele
800 >            pset = os.path.basename(job.configFilename())
801 >            txt += '\n'
802 >            if (self.datasetPath): # standard job
803 >                #txt += 'InputFiles=$2\n'
804 >                txt += 'InputFiles=${args[1]}\n'
805 >                txt += 'MaxEvents=${args[2]}\n'
806 >                txt += 'SkipEvents=${args[3]}\n'
807 >                txt += 'echo "Inputfiles:<$InputFiles>"\n'
808 >                txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset_tmp_1.cfg\n'
809 >                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
810 >                txt += 'sed "s#INPUTMAXEVENTS#$MaxEvents#" pset_tmp_1.cfg > pset_tmp_2.cfg\n'
811 >                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
812 >                txt += 'sed "s#INPUTSKIPEVENTS#$SkipEvents#" pset_tmp_2.cfg > pset.cfg\n'
813 >            else:  # pythia like job
814 >                if (self.sourceSeed):
815 > #                    txt += 'Seed=$2\n'
816 >                    txt += 'Seed=${args[1]}\n'
817 >                    txt += 'echo "Seed: <$Seed>"\n'
818 >                    txt += 'sed "s#\<INPUT\>#$Seed#" $RUNTIME_AREA/'+pset+' > tmp.cfg\n'
819 >                    if (self.sourceSeedVtx):
820 > #                        txt += 'VtxSeed=$3\n'
821 >                        txt += 'VtxSeed=${args[2]}\n'
822 >                        txt += 'echo "VtxSeed: <$VtxSeed>"\n'
823 >                        txt += 'sed "s#INPUTVTX#$VtxSeed#" tmp.cfg > pset.cfg\n'
824 >                    else:
825 >                        txt += 'mv tmp.cfg pset.cfg\n'
826                  else:
827 <                    txt += 'mv tmp.cfg pset.cfg\n'
828 <            else:
773 <                txt += '# Copy untouched pset\n'
774 <                txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
827 >                    txt += '# Copy untouched pset\n'
828 >                    txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
829  
830  
831          if len(self.additional_inbox_files) > 0:
# Line 783 | Line 837 | class Cmssw(JobType):
837                  txt += 'fi\n'
838              pass
839  
840 <        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
841 <
842 <        txt += '\n'
843 <        txt += 'echo "***** cat pset.cfg *********"\n'
844 <        txt += 'cat pset.cfg\n'
845 <        txt += 'echo "****** end pset.cfg ********"\n'
846 <        txt += '\n'
847 <        # txt += 'echo "***** cat pset1.cfg *********"\n'
848 <        # txt += 'cat pset1.cfg\n'
849 <        # txt += 'echo "****** end pset1.cfg ********"\n'
840 >        if self.pset != None: #CarlosDaniele
841 >            txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
842 >        
843 >            txt += '\n'
844 >            txt += 'echo "***** cat pset.cfg *********"\n'
845 >            txt += 'cat pset.cfg\n'
846 >            txt += 'echo "****** end pset.cfg ********"\n'
847 >            txt += '\n'
848 >            # txt += 'echo "***** cat pset1.cfg *********"\n'
849 >            # txt += 'cat pset1.cfg\n'
850 >            # txt += 'echo "****** end pset1.cfg ********"\n'
851          return txt
852  
853      def wsBuildExe(self, nj):
# Line 830 | Line 885 | class Cmssw(JobType):
885              txt += 'else \n'
886              txt += '   echo "Successful untar" \n'
887              txt += 'fi \n'
888 +            txt += '\n'
889 +            txt += 'echo "Include ProdAgentApi in PYTHONPATH"\n'
890 +            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
891 +            txt += '   export PYTHONPATH=ProdAgentApi\n'
892 +            txt += 'else\n'
893 +            txt += '   export PYTHONPATH=ProdAgentApi:${PYTHONPATH}\n'
894 +            txt += 'fi\n'
895 +            txt += '\n'
896 +
897              pass
898          
899          return txt
# Line 841 | Line 905 | class Cmssw(JobType):
905          """
906          
907      def executableName(self):
908 <        return self.executable
908 >        if self.pset == None: #CarlosDaniele
909 >            return "sh "
910 >        else:
911 >            return self.executable
912  
913      def executableArgs(self):
914 <        return " -p pset.cfg"
914 >        if self.pset == None:#CarlosDaniele
915 >            return   self.scriptExe + " $NJob"
916 >        else:
917 >            return " -p pset.cfg"
918  
919      def inputSandbox(self, nj):
920          """
# Line 857 | Line 927 | class Cmssw(JobType):
927          if os.path.isfile(self.tgzNameWithPath):
928              inp_box.append(self.tgzNameWithPath)
929          ## config
930 <        inp_box.append(common.job_list[nj].configFilename())
930 >        if not self.pset is None: #CarlosDaniele
931 >            inp_box.append(common.job_list[nj].configFilename())
932          ## additional input files
933          #for file in self.additional_inbox_files:
934          #    inp_box.append(common.work_space.cwdDir()+file)
# Line 951 | Line 1022 | class Cmssw(JobType):
1022          # add "_txt"
1023          if len(p)>1:
1024            ext = p[len(p)-1]
954          #result = name + '_' + str(txt) + "." + ext
1025            result = name + '_' + txt + "." + ext
1026          else:
957          #result = name + '_' + str(txt)
1027            result = name + '_' + txt
1028          
1029          return result
# Line 964 | Line 1033 | class Cmssw(JobType):
1033          return job requirements to add to jdl files
1034          """
1035          req = ''
1036 <        if common.analisys_common_info['sw_version']:
1036 >        if self.version:
1037              req='Member("VO-cms-' + \
1038 <                 common.analisys_common_info['sw_version'] + \
1038 >                 self.version + \
1039                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1040 <        if common.analisys_common_info['sites']:
1041 <            if len(common.analisys_common_info['sites'])>0:
1042 <                req = req + ' && ('
974 <                for i in range(len(common.analisys_common_info['sites'])):
975 <                    req = req + 'other.GlueCEInfoHostName == "' \
976 <                         + common.analisys_common_info['sites'][i] + '"'
977 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
978 <                        req = req + ' || '
979 <            req = req + ')'
980 <        #print "req = ", req
1040 >
1041 >        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1042 >
1043          return req
1044  
1045      def configFilename(self):
# Line 995 | Line 1057 | class Cmssw(JobType):
1057          txt += '   if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
1058          txt += '      # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
1059          txt += '       source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1060 <        txt += '   elif [ -f $OSG_APP/cmssoft/cmsset_default.sh ] ;then\n'
1061 <        txt += '      # Use $OSG_APP/cmssoft/cmsset_default.sh to setup cms software\n'
1062 <        txt += '       source $OSG_APP/cmssoft/cmsset_default.sh '+self.version+'\n'
1060 >        txt += '   elif [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1061 >        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1062 >        txt += '       source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1063          txt += '   else\n'
1064 <        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1064 >        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1065          txt += '       echo "JOB_EXIT_STATUS = 10020"\n'
1066          txt += '       echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1067          txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
# Line 1012 | Line 1074 | class Cmssw(JobType):
1074          txt += '       cd $RUNTIME_AREA\n'
1075          txt += '       /bin/rm -rf $WORKING_DIR\n'
1076          txt += '       if [ -d $WORKING_DIR ] ;then\n'
1077 <        txt += '            echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1077 >        txt += '            echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1078          txt += '            echo "JOB_EXIT_STATUS = 10017"\n'
1079          txt += '            echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1080          txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
# Line 1105 | Line 1167 | class Cmssw(JobType):
1167          
1168      def getTaskid(self):
1169          return self._taskId
1170 +
1171 + #######################################################################
1172 +    def uniquelist(self, old):
1173 +        """
1174 +        remove duplicates from a list
1175 +        """
1176 +        nd={}
1177 +        for e in old:
1178 +            nd[e]=0
1179 +        return nd.keys()

Diff Legend

Removed lines
+ Added lines
< Changed lines
> Changed lines