
Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.3 by gutsche, Sun May 28 02:27:52 2006 UTC vs.
Revision 1.195 by slacapra, Wed May 28 15:42:01 2008 UTC

# Line 2 | Line 2 | from JobType import JobType
2   from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5 + from BlackWhiteListParser import BlackWhiteListParser
6   import common
6 import PsetManipulator  
7
8 import DBSInfo_EDM
9 #from DataDiscovery_EDM import DataDiscovery_EDM
10 import DataDiscovery_EDM
11 #from DataLocation_EDM import DataLocation_EDM
12 import DataLocation_EDM
7   import Scram
8 + from LFNBaseName import *
9  
10 < import os, string, re
10 > import os, string, glob
11  
12   class Cmssw(JobType):
13 <    def __init__(self, cfg_params):
13 >    def __init__(self, cfg_params, ncjobs):
14          JobType.__init__(self, 'CMSSW')
15          common.logger.debug(3,'CMSSW::__init__')
16  
17 <        self.analisys_common_info = {}
18 <        # Marco.
17 >        self.argsList = []
18 >
19          self._params = {}
20          self.cfg_params = cfg_params
21 +        # init BlackWhiteListParser
22 +        self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
23 +
24 +        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
25 +
26 +        # number of jobs requested to be created, limit obj splitting
27 +        self.ncjobs = ncjobs
28  
29          log = common.logger
30 <        
30 >
31          self.scram = Scram.Scram(cfg_params)
30        scramArea = ''
32          self.additional_inbox_files = []
33          self.scriptExe = ''
34          self.executable = ''
35 +        self.executable_arch = self.scram.getArch()
36          self.tgz_name = 'default.tgz'
37 +        self.scriptName = 'CMSSW.sh'
38 +        self.pset = ''
39 +        self.datasetPath = ''
40  
41 +        # set FJR file name
42 +        self.fjrFileName = 'crab_fjr.xml'
43  
44          self.version = self.scram.getSWVersion()
45 <        common.analisys_common_info['sw_version'] = self.version
46 <        ### FEDE
47 <        common.analisys_common_info['copy_input_data'] = 0
48 <        common.analisys_common_info['events_management'] = 1
45 >        version_array = self.version.split('_')
46 >        self.CMSSW_major = 0
47 >        self.CMSSW_minor = 0
48 >        self.CMSSW_patch = 0
49 >        try:
50 >            self.CMSSW_major = int(version_array[1])
51 >            self.CMSSW_minor = int(version_array[2])
52 >            self.CMSSW_patch = int(version_array[3])
53 >        except:
54 >            msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
55 >            raise CrabException(msg)
56  
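
The new revision parses the CMSSW release string into numeric major/minor/patch components so later code can branch on the release. A minimal standalone sketch of that parsing, assuming the usual CMSSW_X_Y_Z naming (the helper name is illustrative, not part of CRAB):

    def parse_cmssw_version(version):
        # Split a release string like 'CMSSW_2_1_8' into (major, minor, patch) integers.
        parts = version.split('_')
        try:
            return int(parts[1]), int(parts[2]), int(parts[3])
        except (IndexError, ValueError):
            raise ValueError("Cannot parse CMSSW version string: " + version)

    # parse_cmssw_version('CMSSW_2_1_8')  ->  (2, 1, 8)
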
57          ### collect Data cards
58 <        try:
59 <         #   self.owner = cfg_params['CMSSW.owner']
60 <         #   log.debug(6, "CMSSW::CMSSW(): owner = "+self.owner)
47 <         #   self.dataset = cfg_params['CMSSW.dataset']
48 <            self.datasetPath = cfg_params['CMSSW.datasetpath']
49 <            log.debug(6, "CMSSW::CMSSW(): datasetPath = "+self.datasetPath)
50 <        except KeyError:
51 <        #    msg = "Error: owner and/or dataset not defined "
52 <            msg = "Error: datasetpath not defined "  
58 >
59 >        if not cfg_params.has_key('CMSSW.datasetpath'):
60 >            msg = "Error: datasetpath not defined "
61              raise CrabException(msg)
62 +        tmp =  cfg_params['CMSSW.datasetpath']
63 +        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
64 +        if string.lower(tmp)=='none':
65 +            self.datasetPath = None
66 +            self.selectNoInput = 1
67 +        else:
68 +            self.datasetPath = tmp
69 +            self.selectNoInput = 0
70 +
71          self.dataTiers = []
72 < #       try:
73 < #           tmpDataTiers = string.split(cfg_params['CMSSW.data_tier'],',')
57 < #           for tmp in tmpDataTiers:
58 < #               tmp=string.strip(tmp)
59 < #               self.dataTiers.append(tmp)
60 < #               pass
61 < #           pass
62 < #       except KeyError:
63 < #           pass
64 < #       log.debug(6, "Cmssw::Cmssw(): dataTiers = "+str(self.dataTiers))
72 >
73 >        self.debug_pset = cfg_params.get('USER.debug_pset',False)
74  
75          ## now the application
76 <        try:
77 <            self.executable = cfg_params['CMSSW.executable']
69 <            log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
70 <            msg = "Default executable cmsRun overridden. Switch to " + self.executable
71 <            log.debug(3,msg)
72 <        except KeyError:
73 <            self.executable = 'cmsRun'
74 <            msg = "User executable not defined. Use cmsRun"
75 <            log.debug(3,msg)
76 <            pass
76 >        self.executable = cfg_params.get('CMSSW.executable','cmsRun')
77 >        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
78  
79 <        try:
80 <            self.pset = cfg_params['CMSSW.pset']
81 <            log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
79 >        if not cfg_params.has_key('CMSSW.pset'):
80 >            raise CrabException("PSet file missing. Cannot run cmsRun ")
81 >        self.pset = cfg_params['CMSSW.pset']
82 >        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
83 >        if self.pset.lower() != 'none' :
84              if (not os.path.exists(self.pset)):
85                  raise CrabException("User defined PSet file "+self.pset+" does not exist")
86 <        except KeyError:
87 <            raise CrabException("PSet file missing. Cannot run cmsRun ")
86 >        else:
87 >            self.pset = None
88  
89          # output files
90 <        try:
91 <            self.output_file = []
90 >        ## stuff which must be returned always via sandbox
91 >        self.output_file_sandbox = []
92  
93 <            tmp = cfg_params['CMSSW.output_file']
94 <            if tmp != '':
92 <                tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
93 <                log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
94 <                for tmp in tmpOutFiles:
95 <                    tmp=string.strip(tmp)
96 <                    self.output_file.append(tmp)
97 <                    pass
98 <            else:
99 <                log.message("No output file defined: only stdout/err will be available")
100 <                pass
101 <            pass
102 <        except KeyError:
103 <            log.message("No output file defined: only stdout/err will be available")
104 <            pass
93 >        # add fjr report by default via sandbox
94 >        self.output_file_sandbox.append(self.fjrFileName)
95  
96 <        # script_exe file as additional file in inputSandbox
97 <        try:
98 <           self.scriptExe = cfg_params['USER.script_exe']
99 <           self.additional_inbox_files.append(self.scriptExe)
100 <        except KeyError:
101 <           pass
102 <        if self.scriptExe != '':
113 <           if os.path.isfile(self.scriptExe):
114 <              pass
115 <           else:
116 <              log.message("WARNING. file "+self.scriptExe+" not found")
117 <              sys.exit()
118 <                  
119 <        ## additional input files
120 <        try:
121 <            tmpAddFiles = string.split(cfg_params['CMSSW.additional_input_files'],',')
122 <            for tmp in tmpAddFiles:
123 <                if not os.path.exists(tmp):
124 <                    raise CrabException("Additional input file not found: "+tmp)
96 >        # other output files to be returned via sandbox or copied to SE
97 >        self.output_file = []
98 >        tmp = cfg_params.get('CMSSW.output_file',None)
99 >        if tmp :
100 >            tmpOutFiles = string.split(tmp,',')
101 >            log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
102 >            for tmp in tmpOutFiles:
103                  tmp=string.strip(tmp)
104 <                self.additional_inbox_files.append(tmp)
104 >                self.output_file.append(tmp)
105                  pass
106 <            pass
107 <        except KeyError:
108 <            pass
106 >        else:
107 >            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
108 >        pass
109  
110 <        try:
111 <            self.filesPerJob = int(cfg_params['CMSSW.files_per_jobs']) #Daniele
112 <        except KeyError:
113 <            self.filesPerJob = 1
110 >        # script_exe file as additional file in inputSandbox
111 >        self.scriptExe = cfg_params.get('USER.script_exe',None)
112 >        if self.scriptExe :
113 >            if not os.path.isfile(self.scriptExe):
114 >                msg ="ERROR. file "+self.scriptExe+" not found"
115 >                raise CrabException(msg)
116 >            self.additional_inbox_files.append(string.strip(self.scriptExe))
117  
118 <        ## Max event   will be total_number_of_events ???  Daniele
119 <        try:
139 <            self.maxEv = cfg_params['CMSSW.event_per_job']
140 <        except KeyError:
141 <            self.maxEv = "-1"
142 <        ##  
143 <        try:
144 <            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
145 <        except KeyError:
146 <            msg = 'Must define total_number_of_events'
118 >        if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
119 >            msg ="Error. script_exe  not defined"
120              raise CrabException(msg)
121 <        
122 <        CEBlackList = []
123 <        try:
124 <            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
125 <            for tmp in tmpBad:
126 <                tmp=string.strip(tmp)
127 <                CEBlackList.append(tmp)
128 <        except KeyError:
121 >
122 >        ## additional input files
123 >        if cfg_params.has_key('USER.additional_input_files'):
124 >            tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
125 >            for tmp in tmpAddFiles:
126 >                tmp = string.strip(tmp)
127 >                dirname = ''
128 >                if not tmp[0]=="/": dirname = "."
129 >                files = []
130 >                if string.find(tmp,"*")>-1:
131 >                    files = glob.glob(os.path.join(dirname, tmp))
132 >                    if len(files)==0:
133 >                        raise CrabException("No additional input file found with this pattern: "+tmp)
134 >                else:
135 >                    files.append(tmp)
136 >                for file in files:
137 >                    if not os.path.exists(file):
138 >                        raise CrabException("Additional input file not found: "+file)
139 >                    pass
140 >                    self.additional_inbox_files.append(string.strip(file))
141 >                pass
142              pass
143 +            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
144 +        pass
145  
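
The additional_input_files handling above splits the comma-separated value, expands '*' patterns with glob relative to the current directory for non-absolute entries, and fails if a pattern matches nothing. A hedged sketch of that expansion (hypothetical helper, not the exact CRAB behaviour):

    import glob, os

    def expand_additional_inputs(spec):
        # Expand a comma-separated USER.additional_input_files value, honouring '*' patterns.
        resolved = []
        for item in [s.strip() for s in spec.split(',') if s.strip()]:
            pattern = item if item.startswith('/') else os.path.join('.', item)
            matches = glob.glob(pattern) if '*' in item else [item]
            if not matches:
                raise ValueError("No additional input file found with this pattern: " + item)
            for f in matches:
                if not os.path.exists(f):
                    raise ValueError("Additional input file not found: " + f)
                resolved.append(f)
        return resolved
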
146 <        self.reCEBlackList=[]
147 <        for bad in CEBlackList:
148 <            self.reCEBlackList.append(re.compile( bad ))
146 >        ## Events per job
147 >        if cfg_params.has_key('CMSSW.events_per_job'):
148 >            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
149 >            self.selectEventsPerJob = 1
150 >        else:
151 >            self.eventsPerJob = -1
152 >            self.selectEventsPerJob = 0
153  
154 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
154 >        ## number of jobs
155 >        if cfg_params.has_key('CMSSW.number_of_jobs'):
156 >            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
157 >            self.selectNumberOfJobs = 1
158 >        else:
159 >            self.theNumberOfJobs = 0
160 >            self.selectNumberOfJobs = 0
161  
162 <        CEWhiteList = []
163 <        try:
164 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
165 <            for tmp in tmpGood:
166 <                tmp=string.strip(tmp)
167 <                CEWhiteList.append(tmp)
168 <        except KeyError:
169 <            pass
162 >        if cfg_params.has_key('CMSSW.total_number_of_events'):
163 >            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
164 >            self.selectTotalNumberEvents = 1
165 >            if self.selectNumberOfJobs  == 1:
166 >                if int(self.total_number_of_events) < int(self.theNumberOfJobs):
167 >                    msg = 'Must specify at least one event per job. total_number_of_events > number_of_jobs '
168 >                    raise CrabException(msg)
169 >        else:
170 >            self.total_number_of_events = 0
171 >            self.selectTotalNumberEvents = 0
172  
173 <        #print 'CEWhiteList: ',CEWhiteList
174 <        self.reCEWhiteList=[]
175 <        for Good in CEWhiteList:
176 <            self.reCEWhiteList.append(re.compile( Good ))
173 >        if self.pset != None:
174 >             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
175 >                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
176 >                 raise CrabException(msg)
177 >        else:
178 >             if (self.selectNumberOfJobs == 0):
179 >                 msg = 'Must specify  number_of_jobs.'
180 >                 raise CrabException(msg)
181 >
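
For cmsRun jobs, exactly two of total_number_of_events, events_per_job and number_of_jobs must be set; the code counts the select* flags and rejects any other combination. The same rule, expressed as a small standalone check (illustrative names only):

    def check_splitting_params(total_events=None, events_per_job=None, number_of_jobs=None):
        # Exactly two of the three splitting parameters must be given, as enforced above.
        given = [p is not None for p in (total_events, events_per_job, number_of_jobs)]
        if sum(given) != 2:
            raise ValueError('Must define exactly two of total_number_of_events, '
                             'events_per_job, or number_of_jobs.')
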
182 >        ## New method of dealing with seeds
183 >        self.incrementSeeds = []
184 >        self.preserveSeeds = []
185 >        if cfg_params.has_key('CMSSW.preserve_seeds'):
186 >            tmpList = cfg_params['CMSSW.preserve_seeds'].split(',')
187 >            for tmp in tmpList:
188 >                tmp.strip()
189 >                self.preserveSeeds.append(tmp)
190 >        if cfg_params.has_key('CMSSW.increment_seeds'):
191 >            tmpList = cfg_params['CMSSW.increment_seeds'].split(',')
192 >            for tmp in tmpList:
193 >                tmp.strip()
194 >                self.incrementSeeds.append(tmp)
195 >
196 >        ## Old method of dealing with seeds
197 >        ## FUTURE: This is for old CMSSW and old CRAB. Can throw exceptions after a couple of CRAB releases and then
198 >        ## remove
199 >        self.sourceSeed = cfg_params.get('CMSSW.pythia_seed',None)
200 >        if self.sourceSeed:
201 >            print "pythia_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
202 >            self.incrementSeeds.append('sourceSeed')
203 >            self.incrementSeeds.append('theSource')
204 >
205 >        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
206 >        if self.sourceSeedVtx:
207 >            print "vtx_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
208 >            self.incrementSeeds.append('VtxSmeared')
209 >
210 >        self.sourceSeedG4 = cfg_params.get('CMSSW.g4_seed',None)
211 >        if self.sourceSeedG4:
212 >            print "g4_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
213 >            self.incrementSeeds.append('g4SimHits')
214 >
215 >        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
216 >        if self.sourceSeedMix:
217 >            print "mix_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
218 >            self.incrementSeeds.append('mix')
219 >
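
The deprecated seed parameters are each mapped onto one or more module names appended to increment_seeds. A compact sketch of that mapping under the same assumptions (hypothetical helper; the table only restates the pairs visible above):

    DEPRECATED_SEEDS = {             # old config key -> modules whose seeds get incremented
        'CMSSW.pythia_seed': ['sourceSeed', 'theSource'],
        'CMSSW.vtx_seed':    ['VtxSmeared'],
        'CMSSW.g4_seed':     ['g4SimHits'],
        'CMSSW.mix_seed':    ['mix'],
    }

    def collect_increment_seeds(cfg_params, increment_seeds):
        # Warn about each deprecated key and extend increment_seeds accordingly.
        for key, modules in DEPRECATED_SEEDS.items():
            if cfg_params.get(key) is not None:
                print key.split('.')[-1] + " is deprecated; use preserve_seeds or increment_seeds."
                increment_seeds.extend(modules)
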
220 >        self.firstRun = cfg_params.get('CMSSW.first_run',None)
221 >
222 >        if self.pset != None: #CarlosDaniele
223 >            import PsetManipulator as pp
224 >            PsetEdit = pp.PsetManipulator(self.pset) #Daniele Pset
225  
226 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
226 >        # Copy/return
227  
228 <        self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
228 >        self.copy_data = int(cfg_params.get('USER.copy_data',0))
229 >        self.return_data = int(cfg_params.get('USER.return_data',0))
230  
231          #DBSDLS-start
232 <        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
232 >        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
233          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
234          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
235 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
236          ## Perform the data location and discovery (based on DBS/DLS)
237 <        self.DataDiscoveryAndLocation(cfg_params)
238 <        #DBSDLS-end          
237 >        ## SL: Don't if NONE is specified as input (pythia use case)
238 >        blockSites = {}
239 >        if self.datasetPath:
240 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
241 >        #DBSDLS-end
242  
190        self.tgzNameWithPath = self.getTarBall(self.executable)
243  
244 <        self.jobSplitting()  #Daniele job Splitting
245 <        self.PsetEdit.maxEvent(self.maxEv) #Daniele  
246 <        self.PsetEdit.inputModule("INPUT") #Daniele  
247 <        self.PsetEdit.psetWriter(self.configFilename())
244 >        ## Select Splitting
245 >        if self.selectNoInput:
246 >            if self.pset == None:
247 >                self.jobSplittingForScript()
248 >            else:
249 >                self.jobSplittingNoInput()
250 >        else:
251 >            self.jobSplittingByBlocks(blockSites)
252 >
253 >        # modify Pset
254 >        if self.pset != None:
255 >            try:
256 >                # Add FrameworkJobReport to parameter-set, set max events.
257 >                # Reset later for data jobs by writeCFG which does all modifications
258 >                PsetEdit.addCrabFJR(self.fjrFileName) # FUTURE: Job report addition not needed by CMSSW>1.5
259 >                PsetEdit.maxEvent(self.eventsPerJob)
260 >                PsetEdit.psetWriter(self.configFilename())
261 >            except:
262 >                msg='Error while manipulating ParameterSet: exiting...'
263 >                raise CrabException(msg)
264 >        self.tgzNameWithPath = self.getTarBall(self.executable)
265  
266      def DataDiscoveryAndLocation(self, cfg_params):
267  
268 +        import DataDiscovery
269 +        import DataLocation
270          common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
271  
201        #datasetPath = "/"+self.owner+"/"+self.dataTiers[0]+"/"+self.dataset
202        
272          datasetPath=self.datasetPath
273  
205        ## TODO
206        dataTiersList = ""
207        dataTiers = dataTiersList.split(',')
208
274          ## Contact the DBS
275 +        common.logger.message("Contacting Data Discovery Services ...")
276          try:
277 <            self.pubdata=DataDiscovery_EDM.DataDiscovery_EDM(datasetPath, dataTiers, dataTiers)
277 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
278              self.pubdata.fetchDBSInfo()
279  
280 <        except DataDiscovery_EDM.NotExistingDatasetError, ex :
280 >        except DataDiscovery.NotExistingDatasetError, ex :
281              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
282              raise CrabException(msg)
283 <
218 <        except DataDiscovery_EDM.NoDataTierinProvenanceError, ex :
283 >        except DataDiscovery.NoDataTierinProvenanceError, ex :
284              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
285              raise CrabException(msg)
286 <        except DataDiscovery_EDM.DataDiscoveryError, ex:
287 <            msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
286 >        except DataDiscovery.DataDiscoveryError, ex:
287 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
288              raise CrabException(msg)
289  
290 <        ## get list of all required data in the form of dbs paths  (dbs path = /dataset/datatier/owner)
291 <        ## self.DBSPaths=self.pubdata.getDBSPaths()
292 <        common.logger.message("Required data are :"+self.datasetPath)
228 <
229 <        filesbyblock=self.pubdata.getFiles()
230 <        self.AllInputFiles=filesbyblock.values()
231 <        self.files = self.AllInputFiles        
232 <
233 <        ## TEMP
234 <    #    self.filesTmp = filesbyblock.values()
235 <    #    self.files = []
236 <    #    locPath='rfio:cmsbose2.bo.infn.it:/flatfiles/SE00/cms/fanfani/ProdTest/'
237 <    #    locPath=''
238 <    #    tmp = []
239 <    #    for file in self.filesTmp[0]:
240 <    #        tmp.append(locPath+file)
241 <    #    self.files.append(tmp)
242 <        ## END TEMP
290 >        self.filesbyblock=self.pubdata.getFiles()
291 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
292 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
293  
294          ## get max number of events
295 <        #common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
246 <        self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
247 <        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
295 >        self.maxEvents=self.pubdata.getMaxEvents()
296  
297          ## Contact the DLS and build a list of sites hosting the fileblocks
298          try:
299 <            dataloc=DataLocation_EDM.DataLocation_EDM(filesbyblock.keys(),cfg_params)
299 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
300              dataloc.fetchDLSInfo()
301 <        except DataLocation_EDM.DataLocationError , ex:
301 >        except DataLocation.DataLocationError , ex:
302              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
303              raise CrabException(msg)
256        
257        allsites=dataloc.getSites()
258        common.logger.debug(5,"sites are %s"%allsites)
259        sites=self.checkBlackList(allsites)
260        common.logger.debug(5,"sites are (after black list) %s"%sites)
261        sites=self.checkWhiteList(sites)
262        common.logger.debug(5,"sites are (after white list) %s"%sites)
304  
305 <        if len(sites)==0:
306 <            msg = 'No sites hosting all the needed data! Exiting... '
305 >
306 >        sites = dataloc.getSites()
307 >        allSites = []
308 >        listSites = sites.values()
309 >        for listSite in listSites:
310 >            for oneSite in listSite:
311 >                allSites.append(oneSite)
312 >        allSites = self.uniquelist(allSites)
313 >
314 >        # screen output
315 >        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
316 >
317 >        return sites
318 >
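
DataDiscoveryAndLocation flattens the per-block site lists into a single de-duplicated list before printing the summary. A minimal sketch of that step, assuming block_sites maps block names to lists of hosting sites (helper name is illustrative):

    def flatten_unique_sites(block_sites):
        # block_sites: {block: [site, ...]}; collect every hosting site exactly once.
        all_sites = []
        for site_list in block_sites.values():
            for site in site_list:
                if site not in all_sites:
                    all_sites.append(site)
        return all_sites
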
319 >    def jobSplittingByBlocks(self, blockSites):
320 >        """
321 >        Perform job splitting. Jobs run over an integer number of files
322 >        and no more than one block.
323 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
324 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberOfJobs,
325 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
326 >                  self.maxEvents, self.filesbyblock
327 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
328 >              self.total_number_of_jobs - Total # of jobs
329 >              self.list_of_args - File(s) job will run on (a list of lists)
330 >        """
331 >
332 >        # ---- Handle the possible job splitting configurations ---- #
333 >        if (self.selectTotalNumberEvents):
334 >            totalEventsRequested = self.total_number_of_events
335 >        if (self.selectEventsPerJob):
336 >            eventsPerJobRequested = self.eventsPerJob
337 >            if (self.selectNumberOfJobs):
338 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
339 >
340 >        # If user requested all the events in the dataset
341 >        if (totalEventsRequested == -1):
342 >            eventsRemaining=self.maxEvents
343 >        # If user requested more events than are in the dataset
344 >        elif (totalEventsRequested > self.maxEvents):
345 >            eventsRemaining = self.maxEvents
346 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
347 >        # If user requested less events than are in the dataset
348 >        else:
349 >            eventsRemaining = totalEventsRequested
350 >
351 >        # If user requested more events per job than are in the dataset
352 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
353 >            eventsPerJobRequested = self.maxEvents
354 >
355 >        # For user info at end
356 >        totalEventCount = 0
357 >
358 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
359 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
360 >
361 >        if (self.selectNumberOfJobs):
362 >            common.logger.message("May not create the exact number_of_jobs requested.")
363 >
364 >        if ( self.ncjobs == 'all' ) :
365 >            totalNumberOfJobs = 999999999
366 >        else :
367 >            totalNumberOfJobs = self.ncjobs
368 >
369 >        blocks = blockSites.keys()
370 >        blockCount = 0
371 >        # Backup variable in case self.maxEvents counted events in a non-included block
372 >        numBlocksInDataset = len(blocks)
373 >
374 >        jobCount = 0
375 >        list_of_lists = []
376 >
377 >        # list tracking which jobs belong to which block
378 >        jobsOfBlock = {}
379 >
380 >        # ---- Iterate over the blocks in the dataset until ---- #
381 >        # ---- we've met the requested total # of events    ---- #
382 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
383 >            block = blocks[blockCount]
384 >            blockCount += 1
385 >            if block not in jobsOfBlock.keys() :
386 >                jobsOfBlock[block] = []
387 >
388 >            if self.eventsbyblock.has_key(block) :
389 >                numEventsInBlock = self.eventsbyblock[block]
390 >                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
391 >
392 >                files = self.filesbyblock[block]
393 >                numFilesInBlock = len(files)
394 >                if (numFilesInBlock <= 0):
395 >                    continue
396 >                fileCount = 0
397 >
398 >                # ---- New block => New job ---- #
399 >                parString = ""
400 >                # counter for number of events in files currently worked on
401 >                filesEventCount = 0
402 >                # flag if next while loop should touch new file
403 >                newFile = 1
404 >                # job event counter
405 >                jobSkipEventCount = 0
406 >
407 >                # ---- Iterate over the files in the block until we've met the requested ---- #
408 >                # ---- total # of events or we've gone over all the files in this block  ---- #
409 >                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
410 >                    file = files[fileCount]
411 >                    if newFile :
412 >                        try:
413 >                            numEventsInFile = self.eventsbyfile[file]
414 >                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
415 >                            # increase filesEventCount
416 >                            filesEventCount += numEventsInFile
417 >                            # Add file to current job
418 >                            parString += '\\\"' + file + '\\\"\,'
419 >                            newFile = 0
420 >                        except KeyError:
421 >                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
422 >
423 >                    eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining)
424 >                    # if less events in file remain than eventsPerJobRequested
425 >                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested):
426 >                        # if last file in block
427 >                        if ( fileCount == numFilesInBlock-1 ) :
428 >                            # end job using last file, use remaining events in block
429 >                            # close job and touch new file
430 >                            fullString = parString[:-2]
431 >                            list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
432 >                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
433 >                            self.jobDestination.append(blockSites[block])
434 >                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
435 >                            # fill jobs of block dictionary
436 >                            jobsOfBlock[block].append(jobCount+1)
437 >                            # reset counter
438 >                            jobCount = jobCount + 1
439 >                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
440 >                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
441 >                            jobSkipEventCount = 0
442 >                            # reset file
443 >                            parString = ""
444 >                            filesEventCount = 0
445 >                            newFile = 1
446 >                            fileCount += 1
447 >                        else :
448 >                            # go to next file
449 >                            newFile = 1
450 >                            fileCount += 1
451 >                    # if events in file equal to eventsPerJobRequested
452 >                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
453 >                        # close job and touch new file
454 >                        fullString = parString[:-2]
455 >                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
456 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
457 >                        self.jobDestination.append(blockSites[block])
458 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
459 >                        jobsOfBlock[block].append(jobCount+1)
460 >                        # reset counter
461 >                        jobCount = jobCount + 1
462 >                        totalEventCount = totalEventCount + eventsPerJobRequested
463 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
464 >                        jobSkipEventCount = 0
465 >                        # reset file
466 >                        parString = ""
467 >                        filesEventCount = 0
468 >                        newFile = 1
469 >                        fileCount += 1
470 >
471 >                    # if more events in file remain than eventsPerJobRequested
472 >                    else :
473 >                        # close job but don't touch new file
474 >                        fullString = parString[:-2]
475 >                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
476 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
477 >                        self.jobDestination.append(blockSites[block])
478 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
479 >                        jobsOfBlock[block].append(jobCount+1)
480 >                        # increase counter
481 >                        jobCount = jobCount + 1
482 >                        totalEventCount = totalEventCount + eventsPerJobRequested
483 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
484 >                        # calculate skip events for last file
485 >                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
486 >                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
487 >                        # remove all but the last file
488 >                        filesEventCount = self.eventsbyfile[file]
489 >                        parString = '\\\"' + file + '\\\"\,'
490 >                    pass # END if
491 >                pass # END while (iterate over files in the block)
492 >        pass # END while (iterate over blocks in the dataset)
493 >        self.ncjobs = self.total_number_of_jobs = jobCount
494 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
495 >            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
496 >        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
497 >
498 >        # screen output
499 >        screenOutput = "List of jobs and available destination sites:\n\n"
500 >
501 >        # keep trace of block with no sites to print a warning at the end
502 >        noSiteBlock = []
503 >        bloskNoSite = []
504 >
505 >        blockCounter = 0
506 >        for block in blocks:
507 >            if block in jobsOfBlock.keys() :
508 >                blockCounter += 1
509 >                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),
510 >                    ','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
511 >                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
512 >                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
513 >                    bloskNoSite.append( blockCounter )
514 >
515 >        common.logger.message(screenOutput)
516 >        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
517 >            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
518 >            virgola = ""
519 >            if len(bloskNoSite) > 1:
520 >                virgola = ","
521 >            for block in bloskNoSite:
522 >                msg += ' ' + str(block) + virgola
523 >            msg += '\n               Related jobs:\n                 '
524 >            virgola = ""
525 >            if len(noSiteBlock) > 1:
526 >                virgola = ","
527 >            for range_jobs in noSiteBlock:
528 >                msg += str(range_jobs) + virgola
529 >            msg += '\n               will not be submitted and this block of data can not be analyzed!\n'
530 >            if self.cfg_params.has_key('EDG.se_white_list'):
531 >                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
532 >                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
533 >                msg += 'Please check if the dataset is available at this site!)\n'
534 >            if self.cfg_params.has_key('EDG.ce_white_list'):
535 >                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
536 >                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
537 >                msg += 'Please check if the dataset is available at this site!)\n'
538 >
539 >            common.logger.message(msg)
540 >
541 >        self.list_of_args = list_of_lists
542 >        return
543 >
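
jobSplittingByBlocks packs whole files into a job until roughly eventsPerJobRequested events are covered, recording a skip-event offset whenever a file must be shared between consecutive jobs, and closing the last job of a block with maxEvents = -1. A simplified, self-contained sketch of that packing loop for a single block (hypothetical helper; it ignores the eventsRemaining and requested-job caps and the shell-escaped argument strings):

    def split_block(files_events, events_per_job):
        # files_events: list of (filename, n_events) pairs for one block.
        # Returns (files, max_events, skip_events) triplets analogous to the
        # entries appended to list_of_lists above.
        jobs, cur_files, covered, skip = [], [], 0, 0
        for fname, nev in files_events:
            cur_files.append(fname)
            covered += nev
            # carve complete jobs out of the events accumulated so far
            while covered - skip > events_per_job:
                jobs.append((list(cur_files), events_per_job, skip))
                # the next job starts inside the current file: record how many
                # of its events the job just closed has already consumed
                skip = events_per_job - (covered - skip - nev)
                cur_files, covered = [fname], nev
            if covered - skip == events_per_job:
                jobs.append((list(cur_files), events_per_job, skip))
                cur_files, covered, skip = [], 0, 0
        if cur_files:
            # last, partial job runs on whatever events remain in the block
            jobs.append((list(cur_files), -1, skip))
        return jobs
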
544 >    def jobSplittingNoInput(self):
545 >        """
546 >        Perform job splitting based on number of events per job
547 >        """
548 >        common.logger.debug(5,'Splitting per events')
549 >
550 >        if (self.selectEventsPerJob):
551 >            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
552 >        if (self.selectNumberOfJobs):
553 >            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
554 >        if (self.selectTotalNumberEvents):
555 >            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
556 >
557 >        if (self.total_number_of_events < 0):
558 >            msg='Cannot split jobs per Events with "-1" as total number of events'
559              raise CrabException(msg)
560  
561 <        common.logger.message("List of Sites hosting the data : "+str(sites))
562 <        common.logger.debug(6, "List of Sites: "+str(sites))
563 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
561 >        if (self.selectEventsPerJob):
562 >            if (self.selectTotalNumberEvents):
563 >                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
564 >            elif(self.selectNumberOfJobs) :
565 >                self.total_number_of_jobs =self.theNumberOfJobs
566 >                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
567 >
568 >        elif (self.selectNumberOfJobs) :
569 >            self.total_number_of_jobs = self.theNumberOfJobs
570 >            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
571 >
572 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
573 >
574 >        # is there any remainder?
575 >        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
576 >
577 >        common.logger.debug(5,'Check  '+str(check))
578 >
579 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
580 >        if check > 0:
581 >            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
582 >
583 >        # argument is seed number.$i
584 >        self.list_of_args = []
585 >        for i in range(self.total_number_of_jobs):
586 >            ## Since there is no input, any site is good
587 >            self.jobDestination.append([""]) #must be empty to write correctly the xml
588 >            args=[]
589 >            if (self.firstRun):
590 >                ## pythia first run
591 >                args.append(str(self.firstRun)+str(i))
592 >            self.list_of_args.append(args)
593 >
594          return
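
jobSplittingNoInput derives the missing one of the three splitting parameters and reports any remainder that will never be processed. A hedged sketch of the same arithmetic (hypothetical helper, assuming exactly two arguments are supplied, as enforced earlier):

    def split_no_input(total_events=None, events_per_job=None, number_of_jobs=None):
        # Events-only splitting: fill in the missing quantity and report the leftover.
        if events_per_job and total_events:
            number_of_jobs = total_events // events_per_job
        elif events_per_job and number_of_jobs:
            total_events = number_of_jobs * events_per_job
        elif number_of_jobs and total_events:
            events_per_job = total_events // number_of_jobs
        leftover = total_events - number_of_jobs * events_per_job
        return number_of_jobs, events_per_job, leftover   # leftover events are not processed
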
595 <    
596 <    def jobSplitting(self):
595 >
596 >
597 >    def jobSplittingForScript(self):
598 >        """
598 >        Perform job splitting based on number of jobs
600          """
601 <        first implemntation for job splitting  
602 <        """    
277 <      #  print 'eventi totali '+str(self.maxEvents)
278 <      #  print 'eventi totali richiesti dallo user '+str(self.total_number_of_events)
279 <        #print 'files per job '+str(self.filesPerJob)
280 <        common.logger.message('Required '+str(self.filesPerJob)+' files per job ')
281 <        common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
282 <
283 <        ## TODO: SL need to have (from DBS) a detailed list of how many events per each file
284 <        n_tot_files = (len(self.files[0]))
285 <        ## SL: this is wrong if the files have different number of events
286 <        evPerFile = int(self.maxEvents)/n_tot_files
287 <        
288 <        common.logger.debug(5,'Events per File '+str(evPerFile))
289 <
290 <        ## if asked to process all events, do it
291 <        if self.total_number_of_events == -1:
292 <            self.total_number_of_events=self.maxEvents
293 <            self.total_number_of_jobs = int(n_tot_files)*1/int(self.filesPerJob)
294 <            common.logger.message(str(self.total_number_of_jobs)+' jobs will be created for all available events '+str(self.total_number_of_events)+' events')
295 <        
296 <        else:
297 <            self.total_number_of_files = int(self.total_number_of_events/evPerFile)
298 <            ## SL: if ask for less event than what is computed to be available on a
299 <            ##     file, process the first file anyhow.
300 <            if self.total_number_of_files == 0:
301 <                self.total_number_of_files = self.total_number_of_files + 1
302 <
303 <            common.logger.debug(5,'N files  '+str(self.total_number_of_files))
304 <
305 <            check = 0
306 <            
307 <            ## Compute the number of jobs
308 <            #self.total_number_of_jobs = int(n_tot_files)*1/int(self.filesPerJob)
309 <            self.total_number_of_jobs = int(self.total_number_of_files/self.filesPerJob)
310 <            common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
311 <
312 <            ## is there any remainder?
313 <            check = int(self.total_number_of_files) - (int(self.total_number_of_jobs)*self.filesPerJob)
314 <
315 <            common.logger.debug(5,'Check  '+str(check))
316 <
317 <            if check > 0:
318 <                self.total_number_of_jobs =  self.total_number_of_jobs + 1
319 <                common.logger.message('Warning: last job will be created with '+str(check)+' files')
601 >        common.logger.debug(5,'Splitting per job')
602 >        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
603  
604 <            common.logger.message(str(self.total_number_of_jobs)+' jobs will be created for a total of '+str((self.total_number_of_jobs-1)*self.filesPerJob*evPerFile + check*evPerFile)+' events')
322 <            pass
604 >        self.total_number_of_jobs = self.theNumberOfJobs
605  
606 <        list_of_lists = []
325 <        for i in xrange(0, int(n_tot_files), self.filesPerJob):
326 <            list_of_lists.append(self.files[0][i: i+self.filesPerJob])
606 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
607  
608 <        self.list_of_files = list_of_lists
609 <      
608 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
609 >
610 >        # argument is seed number.$i
611 >        self.list_of_args = []
612 >        for i in range(self.total_number_of_jobs):
613 >            self.jobDestination.append([""])
614 >            self.list_of_args.append([str(i)])
615          return
616  
617      def split(self, jobParams):
618 <
334 <        common.jobDB.load()
335 <        #### Fabio
618 >
619          njobs = self.total_number_of_jobs
620 <        filelist = self.list_of_files
620 >        arglist = self.list_of_args
621          # create the empty structure
622          for i in range(njobs):
623              jobParams.append("")
624 <        
624 >
625 >        listID=[]
626 >        listField=[]
627          for job in range(njobs):
628 <            jobParams[job] = filelist[job]
629 <            common.jobDB.setArguments(job, jobParams[job])
628 >            jobParams[job] = arglist[job]
629 >            listID.append(job+1)
630 >            job_ToSave ={}
631 >            concString = ' '
632 >            argu=''
633 >            if len(jobParams[job]):
634 >                argu +=   concString.join(jobParams[job] )
635 >            job_ToSave['arguments']= str(job+1)+' '+argu
636 >            job_ToSave['dlsDestination']= self.jobDestination[job]
637 >            listField.append(job_ToSave)
638 >            msg="Job "+str(job)+" Arguments:   "+str(job+1)+" "+argu+"\n"  \
639 >            +"                     Destination: "+str(self.jobDestination[job])
640 >            common.logger.debug(5,msg)
641 >        common._db.updateJob_(listID,listField)
642 >        self.argsList = (len(jobParams[0])+1)
643  
346        common.jobDB.save()
644          return
348    
349    def getJobTypeArguments(self, nj, sched):
350        params = common.jobDB.arguments(nj)
351        #print params
352        parString = "\\{"
353        
354        for i in range(len(params) - 1):
355            parString += '\\\"' + params[i] + '\\\"\,'
356        
357        parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
358        return parString
359  
360    def numberOfJobs(self):
361        # Fabio
645  
646 +    def numberOfJobs(self):
647          return self.total_number_of_jobs
364
365
366
367    def checkBlackList(self, allSites):
368        if len(self.reCEBlackList)==0: return allSites
369        sites = []
370        for site in allSites:
371            common.logger.debug(10,'Site '+site)
372            good=1
373            for re in self.reCEBlackList:
374                if re.search(site):
375                    common.logger.message('CE in black list, skipping site '+site)
376                    good=0
377                pass
378            if good: sites.append(site)
379        if len(sites) == 0:
380            common.logger.debug(3,"No sites found after BlackList")
381        return sites
382
383    def checkWhiteList(self, allSites):
384
385        if len(self.reCEWhiteList)==0: return allSites
386        sites = []
387        for site in allSites:
388            good=0
389            for re in self.reCEWhiteList:
390                if re.search(site):
391                    common.logger.debug(5,'CE in white list, adding site '+site)
392                    good=1
393                if not good: continue
394                sites.append(site)
395        if len(sites) == 0:
396            common.logger.message("No sites found after WhiteList\n")
397        else:
398            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
399        return sites
648  
649      def getTarBall(self, exe):
650          """
651          Return the TarBall with lib and exe
652          """
653 <        
406 <        # if it exist, just return it
407 <        self.tgzNameWithPath = common.work_space.shareDir()+self.tgz_name
653 >        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
654          if os.path.exists(self.tgzNameWithPath):
655              return self.tgzNameWithPath
656  
# Line 417 | Line 663 | class Cmssw(JobType):
663  
664          # First of all declare the user Scram area
665          swArea = self.scram.getSWArea_()
420        #print "swArea = ", swArea
421        swVersion = self.scram.getSWVersion()
422        #print "swVersion = ", swVersion
666          swReleaseTop = self.scram.getReleaseTop_()
667 <        #print "swReleaseTop = ", swReleaseTop
425 <        
667 >
668          ## check if working area is release top
669          if swReleaseTop == '' or swArea == swReleaseTop:
670 +            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
671              return
672  
673 <        filesToBeTarred = []
674 <        ## First find the executable
675 <        if (self.executable != ''):
676 <            exeWithPath = self.scram.findFile_(executable)
677 < #           print exeWithPath
678 <            if ( not exeWithPath ):
679 <                raise CrabException('User executable '+executable+' not found')
680 <
681 <            ## then check if it's private or not
682 <            if exeWithPath.find(swReleaseTop) == -1:
683 <                # the exe is private, so we must ship
684 <                common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
685 <                path = swArea+'/'
686 <                exe = string.replace(exeWithPath, path,'')
687 <                filesToBeTarred.append(exe)
688 <                pass
689 <            else:
690 <                # the exe is from release, we'll find it on WN
691 <                pass
692 <
693 <        ## Now get the libraries: only those in local working area
694 <        libDir = 'lib'
695 <        lib = swArea+'/' +libDir
696 <        common.logger.debug(5,"lib "+lib+" to be tarred")
697 <        if os.path.exists(lib):
698 <            filesToBeTarred.append(libDir)
699 <
700 <        ## Now check if module dir is present
701 <        moduleDir = 'module'
702 <        if os.path.isdir(swArea+'/'+moduleDir):
703 <            filesToBeTarred.append(moduleDir)
704 <
705 <        ## Now check if the Data dir is present
706 <        dataDir = 'src/Data/'
707 <        if os.path.isdir(swArea+'/'+dataDir):
708 <            filesToBeTarred.append(dataDir)
709 <
710 <        ## Create the tar-ball
711 <        if len(filesToBeTarred)>0:
712 <            cwd = os.getcwd()
713 <            os.chdir(swArea)
714 <            tarcmd = 'tar zcvf ' + self.tgzNameWithPath + ' '
715 <            for line in filesToBeTarred:
716 <                tarcmd = tarcmd + line + ' '
717 <            cout = runCommand(tarcmd)
718 <            if not cout:
719 <                raise CrabException('Could not create tar-ball')
720 <            os.chdir(cwd)
721 <        else:
722 <            common.logger.debug(5,"No files to be to be tarred")
723 <        
724 <        return
725 <        
726 <    def wsSetupEnvironment(self, nj):
673 >        import tarfile
674 >        try: # create tar ball
675 >            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
676 >            ## First find the executable
677 >            if (self.executable != ''):
678 >                exeWithPath = self.scram.findFile_(executable)
679 >                if ( not exeWithPath ):
680 >                    raise CrabException('User executable '+executable+' not found')
681 >
682 >                ## then check if it's private or not
683 >                if exeWithPath.find(swReleaseTop) == -1:
684 >                    # the exe is private, so we must ship
685 >                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
686 >                    path = swArea+'/'
687 >                    # distinguish case when script is in user project area or given by full path somewhere else
688 >                    if exeWithPath.find(path) >= 0 :
689 >                        exe = string.replace(exeWithPath, path,'')
690 >                        tar.add(path+exe,exe)
691 >                    else :
692 >                        tar.add(exeWithPath,os.path.basename(executable))
693 >                    pass
694 >                else:
695 >                    # the exe is from release, we'll find it on WN
696 >                    pass
697 >
698 >            ## Now get the libraries: only those in local working area
699 >            libDir = 'lib'
700 >            lib = swArea+'/' +libDir
701 >            common.logger.debug(5,"lib "+lib+" to be tarred")
702 >            if os.path.exists(lib):
703 >                tar.add(lib,libDir)
704 >
705 >            ## Now check if module dir is present
706 >            moduleDir = 'module'
707 >            module = swArea + '/' + moduleDir
708 >            if os.path.isdir(module):
709 >                tar.add(module,moduleDir)
710 >
711 >            ## Now check if any data dir(s) is present
712 >            swAreaLen=len(swArea)
713 >            self.dataExist = False
714 >            for root, dirs, files in os.walk(swArea):
715 >                if "data" in dirs:
716 >                    self.dataExist=True
717 >                    common.logger.debug(5,"data "+root+"/data"+" to be tarred")
718 >                    tar.add(root+"/data",root[swAreaLen:]+"/data")
719 >
720 >            ### CMSSW ParameterSet
721 >            if not self.pset is None:
722 >                cfg_file = common.work_space.jobDir()+self.configFilename()
723 >                tar.add(cfg_file,self.configFilename())
724 >                common.logger.debug(5,"File added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
725 >
726 >
727 >            ## Add ProdCommon dir to tar
728 >            prodcommonDir = 'ProdCommon'
729 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'ProdCommon'
730 >            if os.path.isdir(prodcommonPath):
731 >                tar.add(prodcommonPath,prodcommonDir)
732 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
733 >
734 >            ##### ML stuff
735 >            ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
736 >            path=os.environ['CRABDIR'] + '/python/'
737 >            for file in ML_file_list:
738 >                tar.add(path+file,file)
739 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
740 >
741 >            ##### Utils
742 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'JobReportErrorCode.py']
743 >            for file in Utils_file_list:
744 >                tar.add(path+file,file)
745 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
746 >
747 >            ##### AdditionalFiles
748 >            for file in self.additional_inbox_files:
749 >                tar.add(file,string.split(file,'/')[-1])
750 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
751 >
752 >            tar.close()
753 >        except :
754 >            raise CrabException('Could not create tar-ball')
755 >
756 >        ## check for tarball size
757 >        tarballinfo = os.stat(self.tgzNameWithPath)
758 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
759 >            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
760 >
761 >        ## create tar-ball with ML stuff
762 >
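
getTarBall now builds the sandbox with the tarfile module and rejects tarballs above EDG.maxtarballsize (9.5 MB by default). A minimal sketch of that pattern, with illustrative names and paths (not the CRAB API):

    import os, tarfile

    def make_sandbox_tarball(tgz_path, items, max_mb=9.5):
        # Pack (path, arcname) pairs into a gzipped tarball and enforce a size limit,
        # in the spirit of the getTarBall check above.
        tar = tarfile.open(tgz_path, "w:gz")
        try:
            for path, arcname in items:
                tar.add(path, arcname)
        finally:
            tar.close()
        size_mb = os.stat(tgz_path).st_size / 1024.0 / 1024.0
        if size_mb > max_mb:
            raise RuntimeError("Input sandbox size of %.1f MB is larger than the allowed %.1f MB limit"
                               % (size_mb, max_mb))
        return tgz_path
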
763 >    def wsSetupEnvironment(self, nj=0):
764          """
765          Returns part of a job script which prepares
766          the execution environment for the job 'nj'.
767          """
768 +        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
769 +            psetName = 'pset.py'
770 +        else:
771 +            psetName = 'pset.cfg'
772          # Prepare JobType-independent part
773 <        txt = ''
774 <  
775 <        ## OLI_Daniele at this level  middleware already known
492 <
493 <        txt += 'if [ $middleware == LCG ]; then \n'
773 >        txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
774 >        txt += 'echo ">>> setup environment"\n'
775 >        txt += 'if [ $middleware == LCG ]; then \n'
776          txt += self.wsSetupCMSLCGEnvironment_()
777          txt += 'elif [ $middleware == OSG ]; then\n'
778 <        txt += '    time=`date -u +"%s"`\n'
779 <        txt += '    WORKING_DIR=$OSG_WN_TMP/cms_$time\n'
780 <        txt += '    echo "Creating working directory: $WORKING_DIR"\n'
781 <        txt += '    /bin/mkdir -p $WORKING_DIR\n'
782 <        txt += '    if [ ! -d $WORKING_DIR ] ;then\n'
501 <        txt += '        echo "OSG WORKING DIR ==> $WORKING_DIR could not be created on on WN `hostname`"\n'
502 <    
503 <        txt += '        echo "JOB_EXIT_STATUS = 1"\n'
504 <        txt += '        exit 1\n'
778 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
779 >        txt += '    if [ ! $? == 0 ] ;then\n'
780 >        txt += '        echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
781 >        txt += '        job_exit_code=10016\n'
782 >        txt += '        func_exit\n'
783          txt += '    fi\n'
784 +        txt += '    echo ">>> Created working directory: $WORKING_DIR"\n'
785          txt += '\n'
786          txt += '    echo "Change to working directory: $WORKING_DIR"\n'
787          txt += '    cd $WORKING_DIR\n'
788 <        txt += self.wsSetupCMSOSGEnvironment_()
788 >        txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
789 >        txt += self.wsSetupCMSOSGEnvironment_()
790          txt += 'fi\n'
791  
792          # Prepare JobType-specific part
793          scram = self.scram.commandName()
794          txt += '\n\n'
795 <        txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
795 >        txt += 'echo ">>> specific cmssw setup environment:"\n'
796 >        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
797          txt += scram+' project CMSSW '+self.version+'\n'
798          txt += 'status=$?\n'
799          txt += 'if [ $status != 0 ] ; then\n'
800 <        txt += '   echo "SET_EXE_ENV 1 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
801 <        txt += '   echo "JOB_EXIT_STATUS = 10034"\n'
802 <        txt += '   echo "SanityCheckCode = 10034" | tee -a $RUNTIME_AREA/$repo\n'
522 <        txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
523 <        ## OLI_Daniele
524 <        txt += '    if [ $middleware == OSG ]; then \n'
525 <        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
526 <        txt += '        cd $RUNTIME_AREA\n'
527 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
528 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
529 <        txt += '            echo "OSG WORKING DIR ==> $WORKING_DIR could not be deleted on on WN `hostname`"\n'
530 <        txt += '        fi\n'
531 <        txt += '    fi \n'
532 <        txt += '   exit 1 \n'
800 >        txt += '    echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n'
801 >        txt += '    job_exit_code=10034\n'
802 >        txt += '    func_exit\n'
803          txt += 'fi \n'
534        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
804          txt += 'cd '+self.version+'\n'
805 <        ### needed grep for bug in scramv1 ###
805 >        txt += 'SOFTWARE_DIR=`pwd`\n'
806 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
807          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
808 <
808 >        txt += 'if [ $? != 0 ] ; then\n'
809 >        txt += '    echo "ERROR ==> Problem with the command: "\n'
810 >        txt += '    echo "eval \`'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME \` at `hostname`"\n'
811 >        txt += '    job_exit_code=10034\n'
812 >        txt += '    func_exit\n'
813 >        txt += 'fi \n'
814          # Handle the arguments:
815          txt += "\n"
816 <        txt += "## ARGUMNETS: $1 Job Number\n"
542 <        # txt += "## ARGUMNETS: $2 First Event for this job\n"
543 <        # txt += "## ARGUMNETS: $3 Max Event for this job\n"
816 >        txt += "## number of arguments (first argument always jobnumber)\n"
817          txt += "\n"
818 <        txt += "narg=$#\n"
546 <        txt += "if [ $narg -lt 2 ]\n"
818 >        txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
819          txt += "then\n"
820 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$narg+ \n"
821 <        txt += '    echo "JOB_EXIT_STATUS = 50113"\n'
822 <        txt += '    echo "SanityCheckCode = 50113" | tee -a $RUNTIME_AREA/$repo\n'
551 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
552 <        ## OLI_Daniele
553 <        txt += '    if [ $middleware == OSG ]; then \n'
554 <        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
555 <        txt += '        cd $RUNTIME_AREA\n'
556 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
557 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
558 <        txt += '            echo "OSG WORKING DIR ==> $WORKING_DIR could not be deleted on on WN `hostname`"\n'
559 <        txt += '        fi\n'
560 <        txt += '    fi \n'
561 <        txt += "    exit 1\n"
820 >        txt += "    echo 'ERROR ==> Too few arguments' +$nargs+ \n"
821 >        txt += '    job_exit_code=50113\n'
822 >        txt += "    func_exit\n"
823          txt += "fi\n"
824          txt += "\n"
564        txt += "NJob=$1\n"
565        txt += "InputFiles=$2\n"
566        txt += "echo \"<$InputFiles>\"\n"
567        # txt += "Args = ` cat $2 |  sed -e \'s/\\\\//g\' -e \'s/\"/\\x27/g\' `"
568
569        ### OLI_DANIELE
570        txt += 'if [ $middleware == LCG ]; then \n'
571        txt += '    echo "MonitorJobID=`echo ${NJob}_$EDG_WL_JOBID`" | tee -a $RUNTIME_AREA/$repo\n'
572        txt += '    echo "SyncGridJobId=`echo $EDG_WL_JOBID`" | tee -a $RUNTIME_AREA/$repo\n'
573        txt += '    echo "SyncCE=`edg-brokerinfo getCE`" | tee -a $RUNTIME_AREA/$repo\n'
574        txt += 'elif [ $middleware == OSG ]; then\n'
575
576        # OLI: added monitoring for dashbord, use hash of crab.cfg
577        if common.scheduler.boss_scheduler_name == 'condor_g':
578            # create hash of cfg file
579            hash = makeCksum(common.work_space.cfgFileName())
580            txt += '    echo "MonitorJobID=`echo ${NJob}_'+hash+'_$GLOBUS_GRAM_JOB_CONTACT`" | tee -a $RUNTIME_AREA/$repo\n'
581            txt += '    echo "SyncGridJobId=`echo $GLOBUS_GRAM_JOB_CONTACT`" | tee -a $RUNTIME_AREA/$repo\n'
582            txt += '    echo "SyncCE=`echo $hostname`" | tee -a $RUNTIME_AREA/$repo\n'
583        else :
584            txt += '    echo "MonitorJobID=`echo ${NJob}_$EDG_WL_JOBID`" | tee -a $RUNTIME_AREA/$repo\n'
585            txt += '    echo "SyncGridJobId=`echo $EDG_WL_JOBID`" | tee -a $RUNTIME_AREA/$repo\n'
586            txt += '    echo "SyncCE=`$EDG_WL_LOG_DESTINATION`" | tee -a $RUNTIME_AREA/$repo\n'
587
588        txt += 'fi\n'
589        txt += 'dumpStatus $RUNTIME_AREA/$repo\n'
825  
826          # Prepare job-specific part
827          job = common.job_list[nj]
828 <        pset = os.path.basename(job.configFilename())
829 <        txt += '\n'
830 <        #txt += 'echo sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' \n'
596 <        txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset.cfg\n'
597 <        #txt += 'sed "s#{\'INPUT\'}#${InputFiles}#" $RUNTIME_AREA/'+pset+' > pset1.cfg\n'
828 >        if (self.datasetPath):
829 >            txt += '\n'
830 >            txt += 'DatasetPath='+self.datasetPath+'\n'
831  
832 <        if len(self.additional_inbox_files) > 0:
600 <            for file in self.additional_inbox_files:
601 <                txt += 'if [ -e $RUNTIME_AREA/'+file+' ] ; then\n'
602 <                txt += '   cp $RUNTIME_AREA/'+file+' .\n'
603 <                txt += '   chmod +x '+file+'\n'
604 <                txt += 'fi\n'
605 <            pass
832 >            datasetpath_split = self.datasetPath.split("/")
833  
834 <        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
834 >            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
835 >            txt += 'DataTier='+datasetpath_split[2]+'\n'
836 >            txt += 'ApplicationFamily=cmsRun\n'
837  
838 <        txt += '\n'
839 <        txt += 'echo "***** cat pset.cfg *********"\n'
840 <        txt += 'cat pset.cfg\n'
841 <        txt += 'echo "****** end pset.cfg ********"\n'
842 <        txt += '\n'
843 <        # txt += 'echo "***** cat pset1.cfg *********"\n'
844 <        # txt += 'cat pset1.cfg\n'
845 <        # txt += 'echo "****** end pset1.cfg ********"\n'
838 >        else:
839 >            txt += 'DatasetPath=MCDataTier\n'
840 >            txt += 'PrimaryDataset=null\n'
841 >            txt += 'DataTier=null\n'
842 >            txt += 'ApplicationFamily=MCDataTier\n'
843 >        if self.pset != None:
844 >            pset = os.path.basename(job.configFilename())
845 >            txt += '\n'
846 >            txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
847 >            if (self.datasetPath): # standard job
848 >                txt += 'InputFiles=${args[1]}; export InputFiles\n'
849 >                txt += 'MaxEvents=${args[2]}; export MaxEvents\n'
850 >                txt += 'SkipEvents=${args[3]}; export SkipEvents\n'
851 >                txt += 'echo "Inputfiles:<$InputFiles>"\n'
852 >                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
853 >                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
854 >            else:  # pythia like job
855 >                txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
856 >                txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
857 >                txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
858 >                txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
859 >                if (self.firstRun):
860 >                    txt += 'FirstRun=${args[1]}; export FirstRun\n'
861 >                    txt += 'echo "FirstRun: <$FirstRun>"\n'
862 >
863 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
864 >
865 >
866 >        if self.pset != None:
867 >            # FUTURE: Can simplify for 2_1_x and higher
868 >            txt += '\n'
869 >            if self.debug_pset==True:
870 >                txt += 'echo "***** cat ' + psetName + ' *********"\n'
871 >                txt += 'cat ' + psetName + '\n'
872 >                txt += 'echo "****** end ' + psetName + ' ********"\n'
873 >                txt += '\n'
874 >            txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
875 >            txt += 'echo "PSETHASH = $PSETHASH" \n'
876 >            txt += '\n'
877          return txt
878  
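As a side note, the pset file name chosen at the top of wsSetupEnvironment depends only on the parsed CMSSW release numbers; a minimal standalone sketch of that rule (hypothetical helper, shown for clarity only):

    def pset_name(major, minor):
        # Same condition as in wsSetupEnvironment: python configs from 2_1_x on.
        if (major >= 2 and minor >= 1) or major >= 3:
            return 'pset.py'
        return 'pset.cfg'

    # pset_name(1, 6) -> 'pset.cfg'   pset_name(2, 1) -> 'pset.py'   pset_name(3, 0) -> 'pset.py'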
879 <    def wsBuildExe(self, nj):
879 >    def wsUntarSoftware(self, nj=0):
880          """
881          Put in the script the commands to build an executable
882          or a library.
883          """
884  
885 <        txt = ""
885 >        txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
886  
887          if os.path.isfile(self.tgzNameWithPath):
888 <            txt += 'echo "tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'"\n'
888 >            txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
889              txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
890 +            txt += 'ls -Al \n'
891              txt += 'untar_status=$? \n'
892              txt += 'if [ $untar_status -ne 0 ]; then \n'
893 <            txt += '   echo "SET_EXE 1 ==> ERROR Untarring .tgz file failed"\n'
894 <            txt += '   echo "JOB_EXIT_STATUS = $untar_status" \n'
895 <            txt += '   echo "SanityCheckCode = $untar_status" | tee -a $repo\n'
635 <            txt += '   if [ $middleware == OSG ]; then \n'
636 <            txt += '       echo "Remove working directory: $WORKING_DIR"\n'
637 <            txt += '       cd $RUNTIME_AREA\n'
638 <            txt += '       /bin/rm -rf $WORKING_DIR\n'
639 <            txt += '       if [ -d $WORKING_DIR ] ;then\n'
640 <            txt += '           echo "OSG WORKING DIR ==> $WORKING_DIR could not be deleted on on WN `hostname`"\n'
641 <            txt += '       fi\n'
642 <            txt += '   fi \n'
643 <            txt += '   \n'
644 <            txt += '   exit $untar_status \n'
893 >            txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
894 >            txt += '   job_exit_code=$untar_status\n'
895 >            txt += '   func_exit\n'
896              txt += 'else \n'
897              txt += '   echo "Successful untar" \n'
898              txt += 'fi \n'
899 +            txt += '\n'
900 +            txt += 'echo ">>> Include ProdCommon in PYTHONPATH:"\n'
901 +            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
902 +            txt += '   export PYTHONPATH=$RUNTIME_AREA/ProdCommon\n'
903 +            txt += 'else\n'
904 +            txt += '   export PYTHONPATH=$RUNTIME_AREA/ProdCommon:${PYTHONPATH}\n'
905 +            txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
906 +            txt += 'fi\n'
907 +            txt += '\n'
908 +
909              pass
910 <        
910 >
911 >        return txt
912 >
913 >    def wsBuildExe(self, nj=0):
914 >        """
915 >        Put in the script the commands to move the pre-built user
916 >        libraries, modules and sources into the CMSSW software area.
917 >        """
918 >
919 >        txt = '\n#Written by cms_cmssw::wsBuildExe\n'
920 >        txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
921 >
922 >        txt += 'rm -r lib/ module/ \n'
923 >        txt += 'mv $RUNTIME_AREA/lib/ . \n'
924 >        txt += 'mv $RUNTIME_AREA/module/ . \n'
925 >        if self.dataExist == True:
926 >            txt += 'rm -r src/ \n'
927 >            txt += 'mv $RUNTIME_AREA/src/ . \n'
928 >        if len(self.additional_inbox_files)>0:
929 >            for file in self.additional_inbox_files:
930 >                txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
931 >        txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
932 >
933 >        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
934 >        txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdCommon\n'
935 >        txt += 'else\n'
936 >        txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdCommon:${PYTHONPATH}\n'
937 >        txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
938 >        txt += 'fi\n'
939 >        txt += '\n'
940 >
941          return txt
942  
943      def modifySteeringCards(self, nj):
944          """
945 <        modify the card provided by the user,
945 >        modify the card provided by the user,
946          writing a new card into share dir
947          """
948 <        
948 >
949      def executableName(self):
950 <        return self.executable
950 >        if self.scriptExe:
951 >            return "sh "
952 >        else:
953 >            return self.executable
954  
955      def executableArgs(self):
956 <        return " -p pset.cfg"
956 >        # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
957 >        if self.scriptExe: # CarlosDaniele
958 >            return self.scriptExe + " $NJob"
959 >        else:
960 >            ex_args = ""
961 >            # FUTURE: This tests the CMSSW version. Can remove code as versions deprecated
962 >            # Framework job report
963 >            if (self.CMSSW_major >= 1 and self.CMSSW_minor >= 5) or (self.CMSSW_major >= 2):
964 >                ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
965 >            # Type of config file
966 >            if self.CMSSW_major >= 2 :
967 >                ex_args += " -p pset.py"
968 >            else:
969 >                ex_args += " -p pset.cfg"
970 >            return ex_args
971  
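For illustration, the version tests in executableArgs amount to the following (a sketch for cmsRun jobs only, i.e. with scriptExe unset; the helper name is hypothetical):

    def executable_args(major, minor):
        args = ''
        # Framework job report supported from 1_5_x on
        if (major >= 1 and minor >= 5) or major >= 2:
            args += ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml'
        # Python configuration files from 2_x on
        if major >= 2:
            args += ' -p pset.py'
        else:
            args += ' -p pset.cfg'
        return args

    # executable_args(1, 6) -> ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.cfg'
    # executable_args(2, 0) -> ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py'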
972      def inputSandbox(self, nj):
973          """
974          Returns a list of filenames to be put in JDL input sandbox.
975          """
976          inp_box = []
669        # dict added to delete duplicate from input sandbox file list
670        seen = {}
671        ## code
977          if os.path.isfile(self.tgzNameWithPath):
978              inp_box.append(self.tgzNameWithPath)
979 <        ## config
980 <        inp_box.append(common.job_list[nj].configFilename())
676 <        ## additional input files
677 <        #for file in self.additional_inbox_files:
678 <        #    inp_box.append(common.work_space.cwdDir()+file)
979 >        wrapper = os.path.basename(str(common._db.queryTask('scriptName')))
980 >        inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper)
981          return inp_box
982  
983      def outputSandbox(self, nj):
# Line 684 | Line 986 | class Cmssw(JobType):
986          """
987          out_box = []
988  
687        stdout=common.job_list[nj].stdout()
688        stderr=common.job_list[nj].stderr()
689
989          ## User Declared output files
990 <        for out in self.output_file:
991 <            n_out = nj + 1
990 >        for out in (self.output_file+self.output_file_sandbox):
991 >            n_out = nj + 1
992              out_box.append(self.numberFile_(out,str(n_out)))
993          return out_box
695        return []
994  
995      def prepareSteeringCards(self):
996          """
# Line 705 | Line 1003 | class Cmssw(JobType):
1003          Returns part of a job script which renames the produced files.
1004          """
1005  
1006 <        txt = '\n'
1007 <        file_list = ''
1008 <        check = len(self.output_file)
1009 <        i = 0
1010 <        for fileWithSuffix in self.output_file:
1011 <            i= i + 1
1006 >        txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
1007 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1008 >        txt += 'echo ">>> current directory content:"\n'
1009 >        txt += 'ls \n'
1010 >        txt += '\n'
1011 >
1012 >        for fileWithSuffix in (self.output_file):
1013              output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
715            file_list=file_list+output_file_num+''
716            txt += '\n'
717            txt += 'ls \n'
1014              txt += '\n'
1015 <            txt += 'ls '+fileWithSuffix+'\n'
1016 <            txt += 'exe_result=$?\n'
1017 <            txt += 'if [ $exe_result -ne 0 ] ; then\n'
1018 <            txt += '   echo "ERROR: No output file to manage"\n'
1019 <            ### OLI_DANIELE
1020 <            txt += '    if [ $middleware == OSG ]; then \n'
1021 <            txt += '        echo "prepare dummy output file"\n'
1022 <            txt += '        cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
727 <            txt += '    fi \n'
1015 >            txt += '# check output file\n'
1016 >            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1017 >            if (self.copy_data == 1):  # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA
1018 >                txt += '    mv '+fileWithSuffix+' '+output_file_num+'\n'
1019 >                txt += '    ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1020 >            else:
1021 >                txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1022 >                txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1023              txt += 'else\n'
1024 <            txt += '   cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1024 >            txt += '    job_exit_code=60302\n'
1025 >            txt += '    echo "WARNING: Output file '+fileWithSuffix+' not found"\n'
1026 >            if common.scheduler.name().upper() == 'CONDOR_G':
1027 >                txt += '    if [ $middleware == OSG ]; then \n'
1028 >                txt += '        echo "prepare dummy output file"\n'
1029 >                txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1030 >                txt += '    fi \n'
1031              txt += 'fi\n'
1032 <            if i == check:
1033 <                txt += 'cd $RUNTIME_AREA\n'
1034 <                pass      
1035 <            pass
1036 <      
736 <        file_list=file_list[:-1]
737 <        txt += 'file_list="'+file_list+'"\n'
738 <        ### OLI_DANIELE
739 <        txt += 'if [ $middleware == OSG ]; then\n'  
740 <        txt += '    cd $RUNTIME_AREA\n'
741 <        txt += '    echo "Remove working directory: $WORKING_DIR"\n'
742 <        txt += '    /bin/rm -rf $WORKING_DIR\n'
743 <        txt += '    if [ -d $WORKING_DIR ] ;then\n'
744 <        txt += '        echo "OSG WORKING DIR ==> $WORKING_DIR could not be deleted on on WN `hostname`"\n'
745 <        txt += '    fi\n'
746 <        txt += 'fi\n'
1032 >        file_list = []
1033 >        for fileWithSuffix in (self.output_file):
1034 >             file_list.append(self.numberFile_(fileWithSuffix, '$NJob'))
1035 >
1036 >        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
1037          txt += '\n'
1038 +        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1039 +        txt += 'echo ">>> current directory content:"\n'
1040 +        txt += 'ls \n'
1041 +        txt += '\n'
1042 +        txt += 'cd $RUNTIME_AREA\n'
1043 +        txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
1044          return txt
1045  
1046      def numberFile_(self, file, txt):
# Line 755 | Line 1051 | class Cmssw(JobType):
1051          # take away last extension
1052          name = p[0]
1053          for x in p[1:-1]:
1054 <           name=name+"."+x
1054 >            name=name+"."+x
1055          # add "_txt"
1056          if len(p)>1:
1057 <          ext = p[len(p)-1]
1058 <          #result = name + '_' + str(txt) + "." + ext
1059 <          result = name + '_' + txt + "." + ext
1060 <        else:
1061 <          #result = name + '_' + str(txt)
766 <          result = name + '_' + txt
767 <        
1057 >            ext = p[len(p)-1]
1058 >            result = name + '_' + txt + "." + ext
1059 >        else:
1060 >            result = name + '_' + txt
1061 >
1062          return result
1063  
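A worked example of the renaming rule implemented by numberFile_ may help: the job tag is spliced in before the last extension, or simply appended when there is none (standalone re-implementation, for illustration only):

    def number_file(name, tag):
        parts = name.split('.')
        if len(parts) > 1:
            # keep everything up to the last dot, insert the tag, restore the extension
            return '.'.join(parts[:-1]) + '_' + tag + '.' + parts[-1]
        return name + '_' + tag

    # number_file('histo.root', '$NJob')  -> 'histo_$NJob.root'
    # number_file('out.log.txt', '3')     -> 'out.log_3.txt'
    # number_file('summary', '3')         -> 'summary_3'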
1064 <    def getRequirements(self):
1064 >    def getRequirements(self, nj=[]):
1065          """
1066 <        return job requirements to add to jdl files
1066 >        return job requirements to add to jdl files
1067          """
1068          req = ''
1069 <        if common.analisys_common_info['sites']:
1070 <            if common.analisys_common_info['sw_version']:
1071 <                req='Member("VO-cms-' + \
1072 <                     common.analisys_common_info['sw_version'] + \
1073 <                     '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1074 <            if len(common.analisys_common_info['sites'])>0:
1075 <                req = req + ' && ('
1076 <                for i in range(len(common.analisys_common_info['sites'])):
1077 <                    req = req + 'other.GlueCEInfoHostName == "' \
1078 <                         + common.analisys_common_info['sites'][i] + '"'
1079 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
1080 <                        req = req + ' || '
1081 <            req = req + ')'
788 <        #print "req = ", req
1069 >        if self.version:
1070 >            req='Member("VO-cms-' + \
1071 >                 self.version + \
1072 >                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1073 >        if self.executable_arch:
1074 >            req+=' && Member("VO-cms-' + \
1075 >                 self.executable_arch + \
1076 >                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1077 >
1078 >        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1079 >        if common.scheduler.name() == "glitecoll":
1080 >            req += ' && other.GlueCEStateStatus == "Production" '
1081 >
1082          return req
1083  
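As an example, with hypothetical values self.version = 'CMSSW_1_6_7' and self.executable_arch = 'slc4_ia32_gcc345', the requirements string assembled above would read (wrapped here for readability):

    Member("VO-cms-CMSSW_1_6_7", other.GlueHostApplicationSoftwareRunTimeEnvironment)
     && Member("VO-cms-slc4_ia32_gcc345", other.GlueHostApplicationSoftwareRunTimeEnvironment)
     && (other.GlueHostNetworkAdapterOutboundIP)

with the GlueCEStateStatus clause appended only when the glitecoll scheduler is in use.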
1084      def configFilename(self):
1085          """ return the config filename """
1086 <        return self.name()+'.cfg'
1086 >        # FUTURE: Can remove cfg mode for CMSSW >= 2_1_x
1087 >        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
1088 >          return self.name()+'.py'
1089 >        else:
1090 >          return self.name()+'.cfg'
1091  
795    ### OLI_DANIELE
1092      def wsSetupCMSOSGEnvironment_(self):
1093          """
1094          Returns part of a job script which prepares
1095          the execution environment and which is common for all CMS jobs.
1096          """
1097 <        txt = '\n'
1098 <        txt += '   echo "### SETUP CMS OSG  ENVIRONMENT ###"\n'
1099 <        txt += '   if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
1100 <        txt += '      # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
1101 <        txt += '       source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1102 <        txt += '   elif [ -f $OSG_APP/cmssoft/cmsset_default.sh ] ;then\n'
1103 <        txt += '      # Use $OSG_APP/cmssoft/cmsset_default.sh to setup cms software\n'
1104 <        txt += '       source $OSG_APP/cmssoft/cmsset_default.sh '+self.version+'\n'
1105 <        txt += '   else\n'
1106 <        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1107 <        txt += '       echo "JOB_EXIT_STATUS = 10020"\n'
1108 <        txt += '       echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1109 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
814 <        txt += '       exit\n'
815 <        txt += '\n'
816 <        txt += '       echo "Remove working directory: $WORKING_DIR"\n'
817 <        txt += '       cd $RUNTIME_AREA\n'
818 <        txt += '       /bin/rm -rf $WORKING_DIR\n'
819 <        txt += '       if [ -d $WORKING_DIR ] ;then\n'
820 <        txt += '           echo "OSG WORKING DIR ==> $WORKING_DIR could not be deleted on on WN `hostname`"\n'
821 <        txt += '       fi\n'
822 <        txt += '\n'
823 <        txt += '       exit\n'
824 <        txt += '   fi\n'
1097 >        txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n'
1098 >        txt += '    echo ">>> setup CMS OSG environment:"\n'
1099 >        txt += '    echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
1100 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1101 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1102 >        txt += '    if [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1103 >        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1104 >        txt += '        source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1105 >        txt += '    else\n'
1106 >        txt += '        echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1107 >        txt += '        job_exit_code=10020\n'
1108 >        txt += '        func_exit\n'
1109 >        txt += '    fi\n'
1110          txt += '\n'
1111 <        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1112 <        txt += '   echo " END SETUP CMS OSG  ENVIRONMENT "\n'
1111 >        txt += '    echo "==> setup cms environment ok"\n'
1112 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1113  
1114          return txt
1115 <
831 <    ### OLI_DANIELE
1115 >
1116      def wsSetupCMSLCGEnvironment_(self):
1117          """
1118          Returns part of a job script which prepares
1119          the execution environment and which is common for all CMS jobs.
1120          """
1121 <        txt  = '   \n'
1122 <        txt += '   echo " ### SETUP CMS LCG  ENVIRONMENT ### "\n'
1123 <        txt += '      echo "JOB_EXIT_STATUS = 0"\n'
1124 <        txt += '   if [ ! $VO_CMS_SW_DIR ] ;then\n'
1125 <        txt += '       echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
1126 <        txt += '       echo "JOB_EXIT_STATUS = 10031" \n'
1127 <        txt += '       echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
1128 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1129 <        txt += '       exit\n'
1130 <        txt += '   else\n'
1131 <        txt += '       echo "Sourcing environment... "\n'
1132 <        txt += '       if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1133 <        txt += '           echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1134 <        txt += '           echo "JOB_EXIT_STATUS = 10020"\n'
1135 <        txt += '           echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1136 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1137 <        txt += '           exit\n'
1138 <        txt += '       fi\n'
1139 <        txt += '       echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1140 <        txt += '       source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1141 <        txt += '       result=$?\n'
1142 <        txt += '       if [ $result -ne 0 ]; then\n'
1143 <        txt += '           echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1144 <        txt += '           echo "JOB_EXIT_STATUS = 10032"\n'
1145 <        txt += '           echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1146 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1147 <        txt += '           exit\n'
1148 <        txt += '       fi\n'
1149 <        txt += '   fi\n'
1150 <        txt += '   \n'
1151 <        txt += '   string=`cat /etc/redhat-release`\n'
1152 <        txt += '   echo $string\n'
1153 <        txt += '   if [[ $string = *alhalla* ]]; then\n'
1154 <        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1155 <        txt += '   elif [[ $string = *Enterprise* ]] || [[ $string = *cientific* ]]; then\n'
1156 <        txt += '       export SCRAM_ARCH=slc3_ia32_gcc323\n'
1157 <        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1158 <        txt += '   else\n'
1159 <        txt += '       echo "SET_CMS_ENV 1 ==> ERROR OS unknown, LCG environment not initialized"\n'
1160 <        txt += '       echo "JOB_EXIT_STATUS = 10033"\n'
1161 <        txt += '       echo "JobExitCode=10033" | tee -a $RUNTIME_AREA/$repo\n'
1162 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1163 <        txt += '       exit\n'
1164 <        txt += '   fi\n'
1165 <        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1166 <        txt += '   echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
1121 >        txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n'
1122 >        txt += '    echo ">>> setup CMS LCG environment:"\n'
1123 >        txt += '    echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
1124 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1125 >        txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
1126 >        txt += '    if [ ! $VO_CMS_SW_DIR ] ;then\n'
1127 >        txt += '        echo "ERROR ==> CMS software dir not found on WN `hostname`"\n'
1128 >        txt += '        job_exit_code=10031\n'
1129 >        txt += '        func_exit\n'
1130 >        txt += '    else\n'
1131 >        txt += '        echo "Sourcing environment... "\n'
1132 >        txt += '        if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1133 >        txt += '            echo "ERROR ==> cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1134 >        txt += '            job_exit_code=10020\n'
1135 >        txt += '            func_exit\n'
1136 >        txt += '        fi\n'
1137 >        txt += '        echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1138 >        txt += '        source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1139 >        txt += '        result=$?\n'
1140 >        txt += '        if [ $result -ne 0 ]; then\n'
1141 >        txt += '            echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1142 >        txt += '            job_exit_code=10032\n'
1143 >        txt += '            func_exit\n'
1144 >        txt += '        fi\n'
1145 >        txt += '    fi\n'
1146 >        txt += '    \n'
1147 >        txt += '    echo "==> setup cms environment ok"\n'
1148 >        return txt
1149 >
1150 >    def modifyReport(self, nj):
1151 >        """
1152 >        Insert the part of the script that modifies the FrameworkJobReport.
1153 >        """
1154 >        txt = '\n#Written by cms_cmssw::modifyReport\n'
1155 >        publish_data = int(self.cfg_params.get('USER.publish_data',0))
1156 >        if (publish_data == 1):
1157 >            processedDataset = self.cfg_params['USER.publish_data_name']
1158 >            LFNBaseName = LFNBase(processedDataset)
1159 >
1160 >            txt += 'if [ $copy_exit_status -eq 0 ]; then\n'
1161 >            txt += '    FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName)
1162 >            txt += 'else\n'
1163 >            txt += '    FOR_LFN=/copy_problems/ \n'
1164 >            txt += '    SE=""\n'
1165 >            txt += '    SE_PATH=""\n'
1166 >            txt += 'fi\n'
1167 >
1168 >            txt += 'echo ">>> Modify Job Report:" \n'
1169 >            txt += 'chmod a+x $SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1170 >            txt += 'ProcessedDataset='+processedDataset+'\n'
1171 >            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1172 >            txt += 'echo "SE = $SE"\n'
1173 >            txt += 'echo "SE_PATH = $SE_PATH"\n'
1174 >            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1175 >            txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1176 >            txt += 'echo "$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1177 >            txt += '$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1178 >            txt += 'modifyReport_result=$?\n'
1179 >            txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1180 >            txt += '    modifyReport_result=70500\n'
1181 >            txt += '    job_exit_code=$modifyReport_result\n'
1182 >            txt += '    echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n'
1183 >            txt += '    echo "WARNING: Problem with ModifyJobReport"\n'
1184 >            txt += 'else\n'
1185 >            txt += '    mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1186 >            txt += 'fi\n'
1187 >        return txt
1188 >
1189 >    def wsParseFJR(self):
1190 >        """
1191 >        Parse the FrameworkJobReport to obtain useful info.
1192 >        """
1193 >        txt = '\n#Written by cms_cmssw::wsParseFJR\n'
1194 >        txt += 'echo ">>> Parse FrameworkJobReport crab_fjr.xml"\n'
1195 >        txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1196 >        txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1197 >        txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --MonitorID $MonitorID --MonitorJobID $MonitorJobID`\n'
1198 >        txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1199 >        txt += '        tmp_executable_exit_status=`echo $cmd_out | awk -F\; \'{print $1}\' | awk -F \' \' \'{print $NF}\'`\n'
1200 >        txt += '        if [ -n $tmp_executable_exit_status ];then\n'
1201 >        txt += '            executable_exit_status=$tmp_executable_exit_status\n'
1202 >        txt += '        fi\n'
1203 >        txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1204 >        txt += '            echo ">>> crab_fjr.xml contents: "\n'
1205 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1206 >        txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1207 >        txt += '        else\n'
1208 >        txt += '            echo "Extracted ExitStatus from FrameworkJobReport parsing output: $executable_exit_status"\n'
1209 >        txt += '        fi\n'
1210 >        txt += '    else\n'
1211 >        txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1212 >        txt += '    fi\n'
1213 >          #### Patch to check input data reading for CMSSW16x. Hopefully we'll remove it asap.
1214 >
1215 >        if self.datasetPath:
1216 >          # VERIFY PROCESSED DATA
1217 >            txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1218 >            txt += '      echo ">>> Verify list of processed files:"\n'
1219 >            txt += '      echo $InputFiles |tr -d "\\\\" |tr "," "\\n"|tr -d "\\"" > input-files.txt\n'
1220 >            txt += '      grep LFN $RUNTIME_AREA/crab_fjr_$NJob.xml |cut -d">" -f2|cut -d"<" -f1|grep "/" > processed-files.txt\n'
1221 >            txt += '      cat input-files.txt  | sort | uniq > tmp.txt\n'
1222 >            txt += '      mv tmp.txt input-files.txt\n'
1223 >            txt += '      echo "cat input-files.txt"\n'
1224 >            txt += '      echo "----------------------"\n'
1225 >            txt += '      cat input-files.txt\n'
1226 >            txt += '      cat processed-files.txt | sort | uniq > tmp.txt\n'
1227 >            txt += '      mv tmp.txt processed-files.txt\n'
1228 >            txt += '      echo "----------------------"\n'
1229 >            txt += '      echo "cat processed-files.txt"\n'
1230 >            txt += '      echo "----------------------"\n'
1231 >            txt += '      cat processed-files.txt\n'
1232 >            txt += '      echo "----------------------"\n'
1233 >            txt += '      diff -q input-files.txt processed-files.txt\n'
1234 >            txt += '      fileverify_status=$?\n'
1235 >            txt += '      if [ $fileverify_status -ne 0 ]; then\n'
1236 >            txt += '         executable_exit_status=30001\n'
1237 >            txt += '         echo "ERROR ==> not all input files processed"\n'
1238 >            txt += '         echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1239 >            txt += '         echo "      ==> diff input-files.txt processed-files.txt"\n'
1240 >            txt += '      fi\n'
1241 >            txt += '    fi\n'
1242 >            txt += '\n'
1243 >        txt += 'else\n'
1244 >        txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1245 >        txt += 'fi\n'
1246 >        txt += '\n'
1247 >        txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1248 >        txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1249 >        txt += 'job_exit_code=$executable_exit_status\n'
1250 >
1251 >        return txt
1252 >
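The shell-level file verification emitted above is essentially a set comparison between the input LFNs passed to the job and the LFNs reported in the framework job report; a minimal Python sketch of the same idea (hypothetical helper and example LFNs):

    def all_inputs_processed(input_lfns, processed_lfns):
        # Equivalent to the sort | uniq plus diff -q pipeline in the wrapper:
        # the wrapper sets executable_exit_status=30001 when the two sets differ.
        return set(input_lfns) == set(processed_lfns)

    # all_inputs_processed(['/store/a.root', '/store/b.root'],
    #                      ['/store/b.root', '/store/a.root'])   -> True
    # all_inputs_processed(['/store/a.root', '/store/b.root'],
    #                      ['/store/a.root'])                    -> False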
1253 >    def setParam_(self, param, value):
1254 >        self._params[param] = value
1255 >
1256 >    def getParams(self):
1257 >        return self._params
1258 >
1259 >    def uniquelist(self, old):
1260 >        """
1261 >        remove duplicates from a list
1262 >        """
1263 >        nd={}
1264 >        for e in old:
1265 >            nd[e]=0
1266 >        return nd.keys()
1267 >
1268 >    def outList(self):
1269 >        """
1270 >        Build the list of expected output files and export it as filesToCheck.
1271 >        """
1272 >        txt = ''
1273 >        txt += 'echo ">>> list of expected files on output sandbox"\n'
1274 >        listOutFiles = []
1275 >        stdout = 'CMSSW_$NJob.stdout'
1276 >        stderr = 'CMSSW_$NJob.stderr'
1277 >        if (self.return_data == 1):
1278 >            for file in (self.output_file+self.output_file_sandbox):
1279 >                listOutFiles.append(self.numberFile_(file, '$NJob'))
1280 >            listOutFiles.append(stdout)
1281 >            listOutFiles.append(stderr)
1282 >        else:
1283 >            for file in (self.output_file_sandbox):
1284 >                listOutFiles.append(self.numberFile_(file, '$NJob'))
1285 >            listOutFiles.append(stdout)
1286 >            listOutFiles.append(stderr)
1287 >        txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1288 >        txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1289 >        txt += 'export filesToCheck\n'
1290          return txt
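To make the outcome concrete: with return_data = 1, output_file = ['histo.root'] and an empty output_file_sandbox (all hypothetical values), the fragment above would leave the wrapper with

    filesToCheck="histo_$NJob.root CMSSW_$NJob.stdout CMSSW_$NJob.stderr"

exported for later checks by the job wrapper.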

Diff Legend

  Removed lines (old revision line number only, no marker)
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)