5 |
|
from BlackWhiteListParser import BlackWhiteListParser |
6 |
|
import common |
7 |
|
import Scram |
8 |
+ |
from LFNBaseName import * |
9 |
|
|
10 |
|
import os, string, glob |
11 |
|
|
14 |
|
JobType.__init__(self, 'CMSSW') |
15 |
|
common.logger.debug(3,'CMSSW::__init__') |
16 |
|
|
17 |
+ |
self.argsList = [] |
18 |
+ |
|
19 |
|
self._params = {} |
20 |
|
self.cfg_params = cfg_params |
18 |
– |
|
21 |
|
# init BlackWhiteListParser |
22 |
|
self.blackWhiteListParser = BlackWhiteListParser(cfg_params) |
23 |
|
|
24 |
< |
try: |
23 |
< |
self.MaxTarBallSize = float(self.cfg_params['EDG.maxtarballsize']) |
24 |
< |
except KeyError: |
25 |
< |
self.MaxTarBallSize = 9.5 |
24 |
> |
self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5)) |
25 |
|
|
26 |
|
# number of jobs requested to be created, limit obj splitting |
27 |
|
self.ncjobs = ncjobs |
48 |
|
# Try to block creation in case of arch/version mismatch |
49 |
|
# |
50 |
|
|
51 |
< |
a = string.split(self.version, "_") |
51 |
> |
# a = string.split(self.version, "_") |
52 |
> |
# |
53 |
> |
# if int(a[1]) == 1 and (int(a[2]) < 5 and self.executable_arch.find('slc4') == 0): |
54 |
> |
# msg = "Warning: You are using %s version of CMSSW with %s architecture. \n--> Did you compile your libraries with SLC3? Otherwise you can find some problems running on SLC4 Grid nodes.\n"%(self.version, self.executable_arch) |
55 |
> |
# common.logger.message(msg) |
56 |
> |
# if int(a[1]) == 1 and (int(a[2]) >= 5 and self.executable_arch.find('slc3') == 0): |
57 |
> |
# msg = "Error: CMS does not support %s with %s architecture"%(self.version, self.executable_arch) |
58 |
> |
# raise CrabException(msg) |
59 |
> |
# |
60 |
|
|
54 |
– |
if int(a[1]) == 1 and (int(a[2]) < 5 and self.executable_arch.find('slc4') == 0): |
55 |
– |
msg = "Warning: You are using %s version of CMSSW with %s architecture. \n--> Did you compile your libraries with SLC3? Otherwise you can find some problems running on SLC4 Grid nodes.\n"%(self.version, self.executable_arch) |
56 |
– |
common.logger.message(msg) |
57 |
– |
if int(a[1]) == 1 and (int(a[2]) >= 5 and self.executable_arch.find('slc3') == 0): |
58 |
– |
msg = "Error: CMS does not support %s with %s architecture"%(self.version, self.executable_arch) |
59 |
– |
raise CrabException(msg) |
60 |
– |
|
61 |
– |
common.taskDB.setDict('codeVersion',self.version) |
62 |
– |
self.setParam_('application', self.version) |
61 |
|
|
62 |
|
### collect Data cards |
63 |
|
|
64 |
< |
try: |
67 |
< |
tmp = cfg_params['CMSSW.datasetpath'] |
68 |
< |
log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp) |
69 |
< |
if string.lower(tmp)=='none': |
70 |
< |
self.datasetPath = None |
71 |
< |
self.selectNoInput = 1 |
72 |
< |
else: |
73 |
< |
self.datasetPath = tmp |
74 |
< |
self.selectNoInput = 0 |
75 |
< |
except KeyError: |
64 |
> |
if not cfg_params.has_key('CMSSW.datasetpath'): |
65 |
|
msg = "Error: datasetpath not defined " |
66 |
|
raise CrabException(msg) |
67 |
< |
|
68 |
< |
# ML monitoring |
69 |
< |
# split dataset path style: /PreProdR3Minbias/SIM/GEN-SIM |
70 |
< |
if not self.datasetPath: |
71 |
< |
self.setParam_('dataset', 'None') |
83 |
< |
self.setParam_('owner', 'None') |
67 |
> |
tmp = cfg_params['CMSSW.datasetpath'] |
68 |
> |
log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp) |
69 |
> |
if string.lower(tmp)=='none': |
70 |
> |
self.datasetPath = None |
71 |
> |
self.selectNoInput = 1 |
72 |
|
else: |
73 |
< |
try: |
74 |
< |
datasetpath_split = self.datasetPath.split("/") |
87 |
< |
# standard style |
88 |
< |
self.setParam_('datasetFull', self.datasetPath) |
89 |
< |
self.setParam_('dataset', datasetpath_split[1]) |
90 |
< |
self.setParam_('owner', datasetpath_split[2]) |
91 |
< |
except: |
92 |
< |
self.setParam_('dataset', self.datasetPath) |
93 |
< |
self.setParam_('owner', self.datasetPath) |
94 |
< |
|
95 |
< |
self.setTaskid_() |
96 |
< |
self.setParam_('taskId', self.cfg_params['taskId']) |
73 |
> |
self.datasetPath = tmp |
74 |
> |
self.selectNoInput = 0 |
75 |
|
|
76 |
|
self.dataTiers = [] |
77 |
|
|
78 |
|
## now the application |
79 |
< |
try: |
80 |
< |
self.executable = cfg_params['CMSSW.executable'] |
103 |
< |
self.setParam_('exe', self.executable) |
104 |
< |
log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable) |
105 |
< |
msg = "Default executable cmsRun overridden. Switch to " + self.executable |
106 |
< |
log.debug(3,msg) |
107 |
< |
except KeyError: |
108 |
< |
self.executable = 'cmsRun' |
109 |
< |
self.setParam_('exe', self.executable) |
110 |
< |
msg = "User executable not defined. Use cmsRun" |
111 |
< |
log.debug(3,msg) |
112 |
< |
pass |
79 |
> |
self.executable = cfg_params.get('CMSSW.executable','cmsRun') |
80 |
> |
log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable) |
81 |
|
|
82 |
< |
try: |
115 |
< |
self.pset = cfg_params['CMSSW.pset'] |
116 |
< |
log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset) |
117 |
< |
if self.pset.lower() != 'none' : |
118 |
< |
if (not os.path.exists(self.pset)): |
119 |
< |
raise CrabException("User defined PSet file "+self.pset+" does not exist") |
120 |
< |
else: |
121 |
< |
self.pset = None |
122 |
< |
except KeyError: |
82 |
> |
if not cfg_params.has_key('CMSSW.pset'): |
83 |
|
raise CrabException("PSet file missing. Cannot run cmsRun ") |
84 |
+ |
self.pset = cfg_params['CMSSW.pset'] |
85 |
+ |
log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset) |
86 |
+ |
if self.pset.lower() != 'none' : |
87 |
+ |
if (not os.path.exists(self.pset)): |
88 |
+ |
raise CrabException("User defined PSet file "+self.pset+" does not exist") |
89 |
+ |
else: |
90 |
+ |
self.pset = None |
91 |
|
|
92 |
|
# output files |
93 |
|
## stuff which must be returned always via sandbox |
97 |
|
self.output_file_sandbox.append(self.fjrFileName) |
98 |
|
|
99 |
|
# other output files to be returned via sandbox or copied to SE |
100 |
< |
try: |
101 |
< |
self.output_file = [] |
102 |
< |
tmp = cfg_params['CMSSW.output_file'] |
103 |
< |
if tmp != '': |
104 |
< |
tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',') |
105 |
< |
log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles)) |
106 |
< |
for tmp in tmpOutFiles: |
107 |
< |
tmp=string.strip(tmp) |
141 |
< |
self.output_file.append(tmp) |
142 |
< |
pass |
143 |
< |
else: |
144 |
< |
log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n") |
100 |
> |
self.output_file = [] |
101 |
> |
tmp = cfg_params.get('CMSSW.output_file',None) |
102 |
> |
if tmp : |
103 |
> |
tmpOutFiles = string.split(tmp,',') |
104 |
> |
log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles)) |
105 |
> |
for tmp in tmpOutFiles: |
106 |
> |
tmp=string.strip(tmp) |
107 |
> |
self.output_file.append(tmp) |
108 |
|
pass |
109 |
< |
pass |
147 |
< |
except KeyError: |
109 |
> |
else: |
110 |
|
log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n") |
111 |
< |
pass |
111 |
> |
pass |
112 |
|
|
113 |
|
# script_exe file as additional file in inputSandbox |
114 |
< |
try: |
115 |
< |
self.scriptExe = cfg_params['USER.script_exe'] |
116 |
< |
if self.scriptExe != '': |
117 |
< |
if not os.path.isfile(self.scriptExe): |
118 |
< |
msg ="ERROR. file "+self.scriptExe+" not found" |
119 |
< |
raise CrabException(msg) |
158 |
< |
self.additional_inbox_files.append(string.strip(self.scriptExe)) |
159 |
< |
except KeyError: |
160 |
< |
self.scriptExe = '' |
114 |
> |
self.scriptExe = cfg_params.get('USER.script_exe',None) |
115 |
> |
if self.scriptExe : |
116 |
> |
if not os.path.isfile(self.scriptExe): |
117 |
> |
msg ="ERROR. file "+self.scriptExe+" not found" |
118 |
> |
raise CrabException(msg) |
119 |
> |
self.additional_inbox_files.append(string.strip(self.scriptExe)) |
120 |
|
|
121 |
|
#CarlosDaniele |
122 |
|
if self.datasetPath == None and self.pset == None and self.scriptExe == '' : |
123 |
< |
msg ="Error. script_exe not defined" |
124 |
< |
raise CrabException(msg) |
123 |
> |
msg ="Error. script_exe not defined" |
124 |
> |
raise CrabException(msg) |
125 |
|
|
126 |
|
## additional input files |
127 |
< |
try: |
127 |
> |
if cfg_params.has_key('USER.additional_input_files'): |
128 |
|
tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',') |
129 |
|
for tmp in tmpAddFiles: |
130 |
|
tmp = string.strip(tmp) |
148 |
|
pass |
149 |
|
pass |
150 |
|
common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files)) |
151 |
< |
except KeyError: |
193 |
< |
pass |
194 |
< |
|
195 |
< |
# files per job |
196 |
< |
try: |
197 |
< |
if (cfg_params['CMSSW.files_per_jobs']): |
198 |
< |
raise CrabException("files_per_jobs no longer supported. Quitting.") |
199 |
< |
except KeyError: |
200 |
< |
pass |
151 |
> |
pass |
152 |
|
|
153 |
|
## Events per job |
154 |
< |
try: |
154 |
> |
if cfg_params.has_key('CMSSW.events_per_job'): |
155 |
|
self.eventsPerJob =int( cfg_params['CMSSW.events_per_job']) |
156 |
|
self.selectEventsPerJob = 1 |
157 |
< |
except KeyError: |
157 |
> |
else: |
158 |
|
self.eventsPerJob = -1 |
159 |
|
self.selectEventsPerJob = 0 |
160 |
|
|
161 |
|
## number of jobs |
162 |
< |
try: |
162 |
> |
if cfg_params.has_key('CMSSW.number_of_jobs'): |
163 |
|
self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs']) |
164 |
|
self.selectNumberOfJobs = 1 |
165 |
< |
except KeyError: |
165 |
> |
else: |
166 |
|
self.theNumberOfJobs = 0 |
167 |
|
self.selectNumberOfJobs = 0 |
168 |
|
|
169 |
< |
try: |
169 |
> |
if cfg_params.has_key('CMSSW.total_number_of_events'): |
170 |
|
self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events']) |
171 |
|
self.selectTotalNumberEvents = 1 |
172 |
< |
except KeyError: |
172 |
> |
else: |
173 |
|
self.total_number_of_events = 0 |
174 |
|
self.selectTotalNumberEvents = 0 |
175 |
|
|
182 |
|
msg = 'Must specify number_of_jobs.' |
183 |
|
raise CrabException(msg) |
184 |
|
|
185 |
< |
## source seed for pythia |
186 |
< |
try: |
187 |
< |
self.sourceSeed = int(cfg_params['CMSSW.pythia_seed']) |
188 |
< |
except KeyError: |
189 |
< |
self.sourceSeed = None |
190 |
< |
common.logger.debug(5,"No seed given") |
185 |
> |
## New method of dealing with seeds |
186 |
> |
self.incrementSeeds = [] |
187 |
> |
self.preserveSeeds = [] |
188 |
> |
if cfg_params.has_key('CMSSW.preserve_seeds'): |
189 |
> |
tmpList = cfg_params['CMSSW.preserve_seeds'].split(',') |
190 |
> |
for tmp in tmpList: |
191 |
> |
tmp.strip() |
192 |
> |
self.preserveSeeds.append(tmp) |
193 |
> |
if cfg_params.has_key('CMSSW.increment_seeds'): |
194 |
> |
tmpList = cfg_params['CMSSW.increment_seeds'].split(',') |
195 |
> |
for tmp in tmpList: |
196 |
> |
tmp.strip() |
197 |
> |
self.incrementSeeds.append(tmp) |
198 |
> |
|
199 |
> |
## Old method of dealing with seeds |
200 |
> |
## FUTURE: This is for old CMSSW and old CRAB. Can throw exceptions after a couple of CRAB releases and then |
201 |
> |
## remove |
202 |
> |
self.sourceSeed = cfg_params.get('CMSSW.pythia_seed',None) |
203 |
> |
if self.sourceSeed: |
204 |
> |
print "pythia_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds." |
205 |
> |
self.incrementSeeds.append('sourceSeed') |
206 |
> |
|
207 |
> |
self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None) |
208 |
> |
if self.sourceSeedVtx: |
209 |
> |
print "vtx_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds." |
210 |
> |
self.incrementSeeds.append('VtxSmeared') |
211 |
> |
|
212 |
> |
self.sourceSeedG4 = cfg_params.get('CMSSW.g4_seed',None) |
213 |
> |
if self.sourceSeedG4: |
214 |
> |
print "g4_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds." |
215 |
> |
self.incrementSeeds.append('g4SimHits') |
216 |
> |
|
217 |
> |
self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None) |
218 |
> |
if self.sourceSeedMix: |
219 |
> |
print "mix_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds." |
220 |
> |
self.incrementSeeds.append('mix') |
221 |
|
|
222 |
< |
try: |
242 |
< |
self.sourceSeedVtx = int(cfg_params['CMSSW.vtx_seed']) |
243 |
< |
except KeyError: |
244 |
< |
self.sourceSeedVtx = None |
245 |
< |
common.logger.debug(5,"No vertex seed given") |
246 |
< |
|
247 |
< |
try: |
248 |
< |
self.sourceSeedG4 = int(cfg_params['CMSSW.g4_seed']) |
249 |
< |
except KeyError: |
250 |
< |
self.sourceSeedG4 = None |
251 |
< |
common.logger.debug(5,"No g4 sim hits seed given") |
252 |
< |
|
253 |
< |
try: |
254 |
< |
self.sourceSeedMix = int(cfg_params['CMSSW.mix_seed']) |
255 |
< |
except KeyError: |
256 |
< |
self.sourceSeedMix = None |
257 |
< |
common.logger.debug(5,"No mix seed given") |
222 |
> |
self.firstRun = cfg_params.get('CMSSW.first_run',None) |
223 |
|
|
259 |
– |
try: |
260 |
– |
self.firstRun = int(cfg_params['CMSSW.first_run']) |
261 |
– |
except KeyError: |
262 |
– |
self.firstRun = None |
263 |
– |
common.logger.debug(5,"No first run given") |
224 |
|
if self.pset != None: #CarlosDaniele |
225 |
|
import PsetManipulator as pp |
226 |
|
PsetEdit = pp.PsetManipulator(self.pset) #Daniele Pset |
227 |
|
|
228 |
+ |
# Copy/return |
229 |
+ |
|
230 |
+ |
self.copy_data = int(cfg_params.get('USER.copy_data',0)) |
231 |
+ |
self.return_data = int(cfg_params.get('USER.return_data',0)) |
232 |
+ |
|
233 |
|
#DBSDLS-start |
234 |
|
## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code |
235 |
|
self.maxEvents=0 # max events available ( --> check the requested nb. of evts in Creator.py) |
256 |
|
# modify Pset |
257 |
|
if self.pset != None: #CarlosDaniele |
258 |
|
try: |
259 |
< |
if (self.datasetPath): # standard job |
260 |
< |
# allow to processa a fraction of events in a file |
296 |
< |
PsetEdit.inputModule("INPUTFILE") |
297 |
< |
PsetEdit.maxEvent(0) |
298 |
< |
PsetEdit.skipEvent(0) |
299 |
< |
else: # pythia like job |
300 |
< |
PsetEdit.maxEvent(self.eventsPerJob) |
301 |
< |
if (self.firstRun): |
302 |
< |
PsetEdit.pythiaFirstRun(0) #First Run |
303 |
< |
if (self.sourceSeed) : |
304 |
< |
PsetEdit.pythiaSeed(0) |
305 |
< |
if (self.sourceSeedVtx) : |
306 |
< |
PsetEdit.vtxSeed(0) |
307 |
< |
if (self.sourceSeedG4) : |
308 |
< |
PsetEdit.g4Seed(0) |
309 |
< |
if (self.sourceSeedMix) : |
310 |
< |
PsetEdit.mixSeed(0) |
311 |
< |
# add FrameworkJobReport to parameter-set |
259 |
> |
# Add FrameworkJobReport to parameter-set, set max events. |
260 |
> |
# Reset later for data jobs by writeCFG which does all modifications |
261 |
|
PsetEdit.addCrabFJR(self.fjrFileName) |
262 |
+ |
PsetEdit.maxEvent(self.eventsPerJob) |
263 |
|
PsetEdit.psetWriter(self.configFilename()) |
264 |
|
except: |
265 |
|
msg='Error while manipuliating ParameterSet: exiting...' |
276 |
|
## Contact the DBS |
277 |
|
common.logger.message("Contacting Data Discovery Services ...") |
278 |
|
try: |
329 |
– |
|
279 |
|
self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params) |
280 |
|
self.pubdata.fetchDBSInfo() |
281 |
|
|
318 |
|
|
319 |
|
return sites |
320 |
|
|
321 |
+ |
# to Be Removed DS -- BL |
322 |
+ |
# def setArgsList(self, argsList): |
323 |
+ |
# self.argsList = argsList |
324 |
+ |
|
325 |
|
def jobSplittingByBlocks(self, blockSites): |
326 |
|
""" |
327 |
|
Perform job splitting. Jobs run over an integer number of files |
372 |
|
else : |
373 |
|
totalNumberOfJobs = self.ncjobs |
374 |
|
|
422 |
– |
|
375 |
|
blocks = blockSites.keys() |
376 |
|
blockCount = 0 |
377 |
|
# Backup variable in case self.maxEvents counted events in a non-included block |
426 |
|
except KeyError: |
427 |
|
common.logger.message("File "+str(file)+" has unknown number of events: skipping") |
428 |
|
|
429 |
< |
|
429 |
> |
eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining) |
430 |
|
# if less events in file remain than eventsPerJobRequested |
431 |
< |
if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) : |
431 |
> |
if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested): |
432 |
|
# if last file in block |
433 |
|
if ( fileCount == numFilesInBlock-1 ) : |
434 |
|
# end job using last file, use remaining events in block |
492 |
|
jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file]) |
493 |
|
# remove all but the last file |
494 |
|
filesEventCount = self.eventsbyfile[file] |
495 |
< |
parString = "" |
544 |
< |
parString += '\\\"' + file + '\\\"\,' |
495 |
> |
parString = '\\\"' + file + '\\\"\,' |
496 |
|
pass # END if |
497 |
|
pass # END while (iterate over files in the block) |
498 |
|
pass # END while (iterate over blocks in the dataset) |
512 |
|
for block in blocks: |
513 |
|
if block in jobsOfBlock.keys() : |
514 |
|
blockCounter += 1 |
515 |
< |
screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block))) |
515 |
> |
screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]), |
516 |
> |
','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block))) |
517 |
|
if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0: |
518 |
|
noSiteBlock.append( spanRanges(jobsOfBlock[block]) ) |
519 |
|
bloskNoSite.append( blockCounter ) |
533 |
|
for range_jobs in noSiteBlock: |
534 |
|
msg += str(range_jobs) + virgola |
535 |
|
msg += '\n will not be submitted and this block of data can not be analyzed!\n' |
536 |
+ |
if self.cfg_params.has_key('EDG.se_white_list'): |
537 |
+ |
msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n' |
538 |
+ |
msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n' |
539 |
+ |
msg += 'Please check if the dataset is available at this site!)\n' |
540 |
+ |
if self.cfg_params.has_key('EDG.ce_white_list'): |
541 |
+ |
msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n' |
542 |
+ |
msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n' |
543 |
+ |
msg += 'Please check if the dataset is available at this site!)\n' |
544 |
+ |
|
545 |
|
common.logger.message(msg) |
546 |
|
|
547 |
|
self.list_of_args = list_of_lists |
595 |
|
if (self.firstRun): |
596 |
|
## pythia first run |
597 |
|
args.append(str(self.firstRun)+str(i)) |
637 |
– |
if (self.sourceSeed): |
638 |
– |
args.append(str(self.sourceSeed)+str(i)) |
639 |
– |
if (self.sourceSeedVtx): |
640 |
– |
## + vtx random seed |
641 |
– |
args.append(str(self.sourceSeedVtx)+str(i)) |
642 |
– |
if (self.sourceSeedG4): |
643 |
– |
## + G4 random seed |
644 |
– |
args.append(str(self.sourceSeedG4)+str(i)) |
645 |
– |
if (self.sourceSeedMix): |
646 |
– |
## + Mix random seed |
647 |
– |
args.append(str(self.sourceSeedMix)+str(i)) |
648 |
– |
pass |
649 |
– |
pass |
598 |
|
self.list_of_args.append(args) |
651 |
– |
pass |
652 |
– |
|
653 |
– |
# print self.list_of_args |
599 |
|
|
600 |
|
return |
601 |
|
|
625 |
|
|
626 |
|
def split(self, jobParams): |
627 |
|
|
683 |
– |
common.jobDB.load() |
628 |
|
#### Fabio |
629 |
|
njobs = self.total_number_of_jobs |
630 |
|
arglist = self.list_of_args |
632 |
|
for i in range(njobs): |
633 |
|
jobParams.append("") |
634 |
|
|
635 |
+ |
listID=[] |
636 |
+ |
listField=[] |
637 |
|
for job in range(njobs): |
638 |
|
jobParams[job] = arglist[job] |
639 |
< |
# print str(arglist[job]) |
640 |
< |
# print jobParams[job] |
641 |
< |
common.jobDB.setArguments(job, jobParams[job]) |
642 |
< |
common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job])) |
643 |
< |
common.jobDB.setDestination(job, self.jobDestination[job]) |
639 |
> |
listID.append(job+1) |
640 |
> |
job_ToSave ={} |
641 |
> |
concString = ' ' |
642 |
> |
argu='' |
643 |
> |
if len(jobParams[job]): |
644 |
> |
argu += concString.join(jobParams[job] ) |
645 |
> |
job_ToSave['arguments']= str(job+1)+' '+argu## new BL--DS |
646 |
> |
job_ToSave['dlsDestination']= self.jobDestination[job]## new BL--DS |
647 |
> |
#common._db.updateJob_(job,job_ToSave)## new BL--DS |
648 |
> |
listField.append(job_ToSave) |
649 |
> |
msg="Job "+str(job)+" Arguments: "+str(job+1)+" "+argu+"\n" \ |
650 |
> |
+" Destination: "+str(self.jobDestination[job]) |
651 |
> |
common.logger.debug(5,msg) |
652 |
> |
#common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job])) |
653 |
> |
common._db.updateJob_(listID,listField)## new BL--DS |
654 |
> |
## Pay Attention Here....DS--BL |
655 |
> |
self.argsList = (len(jobParams[1])+1) |
656 |
|
|
699 |
– |
common.jobDB.save() |
657 |
|
return |
658 |
|
|
702 |
– |
def getJobTypeArguments(self, nj, sched): |
703 |
– |
result = '' |
704 |
– |
for i in common.jobDB.arguments(nj): |
705 |
– |
result=result+str(i)+" " |
706 |
– |
return result |
707 |
– |
|
659 |
|
def numberOfJobs(self): |
660 |
|
# Fabio |
661 |
|
return self.total_number_of_jobs |
690 |
|
|
691 |
|
## check if working area is release top |
692 |
|
if swReleaseTop == '' or swArea == swReleaseTop: |
693 |
+ |
common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop) |
694 |
|
return |
695 |
|
|
696 |
|
import tarfile |
738 |
|
common.logger.debug(5,"data "+root+"/data"+" to be tarred") |
739 |
|
tar.add(root+"/data",root[swAreaLen:]+"/data") |
740 |
|
|
789 |
– |
## Add ProdAgent dir to tar |
790 |
– |
paDir = 'ProdAgentApi' |
791 |
– |
pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi' |
792 |
– |
if os.path.isdir(pa): |
793 |
– |
tar.add(pa,paDir) |
741 |
|
|
742 |
< |
### FEDE FOR DBS PUBLICATION |
796 |
< |
## Add PRODCOMMON dir to tar |
742 |
> |
## Add ProdCommon dir to tar |
743 |
|
prodcommonDir = 'ProdCommon' |
744 |
|
prodcommonPath = os.environ['CRABDIR'] + '/' + 'ProdCommon' |
745 |
|
if os.path.isdir(prodcommonPath): |
746 |
|
tar.add(prodcommonPath,prodcommonDir) |
801 |
– |
############################# |
747 |
|
|
748 |
|
common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames())) |
749 |
|
tar.close() |
760 |
|
try: |
761 |
|
tar = tarfile.open(self.MLtgzfile, "w:gz") |
762 |
|
path=os.environ['CRABDIR'] + '/python/' |
763 |
< |
for file in ['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'parseCrabFjr.py']: |
763 |
> |
for file in ['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'parseCrabFjr.py','writeCfg.py', 'JobReportErrorCode.py']: |
764 |
|
tar.add(path+file,file) |
765 |
|
common.logger.debug(5,"Files added to "+self.MLtgzfile+" : "+str(tar.getnames())) |
766 |
|
tar.close() |
782 |
|
tar.close() |
783 |
|
return tarName |
784 |
|
|
785 |
< |
def wsSetupEnvironment(self, nj): |
785 |
> |
def wsSetupEnvironment(self, nj=0): |
786 |
|
""" |
787 |
|
Returns part of a job script which prepares |
788 |
|
the execution environment for the job 'nj'. |
789 |
|
""" |
790 |
|
# Prepare JobType-independent part |
791 |
< |
txt = '' |
791 |
> |
txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n' |
792 |
|
txt += 'echo ">>> setup environment"\n' |
793 |
|
txt += 'if [ $middleware == LCG ]; then \n' |
794 |
|
txt += self.wsSetupCMSLCGEnvironment_() |
795 |
|
txt += 'elif [ $middleware == OSG ]; then\n' |
796 |
|
txt += ' WORKING_DIR=`/bin/mktemp -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n' |
797 |
|
txt += ' if [ ! $? == 0 ] ;then\n' |
798 |
< |
txt += ' echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n' |
799 |
< |
txt += ' echo "JOB_EXIT_STATUS = 10016"\n' |
800 |
< |
txt += ' echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n' |
856 |
< |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
857 |
< |
txt += ' exit 1\n' |
798 |
> |
txt += ' echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n' |
799 |
> |
txt += ' job_exit_code=10016\n' |
800 |
> |
txt += ' func_exit\n' |
801 |
|
txt += ' fi\n' |
802 |
|
txt += ' echo ">>> Created working directory: $WORKING_DIR"\n' |
803 |
|
txt += '\n' |
817 |
|
txt += scram+' project CMSSW '+self.version+'\n' |
818 |
|
txt += 'status=$?\n' |
819 |
|
txt += 'if [ $status != 0 ] ; then\n' |
820 |
< |
txt += ' echo "SET_EXE_ENV 10034 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n' |
821 |
< |
txt += ' echo "JOB_EXIT_STATUS = 10034"\n' |
822 |
< |
txt += ' echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n' |
880 |
< |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
881 |
< |
txt += ' if [ $middleware == OSG ]; then \n' |
882 |
< |
txt += ' cd $RUNTIME_AREA\n' |
883 |
< |
txt += ' echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n' |
884 |
< |
txt += ' echo ">>> Remove working directory: $WORKING_DIR"\n' |
885 |
< |
txt += ' /bin/rm -rf $WORKING_DIR\n' |
886 |
< |
txt += ' if [ -d $WORKING_DIR ] ;then\n' |
887 |
< |
txt += ' echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n' |
888 |
< |
txt += ' echo "JOB_EXIT_STATUS = 10018"\n' |
889 |
< |
txt += ' echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n' |
890 |
< |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
891 |
< |
txt += ' fi\n' |
892 |
< |
txt += ' fi \n' |
893 |
< |
txt += ' exit 1 \n' |
820 |
> |
txt += ' echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n' |
821 |
> |
txt += ' job_exit_code=10034\n' |
822 |
> |
txt += ' func_exit\n' |
823 |
|
txt += 'fi \n' |
824 |
|
txt += 'cd '+self.version+'\n' |
825 |
|
########## FEDE FOR DBS2 ###################### |
827 |
|
txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n' |
828 |
|
############################################### |
829 |
|
### needed grep for bug in scramv1 ### |
901 |
– |
txt += scram+' runtime -sh\n' |
830 |
|
txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n' |
903 |
– |
txt += 'echo $PATH\n' |
904 |
– |
|
831 |
|
# Handle the arguments: |
832 |
|
txt += "\n" |
833 |
|
txt += "## number of arguments (first argument always jobnumber)\n" |
834 |
|
txt += "\n" |
835 |
< |
txt += "if [ $nargs -lt 2 ]\n" |
835 |
> |
txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n" |
836 |
|
txt += "then\n" |
837 |
< |
txt += " echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$nargs+ \n" |
838 |
< |
txt += ' echo "JOB_EXIT_STATUS = 50113"\n' |
839 |
< |
txt += ' echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n' |
914 |
< |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
915 |
< |
txt += ' if [ $middleware == OSG ]; then \n' |
916 |
< |
txt += ' cd $RUNTIME_AREA\n' |
917 |
< |
txt += ' echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n' |
918 |
< |
txt += ' echo ">>> Remove working directory: $WORKING_DIR"\n' |
919 |
< |
txt += ' /bin/rm -rf $WORKING_DIR\n' |
920 |
< |
txt += ' if [ -d $WORKING_DIR ] ;then\n' |
921 |
< |
txt += ' echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n' |
922 |
< |
txt += ' echo "JOB_EXIT_STATUS = 50114"\n' |
923 |
< |
txt += ' echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n' |
924 |
< |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
925 |
< |
txt += ' fi\n' |
926 |
< |
txt += ' fi \n' |
927 |
< |
txt += " exit 1\n" |
837 |
> |
txt += " echo 'ERROR ==> Too few arguments' +$nargs+ \n" |
838 |
> |
txt += ' job_exit_code=50113\n' |
839 |
> |
txt += " func_exit\n" |
840 |
|
txt += "fi\n" |
841 |
|
txt += "\n" |
842 |
|
|
858 |
|
txt += 'PrimaryDataset=null\n' |
859 |
|
txt += 'DataTier=null\n' |
860 |
|
txt += 'ApplicationFamily=MCDataTier\n' |
861 |
< |
if self.pset != None: #CarlosDaniele |
861 |
> |
if self.pset != None: |
862 |
|
pset = os.path.basename(job.configFilename()) |
863 |
|
txt += '\n' |
864 |
|
txt += 'cp $RUNTIME_AREA/'+pset+' .\n' |
865 |
|
if (self.datasetPath): # standard job |
866 |
< |
txt += 'InputFiles=${args[1]}\n' |
867 |
< |
txt += 'MaxEvents=${args[2]}\n' |
868 |
< |
txt += 'SkipEvents=${args[3]}\n' |
866 |
> |
txt += 'InputFiles=${args[1]}; export InputFiles\n' |
867 |
> |
txt += 'MaxEvents=${args[2]}; export MaxEvents\n' |
868 |
> |
txt += 'SkipEvents=${args[3]}; export SkipEvents\n' |
869 |
|
txt += 'echo "Inputfiles:<$InputFiles>"\n' |
958 |
– |
txt += 'sed "s#\'INPUTFILE\'#$InputFiles#" '+pset+' > tmp && mv -f tmp '+pset+'\n' |
870 |
|
txt += 'echo "MaxEvents:<$MaxEvents>"\n' |
960 |
– |
txt += 'sed "s#int32 input = 0#int32 input = $MaxEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n' |
871 |
|
txt += 'echo "SkipEvents:<$SkipEvents>"\n' |
962 |
– |
txt += 'sed "s#uint32 skipEvents = 0#uint32 skipEvents = $SkipEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n' |
872 |
|
else: # pythia like job |
873 |
< |
seedIndex=1 |
873 |
> |
txt += 'PreserveSeeds=' + ','.join(self.preserveSeeds) + '; export PreserveSeeds\n' |
874 |
> |
txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n' |
875 |
> |
txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n' |
876 |
> |
txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n' |
877 |
|
if (self.firstRun): |
878 |
< |
txt += 'FirstRun=${args['+str(seedIndex)+']}\n' |
878 |
> |
txt += 'FirstRun=${args[1]}; export FirstRun\n' |
879 |
|
txt += 'echo "FirstRun: <$FirstRun>"\n' |
968 |
– |
txt += 'sed "s#uint32 firstRun = 0#uint32 firstRun = $FirstRun#" '+pset+' > tmp && mv -f tmp '+pset+'\n' |
969 |
– |
seedIndex=seedIndex+1 |
880 |
|
|
971 |
– |
if (self.sourceSeed): |
972 |
– |
txt += 'Seed=${args['+str(seedIndex)+']}\n' |
973 |
– |
txt += 'sed "s#uint32 sourceSeed = 0#uint32 sourceSeed = $Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n' |
974 |
– |
seedIndex=seedIndex+1 |
975 |
– |
## the following seeds are not always present |
976 |
– |
if (self.sourceSeedVtx): |
977 |
– |
txt += 'VtxSeed=${args['+str(seedIndex)+']}\n' |
978 |
– |
txt += 'echo "VtxSeed: <$VtxSeed>"\n' |
979 |
– |
txt += 'sed "s#uint32 VtxSmeared = 0#uint32 VtxSmeared = $VtxSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n' |
980 |
– |
seedIndex += 1 |
981 |
– |
if (self.sourceSeedG4): |
982 |
– |
txt += 'G4Seed=${args['+str(seedIndex)+']}\n' |
983 |
– |
txt += 'echo "G4Seed: <$G4Seed>"\n' |
984 |
– |
txt += 'sed "s#uint32 g4SimHits = 0#uint32 g4SimHits = $G4Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n' |
985 |
– |
seedIndex += 1 |
986 |
– |
if (self.sourceSeedMix): |
987 |
– |
txt += 'mixSeed=${args['+str(seedIndex)+']}\n' |
988 |
– |
txt += 'echo "MixSeed: <$mixSeed>"\n' |
989 |
– |
txt += 'sed "s#uint32 mix = 0#uint32 mix = $mixSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n' |
990 |
– |
seedIndex += 1 |
991 |
– |
pass |
992 |
– |
pass |
881 |
|
txt += 'mv -f '+pset+' pset.cfg\n' |
882 |
|
|
883 |
|
if len(self.additional_inbox_files) > 0: |
886 |
|
txt += 'fi\n' |
887 |
|
pass |
888 |
|
|
889 |
< |
if self.pset != None: #CarlosDaniele |
889 |
> |
if self.pset != None: |
890 |
|
txt += '\n' |
891 |
|
txt += 'echo "***** cat pset.cfg *********"\n' |
892 |
|
txt += 'cat pset.cfg\n' |
893 |
|
txt += 'echo "****** end pset.cfg ********"\n' |
894 |
|
txt += '\n' |
1007 |
– |
### FEDE FOR DBS OUTPUT PUBLICATION |
895 |
|
txt += 'PSETHASH=`EdmConfigHash < pset.cfg` \n' |
896 |
|
txt += 'echo "PSETHASH = $PSETHASH" \n' |
1010 |
– |
############## |
897 |
|
txt += '\n' |
898 |
|
return txt |
899 |
|
|
900 |
< |
def wsBuildExe(self, nj=0): |
900 |
> |
def wsUntarSoftware(self, nj=0):
    """
    Return a shell-script fragment that unpacks the user software tarball
    on the worker node and puts ProdCommon on the PYTHONPATH.

    The fragment is generated only when the tarball (self.tgzNameWithPath)
    exists locally at script-creation time; otherwise just the header
    comment is returned.  On untar failure the generated script sets
    job_exit_code and calls func_exit (defined by the job wrapper).
    nj is accepted for interface compatibility with the other ws* helpers.
    """
    txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'

    if os.path.isfile(self.tgzNameWithPath):
        # Hoisted: the basename was computed twice in the original.
        tarball = os.path.basename(self.tgzNameWithPath)
        txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+tarball+' :" \n'
        txt += 'tar xzvf $RUNTIME_AREA/'+tarball+'\n'
        txt += 'untar_status=$? \n'
        txt += 'if [ $untar_status -ne 0 ]; then \n'
        txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
        txt += '   job_exit_code=$untar_status\n'
        txt += '   func_exit\n'
        txt += 'else \n'
        txt += '   echo "Successful untar" \n'
        txt += 'fi \n'
        txt += '\n'
        txt += 'echo ">>> Include ProdCommon in PYTHONPATH:"\n'
        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
        txt += '   export PYTHONPATH=$RUNTIME_AREA/ProdCommon\n'
        txt += 'else\n'
        txt += '   export PYTHONPATH=$RUNTIME_AREA/ProdCommon:${PYTHONPATH}\n'
        # NOTE(review): in the diff this echo appears only on the else
        # branch (before 'fi') — preserved as-is; confirm against the
        # pristine file if the PYTHONPATH echo should be unconditional.
        txt += '   echo "PYTHONPATH=$PYTHONPATH"\n'
        txt += 'fi\n'
        txt += '\n'

    return txt
932 |
|
|
933 |
+ |
def wsBuildExe(self, nj=0):
    """
    Return a shell-script fragment that installs the pre-built CMSSW
    pieces on the worker node: it replaces the project-area lib/ and
    module/ directories with the ones shipped in $RUNTIME_AREA, moves
    ProdCommon alongside them, and extends PYTHONPATH with
    $SOFTWARE_DIR/ProdCommon.

    nj is accepted for interface compatibility with the other ws* helpers.
    """
    txt = '\n#Written by cms_cmssw::wsBuildExe\n'
    txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'

    txt += 'rm -r lib/ module/ \n'
    txt += 'mv $RUNTIME_AREA/lib/ . \n'
    txt += 'mv $RUNTIME_AREA/module/ . \n'
    txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'

    txt += 'if [ -z "$PYTHONPATH" ]; then\n'
    txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdCommon\n'
    txt += 'else\n'
    txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdCommon:${PYTHONPATH}\n'
    # NOTE(review): as in wsUntarSoftware, the PYTHONPATH echo sits only
    # on the else branch in the diff — preserved; confirm intent.
    txt += '   echo "PYTHONPATH=$PYTHONPATH"\n'
    txt += 'fi\n'
    txt += '\n'

    return txt
956 |
+ |
|
957 |
|
def modifySteeringCards(self, nj): |
958 |
|
""" |
959 |
|
modify the card provided by the user, |
967 |
|
return self.executable |
968 |
|
|
969 |
|
def executableArgs(self): |
970 |
+ |
# FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions |
971 |
|
if self.scriptExe:#CarlosDaniele |
972 |
|
return self.scriptExe + " $NJob" |
973 |
|
else: |
1079 |
– |
# if >= CMSSW_1_5_X, add -j crab_fjr.xml |
974 |
|
version_array = self.scram.getSWVersion().split('_') |
975 |
|
major = 0 |
976 |
|
minor = 0 |
980 |
|
except: |
981 |
|
msg = "Cannot parse CMSSW version string: " + "_".join(version_array) + " for major and minor release number!" |
982 |
|
raise CrabException(msg) |
983 |
+ |
|
984 |
+ |
ex_args = "" |
985 |
+ |
# FUTURE: This tests the CMSSW version. Can remove code as versions deprecated |
986 |
+ |
# Framework job report |
987 |
|
if major >= 1 and minor >= 5 : |
988 |
< |
return " -j " + self.fjrFileName + " -p pset.cfg" |
988 |
> |
ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml" |
989 |
> |
# Type of cfg file |
990 |
> |
if major >= 2 : |
991 |
> |
ex_args += " -p pset.py" |
992 |
|
else: |
993 |
< |
return " -p pset.cfg" |
993 |
> |
ex_args += " -p pset.cfg" |
994 |
> |
return ex_args |
995 |
|
|
996 |
|
def inputSandbox(self, nj): |
997 |
|
""" |
1011 |
|
## additional input files |
1012 |
|
tgz = self.additionalInputFileTgz() |
1013 |
|
inp_box.append(tgz) |
1014 |
+ |
## executable |
1015 |
+ |
wrapper = os.path.basename(str(common._db.queryTask('scriptName'))) |
1016 |
+ |
inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper) |
1017 |
|
return inp_box |
1018 |
|
|
1019 |
|
def outputSandbox(self, nj): |
1039 |
|
Returns part of a job script which renames the produced files. |
1040 |
|
""" |
1041 |
|
|
1042 |
< |
txt = '\n' |
1043 |
< |
txt += 'echo" >>> directory content:"\n' |
1042 |
> |
txt = '\n#Written by cms_cmssw::wsRenameOutput\n' |
1043 |
> |
txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n' |
1044 |
> |
txt += 'echo ">>> current directory content:"\n' |
1045 |
|
txt += 'ls \n' |
1046 |
< |
txt = '\n' |
1141 |
< |
|
1142 |
< |
txt += 'output_exit_status=0\n' |
1143 |
< |
|
1144 |
< |
for fileWithSuffix in (self.output_file_sandbox): |
1145 |
< |
output_file_num = self.numberFile_(fileWithSuffix, '$NJob') |
1146 |
< |
txt += '\n' |
1147 |
< |
txt += '# check output file\n' |
1148 |
< |
txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n' |
1149 |
< |
txt += ' mv '+fileWithSuffix+' $RUNTIME_AREA\n' |
1150 |
< |
txt += ' cp $RUNTIME_AREA/'+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n' |
1151 |
< |
txt += 'else\n' |
1152 |
< |
txt += ' exit_status=60302\n' |
1153 |
< |
txt += ' echo "ERROR: Problem with output file '+fileWithSuffix+'"\n' |
1154 |
< |
if common.scheduler.boss_scheduler_name == 'condor_g': |
1155 |
< |
txt += ' if [ $middleware == OSG ]; then \n' |
1156 |
< |
txt += ' echo "prepare dummy output file"\n' |
1157 |
< |
txt += ' echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n' |
1158 |
< |
txt += ' fi \n' |
1159 |
< |
txt += 'fi\n' |
1046 |
> |
txt += '\n' |
1047 |
|
|
1048 |
|
for fileWithSuffix in (self.output_file): |
1049 |
|
output_file_num = self.numberFile_(fileWithSuffix, '$NJob') |
1050 |
|
txt += '\n' |
1051 |
|
txt += '# check output file\n' |
1052 |
|
txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n' |
1053 |
< |
txt += ' mv '+fileWithSuffix+' $RUNTIME_AREA\n' |
1054 |
< |
txt += ' cp $RUNTIME_AREA/'+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n' |
1053 |
> |
if (self.copy_data == 1): # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA |
1054 |
> |
txt += ' mv '+fileWithSuffix+' '+output_file_num+'\n' |
1055 |
> |
txt += ' ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n' |
1056 |
> |
else: |
1057 |
> |
txt += ' mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n' |
1058 |
> |
txt += ' ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n' |
1059 |
|
txt += 'else\n' |
1060 |
< |
txt += ' exit_status=60302\n' |
1061 |
< |
txt += ' echo "ERROR: Problem with output file '+fileWithSuffix+'"\n' |
1062 |
< |
txt += ' echo "JOB_EXIT_STATUS = $exit_status"\n' |
1172 |
< |
txt += ' output_exit_status=$exit_status\n' |
1173 |
< |
if common.scheduler.boss_scheduler_name == 'condor_g': |
1060 |
> |
txt += ' job_exit_code=60302\n' |
1061 |
> |
txt += ' echo "WARNING: Output file '+fileWithSuffix+' not found"\n' |
1062 |
> |
if common.scheduler.name().upper() == 'CONDOR_G': |
1063 |
|
txt += ' if [ $middleware == OSG ]; then \n' |
1064 |
|
txt += ' echo "prepare dummy output file"\n' |
1065 |
|
txt += ' echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n' |
1070 |
|
file_list.append(self.numberFile_(fileWithSuffix, '$NJob')) |
1071 |
|
|
1072 |
|
txt += 'file_list="'+string.join(file_list,' ')+'"\n' |
1073 |
+ |
txt += '\n' |
1074 |
+ |
txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n' |
1075 |
+ |
txt += 'echo ">>> current directory content:"\n' |
1076 |
+ |
txt += 'ls \n' |
1077 |
+ |
txt += '\n' |
1078 |
|
txt += 'cd $RUNTIME_AREA\n' |
1079 |
|
txt += 'echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n' |
1080 |
|
return txt |
1114 |
|
'", other.GlueHostApplicationSoftwareRunTimeEnvironment)' |
1115 |
|
|
1116 |
|
req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)' |
1117 |
+ |
if common.scheduler.name() == "glitecoll": |
1118 |
+ |
req += ' && other.GlueCEStateStatus == "Production" ' |
1119 |
|
|
1120 |
|
return req |
1121 |
|
|
1128 |
|
Returns part of a job script which is prepares |
1129 |
|
the execution environment and which is common for all CMS jobs. |
1130 |
|
""" |
1131 |
< |
txt = ' echo ">>> setup CMS OSG environment:"\n' |
1131 |
> |
txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n' |
1132 |
> |
txt += ' echo ">>> setup CMS OSG environment:"\n' |
1133 |
|
txt += ' echo "set SCRAM ARCH to ' + self.executable_arch + '"\n' |
1134 |
|
txt += ' export SCRAM_ARCH='+self.executable_arch+'\n' |
1135 |
|
txt += ' echo "SCRAM_ARCH = $SCRAM_ARCH"\n' |
1137 |
|
txt += ' # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n' |
1138 |
|
txt += ' source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n' |
1139 |
|
txt += ' else\n' |
1140 |
< |
txt += ' echo "SET_CMS_ENV 10020 ==> ERROR $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n' |
1141 |
< |
txt += ' echo "JOB_EXIT_STATUS = 10020"\n' |
1142 |
< |
txt += ' echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n' |
1246 |
< |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
1247 |
< |
txt += '\n' |
1248 |
< |
txt += ' cd $RUNTIME_AREA\n' |
1249 |
< |
txt += ' echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n' |
1250 |
< |
txt += ' echo ">>> Remove working directory: $WORKING_DIR"\n' |
1251 |
< |
txt += ' /bin/rm -rf $WORKING_DIR\n' |
1252 |
< |
txt += ' if [ -d $WORKING_DIR ] ;then\n' |
1253 |
< |
txt += ' echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n' |
1254 |
< |
txt += ' echo "JOB_EXIT_STATUS = 10017"\n' |
1255 |
< |
txt += ' echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n' |
1256 |
< |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
1257 |
< |
txt += ' fi\n' |
1258 |
< |
txt += '\n' |
1259 |
< |
txt += ' exit 1\n' |
1140 |
> |
txt += ' echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n' |
1141 |
> |
txt += ' job_exit_code=10020\n' |
1142 |
> |
txt += ' func_exit\n' |
1143 |
|
txt += ' fi\n' |
1144 |
|
txt += '\n' |
1145 |
< |
txt += ' echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n' |
1145 |
> |
txt += ' echo "==> setup cms environment ok"\n' |
1146 |
|
txt += ' echo "SCRAM_ARCH = $SCRAM_ARCH"\n' |
1147 |
|
|
1148 |
|
return txt |
1153 |
|
Returns part of a job script which is prepares |
1154 |
|
the execution environment and which is common for all CMS jobs. |
1155 |
|
""" |
1156 |
< |
txt = ' echo ">>> setup CMS LCG environment:"\n' |
1156 |
> |
txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n' |
1157 |
> |
txt += ' echo ">>> setup CMS LCG environment:"\n' |
1158 |
|
txt += ' echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n' |
1159 |
|
txt += ' export SCRAM_ARCH='+self.executable_arch+'\n' |
1160 |
|
txt += ' export BUILD_ARCH='+self.executable_arch+'\n' |
1161 |
|
txt += ' if [ ! $VO_CMS_SW_DIR ] ;then\n' |
1162 |
< |
txt += ' echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n' |
1163 |
< |
txt += ' echo "JOB_EXIT_STATUS = 10031" \n' |
1164 |
< |
txt += ' echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n' |
1281 |
< |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
1282 |
< |
txt += ' exit 1\n' |
1162 |
> |
txt += ' echo "ERROR ==> CMS software dir not found on WN `hostname`"\n' |
1163 |
> |
txt += ' job_exit_code=10031\n' |
1164 |
> |
txt += ' func_exit\n' |
1165 |
|
txt += ' else\n' |
1166 |
|
txt += ' echo "Sourcing environment... "\n' |
1167 |
|
txt += ' if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n' |
1168 |
< |
txt += ' echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n' |
1169 |
< |
txt += ' echo "JOB_EXIT_STATUS = 10020"\n' |
1170 |
< |
txt += ' echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n' |
1289 |
< |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
1290 |
< |
txt += ' exit 1\n' |
1168 |
> |
txt += ' echo "ERROR ==> cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n' |
1169 |
> |
txt += ' job_exit_code=10020\n' |
1170 |
> |
txt += ' func_exit\n' |
1171 |
|
txt += ' fi\n' |
1172 |
|
txt += ' echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n' |
1173 |
|
txt += ' source $VO_CMS_SW_DIR/cmsset_default.sh\n' |
1174 |
|
txt += ' result=$?\n' |
1175 |
|
txt += ' if [ $result -ne 0 ]; then\n' |
1176 |
< |
txt += ' echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n' |
1177 |
< |
txt += ' echo "JOB_EXIT_STATUS = 10032"\n' |
1178 |
< |
txt += ' echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n' |
1299 |
< |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
1300 |
< |
txt += ' exit 1\n' |
1176 |
> |
txt += ' echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n' |
1177 |
> |
txt += ' job_exit_code=10032\n' |
1178 |
> |
txt += ' func_exit\n' |
1179 |
|
txt += ' fi\n' |
1180 |
|
txt += ' fi\n' |
1181 |
|
txt += ' \n' |
1182 |
< |
txt += ' echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n' |
1182 |
> |
txt += ' echo "==> setup cms environment ok"\n' |
1183 |
|
return txt |
1184 |
|
|
1185 |
|
### FEDE FOR DBS OUTPUT PUBLICATION |
1188 |
|
insert the part of the script that modifies the FrameworkJob Report |
1189 |
|
""" |
1190 |
|
|
1191 |
< |
txt = '' |
1192 |
< |
try: |
1315 |
< |
publish_data = int(self.cfg_params['USER.publish_data']) |
1316 |
< |
except KeyError: |
1317 |
< |
publish_data = 0 |
1191 |
> |
txt = '\n#Written by cms_cmssw::modifyReport\n' |
1192 |
> |
publish_data = int(self.cfg_params.get('USER.publish_data',0)) |
1193 |
|
if (publish_data == 1): |
1319 |
– |
txt += 'echo ">>> Modify Job Report:" \n' |
1320 |
– |
################ FEDE FOR DBS2 ############################################# |
1321 |
– |
txt += 'chmod a+x $SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n' |
1322 |
– |
############################################################################# |
1323 |
– |
|
1324 |
– |
txt += 'if [ -z "$SE" ]; then\n' |
1325 |
– |
txt += ' SE="" \n' |
1326 |
– |
txt += 'fi \n' |
1327 |
– |
txt += 'if [ -z "$SE_PATH" ]; then\n' |
1328 |
– |
txt += ' SE_PATH="" \n' |
1329 |
– |
txt += 'fi \n' |
1330 |
– |
txt += 'echo "SE = $SE"\n' |
1331 |
– |
txt += 'echo "SE_PATH = $SE_PATH"\n' |
1332 |
– |
|
1194 |
|
processedDataset = self.cfg_params['USER.publish_data_name'] |
1195 |
< |
txt += 'ProcessedDataset='+processedDataset+'\n' |
1196 |
< |
#### LFN=/store/user/<user>/processedDataset_PSETHASH |
1197 |
< |
txt += 'if [ "$SE_PATH" == "" ]; then\n' |
1198 |
< |
#### FEDE: added slash in LFN ############## |
1195 |
> |
LFNBaseName = LFNBase(processedDataset) |
1196 |
> |
|
1197 |
> |
txt += 'if [ $copy_exit_status -eq 0 ]; then\n' |
1198 |
> |
txt += ' FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName) |
1199 |
> |
txt += 'else\n' |
1200 |
|
txt += ' FOR_LFN=/copy_problems/ \n' |
1201 |
< |
txt += 'else \n' |
1202 |
< |
txt += ' tmp=`echo $SE_PATH | awk -F \'store\' \'{print$2}\'` \n' |
1203 |
< |
##### FEDE TO BE CHANGED, BECAUSE STORE IS HARDCODED!!!! ######## |
1204 |
< |
txt += ' FOR_LFN=/store$tmp \n' |
1205 |
< |
txt += 'fi \n' |
1201 |
> |
txt += ' SE=""\n' |
1202 |
> |
txt += ' SE_PATH=""\n' |
1203 |
> |
txt += 'fi\n' |
1204 |
> |
|
1205 |
> |
txt += 'echo ">>> Modify Job Report:" \n' |
1206 |
> |
txt += 'chmod a+x $SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py\n' |
1207 |
> |
txt += 'ProcessedDataset='+processedDataset+'\n' |
1208 |
|
txt += 'echo "ProcessedDataset = $ProcessedDataset"\n' |
1209 |
+ |
txt += 'echo "SE = $SE"\n' |
1210 |
+ |
txt += 'echo "SE_PATH = $SE_PATH"\n' |
1211 |
|
txt += 'echo "FOR_LFN = $FOR_LFN" \n' |
1212 |
|
txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n' |
1213 |
< |
#txt += 'echo "$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n' |
1214 |
< |
txt += 'echo "$SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n' |
1349 |
< |
txt += '$SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n' |
1350 |
< |
#txt += '$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n' |
1351 |
< |
|
1213 |
> |
txt += 'echo "$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n' |
1214 |
> |
txt += '$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n' |
1215 |
|
txt += 'modifyReport_result=$?\n' |
1353 |
– |
txt += 'echo modifyReport_result = $modifyReport_result\n' |
1216 |
|
txt += 'if [ $modifyReport_result -ne 0 ]; then\n' |
1217 |
< |
txt += ' exit_status=1\n' |
1218 |
< |
txt += ' echo "ERROR: Problem with ModifyJobReport"\n' |
1217 |
> |
txt += ' modifyReport_result=70500\n' |
1218 |
> |
txt += ' job_exit_code=$modifyReport_result\n' |
1219 |
> |
txt += ' echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n' |
1220 |
> |
txt += ' echo "WARNING: Problem with ModifyJobReport"\n' |
1221 |
|
txt += 'else\n' |
1222 |
< |
txt += ' mv NewFrameworkJobReport.xml crab_fjr_$NJob.xml\n' |
1222 |
> |
txt += ' mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n' |
1223 |
|
txt += 'fi\n' |
1360 |
– |
else: |
1361 |
– |
txt += 'echo "no data publication required"\n' |
1362 |
– |
return txt |
1363 |
– |
|
1364 |
– |
def cleanEnv(self): |
1365 |
– |
txt = '' |
1366 |
– |
txt += 'if [ $middleware == OSG ]; then\n' |
1367 |
– |
txt += ' cd $RUNTIME_AREA\n' |
1368 |
– |
txt += ' echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n' |
1369 |
– |
txt += ' echo ">>> Remove working directory: $WORKING_DIR"\n' |
1370 |
– |
txt += ' /bin/rm -rf $WORKING_DIR\n' |
1371 |
– |
txt += ' if [ -d $WORKING_DIR ] ;then\n' |
1372 |
– |
txt += ' echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n' |
1373 |
– |
txt += ' echo "JOB_EXIT_STATUS = 60999"\n' |
1374 |
– |
txt += ' echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n' |
1375 |
– |
txt += ' dumpStatus $RUNTIME_AREA/$repo\n' |
1376 |
– |
txt += ' fi\n' |
1377 |
– |
txt += 'fi\n' |
1378 |
– |
txt += '\n' |
1224 |
|
return txt |
1225 |
|
|
1226 |
|
def setParam_(self, param, value): |
1229 |
|
def getParams(self):
    """Return the internal parameter dictionary (filled via setParam_)."""
    return self._params
1231 |
|
|
1387 |
– |
def setTaskid_(self): |
1388 |
– |
self._taskId = self.cfg_params['taskId'] |
1389 |
– |
|
1390 |
– |
def getTaskid(self): |
1391 |
– |
return self._taskId |
1392 |
– |
|
1232 |
|
def uniquelist(self, old): |
1233 |
|
""" |
1234 |
|
remove duplicates from a list |
1238 |
|
nd[e]=0 |
1239 |
|
return nd.keys() |
1240 |
|
|
1241 |
< |
|
1403 |
< |
def checkOut(self, limit): |
1241 |
> |
def outList(self):
    """
    Return a shell-script fragment exporting $filesToCheck, the list of
    files expected in the output sandbox: the per-job-numbered output
    files plus the job stdout/stderr.

    User output files (self.output_file) are included only when
    return_data is enabled; sandbox files (self.output_file_sandbox,
    e.g. the framework job report) are always listed.
    """
    txt = ''
    txt += 'echo ">>> list of expected files on output sandbox"\n'
    stdout = 'CMSSW_$NJob.stdout'
    stderr = 'CMSSW_$NJob.stderr'
    # Only return user output files when return_data is requested;
    # sandbox files are expected in every case.
    if (self.return_data == 1):
        outFiles = self.output_file + self.output_file_sandbox
    else:
        outFiles = self.output_file_sandbox
    listOutFiles = []
    for fileWithSuffix in outFiles:
        listOutFiles.append(self.numberFile_(fileWithSuffix, '$NJob'))
    # stdout/stderr were appended identically on both branches in the
    # original; hoisted out of the conditional.
    listOutFiles.append(stdout)
    listOutFiles.append(stderr)
    # ' '.join replaces the Python-2-only string.join (same behavior).
    txt += 'echo "output files: '+' '.join(listOutFiles)+'"\n'
    txt += 'filesToCheck="'+' '.join(listOutFiles)+'"\n'
    txt += 'export filesToCheck\n'
    return txt