13 | JobType.__init__(self, 'CMSSW')
14 | common.logger.debug(3,'CMSSW::__init__')
15 |
16 + | self.argsList = []
17 + |
18 | self._params = {}
19 | self.cfg_params = cfg_params
18 – |
20 | # init BlackWhiteListParser
21 | self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
22 |
23 < | try:
23 < | self.MaxTarBallSize = float(self.cfg_params['EDG.maxtarballsize'])
24 < | except KeyError:
25 < | self.MaxTarBallSize = 9.5
23 > | self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
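Note: the hunk above collapses a try/except KeyError lookup into a single dict-style get() with a default, a pattern this patch applies throughout the constructor. A minimal sketch of the equivalence, assuming cfg_params supports dict-style access as it does elsewhere in this file (max_tarball is an illustrative name):

    # old form: catch the missing key explicitly
    try:
        max_tarball = float(cfg_params['EDG.maxtarballsize'])
    except KeyError:
        max_tarball = 9.5

    # new form: same result when the key is simply absent
    max_tarball = float(cfg_params.get('EDG.maxtarballsize', 9.5))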
24 |
25 | # number of jobs requested to be created, limit obj splitting
26 | self.ncjobs = ncjobs
50 | a = string.split(self.version, "_")
51 |
52 | if int(a[1]) == 1 and (int(a[2]) < 5 and self.executable_arch.find('slc4') == 0):
53 < | msg = "Error: CMS does not support %s with %s architecture"%(self.version, self.executable_arch)
54 < | raise CrabException(msg)
53 > | msg = "Warning: You are using %s version of CMSSW with %s architecture. \n--> Did you compile your libraries with SLC3? Otherwise you can find some problems running on SLC4 Grid nodes.\n"%(self.version, self.executable_arch)
54 > | common.logger.message(msg)
55 | if int(a[1]) == 1 and (int(a[2]) >= 5 and self.executable_arch.find('slc3') == 0):
56 | msg = "Error: CMS does not support %s with %s architecture"%(self.version, self.executable_arch)
57 | raise CrabException(msg)
61 |
62 | ### collect Data cards
63 |
64 < | ## get DBS mode
67 < | try:
68 < | self.use_dbs_1 = int(self.cfg_params['CMSSW.use_dbs_1'])
69 < | except KeyError:
70 < | self.use_dbs_1 = 0
71 < |
72 < | try:
73 < | tmp = cfg_params['CMSSW.datasetpath']
74 < | log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
75 < | if string.lower(tmp)=='none':
76 < | self.datasetPath = None
77 < | self.selectNoInput = 1
78 < | else:
79 < | self.datasetPath = tmp
80 < | self.selectNoInput = 0
81 < | except KeyError:
64 > | if not cfg_params.has_key('CMSSW.datasetpath'):
65 | msg = "Error: datasetpath not defined "
66 | raise CrabException(msg)
67 + | tmp = cfg_params['CMSSW.datasetpath']
68 + | log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
69 + | if string.lower(tmp)=='none':
70 + | self.datasetPath = None
71 + | self.selectNoInput = 1
72 + | else:
73 + | self.datasetPath = tmp
74 + | self.selectNoInput = 0
75 |
76 | # ML monitoring
77 | # split dataset path style: /PreProdR3Minbias/SIM/GEN-SIM
79 | self.setParam_('dataset', 'None')
80 | self.setParam_('owner', 'None')
81 | else:
82 + | ## SL what is supposed to fail here?
83 | try:
84 | datasetpath_split = self.datasetPath.split("/")
85 | # standard style
86 | self.setParam_('datasetFull', self.datasetPath)
87 < | if self.use_dbs_1 == 1 :
88 < | self.setParam_('dataset', datasetpath_split[1])
97 < | self.setParam_('owner', datasetpath_split[-1])
98 < | else:
99 < | self.setParam_('dataset', datasetpath_split[1])
100 < | self.setParam_('owner', datasetpath_split[2])
87 > | self.setParam_('dataset', datasetpath_split[1])
88 > | self.setParam_('owner', datasetpath_split[2])
89 | except:
90 | self.setParam_('dataset', self.datasetPath)
91 | self.setParam_('owner', self.datasetPath)
92 |
93 < | self.setTaskid_()
106 < | self.setParam_('taskId', self.cfg_params['taskId'])
93 > | self.setParam_('taskId', common.taskDB.dict('taskId'))
94 |
95 | self.dataTiers = []
96 |
97 | ## now the application
98 < | try:
99 < | self.executable = cfg_params['CMSSW.executable']
100 < | self.setParam_('exe', self.executable)
114 < | log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
115 < | msg = "Default executable cmsRun overridden. Switch to " + self.executable
116 < | log.debug(3,msg)
117 < | except KeyError:
118 < | self.executable = 'cmsRun'
119 < | self.setParam_('exe', self.executable)
120 < | msg = "User executable not defined. Use cmsRun"
121 < | log.debug(3,msg)
122 < | pass
98 > | self.executable = cfg_params.get('CMSSW.executable','cmsRun')
99 > | self.setParam_('exe', self.executable)
100 > | log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
101 |
102 < | try:
125 < | self.pset = cfg_params['CMSSW.pset']
126 < | log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
127 < | if self.pset.lower() != 'none' :
128 < | if (not os.path.exists(self.pset)):
129 < | raise CrabException("User defined PSet file "+self.pset+" does not exist")
130 < | else:
131 < | self.pset = None
132 < | except KeyError:
102 > | if not cfg_params.has_key('CMSSW.pset'):
103 | raise CrabException("PSet file missing. Cannot run cmsRun ")
104 + | self.pset = cfg_params['CMSSW.pset']
105 + | log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
106 + | if self.pset.lower() != 'none' :
107 + | if (not os.path.exists(self.pset)):
108 + | raise CrabException("User defined PSet file "+self.pset+" does not exist")
109 + | else:
110 + | self.pset = None
111 |
112 | # output files
113 | ## stuff which must be returned always via sandbox
117 | self.output_file_sandbox.append(self.fjrFileName)
118 |
119 | # other output files to be returned via sandbox or copied to SE
120 < | try:
121 < | self.output_file = []
122 < | tmp = cfg_params['CMSSW.output_file']
123 < | if tmp != '':
124 < | tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
125 < | log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
126 < | for tmp in tmpOutFiles:
127 < | tmp=string.strip(tmp)
151 < | self.output_file.append(tmp)
152 < | pass
153 < | else:
154 < | log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
120 > | self.output_file = []
121 > | tmp = cfg_params.get('CMSSW.output_file',None)
122 > | if tmp :
123 > | tmpOutFiles = string.split(tmp,',')
124 > | log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
125 > | for tmp in tmpOutFiles:
126 > | tmp=string.strip(tmp)
127 > | self.output_file.append(tmp)
128 | pass
129 < | pass
157 < | except KeyError:
129 > | else:
130 | log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
131 < | pass
131 > | pass
132 |
133 | # script_exe file as additional file in inputSandbox
134 < | try:
135 < | self.scriptExe = cfg_params['USER.script_exe']
136 < | if self.scriptExe != '':
137 < | if not os.path.isfile(self.scriptExe):
138 < | msg ="ERROR. file "+self.scriptExe+" not found"
139 < | raise CrabException(msg)
168 < | self.additional_inbox_files.append(string.strip(self.scriptExe))
169 < | except KeyError:
170 < | self.scriptExe = ''
134 > | self.scriptExe = cfg_params.get('USER.script_exe',None)
135 > | if self.scriptExe :
136 > | if not os.path.isfile(self.scriptExe):
137 > | msg ="ERROR. file "+self.scriptExe+" not found"
138 > | raise CrabException(msg)
139 > | self.additional_inbox_files.append(string.strip(self.scriptExe))
140 |
141 | #CarlosDaniele
142 | if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
144 | raise CrabException(msg)
145 |
146 | ## additional input files
147 < | try:
147 > | if cfg_params.has_key('USER.additional_input_files'):
148 | tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
149 | for tmp in tmpAddFiles:
150 | tmp = string.strip(tmp)
168 | pass
169 | pass
170 | common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
171 < | except KeyError:
203 < | pass
204 < |
205 < | # files per job
206 < | try:
207 < | if (cfg_params['CMSSW.files_per_jobs']):
208 < | raise CrabException("files_per_jobs no longer supported. Quitting.")
209 < | except KeyError:
210 < | pass
171 > | pass
172 |
173 | ## Events per job
174 < | try:
174 > | if cfg_params.has_key('CMSSW.events_per_job'):
175 | self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
176 | self.selectEventsPerJob = 1
177 < | except KeyError:
177 > | else:
178 | self.eventsPerJob = -1
179 | self.selectEventsPerJob = 0
180 |
181 | ## number of jobs
182 < | try:
182 > | if cfg_params.has_key('CMSSW.number_of_jobs'):
183 | self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
184 | self.selectNumberOfJobs = 1
185 < | except KeyError:
185 > | else:
186 | self.theNumberOfJobs = 0
187 | self.selectNumberOfJobs = 0
188 |
189 < | try:
189 > | if cfg_params.has_key('CMSSW.total_number_of_events'):
190 | self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
191 | self.selectTotalNumberEvents = 1
192 < | except KeyError:
192 > | else:
193 | self.total_number_of_events = 0
194 | self.selectTotalNumberEvents = 0
195 |
203 | raise CrabException(msg)
204 |
205 | ## source seed for pythia
206 < | try:
246 < | self.sourceSeed = int(cfg_params['CMSSW.pythia_seed'])
247 < | except KeyError:
248 < | self.sourceSeed = None
249 < | common.logger.debug(5,"No seed given")
206 > | self.sourceSeed = cfg_params.get('CMSSW.pythia_seed',None)
207 |
208 < | try:
252 < | self.sourceSeedVtx = int(cfg_params['CMSSW.vtx_seed'])
253 < | except KeyError:
254 < | self.sourceSeedVtx = None
255 < | common.logger.debug(5,"No vertex seed given")
208 > | self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
209 |
210 < | try:
258 < | self.sourceSeedG4 = int(cfg_params['CMSSW.g4_seed'])
259 < | except KeyError:
260 < | self.sourceSeedG4 = None
261 < | common.logger.debug(5,"No g4 sim hits seed given")
210 > | self.sourceSeedG4 = cfg_params.get('CMSSW.g4_seed',None)
211 |
212 < | try:
213 < | self.sourceSeedMix = int(cfg_params['CMSSW.mix_seed'])
214 < | except KeyError:
266 < | self.sourceSeedMix = None
267 < | common.logger.debug(5,"No mix seed given")
212 > | self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
213 > |
214 > | self.firstRun = cfg_params.get('CMSSW.first_run',None)
215 |
269 – | try:
270 – | self.firstRun = int(cfg_params['CMSSW.first_run'])
271 – | except KeyError:
272 – | self.firstRun = None
273 – | common.logger.debug(5,"No first run given")
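Note: cfg_params.get('CMSSW.pythia_seed', None) and the other get() calls above return the raw configuration value (a string when the option is set), while the removed lines applied int() before storing it. If the integer conversion still matters downstream, a hedged sketch of a helper that preserves it (get_int_or_none is a hypothetical name, not part of this patch):

    def get_int_or_none(cfg, key):
        # hypothetical helper: reproduce the removed int(cfg[key]) /
        # 'except KeyError: None' behaviour on top of dict-style get()
        value = cfg.get(key, None)
        return int(value) if value is not None else None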
216 | if self.pset != None: #CarlosDaniele
217 | import PsetManipulator as pp
218 | PsetEdit = pp.PsetManipulator(self.pset) #Daniele Pset
219 |
220 + | # Copy/return
221 + |
222 + | self.copy_data = int(cfg_params.get('USER.copy_data',0))
223 + | self.return_data = int(cfg_params.get('USER.return_data',0))
224 + |
225 | #DBSDLS-start
226 | ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
227 | self.maxEvents=0 # max events available ( --> check the requested nb. of evts in Creator.py)
275 | def DataDiscoveryAndLocation(self, cfg_params):
276 |
277 | import DataDiscovery
331 – | import DataDiscovery_DBS2
278 | import DataLocation
279 | common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
280 |
283 | ## Contact the DBS
284 | common.logger.message("Contacting Data Discovery Services ...")
285 | try:
286 < |
341 < | if self.use_dbs_1 == 1 :
342 < | self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
343 < | else :
344 < | self.pubdata=DataDiscovery_DBS2.DataDiscovery_DBS2(datasetPath, cfg_params)
286 > | self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
287 | self.pubdata.fetchDBSInfo()
288 |
289 | except DataDiscovery.NotExistingDatasetError, ex :
295 | except DataDiscovery.DataDiscoveryError, ex:
296 | msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
297 | raise CrabException(msg)
356 – | except DataDiscovery_DBS2.NotExistingDatasetError_DBS2, ex :
357 – | msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
358 – | raise CrabException(msg)
359 – | except DataDiscovery_DBS2.NoDataTierinProvenanceError_DBS2, ex :
360 – | msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
361 – | raise CrabException(msg)
362 – | except DataDiscovery_DBS2.DataDiscoveryError_DBS2, ex:
363 – | msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
364 – | raise CrabException(msg)
298 |
299 | self.filesbyblock=self.pubdata.getFiles()
300 | self.eventsbyblock=self.pubdata.getEventsPerBlock()
325 |
326 | return sites
327 |
328 + | def setArgsList(self, argsList):
329 + | self.argsList = argsList
330 + |
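Note: the new setArgsList() stores one argument string per job; wsSetupEnvironment() later uses len(self.argsList[nj].split()) to size the $nargs check in the job wrapper. A small usage sketch under that assumption (job_type and list_of_args are illustrative names only):

    # each entry is the whitespace-separated argument string for one job
    args_list = [' '.join(str(a) for a in job_args) for job_args in list_of_args]
    job_type.setArgsList(args_list)
    expected = len(args_list[0].split())   # what the generated "if [ $nargs -lt N ]" tests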
331 | def jobSplittingByBlocks(self, blockSites):
332 | """
333 | Perform job splitting. Jobs run over an integer number of files
540 | for range_jobs in noSiteBlock:
541 | msg += str(range_jobs) + virgola
542 | msg += '\n will not be submitted and this block of data can not be analyzed!\n'
543 + | if self.cfg_params.has_key('EDG.se_white_list'):
544 + | msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
545 + | msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
546 + | msg += 'Please check if the dataset is available at this site!)\n'
547 + | if self.cfg_params.has_key('EDG.ce_white_list'):
548 + | msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
549 + | msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
550 + | msg += 'Please check if the dataset is available at this site!)\n'
551 + |
552 | common.logger.message(msg)
553 |
554 | self.list_of_args = list_of_lists
597 | self.list_of_args = []
598 | for i in range(self.total_number_of_jobs):
599 | ## Since there is no input, any site is good
655 – | # self.jobDestination.append(["Any"])
600 | self.jobDestination.append([""]) #must be empty to write correctly the xml
601 | args=[]
602 | if (self.firstRun):
603 < | ## pythia first run
660 < | #self.list_of_args.append([(str(self.firstRun)+str(i))])
603 > | ## pythia first run
604 | args.append(str(self.firstRun)+str(i))
662 – | else:
663 – | ## no first run
664 – | #self.list_of_args.append([str(i)])
665 – | args.append(str(i))
605 | if (self.sourceSeed):
606 | args.append(str(self.sourceSeed)+str(i))
607 | if (self.sourceSeedVtx):
618 | self.list_of_args.append(args)
619 | pass
620 |
682 – | # print self.list_of_args
683 – |
621 | return
622 |
623 |
752 | common.logger.debug(5,"data "+root+"/data"+" to be tarred")
753 | tar.add(root+"/data",root[swAreaLen:]+"/data")
754 |
755 < | ## Add ProdAgent dir to tar
756 < | paDir = 'ProdAgentApi'
757 < | pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi'
758 < | if os.path.isdir(pa):
759 < | tar.add(pa,paDir)
755 > | ### Removed ProdAgent Api dependencies ###
756 > | ### Add ProdAgent dir to tar
757 > | #paDir = 'ProdAgentApi'
758 > | #pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi'
759 > | #if os.path.isdir(pa):
760 > | # tar.add(pa,paDir)
761 |
762 < | ### FEDE FOR DBS PUBLICATION
825 < | ## Add PRODCOMMON dir to tar
762 > | ## Add ProdCommon dir to tar
763 | prodcommonDir = 'ProdCommon'
764 | prodcommonPath = os.environ['CRABDIR'] + '/' + 'ProdCommon'
765 | if os.path.isdir(prodcommonPath):
766 | tar.add(prodcommonPath,prodcommonDir)
830 – | #############################
767 |
768 | common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
769 | tar.close()
809 | """
810 | # Prepare JobType-independent part
811 | txt = ''
812 < |
877 < | ## OLI_Daniele at this level middleware already known
878 < |
812 > | txt += 'echo ">>> setup environment"\n'
813 | txt += 'if [ $middleware == LCG ]; then \n'
880 – | txt += ' echo "### First set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
881 – | txt += ' export SCRAM_ARCH='+self.executable_arch+'\n'
882 – | txt += ' export BUILD_ARCH='+self.executable_arch+'\n'
814 | txt += self.wsSetupCMSLCGEnvironment_()
815 | txt += 'elif [ $middleware == OSG ]; then\n'
816 | txt += ' WORKING_DIR=`/bin/mktemp -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
817 | txt += ' if [ ! $? == 0 ] ;then\n'
818 | txt += ' echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
819 < | txt += ' echo "JOB_EXIT_STATUS = 10016"\n'
820 < | txt += ' echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
821 < | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
891 < | txt += ' rm -f $RUNTIME_AREA/$repo \n'
892 < | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
893 < | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
819 > | txt += ' echo "JOB_EXIT_STATUS = 10016"\n'
820 > | txt += ' echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
821 > | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
822 | txt += ' exit 1\n'
823 | txt += ' fi\n'
824 < | txt += ' echo "Created working directory: $WORKING_DIR"\n'
824 > | txt += ' echo ">>> Created working directory: $WORKING_DIR"\n'
825 | txt += '\n'
826 | txt += ' echo "Change to working directory: $WORKING_DIR"\n'
827 | txt += ' cd $WORKING_DIR\n'
828 + | txt += ' echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
829 | txt += self.wsSetupCMSOSGEnvironment_()
830 < | txt += ' echo "### Set SCRAM ARCH to ' + self.executable_arch + ' ###"\n'
831 < | txt += ' export SCRAM_ARCH='+self.executable_arch+'\n'
830 > | #txt += ' echo "### Set SCRAM ARCH to ' + self.executable_arch + ' ###"\n'
831 > | #txt += ' export SCRAM_ARCH='+self.executable_arch+'\n'
832 | txt += 'fi\n'
833 |
834 | # Prepare JobType-specific part
835 | scram = self.scram.commandName()
836 | txt += '\n\n'
837 < | txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
837 > | txt += 'echo ">>> specific cmssw setup environment:"\n'
838 > | txt += 'echo "CMSSW_VERSION = '+self.version+'"\n'
839 | txt += scram+' project CMSSW '+self.version+'\n'
840 | txt += 'status=$?\n'
841 | txt += 'if [ $status != 0 ] ; then\n'
842 < | txt += ' echo "SET_EXE_ENV 10034 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
843 < | txt += ' echo "JOB_EXIT_STATUS = 10034"\n'
844 < | txt += ' echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n'
845 < | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
916 < | txt += ' rm -f $RUNTIME_AREA/$repo \n'
917 < | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
918 < | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
919 < | ## OLI_Daniele
842 > | txt += ' echo "SET_EXE_ENV 10034 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
843 > | txt += ' echo "JOB_EXIT_STATUS = 10034"\n'
844 > | txt += ' echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n'
845 > | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
846 | txt += ' if [ $middleware == OSG ]; then \n'
921 – | txt += ' echo "Remove working directory: $WORKING_DIR"\n'
847 | txt += ' cd $RUNTIME_AREA\n'
848 + | txt += ' echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
849 + | txt += ' echo ">>> Remove working directory: $WORKING_DIR"\n'
850 | txt += ' /bin/rm -rf $WORKING_DIR\n'
851 | txt += ' if [ -d $WORKING_DIR ] ;then\n'
852 | txt += ' echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
853 | txt += ' echo "JOB_EXIT_STATUS = 10018"\n'
854 | txt += ' echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
855 | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
929 – | txt += ' rm -f $RUNTIME_AREA/$repo \n'
930 – | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
931 – | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
856 | txt += ' fi\n'
857 | txt += ' fi \n'
858 < | txt += ' exit 1 \n'
858 > | txt += ' exit 1 \n'
859 | txt += 'fi \n'
936 – | txt += 'echo "CMSSW_VERSION = '+self.version+'"\n'
860 | txt += 'cd '+self.version+'\n'
861 | ########## FEDE FOR DBS2 ######################
862 | txt += 'SOFTWARE_DIR=`pwd`\n'
863 < | txt += 'echo SOFTWARE_DIR=$SOFTWARE_DIR \n'
863 > | txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
864 | ###############################################
865 | ### needed grep for bug in scramv1 ###
943 – | txt += scram+' runtime -sh\n'
866 | txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
945 – | txt += 'echo $PATH\n'
946 – |
867 | # Handle the arguments:
868 | txt += "\n"
869 | txt += "## number of arguments (first argument always jobnumber)\n"
870 | txt += "\n"
871 < | # txt += "narg=$#\n"
952 < | txt += "if [ $nargs -lt 2 ]\n"
871 > | txt += "if [ $nargs -lt "+str(len(self.argsList[nj].split()))+" ]\n"
872 | txt += "then\n"
873 | txt += " echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$nargs+ \n"
874 | txt += ' echo "JOB_EXIT_STATUS = 50113"\n'
875 | txt += ' echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n'
876 | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
958 – | txt += ' rm -f $RUNTIME_AREA/$repo \n'
959 – | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
960 – | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
961 – | ## OLI_Daniele
877 | txt += ' if [ $middleware == OSG ]; then \n'
963 – | txt += ' echo "Remove working directory: $WORKING_DIR"\n'
878 | txt += ' cd $RUNTIME_AREA\n'
879 + | txt += ' echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
880 + | txt += ' echo ">>> Remove working directory: $WORKING_DIR"\n'
881 | txt += ' /bin/rm -rf $WORKING_DIR\n'
882 | txt += ' if [ -d $WORKING_DIR ] ;then\n'
883 | txt += ' echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
884 | txt += ' echo "JOB_EXIT_STATUS = 50114"\n'
885 | txt += ' echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
886 | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
971 – | txt += ' rm -f $RUNTIME_AREA/$repo \n'
972 – | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
973 – | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
887 | txt += ' fi\n'
888 | txt += ' fi \n'
889 | txt += " exit 1\n"
901 |
902 | txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
903 | txt += 'DataTier='+datasetpath_split[2]+'\n'
991 – | #txt += 'ProcessedDataset='+datasetpath_split[3]+'\n'
904 | txt += 'ApplicationFamily=cmsRun\n'
905 |
906 | else:
907 | txt += 'DatasetPath=MCDataTier\n'
908 | txt += 'PrimaryDataset=null\n'
909 | txt += 'DataTier=null\n'
998 – | #txt += 'ProcessedDataset=null\n'
910 | txt += 'ApplicationFamily=MCDataTier\n'
911 | if self.pset != None: #CarlosDaniele
912 | pset = os.path.basename(job.configFilename())
913 | txt += '\n'
914 | txt += 'cp $RUNTIME_AREA/'+pset+' .\n'
915 | if (self.datasetPath): # standard job
1005 – | #txt += 'InputFiles=$2\n'
916 | txt += 'InputFiles=${args[1]}\n'
917 | txt += 'MaxEvents=${args[2]}\n'
918 | txt += 'SkipEvents=${args[3]}\n'
961 | pass
962 |
963 | if self.pset != None: #CarlosDaniele
1054 – | txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
1055 – |
964 | txt += '\n'
965 | txt += 'echo "***** cat pset.cfg *********"\n'
966 | txt += 'cat pset.cfg\n'
971 | txt += 'echo "PSETHASH = $PSETHASH" \n'
972 | ##############
973 | txt += '\n'
1066 – | # txt += 'echo "***** cat pset1.cfg *********"\n'
1067 – | # txt += 'cat pset1.cfg\n'
1068 – | # txt += 'echo "****** end pset1.cfg ********"\n'
974 | return txt
975 |
976 | def wsBuildExe(self, nj=0):
982 | txt = ""
983 |
984 | if os.path.isfile(self.tgzNameWithPath):
985 < | txt += 'echo "tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'"\n'
985 > | txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
986 | txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
987 | txt += 'untar_status=$? \n'
988 | txt += 'if [ $untar_status -ne 0 ]; then \n'
990 | txt += ' echo "JOB_EXIT_STATUS = $untar_status" \n'
991 | txt += ' echo "JobExitCode=$untar_status" | tee -a $RUNTIME_AREA/$repo\n'
992 | txt += ' if [ $middleware == OSG ]; then \n'
1088 – | txt += ' echo "Remove working directory: $WORKING_DIR"\n'
993 | txt += ' cd $RUNTIME_AREA\n'
994 + | txt += ' echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
995 + | txt += ' echo ">>> Remove working directory: $WORKING_DIR"\n'
996 | txt += ' /bin/rm -rf $WORKING_DIR\n'
997 | txt += ' if [ -d $WORKING_DIR ] ;then\n'
998 | txt += ' echo "SET_EXE 50999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Untarring .tgz file failed"\n'
999 | txt += ' echo "JOB_EXIT_STATUS = 50999"\n'
1000 | txt += ' echo "JobExitCode=50999" | tee -a $RUNTIME_AREA/$repo\n'
1001 | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1096 – | txt += ' rm -f $RUNTIME_AREA/$repo \n'
1097 – | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1098 – | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1002 | txt += ' fi\n'
1003 | txt += ' fi \n'
1004 | txt += ' \n'
1007 | txt += ' echo "Successful untar" \n'
1008 | txt += 'fi \n'
1009 | txt += '\n'
1010 < | txt += 'echo "Include ProdAgentApi and PRODCOMMON in PYTHONPATH"\n'
1010 > | #### Removed ProdAgent API dependencies
1011 > | txt += 'echo ">>> Include ProdCommon in PYTHONPATH:"\n'
1012 | txt += 'if [ -z "$PYTHONPATH" ]; then\n'
1013 | #### FEDE FOR DBS OUTPUT PUBLICATION
1014 < | txt += ' export PYTHONPATH=$SOFTWARE_DIR/ProdAgentApi:$SOFTWARE_DIR/ProdCommon\n'
1111 < | #txt += ' export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon\n'
1112 < | #txt += ' export PYTHONPATH=ProdAgentApi\n'
1014 > | txt += ' export PYTHONPATH=$SOFTWARE_DIR/ProdCommon\n'
1015 | txt += 'else\n'
1016 < | txt += ' export PYTHONPATH=$SOFTWARE_DIR/ProdAgentApi:$SOFTWARE_DIR/ProdCommon:${PYTHONPATH}\n'
1115 < | #txt += ' export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon:${PYTHONPATH}\n'
1116 < | #txt += ' export PYTHONPATH=ProdAgentApi:${PYTHONPATH}\n'
1016 > | txt += ' export PYTHONPATH=$SOFTWARE_DIR/ProdCommon:${PYTHONPATH}\n'
1017 | txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1018 | ###################
1019 | txt += 'fi\n'
1039 | if self.scriptExe:#CarlosDaniele
1040 | return self.scriptExe + " $NJob"
1041 | else:
1042 < | # if >= CMSSW_1_5_X, add -e
1042 > | # if >= CMSSW_1_5_X, add -j crab_fjr.xml
1043 | version_array = self.scram.getSWVersion().split('_')
1044 | major = 0
1045 | minor = 0
1050 | msg = "Cannot parse CMSSW version string: " + "_".join(version_array) + " for major and minor release number!"
1051 | raise CrabException(msg)
1052 | if major >= 1 and minor >= 5 :
1053 < | return " -e -p pset.cfg"
1053 > | return " -j " + self.fjrFileName + " -p pset.cfg"
1054 | else:
1055 | return " -p pset.cfg"
1056 |
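Note: the hunk above swaps the -e switch for an explicit framework job report file once the release is at least CMSSW_1_5_X. A sketch of how the resulting cmsRun argument string is put together; cmsrun_args is illustrative only, the class itself derives major and minor from scram.getSWVersion():

    def cmsrun_args(version, fjr_name='crab_fjr.xml', pset='pset.cfg'):
        # e.g. 'CMSSW_1_5_2' -> ['CMSSW', '1', '5', '2']
        parts = version.split('_')
        major, minor = int(parts[1]), int(parts[2])
        if major >= 1 and minor >= 5:
            # >= CMSSW_1_5_X: ask cmsRun to write the framework job report
            return ' -j ' + fjr_name + ' -p ' + pset
        return ' -p ' + pset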
1098 | """
1099 |
1100 | txt = '\n'
1101 < | txt += '# directory content\n'
1101 > | txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1102 > | txt += 'echo ">>> current directory content:"\n'
1103 | txt += 'ls \n'
1104 + | txt += '\n'
1105 |
1106 | txt += 'output_exit_status=0\n'
1107 |
1110 | txt += '\n'
1111 | txt += '# check output file\n'
1112 | txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1113 < | txt += ' mv '+fileWithSuffix+' $RUNTIME_AREA\n'
1114 < | txt += ' cp $RUNTIME_AREA/'+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1113 > | txt += ' mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1114 > | txt += ' ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1115 | txt += 'else\n'
1116 | txt += ' exit_status=60302\n'
1117 < | txt += ' echo "ERROR: Problem with output file '+fileWithSuffix+'"\n'
1118 < | if common.scheduler.boss_scheduler_name == 'condor_g':
1117 > | txt += ' echo "ERROR: Output file '+fileWithSuffix+' not found"\n'
1118 > | if common.scheduler.name() == 'CONDOR_G':
1119 | txt += ' if [ $middleware == OSG ]; then \n'
1120 | txt += ' echo "prepare dummy output file"\n'
1121 | txt += ' echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1127 | txt += '\n'
1128 | txt += '# check output file\n'
1129 | txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1130 < | txt += ' mv '+fileWithSuffix+' $RUNTIME_AREA\n'
1131 < | txt += ' cp $RUNTIME_AREA/'+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1130 > | if (self.copy_data == 1): # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA
1131 > | txt += ' mv '+fileWithSuffix+' '+output_file_num+'\n'
1132 > | txt += ' ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1133 > | else:
1134 > | txt += ' mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1135 > | txt += ' ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1136 | txt += 'else\n'
1137 | txt += ' exit_status=60302\n'
1138 < | txt += ' echo "ERROR: Problem with output file '+fileWithSuffix+'"\n'
1138 > | txt += ' echo "ERROR: Output file '+fileWithSuffix+' not found"\n'
1139 | txt += ' echo "JOB_EXIT_STATUS = $exit_status"\n'
1140 | txt += ' output_exit_status=$exit_status\n'
1141 < | if common.scheduler.boss_scheduler_name == 'condor_g':
1141 > | if common.scheduler.name() == 'CONDOR_G':
1142 | txt += ' if [ $middleware == OSG ]; then \n'
1143 | txt += ' echo "prepare dummy output file"\n'
1144 | txt += ' echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1149 | file_list.append(self.numberFile_(fileWithSuffix, '$NJob'))
1150 |
1151 | txt += 'file_list="'+string.join(file_list,' ')+'"\n'
1152 + | txt += '\n'
1153 + | txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1154 + | txt += 'echo ">>> current directory content:"\n'
1155 + | txt += 'ls \n'
1156 + | txt += '\n'
1157 | txt += 'cd $RUNTIME_AREA\n'
1158 + | txt += 'echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
1159 | return txt
1160 |
1161 | def numberFile_(self, file, txt):
1200 | """ return the config filename """
1201 | return self.name()+'.cfg'
1202 |
1291 – | ### OLI_DANIELE
1203 | def wsSetupCMSOSGEnvironment_(self):
1204 | """
1205 | Returns part of a job script which prepares
1206 | the execution environment and which is common for all CMS jobs.
1207 | """
1208 < | txt = '\n'
1209 < | txt += ' echo "### SETUP CMS OSG ENVIRONMENT ###"\n'
1210 < | txt += ' if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
1211 < | txt += ' # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
1212 < | txt += ' export SCRAM_ARCH='+self.executable_arch+'\n'
1302 < | txt += ' source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1303 < | txt += ' elif [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1208 > | txt = ' echo ">>> setup CMS OSG environment:"\n'
1209 > | txt += ' echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
1210 > | txt += ' export SCRAM_ARCH='+self.executable_arch+'\n'
1211 > | txt += ' echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1212 > | txt += ' if [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1213 | txt += ' # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1214 < | txt += ' export SCRAM_ARCH='+self.executable_arch+'\n'
1215 < | txt += ' source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1216 < | txt += ' else\n'
1217 < | txt += ' echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1218 < | txt += ' echo "JOB_EXIT_STATUS = 10020"\n'
1219 < | txt += ' echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1311 < | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1312 < | txt += ' rm -f $RUNTIME_AREA/$repo \n'
1313 < | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1314 < | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1315 < | txt += ' exit 1\n'
1214 > | txt += ' source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1215 > | txt += ' else\n'
1216 > | txt += ' echo "SET_CMS_ENV 10020 ==> ERROR $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1217 > | txt += ' echo "JOB_EXIT_STATUS = 10020"\n'
1218 > | txt += ' echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1219 > | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1220 | txt += '\n'
1221 < | txt += ' echo "Remove working directory: $WORKING_DIR"\n'
1222 < | txt += ' cd $RUNTIME_AREA\n'
1223 < | txt += ' /bin/rm -rf $WORKING_DIR\n'
1224 < | txt += ' if [ -d $WORKING_DIR ] ;then\n'
1225 < | txt += ' echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1226 < | txt += ' echo "JOB_EXIT_STATUS = 10017"\n'
1227 < | txt += ' echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1228 < | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1229 < | txt += ' rm -f $RUNTIME_AREA/$repo \n'
1230 < | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1327 < | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1328 < | txt += ' fi\n'
1221 > | txt += ' cd $RUNTIME_AREA\n'
1222 > | txt += ' echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
1223 > | txt += ' echo ">>> Remove working directory: $WORKING_DIR"\n'
1224 > | txt += ' /bin/rm -rf $WORKING_DIR\n'
1225 > | txt += ' if [ -d $WORKING_DIR ] ;then\n'
1226 > | txt += ' echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1227 > | txt += ' echo "JOB_EXIT_STATUS = 10017"\n'
1228 > | txt += ' echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1229 > | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1230 > | txt += ' fi\n'
1231 | txt += '\n'
1232 < | txt += ' exit 1\n'
1233 < | txt += ' fi\n'
1232 > | txt += ' exit 1\n'
1233 > | txt += ' fi\n'
1234 | txt += '\n'
1235 < | txt += ' echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1236 < | txt += ' echo " END SETUP CMS OSG ENVIRONMENT "\n'
1235 > | txt += ' echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1236 > | txt += ' echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1237 |
1238 | return txt
1239 |
1243 | Returns part of a job script which prepares
1244 | the execution environment and which is common for all CMS jobs.
1245 | """
1246 < | txt = ' \n'
1247 < | txt += ' echo " ### SETUP CMS LCG ENVIRONMENT ### "\n'
1248 < | txt += ' if [ ! $VO_CMS_SW_DIR ] ;then\n'
1249 < | txt += ' echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
1250 < | txt += ' echo "JOB_EXIT_STATUS = 10031" \n'
1251 < | txt += ' echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
1252 < | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1253 < | txt += ' rm -f $RUNTIME_AREA/$repo \n'
1254 < | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1255 < | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1256 < | txt += ' exit 1\n'
1257 < | txt += ' else\n'
1258 < | txt += ' echo "Sourcing environment... "\n'
1259 < | txt += ' if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1260 < | txt += ' echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1261 < | txt += ' echo "JOB_EXIT_STATUS = 10020"\n'
1262 < | txt += ' echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1263 < | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1264 < | txt += ' rm -f $RUNTIME_AREA/$repo \n'
1265 < | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1266 < | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1267 < | txt += ' exit 1\n'
1268 < | txt += ' fi\n'
1269 < | txt += ' echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1270 < | txt += ' source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1271 < | txt += ' result=$?\n'
1272 < | txt += ' if [ $result -ne 0 ]; then\n'
1273 < | txt += ' echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1274 < | txt += ' echo "JOB_EXIT_STATUS = 10032"\n'
1275 < | txt += ' echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1276 < | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1277 < | txt += ' rm -f $RUNTIME_AREA/$repo \n'
1376 < | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1377 < | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1378 < | txt += ' exit 1\n'
1379 < | txt += ' fi\n'
1380 < | txt += ' fi\n'
1381 < | txt += ' \n'
1382 < | txt += ' echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1383 < | txt += ' echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
1246 > | txt = ' echo ">>> setup CMS LCG environment:"\n'
1247 > | txt += ' echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
1248 > | txt += ' export SCRAM_ARCH='+self.executable_arch+'\n'
1249 > | txt += ' export BUILD_ARCH='+self.executable_arch+'\n'
1250 > | txt += ' if [ ! $VO_CMS_SW_DIR ] ;then\n'
1251 > | txt += ' echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
1252 > | txt += ' echo "JOB_EXIT_STATUS = 10031" \n'
1253 > | txt += ' echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
1254 > | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1255 > | txt += ' exit 1\n'
1256 > | txt += ' else\n'
1257 > | txt += ' echo "Sourcing environment... "\n'
1258 > | txt += ' if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1259 > | txt += ' echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1260 > | txt += ' echo "JOB_EXIT_STATUS = 10020"\n'
1261 > | txt += ' echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1262 > | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1263 > | txt += ' exit 1\n'
1264 > | txt += ' fi\n'
1265 > | txt += ' echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1266 > | txt += ' source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1267 > | txt += ' result=$?\n'
1268 > | txt += ' if [ $result -ne 0 ]; then\n'
1269 > | txt += ' echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1270 > | txt += ' echo "JOB_EXIT_STATUS = 10032"\n'
1271 > | txt += ' echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1272 > | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1273 > | txt += ' exit 1\n'
1274 > | txt += ' fi\n'
1275 > | txt += ' fi\n'
1276 > | txt += ' \n'
1277 > | txt += ' echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1278 | return txt
1279 |
1280 | ### FEDE FOR DBS OUTPUT PUBLICATION
1289 | except KeyError:
1290 | publish_data = 0
1291 | if (publish_data == 1):
1292 < | txt += 'echo "Modify Job Report" \n'
1399 < | #txt += 'chmod a+x $RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1292 > | txt += 'echo ">>> Modify Job Report:" \n'
1293 | ################ FEDE FOR DBS2 #############################################
1294 < | txt += 'chmod a+x $SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1294 > | #txt += 'chmod a+x $SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1295 > | txt += 'chmod a+x $SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1296 | #############################################################################
1403 – | #try:
1404 – | # publish_data = int(self.cfg_params['USER.publish_data'])
1405 – | #except KeyError:
1406 – | # publish_data = 0
1297 |
1298 | txt += 'if [ -z "$SE" ]; then\n'
1299 | txt += ' SE="" \n'
1304 | txt += 'echo "SE = $SE"\n'
1305 | txt += 'echo "SE_PATH = $SE_PATH"\n'
1306 |
1417 – | #if (publish_data == 1):
1418 – | #processedDataset = self.cfg_params['USER.processed_datasetname']
1307 | processedDataset = self.cfg_params['USER.publish_data_name']
1308 | txt += 'ProcessedDataset='+processedDataset+'\n'
1309 | #### LFN=/store/user/<user>/processedDataset_PSETHASH
1318 | txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1319 | txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1320 | txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1321 < | #txt += 'echo "$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1322 < | txt += 'echo "$SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1435 < | txt += '$SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1436 < | #txt += '$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1321 > | txt += 'echo "$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1322 > | txt += '$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1323 |
1324 | txt += 'modifyReport_result=$?\n'
1325 | txt += 'echo modifyReport_result = $modifyReport_result\n'
1331 | txt += 'fi\n'
1332 | else:
1333 | txt += 'echo "no data publication required"\n'
1448 – | #txt += 'ProcessedDataset=no_data_to_publish \n'
1449 – | #### FEDE: added slash in LFN ##############
1450 – | #txt += 'FOR_LFN=/local/ \n'
1451 – | #txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1452 – | #txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1334 | return txt
1335 |
1336 | def cleanEnv(self):
1456 – | ### OLI_DANIELE
1337 | txt = ''
1338 | txt += 'if [ $middleware == OSG ]; then\n'
1339 | txt += ' cd $RUNTIME_AREA\n'
1340 < | txt += ' echo "Remove working directory: $WORKING_DIR"\n'
1340 > | txt += ' echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
1341 > | txt += ' echo ">>> Remove working directory: $WORKING_DIR"\n'
1342 | txt += ' /bin/rm -rf $WORKING_DIR\n'
1343 | txt += ' if [ -d $WORKING_DIR ] ;then\n'
1344 < | txt += ' echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1345 < | txt += ' echo "JOB_EXIT_STATUS = 60999"\n'
1346 < | txt += ' echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1347 < | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1467 < | txt += ' rm -f $RUNTIME_AREA/$repo \n'
1468 < | txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1469 < | txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1344 > | txt += ' echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1345 > | txt += ' echo "JOB_EXIT_STATUS = 60999"\n'
1346 > | txt += ' echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1347 > | txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1348 | txt += ' fi\n'
1349 | txt += 'fi\n'
1350 | txt += '\n'
1356 | def getParams(self):
1357 | return self._params
1358 |
1481 – | def setTaskid_(self):
1482 – | self._taskId = self.cfg_params['taskId']
1483 – |
1484 – | def getTaskid(self):
1485 – | return self._taskId
1486 – |
1359 | def uniquelist(self, old):
1360 | """
1361 | remove duplicates from a list
1370 | """
1371 | check the dimension of the output files
1372 | """
1373 < | txt = 'echo "*****************************************"\n'
1502 < | txt += 'echo "** Starting output sandbox limit check **"\n'
1503 < | txt += 'echo "*****************************************"\n'
1373 > | txt = 'echo ">>> Starting output sandbox limit check :"\n'
1374 | allOutFiles = ""
1375 | listOutFiles = []
1376 < | for fileOut in (self.output_file+self.output_file_sandbox):
1377 < | if fileOut.find('crab_fjr') == -1:
1378 < | allOutFiles = allOutFiles + " " + self.numberFile_(fileOut, '$NJob')
1379 < | listOutFiles.append(self.numberFile_(fileOut, '$NJob'))
1376 > | txt += 'stdoutFile=`ls *stdout` \n'
1377 > | txt += 'stderrFile=`ls *stderr` \n'
1378 > | if (self.return_data == 1):
1379 > | for fileOut in (self.output_file+self.output_file_sandbox):
1380 > | allOutFiles = allOutFiles + " " + self.numberFile_(fileOut, '$NJob') + " $stdoutFile $stderrFile"
1381 > | else:
1382 > | for fileOut in (self.output_file_sandbox):
1383 > | txt += 'echo " '+fileOut+'";\n'
1384 > | allOutFiles = allOutFiles + " " + self.numberFile_(fileOut, '$NJob') + " $stdoutFile $stderrFile"
1385 | txt += 'echo "OUTPUT files: '+str(allOutFiles)+'";\n'
1386 | txt += 'ls -gGhrta;\n'
1387 | txt += 'sum=0;\n'
1399 | txt += 'if [ $limit -lt $sum ]; then\n'
1400 | txt += ' echo "WARNING: output files have too big size - something will be lost;"\n'
1401 | txt += ' echo " checking the output file sizes..."\n'
1527 – | """
1528 – | txt += ' dim=0;\n'
1529 – | txt += ' exclude=0;\n'
1530 – | txt += ' for files in '+str(allOutFiles)+' ; do\n'
1531 – | txt += ' sumTemp=0;\n'
1532 – | txt += ' for file2 in '+str(allOutFiles)+' ; do\n'
1533 – | txt += ' if [ $file != $file2 ]; then\n'
1534 – | txt += ' tt=`ls -gGrta $file2 | awk \'{ print $3 }\';`\n'
1535 – | txt += ' sumTemp=`expr $sumTemp + $tt`;\n'
1536 – | txt += ' fi\n'
1537 – | txt += ' done\n'
1538 – | txt += ' if [ $sumTemp -lt $limit ]; then\n'
1539 – | txt += ' if [ $dim -lt $sumTemp ]; then\n'
1540 – | txt += ' dim=$sumTemp;\n'
1541 – | txt += ' exclude=$file;\n'
1542 – | txt += ' fi\n'
1543 – | txt += ' fi\n'
1544 – | txt += ' done\n'
1545 – | txt += ' echo "Dimension calculated: $dim"; echo "File to exclude: $exclude";\n'
1546 – | """
1402 | txt += ' tot=0;\n'
1403 < | txt += ' for file2 in '+str(allOutFiles)+' ; do\n'
1404 < | txt += ' tt=`ls -gGrta $file2 | awk \'{ print $3 }\';`\n'
1403 > | txt += ' for filefile in '+str(allOutFiles)+' ; do\n'
1404 > | txt += ' dimFile=`ls -gGrta $filefile | awk \'{ print $3 }\';`\n'
1405 | txt += ' tot=`expr $tot + $tt`;\n'
1406 < | txt += ' if [ $limit -lt $tot ]; then\n'
1407 < | txt += ' tot=`expr $tot - $tt`;\n'
1408 < | txt += ' fileLast=$file;\n'
1409 < | txt += ' break;\n'
1410 < | txt += ' fi\n'
1411 < | txt += ' done\n'
1412 < | txt += ' echo "Dimension calculated: $tot"; echo "First file to exclude: $file";\n'
1413 < | txt += ' flag=0;\n'
1559 < | txt += ' for filess in '+str(allOutFiles)+' ; do\n'
1560 < | txt += ' if [ $fileLast = $filess ]; then\n'
1561 < | txt += ' flag=1;\n'
1562 < | txt += ' fi\n'
1563 < | txt += ' if [ $flag -eq 1 ]; then\n'
1564 < | txt += ' rm -f $filess;\n'
1406 > | txt += ' if [ $limit -lt $dimFile ]; then\n'
1407 > | txt += ' echo "deleting file: $filefile";\n'
1408 > | txt += ' rm -f $filefile\n'
1409 > | txt += ' elif [ $limit -lt $tot ]; then\n'
1410 > | txt += ' echo "deleting file: $filefile";\n'
1411 > | txt += ' rm -f $filefile\n'
1412 > | txt += ' else\n'
1413 > | txt += ' echo "saving file: $filefile"\n'
1414 | txt += ' fi\n'
1415 | txt += ' done\n'
1416 + |
1417 | txt += ' ls -agGhrt;\n'
1418 | txt += ' echo "WARNING: output files are too big in dimension: can not put in the output_sandbox.";\n'
1419 | txt += ' echo "JOB_EXIT_STATUS = 70000";\n'
1421 | txt += 'else'
1422 | txt += ' echo "Total Output dimension $sum is fine.";\n'
1423 | txt += 'fi\n'
1424 < | txt += 'echo "*****************************************"\n'
1575 < | txt += 'echo "*** Ending output sandbox limit check ***"\n'
1576 < | txt += 'echo "*****************************************"\n'
1424 > | txt += 'echo "Ending output sandbox limit check"\n'
1425 | return txt