  11     import DataLocation
  12     import Scram
  13
  14 <   import os, string, re
  14 >   import glob, os, string, re
  15
  16     class Cmssw(JobType):
  17     def __init__(self, cfg_params, ncjobs):
  18     JobType.__init__(self, 'CMSSW')
  19     common.logger.debug(3,'CMSSW::__init__')
  20
  21 -   self.analisys_common_info = {}
  21     # Marco.
  22     self._params = {}
  23     self.cfg_params = cfg_params
  24
  25 <   # number of jobs requested to be created, limit ojb splitting
  25 >   # number of jobs requested to be created, limit obj splitting
  26     self.ncjobs = ncjobs
  27
  28     log = common.logger
  33     self.scriptExe = ''
  34     self.executable = ''
  35     self.tgz_name = 'default.tgz'
  36 +   self.pset = '' #script use case Da
  37 +   self.datasetPath = '' #script use case Da
  38
  39 +   # set FJR file name
  40 +   self.fjrFileName = 'crab_fjr.xml'
  41
  42     self.version = self.scram.getSWVersion()
  43 +   common.taskDB.setDict('codeVersion',self.version)
  44     self.setParam_('application', self.version)
  41 -   common.analisys_common_info['sw_version'] = self.version
  42 -   ### FEDE
  43 -   common.analisys_common_info['copy_input_data'] = 0
  44 -   common.analisys_common_info['events_management'] = 1
  45
  46     ### collect Data cards
  47     try:
  89     try:
  90     self.pset = cfg_params['CMSSW.pset']
  91     log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
  92 <   if (not os.path.exists(self.pset)):
  93 <   raise CrabException("User defined PSet file "+self.pset+" does not exist")
  92 >   if self.pset.lower() != 'none' :
  93 >   if (not os.path.exists(self.pset)):
  94 >   raise CrabException("User defined PSet file "+self.pset+" does not exist")
  95 >   else:
  96 >   self.pset = None
  97     except KeyError:
  98     raise CrabException("PSet file missing. Cannot run cmsRun ")
  99
 100     # output files
 101 +   ## stuff which must be returned always via sandbox
 102 +   self.output_file_sandbox = []
 103 +
 104 +   # add fjr report by default via sandbox
 105 +   self.output_file_sandbox.append(self.fjrFileName)
 106 +
 107 +   # other output files to be returned via sandbox or copied to SE
 108     try:
 109     self.output_file = []
 110
 111 +
 112     tmp = cfg_params['CMSSW.output_file']
 113     if tmp != '':
 114     tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
 118     self.output_file.append(tmp)
 119     pass
 120     else:
 121 <   log.message("No output file defined: only stdout/err will be available")
 121 >   log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available")
 122     pass
 123     pass
 124     except KeyError:
 125 <   log.message("No output file defined: only stdout/err will be available")
 125 >   log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available")
 126     pass
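The hunk above splits the job outputs into two lists: output_file_sandbox holds files that always come back in the output sandbox (the crab_fjr.xml framework job report), while output_file holds the user-declared files that may instead be copied to a storage element. A sketch of that bookkeeping, with a hypothetical helper doing the config parsing:

def collect_outputs(cfg_params, fjr_name='crab_fjr.xml'):
    output_file_sandbox = [fjr_name]        # always returned via the output sandbox
    output_file = []                        # user files: sandbox or copy to the SE
    for name in cfg_params.get('CMSSW.output_file', '').split(','):
        name = name.strip()
        if name:
            output_file.append(name)
    return output_file, output_file_sandbox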
 127
 128     # script_exe file as additional file in inputSandbox
 129     try:
 130     self.scriptExe = cfg_params['USER.script_exe']
 120 -   self.additional_inbox_files.append(self.scriptExe)
 131     if self.scriptExe != '':
 132     if not os.path.isfile(self.scriptExe):
 133     msg ="WARNING. file "+self.scriptExe+" not found"
 134     raise CrabException(msg)
 135 +   self.additional_inbox_files.append(string.strip(self.scriptExe))
 136     except KeyError:
 137 <   pass
 138 <
 137 >   self.scriptExe = ''
 138 >   #CarlosDaniele
 139 >   if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
 140 >   msg ="WARNING. script_exe not defined"
 141 >   raise CrabException(msg)
 142 >
 143     ## additional input files
 144     try:
 145     tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
 146     for tmp in tmpAddFiles:
 147 <   if not os.path.exists(tmp):
 148 <   raise CrabException("Additional input file not found: "+tmp)
 149 <   self.additional_inbox_files.append(string.strip(tmp))
 147 >   tmp = string.strip(tmp)
 148 >   dirname = ''
 149 >   if not tmp[0]=="/": dirname = "."
 150 >   files = glob.glob(os.path.join(dirname, tmp))
 151 >   for file in files:
 152 >   if not os.path.exists(file):
 153 >   raise CrabException("Additional input file not found: "+file)
 154 >   pass
 155 >   self.additional_inbox_files.append(string.strip(file))
 156     pass
 157     pass
 158 +   common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
 159     except KeyError:
 160     pass
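The rewritten additional_input_files loop now expands each comma-separated entry with glob (relative patterns are resolved against the current directory) before adding the matches to the input sandbox; the new glob import at line 14 supports this. A compact standard-library sketch of the same idea; note that, unlike the patch, this version also complains when a pattern matches nothing:

import glob, os

def expand_input_patterns(spec):
    matched = []
    for pattern in spec.split(','):
        pattern = pattern.strip()
        if not pattern:
            continue
        dirname = '' if pattern.startswith('/') else '.'
        hits = glob.glob(os.path.join(dirname, pattern))
        if not hits:
            raise RuntimeError("Additional input file not found: " + pattern)
        matched.extend(hits)
    return matched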
 161
 189     self.total_number_of_events = 0
 190     self.selectTotalNumberEvents = 0
 191
 192 <   if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
 193 <   msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
 194 <   raise CrabException(msg)
 192 >   if self.pset != None: #CarlosDaniele
 193 >   if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
 194 >   msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
 195 >   raise CrabException(msg)
 196 >   else:
 197 >   if (self.selectNumberOfJobs == 0):
 198 >   msg = 'Must specify number_of_jobs.'
 199 >   raise CrabException(msg)
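The select* attributes are 0/1 flags, so their sum counts how many of the three splitting parameters the user supplied; a cmsRun job still needs exactly two of them, while the new script-only branch (self.pset is None) only insists on number_of_jobs. A toy version of that check:

def check_splitting(total_events_set, events_per_job_set, number_of_jobs_set, has_pset):
    # each *_set argument is a boolean, mirroring the 0/1 select* flags above
    chosen = int(total_events_set) + int(events_per_job_set) + int(number_of_jobs_set)
    if has_pset:
        if chosen != 2:
            raise ValueError('Must define exactly two of total_number_of_events, '
                             'events_per_job, or number_of_jobs.')
    elif not number_of_jobs_set:
        raise ValueError('Must specify number_of_jobs.')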
 200
 201     ## source seed for pythia
 202     try:
 210     except KeyError:
 211     self.sourceSeedVtx = None
 212     common.logger.debug(5,"No vertex seed given")
 213 <
 214 <   self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
 213 >   if self.pset != None: #CarlosDaniele
 214 >   self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
 215
 216     #DBSDLS-start
 217     ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
 228     self.tgzNameWithPath = self.getTarBall(self.executable)
 229
 230     ## Select Splitting
 231 <   if self.selectNoInput: self.jobSplittingNoInput()
 231 >   if self.selectNoInput:
 232 >   if self.pset == None: #CarlosDaniele
 233 >   self.jobSplittingForScript()
 234 >   else:
 235 >   self.jobSplittingNoInput()
 236     else: self.jobSplittingByBlocks(blockSites)
 237
 238     # modify Pset
 239 <   try:
 240 <   if (self.datasetPath): # standard job
 241 <   # allow to process a fraction of events in a file
 242 <   self.PsetEdit.inputModule("INPUT")
 243 <   self.PsetEdit.maxEvent("INPUTMAXEVENTS")
 244 <   self.PsetEdit.skipEvent("INPUTSKIPEVENTS")
 245 <
 246 <   else: # pythia like job
 247 <   self.PsetEdit.maxEvent(self.eventsPerJob)
 248 <   if (self.sourceSeed) :
 249 <   self.PsetEdit.pythiaSeed("INPUT")
 250 <   if (self.sourceSeedVtx) :
 251 <   self.PsetEdit.pythiaSeedVtx("INPUTVTX")
 252 <   self.PsetEdit.psetWriter(self.configFilename())
 253 <   except:
 254 <   msg='Error while manipulating ParameterSet: exiting...'
 255 <   raise CrabException(msg)
 239 >   if self.pset != None: #CarlosDaniele
 240 >   try:
 241 >   if (self.datasetPath): # standard job
 242 >   # allow to process a fraction of events in a file
 243 >   self.PsetEdit.inputModule("INPUT")
 244 >   self.PsetEdit.maxEvent("INPUTMAXEVENTS")
 245 >   self.PsetEdit.skipEvent("INPUTSKIPEVENTS")
 246 >   else: # pythia like job
 247 >   self.PsetEdit.maxEvent(self.eventsPerJob)
 248 >   if (self.sourceSeed) :
 249 >   self.PsetEdit.pythiaSeed("INPUT")
 250 >   if (self.sourceSeedVtx) :
 251 >   self.PsetEdit.pythiaSeedVtx("INPUTVTX")
 252 >   # add FrameworkJobReport to parameter-set
 253 >   self.PsetEdit.addCrabFJR(self.fjrFileName)
 254 >   self.PsetEdit.psetWriter(self.configFilename())
 255 >   except:
 256 >   msg='Error while manipulating ParameterSet: exiting...'
 257 >   raise CrabException(msg)
 258
 259     def DataDiscoveryAndLocation(self, cfg_params):
 260
 296
 297     ## get max number of events
 298     self.maxEvents=self.pubdata.getMaxEvents() ## self.maxEvents used in Creator.py
 299 <   common.logger.message("\nThe number of available events is %s"%self.maxEvents)
 299 >   common.logger.message("The number of available events is %s\n"%self.maxEvents)
 300
 301     common.logger.message("Contacting DLS...")
 302     ## Contact the DLS and build a list of sites hosting the fileblocks
 383     # ---- we've met the requested total # of events ---- #
 384     while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
 385     block = blocks[blockCount]
 386 +   blockCount += 1
 387 +
 388
 389 <
 390 <   evInBlock = self.eventsbyblock[block]
 356 <   common.logger.debug(5,'Events in Block File '+str(evInBlock))
 357 <
 358 <   #Correct - switch to this when DBS up
 359 <   #numEventsInBlock = self.eventsbyblock[block]
 360 <   numEventsInBlock = evInBlock
 389 >   numEventsInBlock = self.eventsbyblock[block]
 390 >   common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
 391
 392     files = self.filesbyblock[block]
 393     numFilesInBlock = len(files)
 418     parString += '\\\"' + file + '\\\"\,'
 419     newFile = 0
 420     except KeyError:
 421 <   common.logger.message("File "+str(file)+" has unknown numbe of events: skipping")
 421 >   common.logger.message("File "+str(file)+" has unknown number of events: skipping")
 422
 423
 424     # if less events in file remain than eventsPerJobRequested
 425     if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) :
 426     # if last file in block
 427 <   if ( fileCount == numFilesInBlock ) :
 427 >   if ( fileCount == numFilesInBlock-1 ) :
 428     # end job using last file, use remaining events in block
 429     # close job and touch new file
 430     fullString = parString[:-2]
 435     common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
 436     # reset counter
 437     jobCount = jobCount + 1
 438 <   totalEventCount = totalEventCount + eventsPerJobRequested
 439 <   eventsRemaining = eventsRemaining - eventsPerJobRequested
 438 >   totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
 439 >   eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
 440     jobSkipEventCount = 0
 441     # reset file
 442     parString = "\\{"
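Two fixes are visible here: the last-file test now compares against numFilesInBlock-1 (presumably because the file counter is zero-based), and a job closed on the last file of a block is charged with the events it actually received, filesEventCount - jobSkipEventCount, instead of a full eventsPerJobRequested. A stripped-down illustration of that accounting, leaving out the file lists and skip-event arguments:

def split_block(events_per_file, events_per_job):
    # Toy version of the per-block splitting loop: returns the number of events
    # assigned to each job, letting the final job of a block come up short
    # rather than pretending it received a full events_per_job.
    jobs = []
    pending = 0
    for i, n in enumerate(events_per_file):
        pending += n
        while pending >= events_per_job:
            jobs.append(events_per_job)
            pending -= events_per_job
        if i == len(events_per_file) - 1 and pending > 0:   # the numFilesInBlock-1 check
            jobs.append(pending)                             # short last job of the block
            pending = 0
    return jobs

# e.g. split_block([100, 50], 60) -> [60, 60, 30]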
 532     self.list_of_args = []
 533     for i in range(self.total_number_of_jobs):
 534     ## Since there is no input, any site is good
 535 <   self.jobDestination.append(["Any"])
 535 >   # self.jobDestination.append(["Any"])
 536 >   self.jobDestination.append([""]) #must be empty to write the xml correctly
 537     if (self.sourceSeed):
 538     if (self.sourceSeedVtx):
 539     ## pythia + vtx random seed
 551
 552     return
 553
 554 +
 555 +   def jobSplittingForScript(self):#CarlosDaniele
 556 +   """
 557 +   Perform job splitting based on number of jobs
 558 +   """
 559 +   common.logger.debug(5,'Splitting per job')
 560 +   common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
 561 +
 562 +   self.total_number_of_jobs = self.theNumberOfJobs
 563 +
 564 +   common.logger.debug(5,'N jobs '+str(self.total_number_of_jobs))
 565 +
 566 +   common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
 567 +
 568 +   # argument is seed number.$i
 569 +   self.list_of_args = []
 570 +   for i in range(self.total_number_of_jobs):
 571 +   ## Since there is no input, any site is good
 572 +   # self.jobDestination.append(["Any"])
 573 +   self.jobDestination.append([""])
 574 +   ## no random seed
 575 +   self.list_of_args.append([str(i)])
 576 +   return
 577 +
 578     def split(self, jobParams):
 579
 580     common.jobDB.load()
 671     dataDir = 'src/Data/'
 672     if os.path.isdir(swArea+'/'+dataDir):
 673     filesToBeTarred.append(dataDir)
 674 <
 674 >
 675 >   ## copy ProdAgent dir to swArea
 676 >   cmd = '\cp -rf ' + os.environ['CRABDIR'] + '/ProdAgentApi ' + swArea
 677 >   cmd_out = runCommand(cmd)
 678 >   if cmd_out != '':
 679 >   common.logger.message('ProdAgentApi directory could not be copied to local CMSSW project directory.')
 680 >   common.logger.message('No FrameworkJobReport parsing is possible on the WorkerNode.')
 681 >
 682 >   ## Now check if the ProdAgentApi dir is present
 683 >   paDir = 'ProdAgentApi'
 684 >   if os.path.isdir(swArea+'/'+paDir):
 685 >   filesToBeTarred.append(paDir)
 686 >
 687     ## Create the tar-ball
 688     if len(filesToBeTarred)>0:
 689     cwd = os.getcwd()
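The new hunk copies $CRABDIR/ProdAgentApi into the SCRAM area with a shelled-out '\cp -rf' so it ends up inside the sandbox tarball alongside the user code. A sketch of the same step using only the standard library; the function and argument names are illustrative, not the CRAB implementation:

import os, shutil, tarfile

def add_prodagent_to_tarball(crab_dir, sw_area, tgz_path, files_to_be_tarred):
    src = os.path.join(crab_dir, 'ProdAgentApi')
    dst = os.path.join(sw_area, 'ProdAgentApi')
    try:
        shutil.copytree(src, dst)
    except (OSError, shutil.Error):
        print('ProdAgentApi could not be copied: no FrameworkJobReport parsing on the WN')
    if os.path.isdir(dst):
        files_to_be_tarred.append('ProdAgentApi')
    # pack everything that was collected into the sandbox tar-ball
    tar = tarfile.open(tgz_path, 'w:gz')
    for name in files_to_be_tarred:
        tar.add(os.path.join(sw_area, name), arcname=name)
    tar.close()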
 713     txt += 'if [ $middleware == LCG ]; then \n'
 714     txt += self.wsSetupCMSLCGEnvironment_()
 715     txt += 'elif [ $middleware == OSG ]; then\n'
 716 <   txt += ' time=`date -u +"%s"`\n'
 717 <   txt += ' WORKING_DIR=$OSG_WN_TMP/cms_$time\n'
 651 <   txt += ' echo "Creating working directory: $WORKING_DIR"\n'
 652 <   txt += ' /bin/mkdir -p $WORKING_DIR\n'
 716 >   txt += ' WORKING_DIR=`/bin/mktemp -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
 717 >   txt += ' echo "Created working directory: $WORKING_DIR"\n'
 718     txt += ' if [ ! -d $WORKING_DIR ] ;then\n'
 719     txt += ' echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
 720     txt += ' echo "JOB_EXIT_STATUS = 10016"\n'
 802
 803     # Prepare job-specific part
 804     job = common.job_list[nj]
 805 <   pset = os.path.basename(job.configFilename())
 806 <   txt += '\n'
 807 <   if (self.datasetPath): # standard job
 808 <   #txt += 'InputFiles=$2\n'
 809 <   txt += 'InputFiles=${args[1]}\n'
 810 <   txt += 'MaxEvents=${args[2]}\n'
 811 <   txt += 'SkipEvents=${args[3]}\n'
 812 <   txt += 'echo "Inputfiles:<$InputFiles>"\n'
 813 <   txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset_tmp_1.cfg\n'
 814 <   txt += 'echo "MaxEvents:<$MaxEvents>"\n'
 815 <   txt += 'sed "s#INPUTMAXEVENTS#$MaxEvents#" $RUNTIME_AREA/ pset_tmp_1.cfg > pset_tmp_2.cfg\n'
 816 <   txt += 'echo "SkipEvents:<$SkipEvents>"\n'
 817 <   txt += 'sed "s#INPUTSKIPEVENTS#$SkipEvents#" $RUNTIME_AREA/ pset_tmp_2.cfg > pset.cfg\n'
 818 <   else: # pythia like job
 819 <   if (self.sourceSeed):
 820 <   # txt += 'Seed=$2\n'
 821 <   txt += 'Seed=${args[1]}\n'
 822 <   txt += 'echo "Seed: <$Seed>"\n'
 823 <   txt += 'sed "s#\<INPUT\>#$Seed#" $RUNTIME_AREA/'+pset+' > tmp.cfg\n'
 824 <   if (self.sourceSeedVtx):
 825 <   # txt += 'VtxSeed=$3\n'
 826 <   txt += 'VtxSeed=${args[2]}\n'
 827 <   txt += 'echo "VtxSeed: <$VtxSeed>"\n'
 828 <   txt += 'sed "s#INPUTVTX#$VtxSeed#" tmp.cfg > pset.cfg\n'
 805 >   if self.pset != None: #CarlosDaniele
 806 >   pset = os.path.basename(job.configFilename())
 807 >   txt += '\n'
 808 >   if (self.datasetPath): # standard job
 809 >   #txt += 'InputFiles=$2\n'
 810 >   txt += 'InputFiles=${args[1]}\n'
 811 >   txt += 'MaxEvents=${args[2]}\n'
 812 >   txt += 'SkipEvents=${args[3]}\n'
 813 >   txt += 'echo "Inputfiles:<$InputFiles>"\n'
 814 >   txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset_tmp_1.cfg\n'
 815 >   txt += 'echo "MaxEvents:<$MaxEvents>"\n'
 816 >   txt += 'sed "s#INPUTMAXEVENTS#$MaxEvents#" pset_tmp_1.cfg > pset_tmp_2.cfg\n'
 817 >   txt += 'echo "SkipEvents:<$SkipEvents>"\n'
 818 >   txt += 'sed "s#INPUTSKIPEVENTS#$SkipEvents#" pset_tmp_2.cfg > pset.cfg\n'
 819 >   else: # pythia like job
 820 >   if (self.sourceSeed):
 821 >   # txt += 'Seed=$2\n'
 822 >   txt += 'Seed=${args[1]}\n'
 823 >   txt += 'echo "Seed: <$Seed>"\n'
 824 >   txt += 'sed "s#\<INPUT\>#$Seed#" $RUNTIME_AREA/'+pset+' > tmp.cfg\n'
 825 >   if (self.sourceSeedVtx):
 826 >   # txt += 'VtxSeed=$3\n'
 827 >   txt += 'VtxSeed=${args[2]}\n'
 828 >   txt += 'echo "VtxSeed: <$VtxSeed>"\n'
 829 >   txt += 'sed "s#INPUTVTX#$VtxSeed#" tmp.cfg > pset.cfg\n'
 830 >   else:
 831 >   txt += 'mv tmp.cfg pset.cfg\n'
 832     else:
 833 <   txt += 'mv tmp.cfg pset.cfg\n'
 834 <   else:
 767 <   txt += '# Copy untouched pset\n'
 768 <   txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
 833 >   txt += '# Copy untouched pset\n'
 834 >   txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
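At run time the job wrapper uses sed to replace the INPUT / INPUTMAXEVENTS / INPUTSKIPEVENTS placeholders that PsetEdit wrote into the shipped .cfg with the per-job arguments (the hunk also drops a stray '$RUNTIME_AREA/ ' from two of the sed commands). The same substitution expressed in plain Python, with illustrative file names:

def fill_pset(template_path, out_path, input_files, max_events, skip_events):
    # Replace the placeholders baked into the pset template with per-job values.
    text = open(template_path).read()
    text = text.replace("{'INPUT'}", input_files)
    text = text.replace('INPUTMAXEVENTS', str(max_events))
    text = text.replace('INPUTSKIPEVENTS', str(skip_events))
    open(out_path, 'w').write(text)

# e.g. fill_pset('pset_template.cfg', 'pset.cfg', '"file:input.root"', 1000, 0)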
 835
 836
 837     if len(self.additional_inbox_files) > 0:
 843     txt += 'fi\n'
 844     pass
 845
 846 <   txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
 847 <
 848 <   txt += '\n'
 849 <   txt += 'echo "***** cat pset.cfg *********"\n'
 850 <   txt += 'cat pset.cfg\n'
 851 <   txt += 'echo "****** end pset.cfg ********"\n'
 852 <   txt += '\n'
 853 <   # txt += 'echo "***** cat pset1.cfg *********"\n'
 854 <   # txt += 'cat pset1.cfg\n'
 855 <   # txt += 'echo "****** end pset1.cfg ********"\n'
 846 >   if self.pset != None: #CarlosDaniele
 847 >   txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
 848 >
 849 >   txt += '\n'
 850 >   txt += 'echo "***** cat pset.cfg *********"\n'
 851 >   txt += 'cat pset.cfg\n'
 852 >   txt += 'echo "****** end pset.cfg ********"\n'
 853 >   txt += '\n'
 854 >   # txt += 'echo "***** cat pset1.cfg *********"\n'
 855 >   # txt += 'cat pset1.cfg\n'
 856 >   # txt += 'echo "****** end pset1.cfg ********"\n'
 857     return txt
 858
 859     def wsBuildExe(self, nj):
 891     txt += 'else \n'
 892     txt += ' echo "Successful untar" \n'
 893     txt += 'fi \n'
 894 +   txt += '\n'
 895 +   txt += 'echo "Include ProdAgentApi in PYTHONPATH"\n'
 896 +   txt += 'if [ -z "$PYTHONPATH" ]; then\n'
 897 +   txt += ' export PYTHONPATH=ProdAgentApi\n'
 898 +   txt += 'else\n'
 899 +   txt += ' export PYTHONPATH=ProdAgentApi:${PYTHONPATH}\n'
 900 +   txt += 'fi\n'
 901 +   txt += '\n'
 902 +
 903     pass
 904
 905     return txt
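The added lines make the unpacked ProdAgentApi directory importable on the worker node by prepending it to PYTHONPATH, with an explicit branch for an unset variable so no stray leading ':' appears. The equivalent logic in Python form:

import os

def prepend_pythonpath(entry='ProdAgentApi'):
    # Same effect as the generated shell: prepend, but avoid ':' when unset.
    current = os.environ.get('PYTHONPATH', '')
    os.environ['PYTHONPATH'] = entry if not current else entry + ':' + current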
 911     """
 912
 913     def executableName(self):
 914 <   return self.executable
 914 >   if self.pset == None: #CarlosDaniele
 915 >   return "sh "
 916 >   else:
 917 >   return self.executable
 918
 919     def executableArgs(self):
 920 <   return " -p pset.cfg"
 920 >   if self.pset == None:#CarlosDaniele
 921 >   return self.scriptExe + " $NJob"
 922 >   else:
 923 >   return " -p pset.cfg"
 924
 925     def inputSandbox(self, nj):
 926     """
 927     Returns a list of filenames to be put in JDL input sandbox.
 928     """
 929     inp_box = []
 930 <   # dict added to delete duplicate from input sandbox file list
 931 <   seen = {}
 930 >   # # dict added to delete duplicate from input sandbox file list
 931 >   # seen = {}
 932     ## code
 933     if os.path.isfile(self.tgzNameWithPath):
 934     inp_box.append(self.tgzNameWithPath)
 935     ## config
 936 <   inp_box.append(common.job_list[nj].configFilename())
 936 >   if not self.pset is None: #CarlosDaniele
 937 >   inp_box.append(common.job_list[nj].configFilename())
 938     ## additional input files
 939     #for file in self.additional_inbox_files:
 940     # inp_box.append(common.work_space.cwdDir()+file)
 946     """
 947     out_box = []
 948
 866 -   stdout=common.job_list[nj].stdout()
 867 -   stderr=common.job_list[nj].stderr()
 868 -
 949     ## User Declared output files
 950 <   for out in self.output_file:
 950 >   for out in (self.output_file+self.output_file_sandbox):
 951     n_out = nj + 1
 952     out_box.append(self.numberFile_(out,str(n_out)))
 953     return out_box
 874 -   return []
 954
 955     def prepareSteeringCards(self):
 956     """
 966     txt = '\n'
 967     txt += '# directory content\n'
 968     txt += 'ls \n'
 969 <   file_list = ''
 970 <   for fileWithSuffix in self.output_file:
 969 >
 970 >   for fileWithSuffix in (self.output_file+self.output_file_sandbox):
 971     output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
 893 -   file_list=file_list+output_file_num+' '
 972     txt += '\n'
 973     txt += '# check output file\n'
 974     txt += 'ls '+fileWithSuffix+'\n'
 975     txt += 'ls_result=$?\n'
 898 -   #txt += 'exe_result=$?\n'
 976     txt += 'if [ $ls_result -ne 0 ] ; then\n'
 977     txt += ' echo "ERROR: Problem with output file"\n'
 901 -   #txt += ' echo "JOB_EXIT_STATUS = $exe_result"\n'
 902 -   #txt += ' echo "JobExitCode=60302" | tee -a $RUNTIME_AREA/$repo\n'
 903 -   #txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
 904 -   ### OLI_DANIELE
 978     if common.scheduler.boss_scheduler_name == 'condor_g':
 979     txt += ' if [ $middleware == OSG ]; then \n'
 980     txt += ' echo "prepare dummy output file"\n'
 985     txt += 'fi\n'
 986
 987     txt += 'cd $RUNTIME_AREA\n'
 915 -   file_list=file_list[:-1]
 916 -   txt += 'file_list="'+file_list+'"\n'
 988     txt += 'cd $RUNTIME_AREA\n'
 989     ### OLI_DANIELE
 990     txt += 'if [ $middleware == OSG ]; then\n'
1002     txt += ' fi\n'
1003     txt += 'fi\n'
1004     txt += '\n'
1005 +
1006 +   file_list = ''
1007 +   ## Add to filelist only files to be possibly copied to SE
1008 +   for fileWithSuffix in self.output_file:
1009 +   output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1010 +   file_list=file_list+output_file_num+' '
1011 +   file_list=file_list[:-1]
1012 +   txt += 'file_list="'+file_list+'"\n'
1013 +
1014     return txt
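The file_list variable is now built only from the user-declared output files (the candidates for a copy to the storage element), while the sandbox-only files such as the job report are deliberately excluded; each name gets the job index folded in via numberFile_. A hedged sketch of that selection and naming (the suffix handling is my reading of numberFile_, not a verbatim copy):

import os

def number_file(name, njob):
    # e.g. histo.root, 3 -> histo_3.root  (assumed behaviour of numberFile_)
    base, ext = os.path.splitext(name)
    return base + '_' + str(njob) + ext

def se_file_list(output_file, output_file_sandbox, njob):
    # Only user-declared outputs are candidates for the SE copy;
    # sandbox-only files (e.g. crab_fjr.xml) are left out on purpose.
    return ' '.join(number_file(f, njob) for f in output_file)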
1015
1016     def numberFile_(self, file, txt):
1036     return job requirements to add to jdl files
1037     """
1038     req = ''
1039 <   if common.analisys_common_info['sw_version']:
1039 >   if self.version:
1040     req='Member("VO-cms-' + \
1041 <   common.analisys_common_info['sw_version'] + \
1041 >   self.version + \
1042     '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1043
1044     req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
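With the analisys_common_info dictionary retired (see the removals at lines 21 and 41-44), the JDL requirement is now built from the CMSSW release stored on the JobType itself: matching sites must publish the VO-cms-<version> software tag and outbound IP connectivity. A small sketch of the resulting string, with an illustrative version value:

def jdl_requirements(version):
    # e.g. version = 'CMSSW_1_2_0'
    req = ''
    if version:
        req = ('Member("VO-cms-' + version +
               '", other.GlueHostApplicationSoftwareRunTimeEnvironment)')
    req += ' && (other.GlueHostNetworkAdapterOutboundIP)'
    return req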