root/cvsroot/COMP/CRAB/python/cms_orca.py
Revision: 1.55
Committed: Tue Jun 27 02:31:31 2006 UTC by gutsche
Content type: text/x-python
Branch: MAIN
Changes since 1.54: +86 -2 lines
Log Message:
change WN wrapper script ML reporting to report only the relevant information at any stage of the WN script (the repo file is deleted after each report to ML)
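The reporting pattern this revision introduces (dump the status, delete the repo file, then re-append only the ML identifiers) recurs throughout the wrapper-script generators below; a minimal sketch of the emitted shell fragment, with a hypothetical helper name:

    def ml_report_fragment(indent='    '):
        # Sketch of the fragment appended after each dumpStatus call below;
        # the helper name is hypothetical, the echoed variables mirror the real code.
        txt  = indent + 'rm -f $RUNTIME_AREA/$repo \n'
        txt += indent + 'echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
        txt += indent + 'echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
        txt += indent + 'echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
        return txt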

File Contents

# User Rev Content
1 nsmirnov 1.1 from JobType import JobType
2     from crab_logger import Logger
3     from crab_exceptions import *
4     from crab_util import *
5     import common
6 slacapra 1.53 import DataDiscovery
7     import DataLocation
8 slacapra 1.9 import Scram
9 slacapra 1.41 import TarBall
10 nsmirnov 1.1
11     import os, string, sys, re
12 slacapra 1.53
13     ###Fabio
14 gutsche 1.50 import math
15 nsmirnov 1.1
16     class Orca(JobType):
17     def __init__(self, cfg_params):
18     JobType.__init__(self, 'ORCA')
19 slacapra 1.6 common.logger.debug(3,'ORCA::__init__')
20 nsmirnov 1.1
21     self.analisys_common_info = {}
22 corvo 1.36 # Marco.
23     self._params = {}
24     self.cfg_params = cfg_params
25 nsmirnov 1.1
26     log = common.logger
27    
28 slacapra 1.9 self.scram = Scram.Scram(cfg_params)
29 nsmirnov 1.1 scramArea = ''
30 corvo 1.20 self.additional_inbox_files = []
31     self.scriptExe = ''
32 slacapra 1.6
33 slacapra 1.9 self.version = self.scram.getSWVersion()
34 corvo 1.36 self.setParam_('application', self.version)
35 slacapra 1.9 common.analisys_common_info['sw_version'] = self.version
36 fanzago 1.33 common.analisys_common_info['copy_input_data'] = 0
37 gutsche 1.50
38     self.total_number_of_jobs = 0
39     self.job_number_of_events = 0
40 nsmirnov 1.1
41 slacapra 1.9 ### collect Data cards
42 nsmirnov 1.1 try:
43 fanzago 1.32 self.owner = cfg_params['ORCA.owner']
44 corvo 1.36 self.setParam_('owner', self.owner)
45 nsmirnov 1.3 log.debug(6, "Orca::Orca(): owner = "+self.owner)
46 fanzago 1.32 self.dataset = cfg_params['ORCA.dataset']
47 corvo 1.36 self.setParam_('dataset', self.dataset)
48 nsmirnov 1.3 log.debug(6, "Orca::Orca(): dataset = "+self.dataset)
49 slacapra 1.9 except KeyError:
50     msg = "Error: owner and/or dataset not defined "
51     raise CrabException(msg)
52    
53     self.dataTiers = []
54     try:
55 fanzago 1.32 tmpDataTiers = string.split(cfg_params['ORCA.data_tier'],',')
56 slacapra 1.9 for tmp in tmpDataTiers:
57     tmp=string.strip(tmp)
58     self.dataTiers.append(tmp)
59     pass
60     pass
61     except KeyError:
62     pass
63     log.debug(6, "Orca::Orca(): dataTiers = "+str(self.dataTiers))
64    
65     ## now the application
66     try:
67 fanzago 1.32 self.executable = cfg_params['ORCA.executable']
68 nsmirnov 1.3 log.debug(6, "Orca::Orca(): executable = "+self.executable)
69 corvo 1.36 self.setParam_('exe', self.executable)
70 slacapra 1.23 except KeyError:
71 slacapra 1.9 msg = "Error: executable not defined "
72     raise CrabException(msg)
73    
74     try:
75 fanzago 1.32 self.orcarc_file = cfg_params['ORCA.orcarc_file']
76 nsmirnov 1.3 log.debug(6, "Orca::Orca(): orcarc file = "+self.orcarc_file)
77 slacapra 1.23 if (not os.path.exists(self.orcarc_file)):
78     raise CrabException("User defined .orcarc file "+self.orcarc_file+" does not exist")
79     except KeyError:
80 slacapra 1.9 log.message("Using empty orcarc file")
81     self.orcarc_file = ''
82 nsmirnov 1.1
83 slacapra 1.9 # output files
84     try:
85 nsmirnov 1.1 self.output_file = []
86    
87 fanzago 1.32 tmp = cfg_params['ORCA.output_file']
88 nsmirnov 1.1 if tmp != '':
89 fanzago 1.32 tmpOutFiles = string.split(cfg_params['ORCA.output_file'],',')
90 nsmirnov 1.3 log.debug(7, 'Orca::Orca(): output files '+str(tmpOutFiles))
91 nsmirnov 1.1 for tmp in tmpOutFiles:
92     tmp=string.strip(tmp)
93     self.output_file.append(tmp)
94     pass
95    
96 slacapra 1.9 else:
97     log.message("No output file defined: only stdout/err will be available")
98 nsmirnov 1.1 pass
99     pass
100     except KeyError:
101 slacapra 1.9 log.message("No output file defined: only stdout/err will be available")
102 nsmirnov 1.1 pass
103    
104 corvo 1.20 # script_exe file as additional file in inputSandbox
105     try:
106 fanzago 1.32 self.scriptExe = cfg_params['ORCA.script_exe']
107 corvo 1.20 self.additional_inbox_files.append(self.scriptExe)
108     except KeyError:
109     pass
110     if self.scriptExe != '':
111     if os.path.isfile(self.scriptExe):
112     pass
113     else:
114     log.message("WARNING: file "+self.scriptExe+" not found")
115     sys.exit()
116    
117 slacapra 1.9 ## additional input files
118 nsmirnov 1.1 try:
119     tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
120     for tmp in tmpAddFiles:
121     tmp=string.strip(tmp)
122 slacapra 1.7 self.additional_inbox_files.append(tmp)
123 nsmirnov 1.1 pass
124     pass
125     except KeyError:
126     pass
127    
128     try:
129 slacapra 1.54 self.total_number_of_events = int(cfg_params['ORCA.total_number_of_events'])
130 nsmirnov 1.1 except KeyError:
131     msg = 'Must define total_number_of_events and job_number_of_events'
132     raise CrabException(msg)
133    
134     try:
135 slacapra 1.54 self.first_event = int(cfg_params['ORCA.first_event'])
136 nsmirnov 1.1 except KeyError:
137 gutsche 1.50 self.first_event = 0
138 nsmirnov 1.1 pass
139 nsmirnov 1.3 log.debug(6, "Orca::Orca(): total number of events = "+`self.total_number_of_events`)
140 slacapra 1.6 #log.debug(6, "Orca::Orca(): events per job = "+`self.job_number_of_events`)
141 gutsche 1.50 log.debug(6, "Orca::Orca(): first event = "+`self.first_event`)
142 slacapra 1.53
143     #DBSDLS-start
144     ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
145     self.maxEvents=0 # max events available ( --> check the requested nb. of evts in Creator.py)
146     self.DBSPaths={} # all dbs paths requested ( --> input to the site local discovery script)
147     ## Perform the data location and discovery (based on DBS/DLS)
148     self.DataDiscoveryAndLocation(cfg_params)
149     #DBSDLS-end
150 nsmirnov 1.1
151 slacapra 1.41 self.TarBaller = TarBall.TarBall(self.executable, self.scram)
152     self.tgzNameWithPath = self.TarBaller.prepareTarBall()
153 nsmirnov 1.1
154 slacapra 1.10 try:
155     self.ML = int(cfg_params['USER.activate_monalisa'])
156     except KeyError:
157     self.ML = 0
158     pass
159 corvo 1.36
160     self.setTaskid_()
161 corvo 1.44 self.setParam_('taskId', self.cfg_params['taskId'])
162 corvo 1.36
163 nsmirnov 1.1 return
164    
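For orientation, the constructor above reads roughly the following configuration cards; the dictionary below is a hypothetical sketch of cfg_params, not part of the file:

    cfg_params_example = {                                   # hypothetical values
        'ORCA.owner':                  'TestOwner',
        'ORCA.dataset':                'TestDataset',
        'ORCA.data_tier':              'DST,Digi',           # optional, comma-separated
        'ORCA.executable':             'ExSimHitStatistics',
        'ORCA.orcarc_file':            'my.orcarc',          # optional, empty card used if absent
        'ORCA.output_file':            'histos.root',        # optional, comma-separated
        'ORCA.script_exe':             '',                   # optional wrapper script
        'ORCA.total_number_of_events': '1000',
        'ORCA.first_event':            '0',                  # optional, defaults to 0
        'USER.additional_input_files': '',                   # optional, comma-separated
        'USER.activate_monalisa':      '1',                  # optional, defaults to 0
        'taskId':                      'user_crab_0_000000', # set by the CRAB core
    }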
165 gutsche 1.50 def split(self, jobParams):
166     """
167 slacapra 1.53 This method returns the list of orca specific job type items
168 gutsche 1.50 needed to run the jobs
169     """
170     common.jobDB.load()
171     njobs = self.total_number_of_jobs
172     # create the empty structure
173     for i in range(njobs):
174     jobParams.append("")
175    
176     # fill both the list and the DB (part of the code taken from jobsToDB)
177     firstEvent = self.first_event
178     lastJobsNumberOfEvents = self.job_number_of_events
179     # last job is different...
180     for job in range(njobs-1):
181     jobParams[job] = [firstEvent, lastJobsNumberOfEvents]
182     common.jobDB.setArguments(job, jobParams[job])
183     firstEvent += self.job_number_of_events
184    
185     # this is the last job
186     lastJobsNumberOfEvents = (self.total_number_of_events + self.first_event) - firstEvent
187     status = common.jobDB.status(njobs - 1)
188     jobParams[njobs - 1] = [firstEvent, lastJobsNumberOfEvents]
189     common.jobDB.setArguments(njobs - 1, jobParams[njobs - 1])
190    
191     if (lastJobsNumberOfEvents!=self.job_number_of_events):
192     common.logger.message(str(self.total_number_of_jobs-1)+' jobs will be created for '+str(self.job_number_of_events)+' events each plus 1 for '+str(lastJobsNumberOfEvents)+' events for a total of '+str(self.job_number_of_events*(self.total_number_of_jobs-1)+lastJobsNumberOfEvents)+' events')
193     else:
194     common.logger.message(str(self.total_number_of_jobs)+' jobs will be created for '+str(self.job_number_of_events)+' events each for a total of '+str(self.job_number_of_events*(self.total_number_of_jobs-1)+lastJobsNumberOfEvents)+' events')
195    
196     common.jobDB.save()
197     return
198    
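A worked sketch (hypothetical numbers, not part of the file) of the per-job arguments the loop above stores: each job gets [firstEvent, nEvents], and the last job absorbs the remainder.

    def sketch_split(first_event=0, total_events=100, events_per_job=30, njobs=4):
        params, first = [], first_event
        for job in range(njobs - 1):
            params.append([first, events_per_job])
            first += events_per_job
        # the last job gets whatever is left, exactly as in split() above
        params.append([first, (total_events + first_event) - first])
        return params

    # sketch_split() -> [[0, 30], [30, 30], [60, 30], [90, 10]]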
199     def getJobTypeArguments(self, nj, sched):
200     params = common.jobDB.arguments(nj)
201    
202 slacapra 1.53 if sched=="EDG" or sched=="CONDOR" or sched=="GRID":
203 gutsche 1.50 parString = "" + str(params[0])+' '+str(params[1])
204     elif sched=="BOSS":
205     parString = "" + str(params[0])+' '+str(params[1])
206     else:
207     return ""
208     return parString
209    
210     def numberOfJobs(self):
211     first_event = self.first_event
212     maxAvailableEvents = int(self.maxEvents)
213     common.logger.debug(1,"Available events: "+str(maxAvailableEvents))
214     if first_event>=maxAvailableEvents:
215     raise CrabException('First event is bigger than maximum number of available events!')
216    
217     try:
218     n = self.total_number_of_events
219     if n == 'all': n = -1
220     if int(n) == -1:
221     tot_num_events = (maxAvailableEvents - first_event)
222     common.logger.debug(1,"Analysing all available events "+str(tot_num_events))
223     else:
224     if maxAvailableEvents<(int(n)+ first_event): # + self.first_event):
225     raise CrabException('(First event + total events)='+str(int(n)+first_event)+' is bigger than maximum number of available events '+str(maxAvailableEvents)+' !! Use "total_number_of_events=-1" to analyze the whole dataset')
226     tot_num_events = int(n)
227     except KeyError:
228     common.logger.message("total_number_of_events not defined, set it to maximum available")
229     tot_num_events = (maxAvailableEvents - first_event)
230     pass
231     common.logger.message("Total number of events to be analyzed: "+str(tot_num_events))
232    
233    
234     # read user directives
235     eventPerJob=0
236     try:
237 slacapra 1.54 eventPerJob = self.cfg_params['ORCA.job_number_of_events']
238 gutsche 1.50 except KeyError:
239     pass
240    
241     jobsPerTask=0
242     try:
243 slacapra 1.54 jobsPerTask = int(self.cfg_params['ORCA.total_number_of_jobs'])
244 gutsche 1.50 except KeyError:
245     pass
246    
247     # If both the above set, complain and use event per jobs
248     if eventPerJob>0 and jobsPerTask>0:
249     msg = 'Warning. '
250     msg += 'job_number_of_events and total_number_of_jobs are both defined. '
251     msg += 'Using job_number_of_events.'
252     common.logger.message(msg)
253     jobsPerTask = 0
254     if eventPerJob==0 and jobsPerTask==0:
255     msg = 'Warning. '
256     msg += 'job_number_of_events and total_number_of_jobs are not defined. '
257     msg += 'Creating just one job for all events.'
258     common.logger.message(msg)
259     jobsPerTask = 1
260    
261     # first case: events per job defined
262     if eventPerJob>0:
263     n=eventPerJob
264     #if n == 'all' or n == '-1' or (int(n)>self.total_number_of_events and self.total_number_of_events>0):
265     if n == 'all' or n == '-1' or (int(n)>tot_num_events and tot_num_events>0):
266     common.logger.message("Asking for more events than available: setting to the maximum available")
267     job_num_events = tot_num_events
268     tot_num_jobs = 1
269     else:
270     job_num_events = int(n)
271     tot_num_jobs = int((tot_num_events-1)/job_num_events)+1
272    
273     elif jobsPerTask>0:
274     common.logger.debug(2,"total number of events: "+str(tot_num_events)+" JobPerTask "+str(jobsPerTask))
275     job_num_events = int(math.floor((tot_num_events)/jobsPerTask))
276     tot_num_jobs = jobsPerTask
277    
278     # should not happen...
279     else:
280     raise CrabException('Something wrong with splitting')
281    
282     common.logger.debug(2,"total number of events: "+str(tot_num_events)+" events per job: "+str(job_num_events))
283    
284     #used by jobsToDB for logs
285     self.job_number_of_events = job_num_events
286     self.total_number_of_jobs = tot_num_jobs
287     return tot_num_jobs
288    
289    
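Hypothetical examples (not from the file) of how the choices above play out for maxEvents=1000, first_event=0 and total_number_of_events=-1:

    #   ORCA.job_number_of_events = 300   ->  4 jobs: 300, 300, 300 and 100 events
    #   ORCA.total_number_of_jobs = 5     ->  5 jobs of floor(1000/5) = 200 events each
    #   neither card set                  ->  1 job over all 1000 events
    #   both cards set                    ->  job_number_of_events wins, total_number_of_jobs is ignored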
290     def jobsToDB(self, nJobs):
291     """
292 slacapra 1.53 Fill the DB with proper entries for ORCA
293 gutsche 1.50 """
294    
295     firstEvent = self.first_event
296     lastJobsNumberOfEvents = self.job_number_of_events
297    
298     # last job is different...
299     for job in range(nJobs-1):
300     common.jobDB.setFirstEvent(job, firstEvent)
301     common.jobDB.setMaxEvents(job, self.job_number_of_events)
302     firstEvent=firstEvent+self.job_number_of_events
303    
304     # this is the last job
305     common.jobDB.setFirstEvent(nJobs-1, firstEvent)
306     lastJobsNumberOfEvents= (self.total_number_of_events+self.first_event)-firstEvent
307     common.jobDB.setMaxEvents(nJobs-1, lastJobsNumberOfEvents)
308    
309     if (lastJobsNumberOfEvents!=self.job_number_of_events):
310     common.logger.message(str(self.total_number_of_jobs-1)+' jobs will be created for '+str(self.job_number_of_events)+' events each plus 1 for '+str(lastJobsNumberOfEvents)+' events for a total of '+str(self.job_number_of_events*(self.total_number_of_jobs-1)+lastJobsNumberOfEvents)+' events')
311     else:
312     common.logger.message(str(self.total_number_of_jobs)+' jobs will be created for '+str(self.job_number_of_events)+' events each for a total of '+str(self.job_number_of_events*(self.total_number_of_jobs-1)+lastJobsNumberOfEvents)+' events')
313    
314     return
315    
316    
317 nsmirnov 1.4 def wsSetupEnvironment(self, nj):
318     """
319     Returns part of a job script which prepares
320     the execution environment for the job 'nj'.
321     """
322    
323     # Prepare JobType-independent part
324 spiga 1.46 txt = ''
325    
326     ## OLI_Daniele: at this level the middleware is already known
327    
328     txt += 'if [ $middleware == LCG ]; then \n'
329     txt += self.wsSetupCMSLCGEnvironment_()
330     txt += 'elif [ $middleware == OSG ]; then\n'
331 slacapra 1.53 txt += ' time=`date -u +"%s"`\n'
332     txt += ' WORKING_DIR=$OSG_WN_TMP/cms_$time\n'
333     txt += ' echo "Creating working directory: $WORKING_DIR"\n'
334     txt += ' /bin/mkdir -p $WORKING_DIR\n'
335     txt += ' if [ ! -d $WORKING_DIR ] ;then\n'
336 gutsche 1.52 txt += ' echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
337     txt += ' echo "JOB_EXIT_STATUS = 10016"\n'
338     txt += ' echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
339     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
340 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
341     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
342     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
343     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
344 slacapra 1.53 txt += ' exit 1\n'
345     txt += ' fi\n'
346 spiga 1.46 txt += '\n'
347     txt += ' echo "Change to working directory: $WORKING_DIR"\n'
348     txt += ' cd $WORKING_DIR\n'
349     txt += self.wsSetupCMSOSGEnvironment_()
350     txt += 'fi\n'
351 nsmirnov 1.4
352 fanzago 1.16 # Prepare JobType-specific part
353     scram = self.scram.commandName()
354     txt += '\n\n'
355     txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
356     txt += scram+' project ORCA '+self.version+'\n'
357     txt += 'status=$?\n'
358     txt += 'if [ $status != 0 ] ; then\n'
359 gutsche 1.52 txt += ' echo "SET_EXE_ENV 10034 ==> ERROR ORCA '+self.version+' not found on `hostname`" \n'
360 spiga 1.46 txt += ' echo "JOB_EXIT_STATUS = 10034"\n'
361     txt += ' echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n'
362     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
363 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
364     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
365     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
366     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
367 spiga 1.46 ## OLI_Daniele
368 slacapra 1.53 txt += ' if [ $middleware == OSG ]; then \n'
369     txt += ' echo "Remove working directory: $WORKING_DIR"\n'
370     txt += ' cd $RUNTIME_AREA\n'
371     txt += ' /bin/rm -rf $WORKING_DIR\n'
372     txt += ' if [ -d $WORKING_DIR ] ;then\n'
373 gutsche 1.55 txt += ' echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after ORCA not found on `hostname`"\n'
374 gutsche 1.52 txt += ' echo "JOB_EXIT_STATUS = 10018"\n'
375     txt += ' echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
376     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
377 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
378     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
379     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
380     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
381 slacapra 1.53 txt += ' fi\n'
382     txt += ' fi \n'
383     txt += ' exit 1\n'
384 fanzago 1.16 txt += 'fi \n'
385     txt += 'echo "ORCA_VERSION = '+self.version+'"\n'
386     txt += 'cd '+self.version+'\n'
387 fanzago 1.29 ### needed grep for bug in scramv1 ###
388 spiga 1.46
389 fanzago 1.35 #txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
390 fanzago 1.16
391 slacapra 1.9 # Handle the arguments:
392     txt += "\n"
393 spiga 1.46 txt += "## ARGUMENTS: $1 Job Number\n"
394     txt += "## ARGUMENTS: $2 First Event for this job\n"
395     txt += "## ARGUMENTS: $3 Max Event for this job\n"
396 slacapra 1.9 txt += "\n"
397     txt += "narg=$#\n"
398 gutsche 1.52 txt += "NJob=$1\n"
399     txt += "FirstEvent=$2\n"
400     txt += "MaxEvents=$3\n"
401 slacapra 1.9 txt += "if [ $narg -lt 3 ]\n"
402     txt += "then\n"
403 gutsche 1.52 txt += " echo 'SET_EXE_ENV 50113 ==> ERROR Too few arguments' +$narg+ \n"
404 corvo 1.38 txt += ' echo "JOB_EXIT_STATUS = 50113"\n'
405     txt += ' echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n'
406 corvo 1.28 txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
407 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
408     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
409     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
410     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
411 spiga 1.46
412     ## OLI_Daniele
413 slacapra 1.53 txt += ' if [ $middleware == OSG ]; then \n'
414     txt += ' echo "Remove working directory: $WORKING_DIR"\n'
415     txt += ' cd $RUNTIME_AREA\n'
416     txt += ' /bin/rm -rf $WORKING_DIR\n'
417     txt += ' if [ -d $WORKING_DIR ] ;then\n'
418 gutsche 1.52 txt += ' echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
419     txt += ' echo "JOB_EXIT_STATUS = 50114"\n'
420     txt += ' echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
421     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
422 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
423     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
424     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
425     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
426 slacapra 1.53 txt += ' fi\n'
427     txt += ' fi \n'
428 gutsche 1.52 txt += " exit 1\n"
429 slacapra 1.9 txt += "fi\n"
430 fanzago 1.16 txt += "\n"
431 spiga 1.46
432 nsmirnov 1.4 # Prepare job-specific part
433     job = common.job_list[nj]
434     orcarc = os.path.basename(job.configFilename())
435     txt += '\n'
436 slacapra 1.53 #DBSDLS-start
437     #### site-local catalogue discovery mechanism:
438     ## check that the site configuration file exists
439     txt += 'echo "### Site Local Catalogue Discovery ### "\n'
440     txt += 'if [ $middleware == LCG ]; then \n'
441     txt += ' if [ -f $VO_CMS_SW_DIR/cms_site_config ]; then \n'
442     txt += ' dbsdls_cms_site_config=$VO_CMS_SW_DIR/cms_site_config\n'
443     txt += ' else\n'
444     txt += ' echo "Site Local Catalogue Discovery Failed: No site configuration file $VO_CMS_SW_DIR/cms_site_config !" \n'
445     txt += ' echo "JOB_EXIT_STATUS = 10037"\n'
446     txt += ' echo "JobExitCode=10037" | tee -a $RUNTIME_AREA/$repo\n'
447     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
448 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
449     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
450     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
451     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
452 slacapra 1.53 txt += ' exit 1\n'
453     txt += ' fi \n'
454     txt += 'elif [ $middleware == OSG ]; then\n'
455     txt += ' if [ -f $GRID3_APP_DIR/cmssoft/cms_site_config ]; then \n'
456     txt += ' dbsdls_cms_site_config=$GRID3_APP_DIR/cmssoft/cms_site_config\n'
457     txt += ' elif [ -f $OSG_APP/cmssoft/cms_site_config ]; then \n'
458     txt += ' dbsdls_cms_site_config=$OSG_APP/cmssoft/cms_site_config\n'
459     txt += ' else\n'
460     txt += ' echo "Site Local Catalogue Discovery Failed: No site configuration file $GRID3_APP_DIR/cmssoft/cms_site_config or $OSG_APP/cmssoft/cms_site_config !" \n'
461     txt += ' echo "JOB_EXIT_STATUS = 10037"\n'
462     txt += ' echo "JobExitCode=10037" | tee -a $RUNTIME_AREA/$repo\n'
463     txt += ' if [ $middleware == OSG ]; then \n'
464     txt += ' echo "Remove working directory: $WORKING_DIR"\n'
465     txt += ' cd $RUNTIME_AREA\n'
466     txt += ' /bin/rm -rf $WORKING_DIR\n'
467     txt += ' if [ -d $WORKING_DIR ] ;then\n'
468     txt += ' echo "SET_EXE_ENV 10019 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after No site configuration file $GRID3_APP_DIR/cmssoft/cms_site_config or $OSG_APP/cmssoft/cms_site_config"\n'
469     txt += ' echo "JOB_EXIT_STATUS = 10019"\n'
470     txt += ' echo "JobExitCode=10019" | tee -a $RUNTIME_AREA/$repo\n'
471     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
472 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
473     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
474     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
475     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
476 slacapra 1.53 txt += ' fi\n'
477     txt += ' fi \n'
478     txt += ' exit 1\n'
479     txt += ' fi \n'
480 slacapra 1.22 txt += 'fi\n'
481 slacapra 1.53 txt += 'echo "Site Local Catalog Discovery, selected site configuration: $dbsdls_cms_site_config"\n'
482     ## look for a site local script sent as inputsandbox otherwise use the default one under $VO_CMS_SW_DIR for LCG or $GRID3_APP_DIR/cmssoft or $OSG_APP/cmssoft for OSG
483     txt += 'if [ -f $RUNTIME_AREA/cms_site_catalogue.sh ]; then \n'
484     txt += ' sitelocalscript=$RUNTIME_AREA/cms_site_catalogue.sh \n'
485     txt += 'elif [ $middleware == LCG ]; then \n'
486     txt += ' if [ -f $VO_CMS_SW_DIR/cms_site_catalogue.sh ]; then \n'
487     txt += ' sitelocalscript=$VO_CMS_SW_DIR/cms_site_catalogue.sh \n'
488     txt += ' else \n'
489     txt += ' echo "Site Local Catalogue Discovery Failed: No site local script cms_site_catalogue.sh !"\n'
490     txt += ' echo "JOB_EXIT_STATUS = 10038"\n'
491     txt += ' echo "JobExitCode=10038" | tee -a $RUNTIME_AREA/$repo\n'
492     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
493 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
494     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
495     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
496     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
497 slacapra 1.53 txt += ' exit 1\n'
498     txt += ' fi \n'
499     txt += 'elif [ $middleware == OSG ]; then\n'
500     txt += ' if [ -f $GRID3_APP_DIR/cmssoft/cms_site_catalogue.sh ]; then \n'
501     txt += ' sitelocalscript=$GRID3_APP_DIR/cmssoft/cms_site_catalogue.sh\n'
502     txt += ' elif [ -f $OSG_APP/cmssoft/cms_site_catalogue.sh ]; then \n'
503     txt += ' sitelocalscript=$OSG_APP/cmssoft/cms_site_catalogue.sh\n'
504     txt += ' else\n'
505     txt += ' echo "Site Local Catalogue Discovery Failed: No site local script cms_site_catalogue.sh !"\n'
506     txt += ' echo "JOB_EXIT_STATUS = 10038"\n'
507     txt += ' echo "JobExitCode=10038" | tee -a $RUNTIME_AREA/$repo\n'
508     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
509 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
510     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
511     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
512     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
513 slacapra 1.53 txt += ' if [ $middleware == OSG ]; then \n'
514     txt += ' echo "Remove working directory: $WORKING_DIR"\n'
515     txt += ' cd $RUNTIME_AREA\n'
516     txt += ' /bin/rm -rf $WORKING_DIR\n'
517     txt += ' if [ -d $WORKING_DIR ] ;then\n'
518     txt += ' echo "SET_EXE_ENV 10014 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after No site local script cms_site_catalogue.sh"\n'
519     txt += ' echo "JOB_EXIT_STATUS = 10014"\n'
520     txt += ' echo "JobExitCode=10014" | tee -a $RUNTIME_AREA/$repo\n'
521     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
522 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
523     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
524     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
525     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
526 slacapra 1.53 txt += ' fi\n'
527     txt += ' fi \n'
528     txt += ' exit 1\n'
529     txt += ' fi \n'
530 nsmirnov 1.4 txt += 'fi\n'
531 slacapra 1.53 txt += 'echo "Site Local Catalog Discovery, selected local script: $sitelocalscript"\n'
532     ## execute the site local configuration script with the user required data as input
533     inputdata=string.join(self.DBSPaths,' ')
534     sitecatalog_cmd='$sitelocalscript -c $dbsdls_cms_site_config '+inputdata
535     txt += sitecatalog_cmd+'\n'
536     txt += 'sitestatus=$?\n'
537     txt += 'if [ ! -f inputurl_orcarc ] || [ $sitestatus -ne 0 ]; then\n'
538     txt += ' echo "Site Local Catalogue Discovery Failed: exiting with $sitestatus"\n'
539     txt += ' echo "JOB_EXIT_STATUS = 10039"\n'
540     txt += ' echo "JobExitCode=10039" | tee -a $RUNTIME_AREA/$repo\n'
541     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
542 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
543     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
544     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
545     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
546 slacapra 1.53 txt += ' if [ $middleware == OSG ]; then \n'
547     txt += ' echo "Remove working directory: $WORKING_DIR"\n'
548     txt += ' cd $RUNTIME_AREA\n'
549     txt += ' /bin/rm -rf $WORKING_DIR\n'
550     txt += ' if [ -d $WORKING_DIR ] ;then\n'
551     txt += ' echo "SET_EXE_ENV 10013 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Site Local Catalogue Discovery Failed: exiting with $sitestatus"\n'
552     txt += ' echo "JOB_EXIT_STATUS = 10013"\n'
553     txt += ' echo "JobExitCode=10013" | tee -a $RUNTIME_AREA/$repo\n'
554     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
555 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
556     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
557     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
558     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
559 slacapra 1.53 txt += ' fi\n'
560     txt += ' fi \n'
561     txt += ' exit 1\n'
562     txt += 'fi \n'
563     ## append the orcarc fragment about the Input catalogues to the .orcarc
564     txt += 'cp $RUNTIME_AREA/'+orcarc+' .orcarc\n'
565     txt += 'cat inputurl_orcarc >> .orcarc\n'
566     #DBSDLS-end
567 fanzago 1.26
568     if len(self.additional_inbox_files) > 0:
569     for file in self.additional_inbox_files:
570 fanzago 1.34 file = os.path.basename(file)
571 fanzago 1.26 txt += 'if [ -e $RUNTIME_AREA/'+file+' ] ; then\n'
572 fanzago 1.34 txt += ' cp $RUNTIME_AREA/'+file+' .\n'
573     txt += ' chmod +x '+file+'\n'
574 fanzago 1.26 txt += 'fi\n'
575 spiga 1.46 pass
576 fanzago 1.26
577 spiga 1.46 ### OLI_DANIELE
578 fanzago 1.16 txt += "echo 'SET_EXE_ENV 0 ==> job setup ok'\n"
579     txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
580 slacapra 1.9
581 slacapra 1.10 txt += 'echo "FirstEvent=$FirstEvent" >> .orcarc\n'
582 afanfani 1.24 txt += 'echo "MaxEvents=$MaxEvents" >> .orcarc\n'
583 slacapra 1.10 if self.ML:
584     txt += 'echo "MonalisaJobId=$NJob" >> .orcarc\n'
585    
586     txt += '\n'
587     txt += 'echo "***** cat .orcarc *********"\n'
588     txt += 'cat .orcarc\n'
589     txt += 'echo "****** end .orcarc ********"\n'
590 nsmirnov 1.4 return txt
591    
592     def wsBuildExe(self, nj):
593     """
594     Put in the script the commands to build an executable
595     or a library.
596     """
597    
598     txt = ""
599    
600 fanzago 1.12 if os.path.isfile(self.tgzNameWithPath):
601 fanzago 1.42 txt += 'echo "tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'"\n'
602     txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
603 nsmirnov 1.4 txt += 'untar_status=$? \n'
604     txt += 'if [ $untar_status -ne 0 ]; then \n'
605 fanzago 1.34 txt += ' echo "SET_EXE 1 ==> ERROR Untarring .tgz file failed"\n'
606     txt += ' echo "JOB_EXIT_STATUS = $untar_status" \n'
607 gutsche 1.52 txt += ' echo "JobExitCode=$untar_status" | tee -a $RUNTIME_AREA/$repo\n'
608 spiga 1.46
609     ### OLI_DANIELE
610     txt += ' if [ $middleware == OSG ]; then \n'
611     txt += ' echo "Remove working directory: $WORKING_DIR"\n'
612     txt += ' cd $RUNTIME_AREA\n'
613     txt += ' /bin/rm -rf $WORKING_DIR\n'
614     txt += ' if [ -d $WORKING_DIR ] ;then\n'
615 gutsche 1.52 txt += ' echo "SET_EXE 50999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Untarring .tgz file failed"\n'
616     txt += ' echo "JOB_EXIT_STATUS = 50999"\n'
617     txt += ' echo "JobExitCode=50999" | tee -a $RUNTIME_AREA/$repo\n'
618     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
619 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
620     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
621     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
622     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
623 spiga 1.46 txt += ' fi\n'
624     txt += ' fi \n'
625     txt += ' \n'
626 gutsche 1.52 txt += ' exit 1 \n'
627 nsmirnov 1.4 txt += 'else \n'
628 fanzago 1.34 txt += ' echo "Successful untar" \n'
629 nsmirnov 1.4 txt += 'fi \n'
630     # TODO: what does this code do here ?
631     # SL check that lib/Linux__... is present
632 slacapra 1.9 txt += 'mkdir -p lib/${SCRAM_ARCH} \n'
633 nsmirnov 1.4 pass
634 fanzago 1.37 txt += 'eval `'+self.scram.commandName()+' runtime -sh |grep -v SCRAMRT_LSB_JOBNAME`'+'\n'
635 nsmirnov 1.4
636     return txt
637    
638     def wsRenameOutput(self, nj):
639     """
640     Returns part of a job script which renames the produced files.
641     """
642 slacapra 1.9
643 nsmirnov 1.4 txt = '\n'
644 gutsche 1.52 txt += '# directory content\n'
645     txt += 'ls \n'
646 fanzago 1.15 file_list = ''
647 slacapra 1.9 for fileWithSuffix in self.output_file:
648     output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
649 slacapra 1.48 file_list=file_list+output_file_num+' '
650 fanzago 1.26 txt += '\n'
651 spiga 1.17 txt += 'ls '+fileWithSuffix+'\n'
652 fanzago 1.18 txt += 'exe_result=$?\n'
653     txt += 'if [ $exe_result -ne 0 ] ; then\n'
654 fanzago 1.34 txt += ' echo "ERROR: No output file to manage"\n'
655     txt += ' echo "JOB_EXIT_STATUS = $exe_result"\n'
656 corvo 1.38 txt += ' echo "JobExitCode=60302" | tee -a $RUNTIME_AREA/$repo\n'
657 fanzago 1.34 txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
658 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
659     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
660     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
661     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
662    
663 spiga 1.46 ### OLI_DANIELE
664 gutsche 1.52 if common.scheduler.boss_scheduler_name == 'condor_g':
665     txt += ' if [ $middleware == OSG ]; then \n'
666     txt += ' echo "prepare dummy output file"\n'
667     txt += ' echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
668     txt += ' fi \n'
669 spiga 1.17 txt += 'else\n'
670 fanzago 1.34 txt += ' cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
671 fanzago 1.25 txt += 'fi\n'
672 spiga 1.46
673    
674 fanzago 1.40 txt += 'cd $RUNTIME_AREA\n'
675 fanzago 1.15 file_list=file_list[:-1]
676 slacapra 1.48 txt += 'file_list="'+file_list+'"\n'
677 spiga 1.46 ### OLI_DANIELE
678     txt += 'if [ $middleware == OSG ]; then\n'
679     txt += ' cd $RUNTIME_AREA\n'
680     txt += ' echo "Remove working directory: $WORKING_DIR"\n'
681     txt += ' /bin/rm -rf $WORKING_DIR\n'
682     txt += ' if [ -d $WORKING_DIR ] ;then\n'
683 gutsche 1.52 txt += ' echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
684     txt += ' echo "JOB_EXIT_STATUS = 60999"\n'
685     txt += ' echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
686     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
687 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
688     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
689     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
690     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
691 spiga 1.46 txt += ' fi\n'
692     txt += 'fi\n'
693     txt += '\n'
694    
695 nsmirnov 1.4 return txt
696    
697 nsmirnov 1.1 def executableName(self):
698 corvo 1.20 if self.scriptExe != '':
699     return "./" + os.path.basename(self.scriptExe)
700     else:
701     return self.executable
702 nsmirnov 1.1
703 slacapra 1.53 #DBSDLS-start
704     def DataDiscoveryAndLocation(self, cfg_params):
705    
706     fun = "Orca::DataDiscoveryAndLocation()"
707 nsmirnov 1.1
708 slacapra 1.53 ## Contact the DBS
709     try:
710     self.pubdata=DataDiscovery.DataDiscovery(self.owner,
711     self.dataset,
712     self.dataTiers,
713     cfg_params)
714     self.pubdata.fetchDBSInfo()
715 nsmirnov 1.1
716 slacapra 1.53 except DataDiscovery.NotExistingDatasetError, ex :
717     msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
718 nsmirnov 1.1 raise CrabException(msg)
719    
720 slacapra 1.53 except DataDiscovery.NoDataTierinProvenanceError, ex :
721     msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
722     raise CrabException(msg)
723     except DataDiscovery.DataDiscoveryError, ex:
724     msg = 'ERROR ***: failed Data Discovery in DBS %s'%ex.getErrorMessage()
725 nsmirnov 1.1 raise CrabException(msg)
726 nsmirnov 1.3
727 slacapra 1.53 ## get list of all required data in the form of dbs paths (dbs path = /dataset/datatier/owner)
728     self.DBSPaths=self.pubdata.getDBSPaths()
729     common.logger.message("Required data are : ")
730     for path in self.DBSPaths:
731     common.logger.message(" --> "+path )
732    
733     ## get max number of events
734     #common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
735     self.maxEvents=self.pubdata.getMaxEvents() ## self.maxEvents used in Creator.py
736     common.logger.message("\nThe number of available events is %s"%self.maxEvents)
737    
738     ## get fileblocks corresponding to the required data
739     fb=self.pubdata.getFileBlocks()
740    
741    
742     ## Contact the DLS and build a list of sites hosting the fileblocks
743     try:
744     dataloc=DataLocation.DataLocation(self.pubdata.getFileBlocks(),cfg_params)
745     dataloc.fetchDLSInfo()
746     except DataLocation.DataLocationError , ex:
747     msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
748     raise CrabException(msg)
749 nsmirnov 1.1
750    
751 slacapra 1.53 sites=dataloc.getSites()
752 nsmirnov 1.1
753 slacapra 1.53 if len(sites)==0:
754     msg = 'No sites hosting all the needed data! Exiting... '
755 nsmirnov 1.1 raise CrabException(msg)
756 slacapra 1.53 common.logger.message("List of Sites hosting the data : "+str(sites))
757     common.logger.debug(6, "List of Sites: "+str(sites))
758     common.analisys_common_info['sites']=sites ## used in SchedulerEdg.py in createSchScript
759     self.setParam_('TargetCE', ','.join(sites))
760    
761     return
762 nsmirnov 1.1
763 slacapra 1.53 #DBDDLS-stop
764 nsmirnov 1.1
765    
766     def nJobs(self):
767     # TODO: should not be here !
768     # JobType should have no internal knowledge about submitted jobs
769     # One possibility is to use len(common.job_list).
770     """ return the number of job to be created """
771 slacapra 1.6 return len(common.job_list)
772 nsmirnov 1.5
773     def prepareSteeringCards(self):
774     """
775     modify the orcarc card provided by the user,
776     writing a new card into share dir
777     """
778     infile = ''
779     try:
780     infile = open(self.orcarc_file,'r')
781     except:
782     self.orcarc_file = 'empty.orcarc'
783     cmd='touch '+self.orcarc_file
784 slacapra 1.9 runCommand(cmd)
785 nsmirnov 1.5 infile = open(self.orcarc_file,'r')
786    
787 slacapra 1.9 outfile = open(common.work_space.jobDir()+self.name()+'.orcarc', 'w')
788 nsmirnov 1.5
789     inline=infile.readlines()
790     ### remove from user card these lines ###
791 slacapra 1.53 wordRemove=['InputFileCatalogURL', 'InputCollections', 'FirstEvent', 'MaxEvents', 'TFileAdaptor']
792 slacapra 1.9 for line in inline:
793     word = string.strip(string.split(line,'=')[0])
794 slacapra 1.8
795 slacapra 1.9 if word not in wordRemove:
796     outfile.write(line)
797     else:
798     continue
799     pass
800    
801     outfile.write('\n\n##### The following cards have been created by CRAB: DO NOT TOUCH #####\n')
802     outfile.write('TFileAdaptor = true\n')
803    
804 corvo 1.45 outfile.write('MonRecAlisaBuilder=false\n')
805 slacapra 1.10
806 slacapra 1.9 outfile.write('InputCollections=/System/'+self.owner+'/'+self.dataset+'/'+self.dataset+'\n')
807 slacapra 1.8
808 nsmirnov 1.5 infile.close()
809     outfile.close()
810     return
811 nsmirnov 1.1
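The tail that prepareSteeringCards() appends to the new card in the share directory looks like the following (owner and dataset come from the ORCA cards; hypothetical values shown):

    #   ##### The following cards have been created by CRAB: DO NOT TOUCH #####
    #   TFileAdaptor = true
    #   MonRecAlisaBuilder=false
    #   InputCollections=/System/TestOwner/TestDataset/TestDataset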
812     def modifySteeringCards(self, nj):
813     """
814     Creates steering cards file modifying a template file
815     """
816     return
817    
818     def cardsBaseName(self):
819     """
820     Returns name of user orcarc card-file
821     """
822     return os.path.split (self.orcarc_file)[1]
823    
824 fanzago 1.27 ### content of input_sandbox ...
825 nsmirnov 1.1 def inputSandbox(self, nj):
826     """
827     Returns a list of filenames to be put in JDL input sandbox.
828     """
829     inp_box = []
830 corvo 1.13 # dict added to delete duplicate from input sandbox file list
831     seen = {}
832 slacapra 1.7 ## code
833 fanzago 1.12 if os.path.isfile(self.tgzNameWithPath):
834     inp_box.append(self.tgzNameWithPath)
835 slacapra 1.53
836     ##DBSDLS: no orcarc_CE and init_CE.sh produced on UI , thus not inserting them in inputSandbox
837     # ## orcarc
838     # for o in self.allOrcarcs:
839     # for f in o.fileList():
840     # if (f not in seen.keys()):
841     # inp_box.append(common.work_space.jobDir()+f)
842     # seen[f] = 1
843 slacapra 1.9
844 slacapra 1.7 ## config
845 nsmirnov 1.1 inp_box.append(common.job_list[nj].configFilename())
846 slacapra 1.7 ## additional input files
847 fanzago 1.26 #inp_box = inp_box + self.additional_inbox_files
848 nsmirnov 1.1 return inp_box
849    
850 fanzago 1.15 ### end of output_sandbox
851 nsmirnov 1.1 def outputSandbox(self, nj):
852     """
853     Returns a list of filenames to be put in JDL output sandbox.
854     """
855     out_box = []
856    
857 slacapra 1.9 stdout=common.job_list[nj].stdout()
858     stderr=common.job_list[nj].stderr()
859 fanzago 1.15 #out_box.append(stdout)
860     #out_box.append(stderr)
861 slacapra 1.9
862 slacapra 1.7 ## User Declared output files
863 slacapra 1.9 for out in self.output_file:
864 fanzago 1.14 n_out = nj + 1
865 fanzago 1.25 #FEDE
866     #out_box.append(self.version+'/'+self.numberFile_(out,str(n_out)))
867     out_box.append(self.numberFile_(out,str(n_out)))
868 nsmirnov 1.1 return out_box
869 slacapra 1.7
870 fanzago 1.32 def getRequirements(self):
871     """
872 slacapra 1.53 return job requirements to add to jdl files
873 fanzago 1.32 """
874     req = ''
875     if common.analisys_common_info['sites']:
876     if common.analisys_common_info['sw_version']:
877     req='Member("VO-cms-' + \
878     common.analisys_common_info['sw_version'] + \
879     '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
880     if len(common.analisys_common_info['sites'])>0:
881     req = req + ' && ('
882     for i in range(len(common.analisys_common_info['sites'])):
883     req = req + 'other.GlueCEInfoHostName == "' \
884     + common.analisys_common_info['sites'][i] + '"'
885     if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
886     req = req + ' || '
887     req = req + ')'
888     #print "req = ", req
889     return req
890    
891 slacapra 1.9 def numberFile_(self, file, txt):
892     """
893     append _'txt' before last extension of a file
894     """
895     p = string.split(file,".")
896     # take away last extension
897     name = p[0]
898     for x in p[1:-1]:
899     name=name+"."+x
900     # add "_txt"
901     if len(p)>1:
902     ext = p[len(p)-1]
903 fanzago 1.14 #result = name + '_' + str(txt) + "." + ext
904     result = name + '_' + txt + "." + ext
905 slacapra 1.9 else:
906 fanzago 1.14 #result = name + '_' + str(txt)
907     result = name + '_' + txt
908 slacapra 1.9
909     return result
910    
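Usage sketch for numberFile_() (hypothetical file names):

    #   numberFile_('histos.root', '$NJob')  ->  'histos_$NJob.root'
    #   numberFile_('out.aod.root', '3')     ->  'out.aod_3.root'   (only the last extension is kept apart)
    #   numberFile_('summary', '3')          ->  'summary_3'        (no extension)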
911    
912 slacapra 1.7 def stdOut(self):
913     return self.stdOut_
914    
915     def stdErr(self):
916     return self.stdErr_
917 spiga 1.46
918 corvo 1.36 # marco
919     def setParam_(self, param, value):
920     self._params[param] = value
921    
922     def getParams(self):
923     return self._params
924    
925     def setTaskid_(self):
926 corvo 1.44 self._taskId = self.cfg_params['taskId']
927 corvo 1.36
928     def getTaskid(self):
929     return self._taskId
930     # marco
931 spiga 1.46
932 gutsche 1.50 def configFilename(self):
933     """ return the config filename """
934     return self.name()+'.orcarc'
935    
936 spiga 1.46 ### OLI_DANIELE
937     def wsSetupCMSOSGEnvironment_(self):
938     """
939     Returns part of a job script which prepares
940     the execution environment and which is common for all CMS jobs.
941     """
942     txt = '\n'
943     txt += ' echo "### SETUP CMS OSG ENVIRONMENT ###"\n'
944     txt += ' if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
945     txt += ' # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
946     txt += ' source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
947     txt += ' elif [ -f $OSG_APP/cmssoft/cmsset_default.sh ] ;then\n'
948     txt += ' # Use $OSG_APP/cmssoft/cmsset_default.sh to setup cms software\n'
949     txt += ' source $OSG_APP/cmssoft/cmsset_default.sh '+self.version+'\n'
950     txt += ' else\n'
951 slacapra 1.53 txt += ' echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
952     txt += ' echo "JOB_EXIT_STATUS = 10020"\n'
953     txt += ' echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
954     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
955 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
956     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
957     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
958     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
959 slacapra 1.53 txt += ' exit 1\n'
960 spiga 1.46 txt += '\n'
961     txt += ' echo "Remove working directory: $WORKING_DIR"\n'
962     txt += ' cd $RUNTIME_AREA\n'
963     txt += ' /bin/rm -rf $WORKING_DIR\n'
964     txt += ' if [ -d $WORKING_DIR ] ;then\n'
965 gutsche 1.52 txt += ' echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
966     txt += ' echo "JOB_EXIT_STATUS = 10017"\n'
967     txt += ' echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
968     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
969 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
970     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
971     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
972     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
973 spiga 1.46 txt += ' fi\n'
974     txt += '\n'
975 slacapra 1.53 txt += ' exit 1\n'
976 spiga 1.46 txt += ' fi\n'
977     txt += '\n'
978     txt += ' echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
979     txt += ' echo " END SETUP CMS OSG ENVIRONMENT "\n'
980    
981     return txt
982    
983     ### OLI_DANIELE
984     def wsSetupCMSLCGEnvironment_(self):
985     """
986     Returns part of a job script which prepares
987     the execution environment and which is common for all CMS jobs.
988     """
989     txt = ' \n'
990     txt += ' echo " ### SETUP CMS LCG ENVIRONMENT ### "\n'
991     txt += ' if [ ! $VO_CMS_SW_DIR ] ;then\n'
992 slacapra 1.53 txt += ' echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
993     txt += ' echo "JOB_EXIT_STATUS = 10031" \n'
994     txt += ' echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
995     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
996 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
997     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
998     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
999     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1000 slacapra 1.53 txt += ' exit 1\n'
1001 spiga 1.46 txt += ' else\n'
1002 slacapra 1.53 txt += ' echo "Sourcing environment... "\n'
1003     txt += ' if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1004     txt += ' echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1005     txt += ' echo "JOB_EXIT_STATUS = 10020"\n'
1006     txt += ' echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1007     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1008 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
1009     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1010     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
1011     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1012 slacapra 1.53 txt += ' exit 1\n'
1013     txt += ' fi\n'
1014     txt += ' echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1015     txt += ' source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1016     txt += ' result=$?\n'
1017     txt += ' if [ $result -ne 0 ]; then\n'
1018     txt += ' echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1019     txt += ' echo "JOB_EXIT_STATUS = 10032"\n'
1020     txt += ' echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1021     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1022 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
1023     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1024     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
1025     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1026 slacapra 1.53 txt += ' exit 1\n'
1027     txt += ' fi\n'
1028 spiga 1.46 txt += ' fi\n'
1029     txt += ' \n'
1030     txt += ' string=`cat /etc/redhat-release`\n'
1031     txt += ' echo $string\n'
1032     txt += ' if [[ $string = *alhalla* ]]; then\n'
1033     txt += ' echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1034     txt += ' elif [[ $string = *Enterprise* ]] || [[ $string = *cientific* ]]; then\n'
1035     txt += ' export SCRAM_ARCH=slc3_ia32_gcc323\n'
1036     txt += ' echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1037     txt += ' else\n'
1038 slacapra 1.53 txt += ' echo "SET_CMS_ENV 10033 ==> ERROR OS unknown, LCG environment not initialized"\n'
1039     txt += ' echo "JOB_EXIT_STATUS = 10033"\n'
1040     txt += ' echo "JobExitCode=10033" | tee -a $RUNTIME_AREA/$repo\n'
1041     txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
1042 gutsche 1.55 txt += ' rm -f $RUNTIME_AREA/$repo \n'
1043     txt += ' echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1044     txt += ' echo "SyncGridJobId=`echo $SyncGridJobId`" | tee -a $RUNTIME_AREA/$repo \n'
1045     txt += ' echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1046 slacapra 1.53 txt += ' exit 1\n'
1047 spiga 1.46 txt += ' fi\n'
1048     txt += ' echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1049     txt += ' echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
1050     return txt
1051