+"""
+Implements the vanilla (local) Condor scheduler
+"""
+
 __revision__ = "$Id$"
 __version__ = "$Revision$"

-from Scheduler import Scheduler
-from SchedulerLocal import SchedulerLocal
-from crab_exceptions import *
-from crab_util import *
-from crab_logger import Logger
-import common
+from SchedulerLocal import SchedulerLocal
+from crab_exceptions import CrabException
+#from crab_logger import Logger

+import common
 import os

-# Naming convention:
-# methods starting with 'ws' are responsible to provide
-# corresponding part of the job script ('ws' stands for 'write script').
+# Naming convention: Methods starting with 'ws' provide the corresponding part of the job script
+# ('ws' stands for 'write script').

 class SchedulerCondor(SchedulerLocal) :
+    """
+    Class to implement the vanilla (local) Condor scheduler
+    """

-    def __init__(self):
-        Scheduler.__init__(self,"CONDOR")
-        return
-
-
-    def configure(self, cfg_params):
-        SchedulerLocal.configure(self, cfg_params)
-        self.environment_unique_identifier ='${HOSTNAME}_${CONDOR_ID}_' + common._db.queryTask('name')
-
-        try:
-            tmp = cfg_params['CMSSW.datasetpath']
-            if string.lower(tmp)=='none':
-                self.datasetPath = None
-                self.selectNoInput = 1
-            else:
-                self.datasetPath = tmp
-                self.selectNoInput = 0
-        except KeyError:
-            msg = "Error: datasetpath not defined "
-            raise CrabException(msg)
+    def __init__(self):
+        SchedulerLocal.__init__(self,"CONDOR")
+        self.datasetPath = None
+        self.selectNoInput = None
+        self.environment_unique_identifier = None
+        return

-        return

+    def configure(self, cfg_params):
+        """
+        Configure the scheduler with the config settings from the user
+        """

-    def sched_parameter(self,i,task):
-        """
-        Return scheduler-specific parameters
-        """
-        index = int(common._db.nJobs()) - 1
-        sched_param= ''
+        SchedulerLocal.configure(self, cfg_params)
+        self.environment_unique_identifier ='${HOSTNAME}_${CONDOR_ID}_' + common._db.queryTask('name')

-        for i in range(index):
-            pass
+        try:
+            tmp = cfg_params['CMSSW.datasetpath']
+            if tmp.lower() == 'none':
+                self.datasetPath = None
+                self.selectNoInput = 1
+            else:
+                self.datasetPath = tmp
+                self.selectNoInput = 0
+        except KeyError:
+            msg = "Error: datasetpath not defined "
+            raise CrabException(msg)

-        return sched_param
+        return

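As an aside, the datasetpath handling added to configure() can be sketched in isolation. The cfg_params mapping and the CrabException class below are stand-ins for the real CRAB objects and are assumed only for illustration:

# Minimal sketch of the CMSSW.datasetpath handling added to configure() above.
# CrabException and cfg_params are stand-ins, not the real CRAB objects.

class CrabException(Exception):
    pass

def parse_datasetpath(cfg_params):
    """Return (datasetPath, selectNoInput) from a CRAB-style config mapping."""
    try:
        tmp = cfg_params['CMSSW.datasetpath']
    except KeyError:
        raise CrabException("Error: datasetpath not defined ")
    if tmp.lower() == 'none':
        return None, 1      # no input dataset; listMatch() can short-circuit
    return tmp, 0           # real dataset path; normal resource matching applies

# parse_datasetpath({'CMSSW.datasetpath': 'none'})        -> (None, 1)
# parse_datasetpath({'CMSSW.datasetpath': '/A/B/RECO'})   -> ('/A/B/RECO', 0)
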
-    def realSchedParams(self,cfg_params):
-        """
-        Return dictionary with specific parameters, to use
-        with real scheduler
-        """
+    def sched_parameter(self, i, task):
+        """
+        Return scheduler-specific parameters
+        """

-        tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp')
-        params = {'tmpDir':tmpDir}
-        return params
+        index = int(common._db.nJobs()) - 1
+        schedParam = ''

+        for i in range(index):
+            pass

-    def listMatch(self, seList, full, onlyOSG=True):
-        """
-        Check the compatibility of available resources
-        """
+        return schedParam

-        # May have problems with onlyOSG being false, probably due to lengths of lists and command line.
-        # Either re-write osg_bdii.py with a proper ldap library or break the queries apart

-        if self.selectNoInput:
-            return [True]
-        else:
-            return SchedulerLocal.listMatch(self, seList, full)
+    def realSchedParams(self, cfg_params):
+        """
+        Return dictionary with specific parameters, to use with real scheduler
+        """

-    def decodeLogInfo(self, file):
-        """
-        Parse logging info file and return main info
-        """
-        import CondorGLoggingInfo
-        loggingInfo = CondorGLoggingInfo.CondorGLoggingInfo()
-        reason = loggingInfo.decodeReason(file)
-        return reason
+        tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp')
+        params = {'tmpDir':tmpDir}
+        return params

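For illustration, the reworked realSchedParams() reduces to building a single dictionary; here is a minimal sketch using a made-up share directory (the real value comes from common.work_space.shareDir()):

import os

# Hypothetical share directory, used only for illustration; CRAB derives the
# real path from common.work_space.shareDir().
share_dir = '/home/user/crab_0_110101_120000/share'

params = {'tmpDir': os.path.join(share_dir, '.condor_temp')}
print(params)  # {'tmpDir': '/home/user/crab_0_110101_120000/share/.condor_temp'}
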
-    def wsExitFunc(self):
-        """
-        """
-        txt = '\n'
-        txt += '#\n'
-        txt += '# EXECUTE THIS FUNCTION BEFORE EXIT \n'
-        txt += '#\n\n'
+    def listMatch(self, seList, full):
+        """
+        Check the compatibility of available resources
+        """

-        txt += 'func_exit() { \n'
-        txt += self.wsExitFunc_common()
+        if self.selectNoInput:
+            return [True]
+        else:
+            return SchedulerLocal.listMatch(self, seList, full)

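A standalone sketch of the short-circuit that the new listMatch() implements; SchedulerLocal is replaced by a stub here, so this shows the control flow only, not the real CRAB class hierarchy:

# Sketch of the listMatch() short-circuit; the stub below is not the real SchedulerLocal.

class SchedulerLocalStub(object):
    def listMatch(self, seList, full):
        # The real scheduler would check the available local resources here.
        return ['slot_a', 'slot_b']

class CondorMatchSketch(SchedulerLocalStub):
    def __init__(self, selectNoInput):
        self.selectNoInput = selectNoInput

    def listMatch(self, seList, full):
        if self.selectNoInput:
            return [True]   # jobs without input data match trivially
        else:
            return SchedulerLocalStub.listMatch(self, seList, full)

print(CondorMatchSketch(1).listMatch([], False))  # [True]
print(CondorMatchSketch(0).listMatch([], False))  # ['slot_a', 'slot_b']
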
-        txt += ' tar zcvf ${out_files}.tgz ${final_list}\n'
-        txt += ' cp ${out_files}.tgz $ORIG_WD/\n'
-        txt += ' cp crab_fjr_$NJob.xml $ORIG_WD/\n'

-        txt += ' exit $job_exit_code\n'
-        txt += '}\n'
+    def decodeLogInfo(self, fileName):
+        """
+        Parse logging info file and return main info
+        """

-        return txt
+        import CondorGLoggingInfo
+        loggingInfo = CondorGLoggingInfo.CondorGLoggingInfo()
+        reason = loggingInfo.decodeReason(fileName)
+        return reason

-    def wsInitialEnvironment(self):
-        """
-        Returns part of a job script which does scheduler-specific work.
-        """

-        txt = '\n# Written by SchedulerCondor::wsInitialEnvironment\n'
-        txt += 'echo "Beginning environment"\n'
-        txt += 'printenv | sort\n'
+    def wsExitFunc(self):
+        """
+        Returns the part of the job script which runs prior to exit
+        """
+
+        txt = '\n'
+        txt += '#\n'
+        txt += '# EXECUTE THIS FUNCTION BEFORE EXIT \n'
+        txt += '#\n\n'
+
+        txt += 'func_exit() { \n'
+        txt += self.wsExitFunc_common()
+
+        txt += ' tar zcvf ${out_files}.tgz ${final_list}\n'
+        txt += ' cp ${out_files}.tgz $ORIG_WD/\n'
+        txt += ' cp crab_fjr_$NJob.xml $ORIG_WD/\n'
+
+        txt += ' exit $job_exit_code\n'
+        txt += '}\n'
+
+        return txt
+
+    def wsInitialEnvironment(self):
+        """
+        Returns part of a job script which does scheduler-specific work.
+        """
+
+        txt = '\n# Written by SchedulerCondor::wsInitialEnvironment\n'
+        txt += 'echo "Beginning environment"\n'
+        txt += 'printenv | sort\n'

-        txt += 'middleware='+self.name()+' \n'
-        txt += """
+        txt += 'middleware='+self.name()+' \n'
+        txt += """
 if [ $_CONDOR_SCRATCH_DIR ] && [ -d $_CONDOR_SCRATCH_DIR ]; then
     ORIG_WD=`pwd`
     echo "Change from $ORIG_WD to Condor scratch directory: $_CONDOR_SCRATCH_DIR"
[...]
 fi
 """

-        return txt
+        return txt
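
Pieced together, the fragments assembled by the new wsExitFunc() correspond roughly to the shell function below; the lines contributed by self.wsExitFunc_common() are not part of this diff, so a placeholder comment marks them:

# Approximate rendering of the text wsExitFunc() returns, assuming only the
# fragments visible in this diff; the wsExitFunc_common() output is elided.
expected_exit_fragment = """
#
# EXECUTE THIS FUNCTION BEFORE EXIT
#

func_exit() {
 # ... lines produced by self.wsExitFunc_common() ...
 tar zcvf ${out_files}.tgz ${final_list}
 cp ${out_files}.tgz $ORIG_WD/
 cp crab_fjr_$NJob.xml $ORIG_WD/
 exit $job_exit_code
}
"""
print(expected_exit_fragment)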