+"""
+Implements the vanilla (local) Condor scheduler
+"""
+
__revision__ = "$Id$"
__version__ = "$Revision$"

-from Scheduler import Scheduler
-from SchedulerLocal import SchedulerLocal
-from crab_exceptions import *
-from crab_util import *
-from crab_logger import Logger
-import common
+from SchedulerLocal import SchedulerLocal
+from crab_exceptions import CrabException
+from Downloader import Downloader   # needed for the myproxy endpoint lookup in configure()

+import common
import os
+import socket

-# Naming convention:
-# methods starting with 'ws' are responsible to provide
-# corresponding part of the job script ('ws' stands for 'write script').
+# FUTURE: for python 2.4 & 2.6
+try:
+    from hashlib import sha1
+except:
+    from sha import sha as sha1

class SchedulerCondor(SchedulerLocal) :
-
-    def __init__(self):
-        Scheduler.__init__(self,"CONDOR")
-        return
-
-
-    def configure(self, cfg_params):
-        SchedulerLocal.configure(self, cfg_params)
-        self.environment_unique_identifier ='${HOSTNAME}_${CONDOR_ID}_' + common._db.queryTask('name')
-        return
-
-
-    def sched_parameter(self,i,task):
-        """
-        Return scheduler-specific parameters
-        """
-        index = int(common._db.nJobs()) - 1
-        sched_param= ''
-
-        for i in range(index):
-            pass
-
-        return sched_param
-
-
-    def realSchedParams(self,cfg_params):
-        """
-        Return dictionary with specific parameters, to use
-        with real scheduler
    """
+    Class to implement the vanilla (local) Condor scheduler
+    Naming convention: Methods starting with 'ws' provide
+    the corresponding part of the job script
+    ('ws' stands for 'write script').
+    """
+
+    def __init__(self):
+        SchedulerLocal.__init__(self,"CONDOR")
+        self.datasetPath = None
+        self.selectNoInput = None
+
+        self.environment_unique_identifier = None
+        return
+
+
+    def configure(self, cfg_params):
+        """
+        Configure the scheduler with the config settings from the user
+        """
+
+        SchedulerLocal.configure(self, cfg_params)
+
+        self.proxyValid=0
+        self.dontCheckProxy=int(cfg_params.get("GRID.dont_check_proxy",0))
+        self.space_token = cfg_params.get("USER.space_token",None)
+        try:
+            self.proxyServer = Downloader("http://cmsdoc.cern.ch/cms/LCG/crab/config/").config("myproxy_server.conf")
+            self.proxyServer = self.proxyServer.strip()
+            if self.proxyServer is None:
+                raise CrabException("myproxy_server.conf retrieved but empty")
+        except Exception, e:
+            common.logger.info("Problem setting myproxy server endpoint: using myproxy.cern.ch")
+            common.logger.debug(e)
+            self.proxyServer= 'myproxy.cern.ch'
+        self.group = cfg_params.get("GRID.group", None)
+        self.role = cfg_params.get("GRID.role", None)
+        self.VO = cfg_params.get('GRID.virtual_organization','cms')
+
+        try:
+            tmp = cfg_params['CMSSW.datasetpath']
+            if tmp.lower() == 'none':
+                self.datasetPath = None
+                self.selectNoInput = 1
+            else:
+                self.datasetPath = tmp
+                self.selectNoInput = 0
+        except KeyError:
+            msg = "Error: datasetpath not defined "
+            raise CrabException(msg)
+
+        self.checkProxy()
+
+        return
+
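
For reference, a minimal sketch of the configuration keys this configure() reads, with hypothetical values (the key names are the ones queried above; setting CMSSW.datasetpath to 'none' flips selectNoInput to 1):

    # Illustrative only: hypothetical values for the keys queried in configure()
    cfg_params = {
        'CMSSW.datasetpath'         : 'none',   # or a real dataset path
        'GRID.dont_check_proxy'     : '0',
        'GRID.group'                : None,
        'GRID.role'                 : None,
        'GRID.virtual_organization' : 'cms',
        'USER.space_token'          : None,
    }
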
+    def envUniqueID(self):
+        taskHash = sha1(common._db.queryTask('name')).hexdigest()
+        id = "https://" + socket.gethostname() + '/' + taskHash + "/${NJob}"
+        return id
+
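
As an aside (not part of the change), the identifier built here is a URL-style string derived from the task name, with the job number left as a ${NJob} placeholder for the job wrapper to substitute. A standalone sketch with a hypothetical task name, following the Python 2 usage of the file:

    # Illustrative only: mirrors envUniqueID() for a hypothetical task name
    import socket
    from hashlib import sha1

    taskHash = sha1('crab_0_example').hexdigest()
    print("https://" + socket.gethostname() + '/' + taskHash + "/${NJob}")
    # -> https://<submit host>/<40-char sha1 of the task name>/${NJob}
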
+    def sched_parameter(self, i, task):
+        """
+        Return scheduler-specific parameters
+        """
+        req = ''
+        if self.EDG_addJdlParam:
+            if self.EDG_addJdlParam[-1] == '':
+                self.EDG_addJdlParam = self.EDG_addJdlParam[:-1]
+            for p in self.EDG_addJdlParam:
+                req += p.strip()+';\n'
+
+        return req
+
+
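
A small illustration (not part of the change) of how the extra JDL parameters are folded into the returned string; EDG_addJdlParam is assumed to be a list of strings collected by the base scheduler from the user configuration, shown here with hypothetical entries:

    # Illustrative only: hypothetical extra JDL/ClassAd lines
    EDG_addJdlParam = ['+JobPrio = 10', 'request_memory = 2000', '']
    if EDG_addJdlParam[-1] == '':
        EDG_addJdlParam = EDG_addJdlParam[:-1]   # drop a trailing empty entry
    req = ''
    for p in EDG_addJdlParam:
        req += p.strip() + ';\n'
    # req == '+JobPrio = 10;\nrequest_memory = 2000;\n'
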
+    def realSchedParams(self, cfg_params):
+        """
+        Return dictionary with specific parameters, to use with real scheduler
+        """
+
+        tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp')
+        jobDir = common.work_space.jobDir()
+        params = {'tmpDir':tmpDir,
+                  'jobDir':jobDir}
+        return params
+
+
+    def listMatch(self, seList, full):
+        """
+        Check the compatibility of available resources
+        """
+
+        return [True]
+
+
+    def decodeLogInfo(self, fileName):
+        """
+        Parse logging info file and return main info
+        """
+
+        import CondorGLoggingInfo
+        loggingInfo = CondorGLoggingInfo.CondorGLoggingInfo()
+        reason = loggingInfo.decodeReason(fileName)
+        return reason
+
+
+    def wsCopyOutput(self):
+        """
+        Write a CopyResults part of a job script, e.g.
+        to copy produced output into a storage element.
+        """
+        txt = self.wsCopyOutput_comm()
+        return txt
+
+
+    def wsExitFunc(self):
+        """
+        Returns the part of the job script which runs prior to exit
+        """
+
+        txt = '\n'
+        txt += '#\n'
+        txt += '# EXECUTE THIS FUNCTION BEFORE EXIT \n'
+        txt += '#\n\n'
+
+        txt += 'func_exit() { \n'
+        txt += self.wsExitFunc_common()
+
+        txt += ' tar zcvf ${out_files}.tgz ${final_list}\n'
+        txt += ' cp ${out_files}.tgz $_CONDOR_SCRATCH_DIR/\n'
+        txt += ' cp crab_fjr_$NJob.xml $_CONDOR_SCRATCH_DIR/\n'
+
+        txt += ' exit $job_exit_code\n'
+        txt += '}\n'
+
+        return txt
+
+    def wsInitialEnvironment(self):
+        """
+        Returns part of a job script which does scheduler-specific work.
+        """
+
+        txt = '\n# Written by SchedulerCondor::wsInitialEnvironment\n'
+        txt += 'echo "Beginning environment"\n'
+        txt += 'printenv | sort\n'
+
+        txt += 'middleware='+self.name()+' \n'
+        txt += 'if [ -e /opt/d-cache/srm/bin ]; then\n'
+        txt += ' export PATH=${PATH}:/opt/d-cache/srm/bin\n'
+        txt += 'fi\n'

-        tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp')
-        params = {'tmpDir':tmpDir}
-        return params
-
-
-    def decodeLogInfo(self, file):
-        """
-        Parse logging info file and return main info
-        """
-        import CondorGLoggingInfo
-        loggingInfo = CondorGLoggingInfo.CondorGLoggingInfo()
-        reason = loggingInfo.decodeReason(file)
-        return reason
-
-
-    def wsExitFunc(self):
-        """
-        """
-        txt = '\n'
-        txt += '#\n'
-        txt += '# EXECUTE THIS FUNCTION BEFORE EXIT \n'
-        txt += '#\n\n'
-
-        txt += 'func_exit() { \n'
-        txt += ' if [ $PYTHONPATH ]; then \n'
-        txt += ' update_fjr\n'
-        txt += ' fi\n'
-        txt += ' for file in $filesToCheck ; do\n'
-        txt += ' if [ -e $file ]; then\n'
-        txt += ' echo "tarring file $file in $out_files"\n'
-        txt += ' else\n'
-        txt += ' echo "WARNING: output file $file not found!"\n'
-        txt += ' fi\n'
-        txt += ' done\n'
-        txt += ' final_list=$filesToCheck\n'
-        txt += ' echo "JOB_EXIT_STATUS = $job_exit_code"\n'
-        txt += ' echo "JobExitCode=$job_exit_code" >> $RUNTIME_AREA/$repo\n'
-        txt += ' dumpStatus $RUNTIME_AREA/$repo\n'
-        txt += ' tar zcvf ${out_files}.tgz ${final_list}\n'
-        txt += ' cp ${out_files}.tgz $ORIG_WD/\n'
-        txt += ' cp crab_fjr_$NJob.xml $ORIG_WD/\n'
-
-        txt += ' exit $job_exit_code\n'
-        txt += '}\n'
-
-        return txt
-
-    def wsInitialEnvironment(self):
-        """
-        Returns part of a job script which does scheduler-specific work.
-        """
-
-        txt = '\n# Written by SchedulerCondor::wsInitialEnvironment\n'
-        txt += 'echo "Beginning environment"\n'
-        txt += 'printenv | sort\n'
-
-        txt += 'middleware='+self.name()+' \n'
-        txt += """
+        txt += """
if [ $_CONDOR_SCRATCH_DIR ] && [ -d $_CONDOR_SCRATCH_DIR ]; then
-    ORIG_WD=`pwd`
-    echo "Change from $ORIG_WD to Condor scratch directory: $_CONDOR_SCRATCH_DIR"
+    echo "cd to Condor scratch directory: $_CONDOR_SCRATCH_DIR"
    if [ -e ../default.tgz ] ;then
      echo "Found ISB in parent directory (Local Condor)"
      cp ../default.tgz $_CONDOR_SCRATCH_DIR
    [... unchanged lines omitted in the diff ...]
fi
"""

-        return txt
+        return txt
+
+
+    def sched_fix_parameter(self):
+        """
+        Returns string with requirements and scheduler-specific parameters
+        """
+
+        if self.EDG_requirements:
+            req = self.EDG_requirements
+            taskReq = {'commonRequirements':req}
+            common._db.updateTask_(taskReq)
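
Finally, a hedged usage sketch (not part of the change) of how this scheduler class might be driven once a CRAB session is fully initialised; it assumes common.logger, common.work_space and common._db have already been set up by the CRAB core, and uses hypothetical configuration values:

    # Illustrative only: assumes an initialised CRAB session (common.*) and a usable proxy
    from SchedulerCondor import SchedulerCondor

    cfg_params = {'CMSSW.datasetpath': 'none',
                  'GRID.dont_check_proxy': '1'}

    sched = SchedulerCondor()
    sched.configure(cfg_params)               # reads CMSSW/GRID/USER settings, checks the proxy
    print(sched.realSchedParams(cfg_params))  # {'tmpDir': ..., 'jobDir': ...}
    print(sched.envUniqueID())                # https://<host>/<task hash>/${NJob}
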