 
 from SchedulerLocal import SchedulerLocal
 from crab_exceptions import CrabException
-#from crab_logger import Logger
 
 import common
 import os
-
-# Naming convention: Methods starting with 'ws' provide the corresponding part of the job script
-# ('ws' stands for 'write script').
+import socket
+import sha
 
 class SchedulerCondor(SchedulerLocal) :
     """
     Class to implement the vanilla (local) Condor scheduler
+    Naming convention: Methods starting with 'ws' provide
+    the corresponding part of the job script
+    ('ws' stands for 'write script').
     """
 
     def __init__(self):
[...]
         """
 
         SchedulerLocal.configure(self, cfg_params)
-        self.environment_unique_identifier ='${HOSTNAME}_${CONDOR_ID}_' + common._db.queryTask('name')
+        taskHash = sha.new(common._db.queryTask('name')).hexdigest()
+        self.environment_unique_identifier = "https://" + socket.gethostname() + \
+            '/' + taskHash + "/${NJob}"
 
         try:
             tmp = cfg_params['CMSSW.datasetpath']
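The new identifier drops the Condor-specific ${HOSTNAME}_${CONDOR_ID} prefix in favour of a URL-style string derived from the task name. A minimal sketch of its shape, outside the patch; the task name below is a made-up placeholder:

    import sha, socket

    task_name = 'crab_mytask'                    # placeholder; really common._db.queryTask('name')
    task_hash = sha.new(task_name).hexdigest()   # 40-character SHA-1 hex digest
    uid = "https://" + socket.gethostname() + '/' + task_hash + "/${NJob}"
    # e.g. https://<submit host>/<40-char hash>/${NJob}; ${NJob} is expanded
    # to the job number inside the generated job script.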
[...]
             msg = "Error: datasetpath not defined "
             raise CrabException(msg)
 
+        self.return_data = cfg_params.get('USER.return_data', 0)
+        self.copy_data = cfg_params.get("USER.copy_data", 0)
+
+        if ( int(self.return_data) == 0 and int(self.copy_data) == 0 ):
+            msg = 'Error: return_data and copy_data cannot be set both to 0\n'
+            msg = msg + 'Please modify your crab.cfg file\n'
+            raise CrabException(msg)
+
+        if ( int(self.return_data) == 1 and int(self.copy_data) == 1 ):
+            msg = 'Error: return_data and copy_data cannot be set both to 1\n'
+            msg = msg + 'Please modify your crab.cfg file\n'
+            raise CrabException(msg)
+
+        if ( int(self.copy_data) == 0 and int(self.publish_data) == 1 ):
+            msg = 'Warning: publish_data = 1 must be used with copy_data = 1\n'
+            msg = msg + 'Please modify copy_data value in your crab.cfg file\n'
+            common.logger.message(msg)
+            raise CrabException(msg)
+
+        if int(self.copy_data) == 1:
+            self.SE = cfg_params.get('USER.storage_element', None)
+            if not self.SE:
+                msg = "Error. The [USER] section has no 'storage_element'"
+                common.logger.message(msg)
+                raise CrabException(msg)
+
+        self.proxyValid = 0
+        self.dontCheckProxy = int(cfg_params.get("EDG.dont_check_proxy",0))
+        self.proxyServer = cfg_params.get("EDG.proxy_server",'myproxy.cern.ch')
+        common.logger.debug(5,'Setting myproxy server to ' + self.proxyServer)
+
+        self.group = cfg_params.get("EDG.group", None)
+        self.role = cfg_params.get("EDG.role", None)
+        self.VO = cfg_params.get('EDG.virtual_organization', 'cms')
+
+        self.checkProxy()
+        self.role = None
+
         return
 
 
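For reference, the new checks expect [USER]/[EDG] settings of the kind sketched below; the values are placeholders and not part of the patch (copy_data requires a storage_element, and return_data/copy_data must not both be 0 or both be 1):

    cfg_params = {
        'CMSSW.datasetpath'       : '/Primary/Processed/TIER',   # placeholder dataset
        'USER.return_data'        : 0,
        'USER.copy_data'          : 1,
        'USER.storage_element'    : 'srm.example.org',           # placeholder SE
        'EDG.virtual_organization': 'cms',
    }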
[...]
         """
 
         tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp')
-        params = {'tmpDir':tmpDir}
+        tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp')
+        jobDir = common.work_space.jobDir()
+        params = {'tmpDir':tmpDir,
+                  'jobDir':jobDir}
         return params
 
 
[...]
         Check the compatibility of available resources
         """
 
-        if self.selectNoInput:
-            return [True]
-        else:
-            return SchedulerLocal.listMatch(self, seList, full)
+        return [True]
 
 
     def decodeLogInfo(self, fileName):
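Note that listMatch now reports a match unconditionally: the previous fallback to SchedulerLocal.listMatch for jobs with input data is dropped, so resource matching is effectively skipped for this scheduler.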
[...]
         return reason
 
 
+    def wsCopyOutput(self):
+        """
+        Write a CopyResults part of a job script, e.g.
+        to copy produced output into a storage element.
+        """
+        txt = self.wsCopyOutput_comm()
+        return txt
+
+
     def wsExitFunc(self):
         """
         Returns the part of the job script which runs prior to exit
[...]
         txt += self.wsExitFunc_common()
 
         txt += '    tar zcvf ${out_files}.tgz ${final_list}\n'
-        txt += '    cp ${out_files}.tgz $ORIG_WD/\n'
-        txt += '    cp crab_fjr_$NJob.xml $ORIG_WD/\n'
+        txt += '    cp ${out_files}.tgz $_CONDOR_SCRATCH_DIR/\n'
+        txt += '    cp crab_fjr_$NJob.xml $_CONDOR_SCRATCH_DIR/\n'
 
         txt += '    exit $job_exit_code\n'
         txt += '}\n'
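This pairs with the prologue change in the next hunk: with ORIG_WD no longer recorded, the exit function stages ${out_files}.tgz and the framework job report directly into $_CONDOR_SCRATCH_DIR instead of copying them back to the original working directory.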
[...]
         txt += 'printenv | sort\n'
 
         txt += 'middleware='+self.name()+' \n'
+        txt += 'if [ -e /opt/d-cache/srm/bin ]; then\n'
+        txt += '    export PATH=${PATH}:/opt/d-cache/srm/bin\n'
+        txt += 'fi\n'
+
         txt += """
 if [ $_CONDOR_SCRATCH_DIR ] && [ -d $_CONDOR_SCRATCH_DIR ]; then
-    ORIG_WD=`pwd`
-    echo "Change from $ORIG_WD to Condor scratch directory: $_CONDOR_SCRATCH_DIR"
+    echo "cd to Condor scratch directory: $_CONDOR_SCRATCH_DIR"
     if [ -e ../default.tgz ] ;then
         echo "Found ISB in parent directory (Local Condor)"
         cp ../default.tgz $_CONDOR_SCRATCH_DIR