10 |
|
|
11 |
|
import common |
12 |
|
import os |
13 |
< |
|
14 |
< |
# Naming convention: Methods starting with 'ws' provide the corresponding part of the job script |
15 |
< |
# ('ws' stands for 'write script'). |
13 |
> |
import socket |
14 |
> |
import sha |
15 |
|
|
16 |
|
class SchedulerCondor(SchedulerLocal) : |
17 |
|
""" |
18 |
|
Class to implement the vanilla (local) Condor scheduler |
19 |
+ |
Naming convention: Methods starting with 'ws' provide |
20 |
+ |
the corresponding part of the job script |
21 |
+ |
('ws' stands for 'write script'). |
22 |
|
""" |
23 |
|
|
24 |
|
def __init__(self):
    """Set up the vanilla-Condor scheduler with its default state."""
    SchedulerLocal.__init__(self, "CONDOR")
    # No dataset or input-selection mode chosen yet; filled in by configure().
    self.datasetPath = None
    self.selectNoInput = None
    # Output-handling flags, later overridden from the USER cfg section.
    self.return_data = 0
    self.copy_data = 0
    # Unique per-task environment tag, assigned during configure().
    self.environment_unique_identifier = None
33 |
|
|
38 |
|
""" |
39 |
|
|
40 |
|
SchedulerLocal.configure(self, cfg_params) |
36 |
– |
self.environment_unique_identifier ='${HOSTNAME}_${CONDOR_ID}_' + common._db.queryTask('name') |
41 |
|
|
42 |
|
try: |
43 |
|
tmp = cfg_params['CMSSW.datasetpath'] |
51 |
|
msg = "Error: datasetpath not defined " |
52 |
|
raise CrabException(msg) |
53 |
|
|
54 |
+ |
self.return_data = cfg_params.get('USER.return_data', 0) |
55 |
+ |
self.copy_data = cfg_params.get("USER.copy_data", 0) |
56 |
+ |
|
57 |
+ |
if int(self.copy_data) == 1: |
58 |
+ |
|
59 |
+ |
self.proxyValid = 0 |
60 |
+ |
self.dontCheckProxy = int(cfg_params.get("EDG.dont_check_proxy", 0)) |
61 |
+ |
self.proxyServer = cfg_params.get("EDG.proxy_server", 'myproxy.cern.ch') |
62 |
+ |
common.logger.debug(5,'Setting myproxy server to ' + self.proxyServer) |
63 |
+ |
|
64 |
+ |
self.group = cfg_params.get("EDG.group", None) |
65 |
+ |
self.role = cfg_params.get("EDG.role", None) |
66 |
+ |
self.VO = cfg_params.get('EDG.virtual_organization', 'cms') |
67 |
+ |
|
68 |
+ |
self.checkProxy() |
69 |
+ |
|
70 |
+ |
self.role = None |
71 |
+ |
|
72 |
|
return |
73 |
|
|
74 |
+ |
def envUniqueID(self):
    """
    Build the unique identifier for this task's job environment.

    Returns a URL-style string of the form
    ``https://<submit-host>/<sha1(task-name)>/${NJob}``; the ``${NJob}``
    placeholder is expanded later inside the generated job script.
    """
    # Local import: 'hashlib' replaces the deprecated 'sha' module
    # (deprecated since Python 2.5, removed in Python 3).
    import hashlib
    taskHash = hashlib.sha1(common._db.queryTask('name')).hexdigest()
    # Avoid shadowing the builtin 'id'.
    uid = "https://" + socket.gethostname() + '/' + taskHash + "/${NJob}"
    return uid
78 |
|
|
79 |
|
def sched_parameter(self, i, task): |
80 |
|
""" |
96 |
|
""" |
97 |
|
|
98 |
|
tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp') |
99 |
< |
params = {'tmpDir':tmpDir} |
99 |
> |
tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp') |
100 |
> |
jobDir = common.work_space.jobDir() |
101 |
> |
params = {'tmpDir':tmpDir, |
102 |
> |
'jobDir':jobDir} |
103 |
|
return params |
104 |
|
|
105 |
|
|
108 |
|
Check the compatibility of available resources |
109 |
|
""" |
110 |
|
|
111 |
< |
if self.selectNoInput: |
83 |
< |
return [True] |
84 |
< |
else: |
85 |
< |
return SchedulerLocal.listMatch(self, seList, full) |
111 |
> |
return [True] |
112 |
|
|
113 |
|
|
114 |
|
def decodeLogInfo(self, fileName): |
122 |
|
return reason |
123 |
|
|
124 |
|
|
125 |
+ |
def wsCopyOutput(self):
    """
    Return the CopyResults section of the job script, i.e. the
    commands that stage produced output out to a storage element.
    """
    # Delegate to the common implementation shared by the schedulers.
    return self.wsCopyOutput_comm()
132 |
+ |
|
133 |
+ |
|
134 |
|
def wsExitFunc(self): |
135 |
|
""" |
136 |
|
Returns the part of the job script which runs prior to exit |
163 |
|
txt += 'printenv | sort\n' |
164 |
|
|
165 |
|
txt += 'middleware='+self.name()+' \n' |
166 |
+ |
txt += 'if [ -e /opt/d-cache/srm/bin ]; then\n' |
167 |
+ |
txt += ' export PATH=${PATH}:/opt/d-cache/srm/bin\n' |
168 |
+ |
txt += 'fi\n' |
169 |
+ |
|
170 |
|
txt += """ |
171 |
|
if [ $_CONDOR_SCRATCH_DIR ] && [ -d $_CONDOR_SCRATCH_DIR ]; then |
172 |
|
echo "cd to Condor scratch directory: $_CONDOR_SCRATCH_DIR" |