
Comparing COMP/CRAB/python/SchedulerCondor.py (file contents):
Revision 1.8.2.2 by fanzago, Tue Jun 24 15:20:56 2008 UTC vs.
Revision 1.18 by fanzago, Mon Feb 16 23:24:34 2009 UTC

# Line 1 | Line 1
1 + """
2 + Implements the vanilla (local) Condor scheduler
3 + """
4 +
5   __revision__ = "$Id$"
6   __version__ = "$Revision$"
7  
8 < from Scheduler import Scheduler
9 < from SchedulerLocal import SchedulerLocal
6 < from crab_exceptions import *
7 < from crab_util import *
8 < from crab_logger import Logger
9 < import common
8 > from SchedulerLocal  import SchedulerLocal
9 > from crab_exceptions import CrabException
10  
11 + import common
12   import os
13 <
14 < #  Naming convention:
14 < #  methods starting with 'ws' are responsible to provide
15 < #  corresponding part of the job script ('ws' stands for 'write script').
13 > import socket
14 > import sha
15  
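
The new revision pulls in socket and the standalone sha module to hash the task name (used in configure() below). As an editorial aside, sha has been deprecated in favour of hashlib since Python 2.5; the snippet below, run on an invented string, is simply the modern spelling of the same SHA-1 hexdigest.

import hashlib

# hashlib.sha1(...).hexdigest() matches the deprecated sha.new(...).hexdigest().
print(hashlib.sha1("some_task_name".encode()).hexdigest())  # invented input
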
16   class SchedulerCondor(SchedulerLocal) :
18
19  def __init__(self):
20    Scheduler.__init__(self,"CONDOR")
21    return
22
23
24  def configure(self, cfg_params):
25    SchedulerLocal.configure(self, cfg_params)
26    self.environment_unique_identifier ='${HOSTNAME}_${CONDOR_ID}_' + common._db.queryTask('name')
27
28    try:
29      tmp =  cfg_params['CMSSW.datasetpath']
30      if string.lower(tmp)=='none':
31        self.datasetPath = None
32        self.selectNoInput = 1
33      else:
34        self.datasetPath = tmp
35        self.selectNoInput = 0
36    except KeyError:
37      msg = "Error: datasetpath not defined "
38      raise CrabException(msg)
39
40    return
41
42
43  def sched_parameter(self,i,task):
17      """
18 <    Return scheduler-specific parameters
19 <    """
20 <    index = int(common._db.nJobs()) - 1
21 <    sched_param= ''
22 <
23 <    for i in range(index):
24 <      pass
25 <
26 <    return sched_param
27 <
28 <
29 <  def realSchedParams(self,cfg_params):
30 <    """
31 <    Return dictionary with specific parameters, to use
32 <    with real scheduler
33 <    """
34 <
35 <    tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp')
36 <    params = {'tmpDir':tmpDir}
37 <    return  params
38 <
39 <
40 <  def listMatch(self, seList, full, onlyOSG=True):
41 <    """
42 <    Check the compatibility of available resources
43 <    """
44 <
45 <    # May have problems with onlyOSG being false, probably due to lengths of lists and command line.
46 <    # Either re-write osg_bdii.py with a proper ldap library or break the queries apart
47 <
48 <    if self.selectNoInput:
49 <      return [True]
50 <    else:
51 <      return SchedulerLocal.listMatch(self, seList, full)
52 <
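
The comment in the old listMatch above notes that passing a long SE list through osg_bdii.py can overflow the command line, and suggests either a proper LDAP library or breaking the queries apart. A minimal sketch of the second option, assuming a hypothetical query_bdii(se_chunk) callable, could look like this:

def chunked(items, size):
    # Yield successive fixed-size slices of a list.
    for start in range(0, len(items), size):
        yield items[start:start + size]

def list_match_in_chunks(se_list, query_bdii, chunk_size=20):
    # Run the (hypothetical) BDII query on small slices of the SE list
    # and merge the results, so no single command line grows too long.
    matched = []
    for chunk in chunked(se_list, chunk_size):
        matched.extend(query_bdii(chunk))
    return matched
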
53 <  def decodeLogInfo(self, file):
54 <    """
55 <    Parse logging info file and return main info
56 <    """
57 <    import CondorGLoggingInfo
58 <    loggingInfo = CondorGLoggingInfo.CondorGLoggingInfo()
59 <    reason = loggingInfo.decodeReason(file)
60 <    return reason
61 <
62 <
63 <  def wsExitFunc(self):
64 <    """
65 <    """
66 <    txt = '\n'
67 <    txt += '#\n'
68 <    txt += '# EXECUTE THIS FUNCTION BEFORE EXIT \n'
69 <    txt += '#\n\n'
70 <
71 <    txt += 'func_exit() { \n'
72 <    txt += self.wsExitFunc_common()
73 <
74 <    txt += '    tar zcvf ${out_files}.tgz  ${final_list}\n'
75 <    txt += '    cp  ${out_files}.tgz $ORIG_WD/\n'
76 <    txt += '    cp  crab_fjr_$NJob.xml $ORIG_WD/\n'
77 <
78 <    txt += '    exit $job_exit_code\n'
79 <    txt += '}\n'
80 <
81 <    return txt
82 <
83 <  def wsInitialEnvironment(self):
84 <    """
85 <    Returns part of a job script which does scheduler-specific work.
86 <    """
87 <
88 <    txt  = '\n# Written by SchedulerCondor::wsInitialEnvironment\n'
89 <    txt += 'echo "Beginning environment"\n'
90 <    txt += 'printenv | sort\n'
18 >    Class to implement the vanilla (local) Condor scheduler
19 >     Naming convention:  Methods starting with 'ws' provide
20 >     the corresponding part of the job script
21 >     ('ws' stands for 'write script').
22 >    """
23 >
24 >    def __init__(self):
25 >        SchedulerLocal.__init__(self,"CONDOR")
26 >        self.datasetPath   = None
27 >        self.selectNoInput = None
28 >        self.environment_unique_identifier = None
29 >        return
30 >
31 >
32 >    def configure(self, cfg_params):
33 >        """
34 >        Configure the scheduler with the config settings from the user
35 >        """
36 >
37 >        SchedulerLocal.configure(self, cfg_params)
38 >        taskHash = sha.new(common._db.queryTask('name')).hexdigest()
39 >        self.environment_unique_identifier = "https://" + socket.gethostname() + \
40 >                                              '/' + taskHash + "/${NJob}"
41 >
42 >        try:
43 >            tmp =  cfg_params['CMSSW.datasetpath']
44 >            if tmp.lower() == 'none':
45 >                self.datasetPath = None
46 >                self.selectNoInput = 1
47 >            else:
48 >                self.datasetPath = tmp
49 >                self.selectNoInput = 0
50 >        except KeyError:
51 >            msg = "Error: datasetpath not defined "
52 >            raise CrabException(msg)
53 >
54 >        self.return_data = cfg_params.get('USER.return_data', 0)
55 >        self.copy_data   = cfg_params.get("USER.copy_data", 0)
56 >        self.backup_copy = cfg_params.get('USER.backup_copy',0)
57 >
58 >        if ( int(self.return_data) == 0 and int(self.copy_data) == 0 ):
59 >            msg = 'Error: return_data and copy_data cannot be set both to 0\n'
60 >            msg = msg + 'Please modify your crab.cfg file\n'
61 >            raise CrabException(msg)
62 >
63 >        if ( int(self.return_data) == 1 and int(self.copy_data) == 1 ):
64 >            msg = 'Error: return_data and copy_data cannot be set both to 1\n'
65 >            msg = msg + 'Please modify your crab.cfg file\n'
66 >            raise CrabException(msg)
67 >
68 >        if ( int(self.copy_data) == 0 and int(self.publish_data) == 1 ):
69 >            msg = 'Warning: publish_data = 1 must be used with copy_data = 1\n'
70 >            msg = msg + 'Please modify copy_data value in your crab.cfg file\n'
71 >            common.logger.message(msg)
72 >            raise CrabException(msg)
73 >
74 >        if ( int(self.copy_data) == 0 and int(self.backup_copy) == 1 ):
75 >            msg = 'Error: copy_data = 0 and backup_data = 1 ==> to use the backup_copy function, the copy_data value has to be = 1\n'
76 >            msg = msg + 'Please modify copy_data value in your crab.cfg file\n'
77 >            raise CrabException(msg)
78 >
79 >        if int(self.copy_data) == 1:
80 >            self.SE = cfg_params.get('USER.storage_element', None)
81 >            if not self.SE:
82 >                msg = "Error. The [USER] section has no 'storage_element'"
83 >                common.logger.message(msg)
84 >                raise CrabException(msg)
85 >                
86 >        if ( int(self.backup_copy) == 1 and int(self.publish_data) == 1 ):
87 >            msg = 'Warning: currently the publication is not supported with the backup copy. Work in progress....\n'
88 >            common.logger.message(msg)
89 >            raise CrabException(msg)
90 >
91 >            self.proxyValid = 0
92 >            self.dontCheckProxy = int(cfg_params.get("EDG.dont_check_proxy",0))
93 >            self.proxyServer = cfg_params.get("EDG.proxy_server",'myproxy.cern.ch')
94 >            common.logger.debug(5,'Setting myproxy server to ' + self.proxyServer)
95 >
96 >            self.group = cfg_params.get("EDG.group", None)
97 >            self.role  = cfg_params.get("EDG.role", None)
98 >            self.VO    = cfg_params.get('EDG.virtual_organization', 'cms')
99 >
100 >            self.checkProxy()
101 >        self.role  = None
102 >
103 >        return
104 >
105 >
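
For reference, the identifier that configure() builds above combines the submission host name, a SHA-1 hash of the task name, and the ${NJob} placeholder that the job wrapper expands at run time. A standalone sketch, with an invented task name and hashlib standing in for the deprecated sha module:

import hashlib
import socket

def unique_identifier(task_name):
    # Mirrors the construction above: https://<host>/<sha1(task name)>/${NJob},
    # leaving ${NJob} for the job script to expand.
    task_hash = hashlib.sha1(task_name.encode()).hexdigest()
    return "https://" + socket.gethostname() + "/" + task_hash + "/${NJob}"

print(unique_identifier("fanzago_crab_0_090216_example"))  # invented task name
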
106 >    def sched_parameter(self, i, task):
107 >        """
108 >        Return scheduler-specific parameters
109 >        """
110 >
111 >        index = int(common._db.nJobs()) - 1
112 >        schedParam = ''
113 >
114 >        for i in range(index):
115 >            pass
116 >
117 >        return schedParam
118 >
119 >
120 >    def realSchedParams(self, cfg_params):
121 >        """
122 >        Return dictionary with specific parameters, to use with real scheduler
123 >        """
124 >
125 >        tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp')
126 >        tmpDir = os.path.join(common.work_space.shareDir(),'.condor_temp')
127 >        jobDir = common.work_space.jobDir()
128 >        params = {'tmpDir':tmpDir,
129 >                  'jobDir':jobDir}
130 >        return params
131 >
132 >
133 >    def listMatch(self, seList, full):
134 >        """
135 >        Check the compatibility of available resources
136 >        """
137 >
138 >        return [True]
139 >
140 >
141 >    def decodeLogInfo(self, fileName):
142 >        """
143 >        Parse logging info file and return main info
144 >        """
145 >
146 >        import CondorGLoggingInfo
147 >        loggingInfo = CondorGLoggingInfo.CondorGLoggingInfo()
148 >        reason = loggingInfo.decodeReason(fileName)
149 >        return reason
150 >
151 >
152 >    def wsCopyOutput(self):
153 >        """
154 >        Write a CopyResults part of a job script, e.g.
155 >        to copy produced output into a storage element.
156 >        """
157 >        txt = self.wsCopyOutput_comm()
158 >        return txt
159 >
160 >
161 >    def wsExitFunc(self):
162 >        """
163 >        Returns the part of the job script which runs prior to exit
164 >        """
165 >
166 >        txt = '\n'
167 >        txt += '#\n'
168 >        txt += '# EXECUTE THIS FUNCTION BEFORE EXIT \n'
169 >        txt += '#\n\n'
170 >
171 >        txt += 'func_exit() { \n'
172 >        txt += self.wsExitFunc_common()
173 >
174 >        txt += '    tar zcvf ${out_files}.tgz  ${final_list}\n'
175 >        txt += '    cp  ${out_files}.tgz $_CONDOR_SCRATCH_DIR/\n'
176 >        txt += '    cp  crab_fjr_$NJob.xml $_CONDOR_SCRATCH_DIR/\n'
177 >
178 >        txt += '    exit $job_exit_code\n'
179 >        txt += '}\n'
180 >
181 >        return txt
182 >
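
wsExitFunc above is a typical 'ws' method: it runs nothing itself, it only returns a bash fragment that CRAB later glues into the job wrapper. A minimal, self-contained sketch of the same pattern (variable names are taken from the fragment above; the wsExitFunc_common body is not reproduced):

def ws_exit_func(scratch_var="$_CONDOR_SCRATCH_DIR"):
    # Build a bash function line by line, as the 'ws' methods do.
    txt = 'func_exit() { \n'
    txt += '    tar zcvf ${out_files}.tgz ${final_list}\n'
    txt += '    cp ${out_files}.tgz ' + scratch_var + '/\n'
    txt += '    cp crab_fjr_$NJob.xml ' + scratch_var + '/\n'
    txt += '    exit $job_exit_code\n'
    txt += '}\n'
    return txt

# The framework concatenates fragments like this one into the final job script.
print(ws_exit_func())
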
183 >    def wsInitialEnvironment(self):
184 >        """
185 >        Returns part of a job script which does scheduler-specific work.
186 >        """
187 >
188 >        txt  = '\n# Written by SchedulerCondor::wsInitialEnvironment\n'
189 >        txt += 'echo "Beginning environment"\n'
190 >        txt += 'printenv | sort\n'
191 >
192 >        txt += 'middleware='+self.name()+' \n'
193 >        txt += 'if [ -e /opt/d-cache/srm/bin ]; then\n'
194 >        txt += '  export PATH=${PATH}:/opt/d-cache/srm/bin\n'
195 >        txt += 'fi\n'
196  
197 <    txt += 'middleware='+self.name()+' \n'
120 <    txt += """
197 >        txt += """
198   if [ $_CONDOR_SCRATCH_DIR ] && [ -d $_CONDOR_SCRATCH_DIR ]; then
199 <    ORIG_WD=`pwd`
123 <    echo "Change from $ORIG_WD to Condor scratch directory: $_CONDOR_SCRATCH_DIR"
199 >    echo "cd to Condor scratch directory: $_CONDOR_SCRATCH_DIR"
200      if [ -e ../default.tgz ] ;then
201        echo "Found ISB in parent directory (Local Condor)"
202        cp ../default.tgz $_CONDOR_SCRATCH_DIR
# Line 129 | Line 205 | if [ $_CONDOR_SCRATCH_DIR ] && [ -d $_CO
205   fi
206   """
207  
208 <    return txt
208 >        return txt

Diff Legend

(no marker)  Removed lines
 +  Added lines
 <  Changed lines (old revision, 1.8.2.2)
 >  Changed lines (new revision, 1.18)