ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/COMP/CRAB/python/full_crab.cfg
Revision: 1.1
Committed: Thu Feb 21 16:33:37 2008 UTC (17 years, 2 months ago) by spiga
Branch: MAIN
CVS Tags: CRAB_2_1_2, CRAB_2_1_2_pre2, CRAB_2_1_2_pre1, CRAB_2_1_1, CRAB_2_1_1_pre3, CRAB_2_2_0_pre1, CRAB_2_1_1_pre1, CRAB_2_1_0, CRAB_2_1_0_pre6
Branch point for: CRAB_2_1_2_br, CRAB_2_1_1_pre2
Log Message:
complete version of crab configuration file

File Contents

# User Rev Content
1 spiga 1.1 [CRAB]
2     #
3     # This section contains the default values for general parameters.
4     # They can be set also as a command-line option, i.e.
5     #
6     # key1 = value1
7     # [SECTION]
8     # key2 = value2
9     #
10     # in this section corresponds to
11     #
12     # crab.py -key1=value1 -SECTION.key2=value2
13     #
14     # in the command line.
15     #
16     jobtype = cmssw
17    
18     ### Specify the scheduler to be used.
19     ### Supported schedulers are : [ edg, glite, condor_g]
20     scheduler = glitecoll
21    
22     ### To configure CRAB as client set server_mode = 1
23     ### CRAB will submit jobs to the server, which will submit and manage them for the users
24     server_mode = 0
25     ### Specify here the name of the server to use
26     server_name = crabas.lnl.infn.it/data1/cms/
27    
28     [CMSSW]
29    
30     ### The data you want to access (to be found on DBS)
31     ### /primarydataset/datatier/processeddataset
32     datasetpath=/ttbar_inclusive_TopRex/CMSSW_1_3_1-Spring07-1122/GEN-SIM-DIGI-RECO
33     ### To run CRAB for private events production set datasetPath= None
34    
35     ### To select a single (list of) run within a single processed dataset define run number (list)
36     ### selection can be a comma-separated list of run numbers and run number ranges: 1,2,3-4
37     #runselection=1,2,3-4
38    
39     ### To use a local DBS instance specify the related URL here.
40     # dbs_url = http://cmsdoc.cern.ch/cms/test/aprom/DBS/CGIServer/prodquery
41    
42     ### The name of ParameterSet to be used
43     pset=runSimHitAnalyzer.cfg
44    
45     ### Splitting parameters:
46     ### Total number of events to be accessed: -1 means all
47     ### NOTE: "-1" is not usable if no input
48     total_number_of_events=-1
49    
50     ### Number of events to be processed per job
51     #events_per_job = 1000
52    
53     ### Number of jobs to be created per task
54     number_of_jobs = 1
55    
56     ### The output files produced by your application (comma separated list)
57     output_file = Histos.root
58    
59     [USER]
60    
61     ### If CRAB.server_mode = 1
62     ### To set Your e-mail address to be used by the server to notify you
63     #eMail = your_email_address
64    
65     ### To specify the percentage of finished job in a task, corresponding
66     ### to when the notification email will be sent to you by the server; default is 100%
67     #thresholdLevel = 100
68    
69     ### To specify additional files to be put in InputSandBox
70     ### write the full path if the files are not in the current directory
71     ### (wildcard * are allowed): comma separated list
72     #additional_input_files = file1, file2, /full/path/file3
73    
74     ### To use a specific name of UI directory where CRAB will create job to submit (with full path).
75     ### the default directory will be "crab_0_date_time"
76     #ui_working_dir = /full/path/Name_of_Directory
77    
78    
79     ### OUTPUT file management ###
80     ### To have back the job executable output into UI set return_data= 1
81     return_data = 1
82    
83     ### If return_data = 1 ###
84     ### To specify the UI directory where to store the CMS executable output
85     ### FULL path is mandatory. Default is <ui_working_dir>/res will be used.
86     #outputdir= /full/path/yourOutDir
87    
88     ### If return_data = 1 ###
89     ### To specify the UI directory where to store the stderr, stdout and .BrokerInfo of submitted jobs
90     ### FULL path is mandatory. Default is <ui_working_dir>/res will be used.
91     #logdir= /full/path/yourLogDir
92    
93     ### To copy the CMS executable output into a SE (i:e castor) set copy_data = 1
94     copy_data = 0
95    
96     ### if copy_data = 1 ###
97     ### Specify the name of the SE where to copy the CMS executable output.
98     #storage_element = srm.cern.ch
99     ### Specify the SE directory (or the mountpoint) that has to be writable from all
100     #storage_path = /castor/cern.ch/user/u/user
101     ### example for LNL SRM
102     #storage_element = t2-srm-02.lnl.infn.it
103     #storage_path = /srm/managerv1?SFN=/pnfs/lnl.infn.it/data/cms/store/user
104    
105     ### To specify the version of srm client to use.
106     #srm_version = 1
107    
108     ### To publish produced output in a local instance of DBS set publish_data = 1
109     publish_data=0
110     ### Specify the dataset name. The full path will be <primarydataset>/<publish_data_name>/USER
111     #publish_data_name = yourDataName
112     ### Specify the URL of the DBS instance where CRAB has to publish the output files
113     #dbs_url_for_publication = http://cmssrv17.fnal.gov:8989/DBS108LOC1/servlet/DBSServlet
114    
115    
116     ### BOSS parameters
117     ### To use a central BOSS DB instead of one for each task: the DB must already have been set up!
118     use_central_bossDB = 0
119     ### To Use Boss RealTime monitoring set use_boss_rt = 1
120     use_boss_rt = 1
121    
122     ### To use a different set of BOSS config files specify the location here
123     boss_clads=
124    
125     [EDG]
126    
127     ### To change the CMS-broker RB/WMS to be used. The ones available for CMS
128     ### are "CERN" and "CNAF": the configuration
129     ### files needed to change the broker will be automatically downloaded from CRAB web page.
130     ### If the files are already present on the working directory they will be used.
131     #rb = CNAF
132    
133     ### CMS myproxy server, to proxy delegation
134     proxy_server = myproxy.cern.ch
135    
136     ### To specify VOMS role and/or group
137     #role = superman
138     #group = superheros
139    
140     ### To skip the CRAB check of your proxy
141     #dont_check_proxy = 1
142    
143     ### To add other requirements to jdl file, as example the Operating System
144     #requirements = (other.GlueHostOperatingSystemName == "RedHat")
145    
146     ### To add other parameters to jdl file: semicolon separated list;
147     #additional_jdl_parameters = AllowZippedISB = false
148    
149     ### To specify a cpu time and wall_clock_time(=real time) in minutes
150     #max_cpu_time = 60
151     #max_wall_clock_time = 60
152    
153     ### To manage White/Black lists: For discovery, please use http://cmslcgco01.cern.ch:8001/
154     ### Use the dns domain (eg fnal, cern, ifae, fzk, cnaf, lnl,....)
155     ### All the storage/computing elements (SE/CE) contained in the strings (comma separated list)
156     ### will/will not be considered for submission.
157     ### SE Black List:
158     #se_black_list = infn
159     ### SE White List
160     #se_white_list = infn
161    
162     ### CE Black List:
163     #ce_black_list = infn
164     ### CE White List:
165     #ce_white_list = infn
166    
167     ## fields written into jdl
168     virtual_organization = cms
169    
170     ## number of retries
171     retry_count = 2
172    
173     [CONDORG]
174    
175     # Set this to condor to override the batchsystem defined in gridcat.
176     #batchsystem = condor
177    
178     # Specify additional condor_g requirements
179     # use this requirement to run on CMS-dedicated hardware
180     # globus_rsl = (condor_submit=(requirements 'ClusterName == \"CMS\" && (Arch == \"INTEL\" || Arch == \"X86_64\")'))
181     # use this requirement to run on the new hardware
182     #globus_rsl = (condor_submit=(requirements 'regexp(\"cms-*\",Machine)'))
183