root/cvsroot/COMP/CRAB/python/full_crab.cfg
Revision: 1.3
Committed: Tue Apr 15 16:54:12 2008 UTC (17 years ago) by spiga
Branch: MAIN
CVS Tags: CRAB_2_2_0, CRAB_2_2_0_pre21, CRAB_2_2_0_pre19, CRAB_2_2_0_pre18, CRAB_2_2_0_pre17, CRAB_2_2_0_pre16, CRAB_2_2_0_pre15, CRAB_2_2_0_pre13, CRAB_2_2_0_pre12, CRAB_2_2_0_pre11, CRAB_2_2_0_pre10, bp_osg_bdii, CRAB_2_2_0_pre9, CRAB_2_2_0_pre8, CRAB_2_2_0_pre7
Branch point for: osg_bdii
Changes since 1.2: +3 -0 lines
Log Message:
update for xml_report parameter

File Contents

[CRAB]
#
# This section contains the default values for general parameters.
# They can also be set as command-line options, i.e.
#
# key1 = value1
# [SECTION]
# key2 = value2
#
# in this section corresponds to
#
# crab.py -key1=value1 -key2=SECTION.value2
#
# on the command line.
#
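# For instance (a hypothetical invocation, values chosen only for illustration),
# a key defined in this [CRAB] section, such as scheduler, could be overridden as:
#
# crab.py -scheduler=glite
#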
jobtype = cmssw

### Specify the scheduler to be used.
### Supported schedulers are: [edg, glite, condor_g]
scheduler = glitecoll

### To configure CRAB as a client, set the name of the server
### (e.g. server_pi, server_lnl, etc.)
### CRAB will submit jobs to the server, which will submit and manage them for the user.
#server_name = server_lxb

[CMSSW]

### The data you want to access (to be found on DBS)
### /primarydataset/datatier/processeddataset
datasetpath=/ttbar_inclusive_TopRex/CMSSW_1_3_1-Spring07-1122/GEN-SIM-DIGI-RECO
### To run CRAB for private event production, set datasetpath = None

### To select a single run (or a list of runs) within a single processed dataset, define the run number (list).
### The selection can be a comma-separated list of run numbers and run number ranges: 1,2,3-4
#runselection=1,2,3-4

### To use a local DBS instance, specify the related URL here.
# dbs_url = http://cmsdoc.cern.ch/cms/test/aprom/DBS/CGIServer/prodquery

### The name of the ParameterSet to be used
pset=runSimHitAnalyzer.cfg

### Splitting parameters:
### Total number of events to be accessed: -1 means all
### NOTE: "-1" cannot be used when there is no input dataset
total_number_of_events=-1

### Number of events to be processed per job
#events_per_job = 1000

### Number of jobs to be created per task
number_of_jobs = 1
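
### As a rough illustration of the splitting (numbers are hypothetical):
### with total_number_of_events = 10000 and events_per_job = 1000, the task
### would be split into about 10 jobs; alternatively, set number_of_jobs and
### let CRAB work out the events per job.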

### The output files produced by your application (comma-separated list)
output_file = Histos.root
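### For example, to retrieve more than one file (hypothetical file names):
### output_file = Histos.root, Ntuple.root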

[USER]

### If CRAB.server_mode = 1
### Set your e-mail address here; it will be used by the server to notify you
#eMail = your_email_address

### To specify the percentage of finished jobs in a task at which the
### notification e-mail will be sent to you by the server. Default is 100%.
#thresholdLevel = 100

### To specify additional files to be put in the InputSandBox
### write the full path if the files are not in the current directory
### (wildcards * are allowed): comma-separated list
#additional_input_files = file1, file2, /full/path/file3

### To use a specific name for the UI directory where CRAB will create the jobs to submit (full path required).
### The default directory will be "crab_0_date_time"
#ui_working_dir = /full/path/Name_of_Directory


### OUTPUT file management ###
### To retrieve the job executable output to the UI, set return_data = 1
return_data = 1

### If return_data = 1 ###
### To specify the UI directory where the CMS executable output is stored
### FULL path is mandatory. Default is <ui_working_dir>/res.
#outputdir= /full/path/yourOutDir

### If return_data = 1 ###
### To specify the UI directory where the stderr, stdout and .BrokerInfo of submitted jobs are stored
### FULL path is mandatory. Default is <ui_working_dir>/res.
#logdir= /full/path/yourLogDir

### To copy the CMS executable output to an SE (e.g. Castor), set copy_data = 1
copy_data = 0

### If copy_data = 1 ###
### Specify the name of the SE where the CMS executable output should be copied.
#storage_element = srm.cern.ch
### Specify the SE directory (or the mountpoint) that has to be writable from all sites
#storage_path = /castor/cern.ch/user/u/user
### Example for the LNL SRM
#storage_element = t2-srm-02.lnl.infn.it
#storage_path = /srm/managerv1?SFN=/pnfs/lnl.infn.it/data/cms/store/user

### To specify the version of the SRM client to use.
#srm_version = 1

### To publish the produced output in a local instance of DBS, set publish_data = 1
publish_data=0
### Specify the dataset name. The full path will be <primarydataset>/<publish_data_name>/USER
#publish_data_name = yourDataName
### Specify the URL of the DBS instance where CRAB has to publish the output files
#dbs_url_for_publication = http://cmssrv17.fnal.gov:8989/DBS108LOC1/servlet/DBSServlet
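### As an illustration (publish_data_name here is hypothetical): with the datasetpath
### above and publish_data_name = yourDataName, the output would be published as
### /ttbar_inclusive_TopRex/yourDataName/USER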

### To switch from printing the status on screen to serializing the status DB to a file, specify the destination file here.
### CRAB will create it in CRAB_Working_Dir/share
#xml_report=
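### (Hypothetical example, for illustration only: setting xml_report = myReport.xml
###  would make CRAB create the file under CRAB_Working_Dir/share)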

[EDG]

### To change the CMS-broker RB/WMS to be used. The ones available for CMS
### are "CERN" and "CNAF": the configuration
### files needed to change the broker will be automatically downloaded from the CRAB web page.
### If the files are already present in the working directory they will be used.
#rb = CNAF

### CMS myproxy server, for proxy delegation
proxy_server = myproxy.cern.ch

### To specify the VOMS role and/or group
#role = superman
#group = superheros

### To skip the CRAB check of your proxy
#dont_check_proxy = 1

### To add other requirements to the JDL file, for example the operating system
#requirements = (other.GlueHostOperatingSystemName == "RedHat")

### To add other parameters to the JDL file: semicolon-separated list
#additional_jdl_parameters = AllowZippedISB = false

### To use a specific WMS endpoint, put its name here:
#wms_service=

### To specify the CPU time and wall clock time (= real time) in minutes
#max_cpu_time = 60
#max_wall_clock_time = 60

### To manage white/black lists: for discovery, please use http://cmslcgco01.cern.ch:8001/
### Use the DNS domain (e.g. fnal, cern, ifae, fzk, cnaf, lnl, ...)
### All the storage/computing elements (SE/CE) contained in the strings (comma-separated list)
### will/will not be considered for submission.
### SE Black List:
#se_black_list = infn
### SE White List:
#se_white_list = infn

### CE Black List:
#ce_black_list = infn
### CE White List:
#ce_white_list = infn

## Fields written into the JDL
virtual_organization = cms

## Number of retries
retry_count = 2

[CONDORG]

# Set this to condor to override the batch system defined in gridcat.
#batchsystem = condor

# Specify additional condor_g requirements
# Use this requirement to run on CMS-dedicated hardware
# globus_rsl = (condor_submit=(requirements 'ClusterName == \"CMS\" && (Arch == \"INTEL\" || Arch == \"X86_64\")'))
# Use this requirement to run on the new hardware
#globus_rsl = (condor_submit=(requirements 'regexp(\"cms-*\",Machine)'))