  6     from crab_logger import Logger
  7     from crab_exceptions import *
  8     from ProdCommon.FwkJobRep.ReportParser import readJobReport
  9 +   from ProdCommon.FwkJobRep.ReportState import checkSuccess
 10     from ProdCommon.MCPayloads.WorkflowSpec import WorkflowSpec
 11     from ProdCommon.DataMgmt.DBS.DBSWriter import DBSWriter
 12     from ProdCommon.DataMgmt.DBS.DBSErrors import DBSWriterError, formatEx, DBSReaderError
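
These ProdCommon imports carry the whole publication flow: readJobReport parses a framework job report (FJR) XML file into a list of report objects, and DBSWriter registers the resulting datasets and files in a local DBS instance. A minimal parsing sketch, using only the attributes this patch itself reads (the report path is invented):

    # Sketch: .status, .files, 'LFN' and 'TotalEvents' are the fields the
    # hunks below rely on; the path is a made-up example.
    reports = readJobReport("res/crab_fjr_1.xml")
    for report in reports:
        if report.status == "Success":
            for f in report.files:
                lfn, nevents = f['LFN'], f['TotalEvents']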
 25     """
 26
 27     try:
 28 <       userprocessedData = cfg_params['USER.publish_data_name']
 28 >       self.userprocessedData = cfg_params['USER.publish_data_name']
 29         self.processedData = None
 30     except KeyError:
 31         raise CrabException('Cannot publish output data, because you did not specify the USER.publish_data_name parameter in the crab.cfg file')
 32
 33     try:
 34 <       if (int(cfg_params['USER.copy_data']) != 1): raise KeyError
 34 >       if (int(cfg_params['USER.copy_data']) != 1):
 35 >           raise KeyError
 36     except KeyError:
 37 <       raise CrabException('You can not publish data because you did not selected *** copy_data = 1 *** in the crab.cfg file')
 37 >       msg = 'You cannot publish data because you did not select \n'
 38 >       msg += '\t*** copy_data = 1 or publish_data = 1 *** in the crab.cfg file'
 39 >       raise CrabException(msg)
 40     try:
 41         self.pset = cfg_params['CMSSW.pset']
 42     except KeyError:
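
The three try blocks above validate the publication settings read from crab.cfg. For reference, a sketch of the flattened cfg_params mapping they expect (the section.key names are exactly the ones checked above; the values are illustrative):

    cfg_params = {
        'USER.publish_data_name': 'MyAnalysis_v1',  # stored as self.userprocessedData
        'USER.copy_data': '1',                      # must be 1, otherwise publication aborts
        'CMSSW.pset': 'pset.py',                    # parameter-set file of the task
    }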
 75         dataset=string.strip(dataset)
 76         self.dataset_to_import.append(dataset)
 77     ###
 78 <
 78 >
 79 >   self.skipOcheck=int(cfg_params.get('CMSSW.publish_zero_event',0))
 80 >
 81     self.SEName=''
 82     self.CMSSW_VERSION=''
 83     self.exit_status=''

 92     dbsWriter = DBSWriter(self.DBSURL,level='ERROR')
 93
 94     try:
 95 <       dbsWriter.importDatasetWithoutParentage(globalDBS, self.datasetpath, self.DBSURL)
 95 >       #dbsWriter.importDatasetWithoutParentage(globalDBS, self.datasetpath, self.DBSURL)
 96 >       dbsWriter.importDataset(globalDBS, self.datasetpath, self.DBSURL)
 97     except DBSWriterError, ex:
 98         msg = "Error importing dataset to be processed into local DBS\n"
 99         msg += "Source Dataset: %s\n" % datasetpath
100         msg += "Source DBS: %s\n" % globalDBS
101         msg += "Destination DBS: %s\n" % self.DBSURL
102         common.logger.message(msg)
103 +       common.logger.write(str(ex))
104         return 1
105     return 0
106
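Switching from importDatasetWithoutParentage to importDataset pulls the input dataset into the local DBS together with its parentage, which is presumably what allows the later hunk to declare dataset['ParentDataset'] = self.datasetpath for jobs that ran over an input dataset. A condensed sketch of the call pattern (the use of formatEx, imported at the top, is an assumption here):

    dbsWriter = DBSWriter(self.DBSURL, level='ERROR')
    try:
        # importDataset registers the dataset *and* its parents locally
        dbsWriter.importDataset(globalDBS, self.datasetpath, self.DBSURL)
    except DBSWriterError, ex:
        common.logger.message("import failed: %s" % formatEx(ex))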
141     datasets=fileinfo.dataset
142     common.logger.debug(6,"FileInfo = " + str(fileinfo))
143     common.logger.debug(6,"DatasetInfo = " + str(datasets))
144 +   if len(datasets)<=0:
145 +       self.exit_status = '1'
146 +       msg = "Error: No info about dataset in the xml file "+file
147 +       common.logger.message(msg)
148 +       return self.exit_status
149     for dataset in datasets:
150         #### for production data
151         self.processedData = dataset['ProcessedDataset']
152         if (dataset['PrimaryDataset'] == 'null'):
153 <           dataset['PrimaryDataset'] = dataset['ProcessedDataset']
154 <       else: # add parentage from input dataset
153 >           #dataset['PrimaryDataset'] = dataset['ProcessedDataset']
154 >           dataset['PrimaryDataset'] = self.userprocessedData
155 >       #else: # add parentage from input dataset
156 >       elif self.datasetpath.upper() != 'NONE':
157             dataset['ParentDataset']= self.datasetpath
158
159         dataset['PSetContent']=self.content
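
Worked through for a production-style FJR, where the framework reports PrimaryDataset as 'null': the primary-dataset name now comes from USER.publish_data_name instead of duplicating the processed-dataset name, and ParentDataset is only set when the task actually ran over an input dataset. Assuming the usual /Primary/Processed/DataTier layout of DBS paths with the USER tier, the published path would come out as (values invented):

    dataset = {'PrimaryDataset': 'null',
               'ProcessedDataset': 'MyAnalysis_v1-v1'}   # invented values
    userprocessedData = 'MyAnalysis_v1'                  # USER.publish_data_name
    if (dataset['PrimaryDataset'] == 'null'):
        dataset['PrimaryDataset'] = userprocessedData
    path = "/%s/%s/USER" % (dataset['PrimaryDataset'], dataset['ProcessedDataset'])
    # -> '/MyAnalysis_v1/MyAnalysis_v1-v1/USER'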
200     elif (file['LFN'] == ''):
201         self.noLFN.append(file['PFN'])
202     else:
203 <       if int(file['TotalEvents']) != 0 :
204 <           #file.lumisections = {}
205 <           # lumi info are now in run hash
203 >       if self.skipOcheck==0:
204 >           if int(file['TotalEvents']) != 0:
205 >               #file.lumisections = {}
206 >               # lumi info are now in run hash
207 >               file.runs = {}
208 >               for ds in file.dataset:
209 >                   ### Fede for production
210 >                   if (ds['PrimaryDataset'] == 'null'):
211 >                       #ds['PrimaryDataset']=procdataset
212 >                       ds['PrimaryDataset']=self.userprocessedData
213 >               filestopublish.append(file)
214 >           else:
215 >               self.noEventsFiles.append(file['LFN'])
216 >       else:
217             file.runs = {}
218             for ds in file.dataset:
193 -               ### FEDE FOR NEW LFN ###
194 -               #ds['ProcessedDataset']=procdataset
195 -               ########################
219                 ### Fede for production
220                 if (ds['PrimaryDataset'] == 'null'):
221 <                   ds['PrimaryDataset']=procdataset
221 >                   #ds['PrimaryDataset']=procdataset
222 >                   ds['PrimaryDataset']=self.userprocessedData
223             filestopublish.append(file)
224 <       else:
201 <           self.noEventsFiles.append(file['LFN'])
224 >
225     jobReport.files = filestopublish
226     ### if all files of FJR have number of events = 0
227     if (len(filestopublish) == 0):
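
The restructured block gates the old zero-event check behind the new flag: with the default (0) a file reporting TotalEvents == 0 is set aside in noEventsFiles, while with the flag set every file is published. Distilled into a small predicate (names invented for the illustration):

    def should_publish(total_events, skip_zero_event_check):
        # skip_zero_event_check == 0 is the default: empty files are dropped
        if skip_zero_event_check == 0:
            return int(total_events) != 0
        # publish_zero_event = 1: zero-event files are kept as well
        return True

    assert should_publish(0, 0) is False    # empty file, check active: set aside
    assert should_publish(0, 1) is True     # empty file, check disabled: published
    assert should_publish(42, 0) is True    # non-empty file: always published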
233     Blocks=None
234     try:
235         Blocks=dbswriter.insertFiles(jobReport)
236 <       common.logger.message("Blocks = %s"%Blocks)
236 >       common.logger.message("Inserting files in blocks = %s"%Blocks)
237     except DBSWriterError, ex:
238         common.logger.message("Insert file error: %s"%ex)
239     return Blocks

244     """
245
246     file_list = glob.glob(self.resDir+"crab_fjr*.xml")
247 +   ## Select only those fjr that are successful
248 +   good_list=[]
249 +   for fjr in file_list:
250 +       reports = readJobReport(fjr)
251 +       if len(reports)>0:
252 +           if reports[0].status == "Success":
253 +               good_list.append(fjr)
254 +   file_list=good_list
255 +   ##
256     common.logger.debug(6, "file_list = "+str(file_list))
257     common.logger.debug(6, "len(file_list) = "+str(len(file_list)))
258
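Note that checkSuccess is imported at the top of this patch, but the filter tests reports[0].status directly. The same selection written with a small helper, shown only as a readability sketch (is_successful is an invented name, not part of the patch):

    def is_successful(fjr):
        # readJobReport returns a list of report objects; an empty list
        # means the file did not parse as a framework job report
        reports = readJobReport(fjr)
        return len(reports) > 0 and reports[0].status == "Success"

    file_list = [fjr for fjr in file_list if is_successful(fjr)]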
267
268     common.logger.message("--->>> Start files publication")
269     for file in file_list:
270 <       common.logger.message("file = "+file)
270 >       common.logger.debug(1, "file = "+file)
271         Blocks=self.publishAJobReport(file,self.processedData)
272         if Blocks:
273             for x in Blocks: # do not allow multiple entries of the same block
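
The loop body is cut off by the hunk; judging by its comment, it accumulates each block name only once. A plausible completion under that assumption (blocksList is an invented accumulator name):

    blocksList = []
    for x in Blocks:    # do not allow multiple entries of the same block
        if x not in blocksList:
            blocksList.append(x)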