  6  |  from crab_logger import Logger
  7  |  from crab_exceptions import *
  8  |  from ProdCommon.FwkJobRep.ReportParser import readJobReport
  9  +  from ProdCommon.FwkJobRep.ReportState import checkSuccess
 10  |  from ProdCommon.MCPayloads.WorkflowSpec import WorkflowSpec
 11  |  from ProdCommon.DataMgmt.DBS.DBSWriter import DBSWriter
 12  |  from ProdCommon.DataMgmt.DBS.DBSErrors import DBSWriterError, formatEx,DBSReaderError
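Note: the new ReportState import goes with the success filter this revision adds to run() (lines 230-236 below). A minimal sketch of a per-report check built only from the calls actually visible in this diff, where readJobReport parses a framework job report and returns a list of report objects carrying a status attribute; checkSuccess is imported at line 9 but never called in these hunks, so its use is only an assumption noted in the comment:

    from ProdCommon.FwkJobRep.ReportParser import readJobReport

    def is_successful_fjr(fjr_path):
        # Parse the crab_fjr_*.xml report; readJobReport returns a list of
        # report objects (usage mirrored from the run() hunk below).
        reports = readJobReport(fjr_path)
        # Keep only reports whose first entry ended with status "Success".
        # checkSuccess from ReportState presumably offers a similar test,
        # but that call is not exercised anywhere in this diff.
        return len(reports) > 0 and reports[0].status == "Success"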
134  |  datasets=fileinfo.dataset
135  |  common.logger.debug(6,"FileInfo = " + str(fileinfo))
136  |  common.logger.debug(6,"DatasetInfo = " + str(datasets))
137  +  if len(datasets)<=0:
138  +      self.exit_status = '1'
139  +      msg = "Error: No info about dataset in the xml file "+file
140  +      common.logger.message(msg)
141  +      return self.exit_status
142  |  for dataset in datasets:
143  |      #### for production data
144  |      self.processedData = dataset['ProcessedDataset']
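The guard added at lines 137-141 stops publication of a single job report as soon as the FJR carries no dataset information, instead of silently iterating over an empty list. A small standalone sketch of the same early-return pattern; extract_datasets, fileinfo and xml_name are hypothetical names used only for illustration:

    def extract_datasets(fileinfo, xml_name):
        # fileinfo stands in for the parsed FJR file object handled by
        # publishAJobReport; its dataset attribute is a list of dicts.
        datasets = fileinfo.dataset
        if len(datasets) <= 0:
            # Mirror the diff: report the problem and bail out before any
            # DBS interaction happens for this report.
            print("Error: No info about dataset in the xml file " + xml_name)
            return None
        # Each entry is a dict; the diff reads its 'ProcessedDataset' key.
        return [d['ProcessedDataset'] for d in datasets]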
192  |      self.noLFN.append(file['PFN'])
193  |  else:
194  |      if int(file['TotalEvents']) != 0 :
195  <          file.lumisections = {}
195  >          #file.lumisections = {}
196  >          # lumi info are now in run hash
197  >          file.runs = {}
198  |          for ds in file.dataset:
199  |              ### FEDE FOR NEW LFN ###
200  |              #ds['ProcessedDataset']=procdataset
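Lines 195-197 drop the flat per-file lumisections dict in favour of an empty runs mapping, following the "# lumi info are now in run hash" comment. This hunk does not show how that mapping is filled, so the sketch below only illustrates the general idea of keying lumi sections by run number; the run and lumi values are invented for illustration:

    # Hypothetical run-keyed lumi structure; the real FwkJobRep file object
    # may populate and shape file.runs differently.
    runs = {}
    runs[122314] = [1, 2, 3]     # run number -> list of lumi sections
    runs[122315] = [10, 11]
    for run_number, lumis in runs.items():
        print("run %s carries %s lumi sections" % (run_number, len(lumis)))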
216  |  Blocks=None
217  |  try:
218  |      Blocks=dbswriter.insertFiles(jobReport)
219  <      common.logger.message("Blocks = %s"%Blocks)
219  >      common.logger.message("Inserting file in blocks = %s"%Blocks)
220  |  except DBSWriterError, ex:
221  <      common.logger.message("Insert file error: %s"%ex)
221  >      common.logger.error("Insert file error: %s"%ex)
222  |  return Blocks
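The two changed lines in this hunk only touch logging: the post-insert message now states what is being done, and a DBSWriterError is reported through common.logger.error rather than common.logger.message. A minimal sketch of the surrounding pattern as a standalone helper, assuming the logger exposes the message/error calls used throughout this file; insert_report and its arguments are hypothetical names:

    from ProdCommon.DataMgmt.DBS.DBSErrors import DBSWriterError

    def insert_report(dbswriter, jobReport, logger):
        # Wrap the call made at line 218: insertFiles registers the report's
        # files in DBS and returns the blocks they were inserted into.
        Blocks = None
        try:
            Blocks = dbswriter.insertFiles(jobReport)
            logger.message("Inserting file in blocks = %s" % Blocks)
        except DBSWriterError, ex:
            # After this change the failure is logged at error severity.
            logger.error("Insert file error: %s" % ex)
        return Blocks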
223  |
224  |  def run(self):
227  |      """
228  |
229  |      file_list = glob.glob(self.resDir+"crab_fjr*.xml")
230  +      ## Select only those fjr that are succesfull
231  +      good_list=[]
232  +      for fjr in file_list:
233  +          reports = readJobReport(fjr)
234  +          if reports[0].status == "Success":
235  +              good_list.append(fjr)
236  +      file_list=good_list
237  +      ##
238  |      common.logger.debug(6, "file_list = "+str(file_list))
239  |      common.logger.debug(6, "len(file_list) = "+str(len(file_list)))
240  |
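The block added at lines 230-237 filters the globbed report list down to FJRs whose first report ended in Success, so only good jobs reach publication. An equivalent, more compact form of the same filter is sketched below; like the added code, it assumes readJobReport never returns an empty list:

    import glob
    from ProdCommon.FwkJobRep.ReportParser import readJobReport

    def successful_fjrs(res_dir):
        # Same selection as lines 229-236, written as a list comprehension.
        file_list = glob.glob(res_dir + "crab_fjr*.xml")
        return [fjr for fjr in file_list
                if readJobReport(fjr)[0].status == "Success"]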
249  |
250  |  common.logger.message("--->>> Start files publication")
251  |  for file in file_list:
252  <      common.logger.message("file = "+file)
252  >      common.logger.debug(1, "file = "+file)
253  |      Blocks=self.publishAJobReport(file,self.processedData)
254  |      if Blocks:
255  |          for x in Blocks: # do not allow multiple entries of the same block
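The loop starting at line 251 publishes each surviving report and gathers the returned blocks; the comment at line 255 says the same block must not be recorded twice, but the loop body itself lies outside this hunk. One possible way to honour that constraint is sketched below; merge_blocks and the block names are invented for illustration:

    def merge_blocks(all_blocks, new_blocks):
        # Append each block only once, as the comment at line 255 requires.
        for x in new_blocks:
            if x not in all_blocks:
                all_blocks.append(x)
        return all_blocks

    # Example with made-up block names:
    print(merge_blocks(["/A/B#1"], ["/A/B#1", "/A/B#2"]))  # ['/A/B#1', '/A/B#2']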