Comparing COMP/CRAB/python/Splitter.py (file contents):
Revision 1.29 by ewv, Thu Oct 1 22:00:40 2009 UTC vs.
Revision 1.46 by spiga, Mon Sep 27 08:56:08 2010 UTC

# Line 3 | Line 3 | __revision__ = "$Id$"
3   __version__ = "$Revision$"
4  
5   import common
6 from sets import Set
6   from crab_exceptions import *
7   from crab_util import *
8  
# Line 14 | Line 13 | from WMCore.DataStructs.Subscription imp
13   from WMCore.DataStructs.Workflow import Workflow
14   from WMCore.JobSplitting.SplitterFactory import SplitterFactory
15   from WMCore.SiteScreening.BlackWhiteListParser import SEBlackWhiteListParser
16 + try: # Can remove when CMSSW 3.7 and earlier are dropped
17 +    from FWCore.PythonUtilities.LumiList import LumiList
18 + except ImportError:
19 +    from LumiList import LumiList
20  
21   class JobSplitter:
22      def __init__( self, cfg_params,  args ):
# Line 33 | Line 36 | class JobSplitter:
36          seBlackList = cfg_params.get('GRID.se_black_list',[])
37          self.blackWhiteListParser = SEBlackWhiteListParser(self.seWhiteList, seBlackList, common.logger())
38  
39 +        ## check whether a non-default file to store/read analyzed fileBlocks has been requested
40 +        defaultName = common.work_space.shareDir()+'AnalyzedBlocks.txt'
41 +        self.fileBlocks_FileName = os.path.abspath(self.cfg_params.get('CMSSW.fileblocks_file',defaultName))
42 +
43  
44      def checkUserSettings(self):
45          ## Events per job
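The new fileblocks_file option is read from the [CMSSW] section of the task configuration. A minimal crab.cfg sketch (the path is illustrative; by default the list goes to AnalyzedBlocks.txt in the task share directory):

    [CMSSW]
    # hypothetical path; omit to use <share dir>/AnalyzedBlocks.txt
    fileblocks_file = /home/user/myAnalyzedBlocks.txt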
# Line 62 | Line 69 | class JobSplitter:
69              self.total_number_of_events = 0
70              self.selectTotalNumberEvents = 0
71  
72 +        return
73  
74      def checkLumiSettings(self):
75          """
# Line 85 | Line 93 | class JobSplitter:
93              settings += 1
94  
95          if settings != 2:
96 <            msg = 'When running on analysis datasets you must specify two and only two of:\n'
96 >            msg = 'When splitting by lumi section you must specify two and only two of:\n'
97              msg += '  number_of_jobs, lumis_per_job, total_number_of_lumis'
98              raise CrabException(msg)
99          if self.limitNJobs and self.limitJobLumis:
100              self.limitTotalLumis = True
101              self.totalNLumis = self.lumisPerJob * self.theNumberOfJobs
102  
103 +        # Has the user specified runselection?
104 +        if (self.cfg_params.has_key('CMSSW.runselection')):
105 +            common.logger.info('You have specified runselection together with lumi-based splitting.')
106 +            common.logger.info('The good lumi list will be the intersection of runselection and the lumimask or ADS (if any).')
107 +        return
108  
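Under the tightened check above, exactly two of the three splitting parameters must be given. A hedged crab.cfg sketch of a valid request (values illustrative; runselection is optional and, per the new log messages, is intersected with any lumi mask or ADS good-lumi list):

    [CMSSW]
    # pick exactly two of: number_of_jobs, lumis_per_job, total_number_of_lumis
    lumis_per_job  = 50
    number_of_jobs = 100
    # optional run range, intersected with the lumi mask / ADS (if any)
    runselection   = 135000-136000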
109      def ComputeSubBlockSites( self, blockSites ):
110          """
# Line 373 | Line 386 | class JobSplitter:
386          allBlock = []
387  
388          blockCounter = 0
389 +        saveFblocks = ''
390          for block in blocks:
391              if block in jobsOfBlock.keys() :
392                  blockCounter += 1
# Line 383 | Line 397 | class JobSplitter:
397                  if len(sites) == 0:
398                      noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
399                      bloskNoSite.append( blockCounter )
400 +                else:
401 +                    saveFblocks += str(block) + '\n'
402 +        writeTXTfile(self, self.fileBlocks_FileName, saveFblocks)
403  
404          common.logger.info(screenOutput)
405          if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
# Line 411 | Line 428 | class JobSplitter:
428              common.logger.info(msg)
429  
430          if bloskNoSite == allBlock:
431 <            raise CrabException('No jobs created')
431 >            msg += 'Requested jobs cannot be created!\n'
432 >            if self.cfg_params.has_key('GRID.se_white_list'):
433 >                msg += '\tWARNING: SE White List: '+self.cfg_params['GRID.se_white_list']+'\n'
434 >                msg += '\t(Hint: By whitelisting you force the jobs to run at these particular site(s).\n'
435 >                msg += '\tPlease check if the dataset is available at these sites!)\n'
436 >            if self.cfg_params.has_key('GRID.ce_white_list'):
437 >                msg += '\tWARNING: CE White List: '+self.cfg_params['GRID.ce_white_list']+'\n'
438 >                msg += '\t(Hint: By whitelisting you force the jobs to run at these particular site(s).\n'
439 >                msg += '\tPlease check if the dataset is available at these sites!)\n'
440 >            raise CrabException(msg)
441  
442          return
443  
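The expanded error message above points at the GRID-section whitelists. An illustrative crab.cfg fragment (the site string is a placeholder; whitelisting forces jobs to those sites, so the dataset must be hosted there):

    [GRID]
    se_white_list = T2_IT_Bari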
# Line 458 | Line 484 | class JobSplitter:
484          jobfactory = splitter(subs)
485  
486          #loop over all runs
461        set = Set(runList)
487          list_of_lists = []
488          jobDestination = []
489 +        list_of_blocks = []
490          count = 0
491          for jobGroup in  jobfactory():
492              if count <  self.theNumberOfJobs:
# Line 472 | Line 498 | class JobSplitter:
498                  list_of_lists.append([fullString,str(-1),str(0)])
499                  #need to check single file location
500                  jobDestination.append(res['locations'])
501 +                list_of_blocks.append(res['block'])
502                  count +=1
503 <       # prepare dict output
503 >        # prepare dict output
504          dictOut = {}
505          dictOut['params']= ['InputFiles','MaxEvents','SkipEvents']
506          dictOut['args'] = list_of_lists
507          dictOut['jobDestination'] = jobDestination
508          dictOut['njobs']=count
509  
510 +        self.cacheBlocks(list_of_blocks, jobDestination)
511 +
512          return dictOut
513  
514      def getJobInfo( self,jobGroup ):
# Line 493 | Line 522 | class JobSplitter:
522                  for loc in file['locations']:
523                      if tmp_check < 1 :
524                          locations.append(loc)
525 +                        res['block'] = file['block']
526                  tmp_check = tmp_check + 1
497                ### the check for the locations should go here
527          res['lfns'] = lfns
528          res['locations'] = locations
529          return res
# Line 539 | Line 568 | class JobSplitter:
568  
569          managedGenerators =self.args['managedGenerators']
570          generator = self.args['generator']
571 <        firstRun = self.cfg_params.get('CMSSW.first_run', 1)
571 >        firstLumi = self.cfg_params.get('CMSSW.first_lumi', 1)
572  
573          self.prepareSplittingNoInput()
574  
# Line 560 | Line 589 | class JobSplitter:
589              ## Since there is no input, any site is good
590              jobDestination.append([""]) # must be empty to correctly write the XML
591              args=[]
592 <            if (firstRun): # Pythia first run
593 <                args.append(str(int(firstRun)+i))
592 >            if (firstLumi): # Pythia first lumi
593 >                args.append(str(int(firstLumi)+i))
594              if (generator in managedGenerators):
595                 args.append(generator)
596                 if (generator == 'comphep' and i == 0):
# Line 575 | Line 604 | class JobSplitter:
604  
605          dictOut = {}
606          dictOut['params'] = ['MaxEvents']
607 <        if (firstRun):
608 <            dictOut['params'] = ['FirstRun','MaxEvents']
609 <            if ( generator in managedGenerators ) :
610 <                dictOut['params'] = ['FirstRun', 'Generator', 'FirstEvent', 'MaxEvents']
607 >        if (firstLumi):
608 >            dictOut['params'] = ['FirstLumi','MaxEvents']
609 >            if (generator in managedGenerators):
610 >                dictOut['params'] = ['FirstLumi', 'Generator', 'FirstEvent', 'MaxEvents']
611          else:
612              if (generator in managedGenerators) :
613                  dictOut['params'] = ['Generator', 'FirstEvent', 'MaxEvents']
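A quick sketch of the per-job argument produced by the renamed FirstLumi parameter above, assuming first_lumi = 1 and three jobs (MaxEvents is filled in elsewhere in this method); each generation job starts from a distinct lumi section:

    firstLumi = 1
    for i in range(3):
        print str(int(firstLumi) + i)   # jobs get FirstLumi = 1, 2, 3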
# Line 635 | Line 664 | class JobSplitter:
664          so the job will have AT LEAST as many lumis as requested, perhaps
665          more
666          """
667 <
667 >        self.useParent = int(self.cfg_params.get('CMSSW.use_parent',0))
668          common.logger.debug('Splitting by Lumi')
669          self.checkLumiSettings()
670  
# Line 643 | Line 672 | class JobSplitter:
672          pubdata = self.args['pubdata']
673  
674          lumisPerFile  = pubdata.getLumis()
675 <
675 >        self.parentFiles=pubdata.getParent()
676          # Make the list of WMBS files for job splitter
677          fileList = pubdata.getListFiles()
678 <        thefiles = Fileset(name='FilesToSplit')
678 >        wmFileList = []
679          for jobFile in fileList:
680              block = jobFile['Block']['Name']
681              try:
# Line 654 | Line 683 | class JobSplitter:
683              except:
684                  continue
685              wmbsFile = File(jobFile['LogicalFileName'])
686 +            if not blockSites[block]:
687 +                wmbsFile['locations'].add('Nowhere')
688              [ wmbsFile['locations'].add(x) for x in blockSites[block] ]
689              wmbsFile['block'] = block
690              for lumi in lumisPerFile[jobFile['LogicalFileName']]:
691                  wmbsFile.addRun(Run(lumi[0], lumi[1]))
692 <            thefiles.addFile(wmbsFile)
692 >            wmFileList.append(wmbsFile)
693 >
694 >        fileSet = set(wmFileList)
695 >        thefiles = Fileset(name='FilesToSplit', files = fileSet)
696  
697          # Create the factory and workflow
698          work = Workflow()
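For reference, a minimal self-contained sketch of the WMBS bookkeeping assembled above, using the WMCore.DataStructs classes already imported by this file (LFN, block, and site names are illustrative):

    from WMCore.DataStructs.File import File
    from WMCore.DataStructs.Run import Run
    from WMCore.DataStructs.Fileset import Fileset

    wmbsFile = File('/store/data/Run2010A/MinBias/RECO/0000/ABCD.root')
    wmbsFile['locations'].add('T2_IT_Bari')    # site(s) hosting the block
    wmbsFile['block'] = '/MinBias/Run2010A-v1/RECO#1234'
    wmbsFile.addRun(Run(136066, 1))            # run number, then lumi section(s)
    thefiles = Fileset(name='FilesToSplit', files=set([wmbsFile]))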
# Line 671 | Line 705 | class JobSplitter:
705          jobDestination = []
706          jobCount = 0
707          lumisCreated = 0
708 <
708 >        list_of_blocks = []
709          if not self.limitJobLumis:
710              self.lumisPerJob = pubdata.getMaxLumis() // self.theNumberOfJobs + 1
711              common.logger.info('Each job will process about %s lumis.' %
712                                  self.lumisPerJob)
713  
714 <        for jobGroup in  jobFactory(lumis_per_job = self.lumisPerJob):
714 >        for jobGroup in jobFactory(lumis_per_job = self.lumisPerJob):
715              for job in jobGroup.jobs:
716                  if (self.limitNJobs and jobCount >= self.theNumberOfJobs):
717 <                    common.logger.info('Limit on number of jobs reached.')
717 >                    common.logger.info('Requested number of jobs reached.')
718                      break
719                  if (self.limitTotalLumis and lumisCreated >= self.totalNLumis):
720 <                    common.logger.info('Limit on number of lumis reached.')
720 >                    common.logger.info('Requested number of lumis reached.')
721                      break
722                  lumis = []
723                  lfns  = []
724 +                if self.useParent == 1:
725 +                    parentlfns = []
726 +                    pString = ""
727 +
728                  locations = []
729 +                blocks = []
730                  firstFile = True
731                  # Collect information from all the files
732                  for jobFile in job.getFiles():
733 +                    doFile = False
734                      if firstFile:  # Get locations from first file in the job
735                          for loc in jobFile['locations']:
736                              locations.append(loc)
737 +                        blocks.append(jobFile['block'])
738                          firstFile = False
739                      # Accumulate Lumis from all files
740                      for lumiList in jobFile['runs']:
741                          theRun = lumiList.run
742                          for theLumi in list(lumiList):
743 <                            lumis.append( (theRun, theLumi) )
744 <
745 <                    lfns.append(jobFile['lfn'])
743 >                            if (not self.limitTotalLumis) or \
744 >                               (lumisCreated < self.totalNLumis):
745 >                                doFile = True
746 >                                lumisCreated += 1
747 >                                lumis.append( (theRun, theLumi) )
748 >                    if doFile:
749 >                        lfns.append(jobFile['lfn'])
750 >                        if self.useParent == 1:
751 >                            parent = self.parentFiles[jobFile['lfn']]
752 >                            for p in parent:
753 >                                pString += p + ','
754                  fileString = ','.join(lfns)
755 <                lumiString = compressLumiString(lumis)
756 <                list_of_lists.append([fileString, str(-1), str(0), lumiString])
757 <
755 >                lumiLister = LumiList(lumis = lumis)
756 >                lumiString = lumiLister.getCMSSWString()
757 >                if self.useParent == 1:
758 >                    common.logger.debug("Files: "+fileString+" with the following parents: "+pString[:-1])
759 >                    pfileString = pString[:-1]
760 >                    list_of_lists.append([fileString, pfileString, str(-1), str(0), lumiString])
761 >                else:
762 >                    list_of_lists.append([fileString, str(-1), str(0), lumiString])
763 >                list_of_blocks.append(blocks)
764                  jobDestination.append(locations)
765                  jobCount += 1
711                lumisCreated += len(lumis)
766                  common.logger.debug('Job %s will run on %s files and %s lumis '
767                      % (jobCount, len(lfns), len(lumis) ))
768  
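The lumi-string assembly above replaces the hand-rolled compressLumiString helper (removed at the bottom of this patch) with LumiList, imported at the top with a fallback for CMSSW 3.7 and earlier. A minimal sketch of the call (run/lumi pairs are illustrative):

    from FWCore.PythonUtilities.LumiList import LumiList

    lumis = [(1, 2), (1, 3), (1, 4), (2, 10)]     # (run, lumi) pairs
    print LumiList(lumis=lumis).getCMSSWString()  # '1:2-1:4,2:10'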
# Line 718 | Line 772 | class JobSplitter:
772          # Prepare dict output matching back to non-WMBS job creation
773          dictOut = {}
774          dictOut['params'] = ['InputFiles', 'MaxEvents', 'SkipEvents', 'Lumis']
775 +        if self.useParent == 1:
776 +            dictOut['params'] = ['InputFiles', 'ParentFiles', 'MaxEvents', 'SkipEvents', 'Lumis']
777          dictOut['args'] = list_of_lists
778          dictOut['jobDestination'] = jobDestination
779          dictOut['njobs'] = jobCount
780  
781 +        self.cacheBlocks(list_of_blocks, jobDestination)
782 +
783          return dictOut
784  
785 +    def cacheBlocks(self, blocks, destinations):
786 +
787 +        saveFblocks = ''
788 +        for i in range(len(blocks)):
789 +            sites = self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(destinations[i]))
790 +            if len(sites) != 0:
791 +                for block in blocks[i]:
792 +                    saveFblocks += str(block) + '\n'
793 +        writeTXTfile(self, self.fileBlocks_FileName, saveFblocks)
794  
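A self-contained sketch of what the new cacheBlocks step writes, with the black/white-list filter reduced to a pass-through and illustrative block and site names; writeTXTfile (from crab_util) is assumed to simply dump the string to the configured file:

    blocks       = [['/MinBias/Run2010A-v1/RECO#aaaa'],
                    ['/MinBias/Run2010A-v1/RECO#bbbb']]
    destinations = [['T2_IT_Bari'], []]   # the second job kept no allowed site
    saveFblocks = ''
    for i in range(len(blocks)):
        if len(destinations[i]) != 0:     # record only blocks with a site
            for block in blocks[i]:
                saveFblocks += str(block) + '\n'
    open('AnalyzedBlocks.txt', 'w').write(saveFblocks)
    # AnalyzedBlocks.txt now lists only /MinBias/Run2010A-v1/RECO#aaaa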
795      def Algos(self):
796          """
# Line 738 | Line 805 | class JobSplitter:
805                       }
806          return SplitAlogs
807  
741
742
743 def compressLumiString(lumis):
744    """
745    Turn a list of 2-tuples of run/lumi numbers into a string of the format
746    R1:L1,R2:L2-R3:L3, which is acceptable to the CMSSW LumiBlockRange variable
747    """
748
749    lumis.sort()
750    parts = []
751    startRange = None
752    endRange = None
753
754    for lumiBlock in lumis:
755        if not startRange: # This is the first one
756            startRange = lumiBlock
757            endRange = lumiBlock
758        elif lumiBlock == endRange: # Same Lumi (different files?)
759            pass
760        elif lumiBlock[0] == endRange[0] and lumiBlock[1] == endRange[1] + 1: # This is a continuation
761            endRange = lumiBlock
762        else: # This is the start of a new range
763            part = ':'.join(map(str, startRange))
764            if startRange != endRange:
765                part += '-' + ':'.join(map(str, endRange))
766            parts.append(part)
767            startRange = lumiBlock
768            endRange = lumiBlock
769
770    # Put out what's left
771    if startRange:
772        part = ':'.join(map(str, startRange))
773        if startRange != endRange:
774            part += '-' + ':'.join(map(str, endRange))
775        parts.append(part)
776
777    output = ','.join(parts)
778    return output
779
780

Diff Legend

  Removed lines (old line number only, no marker)
+ Added lines
< Changed lines (old version, revision 1.29)
> Changed lines (new version, revision 1.46)