
Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.230 by fanzago, Wed Aug 20 12:43:43 2008 UTC vs.
Revision 1.247 by fanzago, Mon Sep 29 17:19:16 2008 UTC

# Line 5 | Line 5 | from crab_util import *
5   from BlackWhiteListParser import SEBlackWhiteListParser
6   import common
7   import Scram
8 from LFNBaseName import *
8  
9   import os, string, glob
10  
# Line 22 | Line 21 | class Cmssw(JobType):
21          # init BlackWhiteListParser
22          self.blackWhiteListParser = SEBlackWhiteListParser(cfg_params)
23  
24 <        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
24 >        ### Temporary patch to automatically skip the ISB size check:
25 >        server=self.cfg_params.get('CRAB.server_name',None)
26 >        size = 9.5
27 >        if server: size = 99999
28 >        ### D.S.
29 >        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',size))
30  
31          # number of jobs requested to be created, limit obj splitting
32          self.ncjobs = ncjobs
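
The change above only alters the default for EDG.maxtarballsize: when CRAB.server_name is set, the input-sandbox (ISB) size check is effectively skipped by raising the default limit, while an explicit EDG.maxtarballsize in crab.cfg still takes precedence. A minimal standalone sketch of that lookup, assuming cfg_params can be treated as a plain dict (the function name is illustrative, not part of the patch):

    # Illustrative sketch only; mirrors the patched default-size logic above.
    def resolve_max_tarball_size(cfg_params):
        server = cfg_params.get('CRAB.server_name', None)
        # With a CRAB server configured, the ISB size check is effectively
        # skipped by raising the default limit; otherwise 9.5 MB applies.
        size = 99999 if server else 9.5
        # An explicit EDG.maxtarballsize in crab.cfg still overrides the default.
        return float(cfg_params.get('EDG.maxtarballsize', size))

    # Direct submission keeps the 9.5 MB default:
    assert resolve_max_tarball_size({}) == 9.5
    # Server-mode submission effectively disables the check:
    assert resolve_max_tarball_size({'CRAB.server_name': 'my_server'}) == 99999
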
# Line 66 | Line 70 | class Cmssw(JobType):
70  
71          tmp =  cfg_params['CMSSW.datasetpath']
72          log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
73 <        if string.lower(tmp)=='none':
73 >
74 >        if tmp =='':
75 >            msg = "Error: datasetpath not defined "
76 >            raise CrabException(msg)
77 >        elif string.lower(tmp)=='none':
78              self.datasetPath = None
79              self.selectNoInput = 1
80          else:
# Line 230 | Line 238 | class Cmssw(JobType):
238                  self.jobSplittingForScript()
239              else:
240                  self.jobSplittingNoInput()
241 +        elif (cfg_params.get('CMSSW.noblockboundary',0)):
242 +            self.jobSplittingNoBlockBoundary(blockSites)
243          else:
244              self.jobSplittingByBlocks(blockSites)
245  
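
With this hunk the splitting strategy is chosen from three paths: no-input (MC-style) splitting when datasetpath is 'none', the new no-block-boundary splitting when CMSSW.noblockboundary is set, and the default per-block splitting otherwise. A simplified sketch of that selection follows; the function name and example values are illustrative, and the real method additionally rejects an empty datasetpath and handles the scriptExe case:

    # Simplified illustration of the dispatch above (not the full control flow).
    def choose_splitting(cfg_params):
        datasetpath = cfg_params.get('CMSSW.datasetpath', '')
        if datasetpath.lower() == 'none':
            return 'jobSplittingNoInput'           # no input dataset (MC-style)
        elif cfg_params.get('CMSSW.noblockboundary', 0):
            # any truthy value enables the new path, mirroring the patch above
            return 'jobSplittingNoBlockBoundary'   # jobs may span DBS blocks
        else:
            return 'jobSplittingByBlocks'          # default: jobs stay inside a block

    # e.g. a crab.cfg carrying "noblockboundary = 1" under [CMSSW]:
    assert choose_splitting({'CMSSW.datasetpath': '/Prim/Proc/TIER',
                             'CMSSW.noblockboundary': 1}) == 'jobSplittingNoBlockBoundary'
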
# Line 575 | Line 585 | class Cmssw(JobType):
585          self.list_of_args = list_of_lists
586          return
587  
588 +    def jobSplittingNoBlockBoundary(self,blockSites):
589 +        """Split jobs by number of events, allowing a single job to read
590 +        files from more than one block (no reset at block boundaries)."""
591 +        # ---- Handle the possible job splitting configurations ---- #
592 +        if (self.selectTotalNumberEvents):
593 +            totalEventsRequested = self.total_number_of_events
594 +        if (self.selectEventsPerJob):
595 +            eventsPerJobRequested = self.eventsPerJob
596 +            if (self.selectNumberOfJobs):
597 +                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
598 +
599 +        # If user requested all the events in the dataset
600 +        if (totalEventsRequested == -1):
601 +            eventsRemaining=self.maxEvents
602 +        # If user requested more events than are in the dataset
603 +        elif (totalEventsRequested > self.maxEvents):
604 +            eventsRemaining = self.maxEvents
605 +            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
606 +        # If user requested less events than are in the dataset
607 +        else:
608 +            eventsRemaining = totalEventsRequested
609 +
610 +        # If user requested more events per job than are in the dataset
611 +        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
612 +            eventsPerJobRequested = self.maxEvents
613 +
614 +        # For user info at end
615 +        totalEventCount = 0
616 +
617 +        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
618 +            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
619 +
620 +        if (self.selectNumberOfJobs):
621 +            common.logger.message("May not create the exact number_of_jobs requested.")
622 +
623 +        if ( self.ncjobs == 'all' ) :
624 +            totalNumberOfJobs = 999999999
625 +        else :
626 +            totalNumberOfJobs = self.ncjobs
627 +
628 +        blocks = blockSites.keys()
629 +        blockCount = 0
630 +        # Backup variable in case self.maxEvents counted events in a non-included block
631 +        numBlocksInDataset = len(blocks)
632 +
633 +        jobCount = 0
634 +        list_of_lists = []
635 +
636 +        #AF
637 +        #AF do not reset input files and event count on block boundary
638 +        #AF
639 +        parString=""
640 +        filesEventCount = 0
641 +        #AF
642 +
643 +        # list tracking which jobs belong to which block
644 +        jobsOfBlock = {}
645 +        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
646 +            block = blocks[blockCount]
647 +            blockCount += 1
648 +            if block not in jobsOfBlock.keys() :
649 +                jobsOfBlock[block] = []
650 +
651 +            if self.eventsbyblock.has_key(block) :
652 +                numEventsInBlock = self.eventsbyblock[block]
653 +                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
654 +                files = self.filesbyblock[block]
655 +                numFilesInBlock = len(files)
656 +                if (numFilesInBlock <= 0):
657 +                    continue
658 +                fileCount = 0
659 +                #AF
660 +                #AF do not reset input files and event count on block boundary
661 +                #AF
662 +                ## ---- New block => New job ---- #
663 +                #parString = ""
664 +                # counter for number of events in files currently worked on
665 +                #filesEventCount = 0
666 +                #AF
667 +                # flag if next while loop should touch new file
668 +                newFile = 1
669 +                # job event counter
670 +                jobSkipEventCount = 0
671 +
672 +                # ---- Iterate over the files in the block until we've met the requested ---- #
673 +                # ---- total # of events or we've gone over all the files in this block  ---- #
674 +                pString=''
675 +                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
676 +                    file = files[fileCount]
677 +                    if self.useParent:
678 +                        parent = self.parentFiles[file]
679 +                        for f in parent :
680 +                            pString += '\\\"' + f + '\\\"\,'
681 +                        common.logger.debug(6, "File "+str(file)+" has the following parents: "+str(parent))
682 +                        common.logger.write("File "+str(file)+" has the following parents: "+str(parent))
683 +                    if newFile :
684 +                        try:
685 +                            numEventsInFile = self.eventsbyfile[file]
686 +                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
687 +                            # increase filesEventCount
688 +                            filesEventCount += numEventsInFile
689 +                            # Add file to current job
690 +                            parString += '\\\"' + file + '\\\"\,'
691 +                            newFile = 0
692 +                        except KeyError:
693 +                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
694 +                    eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining)
695 +                    #common.logger.message("AF filesEventCount %s - jobSkipEventCount %s "%(filesEventCount,jobSkipEventCount))  
696 +                    # if less events in file remain than eventsPerJobRequested
697 +                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested):
698 +                      #AF
699 +                      #AF skip fileboundary part
700 +                      #AF
701 +                            # go to next file
702 +                            newFile = 1
703 +                            fileCount += 1
704 +                    # if events in file equal to eventsPerJobRequested
705 +                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
706 +                        # close job and touch new file
707 +                        fullString = parString[:-2]
708 +                        if self.useParent:
709 +                            fullParentString = pString[:-2]
710 +                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
711 +                        else:
712 +                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
713 +                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
714 +                        self.jobDestination.append(blockSites[block])
715 +                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
716 +                        jobsOfBlock[block].append(jobCount+1)
717 +                        # reset counter
718 +                        jobCount = jobCount + 1
719 +                        totalEventCount = totalEventCount + eventsPerJobRequested
720 +                        eventsRemaining = eventsRemaining - eventsPerJobRequested
721 +                        jobSkipEventCount = 0
722 +                        # reset file
723 +                        pString = ""
724 +                        parString = ""
725 +                        filesEventCount = 0
726 +                        newFile = 1
727 +                        fileCount += 1
728 +
729 +                    # if more events in file remain than eventsPerJobRequested
730 +                    else :
731 +                        # close job but don't touch new file
732 +                        fullString = parString[:-2]
733 +                        if self.useParent:
734 +                            fullParentString = pString[:-2]
735 +                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
736 +                        else:
737 +                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
738 +                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
739 +                        self.jobDestination.append(blockSites[block])
740 +                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
741 +                        jobsOfBlock[block].append(jobCount+1)
742 +                        # increase counter
743 +                        jobCount = jobCount + 1
744 +                        totalEventCount = totalEventCount + eventsPerJobRequested
745 +                        eventsRemaining = eventsRemaining - eventsPerJobRequested
746 +                        # calculate skip events for last file
747 +                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
748 +                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
749 +                        # remove all but the last file
750 +                        filesEventCount = self.eventsbyfile[file]
751 +                        if self.useParent:
752 +                            for f in parent : pString += '\\\"' + f + '\\\"\,'
753 +                        parString = '\\\"' + file + '\\\"\,'
754 +                    pass # END if
755 +                pass # END while (iterate over files in the block)
756 +        pass # END while (iterate over blocks in the dataset)
757 +        self.ncjobs = self.total_number_of_jobs = jobCount
758 +        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
759 +            common.logger.message("eventsRemaining "+str(eventsRemaining))
760 +            common.logger.message("jobCount "+str(jobCount))
761 +            common.logger.message(" totalNumberOfJobs "+str(totalNumberOfJobs))
762 +            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
763 +        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
764 +
765 +        # screen output
766 +        screenOutput = "List of jobs and available destination sites:\n\n"
767 +
768 +        #AF
769 +        #AF   skip check on  block with no sites
770 +        #AF
771 +        self.list_of_args = list_of_lists
772 +
773 +        return
774 +
775 +
776 +
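
To make the new splitting strategy easier to follow, here is a compact, self-contained model of the same idea: accumulate events file by file, ignoring block boundaries, and emit one job per events_per_job, carrying a skip-event offset into the first, partially consumed file of the next job. All names and the data shape are illustrative; the method above additionally honours the requested total number of events, the ncjobs limit, parent files, and per-block destination lists.

    # Hypothetical, simplified model of jobSplittingNoBlockBoundary.
    def split_ignoring_block_boundaries(files_with_events, events_per_job):
        """files_with_events: ordered list of (lfn, n_events) pairs, possibly
        drawn from several blocks. Returns one (input_files, skip_events,
        max_events) triple per job."""
        jobs = []
        current_files = []     # files feeding the job currently being built
        accumulated = 0        # events available in current_files
        skip = 0               # events of current_files already used by earlier jobs
        for lfn, n_events in files_with_events:
            current_files.append(lfn)
            accumulated += n_events
            # Close as many full jobs as the accumulated events allow.
            while accumulated - skip >= events_per_job:
                jobs.append((list(current_files), skip, events_per_job))
                skip += events_per_job
                if accumulated - skip == 0:
                    # Job ended exactly on a file boundary: start from scratch.
                    current_files, accumulated, skip = [], 0, 0
                else:
                    # Keep only the last, partially used file for the next job
                    # (this mirrors the jobSkipEventCount arithmetic above).
                    current_files = [lfn]
                    skip -= accumulated - n_events
                    accumulated = n_events
        if accumulated - skip > 0:
            # Leftover events form one final, shorter job.
            jobs.append((list(current_files), skip, accumulated - skip))
        return jobs

    # Two 100-event files and a 150-event job: the first job spans both files,
    # the second reads the last 50 events of the second file.
    assert split_ignoring_block_boundaries([('a.root', 100), ('b.root', 100)], 150) == \
        [(['a.root', 'b.root'], 0, 150), (['b.root'], 50, 50)]

As the #AF comments stress, the structural difference with respect to jobSplittingByBlocks is that parString and filesEventCount (current_files and accumulated here) are not reset when a new block starts, so one job's input list may mix files from different blocks; each job's destination is still taken from the block being read when the job is closed.
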
777      def jobSplittingNoInput(self):
778          """
779          Perform job splitting based on number of event per job
# Line 685 | Line 884 | class Cmssw(JobType):
884          """
885          Return the TarBall with lib and exe
886          """
887 <        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
887 >        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
888          if os.path.exists(self.tgzNameWithPath):
889              return self.tgzNameWithPath
890  
# Line 770 | Line 969 | class Cmssw(JobType):
969              ## Add ProdCommon dir to tar
970              prodcommonDir = './'
971              prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
972 <            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools','ProdCommon/Core','ProdCommon/MCPayloads', 'IMProv']
972 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools', \
973 >                           'ProdCommon/Core', 'ProdCommon/MCPayloads', 'IMProv', 'ProdCommon/Storage']
974              for file in neededStuff:
975                  tar.add(prodcommonPath+file,prodcommonDir+file)
976              common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
# Line 783 | Line 983 | class Cmssw(JobType):
983              common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
984  
985              ##### Utils
986 <            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py']
986 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py','cmscp.py']
987              for file in Utils_file_list:
988                  tar.add(path+file,file)
989              common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
# Line 794 | Line 994 | class Cmssw(JobType):
994              common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
995  
996              tar.close()
997 <        except IOError:
997 >        except IOError, exc:
998 >            common.logger.write(str(exc))
999              raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
1000 <        except tarfile.TarError:
1000 >        except tarfile.TarError, exc:
1001 >            common.logger.write(str(exc))
1002              raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
1003  
1004          ## check for tarball size
1005          tarballinfo = os.stat(self.tgzNameWithPath)
1006          if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
1007 <            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
1007 >            msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) \
1008 >               + ' MB input sandbox limit\n'
1009 >            msg += '      and not supported by the direct GRID submission system.\n'
1010 >            msg += '      Please use the CRAB server mode by setting server_name=<NAME> in section [CRAB] of your crab.cfg.\n'
1011 >            msg += '      For further infos please see https://twiki.cern.ch/twiki/bin/view/CMS/CrabServer#CRABSERVER_for_Users'
1012 >            raise CrabException(msg)
1013  
1014          ## create tar-ball with ML stuff
1015  
# Line 872 | Line 1079 | class Cmssw(JobType):
1079          # Prepare job-specific part
1080          job = common.job_list[nj]
1081          if (self.datasetPath):
1082 +            self.primaryDataset = self.datasetPath.split("/")[1]
1083 +            DataTier = self.datasetPath.split("/")[2]
1084              txt += '\n'
1085              txt += 'DatasetPath='+self.datasetPath+'\n'
1086  
1087 <            datasetpath_split = self.datasetPath.split("/")
1088 <            ### FEDE FOR NEW LFN ###
880 <            self.primaryDataset = datasetpath_split[1]
881 <            ########################
882 <            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
883 <            txt += 'DataTier='+datasetpath_split[2]+'\n'
1087 >            txt += 'PrimaryDataset='+self.primaryDataset +'\n'
1088 >            txt += 'DataTier='+DataTier+'\n'
1089              txt += 'ApplicationFamily=cmsRun\n'
1090  
1091          else:
887            txt += 'DatasetPath=MCDataTier\n'
888            ### FEDE FOR NEW LFN ###
1092              self.primaryDataset = 'null'
1093 <            ########################
1093 >            txt += 'DatasetPath=MCDataTier\n'
1094              txt += 'PrimaryDataset=null\n'
1095              txt += 'DataTier=null\n'
1096              txt += 'ApplicationFamily=MCDataTier\n'
# Line 1034 | Line 1237 | class Cmssw(JobType):
1237          inp_box = []
1238          if os.path.isfile(self.tgzNameWithPath):
1239              inp_box.append(self.tgzNameWithPath)
1240 <        wrapper = os.path.basename(str(common._db.queryTask('scriptName')))
1038 <        inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper)
1240 >        inp_box.append(common.work_space.jobDir() + self.scriptName)
1241          return inp_box
1242  
1243      def outputSandbox(self, nj):
# Line 1085 | Line 1287 | class Cmssw(JobType):
1287              txt += 'fi\n'
1288          file_list = []
1289          for fileWithSuffix in (self.output_file):
1290 <             file_list.append(numberFile(fileWithSuffix, '$NJob'))
1290 >             file_list.append(numberFile('$SOFTWARE_DIR/'+fileWithSuffix, '$NJob'))
1291  
1292 <        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
1292 >        txt += 'file_list="'+string.join(file_list,',')+'"\n'
1293          txt += '\n'
1294          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1295          txt += 'echo ">>> current directory content:"\n'
# Line 1184 | Line 1386 | class Cmssw(JobType):
1386          txt += '    echo "==> setup cms environment ok"\n'
1387          return txt
1388  
1389 <    def modifyReport(self, nj):
1389 >    def wsModifyReport(self, nj):
1390          """
1391          insert the part of the script that modifies the FrameworkJob Report
1392          """
1393 <        txt = '\n#Written by cms_cmssw::modifyReport\n'
1393 >        txt = '\n#Written by cms_cmssw::wsModifyReport\n'
1394          publish_data = int(self.cfg_params.get('USER.publish_data',0))
1395          if (publish_data == 1):
1396 <            processedDataset = self.cfg_params['USER.publish_data_name']
1397 <            if (self.primaryDataset == 'null'):
1398 <                 self.primaryDataset = processedDataset
1197 <            if (common.scheduler.name().upper() == "CAF" or common.scheduler.name().upper() == "LSF"):
1198 <                ### FEDE FOR NEW LFN ###
1199 <                LFNBaseName = LFNBase(self.primaryDataset, processedDataset, LocalUser=True)
1200 <                self.user = getUserName(LocalUser=True)
1201 <                ########################
1202 <            else :
1203 <                ### FEDE FOR NEW LFN ###
1204 <                LFNBaseName = LFNBase(self.primaryDataset, processedDataset)
1205 <                self.user = getUserName()
1206 <                ########################
1207 <
1208 <            txt += 'if [ $copy_exit_status -eq 0 ]; then\n'
1209 <            ### FEDE FOR NEW LFN ###
1210 <            #txt += '    FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName)
1211 <            txt += '    FOR_LFN=%s/${PSETHASH}/\n'%(LFNBaseName)
1212 <            ########################
1396 >
1397 >            txt += 'if [ $StageOutExitStatus -eq 0 ]; then\n'
1398 >            txt += '    FOR_LFN=$LFNBaseName/${PSETHASH}/\n'
1399              txt += 'else\n'
1400              txt += '    FOR_LFN=/copy_problems/ \n'
1401              txt += '    SE=""\n'
# Line 1218 | Line 1404 | class Cmssw(JobType):
1404  
1405              txt += 'echo ">>> Modify Job Report:" \n'
1406              txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1407 <            txt += 'ProcessedDataset='+processedDataset+'\n'
1407 >            txt += 'ProcessedDataset=$procDataset\n'
1408              txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1409              txt += 'echo "SE = $SE"\n'
1410              txt += 'echo "SE_PATH = $SE_PATH"\n'
1411              txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1412              txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1413 <            ### FEDE FOR NEW LFN ###
1414 <            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier ' + self.user + '-$ProcessedDataset-$PSETHASH $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1415 <            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier ' + self.user + '-$ProcessedDataset-$PSETHASH $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1416 <            ########################
1413 >            args = '$RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier ' \
1414 >                   '$User-$ProcessedDataset-$PSETHASH $ApplicationFamily ' + \
1415 >                    '  $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH'
1416 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'"\n'
1417 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'\n'
1418              txt += 'modifyReport_result=$?\n'
1419              txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1420              txt += '    modifyReport_result=70500\n'
# Line 1264 | Line 1451 | class Cmssw(JobType):
1451          txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1452          txt += '    fi\n'
1453            #### Patch to check input data reading for CMSSW16x Hopefully we-ll remove it asap
1454 <
1455 <        if (self.datasetPath and not self.dataset_pu ):
1454 >        txt += '    if [ $executable_exit_status -eq 0 ];then\n'
>> Executable succeded">
1455 >        txt += '      echo ">>> Executable succeeded  $executable_exit_status"\n'
1456 >        if (self.datasetPath and not (self.dataset_pu or self.useParent)) :
1457            # VERIFY PROCESSED DATA
1270            txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1458              txt += '      echo ">>> Verify list of processed files:"\n'
1459              txt += '      echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1460              txt += '      python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
# Line 1291 | Line 1478 | class Cmssw(JobType):
1478              txt += '         echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1479              txt += '         echo "      ==> diff input-files.txt processed-files.txt"\n'
1480              txt += '      fi\n'
1481 <            txt += '    fi\n'
1482 <            txt += '\n'
1481 >        txt += '    elif [ $executable_exit_status -ne 0 ] || [ $executable_exit_status -ne 50015 ] || [ $executable_exit_status -ne 50017 ];then\n'
1482 >        txt += '      echo ">>> Executable failed  $executable_exit_status"\n'
1483 >        txt += '      func_exit\n'
1484 >        txt += '    fi\n'
1485 >        txt += '\n'
1486          txt += 'else\n'
1487          txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1488          txt += 'fi\n'

Diff Legend

  (unmarked)   Removed lines
  +            Added lines
  <            Changed lines (old revision 1.230)
  >            Changed lines (new revision 1.247)