DBS to DAS migration
vlimant committed Jan 21, 2014
1 parent 0465fec commit f21edd6
Showing 5 changed files with 36 additions and 31 deletions.
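In short, the commit retires the DBS command-line client in favor of the DAS client everywhere a file list is resolved from a dataset name, translating the query syntax along the way. A minimal sketch of the before/after call, with a hypothetical dataset path:

dataset = '/RelValMinBias/CMSSW_7_0_0-START70_V1-v1/GEN-SIM'              # hypothetical dataset path
old_cmd = 'dbs search --query "find file where dataset = %s"' % dataset   # DBS syntax, removed below
new_cmd = 'das_client.py --query "file dataset = %s"' % dataset           # DAS syntax, added below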
28 changes: 14 additions & 14 deletions Configuration/Applications/python/ConfigBuilder.py
@@ -33,7 +33,7 @@ class Options:
 defaultOptions.name = "NO NAME GIVEN"
 defaultOptions.evt_type = ""
 defaultOptions.filein = ""
-defaultOptions.dbsquery=""
+defaultOptions.dasquery=""
 defaultOptions.secondfilein = ""
 defaultOptions.customisation_file = []
 defaultOptions.customise_commands = ""
@@ -116,13 +116,13 @@ def filesFromList(fileName,s=None):
     print "found parent files:",sec
     return (prim,sec)
 
-def filesFromDBSQuery(query,s=None):
+def filesFromDASQuery(query,s=None):
     import os
     import FWCore.ParameterSet.Config as cms
     prim=[]
     sec=[]
     print "the query is",query
-    for line in os.popen('dbs search --query "%s"'%(query)):
+    for line in os.popen('das_client.py --query "%s"'%(query)):
         if line.count(".root")>=2:
             #two files solution...
             entries=line.replace("\n","").split()
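For orientation, here is a simplified, standalone sketch of what the renamed helper does with the client output. It assumes das_client.py prints one whitespace-separated line per file, with any parent file on the same line, and it omits the cms.Source wiring the real helper performs:

import os

def files_from_das(query):
    # simplified sketch of filesFromDASQuery above, not the production helper
    prim, sec = [], []
    for line in os.popen('das_client.py --query "%s"' % query):
        entries = line.replace("\n", "").split()
        if line.count(".root") >= 2:    # "two files solution": primary plus parent file
            prim.append(entries[0])
            sec.append(entries[1])
        elif line.count(".root") == 1:  # primary file only (assumed single-file case)
            prim.append(entries[0])
    return prim, sec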
@@ -335,8 +335,8 @@ def filesFromOption(self):
     print "entry",entry
     if entry.startswith("filelist:"):
         filesFromList(entry[9:],self.process.source)
-    elif entry.startswith("dbs:"):
-        filesFromDBSQuery('find file where dataset = %s'%(entry[4:]),self.process.source)
+    elif entry.startswith("dbs:") or entry.startswith("das:"):
+        filesFromDASQuery('file dataset = %s'%(entry[4:]),self.process.source)
     else:
         self.process.source.fileNames.append(self._options.dirin+entry)
 if self._options.secondfilein:
@@ -346,12 +346,12 @@ def filesFromOption(self):
     print "entry",entry
     if entry.startswith("filelist:"):
         self.process.source.secondaryFileNames.extend((filesFromList(entry[9:]))[0])
-    elif entry.startswith("dbs:"):
-        self.process.source.secondaryFileNames.extend((filesFromDBSQuery('find file where dataset = %s'%(entry[4:])))[0])
+    elif entry.startswith("dbs:") or entry.startswith("das:"):
+        self.process.source.secondaryFileNames.extend((filesFromDASQuery('file dataset = %s'%(entry[4:])))[0])
     else:
         self.process.source.secondaryFileNames.append(self._options.dirin+entry)
 
-if self._options.filein or self._options.dbsquery:
+if self._options.filein or self._options.dasquery:
     if self._options.filetype == "EDM":
         self.process.source=cms.Source("PoolSource",
                                        fileNames = cms.untracked.vstring(),
@@ -387,9 +387,9 @@ def filesFromOption(self):
 if ('HARVESTING' in self.stepMap.keys() or 'ALCAHARVEST' in self.stepMap.keys()) and (not self._options.filetype == "DQM"):
     self.process.source.processingMode = cms.untracked.string("RunsAndLumis")
 
-if self._options.dbsquery!='':
+if self._options.dasquery!='':
     self.process.source=cms.Source("PoolSource", fileNames = cms.untracked.vstring(),secondaryFileNames = cms.untracked.vstring())
-    filesFromDBSQuery(self._options.dbsquery,self.process.source)
+    filesFromDASQuery(self._options.dasquery,self.process.source)
 
 if self._options.inputCommands:
     if not hasattr(self.process.source,'inputCommands'): self.process.source.inputCommands=cms.untracked.vstring()
@@ -629,8 +629,8 @@ def addStandardSequences(self):
 
 mixingDict.pop('file')
 if self._options.pileup_input:
-    if self._options.pileup_input.startswith('dbs'):
-        mixingDict['F']=filesFromDBSQuery('find file where dataset = %s'%(self._options.pileup_input[4:],))[0]
+    if self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:'):
+        mixingDict['F']=filesFromDASQuery('file dataset = %s'%(self._options.pileup_input[4:],))[0]
     else:
         mixingDict['F']=self._options.pileup_input.split(',')
 specialization=defineMixing(mixingDict,self._options.fast)
@@ -1252,7 +1252,7 @@ def prepare_GEN(self, sequence = None):
 except:
     loadFailure=True
 #if self.process.source and self.process.source.type_()=='EmptySource':
-if not (self._options.filein or self._options.dbsquery):
+if not (self._options.filein or self._options.dasquery):
     raise Exception("Neither gen fragment of input files provided: this is an inconsistent GEN step configuration")
 
 if not loadFailure:
@@ -2087,7 +2087,7 @@ def prepare(self, doChecking = False):
 if hasattr(self.process.source,"secondaryFileNames"):
     if len(self.process.source.secondaryFileNames.value()):
         ioJson['secondary']=self.process.source.secondaryFileNames.value()
-if self._options.pileup_input and self._options.pileup_input.startswith('dbs'):
+if self._options.pileup_input and (self._options.pileup_input.startswith('dbs:') or self._options.pileup_input.startswith('das:')):
     ioJson['pileup']=self._options.pileup_input[4:]
 for (o,om) in self.process.outputModules_().items():
     ioJson[o]=om.fileName.value()
9 changes: 7 additions & 2 deletions Configuration/Applications/python/Options.py
@@ -251,10 +251,15 @@
                           default=defaultOptions.particleTable,
                           dest="particleTable")
 
+expertSettings.add_option("--dasquery",
+                          help="Allow to define the source.fileNames from the das search command",
+                          default='',
+                          dest="dasquery")
+
 expertSettings.add_option("--dbsquery",
-                          help="Allow to define the source.fileNames from the dbs search command",
+                          help="Deprecated. Please use dasquery option. Functions for backward compatibility",
                           default='',
-                          dest="dbsquery")
+                          dest="dasquery")
 
 expertSettings.add_option("--lazy_download",
                           help="Enable lazy downloading of input files",
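Because the deprecated flag keeps dest="dasquery", optparse writes either option's value to the same options.dasquery attribute. A small sketch of this aliasing pattern (help strings paraphrased):

from optparse import OptionParser

parser = OptionParser()
parser.add_option("--dasquery", dest="dasquery", default='',
                  help="define source.fileNames from a das search")
parser.add_option("--dbsquery", dest="dasquery", default='',
                  help="deprecated alias, kept for backward compatibility")

opts, _ = parser.parse_args(["--dbsquery", "file dataset = /A/B/C"])
print(opts.dasquery)  # prints the query regardless of which flag supplied it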
2 changes: 1 addition & 1 deletion Configuration/PyReleaseValidation/python/MatrixReader.py
@@ -346,7 +346,7 @@ def showRaw(self, useInput, refRel=None, fromScratch=None, what='all',step1Only=
 for (index,s) in enumerate(indexAndSteps):
     for (stepName,cmd) in s:
         stepIndex=index+1
-        if 'dbsquery.log' in cmd: continue
+        if 'dasquery.log' in cmd: continue
         line = 'STEP%d ++ '%(stepIndex,) +stepName + ' @@@ '+cmd
         line=line.replace('DQMROOT','DQM')
         outFile.write(line+'\n')
14 changes: 7 additions & 7 deletions Configuration/PyReleaseValidation/python/WorkFlowRunner.py
@@ -94,27 +94,27 @@ def closeCmd(i,ID):
         self.stat.append('NOTRUN')
         aborted=True
         continue
-    #create lumiRange file first so if dbs fails we get its error code
+    #create lumiRange file first so if das fails we get its error code
     cmd2 = com.lumiRanges()
     if cmd2:
         cmd2 =cmd+cmd2+closeCmd(istep,'lumiRanges')
         lumiRangeFile='step%d_lumiRanges.log'%(istep,)
         retStep = self.doCmd(cmd2)
-    cmd+=com.dbs()
-    cmd+=closeCmd(istep,'dbsquery')
+    cmd+=com.das()
+    cmd+=closeCmd(istep,'dasquery')
     retStep = self.doCmd(cmd)
-    #don't use the file list executed, but use the dbs command of cmsDriver for next step
-    inFile='filelist:step%d_dbsquery.log'%(istep,)
+    #don't use the file list executed, but use the das command of cmsDriver for next step
+    inFile='filelist:step%d_dasquery.log'%(istep,)
     print "---"
 else:
     #chaining IO , which should be done in WF object already and not using stepX.root but <stepName>.root
     cmd += com
     if self.noRun:
         cmd +=' --no_exec'
-    if inFile: #in case previous step used DBS query (either filelist of dbs:)
+    if inFile: #in case previous step used DAS query (either filelist of das:)
         cmd += ' --filein '+inFile
         inFile=None
-    if lumiRangeFile: #DBS query can also restrict lumi range
+    if lumiRangeFile: #DAS query can also restrict lumi range
         cmd += ' --lumiToProcess '+lumiRangeFile
         lumiRangeFile=None
 if 'HARVESTING' in cmd and not '134' in str(self.wf.numId) and not '--filein' in cmd:
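Taken together, this hunk makes each relval step run its DAS lookup as a separate logged command and lets the next step consume the resolved file list. A rough sketch of the chaining, assuming closeCmd() redirects output to step<N>_dasquery.log:

istep = 2                                                    # hypothetical step number
cmd = 'das_client.py --query "file dataset = /A/B/C" '       # contributed by com.das()
cmd += '> step%d_dasquery.log 2>&1' % istep                  # assumed effect of closeCmd()
# the next step reuses the resolved list instead of querying DAS again:
next_cmd = 'cmsDriver.py ... --filein filelist:step%d_dasquery.log' % istep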
14 changes: 7 additions & 7 deletions Configuration/PyReleaseValidation/python/relval_steps.py
@@ -50,7 +50,7 @@ def __init__(self,dataSet,label='',run=[],files=1000,events=InputInfoNDefault,sp
         self.ib_blacklist = ib_blacklist
         self.ib_block = ib_block
 
-    def dbs(self):
+    def das(self):
         query_by = "block" if self.ib_block else "dataset"
         query_source = "{0}#{1}".format(self.dataSet, self.ib_block) if self.ib_block else self.dataSet
         if len(self.run) is not 0:
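The renamed das() method picks its query target from either a block or a dataset. A sketch of just the selection logic visible above, with hypothetical attribute values; the final query template and the run-list handling sit outside this hunk:

dataSet = '/RelValMinBias/CMSSW_7_0_0-START70_V1-v1/GEN-SIM'  # hypothetical dataset
ib_block = None                                               # set to a block id to query by block

query_by = "block" if ib_block else "dataset"
query_source = "{0}#{1}".format(dataSet, ib_block) if ib_block else dataSet
query = 'file %s=%s' % (query_by, query_source)  # plausible final form; the exact template is not shown here
print(query)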
@@ -890,9 +890,9 @@ def genvalid(fragment,d,suffix='all',fi=''):
 steps['ZJetsLNu_Tune4C_8TeV_madgraph-pythia8']=genvalid('Hadronizer_MgmMatchTune4C_8TeV_madgraph_pythia8_cff',step1GenDefaults,fi=5591)
 steps['ReggeGribovPartonMC_EposLHC_5TeV_pPb']=genvalid('GeneratorInterface/ReggeGribovPartonMCInterface/ReggeGribovPartonMC_EposLHC_5TeV_pPb_cfi',step1GenDefaults)
 
-PU={'-n':10,'--pileup':'default','--pileup_input':'dbs:/RelValMinBias/%s/GEN-SIM'%(baseDataSetRelease[0],)}
-PU25={'-n':10,'--pileup':'AVE_10_BX_25ns_m8','--pileup_input':'dbs:/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[7],)}
-PU50={'-n':10,'--pileup':'AVE_20_BX_50ns_m8','--pileup_input':'dbs:/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[7],)}
+PU={'-n':10,'--pileup':'default','--pileup_input':'das:/RelValMinBias/%s/GEN-SIM'%(baseDataSetRelease[0],)}
+PU25={'-n':10,'--pileup':'AVE_10_BX_25ns_m8','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[7],)}
+PU50={'-n':10,'--pileup':'AVE_20_BX_50ns_m8','--pileup_input':'das:/RelValMinBias_13/%s/GEN-SIM'%(baseDataSetRelease[7],)}
 PUFS={'--pileup':'default'}
 PUFS2={'--pileup':'mix_2012_Startup_inTimeOnly'}
 steps['TTbarFSPU']=merge([PUFS,Kby(100,500),steps['TTbarFS']] )
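The das: prefix in these pileup_input values is what ConfigBuilder strips with pileup_input[4:] before building its query (see the ConfigBuilder.py hunks above). A small sketch with a hypothetical release tag:

pileup_input = 'das:/RelValMinBias/CMSSW_X_Y_Z-START_V1-v1/GEN-SIM'  # hypothetical
if pileup_input.startswith('dbs:') or pileup_input.startswith('das:'):
    dataset = pileup_input[4:]             # both prefixes are exactly four characters
    query = 'file dataset = %s' % dataset  # matches the filesFromDASQuery calls above
    print(query)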
@@ -1281,7 +1281,7 @@ def genvalid(fragment,d,suffix='all',fi=''):
          '--data':'',
          '--scenario':'pp',
          '--filein':'file:step2.root',
-         '--secondfilein':'filelist:step1_dbsquery.log'}
+         '--secondfilein':'filelist:step1_dasquery.log'}
 
 steps['SKIMDreHLT'] = merge([ {'--conditions':'auto:com10_%s'%menu,'--filein':'file:step3.root'}, steps['SKIMD'] ])
 
@@ -1290,7 +1290,7 @@ def genvalid(fragment,d,suffix='all',fi=''):
          '--data':'',
          '--scenario':'cosmics',
          '--filein':'file:step2.root',
-         '--secondfilein':'filelist:step1_dbsquery.log'}
+         '--secondfilein':'filelist:step1_dasquery.log'}
 
 
 #### for special wfs ###
@@ -1322,7 +1322,7 @@ def genvalid(fragment,d,suffix='all',fi=''):
          '--process':'rereRECO',
          '--datatier':'AOD',
          '--eventcontent':'AOD',
-         '--secondfilein':'filelist:step1_dbsquery.log',
+         '--secondfilein':'filelist:step1_dasquery.log',
          },
 steps['RECOD']])