1 |
ggraham |
1.1 |
"""
|
2 |
|
|
publishAgent.py
|
3 |
|
|
|
4 |
|
|
This file contains API methods that use the ViewObjectLayer to
|
5 |
|
|
populate the DBS. A utility class to read and parse SQL is included.
|
6 |
|
|
Module level representations of the table schemas are constructed and
|
7 |
|
|
stored at module level when the module is imported, but this should
|
8 |
|
|
probably go into a class at some point. A DBSpublisher class is
|
9 |
|
|
included that contains views constructed from the individual schemas.
|
10 |
|
|
Example inserts are at the end of the file in the __main__ section.
|
11 |
|
|
|
12 |
|
|
Gregory Edwin Graham, 16-May-2005
|
13 |
|
|
"""
|
14 |
|
|
__version__ = "$Id: publishAgent.py,v 1.4 2005/05/20 04:47:54 ggraham Exp $"
|
15 |
|
|
__revision__ = "$Revision: 1.4 $"
|
16 |
|
|
|
17 |
|
|
import os, sys, time, pdb
|
18 |
|
|
from ViewObjectLayer import *
|
19 |
|
|
|
20 |
|
|
class ReadTable:
    """
    Minimal parser for druid-generated SQL "create table" statements.
    (A placeholder "crap SQL parser" to be replaced by Anzar and Vijay.)

    Each instance parses one table definition out of the module-level
    ``sqlContent`` line list and records the table's columns, types,
    keys and constraints in the attributes filled by getTable().
    """

    def __init__(self):
        """
        Constructor.

        Initializes the empty containers that getTable() fills in.
        """
        self.schema = []        # ordered list of column names
        self.types = {}         # column name -> 'int' or 'string' (dict, not list)
        self.pk = []            # list of primary-key column names
        self.name = None        # table name (string)
        self.uniqueKeys = []    # list of lists of column names
        self.foreignKeys = {}   # local column -> "othertable.otherattribute"
        self.notNulls = []      # columns declared NOT NULL

    def getTable(self, i):
        """
        Read the druid-generated table definition that starts at index
        ``i`` of the module-level ``sqlContent`` list and fill the arrays
        initialized in the constructor.  Also reserves a slot for the
        table in the module-level ``allSchemas`` dictionary.

        Raises RuntimeError if sqlContent[i] is not a "create table" line.
        """
        if sqlContent[i].find("create table") != 0:
            # was: raise "Could not find table" -- string exceptions are
            # deprecated (and invalid in modern Python); use a real type
            raise RuntimeError("Could not find table")
        self.name = sqlContent[i].split()[2]
        if self.name not in allSchemas:
            allSchemas[self.name] = None
        j = i + 2              # skip past the "create table" header lines
        stSchema = 1           # 1 while still inside the column/constraint body
        self.schema = []
        while sqlContent[j][0:4] != "  );":
            linecon = sqlContent[j].split()
            if stSchema == 1:
                if len(linecon) > 0:
                    if linecon[0].strip() == '':
                        pass
                    elif linecon[0].strip() == ');':
                        stSchema = 0
                    elif linecon[0][0:6] in ['check(']:
                        # check constraints are ignored
                        pass
                    elif sqlContent[j].strip()[0:9] in ['primary k', 'foreign k']:
                        if linecon[0] == 'primary':
                            # "primary key(a,b)" -> ['a', 'b']
                            self.pk = linecon[1].split('(')[1]
                            self.pk = self.pk.split(')')[0]
                            self.pk = self.pk.split(',')
                        elif linecon[0][0:7] == 'foreign':
                            # "foreign key(col) references tbl(attr)"
                            tmp = linecon[1]
                            tmp = tmp.split(')')[0]
                            tmp = tmp.split('(')[1]
                            tbln = linecon[3].split('(')[0]
                            attr = linecon[3].split('(')[1].split(')')[0]
                            self.foreignKeys[tmp] = tbln + '.' + attr
                    elif linecon[0][0:6] == 'unique':
                        # table-level "unique(a,b)" -> ['a', 'b']
                        tmp = linecon[0][6:]
                        tmp = tmp.split(')')[0]
                        tmp = tmp.split('(')[1]
                        tmp = tmp.split(',')
                        self.uniqueKeys.append(tmp)
                    else:
                        # ordinary column definition: "<name> <type> ..."
                        name = linecon[0]
                        typee = linecon[1]
                        # collapse SQL types to the two kinds the
                        # ViewObjectLayer understands
                        if typee[0:3] in ['var', 'dat', 'cha']:
                            typee = 'string'
                        elif typee[0:3] in ['int', 'num']:
                            typee = 'int'
                        self.schema.append(name)
                        self.types[name] = typee
                        if sqlContent[j].find("not null") >= 0:
                            self.notNulls.append(name)
                        if sqlContent[j].find("unique") >= 0:
                            self.uniqueKeys.append([name])
            j = j + 1

    def makeObject(self):
        """
        Using the filled arrays in the constructor, call SingleSchema
        from the ViewObjectLayer and return the result.

        If the table has a primary-key column named "<tablename>id"
        it is registered as an auto-sequencer.
        """
        autoSeqID = self.name + 'id'
        if autoSeqID in self.pk:
            seq = {autoSeqID: 0}
        else:
            seq = {}
        a = SingleSchema(self.name, self.schema, self.types,
                         self.pk, self.uniqueKeys, self.foreignKeys,
                         self.notNulls, {}, seq)
        return a
108 |
|
|
|
109 |
|
|
# For now read in the schema in lowercase.
# NOTE: the path is relative to the working directory and this runs at
# import time.
_schemaFile = open("../../dbs-schema/sql/DBS-DB.sql", 'r')
sqlContent = [_schemaLine.lower() for _schemaLine in _schemaFile.readlines()]
# close explicitly -- the original map(...)/open(...) one-liner leaked
# the file handle
_schemaFile.close()

# This is the module level dictionary holding all individual schemas
allSchemas = {}

# Fill the schemas dictionary: parse every "create table" statement found.
for i, line in enumerate(sqlContent):
    if line[0:5] == 'creat':
        a = ReadTable()
        a.getTable(i)
        allSchemas[a.name] = a.makeObject()
|
121 |
|
|
|
122 |
|
|
class DBSpublisher :
|
123 |
|
|
"""
|
124 |
|
|
This class contains the entire DBS schema in
|
125 |
|
|
terms of appropriate ViewObjects.
|
126 |
|
|
"""
|
127 |
|
|
    def __init__(self):
        """
        Constructor.

        Builds one MultiSchema view (and a corresponding Table with
        initialized sequencers) for each logical publishing area, using
        the SingleSchema objects stored in the module-level ``allSchemas``
        dictionary at import time.  The connection of the last Table
        built is kept as self._Connection and drives
        resetTransaction()/saveTransaction().
        """

        # We will ignore createdby and lastmodifiedby attributes that will
        # be filled in automatically in the future by some other mechanism
        self.fkExclusionAttributes = ['lastmodifiedby', 'createdby']
        # extra keyword arguments applied to every Table() built below
        self.cParms = {}

        # View that describes Application Configurations
        self.Applications = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.Applications.addSchema(allSchemas['application'])
        self.Applications.addSchema(allSchemas['applicationfamily'])
        self.Applications.addSchema(allSchemas['collectiontype'])
        self.Applications.addSchema(allSchemas['parameterset'])
        # second argument 1: presumably marks a repeatable (multi-row)
        # schema -- TODO confirm against ViewObjectLayer.addSchema
        self.Applications.addSchema(allSchemas['parameterbinding'], 1)
        self.Applications.addSchema(allSchemas['applicationconfiguration'])
        self.Applications.addCondition("application.applicationfamily = " + \
            "applicationfamily.applicationfamilyid")
        self.Applications.addCondition("applicationconfiguration.applicationid = " + \
            "application.applicationid")
        self.Applications.addCondition("applicationconfiguration.parametersetid = " + \
            "parameterset.parametersetid")
        self.Applications.addCondition("parameterbinding.parametersetid = " + \
            "parameterset.parametersetid")
        self.ApplicationsTable = Table(self.Applications, **self.cParms)
        self.ApplicationsTable.initializeSequencers()

        # View that describes Administrative roles
        self.Administrative = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.Administrative.addSchema(allSchemas['person'])
        self.Administrative.addSchema(allSchemas['role'])
        self.Administrative.addSchema(allSchemas['assignedrole'])
        self.Administrative.addCondition('assignedrole.roleid = role.roleid')
        self.Administrative.addCondition('assignedrole.personid = person.personid')
        self.AdministrativeTable = Table(self.Administrative, **self.cParms)
        self.AdministrativeTable.initializeSequencers()

        # View that describes Person
        self.Person = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.Person.addSchema(allSchemas['person'])
        self.PersonTable = Table(self.Person, **self.cParms)
        self.PersonTable.initializeSequencers()

        # View that describes Role
        self.Role = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.Role.addSchema(allSchemas['role'])
        self.RoleTable = Table(self.Role, **self.cParms)
        self.RoleTable.initializeSequencers()

        # View that describes PhysicsGroup
        self.PhysicsGroup = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.PhysicsGroup.addSchema(allSchemas['physicsgroup'])
        self.PhysicsGroup.addSchema(allSchemas['person'])
        self.PhysicsGroup.addCondition('physicsgroup.physicsgroupconvener = person.personid')
        self.PhysicsGroupTable = Table(self.PhysicsGroup, **self.cParms)
        self.PhysicsGroupTable.initializeSequencers()

        # View for generic parameter sets
        self.GenParameterSets = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.GenParameterSets.addSchema(allSchemas['parameterset'])
        self.GenParameterSets.addSchema(allSchemas['parameterbinding'])
        self.GenParameterSets.addCondition('parameterset.parametersetid = ' + \
            'parameterbinding.parametersetid')
        self.GenParameterSetsTable = Table(self.GenParameterSets, **self.cParms)
        self.GenParameterSetsTable.initializeSequencers()

        # View that describes EventCollections
        self.EventCollections = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.EventCollections.addSchema(allSchemas['filetype'])
        self.EventCollections.addSchema(allSchemas['filestatus'])
        self.EventCollections.addSchema(allSchemas['file'])
        self.EventCollections.addSchema(allSchemas['evcollfile'])
        self.EventCollections.addSchema(allSchemas['eventcollection'])
        self.EventCollections.addSchema(allSchemas['analysiscollectiondata'])
        self.EventCollections.addSchema(allSchemas['validationstatus'])
        self.EventCollections.addSchema(allSchemas['analysiscollectionstatus'])
        # parameter-set handling now lives in the GenParameterSets view above
        # self.EventCollections.addSchema(allSchemas['parameterset'])
        # self.EventCollections.addSchema(allSchemas['parameterbinding'],1)
        self.EventCollections.addCondition('evcollfile.fileid = file.fileid')
        # self.EventCollections.addCondition('parameterset.parametersetid = ' + \
        #     'parameterbinding.parametersetid')
        # self.EventCollections.addCondition('analysiscollectiondata.otherqueryablemetadata' + \
        #     ' = parameterset.parametersetid')
        self.EventCollections.addCondition('file.filetype = filetype.filetypeid')
        self.EventCollections.addCondition('file.filestatus = filestatus.filestatusid')
        self.EventCollections.addCondition('evcollfile.evcollid = ' + \
            'eventcollection.eventcollectionid')
        self.EventCollections.addCondition('analysiscollectiondata.eventcollectionid = ' + \
            'eventcollection.eventcollectionid')
        self.EventCollections.addCondition('analysiscollectiondata.validationstatus = ' + \
            'validationstatus.validationstatusid')
        self.EventCollections.addCondition('analysiscollectiondata.analysiscollectionstatus' +\
            ' = analysiscollectionstatus.analysiscollectionstatusid')
        self.EventCollectionsTable = Table(self.EventCollections, **self.cParms)
        self.EventCollectionsTable.initializeSequencers()

        # View that describes EventCollections with complex
        # parentage
        self.EventCollections2 = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.EventCollections2.addSchema(allSchemas['filetype'])
        self.EventCollections2.addSchema(allSchemas['filestatus'])
        self.EventCollections2.addSchema(allSchemas['file'])
        self.EventCollections2.addSchema(allSchemas['evcollfile'])
        self.EventCollections2.addSchema(allSchemas['eventcollection'])
        self.EventCollections2.addSchema(allSchemas['compositeeventcollection'],1)
        self.EventCollections2.addSchema(allSchemas['analysiscollectiondata'])
        self.EventCollections2.addSchema(allSchemas['validationstatus'])
        self.EventCollections2.addSchema(allSchemas['analysiscollectionstatus'])
        # self.EventCollections2.addSchema(allSchemas['parameterset'])
        # self.EventCollections2.addSchema(allSchemas['parameterbinding'],1)
        self.EventCollections2.addCondition('evcollfile.fileid = file.fileid')
        # self.EventCollections2.addCondition('parameterset.parametersetid = ' + \
        #     'parameterbinding.parametersetid')
        # self.EventCollections2.addCondition('analysiscollectiondata.otherqueryablemetadata' + \
        #     ' = parameterset.parametersetid')
        self.EventCollections2.addCondition('file.filetype = filetype.filetypeid')
        self.EventCollections2.addCondition('file.filestatus = filestatus.filestatusid')
        # extra join for the composite (multi-parent) relationship
        self.EventCollections2.addCondition('compositeeventcollection.childecid = ' + \
            'eventcollection.eventcollectionid')
        self.EventCollections2.addCondition('evcollfile.evcollid = ' + \
            'eventcollection.eventcollectionid')
        self.EventCollections2.addCondition('analysiscollectiondata.eventcollectionid = ' + \
            'eventcollection.eventcollectionid')
        self.EventCollections2.addCondition('analysiscollectiondata.validationstatus = ' + \
            'validationstatus.validationstatusid')
        self.EventCollections2.addCondition('analysiscollectiondata.analysiscollectionstatus' +\
            ' = analysiscollectionstatus.analysiscollectionstatusid')
        self.EventCollectionsTable2 = Table(self.EventCollections2, **self.cParms)
        self.EventCollectionsTable2.initializeSequencers()

        # View that describes primary/processed dataset parameters
        self.PrimaryDataset = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.PrimaryDataset.addSchema(allSchemas['triggerpathdescription'])
        self.PrimaryDataset.addSchema(allSchemas['mcdescription'])
        self.PrimaryDataset.addSchema(allSchemas['primarydatasetdescription'])
        self.PrimaryDataset.addSchema(allSchemas['primarydataset'])
        self.PrimaryDataset.addSchema(allSchemas['stream'])
        self.PrimaryDataset.addSchema(allSchemas['physicsgroup'])
        self.PrimaryDataset.addCondition('mcdescription.mcdescriptionid = ' + \
            'primarydatasetdescription.mcchanneldescriptionid')
        self.PrimaryDataset.addCondition('triggerpathdescription.triggerpathdescriptionid' + \
            ' = primarydatasetdescription.triggerdescriptionid')
        self.PrimaryDataset.addCondition('primarydataset.primarydatasetdescriptionid = ' + \
            'primarydatasetdescription.primarydatasetdescriptionid')
        self.PrimaryDataset.addCondition('primarydataset.streamid = stream.streamid')
        self.PrimaryDataset.addCondition('primarydataset.physicsgroupid = ' +\
            'physicsgroup.physicsgroupid')
        self.PrimaryDatasetTable = Table(self.PrimaryDataset, **self.cParms)
        self.PrimaryDatasetTable.initializeSequencers()

        # View that describes processing path - self referencing
        self.ProcessingPath = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.ProcessingPath.addSchema(allSchemas['processingpath'])
        self.ProcessingPath.addSchema(allSchemas['processeddataset'])
        self.ProcessingPath.addSchema(allSchemas['primarydataset'])
        self.ProcessingPath.addCondition('primarydataset.primarydatasetid = ' + \
            'processeddataset.primarydatasetid')
        self.ProcessingPath.addCondition('processeddataset.processingpathid = ' + \
            'processingpath.processingpathid')
        self.ProcessingPathTable = Table(self.ProcessingPath, **self.cParms)
        self.ProcessingPathTable.initializeSequencers()

        # View that describes AnalysisDatasets
        self.AnalysisDataset = MultiSchema(fkExcl = self.fkExclusionAttributes)
        self.AnalysisDataset.addSchema(allSchemas['analysisdataset'])
        self.AnalysisDataset.addSchema(allSchemas['evcollandata'])
        self.AnalysisDataset.addSchema(allSchemas['analysiscollectiondata'])
        self.AnalysisDataset.addSchema(allSchemas['analysisdatasetsubtype'])
        self.AnalysisDataset.addSchema(allSchemas['block'])
        self.AnalysisDataset.addSchema(allSchemas['snapshot'])
        self.AnalysisDataset.addSchema(allSchemas['productionassignment'])
        self.AnalysisDataset.addSchema(allSchemas['usercollection'])
        self.AnalysisDatasetTable = Table(self.AnalysisDataset, **self.cParms)
        self.AnalysisDatasetTable.initializeSequencers()
        # shared connection used by resetTransaction()/saveTransaction()
        self._Connection = self.AnalysisDatasetTable.getConnection()
|
305 |
|
|
|
306 |
|
|
def resetTransaction(self) :
|
307 |
|
|
"""
|
308 |
|
|
This clears the transaction with implicit rollback.
|
309 |
|
|
Implements TableInterface::resetTransaction
|
310 |
|
|
"""
|
311 |
|
|
self._Connection.rollback()
|
312 |
|
|
|
313 |
|
|
def saveTransaction(self) :
|
314 |
|
|
"""
|
315 |
|
|
This clears the transaction with implicit commit.
|
316 |
|
|
Implements TableInterface::saveTransaction
|
317 |
|
|
"""
|
318 |
|
|
self._Connection.commit()
|
319 |
|
|
|
320 |
|
|
def publishApplicationConfiguration(self, appFamName, exeName, appVersion, paramSetName, \
|
321 |
|
|
paramSetVersion, paramSetComments, parameterBindings, inputType = None, \
|
322 |
|
|
outputType = None) :
|
323 |
|
|
"""
|
324 |
|
|
API method for publishing an Application Configuration.
|
325 |
|
|
The arguments are in order:
|
326 |
|
|
|
327 |
|
|
appFamName
|
328 |
|
|
Application Family Name (eg- CMKIN, OSCAR, ORCA)
|
329 |
|
|
|
330 |
|
|
exeName
|
331 |
|
|
Executable Name
|
332 |
|
|
|
333 |
|
|
appVersion
|
334 |
|
|
Application Version
|
335 |
|
|
|
336 |
|
|
paramSetName
|
337 |
|
|
Parameter Set Name (A unique name to be given by
|
338 |
|
|
client for future reference. This will be
|
339 |
|
|
automatically generated in teh future.)
|
340 |
|
|
|
341 |
|
|
paramSetVersion
|
342 |
|
|
Parameter Set Version (To be given by client for
|
343 |
|
|
future reference - this will be automatically
|
344 |
|
|
generated in the future..)
|
345 |
|
|
|
346 |
|
|
paramSetVersion
|
347 |
|
|
Parameter Set Comments
|
348 |
|
|
|
349 |
|
|
parameterBindings
|
350 |
|
|
Parameter Bindings (dictionary of key/value pairs)
|
351 |
|
|
|
352 |
|
|
inputType (optional)
|
353 |
|
|
Input Type (eg- NoInput, CMKIN, OSCAR, etc.)
|
354 |
|
|
|
355 |
|
|
outputType (optional)
|
356 |
|
|
OutputType (eg- CMKIN, OSCAR, ORCA, NoOutput, etc.)
|
357 |
|
|
"""
|
358 |
|
|
rowData = Row(self.Applications)
|
359 |
|
|
if inputType != None :
|
360 |
|
|
rowData['collectiontype.collectiontype(application.inputcollectiontype)'] = inputType
|
361 |
|
|
if outputType != None :
|
362 |
|
|
rowData['collectiontype.collectiontype(application.outputcollectiontype)'] = outputType
|
363 |
|
|
rowData['applicationfamily.applicationfamilyname'] = appFamName
|
364 |
|
|
rowData['application.executablename'] = exeName
|
365 |
|
|
rowData['application.applicationversion'] = appVersion
|
366 |
|
|
rowData['parameterset.parametersetname'] = paramSetName
|
367 |
|
|
rowData['parameterset.parametersetversion'] = paramSetVersion
|
368 |
|
|
rowData['parameterset.parametersetannotation'] = paramSetComments
|
369 |
|
|
rowData['parameterset.composite'] = 'n'
|
370 |
|
|
firstInsert = 1
|
371 |
|
|
for key, val in parameterBindings.items() :
|
372 |
|
|
if firstInsert == 0 :
|
373 |
|
|
rowData.newData('parameterbinding')
|
374 |
|
|
firstInsert = 0
|
375 |
|
|
rowData['parameterbinding.parametername'] = key
|
376 |
|
|
rowData['parameterbinding.parametervalue'] = val
|
377 |
|
|
self.ApplicationsTable.smartInsert(rowData, rowData.keys())
|
378 |
|
|
self.saveTransaction()
|
379 |
|
|
return rowData['applicationconfiguration.applicationconfigurationid']
|
380 |
|
|
|
381 |
|
|
def adminAssignRole(self, name, role) :
|
382 |
|
|
"""
|
383 |
|
|
API Method to assign a role.
|
384 |
|
|
The arguments are in order :
|
385 |
|
|
|
386 |
|
|
name
|
387 |
|
|
UNIX style name of the person
|
388 |
|
|
|
389 |
|
|
role
|
390 |
|
|
name of the role
|
391 |
|
|
"""
|
392 |
|
|
rowData = Row(self.Administrative)
|
393 |
|
|
rowData['person.name'] = name
|
394 |
|
|
rowData['role.rolename'] = role
|
395 |
|
|
self.AdministrativeTable.smartInsert(rowData, rowData.keys())
|
396 |
|
|
self.saveTransaction()
|
397 |
|
|
return rowData['assignedrole.assignedroleid']
|
398 |
|
|
|
399 |
|
|
def adminPhysicsGroup(self, groupName, personName) :
|
400 |
|
|
"""
|
401 |
|
|
API method to publish a physics group
|
402 |
|
|
The arguments are in order:
|
403 |
|
|
|
404 |
|
|
groupName
|
405 |
|
|
the physics group name
|
406 |
|
|
|
407 |
|
|
personName
|
408 |
|
|
the name of the convener
|
409 |
|
|
"""
|
410 |
|
|
rowData = Row(self.PhysicsGroup)
|
411 |
|
|
rowData['physicsgroup.physicsgroupname'] = groupName
|
412 |
|
|
rowData['person.name'] = personName
|
413 |
|
|
self.PhysicsGroupTable.smartInsert(rowData, rowData.keys())
|
414 |
|
|
self.saveTransaction()
|
415 |
|
|
return rowData['physicsgroup.physicsgroupid']
|
416 |
|
|
|
417 |
|
|
    def adminPerson(self, name, distinguishedname, contactinfo):
        """
        API method to publish a person.
        The arguments are in order:

        name -- the UNIX style name of the person
        distinguishedname -- the grid credential distinguished name of the person
        contactinfo -- contact information

        Returns the new person id.

        NOTE(review): unlike the other admin/publish methods in this class
        this does not call saveTransaction() after the insert -- confirm
        whether leaving the transaction open is intentional.
        """
        rowData = Row(self.Person)
        rowData['person.name'] = name
        rowData['person.distinguishedname'] = distinguishedname
        rowData['person.contactinfo'] = contactinfo
        self.PersonTable.smartInsert(rowData, rowData.keys())
        return rowData['person.personid']
|
437 |
|
|
|
438 |
|
|
    def adminRole(self, rolename, description):
        """
        API method to publish a role.
        The arguments are in order:

        rolename -- the name of the role
        description -- description of the role

        Returns the new role id.

        NOTE(review): like adminPerson, this does not call
        saveTransaction() after the insert -- confirm whether leaving the
        transaction open is intentional.
        """
        rowData = Row(self.Role)
        rowData['role.rolename'] = rolename
        rowData['role.roledescription'] = description
        self.RoleTable.smartInsert(rowData, rowData.keys())
        return rowData['role.roleid']
|
454 |
|
|
|
455 |
|
|
def publishPrimaryDataset(self, description, datasetName, \
|
456 |
|
|
datasetComments, streamName, physicsGroupName, streamComments = None) :
|
457 |
|
|
"""
|
458 |
|
|
API method to publish a primary dataset
|
459 |
|
|
The arguments are in order:
|
460 |
|
|
|
461 |
|
|
description
|
462 |
|
|
A dictionary description of the dataset
|
463 |
|
|
The dictionary can have MCChannel, MCProduction, and MCDecayChain
|
464 |
|
|
OR
|
465 |
|
|
The dictionary can have TriggerPath
|
466 |
|
|
|
467 |
|
|
datasetName
|
468 |
|
|
A dataset name (eg- the COBRA dataset name)
|
469 |
|
|
|
470 |
|
|
datasetComments
|
471 |
|
|
Comments
|
472 |
|
|
|
473 |
|
|
streamName
|
474 |
|
|
A Stream name
|
475 |
|
|
|
476 |
|
|
physicsGroupName
|
477 |
|
|
A physics group name
|
478 |
|
|
"""
|
479 |
|
|
rowData = Row(self.PrimaryDataset)
|
480 |
|
|
rowData.setSkipOnEmpty('mcdescription')
|
481 |
|
|
rowData.setSkipOnEmpty('triggerpathdescription')
|
482 |
|
|
if description.has_key('MCChannel') :
|
483 |
|
|
rowData['mcdescription.mcchanneldescription'] = description['MCChannel']
|
484 |
|
|
rowData['mcdescription.mcproduction'] = description['MCProduction']
|
485 |
|
|
rowData['mcdescription.mcdecaychain'] = description['MCDecayChain']
|
486 |
|
|
rowData['primarydatasetdescription.mcdataset'] = 'y'
|
487 |
|
|
rowData['primarydatasetdescription.triggerdescriptionid'] = None
|
488 |
|
|
elif description.has_key('TriggerPath') :
|
489 |
|
|
rowData['triggerpathdescription.triggerpathdescription'] = description['TriggerPath']
|
490 |
|
|
rowData['primarydatasetdescription.mcdataset'] = 'n'
|
491 |
|
|
rowData['primarydatasetdescription.mcchanneldescriptionid'] = None
|
492 |
|
|
rowData['stream.streamname'] = streamName
|
493 |
|
|
if streamComments != None :
|
494 |
|
|
rowData['stream.streamannotation'] = streamComments
|
495 |
|
|
rowData['primarydataset.primarydatasetname'] = datasetName
|
496 |
|
|
rowData['primarydataset.cobradatasetname'] = datasetName
|
497 |
|
|
rowData['primarydataset.primarydatasetannotation'] = datasetComments
|
498 |
|
|
rowData['primarydataset.openforwriting'] = 'y'
|
499 |
|
|
rowData['physicsgroup.physicsgroupname'] = physicsGroupName
|
500 |
|
|
self.PrimaryDatasetTable.smartInsert(rowData, rowData.keys())
|
501 |
|
|
self.saveTransaction()
|
502 |
|
|
return rowData['primarydataset.primarydatasetid']
|
503 |
|
|
|
504 |
|
|
def publishProcessedDataset(self, datasetName, ownerName, parentProcPathID, applicationConfigID, agPath = None) :
|
505 |
|
|
"""
|
506 |
|
|
API method to publish a processed dataset
|
507 |
|
|
The arguments are in order
|
508 |
|
|
|
509 |
|
|
datasetName
|
510 |
|
|
dataset name of the primary dataset
|
511 |
|
|
|
512 |
|
|
ownerName
|
513 |
|
|
owner name (eg- the COBRA owner name)
|
514 |
|
|
|
515 |
|
|
parantProcPathID
|
516 |
|
|
parent processing path id (can be null)
|
517 |
|
|
|
518 |
|
|
applicationConfiguration
|
519 |
|
|
application configuration id (returned by publishApplicationConfiguration)
|
520 |
|
|
"""
|
521 |
|
|
rowData = Row(self.ProcessingPath)
|
522 |
|
|
rowData['primarydataset.primarydatasetname'] = datasetName
|
523 |
|
|
rowData['processingpath.parentprocessingpathid'] = parentProcPathID
|
524 |
|
|
rowData['processingpath.processingrecordid'] = applicationConfigID
|
525 |
|
|
if agPath != None :
|
526 |
|
|
rowData['processingpath.aggregatedpath'] = agPath
|
527 |
|
|
rowData['processeddataset.cobraownername'] = ownerName
|
528 |
|
|
rowData['processeddataset.openforwriting'] = 'y'
|
529 |
|
|
self.ProcessingPathTable.smartInsert(rowData, rowData.keys())
|
530 |
|
|
self.saveTransaction()
|
531 |
|
|
return rowData['processeddataset.processeddatasetid']
|
532 |
|
|
|
533 |
|
|
    def publishEventCollection(self, evcollStatus, validationStatus, nEvents,
            luminosity, collectionName, procDatasetID, evCollIndex, primaryEC,
            paramSetName, parameterBindings, filelist, oqm = None):
        """
        API to publish an EventCollection.
        The arguments are in order:

        evcollStatus -- event collection status (eg- "OK")
        validationStatus -- validation status (eg- "OK")
        nEvents -- number of events
        luminosity -- estimated luminosity
        collectionName -- collection name (eg- COBRA collection name, can be None)
        procDatasetID -- processed dataset ID (returned by publishProcessedDataset)
        evCollIndex -- event collection index (eg- Run number)
        primaryEC -- is this a "primary" evCollection? 'y' for CMKIN, 'n' otherwise
        paramSetName -- unique name for the parameter set associated with
            the Event Collection (will be automatically generated in the future)
        parameterBindings -- dictionary containing the parameters
            describing this Event Collection
        filelist -- list of tuples of
            (logical file name, checksum, size, status, type)
        oqm -- unused in this body; presumably "other queryable metadata"
            -- TODO confirm intent

        NOTES:
        This API is useful when event collections can be lined up
        on the event collection index exactly across different
        processing levels.  Parentage information is assumed to
        be "collection 123 @ processing X" is a parent of
        "collection 123 @ processing Y" if Y follows X in
        ProcessingPath.

        Returns the new eventcollection id.  Commits on success.
        """
        # main row for the EventCollections view
        rowData = Row(self.EventCollections)
        rowData['analysiscollectionstatus.analysiscollectionstatus'] = evcollStatus
        rowData['validationstatus.validationstatus'] = validationStatus
        rowData['analysiscollectiondata.numberofevents'] = nEvents
        rowData['analysiscollectiondata.estimatedluminosity'] = luminosity
        rowData['analysiscollectiondata.cobracollectionname'] = collectionName
        rowData['eventcollection.processeddatasetid'] = procDatasetID
        rowData['eventcollection.primaryeventcollection'] = primaryEC
        rowData['eventcollection.compositeeventcollection'] = 'n'
        rowData['eventcollection.eventcollectionindex'] = evCollIndex

        # separate row ("bowData") in the GenParameterSets view that carries
        # the event collection's parameter bindings
        bowData = Row(self.GenParameterSets)
        bowData['parameterset.parametersetname'] = paramSetName
        bowData['parameterset.parametersetversion'] = '1.0'
        bowData['parameterset.parametersetannotation'] = \
            'Parameter set describing event collection ' + paramSetName
        bowData['parameterset.composite'] = 'n'
        firstInsert = 1
        for key, val in parameterBindings.items():
            # open a fresh parameterbinding sub-row for every binding after the first
            if firstInsert == 0:
                bowData.newData('parameterbinding')
            firstInsert = 0
            bowData['parameterbinding.parametername'] = key
            bowData['parameterbinding.parametervalue'] = val
        self.GenParameterSetsTable.smartInsert(bowData, bowData.keys())
        self.saveTransaction()
        # link the event collection to the parameter set just inserted
        rowData['analysiscollectiondata.otherqueryablemetadata'] = bowData['parameterset.parametersetid']

        for i in range(len(filelist)):
            rowData['file.logicalfilename'] = filelist[i][0]
            rowData['file.checksum'] = filelist[i][1]
            rowData['file.size'] = filelist[i][2]
            rowData['filestatus.filestatus'] = filelist[i][3]
            rowData['filetype.filetype'] = filelist[i][4]
            # Bug to be fixed later. The files inserts should be batched. 16-May-2005!!!
            # Drop the ids generated by the previous insert so that each
            # iteration inserts new file/evcollfile records.
            if 'file.fileid' in rowData.keys():
                del rowData['file.fileid']
                del rowData['evcollfile.evcollfileid']
                del rowData['evcollfile.fileid']
            self.EventCollectionsTable.smartInsert(rowData, rowData.keys())
        self.saveTransaction()
        return rowData['eventcollection.eventcollectionid']
|
628 |
|
|
|
629 |
|
|
def publishEventCollection2(self, evcollStatus, validationStatus, nEvents, \
        luminosity, collectionName, procDatasetID, evCollIndex, parentECList, \
        paramSetName, parameterBindings, filelist) :
    """
    Another API to publish an EventCollection.

    The arguments are in order:

    evcollStatus
        event collection status (eg- "OK")

    validationStatus
        validation status (eg- "OK")

    nEvents
        number of events

    luminosity
        estimated luminosity

    collectionName
        collection name (eg- COBRA collection name, can be None)

    procDatasetID
        processed dataset ID (returned by publishProcessedDataset)

    evCollIndex
        event collection index (eg- Run number)

    parentECList
        list of event collection IDs of the parents of this event collection.

    paramSetName
        parameter set name : this is a unique name for the parameter set
        associated with the Event Collection.  (This will be
        automatically generated in the future.)

    parameterBindings
        parameters : this is a dictionary containing the parameters
        describing this Event Collection.

    filelist
        file list: list of tuples of
        (logical file name, checksum, size, status, type)

    Returns the new eventcollection ID.

    NOTES:
    This API is useful when event collections must be loaded
    with multiple parents.  The "CompositeEventCollection"
    table is used to track complex parentage relationships
    directly.  This API method is not yet tested.
    """
    rowData = Row(self.EventCollections2)
    rowData['analysiscollectionstatus.analysiscollectionstatus'] = evcollStatus
    rowData['validationstatus.validationstatus'] = validationStatus
    rowData['analysiscollectiondata.numberofevents'] = nEvents
    rowData['analysiscollectiondata.estimatedluminosity'] = luminosity
    rowData['analysiscollectiondata.cobracollectionname'] = collectionName
    rowData['eventcollection.processeddatasetid'] = procDatasetID
    # A collection with no parents is a primary event collection.
    if parentECList :
        rowData['eventcollection.primaryeventcollection'] = 'n'
    else :
        rowData['eventcollection.primaryeventcollection'] = 'y'
    # NOTE(review): set to 'y' unconditionally here, even when parentECList
    # is empty and no compositeeventcollection sub-rows get written below --
    # confirm this is intended (the sibling publishEventCollection uses 'n').
    rowData['eventcollection.compositeeventcollection'] = 'y'
    rowData['eventcollection.eventcollectionindex'] = evCollIndex

    # One compositeeventcollection sub-row per parent ID; newData() opens a
    # fresh sub-row for every parent after the first.
    for parentIndex, parentECID in enumerate(parentECList) :
        if parentIndex > 0 :
            rowData.newData('compositeeventcollection')
        rowData['compositeeventcollection.compositeecid'] = parentECID

    rowData['parameterset.parametersetname'] = paramSetName
    rowData['parameterset.parametersetversion'] = '1.0'
    rowData['parameterset.parametersetannotation'] = \
        'Parameter set describing event collection ' + paramSetName
    rowData['parameterset.composite'] = 'n'
    # One parameterbinding sub-row per (name, value) pair, same sub-row
    # pattern as the parent list above.
    for bindingIndex, (key, val) in enumerate(parameterBindings.items()) :
        if bindingIndex > 0 :
            rowData.newData('parameterbinding')
        rowData['parameterbinding.parametername'] = key
        rowData['parameterbinding.parametervalue'] = val

    # Bug to be fixed later. The files inserts should be batched. 16-May-2005!!!
    for logicalFileName, checksum, size, fileStatus, fileType in filelist :
        rowData['file.logicalfilename'] = logicalFileName
        rowData['file.checksum'] = checksum
        rowData['file.size'] = size
        rowData['filestatus.filestatus'] = fileStatus
        rowData['filetype.filetype'] = fileType
        # Drop the IDs assigned by the previous iteration's insert so the
        # next file row gets fresh ones.
        if 'file.fileid' in rowData.keys() :
            del rowData['file.fileid']
            del rowData['evcollfile.evcollfileid']
            del rowData['evcollfile.fileid']
        self.EventCollectionsTable2.smartInsert(rowData, rowData.keys())
        self.saveTransaction()
    return rowData['eventcollection.eventcollectionid']
|
726 |
|
|
|
727 |
|
|
if __name__ == "__main__" :
|
728 |
|
|
|
729 |
|
|
a = DBSpublisher()
|
730 |
|
|
b = a.publishApplicationConfiguration('OSCAR', 'oscar_new.exe', '3_6-2', 'test44', \
|
731 |
|
|
'v1', 'test parameters', {'a':'1', 'b':'2', 'c':'3', 'd':'4'}, 'CMKIN', 'OSCAR')
|
732 |
|
|
print b
|
733 |
|
|
c = a.adminPerson('ggraham', 'Gregory Graham 123456', '1055 W. Addison')
|
734 |
|
|
print c
|
735 |
|
|
d = a.adminRole('admin','administrative')
|
736 |
|
|
print d
|
737 |
ggraham |
1.2 |
d = a.adminRole('hokey','administrative')
|
738 |
|
|
print d
|
739 |
|
|
d = a.adminRole('pokey','administrative')
|
740 |
|
|
print d
|
741 |
|
|
d = a.adminRole('okey','administrative')
|
742 |
|
|
print d
|
743 |
|
|
d = a.adminRole('dokey','administrative')
|
744 |
|
|
print d
|
745 |
|
|
d = a.adminRole('smokey','administrative')
|
746 |
|
|
print d
|
747 |
ggraham |
1.1 |
e = a.adminAssignRole('ggraham','admin')
|
748 |
|
|
print e
|
749 |
ggraham |
1.2 |
e = a.adminAssignRole('ggraham','okey')
|
750 |
|
|
print e
|
751 |
|
|
e = a.adminAssignRole('ggraham','dokey')
|
752 |
|
|
print e
|
753 |
|
|
e = a.adminAssignRole('ggraham','pokey')
|
754 |
|
|
print e
|
755 |
|
|
e = a.adminAssignRole('ggraham','smokey')
|
756 |
|
|
print e
|
757 |
|
|
e = a.adminAssignRole('ggraham','hokey')
|
758 |
|
|
print e
|
759 |
ggraham |
1.1 |
f = a.adminPhysicsGroup('egammaGroup', 'ggraham')
|
760 |
|
|
print f
|
761 |
|
|
g = a.publishPrimaryDataset({'MCChannel':'blahblah', 'MCProduction':'blahblah', \
|
762 |
|
|
'MCDecayChain':'blahblah'}, 'testDataset3', 'A test dataset', \
|
763 |
|
|
'testStream', 'egammaGroup', 'This is a test stream.')
|
764 |
|
|
print g
|
765 |
|
|
h = a.publishProcessedDataset('testDataset3', 'testOwner3', None, b, '/')
|
766 |
|
|
print h
|
767 |
|
|
i = a.publishEventCollection('OK', 'Valid', 100, '10e-15', 'testCollection3', h, 1, 'y', \
|
768 |
|
|
'testDataset3-testOwner3-testCollection3', {'ranseed':'123456', 'runnum':'1'}, \
|
769 |
|
|
[('file3', '364239', '125251', 'OK', 'Data'), \
|
770 |
|
|
('file4', '01273421', '120970112', 'OK', 'Data')] )
|
771 |
|
|
print i
|
772 |
|
|
|
773 |
ggraham |
1.2 |
|
774 |
|
|
print a.Administrative.attributes()
|
775 |
|
|
j = a.AdministrativeTable.smartSelect(["name = 'ggraham'"])
|
776 |
|
|
print map(lambda x : x._SubRows['role'], j) |