ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/COMP/DBS/Import/PublishAgent/publishAgent.py
Revision: 1.1
Committed: Fri May 20 04:59:55 2005 UTC (19 years, 11 months ago) by ggraham
Content type: text/x-python
Branch: MAIN
Log Message:
Import of DBS Prototype 0 publishAgent

File Contents

# User Rev Content
1 ggraham 1.1 """
2     publishAgent.py
3    
4     This file contains APIP methods that use the ViewObjectLayer to
5     populate the DBS. A utility class to read and parse SQL is included.
6     Module level representations of the table schemas are constructed and
7     stored at module level when the module is imported, but this should
8     probably go into a class at some point. A DBSpublisher class is
9     included that contains views constructed from the individual schemas.
10     Example inserts are at the end of the file in the __main__ section.
11    
12     Gregory Edwin Graham, 16-May-2005
13     """
14     __version__ = "$Id: publishAgent.py,v 1.4 2005/05/20 04:47:54 ggraham Exp $"
15     __revision__ = "$Revision: 1.4 $"
16    
17     import os, sys, time, pdb
18     from ViewObjectLayer import *
19    
class ReadTable:
    """
    Minimal parser for druid-generated SQL "create table" statements.

    (Self-described by the original author as "a crap SQL parser to be
    replaced by Anzar and Vijay".)  It reads one table definition out of
    the module-level sqlContent list, recording the table name, its
    columns and their coarse types, and the key constraints, so that
    makeObject() can build a SingleSchema from the ViewObjectLayer.
    """

    def __init__(self) :
        """
        Constructor.
        Initializes the containers that getTable() fills in.
        """
        self.schema = []       # column names, in declaration order
        self.types = {}        # dict: column name -> 'int' or 'string'
        self.pk = []           # primary key column names
        self.name = None       # table name (string)
        self.uniqueKeys = []   # list of lists of column names
        self.foreignKeys = {}  # here.attribute -> "there.attribute"
        self.notNulls = []     # columns declared NOT NULL

    def getTable(self, i) :
        """
        Parse the druid-generated table definition starting at index i of
        the module-level sqlContent list, filling the containers set up in
        the constructor.

        Raises RuntimeError when sqlContent[i] does not begin a
        "create table" statement.
        """
        if sqlContent[i].find("create table") != 0 :
            # Was `raise "Could not find table"` -- string exceptions are
            # invalid in modern Python; raise a real exception instead.
            raise RuntimeError("Could not find table")
        self.name = sqlContent[i].split()[2]
        # Reserve a slot for this table in the module-level registry.
        if self.name not in allSchemas :
            allSchemas[self.name] = None
        # Column definitions start two lines below "create table".
        j = i + 2
        stSchema = 1
        self.schema = []
        while sqlContent[j][0:4] != " );" :
            linecon = sqlContent[j].split()
            if stSchema == 1 :
                if len(linecon) > 0 :
                    if linecon[0].strip() == '' :
                        pass
                    elif linecon[0].strip() == ');' :
                        # End of the column list; stop recording columns.
                        stSchema = 0
                    elif linecon[0][0:6] in ['check('] :
                        # CHECK constraints are ignored.
                        pass
                    elif sqlContent[j].strip()[0:9] in ['primary k', 'foreign k'] :
                        if linecon[0] == 'primary' :
                            # "primary key(a,b)" -> ['a', 'b']
                            self.pk = linecon[1].split('(')[1]
                            self.pk = self.pk.split(')')[0]
                            self.pk = self.pk.split(',')
                        elif linecon[0][0:7] == 'foreign' :
                            # "foreign key(col) references tbl(attr)"
                            tmp = linecon[1]
                            tmp = tmp.split(')')[0]
                            tmp = tmp.split('(')[1]
                            tbln = linecon[3].split('(')[0]
                            attr = linecon[3].split('(')[1].split(')')[0]
                            self.foreignKeys[tmp] = tbln + '.' + attr
                    elif linecon[0][0:6] == 'unique' :
                        # "unique(a,b)" -> ['a', 'b']
                        tmp = linecon[0][6:]
                        tmp = tmp.split(')')[0]
                        tmp = tmp.split('(')[1]
                        tmp = tmp.split(',')
                        self.uniqueKeys.append(tmp)
                    else :
                        # Ordinary column: "name type [modifiers]".
                        name = linecon[0]
                        typee = linecon[1]
                        # Collapse SQL types to 'string' or 'int'.
                        if typee[0:3] in ['var', 'dat', 'cha'] :
                            typee = 'string'
                        elif typee[0:3] in ['int', 'num'] :
                            typee = 'int'
                        self.schema.append(name)
                        self.types[name] = typee
                        if sqlContent[j].find("not null") >= 0 :
                            self.notNulls.append(name)
                        if sqlContent[j].find("unique") >= 0 :
                            self.uniqueKeys.append([name])
            j = j + 1

    def makeObject(self) :
        """
        Using the filled arrays in the constructor, this calls
        SingleSchema from the ViewObjectLayer and returns the result.
        It also looks for attributes named "tableid" in tables named
        "table" and if it finds one, makes it a sequencer.
        """
        autoSeqID = self.name + 'id'
        if autoSeqID in self.pk :
            seq = {autoSeqID : 0}
        else :
            seq = {}
        a = SingleSchema(self.name, self.schema, self.types, \
            self.pk, self.uniqueKeys, self.foreignKeys, self.notNulls, {}, seq)
        return a
108    
# For now read in the schema in lowercase so later string matching in
# ReadTable.getTable is effectively case-insensitive.
# NOTE: the original one-liner leaked the file handle; close it explicitly.
_schemaFile = open("../../dbs-schema/sql/DBS-DB.sql", 'r')
try:
    sqlContent = [_line.lower() for _line in _schemaFile.readlines()]
finally:
    _schemaFile.close()
# This is the module level dictionary holding all individual schemas,
# keyed by table name.
allSchemas = {}
# Fill the schemas dictionary: every "create table" statement becomes a
# SingleSchema built by ReadTable.
for i in range(len(sqlContent)) :
    if sqlContent[i][0:5] == 'creat' :
        a = ReadTable()
        a.getTable(i)
        allSchemas[a.name] = a.makeObject()
121    
class DBSpublisher :
    """
    This class contains the entire DBS schema in
    terms of appropriate ViewObjects.

    Each view is a MultiSchema joining several of the table schemas read
    in at module import time; each view is wrapped in a Table through
    which the publish*/admin* API methods below perform their inserts.
    """
    def __init__(self) :
        """
        Constructor
        This builds all of the required ViewObjects.
        """

        # We will ignore createdby and lastmodifiedby attributes that will
        # be filled in automatically in the future by some other mechanism
        self.fkExclusionAttributes = ['lastmodifiedby', 'createdby']
        # Connection parameters passed to every Table constructor
        # (currently empty -- defaults are used).
        self.cParms = {}

        # View that describes Application Configurations
        self.Applications = MultiSchema(self.fkExclusionAttributes)
        self.Applications.addSchema(allSchemas['application'])
        self.Applications.addSchema(allSchemas['applicationfamily'])
        self.Applications.addSchema(allSchemas['collectiontype'])
        self.Applications.addSchema(allSchemas['parameterset'])
        # Second argument 1 marks the multi-row schema (one row per
        # parameter binding) -- TODO confirm against MultiSchema.addSchema.
        self.Applications.addSchema(allSchemas['parameterbinding'], 1)
        self.Applications.addSchema(allSchemas['applicationconfiguration'])
        self.Applications.addCondition("application.applicationfamily = " + \
            "applicationfamily.applicationfamilyid")
        self.Applications.addCondition("applicationconfiguration.applicationid = " + \
            "application.applicationid")
        self.Applications.addCondition("applicationconfiguration.parametersetid = " + \
            "parameterset.parametersetid")
        self.Applications.addCondition("parameterbinding.parametersetid = " + \
            "parameterset.parametersetid")
        self.ApplicationsTable = Table(self.Applications, **self.cParms)
        self.ApplicationsTable.initializeSequencers()

        # View that describes Administrative roles
        self.Administrative = MultiSchema(self.fkExclusionAttributes)
        self.Administrative.addSchema(allSchemas['person'])
        self.Administrative.addSchema(allSchemas['role'])
        self.Administrative.addSchema(allSchemas['assignedrole'])
        self.Administrative.addCondition('assignedrole.roleid = role.roleid')
        self.Administrative.addCondition('assignedrole.personid = person.personid')
        self.AdministrativeTable = Table(self.Administrative, **self.cParms)
        self.AdministrativeTable.initializeSequencers()

        # View that describes Person
        self.Person = MultiSchema(self.fkExclusionAttributes)
        self.Person.addSchema(allSchemas['person'])
        self.PersonTable = Table(self.Person, **self.cParms)
        self.PersonTable.initializeSequencers()

        # View that describes Role
        self.Role = MultiSchema(self.fkExclusionAttributes)
        self.Role.addSchema(allSchemas['role'])
        self.RoleTable = Table(self.Role, **self.cParms)
        self.RoleTable.initializeSequencers()

        # View that describes PhysicsGroup (joined to Person so the
        # convener can be looked up by name on insert)
        self.PhysicsGroup = MultiSchema(self.fkExclusionAttributes)
        self.PhysicsGroup.addSchema(allSchemas['physicsgroup'])
        self.PhysicsGroup.addSchema(allSchemas['person'])
        self.PhysicsGroup.addCondition('physicsgroup.physicsgroupconvener = person.personid')
        self.PhysicsGroupTable = Table(self.PhysicsGroup, **self.cParms)
        self.PhysicsGroupTable.initializeSequencers()

        # View that describes EventCollections
        self.EventCollections = MultiSchema(self.fkExclusionAttributes)
        self.EventCollections.addSchema(allSchemas['filetype'])
        self.EventCollections.addSchema(allSchemas['filestatus'])
        self.EventCollections.addSchema(allSchemas['file'])
        self.EventCollections.addSchema(allSchemas['evcollfile'])
        self.EventCollections.addSchema(allSchemas['eventcollection'])
        self.EventCollections.addSchema(allSchemas['analysiscollectiondata'])
        self.EventCollections.addSchema(allSchemas['validationstatus'])
        self.EventCollections.addSchema(allSchemas['analysiscollectionstatus'])
        self.EventCollections.addSchema(allSchemas['parameterset'])
        self.EventCollections.addSchema(allSchemas['parameterbinding'],1)
        self.EventCollections.addCondition('evcollfile.fileid = file.fileid')
        self.EventCollections.addCondition('parameterset.parametersetid = ' + \
            'parameterbinding.parametersetid')
        self.EventCollections.addCondition('analysiscollectiondata.otherqueryablemetadata' + \
            ' = parameterset.parametersetid')
        self.EventCollections.addCondition('file.filetype = filetype.filetypeid')
        self.EventCollections.addCondition('file.filestatus = filestatus.filestatusid')
        self.EventCollections.addCondition('evcollfile.evcollid = ' + \
            'eventcollection.eventcollectionid')
        self.EventCollections.addCondition('analysiscollectiondata.eventcollectionid = ' + \
            'eventcollection.eventcollectionid')
        self.EventCollections.addCondition('analysiscollectiondata.validationstatus = ' + \
            'validationstatus.validationstatusid')
        self.EventCollections.addCondition('analysiscollectiondata.analysiscollectionstatus' +\
            ' = analysiscollectionstatus.analysiscollectionstatusid')
        self.EventCollectionsTable = Table(self.EventCollections, **self.cParms)
        self.EventCollectionsTable.initializeSequencers()

        # View that describes EventCollections with complex
        # parentage (adds compositeeventcollection to the join)
        self.EventCollections2 = MultiSchema(self.fkExclusionAttributes)
        self.EventCollections2.addSchema(allSchemas['filetype'])
        self.EventCollections2.addSchema(allSchemas['filestatus'])
        self.EventCollections2.addSchema(allSchemas['file'])
        self.EventCollections2.addSchema(allSchemas['evcollfile'])
        self.EventCollections2.addSchema(allSchemas['eventcollection'])
        self.EventCollections2.addSchema(allSchemas['compositeeventcollection'],1)
        self.EventCollections2.addSchema(allSchemas['analysiscollectiondata'])
        self.EventCollections2.addSchema(allSchemas['validationstatus'])
        self.EventCollections2.addSchema(allSchemas['analysiscollectionstatus'])
        self.EventCollections2.addSchema(allSchemas['parameterset'])
        self.EventCollections2.addSchema(allSchemas['parameterbinding'],1)
        self.EventCollections2.addCondition('evcollfile.fileid = file.fileid')
        self.EventCollections2.addCondition('parameterset.parametersetid = ' + \
            'parameterbinding.parametersetid')
        self.EventCollections2.addCondition('analysiscollectiondata.otherqueryablemetadata' + \
            ' = parameterset.parametersetid')
        self.EventCollections2.addCondition('file.filetype = filetype.filetypeid')
        self.EventCollections2.addCondition('file.filestatus = filestatus.filestatusid')
        self.EventCollections2.addCondition('compositeeventcollection.childecid = ' + \
            'eventcollection.eventcollectionid')
        self.EventCollections2.addCondition('evcollfile.evcollid = ' + \
            'eventcollection.eventcollectionid')
        self.EventCollections2.addCondition('analysiscollectiondata.eventcollectionid = ' + \
            'eventcollection.eventcollectionid')
        self.EventCollections2.addCondition('analysiscollectiondata.validationstatus = ' + \
            'validationstatus.validationstatusid')
        self.EventCollections2.addCondition('analysiscollectiondata.analysiscollectionstatus' +\
            ' = analysiscollectionstatus.analysiscollectionstatusid')
        self.EventCollectionsTable2 = Table(self.EventCollections2, **self.cParms)
        self.EventCollectionsTable2.initializeSequencers()

        # View that describes primary/processed dataset parameters
        self.PrimaryDataset = MultiSchema(self.fkExclusionAttributes)
        self.PrimaryDataset.addSchema(allSchemas['triggerpathdescription'])
        self.PrimaryDataset.addSchema(allSchemas['mcdescription'])
        self.PrimaryDataset.addSchema(allSchemas['primarydatasetdescription'])
        self.PrimaryDataset.addSchema(allSchemas['primarydataset'])
        self.PrimaryDataset.addSchema(allSchemas['stream'])
        self.PrimaryDataset.addSchema(allSchemas['physicsgroup'])
        self.PrimaryDataset.addCondition('mcdescription.mcdescriptionid = ' + \
            'primarydatasetdescription.mcchanneldescriptionid')
        self.PrimaryDataset.addCondition('triggerpathdescription.triggerpathdescriptionid' + \
            ' = primarydatasetdescription.triggerdescriptionid')
        self.PrimaryDataset.addCondition('primarydataset.primarydatasetdescriptionid = ' + \
            'primarydatasetdescription.primarydatasetdescriptionid')
        self.PrimaryDataset.addCondition('primarydataset.streamid = stream.streamid')
        self.PrimaryDataset.addCondition('primarydataset.physicsgroupid = ' +\
            'physicsgroup.physicsgroupid')
        self.PrimaryDatasetTable = Table(self.PrimaryDataset, **self.cParms)
        self.PrimaryDatasetTable.initializeSequencers()

        # View that describes processing path - self referencing
        self.ProcessingPath = MultiSchema(self.fkExclusionAttributes)
        self.ProcessingPath.addSchema(allSchemas['processingpath'])
        self.ProcessingPath.addSchema(allSchemas['processeddataset'])
        self.ProcessingPath.addSchema(allSchemas['primarydataset'])
        self.ProcessingPath.addCondition('primarydataset.primarydatasetid = ' + \
            'processeddataset.primarydatasetid')
        self.ProcessingPath.addCondition('processeddataset.processingpathid = ' + \
            'processingpath.processingpathid')
        self.ProcessingPathTable = Table(self.ProcessingPath, **self.cParms)
        self.ProcessingPathTable.initializeSequencers()

        # View that describes AnalysisDatasets
        self.AnalysisDataset = MultiSchema(self.fkExclusionAttributes)
        self.AnalysisDataset.addSchema(allSchemas['analysisdataset'])
        self.AnalysisDataset.addSchema(allSchemas['evcollandata'])
        self.AnalysisDataset.addSchema(allSchemas['analysiscollectiondata'])
        self.AnalysisDataset.addSchema(allSchemas['analysisdatasetsubtype'])
        self.AnalysisDataset.addSchema(allSchemas['block'])
        self.AnalysisDataset.addSchema(allSchemas['snapshot'])
        self.AnalysisDataset.addSchema(allSchemas['productionassignment'])
        self.AnalysisDataset.addSchema(allSchemas['usercollection'])
        self.AnalysisDatasetTable = Table(self.AnalysisDataset, **self.cParms)
        self.AnalysisDatasetTable.initializeSequencers()
        # One shared connection is kept for explicit commit/rollback below.
        self._Connection = self.AnalysisDatasetTable.getConnection()
        print self._Connection.connectionName()

    def resetTransaction(self) :
        """
        This clears the transaction with implicit rollback.
        Implements TableInterface::resetTransaction
        """
        self._Connection.rollback()

    def saveTransaction(self) :
        """
        This clears the transaction with implicit commit.
        Implements TableInterface::saveTransaction
        """
        self._Connection.commit()

    def publishApplicationConfiguration(self, appFamName, exeName, appVersion, paramSetName, \
            paramSetVersion, paramSetComments, parameterBindings, inputType = None, \
            outputType = None) :
        """
        API method for publishing an Application Configuration.
        Returns the new applicationconfiguration id.
        The arguments are in order:

        appFamName
            Application Family Name (eg- CMKIN, OSCAR, ORCA)

        exeName
            Executable Name

        appVersion
            Application Version

        paramSetName
            Parameter Set Name (A unique name to be given by
            client for future reference. This will be
            automatically generated in the future.)

        paramSetVersion
            Parameter Set Version (To be given by client for
            future reference - this will be automatically
            generated in the future..)

        paramSetComments
            Parameter Set Comments

        parameterBindings
            Parameter Bindings (dictionary of key/value pairs)

        inputType (optional)
            Input Type (eg- NoInput, CMKIN, OSCAR, etc.)

        outputType (optional)
            OutputType (eg- CMKIN, OSCAR, ORCA, NoOutput, etc.)
        """
        rowData = Row(self.Applications)
        # The key syntax "a.b(c.d)" routes a lookup value into a foreign
        # key column -- presumably resolved by Row; see ViewObjectLayer.
        if inputType != None :
            rowData['collectiontype.collectiontype(application.inputcollectiontype)'] = inputType
        if outputType != None :
            rowData['collectiontype.collectiontype(application.outputcollectiontype)'] = outputType
        rowData['applicationfamily.applicationfamilyname'] = appFamName
        rowData['application.executablename'] = exeName
        rowData['application.applicationversion'] = appVersion
        rowData['parameterset.parametersetname'] = paramSetName
        rowData['parameterset.parametersetversion'] = paramSetVersion
        rowData['parameterset.parametersetannotation'] = paramSetComments
        rowData['parameterset.composite'] = 'n'
        # One parameterbinding sub-row per key/value pair; newData() opens
        # a fresh sub-row for every binding after the first.
        firstInsert = 1
        for key, val in parameterBindings.items() :
            if firstInsert == 0 :
                rowData.newData('parameterbinding')
            firstInsert = 0
            rowData['parameterbinding.parametername'] = key
            rowData['parameterbinding.parametervalue'] = val
        self.ApplicationsTable.smartInsert(rowData, rowData.keys())
        self.saveTransaction()
        return rowData['applicationconfiguration.applicationconfigurationid']

    def adminAssignRole(self, name, role) :
        """
        API Method to assign a role.
        Returns the new assignedrole id.
        The arguments are in order :

        name
            UNIX style name of the person

        role
            name of the role
        """
        rowData = Row(self.Administrative)
        rowData['person.name'] = name
        rowData['role.rolename'] = role
        self.AdministrativeTable.smartInsert(rowData, rowData.keys())
        self.saveTransaction()
        return rowData['assignedrole.assignedroleid']

    def adminPhysicsGroup(self, groupName, personName) :
        """
        API method to publish a physics group
        Returns the new physicsgroup id.
        The arguments are in order:

        groupName
            the physics group name

        personName
            the name of the convener
        """
        rowData = Row(self.PhysicsGroup)
        rowData['physicsgroup.physicsgroupname'] = groupName
        rowData['person.name'] = personName
        self.PhysicsGroupTable.smartInsert(rowData, rowData.keys())
        self.saveTransaction()
        return rowData['physicsgroup.physicsgroupid']

    def adminPerson(self, name, distinguishedname, contactinfo) :
        """
        API method to publish a person
        Returns the new person id.
        The arguments are in order:

        name
            the UNIX style name of the person

        distinguishedname
            the grid credential distinguished name of the person

        contactinfo
            contact information
        """
        rowData = Row(self.Person)
        rowData['person.name'] = name
        rowData['person.distinguishedname'] = distinguishedname
        rowData['person.contactinfo'] = contactinfo
        self.PersonTable.smartInsert(rowData, rowData.keys())
        # NOTE(review): unlike the other API methods this does not call
        # saveTransaction() -- confirm whether the missing commit is
        # intentional (e.g. batched with a later call).
        return rowData['person.personid']

    def adminRole(self, rolename, description) :
        """
        API method to publish a role
        Returns the new role id.
        The arguments are in order:

        rolename
            the name of the role

        description
            description of the role
        """
        rowData = Row(self.Role)
        rowData['role.rolename'] = rolename
        rowData['role.roledescription'] = description
        self.RoleTable.smartInsert(rowData, rowData.keys())
        # NOTE(review): no saveTransaction() here either -- confirm.
        return rowData['role.roleid']

    def publishPrimaryDataset(self, description, datasetName, \
            datasetComments, streamName, physicsGroupName, streamComments = None) :
        """
        API method to publish a primary dataset
        Returns the new primarydataset id.
        The arguments are in order:

        description
            A dictionary description of the dataset
            The dictionary can have MCChannel, MCProduction, and MCDecayChain
            OR
            The dictionary can have TriggerPath

        datasetName
            A dataset name (eg- the COBRA dataset name)

        datasetComments
            Comments

        streamName
            A Stream name

        physicsGroupName
            A physics group name

        streamComments (optional)
            Comments stored as the stream annotation
        """
        rowData = Row(self.PrimaryDataset)
        # Only one of the two description tables gets a row; skip the
        # other when its columns are left empty.
        rowData.setSkipOnEmpty('mcdescription')
        rowData.setSkipOnEmpty('triggerpathdescription')
        if description.has_key('MCChannel') :
            # Monte Carlo dataset: fill mcdescription, null out the
            # trigger side.
            rowData['mcdescription.mcchanneldescription'] = description['MCChannel']
            rowData['mcdescription.mcproduction'] = description['MCProduction']
            rowData['mcdescription.mcdecaychain'] = description['MCDecayChain']
            rowData['primarydatasetdescription.mcdataset'] = 'y'
            rowData['primarydatasetdescription.triggerdescriptionid'] = None
        elif description.has_key('TriggerPath') :
            # Real-data dataset: fill the trigger side, null out MC.
            rowData['triggerpathdescription.triggerpathdescription'] = description['TriggerPath']
            rowData['primarydatasetdescription.mcdataset'] = 'n'
            rowData['primarydatasetdescription.mcchanneldescriptionid'] = None
        rowData['stream.streamname'] = streamName
        if streamComments != None :
            rowData['stream.streamannotation'] = streamComments
        rowData['primarydataset.primarydatasetname'] = datasetName
        rowData['primarydataset.cobradatasetname'] = datasetName
        rowData['primarydataset.primarydatasetannotation'] = datasetComments
        rowData['primarydataset.openforwriting'] = 'y'
        rowData['physicsgroup.physicsgroupname'] = physicsGroupName
        self.PrimaryDatasetTable.smartInsert(rowData, rowData.keys())
        self.saveTransaction()
        return rowData['primarydataset.primarydatasetid']

    def publishProcessedDataset(self, datasetName, ownerName, parentProcPathID, applicationConfigID, agPath = None) :
        """
        API method to publish a processed dataset
        Returns the new processeddataset id.
        The arguments are in order

        datasetName
            dataset name of the primary dataset

        ownerName
            owner name (eg- the COBRA owner name)

        parentProcPathID
            parent processing path id (can be null)

        applicationConfigID
            application configuration id (returned by publishApplicationConfiguration)

        agPath (optional)
            aggregated path stored on the processingpath row
        """
        rowData = Row(self.ProcessingPath)
        rowData['primarydataset.primarydatasetname'] = datasetName
        rowData['processingpath.parentprocessingpathid'] = parentProcPathID
        rowData['processingpath.processingrecordid'] = applicationConfigID
        if agPath != None :
            rowData['processingpath.aggregatedpath'] = agPath
        rowData['processeddataset.cobraownername'] = ownerName
        rowData['processeddataset.openforwriting'] = 'y'
        self.ProcessingPathTable.smartInsert(rowData, rowData.keys())
        self.saveTransaction()
        return rowData['processeddataset.processeddatasetid']

    def publishEventCollection(self, evcollStatus, validationStatus, nEvents, \
            luminosity, collectionName, procDatasetID, evCollIndex, primaryEC, \
            paramSetName, parameterBindings, filelist) :
        """
        API to publish an EventCollection
        Returns the new eventcollection id.
        The arguments are in order:

        evcollStatus
            event collection status (eg- "OK")

        validationStatus
            validation status (eg- "OK")

        nEvents
            number of events

        luminosity
            estimated luminosity

        collectionName
            collection name (eg- COBRA collection name, can be None)

        procDatasetID
            processed dataset ID (returned by publishProcessedDataset)

        evCollIndex
            event collection index (eg- Run number)

        primaryEC
            boolean: is this a "primary" evCollection? 'y' for CMKIN, 'n'
            otherwise.

        paramSetName
            parameter set name : this is a unique name for the parameter set
            associated with the Event Collection. (This will be
            automatically generated in the future.)

        parameterBindings
            parameters : this is a dictionary containing the parameters
            describing this Event Collection.

        filelist
            file list: list of tuples of
            (logical file name, checksum, size, status, type)

        NOTES:
        This API is useful when event collections can be lined up
        on the event collection index exactly across different
        processing levels. Parentage information is assumed to
        be "collection 123 @ processing X" is a parent of
        "collection 123 @ processing Y" if Y follows X in
        ProcessingPath.
        """
        rowData = Row(self.EventCollections)
        rowData['analysiscollectionstatus.analysiscollectionstatus'] = evcollStatus
        rowData['validationstatus.validationstatus'] = validationStatus
        rowData['analysiscollectiondata.numberofevents'] = nEvents
        rowData['analysiscollectiondata.estimatedluminosity'] = luminosity
        rowData['analysiscollectiondata.cobracollectionname'] = collectionName
        rowData['eventcollection.processeddatasetid'] = procDatasetID
        rowData['eventcollection.primaryeventcollection'] = primaryEC
        rowData['eventcollection.compositeeventcollection'] = 'n'
        rowData['eventcollection.eventcollectionindex'] = evCollIndex
        rowData['parameterset.parametersetname'] = paramSetName
        rowData['parameterset.parametersetversion'] = '1.0'
        rowData['parameterset.parametersetannotation'] = \
            'Parameter set describing event collection ' + paramSetName
        rowData['parameterset.composite'] = 'n'
        # One parameterbinding sub-row per key/value pair.
        firstInsert = 1
        for key, val in parameterBindings.items() :
            if firstInsert == 0 :
                rowData.newData('parameterbinding')
            firstInsert = 0
            rowData['parameterbinding.parametername'] = key
            rowData['parameterbinding.parametervalue'] = val
        # One smartInsert per file: the file-related ids are deleted each
        # pass so the next file gets fresh rows.
        for i in range(len(filelist)) :
            rowData['file.logicalfilename'] = filelist[i][0]
            rowData['file.checksum'] = filelist[i][1]
            rowData['file.size'] = filelist[i][2]
            rowData['filestatus.filestatus'] = filelist[i][3]
            rowData['filetype.filetype'] = filelist[i][4]
            # Bug to be fixed later. The files inserts should be batched. 16-May-2005!!!
            if 'file.fileid' in rowData.keys() :
                del rowData['file.fileid']
                del rowData['evcollfile.evcollfileid']
                del rowData['evcollfile.fileid']
            self.EventCollectionsTable.smartInsert(rowData, rowData.keys())
        self.saveTransaction()
        return rowData['eventcollection.eventcollectionid']

    def publishEventCollection2(self, evcollStatus, validationStatus, nEvents, \
            luminosity, collectionName, procDatasetID, evCollIndex, parentECList, \
            paramSetName, parameterBindings, filelist) :
        """
        Another API to publish an EventCollection
        Returns the new eventcollection id.
        The arguments are in order:

        evcollStatus
            event collection status (eg- "OK")

        validationStatus
            validation status (eg- "OK")

        nEvents
            number of events

        luminosity
            estimated luminosity

        collectionName
            collection name (eg- COBRA collection name, can be None)

        procDatasetID
            processed dataset ID (returned by publishProcessedDataset)

        evCollIndex
            event collection index (eg- Run number)

        parentECList
            list of event collection IDs of the parents of this event collection.

        paramSetName
            parameter set name : this is a unique name for the parameter set
            associated with the Event Collection. (This will be
            automatically generated in the future.)

        parameterBindings
            parameters : this is a dictionary containing the parameters
            describing this Event Collection.

        filelist
            file list: list of tuples of
            (logical file name, checksum, size, status, type)

        NOTES:
        This API is useful when event collections must be loaded
        with multiple parents. The "CompositeEventCollection"
        table is used to track complex parentage relationships
        directly. This API method is not yet tested.
        """
        rowData = Row(self.EventCollections2)
        rowData['analysiscollectionstatus.analysiscollectionstatus'] = evcollStatus
        rowData['validationstatus.validationstatus'] = validationStatus
        rowData['analysiscollectiondata.numberofevents'] = nEvents
        rowData['analysiscollectiondata.estimatedluminosity'] = luminosity
        rowData['analysiscollectiondata.cobracollectionname'] = collectionName
        rowData['eventcollection.processeddatasetid'] = procDatasetID
        # A collection with no parents is primary by definition.
        if len(parentECList) == 0 :
            rowData['eventcollection.primaryeventcollection'] = 'y'
        else :
            rowData['eventcollection.primaryeventcollection'] = 'n'
        rowData['eventcollection.compositeeventcollection'] = 'y'
        rowData['eventcollection.eventcollectionindex'] = evCollIndex

        # One compositeeventcollection sub-row per parent id.
        firstInsert = 1
        for elem in parentECList :
            if firstInsert == 0 :
                rowData.newData('compositeeventcollection')
            firstInsert = 0
            rowData['compositeeventcollection.compositeecid'] = elem

        rowData['parameterset.parametersetname'] = paramSetName
        rowData['parameterset.parametersetversion'] = '1.0'
        rowData['parameterset.parametersetannotation'] = \
            'Parameter set describing event collection ' + paramSetName
        rowData['parameterset.composite'] = 'n'
        # One parameterbinding sub-row per key/value pair.
        firstInsert = 1
        for key, val in parameterBindings.items() :
            if firstInsert == 0 :
                rowData.newData('parameterbinding')
            firstInsert = 0
            rowData['parameterbinding.parametername'] = key
            rowData['parameterbinding.parametervalue'] = val
        # One smartInsert per file, as in publishEventCollection.
        for i in range(len(filelist)) :
            rowData['file.logicalfilename'] = filelist[i][0]
            rowData['file.checksum'] = filelist[i][1]
            rowData['file.size'] = filelist[i][2]
            rowData['filestatus.filestatus'] = filelist[i][3]
            rowData['filetype.filetype'] = filelist[i][4]
            # Bug to be fixed later. The files inserts should be batched. 16-May-2005!!!
            if 'file.fileid' in rowData.keys() :
                del rowData['file.fileid']
                del rowData['evcollfile.evcollfileid']
                del rowData['evcollfile.fileid']
            self.EventCollectionsTable2.smartInsert(rowData, rowData.keys())
        self.saveTransaction()
        return rowData['eventcollection.eventcollectionid']
712    
if __name__ == "__main__" :

    # Demo/driver code: exercises the publish API end to end against a
    # live database -- application config, admin records, primary and
    # processed datasets, and finally an event collection with two files.
    a = DBSpublisher()
    # Publish an application configuration; b is its id, reused below.
    b = a.publishApplicationConfiguration('OSCAR', 'oscar_new.exe', '3_6-2', 'test44', \
        'v1', 'test parameters', {'a':'1', 'b':'2', 'c':'3', 'd':'4'}, 'CMKIN', 'OSCAR')
    print b
    # Administrative records: person, role, role assignment, physics group.
    c = a.adminPerson('ggraham', 'Gregory Graham 123456', '1055 W. Addison')
    print c
    d = a.adminRole('admin','administrative')
    print d
    e = a.adminAssignRole('ggraham','admin')
    print e
    f = a.adminPhysicsGroup('egammaGroup', 'ggraham')
    print f
    # MC-style primary dataset (description carries the MC* keys).
    g = a.publishPrimaryDataset({'MCChannel':'blahblah', 'MCProduction':'blahblah', \
        'MCDecayChain':'blahblah'}, 'testDataset3', 'A test dataset', \
        'testStream', 'egammaGroup', 'This is a test stream.')
    print g
    # Processed dataset under application configuration b; h is its id.
    h = a.publishProcessedDataset('testDataset3', 'testOwner3', None, b, '/')
    print h
    # Event collection with two files attached to processed dataset h.
    i = a.publishEventCollection('OK', 'Valid', 100, '10e-15', 'testCollection3', h, 1, 'y', \
        'testDataset3-testOwner3-testCollection3', {'ranseed':'123456', 'runnum':'1'}, \
        [('file3', '364239', '125251', 'OK', 'Data'), \
        ('file4', '01273421', '120970112', 'OK', 'Data')] )
    print i
738