#!/usr/bin/env python

from optparse import OptionParser, OptionGroup
import sqlite3
#from pysqlite2 import dbapi2 as sqlite3
from IOV_WrapperFramework import IOV
import os
import sys
import re
import datetime
import base64
import pdb
import smtplib
import email

# utility function
def sortedDictValues(adict):
    """Return the keys of adict in sorted order (used to fix column ordering)."""
    keys = adict.keys()
    keys.sort()
#    if(_verbose):
#        for key in keys:
#            print key + ' ' + adict[key]
    return [key for key in keys]
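
# Illustrative example (not part of the original module): the numeric prefixes
# on the keys are what drive the ordering, e.g.
#
#   >>> sortedDictValues({'2_InsertTime': 'ts', '1_Rcd_id': 'integer'})
#   ['1_Rcd_id', '2_InsertTime']
#
# DB.createDB() below relies on this to emit its CREATE TABLE columns in a
# fixed order.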


class DB:
    """ A simple ad-hoc interface to reference sqlite Database"""
    main_db = ""
    connected = False
    table_name = 'reference'
    verbose = False
    JsonOutput = False

    def __init__(self, databaseName, table_name, Json=False, notification=True, rcpt='erosales@cern.ch'):
        """
        Constructor.

        Keyword arguments:
        databaseName -- the name of the sqlite file to use as database
        table_name -- the name of the table to use inside the
                      specified database
        Json -- a boolean to specify if JSON output is to be
                generated (used when run from Web Server)
                (default = False)
        notification -- a boolean to enable email notifications for
                        write operations (default = True)
        rcpt -- the email address that receives the notifications
                (default = 'erosales@cern.ch')

        """
        self.main_db = databaseName
        self.table_name = table_name
        self.JsonOutput = Json
        self.notification = notification
        self.rcpt = rcpt

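    # Usage sketch (illustrative only; the file and table names below are
    # placeholder assumptions, not values required by this class):
    #
    #   db = DB('referenceHisto.db', 'reference', Json=False, notification=False)
    #   db.setVerbose(True)
    #   db.connect()
    #   db.dumpLocalTags()
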
    def connect(self):
        """
        Establish a connection with the database.
        The connection is cached across a single instance of the object.

        """
        if(self.checkDB()):
            if not self.connected:
                self.con = sqlite3.connect(self.main_db)
                self.con.row_factory = sqlite3.Row
                self.cur = self.con.cursor()
                self.connected = True
        else:
            print "Error.\nDatabase " + self.main_db + " not found.\nExiting\n"
            sys.exit(1)

    def connectFirstTime(self):
        """
        Establish a connection with the database.
        The connection is cached across a single instance of the object.
        It does NOT check for the existence of the sqlite file.

        """
        if not self.connected:
            self.con = sqlite3.connect(self.main_db)
            self.cur = self.con.cursor()
            self.connected = True

    def setDatabase(self, databaseName):
        """Set the database internal variable to the specified value.

        Keyword arguments:
        databaseName -- the filename to use as sqlite database"""
        self.main_db = databaseName

    def setVerbose(self, value):
        """Set verbosity. Always remember to turn it OFF when used
        from the Web Server.

        Keyword arguments:
        value -- boolean to turn verbosity on/off.

        """
        self.verbose = value

    def checkDB(self):
        """Check if the specified sqlite file exists and return a Boolean."""
        return os.path.exists(self.main_db)

    def createDB(self):
        """Create a new database and the default table. """
        if (not self.checkDB()):
            table = {
                '1_Rcd_id' : 'integer DEFAULT 1 NOT NULL'
                ,'2_InsertTime' : 'ts'
                ,'3_LocalTag' : 'text'
                ,'4_StartRun' : 'int'
                ,'5_Subsystem' : 'text'
                ,'6_XML' : 'clob'
                ,'7_CMS_ORCOFF_PREP' : 'text DEFAULT "None" NOT NULL'
                ,'8_CMS_ORCOFF_PROD' : 'text DEFAULT "None" NOT NULL'
                ,'9_LastUpdate' : 'ts DEFAULT CURRENT_TIMESTAMP NOT NULL'}
            statement = "Create table " + self.table_name + " ("
            for colname in sortedDictValues(table):
                # strip the numeric ordering prefix to get the real column name
                real_colname = re.sub('\d+_', '', colname)
                statement += "'" + real_colname + "' " + table[colname] + ','
            statement += "PRIMARY KEY (Rcd_ID), CONSTRAINT uc_RunTagSubsystemXML UNIQUE (LocalTag, StartRun, Subsystem))"
#            statement = re.sub(",$", ')', statement)
            if(self.verbose): print statement
            self.connectFirstTime()
            self.cur.execute(statement)
            self.con.commit()
        else:
            print "Warning, nothing to do:\nDatabase already present"
            sys.exit(1)

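    # For reference, the loop above generates (approximately, assuming the
    # default table name 'reference') a statement of the form:
    #
    #   Create table reference ('Rcd_id' integer DEFAULT 1 NOT NULL,
    #     'InsertTime' ts, 'LocalTag' text, 'StartRun' int, 'Subsystem' text,
    #     'XML' clob, 'CMS_ORCOFF_PREP' text DEFAULT "None" NOT NULL,
    #     'CMS_ORCOFF_PROD' text DEFAULT "None" NOT NULL,
    #     'LastUpdate' ts DEFAULT CURRENT_TIMESTAMP NOT NULL,
    #     PRIMARY KEY (Rcd_ID),
    #     CONSTRAINT uc_RunTagSubsystemXML UNIQUE (LocalTag, StartRun, Subsystem))
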
    def updateRealRunNumber(self, oldRun, tag, newRun):
        """Update the starting run from which a payload will be valid in the orcoff_prod database"""
        self.connect()
#        statement = "update " + self.table_name + " StartRun=newrun where LocalTag=tag and StartRun =oldrun
#        statement = "update " + self.table_name + " set StartRun = " + newRun + " where LocalTag = " +tag " and " StartRun = " +oldRun";
        statement = "update " + self.table_name + " set StartRun = \"" + str(newRun) + "\" , CMS_ORCOFF_PROD = \"Y\" where LocalTag = \"" + tag + "\" and StartRun = \"" + str(oldRun) + "\""
#        print statement
        self.cur.execute(statement)
        self.con.commit()
#        print 'done'

    def updateProdRunNumber(self, newRun, payload):
        """Update the starting run from which a payload will be valid in the orcoff_prod database.
        Returns True on success, False on failure."""
        statement = "update " + self.table_name + " set StartRun = \"" + str(newRun) + "\" , CMS_ORCOFF_PROD = \"Y\" where Rcd_id = \"" + str(payload) + "\""
#        print statement
        try:
            self.connect()
            self.cur.execute(statement)
            self.con.commit()
            return True
        except Exception, err_msg:
            print 'loaded: false, err_msg: "', err_msg, '"'
            return False

    def getTagFromPayload(self, payload):
        """Get the tag for a given payload"""
        statement = "select LocalTag from reference WHERE Rcd_id = \"" + str(payload) + "\""
        try:
            self.connect()
#            print statement
            self.cur.execute(statement)
            return self.cur.fetchone()[0]
        except Exception, err_msg:
            print 'loaded: false, err_msg: "', err_msg, '"'
            return None

    def getRunFromPayload(self, payload):
        """Get the run number for a given payload"""
        statement = "select StartRun from reference WHERE Rcd_id = \"" + str(payload) + "\""
        try:
            self.connect()
#            print statement
            self.cur.execute(statement)
            return self.cur.fetchone()[0]
        except Exception, err_msg:
            print 'loaded: false, err_msg: "', err_msg, '"'
            return None

    def createFakeLocalTag(self, tag):
        """Create a fake localtag. The StartRun field is set to -1
        to distinguish a fake local tag from a real one.

        Keyword argument:
        tag -- the local tag to create

        """
        self.connect()
        statement = "insert into " + self.table_name + " ('InsertTime', 'LocalTag', 'StartRun', 'Subsystem', 'XML') values(?,?,?,?,?)"
        now = datetime.datetime.now()
        if(self.verbose): print statement, now, tag, -1, '', ''
        self.cur.execute(statement, (now, tag, -1, '', ''))
        self.con.commit()
        if(self.notification):
            self.notifyMe('createFakeLocalTag created a new localtag named ' + tag + ' on db ' + self.main_db)

    def insertRow(self, localtag, start, subsystem, xml):
        """Insert a row into the database.

        Keyword arguments:
        localtag -- the localtag to insert
        start -- the start run to insert
        subsystem -- the name of the subsystem to insert
        xml -- the path to the XML file to insert

        """
        self.connect()
        if (not os.path.exists(xml)):
            print "Error.\nThe specified XML does not exist on disk.\nExiting."
            sys.exit(1)
        else:
            blobdata = open(xml, 'rb').read()
            statement = "Insert into " + self.table_name + " ('InsertTime', 'LocalTag', 'StartRun', 'Subsystem', 'XML') values(?,?,?,?,?)"
            now = datetime.datetime.now()
            if(self.verbose): print statement, now, localtag, start, subsystem, sqlite3.Binary(blobdata)
            self.cur.execute(statement, (now, localtag, start, subsystem, sqlite3.Binary(blobdata)))
            self.con.commit()
            if(self.notification):
                self.notifyMe('[insertRow]\nTag ' + localtag + '\nStartRun ' + str(start) + '\nSubsystem ' + subsystem + '\nXML\n' + blobdata + '\non DB ' + self.main_db)

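    # Example call (illustrative; tag, run, subsystem and file path are made up):
    #
    #   db.insertRow('LocalCosmicsRef', 123456, 'SiStrip', '/path/to/SiStrip_ref.xml')
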
    def insertRowFromWeb(self, localtag, start, subsystem, xml):
        """Insert a row into the database from the Web Server.
        A check is performed to assert that the specified
        run is not in the past for the CMS official DB.

        Keyword arguments:
        localtag -- the localtag to insert
        start -- the start run to insert
        subsystem -- the name of the subsystem to insert
        xml -- the base64 encoded XML string to insert

        """
        # xml is base64 encoded!!!
        iov = IOV() #IOV_Wrapper('cms_orcoff_prod', 'CMS_COND_34X_DQM', False)
        if not iov.checkAvailability(localtag, start):
            print '{loaded: false, err_msg: "Selected run is in the past."}'
            return
        self.connect()
        blobdata = base64.decodestring(xml)
        statement = "Insert into " + self.table_name + " ('InsertTime', 'LocalTag', 'StartRun', 'Subsystem', 'XML') values(?,?,?,?,?)"
        now = datetime.datetime.now()
        if(self.verbose): print statement, now, localtag, start, subsystem, sqlite3.Binary(blobdata)
        try:
            self.cur.execute(statement, (now, localtag, start, subsystem, sqlite3.Binary(blobdata)))
            self.con.commit()
            print '{loaded: true, err_msg: ""}'
        except Exception, err_msg:
            print '{loaded: false, err_msg: "', err_msg, '"}'
        if(self.notification):
            self.notifyMe('[insertRowFromWeb]\nTag ' + localtag + '\nStartRun ' + str(start) + '\nSubsystem ' + subsystem + '\nXML\n' + blobdata + '\non DB ' + self.main_db)

    def insertRowFromWebRelVal(self, localtag, subsystem, xml):
        """Insert a record into the database for RelVal samples.
        The logic is the following:
        1. Loop over all entries in the specified skeleton localtag for the
           specific subsystem and take the maximum StartRun.
           For RelVal, StartRun is simply a versioning mechanism: the latest
           one gets picked up.
        2. Create a new record from the calling arguments, using the number
           computed at step 1, plus 1, as StartRun.

        Keyword arguments:
        localtag -- the local tag to insert
        subsystem -- the subsystem to insert
        xml -- the base64 encoded xml string to insert

        """

        version = 1
        self.connect()
        blobdata = base64.decodestring(xml)
        statement = 'select Max(StartRun) from ' + self.table_name + ' where subsystem = "' + subsystem + '" and localtag ="' + localtag + '"'
        self.cur.execute(statement)
        rows = self.cur.fetchall()
        for row in rows:
            if row[0]:
                version = row[0] + 1

        statement2 = "Insert into " + self.table_name + " ('InsertTime', 'LocalTag', 'StartRun', 'Subsystem', 'XML') values(?,?,?,?,?)"
        now = datetime.datetime.now()
        if(self.verbose): print statement2, now, localtag, version, subsystem, sqlite3.Binary(blobdata)
        try:
            cur2 = self.con.cursor()
            cur2.execute(statement2, (now, localtag, version, subsystem, sqlite3.Binary(blobdata)))
            self.con.commit()
            print '{loaded: true, err_msg: ""}'
        except Exception, err_msg:
            print '{loaded: false, err_msg: "', err_msg, '"}'
        if(self.notification):
            self.notifyMe('[insertRowFromWebRelVal]\nTag ' + localtag + '\nSubsystem ' + subsystem + '\nXML\n' + blobdata + '\non DB ' + self.main_db)

    def listKnownSubSystems(self):
        """List all the subsystems that have been so far inserted into the database."""

        self.connect()
        statement = "select distinct Subsystem from " + self.table_name + " order by Subsystem asc"
        if(self.verbose): print statement
        self.cur.execute(statement)
        if not self.JsonOutput:
            self.prettyPrintIt(self.cur)
#            for row in self.cur:
#                print row
        else:
            index = 0
            print '{ rows: ['
            print '{ id: -1, subsystem: "ALL", value: ""},'
            for row in self.cur:
                if not row[0]: # no data, or Subsystem is empty (fake record used only to make the localtag appear)
                    continue
                print '{ id: ' + str(index) + ', subsystem: "' + row[0] + '", value: "' + row[0] + '" },'
                index += 1
            print ']}'

    def dumpLocalTags(self):
        """List all the local tags that have been so far inserted into the database.
        Returns a list of corresponding tags.
        """
        self.connect()

        localtags = []
        statement = "Select distinct LocalTag from " + self.table_name
        if(self.verbose): print statement
        self.cur.execute(statement)
        if not self.JsonOutput:
            r = self.cur.fetchone()
            while r:
                for key in r.keys():
                    print key, r[key]
                    localtags.append(r[key])
                r = self.cur.fetchone()
        else:
            index = 0
            print '{ rows: ['
            print '{ id: -1, localtag: "ALL", value: ""},'
            for row in self.cur:
                print '{ id: ' + str(index) + ', localtag: "' + row[0] + '", value: "' + row[0] + '"},'
                index += 1
            print ']}'

        return localtags

#        print '<data>'
#        print '  <tags>'
#        for row in self.cur:
#            print '    <localtag>' + row[0] + '</localtag>'
#        print '  </tags>'
#        print '</data>'

    def dumpAllData(self, localtag, subsystem, start=0, limit=sys.maxint):
        """Dump all data currently available in the database.

        Keyword arguments:
        localtag -- the name of the localtag used to filter data (if not '')
        subsystem -- the name of the subsystem used to filter data (if not '')
        start -- the record after which data is to be dumped (used only for paging
                 in the Web Server) ( default = 0 )
        limit -- the number of records to be dumped (used only for paging
                 in the Web Server) ( default = sys.maxint )

        """
        startV = float(start)
        limitV = float(limit)
        self.connect()
        statement = "Select InsertTime, LocalTag, StartRun, Subsystem, XML, cms_orcoff_prep, cms_orcoff_prod, LastUpdate from " + self.table_name + " where StartRun > 0 "
        if(localtag != ''):
            statement += ' and LocalTag ="' + localtag + '"'
        if(subsystem != ''):
            statement += ' and Subsystem ="' + subsystem + '"'
        statement += " order by LocalTag, Subsystem"
        if(self.verbose): print statement
        self.cur.execute(statement)
#        toBoolean = {}
        iov = IOV() #_Wrapper('cms_orcoff_prep','CMS_COND_31X_DQM_SUMMARY', self.verbose)
        if not self.JsonOutput:
            for row in self.cur:
                print row
            print self.cur.fetchall()
        else:
            rows = self.cur.fetchall()
            print '{ "total": ' + str(len(rows)) + ', "rows": ['
            for index, row in enumerate(rows):
                if not row[3]: # either Subsystem is an empty string or no data is available
                    continue
#                print 'Before ' + str(index) + ' ' + str(startV) + ' ' + str(startV+limitV)
                if(index < startV):
                    continue
                if(index >= startV + limitV):
                    continue
#                print 'After ' + str(index) + ' ' + str(start) + ' ' + str(start+limit)
#                inserted = 1
#                if iov.checkAvailability(row[1], row[2]) :
#                    inserted = 0
                date = row[0]
                date = re.sub("\.\d+$", '', date) # get rid of the trailing .XXXXX from the InsertTime field
                lastupdate = row[7]
                lastupdate = re.sub("\.\d+$", '', lastupdate) # get rid of the trailing .XXXXX from the LastUpdate field
                encodedXML = base64.encodestring(row[4])
                encodedXML = re.sub('\n', '', encodedXML)

#                toBoolean['Y'] = '1'
#                toBoolean['N'] = '0'
#                toBoolean['None'] = '0'
#                print '{ "id": ' + str(index) + ', "InsertTime": "' + date + '"' + ', "LocalTag": "' + row[1] + '"' + ', "StartRun": ' + str(row[2]) + ', "Subsystem": "' + row[3] + '", "XML": "' + encodedXML + '", "InDB": ' + str(inserted) + '},'
                print '{ "id": ' + str(index) + ', "InsertTime": "' + date + '"' + ', "LocalTag": "' + row[1] + '"' + ', "StartRun": ' + str(row[2]) + ', "Subsystem": "' + row[3] + '", "XML": "' + encodedXML + '", "isIn": "None", "cms_orcoff_prep": "' + row[5] + '", "cms_orcoff_prod": "' + row[6] + '", "LastUpdate": "' + lastupdate + '"},'
            print ']}'

    def dumpLocalTag(self, tag):
        """Dump data pertaining to the specified local tag."""
        self.connect()
        statement = "Select InsertTime, LocalTag, StartRun, Subsystem from " + self.table_name + " where LocalTag = '" + tag + "' order by Subsystem"
        if(self.verbose): print statement
        self.cur.execute(statement)
        for row in self.cur:
            print row
        print self.cur.fetchall()

    def getNextAvailableRun(self, localtag):
        """ Search for the next available run number, based on currently SAVED IOVs
        in cms_orcoff_prep (To be switched to prod ASAP!!).

        The Logic:
        1. select the Maximum run number of all records
           with the specified localtag that have already been
           inserted into cms_orcoff_prep.
        2. If no record has been found it means that:
           a. the tag does not yet exist => return 1.
           b. the tag is there but the record has not
              yet been uploaded. Return the maximum
              (safest) value anyway, together with an alert message.
        3. return the maximum value + 1.
        """

        self.connect()
        localtag = re.sub('\n', '', localtag)
        statement = 'select Max(StartRun), cms_orcoff_prep from ' + self.table_name + ' where LocalTag = "' + localtag + '";'
        if(self.verbose): print statement
        self.cur.execute(statement)
        print '{ rows: ['

        for row in self.cur:
            if re.match('None', str(row[0])):
                print '{ nextAvailable: 1, Mesg: "New Tag"},'
            else:
                if re.match('Y', str(row[1])):
                    print '{ nextAvailable: ' + str(row[0]+1) + ', Mesg: "Existing.<br/>Loaded."},'
                if re.match('N$', str(row[1])): # match just 'N'
                    statement2 = 'select Max(StartRun) from ' + self.table_name + ' where LocalTag = "' + localtag + '" and cms_orcoff_prep = "Y" ;'
                    if(self.verbose): print statement2
                    cur2 = self.con.cursor()
                    cur2.execute(statement2)
                    for row2 in cur2:
                        if re.match('None', str(row2[0])):
                            print '{ nextAvailable: ' + str(row[0]+1) + ', Mesg: "Existing.<br/>Never Loaded."},'
                        else:
                            print '{ nextAvailable: ' + str(row[0]+1) + ', Mesg: "Existing.<br/>Loaded.<br/>Last Loaded: ' + str(row2[0]) + '"},'
                if re.match('None', str(row[1])):
                    print '{ nextAvailable: ' + str(row[0]+1) + ', Mesg: "Existing.<br/>NOT Checked."},'

        print ']}'

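    # Illustrative response (values invented) as emitted above for an existing,
    # already loaded tag:
    #
    #   { rows: [
    #   { nextAvailable: 123457, Mesg: "Existing.<br/>Loaded."},
    #   ]}
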
    def produceValidDataForRun(self, tag, subsystem, runToCheck, start=0, limit=sys.maxint):
        """Search for data valid for the specified tag and run and subsystem.
        This method must be used only by the Web Server, since it prints data only
        in JSON format.

        The Logic:
        1. make a loop over all the localtags
        2. make a loop over all the subsystems
        3. if the user supplied a tag and/or a subsystem to filter on,
           pick up only the proper records.
        4. do a select on each proper record taking the Max(StartRun)
           where StartRun <= runToCheck: this guarantees that this
           record is the one of validity for that particular run
           (it is the latest StartRun before or at the run we want
           to check: the next StartRun would be beyond our run of interest).

        Keyword arguments:
        tag -- the name of the tag to search for
        subsystem -- the name of the subsystem to search data for
        runToCheck -- the run for which data must be valid (data returned
                      is contained in the proper IOV for the specified run)
        start -- the record after which data is to be dumped (used
                 only for paging in the Web Server) ( default = 0 )
        limit -- the number of records to be dumped (used only for
                 paging in the Web Server) ( default = sys.maxint )

        """

        startV = float(start)
        limitV = float(limit)
        self.connect()
        statement = 'select distinct LocalTag from reference order by LocalTag ASC;'
        if(self.verbose): print statement
        self.cur.execute(statement)
        print '{ "rows": ['
        index = 0
        for ltag in self.cur:
            if(tag != ''):
                if(tag != ltag[0]):
                    continue
            statement2 = 'select distinct Subsystem from reference order by Subsystem ASC;'
            cur2 = self.con.cursor()
            cur2.execute(statement2)
            for subsys in cur2:
                if(subsystem != ''):
                    if(subsystem != subsys[0]):
                        continue
                statement3 = 'select Max(StartRun), LocalTag, Subsystem, InsertTime, XML , cms_orcoff_prep, cms_orcoff_prod, lastupdate from reference where StartRun <= ' + str(runToCheck) + ' and LocalTag ="' + ltag[0] + '" and Subsystem = "' + subsys[0] + '"'
                cur3 = self.con.cursor()
                cur3.execute(statement3)
                rows = cur3.fetchall()
                for row in rows:
                    if not row[3]: # no data matches the criteria
                        continue
                    date = row[3]
                    date = re.sub("\.\d+$", '', date) # get rid of the trailing .XXXXX from the InsertTime field
                    lastupdate = row[7]
                    lastupdate = re.sub("\.\d+$", '', lastupdate) # get rid of the trailing .XXXXX from the LastUpdate field
                    encodedXML = base64.encodestring(row[4])
                    encodedXML = re.sub('\n', '', encodedXML)
                    if(index < startV):
                        continue
                    if(index >= startV + limitV):
                        continue
                    print '{ id: ' + str(index) + ', InsertTime: "' + date + '", LocalTag: "' + row[1] + '", StartRun: ' + str(row[0]) + ', Subsystem: "' + row[2] + '", XML: "' + encodedXML + '", cms_orcoff_prep: "' + row[5] + '", cms_orcoff_prod: "' + row[6] + '", LastUpdate: "' + lastupdate + '"},'
                    index += 1
        print ']}'

    def produceForTag(self, tag, oracleDB):
        """Write XML files for the specified tag and use the specified Oracle database
        in the metadata definition.
        All records that are in the past with respect to the official CMS
        Oracle Database are discarded and NO XML file is produced: a
        warning message is issued to the end user.

        The Logic:
        1. first fetch all StartRun linked to a particular local tag and
           order them in ASCENDING order: for each of these values there
           should be a new record, since one or more subsystems have changed
           their reference histograms
        2. Loop over the values obtained from point 1
        3. Fetch all subsystems defined in the localTag under study
        4. For each subsystem of point 3 query the DB for the XML
           corresponding to StartRun <= value of point 1 and order
           the results in DESCENDING order according to StartRun: in
           this way the most up-to-date record for this run and this
           subsystem is always the first one, that is the one used.

        Keyword arguments:
        tag -- the tag for which the XML have to be produced
        oracleDB -- the name of the official CMS Oracle Database
                    to which payloads have to be uploaded. This
                    is automatically written in the metadata file.

        Returns:
        A list of produced XML files.

        """
        self.connect()
        writtenXML = []
        statement = 'select distinct StartRun from reference where LocalTag = "' + tag + '" order by StartRun ASC;'
        if(self.verbose): print statement
        self.cur.execute(statement)
        runs = []
        for row in self.cur:
            runs.append(row[0])
        if(self.verbose): print runs

        statement = 'select distinct Subsystem from reference where LocalTag = "' + tag + '" order by Subsystem ASC;'
        if(self.verbose): print statement
        self.cur.execute(statement)
        subsystems = []
        for row in self.cur:
            subsystems.append(row[0])
        if(self.verbose): print subsystems

        iov = IOV() #_Wrapper(oracleDB,'CMS_COND_34X_DQM', self.verbose)
        for run in runs:
            if(self.verbose):
                print "Analyzing Run Number " + str(run)
            if not iov.checkAvailability(tag, run):
                print 'Run ', run, ' for tag ', tag, ' has to be discarded since it is in the past for DB ', oracleDB
                continue
            xmlFileName = 'referencesHisto_since_%(run)06d_%(tag)s.xml' % {"run": int(run), 'tag': tag} #+ str(run) + '_' + tag + '.xml'
            f = open(xmlFileName, 'w')
            self.printHeader(f, tag, run, oracleDB)
            for subsystem in subsystems:
                if(self.verbose):
                    print "Analyzing SubSystem " + subsystem
                statement = 'Select XML, StartRun from reference where StartRun <=' + str(run) + ' and Subsystem ="' + subsystem + '" and localtag="' + tag + '" order by StartRun DESC'
                if(self.verbose):
                    print statement
                self.cur.execute(statement)
                try:
                    row = self.cur.fetchone()
                    f.write("    ")
                    f.write(row[0])
                except:
                    if(self.verbose):
                        print 'No record found for subsystem %s and run within %s' % (subsystem, run)
                f.write("\n\n")
            f.write('</RefHistos>\n')
            f.close()
            writtenXML.append(xmlFileName)
        return writtenXML

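    # For example (illustrative values), a record of tag 'LocalCosmicsRef'
    # valid from run 123456 would be written to
    # 'referencesHisto_since_123456_LocalCosmicsRef.xml'.
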
    def printHeader(self, f, tag, run, oracleDB):
        """Print the header of the XML. The values specified
        will be automatically propagated into the metadata file.

        Keyword arguments:
        f -- the open file object to which the header is written
        tag -- the tag to be written in the header as 'tag' attribute to
               the MetaData xml field.
        run -- the run to be written in the header as 'since' attribute to
               the MetaData xml field.
        oracleDB -- the name of the official CMS Oracle database to be
                    written in the header as 'destDB' attribute to the MetaData xml field.

        """
        f.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        f.write('<!DOCTYPE RefHistos SYSTEM "referenceHisto.dtd">')
        f.write('\n\n')
        f.write('<RefHistos DataSetName="Cosmics" SoftwareVersion="CMSSW_3_5_0" GlobalTag="GR09_P_V1">')
        f.write('\n\n')
        f.write('  <MetaData destDB="oracle://' + oracleDB + '/CMS_COND_42X_DQM" tag="' + tag + '" since="' + str(run) + '"/>\n\n')

    def produceSkeletonXML(self, subsystem, sourceFile):
        """Produce a skeleton XML using the specified subsystem
        and path filename as source and return the generated XML.

        Keyword arguments:
        subsystem -- the name of the subsystem to use in the XML
        sourceFile -- the path to the file to be used in the 'source'
                      attribute of the SubDetectors XML field.
                      If it is on castor, please prepend rfio:/

        """
        paths = sourceFile.split("/")
        filename = paths[len(paths)-1]
        filename = re.sub("DQM.*V\d+_R\d+?__", "", filename)
        filename = subsystem + "_" + re.sub("\.root", ".XML", filename)
        if(self.verbose): print filename
        f = open(filename, 'w')
        f.write('<SubDetectors name="' + subsystem + '" category="Run summary" source="' + sourceFile + '">\n')
        f.write('  <ReferenceEntity name=".*"/>\n')
        f.write('</SubDetectors>\n')
        f.close()
        return filename

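    # Illustrative example (file name invented): with subsystem 'SiStrip' and
    # sourceFile '/castor/cern.ch/DQM_V0001_R000123456__Cosmics__CMSSW_3_5_0__RECO.root',
    # the regexps above would produce the skeleton file
    # 'SiStrip_Cosmics__CMSSW_3_5_0__RECO.XML' containing a single
    # <SubDetectors> block whose 'source' attribute points at that root file.
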
    def produceSkeletonXMLAndInsert(self, subsystem, sourceFile, tag, run):
        """Produce a skeleton XML and insert it into the database.

        Keyword arguments:
        subsystem -- the subsystem for which the skeleton XML has to
                     be created and inserted
        sourceFile -- the path to the file to be used in the 'source'
                      attribute of the SubDetectors XML field.
                      If it is on castor, please prepend rfio:/
        tag -- the tag to be used
        run -- the run to be used

        """

        xml = self.produceSkeletonXML(subsystem, sourceFile)
        self.insertRow(tag, run, subsystem, xml)

    def cloneDBTag(self, oldTag, newTag):
        """
        This method is used to clone an existing tag into a new one. The copy is done for each
        and every IOV of the existing tag, for each and every subsystem of the existing
        tag.

        Keyword arguments:
        oldTag -- the tag that is to be cloned
        newTag -- the tag that is to be created

        """
        self.connect()
        statement = 'select InsertTime, LocalTag, StartRun, Subsystem, XML from ' + self.table_name + ' where localtag= "' + oldTag + '" and StartRun > 0'
        self.cur.execute(statement)
        rows = self.cur.fetchall()
        for row in rows:
            statement2 = 'insert into ' + self.table_name + ' ("InsertTime", "LocalTag", "StartRun", "Subsystem", "XML") values(?,?,?,?,?) '
            now = datetime.datetime.now()
            xml_orig = row[4]
#            xml_final = re.sub("source=\".*\"", 'source="' + sourceFile + '"', xml_orig)
            if(self.verbose): print statement2, now, newTag, row[2], row[3], row[4]
            cur3 = self.con.cursor()
            cur3.execute(statement2, (now, newTag, row[2], row[3], row[4]))
            self.con.commit()
        if(self.notification):
            self.notifyMe('[cloneDBTag]\nOriginal Tag ' + oldTag + '\nNew Tag ' + newTag + '\non DB ' + self.main_db)

    def modifySource(self, tag, run, subsystem, newSource):
        """
        This method is used to modify the source attribute of an
        existing XML, using the newSource field supplied by the
        end user.

        Keyword arguments:
        tag -- the tag to which the XML to modify belongs
        run -- the run to which the XML to modify belongs
        subsystem -- the subsystem to which the XML to modify belongs
        newSource -- the new source that has to be replaced into the
                     record identified by the previous 3 parameters

        """
        self.connect()
        statement = 'select InsertTime, LocalTag, StartRun, Subsystem, XML from ' + self.table_name + ' where localtag= "' + tag + '" and StartRun = ' + str(run) + ' and subsystem = "' + subsystem + '"'
        if self.verbose: print statement
        self.cur.execute(statement)
        rows = self.cur.fetchall()
#        print len(rows)
        if len(rows) == 0:
            print 'No matching object to update: (', tag, ',', run, ',', subsystem, ')'
            return
        for row in rows:
            now = datetime.datetime.now()
            xml_orig = row[4]
            xml_final = re.sub("source=\".*\"", 'source="' + newSource + '"', xml_orig)
            statement2 = 'update ' + self.table_name + ' set XML=?,LastUpdate=? where localtag= "' + tag + '" and StartRun = ' + str(run) + ' and subsystem = "' + subsystem + '"'
            if self.verbose: print 'updating record ... ', statement2
            if self.verbose: print sqlite3.Binary(xml_final)
            cur2 = self.con.cursor()
            cur2.execute(statement2, (sqlite3.Binary(xml_final), now))
            self.con.commit()
        if(self.notification):
            self.notifyMe('[modifySource]\nRecord to be modified\nTag ' + tag + '\nStartRun ' + str(run) + '\nSubsystem ' + subsystem + '\nNewSource ' + newSource + '\non DB ' + self.main_db)

    def syncWithCMSDBS(self, fast=True):
        """
        This method is used to modify the records that still
        have no information about their status on both
        orcoff_prep and orcoff_prod. These records are
        updated according to their status in the CMS DBs.

        Keyword arguments:
        fast -- when set to True (default) the check on the CMS DBs
                is done only on the records that have not been
                already updated, that is the records that still
                have CMSDB='None'.
                When set to False, the check is done on ALL the
                records (actually all the distinct couples
                localtag-startrun) of the DB.

        """
#        self.syncWithCMSDB('cms_orcoff_prep', fast)
        self.syncWithCMSDB('cms_orcoff_prod', fast)

    def syncWithCMSDB(self, cmsdb, fast=True):
        """
        This method is used to modify the records that still
        have no information about their status on the selected db.
        These records are updated according to their status
        in the specified CMS DB.

        Keyword arguments:
        cmsdb -- the CMS DB column to synchronize (cms_orcoff_prep|cms_orcoff_prod)
        fast -- when set to True (default) the check on the CMS DBs
                is done only on the records that have not been
                already updated, that is the records that still
                have CMSDB='None'.
                When set to False, the check is done on ALL the
                records (actually all the distinct couples
                localtag-startrun) of the DB.

        """
        # CMS_ORCOFF_PROD
        checker = IOV() #_Wrapper(cmsdb, 'CMS_COND_34X_DQM', self.verbose)
        self.connect()
        statement = 'select distinct localtag, startrun from ' + self.table_name + ' where StartRun >0'
        if fast:
            statement += ' and ' + cmsdb + ' = "None"'
        if self.verbose: print statement
        self.cur.execute(statement)
        rows = self.cur.fetchall()
        for row in rows:
#            checker.loadIOVs(row[0])
            now = datetime.datetime.now()
            statement = 'update ' + self.table_name + ' set ' + cmsdb + '=?,LastUpdate=? where localtag="' + row[0] + '" and StartRun =' + str(row[1])
            if self.verbose: print 'updating record ... ', statement
            if checker.checkPresence(str(row[0]), int(row[1])):
                self.cur.execute(statement, ('Y', now))
                self.con.commit()
                if(self.notification):
                    self.notifyMe('[syncWithCMSDB]\nRecord synchronized with ' + cmsdb + '\nTag ' + str(row[0]) + '\nStartRun ' + str(row[1]) + '\nStatus Y\non DB ' + self.main_db)
            else:
                self.cur.execute(statement, ('N', now))
                self.con.commit()
                if(self.notification):
                    self.notifyMe('[syncWithCMSDB]\nRecord synchronized with ' + cmsdb + '\nTag ' + str(row[0]) + '\nStartRun ' + str(row[1]) + '\nStatus N\non DB ' + self.main_db)

    def modifyRun(self, tag, run, subsystem, newRun):
        """
        This method is used to modify the start Run of an
        existing record using the newRun field supplied by the
        end user.

        Keyword arguments:
        tag -- the tag to which the record to modify belongs
        run -- the run to which the record to modify belongs
        subsystem -- the subsystem to which the record to
                     modify belongs
        newRun -- the new run that has to be used

        """
        self.connect()
        now = datetime.datetime.now()
        statement = 'update ' + self.table_name + ' set StartRun=?,LastUpdate=? where localtag= "' + tag + '" and StartRun = ' + str(run) + ' and subsystem = "' + subsystem + '"'
        if self.verbose: print 'updating record ... ', statement
        self.cur.execute(statement, (newRun, now))
        self.con.commit()
        if(self.notification):
            self.notifyMe('[modifyRun]\nRecord to be modified\nTag ' + tag + '\nStartRun ' + str(run) + '\nSubsystem ' + subsystem + '\nNewRun ' + str(newRun) + '\non DB ' + self.main_db)

    def produceXMLFromDBSkeletonAndInsert(self, skeletonTag, sourceFile, tag, run):
        """Produce a new local tag from a skeleton one
        and insert its latest payloads into the database.
        The skeletonTag is not entirely cloned, but
        only the latest record for each subsystem
        is cloned, with the modified source, into
        the new local tag.

        Keyword arguments:
        skeletonTag -- the skeleton tag that you want to 'clone'
        sourceFile -- the new source file that you want to be
                      put in the XML for the newly created
                      local tag.
        tag -- the tag to be used
        run -- the run to be used

        """

        self.connect()
        statement = 'select distinct subsystem from ' + self.table_name + ' where localtag= "' + skeletonTag + '" and StartRun > 0'
        self.cur.execute(statement)
        rows = self.cur.fetchall()
        for row in rows:
#            pdb.set_trace()
            if not row[0]:
                continue
            cur2 = self.con.cursor()
            statement2 = 'select Max(StartRun), LocalTag, Subsystem, XML from ' + self.table_name + ' where LocalTag = "' + skeletonTag + '" and SubSystem = "' + row[0] + '" and StartRun > 0'
            cur2.execute(statement2)
            for row2 in cur2:
                print 'Cloning ', row2[2]
                statement3 = 'insert into ' + self.table_name + ' ("InsertTime", "LocalTag", "StartRun", "Subsystem", "XML") values(?,?,?,?,?) '
                now = datetime.datetime.now()
                xml_orig = row2[3]
                xml_final = re.sub("source=\".*\"", 'source="' + sourceFile + '"', xml_orig)
                if(self.verbose): print statement3, now, tag, run, row2[2], xml_final
                cur3 = self.con.cursor()
                cur3.execute(statement3, (now, tag, run, row2[2], xml_final))
                self.con.commit()
        if(self.notification):
            self.notifyMe('[produceXMLFromDBSkeletonAndInsert]\nSkeletonTag ' + skeletonTag + '\nTag ' + tag + '\nStartRun ' + str(run) + '\nSource ' + sourceFile + '\non DB ' + self.main_db)

    def produceXMLFromDBSkeletonAndInsertNoSourceOverried(self, skeletonTag, sourceFile, tag, run):
        """Produce a new local tag from a skeleton one
        and insert its latest payloads into the database.
        The skeletonTag is not entirely cloned, but
        only the latest record for each subsystem
        is cloned, with NO modified source, into
        the new local tag.

        Keyword arguments:
        skeletonTag -- the skeleton tag that you want to 'clone'
        sourceFile -- unused (never used by this method)
        tag -- the tag to be used
        run -- the run to be used

        """

        self.connect()
        statement = 'select distinct subsystem from ' + self.table_name + ' where localtag= "' + skeletonTag + '" and StartRun > 0'
        self.cur.execute(statement)
        rows = self.cur.fetchall()
        for row in rows:
#            pdb.set_trace()
            if not row[0]:
                continue
            cur2 = self.con.cursor()
            statement2 = 'select Max(StartRun), LocalTag, Subsystem, XML from ' + self.table_name + ' where LocalTag = "' + skeletonTag + '" and SubSystem = "' + row[0] + '" and StartRun > 0'
            cur2.execute(statement2)
            for row2 in cur2:
                print 'Cloning ', row2[2]
                statement3 = 'insert into ' + self.table_name + ' ("InsertTime", "LocalTag", "StartRun", "Subsystem", "XML") values(?,?,?,?,?) '
                now = datetime.datetime.now()
                # the source attribute is deliberately left untouched here
                xml_orig = row2[3]
#                xml_final = re.sub("source=\".*\"", 'source="' + sourceFile + '"', xml_orig)
                if(self.verbose): print statement3, now, tag, run, row2[2], xml_orig
                cur3 = self.con.cursor()
                cur3.execute(statement3, (now, tag, run, row2[2], xml_orig))
                self.con.commit()
                if(self.notification):
                    self.notifyMe('[produceXMLFromDBSkeletonAndInsertNoSourceOverried]\nSkeletonTag ' + skeletonTag + '\nTag ' + tag + '\nStartRun ' + str(run) + '\nSubsystem ' + row2[2] + '\non DB ' + self.main_db)

    def JsonIt(self, cursor):
        """
        This is a simple method that, given a cursor (filled after
        executing a query), will print the corresponding JSON Object.
        """
        r = cursor.fetchone()
        obj = ''
        obj += '{"rows": ['
        while r:
            obj += '{'
            for key in r.keys():
                obj += ' "' + str(key) + '": "' + str(r[key]) + '",'
            obj = re.sub(',$', '', obj)
            obj += ' },'
            r = cursor.fetchone()
        obj = re.sub(',$', '', obj)
        obj += ']}'
        print obj

    def prettyPrintIt(self, cursor):
        """
        This is a simple method that, given a cursor (filled after
        executing a query), will pretty print it on the screen.
        """
        r = cursor.fetchone()
        while r:
            for key in r.keys():
                print key, r[key]
            r = cursor.fetchone()

    def notifyMe(self, message):
        """Send an email notification to the appropriate recipient
        with a fixed subject (ReferenceWebGUI) and the specified message body.
        This function is triggered only if the class variable notification
        is set to True in the constructor (which is the default case)."""

        me = 'marco.rovere@cern.ch'
        eduardo = 'erosales@cern.ch'
        msg = email.message_from_string(message + '\n')
        msg['Subject'] = 'ReferenceWebGUI'
        msg['From'] = me
        msg['To'] = self.rcpt

        if(self.verbose):
            print msg.as_string()

        s = smtplib.SMTP("localhost")
        # the same message is sent twice, once with each envelope sender
        s.sendmail(me, [self.rcpt], msg.as_string())
        s.sendmail(eduardo, [self.rcpt], msg.as_string())
        s.quit()


class DBOptionParser:
    def __init__(self):
        self.parser = OptionParser()
        self.group_general = OptionGroup(self.parser, "General Options",
                                         "These options are used with other options (insert/list/update)."
                                         )
        self.group_insert = OptionGroup(self.parser, "Insertion Options",
                                        "These options directly insert new records into the Database: use with caution."
                                        )
        self.group_list = OptionGroup(self.parser, "Listing Options",
                                      "These options dump values from the Database to the terminal."
                                      )
        self.group_update = OptionGroup(self.parser, "Update Options",
                                        "These options directly update existing records into the Database: use with caution."
                                        )
        self.group_output = OptionGroup(self.parser, "Output Options",
                                        "These options produce the final XML to be used to assemble a DQM Reference Histogram File."
                                        )

    def parseOptions(self):
        (self.options, self.args) = self.parser.parse_args()

    def error(self, message):
        self.parser.error(message)

    def clone(self):
        return self.options.cloneSkeletonTag

    def db(self):
        return self.options.db

    def db_filename(self):
        return self.options.db_filename

    def duplicateTag(self):
        return self.options.duplicateTag

    def insert(self):
        return self.options.insert

    def fastsync(self):
        return self.options.fastsync

    def faketag(self):
        return self.options.faketag

    def g(self):
        return self.options.g

    def source(self):
        return self.options.source

    def l(self):
        return self.options.l

    def ltag(self):
        return self.options.ltag

    def lsub(self):
        return self.options.lsub

    def modifySource(self):
        return self.options.modifySource

    def modifyRun(self):
        return self.options.modifyRun

    def n(self):
        return self.options.n

    def o(self):
        return self.options.o

    def p(self):
        return self.options.p

    def t(self):
        return self.options.t

    def r(self):
        return self.options.r

    def x(self):
        return self.options.x

    def s(self):
        return self.options.s

    def sync(self):
        return self.options.sync

    def v(self):
        return self.options.v

    def u(self):
        return self.options.u

    def defineOptions(self):
        # GENERAL OPTIONS

        self.group_general.add_option("-d",
                                      "--database",
                                      dest="db",
                                      default="referenceHisto.db",
                                      help="Use the specified database to perform operations." +
                                      " If this option is not specified, the default is used [referenceHisto.db].",
                                      action="store",
                                      type="string",
                                      metavar="DB_NAME")
        self.group_general.add_option("--source",
                                      dest="source",
                                      help="Source file to be used when producing skeleton XMLs",
                                      action="store")
        self.group_general.add_option("-r",
                                      "--run",
                                      dest="r",
                                      type="long",
                                      help="Specify the starting run from which this record is valid (inclusive)",
                                      action="store",
                                      metavar="RUN")
        self.group_general.add_option("-s",
                                      "--subsystem",
                                      dest="s",
                                      type="string",
                                      help="Specify to which subsystem this record belongs",
                                      action="store",
                                      metavar="SUBSYS")
        self.group_general.add_option("-t",
                                      "--tag",
                                      dest="t",
                                      help="Specify which local tag is to be used to insert data",
                                      action="store",
                                      metavar="TAG")
        self.group_general.add_option("-v",
                                      "--verbose",
                                      dest="v",
                                      help="Verbose printouts",
                                      action="store_true",
                                      default=False)
        self.group_general.add_option("-n",
                                      "--no-notify",
                                      dest="n",
                                      help="Suppress Email Notifications",
                                      action="store_false",
                                      default=True)
        self.group_general.add_option("-x",
                                      "--xml-file",
                                      dest="x",
                                      type="string",
                                      help="Specify the location of the XML file to upload into the DB",
                                      action="store",
                                      metavar="XML")
        self.group_general.add_option("--oracle",
                                      dest="o",
                                      type="string",
                                      help="Use the specified Oracle DB to produce the Metadata file (cms_orcoff_prep|cms_orcoff_prod)",
                                      action="store",
                                      metavar="ORACLE_DB")
        self.parser.add_option_group(self.group_general)

        # INSERTION OPTIONS

        self.group_insert.add_option("-c",
                                     "--create",
                                     dest="db_filename",
                                     help="Create Database Table from scratch using the supplied filename",
                                     action="store",
                                     type="string",
                                     metavar="DB_NAME")
        self.group_insert.add_option("-i",
                                     "--insert",
                                     dest="insert",
                                     help="Insert new data into the database." +
                                     " Options -r -t -s -x are mandatory. Calls insertRow().",
                                     action="store_true",
                                     default=False)
        self.group_insert.add_option("--faketag",
                                     dest="faketag",
                                     help="Create a fake Tag that needs to be filled in via Web Application." +
                                     " Calls createFakeLocalTag().",
                                     action="store",
                                     metavar="TAG")
        self.group_insert.add_option("--cloneskeleton",
                                     dest="cloneSkeletonTag",
                                     help="Create New records based on the specified skeleton." +
                                     " Calls produceXMLFromDBSkeletonAndInsert()",
                                     action="store",
                                     type="string",
                                     metavar="TAG_TO_CLONE")
        self.group_insert.add_option("--duplicate",
                                     dest="duplicateTag",
                                     help="Deep copy the specified tag into a new one." +
                                     " The option -t is mandatory.",
                                     action="store",
                                     type="string",
                                     metavar="TAG_TO_DUPLICATE")
        self.group_insert.add_option("--generate",
                                     dest="g",
                                     help="Generate a skeleton XML for all the known subsystems using the specified file as source." +
                                     " The --source parameter is mandatory." +
                                     " The default behaviour is to dump the produced XMLs into the local filesystem (using produceSkeletonXML)." +
                                     " If used with the -u option, the XMLs will be uploaded directly into the DB (using produceSkeletonXMLAndInsert). In" +
                                     " this case the options -t and -r are mandatory.",
                                     action="store_true",
                                     default=False
                                     )
        self.group_insert.add_option("-u",
                                     "--upload",
                                     dest="u",
                                     help="Upload new skeleton XML files into the database using the specified tag (mandatory) and run (mandatory)",
                                     action="store_true",
                                     default=False)
        self.parser.add_option_group(self.group_insert)

        # LISTING OPTIONS

        self.group_list.add_option("-l",
                                   "--list",
                                   dest="l",
                                   help="List all local tags known to the specified Database",
                                   action="store_true")
        self.group_list.add_option("--localtag",
                                   dest="ltag",
                                   help="List the specified local tag composition",
                                   action="store",
                                   metavar="TAG")
        self.group_list.add_option("--listsub",
                                   dest="lsub",
                                   help="List all known subsystems",
                                   action="store_true")
        self.parser.add_option_group(self.group_list)

        # UPDATE OPTIONS

        self.group_update.add_option("--modifyRun",
                                     dest="modifyRun",
                                     help="Modify the StartRun of an existing record identified by localtag, run and subsystem",
                                     action="store",
                                     metavar="NEW_RUN")
        self.group_update.add_option("--modifySource",
                                     dest="modifySource",
                                     help="Modify the source attribute of the XML specified by localtag, run and subsystem",
                                     action="store",
                                     metavar="NEW_SOURCE")
        self.group_update.add_option("--sync",
                                     dest="sync",
                                     help="Like fastsync, but the check is done on all records, including the ones already sync'ed.",
                                     action="store_true")
        self.group_update.add_option("--fastsync",
                                     dest="fastsync",
                                     help="Mark each record according to whether it is present in orcoff_prod and orcoff_prep. It only checks records that were not already sync'ed.",
                                     action="store_true")
        self.parser.add_option_group(self.group_update)

        # OUTPUT OPTIONS

        self.group_output.add_option("-p",
                                     "--producefortag",
                                     dest="p",
                                     type="string",
                                     help="Produce all necessary XMLs for the specified localTag in the range 1 to infinity",
                                     action="store",
                                     metavar="TAG")
        self.parser.add_option_group(self.group_output)
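
# Illustrative command lines (assuming a driver elsewhere wires DBOptionParser
# to the DB class as the option help above suggests; the script name, file
# names and tags are made up):
#
#   ./referenceDB.py -c referenceHisto.db
#   ./referenceDB.py -d referenceHisto.db -i -t LocalCosmicsRef -r 123456 -s SiStrip -x SiStrip_ref.xml
#   ./referenceDB.py -d referenceHisto.db -l
#   ./referenceDB.py -d referenceHisto.db -p LocalCosmicsRef --oracle cms_orcoff_prep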