| author | showard <showard@592f7852-d20e-0410-864c-8624ca9c26a4> | 2009-12-23 00:03:56 +0000 |
|---|---|---|
| committer | showard <showard@592f7852-d20e-0410-864c-8624ca9c26a4> | 2009-12-23 00:03:56 +0000 |
| commit | cf87d69c7b54c8f41e11a7ee6a7399a830dcfb5c (patch) | |
| tree | 016f5ccf21a1e9870f2dc52e8a87efbe7b82a185 /tko | |
| parent | 2bea5139735e5ead1ac3ba39cef272ecfd6b6e18 (diff) | |
Rename the tables in the databases by prefixing the app name. This is
in preparation for merging the two databases and the two Django
projects into one.
Note that this renames *all* standard Autotest DB tables in both the
autotest_web and tko databases. If you have scripts written directly
against these databases, *they will break*. If your scripts access
the RPC interfaces, they should continue to work.
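For example, a maintenance script that reads the tko database directly with
MySQLdb would have to switch to the prefixed table names, while the same data
stays reachable through the RPC layer without modification. The script below
is an illustrative sketch only; it is not part of this change, and the
connection settings are assumptions:

```python
import MySQLdb

# Hypothetical direct-database script; host/user/db settings are assumptions.
conn = MySQLdb.connect(host='localhost', user='autotest', db='tko')
cursor = conn.cursor()

# Before this change the query read "... FROM tests ...".  After the rename
# the same rows live in tko_tests (likewise jobs -> tko_jobs, and so on).
cursor.execute("SELECT test_idx, test, status FROM tko_tests"
               " WHERE job_idx = %s", (42,))
for test_idx, test, status in cursor.fetchall():
    print test_idx, test, status    # Python 2 print, as used elsewhere in Autotest
```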
Another patch will be along within the next few weeks to actually move
the TKO tables into the autotest_web database.
From: James Ren <jamesren@google.com>
Signed-off-by: Steve Howard <showard@google.com>
git-svn-id: svn://test.kernel.org/autotest/trunk@4040 592f7852-d20e-0410-864c-8624ca9c26a4
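For reference, the complete set of renamed tables is the ORIG_NAMES tuple in
the new migration (tko/migrations/031_rename_tko_tables.py, shown in the diff
below); the upgrade mapping it builds simply prepends the tko_ prefix:

```python
# Taken from 031_rename_tko_tables.py (see the diff below): every standard
# TKO table is renamed to 'tko_' + <old name> on upgrade.
ORIG_NAMES = (
    'embedded_graphing_queries', 'iteration_attributes', 'iteration_result',
    'jobs', 'kernels', 'machines', 'patches', 'query_history',
    'saved_queries', 'status', 'test_attributes', 'test_labels',
    'test_labels_tests', 'tests',
)
RENAMES_UP = dict((name, 'tko_' + name) for name in ORIG_NAMES)
# e.g. RENAMES_UP['tests'] == 'tko_tests', RENAMES_UP['jobs'] == 'tko_jobs'
```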
Diffstat (limited to 'tko')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | tko/create_db-postgres | 137 |
| -rw-r--r-- | tko/db.py | 57 |
| -rwxr-xr-x | tko/frontend.py | 18 |
| -rwxr-xr-x | tko/machine_benchmark.cgi | 8 |
| -rw-r--r-- | tko/migrations/031_rename_tko_tables.py | 378 |
| -rw-r--r-- | tko/migrations/common.py | 8 |
| -rw-r--r-- | tko/nightly.py | 16 |
| -rwxr-xr-x | tko/parse.py | 2 |
| -rwxr-xr-x | tko/query_history.cgi | 2 |
| -rwxr-xr-x | tko/retrieve_jobs | 2 |
| -rwxr-xr-x | tko/save_query.cgi | 7 |
11 files changed, 441 insertions, 194 deletions
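As the diffstat shows, the bulk of the change is the new migration, which,
besides renaming the tables, drops the reporting views (test_view,
test_view_2, test_view_outer_joins, perf_view, perf_view_2) and recreates
them under tko_-prefixed names. Reports that query the views directly need
the same one-word update. A hypothetical example, with assumed connection
settings:

```python
import MySQLdb

# Hypothetical report against the renamed view (formerly test_view); the
# connection settings are assumptions, the view's columns are unchanged.
cursor = MySQLdb.connect(host='localhost', user='autotest', db='tko').cursor()
cursor.execute("SELECT machine_hostname, status_word, COUNT(*)"
               " FROM tko_test_view"
               " GROUP BY machine_hostname, status_word")
for hostname, status_word, num_tests in cursor.fetchall():
    print hostname, status_word, num_tests
```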
diff --git a/tko/create_db-postgres b/tko/create_db-postgres index 0a34a1e6..e69de29b 100644 --- a/tko/create_db-postgres +++ b/tko/create_db-postgres @@ -1,137 +0,0 @@ --- drop all views (since they depend on some or all of the following tables) -DROP VIEW IF EXISTS test_view; - --- kernel versions -DROP SEQUENCE IF EXISTS kernel_idx_seq; -CREATE SEQUENCE kernel_idx_seq - INCREMENT BY 1 - NO MAXVALUE - NO MINVALUE - CACHE 1; - -DROP TABLE IF EXISTS kernels; -CREATE TABLE kernels ( -kernel_idx int NOT NULL DEFAULT nextval('kernel_idx_seq'::TEXT) PRIMARY KEY, -kernel_hash TEXT, -- Hash of base + all patches -base TEXT, -- Base version without patches -printable TEXT -- Full version with patches -); - --- main jobs table -DROP SEQUENCE IF EXISTS machine_idx_seq; -CREATE SEQUENCE machine_idx_seq - INCREMENT BY 1 - NO MAXVALUE - NO MINVALUE - CACHE 1; - -DROP TABLE IF EXISTS machines; -CREATE TABLE machines ( -machine_idx int NOT NULL DEFAULT nextval('machine_idx_seq'::TEXT) PRIMARY KEY, -hostname TEXT, -- hostname -machine_group TEXT, -- group name -owner TEXT -- owner name -); - --- main jobs table -DROP SEQUENCE IF EXISTS job_idx_seq; -CREATE SEQUENCE job_idx_seq - INCREMENT BY 1 - NO MAXVALUE - NO MINVALUE - CACHE 1; - -DROP TABLE IF EXISTS jobs; -CREATE TABLE jobs ( -job_idx int NOT NULL DEFAULT nextval('job_idx_seq'::TEXT) PRIMARY KEY, -tag TEXT, -- job key -label TEXT, -- job label assigned by user -username TEXT, -- user name -machine_idx int -- reference to machine table -); - --- One entry per patch used, anywhere -DROP TABLE IF EXISTS patches; -CREATE TABLE patches ( -kernel_idx INTEGER, -- index number -name TEXT, -- short name -url TEXT, -- full URL -hash TEXT -); - --- test functional results -DROP SEQUENCE IF EXISTS test_idx_seq; -CREATE SEQUENCE test_idx_seq - INCREMENT BY 1 - NO MAXVALUE - NO MINVALUE - CACHE 1; - -DROP TABLE IF EXISTS tests; -CREATE TABLE tests ( -test_idx int NOT NULL DEFAULT nextval('test_idx_seq'::TEXT) PRIMARY KEY, -job_idx INTEGER, -- ref to job table -test TEXT, -- name of test -subdir TEXT, -- subdirectory name -kernel_idx INTEGER, -- kernel test was AGAINST -status INTEGER, -- test status -reason TEXT, -- reason for test status -machine_idx int -- reference to machine table -); - --- test functional results -DROP TABLE IF EXISTS test_attributes; -CREATE TABLE test_attributes ( -test_idx INTEGER, -- ref to test table -attribute TEXT, -- attribute name (e.g. 'throughput') -value TEXT -- attribute value -); - --- test functional results -DROP TABLE IF EXISTS iteration_result; -CREATE TABLE iteration_result( -test_idx INTEGER, -- ref to test table -iteration INTEGER, -- integer -attribute TEXT, -- attribute name (e.g. 
'throughput') -value FLOAT -- attribute value (eg 700.1) -); - --- status key -DROP SEQUENCE IF EXISTS status_idx_seq; -CREATE SEQUENCE status_idx_seq - INCREMENT BY 1 - NO MAXVALUE - NO MINVALUE - CACHE 1; - -DROP TABLE IF EXISTS status; -CREATE TABLE status ( -status_idx int NOT NULL DEFAULT nextval('status_idx_seq'::TEXT) PRIMARY KEY, -word TEXT -- status word -); - --- BRRD syncronization -DROP TABLE IF EXISTS brrd_sync; -CREATE TABLE brrd_sync ( -test_idx INTEGER -- ref to test table -); - --- test_view (to make life easier for people trying to mine data) -CREATE VIEW test_view AS -SELECT tests.test_idx, tests.job_idx, tests.test, tests.subdir, - tests.kernel_idx, tests.status, tests.reason, jobs.machine_idx, - jobs.tag, jobs.label, jobs.username, machines.hostname, - machines.machine_group, machines.owner, kernels.kernel_hash, - kernels.base, kernels.printable -FROM tests -INNER JOIN jobs ON jobs.job_idx = tests.job_idx -INNER JOIN machines ON machines.machine_idx = jobs.machine_idx -INNER JOIN kernels ON kernels.kernel_idx = tests.kernel_idx; - -INSERT INTO status (word) VALUES ('NOSTATUS'); -INSERT INTO status (word) VALUES ('ERROR'); -INSERT INTO status (word) VALUES ('ABORT'); -INSERT INTO status (word) VALUES ('FAIL'); -INSERT INTO status (word) VALUES ('WARN'); -INSERT INTO status (word) VALUES ('GOOD'); -INSERT INTO status (word) VALUES ('ALERT'); @@ -21,7 +21,7 @@ class db_sql(object): # if not present, insert statuses self.status_idx = {} self.status_word = {} - status_rows = self.select('status_idx, word', 'status', None) + status_rows = self.select('status_idx, word', 'tko_status', None) for s in status_rows: self.status_idx[s[1]] = s[0] self.status_word[s[0]] = s[1] @@ -302,13 +302,13 @@ class db_sql(object): job_idx = self.find_job(tag) for test_idx in self.find_tests(job_idx): where = {'test_idx' : test_idx} - self.delete('iteration_result', where) - self.delete('iteration_attributes', where) - self.delete('test_attributes', where) - self.delete('test_labels_tests', {'test_id': test_idx}) + self.delete('tko_iteration_result', where) + self.delete('tko_iteration_attributes', where) + self.delete('tko_test_attributes', where) + self.delete('tko_test_labels_tests', {'test_id': test_idx}) where = {'job_idx' : job_idx} - self.delete('tests', where) - self.delete('jobs', where) + self.delete('tko_tests', where) + self.delete('tko_jobs', where) def insert_job(self, tag, job, commit = None): @@ -334,9 +334,9 @@ class db_sql(object): 'afe_job_id': afe_job_id} is_update = hasattr(job, 'index') if is_update: - self.update('jobs', data, {'job_idx': job.index}, commit=commit) + self.update('tko_jobs', data, {'job_idx': job.index}, commit=commit) else: - self.insert('jobs', data, commit=commit) + self.insert('tko_jobs', data, commit=commit) job.index = self.get_last_autonumber_value() for test in job.tests: self.insert_test(job, test, commit=commit) @@ -353,14 +353,15 @@ class db_sql(object): is_update = hasattr(test, "test_idx") if is_update: test_idx = test.test_idx - self.update('tests', data, {'test_idx': test_idx}, commit=commit) + self.update('tko_tests', data, + {'test_idx': test_idx}, commit=commit) where = {'test_idx': test_idx} - self.delete('iteration_result', where) - self.delete('iteration_attributes', where) + self.delete('tko_iteration_result', where) + self.delete('tko_iteration_attributes', where) where['user_created'] = 0 - self.delete('test_attributes', where) + self.delete('tko_test_attributes', where) else: - self.insert('tests', data, commit=commit) + 
self.insert('tko_tests', data, commit=commit) test_idx = test.test_idx = self.get_last_autonumber_value() data = {'test_idx': test_idx} @@ -369,23 +370,23 @@ class db_sql(object): for key, value in i.attr_keyval.iteritems(): data['attribute'] = key data['value'] = value - self.insert('iteration_attributes', data, + self.insert('tko_iteration_attributes', data, commit=commit) for key, value in i.perf_keyval.iteritems(): data['attribute'] = key data['value'] = value - self.insert('iteration_result', data, + self.insert('tko_iteration_result', data, commit=commit) for key, value in test.attributes.iteritems(): data = {'test_idx': test_idx, 'attribute': key, 'value': value} - self.insert('test_attributes', data, commit=commit) + self.insert('tko_test_attributes', data, commit=commit) if not is_update: for label_index in test.labels: data = {'test_id': test_idx, 'testlabel_id': label_index} - self.insert('test_labels_tests', data, commit=commit) + self.insert('tko_test_labels_tests', data, commit=commit) def read_machine_map(self): @@ -412,20 +413,20 @@ class db_sql(object): def insert_machine(self, job, commit = None): machine_info = self.machine_info_dict(job) - self.insert('machines', machine_info, commit=commit) + self.insert('tko_machines', machine_info, commit=commit) return self.get_last_autonumber_value() def update_machine_information(self, job, commit = None): machine_info = self.machine_info_dict(job) - self.update('machines', machine_info, + self.update('tko_machines', machine_info, where={'hostname': machine_info['hostname']}, commit=commit) def lookup_machine(self, hostname): where = { 'hostname' : hostname } - rows = self.select('machine_idx', 'machines', where) + rows = self.select('machine_idx', 'tko_machines', where) if rows: return rows[0][0] else: @@ -433,7 +434,7 @@ class db_sql(object): def lookup_kernel(self, kernel): - rows = self.select('kernel_idx', 'kernels', + rows = self.select('kernel_idx', 'tko_kernels', {'kernel_hash':kernel.kernel_hash}) if rows: return rows[0][0] @@ -456,7 +457,7 @@ class db_sql(object): if not match: patch_count += 1 - self.insert('kernels', + self.insert('tko_kernels', {'base':kernel.base, 'kernel_hash':kernel.kernel_hash, 'printable':printable}, @@ -465,7 +466,7 @@ class db_sql(object): if patch_count > 0: printable += ' p%d' % (kver) - self.update('kernels', + self.update('tko_kernels', {'printable':printable}, {'kernel_idx':kver}) @@ -477,7 +478,7 @@ class db_sql(object): def insert_patch(self, kver, patch, commit = None): print patch.reference name = os.path.basename(patch.reference)[:80] - self.insert('patches', + self.insert('tko_patches', {'kernel_idx': kver, 'name':name, 'url':patch.reference, @@ -487,7 +488,7 @@ class db_sql(object): def find_test(self, job_idx, testname, subdir): where = {'job_idx': job_idx , 'test': testname, 'subdir': subdir} - rows = self.select('test_idx', 'tests', where) + rows = self.select('test_idx', 'tko_tests', where) if rows: return rows[0][0] else: @@ -496,7 +497,7 @@ class db_sql(object): def find_tests(self, job_idx): where = { 'job_idx':job_idx } - rows = self.select('test_idx', 'tests', where) + rows = self.select('test_idx', 'tko_tests', where) if rows: return [row[0] for row in rows] else: @@ -504,7 +505,7 @@ class db_sql(object): def find_job(self, tag): - rows = self.select('job_idx', 'jobs', {'tag': tag}) + rows = self.select('job_idx', 'tko_jobs', {'tag': tag}) if rows: return rows[0][0] else: diff --git a/tko/frontend.py b/tko/frontend.py index 1b51270d..cbc328de 100755 --- a/tko/frontend.py 
+++ b/tko/frontend.py @@ -85,7 +85,7 @@ def get_matrix_data(db_obj, x_axis, y_axis, where = None, fields = ','.join(query_fields_list) group_by = '%s, %s, status' % (x_field, y_field) - rows = db_obj.select(fields, 'test_view', + rows = db_obj.select(fields, 'tko_test_view', where=where, group_by=group_by, max_rows = MAX_RECORDS) return status_data(rows, x_field, y_field, query_reasons) @@ -138,7 +138,7 @@ class group: @classmethod def select(klass, db): """Return all possible machine groups""" - rows = db.select('distinct machine_group', 'machines', + rows = db.select('distinct machine_group', 'tko_machines', 'machine_group is not null') groupnames = sorted([row[0] for row in rows]) return [klass(db, groupname) for groupname in groupnames] @@ -155,7 +155,7 @@ class group: def tests(self, where = {}): values = [self.name] - sql = 't inner join machines m on m.machine_idx=t.machine_idx' + sql = 't inner join tko_machines m on m.machine_idx=t.machine_idx' sql += ' where m.machine_group=%s' for key in where.keys(): sql += ' and %s=%%s' % key @@ -168,7 +168,7 @@ class machine: def select(klass, db, where = {}): fields = ['machine_idx', 'hostname', 'machine_group', 'owner'] machines = [] - for row in db.select(','.join(fields), 'machines', where): + for row in db.select(','.join(fields), 'tko_machines', where): machines.append(klass(db, *row)) return machines @@ -185,7 +185,7 @@ class kernel: @classmethod def select(klass, db, where = {}): fields = ['kernel_idx', 'kernel_hash', 'base', 'printable'] - rows = db.select(','.join(fields), 'kernels', where) + rows = db.select(','.join(fields), 'tko_kernels', where) return [klass(db, *row) for row in rows] @@ -204,7 +204,7 @@ class test: fields = ['test_idx', 'job_idx', 'test', 'subdir', 'kernel_idx', 'status', 'reason', 'machine_idx'] tests = [] - for row in db.select(','.join(fields), 'tests', where, + for row in db.select(','.join(fields), 'tko_tests', where, wherein,distinct): tests.append(klass(db, *row)) return tests @@ -215,7 +215,7 @@ class test: fields = ['test_idx', 'job_idx', 'test', 'subdir', 'kernel_idx', 'status', 'reason', 'machine_idx'] fields = ['t.'+field for field in fields] - rows = db.select_sql(','.join(fields), 'tests', sql, values) + rows = db.select_sql(','.join(fields), 'tko_tests', sql, values) return [klass(db, *row) for row in rows] @@ -279,7 +279,7 @@ class test: class job: def __init__(self, db, job_idx): where = {'job_idx' : job_idx} - rows = db.select('tag, machine_idx', 'jobs', where) + rows = db.select('tag, machine_idx', 'tko_jobs', where) if rows: self.tag, self.machine_idx = rows[0] self.job_idx = job_idx @@ -290,7 +290,7 @@ class iteration: def select(klass, db, where): fields = ['iteration', 'attribute', 'value'] iterations = [] - rows = db.select(','.join(fields), 'iteration_result', where) + rows = db.select(','.join(fields), 'tko_iteration_result', where) for row in rows: iterations.append(klass(*row)) return iterations diff --git a/tko/machine_benchmark.cgi b/tko/machine_benchmark.cgi index c9cd636f..083abc40 100755 --- a/tko/machine_benchmark.cgi +++ b/tko/machine_benchmark.cgi @@ -21,7 +21,7 @@ def main(): display.set_brief_mode() ## getting available tests - rows = db.select('test', 'tests', {}, distinct=True) + rows = db.select('test', 'tko_tests', {}, distinct=True) all_benchmarks = [] for row in rows: benchmark = row[0] @@ -34,10 +34,10 @@ def main(): fields_tests = 'test_idx, count(status_word)' where_tests = { 'subdir': benchmark, 'status_word' : 'GOOD' } fields_params = 'attribute' - for (id, 
count) in db.select(fields_tests, 'test_view', + for (id, count) in db.select(fields_tests, 'tko_test_view', where_tests, group_by='machine_hostname'): where_params = {'test_idx': id} - for (attribute) in db.select(fields_params, 'iteration_result', + for (attribute) in db.select(fields_params, 'tko_iteration_result', where_params): available_params.add("%s - %s" % (benchmark, attribute[0])) @@ -87,7 +87,7 @@ def main(): fields = 'machine_idx,machine_hostname,count(status_word)' where = { 'subdir': benchmark, 'status_word' : 'GOOD' } data = {} - for (idx, machine, count) in db.select(fields, 'test_view', + for (idx, machine, count) in db.select(fields, 'tko_test_view', where, group_by='machine_hostname'): data[machine] = count machine_idx[machine] = idx diff --git a/tko/migrations/031_rename_tko_tables.py b/tko/migrations/031_rename_tko_tables.py new file mode 100644 index 00000000..127ef43e --- /dev/null +++ b/tko/migrations/031_rename_tko_tables.py @@ -0,0 +1,378 @@ +import common +from autotest_lib.database import db_utils + + +RECREATE_VIEWS_UP = """ +CREATE VIEW tko_test_view AS +SELECT tko_tests.test_idx, + tko_tests.job_idx, + tko_tests.test, + tko_tests.subdir, + tko_tests.kernel_idx, + tko_tests.status, + tko_tests.reason, + tko_tests.machine_idx, + tko_tests.started_time AS test_started_time, + tko_tests.finished_time AS test_finished_time, + tko_jobs.tag AS job_tag, + tko_jobs.label AS job_label, + tko_jobs.username AS job_username, + tko_jobs.queued_time AS job_queued_time, + tko_jobs.started_time AS job_started_time, + tko_jobs.finished_time AS job_finished_time, + tko_machines.hostname AS machine_hostname, + tko_machines.machine_group, + tko_machines.owner AS machine_owner, + tko_kernels.kernel_hash, + tko_kernels.base AS kernel_base, + tko_kernels.printable AS kernel_printable, + tko_status.word AS status_word +FROM tko_tests +INNER JOIN tko_jobs ON tko_jobs.job_idx = tko_tests.job_idx +INNER JOIN tko_machines ON tko_machines.machine_idx = tko_jobs.machine_idx +INNER JOIN tko_kernels ON tko_kernels.kernel_idx = tko_tests.kernel_idx +INNER JOIN tko_status ON tko_status.status_idx = tko_tests.status; + + +CREATE VIEW tko_perf_view AS +SELECT tko_tests.test_idx, + tko_tests.job_idx, + tko_tests.test, + tko_tests.subdir, + tko_tests.kernel_idx, + tko_tests.status, + tko_tests.reason, + tko_tests.machine_idx, + tko_tests.started_time AS test_started_time, + tko_tests.finished_time AS test_finished_time, + tko_jobs.tag AS job_tag, + tko_jobs.label AS job_label, + tko_jobs.username AS job_username, + tko_jobs.queued_time AS job_queued_time, + tko_jobs.started_time AS job_started_time, + tko_jobs.finished_time AS job_finished_time, + tko_machines.hostname AS machine_hostname, + tko_machines.machine_group, + tko_machines.owner AS machine_owner, + tko_kernels.kernel_hash, + tko_kernels.base AS kernel_base, + tko_kernels.printable AS kernel_printable, + tko_status.word AS status_word, + tko_iteration_result.iteration, + tko_iteration_result.attribute AS iteration_key, + tko_iteration_result.value AS iteration_value +FROM tko_tests +INNER JOIN tko_jobs ON tko_jobs.job_idx = tko_tests.job_idx +INNER JOIN tko_machines ON tko_machines.machine_idx = tko_jobs.machine_idx +INNER JOIN tko_kernels ON tko_kernels.kernel_idx = tko_tests.kernel_idx +INNER JOIN tko_status ON tko_status.status_idx = tko_tests.status +INNER JOIN tko_iteration_result ON + tko_iteration_result.test_idx = tko_tests.test_idx; + + +CREATE VIEW tko_test_view_2 AS +SELECT tko_tests.test_idx, + tko_tests.job_idx, + 
tko_tests.test AS test_name, + tko_tests.subdir, + tko_tests.kernel_idx, + tko_tests.status AS status_idx, + tko_tests.reason, + tko_tests.machine_idx, + tko_tests.started_time AS test_started_time, + tko_tests.finished_time AS test_finished_time, + tko_jobs.tag AS job_tag, + tko_jobs.label AS job_name, + tko_jobs.username AS job_owner, + tko_jobs.queued_time AS job_queued_time, + tko_jobs.started_time AS job_started_time, + tko_jobs.finished_time AS job_finished_time, + tko_jobs.afe_job_id AS afe_job_id, + tko_machines.hostname AS hostname, + tko_machines.machine_group AS platform, + tko_machines.owner AS machine_owner, + tko_kernels.kernel_hash, + tko_kernels.base AS kernel_base, + tko_kernels.printable AS kernel, + tko_status.word AS status +FROM tko_tests +INNER JOIN tko_jobs ON tko_jobs.job_idx = tko_tests.job_idx +INNER JOIN tko_machines ON tko_machines.machine_idx = tko_jobs.machine_idx +INNER JOIN tko_kernels ON tko_kernels.kernel_idx = tko_tests.kernel_idx +INNER JOIN tko_status ON tko_status.status_idx = tko_tests.status; + + +CREATE VIEW tko_test_view_outer_joins AS +SELECT tko_tests.test_idx, + tko_tests.job_idx, + tko_tests.test AS test_name, + tko_tests.subdir, + tko_tests.kernel_idx, + tko_tests.status AS status_idx, + tko_tests.reason, + tko_tests.machine_idx, + tko_tests.started_time AS test_started_time, + tko_tests.finished_time AS test_finished_time, + tko_jobs.tag AS job_tag, + tko_jobs.label AS job_name, + tko_jobs.username AS job_owner, + tko_jobs.queued_time AS job_queued_time, + tko_jobs.started_time AS job_started_time, + tko_jobs.finished_time AS job_finished_time, + tko_machines.hostname AS hostname, + tko_machines.machine_group AS platform, + tko_machines.owner AS machine_owner, + tko_kernels.kernel_hash, + tko_kernels.base AS kernel_base, + tko_kernels.printable AS kernel, + tko_status.word AS status +FROM tko_tests +LEFT OUTER JOIN tko_jobs ON tko_jobs.job_idx = tko_tests.job_idx +LEFT OUTER JOIN tko_machines ON tko_machines.machine_idx = tko_jobs.machine_idx +LEFT OUTER JOIN tko_kernels ON tko_kernels.kernel_idx = tko_tests.kernel_idx +LEFT OUTER JOIN tko_status ON tko_status.status_idx = tko_tests.status; + + +CREATE VIEW tko_perf_view_2 AS +SELECT tko_tests.test_idx, + tko_tests.job_idx, + tko_tests.test AS test_name, + tko_tests.subdir, + tko_tests.kernel_idx, + tko_tests.status AS status_idx, + tko_tests.reason, + tko_tests.machine_idx, + tko_tests.started_time AS test_started_time, + tko_tests.finished_time AS test_finished_time, + tko_jobs.tag AS job_tag, + tko_jobs.label AS job_name, + tko_jobs.username AS job_owner, + tko_jobs.queued_time AS job_queued_time, + tko_jobs.started_time AS job_started_time, + tko_jobs.finished_time AS job_finished_time, + tko_machines.hostname AS hostname, + tko_machines.machine_group AS platform, + tko_machines.owner AS machine_owner, + tko_kernels.kernel_hash, + tko_kernels.base AS kernel_base, + tko_kernels.printable AS kernel, + tko_status.word AS status, + tko_iteration_result.iteration, + tko_iteration_result.attribute AS iteration_key, + tko_iteration_result.value AS iteration_value +FROM tko_tests +LEFT OUTER JOIN tko_jobs ON tko_jobs.job_idx = tko_tests.job_idx +LEFT OUTER JOIN tko_machines ON tko_machines.machine_idx = tko_jobs.machine_idx +LEFT OUTER JOIN tko_kernels ON tko_kernels.kernel_idx = tko_tests.kernel_idx +LEFT OUTER JOIN tko_status ON tko_status.status_idx = tko_tests.status +LEFT OUTER JOIN tko_iteration_result ON + tko_iteration_result.test_idx = tko_tests.test_idx; +""" + + +RECREATE_VIEWS_DOWN = 
""" +CREATE VIEW test_view AS +SELECT tests.test_idx, + tests.job_idx, + tests.test, + tests.subdir, + tests.kernel_idx, + tests.status, + tests.reason, + tests.machine_idx, + tests.started_time AS test_started_time, + tests.finished_time AS test_finished_time, + jobs.tag AS job_tag, + jobs.label AS job_label, + jobs.username AS job_username, + jobs.queued_time AS job_queued_time, + jobs.started_time AS job_started_time, + jobs.finished_time AS job_finished_time, + machines.hostname AS machine_hostname, + machines.machine_group, + machines.owner AS machine_owner, + kernels.kernel_hash, + kernels.base AS kernel_base, + kernels.printable AS kernel_printable, + status.word AS status_word +FROM tests +INNER JOIN jobs ON jobs.job_idx = tests.job_idx +INNER JOIN machines ON machines.machine_idx = jobs.machine_idx +INNER JOIN kernels ON kernels.kernel_idx = tests.kernel_idx +INNER JOIN status ON status.status_idx = tests.status; + + +CREATE VIEW perf_view AS +SELECT tests.test_idx, + tests.job_idx, + tests.test, + tests.subdir, + tests.kernel_idx, + tests.status, + tests.reason, + tests.machine_idx, + tests.started_time AS test_started_time, + tests.finished_time AS test_finished_time, + jobs.tag AS job_tag, + jobs.label AS job_label, + jobs.username AS job_username, + jobs.queued_time AS job_queued_time, + jobs.started_time AS job_started_time, + jobs.finished_time AS job_finished_time, + machines.hostname AS machine_hostname, + machines.machine_group, + machines.owner AS machine_owner, + kernels.kernel_hash, + kernels.base AS kernel_base, + kernels.printable AS kernel_printable, + status.word AS status_word, + iteration_result.iteration, + iteration_result.attribute AS iteration_key, + iteration_result.value AS iteration_value +FROM tests +INNER JOIN jobs ON jobs.job_idx = tests.job_idx +INNER JOIN machines ON machines.machine_idx = jobs.machine_idx +INNER JOIN kernels ON kernels.kernel_idx = tests.kernel_idx +INNER JOIN status ON status.status_idx = tests.status +INNER JOIN iteration_result ON iteration_result.test_idx = tests.test_idx; + + +CREATE VIEW test_view_outer_joins AS +SELECT tests.test_idx, + tests.job_idx, + tests.test AS test_name, + tests.subdir, + tests.kernel_idx, + tests.status AS status_idx, + tests.reason, + tests.machine_idx, + tests.started_time AS test_started_time, + tests.finished_time AS test_finished_time, + jobs.tag AS job_tag, + jobs.label AS job_name, + jobs.username AS job_owner, + jobs.queued_time AS job_queued_time, + jobs.started_time AS job_started_time, + jobs.finished_time AS job_finished_time, + machines.hostname AS hostname, + machines.machine_group AS platform, + machines.owner AS machine_owner, + kernels.kernel_hash, + kernels.base AS kernel_base, + kernels.printable AS kernel, + status.word AS status +FROM tests +LEFT OUTER JOIN jobs ON jobs.job_idx = tests.job_idx +LEFT OUTER JOIN machines ON machines.machine_idx = jobs.machine_idx +LEFT OUTER JOIN kernels ON kernels.kernel_idx = tests.kernel_idx +LEFT OUTER JOIN status ON status.status_idx = tests.status; + + +CREATE VIEW test_view_2 AS +SELECT tests.test_idx, + tests.job_idx, + tests.test AS test_name, + tests.subdir, + tests.kernel_idx, + tests.status AS status_idx, + tests.reason, + tests.machine_idx, + tests.started_time AS test_started_time, + tests.finished_time AS test_finished_time, + jobs.tag AS job_tag, + jobs.label AS job_name, + jobs.username AS job_owner, + jobs.queued_time AS job_queued_time, + jobs.started_time AS job_started_time, + jobs.finished_time AS job_finished_time, + 
jobs.afe_job_id AS afe_job_id, + machines.hostname AS hostname, + machines.machine_group AS platform, + machines.owner AS machine_owner, + kernels.kernel_hash, + kernels.base AS kernel_base, + kernels.printable AS kernel, + status.word AS status +FROM tests +INNER JOIN jobs ON jobs.job_idx = tests.job_idx +INNER JOIN machines ON machines.machine_idx = jobs.machine_idx +INNER JOIN kernels ON kernels.kernel_idx = tests.kernel_idx +INNER JOIN status ON status.status_idx = tests.status; + + +CREATE VIEW perf_view_2 AS +SELECT tests.test_idx, + tests.job_idx, + tests.test AS test_name, + tests.subdir, + tests.kernel_idx, + tests.status AS status_idx, + tests.reason, + tests.machine_idx, + tests.started_time AS test_started_time, + tests.finished_time AS test_finished_time, + jobs.tag AS job_tag, + jobs.label AS job_name, + jobs.username AS job_owner, + jobs.queued_time AS job_queued_time, + jobs.started_time AS job_started_time, + jobs.finished_time AS job_finished_time, + machines.hostname AS hostname, + machines.machine_group AS platform, + machines.owner AS machine_owner, + kernels.kernel_hash, + kernels.base AS kernel_base, + kernels.printable AS kernel, + status.word AS status, + iteration_result.iteration, + iteration_result.attribute AS iteration_key, + iteration_result.value AS iteration_value +FROM tests +LEFT OUTER JOIN jobs ON jobs.job_idx = tests.job_idx +LEFT OUTER JOIN machines ON machines.machine_idx = jobs.machine_idx +LEFT OUTER JOIN kernels ON kernels.kernel_idx = tests.kernel_idx +LEFT OUTER JOIN status ON status.status_idx = tests.status +LEFT OUTER JOIN iteration_result ON iteration_result.test_idx = tests.test_idx; +""" + + +ORIG_NAMES = ( + 'embedded_graphing_queries', + 'iteration_attributes', + 'iteration_result', + 'jobs', + 'kernels', + 'machines', + 'patches', + 'query_history', + 'saved_queries', + 'status', + 'test_attributes', + 'test_labels', + 'test_labels_tests', + 'tests', + ) + +RENAMES_UP = dict((name, 'tko_' + name) for name in ORIG_NAMES) +VIEWS_TO_DROP_UP = ( + 'test_view', + 'test_view_2', + 'test_view_outer_joins', + 'perf_view', + 'perf_view_2', + ) + +RENAMES_DOWN = dict((value, key) for key, value in RENAMES_UP.iteritems()) +VIEWS_TO_DROP_DOWN = ['tko_' + view for view in VIEWS_TO_DROP_UP] + + +def migrate_up(manager): + db_utils.drop_views(manager, VIEWS_TO_DROP_UP) + db_utils.rename(manager, RENAMES_UP) + manager.execute_script(RECREATE_VIEWS_UP) + + +def migrate_down(manager): + db_utils.drop_views(manager, VIEWS_TO_DROP_DOWN) + db_utils.rename(manager, RENAMES_DOWN) + manager.execute_script(RECREATE_VIEWS_DOWN) diff --git a/tko/migrations/common.py b/tko/migrations/common.py new file mode 100644 index 00000000..4c8760bd --- /dev/null +++ b/tko/migrations/common.py @@ -0,0 +1,8 @@ +import os, sys +dirname = os.path.dirname(sys.modules[__name__].__file__) +autotest_dir = os.path.abspath(os.path.join(dirname, "..", '..')) +client_dir = os.path.join(autotest_dir, "client") +sys.path.insert(0, client_dir) +import setup_modules +sys.path.pop(0) +setup_modules.setup(base_path=autotest_dir, root_module_name="autotest_lib") diff --git a/tko/nightly.py b/tko/nightly.py index 5a08608b..16c80449 100644 --- a/tko/nightly.py +++ b/tko/nightly.py @@ -8,7 +8,7 @@ from autotest_lib.client.common_lib import kernel_versions def add_kernel_jobs(label_pattern): - cmd = "select job_idx from jobs where label like '%s'" % label_pattern + cmd = "select job_idx from tko_jobs where label like '%s'" % label_pattern nrows = perf.db_cur.execute(cmd) return [row[0] for row in 
perf.db_cur.fetchall()] @@ -23,7 +23,7 @@ def is_filtered_platform(platform, platforms_filter): def get_test_attributes(testrunx): - cmd = ( "select attribute, value from test_attributes" + cmd = ( "select attribute, value from tko_test_attributes" " where test_idx = %d" % testrunx ) nrows = perf.db_cur.execute(cmd) return dict(perf.db_cur.fetchall()) @@ -49,7 +49,7 @@ def collect_testruns(jobs, test, test_attributes, # TODO: add filtering on test series? runs = {} # platform --> list of test runs for jobx in jobs: - cmd = ( "select test_idx, machine_idx from tests" + cmd = ( "select test_idx, machine_idx from tko_tests" " where job_idx = %s and test = %s" ) args = [jobx, test] nrows = perf.db_cur.execute(cmd, args) @@ -178,11 +178,11 @@ def find_regressions(kernels, test_runs, metric): def get_testrun_context(testrun): - cmd = ( 'select jobs.label, jobs.tag, tests.subdir,' - ' tests.started_time' - ' from jobs, tests' - ' where jobs.job_idx = tests.job_idx' - ' and tests.test_idx = %d' % testrun ) + cmd = ( 'select tko_jobs.label, tko_jobs.tag, tko_tests.subdir,' + ' tko_tests.started_time' + ' from tko_jobs, tko_tests' + ' where tko_jobs.job_idx = tko_tests.job_idx' + ' and tko_tests.test_idx = %d' % testrun ) nrows = perf.db_cur.execute(cmd) assert nrows == 1 row = perf.db_cur.fetchone() diff --git a/tko/parse.py b/tko/parse.py index a82ab0ab..7639a767 100755 --- a/tko/parse.py +++ b/tko/parse.py @@ -84,7 +84,7 @@ def parse_one(db, jobname, path, reparse, mail_on_failure): tko_utils.dprint("! Job is already parsed, done") return - raw_old_tests = db.select("test_idx,subdir,test", "tests", + raw_old_tests = db.select("test_idx,subdir,test", "tko_tests", {"job_idx": old_job_idx}) if raw_old_tests: old_tests = dict(((test, subdir), test_idx) diff --git a/tko/query_history.cgi b/tko/query_history.cgi index a1e2f3a9..627238db 100755 --- a/tko/query_history.cgi +++ b/tko/query_history.cgi @@ -14,7 +14,7 @@ def body(): where = (condition,[]) try: rows = db_obj.select("time_created,user_comment,url", - "query_history", where) + "tko_query_history", where) except MySQLdb.ProgrammingError, err: print err rows = () diff --git a/tko/retrieve_jobs b/tko/retrieve_jobs index 58985c6d..74bdbca6 100755 --- a/tko/retrieve_jobs +++ b/tko/retrieve_jobs @@ -7,5 +7,5 @@ except: arg = '' db = db.db() -for record in db.select('* from jobs ' + arg): +for record in db.select('* from tko_jobs ' + arg): print record diff --git a/tko/save_query.cgi b/tko/save_query.cgi index 04db50bc..d651cf3b 100755 --- a/tko/save_query.cgi +++ b/tko/save_query.cgi @@ -31,7 +31,7 @@ def log_query(): data_to_insert = {'uid':uid, 'time_created':tm, 'user_comment':comment, 'url':HTTP_REFERER } try: - db_obj.insert('query_history', data_to_insert) + db_obj.insert('tko_query_history', data_to_insert) except: raise QueryHistoryError("Could not save query") @@ -41,7 +41,7 @@ def delete_query(time_stamp): db_obj = db.db() data_to_delete = {'time_created':time_stamp} try: - db_obj.delete('query_history', data_to_delete) + db_obj.delete('tko_query_history', data_to_delete) except Exception: raise QueryHistoryError("Could not delete query") @@ -78,6 +78,3 @@ def main(): main() - - - |
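The migration delegates the actual renaming to db_utils.rename(manager,
RENAMES_UP), whose implementation is not part of this diff. On a
MySQL-backed install (the code here uses MySQLdb), that step amounts to
issuing one RENAME TABLE per entry; a rough sketch of the equivalent, for
illustration only:

```python
# Sketch of what the rename step boils down to on MySQL; this is NOT the
# real db_utils.rename implementation, which is not shown in this diff.
def rename_tables(cursor, renames):
    for old_name, new_name in renames.iteritems():
        # Table names cannot be bound as query parameters, so they are
        # interpolated directly; all names here come from ORIG_NAMES above.
        cursor.execute('RENAME TABLE %s TO %s' % (old_name, new_name))

# Example (hypothetical cursor on the tko database):
#   rename_tables(conn.cursor(), RENAMES_UP)
```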