From 626cbc53dfcf03108e8af38e771c9902c74e5cc9 Mon Sep 17 00:00:00 2001
From: Vasileios Karakasis
Date: Wed, 2 Oct 2024 16:49:14 +0200
Subject: [PATCH] Support importing results from other DB files

---
 reframe/frontend/cli.py                |  22 +++--
 reframe/frontend/reporting/__init__.py | 112 +++++++++++++++++++------
 reframe/frontend/reporting/storage.py  |  28 +++++--
 3 files changed, 123 insertions(+), 39 deletions(-)

diff --git a/reframe/frontend/cli.py b/reframe/frontend/cli.py
index aac59d398..239f122fa 100644
--- a/reframe/frontend/cli.py
+++ b/reframe/frontend/cli.py
@@ -1051,11 +1051,23 @@ def restrict_logging():
         if spec['import']['from'] == 'perflog':
             kwargs = spec['import']
             del kwargs['from']
-            report = reporting.RunReport.create_from_perflog(*options.args,
-                                                             **kwargs)
-            # report.save('foo.json', link_to_last=False)
-            uuid = report.store()
-            printer.info(f'Results imported successfully as session {uuid}')
+            reports = reporting.RunReport.create_from_perflog(
+                *options.args, **kwargs
+            )
+        elif spec['import']['from'] == 'sqlite':
+            kwargs = spec['import']
+            del kwargs['from']
+            reports = reporting.RunReport.create_from_sqlite_db(
+                *options.args, **kwargs
+            )
+
+        for rpt in reports:
+            uuid = rpt.store()
+            printer.info(f'Successfully imported session {uuid}')
+
+        if not reports:
+            printer.info('No sessions have been imported')
+
         sys.exit(0)

     # Show configuration after everything is set up
diff --git a/reframe/frontend/reporting/__init__.py b/reframe/frontend/reporting/__init__.py
index d9ca83b4f..474086c96 100644
--- a/reframe/frontend/reporting/__init__.py
+++ b/reframe/frontend/reporting/__init__.py
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: BSD-3-Clause

 import decimal
+import collections
 import functools
 import inspect
 import json
@@ -225,8 +226,8 @@ class RunReport:
     '''
     def __init__(self):
         # Initialize the report with the required fields
-        self.__filename = None
-        self.__report = {
+        self._filename = None
+        self._report = {
             'session_info': {
                 'data_version': DATA_VERSION,
                 'hostname': socket.gethostname(),
@@ -240,16 +241,16 @@ def __init__(self):

     @property
     def filename(self):
-        return self.__filename
+        return self._filename

     def __getattr__(self, name):
-        return getattr(self.__report, name)
+        return getattr(self._report, name)

     def __getitem__(self, key):
-        return self.__report[key]
+        return self._report[key]

     def __rfm_json_encode__(self):
-        return self.__report
+        return self._report

     @classmethod
     def create_from_perflog(cls, *logfiles, format=None,
@@ -372,23 +373,60 @@ def _convert(x):
             'run_index': run_index,
             'testcases': testcases
         })
-        return report
+        return [report]
+
+    @classmethod
+    def create_from_sqlite_db(cls, *dbfiles, exclude_sessions=None,
+                              include_sessions=None, time_period=None):
+        dst_backend = StorageBackend.default()
+        dst_schema = dst_backend.schema_version()
+        if not time_period:
+            time_period = {'start': '19700101T0000+0000', 'end': 'now'}
+
+        start = time_period.get('start', '19700101T0000+0000')
+        end = time_period.get('end', 'now')
+        ts_start, ts_end = parse_time_period(f'{start}:{end}')
+        include_sessions = set(include_sessions) if include_sessions else set()
+        exclude_sessions = set(exclude_sessions) if exclude_sessions else set()
+        reports = []
+        for filename in dbfiles:
+            src_backend = StorageBackend.create('sqlite', filename)
+            src_schema = src_backend.schema_version()
+            if src_schema != dst_schema:
+                getlogger().warning(
+                    f'ignoring DB file {filename}: schema version mismatch: '
+                    f'cannot import from DB v{src_schema} to v{dst_schema}'
+                )
+                continue
+
+            sessions = src_backend.fetch_sessions_time_period(ts_start,
+                                                              ts_end)
+            for sess in sessions:
+                uuid = sess['session_info']['uuid']
+                if include_sessions and uuid not in include_sessions:
+                    continue
+
+                if exclude_sessions and uuid in exclude_sessions:
+                    continue
+
+                reports.append(_ImportedRunReport(sess))
+
+        return reports

     def _add_run(self, run):
-        self.__report['runs'].append(run)
+        self._report['runs'].append(run)

     def update_session_info(self, session_info):
         # Remove timestamps
         for key, val in session_info.items():
             if not key.startswith('time_'):
-                self.__report['session_info'][key] = val
+                self._report['session_info'][key] = val

     def update_restored_cases(self, restored_cases, restored_session):
-        self.__report['restored_cases'] = [restored_session.case(c)
-                                           for c in restored_cases]
+        self._report['restored_cases'] = [restored_session.case(c)
+                                          for c in restored_cases]

     def update_timestamps(self, ts_start, ts_end):
-        self.__report['session_info'].update({
+        self._report['session_info'].update({
             'time_start': time.strftime(_DATETIME_FMT,
                                         time.localtime(ts_start)),
             'time_start_unix': ts_start,
@@ -403,10 +441,10 @@ def update_extras(self, extras):
         # We prepend a special character to the user extras in order to avoid
         # possible conflicts with existing keys
         for k, v in extras.items():
-            self.__report['session_info'][f'${k}'] = v
+            self._report['session_info'][f'${k}'] = v

     def update_run_stats(self, stats):
-        session_uuid = self.__report['session_info']['uuid']
+        session_uuid = self._report['session_info']['uuid']
         for runidx, tasks in stats.runs():
             testcases = []
             num_failures = 0
@@ -501,7 +539,7 @@ def update_run_stats(self, stats):

                 testcases.append(entry)

-            self.__report['runs'].append({
+            self._report['runs'].append({
                 'num_cases': len(tasks),
                 'num_failures': num_failures,
                 'num_aborted': num_aborted,
@@ -511,23 +549,23 @@ def update_run_stats(self, stats):
         })

         # Update session info from stats
-        self.__report['session_info'].update({
-            'num_cases': self.__report['runs'][0]['num_cases'],
-            'num_failures': self.__report['runs'][-1]['num_failures'],
-            'num_aborted': self.__report['runs'][-1]['num_aborted'],
-            'num_skipped': self.__report['runs'][-1]['num_skipped']
+        self._report['session_info'].update({
+            'num_cases': self._report['runs'][0]['num_cases'],
+            'num_failures': self._report['runs'][-1]['num_failures'],
+            'num_aborted': self._report['runs'][-1]['num_aborted'],
+            'num_skipped': self._report['runs'][-1]['num_skipped']
         })

     def _save(self, filename, compress, link_to_last):
         filename = _expand_report_filename(filename, newfile=True)
         with open(filename, 'w') as fp:
             if compress:
-                jsonext.dump(self.__report, fp)
+                jsonext.dump(self._report, fp)
             else:
-                jsonext.dump(self.__report, fp, indent=2)
+                jsonext.dump(self._report, fp, indent=2)
                 fp.write('\n')

-        self.__filename = filename
+        self._filename = filename
         if not link_to_last:
             return
@@ -547,7 +585,7 @@ def _save(self, filename, compress, link_to_last):

     def is_empty(self):
         '''Return :obj:`True` is no test cases where run'''
-        return self.__report['session_info']['num_cases'] == 0
+        return self._report['session_info']['num_cases'] == 0

     def save(self, filename, compress=False, link_to_last=True):
         prefix = os.path.dirname(filename) or '.'
@@ -562,7 +600,7 @@ def store(self):

     def generate_xml_report(self):
         '''Generate a JUnit report from a standard ReFrame JSON report.'''
-        report = self.__report
+        report = self._report
         xml_testsuites = etree.Element('testsuites')
         # Create a XSD-friendly timestamp
         session_ts = time.strftime(
@@ -623,6 +661,30 @@ def save_junit(self, filename):
         )


+class _ImportedRunReport(RunReport):
+    def __init__(self, report):
+        self._filename = f'{report["session_info"]["uuid"]}.json'
+        self._report = report
+
+    def _add_run(self, run):
+        raise NotImplementedError
+
+    def update_session_info(self, session_info):
+        raise NotImplementedError
+
+    def update_restored_cases(self, restored_cases, restored_session):
+        raise NotImplementedError
+
+    def update_timestamps(self, ts_start, ts_end):
+        raise NotImplementedError
+
+    def update_extras(self, extras):
+        raise NotImplementedError
+
+    def update_run_stats(self, stats):
+        raise NotImplementedError
+
+
 def _group_key(groups, testcase):
     key = []
     for grp in groups:
diff --git a/reframe/frontend/reporting/storage.py b/reframe/frontend/reporting/storage.py
index 686332e2d..fd887f5df 100644
--- a/reframe/frontend/reporting/storage.py
+++ b/reframe/frontend/reporting/storage.py
@@ -47,11 +47,11 @@ def fetch_testcases_time_period(self, ts_start, ts_end):


 class _SqliteStorage(StorageBackend):
-    SCHEMA_VERSION = '1.0'
+    _SCHEMA_VERSION = '1.0'

-    def __init__(self):
-        self.__db_file = os.path.join(
-            osext.expandvars(runtime().get_option('storage/0/sqlite_db_file'))
+    def __init__(self, dbfile=None):
+        self.__db_file = dbfile or osext.expandvars(
+            runtime().get_option('storage/0/sqlite_db_file')
         )
         mode = runtime().get_option(
             'storage/0/sqlite_db_file_mode'
@@ -61,6 +61,16 @@ def __init__(self):
         else:
             self.__db_file_mode = mode

+    def schema_version(self):
+        with self._db_connect(self._db_file()) as conn:
+            result = conn.execute(
+                'SELECT schema_version FROM metadata LIMIT 1'
+            ).fetchone()
+            if not result:
+                raise ReframeError(f'no DB metadata found in {self.__db_file}')
+
+            return result[0]
+
     def _db_file(self):
         prefix = os.path.dirname(self.__db_file)
         if not os.path.exists(self.__db_file):
@@ -136,14 +146,14 @@ def _db_schema_check(self):
             # DB is new, insert the schema version
             with self._db_connect(self.__db_file) as conn:
                 conn.execute('INSERT INTO metadata VALUES(:schema_version)',
-                             {'schema_version': self.SCHEMA_VERSION})
+                             {'schema_version': self._SCHEMA_VERSION})
         else:
             found_ver = results[0][0]
-            if found_ver != self.SCHEMA_VERSION:
+            if found_ver != self._SCHEMA_VERSION:
                 raise ReframeError(
                     f'results DB in {self.__db_file!r} is '
                     'of incompatible version: '
-                    f'found {found_ver}, required: {self.SCHEMA_VERSION}'
+                    f'found {found_ver}, required: {self._SCHEMA_VERSION}'
                 )
@@ -292,10 +302,10 @@ def fetch_sessions_time_period(self, ts_start=None, ts_end=None):
         query = 'SELECT json_blob from sessions'
         if ts_start or ts_end:
             query += ' WHERE ('
-            if ts_start:
+            if ts_start is not None:
                 query += f'session_start_unix >= {ts_start}'

-            if ts_end:
+            if ts_end is not None:
                 query += f' AND session_start_unix <= {ts_end}'

             query += ')'
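
A minimal usage sketch of the new import path introduced by this patch,
assuming an initialized ReFrame runtime so that `StorageBackend.default()`
can resolve the destination DB; the source DB file name and the time period
below are illustrative only, not part of this patch:

    from reframe.frontend.reporting import RunReport

    # Import every session recorded since 2024-01-01 from an external
    # results DB file. Source DBs whose schema version differs from the
    # destination's are skipped with a warning; `include_sessions` and
    # `exclude_sessions` may further filter sessions by UUID.
    reports = RunReport.create_from_sqlite_db(
        'old_results.db',   # hypothetical source DB file
        time_period={'start': '20240101T0000+0000', 'end': 'now'}
    )
    for rpt in reports:
        uuid = rpt.store()  # persist the session into the configured backend
        print(f'imported session {uuid}')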