update export_db script to export pdb in db.txt format (#986)
* update export_db script to export pdb in db.txt format

* update dockerfile

* dockerfile

* yapf

* pylint

* yapf

* add ConfigType switch for export function

* yapf

* spellcheck, mark deprecated
cderb authored Jan 20, 2025
1 parent 8beff11 commit 7e9a5f6
Showing 1 changed file with 142 additions and 18 deletions.
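For context, the target format (serialized by write_pdb in the diff below) stores one line per network config: the fin-generated db_key, '=', then a ';'-separated list of solver:params pairs. A sketch of one output line, with the key and params values illustrative only:

    <db_key>=ConvAsm1x1U:1,8,2,64;ConvOclDirectFwd:2,2,1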
160 changes: 142 additions & 18 deletions tuna/miopen/subcmd/export_db.py
@@ -27,6 +27,8 @@
"""Module to export find_db to txt file"""
import sqlite3
import os
import tempfile
import json
from collections import OrderedDict
from typing import Dict, Any, Optional, Union, List, Tuple
import base64
@@ -43,7 +45,10 @@
from tuna.utils.logger import setup_logger
from tuna.miopen.utils.analyze_parse_db import get_config_sqlite, insert_solver_sqlite
from tuna.miopen.utils.analyze_parse_db import get_sqlite_cfg_dict
from tuna.miopen.utils.config_type import ConfigType
from tuna.miopen.utils.metadata import INVERS_DIR_MAP
from tuna.miopen.parse_miopen_args import get_export_db_parser
from tuna.miopen.worker.fin_utils import compose_config_obj

DIR_NAME: dict = {'F': 'Fwd', 'B': 'BwdData', 'W': 'BwdWeights'}

@@ -89,13 +94,67 @@ def get_filename(arch: str,
  elif db_type == DB_Type.KERN_DB:
    extension = '.kdb'
  else:
    extension = ".db.txt"

  final_name = f"{final_name}{extension}"

  return final_name
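
# Illustrative note (not part of the diff): with the extension change above, a
# perf-db export that previously wrote "<final_name>.db" (SQLite) now writes
# "<final_name>.db.txt", the plain-text naming this commit targets.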


def fin_net_cfg_job(cfg_lst,
                    logger,
                    config_type: ConfigType = ConfigType.convolution):
  """Construct fin network_config jobs from a list of configs"""
  #arch and num_cu are required by fin, but unused for this command
  job_list = []
  if config_type == ConfigType.convolution:
    for config in cfg_lst:
      job = {
          "steps": ["network_config"],
          "arch": 'gfx908',
          "num_cu": 120,
          "config_tuna_id": config.id,
          "direction": int(INVERS_DIR_MAP[config.direction]),
          "config": compose_config_obj(config)
      }
      job_list.append(job)
  else:
    logger.error("Config type not implemented: %s", config_type)

  return job_list
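
# A sketch of one job entry produced above (config id and direction values are
# illustrative; the "config" payload comes from compose_config_obj):
# {
#   "steps": ["network_config"],
#   "arch": "gfx908",
#   "num_cu": 120,
#   "config_tuna_id": 42,
#   "direction": 1,
#   "config": {...}
# }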


def fin_db_key(config_lst, logger):
  """Retrieve network_config db keys from fin for each config"""
  _, fin_ifile = tempfile.mkstemp(suffix='.json')
  _, fin_ofile = tempfile.mkstemp(suffix='.json')

  with open(fin_ifile, 'w') as in_file:  # pylint: disable=unspecified-encoding
    in_file.write(json.dumps(fin_net_cfg_job(config_lst, logger), indent=2))

  fin_cmd = f"/opt/rocm/bin/fin -i {fin_ifile} -o {fin_ofile}"
  logger.info('Executing fin cmd: %s', fin_cmd)

  os.system(fin_cmd)

  result = None
  with open(fin_ofile, 'r') as out_file:  # pylint: disable=unspecified-encoding
    try:
      result = json.load(out_file)
    except Exception as err:  # pylint: disable=broad-except
      logger.error('Unable to load fin json file: %s', err)
      #json.load leaves the stream mid-file; rewind before echoing its contents
      out_file.seek(0)
      for line in out_file:
        logger.error(line)

  db_key_dict = {}
  #result is None if the fin output failed to parse
  for elem in result or []:
    if "db_key" in elem:
      db_key_dict[elem['config_tuna_id']] = elem['db_key']

  return db_key_dict
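
# Assumed shape of the fin output parsed above: a list of per-job results where
# successful entries carry "db_key", e.g.
#   [{"config_tuna_id": 42, "db_key": "<network_config key>", ...}]
# which yields db_key_dict == {42: "<network_config key>"}.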


def get_base_query(dbt: MIOpenDBTables, args: argparse.Namespace,
                   logger: logging.Logger):
  """ general query for fdb/pdb results """
@@ -356,6 +415,7 @@ def export_kdb(dbt: MIOpenDBTables,
  return write_kdb(args.arch, args.num_cu, kern_db, logger, args.filename)


#deprecated
def create_sqlite_tables(arch, num_cu, filename=None):
  """create sqlite3 tables"""
  local_path = get_filename(arch, num_cu, filename, False, DB_Type.PERF_DB)
Expand Down Expand Up @@ -391,6 +451,7 @@ def create_sqlite_tables(arch, num_cu, filename=None):
return cnx, local_path


#deprecated
def insert_perf_db_sqlite(cnx, perf_db_entry, ins_cfg_id):
  """insert perf_db entry into sqlite"""
  perf_db_dict = perf_db_entry.to_dict()
@@ -406,6 +467,29 @@ def insert_perf_db_sqlite(cnx, perf_db_entry, ins_cfg_id):
  return perf_db_dict


#deprecated
def populate_sqlite(cfg_map, num_perf, cnx, perf_db_entry, cfg_entry,
                    total_entries, logger: logging.Logger):
  """Analyze perf_db entry"""
  if cfg_entry.id in cfg_map:
    ins_cfg_id = cfg_map[cfg_entry.id]
  else:
    cfg_dict = get_sqlite_cfg_dict(perf_db_entry.fdb_key)

    #filters cfg_dict by SQLITE_CONFIG_COLS, inserts cfg if missing
    ins_cfg_id = get_config_sqlite(cnx, cfg_dict)
    cfg_map[cfg_entry.id] = ins_cfg_id

  pdb_dict = insert_perf_db_sqlite(cnx, perf_db_entry, ins_cfg_id)
  num_perf += 1

  #commit in batches; max(1, ...) guards the modulo for fewer than 10 entries
  if num_perf % max(1, total_entries // 10) == 0:
    cnx.commit()
    logger.info("PDB count: %s, mysql cfg: %s, pdb: %s", num_perf,
                cfg_entry.id, pdb_dict)


#deprecated
def export_pdb(dbt: MIOpenDBTables, args: argparse.Namespace,
               logger: logging.Logger):
  """ export perf db from mysql to sqlite """
@@ -428,25 +512,65 @@ def export_pdb(dbt: MIOpenDBTables, args: argparse.Namespace,
  return local_path


def build_miopen_pdb(query, logger: logging.Logger) -> OrderedDict:
  """return dict with key: db_key, val: list of pdb entries"""
  perf_db: OrderedDict = OrderedDict()
  solvers: Dict[str, Dict[str, Any]] = {}
  db_key_map: Dict[str, str] = {}
  db_entries = query.all()
  total_entries = len(db_entries)
  logger.info("pdb query returned: %s", total_entries)

  cfg_lst = []
  for _, config in db_entries:
    if config not in cfg_lst:
      cfg_lst.append(config)

  db_key_map = fin_db_key(cfg_lst, logger)

  for pdb_entry, config in db_entries:
    if add_entry_to_solvers(pdb_entry, solvers, logger):
      db_key = db_key_map[config.id]
      lst = perf_db.get(db_key)
      if not lst:
        perf_db[db_key] = [pdb_entry]
      else:
        lst.append(pdb_entry)

  return perf_db
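
# Resulting structure: an OrderedDict keyed by fin's db_key, where each value
# is the list of perf-db rows sharing that network config, e.g. (illustrative)
#   perf_db["<db_key>"] == [pdb_entry_a, pdb_entry_b]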


def write_pdb(arch, num_cu, ocl, perf_db, filename=None):
  """
  Serialize perf_db map to plain text file in MIOpen format
  """
  file_name = get_filename(arch, num_cu, filename, ocl, DB_Type.PERF_DB)

  require_id_solvers()
  with open(file_name, 'w') as out:  # pylint: disable=unspecified-encoding
    for key, solvers in sorted(perf_db.items(), key=lambda kv: kv[0]):
      #previous ordering: key=lambda x: (float(x.kernel_time), ID_SOLVER_MAP[x.solver])
      solvers.sort(key=lambda x: ID_SOLVER_MAP[x.solver])
      lst = []
      for rec in solvers:
        # pylint: disable-next=consider-using-f-string ; more readable
        lst.append('{slv}:{params}'.format(slv=ID_SOLVER_MAP[rec.solver],
                                           params=rec.params))
      out.write(f"{key}={';'.join(lst)}\n")
  return file_name


def export_pdb_txt(dbt: MIOpenDBTables, args: argparse.Namespace,
                   logger: logging.Logger):
  """ export perf db from mysql to txt file """
  query = get_pdb_query(dbt, args, logger)
  miopen_pdb = build_miopen_pdb(query, logger)

  logger.info("write pdb to file.")
  return write_pdb(args.arch, args.num_cu, args.opencl, miopen_pdb,
                   args.filename)
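
# Assumed usage (flag name inferred from run_export_db below; the authoritative
# definitions live in get_export_db_parser, not shown in this diff): invoking
# the export_db subcommand with --perf_db now routes to export_pdb_txt and
# prints the resulting .db.txt path.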


@@ -470,7 +594,7 @@ def run_export_db(args: argparse.Namespace, logger: logging.Logger):
  elif args.kern_db:
    result_file = export_kdb(dbt, args, logger)
  elif args.perf_db:
    result_file = export_pdb_txt(dbt, args, logger)

  print(result_file)
