1 Star 0 Fork 51

Wwenli369/Ark-workload

forked from xliu/Ark-workload 
加入 Gitee
与超过 1200万 开发者一起发现、参与优秀开源项目,私有仓库也完全免费 :)
免费加入
文件
克隆/下载
run_pgo.py 23.37 KB
一键复制 编辑 原始数据 按行查看 历史
lixingfu 提交于 2024-03-21 17:17 . update
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583
#!/usr/bin/env python3
import numpy as np
import os
from openpyxl import load_workbook,Workbook
import sys
import subprocess
from datetime import datetime
import argparse
import glob
import pandas as pd
import csv
# --- Module-wide configuration state ------------------------------------------
# Populated by load_file() (toolspath.txt) and load_external_parameters() (CLI)
# before main() runs; consumed throughout the build/run pipeline below.
current_script_path = os.path.abspath(__file__)     # absolute path of this script
current_dir = os.path.dirname(current_script_path)  # directory containing this script
ohos_sdk_path = "~/workspace/daily/dev/code"  # NOTE(review): appears unused in this file — confirm
sdk_type="dev"                    # toolchain flavor: "dev" | "rk3568" | "hispark_taurus"
cases_path=[]                     # directories holding benchmark case sources
code_ts_lib=["BenchmarkMeasure"]  # NOTE(review): appears unused in this file — confirm
target_path=[]                    # build-output subdir; set to a str by get_target_path()
aarch64=""                        # extra AOT flag when cross-compiling for aarch64
run_count=1                       # how many times each case is executed
current_time = datetime.now()
# NOTE(review): current_time.now() calls the classmethod again (a second "now"),
# it does not reuse current_time — confirm intended.
date_strftime = current_time.now().strftime('%Y%m%d%H%M%S')  # timestamp used in output file names
sdk_path=""            # openharmony source root (--sdk / --ts-tools-path)
es2abc=""              # path to the es2abc frontend binary
ark_js_vm=""           # path to the ark_js_vm interpreter binary
ark_aot_compiler=""    # path to the ark_aot_compiler binary
build=False            # force recompilation even when artifacts exist
excel=False            # emit csv/xlsx result tables
case_name=""           # when non-empty, build/run only this case
run=False              # execute cases after building
user_lib=""            # env dict carrying LD_LIBRARY_PATH (built in main())
user_exe=""            # out/<target_path> directory (set in main())
def create_folder_if_not_exists(folder_path):
    """Create *folder_path* (including missing parents) if it does not exist.

    Uses ``exist_ok=True`` so the original check-then-create sequence
    (``os.path.exists`` followed by ``os.makedirs``) cannot race and raise
    FileExistsError when the directory appears between the two calls.
    """
    os.makedirs(folder_path, exist_ok=True)
def load_file(file_path):
    """Populate the tool-path globals from a toolspath configuration file.

    Each line may begin with one of the recognized option prefixes; the rest
    of the line (stripped) is taken as the value. A missing file or any other
    error is reported on stdout rather than raised.
    """
    global cases_path, sdk_path, sdk_type, es2abc, ark_js_vm, ark_aot_compiler

    def value_after(text, prefix):
        # Everything following the option prefix, with surrounding whitespace removed.
        return text[len(prefix):].strip()

    try:
        with open(file_path, 'r') as cfg:
            for raw_line in cfg:
                if raw_line.startswith('--case-path'):
                    cases_path.append(value_after(raw_line, '--case-path'))
                elif raw_line.startswith('--ts-tools-path'):
                    sdk_path = value_after(raw_line, '--ts-tools-path')
                elif raw_line.startswith('--tools-type'):
                    sdk_type = value_after(raw_line, '--tools-type')
                elif raw_line.startswith('--es2abc'):
                    es2abc = value_after(raw_line, '--es2abc')
                elif raw_line.startswith('--ark_js_vm'):
                    ark_js_vm = value_after(raw_line, '--ark_js_vm')
                elif raw_line.startswith('--ark_aot_compiler'):
                    ark_aot_compiler = value_after(raw_line, '--ark_aot_compiler')
    except FileNotFoundError:
        print(f"File not found: {file_path}")
    except Exception as e:
        print(f"An error occurred: {e}")
def is_numeric_string(s):
    """
    Return True when *s* can be parsed as a float, False otherwise.
    """
    try:
        float(s)
    except ValueError:
        return False
    return True
def is_numeric_array(arr):
    """
    Return True when every element of *arr* parses as a float.
    Vacuously true for an empty sequence.
    """
    for item in arr:
        try:
            float(item)
        except ValueError:
            return False
    return True
def geometric_mean(arr):
    """Return the geometric mean of the numeric entries of *arr*.

    Entries that do not parse as floats are skipped (same filtering as the
    original's is_numeric_string check). The mean is computed in log space,
    ``exp(mean(log(x)))``, so a long list of large timings cannot overflow
    the way the original plain product could. Values are expected to be
    positive benchmark scores; a zero yields 0.0 (as before, with a numpy
    warning).

    Raises:
        ValueError: when *arr* contains no numeric entries (the original
        raised an opaque ZeroDivisionError in that case).
    """
    values = []
    for x in arr:
        try:
            values.append(float(x))
        except ValueError:
            pass  # mirror is_numeric_string(): skip unparsable entries only
    if not values:
        raise ValueError("geometric_mean: no numeric values in input")
    # exp(mean(log(v))) == prod(v) ** (1/n), but numerically stable.
    return float(np.exp(np.mean(np.log(values))))
def change_file_permissions(file_path, permissions):
    """Run ``chmod <permissions> <file_path>`` and report the outcome."""
    try:
        subprocess.run(['chmod', permissions, file_path], check=True)
    except subprocess.CalledProcessError as err:
        print(f"Error: {err}")
    else:
        print(f"Successfully changed permissions of {file_path}")
def writing_log(show_date=False, file_name=None, log="", init=False):
    """Append one line to *file_name*; with ``init=True`` truncate it instead.

    When *show_date* is set, the line is prefixed with a timestamp and a tab.
    """
    if init:
        # Truncate (or create) the log file, writing nothing.
        with open(file_name, "w"):
            pass
        return
    if show_date:
        stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        entry = f"{stamp}\t{log}\n"
    else:
        entry = f"{log}\n"
    with open(file_name, "a") as handle:
        handle.write(entry)
def writing_csv(file_name=None, data="", init=False):
    """Write *data* (an iterable of rows) to *file_name* as CSV.

    ``init=True`` overwrites the file; otherwise rows are appended.
    """
    mode = 'w' if init else 'a'
    with open(file_name, mode, newline='') as handle:
        csv.writer(handle).writerows(data)
def run_cmd(command, cwd=None, command_env=None):
    """Run *command* (a single-element list holding one shell string), echoing
    and logging the command and its output.

    Returns the captured stdout when the process wrote any, the string
    "error" when only stderr was produced, and None when there was no output.
    """
    log=f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] : {' && '.join(command)}"
    print(f"{log}")
    # log_file is a module-level global created at import time.
    writing_log(file_name=log_file,show_date=True,log=' && '.join(command))
    # NOTE(review): shell=True with a list argument relies on POSIX semantics
    # (only command[0] reaches the shell); callers pre-join with ' '.
    # NOTE(review): {**command_env, **os.environ} lets the ambient environment
    # override command_env, so an inherited LD_LIBRARY_PATH would shadow the
    # one assembled in main() — confirm this is intended.
    if cwd:
        process = subprocess.Popen(command, cwd=cwd, shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,env={**command_env, **os.environ})
    else:
        process = subprocess.Popen(command, shell=True,stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,env={**command_env, **os.environ})
    stdout, stderr = process.communicate()  # wait for the command to finish and collect its output
    if stdout:
        print(f"stdout : {stdout}")
        writing_log(file_name=log_file,show_date=False,log=stdout)
        return stdout
    # Only reached when stdout was empty; stderr alone maps to the "error" sentinel.
    if stderr:
        print(f"stderr : {stderr}")
        writing_log(file_name=log_file,show_date=False,log=stderr)
        return "error"
    pass
def count_type(value):
    """argparse type callback for --run-count: accept integers only."""
    try:
        parsed = int(value)
    except ValueError:
        raise argparse.ArgumentTypeError(f'--count 的值必须是一个整数,而不是 {value}')
    return parsed
def load_external_parameters():
    """Parse command-line arguments into the module-level configuration
    globals, derive the per-run output directory (pgo2path), and resolve
    target_path via get_target_path().
    """
    global build
    global date_strftime
    global pgo2path
    global aarch64
    global excel
    global run_count
    global case_name
    global cases_path
    global run
    global sdk_path
    global sdk_type
    global ark_js_vm
    global ark_aot_compiler
    global es2abc
    parser = argparse.ArgumentParser(description='Process some command line arguments')
    # User-facing (Chinese) help texts are runtime strings and kept verbatim.
    parser.add_argument('--build', help='是否编译', action='store_true', default=build)
    parser.add_argument('--run', help='是否运行', action='store_true', default=run)
    parser.add_argument('--date', help='设置自定义日期', default=date_strftime)
    parser.add_argument('--aarch64', help='是否生成aarch64产物', const='--compiler-target-triple=aarch64-unknown-linux-gnu', action='store_const', default='')
    parser.add_argument('--excel', help='是否生成excel表格', action='store_true', default=excel)
    parser.add_argument('--run-count', help='设置执行用例次数',type=count_type, default=run_count)
    parser.add_argument('--case', help='设置指定用例',default=case_name)
    parser.add_argument('--code-v', help='设置需要编译或运行的用例路径',nargs='+',default=cases_path)
    parser.add_argument('--sdk', help='openharmony源码目录',default=sdk_path)
    parser.add_argument('--dev', help='openharmony源码类型',default=sdk_type)
    parser.add_argument('--ark-js-vm', help='指定ark_js_vm工具路径',default=ark_js_vm)
    parser.add_argument('--ark-aot-compiler', help='指定ark_aot_compiler工具路径',default=ark_aot_compiler)
    parser.add_argument('--es2abc', help='指定es2abc工具路径',default=es2abc)
    parser.add_argument('--high-priority-workload', help='编译weekly_workload',action='store_true', default=False)
    parser.add_argument('--low-priority-workload', help='编译later_workload',action='store_true', default=False)
    parser.add_argument('--all', help='编译并运行weekly_workload以及later_workload',action='store_true', default=False)
    args = parser.parse_args()
    # NOTE(review): --date always has a non-empty default, so this guard can
    # never fire; it looks intended to catch "no arguments given" — confirm.
    if not any(vars(args).values()):
        print('请输入正确的命令行参数,使用 --help 查看帮助。')
        sys.exit()
    print(args)
    build=args.build
    date_strftime=args.date
    # Per-run output directory for .abc/.ap/.an/.ai artifacts.
    pgo2path=f"{current_dir}/pgo_build/{date_strftime}"
    aarch64=args.aarch64
    excel=args.excel
    run_count=args.run_count
    case_name=args.case
    cases_path=args.code_v
    run=args.run
    sdk_path=args.sdk
    sdk_type=args.dev
    # Workload shortcut flags override any --code-v selection.
    if args.high_priority_workload:
        cases_path=["weekly_workload"]
    elif args.low_priority_workload:
        cases_path=["later_workload"]
    if args.all:
        cases_path=["weekly_workload","later_workload"]
        build=True
        run=True
        excel=True
    get_target_path()
def get_target_path():
    """Resolve the build-output subdirectory for the configured sdk_type.

    Unknown sdk_type values leave target_path untouched, as before.
    """
    global target_path
    known_targets = {
        "dev": "x64.release",
        "rk3568": "rk3568/clang_x64",
        "hispark_taurus": "hispark_taurus/clang_x64",
    }
    if sdk_type in known_targets:
        target_path = known_targets[sdk_type]
def find_filepath(filename, start_path="."):
    """Recursively search *start_path* for files named *filename*.

    Returns a (possibly empty) list of matching paths rooted at the
    absolute form of *start_path*.

    Fix: the glob pattern previously hard-coded a literal file name and
    ignored the *filename* argument entirely, so no real file was ever found.
    """
    root = os.path.abspath(start_path)
    # '**' with recursive=True matches zero or more directory levels.
    return glob.glob(os.path.join(root, '**', filename), recursive=True)
def list_files(directory="."):
    """Return the names of the regular files directly inside *directory*."""
    return [
        name
        for name in os.listdir(directory)
        if os.path.isfile(os.path.join(directory, name))
    ]
def list_files_with_extension(directory=".", extension=None):
    """Return names of regular files in *directory* that end with *extension*.

    Fixes over the original: ``extension is None`` instead of ``== None``,
    the "retrun" typo in the warning, and an explicit empty-list return in
    the no-extension case (the original implicitly returned None, which
    crashed callers that iterate the result).
    """
    if extension is None:
        print("no extension, return")
        return []
    entries = os.listdir(directory)
    return [
        name
        for name in entries
        if name.endswith(extension) and os.path.isfile(os.path.join(directory, name))
    ]
def load_fileInfo(cases_ts_path):
    """Collect every fileInfo.txt found under the given case directories
    whose path contains a /ts/ segment."""
    matches = []
    for case_dir in cases_ts_path:
        for info_path in find_filepath("fileInfo.txt", case_dir):
            if "/ts/" in info_path:
                matches.append(info_path)
    return matches
def ts2abc(case_name=None,user_lib=os.environ.copy()):
    """Compile one TypeScript case to .abc in pgo2path using es2abc.

    Locates <case_name>.ts under the configured case directories. When the
    case directory carries a fileInfo.txt, the whole project is compiled via
    @fileInfo.txt in merge-abc mode; otherwise the single .ts file is
    compiled as an ES module.

    NOTE(review): the default user_lib=os.environ.copy() is evaluated once
    at import time (a shared snapshot), not per call — confirm intended.
    NOTE(review): if no matching .ts file is found, case_name_ts stays None
    and os.path.dirname(None) below raises TypeError — confirm callers
    always pass an existing case.
    """
    global es2abc
    global ark_aot_compiler
    global ark_js_vm
    global pgo2path
    # Skip recompilation when not forced and the .abc already exists.
    if not build and os.path.exists(f"{pgo2path}/{case_name}.abc"):
        return
    if case_name:
        case_name_ts = None
        # If the case exists in several directories, the last match wins.
        for dir_path in cases_path:
            tmp=find_filepath(f"{case_name}.ts",dir_path)
            if tmp:
                case_name_ts=tmp[0]
        case_name_dir=os.path.dirname(case_name_ts)
        check_file_path = os.path.join(case_name_dir, "fileInfo.txt")
        if os.path.exists(check_file_path):
            # Project-style case: compile everything listed in fileInfo.txt.
            fileInfofile_case_path=f"@./{os.path.basename(check_file_path)}"
            file_name=f"{os.path.basename(os.path.dirname(check_file_path))}"
            cmd_list = [es2abc]
            cmd_list.append(fileInfofile_case_path)
            cmd_list.append("--output")
            cmd_list.append(f"{pgo2path}/{file_name}.abc")
            cmd_list.append("--type-extractor")
            cmd_list.append("--merge-abc")
            # run_cmd expects a single pre-joined shell string.
            cmd_list = [' '.join(cmd_list)]
            run_cmd(cmd_list,f"{os.path.dirname(check_file_path)}",user_lib)
            pass
        else:
            # Single-file case: compile the .ts as an ES module.
            file_name=f"{os.path.basename(case_name_ts)}"
            cmd_list = [es2abc]
            cmd_list.append(os.path.basename(file_name))
            cmd_list.append("--output")
            cmd_list.append(f"{pgo2path}/{case_name}.abc")
            cmd_list.append("--type-extractor")
            cmd_list.append("--module")
            cmd_list.append("--extension=ts")
            cmd_list.append("--merge-abc")
            cmd_list = [' '.join(cmd_list)]
            run_cmd(cmd_list,f"{os.path.dirname(case_name_ts)}",user_lib)
    return
    pass
def collect_ap(case_abc, aot=False, user_lib=os.environ.copy()):
    """Run *case_abc* under ark_js_vm with the PGO profiler enabled so a
    ``.ap`` profile is written next to it in pgo2path.

    With ``aot=True`` the previously compiled AOT artifacts are loaded, so
    the profile reflects an AOT-executed run.
    """
    icu_data_path = os.path.expanduser(f"{user_exe}/thirdparty/icu")
    base_name = case_abc.replace(".abc", "")
    parts = [
        ark_js_vm,
        "--enable-pgo-profiler=true",
        f"--compiler-pgo-profiler-path=./{base_name}.ap",
        f"--icu-data-path={icu_data_path}",
    ]
    if aot:
        parts.append(f"--aot-file=./{base_name}")
    parts.append(f"--entry-point={base_name}")
    parts.append(case_abc)
    run_cmd([' '.join(parts)], pgo2path, user_lib)
def build_builtins_d_abc(user_lib=os.environ.copy()):
    """Compile the runtime's lib_ark_builtins.d.ts declaration file to .abc
    with es2abc, in place next to the source."""
    ts_src = os.path.expanduser(f"{sdk_path}/arkcompiler/ets_runtime/ecmascript/ts_types/lib_ark_builtins.d.ts")
    abc_out = os.path.expanduser(f"{sdk_path}/arkcompiler/ets_runtime/ecmascript/ts_types/lib_ark_builtins.d.abc")
    command = ' '.join([
        es2abc,
        ts_src,
        "--type-extractor",
        "--module",
        "--extension=ts",
        "--merge-abc",
        "--output",
        abc_out,
    ])
    run_cmd([command], command_env=user_lib)
def build_arkcompiler(case_abc, args=[], user_lib=os.environ.copy()):
    """AOT-compile *case_abc* with ark_aot_compiler, feeding it the collected
    ``.ap`` profile and the builtins abc; extra compiler flags go in *args*."""
    builtins_abc = os.path.expanduser(f"{sdk_path}/arkcompiler/ets_runtime/ecmascript/ts_types/lib_ark_builtins.d.abc")
    base_name = case_abc.replace(".abc", "")
    parts = [
        ark_aot_compiler,
        "--compiler-external-pkg-info=[]",
        f"--builtins-dts={builtins_abc}",
        f"--compiler-pgo-profiler-path=./{base_name}.ap",
        f"--aot-file=./{base_name}",
        *args,
        case_abc,
    ]
    run_cmd([' '.join(parts)], pgo2path, user_lib)
def build_pgo(user_lib=os.environ.copy()):
    """Run the two-pass PGO pipeline for every .abc found in pgo2path.

    For each case: collect an interpreter profile, AOT-compile against it,
    collect a second profile from the AOT run, then recompile (optionally
    cross-compiling via the global ``aarch64`` flag).

    Fix: a case whose .ap/.an/.ai artifacts already exist is now skipped
    with ``continue``; the original ``return`` aborted the whole loop, so no
    case after the first prebuilt one was ever processed.
    """
    for case_abc in list_files_with_extension(pgo2path, ".abc"):
        stem = f"{pgo2path}/{case_abc.replace('.abc', '')}"
        if not build and all(os.path.exists(f"{stem}{ext}") for ext in (".ap", ".an", ".ai")):
            continue  # artifacts are up to date; move on to the next case
        collect_ap(case_abc, user_lib=user_lib)
        build_arkcompiler(case_abc, user_lib=user_lib)
        collect_ap(case_abc, aot=True, user_lib=user_lib)
        build_arkcompiler(case_abc, args=[aarch64], user_lib=user_lib)
def build_cases(case_name=None, fileInfofiles=[], user_lib=os.environ.copy()):
    """Compile benchmark sources into .abc files under pgo2path, then run the
    PGO pipeline via build_pgo().

    When *case_name* is given only that case is compiled. Otherwise every
    fileInfo.txt project in *fileInfofiles* is compiled, followed by every
    .ts file found directly in each configured case directory (falling back
    to its ``ts/`` subdirectory when the directory itself holds none).
    """
    create_folder_if_not_exists(pgo2path)
    if case_name:
        ts2abc(case_name=case_name, user_lib=user_lib)
    else:
        for info_file in fileInfofiles:
            project = f"{os.path.basename(os.path.dirname(info_file))}"
            ts2abc(case_name=project, user_lib=user_lib)
        for case_dir in cases_path:
            found = list_files_with_extension(f"{case_dir}", ".ts")
            if not found:
                # Fall back to the conventional ts/ subdirectory.
                found = list_files_with_extension(f"{case_dir}/ts", ".ts")
            for ts_file in found:
                ts2abc(case_name=ts_file.replace(".ts", ""), user_lib=user_lib)
    build_pgo(user_lib=user_lib)
def run_single_case(case_name=None, user_lib=os.environ.copy()):
    """Execute one compiled case under ark_js_vm (loading its AOT artifacts)
    and return run_cmd's result: captured stdout, "error", or None."""
    global es2abc
    global ark_aot_compiler
    global ark_js_vm
    global pgo2path
    if not case_name:
        return
    icu_data_path = os.path.expanduser(f"{user_exe}/thirdparty/icu")
    command = ' '.join([
        ark_js_vm,
        f"--icu-data-path={icu_data_path}",
        f"--aot-file=./{case_name}",
        f"--entry-point={case_name}",
        f"{case_name}.abc",
    ])
    return run_cmd([command], pgo2path, user_lib)
def _record(bucket, case_file_list, name, output):
    """Append *output* to case_file_list[bucket]; bump the totals the first
    time *name* is seen in that bucket."""
    files = case_file_list[bucket]["case_files"]
    if name in files:
        files[name].append(output)
    else:
        case_file_list["file_total"] += 1
        case_file_list[bucket]["file_total"] += 1
        files[name] = [output]


def _run_once(name, user_lib, case_file_list, data_list):
    """Run *name* once, record the raw output, and classify it as error or
    successful; successful output lines are queued for score parsing."""
    output = run_single_case(case_name=name, user_lib=user_lib)
    case_file_list["case_files"].setdefault(name, []).append(output)
    text = (output or "").lower()
    if output is None or output == "" or "error" in text or "segmentation fault" in text:
        # Bug fix: the original used `data_list += f"..."`, which extended the
        # list one character at a time, so failures never reached the report.
        data_list.append(f"{name}:\t{output}")
        _record("error", case_file_list, name, output)
    else:
        _record("successful", case_file_list, name, output)
        data_list += output.splitlines()


def _parse_result_line(value):
    """Translate one raw benchmark-output line into [case, score], or None
    when the line carries no score.

    Handles the various "name : value ms" layouts emitted by the different
    workloads, plus the error / segmentation-fault sentinels.
    """
    if "error" in value.lower():
        parts = value.split(":\t")
        return [parts[0], "error"]
    if "segmentation fault" in value.lower():
        parts = value.split(":\t")
        # Bug fix: the original called str.replace on this already-split
        # list, raising AttributeError before the row could be recorded.
        return [parts[0], "segmentation fault"]
    if " ms = " in value:
        parts = value.replace(":", "").split(" ms = ")
    elif ": \t" in value and "ms" in value:
        parts = value.replace(" : ", "").replace(": ", "").split("\t")
    elif ": " in value and "ms" in value:
        parts = value.replace(": ", "\t").replace(": ", "").split("\t")
    elif " : " in value and "ms" in value:
        parts = value.replace(" : ", "\t").split("\t")
    elif ":" in value and "ms" in value:
        parts = value.replace(":", "").split("\t")
    elif " - " in value or "Splay:" in value:
        parts = value.replace(":", "").split("\t")
    elif "usec = " in value:
        parts = value.split(": usec = ")
    else:
        return None
    return [parts[0], parts[1]]


def run_all_case(user_lib=user_lib):
    """Run every compiled case run_count times, parse the benchmark output
    into per-case score columns, and write the result/summary logs (and the
    CSV when --excel is set).

    Fixes over the original: failure lines are now appended to the report
    list as whole strings, and the "segmentation fault" parse branch no
    longer crashes on a list (see _run_once / _parse_result_line).
    """
    print(f"\n\n---------- run start , count: {run_count} ----------\n\n")
    excel_datas = {}
    data_list = []
    case_file_list = {
        "file_total": 0,
        "error": {"file_total": 0, "case_files": {}},
        "successful": {"file_total": 0, "case_files": {}},
        "case_files": {},
    }
    global case_name
    if not run:
        return
    # Header row: data1..dataN plus the trailing average column.
    excel_datas["case_name"] = [f"data{count + 1}" for count in range(run_count)]
    excel_datas["case_name"].append(f"average")
    if case_name:
        for count in range(0, run_count):
            _run_once(case_name, user_lib, case_file_list, data_list)
    else:
        for abc in list_files_with_extension(f"{pgo2path}", ".abc"):
            case_name = abc.replace(".abc", "")
            for count in range(0, run_count):
                _run_once(case_name, user_lib, case_file_list, data_list)
    # Parse each raw output line into a (case, score) pair.
    for raw in data_list:
        parsed = _parse_result_line(raw)
        if parsed is None:
            continue
        writing_log(file_name=pgo_data_log, show_date=False, log=raw)
        key, score = parsed[0], parsed[1]
        if key in excel_datas:
            excel_datas[key].append(score)
        else:
            excel_datas[key] = [score]
    # Build the CSV rows, appending a geometric-mean column per case.
    case_data_list = []
    for key in excel_datas:
        row = [key]
        row += excel_datas[key]
        if len(excel_datas[key]) <= run_count:
            if is_numeric_array(excel_datas[key]):
                row.append(geometric_mean(excel_datas[key]))
            else:
                row.append("error")
        case_data_list.append(row)
    if excel:
        writing_csv(file_name=pgo_data_csv, data=case_data_list, init=True)
    file_log = f"pgo_run_case_{date_strftime}.log"
    writing_log(file_name=file_log, show_date=False, log=case_file_list)
    print(f"\n\ncase_file_total:{case_file_list['file_total']}")
    print(f"case_error_total:{case_file_list['error']['file_total']}")
    print(f"case_successful_total:{case_file_list['successful']['file_total']}")
    print(f"详情请看{file_log}")
def csv2excel():
    """Convert this run's CSV results into an .xlsx workbook (when --excel).

    Sheet 'sheet' holds the full table; 'Sheet2' holds only the case-name
    and average columns.
    """
    if not excel:
        return
    csv_name = f'pgo_csv_{date_strftime}.csv'
    xlsx_name = f'pgo_data_{date_strftime}.xlsx'
    frame = pd.read_csv(csv_name)
    summary = pd.DataFrame({
        frame.columns[0]: frame.iloc[:, 0],    # first column: case names
        frame.columns[-1]: frame.iloc[:, -1],  # last column: averages
    })
    with pd.ExcelWriter(xlsx_name) as writer:
        frame.to_excel(writer, index=False, sheet_name='sheet')
        summary.to_excel(writer, index=False, sheet_name='Sheet2')
def main():
    """Resolve the SDK tool paths, make the tools executable, assemble the
    LD_LIBRARY_PATH environment, then compile, run and report all cases."""
    global user_lib
    global user_exe
    global es2abc
    global ark_aot_compiler
    global ark_js_vm
    # target_path was resolved by get_target_path() during argument parsing.
    user_exe=f"{sdk_path}/out/{target_path}"
    user_lib = {"LD_LIBRARY_PATH":""}
    es2abc = f"{sdk_path}/out/{target_path}/arkcompiler/ets_frontend/es2abc"
    ark_js_vm = f"{sdk_path}/out/{target_path}/arkcompiler/ets_runtime/ark_js_vm"
    ark_aot_compiler = f"{sdk_path}/out/{target_path}/arkcompiler/ets_runtime/ark_aot_compiler"
    es2abc = os.path.expanduser(es2abc)
    ark_js_vm = os.path.expanduser(ark_js_vm)
    ark_aot_compiler = os.path.expanduser(ark_aot_compiler)
    # Make sure the prebuilt tools are executable before invoking them.
    run_cmd(command=[f"chmod a+x {es2abc}"],command_env=user_lib)
    run_cmd(command=[f"chmod a+x {ark_js_vm}"],command_env=user_lib)
    run_cmd(command=[f"chmod a+x {ark_aot_compiler}"],command_env=user_lib)
    # Library search path: ets_runtime, then icu, cJSON and clang's llvm lib
    # are prepended (so later additions take precedence).
    user_lib["LD_LIBRARY_PATH"]=os.path.expanduser(f"{sdk_path}/out/{target_path}/arkcompiler/ets_runtime")
    user_lib["LD_LIBRARY_PATH"]=os.path.expanduser(f"{sdk_path}/out/{target_path}/thirdparty/icu:"+user_lib["LD_LIBRARY_PATH"])
    user_lib["LD_LIBRARY_PATH"]=os.path.expanduser(f"{sdk_path}/out/{target_path}/thirdparty/cJSON:"+user_lib["LD_LIBRARY_PATH"])
    user_lib["LD_LIBRARY_PATH"]=os.path.expanduser(f"{sdk_path}/prebuilts/clang/ohos/linux-x86_64/llvm/lib:"+user_lib["LD_LIBRARY_PATH"])
    build_builtins_d_abc(user_lib)
    # Gather fileInfo.txt projects, compile everything, run, then report.
    fileInfo_list = load_fileInfo(cases_path)
    build_cases(case_name=case_name,fileInfofiles=fileInfo_list,user_lib=user_lib)
    run_all_case(user_lib)
    csv2excel()
    pass
# --- Script setup: configuration, then per-run log/report file names ----------
load_file('toolspath.txt')  # load the toolspath configuration file
load_external_parameters()  # load external (command-line) parameters
log_file = f'pgo_build_{date_strftime}.log'     # command/output log (run_cmd)
pgo_data_log = f'pgo_data_{date_strftime}.log'  # raw benchmark result lines
pgo_data_csv = f'pgo_csv_{date_strftime}.csv'   # parsed results as CSV
# NOTE(review): csv2excel() writes f'pgo_data_{date_strftime}.xlsx' instead of
# this name; this module-level value appears unused — confirm.
pgo_data_excel = f'pgo_csv_{date_strftime}.xlsx'
writing_log(init=True,file_name=log_file)
writing_log(init=True,file_name=pgo_data_log)
# logging.basicConfig(filename=log_file, level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
if __name__ == "__main__":
    main()
Loading...
马建仓 AI 助手
尝试更多
代码解读
代码找茬
代码优化
TypeScript
1
https://gitee.com/wwenli369/ark-workload.git
[email protected]:wwenli369/ark-workload.git
wwenli369
ark-workload
Ark-workload
master

搜索帮助