2023-01-26 03:20:03 +08:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
|
|
|
# Copyright (c) 2022 Intel Corporation
|
|
|
|
# SPDX-License-Identifier: Apache-2.0
|
|
|
|
|
|
|
|
|
|
|
|
# This script upload test ci results to the zephyr ES instance for reporting and analysis.
|
|
|
|
# see https://kibana.zephyrproject.io/
|
|
|
|
|
|
|
|
from elasticsearch import Elasticsearch
|
|
|
|
from elasticsearch.helpers import bulk
|
|
|
|
import sys
|
|
|
|
import os
|
|
|
|
import json
|
|
|
|
import argparse
|
|
|
|
|
2023-02-11 19:46:42 +08:00
|
|
|
def gendata(f, index, run_date=None):
    """Yield Elasticsearch bulk actions for each testsuite in a twister JSON report.

    Args:
        f: Path to a twister JSON results file (``twister.json`` style).
        index: Name of the ES index each action targets.
        run_date: Optional run-date string stamped into each document's
            environment; when ``None`` the report's environment is used as-is.

    Yields:
        dict: ``{"_index": ..., "_source": ...}`` actions consumable by
        ``elasticsearch.helpers.bulk``.
    """
    with open(f, "r") as j:
        data = json.load(j)
        for t in data['testsuites']:
            name = t['name']
            # Component/sub-component come from the last path segment of the
            # suite name, e.g. ".../kernel.common" -> ("kernel", "common").
            _grouping = name.split("/")[-1]
            parts = _grouping.split(".")
            main_group = parts[0]
            # Guard names with no '.' — the original indexed parts[1]
            # unconditionally and raised IndexError on such names.
            sub_group = parts[1] if len(parts) > 1 else ""
            env = data['environment']
            if run_date:
                env['run_date'] = run_date
            t['environment'] = env
            t['component'] = main_group
            t['sub_component'] = sub_group
            yield {
                "_index": index,
                "_source": t
            }
|
|
|
|
|
|
|
|
def main():
    """Entry point: parse arguments and upload twister test results to ES.

    Reads the server URL and API key from the ELASTICSEARCH_SERVER and
    ELASTICSEARCH_KEY environment variables so credentials never live in
    the CI scripts. Raises KeyError if either variable is unset.
    """
    args = parse_args()

    # --index is required by the parser; keep the historical fallback in
    # case that requirement is ever relaxed.
    index_name = args.index or 'tests-zephyr-1'

    settings = {
        "index": {
            "number_of_shards": 4
        }
    }
    # Explicit mappings so numeric fields are typed for aggregation in
    # Kibana instead of being dynamically mapped as text.
    mappings = {
        "properties": {
            "execution_time": {"type": "float"},
            "retries": {"type": "integer"},
            "testcases.execution_time": {"type": "float"},
        }
    }

    if args.dry_run:
        # Preview exactly what would be uploaded and exit. Pass run_date
        # too — the original dry run omitted it, so the preview differed
        # from the real upload.
        for f in args.files:
            for action in gendata(f, index_name, args.run_date):
                print(action)
        sys.exit(0)

    es = Elasticsearch(
        [os.environ['ELASTICSEARCH_SERVER']],
        api_key=os.environ['ELASTICSEARCH_KEY'],
        # NOTE(review): TLS verification is disabled — presumably the CI
        # host uses a self-signed cert; confirm before tightening.
        verify_certs=False
    )

    if args.create_index:
        es.indices.create(index=index_name, mappings=mappings, settings=settings)
    else:
        if args.run_date:
            print(f"Setting run date from command line: {args.run_date}")
        for f in args.files:
            bulk(es, gendata(f, index_name, args.run_date))
|
|
|
|
|
|
|
|
|
|
|
def parse_args(argv=None):
    """Parse command-line options for the uploader.

    Args:
        argv: Optional list of argument strings; defaults to ``None`` so
            argparse falls back to ``sys.argv[1:]``. Added (backward-
            compatibly) so the parser can be exercised in tests.

    Returns:
        argparse.Namespace with dry_run, create_index, index, run_date
        and files attributes.
    """
    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument('-y', '--dry-run', action="store_true", help='Dry run.')
    parser.add_argument('-c', '--create-index', action="store_true", help='Create index.')
    parser.add_argument('-i', '--index', help='index to push to.', required=True)
    parser.add_argument('-r', '--run-date', help='Run date in ISO format', required=False)
    parser.add_argument('files', metavar='FILE', nargs='+', help='file with test data.')

    args = parser.parse_args(argv)
    return args
|
|
|
|
|
|
|
|
|
|
|
|
# Standard entry guard: run the uploader only when executed as a script,
# not when imported.
if __name__ == '__main__':
    main()
|