add markdown and gist support
Signed-off-by: kimpaller <kimchesed.paller@analog.com>
kimpaller committed Apr 22, 2024
1 parent dae40ec commit 3698fef
Showing 9 changed files with 302 additions and 10 deletions.
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -34,7 +34,7 @@ dependencies = [
readme = { file = "README.rst", content-type = "text/x-rst" }

[tool.setuptools]
packages = ["telemetry", "telemetry.gparser", "telemetry.prod"]
packages = ["telemetry", "telemetry.gparser", "telemetry.prod", "telemetry.report"]

[project.scripts]
telemetry = "telemetry.cli:cli"
@@ -45,4 +45,4 @@ documentation = "https://sdgtt.github.io/telemetry/"
repository = "https://github.com/sdgtt/telemetry"

[tool.setuptools.package-data]
telemetry = ["resources/*.json", "tests/test_artifacts/*"]
telemetry = ["resources/*.json", "tests/test_artifacts/*", "report/templates/*.md"]
4 changes: 2 additions & 2 deletions requirements_dev.txt
@@ -7,8 +7,8 @@ tox==3.14.0
Sphinx==1.8.5
twine==1.14.0
Click==7.0
pytest==4.6.5
pytest-runner==5.1
pytest
pytest-runner
pytest-cov
coveralls
elasticsearch==7.16.0
1 change: 1 addition & 0 deletions telemetry/__init__.py
@@ -5,6 +5,7 @@
from telemetry.gargantua import Gargantua as gargantua
from telemetry.gparser import parser
from telemetry.gparser import grabber
from telemetry.report import gist, markdown

import telemetry.prod as prod

47 changes: 47 additions & 0 deletions telemetry/cli.py
@@ -5,6 +5,7 @@
import telemetry
import os

import telemetry.report

def validate(field, value, schema):
"""Validate a field and value data type
@@ -191,7 +192,52 @@ def log_boot_logs(server, in_args):
        tel = telemetry.ingest(server=server)
        tel.log_boot_tests(**entry)

@click.command()
@click.option("--server", default="picard", help="Address of Elasticsearch server")
@click.option("--job_name", required=True, help="Jenkins job name to fetch")
@click.option("--build_number", required=True, help="Build number to fetch")
@click.option("--board_name", default=None, help="Board to fetch, will select all if empty")
def create_results_gist(server, job_name, build_number, board_name):
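    """Create a boot test results report for a Jenkins build and publish it as a GitHub Gist."""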
    tel = telemetry.searches(server=server)
    boot_test = tel.boot_tests(
        boot_folder_name=board_name,
        jenkins_project_name=job_name,
        jenkins_build_no=build_number
    )

    if len(boot_test.keys()) == 0:
        raise Exception(f"{job_name} - {build_number} not found")
    data = {}
    # get artifacts
    artifacts_info_txt = tel.artifacts(
        target_board=None,
        job=job_name,
        job_no=build_number,
        artifact_info_type="info_txt",
    )
    for bn, info in boot_test.items():
        artifacts = tel.artifacts(board_name, job_name, build_number)
        artifact_types = ["enumerated_devs", "missing_devs", "dmesg_err", "pytest_failure"]
        for artifactory_type in artifact_types:
            info[0].update({artifactory_type: []})
            for artifact in artifacts:
                if artifact["artifact_info_type"] == artifactory_type:
                    info[0][artifactory_type].append(artifact["payload"])

        if artifacts_info_txt:
            info[0]["info_txt"] = dict()
            info[0]["info_txt"].update({"Built projects": list()})
            for artifact in artifacts_info_txt:
                if artifact["payload"] == "Built projects":
                    info[0]["info_txt"]["Built projects"].append(artifact["payload_param"])
                    continue
                info[0]["info_txt"].update({artifact["payload"]: artifact["payload_param"]})

        data[bn] = info[0]

    m = telemetry.markdown.ResultsMarkdown(data)
    m.generate_gist()

@click.command()
def main(args=None):
"""Console script for telemetry."""
@@ -205,6 +251,7 @@ def main(args=None):
cli.add_command(log_hdl_resources_from_csv)
cli.add_command(log_artifacts)
cli.add_command(grab_and_log_artifacts)
cli.add_command(create_results_gist)
cli.add_command(main)

if __name__ == "__main__":
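A sketch of exercising the new command in-process with Click's test runner. The Elasticsearch host below is a placeholder, the job, build, and board values are the ones used in the searches.py example further down, and publishing the gist additionally requires GH_TOKEN and GH_URL to be set in the environment:

```python
# Illustrative only: the host value is a placeholder, not a real server.
from click.testing import CliRunner

from telemetry.cli import create_results_gist

runner = CliRunner()
result = runner.invoke(
    create_results_gist,
    [
        "--server", "elasticsearch.example.com",  # hypothetical Elasticsearch host
        "--job_name", "HW_tests/HW_test_multiconfig",
        "--build_number", "1329",
        "--board_name", "zynq-zc702-adv7511-ad9361-fmcomms2-3",
    ],
)
print(result.exit_code)
print(result.output)
```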
Empty file added telemetry/report/__init__.py
Empty file.
36 changes: 36 additions & 0 deletions telemetry/report/gist.py
@@ -0,0 +1,36 @@
import os
from github import Github, Auth, InputFileContent


class Gist:
    def __init__(self, url=None, token=None):
        try:
            if not token:
                token = os.environ['GH_TOKEN']
        except KeyError as ex:
            raise Exception("Github token not defined")

        try:
            if not url:
                url = os.environ['GH_URL']
        except KeyError as ex:
            raise Exception("Github Gist url not defined")

        self.gh = Github(auth=Auth.Token(token))
        self.gh_auth_user = self.gh.get_user()
        self.gh_url = url

    def create_gist(self, markdown_file, desc=""):

        markdown_str = ""
        with open(markdown_file, 'r') as f:
            markdown_str = f.read()

        # Create a Gist
        gist = self.gh_auth_user.create_gist(
            public=False,
            files={markdown_file: InputFileContent(markdown_str)},
            description=desc
        )

        return gist.id
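A minimal usage sketch for the new Gist helper, assuming a markdown report already exists on disk; the token value, gist URL base, and report file name are placeholders:

```python
import os

from telemetry.report.gist import Gist

# Placeholders: use a real personal access token and your own gist URL base.
os.environ.setdefault("GH_TOKEN", "<personal-access-token>")
os.environ.setdefault("GH_URL", "https://gist.github.com")

gist = Gist()
gist_id = gist.create_gist("zynq-zc702-adv7511-ad9361-fmcomms2-3.md",
                           desc="Boot test results report")
print(f"{gist.gh_url}/{gist_id}")
```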
115 changes: 115 additions & 0 deletions telemetry/report/markdown.py
@@ -0,0 +1,115 @@
import os
import re
import telemetry
from string import Template

class Markdown:
    def __init__(self, file):
        self.filename = os.path.basename(file)
        with open(file, 'r') as f:
            self.template_str = f.read()
        self.template = Template(self.template_str)

    def get_identifiers(self):
        return re.findall(r'\$(\w+)\s', self.template_str)

    def substitute(self, fields):
        return self.template.substitute(fields)

    def generate(self, fields, custom_file_name=None):
        formatted = self.substitute(fields)
        output = self.filename.split('.')[0] + ".md"
        if custom_file_name:
            output = custom_file_name
        with open(f"{os.path.join(output)}", 'w') as f:
            f.write(formatted)
        return output

class ResultsMarkdown(Markdown):
    CRITICAL = ["UpdateBOOTFiles"]

    def __init__(self, data):
        self.param_dict = self.generate_param(data)
        dir = os.path.dirname(os.path.realpath(__file__))
        template_path = os.path.join(dir, "templates", "results.template.md")
        super(ResultsMarkdown, self).__init__(template_path)

    def generate_param(self, data):
        param_dict = {}
        for bn, info in data.items():
            test_build_status = None
            if str(info["last_failing_stage"]) in self.CRITICAL:
                test_build_status = "FAILURE"
            elif int(info["drivers_missing"]) > 0 or \
                    int(info["dmesg_errors_found"]) > 0 or \
                    int(info["pytest_errors"]) > 0 or \
                    int(info["pytest_failures"]) > 0:
                test_build_status = "UNSTABLE"
            elif str(info["last_failing_stage"]) == "NA":
                test_build_status = "PASSING"
            else:
                test_build_status = "INVALID"

            uboot_reached_status = "✔" if bool(info["uboot_reached"]) else "❌"
            linux_prompt_reached_status = "✔" if bool(info["linux_prompt_reached"]) else "❌"
            drivers_enumerated_status = "✔" if int(info["drivers_missing"]) == 0 and test_build_status != "FAILURE" else "❌"
            dmesg_status = "✔" if int(info["dmesg_errors_found"]) == 0 and test_build_status != "FAILURE" else "❌"
            pytest_tests_status = "✔" if int(info["pytest_failures"]) == 0 and test_build_status != "FAILURE" else "❌"

            if test_build_status == "FAILURE":
                iio_drivers_missing_details = "No Details"
                iio_drivers_found_details = "No Details"
                dmesg_errors_found_details = "No Details"
                pytest_failures_details = "No Details"
            else:
                iio_drivers_missing_details = "No missing drivers" if len(info["missing_devs"]) == 0 else ("<br>").join(info["missing_devs"])
                iio_drivers_found_details = "No iio drivers found" if len(info["enumerated_devs"]) == 0 else ("<br>").join(info["enumerated_devs"])
                dmesg_errors_found_details = "No errors" if len(info["dmesg_err"]) == 0 else ("<br>").join(info["dmesg_err"])
                pytest_failures_details = "No failures" if len(info["pytest_failure"]) == 0 else ("<br>").join(info["pytest_failure"])

            last_failing_stage = str(info["last_failing_stage"])
            last_failing_stage_failure = str(info["last_failing_stage_failure"])

            param_dict[bn] = {
                "board_name": bn,
                "branch": info["info_txt"]["BRANCH"],
                "pr_id": info["info_txt"]["PR_ID"],
                "timestamp": info["info_txt"]["TIMESTAMP"],
                "direction": info["info_txt"]["DIRECTION"],
                "triggered_by": info["info_txt"]["Triggered by"],
                "commit_sha": info["info_txt"]["COMMIT SHA"],
                "commit_date": info["info_txt"]["COMMIT_DATE"],
                "test_job_name": info["jenkins_project_name"],
                "test_build_number": info["jenkins_build_number"],
                "test_build_status": test_build_status,
                "uboot_reached_status": uboot_reached_status,
                "linux_prompt_reached_status": linux_prompt_reached_status,
                "drivers_enumerated_status": drivers_enumerated_status,
                "dmesg_status": dmesg_status,
                "pytest_tests_status": pytest_tests_status,
                "drivers_enumerated": "",
                "drivers_missing": "",
                "dmesg_warnings_found": "",
                "dmesg_errors_found": "",
                "pytest_tests": "",
                "pytest_errors": "",
                "pytest_failures": "",
                "pytest_skipped": "",
                "last_failing_stage": last_failing_stage if last_failing_stage != "NA" else "No Details",
                "last_failing_stage_failure": last_failing_stage_failure if last_failing_stage_failure != "NA" else "No Details",
                "iio_drivers_missing_details": iio_drivers_missing_details,
                "iio_drivers_found_details": iio_drivers_found_details,
                "dmesg_errors_found_details": dmesg_errors_found_details,
                "pytest_failures_details": pytest_failures_details,
                "test_status": test_build_status,
            }
        return param_dict

    def generate_gist(self):
        for bn, param in self.param_dict.items():
            outfile = self.generate(param, bn + ".md")
            gist = telemetry.gist.Gist()
            gist_link = gist.create_gist(outfile, f'''Boardname: {param["board_name"]}\n
Branch: {param["branch"]}\nPR ID: {param["pr_id"]}\n
timestamp: {param["timestamp"]}''')
            print(f'Gist created: {gist.gh_url}/{gist_link} - {param["test_status"]}')
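The base Markdown class is a thin wrapper around string.Template. A small sketch of that flow, using a throwaway template and illustrative field values rather than the bundled results template:

```python
# Illustrative sketch of the Template-driven flow behind Markdown/ResultsMarkdown.
import os
import tempfile

from telemetry.report.markdown import Markdown

with tempfile.NamedTemporaryFile("w", suffix=".template.md", delete=False) as f:
    f.write("# $board_name\n\nStatus: $test_build_status\n")
    template_path = f.name

md = Markdown(template_path)
print(md.get_identifiers())  # ['board_name', 'test_build_status']
report = md.generate(
    {"board_name": "zynq-zc702-adv7511-ad9361-fmcomms2-3",
     "test_build_status": "PASSING"},
    custom_file_name="example.md",
)
print(report)  # example.md, written to the current directory
os.unlink(template_path)
```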
58 changes: 58 additions & 0 deletions telemetry/report/templates/results.template.md
@@ -0,0 +1,58 @@
# $board_name

## Build information

```
Boot partition created with arguments:
- BRANCH: $branch
- PR_ID: $pr_id
- TIMESTAMP: $timestamp
- DIRECTION: $direction
Triggered by: $triggered_by
- COMMIT SHA: $commit_sha
- COMMIT_DATE: $commit_date
Test info
- JOB NAME: $test_job_name
- BUILD NO: $test_build_number
- STATUS: $test_build_status
```

## HW Test Result Summary

| Stage | Result |
| ----------- | ----------- |
| U-Boot reached? | $uboot_reached_status |
| Linux prompt reached? | $linux_prompt_reached_status |
| IIO Drivers | $drivers_enumerated_status |
| DMESG | $dmesg_status |
| PYADI-IIO Tests | $pytest_tests_status |

## HW Test Result Details

#### Last Failing Stage

- $last_failing_stage

#### Last Failing Stage Failure

- $last_failing_stage_failure

#### Missing IIO Drivers

- $iio_drivers_missing_details

#### Found IIO Drivers

- $iio_drivers_found_details

#### DMESG Errors

- $dmesg_errors_found_details

#### PYADI-IIO Test Failures

- $pytest_failures_details

## Finished: $test_status
47 changes: 41 additions & 6 deletions telemetry/searches.py
@@ -15,25 +15,54 @@ def _get_schema(self, name):
        loc = os.path.dirname(__file__)
        return os.path.join(loc, "resources", name)

    def artifacts(self):
    def artifacts(self,
        target_board=None,
        job=None,
        job_no=None,
        artifact_info_type=None,
    ):
        """ Query artifacts data from elasticsearch """
        # Returns a list of artifact information sorted by descending archive_date
        index = "boot_tests" if not self.use_test_index else "dummy"

        query = {
        index = "artifacts" if not self.use_test_index else "dummy"
        s = []
        if target_board:
            s.append({"match": {"target_board.keyword": target_board}})
        if job:
            s.append({"match": {"job.keyword": job}})
        if job_no:
            s.append({"match": {"job_no.keyword": job_no}})
        if artifact_info_type:
            s.append({"match": {"artifact_info_type.keyword": artifact_info_type}})
        # Create query
        if s:
            query = {
                "sort": [{"archive_date": {"order": "desc"}}],
                "query": {"bool": {"must": s}},
            }
        else:
            query = {
                "sort": [{"archive_date": {"order": "desc"}}],
                "query": {"match_all": {}},
            }

        res = self.db.es.search(index=index, size=1000, body=query)
        artifacts_data = [data["_source"] for data in res["hits"]["hits"]]
        return artifacts_data

    def boot_tests(self, boot_folder_name=None):
    def boot_tests(self,
        boot_folder_name=None,
        jenkins_project_name=None,
        jenkins_build_no=None,
    ):
        """ Query boot test results from elasticsearch """
        index = "boot_tests" if not self.use_test_index else "dummy"
        s = []
        if boot_folder_name:
            s.append({"match": {"boot_folder_name": boot_folder_name}})
            s.append({"match": {"boot_folder_name.keyword": boot_folder_name}})
        if jenkins_project_name:
            s.append({"match": {"jenkins_project_name.keyword": jenkins_project_name}})
        if jenkins_build_no:
            s.append({"match": {"jenkins_build_number.keyword": jenkins_build_no}})
        # Create query
        if s:
            query = {
@@ -190,3 +219,9 @@ def github_release_stats(self, repo=None, tag=None, date=None):
            }
            for i in range(len(dates))
        }

if __name__ == "__main__":
    s = searches(server="10.116.110.150")
    b = s.boot_tests("zynq-zc702-adv7511-ad9361-fmcomms2-3", "HW_tests/HW_test_multiconfig", "1329")
    a = s.artifacts("zynq-zc702-adv7511-ad9361-fmcomms2-3", "HW_tests/HW_test_multiconfig", "1329")
    [print(entry) for entry in a]
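For reference, a sketch of the query body artifacts() assembles when all four filters are supplied; the values mirror the __main__ example above:

```python
# Sketch of the Elasticsearch bool query built by artifacts() when every
# filter is provided; field values are illustrative.
query = {
    "sort": [{"archive_date": {"order": "desc"}}],
    "query": {
        "bool": {
            "must": [
                {"match": {"target_board.keyword": "zynq-zc702-adv7511-ad9361-fmcomms2-3"}},
                {"match": {"job.keyword": "HW_tests/HW_test_multiconfig"}},
                {"match": {"job_no.keyword": "1329"}},
                {"match": {"artifact_info_type.keyword": "info_txt"}},
            ]
        }
    },
}
# Passed to es.search(index="artifacts", size=1000, body=query); hits come back
# newest first by archive_date.
```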
