author    Serge Broslavsky <serge.broslavsky@linaro.org>  2020-10-26 18:25:33 +0200
committer Serge Broslavsky <serge.broslavsky@linaro.org>  2020-10-26 18:25:33 +0200
commit    ad7f3fbf17ffc40b2d71a79e21c5cc0f9095a43a (patch)
tree      70d44def034122f5381e4cb6f685c04a14b830e2
parent    261c5470a8bf0c878d1e7bcef36dcd6bcbd4423a (diff)
download  lkft-bucket-ad7f3fbf17ffc40b2d71a79e21c5cc0f9095a43a.tar.gz
WIP: adding the report generation code
-rw-r--r--  stable_report.py | 965
1 file changed, 870 insertions(+), 95 deletions(-)
diff --git a/stable_report.py b/stable_report.py
index 429d086..eef4647 100644
--- a/stable_report.py
+++ b/stable_report.py
@@ -1,67 +1,404 @@
#!/usr/bin/python3
+# import boto3
+import argparse
import re
import os
import os.path
import sys
import json
+import jinja2
+import datetime
+import mailbox
+from requests.compat import urljoin
from squad_client.core.api import SquadApi
-from squad_client.core.models import Squad
+from squad_client.core.models import Squad, Project
+"""LKFT Stable Report
+Environment variables accepted
+------------------------------
+
+ AWS_MODE - to be set when deployed in AWS (any value will do)
+ AWS_DYNAMODB_REGION - region to be used for AWS's dynamodb
+ AWS_DYNAMODB_TABLE - name of the table to be used in dynamodb
+ LKFT_SCAN_DEPTH - how many already scanned builds to check when
+ looking for skipped ones
+ LKFT_PROJECT_LIST - list of projects (and, possibly, builds) to process;
+ if not specified - pattern matching is used
+ against the list of projects in squad and builds
+ are checked against a dynamodb table
+"""
+
+
+# set to any value if running in an AWS environment
+ENV_AWS_MODE = "AWS_MODE"
+
+# name of the environment variable that has dynamodb region name
+ENV_AWS_DB_REGION = "AWS_DYNAMODB_REGION"
+
+# name of the environment variable that has dynamodb table name
+ENV_AWS_DB_TABLE = "AWS_DYNAMODB_TABLE"
+
+# current node index (1-based) that is set by gitlab for parallel jobs
ENV_PARALLEL_JOB_ID = "CI_NODE_INDEX"
+
+# total number of parallel jobs that is set by gitlab for parallel jobs
+ENV_PARALLEL_JOB_TOTAL = "CI_NODE_TOTAL"
+
+# directory where all the reporting jobs are stored
DIRECTORY_JOBS = "jobs"
+
+# directory where all the report files are stored
DIRECTORY_REPORTS = "reports"
-def already_processed(project, build, prev_build):
- """
- Returns True if this combination of the builds for this project have
- already been processed.
+# number of already reported builds to check for skipped builds
+ENV_SCAN_DEPTH = "LKFT_SCAN_DEPTH"
- TODO: This function needs to be reimplemented for AWS deployment.
- """
+# list of projects (and exact builds) to process
+# the format is:
+# (<project>(':'<build>','<prev-build>(','<base-build>)?':')*';')+
+#
+# if not set - all the projects matching REGEXP_STABLE_PROJECT will be processed
+# and builds that have not yet been reported will be processed, automatically finding the suitable previous and base builds
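+#
+# example (hypothetical project and build names):
+#   LKFT_PROJECT_LIST="linux-stable-rc-linux-5.8.y:v5.8.14-126-g63a345a56136,v5.8.14:;linux-stable-rc-linux-5.4.y"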
+ENV_PROJECT_LIST = "LKFT_PROJECT_LIST"
+
+# result returned by is_already_processed() when the check itself fails
+# (e.g. missing configuration); True errs on the side of treating the
+# build as processed rather than sending a duplicate report
+RESULT_WHEN_FAILED_TO_CHECK_IF_PROCESSED = True
+
+
+# projects matching this regexp will be scanned for builds
+REGEXP_STABLE_PROJECT = r"^linux-stable-rc-linux-([\d]+)\.([\d]+)\.y$"
+
+# regexp to parse build names
+REGEXP_STABLE_BUILD = r"^v(?P<major>\d+)\.(?P<minor>\d+)(?:\.(?P<patch>\d+))?(?:-(?P<count>\d+)-g(?P<hash>[\da-f]+))?$"
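+# e.g. "v5.8.14" (a release with no patches on top) or
+# "v5.8.14-126-g63a345a56136" (126 patches on top of v5.8.14)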
+
+
+# parse suite url to get suite id
+# https://qa-reports.linaro.org/api/suites/66446/
+REGEXP_SUITE_URL = r"^.*?://[^/]+/api/suites/(?P<id>\d+).*$"
+
+# deployed to AWS - enable all the bells and whistles
+if os.environ.get(ENV_AWS_MODE):
+
+ import boto3
+ from botocore.exceptions import ClientError
+
+ print("Operation mode: deployed to AWS")
+
+ def is_already_processed(project, builds, build):
+ """
+        Returns True if this combination of builds for this project has
+        already been processed.
+ """
+
+ region = os.environ.get(ENV_AWS_DB_REGION)
+ if not region:
+ print("{0} should be set to your region".format(ENV_AWS_DB_REGION))
+ return RESULT_WHEN_FAILED_TO_CHECK_IF_PROCESSED
+
+ dynamodb = boto3.resource("dynamodb", region_name=region)
+ if not dynamodb:
+ print("Unable to get a dynamodb object")
+ return RESULT_WHEN_FAILED_TO_CHECK_IF_PROCESSED
+
+ table_name = os.environ.get(ENV_AWS_DB_TABLE)
+ if not table_name:
+ print("{0} should be set to your region".format(ENV_AWS_DB_TABLE))
+ return RESULT_WHEN_FAILED_TO_CHECK_IF_PROCESSED
+
+ table = dynamodb.Table(table_name)
+ if not table:
+ print("Unable to get a dynamodb table object")
+ return RESULT_WHEN_FAILED_TO_CHECK_IF_PROCESSED
+
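+        # the table is assumed to use ProjectSlug as the partition key and
+        # BuildVersion as the sort key; get_item returns an "Item" entry
+        # only when a matching record exists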
+ response = table.get_item(
+ Key={"ProjectSlug": project.slug, "BuildVersion": build.version}
+ )
+
+ return "Item" in response
- return False
+ def set_already_processed(project, builds, build):
+ """
+ Flags the build as already processed.
+ """
+ region = os.environ.get(ENV_AWS_DB_REGION)
+ if not region:
+ print("{0} should be set to your region".format(ENV_AWS_DB_REGION))
+ return
-# TODO: remove when deploying to AWS
-g_last_scanned_builds = {}
+ dynamodb = boto3.resource("dynamodb", region_name=region)
+ if not dynamodb:
+ print("Unable to get a dynamodb object")
+ return
+ table_name = os.environ.get(ENV_AWS_DB_TABLE)
+ if not table_name:
+ print("{0} should be set to your region".format(ENV_AWS_DB_TABLE))
+ return
-def last_scanned(project):
+ table = dynamodb.Table(table_name)
+ if not table:
+ print("Unable to get a dynamodb table object")
+ return
+
+ record = {
+ "ProjectSlug": project.slug,
+ "BuildVersion": build.version,
+ "ReportName": "stable",
+ "SendDateTime": str(datetime.datetime.now()),
+ "PipelineUrl": os.environ.get("CI_PIPELINE_URL"),
+ }
+
+ table.put_item(Item=record)
+
+
+# not deployed to AWS - limited functionality
+else: # not os.environ.get(ENV_AWS_MODE)
+
+ print("Operation mode: standalone")
+
+ def is_already_processed(project, builds, build):
+ """
+        Returns True if this combination of builds for this project has
+        already been processed.
+ """
+
+        # treat only the most recent build as not yet processed
+ if builds.builds[0].version == build.version:
+ return False
+
+ return True
+
+ def set_already_processed(project, builds, build):
+ """
+ Flags the build as already processed.
+ """
+
+ ### Dummy implementation
+ return
+
+
+def list_builds(group):
+ """
+ List all the builds for each of the projects.
"""
- Returns the build version of the last scanned build for the project.
- TODO: This function needs to be reimplemented for AWS deployment.
+ group_projects = group.projects()
+ re_stable = re.compile(REGEXP_STABLE_PROJECT)
+ for project in group_projects.values():
+ if project.is_archived:
+ continue
+ match = re_stable.match(project.slug)
+ if not match:
+ continue
+
+ builds = project.builds(status__finished=True)
+ print("{0}: {1}".format(project.slug, project.name))
+ for build_id, build in builds.items():
+ build_details_url = urljoin(
+ SquadApi.url, "%s/%s/build/%s" % (group.slug, project.slug, build.version)
+ )
+ print(
+ " - {0}: {1} ({2}) {3}".format(
+ build_id, build.version, build.datetime, build_details_url
+ )
+ )
+
+ print()
+
+
+class BuildCollection(object):
"""
+ Build collection that allows working with a set of builds.
+ """
+
+ class Iterator(object):
+ """
+        Build collection iterator that allows peeking at non-current items.
+ """
- global g_last_scanned_builds
+ def __init__(self, collection, offset=None):
+ self.collection = collection
+ self.pointer = (
+                (offset if offset is not None else 0) if len(collection.builds) else None
+ )
- builds = project.builds(status__finished=True)
- build_ids = (*builds,)
+ def __iter__(self):
+ return self
- if project not in g_last_scanned_builds:
- g_last_scanned_builds[project.slug] = (
- builds[build_ids[2]].version if len(build_ids) > 2 else None
- )
+ def __next__(self):
+ item = self.next()
+            if item is None:
+ raise StopIteration()
+
+ return item
+
+ def __len__(self):
+            if self.pointer is None:
+ return 0
+
+ return len(self.collection.builds) - self.pointer
+
+ def clone(self):
+ return BuildCollection.Iterator(self.collection, self.pointer)
+
+ def next(self):
+            if self.pointer is None:
+ return None
+
+ item = self.collection.builds[self.pointer]
+ self.pointer += 1
+ if self.pointer >= len(self.collection.builds):
+ self.pointer = None
+ return item
+
+ def peek(self, offset=0):
+            if self.pointer is None:
+ return None
+
+ peek_at = self.pointer + offset
+ if peek_at < 0 or peek_at >= len(self.collection.builds):
+ return None
+
+ return self.collection.builds[peek_at]
+
+ def reset(self):
+ self.pointer = None if not len(self.collection.builds) else 0
- return g_last_scanned_builds.get(project.slug, None)
+ # end of class Iterator
-def set_last_scanned(project, build):
+ def __init__(self, project):
+ self.project = project
+ builds = project.builds(status__finished=True)
+ self.by_version = {}
+ self.builds = []
+ if builds:
+ for build_id, build in builds.items():
+ self.by_version[build.version] = build
+ self.builds.append(build)
+
+ def __iter__(self):
+ return self.Iterator(self)
+
+ def values(self):
+ return self.Iterator(self)
+
+    def build(self, version_name):
+ return self.by_version.get(version_name)
+
+ def __len__(self):
+ return len(self.builds)
+
+
+# end of class BuildCollection
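+# A minimal usage sketch (assuming a squad_client Project object named
+# `project`, and that builds come back newest-first as the code above
+# expects); peek() looks ahead without consuming:
+#
+#   collection = BuildCollection(project)
+#   build_iter = iter(collection)
+#   newest = next(build_iter)        # most recent finished build
+#   previous = build_iter.peek()     # the build just before it, not consumed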
+
+
+def get_report_setup(group, project, build_to_report, build_iter):
"""
- Set the last scanned build for the project.
+ Build a dictionary with setup for a single report.
- TODO: This function needs to be reimplemented for AWS deployment.
+    First, take the previous (time-wise) build as the 'previous' build.
+    Then find the last non-patched build as the 'base' build;
+    if none is found, use the last patched build of the previous version.
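+
+    Example (hypothetical versions): when reporting v5.8.14-126-g<hash>
+    (patches on top), 'previous' is simply the next build in the
+    collection, and 'base' is v5.8.14 if present, otherwise the latest
+    build of the previous version.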
"""
- global g_last_scanned_builds
+
+ match_to_build = re.match(REGEXP_STABLE_BUILD, build_to_report.version)
+ if not match_to_build:
+ print(
+ "Build {0} of project {1} did not match the regexp: {2}".format(
+                build_to_report.version, project.slug, REGEXP_STABLE_BUILD
+ )
+ )
+ return None
+
+ prev_build = build_iter.peek()
+
+ setup = {
+ "group": group.slug,
+ "project": project.slug,
+ "build": build_to_report.version,
+ "prev_build": prev_build.version if prev_build else None,
+ "base_build": None,
+ }
+
+ # should the base build be the same version (with no patches on top)
+ same_version = match_to_build.group("count")
+ version_to_search_in = None
+
+ # find base build if possible
+ for build in build_iter.clone():
+ match = re.match(REGEXP_STABLE_BUILD, build.version)
+
+        # if build_to_report has patches on top
+        # search for the current version's build with no patches on top
+        # or the last one with patches on top from the previous version
+
+ if same_version:
+ # search within the same version
+ if (
+ match.group("major") == match_to_build.group("major")
+ and match.group("minor") == match_to_build.group("minor")
+ and match.group("patch") == match_to_build.group("patch")
+ ):
+
+ # found a no-patches-on-top version
+ if not match.group("count"):
+ setup["base_build"] = build.version
+ return setup
+
+            # previous version - pick the latest build
+ else:
+ setup["base_build"] = build.version
+ return setup
+
+        # if build_to_report is a no-patches-on-top build
+        # search for the previous version's build with no patches on top
+        # or the last one with patches on top from two versions back
+
+ else:
+ # search within the previous version
+ if (
+ match.group("major") == match_to_build.group("major")
+ and match.group("minor") == match_to_build.group("minor")
+ and match.group("patch") == match_to_build.group("patch")
+ ):
+ continue
+
+ if not version_to_search_in:
+ version_to_search_in = (
+ match.group("major"),
+ match.group("minor"),
+ match.group("patch"),
+ )
+
+ if (
+ match.group("major") == version_to_search_in[0]
+ and match.group("minor") == version_to_search_in[1]
+ and match.group("patch") == version_to_search_in[2]
+ ):
+ if not match.group("count"):
+ setup["base_build"] = build.version
+ return setup
+ else:
+ continue
+
+ setup["base_build"] = build.version
+ return setup
- return g_last_scanned_builds.get(project.slug, None)
+ return setup
-def build_jobs(group):
+def scan_builds(group):
"""
Prepare jobs for the report generation.
"""
@@ -77,57 +414,184 @@ def build_jobs(group):
)
)
- re_stable = re.compile("^linux-stable-(?:rc-)?(linux-[\d.]+y)$")
- projects = group.projects()
- for project_id in projects:
- project = projects[project_id]
- if project.is_archived:
- continue
- match = re_stable.match(project.slug)
- if not match:
- continue
+ try:
+ os.mkdir(DIRECTORY_REPORTS)
+ except FileExistsError:
+ pass
+    except Exception:
print(
- "{0}:\n name: {1}\n url: {2}".format(
- project.slug, project.name, project.url
+ "Exception creating the job directory '{0}': {1}".format(
+ DIRECTORY_JOBS, sys.exc_info()[0]
)
)
- last_scanned_build = last_scanned(project)
- jobs = []
- builds = project.builds(status__finished=True)
- build_ids = (*builds,)
- nbuilds = len(build_ids)
- if nbuilds == 1:
- return # nothing to compare against
- found_last_scanned = False
+ try:
+ os.remove("{0}/overview.txt".format(DIRECTORY_REPORTS))
+ except FileNotFoundError:
+ pass
- for idx in reversed(range(nbuilds - 1)):
- build_id = build_ids[idx]
- build = builds[build_id]
+ group_projects = group.projects()
+ group_projects_by_slug = {
+ project.slug: project for project in group_projects.values()
+ }
+ projects_to_process = []
- if not found_last_scanned:
- found_last_scanned = build.version == last_scanned_build
- continue
+ project_defs = os.environ.get(ENV_PROJECT_LIST)
+ # we have the list from the envvar with the following format:
+ # (<project>(':'<build>','<prev-build>(','<base-build>)?':')*';')+
+ if project_defs:
+ print("List of projects provided in the envvar {0}".format(ENV_PROJECT_LIST))
+        for project_definition in project_defs.split(";"):
+ project_definition = project_definition.strip()
- prev_build_id = build_ids[idx + 1]
- prev_build = builds[prev_build_id]
- if not already_processed(project, build, prev_build):
- jobs.append(
- {
- "group": group.slug,
- "project": project.slug,
- "build": build.version,
- "prev_build": prev_build.version,
- }
+ project_name, _, project_builds = project_definition.partition(":")
+
+ project = group_projects_by_slug.get(project_name)
+ if not project:
+ print(
+ "Envvar {0} has '{1}' listed, which is not a known project. Skipped.".format(
+ ENV_PROJECT_LIST, project_name
+ )
)
+ continue
+
+ project.build_collection = BuildCollection(project)
+ project.builds_to_process = []
+
+ project_builds = project_builds.strip()
+
+            for project_build in project_builds.split(":"):
+ project_build = project_build.strip()
+ if not project_build:
+ break
+
+ build_versions = project_build.split(",")
+
+ if not len(build_versions):
+ print(
+ "Envvar {0} has {1} listed with no build. Picking the builds automatically.".format(
+ ENV_PROJECT_LIST, project_name
+ )
+ )
+ projects_to_process.append(project)
+ continue
+
+ build = project.build_collection.build(build_versions[0])
+ if not build:
+ print(
+ "Envvar {0} has {1} listed as build for project {2} and that is not a known build. Project skipped.".format(
+                        ENV_PROJECT_LIST, build_versions[0], project.slug
+ )
+ )
+ continue
+
+ report_setup = {
+ "group": group.slug,
+ "project": project.slug,
+ "build": build_version[0],
+ "prev_build": None,
+ "base_build": None,
+ }
+
+ if len(build_versions) > 1:
+ build = project.build_collection.build(build_versions[1])
+ if not build:
+ print(
+ "Envvar {0} has {1} listed as build for project {2} and that is not a known build. Project skipped.".format(
+                            ENV_PROJECT_LIST, build_versions[1], project.slug
+ )
+ )
+ continue
+                report_setup["prev_build"] = build.version
+
+ if len(build_versions) > 2:
+ build = project.build_collection.build(build_versions[2])
+ if not build:
+ print(
+ "Envvar {0} has {1} listed as build for project {2} and that is not a known build. Project skipped.".format(
+                            ENV_PROJECT_LIST, build_versions[2], project.slug
+ )
+ )
+ continue
+                report_setup["base_build"] = build.version
+
+ project["builds_to_process"].append(report_setup)
+
+ if project.builds_to_process:
+ projects_to_process.append(project)
+
+ # get projects from the group projects matching the regexp
+ else:
+ print("Building the list of projects using pattern matching")
+ unsorted_projects = []
+ re_stable = re.compile(REGEXP_STABLE_PROJECT)
+ for project in group_projects.values():
+ if project.is_archived:
+ continue
+ match = re_stable.match(project.slug)
+ if not match:
+ continue
+ project.build_collection = BuildCollection(project)
+ project.builds_to_process = []
+ unsorted_projects.append(
+ (int(match.group(1)), int(match.group(2)), project)
+ )
+
+ projects_to_process = [
+ project
+ for major, minor, project in sorted(
+            unsorted_projects, key=lambda item: (item[0], item[1])
+ )
+ ]
- job_file_name = "{0}/{1}-{2}.txt".format(
- DIRECTORY_JOBS, project.slug, build.version
+ print(
+ "Projects to process: {0}".format(
+ ", ".join([project.slug for project in projects_to_process])
)
- if len(jobs):
- with open(job_file_name, "w") as fp:
- json.dump(jobs, fp)
- else:
+ )
+
+ # now add builds to process for those projects that have nothing set yet
+    scan_depth = int(os.environ.get(ENV_SCAN_DEPTH, "0"))
+
+ job_file_counter = 1
+ for project in projects_to_process:
+ if project.builds_to_process:
+ continue
+
+ if len(project.build_collection) == 1:
+ print(
+ "Project {0} has just one build so build {1} has nothing to compare against. Skipped.".format(
+                    project.slug, project.build_collection.builds[0].version
+ )
+ )
+ continue # nothing to compare against
+
+ iter_builds = project.build_collection.values()
+ for build in iter_builds:
+ if not is_already_processed(project, project.build_collection, build):
+ if not len(iter_builds): # no builds remaining
+ print(
+ "Build {0} of project {1} has no other builds to compare against. Skipped.".format(
+ build.version, project.slug
+ )
+ )
+ break
+
+ setup = get_report_setup(group, project, build, iter_builds)
+ if setup:
+ project.builds_to_process.insert(0, setup)
+ else:
+ if scan_depth <= 0:
+ break # stop searching for skipped builds
+ else:
+ scan_depth -= 1
+
+ # create job files
+ for project in projects_to_process:
+ if not project.builds_to_process:
+ with open("{0}/overview.txt".format(DIRECTORY_REPORTS), "a") as fp:
+ fp.write("Project {0} has no unreported builds\n".format(project.slug))
try:
os.remove(job_file_name)
except FileNotFoundError:
@@ -138,14 +602,45 @@ def build_jobs(group):
job_file_name, sys.exc_info()[0]
)
)
+ continue
+
+ job_file_name = "{0}/{1:02d}.{2}.txt".format(
+ DIRECTORY_JOBS, job_file_counter, project.slug
+ )
+ job_file_counter += 1
+ with open(job_file_name, "w") as fp:
+ json.dump(project.builds_to_process, fp)
-def process_jobs(group):
+def generate_reports(group):
"""
Generate reports as requested in "jobs/*.txt" files.
+
+    When run as gitlab parallel jobs (CI_NODE_INDEX / CI_NODE_TOTAL), each
+    worker processes its own "column" of the job files: worker N handles
+    files N, N + CI_NODE_TOTAL, N + 2 * CI_NODE_TOTAL, and so on.
+    """
- job_index = os.environ.get(ENV_PARALLEL_JOB_ID)
+ try:
+ worker_index = int(os.environ.get(ENV_PARALLEL_JOB_ID, "1"))
+ try:
+ worker_count = int(os.environ.get(ENV_PARALLEL_JOB_TOTAL, "1"))
+ except ValueError:
+ worker_index = 1
+ worker_count = 1
+ print(
+ "Envvar {0} has an invalid value: {1}. Defaulting to single worker.".format(
+ ENV_PARALLEL_JOB_TOTAL, os.environ.get(ENV_PARALLEL_JOB_TOTAL, "1")
+ )
+ )
+ except ValueError:
+ worker_index = 1
+ worker_count = 1
+ print(
+ "Envvar {0} has an invalid value: {1}. Defaulting to single worker.".format(
+ ENV_PARALLEL_JOB_ID, os.environ.get(ENV_PARALLEL_JOB_ID, "1")
+ )
+ )
try:
os.mkdir(DIRECTORY_REPORTS)
@@ -159,24 +654,31 @@ def process_jobs(group):
)
# get the list of job files
- job_files = os.listdir(DIRECTORY_JOBS)
+ try:
+        # sorted so that every parallel worker sees the same job order
+        job_files = sorted(os.listdir(DIRECTORY_JOBS))
+    except FileNotFoundError:
+        print(
+            "({0}) Job directory '{1}' not found".format(
+                worker_index, DIRECTORY_JOBS
+ )
+ )
+ return
+
if not job_files:
- print("({0}) No reporting jobs found".format(job_index))
+        print("({0}) No reporting jobs found".format(worker_index))
return
- if job_index != None:
- job_index = int(job_index)
+    # worker_index is already an int; start at this worker's first job
+    job_index = worker_index
- # run this specific job and we're done
+ # run all jobs for the worker
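+    # each worker strides through the job file list, e.g. with 5 job files
+    # and CI_NODE_TOTAL=2: worker 1 handles files 1, 3 and 5, while
+    # worker 2 handles files 2 and 4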
+ while job_index <= len(job_files):
job_file = os.path.join(DIRECTORY_JOBS, job_files[job_index - 1])
process_job_file(group, job_file, job_index)
-
- else:
- # run all jobs as we're not in parallel invironment
- for idx in range(len(job_files)):
- job_file = os.path.join(DIRECTORY_JOBS, job_files[idx])
- process_job_file(group, job_file)
- pass
+ job_index += worker_count
def process_job_file(group, job_file_name, job_index=None):
@@ -186,7 +688,7 @@ def process_job_file(group, job_file_name, job_index=None):
try:
with open(job_file_name, "r") as fd:
- job = fd.read()
+ json_text = fd.read()
except OSError as e:
print(
"{0}Unable to read job file '{1}': {2}".format(
@@ -198,35 +700,308 @@ def process_job_file(group, job_file_name, job_index=None):
return
try:
- job = json.loads(job)
+ setup_list = json.loads(json_text)
    except json.JSONDecodeError as e:
print(
"{0}Unable to parse job file '{1}': {2}".format(
"({0}) ".format(job_index) if job_index != None else "",
job_file_name,
- e.msg
+ e.msg,
)
)
return
- print(
- "{0}{1} - job:\n{2}".format(
- "({0}) ".format(job_index) if job_index != None else "",
- job_file_name,
- json.dumps(job, indent=2),
+ for setup in setup_list:
+ build_report(setup)
+
+
+def compare_builds(group, project, environments, suites, old_build, new_build):
+ comparison = Project.compare_builds(old_build.id, new_build.id)
+
+ fixes = {}
+ for e, s in comparison["fixes"].items():
+ f_e = {k: v for k, v in s.items() if k in [suites[id].slug for id in suites]}
+ if len(f_e):
+ fixes[e] = f_e
+
+ regressions = {}
+ for e, s in comparison["regressions"].items():
+ r_e = {k: v for k, v in s.items() if k in [suites[id].slug for id in suites]}
+ if len(r_e):
+ regressions[e] = r_e
+
+ build_details_url = urljoin(
+ SquadApi.url, "%s/%s/build/%s" % (group.slug, project.slug, new_build.version)
+ )
+
+ results = {}
+ summary = {}
+ for environment in environments.values():
+ results[environment.slug] = {}
+ summary[environment.slug] = {}
+
+ for suite in suites.values():
+ results[environment.slug][suite.slug] = []
+ summary[environment.slug][suite.slug] = {
+ "pass": 0,
+ "fail": 0,
+ "skip": 0,
+ "xfail": 0,
+ }
+
+ test_runs = new_build.testruns(environment=environment.id).values()
+
+ for testrun in test_runs:
+ statuses = testrun.statuses(suite=suite.id).values()
+ tests = testrun.tests(suite=suite.id).values()
+
+ for status in statuses:
+                    summary[environment.slug][suite.slug]["pass"] += status.tests_pass
+                    summary[environment.slug][suite.slug]["fail"] += status.tests_fail
+                    summary[environment.slug][suite.slug]["skip"] += status.tests_skip
+                    summary[environment.slug][suite.slug]["xfail"] += status.tests_xfail
+
+ for test in tests:
+                    results[environment.slug][suite.slug].append(test)
+
+ template_env = jinja2.Environment(
+ extensions=["jinja2.ext.loopcontrols"],
+ loader=jinja2.FileSystemLoader("templates"),
+ trim_blocks=True,
+ lstrip_blocks=True,
+ )
+
+ text = template_env.get_template("body.txt.jinja").render(
+        build=new_build,
+        build_details_url=build_details_url,
+        prev_build=old_build,
+        environments=environments,
+ suites=suites,
+ regressions=regressions,
+ fixes=fixes,
+ results=results,
+ summary=summary,
+ )
+
+ return text
+
+
+def build_report(report_setup):
+ group = Squad().group(report_setup["group"])
+ if not group:
+ print(
+ "Unable to get group named {0} while generating report for {1} of {2}".format(
+ report_setup["group"], report_setup["build"], report_setup["project"]
+ )
)
+ return
+
+ project = group.project(report_setup["project"])
+ if not project:
+ print(
+ "Unable to get project named {0} for the group {1} while generating report for {2}".format(
+ report_setup["project"], report_setup["group"], report_setup["build"]
+ )
+ )
+ return
+
+ environments = project.environments()
+
+ suites = project.suites()
+
+ build = project.build(report_setup["build"])
+ if not build:
+ print(
+ "Unable to get build named {0} while generating report for {1} of {2}".format(
+ report_setup["build"], report_setup["build"], report_setup["project"]
+ )
+ )
+ return None
+
+ prev_build = project.build(report_setup["prev_build"])
+    if not prev_build:
+ print(
+ "Unable to get build named {0} while generating report for {1} of {2}".format(
+ report_setup["prev_build"],
+ report_setup["build"],
+ report_setup["project"],
+ )
+ )
+ return None
+
+ prev_report = compare_builds(
+ group, project, environments, suites, prev_build, build
)
+ if (
+ report_setup["base_build"]
+ and report_setup["prev_build"] != report_setup["base_build"]
+ ):
+ base_build = project.build(report_setup["base_build"])
+ if not base_build:
+ print(
+ "Unable to get build named {0} while generating report for {1} of {2}".format(
+ report_setup["base_build"],
+ report_setup["build"],
+ report_setup["project"],
+ )
+ )
+ return None
+
+        base_report = compare_builds(
+            group, project, environments, suites, base_build, build
+        )
+
+ else:
+ base_report = None
+
+ with open(
+ "{0}/{1}-{2}.txt".format(DIRECTORY_REPORTS, project.slug, new_build.version),
+ "w",
+ ) as f:
+ f.write(
+ "COMPARED AGAINST PREVIOUS BUILD ({0})\n================================\n\n".format(
+ prev_build.version
+ )
+ )
+ f.write(prev_report)
+ if base_report:
+ f.write(
+ "\n\nCOMPARED AGAINST BASE BUILD ({0})\n================================\n\n".format(
+ base_build.version
+ )
+ )
+ f.write(base_report)
+
-def build_report(group, project, build, prev_build):
+def send_reports(group):
pass
+def dump_object(obj, margin=""):
+ functions = []
+ for var_name in dir(obj):
+ if var_name.startswith("_"):
+ continue
+
+ value = getattr(obj, var_name)
+ if type(value).__name__ == "method" or type(value).__name__ == "function":
+ functions.append(var_name)
+ continue
+
+ if type(value) == str:
+ value = value.replace("\r", "").replace("\n", "↵")
+
+ print("{0} - '{1}': [{2}] '{3}'".format(margin, var_name, type(value).__name__, value))
+ if functions:
+ print("{0} - functions: {1}".format(margin, ", ".join(functions)))
+
+
+def debug(group):
+ project = group.project("linux-stable-rc-linux-5.8.y")
+ print("Project:")
+ dump_object(project)
+ suites = project.suites(count=1000)
+
+ #print("Suites ({0}):".format(len(suites)))
+ #for suite_id, suite in suites.items():
+ # print(" - {0} ({1}):".format(suite.slug, suite_id))
+ # dump_object(suite, " ")
+
+ environments = {}
+ suites = {}
+
+ build = project.build("v5.8.14-126-g63a345a56136")
+ print("\nBuild:")
+ dump_object(build)
+
+ testruns = build.testruns(count=1000)
+ print("\n Test runs ({0}):".format(len(testruns)))
+
+ for testrun_id, testrun in testruns.items():
+
+ print("\n - Testrun ({0}):".format(testrun_id))
+ dump_object(testrun, " ")
+ summary = testrun.summary()
+ print(" - Summary:")
+ dump_object(summary, " ")
+
+ tests = testrun.tests()
+ print("\n Tests ({0}):".format(len(tests)))
+
+        for test_id, test in tests.items():
+            print("\n   - Test ({0}):".format(test_id))
+            dump_object(test, "     ")
+
+            # count tests per suite inside the loop so that every test
+            # is counted, not just the last one per test run
+            if test.suite not in suites:
+                suites[test.suite] = 1
+            else:
+                suites[test.suite] += 1
+
+        # count tests per environment (the summary below reports tests)
+        if testrun.environment.slug not in environments:
+            environments[testrun.environment.slug] = len(tests)
+        else:
+            environments[testrun.environment.slug] += len(tests)
+
+ print("\nSuites ({0}):".format(len(suites.keys())))
+ sorted_suites = []
+ re_url = re.compile(REGEXP_SUITE_URL)
+ for suite_url, test_count in suites.items():
+ match = re_url.match(suite_url)
+ if not match:
+ sorted_suites.append((suite_url, test_count))
+ else:
+ sorted_suites.append((project.suite(match.group("id")), test_count))
+
+    sorted_suites = sorted(sorted_suites, key=lambda item: str(item[0]))
+
+ for suite, test_count in sorted_suites:
+ print(" - {0}: {1} tests".format(suite, test_count))
+
+ print("\nEnvironments ({0}):".format(len(environments.keys())))
+ sorted_environments = sorted(((name, count) for name, count in environments.items()), key=lambda item: item[0])
+ for environment, test_count in sorted_environments:
+ print(" - {0}: {1} tests".format(environment, test_count))
+
+
def main():
- SquadApi.configure(url="https://qa-reports.linaro.org/")
- group = Squad().group("lkft")
- #build_jobs(group)
- process_jobs(group)
+ ap = argparse.ArgumentParser(description="LKFT stable report")
+ ap.add_argument(
+ "stages",
+ metavar="stage",
+ type=str,
+ nargs="+",
+ choices=("scan", "generate", "send", "list", "debug"),
+ help="mention which stage(s) to run",
+ )
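+    # a typical invocation might run several stages in sequence, e.g.:
+    #   ./stable_report.py scan generate send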
+
+ args = ap.parse_args()
+
+ if len(args.stages):
+ SquadApi.configure(url="https://qa-reports.linaro.org/")
+ group = Squad().group("lkft")
+ for stage in args.stages:
+ function = {
+ "scan": scan_builds,
+ "generate": generate_reports,
+ "send": send_reports,
+ "list": list_builds,
+ "debug": debug,
+ }.get(stage)
+
+ if not function:
+ print("Unknown stage: {0}, skipped".format(stage))
+ continue
+
+ function(group)
+
return