From b20f0f4066c6388ee0b3038514c9dc41452dc24c Mon Sep 17 00:00:00 2001 From: pettershao-ragilenetworks <81281940+pettershao-ragilenetworks@users.noreply.github.com> Date: Sat, 6 Nov 2021 01:59:12 +0800 Subject: [PATCH] Gcov for swss daemon (#1737) * [gcov] swss support gcov * Introduce gcov-related automation script to test coverage rate for swss module Signed-off-by: pettershao-ragilenetworks --- .azure-pipelines/build-template.yml | 7 + .azure-pipelines/docker-sonic-vs/Dockerfile | 4 + .azure-pipelines/gcov.yml | 114 +++++ .../test-docker-sonic-vs-template.yml | 29 +- Makefile.am | 7 +- azure-pipelines.yml | 20 + cfgmgr/Makefile.am | 17 + configure.ac | 19 + debian/rules | 10 + fdbsyncd/Makefile.am | 3 + fpmsyncd/Makefile.am | 4 + gcovpreload/Makefile | 66 +++ gcovpreload/gcovpreload.c | 43 ++ gcovpreload/lcov_cobertura.py | 407 +++++++++++++++++ gearsyncd/Makefile.am | 6 +- mclagsyncd/Makefile.am | 6 +- natsyncd/Makefile.am | 4 + neighsyncd/Makefile.am | 3 + orchagent/Makefile.am | 6 + portsyncd/Makefile.am | 3 + swssconfig/Makefile.am | 5 + teamsyncd/Makefile.am | 4 + tests/gcov_support.sh | 413 ++++++++++++++++++ tests/test_sflow.py | 6 +- tlm_teamd/Makefile.am | 4 + 25 files changed, 1203 insertions(+), 7 deletions(-) create mode 100644 .azure-pipelines/gcov.yml create mode 100644 gcovpreload/Makefile create mode 100644 gcovpreload/gcovpreload.c create mode 100755 gcovpreload/lcov_cobertura.py create mode 100755 tests/gcov_support.sh diff --git a/.azure-pipelines/build-template.yml b/.azure-pipelines/build-template.yml index b708e945a0ec..296124475e90 100644 --- a/.azure-pipelines/build-template.yml +++ b/.azure-pipelines/build-template.yml @@ -40,6 +40,10 @@ parameters: type: boolean default: false +- name: archive_gcov + type: boolean + default: false + jobs: - job: displayName: ${{ parameters.arch }} @@ -126,6 +130,9 @@ jobs: set -x tar czf pytest.tgz tests cp -r pytest.tgz $(Build.ArtifactStagingDirectory)/ + if [ '${{ parameters.archive_gcov }}' == True ]; then + export ENABLE_GCOV=y + fi ./autogen.sh dpkg-buildpackage -us -uc -b -j$(nproc) && cp ../*.deb . 
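+          # debian/rules sees ENABLE_GCOV=y and reconfigures the build with --enable-gcov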
displayName: "Compile sonic swss" diff --git a/.azure-pipelines/docker-sonic-vs/Dockerfile b/.azure-pipelines/docker-sonic-vs/Dockerfile index 6feb8a7dfe9e..f288c8fdaafc 100644 --- a/.azure-pipelines/docker-sonic-vs/Dockerfile +++ b/.azure-pipelines/docker-sonic-vs/Dockerfile @@ -24,3 +24,7 @@ RUN dpkg -i /debs/syncd-vs_1.0.0_amd64.deb RUN dpkg --purge swss RUN dpkg -i /debs/swss_1.0.0_amd64.deb + +RUN apt-get update + +RUN apt-get -y install lcov diff --git a/.azure-pipelines/gcov.yml b/.azure-pipelines/gcov.yml new file mode 100644 index 000000000000..e58ee2b0a553 --- /dev/null +++ b/.azure-pipelines/gcov.yml @@ -0,0 +1,114 @@ +parameters: +- name: arch + type: string + values: + - amd64 + default: amd64 + +- name: pool + type: string + values: + - sonicbld + - default + default: default + +- name: timeout + type: number + default: 180 + +- name: sonic_slave + type: string + +- name: sairedis_artifact_name + type: string + +- name: swss_common_artifact_name + type: string + +- name: swss_artifact_name + type: string + +- name: artifact_name + type: string + +- name: archive_gcov + type: boolean + default: false + +jobs: +- job: + displayName: ${{ parameters.arch }} + timeoutInMinutes: ${{ parameters.timeout }} + + pool: + ${{ if ne(parameters.pool, 'default') }}: + name: ${{ parameters.pool }} + ${{ if eq(parameters.pool, 'default') }}: + vmImage: 'ubuntu-20.04' + + container: + image: sonicdev-microsoft.azurecr.io:443/${{ parameters.sonic_slave }}:latest + + + steps: + - script: | + sudo apt-get install -y lcov + displayName: "Install dependencies" + condition: eq('${{ parameters.archive_gcov }}', true) + - task: DownloadPipelineArtifact@2 + inputs: + artifact: ${{ parameters.artifact_name }} + displayName: "Download gcov artifact" + condition: eq('${{ parameters.archive_gcov }}', true) + - script: | + set -x + ls -lh + sudo tar -zxvf sonic-gcov.tar.gz + sudo cp -rf sonic-gcov $(Build.ArtifactStagingDirectory) + sudo rm sonic-gcov.tar.gz + ls -lh + workingDirectory: $(Pipeline.Workspace) + displayName: "store sonic-gcov" + condition: eq('${{ parameters.archive_gcov }}', true) + - checkout: self + submodules: true + condition: eq('${{ parameters.archive_gcov }}', true) + - script: | + set -x + wget https://packages.microsoft.com/config/debian/10/packages-microsoft-prod.deb -O packages-microsoft-prod.deb + sudo dpkg -i packages-microsoft-prod.deb + sudo apt-get update + sudo apt-get install -y dotnet-sdk-5.0 + pushd ./s/ + sudo tar -zcvf swss.tar.gz ./* + ls -lh ./* + cp ./gcovpreload/lcov_cobertura.py $(Build.ArtifactStagingDirectory)/ + cp ./tests/gcov_support.sh $(Build.ArtifactStagingDirectory)/sonic-gcov + sudo cp -rf swss.tar.gz $(Build.ArtifactStagingDirectory)/sonic-gcov + popd + sudo cp -rf $(Build.ArtifactStagingDirectory)/sonic-gcov ./ + pushd sonic-gcov + ls -lh ./* + sudo chmod +x ./gcov_support.sh + sudo ./gcov_support.sh generate + sudo ./gcov_support.sh merge_container_info $(Build.ArtifactStagingDirectory) + sudo cp -rf gcov_output $(Build.ArtifactStagingDirectory) + ls -lh $(Build.ArtifactStagingDirectory) + popd + workingDirectory: $(Pipeline.Workspace) + displayName: "generate gcov reports" + condition: eq('${{ parameters.archive_gcov }}', true) + + - publish: $(Build.ArtifactStagingDirectory)/gcov_output + artifact: gcov_info + displayName: "Archive gcov info" + condition: eq('${{ parameters.archive_gcov }}', true) + + - task: PublishCodeCoverageResults@1 + inputs: + codeCoverageTool: Cobertura + summaryFileLocation: 
'$(Build.ArtifactStagingDirectory)/gcov_output/AllMergeReport/coverage.xml' + reportDirectory: '$(Build.ArtifactStagingDirectory)/gcov_output/AllMergeReport/' + displayName: 'Publish c c++ test coverage' + condition: eq('${{ parameters.archive_gcov }}', true) + diff --git a/.azure-pipelines/test-docker-sonic-vs-template.yml b/.azure-pipelines/test-docker-sonic-vs-template.yml index f56890aa5626..237778af4a17 100644 --- a/.azure-pipelines/test-docker-sonic-vs-template.yml +++ b/.azure-pipelines/test-docker-sonic-vs-template.yml @@ -6,6 +6,16 @@ parameters: - name: log_artifact_name type: string +- name: gcov_artifact_name + type: string + +- name: sonic_slave + type: string + +- name: archive_gcov + type: boolean + default: false + jobs: - job: displayName: vstest @@ -53,7 +63,12 @@ jobs: sudo /sbin/ip link add Vrf1 type vrf table 1001 || { echo 'vrf command failed' ; exit 1; } sudo /sbin/ip link del Vrf1 type vrf table 1001 pushd tests - sudo py.test -v --force-flaky --junitxml=tr.xml --imgname=docker-sonic-vs:$(Build.DefinitionName).$(Build.BuildNumber) + + if [ '${{ parameters.archive_gcov }}' == True ]; then + sudo py.test -v --force-flaky --junitxml=tr.xml --keeptb --imgname=docker-sonic-vs:$(Build.DefinitionName).$(Build.BuildNumber) + else + sudo py.test -v --force-flaky --junitxml=tr.xml --imgname=docker-sonic-vs:$(Build.DefinitionName).$(Build.BuildNumber) + fi displayName: "Run vs tests" - task: PublishTestResults@2 @@ -64,9 +79,21 @@ jobs: - script: | cp -r tests/log $(Build.ArtifactStagingDirectory)/ + + if [ '${{ parameters.archive_gcov }}' == True ]; then + sudo apt-get install -y lcov + ./tests/gcov_support.sh set_environment $(Build.ArtifactStagingDirectory) + docker stop $(docker ps -q -a) + docker rm $(docker ps -q -a) + fi displayName: "Collect logs" condition: always() + - publish: $(Build.ArtifactStagingDirectory)/gcov_tmp + artifact: ${{ parameters.gcov_artifact_name }} + displayName: "Publish gcov output" + condition: eq('${{ parameters.archive_gcov }}', true) + - publish: $(Build.ArtifactStagingDirectory)/ artifact: ${{ parameters.log_artifact_name }}@$(System.JobAttempt) displayName: "Publish logs" diff --git a/Makefile.am b/Makefile.am index da3dcb319f4a..757db0d8d632 100644 --- a/Makefile.am +++ b/Makefile.am @@ -1,4 +1,9 @@ -SUBDIRS = fpmsyncd neighsyncd fdbsyncd portsyncd mclagsyncd natsyncd orchagent swssconfig cfgmgr tests gearsyncd +if GCOV_ENABLED +SUBDIRS = gcovpreload fpmsyncd neighsyncd portsyncd mclagsyncd natsyncd fdbsyncd orchagent swssconfig cfgmgr tests gearsyncd +else +SUBDIRS = fpmsyncd neighsyncd portsyncd mclagsyncd natsyncd fdbsyncd orchagent swssconfig cfgmgr tests gearsyncd +endif + if HAVE_LIBTEAM SUBDIRS += teamsyncd tlm_teamd diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 52a7010ae823..1bbc3e3b1a10 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -22,6 +22,7 @@ stages: sairedis_artifact_name: sonic-sairedis artifact_name: sonic-swss archive_pytests: true + archive_gcov: true - stage: BuildArm dependsOn: Build @@ -38,6 +39,7 @@ stages: swss_common_artifact_name: sonic-swss-common.armhf sairedis_artifact_name: sonic-sairedis.armhf artifact_name: sonic-swss.armhf + archive_gcov: false - template: .azure-pipelines/build-template.yml parameters: @@ -50,6 +52,7 @@ stages: buildimage_pipeline: 140 sairedis_artifact_name: sonic-sairedis.arm64 artifact_name: sonic-swss.arm64 + archive_gcov: false - stage: BuildDocker dependsOn: Build @@ -69,3 +72,20 @@ stages: - template: .azure-pipelines/test-docker-sonic-vs-template.yml 
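+  # with archive_gcov set, this template runs pytest with --keeptb so the test
+  # containers (and the .gcda files inside them) stay alive for collection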
parameters: log_artifact_name: log + gcov_artifact_name: sonic-gcov + sonic_slave: sonic-slave-buster + archive_gcov: true + +- stage: Gcov + dependsOn: Test + condition: always() + jobs: + - template: .azure-pipelines/gcov.yml + parameters: + arch: amd64 + sonic_slave: sonic-slave-buster + swss_common_artifact_name: sonic-swss-common + sairedis_artifact_name: sonic-sairedis + swss_artifact_name: sonic-swss + artifact_name: sonic-gcov + archive_gcov: true diff --git a/cfgmgr/Makefile.am b/cfgmgr/Makefile.am index d55f6b9a52ac..dcd652498c51 100644 --- a/cfgmgr/Makefile.am +++ b/cfgmgr/Makefile.am @@ -87,3 +87,20 @@ macsecmgrd_SOURCES = macsecmgrd.cpp macsecmgr.cpp $(top_srcdir)/orchagent/orch.c macsecmgrd_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) $(CFLAGS_SAI) macsecmgrd_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) $(CFLAGS_SAI) macsecmgrd_LDADD = $(COMMON_LIBS) $(SAIMETA_LIBS) + +if GCOV_ENABLED +vlanmgrd_LDADD += -lgcovpreload +teammgrd_LDADD += -lgcovpreload +portmgrd_LDADD += -lgcovpreload +intfmgrd_LDADD+= -lgcovpreload +buffermgrd_LDADD += -lgcovpreload +vrfmgrd_LDADD += -lgcovpreload +nbrmgrd_LDADD += -lgcovpreload +vxlanmgrd_LDADD += -lgcovpreload +sflowmgrd_LDADD += -lgcovpreload +natmgrd_LDADD += -lgcovpreload +coppmgrd_LDADD += -lgcovpreload +tunnelmgrd_LDADD += -lgcovpreload +macsecmgrd_LDADD += -lgcovpreload +endif + diff --git a/configure.ac b/configure.ac index edca67de7c29..81ee0a50176a 100644 --- a/configure.ac +++ b/configure.ac @@ -102,6 +102,25 @@ CFLAGS_COMMON+=" -Wno-switch-default" CFLAGS_COMMON+=" -Wno-long-long" CFLAGS_COMMON+=" -Wno-redundant-decls" +# Code testing coverage with gcov +AC_MSG_CHECKING(whether to build with gcov testing) +AC_ARG_ENABLE(gcov, AS_HELP_STRING([--enable-gcov], [Whether to enable gcov testing]),, enable_gcov=no) + +if test "x$enable_gcov" = "xyes"; then + if test "$GCC" = "no"; then + AC_MSG_ERROR(not compiling with gcc, which is required for gcov testing) + fi + + CFLAGS_COMMON+=" -fprofile-arcs -ftest-coverage" + AC_SUBST(CFLAGS_COMMON) + + LDFLAGS+=" -fprofile-arcs -lgcov" + AC_SUBST(LDFLAGS) +fi + +AM_CONDITIONAL(GCOV_ENABLED, test x$enable_gcov = xyes) +AC_MSG_RESULT($enable_gcov) + AC_SUBST(CFLAGS_COMMON) AC_CONFIG_FILES([ diff --git a/debian/rules b/debian/rules index 6c77564e011d..a8a8b835fba6 100755 --- a/debian/rules +++ b/debian/rules @@ -27,8 +27,18 @@ include /usr/share/dpkg/default.mk # dh_auto_configure -- \ # -DCMAKE_LIBRARY_PATH=$(DEB_HOST_MULTIARCH) +ifeq ($(ENABLE_GCOV), y) +override_dh_auto_configure: + dh_auto_configure -- --enable-gcov +endif + override_dh_auto_install: dh_auto_install --destdir=debian/swss +ifeq ($(ENABLE_GCOV), y) + mkdir -p debian/swss/tmp/gcov + sh ./tests/gcov_support.sh collect swss +endif override_dh_strip: dh_strip --dbg-package=swss-dbg + diff --git a/fdbsyncd/Makefile.am b/fdbsyncd/Makefile.am index 06beefaf2206..4ab2f5dddd65 100644 --- a/fdbsyncd/Makefile.am +++ b/fdbsyncd/Makefile.am @@ -14,3 +14,6 @@ fdbsyncd_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) $(COV_CFLAGS) fdbsyncd_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) $(COV_CFLAGS) fdbsyncd_LDADD = -lnl-3 -lnl-route-3 -lswsscommon $(COV_LDFLAGS) +if GCOV_ENABLED +fdbsyncd_LDADD += -lgcovpreload +endif diff --git a/fpmsyncd/Makefile.am b/fpmsyncd/Makefile.am index 75b0854e9523..ef709db87670 100644 --- a/fpmsyncd/Makefile.am +++ b/fpmsyncd/Makefile.am @@ -13,3 +13,7 @@ fpmsyncd_SOURCES = fpmsyncd.cpp fpmlink.cpp routesync.cpp $(top_srcdir)/warmrest fpmsyncd_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) 
 fpmsyncd_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON)
 fpmsyncd_LDADD = -lnl-3 -lnl-route-3 -lswsscommon
+
+if GCOV_ENABLED
+fpmsyncd_LDADD += -lgcovpreload
+endif
diff --git a/gcovpreload/Makefile b/gcovpreload/Makefile
new file mode 100644
index 000000000000..c4328c72b926
--- /dev/null
+++ b/gcovpreload/Makefile
@@ -0,0 +1,66 @@
+LIBNAME=libgcovpreload
+
+# Fall back to gcc when $CC is not in $PATH.
+CC:=$(shell sh -c 'type $${CC%% *} >/dev/null 2>/dev/null && echo $(CC) || echo gcc')
+CXX:=$(shell sh -c 'type $${CXX%% *} >/dev/null 2>/dev/null && echo $(CXX) || echo g++')
+
+DYLIBSUFFIX=so
+DYLIBNAME=$(LIBNAME).$(DYLIBSUFFIX)
+DYLIB_MAKE_CMD=$(CC) -shared -fpic gcovpreload.c -o ${DYLIBNAME}
+
+all:
+	$(DYLIB_MAKE_CMD)
+	sudo cp $(DYLIBNAME) /usr/lib
+	sudo chmod 777 -R /usr/lib/$(DYLIBNAME)
+	sudo cp lcov_cobertura.py ../
+
+# Empty stub targets so the automake-driven recursive make finds every target it expects.
+Makefile:
+install-binPROGRAMS:
+uninstall-binPROGRAMS:
+clean-binPROGRAMS:
+gcovpreload:
+mostlyclean-compile:
+distclean-compile:
+.cpp.o:
+.cpp.obj:
+.cpp.lo:
+install:
+check-am:
+check:
+install:
+install-exec:
+install-data:
+uninstall:
+install-am:
+installcheck:
+install-strip:
+mostlyclean-generic:
+clean-generic:
+distclean-generic:
+maintainer-clean-generic:
+clean:
+clean-am:
+distclean:
+distclean-am:
+dvi:
+dvi-am:
+html:
+info:
+install-data-am:
+install-dvi:
+install-exec-am:
+install-html:
+install-info:
+install-man:
+install-pdf:
+install-ps:
+installcheck-am:
+maintainer-clean:
+mostlyclean:
+pdf:
+ps:
+uninstall-am:
+.MAKE:
+.PHONY:
+.PRECIOUS:
+.NOEXPORT:
diff --git a/gcovpreload/gcovpreload.c b/gcovpreload/gcovpreload.c
new file mode 100644
index 000000000000..2141e9ef3954
--- /dev/null
+++ b/gcovpreload/gcovpreload.c
@@ -0,0 +1,43 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <signal.h>
+#define SIMPLE_WAY
+
+void sighandler(int signo)
+{
+#ifdef SIMPLE_WAY
+    exit(signo);  /* exit() runs the gcov atexit hooks, which dump the counters */
+#else
+    extern void __gcov_flush();
+    __gcov_flush(); /* flush out gcov stats data */
+    raise(signo);   /* raise the signal again to crash the process */
+#endif
+}
+
+/**
+ * libgcovpreload.so is linked into (or preloaded before) every instrumented daemon.
+ * __attribute__((constructor)) is a gcc extension that runs the marked function before
+ * main() executes; we use it to redirect fatal signals to our own handler so the gcov
+ * counters are still written out when a process is killed instead of exiting normally.
+ */
+__attribute__ ((constructor))
+void ctor()
+{
+    int sigs[] = {
+        SIGILL, SIGFPE, SIGABRT, SIGBUS,
+        SIGSEGV, SIGHUP, SIGINT, SIGQUIT,
+        SIGTERM
+    };
+    int i;
+    struct sigaction sa;
+    sa.sa_handler = sighandler;
+    sigemptyset(&sa.sa_mask);
+    sa.sa_flags = SA_RESETHAND;
+
+    for(i = 0; i < sizeof(sigs)/sizeof(sigs[0]); ++i) {
+        if (sigaction(sigs[i], &sa, NULL) == -1) {
+            perror("Could not set signal handler");
+        }
+    }
+}
diff --git a/gcovpreload/lcov_cobertura.py b/gcovpreload/lcov_cobertura.py
new file mode 100755
index 000000000000..bf72f2d228b6
--- /dev/null
+++ b/gcovpreload/lcov_cobertura.py
@@ -0,0 +1,407 @@
+#!/usr/bin/env python
+
+# Copyright 2011-2012 Eric Wendelin
+#
+# This is free software, licensed under the Apache License, Version 2.0,
+# available in the accompanying LICENSE.txt file.
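+#
+# Vendored copy (v1.6): tests/gcov_support.sh invokes this script to convert the
+# merged lcov tracefile into the Cobertura coverage.xml that gcov.yml publishes.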
+ +""" +Converts lcov line coverage output to Cobertura-compatible XML for CI +""" + +import re +import sys +import os +import time +import subprocess +from xml.dom import minidom +from optparse import OptionParser + +from distutils.spawn import find_executable + +CPPFILT = "c++filt" +HAVE_CPPFILT = False + +if find_executable(CPPFILT) is not None: + HAVE_CPPFILT = True + +VERSION = '1.6' +__all__ = ['LcovCobertura'] + + +class Demangler(object): + def __init__(self): + self.pipe = subprocess.Popen( + CPPFILT, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + + def demangle(self, name): + newname = name + "\n" + self.pipe.stdin.write(newname.encode('utf-8')) + res = self.pipe.stdout.readline() + return str(res.rstrip()) + + +class LcovCobertura(object): + """ + Converts code coverage report files in lcov format to Cobertura's XML + report format so that CI servers like Jenkins can aggregate results and + determine build stability etc. + >>> from lcov_cobertura import LcovCobertura + >>> LCOV_INPUT = 'your lcov input' + >>> converter = LcovCobertura(LCOV_INPUT) + >>> cobertura_xml = converter.convert() + >>> print(cobertura_xml) + """ + + def __init__(self, lcov_data, base_dir='.', excludes=None, demangle=False): + """ + Create a new :class:`LcovCobertura` object using the given `lcov_data` + and `options`. + :param lcov_data: Path to LCOV data file + :type lcov_data: string + :param base_dir: Path upon which to base all sources + :type base_dir: string + :param excludes: list of regexes to packages as excluded + :type excludes: [string] + :param demangle: whether to demangle function names using c++filt + :type demangle: bool + """ + + if not excludes: + excludes = [] + self.lcov_data = lcov_data + self.base_dir = base_dir + self.excludes = excludes + if demangle: + demangler = Demangler() + self.format = demangler.demangle + else: + self.format = lambda x: x + + def convert(self): + """ + Convert lcov file to cobertura XML using options from this instance. + """ + coverage_data = self.parse() + return self.generate_cobertura_xml(coverage_data) + + def parse(self): + """ + Generate a data structure representing it that can be serialized in any + logical format. 
+ """ + + coverage_data = { + 'packages': {}, + 'summary': {'lines-total': 0, 'lines-covered': 0, + 'branches-total': 0, 'branches-covered': 0}, + 'timestamp': str(int(time.time())) + } + package = None + current_file = None + file_lines_total = 0 + file_lines_covered = 0 + file_lines = {} + file_methods = {} + file_branches_total = 0 + file_branches_covered = 0 + + for line in self.lcov_data.split('\n'): + if line.strip() == 'end_of_record': + if current_file is not None: + package_dict = coverage_data['packages'][package] + package_dict['lines-total'] += file_lines_total + package_dict['lines-covered'] += file_lines_covered + package_dict['branches-total'] += file_branches_total + package_dict['branches-covered'] += file_branches_covered + file_dict = package_dict['classes'][current_file] + file_dict['lines-total'] = file_lines_total + file_dict['lines-covered'] = file_lines_covered + file_dict['lines'] = dict(file_lines) + file_dict['methods'] = dict(file_methods) + file_dict['branches-total'] = file_branches_total + file_dict['branches-covered'] = file_branches_covered + coverage_data['summary']['lines-total'] += file_lines_total + coverage_data['summary']['lines-covered'] += file_lines_covered + coverage_data['summary']['branches-total'] += file_branches_total + coverage_data['summary']['branches-covered'] += file_branches_covered + + line_parts = line.split(':', 1) + input_type = line_parts[0] + + if input_type == 'SF': + # Get file name + file_name = line_parts[-1].strip() + relative_file_name = os.path.relpath(file_name, self.base_dir) + package = '.'.join(relative_file_name.split(os.path.sep)[0:-1]) + class_name = '.'.join(relative_file_name.split(os.path.sep)) + if package not in coverage_data['packages']: + coverage_data['packages'][package] = { + 'classes': {}, 'lines-total': 0, 'lines-covered': 0, + 'branches-total': 0, 'branches-covered': 0 + } + coverage_data['packages'][package]['classes'][ + relative_file_name] = { + 'name': class_name, 'lines': {}, 'lines-total': 0, + 'lines-covered': 0, 'branches-total': 0, + 'branches-covered': 0 + } + current_file = relative_file_name + file_lines_total = 0 + file_lines_covered = 0 + file_lines.clear() + file_methods.clear() + file_branches_total = 0 + file_branches_covered = 0 + elif input_type == 'DA': + # DA:2,0 + (line_number, line_hits) = line_parts[-1].strip().split(',') + line_number = int(line_number) + if line_number not in file_lines: + file_lines[line_number] = { + 'branch': 'false', 'branches-total': 0, + 'branches-covered': 0 + } + file_lines[line_number]['hits'] = line_hits + # Increment lines total/covered for class and package + try: + if int(line_hits) > 0: + file_lines_covered += 1 + except Exception: + pass + file_lines_total += 1 + elif input_type == 'BRDA': + # BRDA:1,1,2,0 + (line_number, block_number, branch_number, branch_hits) = line_parts[-1].strip().split(',') + line_number = int(line_number) + if line_number not in file_lines: + file_lines[line_number] = { + 'branch': 'true', 'branches-total': 0, + 'branches-covered': 0, 'hits': 0 + } + file_lines[line_number]['branch'] = 'true' + file_lines[line_number]['branches-total'] += 1 + file_branches_total += 1 + if branch_hits != '-' and int(branch_hits) > 0: + file_lines[line_number]['branches-covered'] += 1 + file_branches_covered += 1 + elif input_type == 'BRF': + file_branches_total = int(line_parts[1]) + elif input_type == 'BRH': + file_branches_covered = int(line_parts[1]) + elif input_type == 'FN': + # FN:5,(anonymous_1) + function_line, function_name = 
line_parts[-1].strip().split(',') + file_methods[function_name] = [function_line, '0'] + elif input_type == 'FNDA': + # FNDA:0,(anonymous_1) + (function_hits, function_name) = line_parts[-1].strip().split(',') + if function_name not in file_methods: + file_methods[function_name] = ['0', '0'] + file_methods[function_name][-1] = function_hits + + # Exclude packages + excluded = [x for x in coverage_data['packages'] for e in self.excludes + if re.match(e, x)] + for package in excluded: + del coverage_data['packages'][package] + + # Compute line coverage rates + for package_data in list(coverage_data['packages'].values()): + package_data['line-rate'] = self._percent( + package_data['lines-total'], + package_data['lines-covered']) + package_data['branch-rate'] = self._percent( + package_data['branches-total'], + package_data['branches-covered']) + + return coverage_data + + def generate_cobertura_xml(self, coverage_data): + """ + Given parsed coverage data, return a String cobertura XML representation. + :param coverage_data: Nested dict representing coverage information. + :type coverage_data: dict + """ + + dom_impl = minidom.getDOMImplementation() + doctype = dom_impl.createDocumentType("coverage", None, + "http://cobertura.sourceforge.net/xml/coverage-04.dtd") + document = dom_impl.createDocument(None, "coverage", doctype) + root = document.documentElement + summary = coverage_data['summary'] + self._attrs(root, { + 'branch-rate': self._percent(summary['branches-total'], + summary['branches-covered']), + 'branches-covered': str(summary['branches-covered']), + 'branches-valid': str(summary['branches-total']), + 'complexity': '0', + 'line-rate': self._percent(summary['lines-total'], + summary['lines-covered']), + 'lines-covered': str(summary['lines-covered']), + 'lines-valid': str(summary['lines-total']), + 'timestamp': coverage_data['timestamp'], + 'version': '2.0.3' + }) + + sources = self._el(document, 'sources', {}) + source = self._el(document, 'source', {}) + source.appendChild(document.createTextNode(self.base_dir)) + sources.appendChild(source) + + root.appendChild(sources) + + packages_el = self._el(document, 'packages', {}) + + packages = coverage_data['packages'] + for package_name, package_data in list(packages.items()): + package_el = self._el(document, 'package', { + 'line-rate': package_data['line-rate'], + 'branch-rate': package_data['branch-rate'], + 'name': package_name, + 'complexity': '0', + }) + classes_el = self._el(document, 'classes', {}) + for class_name, class_data in list(package_data['classes'].items()): + class_el = self._el(document, 'class', { + 'branch-rate': self._percent(class_data['branches-total'], + class_data['branches-covered']), + 'complexity': '0', + 'filename': class_name, + 'line-rate': self._percent(class_data['lines-total'], + class_data['lines-covered']), + 'name': class_data['name'] + }) + + # Process methods + methods_el = self._el(document, 'methods', {}) + for method_name, (line, hits) in list(class_data['methods'].items()): + method_el = self._el(document, 'method', { + 'name': self.format(method_name), + 'signature': '', + 'line-rate': '1.0' if int(hits) > 0 else '0.0', + 'branch-rate': '1.0' if int(hits) > 0 else '0.0', + }) + method_lines_el = self._el(document, 'lines', {}) + method_line_el = self._el(document, 'line', { + 'hits': hits, + 'number': line, + 'branch': 'false', + }) + method_lines_el.appendChild(method_line_el) + method_el.appendChild(method_lines_el) + methods_el.appendChild(method_el) + + # Process lines + lines_el = 
self._el(document, 'lines', {}) + lines = list(class_data['lines'].keys()) + lines.sort() + for line_number in lines: + line_el = self._el(document, 'line', { + 'branch': class_data['lines'][line_number]['branch'], + 'hits': str(class_data['lines'][line_number]['hits']), + 'number': str(line_number) + }) + if class_data['lines'][line_number]['branch'] == 'true': + total = int(class_data['lines'][line_number]['branches-total']) + covered = int(class_data['lines'][line_number]['branches-covered']) + percentage = int((covered * 100.0) / total) + line_el.setAttribute('condition-coverage', + '{0}% ({1}/{2})'.format( + percentage, covered, total)) + lines_el.appendChild(line_el) + + class_el.appendChild(methods_el) + class_el.appendChild(lines_el) + classes_el.appendChild(class_el) + package_el.appendChild(classes_el) + packages_el.appendChild(package_el) + root.appendChild(packages_el) + + return document.toprettyxml() + + def _el(self, document, name, attrs): + """ + Create an element within document with given name and attributes. + :param document: Document element + :type document: Document + :param name: Element name + :type name: string + :param attrs: Attributes for element + :type attrs: dict + """ + return self._attrs(document.createElement(name), attrs) + + def _attrs(self, element, attrs): + """ + Set attributes on given element. + :param element: DOM Element + :type element: Element + :param attrs: Attributes for element + :type attrs: dict + """ + for attr, val in list(attrs.items()): + element.setAttribute(attr, val) + return element + + def _percent(self, lines_total, lines_covered): + """ + Get the percentage of lines covered in the total, with formatting. + :param lines_total: Total number of lines in given module + :type lines_total: number + :param lines_covered: Number of lines covered by tests in module + :type lines_covered: number + """ + + if lines_total == 0: + return '0.0' + return str(float(float(lines_covered) / float(lines_total))) + + +def main(argv=None): + """ + Converts LCOV coverage data to Cobertura-compatible XML for reporting. + Usage: + lcov_cobertura.py lcov-file.dat + lcov_cobertura.py lcov-file.dat -b src/dir -e test.lib -o path/out.xml + By default, XML output will be written to ./coverage.xml + """ + if argv is None: + argv = sys.argv + parser = OptionParser() + parser.usage = ('lcov_cobertura.py lcov-file.dat [-b source/dir] ' + '[-e ] [-o output.xml] [-d]') + parser.description = 'Converts lcov output to cobertura-compatible XML' + parser.add_option('-b', '--base-dir', action='store', + help='Directory where source files are located', + dest='base_dir', default='.') + parser.add_option('-e', '--excludes', + help='Comma-separated list of regexes of packages to exclude', + action='append', dest='excludes', default=[]) + parser.add_option('-o', '--output', + help='Path to store cobertura xml file', + action='store', dest='output', default='coverage.xml') + parser.add_option('-d', '--demangle', + help='Demangle C++ function names using %s' % CPPFILT, + action='store_true', dest='demangle', default=False) + (options, args) = parser.parse_args(args=argv) + + if options.demangle and not HAVE_CPPFILT: + raise RuntimeError("C++ filter executable (%s) not found!" 
% CPPFILT) + + if len(args) != 2: + print(main.__doc__) + sys.exit(1) + + try: + with open(args[1], 'r') as lcov_file: + lcov_data = lcov_file.read() + lcov_cobertura = LcovCobertura(lcov_data, options.base_dir, options.excludes, options.demangle) + cobertura_xml = lcov_cobertura.convert() + with open(options.output, mode='wt') as output_file: + output_file.write(cobertura_xml) + except IOError: + sys.stderr.write("Unable to convert %s to Cobertura XML" % args[1]) + +if __name__ == '__main__': + main() diff --git a/gearsyncd/Makefile.am b/gearsyncd/Makefile.am index 34bffb763362..c9df85853a43 100644 --- a/gearsyncd/Makefile.am +++ b/gearsyncd/Makefile.am @@ -12,4 +12,8 @@ gearsyncd_SOURCES = $(top_srcdir)/lib/gearboxutils.cpp gearsyncd.cpp gearparserb gearsyncd_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) $(COV_CFLAGS) $(ASAN_CFLAGS) -gearsyncd_LDADD = -lnl-3 -lnl-route-3 -lswsscommon $(COV_LDFLAGS) $(ASAN_LDFLAGS) +gearsyncd_LDADD = -lnl-3 -lnl-route-3 -lswsscommon $(COV_LDFLAGS) $(ASAN_LDFLAGS) + +if GCOV_ENABLED +gearsyncd_LDADD += -lgcovpreload +endif diff --git a/mclagsyncd/Makefile.am b/mclagsyncd/Makefile.am index 4a72f8663862..e7bed8de7de8 100644 --- a/mclagsyncd/Makefile.am +++ b/mclagsyncd/Makefile.am @@ -12,4 +12,8 @@ mclagsyncd_SOURCES = mclagsyncd.cpp mclaglink.cpp mclagsyncd_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) mclagsyncd_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) -mclagsyncd_LDADD = -lnl-3 -lnl-route-3 -lswsscommon +mclagsyncd_LDADD = -lnl-3 -lnl-route-3 -lswsscommon + +if GCOV_ENABLED +mclagsyncd_LDADD += -lgcovpreload +endif diff --git a/natsyncd/Makefile.am b/natsyncd/Makefile.am index 1740d8b097e5..d8212ee4b4c0 100644 --- a/natsyncd/Makefile.am +++ b/natsyncd/Makefile.am @@ -14,3 +14,7 @@ natsyncd_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) natsyncd_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) natsyncd_LDADD = -lnl-3 -lnl-route-3 -lnl-nf-3 -lswsscommon +if GCOV_ENABLED +natsyncd_LDADD += -lgcovpreload +endif + diff --git a/neighsyncd/Makefile.am b/neighsyncd/Makefile.am index c89caab807f6..23e76b6cd203 100644 --- a/neighsyncd/Makefile.am +++ b/neighsyncd/Makefile.am @@ -14,3 +14,6 @@ neighsyncd_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) neighsyncd_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) neighsyncd_LDADD = -lnl-3 -lnl-route-3 -lswsscommon +if GCOV_ENABLED +neighsyncd_LDADD += -lgcovpreload +endif diff --git a/orchagent/Makefile.am b/orchagent/Makefile.am index df3c432aaf00..0636f1ca7292 100644 --- a/orchagent/Makefile.am +++ b/orchagent/Makefile.am @@ -98,3 +98,9 @@ routeresync_LDADD = -lswsscommon orchagent_restart_check_SOURCES = orchagent_restart_check.cpp orchagent_restart_check_CPPFLAGS = $(DBGFLAGS) $(AM_CPPFLAGS) $(CFLAGS_COMMON) orchagent_restart_check_LDADD = -lhiredis -lswsscommon -lpthread + +if GCOV_ENABLED +orchagent_LDADD += -lgcovpreload +routeresync_LDADD += -lgcovpreload +orchagent_restart_check_LDADD += -lgcovpreload +endif diff --git a/portsyncd/Makefile.am b/portsyncd/Makefile.am index 96865e2887cf..5bba269ab26a 100644 --- a/portsyncd/Makefile.am +++ b/portsyncd/Makefile.am @@ -14,3 +14,6 @@ portsyncd_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) portsyncd_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) portsyncd_LDADD = -lnl-3 -lnl-route-3 -lswsscommon +if GCOV_ENABLED +portsyncd_LDADD += -lgcovpreload +endif diff --git a/swssconfig/Makefile.am b/swssconfig/Makefile.am index 00c05f390dab..590e7d9f5628 100644 --- a/swssconfig/Makefile.am +++ b/swssconfig/Makefile.am @@ -19,3 
+19,8 @@ swssplayer_SOURCES = swssplayer.cpp swssplayer_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) swssplayer_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) swssplayer_LDADD = -lswsscommon + +if GCOV_ENABLED +swssconfig_LDADD += -lgcovpreload +swssplayer_LDADD += -lgcovpreload +endif diff --git a/teamsyncd/Makefile.am b/teamsyncd/Makefile.am index bca200b6e03d..2939a52f2adb 100644 --- a/teamsyncd/Makefile.am +++ b/teamsyncd/Makefile.am @@ -13,3 +13,7 @@ teamsyncd_SOURCES = teamsyncd.cpp teamsync.cpp teamsyncd_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) teamsyncd_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) teamsyncd_LDADD = -lnl-3 -lnl-route-3 -lhiredis -lswsscommon -lteam + +if GCOV_ENABLED +teamsyncd_LDADD += -lgcovpreload +endif diff --git a/tests/gcov_support.sh b/tests/gcov_support.sh new file mode 100755 index 000000000000..4200e20813b8 --- /dev/null +++ b/tests/gcov_support.sh @@ -0,0 +1,413 @@ +#!/bin/bash +## This script is to enable the gcov support of the SONiC source codes +work_dir=$(pwd) +env_home=$HOME + +GCNO_LIST_FILE="gcno_file_list.txt" +GCDA_DIR_LIST="gcda_dir_list.txt" +TMP_GCDA_FILE_LIST="tmp_gcda_file_list.txt" +GCNO_ALL_TAR_GZ="gcno.tar.gz" + +INFO_DIR=info +HTML_DIR=html +ALLMERGE_DIR=AllMergeReport + +GCOV_OUTPUT=${work_dir}/gcov_output +GCOV_INFO_OUTPUT=${GCOV_OUTPUT}/${INFO_DIR} +GCOV_HTML_OUTPUT=${GCOV_OUTPUT}/${HTML_DIR} +GCOV_MERGE_REPORT_OUTPUT=${GCOV_OUTPUT}/${ALLMERGE_DIR} + +HTML_FILE_PREFIX="GCOVHTML_" +HTML_FILE_LIST=${GCOV_OUTPUT}/htmllist +INFO_FILE_PREFIX="GCOVINFO_" +INFO_FILE_LIST=${GCOV_OUTPUT}/infolist +INFO_ERR_LIST=${work_dir}/info_err_list +CONTAINER_LIST=${work_dir}/containerlist +ALL_INFO_FILES=${work_dir}/allinfofileslist +REPE_INFO_LIST=${work_dir}/tmpinfolist +SINGLE_INFO_LIST=${work_dir}/singleinfolist + +# reset compiling environment +gcov_support_clean() +{ + find /tmp/gcov -name $INFO_FILE_PREFIX* | xargs rm -rf + find /tmp/gcov -name $HTML_FILE_PREFIX* | xargs rm -rf + find /tmp/gcov -name *.gcno | xargs rm -rf + find /tmp/gcov -name *.gcda | xargs rm -rf + find /tmp/gcov -name $TMP_GCDA_FILE_LIST | xargs rm -rf + rm /tmp/gcov/info_err_list + rm /tmp/gcov/gcda_dir_list.txt +} + +# verify whether the info file generated is valid +verify_info_file() +{ + local file=$1 + local path=$2 + local FILE_OK=`grep "FN:" ${file} | wc -l` + if [ $FILE_OK -lt 1 ] ;then + echo ${path}/${file} >> /tmp/gcov/info_err_list + rm ${file} + fi +} + +# search and save the dir where the lcov should be implemented +list_lcov_path() +{ + local find_gcda_file + local find_gcno_file + local gcdastr=".gcda" + local gcda_dir=$1 + + echo ${gcda_dir} + + TMP_FILE=${gcda_dir}/tmpgcdalist + echo "Start searching .gcda files..." + exec 4>$TMP_FILE + find_gcda_file=`find ${gcda_dir} -name *.gcda` + echo "Start rm unused gcno files for speed up" + find_gcno_file=`find ${gcda_dir} -name *.gcno` + for line in ${find_gcno_file} + do + temp_gcda=${line/.gcno/$gcdastr} + if [ ! 
-f ${temp_gcda} ]; then
+            rm ${line}
+        fi
+    done
+
+    echo ${find_gcda_file}
+    RESULT=${find_gcda_file}
+    echo "$RESULT" >&4
+    exec 4>&-
+
+    cat ${TMP_FILE} | xargs dirname | uniq > ${gcda_dir}/gcda_dir_list.txt
+}
+
+# generate gcov base info and html report for specified range files
+lcov_genhtml_report()
+{
+    local gcda_file_range=$1
+    list_lcov_path ${gcda_file_range}
+
+    while read line
+    do
+        local fullpath=$line
+        local infoname=${INFO_FILE_PREFIX}${fullpath##*/}.info
+        htmldirname=${HTML_FILE_PREFIX}${fullpath##*/}
+
+        echo ${fullpath}
+
+        pushd ${fullpath}
+        GCDA_COUNT=`find -name "*.gcda" | wc -l`
+        echo "gcda count: $GCDA_COUNT"
+        if [ $GCDA_COUNT -ge 1 ]; then
+            echo "Executing lcov -c -d . -o ${infoname}"
+            lcov -c -d . -o ${infoname}
+            if [ "$?" != "0" ]; then
+                echo "lcov fail!"
+                rm ${infoname}
+            fi
+        fi
+        popd
+    done < ${gcda_file_range}/gcda_dir_list.txt
+}
+
+rm_unused_gcno()
+{
+    cur_dir=$1/
+
+}
+
+# generate html reports for all eligible submodules
+lcov_genhtml_all()
+{
+    local container_id
+
+    container_id=$1
+
+    echo " === Start generating all gcov reports === "
+    lcov_genhtml_report ${container_id}/gcov
+}
+
+lcov_merge_all()
+{
+    cp -rf common_work $1/
+    find . -name "*.info" > infolist
+    while read line
+    do
+        if [ ! -f "total.info" ]; then
+            lcov -o total.info -a ${line}
+        else
+            lcov -o total.info -a total.info -a ${line}
+        fi
+    done < infolist
+
+    lcov --extract total.info '*sonic-gcov/*' -o total.info
+    cp $1/lcov_cobertura.py $1/common_work/gcov/
+    python $1/common_work/gcov/lcov_cobertura.py total.info -o coverage.xml
+
+    sed -i "s#common_work/#$1/common_work/#" coverage.xml
+
+    cd gcov_output/
+    if [ ! -d ${ALLMERGE_DIR} ]; then
+        mkdir -p ${ALLMERGE_DIR}
+    fi
+
+    cp ../coverage.xml ${ALLMERGE_DIR}
+
+    cd ../
+}
+
+gcov_set_environment()
+{
+    local build_dir
+
+    build_dir=$1
+    mkdir -p ${build_dir}/gcov_tmp
+    mkdir -p ${build_dir}/gcov_tmp/sonic-gcov
+
+    docker ps -q > ${CONTAINER_LIST}
+
+    echo "### Start collecting info files from existing containers"
+
+    for line in $(cat ${CONTAINER_LIST})
+    do
+        local container_id=${line}
+        echo ${container_id}
+        echo "script_count"
+        script_count=`docker exec -i ${container_id} find / -name gcov_support.sh | wc -l`
+        echo ${script_count}
+        if [ ${script_count} -gt 0 ]; then
+            docker exec -i ${container_id} killall5 -15
+            docker exec -i ${container_id} /tmp/gcov/gcov_support.sh collect_gcda
+        fi
+        gcda_count=`docker exec -i ${container_id} find / -name "*.gcda" | wc -l`
+        if [ ${gcda_count} -gt 0 ]; then
+            echo "find gcda in "
+            echo ${container_id}
+            mkdir -p ${build_dir}/gcov_tmp/sonic-gcov/${container_id}
+            pushd ${build_dir}/gcov_tmp/sonic-gcov/${container_id}
+            docker cp ${container_id}:/tmp/gcov/ .
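+            # stage the helper scripts beside the collected data so the gcov
+            # report stage can run them without another checkout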
+ cp gcov/gcov_support.sh ${build_dir}/gcov_tmp/sonic-gcov + cp gcov/lcov_cobertura.py ${build_dir}/gcov_tmp/sonic-gcov + popd + fi + done + + echo "cat list" + cat ${CONTAINER_LIST} + + cd ${build_dir}/gcov_tmp/ + tar -zcvf sonic-gcov.tar.gz sonic-gcov/ + rm -rf sonic-gcov + cd ../../ + rm ${CONTAINER_LIST} +} + +gcov_merge_info() +{ + lcov_merge_all $1 +} + +gcov_support_generate_report() +{ + ls -F | grep "/$" > container_dir_list + sed -i '/gcov_output/d' container_dir_list + sed -i "s#\/##g" container_dir_list + + mkdir -p gcov_output + mkdir -p gcov_output/info + + #for same code path + mkdir -p common_work + + cat container_dir_list + while read line + do + local container_id=${line} + echo ${container_id} + + cp -rf ${container_id}/* common_work + tar -zxvf swss.tar.gz -C common_work/gcov + cd common_work/gcov/ + find -name gcda*.tar.gz > tmp_gcda.txt + while read LINE ; do + echo ${LINE} + echo ${LINE#*.} + tar -zxvf ${LINE} + done < tmp_gcda.txt + rm tmp_gcda.txt + + find -name gcno*.tar.gz > tmp_gcno.txt + while read LINE ; do + echo ${LINE} + echo ${LINE%%.*} + tar -zxvf ${LINE} + done < tmp_gcno.txt + rm tmp_gcno.txt + cd - + + ls -lh common_work/* + lcov_genhtml_all common_work + if [ "$?" != "0" ]; then + echo "###lcov operation fail.." + return 0 + fi + cd common_work + find . -name "*.gcda" -o -name "*.gcno" -o -name "*.gz" -o -name "*.cpp" -o -name "*.h"| xargs rm -rf + cd ../ + cp -rf common_work/* ${container_id}/* + cd ${container_id} + find . -name "*.gcda" -o -name "*.gcno" -o -name "*.gz" -o -name "*.cpp" -o -name "*.h"| xargs rm -rf + cd ../ + + rm -rf common_work/* + + cp -rf ${container_id} gcov_output/ + done < container_dir_list + + # generate report with code + mkdir -p common_work/gcov + tar -zxvf swss.tar.gz -C common_work/gcov + + echo "### Make info generating completed !!" +} + +# list and save the generated .gcda files +gcov_support_collect_gcda() +{ + echo "gcov_support_collect_gcda begin" + local gcda_files_count + local gcda_count + + pushd / + # check whether .gcda files exist + gcda_files_count=`find \. -name "*\.gcda" 2>/dev/null | wc -l` + if [ ${gcda_files_count} -lt 1 ]; then + echo "### no gcda files found!" + return 0 + fi + + CODE_PREFFIX=/__w/1/s/ + + pushd ${CODE_PREFFIX} + tar -zcvf /tmp/gcov/gcda.tar.gz * + popd + + popd + echo "### collect gcda done!" + + gcov_support_clean + + pushd /tmp/gcov + gcno_count=`find -name gcno*.tar.gz 2>/dev/null | wc -l` + if [ ${gcno_count} -lt 1 ]; then + echo "### Fail! Cannot find any gcno files, please check." + return -1 + fi + + gcda_count=`find -name gcda*.tar.gz 2>/dev/null | wc -l` + if [ ${gcda_count} -lt 1 ]; then + echo "### Cannot find any gcda files, please check." + return 0 + fi + + rm -rf /tmp/gcov/gcov_output + mkdir -p /tmp/gcov/gcov_output + + echo "### Make /tmp/gcov dir completed !!" + popd + +} + +# list and save the generated .gcno files +gcov_support_collect_gcno() +{ + local find_command + local tar_command + local submodule_name + + find gcno*.tar.gz > tmp_gcno.txt + while read LINE ; do + rm -f ${LINE} + done < tmp_gcno.txt + rm tmp_gcno.txt + + # rename .tmp*_gcno files generated + for tmp_gcno in `find -name .tmp_*.gcno` + do + new_gcno=`echo ${tmp_gcno} | sed 's/.tmp_//g'` + echo ${new_gcno} + mv ${tmp_gcno} ${new_gcno} + done + + echo " === Start collecting .gcno files... === " + submodule_name=$1 + exec 3>$GCNO_LIST_FILE + find_command=`find -name *.gcno` + echo "${find_command}" + if [ -z "${find_command}" ]; then + echo "### Error! no gcno files found!" 
+ return -1 + fi + RESULT=${find_command} + echo "$RESULT" >&3 + exec 3>&- + + local filesize=`ls -l $GCNO_LIST_FILE | awk '{print $5}'` + # Empty gcno_file_list indicates the non-gcov compling mode + if [ ${filesize} -le 1 ]; then + echo "empty gcno_file_list.txt" + rm $GCNO_LIST_FILE + else + echo " === Output archive file... === " + tar_command="tar -T $GCNO_LIST_FILE -zcvf gcno_$submodule_name.tar.gz" + echo "${tar_command}" + ${tar_command} + # temporarily using fixed dir + cp gcno_$submodule_name.tar.gz ${work_dir}/debian/$submodule_name/tmp/gcov + cp ./tests/gcov_support.sh ${work_dir}/debian/$submodule_name/tmp/gcov + cp ./tests/gcov_support.sh ${work_dir}/tests + cp ./gcovpreload/lcov_cobertura.py ${work_dir}/debian/$submodule_name/tmp/gcov + mkdir -p ${work_dir}/debian/$submodule_name/usr + mkdir -p ${work_dir}/debian/$submodule_name/usr/lib + cp ./gcovpreload/libgcovpreload.so ${work_dir}/debian/$submodule_name/usr/lib + sudo chmod 777 -R /${work_dir}/debian/$submodule_name/usr/lib/libgcovpreload.so + rm $GCNO_LIST_FILE + echo " === Collect finished... === " + fi +} + +main() +{ + case $1 in + collect) + gcov_support_collect_gcno $2 + ;; + collect_gcda) + gcov_support_collect_gcda + ;; + generate) + gcov_support_generate_report + ;; + merge_container_info) + gcov_merge_info $2 + ;; + set_environment) + gcov_set_environment $2 + ;; + *) + echo "Usage:" + echo " collect collect .gcno files based on module" + echo " collect_gcda collect .gcda files" + echo " collect_gcda_files collect .gcda files in a docker" + echo " generate generate gcov report in html form (all or submodule_name)" + echo " tar_output tar gcov_output forder" + echo " merge_container_info merge homonymic info files from different container" + echo " set_environment set environment ready for report generating in containers" + esac +} + +main $1 $2 +exit + diff --git a/tests/test_sflow.py b/tests/test_sflow.py index 50d68979fdef..e3c95a694657 100644 --- a/tests/test_sflow.py +++ b/tests/test_sflow.py @@ -35,7 +35,7 @@ def test_defaultGlobal(self, dvs, testlog): expected_fields = {"SAI_SAMPLEPACKET_ATTR_SAMPLE_RATE": rate} self.adb.wait_for_field_match("ASIC_STATE:SAI_OBJECT_TYPE_SAMPLEPACKET", sample_session, expected_fields) - + self.cdb.update_entry("SFLOW", "global", {"admin_state": "down"}) expected_fields = {"SAI_PORT_ATTR_INGRESS_SAMPLEPACKET_ENABLE": "oid:0x0"} self.adb.wait_for_field_match("ASIC_STATE:SAI_OBJECT_TYPE_PORT", port_oid, expected_fields) @@ -134,7 +134,7 @@ def test_ConfigDel(self, dvs, testlog): expected_fields = {"SAI_SAMPLEPACKET_ATTR_SAMPLE_RATE": rate} self.adb.wait_for_field_match("ASIC_STATE:SAI_OBJECT_TYPE_SAMPLEPACKET", sample_session, expected_fields) - + def test_SamplingRatePortCfgUpdate(self, dvs, testlog): ''' This test checks if the SflowMgr updates the sampling rate @@ -169,7 +169,7 @@ def test_SamplingRateManualUpdate(self, dvs, testlog): self.cdb.create_entry("SFLOW_SESSION", "Ethernet4", session_params) self.cdb.wait_for_field_match("SFLOW_SESSION", "Ethernet4", session_params) appldb.wait_for_field_match("SFLOW_SESSION_TABLE", "Ethernet4", {"sample_rate": "256"}) - + self.cdb.update_entry("PORT", "Ethernet4", {'speed' : "25000"}) # The Check here is about the original value not getting changed. 
# If some bug was to appear, let's give it some time to get noticed diff --git a/tlm_teamd/Makefile.am b/tlm_teamd/Makefile.am index 32855b32b6e8..6bf7574a8f66 100644 --- a/tlm_teamd/Makefile.am +++ b/tlm_teamd/Makefile.am @@ -13,3 +13,7 @@ tlm_teamd_SOURCES = main.cpp teamdctl_mgr.cpp values_store.cpp tlm_teamd_CFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) tlm_teamd_CPPFLAGS = $(DBGFLAGS) $(AM_CFLAGS) $(CFLAGS_COMMON) $(JANSSON_CFLAGS) tlm_teamd_LDADD = -lhiredis -lswsscommon -lteamdctl $(JANSSON_LIBS) + +if GCOV_ENABLED +tlm_teamd_LDADD += -lgcovpreload +endif
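
For reference, the CI flow above reduces to a few lcov invocations. A minimal local sketch, assuming a tree configured with --enable-gcov whose tests have already run (so .gcda files sit beside the .gcno files), lcov/genhtml installed, and illustrative paths only (./swss as the build tree; other.info standing in for a second module's tracefile):

    # capture counters emitted by the instrumented binaries
    lcov -c -d ./swss -o swss.info

    # keep project sources only, then merge tracefiles from other modules
    lcov --extract swss.info '*swss/*' -o swss.info
    lcov -a swss.info -a other.info -o total.info

    # convert to Cobertura XML for the PublishCodeCoverageResults@1 task
    python gcovpreload/lcov_cobertura.py total.info -o coverage.xml

    # optional human-readable HTML report
    genhtml total.info -o gcov_output/html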