Added ability to strip source file paths prior to opening them. #99

Merged: 3 commits, Jan 30, 2024
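This change adds two command-line options, --gcov_prefix_strip and --gcov_prefix (see the parseArgs hunk below), and rewrites recorded source paths with them before fastcov opens the files. A minimal sketch of that rewriting, using an illustrative recorded path, strip count, and prefix that are not taken from any real project, and mirroring the processPrefix helper added in this diff:

from pathlib import Path

# Hypothetical values for illustration only; the real logic lives in processPrefix() below.
recorded = "/home/user1/work/git/repo/subdir/to/some/file.cpp"  # absolute path recorded at compile time
strip = 5               # --gcov_prefix_strip 5
prefix = "/home/user2"  # --gcov_prefix /home/user2

parts = Path(recorded).parts         # ('/', 'home', 'user1', 'work', 'git', 'repo', ...)
stripped = Path(*parts[strip + 1:])  # drop the root '/' plus the first 5 components
rewritten = Path(prefix) / stripped  # re-root the remainder under the new prefix
print(rewritten)                     # /home/user2/subdir/to/some/file.cpp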
60 changes: 50 additions & 10 deletions fastcov.py
@@ -30,6 +30,7 @@
import threading
import subprocess
import multiprocessing
from pathlib import Path

FASTCOV_VERSION = (1,15)
MINIMUM_PYTHON = (3,5)
@@ -249,6 +250,33 @@ def removeFiles(files):
for file in files:
os.remove(file)

def processPrefix(path, prefix, prefix_strip):
p = Path(path)
if p.exists() or not p.is_absolute():
return path

if prefix_strip > 0:
segments = p.parts

if len(segments) < prefix_strip + 1:
logging.warning("Couldn't strip %i path levels from %s.", prefix_strip, path)
return path

segments = segments[prefix_strip+1:]
p = Path(segments[0])
segments = segments[1:]
for s in segments:
p = p.joinpath(s)

if len(prefix) > 0:
if p.is_absolute():
p = Path(prefix).joinpath(p.relative_to('/'))
else:
p = Path(prefix).joinpath(p)

return str(p)


def getFilteredCoverageFiles(coverage_files, exclude):
def excludeGcda(gcda):
for ex in exclude:
@@ -477,11 +505,13 @@ def containsMarker(markers, strBody):
return False

# Returns whether source coverage changed or not
def exclProcessSource(fastcov_sources, source, exclude_branches_sw, include_branches_sw, exclude_line_marker, fallback_encodings):
def exclProcessSource(fastcov_sources, source, exclude_branches_sw, include_branches_sw, exclude_line_marker, fallback_encodings, gcov_prefix, gcov_prefix_strip):
source_to_open = processPrefix(source, gcov_prefix, gcov_prefix_strip)

# Before doing any work, check if this file even needs to be processed
if not exclude_branches_sw and not include_branches_sw:
# Ignore unencodable characters
with open(source, errors="ignore") as f:
with open(source_to_open, errors="ignore") as f:
if not containsMarker(exclude_line_marker + ["LCOV_EXCL"], f.read()):
return False

@@ -490,7 +520,7 @@ def exclProcessSource(fastcov_sources, source, exclude_branches_sw, include_bran
start_line = 0
end_line = 0
# Start enumeration at line 1 because the first line of the file is line 1 not 0
for i, line in enumerate(getSourceLines(source, fallback_encodings), 1):
for i, line in enumerate(getSourceLines(source_to_open, fallback_encodings), 1):
# Cycle through test names (likely only 1)
for test_name in fastcov_sources[source]:
fastcov_data = fastcov_sources[source][test_name]
@@ -550,12 +580,12 @@ def exclProcessSource(fastcov_sources, source, exclude_branches_sw, include_bran
# Source coverage changed
return True

def exclMarkerWorker(data_q, fastcov_sources, chunk, exclude_branches_sw, include_branches_sw, exclude_line_marker, fallback_encodings):
def exclMarkerWorker(data_q, fastcov_sources, chunk, exclude_branches_sw, include_branches_sw, exclude_line_marker, fallback_encodings, gcov_prefix, gcov_prefix_strip):
changed_sources = []

for source in chunk:
try:
if exclProcessSource(fastcov_sources, source, exclude_branches_sw, include_branches_sw, exclude_line_marker, fallback_encodings):
if exclProcessSource(fastcov_sources, source, exclude_branches_sw, include_branches_sw, exclude_line_marker, fallback_encodings, gcov_prefix, gcov_prefix_strip):
changed_sources.append((source, fastcov_sources[source]))
except FileNotFoundError:
logging.error("Could not find '%s' to scan for exclusion markers...", source)
@@ -567,13 +597,13 @@ def exclMarkerWorker(data_q, fastcov_sources, chunk, exclude_branches_sw, includ
# Exit current process with appropriate code
sys.exit(EXIT_CODE)

def processExclusionMarkers(fastcov_json, jobs, exclude_branches_sw, include_branches_sw, exclude_line_marker, min_chunk_size, fallback_encodings):
def processExclusionMarkers(fastcov_json, jobs, exclude_branches_sw, include_branches_sw, exclude_line_marker, min_chunk_size, fallback_encodings, gcov_prefix, gcov_prefix_strip):
chunk_size = max(min_chunk_size, int(len(fastcov_json["sources"]) / jobs) + 1)

processes = []
data_q = multiprocessing.Queue()
for chunk in chunks(list(fastcov_json["sources"].keys()), chunk_size):
p = multiprocessing.Process(target=exclMarkerWorker, args=(data_q, fastcov_json["sources"], chunk, exclude_branches_sw, include_branches_sw, exclude_line_marker, fallback_encodings))
p = multiprocessing.Process(target=exclMarkerWorker, args=(data_q, fastcov_json["sources"], chunk, exclude_branches_sw, include_branches_sw, exclude_line_marker, fallback_encodings, gcov_prefix, gcov_prefix_strip))
processes.append(p)
p.start()

@@ -591,9 +621,10 @@ def processExclusionMarkers(fastcov_json, jobs, exclude_branches_sw, include_bra
for changed_source in changed_sources:
fastcov_json["sources"][changed_source[0]] = changed_source[1]

def validateSources(fastcov_json):
def validateSources(fastcov_json, gcov_prefix, gcov_prefix_strip):
logging.info("Checking if all sources exist")
for source in fastcov_json["sources"].keys():
source = processPrefix(source, gcov_prefix, gcov_prefix_strip)
if not os.path.exists(source):
logging.error("Cannot find '{}'".format(source))

@@ -955,6 +986,9 @@ def parseArgs():
parser.add_argument('-p', '--dump-statistic', dest="dump_statistic", action="store_true", help="Dump total statistic at the end")
parser.add_argument('-v', '--version', action="version", version='%(prog)s {version}'.format(version=__version__), help="Show program's version number and exit")

parser.add_argument('-gps', '--gcov_prefix_strip', dest="gcov_prefix_strip", action="store", default=0, type=int, help="The number of initial directory names to strip off the absolute paths in the object file.")
parser.add_argument('-gp', '--gcov_prefix', dest="gcov_prefix", action="store", default="", help="The prefix to add to the paths in the object file.")

args = parser.parse_args()
if not args.output:
args.output = 'coverage.info' if args.lcov else 'coverage.json'
@@ -992,6 +1026,12 @@ def main():
# Setup logging
setupLogging(args.quiet, args.verbose)

if args.gcov_prefix_strip > 0:
os.environ["GCOV_PREFIX_STRIP"] = str(args.gcov_prefix_strip)

if len(args.gcov_prefix) > 0:
os.environ["GCOV_PREFIX"] = args.gcov_prefix

# Get report from appropriate source
if args.combine:
fastcov_json = getCombineCoverage(args)
@@ -1002,15 +1042,15 @@

# Scan for exclusion markers
if not skip_exclusion_markers:
processExclusionMarkers(fastcov_json, args.jobs, args.exclude_branches_sw, args.include_branches_sw, args.exclude_line_marker, args.minimum_chunk, args.fallback_encodings)
processExclusionMarkers(fastcov_json, args.jobs, args.exclude_branches_sw, args.include_branches_sw, args.exclude_line_marker, args.minimum_chunk, args.fallback_encodings, args.gcov_prefix, args.gcov_prefix_strip)
logging.info("Scanned {} source files for exclusion markers".format(len(fastcov_json["sources"])))

if args.diff_file:
logging.info("Filtering according to {} file".format(args.diff_file))
DiffParser().filterByDiff(args.diff_file, args.diff_base_dir, fastcov_json, args.fallback_encodings)

if args.validate_sources:
validateSources(fastcov_json)
validateSources(fastcov_json, args.gcov_prefix, args.gcov_prefix_strip)

# Dump to desired file format
dumpFile(fastcov_json, args)
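As a quick sanity check of the new helper outside the test suite, one can import fastcov directly (as the unit test below does). The paths here are hypothetical, and the example assumes the recorded absolute path does not exist locally, since processPrefix returns existing or relative paths unchanged:

import fastcov

# Hypothetical compile-time path that does not exist on this machine:
# strip 2 leading components, then re-root the remainder under a new prefix.
print(fastcov.processPrefix("/build/agent/src/lib/foo.cpp", "/home/dev/project", 2))
# -> /home/dev/project/src/lib/foo.cpp

# Relative paths (and paths that exist locally) are returned untouched:
print(fastcov.processPrefix("src/lib/foo.cpp", "/home/dev/project", 2))
# -> src/lib/foo.cpp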
38 changes: 38 additions & 0 deletions test/unit/test_gcov_prefix_options.py
@@ -0,0 +1,38 @@
#!/usr/bin/env python3
"""
Author: Jean-Philippe Lapointe
Make sure fastcov's gcov prefix options are processing the file paths as expected
"""

import pytest
import fastcov

from dataclasses import dataclass
import os

@dataclass
class TestSet:
__test__ = False
path: str
prefix: str
prefix_strip: int
expected_result: str


def test_simpleStripping():
testPatterns = [
# Making the path relative to the root of the git repo
TestSet('/home/user1/work/git/repo/subdir/to/some/file.cpp', '', 5, 'subdir/to/some/file.cpp'),
# Essentially changing user directory from user1 to user2
TestSet('/home/user1/work/git/repo/subdir/to/some/file.cpp', '/home/user2', 2, '/home/user2/work/git/repo/subdir/to/some/file.cpp'),
# Relative path, shouldn't get modified.
TestSet('subdir/to/some/file.cpp', '/home/user2', 2, 'subdir/to/some/file.cpp'),
# Current file should exist, it won't get messed with
TestSet(os.path.abspath(__file__), '/home/user2', 1, os.path.abspath(__file__)),
# Just prefixing an already absolute path
TestSet('/usr/include/someUnknownHeaderFile.h', '/home/user2/work/git/repo', 0, '/home/user2/work/git/repo/usr/include/someUnknownHeaderFile.h')
]

for elem in testPatterns:
assert(fastcov.processPrefix(elem.path, elem.prefix, elem.prefix_strip) == elem.expected_result)