#!/usr/bin/env python
"""
Static Analyzer qualification infrastructure.

The goal is to test the analyzer against different projects,
check for failures, compare results, and measure performance.

The Repository Directory contains the sources of the projects as well as
the information on how to build them and the expected output.

Repository Directory structure:
   - ProjectMap file
   - Historical Performance Data
   - Project Dir1
     - ReferenceOutput
   - Project Dir2
     - ReferenceOutput
   ..
Note that the build tree must be inside the project dir.
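
An example ProjectMap file (the project names here are made up; the second
field is the build mode: 0 - single file, 1 - project, 2 - single file c++11,
see validateProjectFile below):
   MyCProject,0
   MyBigProject,1
   MyCXX11Project,2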

To test a given build of the analyzer one would (example invocations are
shown below):
   - Make a copy of the Repository Directory. (TODO: Prefer to ensure that
     the build directory does not pollute the repository, to minimize network
     traffic.)
   - Build all projects, until error. Produce logs to report errors.
   - Compare results.
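
For example (illustrative; assuming this script is saved as SATestBuild.py
and run from the Repository Directory):
   python SATestBuild.py -r                   # regenerate the reference results
   python SATestBuild.py --strictness 1 -j 4  # analyze and compare to reference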

The files that should be kept around for failure investigations:
   RepositoryCopy/Project DirI/ScanBuildResults
   RepositoryCopy/Project DirI/run_static_analyzer.log

Assumptions (TODO: shouldn't need to assume these.):
   The script is being run from the Repository Directory.
   Both scan-build and the compiler it uses are in the PATH.
   export PATH=/Users/zaks/workspace/c2llvm/build/Release+Asserts/bin:$PATH

For more logging, set the environment variables:
   zaks:TI zaks$ export CCC_ANALYZER_LOG=1
   zaks:TI zaks$ export CCC_ANALYZER_VERBOSE=1

The list of checkers tested is hardcoded in the Checkers variable.
For testing additional checkers, use the SA_ADDITIONAL_CHECKERS environment
variable. It should contain a comma-separated list.
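
For example (the checker names here are illustrative):
   export SA_ADDITIONAL_CHECKERS=alpha.security.ArrayBound,alpha.core.CastToStruct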
"""
import CmpRuns
import SATestUtils

from subprocess import CalledProcessError, check_call
import argparse
import csv
import glob
import logging
import math
import multiprocessing
import os
import plistlib
import shutil
import sys
import threading
import time
import Queue

#------------------------------------------------------------------------------
# Helper functions.
#------------------------------------------------------------------------------

# Thread-local output streams. Worker threads override these with a
# StreamToLogger (see TestProjectThread.run) so that output from concurrently
# tested projects does not interleave.
Local = threading.local()
Local.stdout = sys.stdout
Local.stderr = sys.stderr
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s:%(levelname)s:%(name)s: %(message)s')


class StreamToLogger(object):
    def __init__(self, logger, log_level=logging.INFO):
        self.logger = logger
        self.log_level = log_level

    def write(self, buf):
        # Rstrip in order not to write an extra newline.
        self.logger.log(self.log_level, buf.rstrip())

    def flush(self):
        pass

    def fileno(self):
        return 0


def getProjectMapPath():
    ProjectMapPath = os.path.join(os.path.abspath(os.curdir),
                                  ProjectMapFile)
    if not os.path.exists(ProjectMapPath):
        Local.stdout.write("Error: Cannot find the Project Map file "
                           + ProjectMapPath
                           + "\nRunning script from the wrong directory?\n")
        sys.exit(1)
    return ProjectMapPath


def getProjectDir(ID):
    return os.path.join(os.path.abspath(os.curdir), ID)


def getSBOutputDirName(IsReferenceBuild):
    if IsReferenceBuild:
        return SBOutputDirReferencePrefix + SBOutputDirName
    else:
        return SBOutputDirName


#------------------------------------------------------------------------------
# Configuration setup.
#------------------------------------------------------------------------------

# Find Clang for static analysis.
if 'CC' in os.environ:
    Clang = os.environ['CC']
else:
    Clang = SATestUtils.which("clang", os.environ['PATH'])
if not Clang:
    print "Error: cannot find 'clang' in PATH"
    sys.exit(1)

# Number of parallel build jobs (passed to 'make' as -jN).
MaxJobs = int(math.ceil(multiprocessing.cpu_count() * 0.75))

# Project map stores info about all the "registered" projects.
ProjectMapFile = "projectMap.csv"

# Names of the project-specific scripts.
# The script that downloads the project.
DownloadScript = "download_project.sh"
# The script that needs to be executed before the build can start.
CleanupScript = "cleanup_run_static_analyzer.sh"
# This is a file containing commands for scan-build.
BuildScript = "run_static_analyzer.cmd"

# A marker comment in a build script which disables prefixing the remaining
# commands with scan-build.
NoPrefixCmd = "#NOPREFIX"
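
# An illustrative run_static_analyzer.cmd (project-specific; the commands here
# are made up):
#   ./configure
#   make
# runScanBuild() prefixes each non-empty line with "scan-build <options>",
# unless the #NOPREFIX marker appears first, in which case the remaining
# commands run unwrapped and receive the OUTPUT and CC environment variables.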

# The log folder and build log file names.
LogFolderName = "Logs"
BuildLogName = "run_static_analyzer.log"
# Summary file - contains the summary of the failures. Ex: This info can be
# displayed when buildbot detects a build failure.
NumOfFailuresInSummary = 10
FailuresSummaryFileName = "failures.txt"

# The scan-build result directory.
SBOutputDirName = "ScanBuildResults"
SBOutputDirReferencePrefix = "Ref"

# The name of the directory storing the cached project source. If this
# directory does not exist, the download script will be executed.
# That script should create the "CachedSource" directory and download the
# project source into it.
CachedSourceDirName = "CachedSource"

# The name of the directory containing the source code that will be analyzed.
# Each time a project is analyzed, a fresh copy of its CachedSource directory
# will be copied to the PatchedSource directory and then the local patches
# in PatchfileName will be applied (if PatchfileName exists).
PatchedSourceDirName = "PatchedSource"

# The name of the patchfile specifying any changes that should be applied
# to the CachedSource before analyzing.
PatchfileName = "changes_for_analyzer.patch"
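
# The patchfile is applied with 'patch -p1' from inside PatchedSource (see
# applyPatch below), so it is expected to be in the usual -p1 form, e.g. a
# diff between a pristine and a locally modified copy of the project source.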

# The list of checkers used during analyses.
# Currently, consists of all the non-experimental checkers, plus a few alpha
# checkers we don't want to regress on.
Checkers = ",".join([
    "alpha.unix.SimpleStream",
    "alpha.security.taint",
    "cplusplus.NewDeleteLeaks",
    "core",
    "cplusplus",
    "deadcode",
    "security",
    "unix",
    "osx",
    "nullability"
])

Verbose = 0


#------------------------------------------------------------------------------
# Test harness logic.
#------------------------------------------------------------------------------


def runCleanupScript(Dir, PBuildLogFile):
    """
    Run the pre-build cleanup script, if any.
    """
    Cwd = os.path.join(Dir, PatchedSourceDirName)
    ScriptPath = os.path.join(Dir, CleanupScript)
    SATestUtils.runScript(ScriptPath, PBuildLogFile, Cwd,
                          Stdout=Local.stdout, Stderr=Local.stderr)


def runDownloadScript(Dir, PBuildLogFile):
    """
    Run the script to download the project, if it exists.
    """
    ScriptPath = os.path.join(Dir, DownloadScript)
    SATestUtils.runScript(ScriptPath, PBuildLogFile, Dir,
                          Stdout=Local.stdout, Stderr=Local.stderr)


def downloadAndPatch(Dir, PBuildLogFile):
    """
    Download the project and apply the local patchfile if it exists.
    """
    CachedSourceDirPath = os.path.join(Dir, CachedSourceDirName)

    # If we don't already have the cached source, run the project's
    # download script to download it.
    if not os.path.exists(CachedSourceDirPath):
        runDownloadScript(Dir, PBuildLogFile)
        if not os.path.exists(CachedSourceDirPath):
            Local.stderr.write("Error: '%s' not found after download.\n" % (
                CachedSourceDirPath))
            sys.exit(1)

    PatchedSourceDirPath = os.path.join(Dir, PatchedSourceDirName)

    # Remove potentially stale patched source.
    if os.path.exists(PatchedSourceDirPath):
        shutil.rmtree(PatchedSourceDirPath)

    # Copy the cached source and apply any patches to the copy.
    shutil.copytree(CachedSourceDirPath, PatchedSourceDirPath, symlinks=True)
    applyPatch(Dir, PBuildLogFile)


def applyPatch(Dir, PBuildLogFile):
    PatchfilePath = os.path.join(Dir, PatchfileName)
    PatchedSourceDirPath = os.path.join(Dir, PatchedSourceDirName)
    if not os.path.exists(PatchfilePath):
        Local.stdout.write("  No local patches.\n")
        return

    Local.stdout.write("  Applying patch.\n")
    try:
        check_call("patch -p1 < '%s'" % (PatchfilePath),
                   cwd=PatchedSourceDirPath,
                   stderr=PBuildLogFile,
                   stdout=PBuildLogFile,
                   shell=True)
    except:
        Local.stderr.write("Error: Patch failed. See %s for details.\n" % (
            PBuildLogFile.name))
        sys.exit(1)


def runScanBuild(Dir, SBOutputDir, PBuildLogFile):
    """
    Build the project with scan-build by reading in the commands and
    prefixing them with the scan-build options.
    """
    BuildScriptPath = os.path.join(Dir, BuildScript)
    if not os.path.exists(BuildScriptPath):
        Local.stderr.write(
            "Error: build script is not defined: %s\n" % BuildScriptPath)
        sys.exit(1)

    AllCheckers = Checkers
    if 'SA_ADDITIONAL_CHECKERS' in os.environ:
        AllCheckers = AllCheckers + ',' + os.environ['SA_ADDITIONAL_CHECKERS']

    # Run scan-build from within the patched source directory.
    SBCwd = os.path.join(Dir, PatchedSourceDirName)

    SBOptions = "--use-analyzer '%s' " % Clang
    SBOptions += "-plist-html -o '%s' " % SBOutputDir
    SBOptions += "-enable-checker " + AllCheckers + " "
    SBOptions += "--keep-empty "
    AnalyzerConfig = [
        ("stable-report-filename", "true"),
        ("serialize-stats", "true"),
    ]
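
    # The assembled option will look something like:
    #   -analyzer-config 'stable-report-filename=true,serialize-stats=true'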
    SBOptions += "-analyzer-config '%s' " % (
        ",".join("%s=%s" % (key, value) for (key, value) in AnalyzerConfig))

    # Always use ccc-analyze to ensure that we can locate the failures
    # directory.
    SBOptions += "--override-compiler "
    ExtraEnv = {}
    try:
        SBCommandFile = open(BuildScriptPath, "r")
        SBPrefix = "scan-build " + SBOptions + " "
        for Command in SBCommandFile:
            Command = Command.strip()
            if len(Command) == 0:
                continue

            # Custom analyzer invocation specified by project.
            # Communicate required information using environment variables
            # instead.
            if Command == NoPrefixCmd:
                SBPrefix = ""
                ExtraEnv['OUTPUT'] = SBOutputDir
                ExtraEnv['CC'] = Clang
                continue

            # If using 'make', automatically imply a -jX argument
            # to speed up analysis. xcodebuild will
            # automatically use the maximum number of cores.
            if (Command.startswith("make ") or Command == "make") and \
                    "-j" not in Command:
                Command += " -j%d" % MaxJobs
            SBCommand = SBPrefix + Command

            if Verbose == 1:
                Local.stdout.write("  Executing: %s\n" % (SBCommand,))
            check_call(SBCommand, cwd=SBCwd,
                       stderr=PBuildLogFile,
                       stdout=PBuildLogFile,
                       env=dict(os.environ, **ExtraEnv),
                       shell=True)
    except CalledProcessError:
        Local.stderr.write("Error: scan-build failed. Its output was:\n")
        PBuildLogFile.seek(0)
        shutil.copyfileobj(PBuildLogFile, Local.stderr)
        sys.exit(1)


def runAnalyzePreprocessed(Dir, SBOutputDir, Mode):
    """
    Run analysis on a set of preprocessed files.
    """
    if os.path.exists(os.path.join(Dir, BuildScript)):
        Local.stderr.write(
            "Error: The preprocessed files project should not contain %s\n" % (
                BuildScript))
        raise Exception()

    CmdPrefix = Clang + " -cc1 "

    # For now, we assume the preprocessed files should be analyzed
    # with the OS X SDK.
    SDKPath = SATestUtils.getSDKPath("macosx")
    if SDKPath is not None:
        CmdPrefix += "-isysroot " + SDKPath + " "

    CmdPrefix += "-analyze -analyzer-output=plist -w "
    CmdPrefix += "-analyzer-checker=" + Checkers
    CmdPrefix += " -fcxx-exceptions -fblocks "

    if (Mode == 2):
        CmdPrefix += "-std=c++11 "

    PlistPath = os.path.join(Dir, SBOutputDir, "date")
    FailPath = os.path.join(PlistPath, "failures")
    os.makedirs(FailPath)

    for FullFileName in glob.glob(Dir + "/*"):
        FileName = os.path.basename(FullFileName)
        Failed = False

        # Only run the analysis on supported files.
        if SATestUtils.hasNoExtension(FileName):
            continue
        if not SATestUtils.isValidSingleInputFile(FileName):
            Local.stderr.write(
                "Error: Invalid single input file %s.\n" % (FullFileName,))
            raise Exception()

        # Build and call the analyzer command.
        OutputOption = "-o '%s.plist' " % os.path.join(PlistPath, FileName)
        Command = CmdPrefix + OutputOption + ("'%s'" % FileName)
        LogFile = open(os.path.join(FailPath, FileName + ".stderr.txt"), "w+b")
        try:
            if Verbose == 1:
                Local.stdout.write("  Executing: %s\n" % (Command,))
            check_call(Command, cwd=Dir, stderr=LogFile,
                       stdout=LogFile,
                       shell=True)
        except CalledProcessError, e:
            Local.stderr.write("Error: Analysis of %s failed. "
                               "See %s for details. "
                               "Error code %d.\n" % (
                                   FullFileName, LogFile.name, e.returncode))
            Failed = True
        finally:
            LogFile.close()

        # If command did not fail, erase the log file.
        if not Failed:
            os.remove(LogFile.name)


def getBuildLogPath(SBOutputDir):
    return os.path.join(SBOutputDir, LogFolderName, BuildLogName)


def removeLogFile(SBOutputDir):
    BuildLogPath = getBuildLogPath(SBOutputDir)
    # Clean up the log file.
    if (os.path.exists(BuildLogPath)):
        RmCommand = "rm '%s'" % BuildLogPath
        if Verbose == 1:
            Local.stdout.write("  Executing: %s\n" % (RmCommand,))
        check_call(RmCommand, shell=True)


def buildProject(Dir, SBOutputDir, ProjectBuildMode, IsReferenceBuild):
    TBegin = time.time()

    BuildLogPath = getBuildLogPath(SBOutputDir)
    Local.stdout.write("Log file: %s\n" % (BuildLogPath,))
    Local.stdout.write("Output directory: %s\n" % (SBOutputDir, ))

    removeLogFile(SBOutputDir)

    # Clean up scan build results.
    if (os.path.exists(SBOutputDir)):
        RmCommand = "rm -r '%s'" % SBOutputDir
        if Verbose == 1:
            Local.stdout.write("  Executing: %s\n" % (RmCommand,))
        check_call(RmCommand, shell=True, stdout=Local.stdout,
                   stderr=Local.stderr)
    assert(not os.path.exists(SBOutputDir))
    os.makedirs(os.path.join(SBOutputDir, LogFolderName))

    # Build and analyze the project.
    with open(BuildLogPath, "wb+") as PBuildLogFile:
        if (ProjectBuildMode == 1):
            downloadAndPatch(Dir, PBuildLogFile)
            runCleanupScript(Dir, PBuildLogFile)
            runScanBuild(Dir, SBOutputDir, PBuildLogFile)
        else:
            runAnalyzePreprocessed(Dir, SBOutputDir, ProjectBuildMode)

        if IsReferenceBuild:
            runCleanupScript(Dir, PBuildLogFile)
            normalizeReferenceResults(Dir, SBOutputDir, ProjectBuildMode)

    Local.stdout.write("Build complete (time: %.2f). "
                       "See the log for more details: %s\n" % (
                           (time.time() - TBegin), BuildLogPath))


def normalizeReferenceResults(Dir, SBOutputDir, ProjectBuildMode):
    """
    Make the absolute paths relative in the reference results.
    """
    for (DirPath, Dirnames, Filenames) in os.walk(SBOutputDir):
        for F in Filenames:
            if (not F.endswith('plist')):
                continue
            Plist = os.path.join(DirPath, F)
            Data = plistlib.readPlist(Plist)
            PathPrefix = Dir
            if (ProjectBuildMode == 1):
                PathPrefix = os.path.join(Dir, PatchedSourceDirName)
            Paths = [SourceFile[len(PathPrefix) + 1:]
                     if SourceFile.startswith(PathPrefix)
                     else SourceFile for SourceFile in Data['files']]
            Data['files'] = Paths

            # Remove transient fields which change from run to run.
            for Diag in Data['diagnostics']:
                if 'HTMLDiagnostics_files' in Diag:
                    Diag.pop('HTMLDiagnostics_files')
            if 'clang_version' in Data:
                Data.pop('clang_version')

            plistlib.writePlist(Data, Plist)


def CleanUpEmptyPlists(SBOutputDir):
    """
    A plist file is created for each call to the analyzer (each source file).
    We are only interested in the ones that have bug reports,
    so delete the rest.
    """
    for F in glob.glob(SBOutputDir + "/*/*.plist"):
        P = os.path.join(SBOutputDir, F)

        Data = plistlib.readPlist(P)
        # Delete empty reports.
        if not Data['files']:
            os.remove(P)
            continue


def CleanUpEmptyFolders(SBOutputDir):
    """
    Remove empty folders from results, as git would not store them.
    """
    Subfolders = glob.glob(SBOutputDir + "/*")
    for Folder in Subfolders:
        if not os.listdir(Folder):
            os.removedirs(Folder)


def checkBuild(SBOutputDir):
    """
    Given the scan-build output directory, checks if the build failed
    (by searching for the failures directories). If there are failures, it
    creates a summary file in the output directory.
    """
    # Check if there are failures.
    Failures = glob.glob(SBOutputDir + "/*/failures/*.stderr.txt")
    TotalFailed = len(Failures)
    if TotalFailed == 0:
        CleanUpEmptyPlists(SBOutputDir)
        CleanUpEmptyFolders(SBOutputDir)
        Plists = glob.glob(SBOutputDir + "/*/*.plist")
        Local.stdout.write(
            "Number of bug reports (non-empty plist files) produced: %d\n" %
            len(Plists))
        return

    Local.stderr.write("Error: analysis failed.\n")
    Local.stderr.write("Total of %d failures discovered.\n" % TotalFailed)
    if TotalFailed > NumOfFailuresInSummary:
        Local.stderr.write(
            "See the first %d below.\n" % NumOfFailuresInSummary)
        # TODO: Add a line "See the results folder for more."

    Idx = 0
    for FailLogPathI in Failures:
        if Idx >= NumOfFailuresInSummary:
            break
        Idx += 1
        Local.stderr.write("\n-- Error #%d -----------\n" % Idx)
        with open(FailLogPathI, "r") as FailLogI:
            shutil.copyfileobj(FailLogI, Local.stdout)

    sys.exit(1)


def runCmpResults(Dir, Strictness=0):
    """
    Compare the warnings produced by scan-build.
    Strictness defines the success criteria for the test:
      0 - success if there are no crashes or analyzer failures.
      1 - success if there is no difference in the number of reported bugs.
      2 - success if all the bug reports are identical.

    :return success: Whether tests pass according to the Strictness
    criteria.
    """
    TestsPassed = True
    TBegin = time.time()

    RefDir = os.path.join(Dir, SBOutputDirReferencePrefix + SBOutputDirName)
    NewDir = os.path.join(Dir, SBOutputDirName)

    # We have to go one level down the directory tree.
    RefList = glob.glob(RefDir + "/*")
    NewList = glob.glob(NewDir + "/*")

    # Log folders are also located in the results dir, so ignore them.
    RefLogDir = os.path.join(RefDir, LogFolderName)
    if RefLogDir in RefList:
        RefList.remove(RefLogDir)
    NewList.remove(os.path.join(NewDir, LogFolderName))

    if len(RefList) != len(NewList):
        print "Mismatch in number of results folders: %s vs %s" % (
            RefList, NewList)
        sys.exit(1)

    # There might be more than one folder underneath - one per each scan-build
    # command (Ex: one for configure and one for make).
    if (len(RefList) > 1):
        # Assume that the corresponding folders have the same names.
        RefList.sort()
        NewList.sort()

    # Iterate and find the differences.
    NumDiffs = 0
    PairList = zip(RefList, NewList)
    for P in PairList:
        RefDir = P[0]
        NewDir = P[1]

        assert(RefDir != NewDir)
        if Verbose == 1:
            Local.stdout.write("  Comparing Results: %s %s\n" % (
                RefDir, NewDir))

        PatchedSourceDirPath = os.path.join(Dir, PatchedSourceDirName)
        Opts, Args = CmpRuns.generate_option_parser().parse_args(
            ["--rootA", "", "--rootB", PatchedSourceDirPath])
        # Scan the results, delete empty plist files.
        NumDiffs, ReportsInRef, ReportsInNew = \
            CmpRuns.dumpScanBuildResultsDiff(RefDir, NewDir, Opts,
                                             deleteEmpty=False,
                                             Stdout=Local.stdout)
        if (NumDiffs > 0):
            Local.stdout.write("Warning: %s differences in diagnostics.\n"
                               % NumDiffs)
        if Strictness >= 2 and NumDiffs > 0:
            Local.stdout.write("Error: Diffs found in strict mode (2).\n")
            TestsPassed = False
        elif Strictness >= 1 and ReportsInRef != ReportsInNew:
            Local.stdout.write("Error: The number of results is different "
                               "in strict mode (1).\n")
            TestsPassed = False

    Local.stdout.write("Diagnostic comparison complete (time: %.2f).\n" % (
        time.time() - TBegin))
    return TestsPassed


def cleanupReferenceResults(SBOutputDir):
    """
    Delete html, css, and js files from reference results. These can
    include multiple copies of the benchmark source and so get very large.
    """
    Extensions = ["html", "css", "js"]
    for E in Extensions:
        for F in glob.glob("%s/*/*.%s" % (SBOutputDir, E)):
            P = os.path.join(SBOutputDir, F)
            RmCommand = "rm '%s'" % P
            check_call(RmCommand, shell=True)

    # Remove the log file. It leaks absolute path names.
    removeLogFile(SBOutputDir)


class TestProjectThread(threading.Thread):
    def __init__(self, TasksQueue, ResultsDiffer, FailureFlag):
        """
        :param ResultsDiffer: Used to signify that results differ from
        the canonical ones.
        :param FailureFlag: Used to signify a failure during the run.
        """
        self.TasksQueue = TasksQueue
        self.ResultsDiffer = ResultsDiffer
        self.FailureFlag = FailureFlag
        super(TestProjectThread, self).__init__()

        # Needed to gracefully handle interrupts with Ctrl-C
        self.daemon = True

    def run(self):
        while not self.TasksQueue.empty():
            try:
                ProjArgs = self.TasksQueue.get()
                Logger = logging.getLogger(ProjArgs[0])
                Local.stdout = StreamToLogger(Logger, logging.INFO)
                Local.stderr = StreamToLogger(Logger, logging.ERROR)
                if not testProject(*ProjArgs):
                    self.ResultsDiffer.set()
                self.TasksQueue.task_done()
            except:
                self.FailureFlag.set()
                raise


def testProject(ID, ProjectBuildMode, IsReferenceBuild=False, Strictness=0):
    """
    Test a given project.
    :return TestsPassed: Whether tests have passed according
    to the :param Strictness: criteria.
    """
    Local.stdout.write(" \n\n--- Building project %s\n" % (ID,))

    TBegin = time.time()

    Dir = getProjectDir(ID)
    if Verbose == 1:
        Local.stdout.write("  Build directory: %s.\n" % (Dir,))

    # Set the build results directory.
    RelOutputDir = getSBOutputDirName(IsReferenceBuild)
    SBOutputDir = os.path.join(Dir, RelOutputDir)

    buildProject(Dir, SBOutputDir, ProjectBuildMode, IsReferenceBuild)

    checkBuild(SBOutputDir)

    if IsReferenceBuild:
        cleanupReferenceResults(SBOutputDir)
        TestsPassed = True
    else:
        TestsPassed = runCmpResults(Dir, Strictness)

    Local.stdout.write("Completed tests for project %s (time: %.2f).\n" % (
        ID, (time.time() - TBegin)))
    return TestsPassed


def projectFileHandler():
    return open(getProjectMapPath(), "rb")


def iterateOverProjects(PMapFile):
    """
    Iterate over all projects defined in the project file handler `PMapFile`
    from the start.
    """
    PMapFile.seek(0)
    for I in csv.reader(PMapFile):
        if (SATestUtils.isCommentCSVLine(I)):
            continue
        yield I


def validateProjectFile(PMapFile):
    """
    Validate the project file.
    """
    for I in iterateOverProjects(PMapFile):
        if len(I) != 2:
            print "Error: Rows in the ProjectMapFile should have 2 entries."
            raise Exception()
        if I[1] not in ('0', '1', '2'):
            print "Error: Second entry in the ProjectMapFile should be 0" \
                  " (single file), 1 (project), or 2 (single file c++11)."
            raise Exception()


def singleThreadedTestAll(ProjectsToTest):
    """
    Run all projects.
    :return: whether tests have passed.
    """
    Success = True
    for ProjArgs in ProjectsToTest:
        Success &= testProject(*ProjArgs)
    return Success


def multiThreadedTestAll(ProjectsToTest, Jobs):
    """
    Run each project in a separate thread.

    This is OK despite the GIL, as testing is blocked
    on launching external processes.

    :return: whether tests have passed.
    """
    TasksQueue = Queue.Queue()

    for ProjArgs in ProjectsToTest:
        TasksQueue.put(ProjArgs)

    ResultsDiffer = threading.Event()
    FailureFlag = threading.Event()

    for i in range(Jobs):
        T = TestProjectThread(TasksQueue, ResultsDiffer, FailureFlag)
        T.start()

    # Required to handle Ctrl-C gracefully.
    while TasksQueue.unfinished_tasks:
        time.sleep(0.1)  # Seconds.
        if FailureFlag.is_set():
            Local.stderr.write("Test runner crashed\n")
            sys.exit(1)
    return not ResultsDiffer.is_set()


def testAll(Args):
    ProjectsToTest = []

    with projectFileHandler() as PMapFile:
        validateProjectFile(PMapFile)

        # Test the projects.
        for (ProjName, ProjBuildMode) in iterateOverProjects(PMapFile):
            ProjectsToTest.append((ProjName,
                                   int(ProjBuildMode),
                                   Args.regenerate,
                                   Args.strictness))
    if Args.jobs <= 1:
        return singleThreadedTestAll(ProjectsToTest)
    else:
        return multiThreadedTestAll(ProjectsToTest, Args.jobs)


if __name__ == '__main__':
    # Parse command line arguments.
    Parser = argparse.ArgumentParser(
        description='Test the Clang Static Analyzer.')
    Parser.add_argument('--strictness', dest='strictness', type=int, default=0,
                        help='0 to fail on runtime errors, 1 to fail when the \
                             number of found bugs are different from the \
                             reference, 2 to fail on any difference from the \
                             reference. Default is 0.')
    Parser.add_argument('-r', dest='regenerate', action='store_true',
                        default=False, help='Regenerate reference output.')
    Parser.add_argument('-j', '--jobs', dest='jobs', type=int,
                        default=0,
                        help='Number of projects to test concurrently')
    Args = Parser.parse_args()

    TestsPassed = testAll(Args)
    if not TestsPassed:
        print "ERROR: Tests failed."
        sys.exit(42)