Add web page UI automation test framework

lily 2019-02-18 22:20:58 +08:00
parent f9009d5ea5
commit 80d50f0199
14 changed files with 1744 additions and 53 deletions

11
.idea/Automated-Test.iml Normal file

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.6 (python)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TestRunnerService">
<option name="PROJECT_TEST_RUNNER" value="Unittests" />
</component>
</module>

7
.idea/misc.xml Normal file

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.6 (python)" project-jdk-type="Python SDK" />
</project>

8
.idea/modules.xml Normal file

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/Automated-Test.iml" filepath="$PROJECT_DIR$/.idea/Automated-Test.iml" />
</modules>
</component>
</project>

6
.idea/vcs.xml Normal file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>

407
.idea/workspace.xml Normal file

@@ -0,0 +1,407 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ChangeListManager">
<list default="true" id="09a4aeb4-cd0a-454b-9878-fc6edee9400a" name="Default" comment="">
<change afterPath="$PROJECT_DIR$/HTMLTestRunner.py" afterDir="false" />
<change afterPath="$PROJECT_DIR$/__init__.py" afterDir="false" />
<change afterPath="$PROJECT_DIR$/common/__init__.py" afterDir="false" />
<change afterPath="$PROJECT_DIR$/run.py" afterDir="false" />
<change afterPath="$PROJECT_DIR$/testcase/__init__.py" afterDir="false" />
<change afterPath="$PROJECT_DIR$/testcase/test.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/README.md" beforeDir="false" afterPath="$PROJECT_DIR$/README.md" afterDir="false" />
<change beforePath="$PROJECT_DIR$/movie.py" beforeDir="false" />
</list>
<option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
<option name="TRACKING_ENABLED" value="true" />
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="CoverageDataManager">
<SUITE FILE_PATH="coverage/Automated_Test$Unittests_for_t_MyTestCase.coverage" NAME="Unittests for t.MyTestCase Coverage Results" MODIFIED="1550495220265" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/Automated_Test$Unnamed.coverage" NAME="Unnamed Coverage Results" MODIFIED="1550499537590" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/testcase" />
<SUITE FILE_PATH="coverage/Automated_Test$Unittests_in_test_py.coverage" NAME="Unittests in test.py Coverage Results" MODIFIED="1550499024214" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/testcase" />
<SUITE FILE_PATH="coverage/Automated_Test$run__1_.coverage" NAME="run (1) Coverage Results" MODIFIED="1550497417209" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/Automated_Test$Unittests_for_test_TestDemo.coverage" NAME="Unittests for test.TestDemo Coverage Results" MODIFIED="1550493736228" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/Automated_Test$run.coverage" NAME="run Coverage Results" MODIFIED="1550497543321" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/Automated_Test$Unittests_in_t_py.coverage" NAME="Unittests in t.py Coverage Results" MODIFIED="1550495271315" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
</component>
<component name="FileEditorManager">
<leaf>
<file leaf-file-name="test.py" pinned="false" current-in-tab="true">
<entry file="file://$PROJECT_DIR$/testcase/test.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="114">
<caret line="6" lean-forward="true" selection-start-line="6" selection-end-line="6" />
<folding>
<element signature="e#0#41#0" expanded="true" />
</folding>
</state>
</provider>
</entry>
</file>
</leaf>
</component>
<component name="FileTemplateManagerImpl">
<option name="RECENT_TEMPLATES">
<list>
<option value="Python Script" />
</list>
</option>
</component>
<component name="FindInProjectRecents">
<findStrings>
<find>HTMLTestRunner</find>
</findStrings>
</component>
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
</component>
<component name="IdeDocumentHistory">
<option name="CHANGED_PATHS">
<list>
<option value="$PROJECT_DIR$/test.py" />
<option value="$PROJECT_DIR$/t.py" />
<option value="$PROJECT_DIR$/README.md" />
<option value="$PROJECT_DIR$/test_case/test.py" />
<option value="$PROJECT_DIR$/test_case/t.py" />
<option value="$PROJECT_DIR$/common/__init__.py" />
<option value="$PROJECT_DIR$/__init__.py" />
<option value="$PROJECT_DIR$/run.py" />
<option value="$PROJECT_DIR$/testcase/test.py" />
<option value="$PROJECT_DIR$/HTMLTestRunner.py" />
</list>
</option>
</component>
<component name="JsBuildToolGruntFileManager" detection-done="true" sorting="DEFINITION_ORDER" />
<component name="JsBuildToolPackageJson" detection-done="true" sorting="DEFINITION_ORDER" />
<component name="JsGulpfileManager">
<detection-done>true</detection-done>
<sorting>DEFINITION_ORDER</sorting>
</component>
<component name="NodePackageJsonFileManager">
<packageJsonPaths />
</component>
<component name="ProjectFrameBounds" extendedState="6">
<option name="x" value="948" />
<option name="y" value="27" />
<option name="width" value="981" />
<option name="height" value="1092" />
</component>
<component name="ProjectView">
<navigator proportions="" version="1">
<foldersAlwaysOnTop value="true" />
</navigator>
<panes>
<pane id="Scope" />
<pane id="ProjectPane">
<subPane>
<expand>
<path>
<item name="Automated-Test" type="b2602c69:ProjectViewProjectNode" />
<item name="Automated-Test" type="462c0819:PsiDirectoryNode" />
</path>
<path>
<item name="Automated-Test" type="b2602c69:ProjectViewProjectNode" />
<item name="Automated-Test" type="462c0819:PsiDirectoryNode" />
<item name="report" type="462c0819:PsiDirectoryNode" />
</path>
<path>
<item name="Automated-Test" type="b2602c69:ProjectViewProjectNode" />
<item name="Automated-Test" type="462c0819:PsiDirectoryNode" />
<item name="testcase" type="462c0819:PsiDirectoryNode" />
</path>
</expand>
<select />
</subPane>
</pane>
</panes>
</component>
<component name="PropertiesComponent">
<property name="WebServerToolWindowFactoryState" value="false" />
<property name="last_opened_file_path" value="$PROJECT_DIR$/testcase/test.py" />
<property name="nodejs_interpreter_path.stuck_in_default_project" value="undefined stuck path" />
<property name="nodejs_npm_path_reset_for_default_project" value="true" />
<property name="settings.editor.selected.configurable" value="com.jetbrains.python.configuration.PyActiveSdkModuleConfigurable" />
</component>
<component name="RecentsManager">
<key name="MoveFile.RECENT_KEYS">
<recent name="D:\githublily\Automated-Test" />
<recent name="D:\githublily\Automated-Test\test_case" />
<recent name="D:\githublily\Automated-Test\reports" />
</key>
<key name="CopyFile.RECENT_KEYS">
<recent name="D:\githublily\Automated-Test" />
</key>
</component>
<component name="RunDashboard">
<option name="ruleStates">
<list>
<RuleState>
<option name="name" value="ConfigurationTypeDashboardGroupingRule" />
</RuleState>
<RuleState>
<option name="name" value="StatusDashboardGroupingRule" />
</RuleState>
</list>
</option>
</component>
<component name="RunManager" selected="Python.Unnamed">
<configuration name="Unnamed" type="PythonConfigurationType" factoryName="Python">
<module name="Automated-Test" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="D:\tmp\test\venv\Scripts\python3.exe" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/testcase" />
<option name="IS_MODULE_SDK" value="false" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/testcase/test.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
</configuration>
<configuration default="true" type="PythonConfigurationType" factoryName="Python">
<module name="Automated-Test" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="D:\tmp\test\venv\Scripts\python3.exe" />
<option name="WORKING_DIRECTORY" value="" />
<option name="IS_MODULE_SDK" value="false" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
</configuration>
<configuration name="run" type="PythonConfigurationType" factoryName="Python" temporary="true">
<module name="Automated-Test" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="D:\tmp\test\venv\Scripts\python3.exe" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/run.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
</configuration>
<list>
<item itemvalue="Python.Unnamed" />
<item itemvalue="Python.run" />
</list>
<recent_temporary>
<list>
<item itemvalue="Python.run" />
</list>
</recent_temporary>
</component>
<component name="SvnConfiguration">
<configuration />
</component>
<component name="TaskManager">
<task active="true" id="Default" summary="Default task">
<changelist id="09a4aeb4-cd0a-454b-9878-fc6edee9400a" name="Default" comment="" />
<created>1550480668294</created>
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1550480668294</updated>
</task>
<servers />
</component>
<component name="TestHistory">
<history-entry file="Unittests_in_test_py - 2019.02.18 at 20h 47m 35s.xml">
<configuration name="Unittests in test.py" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_in_test_py - 2019.02.18 at 20h 47m 59s.xml">
<configuration name="Unittests in test.py" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_in_test_py - 2019.02.18 at 20h 50m 08s.xml">
<configuration name="Unittests in test.py" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_in_test_py - 2019.02.18 at 20h 50m 29s.xml">
<configuration name="Unittests in test.py" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_in_t_py - 2019.02.18 at 20h 59m 20s.xml">
<configuration name="Unittests in t.py" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_in_t_py - 2019.02.18 at 20h 59m 40s.xml">
<configuration name="Unittests in t.py" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_for_t_MyTestCase - 2019.02.18 at 21h 07m 17s.xml">
<configuration name="Unittests for t.MyTestCase" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_in_t_py - 2019.02.18 at 21h 08m 08s.xml">
<configuration name="Unittests in t.py" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_in_test_py - 2019.02.18 at 22h 09m 47s.xml">
<configuration name="Unittests in test.py" configurationId="tests" />
</history-entry>
<history-entry file="Unittests_in_test_py - 2019.02.18 at 22h 10m 27s.xml">
<configuration name="Unittests in test.py" configurationId="tests" />
</history-entry>
</component>
<component name="ToolWindowManager">
<frame x="-7" y="-7" width="1295" height="735" extended-state="6" />
<editor active="true" />
<layout>
<window_info anchor="bottom" id="TODO" order="6" />
<window_info anchor="bottom" id="Event Log" side_tool="true" weight="0.3292683" />
<window_info anchor="bottom" id="Database Changes" show_stripe_button="false" />
<window_info anchor="bottom" id="Run" order="2" visible="true" weight="0.3292683" />
<window_info anchor="bottom" id="Version Control" />
<window_info anchor="bottom" id="Python Console" />
<window_info anchor="bottom" id="Terminal" />
<window_info active="true" content_ui="combo" id="Project" order="0" visible="true" weight="0.2497979" />
<window_info anchor="bottom" id="Docker" show_stripe_button="false" />
<window_info anchor="right" id="Database" />
<window_info anchor="bottom" id="Find" order="1" weight="0.3292683" />
<window_info anchor="right" id="SciView" />
<window_info id="Structure" order="1" side_tool="true" weight="0.25" />
<window_info anchor="bottom" id="Debug" order="3" weight="0.3989547" />
<window_info id="Favorites" side_tool="true" />
<window_info anchor="right" id="Commander" internal_type="SLIDING" order="0" type="SLIDING" weight="0.4" />
<window_info anchor="right" id="Ant Build" order="1" weight="0.25" />
<window_info anchor="bottom" id="Inspection" order="5" weight="0.4" />
<window_info anchor="right" content_ui="combo" id="Hierarchy" order="2" weight="0.25" />
<window_info anchor="bottom" id="Message" order="0" />
<window_info anchor="bottom" id="Cvs" order="4" weight="0.25" />
</layout>
</component>
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="1" />
</component>
<component name="VcsContentAnnotationSettings">
<option name="myLimit" value="2678400000" />
</component>
<component name="editorHistoryManager">
<entry file="file://$PROJECT_DIR$/movie.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="-516" />
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/reports/HTMLTestRunner.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="224">
<caret line="601" column="83" selection-start-line="601" selection-start-column="83" selection-end-line="601" selection-end-column="83" />
</state>
</provider>
</entry>
<entry file="file://D:/software/Python/python2.7.10/Lib/unittest/loader.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="119">
<caret line="31" selection-start-line="31" selection-end-line="31" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/report/result.html">
<provider selected="true" editor-type-id="text-editor" />
</entry>
<entry file="file://$PROJECT_DIR$/test_case/test.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="144">
<caret line="28" column="40" lean-forward="true" selection-start-line="28" selection-start-column="40" selection-end-line="28" selection-end-column="40" />
<folding>
<element signature="e#15#56#0" expanded="true" />
</folding>
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/test_case/t.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="133">
<caret line="7" lean-forward="true" selection-start-line="7" selection-end-line="7" />
<folding>
<element signature="e#16#31#0" expanded="true" />
</folding>
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/README.md">
<provider selected="true" editor-type-id="split-provider[text-editor;markdown-preview-editor]">
<state split_layout="SPLIT">
<first_editor>
<caret column="15" lean-forward="true" selection-start-column="15" selection-end-column="15" />
</first_editor>
<second_editor />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/common/__init__.py">
<provider selected="true" editor-type-id="text-editor" />
</entry>
<entry file="file://$PROJECT_DIR$/__init__.py">
<provider selected="true" editor-type-id="text-editor" />
</entry>
<entry file="file://$PROJECT_DIR$/run.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="285">
<caret line="15" column="27" lean-forward="true" selection-start-line="15" selection-start-column="27" selection-end-line="15" selection-end-column="27" />
<folding>
<element signature="e#15#36#0" expanded="true" />
</folding>
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/report/2019-02-18-22-15-31result.html">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="38">
<caret line="2" column="43" lean-forward="true" selection-start-line="2" selection-start-column="43" selection-end-line="2" selection-end-column="43" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/report/2019-02-18-22-16-23result.html">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="133">
<caret line="7" column="4" lean-forward="true" selection-start-line="7" selection-start-column="4" selection-end-line="7" selection-end-column="4" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/HTMLTestRunner.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="848">
<caret line="866" column="21" selection-start-line="866" selection-start-column="21" selection-end-line="866" selection-end-column="21" />
<folding>
<element signature="e#3209#3224#0" expanded="true" />
</folding>
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/report/2019-02-18-22-18-57result.html">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="57">
<caret line="3" column="6" lean-forward="true" selection-start-line="3" selection-start-column="6" selection-end-line="3" selection-end-column="6" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/testcase/test.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="114">
<caret line="6" lean-forward="true" selection-start-line="6" selection-end-line="6" />
<folding>
<element signature="e#0#41#0" expanded="true" />
</folding>
</state>
</provider>
</entry>
</component>
</project>

867
HTMLTestRunner.py Normal file

@@ -0,0 +1,867 @@
"""
A TestRunner for use with the Python unit testing framework. It
generates an HTML report to show the result at a glance.
The simplest way to use this is to invoke its main method. E.g.
import unittest
import HTMLTestRunner
... define your tests ...
if __name__ == '__main__':
HTMLTestRunner.main()
For more customization options, instantiate an HTMLTestRunner object.
HTMLTestRunner is a counterpart to unittest's TextTestRunner. E.g.
# output to a file
fp = open('my_report.html', 'wb')
runner = HTMLTestRunner.HTMLTestRunner(
stream=fp,
title='My unit test',
description='This demonstrates the report output by HTMLTestRunner.'
)
# Use an external stylesheet.
# See the Template_mixin class for more customizable options
runner.STYLESHEET_TMPL = '<link rel="stylesheet" href="my_stylesheet.css" type="text/css">'
# run the test
runner.run(my_test_suite)
------------------------------------------------------------------------
Copyright (c) 2004-2007, Wai Yip Tung
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name Wai Yip Tung nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# URL: http://tungwaiyip.info/software/HTMLTestRunner.html
__author__ = "Wai Yip Tung , bugmaster"
__version__ = "0.8.2"
"""
Change History
Version 0.8.2
* Show output inline instead of popup window (Viorel Lupu).
Version in 0.8.1
* Validated XHTML (Wolfgang Borgert).
* Added description of test classes and test cases.
Version in 0.8.0
* Define Template_mixin class for customization.
* Workaround a IE 6 bug that it does not treat <script> block as CDATA.
Version in 0.7.1
* Back port to Python 2.3 (Frank Horowitz).
* Fix missing scroll bars in detail log (Podi).
"""
# TODO: color stderr
# TODO: simplify javascript using more than 1 class in the class attribute?
import datetime
import io
import sys
import time
import unittest
from xml.sax import saxutils
# ------------------------------------------------------------------------
# The redirectors below are used to capture output during testing. Output
# sent to sys.stdout and sys.stderr is automatically captured. However
# in some cases sys.stdout is already cached before HTMLTestRunner is
# invoked (e.g. calling logging.basicConfig). In order to capture that
# output, use the redirectors for the cached stream.
#
# e.g.
# >>> logging.basicConfig(stream=HTMLTestRunner.stdout_redirector)
# >>>
class OutputRedirector(object):
""" Wrapper to redirect stdout or stderr """
def __init__(self, fp):
self.fp = fp
def write(self, s):
self.fp.write(s)
def writelines(self, lines):
self.fp.writelines(lines)
def flush(self):
self.fp.flush()
stdout_redirector = OutputRedirector(sys.stdout)
stderr_redirector = OutputRedirector(sys.stderr)
# ----------------------------------------------------------------------
# Template
class Template_mixin(object):
"""
Define an HTML template for report customization and generation.
Overall structure of an HTML report
HTML
+------------------------+
|<html> |
| <head> |
| |
| STYLESHEET |
| +----------------+ |
| | | |
| +----------------+ |
| |
| </head> |
| |
| <body> |
| |
| HEADING |
| +----------------+ |
| | | |
| +----------------+ |
| |
| REPORT |
| +----------------+ |
| | | |
| +----------------+ |
| |
| ENDING |
| +----------------+ |
| | | |
| +----------------+ |
| |
| </body> |
|</html> |
+------------------------+
"""
STATUS = {
0: 'pass',
1: 'fail',
2: 'error',
}
DEFAULT_TITLE = 'Unit Test Report'
DEFAULT_DESCRIPTION = ''
# ------------------------------------------------------------------------
# HTML Template
HTML_TMPL = r"""<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>%(title)s</title>
<meta name="generator" content="%(generator)s"/>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<link rel="stylesheet" href="http://cdn.bootcss.com/bootstrap/3.3.0/css/bootstrap.min.css">
<script src="http://cdn.bootcss.com/bootstrap/3.3.0/js/bootstrap.min.js"></script>
<script src="http://apps.bdimg.com/libs/Chart.js/0.2.0/Chart.min.js"></script>
<!-- <link href="https://cdn.bootcss.com/echarts/3.8.5/echarts.common.min.js" rel="stylesheet"> -->
%(stylesheet)s
</head>
<body>
<script language="javascript" type="text/javascript"><!--
output_list = Array();
/* level - 0:Summary; 1:Failed; 2:All */
function showCase(level) {
trs = document.getElementsByTagName("tr");
for (var i = 0; i < trs.length; i++) {
tr = trs[i];
id = tr.id;
if (id.substr(0,2) == 'ft') {
if (level < 1) {
tr.className = 'hiddenRow';
}
else {
tr.className = '';
}
}
if (id.substr(0,2) == 'pt') {
if (level > 1) {
tr.className = '';
}
else {
tr.className = 'hiddenRow';
}
}
}
}
function showClassDetail(cid, count) {
var id_list = Array(count);
var toHide = 1;
for (var i = 0; i < count; i++) {
tid0 = 't' + cid.substr(1) + '.' + (i+1);
tid = 'f' + tid0;
tr = document.getElementById(tid);
if (!tr) {
tid = 'p' + tid0;
tr = document.getElementById(tid);
}
id_list[i] = tid;
if (tr.className) {
toHide = 0;
}
}
for (var i = 0; i < count; i++) {
tid = id_list[i];
if (toHide) {
document.getElementById('div_'+tid).style.display = 'none'
document.getElementById(tid).className = 'hiddenRow';
}
else {
document.getElementById(tid).className = '';
}
}
}
function showTestDetail(div_id){
var details_div = document.getElementById(div_id)
var displayState = details_div.style.display
// alert(displayState)
if (displayState != 'block' ) {
displayState = 'block'
details_div.style.display = 'block'
}
else {
details_div.style.display = 'none'
}
}
function html_escape(s) {
s = s.replace(/&/g,'&amp;');
s = s.replace(/</g,'&lt;');
s = s.replace(/>/g,'&gt;');
return s;
}
/* obsoleted by detail in <div>
function showOutput(id, name) {
var w = window.open("", //url
name,
"resizable,scrollbars,status,width=800,height=450");
d = w.document;
d.write("<pre>");
d.write(html_escape(output_list[id]));
d.write("\n");
d.write("<a href='javascript:window.close()'>close</a>\n");
d.write("</pre>\n");
d.close();
}
*/
--></script>
%(heading)s
%(report)s
%(ending)s
%(chart_script)s
</body>
</html>
"""
# variables: (title, generator, stylesheet, heading, report, ending)
# ------------------------------------------------------------------------
# Stylesheet
#
# alternatively use a <link> for external style sheet, e.g.
# <link rel="stylesheet" href="$url" type="text/css">
STYLESHEET_TMPL = """
<style type="text/css" media="screen">
body { font-family: verdana, arial, helvetica, sans-serif; font-size: 80%; }
table { font-size: 100%; }
pre { }
/* -- heading ---------------------------------------------------------------------- */
h1 {
font-size: 16pt;
color: gray;
}
.heading {
margin-top: 0ex;
margin-bottom: 1ex;
margin-left: 10px;
}
.heading .attribute {
margin-top: 1ex;
margin-bottom: 0;
}
.heading .description {
margin-top: 4ex;
margin-bottom: 6ex;
}
/* -- css div popup ------------------------------------------------------------------------ */
a.popup_link {
}
a.popup_link:hover {
color: red;
}
.popup_window {
display: none;
position: relative;
left: 0px;
top: 0px;
/*border: solid #627173 1px; */
font-family: "Lucida Console", "Courier New", Courier, monospace;
text-align: left;
font-size: 8pt;
width: 500px;
}
/* -- report ------------------------------------------------------------------------ */
#show_detail_line {
margin-top: 3ex;
margin-bottom: 1ex;
margin-left: 10px;
}
#result_table {
width: 80%;
border-collapse: collapse;
border: 1px solid #777;
margin-left: 10px;
}
#header_row {
font-weight: bold;
color: #606060;
background-color: #f5f5f5;
border-top-width: 10px;
border-color: #d6e9c6;
font-size: 12px;
}
#result_table td {
border: 1px solid #f5f5f5;
padding: 2px;
}
#total_row { font-weight: bold; }
.passClass { background-color: #d6e9c6; }
.failClass { background-color: #faebcc; }
.errorClass { background-color: #ebccd1; }
.passCase { color: #6c6; }
.failCase { color: #c60; font-weight: bold; }
.errorCase { color: #c00; font-weight: bold; }
.hiddenRow { display: none; }
.testcase { margin-left: 2em; }
/* -- ending ---------------------------------------------------------------------- */
#ending {
}
/* -- chars ---------------------------------------------------------------------- */
.testChars {margin-left: 150px;}
.btn-info1 {
color: #fff;
background-color: #d6e9c6;
border-color: #d6e9c6;
}
.btn-info2 {
color: #fff;
background-color: #faebcc;
border-color: #faebcc;
}
.btn-info3 {
color: #fff;
background-color: #ebccd1;
border-color: #ebccd1;
}
</style>
"""
# ------------------------------------------------------------------------
# Heading
#
HEADING_TMPL = """<div class='heading'>
<h1>%(title)s</h1>
%(parameters)s
<p class='description'>%(description)s</p>
</div>
<div style="float:left; margin-left: 10px;">
<p> Test Case Pie charts </p>
<a class="btn btn-xs btn-info1">-Pass-</a><br>
<a class="btn btn-xs btn-info2">-Faild-</a><br>
<a class="btn btn-xs btn-info3">-Error-</a><br>
</div>
<div class="testChars">
<canvas id="myChart" width="250" height="250"></canvas>
</div>
""" # variables: (title, parameters, description)
# ------------------------------------------------------------------------
# Pie chart
#
ECHARTS_SCRIPT = """
<script type="text/javascript">
var data = [
{
value: %(error)s,
color: "#ebccd1",
label: "Error",
labelColor: 'white',
labelFontSize: '16'
},
{
value : %(fail)s,
color : "#faebcc",
label: "Fail",
labelColor: 'white',
labelFontSize: '16'
},
{
value : %(Pass)s,
color : "#d6e9c6",
label : "Pass",
labelColor: 'white',
labelFontSize: '16'
}
]
var newopts = {
animationSteps: 100,
animationEasing: 'easeInOutQuart',
}
//Get the context of the canvas element we want to select
var ctx = document.getElementById("myChart").getContext("2d");
var myNewChart = new Chart(ctx).Pie(data,newopts);
</script>
"""
HEADING_ATTRIBUTE_TMPL = """<p class='attribute'><strong>%(name)s:</strong> %(value)s</p>
""" # variables: (name, value)
# ------------------------------------------------------------------------
# Report
#
REPORT_TMPL = """
<p id='show_detail_line' style="margin-left: 10px;">Show
<a href='javascript:showCase(0)' class="btn btn-xs btn-primary">Summary</a>
<a href='javascript:showCase(1)' class="btn btn-xs btn-danger">Failed</a>
<a href='javascript:showCase(2)' class="btn btn-xs btn-info">All</a>
</p>
<table id='result_table'>
<colgroup>
<col align='left' />
<col align='right' />
<col align='right' />
<col align='right' />
<col align='right' />
<col align='right' />
</colgroup>
<tr id='header_row' class="panel-title">
<td>Test Group/Test case</td>
<td>Count</td>
<td>Pass</td>
<td>Fail</td>
<td>Error</td>
<td>View</td>
</tr>
%(test_list)s
<tr id='total_row'>
<td>Total</td>
<td>%(count)s</td>
<td class="text text-success">%(Pass)s</td>
<td class="text text-danger">%(fail)s</td>
<td class="text text-warning">%(error)s</td>
<td>&nbsp;</td>
</tr>
</table>
""" # variables: (test_list, count, Pass, fail, error)
REPORT_CLASS_TMPL = r"""
<tr class='%(style)s'>
<td>%(desc)s</td>
<td>%(count)s</td>
<td>%(Pass)s</td>
<td>%(fail)s</td>
<td>%(error)s</td>
<td><a href="javascript:showClassDetail('%(cid)s',%(count)s)">Detail</a></td>
</tr>
""" # variables: (style, desc, count, Pass, fail, error, cid)
REPORT_TEST_WITH_OUTPUT_TMPL = r"""
<tr id='%(tid)s' class='%(Class)s'>
<td class='%(style)s'><div class='testcase'>%(desc)s</div></td>
<td colspan='5' align='center'>
<!--css div popup start-->
<a class="popup_link" onfocus='this.blur();' href="javascript:showTestDetail('div_%(tid)s')" >
%(status)s</a>
<div id='div_%(tid)s' class="popup_window">
<div style='text-align: right; color:red;cursor:pointer'>
<a onfocus='this.blur();' onclick="document.getElementById('div_%(tid)s').style.display = 'none' " >
[x]</a>
</div>
<pre>
%(script)s
</pre>
</div>
<!--css div popup end-->
</td>
</tr>
""" # variables: (tid, Class, style, desc, status)
REPORT_TEST_NO_OUTPUT_TMPL = r"""
<tr id='%(tid)s' class='%(Class)s'>
<td class='%(style)s'><div class='testcase'>%(desc)s</div></td>
<td colspan='5' align='center'>%(status)s</td>
</tr>
""" # variables: (tid, Class, style, desc, status)
REPORT_TEST_OUTPUT_TMPL = r"""
%(id)s: %(output)s
""" # variables: (id, output)
# ------------------------------------------------------------------------
# ENDING
#
ENDING_TMPL = """<div id='ending'>&nbsp;</div>"""
# -------------------- The end of the Template class -------------------
TestResult = unittest.TestResult
class _TestResult(TestResult):
# note: _TestResult is a pure representation of results.
# It lacks the output and reporting ability compared to unittest._TextTestResult.
def __init__(self, verbosity=1):
TestResult.__init__(self)
self.stdout0 = None
self.stderr0 = None
self.success_count = 0
self.failure_count = 0
self.error_count = 0
self.verbosity = verbosity
# result is a list of results, each stored as a 4-tuple:
# (
# result code (0: success; 1: fail; 2: error),
# TestCase object,
# Test output (byte string),
# stack trace,
# )
self.result = []
def startTest(self, test):
TestResult.startTest(self, test)
# just one buffer for both stdout and stderr
self.outputBuffer = io.StringIO()
stdout_redirector.fp = self.outputBuffer
stderr_redirector.fp = self.outputBuffer
self.stdout0 = sys.stdout
self.stderr0 = sys.stderr
sys.stdout = stdout_redirector
sys.stderr = stderr_redirector
def complete_output(self):
"""
Disconnect output redirection and return buffer.
Safe to call multiple times.
"""
if self.stdout0:
sys.stdout = self.stdout0
sys.stderr = self.stderr0
self.stdout0 = None
self.stderr0 = None
return self.outputBuffer.getvalue()
def stopTest(self, test):
# Usually one of addSuccess, addError or addFailure would have been called.
# But there are some paths in unittest that would bypass this.
# We must disconnect stdout in stopTest(), which is guaranteed to be called.
self.complete_output()
def addSuccess(self, test):
self.success_count += 1
TestResult.addSuccess(self, test)
output = self.complete_output()
self.result.append((0, test, output, ''))
if self.verbosity > 1:
sys.stderr.write('ok ')
sys.stderr.write(str(test))
sys.stderr.write('\n')
else:
sys.stderr.write('.'+str(self.success_count))
def addError(self, test, err):
self.error_count += 1
TestResult.addError(self, test, err)
_, _exc_str = self.errors[-1]
output = self.complete_output()
self.result.append((2, test, output, _exc_str))
if self.verbosity > 1:
sys.stderr.write('E ')
sys.stderr.write(str(test))
sys.stderr.write('\n')
else:
sys.stderr.write('E')
def addFailure(self, test, err):
self.failure_count += 1
TestResult.addFailure(self, test, err)
_, _exc_str = self.failures[-1]
output = self.complete_output()
self.result.append((1, test, output, _exc_str))
if self.verbosity > 1:
sys.stderr.write('F ')
sys.stderr.write(str(test))
sys.stderr.write('\n')
else:
sys.stderr.write('F')
class HTMLTestRunner(Template_mixin):
"""
"""
def __init__(self, stream=sys.stdout, verbosity=1, title=None, description=None):
self.stream = stream
self.verbosity = verbosity
if title is None:
self.title = self.DEFAULT_TITLE
else:
self.title = title
if description is None:
self.description = self.DEFAULT_DESCRIPTION
else:
self.description = description
self.startTime = datetime.datetime.now()
def run(self, test):
"Run the given test case or test suite."
result = _TestResult(self.verbosity)
test(result)
self.stopTime = datetime.datetime.now()
self.generateReport(test, result)
#print(sys.stderr, '\nTime Elapsed: %s' % (self.stopTime-self.startTime))
return result
def sortResult(self, result_list):
# unittest does not seem to run in any particular order.
# Here at least we want to group them together by class.
rmap = {}
classes = []
for n,t,o,e in result_list:
cls = t.__class__
if not cls in rmap:
rmap[cls] = []
classes.append(cls)
rmap[cls].append((n,t,o,e))
r = [(cls, rmap[cls]) for cls in classes]
return r
def getReportAttributes(self, result):
"""
Return report attributes as a list of (name, value).
Override this to add custom attributes.
"""
startTime = str(self.startTime)[:19]
duration = str(self.stopTime - self.startTime)
status = []
if result.success_count: status.append('Pass %s' % result.success_count)
if result.failure_count: status.append('Failure %s' % result.failure_count)
if result.error_count: status.append('Error %s' % result.error_count )
if status:
status = ' '.join(status)
else:
status = 'none'
return [
('Start Time', startTime),
('Duration', duration),
('Status', status),
]
def generateReport(self, test, result):
report_attrs = self.getReportAttributes(result)
generator = 'HTMLTestRunner %s' % __version__
stylesheet = self._generate_stylesheet()
heading = self._generate_heading(report_attrs)
report = self._generate_report(result)
ending = self._generate_ending()
chart = self._generate_chart(result)
output = self.HTML_TMPL % dict(
title = saxutils.escape(self.title),
generator = generator,
stylesheet = stylesheet,
heading = heading,
report = report,
ending = ending,
chart_script = chart,
)
self.stream.write(output.encode('utf8'))
def _generate_stylesheet(self):
return self.STYLESHEET_TMPL
def _generate_heading(self, report_attrs):
a_lines = []
for name, value in report_attrs:
line = self.HEADING_ATTRIBUTE_TMPL % dict(
name = saxutils.escape(name),
value = saxutils.escape(value),
)
a_lines.append(line)
heading = self.HEADING_TMPL % dict(
title = saxutils.escape(self.title),
parameters = ''.join(a_lines),
description = saxutils.escape(self.description),
)
return heading
def _generate_report(self, result):
rows = []
sortedResult = self.sortResult(result.result)
for cid, (cls, cls_results) in enumerate(sortedResult):
# subtotal for a class
np = nf = ne = 0
for n,t,o,e in cls_results:
if n == 0: np += 1
elif n == 1: nf += 1
else: ne += 1
# format class description
if cls.__module__ == "__main__":
name = cls.__name__
else:
name = "%s.%s" % (cls.__module__, cls.__name__)
doc = cls.__doc__ or ""
desc = doc and '%s: %s' % (name, doc) or name
row = self.REPORT_CLASS_TMPL % dict(
style = ne > 0 and 'errorClass' or nf > 0 and 'failClass' or 'passClass',
desc = desc,
count = np+nf+ne,
Pass = np,
fail = nf,
error = ne,
cid = 'c%s' % (cid+1),
)
rows.append(row)
for tid, (n,t,o,e) in enumerate(cls_results):
self._generate_report_test(rows, cid, tid, n, t, o, e)
report = self.REPORT_TMPL % dict(
test_list = ''.join(rows),
count = str(result.success_count+result.failure_count+result.error_count),
Pass = str(result.success_count),
fail = str(result.failure_count),
error = str(result.error_count),
)
return report
def _generate_chart(self, result):
chart = self.ECHARTS_SCRIPT % dict(
Pass=str(result.success_count),
fail=str(result.failure_count),
error=str(result.error_count),
)
return chart
def _generate_report_test(self, rows, cid, tid, n, t, o, e):
# e.g. 'pt1.1', 'ft1.1', etc
has_output = bool(o or e)
tid = (n == 0 and 'p' or 'f') + 't%s.%s' % (cid+1,tid+1)
name = t.id().split('.')[-1]
doc = t.shortDescription() or ""
desc = doc and ('%s: %s' % (name, doc)) or name
tmpl = has_output and self.REPORT_TEST_WITH_OUTPUT_TMPL or self.REPORT_TEST_NO_OUTPUT_TMPL
# o and e should be byte string because they are collected from stdout and stderr?
if isinstance(o,str):
# TODO: some problem with 'string_escape': it escapes \n and messes up formatting
# uo = unicode(o.encode('string_escape'))
uo = o
else:
uo = o
if isinstance(e,str):
# TODO: some problem with 'string_escape': it escapes \n and messes up formatting
# ue = unicode(e.encode('string_escape'))
ue = e
else:
ue = e
script = self.REPORT_TEST_OUTPUT_TMPL % dict(
id = tid,
output = saxutils.escape(uo+ue),
)
row = tmpl % dict(
tid = tid,
Class = (n == 0 and 'hiddenRow' or 'none'),
style = n == 2 and 'errorCase' or (n == 1 and 'failCase' or 'none'),
desc = desc,
script = script,
status = self.STATUS[n],
)
rows.append(row)
if not has_output:
return
def _generate_ending(self):
return self.ENDING_TMPL
##############################################################################
# Facilities for running tests from the command line
##############################################################################
# Note: Reuse unittest.TestProgram to launch tests. In the future we may
# build our own launcher to support more specific command line
# parameters like test title, CSS, etc.
class TestProgram(unittest.TestProgram):
"""
A variation of the unittest.TestProgram. Please refer to the base
class for command line parameters.
"""
def runTests(self):
# Pick HTMLTestRunner as the default test runner.
# base class's testRunner parameter is not useful because it means
# we have to instantiate HTMLTestRunner before we know self.verbosity.
if self.testRunner is None:
self.testRunner = HTMLTestRunner(verbosity=self.verbosity)
unittest.TestProgram.runTests(self)
main = TestProgram
##############################################################################
# Executing this module from the command line
##############################################################################
if __name__ == "__main__":
main(module=None)

README.md

@@ -1,2 +1,21 @@
# Automated-Test
Automated Test
Web page UI automation test framework
Test framework:
A Web UI automation test framework built with Python 3, Selenium, and unittest
Environment setup:
python3
selenium3
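To extend the suite, a new check can live in its own unittest.TestCase module under testcase/ so that run.py picks it up through test discovery. A minimal sketch follows; the module name testcase/test_example.py and the URL assertion are illustrative only and not part of this commit, and webdriver.Chrome() assumes ChromeDriver is installed locally.

# testcase/test_example.py  (hypothetical module, shown for illustration)
# coding=utf-8
import unittest
from selenium import webdriver

class TestExample(unittest.TestCase):
    def setUp(self):
        # Start a fresh browser session before every test, mirroring testcase/test.py.
        self.driver = webdriver.Chrome()
        self.driver.get("https://www.baidu.com/")

    def tearDown(self):
        # Release the browser even if an assertion fails.
        self.driver.quit()

    def test_open_homepage(self):
        # After loading, the browser should still be on the target site.
        self.assertIn("baidu", self.driver.current_url)

if __name__ == '__main__':
    unittest.main()

Running python run.py from the project root then writes a time-stamped HTML report for every discovered case into ./report/.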

0
__init__.py Normal file

0
common/__init__.py Normal file

movie.py

@@ -1,52 +0,0 @@
# coding = utf-8
import requests
from requests.exceptions import RequestException
import re
import json
from multiprocessing import Pool
def get_one_page(url):
try:
response = requests.get(url)
if response.status_code == 200:
return response.text
return None
except RequestException:
return None
def parse_one_page(html):
# 正则匹配
pattern = re.compile('<li>.*?<em.*?>(\d+)</em>.*?src="(.*?)".*?title">(.*?)</span>.*?v:average">(.*?)</span>.*?inq">(.*?)</span>.*?</li>', re.S)
items = re.findall(pattern,html)
for item in items:
yield {
'index': item[0],
'image': item[1],
'title': item[2],
'score': item[3],
'inq': item[4]
}
def write_to_file(content):
with open('movie.txt', 'a', encoding='utf-8') as f:
jsondumps=json.dumps(content,ensure_ascii=False)
f.write(jsondumps+'\n')
f.close()
def main(start):
url = 'https://movie.douban.com/top250?start='+str(start)+'&filter='
html = get_one_page(url)
for item in parse_one_page(html):
print(item)
write_to_file(item)
if __name__ == '__main__':
pool = Pool()
pool.map(main, [i * 25 for i in range(10)])

report/2019-02-18-22-18-57result.html Normal file

@@ -0,0 +1,344 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>自动化测试</title>
<meta name="generator" content="HTMLTestRunner 0.8.2"/>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<link rel="stylesheet" href="http://cdn.bootcss.com/bootstrap/3.3.0/css/bootstrap.min.css">
<script src="http://cdn.bootcss.com/bootstrap/3.3.0/js/bootstrap.min.js"></script>
<script src="http://apps.bdimg.com/libs/Chart.js/0.2.0/Chart.min.js"></script>
<!-- <link href="https://cdn.bootcss.com/echarts/3.8.5/echarts.common.min.js" rel="stylesheet"> -->
<style type="text/css" media="screen">
body { font-family: verdana, arial, helvetica, sans-serif; font-size: 80%; }
table { font-size: 100%; }
pre { }
/* -- heading ---------------------------------------------------------------------- */
h1 {
font-size: 16pt;
color: gray;
}
.heading {
margin-top: 0ex;
margin-bottom: 1ex;
margin-left: 10px;
}
.heading .attribute {
margin-top: 1ex;
margin-bottom: 0;
}
.heading .description {
margin-top: 4ex;
margin-bottom: 6ex;
}
/* -- css div popup ------------------------------------------------------------------------ */
a.popup_link {
}
a.popup_link:hover {
color: red;
}
.popup_window {
display: none;
position: relative;
left: 0px;
top: 0px;
/*border: solid #627173 1px; */
font-family: "Lucida Console", "Courier New", Courier, monospace;
text-align: left;
font-size: 8pt;
width: 500px;
}
}
/* -- report ------------------------------------------------------------------------ */
#show_detail_line {
margin-top: 3ex;
margin-bottom: 1ex;
margin-left: 10px;
}
#result_table {
width: 80%;
border-collapse: collapse;
border: 1px solid #777;
margin-left: 10px;
}
#header_row {
font-weight: bold;
color: #606060;
background-color: #f5f5f5;
border-top-width: 10px;
border-color: #d6e9c6;
font-size: 12px;
}
#result_table td {
border: 1px solid #f5f5f5;
padding: 2px;
}
#total_row { font-weight: bold; }
.passClass { background-color: #d6e9c6; }
.failClass { background-color: #faebcc; }
.errorClass { background-color: #ebccd1; }
.passCase { color: #6c6; }
.failCase { color: #c60; font-weight: bold; }
.errorCase { color: #c00; font-weight: bold; }
.hiddenRow { display: none; }
.testcase { margin-left: 2em; }
/* -- ending ---------------------------------------------------------------------- */
#ending {
}
/* -- chars ---------------------------------------------------------------------- */
.testChars {margin-left: 150px;}
.btn-info1 {
color: #fff;
background-color: #d6e9c6;
border-color: #d6e9c6;
}
.btn-info2 {
color: #fff;
background-color: #faebcc;
border-color: #faebcc;
}
.btn-info3 {
color: #fff;
background-color: #ebccd1;
border-color: #ebccd1;
}
</style>
</head>
<body>
<script language="javascript" type="text/javascript"><!--
output_list = Array();
/* level - 0:Summary; 1:Failed; 2:All */
function showCase(level) {
trs = document.getElementsByTagName("tr");
for (var i = 0; i < trs.length; i++) {
tr = trs[i];
id = tr.id;
if (id.substr(0,2) == 'ft') {
if (level < 1) {
tr.className = 'hiddenRow';
}
else {
tr.className = '';
}
}
if (id.substr(0,2) == 'pt') {
if (level > 1) {
tr.className = '';
}
else {
tr.className = 'hiddenRow';
}
}
}
}
function showClassDetail(cid, count) {
var id_list = Array(count);
var toHide = 1;
for (var i = 0; i < count; i++) {
tid0 = 't' + cid.substr(1) + '.' + (i+1);
tid = 'f' + tid0;
tr = document.getElementById(tid);
if (!tr) {
tid = 'p' + tid0;
tr = document.getElementById(tid);
}
id_list[i] = tid;
if (tr.className) {
toHide = 0;
}
}
for (var i = 0; i < count; i++) {
tid = id_list[i];
if (toHide) {
document.getElementById('div_'+tid).style.display = 'none'
document.getElementById(tid).className = 'hiddenRow';
}
else {
document.getElementById(tid).className = '';
}
}
}
function showTestDetail(div_id){
var details_div = document.getElementById(div_id)
var displayState = details_div.style.display
// alert(displayState)
if (displayState != 'block' ) {
displayState = 'block'
details_div.style.display = 'block'
}
else {
details_div.style.display = 'none'
}
}
function html_escape(s) {
s = s.replace(/&/g,'&amp;');
s = s.replace(/</g,'&lt;');
s = s.replace(/>/g,'&gt;');
return s;
}
/* obsoleted by detail in <div>
function showOutput(id, name) {
var w = window.open("", //url
name,
"resizable,scrollbars,status,width=800,height=450");
d = w.document;
d.write("<pre>");
d.write(html_escape(output_list[id]));
d.write("\n");
d.write("<a href='javascript:window.close()'>close</a>\n");
d.write("</pre>\n");
d.close();
}
*/
--></script>
<div class='heading'>
<h1>自动化测试</h1>
<p class='attribute'><strong>Start Time:</strong> 2019-02-18 22:18:57</p>
<p class='attribute'><strong>Duration:</strong> 0:00:00.001995</p>
<p class='attribute'><strong>Status:</strong> Error 2</p>
<p class='description'>测试</p>
</div>
<div style="float:left; margin-left: 10px;">
<p> Test Case Pie charts </p>
<a class="btn btn-xs btn-info1">-Pass-</a><br>
<a class="btn btn-xs btn-info2">-Faild-</a><br>
<a class="btn btn-xs btn-info3">-Error-</a><br>
</div>
<div class="testChars">
<canvas id="myChart" width="250" height="250"></canvas>
</div>
<p id='show_detail_line' style="margin-left: 10px;">Show
<a href='javascript:showCase(0)' class="btn btn-xs btn-primary">Summary</a>
<a href='javascript:showCase(1)' class="btn btn-xs btn-danger">Failed</a>
<a href='javascript:showCase(2)' class="btn btn-xs btn-info">All</a>
</p>
<table id='result_table'>
<colgroup>
<col align='left' />
<col align='right' />
<col align='right' />
<col align='right' />
<col align='right' />
<col align='right' />
</colgroup>
<tr id='header_row' class="panel-title">
<td>Test Group/Test case</td>
<td>Count</td>
<td>Pass</td>
<td>Fail</td>
<td>Error</td>
<td>View</td>
</tr>
<tr class='errorClass'>
<td>TestLogin</td>
<td>2</td>
<td>0</td>
<td>0</td>
<td>2</td>
<td><a href="javascript:showClassDetail('c1',2)">Detail</a></td>
</tr>
<tr id='ft1.1' class='none'>
<td class='errorCase'><div class='testcase'>test_search</div></td>
<td colspan='5' align='center'>
<!--css div popup start-->
<a class="popup_link" onfocus='this.blur();' href="javascript:showTestDetail('div_ft1.1')" >
error</a>
<div id='div_ft1.1' class="popup_window">
<div style='text-align: right; color:red;cursor:pointer'>
<a onfocus='this.blur();' onclick="document.getElementById('div_ft1.1').style.display = 'none' " >
[x]</a>
</div>
<pre>
ft1.1: Traceback (most recent call last):
File "D:/githublily/Automated-Test/testcase/test.py", line 11, in setUp
self.driver = webdriver.Chrome()
AttributeError: module 'selenium.webdriver' has no attribute 'Chrome'
</pre>
</div>
<!--css div popup end-->
</td>
</tr>
<tr id='ft1.2' class='none'>
<td class='errorCase'><div class='testcase'>test_search1</div></td>
<td colspan='5' align='center'>
<!--css div popup start-->
<a class="popup_link" onfocus='this.blur();' href="javascript:showTestDetail('div_ft1.2')" >
error</a>
<div id='div_ft1.2' class="popup_window">
<div style='text-align: right; color:red;cursor:pointer'>
<a onfocus='this.blur();' onclick="document.getElementById('div_ft1.2').style.display = 'none' " >
[x]</a>
</div>
<pre>
ft1.2: Traceback (most recent call last):
File "D:/githublily/Automated-Test/testcase/test.py", line 11, in setUp
self.driver = webdriver.Chrome()
AttributeError: module 'selenium.webdriver' has no attribute 'Chrome'
</pre>
</div>
<!--css div popup end-->
</td>
</tr>
<tr id='total_row'>
<td>Total</td>
<td>2</td>
<td class="text text-success">0</td>
<td class="text text-danger">0</td>
<td class="text text-warning">2</td>
<td>&nbsp;</td>
</tr>
</table>
<div id='ending'>&nbsp;</div>
<script type="text/javascript">
var data = [
{
value: 2,
color: "#ebccd1",
label: "Error",
labelColor: 'white',
labelFontSize: '16'
},
{
value : 0,
color : "#faebcc",
label: "Fail",
labelColor: 'white',
labelFontSize: '16'
},
{
value : 0,
color : "#d6e9c6",
label : "Pass",
labelColor: 'white',
labelFontSize: '16'
}
]
var newopts = {
animationSteps: 100,
animationEasing: 'easeInOutQuart',
}
//Get the context of the canvas element we want to select
var ctx = document.getElementById("myChart").getContext("2d");
var myNewChart = new Chart(ctx).Pie(data,newopts);
</script>
</body>
</html>

31
run.py Normal file

@@ -0,0 +1,31 @@
# coding=utf-8
import HTMLTestRunner
import unittest
import os,time
listaa = "./report/"
def createsuite():
testunit=unittest.TestSuite()
discover=unittest.defaultTestLoader.discover(listaa,pattern='test*.py',top_level_dir=None)  # pattern matches testcase/test.py
for test_suite in discover:
for test_case in test_suite:
testunit.addTests(test_case)
print(testunit)
return testunit
now = time.strftime("%Y-%m-%d %H_%M_%S",time.localtime())
filename="./report/"+now+"_result.html"
os.makedirs("./report/", exist_ok=True)  # make sure the report directory exists before writing
fp=open(filename, 'wb')
runner=HTMLTestRunner.HTMLTestRunner(
stream=fp,
title=u'搜索功能测试报告',
description=u'用例执行情况:')
runner.run(createsuite())
fp.close()

0
testcase/__init__.py Normal file

43
testcase/test.py Normal file

@@ -0,0 +1,43 @@
# coding=utf-8
from HTMLTestRunner import HTMLTestRunner
from selenium import webdriver
import time
import unittest
class TestLogin(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Chrome()
self.url = "https://www.baidu.com/"
self.driver.get(self.url)
self.driver.maximize_window()
def tearDown(self):
self.driver.close()
def test_search(self):
self.driver.find_element_by_id("kw").send_keys("hello")
self.driver.find_element_by_id("su").click()
time.sleep(2)
self.assertIn("hello", self.driver.page_source)
def test_search1(self):
self.driver.find_element_by_id("kw").send_keys("hello")
self.driver.find_element_by_id("su").click()
time.sleep(2)
self.assertIn("hello", self.driver.page_source)
if __name__ == '__main__':
now = time.strftime("%Y-%m-%d-%H-%M-%S")
suite = unittest.TestSuite()
suite.addTest(TestLogin("test_search"))
suite.addTest(TestLogin("test_search1"))
path = "../report/" + now + "result.html"
fp = open(path, 'wb')
runner = HTMLTestRunner(stream=fp, title=u"自动化测试", description=u"测试")
runner.run(suite)
fp.close()
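The two search checks above pause with fixed time.sleep(2) calls, which slows runs and can be flaky on a slow page. Below is a sketch of the same step using Selenium's explicit waits instead; WebDriverWait and expected_conditions are standard selenium modules, while the 10-second timeout and the content_left result-container id are assumptions for illustration.

# Sketch: the search step from test_search, with an explicit wait instead of time.sleep.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def search(driver, keyword):
    driver.find_element_by_id("kw").send_keys(keyword)
    driver.find_element_by_id("su").click()
    # Wait up to 10 seconds for the results container to appear
    # ("content_left" is assumed to be the id of the results area).
    WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.ID, "content_left"))
    )
    return keyword in driver.page_source

Inside TestLogin, test_search could then assert self.assertTrue(search(self.driver, "hello")) without the fixed sleeps.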