0001-tests-remove-jenkins.sh-40071.patch
Jenkinsfile | ||
---|---|---|
1 | 1 |
@Library('eo-jenkins-lib@master') import eo.Utils |
2 | 2 | |
3 | 3 |
pipeline { |
4 | 4 |
agent any |
5 | 5 |
options { disableConcurrentBuilds() } |
6 | 6 |
stages { |
7 | 7 |
stage('Unit Tests') { |
8 | 8 |
steps { |
9 |
sh './jenkins.sh'
|
|
9 |
sh 'tox -rv'
|
|
10 | 10 |
} |
11 | 11 |
post { |
12 | 12 |
always { |
13 | 13 |
script { |
14 | 14 |
utils = new Utils() |
15 | 15 |
utils.publish_coverage('coverage-*.xml') |
16 | 16 |
utils.publish_coverage_native( |
17 | 17 |
'index.html', 'htmlcov-coverage-authentic', 'Coverage authentic tests') |
jenkins.sh | ||
---|---|---|
1 |
#!/bin/sh |
|
2 | ||
3 |
set -e # prevent hiding of errors |
|
4 | ||
5 |
rm -f *coverage.xml |
|
6 |
rm -f *test_results.xml |
|
7 |
rm -rf htmlcov htmlcov-* |
|
8 | ||
9 |
virtualenv venv |
|
10 |
venv/bin/pip install --upgrade setuptools |
|
11 |
venv/bin/pip install --upgrade pip |
|
12 |
venv/bin/pip install --upgrade pylint pylint-django |
|
13 |
venv/bin/pip install --upgrade tox |
|
14 | ||
15 |
venv/bin/tox -rv |
|
16 | ||
17 |
# ./merge-junit-results.py junit-*.xml >test_results.xml |
|
18 |
#./merge-coverage.py -o coverage.xml coverage-*.xml |
|
19 | ||
20 | ||
21 |
test -f pylint.out && cp pylint.out pylint.out.prev |
|
22 |
(pylint -f parseable --rcfile /var/lib/jenkins/pylint.django.rc hobo | tee pylint.out) || /bin/true |
|
23 |
test -f pylint.out.prev && (diff pylint.out.prev pylint.out | grep '^[><]' | grep .py) || /bin/true |
|
24 | ||
25 |
echo OK |
merge-coverage.py | ||
---|---|---|
1 |
#!/usr/bin/python |
|
2 |
import sys |
|
3 |
import os |
|
4 |
import xml.etree.ElementTree as ET |
|
5 |
import logging |
|
6 |
import re |
|
7 |
from shutil import copyfile |
|
8 |
from optparse import OptionParser |
|
9 | ||
10 |
### This file came from the https://github.com/flow123d/flow123d repo they were nice enough to spend time to write this. |
|
11 |
### It is copied here for other people to use on its own. |
|
12 | ||
13 |
# parse arguments |
|
14 |
newline = 10*'\t'; |
|
15 |
parser = OptionParser(usage="%prog [options] [file1 file2 ... filen]", version="%prog 1.0", |
|
16 |
epilog = "If no files are specified all xml files in current directory will be selected. \n" + |
|
17 |
"Useful when there is not known precise file name only location") |
|
18 | ||
19 |
parser.add_option("-o", "--output", dest="filename", default="coverage-merged.xml", |
|
20 |
help="output file xml name", metavar="FILE") |
|
21 |
parser.add_option("-p", "--path", dest="path", default="./", |
|
22 |
help="xml location, default current directory", metavar="FILE") |
|
23 |
parser.add_option("-l", "--log", dest="loglevel", default="DEBUG", |
|
24 |
help="Log level DEBUG, INFO, WARNING, ERROR, CRITICAL") |
|
25 |
parser.add_option("-f", "--filteronly", dest="filteronly", default=False, action='store_true', |
|
26 |
help="If set all files will be filtered by keep rules otherwise "+ |
|
27 |
"all given files will be merged and filtered.") |
|
28 |
parser.add_option("-s", "--suffix", dest="suffix", default='', |
|
29 |
help="Additional suffix which will be added to filtered files so they original files can be preserved") |
|
30 |
parser.add_option("-k", "--keep", dest="packagefilters", default=None, metavar="NAME", action="append", |
|
31 |
help="preserves only specific packages. e.g.: " + newline + |
|
32 |
"'python merge.py -k src.la.*'" + newline + |
|
33 |
"will keep all packgages in folder " + |
|
34 |
"src/la/ and all subfolders of this folders. " + newline + |
|
35 |
"There can be mutiple rules e.g.:" + newline + |
|
36 |
"'python merge.py -k src.la.* -k unit_tests.la.'" + newline + |
|
37 |
"Format of the rule is simple dot (.) separated names with wildcard (*) allowed, e.g: " + newline + |
|
38 |
"package.subpackage.*") |
|
39 |
(options, args) = parser.parse_args() |
|
40 | ||
41 | ||
42 |
# get arguments |
|
43 |
path = options.path |
|
44 |
xmlfiles = args |
|
45 |
loglevel = getattr(logging, options.loglevel.upper()) |
|
46 |
finalxml = os.path.join (path, options.filename) |
|
47 |
filteronly = options.filteronly |
|
48 |
filtersuffix = options.suffix |
|
49 |
packagefilters = options.packagefilters |
|
50 |
logging.basicConfig (level=loglevel, format='%(levelname)s %(asctime)s: %(message)s', datefmt='%x %X') |
|
51 | ||
52 | ||
53 | ||
54 |
# Collect the input files: when none were given on the command line, scan
# `path` for *.xml files (skipping the output file itself); otherwise
# resolve each given name against `path`.
if not xmlfiles:
    for filename in os.listdir(path):
        if not filename.endswith('.xml'):
            continue
        fullname = os.path.join(path, filename)
        if fullname == finalxml:
            continue
        xmlfiles.append(fullname)

    if not xmlfiles:
        # print() function form: the original Python 2 print statement is a
        # SyntaxError on Python 3; this works on both.
        print('No xml files found!')
        sys.exit(1)

else:
    # os.path.join handles a `path` without a trailing separator, which the
    # original plain string concatenation (path + filename) silently broke.
    xmlfiles = [os.path.join(path, filename) for filename in xmlfiles]
|
67 | ||
68 | ||
69 | ||
70 |
# XPath fragments describing the Cobertura coverage-XML hierarchy:
# each *_LIST finds the child elements, each *_ROOT their container node.
PACKAGES_LIST = 'packages/package'
PACKAGES_ROOT = 'packages'
CLASSES_LIST = 'classes/class'
CLASSES_ROOT = 'classes'
METHODS_LIST = 'methods/method'
METHODS_ROOT = 'methods'
LINES_LIST = 'lines/line'
LINES_ROOT = 'lines'
|
79 | ||
80 | ||
81 | ||
82 |
def merge_xml(xmlfile1, xmlfile2, outputfile):
    """Merge coverage file `xmlfile2` into `xmlfile1` and write the result.

    Both inputs are filtered by the keep-rules first; the merged tree of
    `xmlfile1` is then written to `outputfile`.
    """
    # parse both inputs
    tree1 = ET.parse(xmlfile1)
    tree2 = ET.parse(xmlfile2)

    # filter both trees, keeping only the surviving <package> elements
    kept1 = filter_xml(tree1)
    kept2 = filter_xml(tree2)

    # fold the second tree's packages into the first tree, keyed by name
    merge(tree1.find(PACKAGES_ROOT), kept1, kept2, 'name', merge_packages)

    # persist the combined tree
    tree1.write(outputfile, encoding="UTF-8", xml_declaration=True)
|
100 | ||
101 | ||
102 |
def filter_xml(xmlfile):
    """Drop packages not matching the keep-filters from a parsed tree.

    Removes every non-matching <package> node from `xmlfile` (an
    ElementTree) in place and returns the list of package elements kept.
    """
    xmlroot = xmlfile.getroot()
    packageroot = xmlfile.find(PACKAGES_ROOT)

    if packagefilters:
        logging.debug('excluding packages:')

    kept = []
    for package in xmlroot.findall(PACKAGES_LIST):
        pkg_name = package.get('name')
        if include_package(pkg_name):
            kept.append(package)
        else:
            logging.debug('excluding package "{0}"'.format(pkg_name))
            packageroot.remove(package)
    return kept
|
118 | ||
119 | ||
120 |
def prepare_packagefilters():
    """Compile the -k/--keep dotted wildcard filters into anchored regexes.

    Rewrites the module-level `packagefilters` list in place, turning e.g.
    "src.la.*" into the pattern "^src\\.la\\..*$". Returns None.
    """
    if not packagefilters:
        return None

    # create a simple regexp from each given filter; r'\.' fixes the
    # original '\.' — an invalid escape sequence that Python 3 flags as a
    # DeprecationWarning (and a future SyntaxError)
    for i, pf in enumerate(packagefilters):
        packagefilters[i] = '^' + pf.replace('.', r'\.').replace('*', '.*') + '$'
|
127 | ||
128 | ||
129 | ||
130 |
def include_package(name):
    """Return True when `name` passes the keep-filters (or none are set)."""
    if not packagefilters:
        return True
    return any(re.search(pattern, name) for pattern in packagefilters)
|
138 | ||
139 |
def get_attributes_chain(obj, attrs):
    """Return the joined attribute values of `obj` named by `attrs`.

    `attrs` may be a single attribute name or a list of names; with a list
    the values are concatenated in order (used as a composite merge key).
    """
    if type(attrs) is list:
        return ''.join(obj.attrib[attr] for attr in attrs)
    return obj.attrib[attrs]
|
149 | ||
150 | ||
151 |
def merge(root, list1, list2, attr, merge_function):
    """Merge `list2` into `list1` under `root`, grouped by key attribute(s).

    Items sharing the same `attr` key are combined via `merge_function`;
    unmatched items from `list2` are appended to `root`. Returns None
    (`list1`'s elements are modified in place).
    """
    for item2 in list2:
        key2 = get_attributes_chain(item2, attr)
        # first element of list1 with the same key, if any
        match = next(
            (item1 for item1 in list1
             if get_attributes_chain(item1, attr) == key2),
            None)
        if match is not None:
            merge_function(match, item2)
        else:
            # no counterpart on the left side: adopt the right-side node
            root.append(item2)
|
165 | ||
166 | ||
167 |
def merge_packages(package1, package2):
    """Merge package2 into package1 by combining their classes. Returns package1."""
    classes1 = package1.findall(CLASSES_LIST)
    classes2 = package2.findall(CLASSES_LIST)
    if classes1 or classes2:
        # classes are keyed by (filename, name) so same-named classes from
        # different files stay separate
        merge(package1.find(CLASSES_ROOT), classes1, classes2,
              ['filename', 'name'], merge_classes)
    return package1
|
175 | ||
176 | ||
177 |
def merge_classes(class1, class2):
    """Merge class2 into class1: combine line hits, then methods. Returns class1."""
    lines1 = class1.findall(LINES_LIST)
    lines2 = class2.findall(LINES_LIST)
    if lines1 or lines2:
        merge(class1.find(LINES_ROOT), lines1, lines2, 'number', merge_lines)

    methods1 = class1.findall(METHODS_LIST)
    methods2 = class2.findall(METHODS_LIST)
    if methods1 or methods2:
        merge(class1.find(METHODS_ROOT), methods1, methods2, 'name', merge_methods)

    return class1
|
191 | ||
192 | ||
193 |
def merge_methods(method1, method2):
    """Merge method2 into method1 by combining their line hit counts.

    Returns method1, matching the convention of the other merge_* helpers
    (the original returned None, inconsistent with merge_packages,
    merge_classes and merge_lines).
    """
    lines1 = method1.findall(LINES_LIST)
    lines2 = method2.findall(LINES_LIST)
    merge(method1.find(LINES_ROOT), lines1, lines2, 'number', merge_lines)
    return method1
|
199 | ||
200 | ||
201 |
def merge_lines(line1, line2):
    """Merge line2 into line1: sum hit counts, keep the better branch coverage.

    Returns line1, modified in place.
    """
    # hit counts are additive across runs
    total_hits = int(line1.get('hits')) + int(line2.get('hits'))
    line1.set('hits', str(total_hits))

    # condition coverage ("NN% (a/b)"): keep whichever side covered more
    cov1 = line1.get('condition-coverage')
    cov2 = line2.get('condition-coverage')
    if cov1 is not None and cov2 is not None:
        pct1 = int(cov1.split('%')[0])
        pct2 = int(cov2.split('%')[0])
        if pct2 > pct1:
            line1.set('condition-coverage', str(cov2))
            # also adopt line2's first child (its <conditions> detail node)
            line1[0] = line2[0]

    return line1
|
220 | ||
221 |
# prepare filters
prepare_packagefilters()


if filteronly:
    # filter each given file on its own, saving under the optional suffix
    totalfiles = len(xmlfiles)
    for currfile, xmlfile in enumerate(xmlfiles, 1):
        xml = ET.parse(xmlfile)
        filter_xml(xml)
        logging.debug('{1}/{2} filtering: {0}'.format(xmlfile, currfile, totalfiles))
        xml.write(xmlfile + filtersuffix, encoding="UTF-8", xml_declaration=True)
else:
    # merge all given files
    totalfiles = len(xmlfiles)

    # special case: a single input cannot be merged — just filter it into
    # the output file and stop
    if totalfiles == 1:
        logging.warning('Only one file given!')
        xmlfile = xmlfiles.pop(0)
        xml = ET.parse(xmlfile)
        filter_xml(xml)
        xml.write(finalxml, encoding="UTF-8", xml_declaration=True)
        sys.exit(0)

    # merge the first two inputs into the output file...
    logging.debug('{2}/{3} merging: {0} & {1}'.format(
        xmlfiles[0], xmlfiles[1], 1, totalfiles - 1))
    merge_xml(xmlfiles[0], xmlfiles[1], finalxml)

    # ...then fold each remaining input into the accumulated output
    for currfile, xmlfile in enumerate(xmlfiles[2:], 2):
        logging.debug('{2}/{3} merging: {0} & {1}'.format(
            finalxml, xmlfile, currfile, totalfiles - 1))
        merge_xml(finalxml, xmlfile, finalxml)
merge-junit-results.py | ||
---|---|---|
1 |
#!/usr/bin/env python |
|
2 |
# |
|
3 |
# Corey Goldberg, Dec 2012 |
|
4 |
# |
|
5 | ||
6 |
import os |
|
7 |
import sys |
|
8 |
import xml.etree.ElementTree as ET |
|
9 | ||
10 | ||
11 |
"""Merge multiple JUnit XML files into a single results file. |
|
12 |
Output dumps to stdout. |
|
13 |
example usage: |
|
14 |
$ python merge_junit_results.py results1.xml results2.xml > results.xml |
|
15 |
""" |
|
16 | ||
17 | ||
18 |
def main():
    """Entry point: merge the JUnit XML files named on the command line.

    Prints usage and exits with status 2 when no files are given or help
    is requested; otherwise dumps the merged suite to stdout.
    """
    args = sys.argv[1:]
    if not args or '-h' in args or '--help' in args:
        usage()
        sys.exit(2)
    merge_results(args[:])
|
27 | ||
28 | ||
29 |
def merge_results(xml_files):
    """Merge several JUnit XML files and dump the combined suite to stdout.

    The failures/tests/errors/time counters are summed across suites, and
    each testcase's classname is prefixed with its suite name so cases
    from different files remain distinguishable.
    """
    failures = 0
    tests = 0
    errors = 0
    time = 0.0
    cases = []

    for file_name in xml_files:
        tree = ET.parse(file_name)
        test_suite = tree.getroot()
        failures += int(test_suite.attrib.get('failures', '0'))
        tests += int(test_suite.attrib.get('tests', '0'))
        errors += int(test_suite.attrib.get('errors', '0'))
        time += float(test_suite.attrib.get('time', '0.'))
        name = test_suite.attrib.get('name', '')
        # Element.getchildren() was deprecated and removed in Python 3.9;
        # listing the element directly is the supported equivalent.
        children = list(test_suite)
        for child in children:
            child.attrib['classname'] = '%s-%s' % (name, child.attrib.get('classname', ''))
        cases.append(children)

    new_root = ET.Element('testsuite')
    new_root.attrib['failures'] = '%s' % failures
    new_root.attrib['tests'] = '%s' % tests
    new_root.attrib['errors'] = '%s' % errors
    new_root.attrib['time'] = '%s' % time
    for case in cases:
        new_root.extend(case)
    new_tree = ET.ElementTree(new_root)
    ET.dump(new_tree)
|
57 | ||
58 | ||
59 |
def usage():
    """Print a one-line usage hint naming this script."""
    script = os.path.basename(__file__)
    print('Usage: %s results1.xml results2.xml' % script)
|
62 | ||
63 | ||
64 |
# Run the merge only when executed as a script, not on import.
if __name__ == '__main__':
    main()
tox.ini | ||
---|---|---|
1 | 1 |
# Tox (http://tox.testrun.org/) is a tool for running tests |
2 | 2 |
# in multiple virtualenvs. This configuration file will run the |
3 | 3 |
# test suite on all supported python versions. To use it, "pip install tox" |
4 | 4 |
# and then run "tox" from this directory. |
5 | 5 |
[tox] |
6 |
toxworkdir = {env:TMPDIR:/tmp}/tox-{env:USER}/hobo |
|
6 |
toxworkdir = {env:TMPDIR:/tmp}/tox-{env:USER}/hobo/{env:BRANCH_NAME:}
|
|
7 | 7 |
envlist = coverage-{authentic,hobo,multipublik,multitenant,multitenant-oldstylemiddleware,passerelle,schemas}, |
8 | 8 | |
9 | 9 |
[testenv] |
10 | 10 |
basepython = python2 |
11 |
whitelist_externals = |
|
12 |
/bin/mv |
|
13 | 11 |
usedevelop = True |
14 | 12 |
setenv = |
15 | 13 |
BRANCH_NAME={env:BRANCH_NAME:} |
16 | 14 |
hobo: DJANGO_SETTINGS_MODULE=hobo.settings |
17 | 15 |
hobo: HOBO_SETTINGS_FILE=tests/settings.py |
18 | 16 |
schemas: DJANGO_SETTINGS_MODULE=hobo.settings |
19 | 17 |
schemas: HOBO_SETTINGS_FILE=tests_schemas/settings.py |
20 | 18 |
multitenant: PYTHONPATH=tests_multitenant |
... | ... | |
22 | 20 |
multipublik: PYTHONPATH=tests_multipublik |
23 | 21 |
multipublik: DJANGO_SETTINGS_MODULE=settings |
24 | 22 |
authentic: DEBIAN_CONFIG_COMMON=debian/debian_config_common.py |
25 | 23 |
authentic: DJANGO_SETTINGS_MODULE=authentic2.settings |
26 | 24 |
authentic: AUTHENTIC2_SETTINGS_FILE=tests_authentic/settings.py |
27 | 25 |
passerelle: DEBIAN_CONFIG_COMMON=debian/debian_config_common.py |
28 | 26 |
passerelle: PASSERELLE_SETTINGS_FILE=tests_passerelle/settings.py |
29 | 27 |
passerelle: DJANGO_SETTINGS_MODULE=passerelle.settings |
30 |
coverage: COVERAGE=--junitxml=junit-{envname}.xml --cov-report xml --cov-report html --cov=hobo/ --cov-config .coveragerc |
|
31 |
fast: NOMIGRATIONS=--nomigrations |
|
28 |
coverage: COVERAGE=--junitxml=junit-{envname}.xml --cov-report xml:coverage-{envname}.xml --cov-report html:htmlcov-{envname} --cov=hobo/ --cov-config .coveragerc |
|
29 |
junit: JUNIT=--junitxml=junit-{envname}.xml |
|
30 |
fast: FAST=--nomigrations |
|
32 | 31 |
oldstylemiddleware: OLD_STYLE_MIDDLEWARE=true |
33 | 32 |
deps: |
34 | 33 |
pytest |
35 | 34 |
pytest-cov |
36 | 35 |
pytest-django |
37 | 36 |
pytest-mock |
38 | 37 |
coverage |
39 | 38 |
cssselect |
... | ... | |
50 | 49 |
multitenant: systemd-python |
51 | 50 |
http://git.entrouvert.org/debian/django-tenant-schemas.git/snapshot/django-tenant-schemas-master.tar.gz |
52 | 51 |
httmock |
53 | 52 |
requests |
54 | 53 |
pytest-freezegun |
55 | 54 |
xmlschema<1.1 |
56 | 55 |
commands = |
57 | 56 |
./getlasso.sh |
58 |
hobo: py.test {env:COVERAGE:} {env:NOMIGRATIONS:} {posargs:tests/} |
|
59 |
schemas: py.test {env:COVERAGE:} {env:NOMIGRATIONS:} {posargs:tests_schemas/} |
|
60 |
multitenant: py.test {env:COVERAGE:} {env:NOMIGRATIONS:} {posargs:tests_multitenant/} |
|
61 |
multipublik: py.test {env:COVERAGE:} {env:NOMIGRATIONS:} {posargs:tests_multipublik/} |
|
62 |
authentic: py.test {env:FAST:} {env:COVERAGE:} {env:NOMIGRATIONS:} {posargs:tests_authentic/} |
|
63 |
passerelle: py.test {env:COVERAGE:} {env:NOMIGRATIONS:} {posargs:tests_passerelle/} |
|
64 |
coverage: mv coverage.xml coverage-{envname}.xml |
|
65 |
coverage: mv htmlcov htmlcov-{envname} |
|
57 |
hobo: py.test {posargs: {env:FAST:} {env:COVERAGE:} {env:JUNIT:} tests/} |
|
58 |
schemas: py.test {posargs: {env:FAST:} {env:COVERAGE:} {env:JUNIT:} tests_schemas/} |
|
59 |
multitenant: py.test {posargs: {env:FAST:} {env:COVERAGE:} {env:JUNIT:} tests_multitenant/} |
|
60 |
multipublik: py.test {posargs: {env:FAST:} {env:COVERAGE:} {env:JUNIT:} tests_multipublik/} |
|
61 |
authentic: py.test {posargs: {env:FAST:} {env:COVERAGE:} {env:JUNIT:} tests_authentic/} |
|
62 |
passerelle: py.test {posargs: {env:FAST:} {env:COVERAGE:} {env:JUNIT:} tests_passerelle/} |
|
66 |
- |