Skip to content
Draft
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
convert even more unit tests to use pytest [WIP]
  • Loading branch information
artemp committed Jan 31, 2023
commit f61bfebcb0211707999b03227be4c3f55cfcdc8c
320 changes: 149 additions & 171 deletions test/python_tests/image_test.py

Large diffs are not rendered by default.

371 changes: 151 additions & 220 deletions test/python_tests/image_tiff_test.py

Large diffs are not rendered by default.

43 changes: 12 additions & 31 deletions test/python_tests/introspection_test.py
Original file line number Diff line number Diff line change
@@ -1,36 +1,21 @@
#!/usr/bin/env python

import os

from nose.tools import eq_

import mapnik

from .utilities import execution_path, run_all


def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))


def test_introspect_symbolizers():
# create a symbolizer
p = mapnik.PointSymbolizer()
p.file = "../data/images/dummy.png"
p.file = "./test/data/images/dummy.png"
p.allow_overlap = True
p.opacity = 0.5

eq_(p.allow_overlap, True)
eq_(p.opacity, 0.5)
eq_(p.filename, '../data/images/dummy.png')
assert p.allow_overlap == True
assert p.opacity == 0.5
assert p.filename == './test/data/images/dummy.png'

# make sure the defaults
# are what we think they are
eq_(p.allow_overlap, True)
eq_(p.opacity, 0.5)
eq_(p.filename, '../data/images/dummy.png')
assert p.allow_overlap == True
assert p.opacity == 0.5
assert p.filename == './test/data/images/dummy.png'

# construct objects to hold it
r = mapnik.Rule()
Expand All @@ -46,20 +31,16 @@ def test_introspect_symbolizers():

s2 = m.find_style('s')
rules = s2.rules
eq_(len(rules), 1)
assert len(rules) == 1
r2 = rules[0]
syms = r2.symbols
eq_(len(syms), 1)
assert len(syms) == 1

# TODO here, we can do...
sym = syms[0]
p2 = sym.extract()
assert isinstance(p2, mapnik.PointSymbolizer)

eq_(p2.allow_overlap, True)
eq_(p2.opacity, 0.5)
eq_(p2.filename, '../data/images/dummy.png')

if __name__ == "__main__":
setup()
exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
assert p2.allow_overlap == True
assert p2.opacity == 0.5
assert p2.filename == './test/data/images/dummy.png'
25 changes: 4 additions & 21 deletions test/python_tests/json_feature_properties_test.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,4 @@
# encoding: utf8

from nose.tools import eq_

import mapnik

from .utilities import run_all

try:
import json
except ImportError:
Expand Down Expand Up @@ -83,30 +76,20 @@
ctx = mapnik.Context()
ctx.push('name')


def test_char_escaping():
    """Round-trip each fixture in ``chars`` through a mapnik.Feature and
    check that mapnik's GeoJSON string escaping agrees with Python's
    ``json`` module, both for the original string and after a full
    serialize/parse round trip.

    Relies on module-level fixtures: ``chars`` (list of dicts with
    'test', 'json' and 'name' keys) and ``ctx`` (a mapnik.Context with a
    'name' property pushed).
    """
    for char in chars:
        feat = mapnik.Feature(ctx, 1)
        expected = char['test']
        feat["name"] = expected
        assert feat["name"] == expected
        # confirm the python json module
        # is working as we would expect
        pyjson2 = json.loads(char['json'])
        assert pyjson2['properties']['name'] == expected
        # confirm our behavior is the same as python json module
        # for the original string
        geojson_feat_string = feat.to_geojson()
        assert geojson_feat_string == char['json'], \
            "Mapnik's json escaping is not to spec: actual(%s) and expected(%s) for %s" % (
                geojson_feat_string, char['json'], char['name'])
        # and the round tripped string
        pyjson = json.loads(geojson_feat_string)
        assert pyjson['properties']['name'] == expected
34 changes: 7 additions & 27 deletions test/python_tests/layer_buffer_size_test.py
Original file line number Diff line number Diff line change
@@ -1,42 +1,22 @@
# coding=utf8
import os

from nose.tools import eq_

import mapnik

from .utilities import execution_path, run_all


def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))

if 'sqlite' in mapnik.DatasourceCache.plugin_names():

    # the negative buffer size on the layer should
    # override the positive map buffer, leading to
    # only one point being rendered in the map
    def test_layer_buffer_size_1():
        """Layer-level buffer_size (-150) must override the map-level
        buffer_size (256) loaded from the XML, and the rendered output
        must match the stored reference image byte-for-byte."""
        m = mapnik.Map(512, 512)
        # a fresh map has no buffer until the XML sets one
        assert m.buffer_size == 0
        mapnik.load_map(m, './test/data/good_maps/layer_buffer_size_reduction.xml')
        assert m.buffer_size == 256
        assert m.layers[0].buffer_size == -150
        m.zoom_all()
        im = mapnik.Image(m.width, m.height)
        mapnik.render(m, im)
        actual = '/tmp/mapnik-layer-buffer-size.png'
        expected = './test/python_tests/images/support/mapnik-layer-buffer-size.png'
        im.save(actual, "png32")
        expected_im = mapnik.Image.open(expected)
        # compare encoded bytes rather than files on disk
        assert im.tostring('png32') == expected_im.tostring('png32'), \
            'failed comparing actual (%s) and expected (%s)' % (
                actual, 'tests/python_tests/' + expected)
47 changes: 13 additions & 34 deletions test/python_tests/layer_modification_test.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,5 @@
#!/usr/bin/env python

import os

from nose.tools import eq_

import mapnik

from .utilities import execution_path, run_all


def setup():
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))


def test_adding_datasource_to_layer():
map_string = '''<?xml version="1.0" encoding="utf-8"?>
<Map>
Expand All @@ -25,7 +10,7 @@ def test_adding_datasource_to_layer():
<!-- leave datasource empty -->
<!--
<Datasource>
<Parameter name="file">../data/shp/world_merc.shp</Parameter>
<Parameter name="file">./test/data/shp/world_merc.shp</Parameter>
<Parameter name="type">shape</Parameter>
</Datasource>
-->
Expand All @@ -39,45 +24,39 @@ def test_adding_datasource_to_layer():
mapnik.load_map_from_string(m, map_string)

# validate it loaded fine
eq_(m.layers[0].styles[0], 'world_borders_style')
eq_(m.layers[0].styles[1], 'point_style')
eq_(len(m.layers), 1)
assert m.layers[0].styles[0] == 'world_borders_style'
assert m.layers[0].styles[1] == 'point_style'
assert len(m.layers) == 1

# also assign a variable reference to that layer
# below we will test that this variable references
# the same object that is attached to the map
lyr = m.layers[0]

# ensure that there was no datasource for the layer...
eq_(m.layers[0].datasource, None)
eq_(lyr.datasource, None)
assert m.layers[0].datasource == None
assert lyr.datasource == None

# also note that since the srs was blank it defaulted to wgs84
eq_(m.layers[0].srs,
'epsg:4326')
eq_(lyr.srs, 'epsg:4326')
assert m.layers[0].srs == 'epsg:4326'
assert lyr.srs == 'epsg:4326'

# now add a datasource one...
ds = mapnik.Shapefile(file='../data/shp/world_merc.shp')
ds = mapnik.Shapefile(file='./test/data/shp/world_merc.shp')
m.layers[0].datasource = ds

# now ensure it is attached
eq_(m.layers[0].datasource.describe()['name'], "shape")
eq_(lyr.datasource.describe()['name'], "shape")
assert m.layers[0].datasource.describe()['name'] == "shape"
assert lyr.datasource.describe()['name'] == "shape"

# and since we have now added a shapefile in spherical mercator, adjust
# the projection
lyr.srs = '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'

# test that assignment
eq_(m.layers[
0].srs, '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
eq_(lyr.srs, '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs')
assert m.layers[0].srs == '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
assert lyr.srs == '+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs'
except RuntimeError as e:
# only test datasources that we have installed
if not 'Could not create datasource' in str(e):
raise RuntimeError(e)

if __name__ == "__main__":
setup()
exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
42 changes: 15 additions & 27 deletions test/python_tests/layer_test.py
Original file line number Diff line number Diff line change
@@ -1,33 +1,21 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from nose.tools import eq_

import mapnik

from .utilities import run_all


# Map initialization


def test_layer_init():
    """Verify the documented defaults of a freshly constructed
    mapnik.Layer: name as given, WGS84 srs, empty envelope/styles,
    no datasource, visible and active, and unset optional settings."""
    l = mapnik.Layer('test')
    assert l.name == 'test'
    # srs defaults to WGS84 when none is supplied
    assert l.srs == 'epsg:4326'
    assert l.envelope() == mapnik.Box2d()
    assert not l.clear_label_cache
    assert not l.cache_features
    assert l.visible(1)
    assert l.active
    assert l.datasource is None
    assert not l.queryable
    assert l.minimum_scale_denominator == 0.0
    assert l.maximum_scale_denominator > 1e+6
    assert l.group_by == ""
    assert l.maximum_extent is None
    assert l.buffer_size is None
    assert len(l.styles) == 0
30 changes: 3 additions & 27 deletions test/python_tests/load_map_test.py
Original file line number Diff line number Diff line change
@@ -1,31 +1,11 @@
#!/usr/bin/env python

import glob
import os

from nose.tools import eq_

import mapnik

from .utilities import execution_path, run_all


default_logging_severity = mapnik.logger.get_severity()


def setup():
# make the tests silent to suppress unsupported params from harfbuzz tests
# TODO: remove this after harfbuzz branch merges
mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
# All of the paths used are relative, if we run the tests
# from another directory we need to chdir()
os.chdir(execution_path('.'))


def teardown():
mapnik.logger.set_severity(default_logging_severity)


def test_broken_files():
default_logging_severity = mapnik.logger.get_severity()
mapnik.logger.set_severity(getattr(mapnik.severity_type, "None"))
Expand All @@ -44,7 +24,7 @@ def test_broken_files():
filename)
except RuntimeError:
pass
eq_(len(failures), 0, '\n' + '\n'.join(failures))
assert len(failures) == 0, '\n' + '\n'.join(failures)
mapnik.logger.set_severity(default_logging_severity)


Expand Down Expand Up @@ -75,7 +55,7 @@ def test_can_parse_xml_with_deprecated_properties():
failures.append(
'Failed to load valid map %s (%s)' %
(filename, e))
eq_(len(failures), 0, '\n' + '\n'.join(failures))
assert len(failures) == 0, '\n' + '\n'.join(failures)
mapnik.logger.set_severity(default_logging_severity)


Expand All @@ -100,8 +80,4 @@ def test_good_files():
failures.append(
'Failed to load valid map %s (%s)' %
(filename, e))
eq_(len(failures), 0, '\n' + '\n'.join(failures))

if __name__ == "__main__":
setup()
exit(run_all(eval(x) for x in dir() if x.startswith("test_")))
assert len(failures) == 0, '\n' + '\n'.join(failures)
Loading