From b78e427fca092e35c4b7788b131fae4d334e132b Mon Sep 17 00:00:00 2001
From: Duane Wilson
Date: Mon, 14 Jan 2019 11:37:05 -0400
Subject: [PATCH 1/4] Add Analogous Event Loss Set Class

- A class built as a stand-in for an analogous event analysis.
- This builds a parametric loss set based on a set of loss sets as
  sources and event IDs.
---
 analyzere_extras/loss_sets.py | 174 ++++++++++++++++++++++++++++++++++
 tests/test_loss_sets.py       | 130 +++++++++++++++++++++++++
 tox.ini                       |   2 +-
 3 files changed, 305 insertions(+), 1 deletion(-)
 create mode 100644 analyzere_extras/loss_sets.py
 create mode 100644 tests/test_loss_sets.py

diff --git a/analyzere_extras/loss_sets.py b/analyzere_extras/loss_sets.py
new file mode 100644
index 0000000..acdc642
--- /dev/null
+++ b/analyzere_extras/loss_sets.py
@@ -0,0 +1,174 @@
+import hashlib
+import csv
+from collections import defaultdict
+
+from analyzere import (
+    AnalysisProfile,
+    LossSet,
+    Distribution,
+    Layer,
+    LayerView,
+    LossFilter
+)
+
+# Analogous Event Scenario Loss Set. This class is designed to take a set
+# of loss sets and a list of Event IDs and combine them into a single loss
+# set for doing realistic disaster scenario type analysis.
+class AnalogousEventLossSet(LossSet):
+    _collection_name = 'loss_sets'
+
+    def __init__(self,
+                analysis_profile='',
+                load=1.0,
+                source_events=[],
+                sources=[],
+                occurrence_probability=1.0,
+                **kwargs):
+
+        self.analysis_profile = analysis_profile
+        self.source_events = source_events
+        self.sources = sources
+        self.load = load
+        self.occurrence_probability = occurrence_probability
+
+        return super().__init__(
+            type='ParametricLossSet',
+            **kwargs
+        )
+    
+
+    def _retrieve_loss_data(self):
+        loss_data = {}
+        for event in self.source_events:
+            event_filter = LossFilter(
+                type='AnyOfFilter',
+                name='Event ' + str(event),
+                attribute='EventID',
+                values=[event]
+            )
+
+            filter_layer = Layer(
+                type='FilterLayer',
+                description='Event ' + str(event),
+                filters=[event_filter],
+                loss_sets=self.sources
+            )
+
+            yelt = LayerView(
+                analysis_profile=self.analysis_profile,
+                layer=filter_layer
+            ).save().download_yelt(secondary_uncertainty=False)
+
+            yelt_reader = csv.DictReader(yelt.decode('utf-8').splitlines())
+            loss_data[event] = [float(row['Loss']) for row in yelt_reader]
+
+        self._loss_data = loss_data
+
+
+    def _construct_severity_distribution(self):
+        self._severity_distr = 'Probability,Loss\n'
+        event_probability = 1.0/len(self.source_events)
+
+        value_probabilities = defaultdict(float)
+        # Creating the probability for each unique value. This ensures the
+        # severity distribution string is as small as possible.
+        for event in self.source_events:
+            if len(self._loss_data[event]) != 0:
+                # Note that a single event id may occur several times in a
+                # simulation with different loss values. Each of those values
+                # should have the same probability of occurring. The probability
+                # of all potential loss values for a single event should add
+                # to the probability of the event.
+                instance_prob = event_probability/len(self._loss_data[event])
+                for loss in self._loss_data[event]:
+                    value_probabilities[loss * self.load] += instance_prob
+            else:
+                value_probabilities[0.0] += event_probability
+
+        # Adding the unique values to the severity distribution file that
+        # will be uploaded.
+        loss_values = sorted(list(value_probabilities.keys()))
+        for key in loss_values:
+            self._severity_distr += str(value_probabilities[key]) + ',' \
+                + str(key) + '\n'
+
+
+    def _upload_severity_distribution(self):
+        data_hash = hashlib.md5(self._severity_distr.encode()).hexdigest()
+        severity_description = 'ARe-Python-Extras AnalogousEventLossSetELS ' \
+        + 'Generated Resource: ' + data_hash
+
+        distribution_search = Distribution.list(search=severity_description)
+        # Check if severity distribution has been created on the server.
+        if len(distribution_search) > 0:
+            self.severity = distribution_search[0]
+        else:
+            severity_distr = Distribution(
+                type='CustomSeverityDistribution',
+                description=severity_description,
+            ).save()
+            severity_distr.upload_data(self._severity_distr)
+            self.severity = severity_distr
+
+
+    def _upload_frequency_distribution(self):
+        freq_description = 'ARe-Python-Extras AnalogousEventLossSetELS ' \
+        + 'Generated Resource: Frequency ' + str(self.occurrence_probability)
+
+        distribution_search = Distribution.list(search=freq_description)
+        if len(distribution_search) > 0:
+            self.frequency = distribution_search[0]
+        else:
+            freq_distr = Distribution(
+                type='BinomialDistribution',
+                description=freq_description,
+                n=1,
+                p=self.occurrence_probability
+            ).save()
+            self.frequency = freq_distr
+
+
+    def _upload_seasonality_distribution(self):
+        seasonality_description = 'ARe-Python-Extras AnalogousEventLossSetELS ' \
+        + 'Generated Resource: Seasonality 0.0'
+
+        distribution_search = Distribution.list(search=seasonality_description)
+        if len(distribution_search) > 0:
+            self.seasonality = distribution_search[0]
+        else:
+            seasonality_distr = Distribution(
+                type='DiracDistribution',
+                description=seasonality_description,
+                value=0.0,
+            ).save()
+            self.seasonality = seasonality_distr
+
+
+    def save(self):
+        # Collect keys to retain on the type after saving. Otherwise this
+        # information is lost by the super class's save method
+        keys_to_retain = ['analysis_profile', 'source_events', 'sources',
+                         'load', 'occurrence_probability']
+        values_to_retain = {key: self.__dict__[key] for key in keys_to_retain}
+
+        # Adding the above information to loss set's meta_data so that it is
+        # retrievable at a later date.
+        self.meta_data = {}
+        self.meta_data['analysis_profile'] = self.analysis_profile.id
+        self.meta_data['source_events'] = ','.join(map(str, self.source_events))
+        self.meta_data['sources'] = \
+            ','.join([source.id for source in self.sources])
+        self.meta_data['load'] = self.load
+        self.meta_data['occurrence_probability'] = self.occurrence_probability
+        self.meta_data['_type'] = 'AnalogousEventLossSet'
+
+        self._retrieve_loss_data()
+        self._construct_severity_distribution()
+        self._upload_severity_distribution()
+        self._upload_frequency_distribution()
+        self._upload_seasonality_distribution()
+        super().save()
+
+        # Merging the retained values back into the class.
+        self.__dict__.update(values_to_retain)
+        return self
diff --git a/tests/test_loss_sets.py b/tests/test_loss_sets.py
new file mode 100644
index 0000000..1aa6f5a
--- /dev/null
+++ b/tests/test_loss_sets.py
@@ -0,0 +1,130 @@
+import pytest
+import requests_mock
+from analyzere_extras.loss_sets import AnalogousEventLossSet
+import analyzere
+from analyzere import AnalysisProfile
+
+def are_mocker():
+    m = requests_mock.Mocker()
+    m.start()
+    m.register_uri('POST', 'https://api/layer_views/',
+        [{'status_code': 200, 'text': '{"id": "1", "server_generated": "foo"}'},
+         {'status_code': 200, 'text': '{"id": "2", "server_generated": "foo"}'},
+         {'status_code': 200, 'text': '{"id": "3", "server_generated": "foo"}'},
+         {'status_code': 200, 'text': '{"id": "4", "server_generated": "foo"}'}]
+    )
+
+    m.get('https://api/layer_views/1/yelt?secondary_uncertainty=false',
+        status_code=200,
+        text="""Trial,EventId,Sequence,Loss
+        1,1,0.0,100.0"""
+    )
+
+    m.get('https://api/layer_views/2/yelt?secondary_uncertainty=false',
+        status_code=200,
+        text='Trial,EventId,Sequence,Loss'
+    )
+
+    m.get('https://api/layer_views/3/yelt?secondary_uncertainty=false',
+        status_code=200,
+        text="""Trial,EventId,Sequence,Loss
+        1,3,0.0,100.0
+        2,3,0.0,50.0"""
+    )
+
+    m.get('https://api/layer_views/4/yelt?secondary_uncertainty=false',
+        status_code=200,
+        text="""Trial,EventId,Sequence,Loss
+        1,4,0.0,200.0"""
+    )
+
+    # Mocking Distribution Uploads
+    ## Distributions.save()
+    m.register_uri('POST', 'https://api/distributions/',
+        [{'status_code': 200, 'text': '{"id": "d1", "server_generated": "fo"}'},
+         {'status_code': 200, 'text': '{"id": "d2", "server_generated": "fo"}'},
+         {'status_code': 200, 'text': '{"id": "d3", "server_generated": "fo"}'}]
+    )
+
+    ## Distributions.list(...)
+    m.get('https://api/distributions/?', status_code=200, text='[]')
+
+    ## Distribution.upload_data()
+    m.post('https://api/distributions/d1/data', status_code=201, text='data')
+    m.patch('https://api/distributions/d1/data', status_code=204)
+    m.post('https://api/distributions/d1/data/commit', status_code=204)
+    m.get('https://api/distributions/d1/data/status', status_code=200,
+        text='{"status": "Processing Successful"}')
+
+    # LossSet.save()
+    m.post('https://api/loss_sets/', status_code=200,
+        text='{"id": "ls1", "server_generate": "foo"}')
+
+    return m
+
+class SetBaseUrl(object):
+    def setup_method(self, _):
+        analyzere.base_url = 'https://api'
+
+    def teardown_method(self, _):
+        analyzere.base_url = ''
+
+class TestAnalogousEventLossSet(SetBaseUrl):
+    def test_null_construction(self):
+        ae_ls = AnalogousEventLossSet()
+        assert ae_ls.type == 'ParametricLossSet'
+        assert ae_ls.analysis_profile == ''
+        assert ae_ls.load == 1.0
+        assert ae_ls.sources == []
+        assert ae_ls.source_events == []
+        assert ae_ls.occurrence_probability == 1.0
+
+
+    def test_retrieve_loss_data(self):
+        m = are_mocker()
+        ae_ls = AnalogousEventLossSet(sources=['abc123'], source_events=[1])
+        ae_ls._retrieve_loss_data()
+        m.stop()
+        assert ae_ls._loss_data == {1: [100.0]}
+
+        m = are_mocker()
+        ae_ls = AnalogousEventLossSet(sources=['abc123'], source_events=[1,2])
+        ae_ls._retrieve_loss_data()
+        m.stop()
+        assert ae_ls._loss_data == {1: [100.0], 2: []}
+
+
+    def test_severity_distribution(self):
+        m = are_mocker()
+        ae_ls = AnalogousEventLossSet(source_events=[1])
+        ae_ls._retrieve_loss_data()
+        ae_ls._construct_severity_distribution()
+        m.stop()
+        assert ae_ls._severity_distr == "Probability,Loss\n1.0,100.0\n"
+
+        m = are_mocker()
+        ae_ls = AnalogousEventLossSet(source_events=[1,2])
+        ae_ls._retrieve_loss_data()
+        ae_ls._construct_severity_distribution()
+        m.stop()
+        assert ae_ls._severity_distr == "Probability,Loss\n0.5,0.0\n0.5,100.0\n"
+
+        m = are_mocker()
+        ae_ls = AnalogousEventLossSet(source_events=[1,2,3,4])
+        ae_ls._retrieve_loss_data()
+        ae_ls._construct_severity_distribution()
+        m.stop()
+        assert (ae_ls._severity_distr == 'Probability,Loss\n' \
+            + '0.25,0.0\n0.125,50.0\n0.375,100.0\n0.25,200.0\n')
+
+    def test_save(self):
+        m = are_mocker()
+        ae_ls = AnalogousEventLossSet(
+            analysis_profile=AnalysisProfile(id='ap1'),
+            source_events=[1]
+        )
+        ae_ls.save()
+        m.stop()
+        for attribute in ['analysis_profile', 'source_events', 'sources',
+                         'load', 'occurrence_probability']:
+            assert hasattr(ae_ls, attribute)
diff --git a/tox.ini b/tox.ini
index a53491f..0b088fc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py27, py34
+envlist = py27, py34, py36
 
 [testenv]
 deps = -r{toxinidir}/requirements/test.txt

From 6bd9239bb20ca8f410ac39525912e17b5eab0a69 Mon Sep 17 00:00:00 2001
From: Duane Wilson
Date: Mon, 14 Jan 2019 13:23:30 -0400
Subject: [PATCH 2/4] Using backwards-compatible version of super()

---
 analyzere_extras/loss_sets.py | 6 +++---
 tests/test_loss_sets.py       | 2 +-
 tests/test_visualizations.py  | 2 +-
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/analyzere_extras/loss_sets.py b/analyzere_extras/loss_sets.py
index acdc642..be63825 100644
--- a/analyzere_extras/loss_sets.py
+++ b/analyzere_extras/loss_sets.py
@@ -31,11 +31,11 @@ def __init__(self,
         self.load = load
         self.occurrence_probability = occurrence_probability
 
-        return super().__init__(
+        return super(AnalogousEventLossSet, self).__init__(
             type='ParametricLossSet',
             **kwargs
         )
-    
+
 
     def _retrieve_loss_data(self):
         loss_data = {}
@@ -167,7 +167,7 @@ def save(self):
         self._upload_severity_distribution()
         self._upload_frequency_distribution()
         self._upload_seasonality_distribution()
-        super().save()
+        super(AnalogousEventLossSet, self).save()
 
         # Merging the retained values back into the class.
         self.__dict__.update(values_to_retain)
diff --git a/tests/test_loss_sets.py b/tests/test_loss_sets.py
index 1aa6f5a..54f5448 100644
--- a/tests/test_loss_sets.py
+++ b/tests/test_loss_sets.py
@@ -67,7 +67,7 @@ def setup_method(self, _):
         analyzere.base_url = 'https://api'
 
     def teardown_method(self, _):
-        analyzere.base_url = ''
+        analyzere.base_url = 'http://localhost:8000/'
 
 class TestAnalogousEventLossSet(SetBaseUrl):
     def test_null_construction(self):
diff --git a/tests/test_visualizations.py b/tests/test_visualizations.py
index 2527cf4..48315fb 100644
--- a/tests/test_visualizations.py
+++ b/tests/test_visualizations.py
@@ -787,7 +787,7 @@ def test_render_rankdir(self, layer_view):
 
     def test_from_id(self):
         """Requests by Id don't work unless you have defined the following
-        analyzere varialbes, and a connecton can be established
+        analyzere variables, and a connection can be established
         - analyzere.base_url
         - analyzere.username
         - analyzere.password

From cb5f324e42019c7776dc61781e0f3b3a6ba08769 Mon Sep 17 00:00:00 2001
From: Duane Wilson
Date: Mon, 14 Jan 2019 13:45:01 -0400
Subject: [PATCH 3/4] Adding py36 environment and fixing style.
---
 .travis.yml                   |  1 +
 analyzere_extras/loss_sets.py | 38 ++++++++++--------
 tests/test_loss_sets.py       | 81 ++++++++++++++++++-----------------
 3 files changed, 61 insertions(+), 59 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 31922e0..0029e37 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,6 +5,7 @@ language: python
 env:
   - TOXENV=py27
   - TOXENV=py34
+  - TOXENV=py36
 install:
   - pip install tox
 script:
diff --git a/analyzere_extras/loss_sets.py b/analyzere_extras/loss_sets.py
index be63825..9b43646 100644
--- a/analyzere_extras/loss_sets.py
+++ b/analyzere_extras/loss_sets.py
@@ -3,7 +3,6 @@
 from collections import defaultdict
 
 from analyzere import (
-    AnalysisProfile,
     LossSet,
     Distribution,
     Layer,
@@ -14,16 +13,19 @@
 # Analogous Event Scenario Loss Set. This class is designed to take a set
 # of loss sets and a list of Event IDs and combine them into a single loss
 # set for doing realistic disaster scenario type analysis.
+
+
 class AnalogousEventLossSet(LossSet):
+
     _collection_name = 'loss_sets'
 
     def __init__(self,
-                analysis_profile='',
-                load=1.0,
-                source_events=[],
-                sources=[],
-                occurrence_probability=1.0,
-                **kwargs):
+                 analysis_profile='',
+                 load=1.0,
+                 source_events=[],
+                 sources=[],
+                 occurrence_probability=1.0,
+                 **kwargs):
 
         self.analysis_profile = analysis_profile
         self.source_events = source_events
@@ -36,7 +38,6 @@ def __init__(self,
             **kwargs
         )
 
-
     def _retrieve_loss_data(self):
         loss_data = {}
         for event in self.source_events:
@@ -64,7 +65,6 @@ def _retrieve_loss_data(self):
 
         self._loss_data = loss_data
 
-
     def _construct_severity_distribution(self):
         self._severity_distr = 'Probability,Loss\n'
         event_probability = 1.0/len(self.source_events)
@@ -92,11 +92,11 @@ def _construct_severity_distribution(self):
             self._severity_distr += str(value_probabilities[key]) + ',' \
                 + str(key) + '\n'
 
-
     def _upload_severity_distribution(self):
         data_hash = hashlib.md5(self._severity_distr.encode()).hexdigest()
+
         severity_description = 'ARe-Python-Extras AnalogousEventLossSetELS ' \
-        + 'Generated Resource: ' + data_hash
+            + 'Generated Resource: ' + data_hash
 
         distribution_search = Distribution.list(search=severity_description)
         # Check if severity distribution has been created on the server.
@@ -110,10 +110,10 @@ def _upload_severity_distribution(self):
             severity_distr.upload_data(self._severity_distr)
             self.severity = severity_distr
 
-
     def _upload_frequency_distribution(self):
         freq_description = 'ARe-Python-Extras AnalogousEventLossSetELS ' \
-        + 'Generated Resource: Frequency ' + str(self.occurrence_probability)
+            + 'Generated Resource: Frequency ' \
+            + str(self.occurrence_probability)
 
         distribution_search = Distribution.list(search=freq_description)
         if len(distribution_search) > 0:
@@ -127,10 +127,10 @@ def _upload_frequency_distribution(self):
             ).save()
             self.frequency = freq_distr
 
-
     def _upload_seasonality_distribution(self):
-        seasonality_description = 'ARe-Python-Extras AnalogousEventLossSetELS ' \
-        + 'Generated Resource: Seasonality 0.0'
+        seasonality_description = \
+            'ARe-Python-Extras AnalogousEventLossSetELS ' \
+            + 'Generated Resource: Seasonality 0.0'
 
         distribution_search = Distribution.list(search=seasonality_description)
         if len(distribution_search) > 0:
@@ -143,19 +143,19 @@ def _upload_seasonality_distribution(self):
             ).save()
             self.seasonality = seasonality_distr
 
-
     def save(self):
         # Collect keys to retain on the type after saving. Otherwise this
         # information is lost by the super class's save method
         keys_to_retain = ['analysis_profile', 'source_events', 'sources',
-                         'load', 'occurrence_probability']
+                          'load', 'occurrence_probability']
         values_to_retain = {key: self.__dict__[key] for key in keys_to_retain}
 
         # Adding the above information to loss set's meta_data so that it is
         # retrievable at a later date.
         self.meta_data = {}
         self.meta_data['analysis_profile'] = self.analysis_profile.id
-        self.meta_data['source_events'] = ','.join(map(str, self.source_events))
+        self.meta_data['source_events'] = \
+            ','.join(map(str, self.source_events))
         self.meta_data['sources'] = \
             ','.join([source.id for source in self.sources])
         self.meta_data['load'] = self.load
diff --git a/tests/test_loss_sets.py b/tests/test_loss_sets.py
index 54f5448..6a65fab 100644
--- a/tests/test_loss_sets.py
+++ b/tests/test_loss_sets.py
@@ -1,67 +1,68 @@
-import pytest
 import requests_mock
 from analyzere_extras.loss_sets import AnalogousEventLossSet
 import analyzere
 from analyzere import AnalysisProfile
 
+
 def are_mocker():
     m = requests_mock.Mocker()
     m.start()
-    m.register_uri('POST', 'https://api/layer_views/',
-        [{'status_code': 200, 'text': '{"id": "1", "server_generated": "foo"}'},
-         {'status_code': 200, 'text': '{"id": "2", "server_generated": "foo"}'},
-         {'status_code': 200, 'text': '{"id": "3", "server_generated": "foo"}'},
-         {'status_code': 200, 'text': '{"id": "4", "server_generated": "foo"}'}]
+    m.register_uri(
+        'POST',
+        'https://api/layer_views/',
+        [{'status_code': 200, 'text': '{"id": "1"}'},
+         {'status_code': 200, 'text': '{"id": "2"}'},
+         {'status_code': 200, 'text': '{"id": "3"}'},
+         {'status_code': 200, 'text': '{"id": "4"}'}]
     )
 
     m.get('https://api/layer_views/1/yelt?secondary_uncertainty=false',
-        status_code=200,
-        text="""Trial,EventId,Sequence,Loss
-        1,1,0.0,100.0"""
-    )
+          status_code=200,
+          text="""Trial,EventId,Sequence,Loss
+          1,1,0.0,100.0""")
 
     m.get('https://api/layer_views/2/yelt?secondary_uncertainty=false',
-        status_code=200,
-        text='Trial,EventId,Sequence,Loss'
-    )
+          status_code=200,
+          text='Trial,EventId,Sequence,Loss')
 
     m.get('https://api/layer_views/3/yelt?secondary_uncertainty=false',
-        status_code=200,
-        text="""Trial,EventId,Sequence,Loss
-        1,3,0.0,100.0
-        2,3,0.0,50.0"""
-    )
+          status_code=200,
+          text="""Trial,EventId,Sequence,Loss
+          1,3,0.0,100.0
+          2,3,0.0,50.0""")
 
     m.get('https://api/layer_views/4/yelt?secondary_uncertainty=false',
-        status_code=200,
-        text="""Trial,EventId,Sequence,Loss
-        1,4,0.0,200.0"""
-    )
+          status_code=200,
+          text="""Trial,EventId,Sequence,Loss
+          1,4,0.0,200.0""")
 
     # Mocking Distribution Uploads
-    ## Distributions.save()
-    m.register_uri('POST', 'https://api/distributions/',
-        [{'status_code': 200, 'text': '{"id": "d1", "server_generated": "fo"}'},
-         {'status_code': 200, 'text': '{"id": "d2", "server_generated": "fo"}'},
-         {'status_code': 200, 'text': '{"id": "d3", "server_generated": "fo"}'}]
+    # Distributions.save()
+    m.register_uri(
+        'POST',
+        'https://api/distributions/',
+        [{'status_code': 200, 'text': '{"id": "d1"}'},
+         {'status_code': 200, 'text': '{"id": "d2"}'},
+         {'status_code': 200, 'text': '{"id": "d3"}'}]
     )
 
-    ## Distributions.list(...)
+    # Distributions.list(...)
     m.get('https://api/distributions/?', status_code=200, text='[]')
 
-    ## Distribution.upload_data()
+    # Distribution.upload_data()
     m.post('https://api/distributions/d1/data', status_code=201, text='data')
     m.patch('https://api/distributions/d1/data', status_code=204)
     m.post('https://api/distributions/d1/data/commit', status_code=204)
     m.get('https://api/distributions/d1/data/status', status_code=200,
-        text='{"status": "Processing Successful"}')
+          text='{"status": "Processing Successful"}')
 
     # LossSet.save()
     m.post('https://api/loss_sets/', status_code=200,
-        text='{"id": "ls1", "server_generate": "foo"}')
+           text='{"id": "ls1", "server_generate": "foo"}')
 
     return m
 
+
 class SetBaseUrl(object):
     def setup_method(self, _):
         analyzere.base_url = 'https://api'
@@ -69,6 +70,7 @@ def setup_method(self, _):
     def teardown_method(self, _):
         analyzere.base_url = 'http://localhost:8000/'
 
+
 class TestAnalogousEventLossSet(SetBaseUrl):
     def test_null_construction(self):
         ae_ls = AnalogousEventLossSet()
@@ -79,7 +81,6 @@ def test_null_construction(self):
         assert ae_ls.source_events == []
         assert ae_ls.occurrence_probability == 1.0
 
-
     def test_retrieve_loss_data(self):
         m = are_mocker()
         ae_ls = AnalogousEventLossSet(sources=['abc123'], source_events=[1])
@@ -88,12 +89,11 @@ def test_retrieve_loss_data(self):
         assert ae_ls._loss_data == {1: [100.0]}
 
         m = are_mocker()
-        ae_ls = AnalogousEventLossSet(sources=['abc123'], source_events=[1,2])
+        ae_ls = AnalogousEventLossSet(sources=['abc123'], source_events=[1, 2])
         ae_ls._retrieve_loss_data()
         m.stop()
         assert ae_ls._loss_data == {1: [100.0], 2: []}
 
-
     def test_severity_distribution(self):
         m = are_mocker()
         ae_ls = AnalogousEventLossSet(source_events=[1])
@@ -103,19 +103,20 @@ def test_severity_distribution(self):
         assert ae_ls._severity_distr == "Probability,Loss\n1.0,100.0\n"
 
         m = are_mocker()
-        ae_ls = AnalogousEventLossSet(source_events=[1,2])
+        ae_ls = AnalogousEventLossSet(source_events=[1, 2])
         ae_ls._retrieve_loss_data()
         ae_ls._construct_severity_distribution()
         m.stop()
-        assert ae_ls._severity_distr == "Probability,Loss\n0.5,0.0\n0.5,100.0\n"
+        assert ae_ls._severity_distr == \
+            "Probability,Loss\n0.5,0.0\n0.5,100.0\n"
 
         m = are_mocker()
-        ae_ls = AnalogousEventLossSet(source_events=[1,2,3,4])
+        ae_ls = AnalogousEventLossSet(source_events=[1, 2, 3, 4])
         ae_ls._retrieve_loss_data()
         ae_ls._construct_severity_distribution()
         m.stop()
-        assert (ae_ls._severity_distr == 'Probability,Loss\n' \
-            + '0.25,0.0\n0.125,50.0\n0.375,100.0\n0.25,200.0\n')
+        assert (ae_ls._severity_distr == 'Probability,Loss\n'
+                + '0.25,0.0\n0.125,50.0\n0.375,100.0\n0.25,200.0\n')
 
     def test_save(self):
         m = are_mocker()
@@ -126,5 +127,5 @@ def test_save(self):
         ae_ls.save()
         m.stop()
         for attribute in ['analysis_profile', 'source_events', 'sources',
-                         'load', 'occurrence_probability']:
+                          'load', 'occurrence_probability']:
             assert hasattr(ae_ls, attribute)

From beb9ff2ba81d9906c75be6e067aa9ef11ee5f7eb Mon Sep 17 00:00:00 2001
From: Duane Wilson
Date: Mon, 14 Jan 2019 13:49:38 -0400
Subject: [PATCH 4/4] Updating travis config.
---
 .travis.yml | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 0029e37..fb3113f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,13 +2,12 @@ before_install:
   - sudo apt-get -qq update
   - sudo apt-get install -y graphviz
 language: python
-env:
-  - TOXENV=py27
-  - TOXENV=py34
-  - TOXENV=py36
-install:
-  - pip install tox
-script:
-  - tox
+python:
+  - "2.7"
+  - "3.4"
+  - "3.5"
+  - "3.6"
+install: pip install tox-travis
+script: tox
 notifications:
   email: false
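
Usage sketch for the AnalogousEventLossSet class introduced in PATCH 1/4.
This is illustrative only: the server URL, credentials, and resource UUIDs
below are placeholders, and it assumes an analysis profile and source loss
sets already exist on the server (retrieve() is the standard analyzere
client lookup).

    import analyzere
    from analyzere import AnalysisProfile, LossSet
    from analyzere_extras.loss_sets import AnalogousEventLossSet

    # Placeholder connection settings; substitute a real server and account.
    analyzere.base_url = 'https://api.example-are-server.com'
    analyzere.username = 'user'
    analyzere.password = 'password'

    # Placeholder UUIDs for existing server resources.
    profile = AnalysisProfile.retrieve('<analysis-profile-uuid>')
    sources = [LossSet.retrieve('<source-loss-set-uuid>')]

    # Combine two (hypothetical) historical event IDs into one scenario
    # loss set. Each event receives probability 1/2 in the generated
    # severity distribution, retrieved losses are scaled by the 10% load,
    # and the scenario occurs in a given trial with probability 0.02
    # (BinomialDistribution with n=1).
    scenario = AnalogousEventLossSet(
        analysis_profile=profile,
        sources=sources,
        source_events=[101, 204],
        load=1.1,
        occurrence_probability=0.02,
        description='Analogous event scenario loss set'
    ).save()

Because save() echoes its inputs into meta_data, the analysis profile,
sources, source events, load, and occurrence probability of a saved
scenario can be recovered later from the loss set's metadata.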