Analogous Event Loss Set Class #12

Open · wants to merge 4 commits into base: master
14 changes: 7 additions & 7 deletions .travis.yml
@@ -2,12 +2,12 @@ before_install:
   - sudo apt-get -qq update
   - sudo apt-get install -y graphviz
 language: python
-env:
-  - TOXENV=py27
-  - TOXENV=py34
-install:
-  - pip install tox
-script:
-  - tox
+python:
+  - "2.7"
+  - "3.4"
+  - "3.5"
+  - "3.6"
+install: pip install tox-travis
+script: tox
 notifications:
   email: false
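Note: tox-travis selects tox environments from the Travis Python version, so the explicit TOXENV matrix is no longer needed. If finer control is ever wanted, tox-travis also reads a mapping from tox.ini; the sketch below is illustrative only and is not part of this PR:

```ini
; Hypothetical, optional mapping for tox-travis (not included in this PR).
; By default tox-travis runs the env whose name matches the Travis Python
; version (e.g. 3.6 -> py36).
[travis]
python =
    2.7: py27
    3.4: py34
    3.6: py36
```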
174 changes: 174 additions & 0 deletions analyzere_extras/loss_sets.py
@@ -0,0 +1,174 @@
import hashlib
import csv
from collections import defaultdict

from analyzere import (
    LossSet,
    Distribution,
    Layer,
    LayerView,
    LossFilter
)

# Analogous Event Scenario Loss Set. This class is designed to take a set
# of loss sets and a list of Event IDs and combine them into a single loss
# set for doing realistic disaster scenario type analysis.


class AnalogousEventLossSet(LossSet):

    _collection_name = 'loss_sets'

    def __init__(self,
                 analysis_profile='',
                 load=1.0,
                 source_events=[],
                 sources=[],
                 occurrence_probability=1.0,
                 **kwargs):

        self.analysis_profile = analysis_profile
        self.source_events = source_events
        self.sources = sources
        self.load = load
        self.occurrence_probability = occurrence_probability

        return super(AnalogousEventLossSet, self).__init__(
            type='ParametricLossSet',
            **kwargs
        )

    def _retrieve_loss_data(self):
        loss_data = {}
        for event in self.source_events:
            event_filter = LossFilter(
                type='AnyOfFilter',
                name='Event ' + str(event),
                attribute='EventID',
                values=[event]
            )

            filter_layer = Layer(
                type='FilterLayer',
                description='Event ' + str(event),
                filters=[event_filter],
                loss_sets=self.sources
            )

            yelt = LayerView(
                analysis_profile=self.analysis_profile,
                layer=filter_layer
            ).save().download_yelt(secondary_uncertainty=False)

            yelt_reader = csv.DictReader(yelt.decode('utf-8').splitlines())
            loss_data[event] = [float(row['Loss']) for row in yelt_reader]

        self._loss_data = loss_data

    def _construct_severity_distribution(self):
        self._severity_distr = 'Probability,Loss\n'
        event_probability = 1.0/len(self.source_events)

        value_probabilities = defaultdict(float)
        # Creating the probability for each unique value. This ensures the
        # severity distribution string is as small as possible.
        for event in self.source_events:
            if len(self._loss_data[event]) != 0:
                # Note that a single event id may occur several times in a
                # simulation with different loss values. Each of those values
                # should have the same probability of occurring. The
                # probability of all potential loss values for a single
                # event should add to the probability of the event.
                instance_prob = event_probability/len(self._loss_data[event])
                for loss in self._loss_data[event]:
                    value_probabilities[loss * self.load] += instance_prob
            else:
                value_probabilities[0.0] += event_probability

        # Adding the unique values to the severity distribution file that
        # will be uploaded.
        loss_values = sorted(list(value_probabilities.keys()))
        for key in loss_values:
            self._severity_distr += str(value_probabilities[key]) + ',' \
                + str(key) + '\n'

    def _upload_severity_distribution(self):
        data_hash = hashlib.md5(self._severity_distr.encode()).hexdigest()

        severity_description = 'ARe-Python-Extras AnalogousEventLossSetELS ' \
            + 'Generated Resource: ' + data_hash

        distribution_search = Distribution.list(search=severity_description)
        # Check if the severity distribution has been created on the server.
        if len(distribution_search) > 0:
            self.severity = distribution_search[0]
        else:
            severity_distr = Distribution(
                type='CustomSeverityDistribution',
                description=severity_description,
            ).save()
            severity_distr.upload_data(self._severity_distr)
            self.severity = severity_distr

    def _upload_frequency_distribution(self):
        freq_description = 'ARe-Python-Extras AnalogousEventLossSetELS ' \
            + 'Generated Resource: Frequency ' \
            + str(self.occurrence_probability)

        distribution_search = Distribution.list(search=freq_description)
        if len(distribution_search) > 0:
            self.frequency = distribution_search[0]
        else:
            freq_distr = Distribution(
                type='BinomialDistribution',
                description=freq_description,
                n=1,
                p=self.occurrence_probability
            ).save()
            self.frequency = freq_distr

    def _upload_seasonality_distribution(self):
        seasonality_description = \
            'ARe-Python-Extras AnalogousEventLossSetELS ' \
            + 'Generated Resource: Seasonality 0.0'

        distribution_search = Distribution.list(search=seasonality_description)
        if len(distribution_search) > 0:
            self.seasonality = distribution_search[0]
        else:
            seasonality_distr = Distribution(
                type='DiracDistribution',
                description=seasonality_description,
                value=0.0,
            ).save()
            self.seasonality = seasonality_distr

    def save(self):
        # Collect keys to retain on the type after saving. Otherwise this
        # information is lost by the super class's save method.
        keys_to_retain = ['analysis_profile', 'source_events', 'sources',
                          'load', 'occurrence_probability']
        values_to_retain = {key: self.__dict__[key] for key in keys_to_retain}

        # Adding the above information to the loss set's meta_data so that
        # it is retrievable at a later date.
        self.meta_data = {}
        self.meta_data['analysis_profile'] = self.analysis_profile.id
        self.meta_data['source_events'] = \
            ','.join(map(str, self.source_events))
        self.meta_data['sources'] = \
            ','.join([source.id for source in self.sources])
        self.meta_data['load'] = self.load
        self.meta_data['occurrence_probability'] = self.occurrence_probability
        self.meta_data['_type'] = 'AnalogousEventLossSet'

        self._retrieve_loss_data()
        self._construct_severity_distribution()
        self._upload_severity_distribution()
        self._upload_frequency_distribution()
        self._upload_seasonality_distribution()
        super(AnalogousEventLossSet, self).save()

        # Merging the retained values back into the class.
        self.__dict__.update(values_to_retain)
        return self
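For context, a minimal usage sketch of the new class (not part of the diff). The IDs and field values below are placeholders, and it assumes an analysis profile and source loss sets already exist on the server and are fetched with the analyzere client's retrieve calls:

```python
from analyzere import AnalysisProfile, LossSet
from analyzere_extras.loss_sets import AnalogousEventLossSet

# Placeholder IDs -- substitute resources that exist on your server.
profile = AnalysisProfile.retrieve('<analysis-profile-id>')
source = LossSet.retrieve('<source-loss-set-id>')

ae_ls = AnalogousEventLossSet(
    analysis_profile=profile,
    sources=[source],             # loss sets the event filters are applied to
    source_events=[101, 102],     # analogous event IDs to combine
    load=1.1,                     # multiplier applied to every loss value
    occurrence_probability=0.05,  # p of the Binomial(n=1) frequency
    description='Analogous event scenario loss set',
)

# save() downloads the filtered YELT for each source event, builds the
# severity distribution, looks up or creates the frequency and seasonality
# distributions, then persists the loss set itself.
ae_ls.save()
```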
131 changes: 131 additions & 0 deletions tests/test_loss_sets.py
@@ -0,0 +1,131 @@
import requests_mock
from analyzere_extras.loss_sets import AnalogousEventLossSet
import analyzere
from analyzere import AnalysisProfile


def are_mocker():
    m = requests_mock.Mocker()
    m.start()
    m.register_uri(
        'POST',
        'https://api/layer_views/',
        [{'status_code': 200, 'text': '{"id": "1"}'},
         {'status_code': 200, 'text': '{"id": "2"}'},
         {'status_code': 200, 'text': '{"id": "3"}'},
         {'status_code': 200, 'text': '{"id": "4"}'}]
    )

    m.get('https://api/layer_views/1/yelt?secondary_uncertainty=false',
          status_code=200,
          text="""Trial,EventId,Sequence,Loss
1,1,0.0,100.0""")

    m.get('https://api/layer_views/2/yelt?secondary_uncertainty=false',
          status_code=200,
          text='Trial,EventId,Sequence,Loss')

    m.get('https://api/layer_views/3/yelt?secondary_uncertainty=false',
          status_code=200,
          text="""Trial,EventId,Sequence,Loss
1,3,0.0,100.0
2,3,0.0,50.0""")

    m.get('https://api/layer_views/4/yelt?secondary_uncertainty=false',
          status_code=200,
          text="""Trial,EventId,Sequence,Loss
1,4,0.0,200.0""")

    # Mocking Distribution Uploads
    # Distributions.save()
    m.register_uri(
        'POST',
        'https://api/distributions/',
        [{'status_code': 200, 'text': '{"id": "d1"}'},
         {'status_code': 200, 'text': '{"id": "d2"}'},
         {'status_code': 200, 'text': '{"id": "d3"}'}]
    )

    # Distributions.list(...)
    m.get('https://api/distributions/?', status_code=200, text='[]')

    # Distribution.upload_data()
    m.post('https://api/distributions/d1/data', status_code=201, text='data')
    m.patch('https://api/distributions/d1/data', status_code=204)
    m.post('https://api/distributions/d1/data/commit', status_code=204)
    m.get('https://api/distributions/d1/data/status', status_code=200,
          text='{"status": "Processing Successful"}')

    # LossSet.save()
    m.post('https://api/loss_sets/', status_code=200,
           text='{"id": "ls1", "server_generate": "foo"}')

    return m


class SetBaseUrl(object):
    def setup_method(self, _):
        analyzere.base_url = 'https://api'

    def teardown_method(self, _):
        analyzere.base_url = 'http://localhost:8000/'


class TestAnalogousEventLossSet(SetBaseUrl):
    def test_null_construction(self):
        ae_ls = AnalogousEventLossSet()
        assert ae_ls.type == 'ParametricLossSet'
        assert ae_ls.analysis_profile == ''
        assert ae_ls.load == 1.0
        assert ae_ls.sources == []
        assert ae_ls.source_events == []
        assert ae_ls.occurrence_probability == 1.0

    def test_retrieve_loss_data(self):
        m = are_mocker()
        ae_ls = AnalogousEventLossSet(sources=['abc123'], source_events=[1])
        ae_ls._retrieve_loss_data()
        m.stop()
        assert ae_ls._loss_data == {1: [100.0]}

        m = are_mocker()
        ae_ls = AnalogousEventLossSet(sources=['abc123'], source_events=[1, 2])
        ae_ls._retrieve_loss_data()
        m.stop()
        assert ae_ls._loss_data == {1: [100.0], 2: []}

    def test_severity_distribution(self):
        m = are_mocker()
        ae_ls = AnalogousEventLossSet(source_events=[1])
        ae_ls._retrieve_loss_data()
        ae_ls._construct_severity_distribution()
        m.stop()
        assert ae_ls._severity_distr == "Probability,Loss\n1.0,100.0\n"

        m = are_mocker()
        ae_ls = AnalogousEventLossSet(source_events=[1, 2])
        ae_ls._retrieve_loss_data()
        ae_ls._construct_severity_distribution()
        m.stop()
        assert ae_ls._severity_distr == \
            "Probability,Loss\n0.5,0.0\n0.5,100.0\n"

        m = are_mocker()
        ae_ls = AnalogousEventLossSet(source_events=[1, 2, 3, 4])
        ae_ls._retrieve_loss_data()
        ae_ls._construct_severity_distribution()
        m.stop()
        assert (ae_ls._severity_distr == 'Probability,Loss\n'
                + '0.25,0.0\n0.125,50.0\n0.375,100.0\n0.25,200.0\n')

    def test_save(self):
        m = are_mocker()
        ae_ls = AnalogousEventLossSet(
            analysis_profile=AnalysisProfile(id='ap1'),
            source_events=[1]
        )
        ae_ls.save()
        m.stop()
        for attribute in ['analysis_profile', 'source_events', 'sources',
                          'load', 'occurrence_probability']:
            assert hasattr(ae_ls, attribute)
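The four-event assertion in test_severity_distribution follows directly from the mocked YELTs above (event 1: one loss of 100.0; event 2: no losses; event 3: losses of 100.0 and 50.0; event 4: one loss of 200.0). A standalone sketch of the same arithmetic, independent of the library:

```python
from collections import defaultdict

# Loss data as produced by the mocked YELT downloads above.
loss_data = {1: [100.0], 2: [], 3: [100.0, 50.0], 4: [200.0]}
event_probability = 1.0 / len(loss_data)  # 0.25 per source event

value_probabilities = defaultdict(float)
for losses in loss_data.values():
    if losses:
        # Split the event's probability evenly across its loss values.
        for loss in losses:
            value_probabilities[loss] += event_probability / len(losses)
    else:
        # Events with no recorded losses contribute a zero-loss point.
        value_probabilities[0.0] += event_probability

print(sorted(value_probabilities.items()))
# [(0.0, 0.25), (50.0, 0.125), (100.0, 0.375), (200.0, 0.25)]
```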
2 changes: 1 addition & 1 deletion tests/test_visualizations.py
@@ -787,7 +787,7 @@ def test_render_rankdir(self, layer_view):

     def test_from_id(self):
         """Requests by Id don't work unless you have defined the following
-        analyzere varialbes, and a connecton can be established
+        analyzere variables, and a connecton can be established
         - analyzere.base_url
         - analyzere.username
         - analyzere.password
2 changes: 1 addition & 1 deletion tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py27, py34
+envlist = py27, py34, py36

 [testenv]
 deps = -r{toxinidir}/requirements/test.txt