diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/bayesian_logistic_regression.py b/bayesian_logistic_regression.py
new file mode 100644
index 0000000..e978bf1
--- /dev/null
+++ b/bayesian_logistic_regression.py
@@ -0,0 +1,365 @@
+import math
+import os
+import torch
+import torch.distributions.constraints as constraints
+import pyro
+from pyro.optim import Adam, SGD
+from pyro.infer import SVI, Trace_ELBO, config_enumerate, TraceEnum_ELBO, Predictive
+import pyro.distributions as dist
+from pyro.infer.autoguide import AutoDelta
+from pyro import poutine
+from pyro.poutine import trace, replay, block
+from functools import partial
+import numpy as np
+import scipy.stats
+from pyro.infer.autoguide import AutoDelta
+from collections import defaultdict
+import matplotlib
+from matplotlib import pyplot
+from pyro.infer import MCMC, NUTS
+import pandas as pd
+import pickle
+from pyro.infer.autoguide import AutoDiagonalNormal
+import inspect
+from bbbvi import relbo, Approximation
+
PRINT_INTERMEDIATE_LATENT_VALUES = False
PRINT_TRACES = False

# this is for running the notebook in our testing framework
smoke_test = ('CI' in os.environ)
n_steps = 2 if smoke_test else 10000
pyro.set_rng_seed(2)

# enable validation (e.g. validate parameters of distributions)
pyro.enable_validation(True)

# clear the param store in case we're in a REPL
pyro.clear_param_store()

# Per-step log-probability traces appended to by relbo() and reset (via
# `global`) at the start of every boosting iteration in boosting_bbvi().
model_log_prob = []
guide_log_prob = []
approximation_log_prob = []
+
+# @config_enumerate
def guide(observations, input_data, index):
    """Variational guide: a full MultivariateNormal over the weight vector ``w``.

    ``index`` tags the learnable parameters so each boosting component gets its
    own ``variance_{index}`` / ``mu_{index}`` entries in the Pyro param store.
    Signature mirrors the model's (observations, input_data).
    """
    n_features = input_data.shape[1]
    # `constraints.positive` is elementwise: it neither guarantees a
    # positive-definite covariance (which MultivariateNormal requires) nor can
    # its exp transform represent the identity init (log(0) = -inf on the
    # off-diagonal zeros).  `positive_definite` keeps the matrix valid.
    variance_q = pyro.param('variance_{}'.format(index), torch.eye(n_features),
                            constraints.positive_definite)
    mu_q = pyro.param('mu_{}'.format(index), torch.zeros(n_features))
    w = pyro.sample("w", dist.MultivariateNormal(mu_q, variance_q))
    return w
+
class Guide:
    """One boosting component: a trainable MultivariateNormal guide over ``w``.

    Parameters live in the Pyro param store as ``locs_{index}`` and
    ``scale_{index}`` so each component is optimized independently.
    """

    def __init__(self, index, n_variables, initial_loc=None, initial_scale=None):
        """Store defaults; ``initial_loc``/``initial_scale`` default to N(0, I).

        Uses explicit ``is None`` checks: the previous truthiness test raised
        on multi-element tensors (ambiguous bool()) and silently discarded an
        ``initial_scale`` whenever ``initial_loc`` was omitted.
        """
        self.index = index
        self.n_variables = n_variables
        if initial_loc is None:
            self.initial_loc = torch.zeros(n_variables)
        else:
            self.initial_loc = initial_loc
        if initial_scale is None:
            self.initial_scale = torch.eye(n_variables)
        else:
            self.initial_scale = initial_scale

    def get_distribution(self):
        """Build the MultivariateNormal from the current param-store values."""
        # `positive_definite` (not elementwise `positive`) keeps the covariance
        # a valid MultivariateNormal parameter; the elementwise constraint also
        # cannot represent the identity initialization (zeros off-diagonal).
        scale_q = pyro.param('scale_{}'.format(self.index), self.initial_scale,
                             constraints.positive_definite)
        locs_q = pyro.param('locs_{}'.format(self.index), self.initial_loc)
        return dist.MultivariateNormal(locs_q, scale_q)

    def __call__(self, observations, input_data):
        """Sample site "w"; mirrors the model's (observations, input_data) signature."""
        return pyro.sample("w", self.get_distribution())
+
def logistic_regression_model(observations, input_data):
    """Bayesian logistic regression: w ~ N(0, I); obs_i ~ Bernoulli(sigmoid(x_i . w))."""
    n_features = input_data.shape[1]
    prior = dist.MultivariateNormal(torch.zeros(n_features), torch.eye(n_features))
    w = pyro.sample('w', prior)
    # .double() matches the float64 design matrix produced by load_data().
    probs = torch.sigmoid(torch.matmul(input_data, w.double()))
    with pyro.plate("data", input_data.shape[0]):
        pyro.sample('obs', dist.Bernoulli(probs), obs=observations)
+
+# @config_enumerate
+# def approximation(observations, input_data, components, weights):
+# assignment = pyro.sample('assignment', dist.Categorical(weights))
+# distribution = components[assignment].get_distribution()
+# w = pyro.sample("w", distribution)
+# return w
+
def dummy_approximation(observations, input_data):
    """Initial approximation q^0 over ``w``: N(100 * 1, I).

    Presumably placed far from the posterior so the first fitted component
    dominates the mixture -- TODO confirm with the authors.
    """
    n_features = input_data.shape[1]
    # `positive_definite` (not elementwise `positive`) so the covariance stays
    # a valid MultivariateNormal parameter and the identity init is
    # representable under the constraint's transform.
    variance_q = pyro.param('variance_0', torch.eye(n_features),
                            constraints.positive_definite)
    mu_q = pyro.param('mu_0', 100*torch.ones(n_features))
    pyro.sample("w", dist.MultivariateNormal(mu_q, variance_q))
+
def predictive_model(wrapped_approximation, observations, input_data):
    """Score ``observations`` under a weight vector drawn from the approximation.

    ``wrapped_approximation`` may return the sampled tensor directly (the
    mixture/guide case) or a trace-style dict keyed by 'w' (e.g. a Pyro
    autoguide); both forms are accepted.
    """
    w = wrapped_approximation(observations, input_data)
    # isinstance (rather than `type(w) is dict`) also accepts dict subclasses
    # such as OrderedDict returned by some guides.
    if isinstance(w, dict):
        w = w['w']
    with pyro.plate("data", input_data.shape[0]):
        # .double() matches the float64 inputs produced by load_data().
        probs = torch.sigmoid(torch.matmul(input_data, w.double()))
        pyro.sample('obs', dist.Bernoulli(probs), obs=observations)
+
+
+# Utility function to print latent sites' quantile information.
def summary(samples):
    """Return per-site quantile summaries as DataFrames.

    For each latent site, produces one row per dimension with columns
    mean/std and the 5/25/50/75/95 percentiles.
    """
    quantiles = [.05, 0.25, 0.5, 0.75, 0.95]
    columns = ["mean", "std", "5%", "25%", "50%", "75%", "95%"]
    return {
        site_name: pd.DataFrame(values).describe(percentiles=quantiles).transpose()[columns]
        for site_name, values in samples.items()
    }
+
def load_data():
    """Load the ds1.100 train/test splits as float64 tensors.

    Labels arrive in {-1, +1} and are remapped in place to {0, 1} so they can
    be used directly as Bernoulli observations.
    """
    def _load_split(path):
        # Each .npz holds a design matrix 'X' and a label vector 'y'.
        arrays = np.load(path)
        X = torch.tensor(arrays['X']).double()
        y = torch.tensor(arrays['y']).double()
        y[y == -1] = 0
        return X, y

    X_train, y_train = _load_split('ds1.100_train.npz')
    X_test, y_test = _load_split('ds1.100_test.npz')
    return X_train, y_train, X_test, y_test
+
+
def relbo(model, guide, *args, **kwargs):
    """Residual ELBO loss used to fit the next boosting component.

    RELBO(s) = E_s[log p(x, z)] - E_s[log s(z)] - E_s[log q^t(z)], where s is
    the candidate ``guide`` and q^t is the current mixture passed through
    SVI.step() as the ``approximation`` kwarg.  Returns the negated RELBO so
    SVI can minimize it as a loss.
    """

    approximation = kwargs.pop('approximation', None)
    relbo_lambda = kwargs.pop('relbo_lambda', None)
    # Run the guide with the arguments passed to SVI.step() and trace the execution,
    # i.e. record all the calls to Pyro primitives like sample() and param().
    #print("enter relbo")
    guide_trace = trace(guide).get_trace(*args, **kwargs)
    #print(guide_trace.nodes['obs_1'])
    model_trace = trace(replay(model, guide_trace)).get_trace(*args, **kwargs)
    #print(model_trace.nodes['obs_1'])


    # Replay the mixture against the guide's draw so log q^t is evaluated at
    # the same value of 'w'; block() hides all of its other sample sites
    # (e.g. the mixture's 'assignment' site).
    approximation_trace = trace(replay(block(approximation, expose=['w']), guide_trace)).get_trace(*args, **kwargs)
    # We will accumulate the various terms of the ELBO in `elbo`.

    # Bookkeeping for the diagnostic plots in boosting_bbvi().
    guide_log_prob.append(guide_trace.log_prob_sum())
    model_log_prob.append(model_trace.log_prob_sum())
    approximation_log_prob.append(approximation_trace.log_prob_sum())

    # This is how we computed the ELBO before using TraceEnum_ELBO:
    # NOTE(review): this value is overwritten below, so this line (the only
    # use of relbo_lambda) currently has no effect on the returned loss --
    # confirm whether the lambda-weighted form is still wanted.
    elbo = model_trace.log_prob_sum() - relbo_lambda * guide_trace.log_prob_sum() - approximation_trace.log_prob_sum()

    # E_s[log p] - E_s[log s] via Pyro's standard ELBO implementation.
    loss_fn = pyro.infer.TraceEnum_ELBO(max_plate_nesting=1).differentiable_loss(model,
                                                                                 guide,
                                                                                 *args, **kwargs)

    # print(loss_fn)
    # print(approximation_trace.log_prob_sum())
    elbo = -loss_fn - approximation_trace.log_prob_sum()
    #elbo = -loss_fn + 0.1 * pyro.infer.TraceEnum_ELBO(max_plate_nesting=1).differentiable_loss(approximation,
    #                                                                                           guide,
    #                                                                                           *args, **kwargs)
    # Return (-elbo) since by convention we do gradient descent on a loss and
    # the ELBO is a lower bound that needs to be maximized.

    return -elbo
+
def boosting_bbvi():
    """Greedy boosting BBVI loop for the logistic-regression posterior.

    Starting from dummy_approximation, each iteration t fits a fresh Guide
    component by minimizing the RELBO against the current mixture, appends it
    with fixed step size 2/(t+1), and records diagnostics (ELBO estimate,
    mixture entropy, expected model log likelihood).  Finally reports the
    average held-out log probability under the predictive model.
    """

    n_iterations = 2
    X_train, y_train, X_test, y_test = load_data()
    relbo_lambda = 1
    #initial_approximation = Guide(index=0, n_variables=X_train.shape[1])
    initial_approximation = dummy_approximation
    components = [initial_approximation]

    weights = torch.tensor([1.])
    wrapped_approximation = Approximation(components, weights)

    locs = [0]
    scales = [0]

    gradient_norms = defaultdict(list)
    duality_gap = []
    model_log_likelihoods = []
    entropies = []
    for t in range(1, n_iterations + 1):
        # setup the inference algorithm
        wrapped_guide = Guide(index=t, n_variables=X_train.shape[1])
        # do gradient steps
        losses = []
        # Register hooks to monitor gradient norms.
        # One forward call forces the component's params into the store so
        # the hooks below can be attached before training starts.
        wrapped_guide(y_train, X_train)

        adam_params = {"lr": 0.01, "betas": (0.90, 0.999)}
        optimizer = Adam(adam_params)
        for name, value in pyro.get_param_store().named_parameters():
            if not name in gradient_norms:
                value.register_hook(lambda g, name=name: gradient_norms[name].append(g.norm().item()))

        # Reset the module-level traces that relbo() appends to, so the
        # RELBO-components plot below covers only this iteration.
        global model_log_prob
        model_log_prob = []
        global guide_log_prob
        guide_log_prob = []
        global approximation_log_prob
        approximation_log_prob = []

        svi = SVI(logistic_regression_model, wrapped_guide, optimizer, loss=relbo)
        for step in range(n_steps):
            loss = svi.step(y_train, X_train, approximation=wrapped_approximation, relbo_lambda=relbo_lambda)
            losses.append(loss)

            if PRINT_INTERMEDIATE_LATENT_VALUES:
                print('Loss: {}'.format(loss))
                # NOTE(review): dead branch (flag is False at module level);
                # if enabled it would fail -- the Guide class registers
                # 'scale_{t}', not 'variance_{t}', and .item() raises on
                # multi-element tensors.  Confirm before enabling.
                variance = pyro.param("variance_{}".format(t)).item()
                mu = pyro.param("locs_{}".format(t)).item()
                print('mu = {}'.format(mu))
                print('variance = {}'.format(variance))

            if step % 100 == 0:
                print('.', end=' ')

        # pyplot.plot(range(len(losses)), losses)
        # pyplot.xlabel('Update Steps')
        # pyplot.ylabel('-ELBO')
        # pyplot.title('-ELBO against time for component {}'.format(t));
        # pyplot.show()

        # Per-iteration diagnostic: the three RELBO terms recorded by relbo().
        pyplot.plot(range(len(guide_log_prob)), -1 * np.array(guide_log_prob), 'b-', label='- Guide log prob')
        pyplot.plot(range(len(approximation_log_prob)), -1 * np.array(approximation_log_prob), 'r-', label='- Approximation log prob')
        pyplot.plot(range(len(model_log_prob)), np.array(model_log_prob), 'g-', label='Model log prob')
        pyplot.plot(range(len(model_log_prob)), np.array(model_log_prob) -1 * np.array(approximation_log_prob) -1 * np.array(guide_log_prob), label='RELBO')
        pyplot.xlabel('Update Steps')
        pyplot.ylabel('Log Prob')
        pyplot.title('RELBO components throughout SVI'.format(t));
        pyplot.legend()
        pyplot.show()

        # Append the fitted component with fixed step size 2/(t+1) and
        # renormalize the existing mixture weights accordingly.
        wrapped_approximation.components.append(wrapped_guide)
        new_weight = 2 / (t + 1)

        # if t == 2:
        #     new_weight = 0.05
        weights = weights * (1-new_weight)
        weights = torch.cat((weights, torch.tensor([new_weight])))

        wrapped_approximation.weights = weights

        # Monte Carlo estimates of ELBO(p, q_t), the mixture entropy and the
        # expected model log likelihood under the current mixture q_t.
        e_log_p = 0
        n_samples = 50
        entropy = 0
        model_log_likelihood = 0
        elbo = 0
        for i in range(n_samples):
            qt_trace = trace(wrapped_approximation).get_trace(y_train, X_train)
            replayed_model_trace = trace(replay(logistic_regression_model, qt_trace)).get_trace(y_train, X_train)
            model_log_likelihood += replayed_model_trace.log_prob_sum()
            entropy -= qt_trace.log_prob_sum()
            elbo = elbo + replayed_model_trace.log_prob_sum() - qt_trace.log_prob_sum()

        duality_gap.append(elbo/n_samples)
        model_log_likelihoods.append(model_log_likelihood/n_samples)
        entropies.append(entropy/n_samples)

        # scale = pyro.param("variance_{}".format(t)).item()
        # scales.append(scale)
        # loc = pyro.param("mu_{}".format(t)).item()
        # locs.append(loc)
        # print('mu = {}'.format(loc))
        # print('variance = {}'.format(scale))

    pyplot.figure(figsize=(10, 4), dpi=100).set_facecolor('white')
    for name, grad_norms in gradient_norms.items():
        pyplot.plot(grad_norms, label=name)
    pyplot.xlabel('iters')
    pyplot.ylabel('gradient norm')
    # pyplot.yscale('log')
    pyplot.legend(loc='best')
    pyplot.title('Gradient norms during SVI');
    pyplot.show()


    pyplot.plot(range(1, len(duality_gap) + 1), duality_gap, label='ELBO')
    pyplot.plot(range(1, len(entropies) + 1), entropies, label='Entropy of q_t')
    pyplot.plot(range(1, len(model_log_likelihoods) + 1),model_log_likelihoods, label='E[logp] w.r.t. q_t')
    pyplot.title('ELBO(p, q_t)');
    pyplot.legend();
    pyplot.xlabel('Approximation components')
    pyplot.ylabel('Log probability')
    pyplot.show()

    for i in range(1, n_iterations + 1):
        mu = pyro.param('locs_{}'.format(i))
        sigma = pyro.param('scale_{}'.format(i))
        print('Mu_{}: '.format(i))
        print(mu)
        print('Sigma{}: '.format(i))
        print(sigma)

    # Held-out evaluation: average log prob of the test labels under weights
    # sampled from the final mixture.
    wrapped_predictive_model = partial(predictive_model, wrapped_approximation=wrapped_approximation, observations=y_test, input_data=X_test)
    n_samples = 50
    log_likelihood = 0
    for i in range(n_samples):
        predictive_trace = trace(wrapped_predictive_model).get_trace()
        log_likelihood += predictive_trace.log_prob_sum()
    print('Log prob on test data')
    print(log_likelihood/n_samples)
+
def run_mcmc():
    """Draw posterior samples of the regression weights with NUTS.

    Side effects: pickles the samples to 'hmc_samples.pkl' and prints a
    per-site quantile summary.  Serves as a gold-standard reference for the
    variational approximations.
    """
    X_train, y_train, _X_test, _y_test = load_data()
    kernel = NUTS(logistic_regression_model)

    sampler = MCMC(kernel, num_samples=200, warmup_steps=100)
    sampler.run(y_train, X_train)

    posterior = {}
    for site, draws in sampler.get_samples().items():
        posterior[site] = draws.detach().cpu().numpy()

    with open('hmc_samples.pkl', 'wb') as outfile:
        pickle.dump(posterior, outfile)

    for site, values in summary(posterior).items():
        print("Site: {}".format(site))
        print(values, "\n")
+
+
def run_svi():
    """Baseline: fit a single AutoDiagonalNormal guide with standard SVI and
    report the held-out log likelihood (same protocol as boosting_bbvi)."""
    # setup the optimizer
    X_train, y_train, X_test, y_test = load_data()
    # NOTE(review): shadows the module-level n_steps, so the smoke_test
    # shortcut does not apply here -- confirm that is intended.
    n_steps = 10000
    adam_params = {"lr": 0.01, "betas": (0.90, 0.999)}
    optimizer = Adam(adam_params)

    # setup the inference algorithm
    #wrapped_guide = partial(guide, index=0)
    wrapped_guide = AutoDiagonalNormal(logistic_regression_model)
    svi = SVI(logistic_regression_model, wrapped_guide, optimizer, loss=Trace_ELBO())
    losses = []

    # do gradient steps
    for step in range(n_steps):
        loss = svi.step(y_train, X_train)
        losses.append(loss)
        if step % 100 == 0:
            print('.', end='')

    # for i in range(0, n_iterations):
    #     mu = pyro.param('mu_{}'.format(i))
    #     sigma = pyro.param('variance_{}'.format(i))
    #     print('Mu_{}: '.format(i))
    #     print(mu)
    #     print('Sigma{}: '.format(i))
    #     print(sigma)

    pyplot.plot(range(len(losses)), losses)
    pyplot.xlabel('Update Steps')
    pyplot.ylabel('-ELBO')
    pyplot.title('-ELBO against time for component {}'.format(1));
    pyplot.show()

    # Held-out evaluation mirroring boosting_bbvi(); the autoguide's __call__
    # returns a dict of latents, which predictive_model unwraps via key 'w'.
    wrapped_predictive_model = partial(predictive_model, wrapped_approximation=wrapped_guide, observations=y_test, input_data=X_test)
    n_samples = 50
    log_likelihood = 0
    for i in range(n_samples):
        predictive_trace = trace(wrapped_predictive_model).get_trace()
        log_likelihood += predictive_trace.log_prob_sum()
    print('Log prob on test data')
    print(log_likelihood/n_samples)
+
+if __name__ == '__main__':
+ boosting_bbvi()
\ No newline at end of file
diff --git a/bbbvi.py b/bbbvi.py
new file mode 100644
index 0000000..8dc0f9f
--- /dev/null
+++ b/bbbvi.py
@@ -0,0 +1,75 @@
+import math
+import os
+import torch
+import torch.distributions.constraints as constraints
+import pyro
+from pyro.optim import Adam, SGD
+from pyro.infer import SVI, Trace_ELBO, config_enumerate, TraceEnum_ELBO
+import pyro.distributions as dist
+from pyro.infer.autoguide import AutoDelta
+from pyro import poutine
+from pyro.poutine import trace, replay, block
+from functools import partial
+import numpy as np
+import scipy.stats
+from pyro.infer.autoguide import AutoDelta
+from collections import defaultdict
+import matplotlib
+from matplotlib import pyplot
+
def relbo(model, guide, *args, **kwargs):
    """Residual ELBO loss for boosting BBVI (generic library version).

    RELBO(s) = E_s[log p(x, z)] - E_s[log s(z)] - E_s[log q^t(z)], where s is
    the candidate ``guide`` and q^t is the mixture passed as the
    ``approximation`` kwarg through SVI.step().  Returns the negated RELBO so
    SVI can minimize it as a loss.

    NOTE(review): this copy exposes the latent site 'mu' (matching the
    bimodal-posterior model); bayesian_logistic_regression.py defines its own
    copy exposing 'w' instead -- confirm against the caller's model sites.
    """

    approximation = kwargs.pop('approximation', None)
    relbo_lambda = kwargs.pop('relbo_lambda', None)
    # Run the guide with the arguments passed to SVI.step() and trace the execution,
    # i.e. record all the calls to Pyro primitives like sample() and param().
    #print("enter relbo")
    guide_trace = trace(guide).get_trace(*args, **kwargs)
    #print(guide_trace.nodes['obs_1'])
    model_trace = trace(replay(model, guide_trace)).get_trace(*args, **kwargs)
    #print(model_trace.nodes['obs_1'])


    # Replay the mixture against the guide's draw so log q^t is evaluated at
    # the same latent value; block() hides every other sample site.
    approximation_trace = trace(replay(block(approximation, expose=['mu']), guide_trace)).get_trace(*args, **kwargs)
    # We will accumulate the various terms of the ELBO in `elbo`.

    # guide_log_prob.append(guide_trace.log_prob_sum())
    # model_log_prob.append(model_trace.log_prob_sum())
    # approximation_log_prob.append(approximation_trace.log_prob_sum())

    # This is how we computed the ELBO before using TraceEnum_ELBO:
    # NOTE(review): overwritten below, so this line (the only use of
    # relbo_lambda) currently has no effect on the returned loss.
    elbo = model_trace.log_prob_sum() - relbo_lambda * guide_trace.log_prob_sum() - approximation_trace.log_prob_sum()

    # E_s[log p] - E_s[log s] via Pyro's standard ELBO implementation.
    loss_fn = pyro.infer.TraceEnum_ELBO(max_plate_nesting=1).differentiable_loss(model,
                                                                                 guide,
                                                                                 *args, **kwargs)

    # print(loss_fn)
    # print(approximation_trace.log_prob_sum())
    elbo = -loss_fn - approximation_trace.log_prob_sum()
    #elbo = -loss_fn + 0.1 * pyro.infer.TraceEnum_ELBO(max_plate_nesting=1).differentiable_loss(approximation,
    #                                                                                           guide,
    #                                                                                           *args, **kwargs)
    # Return (-elbo) since by convention we do gradient descent on a loss and
    # the ELBO is a lower bound that needs to be maximized.

    return -elbo
+
class Approximation:
    """Finite mixture of guide components used as q^t in boosting BBVI.

    ``components`` are callables sharing the model's call signature;
    ``weights`` is a tensor of categorical mixture weights with one entry per
    component.  Both are mutated in place by the boosting loop.
    """

    def __init__(self, components=None, weights=None):
        # Compare against None explicitly: the previous truthiness test
        # (`if not weights:`) raised on multi-element weight tensors
        # (ambiguous bool()) and silently replaced legitimately empty
        # arguments with fresh lists.
        self.components = [] if components is None else components
        self.weights = [] if weights is None else weights

    def __call__(self, *args, **kwargs):
        """Sample a component index at site 'assignment', then delegate to it."""
        assignment = pyro.sample('assignment', dist.Categorical(self.weights))
        return self.components[assignment](*args, **kwargs)
+
diff --git a/bimodal_posterior.py b/bimodal_posterior.py
index de92785..469dd42 100644
--- a/bimodal_posterior.py
+++ b/bimodal_posterior.py
@@ -4,7 +4,7 @@
import torch.distributions.constraints as constraints
import pyro
from pyro.optim import Adam, SGD
-from pyro.infer import SVI, Trace_ELBO, config_enumerate
+from pyro.infer import SVI, Trace_ELBO, config_enumerate, TraceEnum_ELBO
import pyro.distributions as dist
from pyro.infer.autoguide import AutoDelta
from pyro import poutine
@@ -16,8 +16,9 @@
from collections import defaultdict
import matplotlib
from matplotlib import pyplot
+from bbbvi import relbo, Approximation
-PRINT_INTERMEDIATE_LATENT_VALUES = True
+PRINT_INTERMEDIATE_LATENT_VALUES = False
PRINT_TRACES = False
# this is for running the notebook in our testing framework
@@ -33,6 +34,11 @@
data = torch.tensor([4.0, 4.2, 3.9, 4.1, 3.8, 3.5, 4.3])
+model_log_prob = []
+guide_log_prob = []
+approximation_log_prob = []
+
+
def guide(data, index):
variance_q = pyro.param('variance_{}'.format(index), torch.tensor([1.0]), constraints.positive)
mu_q = pyro.param('mu_{}'.format(index), torch.tensor([1.0]))
@@ -50,60 +56,27 @@ def model(data):
# Local variables.
pyro.sample('obs_{}'.format(i), dist.Normal(mu*mu, variance), obs=data[i])
-@config_enumerate
-def approximation(data, components, weights):
- assignment = pyro.sample('assignment', dist.Categorical(weights))
- distribution = components[assignment](data)
def dummy_approximation(data):
variance_q = pyro.param('variance_0', torch.tensor([1.0]), constraints.positive)
mu_q = pyro.param('mu_0', torch.tensor([20.0]))
pyro.sample("mu", dist.Normal(mu_q, variance_q))
-def relbo(model, guide, *args, **kwargs):
-
- approximation = kwargs.pop('approximation', None)
- # Run the guide with the arguments passed to SVI.step() and trace the execution,
- # i.e. record all the calls to Pyro primitives like sample() and param().
- #print("enter relbo")
- guide_trace = trace(guide).get_trace(*args, **kwargs)
- #print(guide_trace.nodes['obs_1'])
- model_trace = trace(replay(model, guide_trace)).get_trace(*args, **kwargs)
- #print(model_trace.nodes['obs_1'])
-
-
- approximation_trace = trace(replay(block(approximation, expose=['mu']), guide_trace)).get_trace(*args, **kwargs)
- # We will accumulate the various terms of the ELBO in `elbo`.
-
- # This is how we computed the ELBO before using TraceEnum_ELBO:
- # elbo = model_trace.log_prob_sum() - guide_trace.log_prob_sum() - approximation_trace.log_prob_sum()
-
- loss_fn = pyro.infer.TraceEnum_ELBO(max_plate_nesting=1).differentiable_loss(model,
- guide,
- *args, **kwargs)
-
- # print(loss_fn)
- # print(approximation_trace.log_prob_sum())
- elbo = -loss_fn - approximation_trace.log_prob_sum()
- # Return (-elbo) since by convention we do gradient descent on a loss and
- # the ELBO is a lower bound that needs to be maximized.
-
- return -elbo
-
-
def boosting_bbvi():
n_iterations = 2
-
+ relbo_lambda = 1
initial_approximation = dummy_approximation
components = [initial_approximation]
weights = torch.tensor([1.])
- wrapped_approximation = partial(approximation, components=components,
- weights=weights)
+ wrapped_approximation = Approximation(components, weights)
locs = [0]
scales = [0]
gradient_norms = defaultdict(list)
+ duality_gap = []
+ entropies = []
+ model_log_likelihoods = []
for t in range(1, n_iterations + 1):
# setup the inference algorithm
wrapped_guide = partial(guide, index=t)
@@ -118,10 +91,17 @@ def boosting_bbvi():
for name, value in pyro.get_param_store().named_parameters():
if not name in gradient_norms:
value.register_hook(lambda g, name=name: gradient_norms[name].append(g.norm().item()))
+
+ global model_log_prob
+ model_log_prob = []
+ global guide_log_prob
+ guide_log_prob = []
+ global approximation_log_prob
+ approximation_log_prob = []
svi = SVI(model, wrapped_guide, optimizer, loss=relbo)
for step in range(n_steps):
- loss = svi.step(data, approximation=wrapped_approximation)
+ loss = svi.step(data, approximation=wrapped_approximation, relbo_lambda=relbo_lambda)
losses.append(loss)
if PRINT_INTERMEDIATE_LATENT_VALUES:
@@ -140,13 +120,39 @@ def boosting_bbvi():
pyplot.title('-ELBO against time for component {}'.format(t));
pyplot.show()
- components.append(wrapped_guide)
+ # pyplot.plot(range(len(guide_log_prob)), -1 * np.array(guide_log_prob), 'b-', label='- Guide log prob')
+ # pyplot.plot(range(len(approximation_log_prob)), -1 * np.array(approximation_log_prob), 'r-', label='- Approximation log prob')
+ # pyplot.plot(range(len(model_log_prob)), np.array(model_log_prob), 'g-', label='Model log prob')
+ # pyplot.plot(range(len(model_log_prob)), np.array(model_log_prob) -1 * np.array(approximation_log_prob) -1 * np.array(guide_log_prob), label='RELBO')
+ # pyplot.xlabel('Update Steps')
+ # pyplot.ylabel('Log Prob')
+ # pyplot.title('RELBO components throughout SVI'.format(t));
+ # pyplot.legend()
+ # pyplot.show()
+
+ wrapped_approximation.components.append(wrapped_guide)
new_weight = 2 / (t + 1)
weights = weights * (1-new_weight)
weights = torch.cat((weights, torch.tensor([new_weight])))
- wrapped_approximation = partial(approximation, components=components, weights=weights)
+ wrapped_approximation.weights = weights
+
+ e_log_p = 0
+ n_samples = 50
+ entropy = 0
+ model_log_likelihood = 0
+ elbo = 0
+ for i in range(n_samples):
+ qt_trace = trace(wrapped_approximation).get_trace(data)
+ replayed_model_trace = trace(replay(model, qt_trace)).get_trace(data)
+ model_log_likelihood += replayed_model_trace.log_prob_sum()
+ entropy -= qt_trace.log_prob_sum()
+ elbo = elbo + replayed_model_trace.log_prob_sum() - qt_trace.log_prob_sum()
+
+ duality_gap.append(elbo/n_samples)
+ model_log_likelihoods.append(model_log_likelihood/n_samples)
+ entropies.append(entropy/n_samples)
scale = pyro.param("variance_{}".format(t)).item()
scales.append(scale)
@@ -165,22 +171,76 @@ def boosting_bbvi():
pyplot.title('Gradient norms during SVI');
pyplot.show()
+
+ pyplot.plot(range(1, len(duality_gap) + 1), duality_gap, label='ELBO')
+ pyplot.plot(range(1, len(entropies) + 1), entropies, label='Entropy of q_t')
+ pyplot.plot(range(1, len(model_log_likelihoods) + 1),model_log_likelihoods, label='E[logp] w.r.t. q_t')
+ pyplot.title('ELBO(p, q_t)');
+ pyplot.legend();
+ pyplot.xlabel('Approximation components')
+ pyplot.ylabel('Log probability')
+ pyplot.show()
print(weights)
print(locs)
print(scales)
X = np.arange(-10, 10, 0.1)
- Y1 = weights[1].item() * scipy.stats.norm.pdf((X - locs[1]) / scales[1])
- Y2 = weights[2].item() * scipy.stats.norm.pdf((X - locs[2]) / scales[2])
+ pyplot.figure(figsize=(10, 4), dpi=100).set_facecolor('white')
+ total_approximation = np.zeros(X.shape)
+ for i in range(1, n_iterations + 1):
+ Y = weights[i].item() * scipy.stats.norm.pdf((X - locs[i]) / scales[i])
+ pyplot.plot(X, Y)
+ total_approximation += Y
+ pyplot.plot(X, total_approximation)
+ pyplot.plot(data.data.numpy(), np.zeros(len(data)), 'k*')
+ pyplot.title('Approximation of posterior over mu with lambda={}'.format(relbo_lambda))
+ pyplot.ylabel('probability density');
+ pyplot.show()
+
def run_standard_svi():
    """Fit a single Normal guide to the bimodal posterior with plain SVI.

    Contrast to boosting_bbvi(): a unimodal guide can only capture one of the
    two posterior modes.  Relies on the module-level ``data``, ``model``,
    ``guide`` and ``n_steps``.
    """
    adam_params = {"lr": 0.002, "betas": (0.90, 0.999)}
    optimizer = Adam(adam_params)
    gradient_norms = defaultdict(list)
    losses = []
    wrapped_guide = partial(guide, index=0)
    # One forward call registers variance_0 / mu_0 so hooks can be attached.
    wrapped_guide(data)
    for name, value in pyro.get_param_store().named_parameters():
        if not name in gradient_norms:
            value.register_hook(lambda g, name=name: gradient_norms[name].append(g.norm().item()))


    svi = SVI(model, wrapped_guide, optimizer, loss=Trace_ELBO())
    for step in range(n_steps):
        loss = svi.step(data)
        losses.append(loss)

    pyplot.figure(figsize=(10, 4), dpi=100).set_facecolor('white')
    for name, grad_norms in gradient_norms.items():
        pyplot.plot(grad_norms, label=name)
    pyplot.xlabel('iters')
    pyplot.ylabel('gradient norm')
    # pyplot.yscale('log')
    pyplot.legend(loc='best')
    pyplot.title('Gradient norms during SVI');
    pyplot.show()

    # NOTE(review): 'variance_0' is used as the guide Normal's second
    # argument (a scale/std dev despite the name), and the pdf below is not
    # divided by the scale, so the curve is not a normalized density --
    # confirm whether that is intended for the plot.
    scale = pyro.param("variance_{}".format(0)).item()
    loc = pyro.param("mu_{}".format(0)).item()
    X = np.arange(-10, 10, 0.1)
    Y1 = scipy.stats.norm.pdf((X - loc) / scale)

    print('Resulting Mu: ', loc)
    print('Resulting Variance: ', scale)

    pyplot.figure(figsize=(10, 4), dpi=100).set_facecolor('white')
    pyplot.plot(X, Y1, 'r-')
    pyplot.plot(data.data.numpy(), np.zeros(len(data)), 'k*')
    pyplot.title('Standard SVI result')
    pyplot.ylabel('probability density');
    pyplot.show()
+
if __name__ == '__main__':
boosting_bbvi()
\ No newline at end of file
diff --git a/boosting_bbvi_tutorial.ipynb b/boosting_bbvi_tutorial.ipynb
new file mode 100644
index 0000000..48c2092
--- /dev/null
+++ b/boosting_bbvi_tutorial.ipynb
@@ -0,0 +1,613 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Boosting Black Box Variational Inference\n",
+ "## Introduction\n",
+    "This tutorial demonstrates how to implement boosting black box Variational Inference [1] in Pyro. In boosting Variational Inference [2], we approximate a target distribution with an iteratively selected mixture of densities. In cases where a single density provided by regular Variational Inference doesn't adequately approximate a target density, boosting VI thus offers a simple way of getting more complex approximations. We show how this can be implemented as a relatively straightforward extension of Pyro's SVI.\n",
+ "\n",
+ "## Contents\n",
+ "* [Theoretical Background](#theoretical-background)\n",
+ " - [Variational Inference](#variational-inference)\n",
+ " - [Boosting Black Box Variational Inference](#bbbvi)\n",
+ "* [BBBVI in Pyro](#bbbvi-pyro)\n",
+ " - [The Model](#the-model)\n",
+ " - [The Guide](#the-guide)\n",
+ " - [The Relbo](#the-relbo)\n",
+ " - [The Approximation](#the-approximation)\n",
+ " - [The Greedy Algorithm](#the-greedy-algorithm)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Theoretical Background \n",
+ "\n",
+ "### Variational Inference \n",
+ "For an introduction to regular Variational Inference, we recommend having a look at [the tutorial on SVI in Pyro](https://pyro.ai/examples/svi_part_i.html) and this excellent review [3].\n",
+ "\n",
+ "Briefly, Variational Inference allows us to find approximations of probability densities which are intractable to compute analytically. For instance, one might have observed variables $\\textbf{x}$, latent variables $\\textbf{z}$ and a joint distribution $p(\\textbf{x}, \\textbf{z})$. One can then use Variational Inference to approximate $p(\\textbf{z}|\\textbf{x})$. To do so, one first chooses a set of tractable densities, a variational family, and then tries to find the element of this set which most closely approximates the target distribution $p(\\textbf{z}|\\textbf{x})$.\n",
+ "This approximating density is found by maximizing the Evidence Lower BOund (ELBO):\n",
+ "$$ \\mathbb{E}_q[\\log p(\\mathbf{x}, \\mathbf{z})] - \\mathbb{E}_q[\\log q(\\mathbf{z})]$$\n",
+ "\n",
+    "where $q(\\mathbf{z})$ is the approximating density.\n",
+ "\n",
+ "### Boosting Black Box Variational Inference \n",
+ "\n",
+ "In boosting black box Variational inference (BBBVI), we approximate the target density with a mixture of densities from the variational family:\n",
+ "$$q^t(\\mathbf{z}) = \\sum_{i=1}^t \\gamma_i s_i(\\mathbf{z})$$\n",
+ "\n",
+ "$$\\text{where} \\sum_{i=1}^t \\gamma_i =1$$\n",
+ "\n",
+ "and $s_t(\\mathbf{z})$ are elements of the variational family.\n",
+ "\n",
+ "The components of the approximation are selected greedily by maximising the so-called Residual ELBO (RELBO) with respect to the next component $s_{t+1}(\\mathbf{z})$:\n",
+ "\n",
+ "$$\\mathbb{E}_s[\\log p(\\mathbf{x},\\mathbf{z})] - \\lambda \\mathbb{E}_s[\\log s(\\mathbf{z})] - \\mathbb{E}_s[\\log q^t(\\mathbf{z})]$$\n",
+ "\n",
+ "Where the first two terms are the same as in the ELBO and the last term is the cross entropy between the next component $s_{t+1}(\\mathbf{z})$ and the current approximation $q^t(\\mathbf{z})$.\n",
+ "\n",
+    "It's called *black box* Variational Inference because this optimization does not have to be tailored to the variational family which is being used. By setting $\\lambda$ (the regularization factor of the entropy term) to 1, standard SVI methods can be used to compute $\\mathbb{E}_s[\\log p(\\mathbf{x}, \\mathbf{z})] - \\lambda \\mathbb{E}_s[\\log s(\\mathbf{z})]$. See the explanation of [the section on the implementation of the RELBO](#the-relbo) below for an explanation of how we compute the term $- \\mathbb{E}_s[\\log q^t(\\mathbf{z})]$. Importantly, we do not need to make any additional assumptions about the variational family that's being used to ensure that this algorithm converges. \n",
+ "\n",
+ "In [1], a number of different ways of finding the mixture weights $\\gamma_t$ are suggested, ranging from fixed step sizes based on the iteration to solving the optimisation problem of finding $\\gamma_t$ that will minimise the RELBO. Here, we used the fixed step size method.\n",
+ "For more details on the theory behind boosting black box variational inference, please refer to [1]."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## BBBVI in Pyro "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "To implement boosting black box variational inference in Pyro, we need to consider the following points:\n",
+ "1. The approximation components $s_{t}(\\mathbf{z})$ (guides).\n",
+ "2. The RELBO.\n",
+ "3. The approximation itself $q^t(\\mathbf{z})$.\n",
+ "4. Using Pyro's SVI to find new components of the approximation.\n",
+ "\n",
+ "We will illustrate these points by looking at simple example: approximating a bimodal posterior.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 26,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "import os\n",
+ "from collections import defaultdict\n",
+ "from functools import partial\n",
+ "\n",
+ "import numpy as np\n",
+ "import pyro\n",
+ "import pyro.distributions as dist\n",
+ "import scipy.stats\n",
+ "import torch\n",
+ "import torch.distributions.constraints as constraints\n",
+ "from matplotlib import pyplot\n",
+ "from pyro.infer import SVI, Trace_ELBO\n",
+ "from pyro.optim import Adam\n",
+ "from pyro.poutine import block, replay, trace\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### The Model \n",
+ "\n",
+    "Boosting BBVI is particularly useful when we want to approximate multimodal distributions. In this tutorial, we'll thus consider the following model:\n",
+ " \n",
+ " $$\\mathbf{z} \\sim \\mathcal{N}(0,5)$$\n",
+ " $$\\mathbf{x} \\sim \\mathcal{N}(\\mathbf{z}^2, 0.1)$$\n",
+ " \n",
+    "Given the set of i.i.d. observations $\\text{data} \\sim \\mathcal{N}(4, 0.1)$, we thus expect $p(\\mathbf{z}|\\mathbf{x})$ to be a bimodal distribution with modes around $-2$ and $2$.\n",
+ " \n",
+ "In Pyro, this model takes the following shape:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "def model(data):\n",
+ " prior_loc = torch.tensor([0.])\n",
+ " prior_scale = torch.tensor([5.])\n",
+ " z = pyro.sample('z', dist.Normal(prior_loc, prior_scale))\n",
+ " scale = torch.tensor([0.1])\n",
+ "\n",
+ " with pyro.plate('data', len(data)):\n",
+ " pyro.sample('x', dist.Normal(z*z, scale), obs=data)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### The Guide \n",
+ "\n",
+ "Next, we specify the guide which in our case will make up the components of our mixture. Recall that in Pyro the guide needs to take the same arguments as the model which is why our guide function also takes the data as an input. \n",
+ "\n",
+    "We also need to make sure that every `pyro.sample()` statement from the model has a matching `pyro.sample()` statement in the guide. In our case, we include `z` in both the model and the guide.\n",
+ "\n",
+ "In contrast to regular SVI, our guide takes an additional argument: `index`. Having this argument allows us to easily create new guides in each iteration of the greedy algorithm. Specifically, we make use of `partial()` from the [functools library](https://docs.python.org/3.7/library/functools.html) to create guides which only take `data` as an argument. The statement `partial(guide, index=t)` creates a guide that will take only `data` as an input and which has trainable parameters `scale_t` and `loc_t`.\n",
+ "\n",
+ "Choosing our variational distribution to be a Normal distribution parameterized by $loc_t$ and $scale_t$ we get the following guide:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "def guide(data, index):\n",
+ " scale_q = pyro.param('scale_{}'.format(index), torch.tensor([1.0]), constraints.positive)\n",
+ " loc_q = pyro.param('loc_{}'.format(index), torch.tensor([0.0]))\n",
+ " pyro.sample(\"z\", dist.Normal(loc_q, scale_q))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### The RELBO \n",
+ "\n",
+ "We implement the RELBO as a function which can be passed to Pyro's SVI class in place of ELBO to find the approximation components $s_t(z)$. Recall that the RELBO has the following form:\n",
+ "$$\\mathbb{E}_s[\\log p(\\mathbf{x},\\mathbf{z})] - \\lambda \\mathbb{E}_s[\\log s(\\mathbf{z})] - \\mathbb{E}_s[\\log q^t(\\mathbf{z})]$$\n",
+ "\n",
+ "Conveniently, this is very similar to the regular ELBO which allows us to reuse Pyro's existing ELBO. Specifically, we compute \n",
+ "$$\\mathbb{E}_s[\\log p(x,z)] - \\lambda \\mathbb{E}_s[\\log s]$$\n",
+ "using Pyro's `Trace_ELBO` and then compute \n",
+ "$$ - \\mathbb{E}_s[\\log q^t]$$\n",
+ "using Poutine. For more information on how this works, we recommend going through the Pyro tutorials [on Poutine](https://pyro.ai/examples/effect_handlers.html) and [custom SVI objectives](https://pyro.ai/examples/custom_objectives.html)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "def relbo(model, guide, *args, **kwargs):\n",
+ "\n",
+ " approximation = kwargs.pop('approximation', None)\n",
+ " # Run the guide with the arguments passed to SVI.step() and trace the execution,\n",
+ " # i.e. record all the calls to Pyro primitives like sample() and param().\n",
+ " guide_trace = trace(guide).get_trace(*args, **kwargs)\n",
+ "\n",
+ " # We do not want to update parameters of previously fitted components and thus block all\n",
+ " # parameters in the approximation apart from z.\n",
+ " replayed_approximation = trace(replay(block(approximation, expose=['z']), guide_trace))\n",
+ " approximation_trace = replayed_approximation.get_trace(*args, **kwargs)\n",
+ "\n",
+ " loss_fn = pyro.infer.Trace_ELBO(max_plate_nesting=1).differentiable_loss(model,\n",
+ " guide,\n",
+ " *args,\n",
+ " **kwargs)\n",
+ "\n",
+ " relbo = -loss_fn - approximation_trace.log_prob_sum()\n",
+ " \n",
+ " # By convention, the negative (R)ELBO is returned.\n",
+ " return -relbo"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### The Approximation "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Our implementation of the approximation $q^t(z) = \\sum_{i=1}^t \\gamma_i s_i(z)$ consists of a list of components, i.e. the guides from the greedy selection steps, and a list containing the mixture weights of the components. To sample from the approximation, we thus first sample a component according to the mixture weights. In a second step, we draw a sample from the corresponding component.\n",
+ "\n",
+ "As with the guide, we use `partial(approximation, components=components, weights=weights)` to get an approximation function which has the same signature as the model."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 169,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "def approximation(data, components, weights):\n",
+ " assignment = pyro.sample('assignment', dist.Categorical(weights))\n",
+ " result = components[assignment](data)\n",
+ " return result "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### The Greedy Algorithm "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We now have all the necessary parts to implement the greedy algorithm. First, we initialize the approximation:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 88,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "initial_approximation = partial(guide, index=0)\n",
+ "components = [initial_approximation]\n",
+ "weights = torch.tensor([1.])\n",
+ "wrapped_approximation = partial(approximation, components=components, weights=weights)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Then we iteratively find the $T$ components of the approximation by maximizing the RELBO at every step:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . Parameters of component 1:\n",
+ "loc = -1.9934829473495483\n",
+ "scale = 0.020978907123208046\n",
+ ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . "
+ ]
+ }
+ ],
+ "source": [
+ "# clear the param store in case we're in a REPL\n",
+ "pyro.clear_param_store()\n",
+ "\n",
+ "# Sample observations from a Normal distribution with loc 4 and scale 0.1\n",
+ "n = torch.distributions.Normal(torch.tensor([4.0]), torch.tensor([0.1]))\n",
+ "data = n.sample((100,))\n",
+ "\n",
+ "#T=2\n",
+ "n_steps = 2 if smoke_test else 12000\n",
+ "pyro.set_rng_seed(2)\n",
+ "n_iterations = 2\n",
+ "locs = [0]\n",
+ "scales = [0]\n",
+ "for t in range(1, n_iterations + 1):\n",
+ "\n",
+ " # Create guide that only takes data as argument\n",
+ " wrapped_guide = partial(guide, index=t)\n",
+ " losses = []\n",
+ "\n",
+ " adam_params = {\"lr\": 0.01, \"betas\": (0.90, 0.999)}\n",
+ " optimizer = Adam(adam_params)\n",
+ "\n",
+ " # Pass our custom RELBO to SVI as the loss function.\n",
+ " svi = SVI(model, wrapped_guide, optimizer, loss=relbo)\n",
+ " for step in range(n_steps):\n",
+ " # Pass the existing approximation to SVI.\n",
+ " loss = svi.step(data, approximation=wrapped_approximation)\n",
+ " losses.append(loss)\n",
+ "\n",
+ " if step % 100 == 0:\n",
+ " print('.', end=' ')\n",
+ "\n",
+ " # Update the list of approximation components.\n",
+ " components.append(wrapped_guide)\n",
+ "\n",
+ " # Set new mixture weight.\n",
+ " new_weight = 2 / (t + 1)\n",
+ "\n",
+ " # In this specific case, we set the mixture weight of the second component to 0.5.\n",
+ " if t == 2:\n",
+ " new_weight = 0.5\n",
+ " weights = weights * (1-new_weight)\n",
+ " weights = torch.cat((weights, torch.tensor([new_weight])))\n",
+ "\n",
+ " # Update the approximation\n",
+ " wrapped_approximation = partial(approximation, components=components, weights=weights)\n",
+ "\n",
+ " print('Parameters of component {}:'.format(t))\n",
+ " scale = pyro.param(\"scale_{}\".format(t)).item()\n",
+ " scales.append(scale)\n",
+ " loc = pyro.param(\"loc_{}\".format(t)).item()\n",
+ " locs.append(loc)\n",
+ " print('loc = {}'.format(loc))\n",
+ " print('scale = {}'.format(scale))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "# Plot the resulting approximation\n",
+ "X = np.arange(-10, 10, 0.1)\n",
+ "pyplot.figure(figsize=(10, 4), dpi=100).set_facecolor('white')\n",
+ "total_approximation = np.zeros(X.shape)\n",
+ "for i in range(1, n_iterations + 1):\n",
+ " Y = weights[i].item() * scipy.stats.norm.pdf((X - locs[i]) / scales[i])\n",
+ " pyplot.plot(X, Y)\n",
+ " total_approximation += Y\n",
+ "pyplot.plot(X, total_approximation)\n",
+ "pyplot.plot(data.data.numpy(), np.zeros(len(data)), 'k*')\n",
+ "pyplot.title('Approximation of posterior over z')\n",
+ "pyplot.ylabel('probability density')\n",
+ "pyplot.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We see that boosting BBVI successfully approximates the bimodal posterior distribution with modes around -2 and +2."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## The Complete Implementation"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Putting all the components together, we then get the complete implementation of boosting black box Variational Inference:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 74,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . Parameters of component 1:\n",
+ "loc = -1.9950288534164429\n",
+ "scale = 0.038874927908182144\n",
+ ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . Parameters of component 2:\n",
+ "loc = 2.009120225906372\n",
+ "scale = 0.01808810420334339\n"
+ ]
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAA18AAAFuCAYAAAB+/6JIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAPYQAAD2EBqD+naQAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzdeVxUZfs/8M+ZGQaQTRRlEwHNTEMTwRTN0Eel3C33BXfNJTc005f1uOQTbhmVSY+5lLlRapaPpmKK6U8tw6Vc8mtFYgrugokCM3P//oA5zGFYZhAcdD7v12tezZy555x7hpPMxXXd15GEEAJERERERERUoVS2ngAREREREZE9YPBFRERERET0CDD4IiIiIiIiegQYfBERERERET0CDL6IiIiIiIgeAQZfREREREREjwCDLyIiIiIiokeAwRcREREREdEjwOCLiIiIiIjoEWDwRURUig8//BCSJCEkJMTWU6kwQUFBGDp0qE2OvWHDBsTFxRX5nCRJmDNnzqOdkJW+//57hIeHw8XFBZIkYdu2bbaekkJJn295sOW5Q0T0uJGEEMLWkyAiqsyaNGmCU6dOAQCOHj2K5s2b23hG5e/EiRNwd3dH3bp1H/mxu3TpgtOnT+Ovv/4ye+7o0aOoVasWatWq9cjnZQkhBLy8vPD0009j/vz5cHFxQf369eHp6WnrqclK+nzLgy3PHSKix43G1hMgIqrMfv75Z5w6dQqdO3fGjh07sGrVKpsGX/fv34ezs3O57zc0NLTc91keWrRoYesplOjKlSu4desWXnnlFbRr187W03mkjOdieZ47er0eOp0Ojo6O5bbPipKVlYUqVarYehpE9Jhh2SERUQlWrVoFAFiwYAFatmyJTZs2ISsrSzHmr7/+giRJWLRoEf7zn/+gdu3acHJyQnh4OL7//nvF2Dlz5kCSJJw4cQKvvvoq3N3d4eHhgUGDBuH69euKsUFBQejSpQu2bt2K0NBQODk5Ye7cuQCABw8eYObMmQgODoZWq4W/vz/Gjx+PO3fuyK8/dOgQHBwcMG3aNMV+P/vsM0iSJL8347FMS8eSkpIgSRI2bNiAN998E76+vnB1dUXXrl1x9epV3L17F6NHj4aXlxe8vLwwbNgw/PPPP4rjfPzxx3jxxRdRs2ZNuLi4oFGjRli0aBFyc3PlMW3atMGOHTtw8eJFSJIk34yKKjs8ffo0unfvDk9PTzg5OaFJkyb4/PPPFWOM89+4cSNmzZoFPz8/uLu7o3379jh//jwscejQIbRr1w5ubm6oUqUKWrZsiR07dsjPz5kzR87Ivfnmm5AkCUFBQcXuzzindevWISYmBj4+PnB2dkZkZCROnDhhNv7bb79FREQEqlSpAjc3N3To0AFHjhxRjLl+/TpGjx6NgIAAODo6okaNGmjVqhX27t1r0eebk5OD+fPn45lnnpFfP2zYMKvOxaLKDlNTUzFo0CDUrFkTjo6OaNCgAd577z0YDAZ5jOn/N/Pnz0dwcDAcHR2xf//+Yj9DS877Hj16IDAwUHEso+bNm6Np06byYyEEli9fjiZNmsDZ2Rmenp7o1asX/vzzT8Xr2rRpg5CQEPzwww9o2bIlqlSpguHDhxc5R+P7Ku5GRHZOEBFRkbKysoSHh4do1qyZEEKIlStXCgDis88+U4xLSUkRAERAQIB44YUXxJYtW8RXX30lmjVrJhwcHMThw4flsbNnzxYARGBgoHjjjTfE7t27xdKlS4WLi4sIDQ0VOTk58tjAwEDh6+sr6tSpI1avXi32798vfvrpJ2EwGMRLL70kNBqNePvtt8WePXvEkiVL5H08ePBA3seCBQsEAPHNN98IIYQ4ffq0qFKlihg0aJDiPQQGBoohQ4bIj/fv3y/Pc+jQoWLXrl3ik08+Ea6urqJt27aiQ4cOYtq0aWLPnj1i4cKFQq1WiwkTJij2OWXKFBEfHy927dol9u3b
J95//33h5eUlhg0bJo85c+aMaNWqlfDx8RFHjhyRb0YAxOzZs+XHv/32m3BzcxN169YVa9euFTt27BD9+/cXAMTChQvN5h8UFCQGDhwoduzYITZu3Chq164t6tWrJ3Q6XYk/+6SkJOHg4CDCwsJEQkKC2LZtm4iKihKSJIlNmzYJIYS4dOmS2Lp1qwAgJkyYII4cOSKOHz9e7D6NcwoICBDdu3cX27dvF+vWrRNPPfWUcHd3F3/88Yc8dv369QKAiIqKEtu2bRMJCQkiLCxMaLVacfDgQXncSy+9JGrUqCFWrFghkpKSxLZt28S///1veY4lfb56vV68/PLLwsXFRcydO1ckJiaKlStXCn9/f9GwYUORlZWlOD+KOheNz5meO9euXRP+/v6iRo0a4pNPPhG7du0Sr7/+ugAgxo4dK48z/n/j7+8v2rZtKzZv3iz27NkjUlJSivz8LD3vv/nmGwFAJCYmKl5/7tw5AUB8+OGH8rZRo0YJBwcHMXXqVLFr1y6xYcMG8cwzzwhvb2+Rnp4uj4uMjBTVqlUTAQEB4qOPPhL79+8XBw4cKHKeDx48UHzWR44cEd9++61wd3cXDRo0KPI1RGQ/GHwRERVj7dq1AoD45JNPhBBC3L17V7i6uorWrVsrxhm/RPr5+Yn79+/L2zMzM0W1atVE+/bt5W3G4GvKlCmKfRi/bK9bt07eFhgYKNRqtTh//rxi7K5duwQAsWjRIsX2hIQEAUCsWLFC3mYwGESnTp1E1apVxenTp0XDhg3FM888I/755x/Fa4sLvrp27aoYN3nyZAFATJw4UbG9R48eolq1aqI4er1e5ObmirVr1wq1Wi1u3bolP9e5c2cRGBhY5OsKB1/9+vUTjo6OIjU1VTGuY8eOokqVKuLOnTuK+Xfq1Ekx7ssvvxQAFAFeUVq0aCFq1qwp7t69K2/T6XQiJCRE1KpVSxgMBiFEwc9+8eLFJe7PdE5NmzaVXy+EEH/99ZdwcHAQI0eOFELkfVZ+fn6iUaNGQq/Xy+Pu3r0ratasKVq2bClvc3V1FZMnTy7xuMV9vhs3bhQAxJYtWxTbjx07JgCI5cuXy9uKOxeNz5meOzNmzBAAxI8//qgYN3bsWCFJkrwP42dXt25dxR8dimPpeZ+bmyu8vb3FgAEDFOOmT58utFqtuHHjhhBCiCNHjggA4r333lOMu3TpknB2dhbTp0+Xt0VGRgoA4vvvvy91noXdu3dPPP/888LX11f89ddfVr+eiJ4sLDskIirGqlWr4OzsjH79+gEAXF1d0bt3bxw8eBAXLlwwG//qq6/CyclJfuzm5oauXbvihx9+gF6vV4wdOHCg4nGfPn2g0WjMSq4aN26Mp59+WrFt3759AGBW6tW7d2+4uLgoSh0lScLatWvh5uaG8PBwpKSk4Msvv4SLi4tFn0GXLl0Ujxs0aAAA6Ny5s9n2W7duKUoPT5w4gW7duqF69epQq9VwcHDA4MGDodfr8X//938WHb+wffv2oV27dggICFBsHzp0KLKysszK8rp166Z43LhxYwDAxYsXiz3GvXv38OOPP6JXr15wdXWVt6vVakRHR+Pvv/+2uHSxKAMGDFCUnwUGBqJly5byz/78+fO4cuUKoqOjoVIV/Jp2dXVFz549cfToUbn09fnnn8dnn32G+fPn4+jRo4qSztL873//Q9WqVdG1a1fodDr51qRJE/j4+CApKUkxvqhzsSj79u1Dw4YN8fzzzyu2Dx06FEII+fw16tatGxwcHCzar3E/pgqf9xqNBoMGDcLWrVuRkZEBIG8t2RdffIHu3bujevXq8vuXJAmDBg1SvH8fHx8899xzZu/f09MT//rXv0qdpym9Xo++ffvi3Llz2LlzJwIDA616PRE9eRh8EREV4ffff8cPP/yAzp07QwiBO3fu4M6dO+jVqxcAYPXq1Wav8fHxKXJbTk6O2XqowmM1Gg2qV6+OmzdvKrb7+vqa7fPmzZvQaDSoUaOGYrsk
SfDx8THbR/Xq1dGtWzc8ePAAL7/8Mho1alTCO1eqVq2a4rFWqy1x+4MHDwDkrflp3bo1Ll++jA8++AAHDx7EsWPH8PHHHwPIa9ZQFjdv3izyM/Hz85OfN2X8om1kbORQ0vFv374NIYRVx7FGceeJcZ/G/xZ3fIPBgNu3bwMAEhISMGTIEKxcuRIRERGoVq0aBg8ejPT09FLncfXqVdy5cwdarRYODg6KW3p6Om7cuKEYX9R8imLtz8ia/Vp63g8fPhwPHjzApk2bAAC7d+9GWloahg0bJo+5evUqhBDw9vY2e/9Hjx4t8/s3NWbMGOzatQubN29GkyZNrH49ET152O2QiKgIq1evhhACmzdvxubNm82e//zzzzF//nyo1Wp5W1FfeNPT06HVahUZFON2f39/+bFOp8PNmzfNgoWiFuhXr14dOp0O169fV3wRFUIgPT0dzZo1U4xPTExEfHw8nn/+eXz99dfYsmULevbsWcon8HC2bduGe/fuYevWrYq/9p88efKh9lu9enWkpaWZbb9y5QoAwMvL66H2D+RlOFQqVYUdp7jzxPizN/63uOOrVCq5lb2Xlxfi4uIQFxeH1NRUfPvtt5gxYwauXbuGXbt2lTgPLy8vVK9evdhxbm5uiseWNouw9mdkzX4tPe+Nmbc1a9bgtddew5o1a+Dn54eoqCh5jJeXFyRJwsGDB4vsrlh4m7XNMubMmYOVK1dizZo1iuMSkX1j5ouIqBC9Xo/PP/8cdevWxf79+81uU6dORVpaGr777jvF67Zu3SpnfgDg7t272L59O1q3bq0I0gBg/fr1isdffvkldDod2rRpU+r8jC3N161bp9i+ZcsW3Lt3T9HyPC0tDYMGDUJkZCQOHz6Mbt26YcSIEUhJSbHosygr4xdV0y+wQgh8+umnZmMdHR0tzoS1a9cO+/btk7/IG61duxZVqlQpl9b0Li4uaN68ObZu3aqYl8FgwLp161CrVi2Lyu+Ks3HjRgiTS2xevHgRhw8fln/29evXh7+/PzZs2KAYd+/ePWzZskXugFhY7dq18frrr6NDhw44fvy4vL24z7dLly64efMm9Ho9wsPDzW7169cv0/tr164dzp49q5gDkPczkiQJbdu2LfN+AcvOewAYNmwYfvzxRxw6dAjbt2/HkCFDFP8fdunSBUIIXL58ucj3b02GuLBVq1Zh7ty5mDdvHi9ATUQKzHwRERXy3Xff4cqVK1i4cGGRwVBISAiWLVuGVatWKdZEqdVqdOjQATExMTAYDFi4cCEyMzPlltymtm7dCo1Ggw4dOuDMmTN4++238dxzz6FPnz6lzq9Dhw546aWX8OabbyIzMxOtWrXCL7/8gtmzZyM0NBTR0dEA8oLI/v37yy3j1Wo1PvvsMzRp0gR9+/bFoUOH5HLB8tahQwdotVr0798f06dPx4MHDxAfHy+Xy5lq1KgRtm7divj4eISFhUGlUiE8PLzI/c6ePRv/+9//0LZtW/z73/9GtWrVsH79euzYsQOLFi2Ch4dHucw/NjYWHTp0QNu2bTFt2jRotVosX74cp0+fxsaNGx+qZfi1a9fwyiuvYNSoUcjIyMDs2bPh5OSEmTNnAgBUKhUWLVqEgQMHokuXLnjttdeQnZ2NxYsX486dO1iwYAEAICMjA23btsWAAQPwzDPPwM3NDceOHcOuXbvw6quvyscr7vPt168f1q9fj06dOmHSpEl4/vnn4eDggL///hv79+9H9+7d8corr1j9/qZMmYK1a9eic+fOmDdvHgIDA7Fjxw4sX74cY8eOLXPgaul5b9S/f3/ExMSgf//+yM7ONguCWrVqhdGjR2PYsGH4+eef8eKLL8LFxQVpaWk4dOgQGjVqhLFjx1o9zyNHjmDMmDFo1aoVOnTogKNHjyqer+zXriOiCmajRh9ERJVWjx49hFarFdeuXSt2TL9+/YRGoxHp6ely17aFCxeKuXPnilq1agmtVitCQ0PF7t27Fa8zdjtMTk4W
Xbt2Fa6ursLNzU30799fXL16VTE2MDBQdO7cucjj379/X7z55psiMDBQODg4CF9fXzF27Fhx+/ZtecysWbOESqUy69B2+PBhodFoxKRJkxTHKqrb4VdffaV47Zo1awQAcezYsSLf1/Xr1+Vt27dvF88995xwcnIS/v7+4o033hDfffedACD2798vj7t165bo1auXqFq1qpAkSZj+akKhbodCCPHrr7+Krl27Cg8PD6HVasVzzz0n1qxZoxhT3PyNP6vC44ty8OBB8a9//Uu4uLgIZ2dn0aJFC7F9+/Yi92dNt8MvvvhCTJw4UdSoUUM4OjqK1q1bi59//tls/LZt20Tz5s2Fk5OTcHFxEe3atRP/7//9P/n5Bw8eiDFjxojGjRsLd3d34ezsLOrXry9mz54t7t27J48r6fPNzc0VS5YskX9Orq6u4plnnhGvvfaauHDhgjyupHOx8LkjhBAXL14UAwYMENWrVxcODg6ifv36YvHixYrujdZ8dkaWnPemBgwYIACIVq1aFbvP1atXi+bNm8s/57p164rBgwcrfiaRkZHi2WeftWiOxv9HirsRkX2ThDCpaSAiIqv99ddfCA4OxuLFi80uaFzYnDlzMHfuXFy/fr1c1ifR4yMpKQlt27bFV199JTduISIi+8I1X0RERERERI8Agy8iIiIiIqJHgGWHREREREREjwAzX0RERERERI8Agy8iIiIiIqJHgMEXERERERHRI8CLLJeRwWDAlStX4Obm9lAX2yQiIiIiosebEAJ3796Fn58fVKri81sMvsroypUrCAgIsPU0iIiIiIiokrh06RJq1apV7PMMvsrIzc0NQN4H7O7ubuPZEBERERGRrWRmZiIgIECOEYrD4KuMjKWG7u7uDL6IiIiIiKjU5UhsuEFERERERPQIMPgiIiIiIiJ6BBh8ERERERERPQIMvoiIiIiIiB6BShF8LV++HMHBwXByckJYWBgOHjxY7NhPP/0UrVu3hqenJzw9PdG+fXv89NNPijFCCMyZMwd+fn5wdnZGmzZtcObMGcWY27dvIzo6Gh4eHvDw8EB0dDTu3LlTIe+PiIiIiIjI5sFXQkICJk+ejFmzZuHEiRNo3bo1OnbsiNTU1CLHJyUloX///ti/fz+OHDmC2rVrIyoqCpcvX5bHLFq0CEuXLsWyZctw7Ngx+Pj4oEOHDrh79648ZsCAATh58iR27dqFXbt24eTJk4iOjq7w90tERERERPZJEkIIW06gefPmaNq0KeLj4+VtDRo0QI8ePRAbG1vq6/V6PTw9PbFs2TIMHjwYQgj4+flh8uTJePPNNwEA2dnZ8Pb2xsKFC/Haa6/h3LlzaNiwIY4ePYrmzZsDAI4ePYqIiAj89ttvqF+/fqnHzczMhIeHBzIyMthqnoiIiIjIjlkaG9g085WTk4Pk5GRERUUptkdFReHw4cMW7SMrKwu5ubmoVq0aACAlJQXp6emKfTo6OiIyMlLe55EjR+Dh4SEHXgDQokULeHh4FHvc7OxsZGZmKm5ERERERESWsmnwdePGDej1enh7eyu2e3t7Iz093aJ9zJgxA/7+/mjfvj0AyK8raZ/p6emoWbOm2b5q1qxZ7HFjY2Pl9WEeHh4ICAiwaH5ERERERERAJVjzBZhfCVoIUerVoYG8tV0bN27E1q1b4eTkZNU+i9p/ScedOXMmMjIy5NulS5dKnR8R0ZPGYDDYegpERESPLZsGX15eXlCr1WbZpmvXrpllrgpbsmQJ3n33XezZsweNGzeWt/v4+ABAifv08fHB1atXzfZ5/fr1Yo/r6OgId3d3xY2IyJ7EfPcxnvssAtvPHbP1VIiIiB5LNg2+tFotwsLCkJiYqNiemJiIli1bFvu6xYsX45133sGuXbsQHh6ueC44OBg+Pj6Kfebk5ODAgQPyPiMiIpCRkaFoUf/jjz8iIyOjxOMSEdmzn6/9CKizsP+vn0ofTERERGY0tp5ATEwMoqOjER4ejoiICKxY
sQKpqakYM2YMAGDw4MHw9/eXOx8uWrQIb7/9NjZs2ICgoCA5w+Xq6gpXV1dIkoTJkyfj3XffRb169VCvXj28++67qFKlCgYMGAAgr5viyy+/jFGjRuG///0vAGD06NHo0qWLRZ0OiYjskRAGQAJ0LD0kIiIqE5sHX3379sXNmzcxb948pKWlISQkBDt37kRgYCAAIDU1FSpVQYJu+fLlyMnJQa9evRT7mT17NubMmQMAmD59Ou7fv49x48bh9u3baN68Ofbs2QM3Nzd5/Pr16zFx4kS5K2K3bt2wbNmyCn63RESPLwPygi690Nt4JkRERI8nm1/n63HF63wRkb1puaYv7qrOolW1ofik61RbT4eIiKjSeCyu80VERI8POfPFskMiIqIyYfBFREQWEXLZoc7GMyEiIno8MfgiIiKLiPy1Xsx8ERERlQ2DLyIisoiBmS8iIqKHwuCLiIgsYiw7NLDbIRERUZkw+CIiIosYgy8dgy8iIqIyYfBFREQWEcKY+eKaLyIiorJg8EVERBYyNtxg5ouIiKgsGHwREZFFuOaLiIjo4TD4IiIiixRc54vBFxERUVkw+CIiIosYgy/jf4mIiMg6DL6IiMhC+ZkvrvkiIiIqEwZfRERkESHlr/li5ouIiKhMGHwREZGF2HCDiIjoYTD4IiIiC/E6X0RERA+DwRcREVmImS8iIqKHweCLiIgsIiQBgGu+iIiIyorBFxERWSgv4yWY+SIiIioTBl9ERGQZZr6IiIgeCoMvIiKyUP5Flpn5IiIiKhMGX0REVCqDwQCJmS8iIqKHwuCLiIhKlaPXyfcFgy8iIqIyYfBFRESlyjYNvlh2SEREVCY2D76WL1+O4OBgODk5ISwsDAcPHix27JkzZ9CzZ08EBQVBkiTExcWZjTE+V/g2fvx4eUybNm3Mnu/Xr1+FvD8ioieBzlAQcLHskIiIqGxsGnwlJCRg8uTJmDVrFk6cOIHWrVujY8eOSE1NLXJ8VlYW6tSpgwULFsDHx6fIMceOHUNaWpp8S0xMBAD07t1bMW7UqFGKcf/973/L980RET1BsnW58n2WHRIREZWNxpYHX7p0KUaMGIGRI0cCAOLi4rB7927Ex8cjNjbWbHyzZs3QrFkzAMCMGTOK3GeNGjUUjxcsWIC6desiMjJSsb1KlSrFBnBERKRkmvli8EXWuHXrd6gkNap6Btt6KkRENmezzFdOTg6Sk5MRFRWl2B4VFYXDhw+X2zHWrVuH4cOHQ5IkxXPr16+Hl5cXnn32WUybNg13794tl2MSET2JcvUmwRfXfJGFcnOz8Mo3PdDr664wmKwbJCKyVzbLfN24cQN6vR7e3t6K7d7e3khPTy+XY2zbtg137tzB0KFDFdsHDhyI4OBg+Pj44PTp05g5cyZOnTollygWJTs7G9nZ2fLjzMzMcpkjEdHjIEfxxZmZL7JM1r1ruKXK++Nnbu49OKo9bDwjIiLbsmnZIQCzjJQQwmxbWa1atQodO3aEn5+fYvuoUaPk+yEhIahXrx7Cw8Nx/PhxNG3atMh9xcbGYu7cueUyLyKix02uga3myXo6XY58X6/LLmEkEZF9sFnZoZeXF9RqtVmW69q1a2bZsLK4ePEi9u7dK68nK0nTpk3h4OCACxcuFDtm5syZyMjIkG+XLl166DkSET0uFGWHDL7IQnp9QfCl0zP4IiKyWfCl1WoRFhZmVuqXmJiIli1bPvT+16xZg5o1a6Jz586ljj1z5gxyc3Ph6+tb7BhHR0e4u7srbkRE9iKXF1mmMjAYck3uc80XEZFNyw5jYmIQHR2N8PBwREREYMWKFUhNTcWYMWMAAIMHD4a/v7/c+TAnJwdnz56V71++fBknT56Eq6srnnrqKXm/BoMBa9aswZAhQ6DRKN/iH3/8gfXr16NTp07w8vLC2bNnMXXqVISGhqJVq1aP6J0TET1eTDNfkBh8kWVMs13MfBER2Tj46tu3L27evIl58+YhLS0NISEh2Llz
JwIDAwEAqampUKkKknNXrlxBaGio/HjJkiVYsmQJIiMjkZSUJG/fu3cvUlNTMXz4cLNjarVafP/99/jggw/wzz//ICAgAJ07d8bs2bOhVqsr7s0SET3GlGu+2O2QLKPX55rczylhJBGRfbB5w41x48Zh3LhxRT5nGlABQFBQEIQQpe4zKiqq2HEBAQE4cOCA1fMkIrJnuQbTgKv0f4eJAEBvMGm4weCLiMh2a76IiOjxoVMEX8x8kWWUma/cEkYSEdkHBl9ERFQq04YbkJj5IsuYZruY+SIiYvBFREQW0ClazTPzRZbRG7jmi4jIFIMvIiIqVa6iTTgzX2QZnWnZoWCreSIiBl9ERFQqxZovtponC5le24uZLyIiBl9ERGQB0+BLkgwwGBiAUel07HZIRKTA4IuIiEqlLDsEdAy+yAKmHQ51BnY7JCJi8EVERKVStpoHcvVsukGlMy07NOi55ouIiMEXERGVSlco2MrhNZvIAjrTNV/MfBERMfgiIqLSFc585RiY+aLSmQZcLDskImLwRUREFtALlh2S9UzXfOmZLSUiYvBFRESlK9xwg2WHZAnTa3sZeJ0vIiIGX0REVDo23KCy0JsE7ToG7EREDL6IiKh0erPgi1kMKp1e0XCD5wwREYMvIiIqlVnmiw03yAIMvoiIlBh8ERFRqQo33MjR8Ys0lY7BFxGREoMvIiIqVeHMl47NE8gCDL6IiJQYfBERUakMhTNfXPNFFjAN0nmRZSIiBl9ERGQBncGgfMxuh2QB06C9cOkqEZE9YvBFRESl0hW+zhdLyMgCpl0yWXZIRMTgi4iILGAAM19kPdOgvXAAT0Rkjxh8ERFRqQp/ceZ1vsgSpmWHhdcNEhHZIwZfRERUKoMolPniF2mygE5RdshzhoiIwRcREZVKX6jhBjNfZAnTJhu8PAERUSUIvpYvX47g4GA4OTkhLCwMBw8eLHbsmTNn0LNnTwQFBUGSJMTFxZmNmTNnDiRJUtx8fHwUY4QQmDNnDvz8/ODs7Iw2bdrgzJkz5f7eiIieFPpCX5wLX/eLqCjK63zxnCEismnwlZCQgMmTJ2PWrFk4ceIEWrdujY4dOyI1NbXI8VlZWahTpw4WLFhgFlCZevbZZ5GWlmpsAOUAACAASURBVCbffv31V8XzixYtwtKlS7Fs2TIcO3YMPj4+6NChA+7evVuu74+I6ElReL1OLpsnkAX0Jo1auOaLiMjGwdfSpUsxYsQIjBw5Eg0aNEBcXBwCAgIQHx9f5PhmzZph8eLF6NevHxwdHYvdr0ajgY+Pj3yrUaOG/JwQAnFxcZg1axZeffVVhISE4PPPP0dWVhY2bNhQ7u+RiOhJUHiNFzNfZAnTbBfXCRIR2TD4ysnJQXJyMqKiohTbo6KicPjw4Yfa94ULF+Dn54fg4GD069cPf/75p/xcSkoK0tPTFcd1dHREZGRkicfNzs5GZmam4kZEZC/MGm6w1TxZQG9y3rDskIjIhsHXjRs3oNfr4e3trdju7e2N9PT0Mu+3efPmWLt2LXbv3o1PP/0U6enpaNmyJW7evAkA8r6tPW5sbCw8PDzkW0BAQJnnSET0uDEUarjBzBdZQsdW80RECjZvuCFJkuKxEMJsmzU6duyInj17olGjRmjfvj127NgBAPj8888f6rgzZ85ERkaGfLt06VKZ50hE9Lgxb7jBNV9UOtOMqa5Q9pSIyB5pbHVgLy8vqNVqs2zTtWvXzLJSD8PFxQWNGjXChQsXAEBu1JGeng5fX1+Lj+vo6FjiOjMioieZAbzOF1nPtNW8nucMEZHtMl9arRZhYWFITExUbE9MTETLli3L7TjZ2dk4d+6cHGgFBwfDx8dHcdycnBwcOHCgXI9LRPQkKVwyxrJDsoSOa76IiBRslvkCgJiYGERHRyM8PBwRERFYsWIFUlNTMWbMGADA4MGD4e/vj9jYWAB5QdLZs2fl+5cvX8bJkyfh6uqKp556CgAwbdo0dO3aFbVr18a1
a9cwf/58ZGZmYsiQIQDyyg0nT56Md999F/Xq1UO9evXw7rvvokqVKhgwYIANPgUiosqvcMMNfpEmS5ieN3qw7JCIyKbBV9++fXHz5k3MmzcPaWlpCAkJwc6dOxEYGAgASE1NhUpVkJy7cuUKQkND5cdLlizBkiVLEBkZiaSkJADA33//jf79++PGjRuoUaMGWrRogaNHj8r7BIDp06fj/v37GDduHG7fvo3mzZtjz549cHNzezRvnIjoMWPW7ZDBF1lA0e2QZYdERJCEEMLWk3gcZWZmwsPDAxkZGXB3d7f1dIiIKlSHL15DuqHgchydfCZi4UujbDgjehzEfNEaiYY7AIBuDjXwnwH7bDwjIqKKYWlsYPWarzlz5uDixYsPNTkiInq8FG64wSwGWcK01FDPv/USEVkffG3fvh1169ZFu3btsGHDBjx48KAi5kVERJWIoXCreQZfZAGDScDFgJ2IqAzBV3JyMo4fP47GjRtjypQp8PX1xdixY3Hs2LGKmB8REVUCbLhBZaHodsjrfBERla3VfOPGjfH+++/j8uXLWL16NS5fvoxWrVqhUaNG+OCDD5CRkVHe8yQiIhsyKztk8EUWMC071LHskIjo4a7zZTAYkJOTg+zsbAghUK1aNcTHxyMgIAAJCQnlNUciIrIxUahkjCVkZAkDhMl9Zr6IiMoUfCUnJ+P111+Hr68vpkyZgtDQUJw7dw4HDhzAb7/9htmzZ2PixInlPVciIrIR45doIdQA2GqeLGOa7dKx7JCIyPrgq3HjxmjRogVSUlKwatUqXLp0CQsWLJAvcgzkXRz5+vXr5TpRIiKyHTnzlR98GZj5Igsouh2CZYdERFZfZLl3794YPnw4/P39ix1To0YNGAz8CxcR0ZPCgLxgSxIaADnMfJFF9EIAUt79wk1biIjskdWZLyEEPD09zbbfv38f8+bNK5dJERFR5SLk8rG8v9kZgzGikphmu3TMfBERWR98zZ07F//884/Z9qysLMydO7dcJkVERJWLUGS+2O2QLGMafPEiy0REZcx8SZJktv3UqVOoVq1auUyKiIgqF5G/dkdlzHyxhIwsYBpwcc0XEZEVa748PT0hSRIkScLTTz+tCMD0ej3++ecfjBkzpkImSUREtmUMviQ4AGCrebKMXnGfwRcRkcXBV1xcHIQQGD58OObOnQsPDw/5Oa1Wi6CgIERERFTIJImIyLbkzJeUX3bI4IssoCg7ZPBFRGR58DVkyBAAQHBwMFq2bAkHB4cKmxQREVUuIr/MUC2XHTL4otIpMl9c80VEZFnwlZmZCXd3dwBAaGgo7t+/j/v37xc51jiOiIieHMaGGyop7w9vXPNFlsjrcJi3TIHhOhGRhcGXp6cn0tLSULNmTVStWrXIhhvGRhx6Pf95JSJ60hjLDtUS13yR5UxDdJYdEhFZGHzt27dP7mS4f//+Cp0QERFVRsrgi2WHZAm9yd9qecYQEVkYfEVGRhZ5n4iI7IMx86WRHADBskOyjE5xn5kvIiKrr/O1a9cuHDp0SH788ccfo0mTJhgwYABu375drpMjIqJKQsoPvlT5mS/mMcgCpmcJw3UiojIEX2+88QYyMzMBAL/++itiYmLQqVMn/Pnnn4iJiSn3CRIRke0ZM18OKpYdkuUUa77Ml4sTEdkdi1vNG6WkpKBhw4YAgC1btqBr16549913cfz4cXTq1KncJ0hERJWBSdkhClrPE5VEZxJw6YofRkRkN6zOfGm1WmRlZQEA9u7di6ioKABAtWrV5IwYERE9aQqXHTL4otKx7JCISMnqzNcLL7yAmJgYtGrVCj/99BMSEhIAAP/3f/+HWrVqlfsEiYjI9oRkgARAq9ICYNkhlU4YDDCYXJqGZwwRURkyX8uWLYNGo8HmzZsRHx8Pf39/AMB3332Hl19+udwnSERElUH+mi91XvAlmMegUuj1OYrHOq75IiKyPviqXbs2/ve//+HUqVMYMWKEvP3999/Hhx9+aPUE
li9fjuDgYDg5OSEsLAwHDx4sduyZM2fQs2dPBAUFQZIkxMXFmY2JjY1Fs2bN4Obmhpo1a6JHjx44f/68YkybNm0gSZLi1q9fP6vnTkRkP/KCLa2aDTfIMnp9tvKxjeZBRFSZWF12CAAGgwG///47rl27BoNB+dfPF1980eL9JCQkYPLkyVi+fDlatWqF//73v+jYsSPOnj2L2rVrm43PyspCnTp10Lt3b0yZMqXIfR44cADjx49Hs2bNoNPpMGvWLERFReHs2bNwcXGRx40aNQrz5s2THzs7O1s8byIiuyPlXaPJWHbIzBeVRq9TBl9CkmDQ66BSl+mrBxHRE8HqfwGPHj2KAQMG4OLFixBCecFESZKg11v+t62lS5dixIgRGDlyJAAgLi4Ou3fvRnx8PGJjY83GN2vWDM2aNQMAzJgxo8h97tq1S/F4zZo1qFmzJpKTkxWBYZUqVeDj42PxXImI7JXBYICUf50vJ41xzReDLyqZ3pBbxLYcBl9EZNesLjscM2YMwsPDcfr0ady6dQu3b9+Wb7du3bJ4Pzk5OUhOTpa7JRpFRUXh8OHD1k6rWBkZGQDyujGaWr9+Pby8vPDss89i2rRpuHv3bon7yc7ORmZmpuJGRGQPdCYVDlqu+SILFV7zBZhnw4iI7I3Vf366cOECNm/ejKeeeuqhDnzjxg3o9Xp4e3srtnt7eyM9Pf2h9m0khEBMTAxeeOEFhISEyNsHDhyI4OBg+Pj44PTp05g5cyZOnTqFxMTEYvcVGxuLuXPnlsu8iIgeJ7kmFQ1OGkcAgOAKHiqFTm8eaBn05tkwIiJ7YnXw1bx5c/z+++8PHXwZSZKy/ZEQwmxbWb3++uv45ZdfcOjQIcX2UaNGyfdDQkJQr149hIeH4/jx42jatGmR+5o5cyZiYmLkx5mZmQgICCiXeRIRVWY5Jl+YjQ03mPmi0hj05pdVLiogIyKyJ1YHXxMmTMDUqVORnp6ORo0awcHBQfF848aNLdqPl5cX1Gq1WZbr2rVrZtmwspgwYQK+/fZb/PDDD6Vef6xp06ZwcHDAhQsXig2+HB0d4ejo+NDzIiJ63OQYTDJfamPmi8EXlczY7dBBCOTm/1G1qFJEIiJ7YnXw1bNnTwDA8OHD5W2SJMkZK0sbbmi1WoSFhSExMRGvvPKKvD0xMRHdu3e3dloyIQQmTJiAr7/+GklJSQgODi71NWfOnEFubi58fX3LfFwioieVouzQIX/NFxtuUCl0+YGWgwB0EBCSxOCLiOye1cFXSkpKuR08JiYG0dHRCA8PR0REBFasWIHU1FSMGTMGADB48GD4+/vLnQ9zcnJw9uxZ+f7ly5dx8uRJuLq6ymWQ48ePx4YNG/DNN9/Azc1Nzqx5eHjA2dkZf/zxB9avX49OnTrBy8sLZ8+exdSpUxEaGopWrVqV23sjInpSmJYdOnPNF1nIGGip8286FN0BkYjInlgdfAUGBpbbwfv27YubN29i3rx5SEtLQ0hICHbu3CkfIzU1FSpVQUPGK1euIDQ0VH68ZMkSLFmyBJGRkUhKSgIAxMfHA8i7kLKpNWvWYOjQodBqtfj+++/xwQcf4J9//kFAQAA6d+6M2bNnQ61Wl9t7IyJ6UphmvrQarvkiyxhE3povNQC1AHQSyw6JiMp0sY0vvvgCn3zyCVJSUnDkyBEEBgYiLi4OwcHBVpcMjhs3DuPGjSvyOWNAZRQUFGR2bbHCSns+ICAABw4csGqORET2TJe/5ksIFRxUxl8bDL6oZMayQ7UA1Pl9tPRsuEFEds7q63zFx8cjJiYGnTp1wp07d+Q1XlWrVkVcXFy5T5CIiGwrx9i1TqigUeVVCDDzRaUpXHaYt828AyIRkT2xOvj66KOP8Omnn2LWrFmKMr3w8HD8+uuv5To5IiKyPZ3c7VAqyHxJDL6oZEUHXyw7JCL7ZnXwlZKSolh3ZeTo6Ih79+6Vy6SIiKjyMDbckKCGg5qZL7KMPj9oV0OCOn9FgM7A
4IuI7JvVwVdwcDBOnjxptv27775Dw4YNy2VSRERUecgNN4QErdq45ovdDqlkxiyXBqaZL3Y7JCL7ZnXDjTfeeAPjx4/HgwcPIITATz/9hI0bNyI2NhYrV66siDkSEZENFZQdFqz5AkpubkRkbCuvggR1/vliYKt5IrJzVgdfw4YNg06nw/Tp05GVlYUBAwbA398fH3zwAfr161cRcyQiIhuSG25AJWe+BNd8USn0hvxW85IEtchrd6hj8EVEdq5MreZHjRqFUaNG4caNGzAYDKhZs2Z5z4uIiCoJ49odSZH5YvBFJTNmvjSQ5C8bLDskIntXpuDLyMvLq7zmQURElVSOoaDVvFbDbodkGWPwpYYENSQAAgYDW80TkX2zKPgKDQ2FJEkW7fD48eMPNSEiIqpcdPqCzBcvskyW0ukL1nyppLzgi2WHRGTvLAq+evToId9/8OABli9fjoYNGyIiIgIAcPToUZw5cwbjxo2rmFkSEZHN6ERBww3jmi9JEjAYDFCprG6aS3bCkF+uqpEkaPLXfLHskIjsnUXB1+zZs+X7I0eOxMSJE/HOO++Yjbl06VL5zo6IiGwuV5dXKmZ6nS8grxGHk0prq2lRJaczKzssKEUkIrJXVv/J8quvvsLgwYPNtg8aNAhbtmwpl0kREVHloRcFZYcF1/kCcg281hcVz9jtUCWZBl9c80VE9s3q4MvZ2RmHDh0y237o0CE4OTmVy6SIiKjyyDF2O5RUcDAJvgpa0BOZM+QH7RqooJYYfBERAWXodjh58mSMHTsWycnJaNGiBYC8NV+rV6/Gv//973KfIBER2ZZpq3lF5ovBF5VALjuUVPJ1vlh2SET2zurga8aMGahTpw4++OADbNiwAQDQoEEDfPbZZ+jTp0+5T5CIiGxLZzDtdqhc80VUHPkiy1BBLanyt7FUlYjsW5mu89WnTx8GWkREdsKY4ZKgLpT54hdpKp4cfEmSSeaLATsR2Tf2CCYiohKZNtxQqVQQ+V+kmfmikhjPG7Vkmvli2SER2TcGX0REVCK57DC/aQJE3q+OXGYxqAQFmS8V1PlfN3Q8Z4jIzjH4IiKiEhnX6ahgXO+V96sjR8cv0lQ843mjhgqa/MDdIFiqSkT2jcEXERGVqCDzpQy+dGyeQCUwLTtUScx8EREBZQi+kpKSKmAaRERUWRVkvvJ+ZUj5ZYdc80Ul0QuTskPjmi/Bc4aI7JvVwdfLL7+MunXrYv78+bh06VJFzImIiCoRnSi4yHKevBIyZr6oJMagXSOpoWGreSIiAGUIvq5cuYJJkyZh69atCA4OxksvvYQvv/wSOTk5FTE/IiKyMV2hzBfy134x80UlMQbtKkklnzt6rvkiIjtndfBVrVo1TJw4EcePH8fPP/+M+vXrY/z48fD19cXEiRNx6tQpqyexfPlyBAcHw8nJCWFhYTh48GCxY8+cOYOePXsiKCgIkiQhLi6uTPvMzs7GhAkT4OXlBRcXF3Tr1g1///231XMnInrS6eUv0XlBl5T/q4PX+aKSGIwNN1RqXmSZiCjfQzXcaNKkCWbMmIHx48fj3r17WL16NcLCwtC6dWucOXPGon0kJCRg8uTJmDVrFk6cOIHWrVujY8eOSE1NLXJ8VlYW6tSpgwULFsDHx6fM+5w8eTK+/vprbNq0CYcOHcI///yDLl26QM8vE0RECvKaL2PDDbaaJwsYg/a8ssO8c4cXWSYie1em4Cs3NxebN29Gp06dEBgYiN27d2PZsmW4evUqUlJSEBAQgN69e1u0r6VLl2LEiBEYOXIkGjRogLi4OAQEBCA+Pr7I8c2aNcPixYvRr18/ODo6lmmfGRkZWLVqFd577z20b98eoaGhWLduHX799Vfs3bu3LB8JEdETS858GRtusNshWUAnDADyyg7VqvzgCwZbTomIyOasDr4mTJgAX19fjBkzBk8//TROnDiBI0eOYOTIkXBxcUFAQAAWLFiA3377rdR95eTkIDk5GVFRUYrt
UVFROHz4sLVTs3ifycnJyM3NVYzx8/NDSEhImY9LRPSkKlx2aPzVwcwXlaSg1bxabjXPskMisncaa19w9uxZfPTRR+jZsye0Wm2RY/z8/LB///5S93Xjxg3o9Xp4e3srtnt7eyM9Pd3aqVm8z/T0dGi1Wnh6elp83OzsbGRnZ8uPMzMzyzQ/IqLHjd5QkMEATDJfLNOmEujzM1+KskM23CAiO2d15mv27Nno3bu3WeCl0+nwww8/AAA0Gg0iIyMt3qckSYrHQgizbdYqyz5LGhMbGwsPDw/5FhAQ8FDzIyJ6XBivzVTQcCPvv7xgLpVEznypNFDnnzs6Bl9EZOesDr7atm2LW7dumW3PyMhA27ZtrdqXl5cX1Gq1Wbbp2rVrZpmr8tynj48PcnJycPv2bYuPO3PmTGRkZMg3XuOMiOyFWeZL4povKl1B2aFKPncMgmu+iMi+WR18FZcdunnzJlxcXKzal1arRVhYGBITExXbExMT0bJlS2unZvE+w8LC4ODgoBiTlpaG06dPF3tcR0dHuLu7K25ERPaguIYbuQy+qAR6IQDkZb4Kuh3ynCEi+2bxmq9XX30VQF4539ChQxWdBvV6PX755ZcyBUwxMTGIjo5GeHg4IiIisGLFCqSmpmLMmDEAgMGDB8Pf3x+xsbEA8hpqnD17Vr5/+fJlnDx5Eq6urnjqqacs2qeHhwdGjBiBqVOnonr16qhWrRqmTZuGRo0aoX379la/ByKiJ5lBzmDk/cpgt0OyhHHNl1qlhlqVd+6w7JCI7J3FwZeHhweAvMyXm5sbnJ2d5ee0Wi1atGiBUaNGWT2Bvn374ubNm5g3bx7S0tIQEhKCnTt3IjAwEACQmpoKlaogQXflyhWEhobKj5csWYIlS5YgMjISSUlJFu0TAN5//31oNBr06dMH9+/fR7t27fDZZ59BrVaDiIgKGEvFjP8WG9d8MYtBJTHtdmhsNW9gq3kisnMWB19r1qwBAAQFBWHatGlWlxiWZNy4cRg3blyRzxkDKqOgoCCI/FKGsu4TAJycnPDRRx/ho48+smquRET2xvRLNGBadsiGG1Q8OfMlFTTc0HPNFxHZOatbzc+ePbsi5kFERJWUaeMEALxmE1lEj4I1X8bMl47BFxHZOYuCr6ZNm+L777+Hp6cnQkNDS2zZfvz48XKbHBER2Z6h0EWWueaLLCFf50ulkdcLMvNFRPbOouCre/fucoONHj16VOiEiIiocjGYlI8BBUEYgy8qiU5eK6iGJr/hBlvNE5G9syj4Mi01ZNkhEZF9kbsdqpSt5ll2SCUxyGWHDgUBuwVrtomInmRWX+eLiIjsi75Qq/mCL9IMvqh4pmWHxsyXnt0OicjOWZT58vT0LHGdl6lbt2491ISIiKhyEfL1mvIbbnDNF1lAl5/5UklqqFUOALjmi4jIouArLi6uoudBRESVlB75DTfygy5JUgGiICNGVBS9EIAEqNUOUOnzW82DZYdEZN8sCr6GDBlS0fMgIqJKSpiUjwH5ZYcC0PE6X1QC45ovjcqhoOyQmS8isnMWBV+ZmZlwd3eX75fEOI6IiJ4MBjn4MpYdqhXbiYqiM2m4IZcdMvNFRHbO4jVfaWlpqFmzJqpWrVrk+i8hBCRJgl7PMhQioieJAcZuh8bMF9d8UemMgZZKpZbPHT27HRKRnbMo+Nq3bx+qVasGANi/f3+FToiIiCoXIQx5a3fygy5jt0MD13xRCQz5gZZGrS0Ivpj5IiI7Z1HwFRkZWeR9IiJ68smZr/ygS82LLJMFjCsCWXZIRFTAouCrsNu3b2PVqlU4d+4cJElCgwYNMGzYMDk7RkRETw5j5stBrSw7ZOaLSqKX13xpoFYz+CIiAspwkeUDBw4gKCgIH374IW7fvo1bt27hww8/RHBwMA4cOFARcyQiIhsy5F8YV1Wo7FAv2O2QimcMzdUqLdd8ERHlszrzNX78ePTt2xfx8fFQq/N/Aev1GDduHMaPH4/Tp0+X+ySJ
iMh2RP7XaI1KWXbItuFUkrwslwS1mmWHRERGVme+/vjjD0ydOlUOvABArVYjJiYGf/zxR7lOjoiIbM94nS+H/OxFQfDFskMqnpz5UjvIZYfMlRKRvbM6+GratCnOnTtntv3cuXNo0qRJuUyKiIgqD2PZoTHoktd8GZj5ouLp869Ko1Y5QJOf+eIZQ0T2zqKyw19++UW+P3HiREyaNAm///47WrRoAQA4evQoPv74YyxYsKBiZklERDYj8r8yO+RXPKhVXPNFpZO7Haq1UKmMmS+WHRKRfbMo+GrSpAkkSYIwWSg7ffp0s3EDBgxA3759y292RERkc8bgyxh0qZB/nS/mMagExrNDY1J2yEJVIrJ3FgVfKSkpFT0PIiKqpIRQNtww/pet5qkkxrNDpXKARqUFABgk282HiKgysCj4CgwMrOh5EBFRpaVsuGFsNW9gt0Mqgc645kuthYoNN4iIAJTxIssAcPbsWaSmpiInJ0exvVu3bg89KSIiqjyMZYfmmS8GX1S8grJDR2hYdkhEBKAMwdeff/6JV155Bb/++qtiHZgk5f2JS6/nP61ERE+SgoYbylbzLDuk4hj0Ohjyvxeo1Vqo1Y4AGHwREVndan7SpEkIDg7G1atXUaVKFZw5cwY//PADwsPDkZSUVAFTJCIiW5IzX5Ky2yEbblBx9IaCqhiV2gFqdd6aLz3XfBGRnbM6+Dpy5AjmzZuHGjVqQKVSQaVS4YUXXkBsbCwmTpxYpkksX74cwcHBcHJyQlhYGA4ePFji+C1btqBhw4ZwdHREw4YN8fXXXyuelySpyNvixYvlMUFBQWbPz5gxo0zzJyJ6okn5wVehVvMGtpqnYuh12fJ9jVoLtYplh0REQBmCL71eD1dXVwCAl5cXrly5AiCvKcf58+etnkBCQgImT56MWbNm4cSJE2jdujU6duyI1NTUIscfOXIEffv2RXR0NE6dOoXo6Gj06dMHP/74ozwmLS1NcVu9ejUkSULPnj0V+5o3b55i3FtvvWX1/ImInnSFyw41bLhBpTDoc+X7arWjnPkSkgSDnkE7Edkvq9d8hYSE4JdffkGdOnXQvHlzLFq0CFqtFitWrECdOnWsnsDSpUsxYsQIjBw5EgAQFxeH3bt3Iz4+HrGxsWbj4+Li0KFDB8ycORMAMHPmTBw4cABxcXHYuHEjAMDHx0fxmm+++QZt27Y1m5+bm5vZWCIiKkzZcENe88WyQyqGTl+Q+VKrtHLwBeSVJKrUZe73RUT0WLM68/XWW2/BYMj7hTt//nxcvHgRrVu3xs6dO/Hhhx9ata+cnBwkJycjKipKsT0qKgqHDx8u8jVHjhwxG//SSy8VO/7q1avYsWMHRowYYfbcwoULUb16dTRp0gT/+c9/zDo3msrOzkZmZqbiRkRkH/L+zdcaM1/5QZhgww0qhl5f8PtUrXGEJr/hBqAsSSQisjdW/+nppZdeku/XqVMHZ8+exa1bt+Dp6Sl3PLTUjRs3oNfr4e3trdju7e2N9PT0Il+Tnp5u1fjPP/8cbm5uePXVVxXbJ02ahKZNm8LT0xM//fQTZs6ciZSUFKxcubLI/cTGxmLu3LmWvjUioieIsuEGW81TafSGvLJDSQio1Br5Ol8AoNcz+CIi+/VQef9Lly5BkiTUqlXroSZROGgTQpQYyFkzfvXq1Rg4cCCcnJwU26dMmSLfb9y4MTw9PdGrVy85G1bYzJkzERMTIz/OzMxEQEBA8W+KiOgJISQDJAAOhRpuCJYdUjGMmS91/mO1piDzpdMXX2VCRPSks7rsUKfT4e2334aHhweCgoIQGBgIDw8PvPXWW8jNzS19Bya8vLygVqvNslbXrl0zy24Z+fj4WDz+4MGDOH/+vLyerCQtWrQAAPz+++9FPu/o6Ah3d3fFjYjIPhRquCG3mmfZIRVNDr7yLgUKtapgzZdpMw4iIntjdfD1+uuvY8WKFVi0aBFOnDiBEydOYNGiRVi1ahUmTJhg1b60Wi3C
wsKQmJio2J6YmIiWLVsW+ZqIiAiz8Xv27Cly/KpVqxAWFobnnnuu1LmcOHECAODr62vp9ImIqP4EfwAAIABJREFU7IOU9w3amPkylh8Klh1SMYylhcbMl0qtgSRE/nPMfBGR/bK67HDjxo3YtGkTOnbsKG9r3LgxateujX79+uGTTz6xan8xMTGIjo5GeHg4IiIisGLFCqSmpmLMmDEAgMGDB8Pf31/ufDhp0iS8+OKLWLhwIbp3745vvvkGe/fuxaFDhxT7zczMxFdffYX33nvP7JhHjhzB0aNH0bZtW3h4eODYsWOYMmUKunXrhtq1a1v7kRARPeHyM18qY+ZLk7+VmS8qmj6/nbzaZJsagA7KTohERPbG6uDLyckJQUFBZtuDgoKg1WrNX1CKvn374ubNm/I1t0JCQrBz504EBgYCAFJTU6FSFSToWrZsiU2bNuGtt97C22+/jbp16yIhIQHNmzdX7HfTpk0QQqB///5mx3R0dERCQgLmzp2L7OxsBAYGYtSoUZg+fbrV8yciepLp9HpI+Zmvwt0OwTVfVIzCa74AQCMAnQQYDLzOFxHZL6uDr/Hjx+Odd97BmjVr4OiYt4A2Ozsb//nPf/D666+XaRLjxo3DuHHjinwuKSnJbFuvXr3Qq1evEvc5evRojB49usjnmjZtiqNHj1o9TyIie5Nj8kVZLjtU8zpfVDKdQbnmCygIxNjtkIjsmUXBV+E27Xv37kWtWrXktVSnTp1CTk4O2rVrV/4zJCIim8nVF5QWml/ni8EXFc2Y3TLNfBlrWHRsuEFEdsyi4MvDw0PxuGfPnorHbLlORPRk0hkKgi9jt0Pj2i9wzRcVw1h2qEHBZWA0hZ4jIrJHFgVfa9asqeh5EBFRJZStK8hSGDNeGl7ni0qhy7/IsmlLZWMJovECzERE9qjMF1m+fv06zp8/D0mS8PTTT6NGjRrlOS8iIqoETDNfjsbMV/5/GXxRcfT5pYWmXzJUhZ4jIrJHVl/n6969exg+fDh8fX3x4osvonXr1vDz88OIESOQlZVVEXMkIiIbydYVNNyQ13xJzHxRyQrWfJmWHebdZ/BFRPbM6uArJiYGBw4cwPbt23Hnzh3cuXMH33zzDQ4cOICpU6dWxByJiMhGjJkvIST5sh/GzBckBl9UNLnsUCoIvuRuh4LBFxHZL6vLDrds2YLNmzejTZs28rZOnTrB2dkZffr0QXx8fHnOj4iIbChXbjVf8Lc6Y6t5Zr6oOAVlh6bBlwRAMPNFRHbN6sxXVlYWvL29zbbXrFmTZYdERE+YHGPZoSj4Eq2Vux0y+KKiGZtqqFFE5osNN4jIjlkdfEVERGD27Nl48OCBvO3+/fuYO3cuIiIiynVyRERkWwUNNwqu2GTMfDH4ouLoi1jzpc4vQdSx1TwR2TGryw7j4uLQsWNH+SLLkiTh5MmTcHJywu7duytijkREZCPGiyxLppkvNTNfVDI5+JIKlx0CBgOvD0dE9svq4KtRo0a4cOEC1q1bh99++w1CCPTr1w8DBw6Es7NzRcyRiIhspMg1X1J+q3k23KBiFF12KCmeIyKyR1YFX7m5uRg9ejTefvttjBo1qqLmRERElUSunKUoCL60Gma+qGT6/PNGLRWcN8bgS8fgi4jsmFVrvhwcHPD1119X1FyIiKiSkRtumPy6cFDlr/li5ouKUdKaL5YdEpE9s7rhxiuvvIJt27ZVxFyIiKiS0Qvjmi+T4Ete8yVsMCN6HMhlhyaZLw3LDomIrF/z9dRTT+Gdd97B4cOHERYWBhcXF8XzEydOLLfJERGRben05mWHDvndDiVJQKfXm3Q/JMqjkzNfBeeNKj8Q08nrCImI7I/VwdfKlStRtWpVJCcnIzk5WfGcJEkMvoiIniDGhhuSouyw4FdHjkHH4IvMGPIzppoiuh0y80VE9szq4CslJaUi5kFERJWQ8TpfUpENN/LWhFVxcHzk86LKzZjdUpmWHUoqQHDNFxHZN6vXfJkS
QkAI1vwTET2pcvXmmS+tSm3yPL9Ikzm9MF7ny6TbIcsOiYjKFnytWrUKISEhcHJygpOTE0JCQrBy5crynhsREdmYThiDq4KAS6t2kO9n6/lFmswZDHmdMDVFtJrXM/giIjtmddnh/2/v3uOiqvP/gb/mAoMajheEgVQktzRCS7AUWu+KupluZWLaWLvqZuYFtUdJ6sPLZmqPX8g+dL2VP8qtTXZTu/pVYFdMF7whut4qLyjeyOSLQJoMM/P5/gHnwMycGYGcGcZ5PR+Peayc8zlnPnP208y85/35vM+CBQuwcuVKTJ8+HfHx8QCAvLw8zJo1C+fPn8fbb7991ztJRETeIU87VDkW3AAAM4MvUmCuyXyp4Zj5sghmS4nIfzU4+Fq7di3ef/99vPDCC/K2kSNHonv37pg+fTqDLyKie4hU7bDutEOtuvbfzHyREsWbLKuqg3ZpSiIRkT9q8LRDi8WCnj17OmyPi4uD2cw3VCKie4lSwQ21Wg0h1Db7ieqSphZqVbVZUukmyxaOGSLyYw0Ovl588UWsXbvWYfuGDRswfvz4u9IpIiJqGuSqdfYfFzXBl4mZL1JgQfWaL426bvBVM+2Qa76IyI81eNohUF1wIzMzE7179wYA7Nu3DxcvXsSECRMwe/ZsuV1qaurd6SUREXmFVHBDpbK/l1d1FoOZL1IiZbfUitMOrV7pExFRU9DgzNfx48cRGxuLdu3a4ezZszh79izatWuH2NhYHD9+HAUFBSgoKMCRI0fqfc41a9YgKioKQUFBiIuLw549e1y237JlC6Kjo6HT6RAdHY1t27bZ7H/55ZehUqlsHlKgKKmsrMT06dMREhKCFi1aYOTIkbh06VL9LwQRkR9QmnZY/Xf1F2mThTfMJUcW+SbLdTNfNcEXM19E5McanPnatWvXXe1ARkYGkpOTsWbNGjz55JNYv349hg8fjpMnT6Jjx44O7fPy8pCUlIQ///nPeOaZZ7Bt2zaMGTMGe/fuRa9eveR2w4YNQ3p6uvx3YGCgzXmSk5Px1VdfYfPmzWjbti3mzJmDESNGID8/HxqN/S+8RET+Sc5gOEw7rM588T5fpETKbmlUjtMOzax2SER+7FfdZPluSE1NxcSJEzFp0iQ8/PDDSEtLQ4cOHRTXlQFAWloahgwZgpSUFHTt2hUpKSkYNGgQ0tLSbNrpdDoYDAb50aZNG3lfWVkZNm7ciPfeew+DBw9Gjx498PHHH+PYsWPIzs526+slIvIltaXm7X+Uqv67imu+SIGU+VKadmjltEMi8mNeDb5MJhPy8/ORmJhosz0xMRG5ubmKx+Tl5Tm0Hzp0qEP7nJwchIaG4qGHHsLkyZNx7do1eV9+fj6qqqpszhMREYGYmBinz1tZWYny8nKbBxHRvc7sLPNVs+arimu+SIGUMa077VDL+3wREXk3+Lp+/TosFgvCwsJstoeFhaG4uFjxmOLi4ju2Hz58OD755BP8+9//xnvvvYeDBw9i4MCBqKyslM8RGBiI1q1b1/t5ly1bBr1eLz86dOjQ4NdLRORrLE4KbqiY+SIXpKmFGnXt6gZ1zRjitEMi8meNqnZ4t6lq7v0hEUI4bGtI+6SkJPnfMTEx6NmzJyIjI/HNN9/g2WefdXpeV8+bkpJiU8mxvLycARgR3fOswtmaL67fIeesSqXm1VLBDY4ZIvJfXs18hYSEQKPROGSbrl275pDdkhgMhga1B4Dw8HBERkbi9OnT8jlMJhNKS0vrfR6dToeWLVvaPIiI7nVVCiXDgdrqh8x8kRKlghtarvkiIvJu8BUYGIi4uDhkZWXZbM/KykJCQoLiMfHx8Q7tMzMznbYHgJKSEly8eBHh4eEAgLi4OAQEBNic5+rVqzh+/LjL8xAR+Zva+zXZTzusyXwxi0EKzMJ55ovZUiLyZ16fdjh79mwYjUb07NkT8fHx2LBhA4qKijBlyhQAwIQJE3D//fdj2bJlAICZM2eib9++WLFiBUaNGoUvvvgC
2dnZ2Lt3LwDg559/xqJFi/Dcc88hPDwc58+fx1tvvYWQkBA888wzAAC9Xo+JEydizpw5aNu2Ldq0aYPXX38d3bp1w+DBg71zIYiImiBp+pj9tENmvsiV2syX45ov3mSZiPyZ14OvpKQklJSUYMmSJbh69SpiYmKwfft2REZGAgCKioqgVtd+6CckJGDz5s2YP38+FixYgM6dOyMjI0O+x5dGo8GxY8ewadMm3LhxA+Hh4RgwYAAyMjIQHBwsn2flypXQarUYM2YMfvnlFwwaNAgffvgh7/FFRFSHueaGuPaZL8iZL36RJkdWxWmH1V85GHwRkT/zevAFAFOnTsXUqVMV9+Xk5DhsGz16NEaPHq3YvlmzZti5c+cdnzMoKAirVq3CqlWrGtRXIiJ/In2Jdljzxcp15IJZCAC21Q7lghtg8EVE/svrN1kmIqKmy3ynNV+WKo/3iZo+KcDSKpSaZ+aLiPwZgy8iInJKKjWvcVpwg1+kyZG85ksdIG+TAjEGX0Tkzxh8ERGRU1b5Jsu2HxdqOfhiwQ1yZEH1tEO1TbVDKfgSXukTEVFTwOCLiIicMjvLfKmY+SLnpOyW1mbNV03wxTVfROTHGHwREZFTSlXrAECF6r+rrFzzRY6kzFfdaYcaVjskImLwRUREztXeZFl52qGVX6RJgTS1UGnaoZmZLyLyYwy+iIjIKbnghtp+2mFN5os3WSYFUuZLWzfzVRN8Wbnmi4j8GIMvIiJyylqTpVDDNviSMl8W3ueLFChNO5QCMWkfEZE/YvBFRERO1Wa+nNxk2crgixzJwZemNviSpiCamfkiIj/G4IuIiJyqLbihtdleu+aLwRc5ktZ8aepUO9TWBGJWZr6IyI8x+CIiIqcsTjJfamnNFzNfpECx2iGnHRIRMfgiIiLnhHS/JvvMV031QwuDL1IglWGxnXYoVTtk8EVE/ovBFxEROSUX3LAvNV+T+WLBDVIiFZPXqBQKbnDNFxH5MQZfRETklFVU5zDsS82z2iG5YlYouCFNO+SaLyLyZwy+iIjIKanghtYu+JKCMU47JCXSqLAJvjSBAGqnJBIR+SMGX0RE5JQ07VCjsrvJspz5sjocQ2RVVf+vVh0ob5MDdma+iMiPMfgiIiKnRM20QofMl7zmi3kMciSNCrVC5ou5UiLyZwy+iIjIKWl9jn3mSyq4YbUy80WOpABLy+CLiMgGgy8iInLKaeZLzWqH5Fztmi+dvE26UbdF5YUOERE1EQy+iIjIKSuUgy/5Pl8MvkiBFGBJ9/YCmPkiIgIYfBERkQvSTZbtS81reZ8vcqF22mGdzBeDLyIiBl9EROSccFLtUFrzJVjtkOxYLWYIVXXqSwq4AEDL4IuIiMEXERE5JwVfARqtzXY1M1/khMVcKf+7bvAlVT7kmi8i8mdNIvhas2YNoqKiEBQUhLi4OOzZs8dl+y1btiA6Oho6nQ7R0dHYtm2bvK+qqgpvvvkmunXrhhYtWiAiIgITJkzAlStXbM7RqVMnqFQqm8fcuXPd8vqIiHyVcLLmS/rbyuCL7FisJvnfSjdZ5oghIn/m9eArIyMDycnJmDdvHgoKCtCnTx8MHz4cRUVFiu3z8vKQlJQEo9GIo0ePwmg0YsyYMdi/fz8A4NatWzh8+DAWLFiAw4cPY+vWrfjhhx8wcuRIh3MtWbIEV69elR/z589362slIvI1oqbUfICTghtWTjskOxZL3cxX7Zovaf2XUKlgtfD+cETkn7R3buJeqampmDhxIiZNmgQASEtLw86dO7F27VosW7bMoX1aWhqGDBmClJQUAEBKSgp2796NtLQ0fPrpp9Dr9cjKyrI5ZtWqVXjiiSdQVFSEjh07ytuDg4NhMBjc+OqIiHybtKbLMfNV/fHBzBfZM1vqZL60tcFX3cqHFnMl1BqvfwUhIvI4r2a+TCYT8vPzkZiYaLM9MTERubm5isfk5eU5tB86dKjT9gBQVlYGlUqFVq1a2WxfsWIF2rZti8ce
ewxLly6FyWRycgYiIv8kTTt0KLhR8/FhBTNfZMtqqZL/rVHXLbhRG4iZ62THiIj8iVd/drp+/TosFgvCwsJstoeFhaG4uFjxmOLi4ga1v337NubOnYtx48ahZcuW8vaZM2ciNjYWrVu3xoEDB5CSkoLCwkJ88MEHiueprKxEZWXth0V5eXm9XiMRkS+TC25obT8umPkiZyw1mS+VEDbZrbpZMKuVP3YSkX9qEjl/lcq29JEQwmFbY9pXVVVh7NixsFqtWLNmjc2+WbNmyf/u3r07WrdujdGjR8vZMHvLli3D4sWL6/V6iIjuHTXTDlX20w655ouUSVktjd12jU3mi8EXEfknr047DAkJgUajcchaXbt2zSG7JTEYDPVqX1VVhTFjxqCwsBBZWVk2WS8lvXv3BgCcOXNGcX9KSgrKysrkx8WLF12ej4joXuCs1LxGynyxdh3ZkTJfWmG7vW7ZeQuDLyLyU14NvgIDAxEXF+dQICMrKwsJCQmKx8THxzu0z8zMtGkvBV6nT59Gdna2YibLXkFBAQAgPDxccb9Op0PLli1tHkRE9zxVTfBlV3BDU1PtkDdZJntWa/WaL/svGCq1GmpRHZHVXRdGRORPvD7tcPbs2TAajejZsyfi4+OxYcMGFBUVYcqUKQCACRMm4P7775crH86cORN9+/bFihUrMGrUKHzxxRfIzs7G3r17AQBmsxmjR4/G4cOH8fXXX8NisciZsjZt2iAwMBB5eXnYt28fBgwYAL1ej4MHD2LWrFkYOXKkTTVEIiJ/J2W+NM6qHTLzRXbMNYGV/bRDaZsVLLhBRP7L68FXUlISSkpK5HtuxcTEYPv27YiMjAQAFBUVQa2u/f0sISEBmzdvxvz587FgwQJ07twZGRkZ6NWrFwDg0qVL+PLLLwEAjz32mM1z7dq1C/3794dOp0NGRgYWL16MyspKREZGYvLkyXjjjTc89KqJiHxFdfAVqLEvuFH91ZqZL7InTztU2KcVQJWK0w6JyH95PfgCgKlTp2Lq1KmK+3Jychy2jR49GqNHj1Zs36lTJwghFPdJYmNjsW/fvgb3k4jI/9zhPl/MfJEdizTtUOGjWPoplcEXEfkrr675IiKipk2olDNf0n2/BO/zRXYsd5h2CAAWq9lj/SEiakoYfBERkQvK1Q6lTBhLzZM9KfjSwvEWMFq5DTNfROSfGHwREZELzqYdMvNFyiw1N1BWynxJUxEZfBGRv2LwRUREzqmqvy3bTzuUMmGCa77IjjSlkNMOiYgcMfgiIiJFVqsVKpVy5kvDaofkhFRwQ6M47bB6m4X3+SIiP8Xgi4iIFJmttYFVgMY2+ArktENywlwzpVCjcgy+ajNfnHZIRP6JwRcRESmqstROKQzUBNjsY7VDcsZqrR43Spkvdc02MzNfROSnGHwREZEiU50vyPaZrwAt13yRMtfTDqtZBdd8EZF/YvBFRESKTNbawEpnX2peJQVjzHyRLbOL4EuaimhmtUMi8lMMvoiISJHJXJudCFDbBV8aTjskZfJNlhXWfEnTDlntkIj8FYMvIiJSVFXnC7JjwY2aYEzF4ItsWYXzNV9StUMrgy8i8lMMvoiISFHdghtate3HhYbVDskJs3SfL5XjVwx52qGVBTeIyD8x+CIiIkVVluov0UKoobYLvmpvuszgi2y5Krihqfnawft8EZG/YvBFRESKqqSCG8Lxo6L2pssMvsiWxVXmy64NEZG/YfBFRESKzHK1Q8cMRjOtDgAgVFW4XcXKdVTLIt/nS2naYU3mS/AWBUTknxh8ERGRotr7fDl+VPSIiAIszaBSm7H9h3zPdoyaNCmrpVWodihPO+SaLyLyUwy+iIhIkVRwQ6Uw7TBQq0UrTRcAwI6zez3aL2raLOLOBTc47ZCI/BWDLyIiUiQV3HD2UdG9bRwA4MT/HvZQj8gXSFMK1YrBl5T54rRDIvJPDL6IiEiRXHDDyUfFUw/2AQCUiR9wq6rSQ72ipk4KrLSK
BTek4IuZLyLyTwy+iIhIkfQlWuXkoyLxN48BluZQqU346tQBT3aNmjBX1Q61csENBl9E5J8YfBERkSKTlJ1QWPMFAFqNBm01DwMAMs/9x1PdoibOLE07VPiKIU1FNHPaIRH5KQZfRESkaPf56iqGWrRw2ubRdj0BACdLue6LqlnlaYcah31SNszKUvNE5KcYfBERkYOKyl+Q82MGAGDA/SOdtnv6oep1XxU4g4rKXzzSN2rapMyXRu0YfMnTDrnmi4j8FIMvIiJysGjXhxCaMqgseiwa8LLTdgMf6AZY7oNKXYUvTu3zXAepyZKqHWoUM1/V28zMfBGRn2oSwdeaNWsQFRWFoKAgxMXFYc+ePS7bb9myBdHR0dDpdIiOjsa2bdts9gshsGjRIkRERKBZs2bo378/Tpw4YdOmtLQURqMRer0eer0eRqMRN27cuOuvjYjI19ysrETW5U8BAP3DkhCsa+a0rVqtRjttNAAg61yuR/pHTZtVWAEoF9yQ1nxZueaLiPyU14OvjIwMJCcnY968eSgoKECfPn0wfPhwFBUVKbbPy8tDUlISjEYjjh49CqPRiDFjxmD//v1ym3fffRepqalYvXo1Dh48CIPBgCFDhqCiokJuM27cOBw5cgQ7duzAjh07cOTIERiNRre/XiKipu7PuzdBaEsBSzCWDPzjHdv3CK1e9/VdGdd9UZ1phwqZL60fZr4OHTqEqKgoqFSqBj+0Wi02bdqEQ4cOYeDAgTh06JC3Xw4R/UpeD75SU1MxceJETJo0CQ8//DDS0tLQoUMHrF27VrF9WloahgwZgpSUFHTt2hUpKSkYNGgQ0tLSAFRnvdLS0jBv3jw8++yziImJwUcffYRbt27h73//OwDg1KlT2LFjBz744APEx8cjPj4e77//Pr7++mt8//33HnvtRERNze0qE/7n4icAgD7tnkerZs6LbUhGdekLALiJcyi7fcut/aOmT7pFgdKaL/kmy34UfG3atAnnz59v1LEWiwWpqanYtGkTdu3ahb/97W93t3NE5HFabz65yWRCfn4+5s6da7M9MTERubnK01fy8vIwa9Ysm21Dhw6Vg6/CwkIUFxcjMTFR3q/T6dCvXz/k5ubilVdeQV5eHvR6PXr16iW36d27N/R6PXJzc9GlS5e79RI9Ysmmcai0cKE7Ef16Vy03YQ0oQZBFi2VBPwM5K+54zG+FFc3NAbilrcIbHz+HEE2QB3pKTdX3lktAAHD1RiX+kn3aZt9PFSZAA+wrPYt56c94qYfuV3b9Fk4euIRThy6j+HzZrzrX0aNHcfToUQDA+vfX4OfW/4UQQPP7AqEPaX43ukvk0wz3dcL051d6uxv15tXg6/r167BYLAgLC7PZHhYWhuLiYsVjiouLXbaX/lepzYULF+Q2oaGhDucODQ11+ryVlZWorKyU/y4vL3f10jwq23wUpRqvJzGJ6F5Q81Yypewn6Iv+X70PGdiuLb6+rwVyAy65r2/kG2rGUFGJFX///gebXX3aAQgBTgZYcBJnPN83Dzn+xnG3nLfyFzP+/+Ic+e+YD2Pc8jxEvqT7/xZhurc70QBeDb4kKpXK5m8hhMO2hra/Uxul87t63mXLlmHx4sVO++RNj1s7oNJy29vdIKJ7RCtVAMZG9QM09f+I+OMvP8NUVggTrG7sGfmKAAThvo6vIrJze5vtatNUtCv/K0yodHLkvcEw8WFkbzzltvOr1CoM+kNXdDGFuO05iHxFeItIb3ehQbwafIWEhECj0Thkm65du+aQuZIYDAaX7Q0GA4Dq7FZ4eLjTNj/++KPDuX/66Senz5uSkoLZs2fLf5eXl6NDhw53eoke8d6kHd7uAhH5uQcBvOftTpAP6AbgaW93wv0mA4enHkZcXJxbTn/o4CHExsa65dxE5F5enasWGBiIuLg4ZGVl2WzPyspCQkKC4jHx8fEO7TMzM+X2UVFRMBgMNm1MJhN2794tt4mPj0dZWRkOHDggt9m/fz/K
ysqcPq9Op0PLli1tHkRERESeolZziQGRr/P6tMPZs2fDaDSiZ8+eiI+Px4YNG1BUVIQpU6YAACZMmID7778fy5YtAwDMnDkTffv2xYoVKzBq1Ch88cUXyM7Oxt69ewFUTydMTk7GO++8gwcffBAPPvgg3nnnHTRv3hzjxo0DADz88MMYNmwYJk+ejPXr1wMA/vSnP2HEiBE+V2yDiIiImp7Q0FC0atXqrt1DdO7cufjXv/6FixcvKq5bJyLf4PXgKykpCSUlJViyZAmuXr2KmJgYbN++HZGR1fM3i4qKbH7pSUhIwObNmzF//nwsWLAAnTt3RkZGhk3lwjfeeAO//PILpk6ditLSUvTq1QuZmZkIDg6W23zyySeYMWOGXBVx5MiRWL16tYdeNREREd3L2rdvj+LiYpSVleHGjRu4fPkySktLcfv2bVy4cAE//fQT2rVrh4iICJw6dQparRaDBw9GVVUVysvLERERgZiYGJw5cwYPPPAA9Ho9hBAwmUzQ6XTefnlE1EgqIYTwdid8UXl5OfR6PcrKyjgFkYiIiIjIj9U3NuDkYSIiIiIiIg9g8EVEREREROQBDL6IiIiIiIg8gMEXERERERGRBzD4IiIiIiIi8gAGX0RERERERB7g9ft8+SqpQn95ebmXe0JERERERN4kxQR3uosXg69GqqioAAB06NDByz0hIiIiIqKmoKKiAnq93ul+3mS5kaxWK65cuYLg4GCoVCqv9qW8vBwdOnTAxYsXecNnN+E1di9eX/fjNXYvXl/34zV2L15f9+M1di9vX18hBCoqKhAREQG12vnKLma+GkmtVqN9+/be7oaNli1b8j9mN+M1di9eX/fjNXYvXl/34zV2L15f9+M1di9vXl9XGS8JC24QERERERF5AIMvIiIiIiIiD9AsWrRokbc7Qb+eRqNB//79odVyJqm78Bq7F6+v+/Eauxevr/vxGrsXr6/78Rq7ly9cXxbcICIiIiIi8gBOOyQiIiIiIvIABl9dwGjIAAAMwElEQVREREREREQewOCLiIiIiIjIAxh8EREREREReQCDLx+wdOlSJCQkoHnz5mjVqpVim6KiIjz99NNo0aIFQkJCMGPGDJhMJpfnLS0thdFohF6vh16vh9FoxI0bN9zxEnxKTk4OVCqV4uPgwYNOj+vfv79D+7Fjx3qw576lU6dODtdr7ty5Lo8RQmDRokWIiIhAs2bN0L9/f5w4ccJDPfYd58+fx8SJExEVFYVmzZqhc+fOWLhw4R3fEziGXVuzZg2ioqIQFBSEuLg47Nmzx2X7LVu2IDo6GjqdDtHR0di2bZuHeup7li1bhscffxzBwcEIDQ3F73//e3z//fcuj/nwww8V36dv377toV77jkWLFjlcJ4PB4PKY3bt3Iy4uDkFBQXjggQewbt06D/XWNyl9pqlUKrz22muK7Tl+Xfv222/x9NNPIyIiAiqVCp9//rnN/sZ+H2jo+7g7MPjyASaTCc8//zxeffVVxf0WiwVPPfUUbt68ib1792Lz5s3YsmUL5syZ4/K848aNw5EjR7Bjxw7s2LEDR44cgdFodMdL8CkJCQm4evWqzWPSpEno1KkTevbs6fLYyZMn2xy3fv16D/XaNy1ZssTmes2fP99l+3fffRepqalYvXo1Dh48CIPBgCFDhqCiosJDPfYN3333HaxWK9avX48TJ05g5cqVWLduHd566607HssxrCwjIwPJycmYN28eCgoK0KdPHwwfPhxFRUWK7fPy8pCUlASj0YijR4/CaDRizJgx2L9/v4d77ht2796N1157Dfv27UNWVhbMZjMSExNx8+ZNl8e1bNnS4f06KCjIQ732LY888ojNdTp27JjTtoWFhfjd736HPn36oKCgAG+99RZmzJiBLVu2eLDHvuXgwYM21zcrKwsA8Pzzzzs9huPXuZs3b+LRRx/F6tWrFfc35vtAQ9/H3UaQz0hPTxd6vd5h+/bt24VarRaXL1+Wt3366adC
p9OJsrIyxXOdPHlSABD79u2Tt+Xl5QkA4rvvvrv7nfdhJpNJhIaGiiVLlrhs169fPzFz5kwP9cr3RUZGipUrV9a7vdVqFQaDQSxfvlzedvv2baHX68W6devc0cV7yrvvviuioqJctuEYdu6JJ54QU6ZMsdnWtWtXMXfuXMX2Y8aMEcOGDbPZNnToUDF27Fi39fFecu3aNQFA7N6922kbZ5+J5GjhwoXi0UcfrXf7N954Q3Tt2tVm2yuvvCJ69+59t7t2z5o5c6bo3LmzsFqtivs5fusPgNi2bZv8d2O/DzT0fdxdmPm6B+Tl5SEmJgYRERHytqFDh6KyshL5+flOj9Hr9ejVq5e8rXfv3tDr9cjNzXV7n33Jl19+ievXr+Pll1++Y9tPPvkEISEheOSRR/D6668zI3MHK1asQNu2bfHYY49h6dKlLqfFFRYWori4GImJifI2nU6Hfv36cczWQ1lZGdq0aXPHdhzDjkwmE/Lz823GHgAkJiY6HXt5eXkO7YcOHcqxWk9lZWUAcMcx+/PPPyMyMhLt27fHiBEjUFBQ4Inu+aTTp08jIiICUVFRGDt2LM6dO+e0rbPxe+jQIVRVVbm7qz7PZDLh448/xh//+EeoVCqn7Th+G6cx3wca8z7uLk339s9Ub8XFxQgLC7PZ1rp1awQGBqK4uNjpMaGhoQ7bQ0NDnR7jrzZu3IihQ4eiQ4cOLtuNHz8eUVFRMBgMOH78OFJSUnD06FF56gHZmjlzJmJjY9G6dWscOHAAKSkpKCwsxAcffKDYXhqX9mM9LCwMFy5ccHt/fdnZs2exatUqvPfeey7bcQwru379OiwWi+LYc/Ue25D2VEsIgdmzZ+O3v/0tYmJinLbr2rUrPvzwQ3Tr1g3l5eX4y1/+gieffBJHjx7Fgw8+6MEeN329evXCpk2b8NBDD+HHH3/E22+/jYSEBJw4cQJt27Z1aO9s/JrNZly/fh3h4eGe6rpP+vzzz3Hjxg2XP9py/DZeY74PNOZ93F2Y+fISpcWv9o9Dhw7V+3xKv6wIIVz+4tKYY3xZY675pUuXsHPnTkycOPGO5588eTIGDx6MmJgYjB07Fp999hmys7Nx+PBhd72kJqch13jWrFno168funfvjkmTJmHdunXYuHEjSkpKXD6H/fi8l8esvcaM4StXrmDYsGF4/vnnMWnSJJfn5xh2raFjz5/H6q8xbdo0/Pe//8Wnn37qsl3v3r3x4osv4tFHH0WfPn3wj3/8Aw899BBWrVrloZ76juHDh+O5555Dt27dMHjwYHzzzTcAgI8++sjpMUrjV2k7Odq4cSOGDx9uMyPJHsfvr9eY99im8L7MzJeXTJs27Y5VxDp16lSvcxkMBodF3KWlpaiqqnKI8Ose8+OPPzps/+mnn5we4+sac83T09PRtm1bjBw5ssHPFxsbi4CAAJw+fRqxsbENPt4X/Zpx3bt3bwDAmTNnFH+JlSpzFRcX2/zqeu3atXt2zNpr6PW9cuUKBgwYgPj4eGzYsKHBz+ePY1hJSEgINBqNw6+jrsaewWBoUHuqNn36dHz55Zf49ttv0b59+wYdq1ar8fjjj+P06dNu6t29o0WLFujWrZvTa+Vs/Gq1WsX3Z6p14cIFZGdnY+vWrQ06juO3/hrzfaAx7+PuwuDLS0JCQhASEnJXzhUfH4+lS5fi6tWr8iDMzMyETqdDXFyc02PKyspw4MABPPHEEwCA/fv3o6ysDAkJCXelX01NQ6+5EALp6emYMGECAgICGvx8J06cQFVVlV9Nz/g141qa6+7seknT4bKystCjRw8A1XO4d+/ejRUrVjSuwz6mIdf38uXLGDBgAOLi4pCeng61uuETHfxxDCsJDAxEXFwcsrKy8Mwzz8jbs7KyMGrUKMVj4uPjkZWVhVmzZsnbMjMz79n3119LCIHp06dj27ZtyMnJQVRUVKPOceTIEXTr1s0NPby3VFZW4tSpU+jTp4/i/vj4eHz11Vc22zIzM9Gz
Z89GfR76k/T0dISGhuKpp55q0HEcv/XXmO8DjXkfdxuPlvegRrlw4YIoKCgQixcvFvfdd58oKCgQBQUFoqKiQgghhNlsFjExMWLQoEHi8OHDIjs7W7Rv315MmzZNPsf+/ftFly5dxKVLl+Rtw4YNE927dxd5eXkiLy9PdOvWTYwYMcLjr6+pys7OFgDEyZMnHfZdunRJdOnSRezfv18IIcSZM2fE4sWLxcGDB0VhYaH45ptvRNeuXUWPHj2E2Wz2dNebvNzcXJGamioKCgrEuXPnREZGhoiIiBAjR460adelSxexdetW+e/ly5cLvV4vtm7dKo4dOyZeeOEFER4eLsrLyz39Epq0y5cvi9/85jdi4MCB4tKlS+Lq1avyQ8Ix3DCbN28WAQEBYuPGjeLkyZMiOTlZtGjRQpw/f14IIYTRaLSpmPWf//xHaDQasXz5cnHq1CmxfPlyodVqbSrMUq1XX31V6PV6kZOTYzNeb926Jbexv8aLFi0SO3bsEGfPnhUFBQXiD3/4g9BqtfKYplpz5swROTk54ty5c2Lfvn1ixIgRIjg4WB6/c+fOFUajUW5/7tw50bx5czFr1ixx8uRJsXHjRhEQECA+++wzb70En2CxWETHjh3Fm2++6bCP47dhKioq5O+7AOTvDBcuXBBC1O/7wMCBA8WqVavkv+/0Pu4pDL58wEsvvSQAODx27dolt7lw4YJ46qmnRLNmzUSbNm3EtGnTxO3bt+X9u3btEgBEYWGhvK2kpESMHz9eBAcHi+DgYDF+/HhRWlrqwVfWtL3wwgsiISFBcV9hYaHN/wdFRUWib9++ok2bNiIwMFB07txZzJgxQ5SUlHiwx74jPz9f9OrVS+j1ehEUFCS6dOkiFi5cKG7evGnTDoBIT0+X/7ZarWLhwoXCYDAInU4n+vbtK44dO+bh3jd96enpiu8ZdX9v4xhuuL/+9a8iMjJSBAYGitjYWJsy6P369RMvvfSSTft//vOfokuXLiIgIEB07dpVbNmyxcM99h3Oxmvd//7tr3FycrLo2LGjCAwMFO3atROJiYkiNzfX8533AUlJSSI8PFwEBASIiIgI8eyzz4oTJ07I+1966SXRr18/m2NycnJEjx49RGBgoOjUqZNYu3ath3vte3bu3CkAiO+//95hH8dvw0jfW+0f0jWsz/eByMhIsXDhQpttrt7HPUUlRM0KSiIiIiIiInIbVjskIiIiIiLyAAZfREREREREHsDgi4iIiIiIyAMYfBEREREREXkAgy8iIiIiIiIPYPBFRERERETkAQy+iIiIiIiIPIDBFxERERERkQcw+CIiIiIiIvIABl9EREREREQewOCLiIiIiIjIAxh8ERERERERecD/ARgU0IX8wzYkAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ "