support relative lift in BetaMVTest #8

Open · wants to merge 1 commit into base: master
15 changes: 13 additions & 2 deletions cprior/cdist/base.py
@@ -5,8 +5,7 @@
# Guillermo Navas-Palencia <[email protected]>
# Copyright (C) 2019

-from abc import ABCMeta
-from abc import abstractmethod
+from abc import ABCMeta, abstractmethod


class BayesModel(metaclass=ABCMeta):
@@ -249,6 +248,13 @@ def expected_loss_relative(self):
Compute expected relative loss for choosing a variant. This can be seen
as the negative expected relative improvement or uplift.
"""

# @abstractmethod
# def expected_lift_relative(self):
# """
# Compute expected relative lift for choosing a variant. This can be seen
# as the expected relative improvement or uplift.
# """

@abstractmethod
def expected_loss_relative_ci(self):
@@ -261,6 +267,11 @@ def expected_loss_relative_ci(self):
def expected_loss_vs_all(self):
"""Compute the expected loss against all variations."""

# TODO: just so tests pass for now
# @abstractmethod
# def expected_lift_vs_all(self):
# """Compute the expected lift against all variations."""

def update(self, data, variant):
"""
Update posterior parameters for a given variant with new data samples.
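For context on the commented-out methods: once a method is registered with @abstractmethod on BayesModel, every concrete subclass must implement it before it can be instantiated, so uncommenting these before the subclasses are updated would break existing tests. A minimal sketch of that failure mode (illustrative only; Base and Variant are made-up names):

```python
from abc import ABCMeta, abstractmethod


class Base(metaclass=ABCMeta):
    @abstractmethod
    def expected_loss_relative(self):
        """Already implemented by every concrete subclass."""

    # Registering this as abstract immediately breaks subclasses
    # that do not implement it yet.
    @abstractmethod
    def expected_lift_relative(self):
        """New abstract method."""


class Variant(Base):
    def expected_loss_relative(self):
        return 0.0


# Raises TypeError: Can't instantiate abstract class Variant
# with abstract method expected_lift_relative
Variant()
```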
81 changes: 70 additions & 11 deletions cprior/cdist/beta.py
@@ -5,22 +5,16 @@
# Guillermo Navas-Palencia <[email protected]>
# Copyright (C) 2019

+from multiprocessing import Pool
+
import mpmath as mp
import numpy as np

-from multiprocessing import Pool
-from scipy import integrate
-from scipy import optimize
-from scipy import special
-from scipy import stats
+from scipy import integrate, optimize, special, stats

from .._lib.cprior import beta_cprior
-from .base import BayesABTest
-from .base import BayesModel
-from .base import BayesMVTest
+from .base import BayesABTest, BayesModel, BayesMVTest
from .ci import ci_interval
-from .utils import check_ab_method
-from .utils import check_mv_method
+from .utils import check_ab_method, check_mv_method


def func_ppf(x, a0, b0, a1, b1, p):
@@ -986,6 +980,29 @@ def expected_loss_relative(self, method="exact", control="A", variant="B"):

return ((x0 - x1) / x1).mean()

def expected_lift_relative(self, method="exact", control="A", variant="B"):
    """
    Compute expected relative lift for choosing a variant, i.e. the
    expected relative improvement E[(variant - control) / control].

    Parameters
    ----------
    method : str (default="exact")
        The method of computation. Options are "exact" and "MC".

    control : str (default="A")
        The control variant.

    variant : str (default="B")
        The tested variant.
    """
    check_mv_method(method=method, method_options=("exact", "MC"),
                    control=control, variant=variant,
                    variants=self.models.keys())

    model_control = self.models[control]
    model_variant = self.models[variant]

    if method == "exact":
        a0 = model_control.alpha_posterior
        b0 = model_control.beta_posterior

        a1 = model_variant.alpha_posterior
        b1 = model_variant.beta_posterior

        # E[(x1 - x0) / x0] = E[x1] * E[1/x0] - 1 for independent beta
        # posteriors, with E[1/x0] = (a0 + b0 - 1) / (a0 - 1), so the
        # exact value matches the MC estimate below.
        return a1 * (a0 + b0 - 1) / ((a1 + b1) * (a0 - 1)) - 1
    else:
        x0 = model_control.rvs(self.simulations, self.random_state)
        x1 = model_variant.rvs(self.simulations, self.random_state)

        return ((x1 - x0) / x0).mean()
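A quick numerical sanity check of the exact formula against the Monte Carlo branch (a sketch, not part of the PR; the posterior parameters are hypothetical and only numpy and scipy are assumed):

```python
import numpy as np
from scipy import stats

# Hypothetical posteriors: control Beta(120, 880), variant Beta(135, 865).
a0, b0 = 120, 880
a1, b1 = 135, 865

# Closed form: E[(x1 - x0) / x0] = E[x1] * E[1/x0] - 1,
# with E[1/x0] = (a0 + b0 - 1) / (a0 - 1) for a beta distribution.
exact = a1 * (a0 + b0 - 1) / ((a1 + b1) * (a0 - 1)) - 1

# Monte Carlo estimate of the same quantity.
rng = np.random.default_rng(42)
x0 = stats.beta(a0, b0).rvs(size=1_000_000, random_state=rng)
x1 = stats.beta(a1, b1).rvs(size=1_000_000, random_state=rng)
mc = ((x1 - x0) / x0).mean()

print(exact, mc)  # both approximately 0.133
```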

def expected_loss_relative_vs_all(self, method="quad", control="A",
variant="B", mlhs_samples=1000):
r"""
Expand Down Expand Up @@ -1047,6 +1064,48 @@ def expected_loss_relative_vs_all(self, method="quad", control="A",

return e_max * e_inv_x - 1



def expected_lift_relative_vs_all(self, method="quad", control="A",
                                  variant="B", mlhs_samples=1000):
    """
    Compute the expected relative lift of choosing a variant against
    all other variants, i.e. the expected relative improvement over
    the best of the remaining variants.

    Parameters
    ----------
    method : str (default="quad")
        The method of computation. Options are "MC", "MLHS" and "quad".

    variant : str (default="B")
        The tested variant.

    mlhs_samples : int (default=1000)
        Number of samples for the MLHS method.
    """
    check_mv_method(method=method, method_options=("MC", "MLHS", "quad"),
                    control=None, variant=variant,
                    variants=self.models.keys())

    # exclude the tested variant
    variants = list(self.models.keys())
    variants.remove(variant)

    if method == "MC":
        # generate samples from all models in parallel
        xvariant = self.models[variant].rvs(self.simulations,
                                            self.random_state)

        pool = Pool(processes=self.n_jobs)
        processes = [pool.apply_async(self._rvs, args=(v, ))
                     for v in variants]
        xall = [p.get() for p in processes]
        maxall = np.maximum.reduce(xall)

        return (maxall / xvariant).mean() - 1
Suggested change (from the PR author):
-        return (maxall / xvariant).mean() - 1
+        return (xvariant / maxall).mean() - 1
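The direction of the ratio is the whole point of the suggestion: maxall / xvariant is the loss-style ratio, mirroring expected_loss_relative_vs_all, whereas a relative lift should put the tested variant in the numerator, matching ((x1 - x0) / x0) in expected_lift_relative above. With the suggested ordering the result is negative when the variant trails the best of the other variants and positive when it leads.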

    else:
        if method == "quad":
            variant_params = [(self.models[v].alpha_posterior,
                               self.models[v].beta_posterior)
                              for v in variants]

            e_max = integrate.quad(func=func_mv_elr, a=0, b=1,
                                   args=(variant_params,))[0]
        else:
            e_max = self._expected_value_max_mlhs(variants, mlhs_samples)

        a = self.models[variant].alpha_posterior
        b = self.models[variant].beta_posterior

        # 1 / E[1/x] for x ~ Beta(a, b); dividing by e_max = E[max of
        # the other variants] gives an approximation of the relative
        # lift E[x / max] - 1.
        e_x = (a - 1) / (a + b - 1)

        return (e_x / e_max) - 1
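Note that, unlike the loss case, e_x / e_max - 1 is only an approximation here: for independent posteriors E[x / max] factors into E[x] * E[1/max], and neither factor equals 1 / E[1/x] or 1 / E[max] exactly. A rough comparison against sampling (a sketch, not part of the PR; the posteriors are hypothetical):

```python
import numpy as np
from scipy import stats

rng = np.random.default_rng(7)
n = 1_000_000

# Hypothetical posteriors: variant C tested against variants A and B.
xc = stats.beta(130, 870).rvs(size=n, random_state=rng)
xa = stats.beta(120, 880).rvs(size=n, random_state=rng)
xb = stats.beta(125, 875).rvs(size=n, random_state=rng)

maxall = np.maximum(xa, xb)

# Sampled relative lift vs the best of the other variants.
mc = (xc / maxall).mean() - 1

# The PR's closed-form ratio: (1 / E[1/x]) / E[max] - 1, estimating
# E[max] by sampling here (the quad branch integrates it exactly).
e_x = (130 - 1) / (130 + 870 - 1)
approx = e_x / maxall.mean() - 1

print(mc, approx)  # same sign, but the magnitudes differ somewhat
```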

def expected_loss_relative_ci(self, method="MC", control="A", variant="B",
interval_length=0.9, ci_method="ETI"):
r"""
Expand Down