Commit 572ba617 authored by Elvira Clement's avatar Elvira Clement
Browse files

Adding experiments

parent 16715b56
*.npz
*.csv
xp_old*
\ No newline at end of file
import time
import numpy as np
from src.utils import get_lambda_max
def test_increasing_ball(list_tests, vec_offsets, slopepb, vecx):
nb_test = len(list_tests)
mat_out = np.zeros((nb_test, vec_offsets.size))
vecu, Atu, gap = slopepb.make_screening_quanties(
vecx
)
list_time = [0 for i in range(nb_test)]
# Boucle
for i_offset, offset in enumerate(vec_offsets):
for i_test, test in enumerate(list_tests):
# Test
t_start = time.time()
out = test.apply_test(
np.abs(Atu),
gap,
slopepb.lbd,
slopepb.vec_gammas,
offset_radius=offset
)
list_time[i_test] += time.time() - t_start
# save
mat_out[i_test, i_offset] = np.sum(out)
# out
return mat_out, list_time
\ No newline at end of file
# -*- coding: utf-8 -*-
import json
import numpy as np
class Setup(object):
    """Experiment configuration loaded from ``../setups.json``.

    Reads the ``setup<setup_id>`` entry of the JSON file and exposes its
    fields (problem sizes, dictionaries, gamma sequences, lambda ratios)
    as plain attributes, plus their cardinalities for loop bounds.
    """

    def __init__(self, setup_id):
        super(Setup, self).__init__()
        self.setup_id = setup_id

        # Pull this setup's section out of the shared configuration file.
        # NOTE(review): path is relative to the working directory — scripts
        # are presumably launched one level below the repository root.
        with open('../setups.json') as json_file:
            data = json.load(json_file)[f"setup{setup_id}"]

        # Problem dimensions and data-generation options.
        self.m = data["m"]
        self.n = data["n"]
        self.list_dic = data["dictionaries"]
        self.normalize = data["normalize"]
        self.n_rep = data["n_rep"]
        self.list_sequence = data["sequences"]
        self.list_ratio_lbd = data["list_ratio_lbd"]

        # Cardinalities, precomputed once for the experiment loops.
        self.nb_dic = len(self.list_dic)
        self.nb_sequence = len(self.list_sequence)
        self.nb_ratio_lbd = len(self.list_ratio_lbd)
\ No newline at end of file
{
"setup1a": {
"remark": "OSCAR 1, 2, 3",
"m": 10,
"n": 30,
"dictionaries": [
"gaussian 0.",
"uniform",
"toeplitz"
],
"normalize": true,
"n_rep": 50,
"sequences": [
["oscar-lim", 1, 0.9],
["oscar-lim", 1.0, 0.1],
["oscar-lim", 1, 1e-3]
],
"list_ratio_lbd": [
0.3,
0.5,
0.8
]
},
"setup1b": {
"remark": "EXP 1, 2, 3",
"m": 100,
"n": 300,
"dictionaries": [
"gaussian 0.",
"uniform",
"toeplitz"
],
"normalize": true,
"n_rep": 50,
"sequences": [
["exp-lim", 0.99999, 0.9],
["exp-lim", 0.99999, 0.5],
["exp-lim", 0.99999, 1e-2]
],
"list_ratio_lbd": [
0.3,
0.5,
0.8
]
}
}
\ No newline at end of file
import numpy as np
from src.solver.slope import primal_func, dual_func
from src.utils import get_lambda_max
class SlopePb(object):
"""docstring for SlopePb"""
def __init__(self, matA, vecy, vec_gammas, ratio_lbd, lbdmax=None):
super(SlopePb, self).__init__()
self.matA = matA
self.vecy = vecy
self.vec_gammas = vec_gammas
self.ratio_lbd = ratio_lbd
if lbdmax is None:
self.lbdmax = get_lambda_max(vecy, matA, vec_gammas)
else:
self.lbdmax = lbdmax
self.lbd = ratio_lbd * self.lbdmax
def make_dual_scaling(self, vecr):
beta_dual = np.sort(np.abs(self.matA.T @ vecr))[::-1]
beta_dual = np.cumsum(beta_dual) / \
np.cumsum(self.lbd * self.vec_gammas)
return vecr / np.max(beta_dual)
def make_screening_quanties(self, vecx):
"""
"""
# residual error
vecu = self.vecy - self.matA @ vecx
# dual scaling
vecu = self.make_dual_scaling(vecu)
pval = primal_func(self.vecy, self.matA @ vecx, vecx,
self.lbd, self.vec_gammas
)
dval = dual_func(self.vecy,
np.linalg.norm(self.vecy, 2)**2, vecu
)
gap = np.abs(pval - dval)
Atu = self.matA.T @ vecu
return vecu, Atu, gap
def eval_gap(self, vecx, vecu=None):
"""
"""
if vecu is None:
# residual error
vecu = self.vecy - self.matA @ vecx
# dual scaling
vecu = self.make_dual_scaling(vecu)
pval = primal_func(self.vecy, self.matA @ vecx, vecx,
self.lbd, self.vec_gammas
)
dval = dual_func(self.vecy,
np.linalg.norm(self.vecy, 2)**2, vecu
)
return np.abs(pval - dval)
\ No newline at end of file
python viz_final.py --noshow --save
\ No newline at end of file
# -*- coding: utf-8 -*-
"""Proof-of-concept screening experiment (single dictionary / sequence).

Loads precomputed primal/dual solutions for a setup, applies several
safe-screening tests over a grid of safe-ball radius offsets, and plots
the average number of zero entries detected by each test.
"""
import argparse
import sys

import numpy as np
import matplotlib.pyplot as plt

# Algorithm import
from src import __version__
from src.dictionaries import generate_dic
from src.utils import get_lambda_max, gamma_sequence_generator

# Screening
from src.screening.singletest import GapSphereSingleTest
from src.screening.gap_ptest import GAP_Ptest
from src.screening.gap_rqtest import GAP_RQtest
from src.screening.kappa_test import Kappa_test

# XP import
from xps.SIAM.slopepb import SlopePb
from xps.SIAM.setup import Setup
from xps.SIAM.xp_1_balls import xpparams

parser = argparse.ArgumentParser()
parser.add_argument('--id', help='setup id', type=str, default="1a")
args = parser.parse_args()

# -------------------------
# Load Setup
# -------------------------
setup = Setup(args.id)
solutions_filename = f"results/xp_setup{args.id}.npz"

try:
    # Try to load existing results
    load_results = np.load(solutions_filename, allow_pickle=True)
    mat_seed = load_results["mat_seed"]
    mat_pvopt = load_results["mat_pvopt"]
    mat_dvopt = load_results["mat_dvopt"]
except FileNotFoundError:
    # BUGFIX: `sys` was used here without being imported (NameError).
    print("No result found")
    sys.exit(1)

# -------------------------
# Screening Xp
# -------------------------
# percentage of detected zero
n_rep = 5
# BUGFIX: np.full(..., np.nan, dtype=int) raises "cannot convert float NaN
# to integer"; NaN markers require the default float dtype.
mat_nb_zero_detected = np.full(
    (3, xpparams.nb_point, setup.nb_sequence, n_rep),
    np.nan
)
poc_filename = f"results/poc{args.id}_screening.npz"

# Fixed experiment coordinates for this proof of concept.
i_xp = 0
i_dic = 2
i_ratio = 1
ratio = .5
i_seq = 1
seq = "OSCAR-1"

# seq_r = 1+np.arange(setup.n, dtype=int)
seq_r = 1 + np.arange(setup.n, dtype=int) // 5
# 1 + (1 + np.arange(setup.n, dtype=int)) // 2

# for rep in range(setup.n_rep):
for rep in range(n_rep):
    # Reuse the seed of the stored solution so data matches the solution.
    np.random.seed(mat_seed[i_dic, i_seq, rep])

    # ---- 1. Gen data ----
    matA = generate_dic(
        setup.list_dic[i_dic],
        setup.m,
        setup.n,
        setup.normalize
    )
    vecy = np.random.randn(setup.m)
    vecy /= np.linalg.norm(vecy)

    # ---- 2. Compute parameters ----
    vec_gammas = gamma_sequence_generator(
        setup.m,
        setup.n,
        setup.list_sequence[i_seq][0],
        setup.list_sequence[i_seq][1:]
    )
    lbd_max = get_lambda_max(vecy, matA, vec_gammas)

    # ---- 3. XP ----
    i_xp += 1
    print(f"xp {i_xp}")

    slopePb = SlopePb(matA, vecy, vec_gammas, ratio, lbdmax=lbd_max)
    vecx_hat = mat_pvopt[i_dic, i_seq, i_ratio, rep, :]
    vecu_hat = mat_dvopt[i_dic, i_seq, i_ratio, rep, :]
    gap = slopePb.eval_gap(vecx_hat, vecu_hat)
    Atu = matA.T @ vecu_hat

    # ---- 3b. Build thin safe ball ----
    rgap = np.sqrt(2 * gap)

    # ---- 3c. Testing sphere ----
    list_tests = [
        GapSphereSingleTest(),
        Kappa_test(vec_gammas, np.arange(setup.n, dtype=np.double)),
        GAP_RQtest(vec_gammas, seq_r),
        # GAP_Ptest(vec_gammas),
    ]
    for i_offset, offset in enumerate(xpparams.vec_offsets):
        for i_test, test in enumerate(list_tests):
            out = test.apply_test(
                np.abs(Atu), gap, ratio * lbd_max, vec_gammas,
                offset_radius=offset
            )
            mat_nb_zero_detected[i_test, i_offset, i_seq, rep] = np.sum(out)

# Average detection count over repetitions, one curve per test.
f, ax = plt.subplots(1, 1)
for i_test in range(3):
    ax.plot(np.mean(mat_nb_zero_detected[i_test, :, i_seq, :n_rep], axis=1))
plt.show()

# # Save
# np.savez(
#     poc_filename,
#     mat_nb_zero_detected=mat_nb_zero_detected,
#     list_test = [test.get_name() for test in list_tests],
#     version = __version__,
#     allow_pickle=True
# )
\ No newline at end of file
# -*- coding: utf-8 -*-
"""Screening comparison across all gamma sequences of a setup.

For one dictionary and one repetition, applies four safe-screening tests
over a grid of safe-ball radius offsets for every sequence of the setup
and plots the detection curves side by side.
"""
import argparse
import sys

import numpy as np
import matplotlib.pyplot as plt

# Algorithm import
from src import __version__
from src.dictionaries import generate_dic
from src.utils import get_lambda_max, gamma_sequence_generator

# Screening
from src.screening.singletest import GapSphereSingleTest
from src.screening.gap_ptest import GAP_Ptest
from src.screening.gap_rqtest import GAP_RQtest
from src.screening.kappa_test import Kappa_test

# XP import
from xps.SIAM.slopepb import SlopePb
from xps.SIAM.setup import Setup
from xps.SIAM.xp_1_balls import xpparams

parser = argparse.ArgumentParser()
parser.add_argument('--id', help='setup id', type=str, default="1a")
args = parser.parse_args()

# -------------------------
# Load Setup
# -------------------------
setup = Setup(args.id)
solutions_filename = f"results/xp_setup{args.id}.npz"

try:
    # Try to load existing results
    load_results = np.load(solutions_filename, allow_pickle=True)
    mat_seed = load_results["mat_seed"]
    mat_pvopt = load_results["mat_pvopt"]
    mat_dvopt = load_results["mat_dvopt"]
except FileNotFoundError:
    # BUGFIX: `sys` was used here without being imported (NameError).
    print("No result found")
    sys.exit(1)

# -------------------------
# Screening Xp
# -------------------------
# percentage of detected zero
# BUGFIX: np.full(..., np.nan, dtype=int) raises "cannot convert float NaN
# to integer"; NaN markers require the default float dtype.
mat_nb_zero_detected = np.full(
    (4, xpparams.nb_point, setup.nb_sequence, setup.n_rep),
    np.nan
)
poc_filename = f"results/poc{args.id}_screening.npz"

i_xp = 0
nb_xp = setup.nb_dic * setup.n_rep * setup.nb_sequence * setup.nb_ratio_lbd

# Fixed dictionary / ratio / repetition for this comparison.
i_dic = 2
i_ratio = 1
ratio = .5
rep = 0

f, ax = plt.subplots(1, 3)
for i_seq, seq in enumerate(setup.list_sequence):
    # for rep in range(setup.n_rep):
    # Reuse the seed of the stored solution so data matches the solution.
    np.random.seed(mat_seed[i_dic, i_seq, rep])

    # ---- 1. Gen data ----
    matA = generate_dic(
        setup.list_dic[i_dic],
        setup.m,
        setup.n,
        setup.normalize
    )
    vecy = np.random.randn(setup.m)
    vecy /= np.linalg.norm(vecy)

    # ---- 2. Compute parameters ----
    vec_gammas = gamma_sequence_generator(
        setup.m,
        setup.n,
        setup.list_sequence[i_seq][0],
        setup.list_sequence[i_seq][1:]
    )
    lbd_max = get_lambda_max(vecy, matA, vec_gammas)

    # ---- 3. XP ----
    i_xp += 1
    print(f"xp {i_xp} / {nb_xp}")

    slopePb = SlopePb(matA, vecy, vec_gammas, ratio, lbdmax=lbd_max)
    vecx_hat = mat_pvopt[i_dic, i_seq, i_ratio, rep, :]
    vecu_hat = mat_dvopt[i_dic, i_seq, i_ratio, rep, :]
    gap = slopePb.eval_gap(vecx_hat, vecu_hat)
    Atu = matA.T @ vecu_hat

    # ---- 3b. Build thin safe ball ----
    rgap = np.sqrt(2 * gap)

    # ---- 3c. Testing sphere ----
    list_tests = [
        # --- Lasso like test --
        GapSphereSingleTest(),
        GAP_RQtest(vec_gammas, 1 + np.arange(setup.n, dtype=int)),
        # # --- ideal like test ---
        Kappa_test(vec_gammas, np.arange(setup.n, dtype=np.double)),
        GAP_RQtest(vec_gammas, np.ones(setup.n, dtype=int)),
        # --- doing all tets ---
        # GAP_Ptest(vec_gammas),
    ]
    for i_offset, offset in enumerate(xpparams.vec_offsets):
        for i_test, test in enumerate(list_tests):
            out = test.apply_test(
                np.abs(Atu), gap, ratio * lbd_max, vec_gammas,
                offset_radius=offset
            )
            mat_nb_zero_detected[i_test, i_offset, i_seq, rep] = np.sum(out)

    # One subplot per sequence; thick lines for tests 0 and 2.
    ax[i_seq].plot(mat_nb_zero_detected[0, :, i_seq, rep], linewidth=4)
    ax[i_seq].plot(mat_nb_zero_detected[1, :, i_seq, rep])
    ax[i_seq].plot(mat_nb_zero_detected[2, :, i_seq, rep], linewidth=4)
    ax[i_seq].plot(mat_nb_zero_detected[3, :, i_seq, rep])

plt.show()
\ No newline at end of file
# -*- coding: utf-8 -*-
import numpy as np
from xps.SIAM.setup import Setup
from xps.SIAM.xp_1_balls import xpparams
def process(setup, log=True):
    """Post-process screening results into fractions of detected zeros.

    Loads the solution and screening ``.npz`` files for *setup*; for each
    (dictionary, sequence, lambda-ratio, repetition) cell whose primal
    solution has at least one zero coefficient, converts the raw detection
    counts into fractions of that zero count.

    Parameters
    ----------
    setup : Setup
        Experiment configuration (provides ``setup_id`` and cardinalities).
    log : bool
        When True, print a short experiment summary.

    Returns
    -------
    dict
        ``mat_pc_detected`` averaged over repetitions and ``list_tests``,
        the stored test names.
    """
    # ---- load ----
    solutions = np.load(
        f"results/xp_setup{setup.setup_id}.npz", allow_pickle=True
    )
    screenings = np.load(
        f"results/xp_setup{setup.setup_id}_screening.npz", allow_pickle=True
    )

    # mat_results_gap[1, i_dic, i_seq, rep, i_ratio]
    mat_pvopt = solutions["mat_pvopt"]
    mat_nb_zero_detected = screenings['mat_nb_zero_detected']
    nb_test = mat_nb_zero_detected.shape[0]
    list_tests = screenings["list_test"]

    # ---- log ----
    if log:
        print("Experiment info")
        print(f"- setup{setup.setup_id}")
        print("")

    # ---- processing ----
    mat_pc_detected = np.zeros(
        (nb_test, xpparams.nb_point, setup.nb_dic, setup.nb_sequence,
         setup.nb_ratio_lbd, setup.n_rep)
    )
    # Cells whose solution has no zero entry keep their zero default.
    for i_dic, i_seq, i_ratio, rep in np.ndindex(
            setup.nb_dic, setup.nb_sequence, setup.nb_ratio_lbd, setup.n_rep):
        nb_0 = np.sum(mat_pvopt[i_dic, i_seq, i_ratio, rep, :] == 0)
        if nb_0 > 0:
            mat_pc_detected[:, :, i_dic, i_seq, i_ratio, rep] = (
                mat_nb_zero_detected[:, :, i_dic, i_seq, i_ratio, rep]
                / float(nb_0)
            )

    # ---- return ----
    return {
        "mat_pc_detected": np.mean(mat_pc_detected, axis=5),
        "list_tests": list_tests,
    }
# {
# "results_mat_nb": results_mat_nb,
# "results_mat_nbz": results_mat_nbz,
# }
\ No newline at end of file
# -*- coding: utf-8 -*-
from decimal import Decimal
import argparse
import numpy as np
import matplotlib.pyplot as plt
# Slope import
from src.utils import gamma_sequence_generator
# XP import
from xps.SIAM.slopepb import SlopePb
from xps.SIAM.setup import Setup
from xps.SIAM.xp_1_balls import xpparams
from xps.SIAM.xp_1_balls.process_data import process
parser=argparse.ArgumentParser()
parser.add_argument('--noshow', help='do not display figures', action="store_true")
parser.add_argument('--save', help='save figure', action="store_true")
parser.add_argument('--id', help='setup id', type=str, default=1)
args=parser.parse_args()
# -------------------------
# Load Results
# -------------------------
setup = Setup(args.id)
dic_process = process(setup)
mat_pc_detected = dic_process["mat_pc_detected"]
list_tests = dic_process["list_tests"]
list_legends = ["$r_q=1 \\forall q$", "Testing all $q$", "$r_q=q \\forall q$"]
# -------------------------
# Plot Results
# -------------------------
for i_dic in range(setup.nb_dic):
if not np.any(mat_pc_detected[:, :, i_dic, :, :] > 0):
continue
f, ax = plt.subplots(setup.nb_sequence, setup.nb_ratio_lbd+1)
f.suptitle(f"{setup.list_dic[i_dic]}")
for i_seq, str_seq in enumerate(setup.list_sequence):
for i_lbd in range(len(setup.list_ratio_lbd)):
if i_seq ==0: