Commit 6495a3f8 authored by Jean-Romain Garnier's avatar Jean-Romain Garnier
Browse files

Added script measuring the performance of optimization algorithms

parent 4cb45095
#define PY_SSIZE_T_CLEAN
#include <Python.h>
double sum(PyObject *P) {
    /*
     * Returns the sum of all elements in the Python list P, reading each
     * item as a double.
     *
     * Items are fetched with PyList_GetItem (borrowed references), so no
     * reference counting is needed. Conversion errors from PyFloat_AsDouble
     * are not checked; the caller guarantees the list contains floats.
     */
    Py_ssize_t len = PyList_Size(P); /* Py_ssize_t: PyList_Size's real type, avoids truncation */
    double total = 0;
    for (Py_ssize_t i = 0; i < len; i++) {
        total += PyFloat_AsDouble(PyList_GetItem(P, i));
    }
    return total;
}
int should_continue(PyObject *P, int i, double Pmax, double step) {
    /*
     * Returns True (1) if another value of power for user i should be tested,
     * i.e. if incrementing P[i] by `step` keeps the allocation feasible.
     *
     * P holds the powers of the first len users in increasing order; the last
     * user's power is not stored, it is deduced as Pmax - sum(P).
     */
    int len = PyList_Size(P);
    double last = PyFloat_AsDouble(PyList_GetItem(P, len - 1));
    double total = sum(P);
    // Make sure we don't use more than the max allocated power.
    // Equivalently: after adding `step`, the deduced last user's power
    // (Pmax - total - step) must stay >= P[len-1], preserving the ordering.
    if (total + last + step > Pmax) {
        return 0;
    }
    // Make sure the power will remain smaller than the one of the previous user
    double Pi = PyFloat_AsDouble(PyList_GetItem(P, i));
    if (i == len - 1) {
        // This is the first-but-one user (the last user's power is not in P since their power can be deduced)
        // Just don't take-up more than half of the total power
        // NOTE(review): the Python implementation uses '<=' for this bound
        // while this one uses '<' — confirm which boundary is intended.
        return Pi < (Pmax / 2 - step);
    } else {
        // Make sure that, after adding step, the power is still smaller than the one of the previous user
        double PiplusOne = PyFloat_AsDouble(PyList_GetItem(P, i + 1));
        return Pi < PiplusOne - step;
    }
}
void rec(PyObject *P, PyObject *func, int i, double Pmax, double step, double *BERmin, PyObject **Pmin) {
    /*
     * Recursive method to compute all the BER values.
     *
     * Walks the discretized power values for user i (and, recursively, all
     * users below i), calling the Python callable `func` on each candidate
     * allocation. On every improvement *BERmin and *Pmin are updated; the
     * previously stored *Pmin reference is released, and the new one is
     * owned by the caller.
     *
     * Fixes vs. previous version: arglist/result/temporary float/old *Pmin
     * references are now released, the callback result is checked for NULL,
     * and the temporary last element is removed with PyList_SetSlice instead
     * of the invalid `Py_SIZE(P) -= 1` hack (which leaked the item and is
     * not a writable lvalue on recent CPython).
     */
    Py_ssize_t len = PyList_Size(P);
    if (i < 0 || i >= len) {
        return;
    }
    while (should_continue(P, i, Pmax, step)) {
        /* Increment the power of user i */
        double Pn = PyFloat_AsDouble(PyList_GetItem(P, i));
        Pn += step;
        PyList_SetItem(P, i, PyFloat_FromDouble(Pn)); /* SetItem steals the new ref */
        /* Temporarily append the deduced last user's power to the list */
        PyObject *lastPower = PyFloat_FromDouble(Pmax - sum(P));
        PyList_Append(P, lastPower); /* Append increfs, so drop our reference */
        Py_DECREF(lastPower);
        /* Ask the Python program to compute the new BER */
        PyObject *arglist = Py_BuildValue("(O)", P);
        PyObject *result = PyObject_CallObject(func, arglist);
        Py_DECREF(arglist);
        if (result == NULL) {
            /* Callback raised: remove the temporary item and propagate */
            PyList_SetSlice(P, len, len + 1, NULL);
            return;
        }
        double newBER = PyFloat_AsDouble(result);
        Py_DECREF(result);
        if (*BERmin < 0 || newBER < *BERmin) {
            *BERmin = newBER;
            Py_XDECREF(*Pmin); /* release the previously stored best list */
            *Pmin = PyList_GetSlice(P, 0, len + 1);
        }
        /* Remove the last user's power from the list (properly shrinks it
         * and releases the item, unlike the old Py_SIZE trick) */
        PyList_SetSlice(P, len, len + 1, NULL);
        /* Recursively call ourselves with a copy of P, then free the copy */
        PyObject *copy = PyList_GetSlice(P, 0, len);
        rec(copy, func, i - 1, Pmax, step, BERmin, Pmin);
        Py_DECREF(copy);
    }
}
/*
 * optimize(f, N, Pmax, step)
 *
 * Searches (with grid resolution `step`) for the power allocation of N
 * users minimizing the Python callable f, and returns the best list found
 * (a new reference owned by the caller).
 *
 * Raises TypeError when the arguments are malformed or out of range, or
 * when no feasible allocation was evaluated.
 */
static PyObject *optimize(PyObject *self, PyObject *args) {
    PyObject *func;
    int N;       /* number of users; no `const` — PyArg_ParseTuple writes here */
    double Pmax; /* total power budget */
    double step; /* grid resolution of the search */
    if (!PyArg_ParseTuple(args, "Oidd", &func, &N, &Pmax, &step)) {
        PyErr_SetString(PyExc_TypeError, "Expected 4 parameters: f, N, Pmax and step");
        return NULL;
    }
    if (!PyCallable_Check(func)) {
        PyErr_SetString(PyExc_TypeError, "f must be callable");
        return NULL;
    }
    if (N < 2) {
        PyErr_SetString(PyExc_TypeError, "N must be greater than 1");
        return NULL;
    }
    if (Pmax <= 0) {
        PyErr_SetString(PyExc_TypeError, "Pmax must be greater than 0");
        return NULL;
    }
    if (step <= 0) {
        PyErr_SetString(PyExc_TypeError, "step must be greater than 0");
        return NULL;
    }
    /* Keep the passed callback alive for the duration of the search */
    Py_XINCREF(func);
    /* Init the list of powers (all users but the deduced last one) with zeros */
    PyObject *P = PyList_New(N - 1);
    for (int i = 0; i < N - 1; ++i) {
        PyList_SetItem(P, i, PyFloat_FromDouble(0)); /* SetItem steals the ref */
    }
    /* Init the return values. Pmin starts as NULL and is filled by rec() on
     * the first improvement; pre-allocating a list here only leaked it. */
    double BERmin = -1;
    PyObject *Pmin = NULL;
    /* Recursively find the min */
    rec(P, func, N - 2, Pmax, step, &BERmin, &Pmin);
    /* Release the working list and balance the earlier incref */
    Py_DECREF(P);
    Py_XDECREF(func);
    if (BERmin < 0) {
        Py_XDECREF(Pmin);
        PyErr_SetString(PyExc_TypeError, "Failed to optimize f");
        return NULL;
    }
    /* Return the best list we found */
    return Pmin;
}
/* Method table: exposes the C function `optimize` to Python as "optimize". */
static PyMethodDef OptimizeMethods[] = {
    {"optimize", optimize, METH_VARARGS, "Find the minimum of the given method."},
    {NULL, NULL, 0, NULL} /* Sentinel */
};
/* Module definition for the "NOMA_Optimize" extension. */
static struct PyModuleDef optimizemodule = {
    PyModuleDef_HEAD_INIT,
    "NOMA_Optimize", /* module name as seen from Python */
    NULL,            /* no module docstring */
    -1,              /* module state size: -1 keeps state in globals */
    OptimizeMethods
};
PyMODINIT_FUNC PyInit_NOMA_Optimize(void) {
return PyModule_Create(&optimizemodule);
}
import Theory_N_receivers as NOMATh
import NOMA_Optimize
import numpy as np
import time
from scipy.optimize import minimize
from scipy.optimize import basinhopping
import matplotlib.pyplot as plt
"""
Minimize f(P)
Subject to:
∀ i ∈ ⟦1, N⟧, Pi ⩾ 0
∀ (i, j) ∈ ⟦1, N⟧², i ≤ j ⇒ Pi ≤ Pj
ΣPi ≤ Pmax
It can be written as:
min(f(P)) under the constraint that AX ⩾ B and ∀ i ∈ ⟦1, N⟧, Pi ⩾ 0
A = (-1, 1, 0, ..., 0, 0)
( 0, -1, 1, ..., 0, 0)
...
( 0, 0, 0, ..., -1, 1)
(-1, -1, -1, ..., -1, -1)
X = t(P1, P2, ..., PN)
B = t(0, 0, ..., 0, -Pmax)
https://docs.scipy.org/doc/scipy/reference/tutorial/optimize.html
https://stackoverflow.com/a/52003654/3624018
"""
# Default scenario parameters. g and sigma are re-randomized per round by
# compare() (it declares them `global`); N and Pmax are shadowed in __main__.
N = 5  # number of users
Pmax = 10000  # total power budget shared by all users
g = [1, 0.8, 0.65, 0.5, 0.55]  # per-user channel gains
sigma = [1, 2, 3, 4, 5]  # per-user noise standard deviations
def evaluate_min_c(N, Pmax, step=Pmax / 100):
    """
    Find the min for any number of users using the C implementation.

    Returns (Pmin, BERmin): the best power allocation found by the
    NOMA_Optimize extension and its corresponding BER.
    """
    best_alloc = NOMA_Optimize.optimize(f, N, Pmax, step)
    return best_alloc, f(best_alloc)
def evaluate_min_python(N, Pmax, step=Pmax / 100):
    """
    Find the min for any number of users using the Python implementation.

    Exhaustively walks the discretized feasible power allocations (grid of
    resolution `step`) and keeps the best one found.

    Parameters:
        N: number of users
        Pmax: total power budget
        step: grid resolution (default uses the module-level Pmax, evaluated
              at definition time)
    Returns:
        (Pmin, BERmin): full power list (including the deduced last user's
        power) and the corresponding BER.
    Side effects: rebinds the module-level globals BERmin and Pmin.
    """
    global BERmin, Pmin
    # Init default values
    Pmin = [0] * (N - 1)
    BERmin = f(Pmin + [Pmax])

    def should_continue(P, i, step, Pmax):
        """
        Returns True if another value of power for i should be tested
        """
        # Make sure we don't use more than the max allocated power
        if sum(P) + P[-1] + step > Pmax:
            return False
        # Make sure the power remains smaller than the one of the previous user
        if i == len(P) - 1:
            # This is the first-but-one user (the last user's power is not in P since their power can be deduced)
            # Just don't take-up more than half of the total power
            # NOTE(review): the C implementation uses a strict '<' for this
            # bound — confirm which boundary is intended.
            return P[i] <= Pmax / 2 - step
        else:
            # Make sure that, after adding step, the power is still smaller than the one of the previous user
            return P[i] <= P[i + 1] - step

    def rec(P, i, step, Pmax):
        """
        Recursive method to compute all the BER values
        """
        global BERmin, Pmin
        # Make sure the user is valid
        if i < 0:
            return
        while should_continue(P, i, step, Pmax):
            # Increment the power
            P[i] += step
            # Compute the new BER (the last user's power is deduced)
            BER = f(P + [Pmax - sum(P)])
            # Update the min BER if necessary
            if BER < BERmin:
                BERmin = BER
                Pmin = P.copy()  # Make sure to copy the list as it'll be modified in the future
            # Continue for the next user
            rec(P.copy(), i - 1, step, Pmax)

    # Start the recursive method
    rec(Pmin.copy(), N - 2, step, Pmax)
    # Return the result
    return Pmin + [Pmax - sum(Pmin)], BERmin
def global_min(x0, bounds, constraints=None, niter=100, step=Pmax / 10):
    """
    Search for the global minimum of f with scipy's basinhopping algorithm.

    Returns (Pmin, BERmin): the allocation found and its BER.
    """
    # The stepsize must be large enough for basinhopping not to get stuck
    # in a local minimum.
    outcome = basinhopping(
        f,
        x0=x0,
        niter=niter,
        stepsize=step,
        minimizer_kwargs={"bounds": bounds, "constraints": constraints},
    )
    return outcome.x, f(outcome.x)
def local_min(x0, bounds, constraints=None, method="SLSQP"):
    """
    Find a local minimum of f starting from x0 with scipy.optimize.minimize.

    Returns (Pmin, BERmin): the allocation found and its BER.
    """
    kwargs = {"x0": x0, "method": method, "constraints": constraints}
    # COBYLA can't handle bounds (so power might go under 0)
    if method != "COBYLA":
        kwargs["bounds"] = bounds
    outcome = minimize(f, **kwargs)
    return outcome.x, f(outcome.x)
def f(P):
    """
    Function to minimize: total theoretical BER for a power allocation.

    Input: List of powers (by order of increasing value)
    Output: total BER (float), or +inf when any user's parameters are invalid
    """
    # Each user has its own g and sigma, so every probability is recomputed
    # on each call.
    total = 0
    user_count = len(P)
    for user in range(user_count):
        # Theoretical probability of error for this user
        proba = NOMATh.theory(g[user], sigma[user], P, user_count, nuser=user + 1)
        # "None" means a parameter is invalid, so penalize with +inf
        if proba is None:
            return np.inf
        total += proba
    return total
def constraint(P):
    """
    Inequality-constraint function handed to scipy (type "ineq").

    Input: List of powers (by order of increasing value)
    Output: vector A·P - B, every entry of which must be >= 0
    """
    # A and B are module-level arrays prepared by compare()
    product = np.matmul(A, P.T)
    return product - B
def compare(rounds, N, Pmax, step, niter_basin):
    """
    Time both methods under the given conditions, and with the given parameters.
    Return the list of times and BERs measured.

    Parameters:
        rounds: number of random scenarios to benchmark
        N: number of users
        Pmax: total power budget
        step: grid resolution for the custom C/Python solvers
        niter_basin: iteration count passed to basinhopping
    Returns:
        (times, bers): two (rounds, 4) arrays, one column per method
        (0: C + local refine, 1: Python + local refine, 2: basinhopping,
        3: plain local minimization).
    Side effects:
        Rebinds the module-level globals A, B (constraint matrices) and
        g, sigma (randomized each round).
    """
    global A, B, g, sigma
    # Creating matrix A (see comment at the top of the script)
    A = np.zeros((N, N))
    for i in range(N - 1):
        A[i, i] = -1
        A[i, i + 1] = 1
    A[-1, :] = -np.ones((1, N))
    # Creating matrix B (see comment at the top of the script)
    B = np.zeros(N)
    B[-1] = -Pmax
    # Dict indicating that our function "constraint" should be used
    cons = {"type": "ineq", "fun": constraint}  # ineq: the constraint function result is to be non-negative
    # Creating the bounds for all the values of power (they must be between 0 and +∞)
    # Note: may not be useful since f(P) returns +∞ for negative power values
    bounds = [(0, np.inf)] * N
    # Save the results in np.ndarrays
    times = np.zeros((rounds, 4))
    bers = np.zeros((rounds, 4))
    # Generate different scenarios
    for i in range(rounds):
        g = np.random.rand(N).tolist()
        sigma = (np.random.rand(N) * 20).tolist()
        # First method: evaluating f(P) in multiple values, finding the min, and then starting from there to find the local min
        t0 = time.time()
        Pmin0, BERmin0 = evaluate_min_c(N, Pmax, step=step)
        Pmin0, BERmin0 = local_min(Pmin0, bounds, constraints=cons)
        time_d0 = time.time() - t0
        times[i, 0] = time_d0
        bers[i, 0] = BERmin0
        # Second method: Same as the first, but implemented in Python
        t1 = time.time()
        Pmin1, BERmin1 = evaluate_min_python(N, Pmax, step=step)
        Pmin1, BERmin1 = local_min(Pmin1, bounds, constraints=cons)
        time_d1 = time.time() - t1
        times[i, 1] = time_d1
        bers[i, 1] = BERmin1
        # Third method: Finding global min with the basinhopping method
        x0 = np.zeros(N)
        t2 = time.time()
        Pmin2, BERmin2 = global_min(x0, bounds, constraints=cons, niter=niter_basin, step=step)
        time_d2 = time.time() - t2
        times[i, 2] = time_d2
        bers[i, 2] = BERmin2
        # Fourth method: Plain gradient descent from an arbitrary starting point
        t3 = time.time()
        Pmin3 = [Pmax / (N - n + 1) for n in range(N)]  # Arbitrary starting point, usually not too far from the optimum
        Pmin3, BERmin3 = local_min(Pmin3, bounds, constraints=cons)
        time_d3 = time.time() - t3
        times[i, 3] = time_d3
        bers[i, 3] = BERmin3
    return times, bers
def plot_results(times, bers):
    """
    Plot a visual representation of both methods' times and average BER.

    times, bers: (rounds, 4) arrays as returned by compare(); only the
    custom method (column 0) and basinhopping (column 2) are drawn.
    """
    n_rounds = len(times)
    mean_times = np.average(times, axis=0)
    mean_bers = np.average(bers, axis=0)
    xs = np.arange(n_rounds)
    ax1 = plt.axes()
    ax2 = ax1.twinx()
    # Average BERs as horizontal dashed lines on the secondary axis
    ax2.set_xlabel("Round")
    ax2.set_ylabel("Average min BER found")
    ax2.plot(xs, [mean_bers[0]] * n_rounds, c="tab:green", markersize=0, linestyle="dashed", label="Average BER (custom)")
    ax2.plot(xs, [mean_bers[2]] * n_rounds, c="tab:purple", markersize=0, linestyle="dashed", label="Average BER (Basin Hopper)")
    # Per-round timings plus their averages on the primary axis
    ax1.set_xlabel("Round")
    ax1.set_ylabel("Time (s)")
    ax1.plot(xs, times[:, 0], c="tab:blue", marker="x", linestyle="none", label="Time (custom)")
    ax1.plot(xs, [mean_times[0]] * n_rounds, c="tab:blue", markersize=0, linestyle="dashed")
    ax1.plot(xs, times[:, 2], c="tab:orange", marker="x", linestyle="none", label="Time (Basin Hopper)")
    ax1.plot(xs, [mean_times[2]] * n_rounds, c="tab:orange", markersize=0, linestyle="dashed")
    ax1.legend(loc="upper left")
    ax2.legend(loc="upper right")
    ax1.set_ylim(bottom=0)
    ax2.set_ylim(bottom=0)
    plt.show(block=True)
if __name__ == "__main__":
    """
    Statistically measures the speed difference between our solver and
    the Basin Hopping optimization algorithm.
    """
    rounds = [100, 75, 50]  # fewer rounds for larger N (each round is slower)
    N = [2, 3, 4]
    Pmax = 10000
    step = [Pmax / 10, Pmax / 25, Pmax / 50, Pmax / 100]
    Niter = 20  # basinhopping iterations per round
    results = {}
    # Make both each method's parameters and the conditions vary
    for i in range(len(N)):
        r = rounds[i]
        n = N[i]
        print("N={}".format(n))
        for s in step:
            print(" step={}".format(s))
            times, bers = compare(r, n, Pmax, s, Niter)
            # plot_results(times, bers)
            averageTimes = np.average(times, axis=0)
            averageBers = np.average(bers, axis=0)
            results["N={},rounds={},step={}".format(n, r, s)] = {
                "times": list(averageTimes),
                "bers": list(averageBers)
            }
    # Previously the collected results were silently discarded; report them
    # so the measurements are actually usable.
    for scenario, data in results.items():
        print("{}: times={}, bers={}".format(scenario, data["times"], data["bers"]))
# Build script for the NOMA_Optimize C extension.
# Usage: python3 setup.py build / install
# NOTE(review): distutils is deprecated and removed in Python 3.12; consider
# switching to setuptools, which exposes the same setup/Extension API.
from distutils.core import setup, Extension
setup(name="NOMA_Optimize", version="1.0", ext_modules=[Extension("NOMA_Optimize", ["NOMA_Optimize.c"])])
# On the Performance of QPSK Modulation over Downlink NOMA: From error probability derivation to SDR-based validation
This repository is associated to the paper available at [URL].
...@@ -178,7 +178,7 @@ Contains several functions to compare the theoretical BER with a statistical BER ...@@ -178,7 +178,7 @@ Contains several functions to compare the theoretical BER with a statistical BER
##### Find\_Optimum.py ##### Find\_Optimum.py
Contains the algorithm for searching for the optimum power distribution. A custom implementation is compared to an existing solution. It is based on scipy and on the file `Theory_N_receivers.py`. Contains the algorithm for searching for the optimum power distribution. A custom implementation is compared to an existing solution. It is based on scipy and uses the file `Theorie_N_receptors.py`.
Its behavior is detailed in our report, and explanations on the matrices used in the optimization are given at the beginning of the file. Its behavior is detailed in our report, and explanations on the matrices used in the optimization are given at the beginning of the file.
...@@ -186,23 +186,24 @@ Its behavior is detailed in our report, and explanations on the matrices used in ...@@ -186,23 +186,24 @@ Its behavior is detailed in our report, and explanations on the matrices used in
Contains functions to calculate an error rate by following the ideal channel model with the Monte-Carlo method. Contains functions to calculate an error rate by following the ideal channel model with the Monte-Carlo method.
Usage examples are given in `Comparison_Model_Theory.py` and at the end of the file. Usage examples are given in `Comparison_Model_Theorie.py` and at the end of the file.
##### Theory\_N\_receivers.py ##### NOMA\_Optimize.c
Contains functions to calculate the theoretical error rate using the formula developed in the report (and the pseudo code given therein). Contains an implementation of the proposed algorithm to find the optimum power distributions for N users. This file can be compiled using `python3 setup.py --install`, and then used as a standard module (see `Time_Optimum.py` for an example).
Usage examples are given in `Comparison_Model_Theory.py` and at the end of the file. ##### Theory\_N\_receivers.py
##### Theory\_N\_receivers\_auto\_develop.py Contains functions to calculate the theoretical error rate using the formula developed in the report (and the pseudo code given therein).
Contains functions to compute automatically the theoretical formula for a given user, regardless of the number of users. The output is formated as a LaTeX source. Usage examples are given in `Comparaison_Modele_Theorie.py` and at the end of the file.
A usage example is given at the end of the file. ##### Time\_Optimum.py
Contains the algorithm used to compare our proposed optimization method with the <a href="https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.basinhopping.html">Basin-Hopping implementation in Scipy</a>. It imports the contents of `NOMA_Optimize.c` as a module, which requires the command `python3 setup.py --install` to be run beforehand.
## Appendixes ## Appendixes
### OFDMA ### OFDMA
An implementation of OFDMA could be interesting, as seen in the following publication: <a href="https://www.researchgate.net/publication/322924686_OFDMA_Simulations_on_GNU_Radio">https://www.researchgate.net/publication/322924686_OFDMA_Simulations_on_GNU_Radio</a>. We were however unable to reproduce the flowgraph described.
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment