mirror of
https://github.com/aicodix/code.git
synced 2026-04-27 22:35:44 +00:00
added ordered statistics decoding
This commit is contained in:
parent
2944a44b28
commit
34a21139c7
4 changed files with 397 additions and 0 deletions
|
|
@ -84,6 +84,12 @@ Encoder for [augmented Hadamard codes](https://en.wikipedia.org/wiki/Hadamard_co
|
|||
|
||||
[SIMD](https://en.wikipedia.org/wiki/SIMD) intra-frame accelerated [Low-density parity-check](https://en.wikipedia.org/wiki/Low-density_parity-check_code) layered decoder.
|
||||
|
||||
### [osd.hh](osd.hh)
|
||||
|
||||
Ordered statistics decoding allows the practical [soft-decision decoding](https://en.wikipedia.org/wiki/Soft-decision_decoder) of short to medium sized [linear codes](https://en.wikipedia.org/wiki/Linear_code).
|
||||
Below are the BER plots of the BCH(127, 64) T=10 code, using the OSD [Soft-decision decoder](https://en.wikipedia.org/wiki/Soft-decision_decoder) and the [Reed–Solomon error correction](https://en.wikipedia.org/wiki/Reed%E2%80%93Solomon_error_correction) BCH decoder with erasures.
|
||||

|
||||
|
||||
### [exclusive_reduce.hh](exclusive_reduce.hh)
|
||||
|
||||
Reduce N times while excluding the i-th input element
|
||||
|
|
|
|||
236
osd.hh
Normal file
236
osd.hh
Normal file
|
|
@ -0,0 +1,236 @@
|
|||
/*
|
||||
Ordered statistics decoding
|
||||
|
||||
Copyright 2020 Ahmet Inan <inan@aicodix.de>
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <initializer_list>
#include <iostream>
#include "bitman.hh"
|
||||
|
||||
namespace CODE {
|
||||
|
||||
template <int N, int K>
|
||||
class LinearEncoder
|
||||
{
|
||||
public:
|
||||
void operator()(uint8_t *codeword, const uint8_t *message, const int8_t *genmat)
|
||||
{
|
||||
for (int i = 0; i < N; ++i)
|
||||
set_be_bit(codeword, i, get_be_bit(message, 0) & genmat[i]);
|
||||
for (int j = 1; j < K; ++j)
|
||||
if (get_be_bit(message, j))
|
||||
for (int i = 0; i < N; ++i)
|
||||
xor_be_bit(codeword, i, genmat[N*j+i]);
|
||||
}
|
||||
};
|
||||
|
||||
template <int N, int K>
class BoseChaudhuriHocquenghemGenerator
{
	// Number of parity bits of the (N, K) code.
	static const int NP = N - K;
public:
	// Compute the BCH generator polynomial as the GF(2) product of the
	// given minimal polynomials. Each minimal polynomial is a bitmask
	// with the LSB as the constant term. genpoly must hold NP+1
	// coefficients; genpoly[0] ends up as the highest-degree term and
	// genpoly[NP] as the constant term.
	static void poly(int8_t *genpoly, std::initializer_list<int> minimal_polynomials)
	{
		// $genpoly(x) = \prod_i(minpoly_i(x))$
		// genpoly_degree counts coefficients (degree + 1), not the degree.
		int genpoly_degree = 1;
		// start with the constant polynomial 1
		for (int i = 0; i < NP; ++i)
			genpoly[i] = 0;
		genpoly[NP] = 1;
		for (auto m: minimal_polynomials) {
			assert(0 < m);
			// minimal polynomials must have a nonzero constant term
			assert(m & 1);
			// degree of m = position of its highest set bit
			int m_degree = 0;
			while (m>>m_degree)
				++m_degree;
			--m_degree;
			// the running product must still fit in NP+1 coefficients
			assert(genpoly_degree + m_degree <= NP + 1);
			// in-place schoolbook multiplication over GF(2):
			// iterate from the highest current coefficient downwards so
			// each update only touches positions not yet visited.
			for (int i = genpoly_degree; i >= 0; --i) {
				if (!genpoly[NP-i])
					continue;
				genpoly[NP-i] = m&1;
				for (int j = 1; j <= m_degree; ++j)
					genpoly[NP-(i+j)] ^= (m>>j)&1;
			}
			genpoly_degree += m_degree;
		}
		// the product must fill the parity span exactly
		assert(genpoly_degree == NP + 1);
		assert(genpoly[0]);
		assert(genpoly[NP]);
		// debug dump, disabled by default
		if (0) {
			std::cerr << "generator polynomial =" << std::endl;
			for (int i = 0; i <= NP; ++i)
				std::cerr << (int)genpoly[i];
			std::cerr << std::endl;
		}
	}
	// Build the K x N generator matrix (row-major) from the generator
	// polynomial: row j is the polynomial shifted right by j columns.
	// When systematic is true, back-substitution reduces the left K x K
	// part to the identity, so the first K codeword bits equal the message.
	static void matrix(int8_t *genmat, bool systematic, std::initializer_list<int> minimal_polynomials)
	{
		// row 0: the generator polynomial itself, zero-padded to N
		poly(genmat, minimal_polynomials);
		for (int i = NP+1; i < N; ++i)
			genmat[i] = 0;
		for (int j = 1; j < K; ++j) {
			// zeros left of the shifted polynomial
			for (int i = 0; i < j; ++i)
				genmat[N*j+i] = 0;
			// copy row 0 shifted j columns: (N+1)*j+i == N*j + (j+i)
			for (int i = 0; i <= NP; ++i)
				genmat[(N+1)*j+i] = genmat[i];
			// zeros right of the shifted polynomial
			for (int i = j+NP+1; i < N; ++i)
				genmat[N*j+i] = 0;
		}
		// eliminate above-diagonal entries, rightmost column first
		if (systematic)
			for (int k = K-1; k; --k)
				for (int j = 0; j < k; ++j)
					if (genmat[N*j+k])
						for (int i = k; i < N; ++i)
							genmat[N*j+i] ^= genmat[N*k+i];
		// debug dump, disabled by default
		if (0) {
			std::cerr << "generator matrix =" << std::endl;
			for (int j = 0; j < K; ++j) {
				for (int i = 0; i < N; ++i)
					std::cerr << (int)genmat[N*j+i];
				std::cerr << std::endl;
			}
		}
	}
};
|
||||
|
||||
// Order-O ordered statistics decoder for an (N, K) binary linear code.
// Sorts positions by reliability, brings the K most reliable linearly
// independent columns into systematic form, re-encodes the hard
// decisions, and then searches all flips of up to O of the K basis bits.
template <int N, int K, int O>
class OrderedStatisticsDecoder
{
	// Working copy of the generator matrix, row-major K x N,
	// with columns permuted by reliability order.
	int8_t G[N*K];
	// Current trial codeword and best candidate found so far (0/1 per bit).
	int8_t codeword[N], candidate[N];
	// Soft values permuted into reliability order.
	int8_t softperm[N];
	// perm[i] = original position of permuted column i.
	int16_t perm[N];
	// Reduce G to row echelon form with pivots on the diagonal,
	// swapping rows and (when needed) columns; column swaps are
	// mirrored in perm so positions can be mapped back later.
	void row_echelon()
	{
		for (int k = 0; k < K; ++k) {
			// find pivot in this column
			for (int j = k; j < K; ++j) {
				if (G[N*j+k]) {
					// row swap only when the pivot row differs (j != k)
					for (int i = k; j != k && i < N; ++i)
						std::swap(G[N*j+i], G[N*k+i]);
					break;
				}
			}
			// keep searching for suitable column for pivot
			// beware: this will use columns >= K if necessary.
			for (int j = k + 1; !G[N*k+k] && j < N; ++j) {
				for (int h = k; h < K; ++h) {
					if (G[N*h+j]) {
						// account column swap
						std::swap(perm[k], perm[j]);
						for (int i = 0; i < K; ++i)
							std::swap(G[N*i+k], G[N*i+j]);
						// then bring the found row into pivot position
						for (int i = k; h != k && i < N; ++i)
							std::swap(G[N*h+i], G[N*k+i]);
						break;
					}
				}
			}
			assert(G[N*k+k]);
			// zero out column entries below pivot
			for (int j = k + 1; j < K; ++j)
				if (G[N*j+k])
					for (int i = k; i < N; ++i)
						G[N*j+i] ^= G[N*k+i];
		}
	}
	// Back-substitute to clear entries above each pivot, leaving the
	// left K x K part of G as the identity (systematic form).
	void systematic()
	{
		for (int k = K-1; k; --k)
			for (int j = 0; j < k; ++j)
				if (G[N*j+k])
					for (int i = k; i < N; ++i)
						G[N*j+i] ^= G[N*k+i];
	}
	// Fill codeword[K..N-1] from codeword[0..K-1] using the parity part
	// of the systematic generator matrix G.
	void encode()
	{
		for (int i = K; i < N; ++i)
			codeword[i] = codeword[0] & G[i];
		for (int j = 1; j < K; ++j)
			for (int i = K; i < N; ++i)
				codeword[i] ^= codeword[j] & G[N*j+i];
	}
	// Toggle message bit j and incrementally update the parity bits
	// (cheaper than a full re-encode; flip twice to undo).
	void flip(int j)
	{
		codeword[j] ^= 1;
		for (int i = K; i < N; ++i)
			codeword[i] ^= G[N*j+i];
	}
	// Correlation metric between a hard 0/1 word (mapped to +1/-1) and
	// the soft values; larger means a better match.
	static int metric(const int8_t *hard, const int8_t *soft)
	{
		int sum = 0;
		for (int i = 0; i < N; ++i)
			sum += (1 - 2 * hard[i]) * soft[i];
		return sum;
	}
public:
	// Decode soft values (per-bit LLRs, sign = hard decision) into hard
	// output bits using the given K x N generator matrix. Returns true
	// when the best candidate is strictly better than the runner-up,
	// i.e. the decision is unambiguous.
	bool operator()(uint8_t *hard, const int8_t *soft, const int8_t *genmat)
	{
		// sort positions by decreasing reliability |soft|
		for (int i = 0; i < N; ++i)
			perm[i] = i;
		std::sort(perm, perm+N, [soft](int a, int b){ return std::abs(soft[a]) > std::abs(soft[b]); });
		// permute the generator matrix columns accordingly
		for (int j = 0; j < K; ++j)
			for (int i = 0; i < N; ++i)
				G[N*j+i] = genmat[N*j+perm[i]];
		row_echelon();
		systematic();
		// permute the soft values (perm may have changed in row_echelon)
		for (int i = 0; i < N; ++i)
			softperm[i] = soft[perm[i]];
		// order-0 candidate: hard decisions on the K most reliable bits
		for (int i = 0; i < K; ++i)
			codeword[i] = softperm[i] < 0;
		encode();
		for (int i = 0; i < N; ++i)
			candidate[i] = codeword[i];
		// track best and second-best metrics to detect ambiguity
		int best = metric(codeword, softperm);
		int next = -1;
		auto update = [this, &best, &next]() {
			int met = metric(codeword, softperm);
			if (met > best) {
				next = best;
				best = met;
				for (int i = 0; i < N; ++i)
					candidate[i] = codeword[i];
			} else if (met > next) {
				next = met;
			}
		};
		// exhaustive search over all flips of up to O message bits;
		// each level undoes its flip on the way out, so the state is
		// restored for the next combination.
		for (int a = 0; O >= 1 && a < K; ++a) {
			flip(a);
			update();
			for (int b = a + 1; O >= 2 && b < K; ++b) {
				flip(b);
				update();
				for (int c = b + 1; O >= 3 && c < K; ++c) {
					flip(c);
					update();
					for (int d = c + 1; O >= 4 && d < K; ++d) {
						flip(d);
						update();
						for (int e = d + 1; O >= 5 && e < K; ++e) {
							flip(e);
							update();
							for (int f = e + 1; O >= 6 && f < K; ++f) {
								flip(f);
								update();
								flip(f);
							}
							flip(e);
						}
						flip(d);
					}
					flip(c);
				}
				flip(b);
			}
			flip(a);
		}
		// undo the permutation while writing the winning candidate
		for (int i = 0; i < N; ++i)
			set_be_bit(hard, perm[i], candidate[i]);
		return best != next;
	}
};
|
||||
|
||||
}
|
||||
|
||||
BIN
osd_bch.png
Normal file
BIN
osd_bch.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 6.9 KiB |
155
tests/osd_regression_test.cc
Normal file
155
tests/osd_regression_test.cc
Normal file
|
|
@ -0,0 +1,155 @@
|
|||
/*
|
||||
Regression Test for ordered statistics decoding
|
||||
|
||||
Copyright 2020 Ahmet Inan <inan@aicodix.de>
|
||||
*/
|
||||
|
||||
#include <random>
|
||||
#include <cassert>
|
||||
#include <iostream>
|
||||
#include <algorithm>
|
||||
#include <functional>
|
||||
#include "osd.hh"
|
||||
#include "galois_field.hh"
|
||||
#include "bose_chaudhuri_hocquenghem_encoder.hh"
|
||||
#include "bose_chaudhuri_hocquenghem_decoder.hh"
|
||||
|
||||
int main()
{
	// OSD reprocessing order and BCH(127, 64) code parameters.
	const int O = 2;
	const int N = 127;
	const int K = 64;
	// NR: max erasures/roots handled by the algebraic BCH decoder.
	const int NR = 20;
	const int loops = 1000;
	// SNR sweep range (dB) and the quasi-error-free threshold to pass.
	const double low_SNR = -5;
	const double high_SNR = 5;
	const double QEF_SNR = 1.5;
	typedef CODE::GaloisField<7, 0b10001001, uint8_t> GF;
	// minimal polynomials whose product is the BCH generator polynomial
	std::initializer_list<int> minpols {
		0b10001001, 0b10001111, 0b10011101,
		0b11110111, 0b10111111, 0b11010101,
		0b10000011, 0b11101111, 0b11001011,
	};

	GF instance;
	// byte counts for N, K and N-K bits, rounded up
	const int NW = (N+7)/8;
	const int KW = (K+7)/8;
	const int PW = (N-K+7)/8;
	int8_t genmat[N*K];
	CODE::BoseChaudhuriHocquenghemGenerator<N, K>::matrix(genmat, true, minpols);
	CODE::BoseChaudhuriHocquenghemEncoder<N, K> bchenc(minpols);
	CODE::LinearEncoder<N, K> linenc;
	CODE::OrderedStatisticsDecoder<N, K, O> osddec;
	CODE::BoseChaudhuriHocquenghemDecoder<NR, 1, K, GF> bchdec;
	GF::value_type erasures[NR];
	uint8_t message[KW], parity[PW], decoded[NW], codeword[NW];
	int8_t orig[N], noisy[N];

	std::random_device rd;
	typedef std::default_random_engine generator;
	typedef std::uniform_int_distribution<int> distribution;
	auto data = std::bind(distribution(0, 255), generator(rd()));

	double symb[N];
	double min_SNR = high_SNR;
	for (double SNR = low_SNR; SNR <= high_SNR; SNR += 0.1) {
		//double mean_signal = 0;
		double sigma_signal = 1;
		double mean_noise = 0;
		// noise standard deviation for the given Es/N0 in dB
		double sigma_noise = std::sqrt(sigma_signal * sigma_signal / (2 * std::pow(10, SNR / 10)));

		typedef std::normal_distribution<double> normal;
		auto awgn = std::bind(normal(mean_noise, sigma_noise), generator(rd()));

		int awgn_errors = 0;
		int quantization_erasures = 0;
		int uncorrected_errors = 0;
		int ambiguity_erasures = 0;
		int bchdec_errors = 0;
		for (int l = 0; l < loops; ++l) {
			// random K-bit message
			for (int i = 0; i < KW; ++i)
				message[i] = data();

			linenc(codeword, message, genmat);

			// systematic generator matrix: codeword starts with the message
			for (int i = 0; i < K; ++i)
				assert(CODE::get_be_bit(codeword, i) == CODE::get_be_bit(message, i));

			bchenc(message, parity);

			// the BCH encoder's parity must agree with the matrix encoder
			for (int i = K; i < N; ++i)
				assert(CODE::get_be_bit(codeword, i) == CODE::get_be_bit(parity, i-K));

			// BPSK mapping: bit 0 -> +1, bit 1 -> -1
			for (int i = 0; i < N; ++i)
				orig[i] = 1 - 2 * CODE::get_be_bit(codeword, i);

			for (int i = 0; i < N; ++i)
				symb[i] = orig[i];

			for (int i = 0; i < N; ++i)
				symb[i] += awgn();

			// $LLR=log(\frac{p(x=+1|y)}{p(x=-1|y)})$
			// $p(x|\mu,\sigma)=\frac{1}{\sqrt{2\pi}\sigma}}e^{-\frac{(x-\mu)^2}{2\sigma^2}}$
			double DIST = 2; // BPSK
			double fact = DIST / (sigma_noise * sigma_noise);
			// quantize LLRs to int8 range [-127, 127]
			for (int i = 0; i < N; ++i)
				noisy[i] = std::min<double>(std::max<double>(std::nearbyint(fact * symb[i]), -127), 127);

			// soft-decision OSD; unique == false means ambiguous decision
			bool unique = osddec(decoded, noisy, genmat);

			// channel statistics: sign flips and zero (erased) LLRs
			for (int i = 0; i < N; ++i)
				awgn_errors += noisy[i] * orig[i] < 0;
			for (int i = 0; i < N; ++i)
				quantization_erasures += !noisy[i];
			for (int i = 0; i < N; ++i)
				uncorrected_errors += CODE::get_be_bit(decoded, i) != CODE::get_be_bit(codeword, i);
			if (!unique)
				ambiguity_erasures += N;

			// reference path: hard decisions plus erasure positions,
			// fed to the algebraic BCH decoder for comparison
			for (int i = 0; i < K; ++i)
				CODE::set_be_bit(message, i, noisy[i] < 0);
			for (int i = K; i < N; ++i)
				CODE::set_be_bit(parity, i-K, noisy[i] < 0);
			int erasures_count = 0;
			for (int i = 0; erasures_count < NR && i < N; ++i)
				if (!noisy[i])
					erasures[erasures_count++] = i;

			bchdec(message, parity, erasures, erasures_count);

			for (int i = 0; i < K; ++i)
				bchdec_errors += CODE::get_be_bit(message, i) != CODE::get_be_bit(codeword, i);
			for (int i = K; i < N; ++i)
				bchdec_errors += CODE::get_be_bit(parity, i-K) != CODE::get_be_bit(codeword, i);
		}

		double bit_error_rate = (double)uncorrected_errors / (double)(N * loops);
		// QEF: lowest SNR where the OSD made no errors and no ambiguous calls
		if (!uncorrected_errors && !ambiguity_erasures)
			min_SNR = std::min(min_SNR, SNR);

		int MOD_BITS = 1; // BPSK
		double code_rate = (double)K / (double)N;
		double spectral_efficiency = code_rate * MOD_BITS;
		double EbN0 = 10 * std::log10(sigma_signal * sigma_signal / (spectral_efficiency * 2 * sigma_noise * sigma_noise));

		double bchdec_ber = (double)bchdec_errors / (double)(N * loops);

		// verbose report (disabled) vs. plot-friendly columns on stdout
		if (0) {
			std::cerr << SNR << " Es/N0 => AWGN with standard deviation of " << sigma_noise << " and mean " << mean_noise << std::endl;
			std::cerr << EbN0 << " Eb/N0, using spectral efficiency of " << spectral_efficiency << " from " << code_rate << " code rate and " << MOD_BITS << " bits per symbol." << std::endl;
			std::cerr << awgn_errors << " errors caused by AWGN." << std::endl;
			std::cerr << quantization_erasures << " erasures caused by quantization." << std::endl;
			std::cerr << uncorrected_errors << " errors uncorrected." << std::endl;
			std::cerr << ambiguity_erasures << " ambiguity erasures." << std::endl;
			std::cerr << bit_error_rate << " bit error rate." << std::endl;
			std::cerr << bchdec_ber << " BCH decoder bit error rate." << std::endl;
		} else {
			std::cout << SNR << " " << bit_error_rate << " " << bchdec_ber << " " << EbN0 << std::endl;
		}
	}
	std::cerr << "QEF at: " << min_SNR << " SNR" << std::endl;
	// regression criterion: must reach QEF below the threshold SNR
	assert(min_SNR < QEF_SNR);
	std::cerr << "Ordered statistics decoding regression test passed!" << std::endl;
	return 0;
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue