Commit bef87df4 authored by Ashkan Taslimi's avatar Ashkan Taslimi

Backup.

parent 9440a0f1
Pipeline #3895 failed with stages
in 2 seconds
import prrt
import math
import numpy as np
import restricted_integer_composition as ric
import prrt_utils
class HECSearch:
    """Search for a hybrid error correction (HEC) coding configuration.

    Explores (n, k, repair-schedule) combinations, either exhaustively
    ("FullSearch") or by probing only the extreme block lengths
    ("GreedySearch"), and returns the configuration with the lowest
    redundant information (RI). Equation/page references point to the
    dissertation the formulas were taken from.
    """

    def __init__(self, searchType, n_p_min, n_p_max, prrtApplicationParameters, prrtChannelParameters,
                 prrtSystemParameters):
        # Hard upper bound on the code word length n.
        self.n_max = 255
        # Step size in the estimation of the optimum code word length.
        self.step_size = 1
        self.n_p_min = n_p_min
        self.n_p_max = n_p_max
        self.prrtApplicationParameters = prrtApplicationParameters
        self.prrtChannelParameters = prrtChannelParameters
        self.prrtSystemParameters = prrtSystemParameters
        self.searchType = searchType

    def search(self):
        """Run the configured search and return the best PrrtCodingConfiguration found."""
        ri_opt = math.inf
        k_opt = 0
        n_opt = 0
        n_p_opt = []
        n_c_cap = 20  # cap on the number of repair cycles considered

        # Eq.5.9, page 125: minimum FEC delay.
        fec_delay_min = self.prrtSystemParameters.source_packet_interval + \
            self.n_p_max * self.prrtSystemParameters.redundancy_packet_transmission_delay + \
            (self.prrtChannelParameters.rtt_prop_fwd + self.prrtSystemParameters.processing_delay) / 2 + \
            self.prrtSystemParameters.packet_loss_detection_delay
        # Eq.5.9, page 125, assuming D_sup = 0: per-cycle repair round-trip delay.
        req_delay = self.prrtChannelParameters.rtt_prop_fwd + \
            self.n_p_max * self.prrtSystemParameters.redundancy_packet_transmission_delay + \
            self.prrtSystemParameters.processing_delay
        # Eq.5.10, page 125: maximum number of repair cycles within the latency budget.
        n_c_max = math.ceil((self.prrtApplicationParameters.max_latency - fec_delay_min) / req_delay)
        n_c_max = min(n_c_max, n_c_cap)

        if self.searchType == "FullSearch":
            # BUGFIX: start at 1 — zero cycles admits no repair schedule and the
            # range previously started at 0, inconsistent with GreedySearch.
            for n_c in range(1, n_c_max + 1):
                # Eq.5.11, k(Nc, D_T), page 125
                k_max = min(self.get_k(n_c, req_delay), self.get_k_lim(1, self.n_max))
                for k in range(1, k_max + 1):
                    n = self.estimate_n_for_k(k)
                    repair_schedules = ric.gen_repair_schedules(n - k, n_c, self.n_p_min, self.n_p_max, False)
                    for repair_schedule in repair_schedules:
                        coding_conf = prrt.PrrtCodingConfiguration(n, k, repair_schedule,
                                                                   self.prrtApplicationParameters,
                                                                   self.prrtChannelParameters,
                                                                   self.prrtSystemParameters)
                        ri = coding_conf.get_redundant_information()
                        if ri < ri_opt:
                            # BUGFIX: ri_opt was never updated here, so the last
                            # candidate (not the best) used to win.
                            ri_opt = ri
                            k_opt = k
                            n_opt = n
                            n_p_opt = repair_schedule
            return prrt.PrrtCodingConfiguration(n_opt, k_opt, n_p_opt)

        if self.searchType == "GreedySearch":
            for n_c in range(1, n_c_max + 1):
                # Eq.5.11, k(Nc, D_T), page 125
                k_max = min(self.get_k(n_c, req_delay), self.get_k_lim(1, self.n_max))
                # Greedy heuristic: only probe the extreme block lengths.
                for k in [1, k_max]:
                    n = self.estimate_n_for_k(k)
                    redundancy = n - k
                    if redundancy < n_c:
                        continue  # not enough repair packets to fill n_c cycles
                    repair_schedule = ric.gen_repair_schedule(redundancy, n_c, self.n_p_min, self.n_p_max)
                    coding_conf = prrt.PrrtCodingConfiguration(n, k, repair_schedule,
                                                               self.prrtApplicationParameters,
                                                               self.prrtChannelParameters,
                                                               self.prrtSystemParameters)
                    ri = coding_conf.get_redundant_information()
                    if ri < ri_opt:
                        ri_opt = ri
                        k_opt = k
                        n_opt = n
                        n_p_opt = repair_schedule
            return prrt.PrrtCodingConfiguration(n_opt, k_opt, n_p_opt)

    def get_k(self, n_c, req_delay):
        """Largest source block size k that fits the latency budget for n_c repair
        cycles (Eq.5.11, page 125)."""
        return math.ceil((self.prrtApplicationParameters.max_latency -
                          self.n_p_max * self.prrtSystemParameters.redundancy_packet_transmission_delay -
                          (self.prrtChannelParameters.rtt_prop_fwd + self.prrtSystemParameters.processing_delay) / 2 -
                          self.prrtSystemParameters.packet_loss_detection_delay -
                          n_c * req_delay) / self.prrtSystemParameters.source_packet_interval)

    def get_k_lim(self, start, end):
        """Binary search in [start, end] for the k whose residual erasure rate
        Pr(k, n_max) matches the application's maximum residual loss rate."""
        if start > end:
            # BUGFIX: empty search range must terminate the recursion; previously
            # the function recursed forever when the rounded rate never hit the
            # target exactly.
            return end
        mid_point = math.ceil((end + start) / 2)
        p_r = np.around(self.residual_packet_erasure_rate(mid_point, self.n_max,
                                                          self.prrtChannelParameters.loss_rate_fwd),
                        8)  # Pr(k, n_max), rounded to avoid float jitter
        if p_r == self.prrtApplicationParameters.max_residual_loss_rate:
            return int(mid_point)
        elif p_r > self.prrtApplicationParameters.max_residual_loss_rate:
            return self.get_k_lim(start, mid_point - 1)
        else:
            return self.get_k_lim(mid_point + 1, end)

    def estimate_n_for_k(self, k):
        """Smallest code word length n > k (stepped by step_size, capped at n_max)
        whose residual erasure rate meets the application target."""
        n = k + 1
        while self.residual_packet_erasure_rate(k, n, self.prrtChannelParameters.loss_rate_fwd) > \
                self.prrtApplicationParameters.max_residual_loss_rate \
                and n <= self.n_max - self.step_size:
            n = n + self.step_size
        return n

    def residual_packet_erasure_rate(self, k, n, ch_loss_rate):
        """Residual erasure rate Pr(k, n) after decoding an (n, k) block under
        channel loss rate ch_loss_rate."""
        total_packet_erasure = 0
        for i in range(1, k + 1):
            # j = total erasures in the block that leave i source packets unrecovered.
            for j in range(max(n - k + 1, i), n - k + i + 1):
                packet_erasure_at_i = i * prrt_utils.hypergeometric_distribution(n, k, i, j) \
                    * prrt_utils.get_error_prob(j, n, ch_loss_rate)
                total_packet_erasure += packet_erasure_at_i
        residual_packet_erasure_rate = (1 / k) * total_packet_erasure  # Pr(k, n)
        return residual_packet_erasure_rate
......@@ -10,8 +10,7 @@ import datetime
import socket
import ipaddress
import numpy as np
import itertools
from scipy.special import comb
import prrt_utils
include "sockets.pxd"
......@@ -216,12 +215,22 @@ class PrrtSystemParameters:
self.source_packet_interval)
class PrrtCodingConfiguration:
def __init__(self, n, k, n_p=None):
def __init__(self,
n,
k,
n_p=None,
prrtApplicationParameters=None,
prrtChannelParameters = None,
prrtSystemParameters = None):
if n < k:
raise ValueError("n must be greater or equal k.")
self.n = n
self.k = k
self.r = n - k
self.prrtApplicationParameters = prrtApplicationParameters
self.prrtChannelParameters = prrtChannelParameters
self.prrtSystemParameters = prrtSystemParameters
if self.r != 0 and n_p is None:
raise ValueError("n_p cannot be None if (n-k) != 0.")
......@@ -244,17 +253,15 @@ class PrrtCodingConfiguration:
def sum_error_prob_over_sent_packets(self, sent_packets_item, p_e):
    """Sum the binomial error probabilities for losing more packets than the
    redundancy sent so far: i = sent+1 .. sent+k erasures out of
    sent_packets_item + k transmitted packets.
    """
    error_prob = 0
    for i in range(sent_packets_item + 1, sent_packets_item + self.k + 1):
        # BUGFIX(diff residue): the source kept both the old self.get_error_prob
        # accumulation and the new prrt_utils.get_error_prob one, double-counting.
        # Only the shared prrt_utils helper is kept.
        error_prob += prrt_utils.get_error_prob(i, sent_packets_item + self.k, p_e)
    return error_prob
# TODO: Maybe running in parallel to optimize runtime
def get_error_prob(self, i, sent_packets_item, p_e):
    """Binomial probability of exactly i erasures among sent_packets_item packets,
    each lost independently with probability p_e."""
    erased = p_e ** i
    delivered = (1 - p_e) ** (sent_packets_item - i)
    return comb(sent_packets_item, i) * erased * delivered
def get_redundant_information(self):
    """Expected redundant information RI per source packet for this configuration.

    Uses the channel parameters stored on the instance. The first repair burst is
    always sent; later bursts are weighted by the probability that the erasures
    up to that cycle exceeded the redundancy already sent.
    """
    # BUGFIX(diff residue): the source contained both the old signature taking
    # prrtChannelParameters and the new parameterless one, plus a duplicated
    # map(...) line; only the new form is kept.
    p_e = self.prrtChannelParameters.loss_rate_fwd
    # Cumulative sum of all redundant packets up to each cycle.
    sent_redundant_packets = np.cumsum(self.n_p)
    error_prob_up_to_every_cycle = list(
        map(lambda x: self.sum_error_prob_over_sent_packets(x, p_e), sent_redundant_packets[1:]))
    ri = 1 / self.k * self.n_p[0] + 1 / self.k * np.dot(error_prob_up_to_every_cycle, self.n_p[1:])
    return ri
......@@ -272,22 +279,21 @@ class PrrtCodingConfiguration:
return 0
# TODO
# Check if it is able to fulfill application parameters given channel parameters.
def hypergeometric_distribution(self, n, k, i, j):
    """Hypergeometric PMF: of j erased packets drawn from an (n, k) block,
    the probability that exactly i are source packets."""
    source_picks = comb(k, i)
    repair_picks = comb(n - k, j - i)
    return source_picks * repair_picks / comb(n, j)
def is_valid_for(self, prrtApplicationParameters, prrtChannelParameters, prrtSystemParameters):
    """True iff both the latency and the residual-loss constraints are fulfilled.

    BUGFIX(diff residue): the condition appeared twice (old and reflowed form);
    also returns an explicit False instead of falling through to None.
    """
    return bool(
        self.is_maximum_latency_fulfilled(prrtApplicationParameters, prrtChannelParameters,
                                          prrtSystemParameters) and
        self.is_maximum_loss_fulfilled(prrtApplicationParameters, prrtChannelParameters,
                                       prrtSystemParameters))
# NOTE(review): this span contains unmerged diff residue - `fec_delay` and
# `req_delay_list` are each assigned twice (original and reflowed variants), and
# the trailing backslash continuations make the duplicates run into each other,
# so the span does not parse as-is.
# NOTE(review): `map(lambda c: ...)` is missing its iterable argument (TypeError
# if called), and `self.len(self.n_p)` should presumably be `len(self.n_p)` -
# confirm the intended delay formula against the dissertation before fixing.
def is_maximum_latency_fulfilled(self, prrtApplicationParameters, prrtChannelParameters, prrtSystemParameters):
fec_delay = prrtSystemParameters.block_coding_delay + self.n_p[0] * prrtSystemParameters.redundancy_packet_transmission_delay + \
fec_delay = prrtSystemParameters.block_coding_delay + self.n_p[
0] * prrtSystemParameters.redundancy_packet_transmission_delay + \
(prrtChannelParameters.rtt_prop_fwd + prrtSystemParameters.processing_delay) / 2 + \
prrtSystemParameters.packet_loss_detection_delay
req_delay_list = list(map(lambda c : prrtChannelParameters.rtt_prop_fwd + np.dot(self.n_p[c], prrtSystemParameters.redundancy_packet_transmission_delay) + prrtSystemParameters.processing_delay))
req_delay_list = list(map(lambda c: prrtChannelParameters.rtt_prop_fwd
+ np.dot(self.n_p[c], prrtSystemParameters.redundancy_packet_transmission_delay)
+ prrtSystemParameters.processing_delay))
if (fec_delay + np.dot(self.len(self.n_p), req_delay_list) <= prrtApplicationParameters.max_latency):
return True
......@@ -295,63 +301,15 @@ class PrrtCodingConfiguration:
total_packet_erasure = 0
for i in range(1, self.k):
for j in range(max(self.n - self.k + 1, i), self.n - self.k + i):
packet_erasure_at_i = i * self.hypergeometric_distribution(self.n, self.k, i, j) * self.get_error_prob(j, self.n, prrtChannelParameters.loss_rate_fwd)
packet_erasure_at_i = i * prrt_utils.hypergeometric_distribution(self.n, self.k, i, j) * prrt_utils.get_error_prob(
j,
self.n,
prrtChannelParameters.loss_rate_fwd)
total_packet_erasure += packet_erasure_at_i
residual_packet_erasure_rate = (1 / self.k) * total_packet_erasure # Pr(k, n)
residual_packet_erasure_rate = (1 / self.k) * total_packet_erasure # Pr(k, n)
if (residual_packet_erasure_rate <= prrtApplicationParameters.max_residual_loss_rate):
return True
class RestrictedIntegerComposition:
    """Generators for restricted integer compositions used to build repair schedules.

    NOTE: superseded by the module-level restricted_integer_composition helpers in
    this commit; the known crashes are fixed here without changing the intent.
    The untyped `cdef` locals were no-ops and have been dropped, which also makes
    the class valid pure Python.
    """

    @staticmethod
    def generate_ric(redundancy, positions, min, max):
        """Yield `positions`-part compositions of `redundancy` with non-decreasing
        parts, the first part at least `min` and every part at most `max`."""
        if positions < 1:
            # BUGFIX: `raise StopIteration` inside a generator is a RuntimeError
            # under PEP 479 (Python >= 3.7); a plain return ends iteration.
            return
        if positions == 1:
            if min <= redundancy <= max:
                yield (redundancy,)
            return
        for part in range(min, redundancy + 1):
            for tail in RestrictedIntegerComposition.generate_ric(redundancy - part, positions - 1, part, max):
                if part <= max:
                    yield (part,) + tail

    # is_order_ascending = False for full search and True for greedy search
    @staticmethod
    def gen_repair_schedules(redundancy, positions, min, max, is_order_ascending):
        """All schedules; when is_order_ascending is False every distinct ordering
        (permutation) of each composition is returned, deduplicated via a set."""
        arbitrary_schedules = []
        for schedule in RestrictedIntegerComposition.generate_ric(redundancy, positions, min, max):
            # BUGFIX: the original called .add() on this list.
            arbitrary_schedules.append(schedule)
        if not is_order_ascending:
            ordered_schedules = set()
            for schedule in arbitrary_schedules:
                # BUGFIX: sets have no .append(), and adding the permutations
                # iterator object itself was never the intent - insert the tuples.
                ordered_schedules.update(itertools.permutations(schedule))
            return list(ordered_schedules)
        else:
            return arbitrary_schedules

    @staticmethod
    def gen_repair_schedule(redundancy, positions, min, max):
        """Single greedy schedule: `min` packets everywhere, remainder piled onto
        the last cycles (back to front), each cycle capped at `max`."""
        if redundancy < positions * min or min > max or redundancy > positions * max:
            # BUGFIX: message corrected (it used to demand "min > max"); the added
            # upper-bound check keeps the fill loop from walking off the list front.
            raise Exception(
                "Illegal input combinations. Make sure that min <= max and that "
                "positions * min <= redundancy <= positions * max.")
        opt_schedule = [min for p in range(positions)]
        redundancy_left_over = redundancy - min * positions
        last_index = positions - 1
        while redundancy_left_over > 0:
            if opt_schedule[last_index] < max:
                opt_schedule[last_index] += 1
                redundancy_left_over -= 1
            else:
                last_index -= 1
        return opt_schedule
cdef class PrrtSocket:
cdef cprrt.PrrtSocket* _c_socket
_epoch = datetime.datetime.utcfromtimestamp(0)
......@@ -386,9 +344,9 @@ cdef class PrrtSocket:
property prrt_application_parameters:
def __get__(self):
return PrrtApplicationParameters(self.max_latency(),
self.max_residual_loss_rate(),
self.data_rate(),
cprrt.PrrtSocket_get_sock_opt(self._c_socket, "maximum_payload_size"))
self.max_residual_loss_rate(),
self.data_rate(),
self.maximum_payload_size())
property block_coding_delay:
def __get__(self):
......
from scipy.special import comb
# TODO: Maybe running in parallel to optimize runtime
# Pm(e, m), Eq. 3.45, Page 91
def get_error_prob(j, sent_packets_item, p_e):
    """Binomial probability of exactly j erasures among sent_packets_item packets,
    each lost independently with probability p_e."""
    erased = p_e ** j
    delivered = (1 - p_e) ** (sent_packets_item - j)
    return comb(sent_packets_item, j) * erased * delivered
# TODO
# Check if it is able to fulfill application parameters given channel parameters.
def hypergeometric_distribution(n, k, i, j):
    """Hypergeometric PMF: of j erased packets drawn from an (n, k) block,
    the probability that exactly i are source packets."""
    source_picks = comb(k, i)
    repair_picks = comb(n - k, j - i)
    return source_picks * repair_picks / comb(n, j)
\ No newline at end of file
# import pyximport; pyximport.install()
import itertools
def generate_ric(redundancy, positions, min, max):
    """Recursively yield restricted integer compositions of `redundancy`.

    Each yielded tuple has `positions` non-decreasing parts summing to
    `redundancy`, with the first part at least `min` and every part at most
    `max`. The untyped `cdef` locals were no-ops and have been dropped.
    """
    if positions < 1:
        # BUGFIX: `raise StopIteration` inside a generator is a RuntimeError
        # under PEP 479 (Python >= 3.7); a plain return ends iteration.
        return
    if positions == 1:
        if min <= redundancy <= max:
            yield (redundancy,)
        return
    for part in range(min, redundancy + 1):
        if part > max:
            # Parts only grow from here on, so no later part can be valid;
            # stopping early avoids discarded recursive work.
            break
        for tail in generate_ric(redundancy - part, positions - 1, part, max):
            yield (part,) + tail


# is_order_ascending = False for full search.
def gen_repair_schedules(redundancy, positions, min, max, is_order_ascending):
    """Return repair schedules distributing `redundancy` packets over `positions`
    cycles. With is_order_ascending=True only the non-decreasing compositions are
    returned; with False every distinct ordering (permutation) is returned."""
    arbitrary_schedules = []
    for schedule in generate_ric(redundancy, positions, min, max):
        arbitrary_schedules.append(schedule)
    if not is_order_ascending:
        ordered_schedules = set()
        for schedule in arbitrary_schedules:
            # BUGFIX: .add(itertools.permutations(...)) stored the iterator
            # object itself; update() inserts the permutation tuples.
            ordered_schedules.update(itertools.permutations(schedule))
        return list(ordered_schedules)
    else:
        return arbitrary_schedules
def gen_repair_schedule(redundancy, positions, min, max):
    """Build one greedy schedule: give every cycle `min` packets, then pile the
    remaining redundancy onto the last cycles (back to front), capped at `max`.

    Raises Exception on infeasible inputs.
    """
    if min > max:
        # BUGFIX: message corrected - it used to claim "min > max" was required.
        raise Exception(
            "Illegal input combinations. Make sure that min <= max and that "
            "positions * min <= redundancy <= positions * max.")
    if positions == 0:
        # Degenerate case preserved from the original: no cycles -> one burst.
        return [redundancy]
    if redundancy < positions * min or redundancy > positions * max:
        # BUGFIX: without these bounds the fill loop below silently returned a
        # schedule summing to more than `redundancy` (too little redundancy) or
        # walked `last_index` off the front of the list (too much redundancy).
        raise Exception(
            "Illegal input combinations. Make sure that min <= max and that "
            "positions * min <= redundancy <= positions * max.")
    opt_schedule = [min] * positions
    redundancy_left_over = redundancy - min * positions
    last_index = positions - 1
    while redundancy_left_over > 0:
        if opt_schedule[last_index] < max:
            opt_schedule[last_index] += 1
            redundancy_left_over -= 1
        else:
            last_index -= 1
    return opt_schedule
\ No newline at end of file
import pandas as pd
import hec_search
import prrt
import time
import numpy as np
dataset_file_path = 'in_12_param_4_sz_mini_1000.csv'
def get_n_p_max(rtt_prop_fwd, pkt_length, data_rate_btl_fwd):
    """Maximum number of repair packets in flight: bits deliverable by the
    bottleneck link during one forward propagation delay, divided by the
    packet length."""
    bits_in_flight = rtt_prop_fwd * data_rate_btl_fwd
    return bits_in_flight / pkt_length
def evaluate(searchAlgorithm, appParams, channelParams, systemParams):
    """Time one HEC search run; return [search_result, duration_in_seconds]."""
    n_p_lower = 1
    n_p_upper = get_n_p_max(channelParams.rtt_prop_fwd, appParams.pkt_length,
                            channelParams.data_rate_btl_fwd)
    started_at = time.time()
    searcher = hec_search.HECSearch(searchAlgorithm, n_p_lower, n_p_upper,
                                    appParams, channelParams, systemParams)
    outcome = searcher.search()
    elapsed = time.time() - started_at
    return [outcome, elapsed]
def test_case():
    """Evaluate the greedy search on dataset rows with 900 < index < 970 and
    write the results to result.csv."""
    counter = 0
    # Collect result rows in a plain list: DataFrame.append was deprecated in
    # pandas 1.4 and removed in 2.0; building one DataFrame at the end is also
    # much cheaper than appending row by row.
    collected = []
    # Load dataset in chunks.
    for df_in_chunk in pd.read_csv(dataset_file_path, sep=',', chunksize=1000):
        for index, row in df_in_chunk.iterrows():
            if not (900 < index < 970):
                # Hoisted filter: only build parameter objects for rows we evaluate.
                continue
            appParams = prrt.PrrtApplicationParameters(row['app_max_latency'],
                                                       row['app_max_residual_loss_rate'],
                                                       row['app_data_rate'],
                                                       row['app_pkt_length'])
            chnlParams = prrt.PrrtChannelParameters(row['ch_loss_rate'], 0,
                                                    row['ch_rtt_prop_fwd'], 0,
                                                    row['ch_data_rate_btl_fwd'], 0)
            sysParams = prrt.PrrtSystemParameters(row['sys_block_coding_dly'],
                                                  row['sys_red_pkt_trans_dly'],
                                                  row['sys_proc_dly'],
                                                  row['sys_pkt_loss_detection_dly'],
                                                  row['sys_src_pkt_interval'])
            for searchAlgorithm in ["GreedySearch"]:
                print(str(index))
                result = evaluate(searchAlgorithm, appParams, chnlParams, sysParams)
                config = result[0]
                collected.append({'search': searchAlgorithm,
                                  'config': [config.k, len(config.n_p), config.n_p],
                                  'duration': np.around(result[1], 0)})
        counter += 1
        print("Chunk round: " + str(counter))
    save_result_to = pd.DataFrame(collected)
    save_result_to.to_csv('result.csv', sep=',', index=False)


test_case()
......@@ -4,9 +4,13 @@ from Cython.Build import cythonize
import os, errno
import versioneer
# NOTE(review): diff residue - the single-extension assignment below is the old
# form and is immediately overwritten by the list form that follows, so only the
# two-extension list takes effect.
ext = Extension(name='prrt', language="c", sources=["prrt/*.pyx"])
ext = [Extension(name='prrt', language="c", sources=["prrt/prrt.pyx"]),
Extension(name='restricted_integer_composition', language="c", sources=["prrt/restricted_integer_composition.pyx"])
]
# Remove previously generated C sources so cythonize regenerates them; a missing
# file (ENOENT) is fine, anything else is re-raised.
try:
os.remove(os.path.join(os.path.dirname(os.path.realpath(__file__)), "prrt/prrt.c"))
os.remove(os.path.join(os.path.dirname(os.path.realpath(__file__)), "prrt/restricted_integer_composition.c"))
except OSError as e:
if e.errno != errno.ENOENT:
raise
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment