Commit bda724b7 authored by Ashkan

Edit dataset location.

Add dataset in documents/ml directory.
parent 8e129984
Pipeline #4525 failed with stages in 12 seconds
import fec_balance_utils
-fec_balance_utils_obj = fec_balance_utils.FecBalanceUtils("/home/ashkan/Documents/deep-hec/data/input_predicted_fec_balance")
+fec_balance_utils_obj = fec_balance_utils.FecBalanceUtils("documents/ml/input_predicted_fec_balance")
# ri = fec_balance_utils_obj.get_ri_single_input(35)
# print(ri)
fec_balance_utils_obj.get_all_ri()
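The new dataset paths are relative, so this driver only finds documents/ml/ when it is launched from the project root. A minimal sketch of making it independent of the working directory (DATASET_DIR is a hypothetical name introduced here, not part of the commit):

# Hypothetical sketch, not part of this commit: resolve the dataset directory
# relative to this script's location instead of the current working directory.
import os
import fec_balance_utils

DATASET_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "documents", "ml")
fec_balance_utils_obj = fec_balance_utils.FecBalanceUtils(
    os.path.join(DATASET_DIR, "input_predicted_fec_balance"))
fec_balance_utils_obj.get_all_ri()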
@@ -97,7 +97,7 @@ class FecBalanceUtils:
save_result_to = pd.DataFrame()
total_start = time.time()
for index, row in self.predicted_table_in_df.iterrows():
-if index < 200:
+if index != -1:
print(index)
start_time = time.time()
ri = self.get_ri_single_input(index)
@@ -117,5 +117,5 @@ class FecBalanceUtils:
'time' : duration,
'ri':ri}, ignore_index=True, sort=False)
-save_result_to.to_csv('/home/ashkan/Documents/deep-hec/data/input_predicted_fec_balance_ml_result', sep=',', columns = self.columns_order)
+save_result_to.to_csv('documents/ml/input_predicted_fec_balance_ml_result', sep=',', columns = self.columns_order)
print(time.time() - total_start)
\ No newline at end of file
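Changing the loop guard from index < 200 to index != -1 removes the 200-row cap: with pandas' default integer index the condition is true for every row, so get_all_ri() now walks the entire dataset. A standalone check illustrating this (not part of the commit):

# Illustration only: with a default RangeIndex, index != -1 holds for every row,
# so a guard of this form filters nothing out.
import pandas as pd

df = pd.DataFrame({"x": range(5)})
assert all(index != -1 for index, _ in df.iterrows())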
@@ -22,11 +22,10 @@ from multiprocessing import Pool
# ds_rel_output_path = "documents/output/out_ds17"
#----------------------------------------------------------------
# TODO: Adjust dataset dir
ds_basename = os.listdir("/home/ashkan/Documents/deep-hec/data/")
ds_basename = os.listdir("documents/ml/")
ds_basename = "input_predicted_fec_balance"
ds_rel_input_path = "/home/ashkan/Documents/deep-hec/data/"
ds_rel_output_path = "/home/ashkan/Documents/deep-hec/data/"
ds_rel_input_path = "documents/ml/"
ds_rel_output_path = "documents/ml/"
columns_order = ["app_max_latency", "app_max_residual_loss_rate", "app_data_rate", "app_pkt_length",
"ch_loss_rate", "ch_rtt_prop_fwd", "ch_data_rate_btl_fwd",
@@ -53,7 +52,7 @@ def test_case(dataset_basename):
for df_in_chunk in pd.read_csv(ds_rel_input_path + dataset_basename, sep=',', chunksize=100):
# print(dataset_basename + " started.")
for index, row in df_in_chunk.iterrows():
-if index < 200:
+if index != -1:
print(index)
appParams = prrt.PrrtApplicationParameters(row['app_max_latency'], row['app_max_residual_loss_rate'], row['app_data_rate'], row['app_pkt_length'])
chnlParams = prrt.PrrtChannelParameters(row['ch_loss_rate'], 0, row['ch_rtt_prop_fwd'], 0, row['ch_data_rate_btl_fwd'], 0)
@@ -92,7 +91,7 @@ def test_case(dataset_basename):
# save_result_to.to_csv(ds_rel_output_path + dataset_basename.replace("in", "out", 1), sep=',', index = False, columns = columns_order)
save_result_to.to_csv("/home/ashkan/Documents/deep-hec/data/input_predicted_fec_balance_greedy_result", sep=',', columns=columns_order)
save_result_to.to_csv("documents/ml/input_predicted_fec_balance_greedy_result", sep=',', columns=columns_order)
print(time.time() - total_time)
# print(dataset_basename + " finished.")
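The commented-out to_csv call above suggests deriving the output file from ds_rel_output_path and the dataset basename rather than hard-coding it. A minimal sketch along those lines (assuming the in/out naming convention from that comment; not verified against the rest of the repository):

# Hypothetical alternative to the hard-coded output path: reuse the configured
# output directory and derive the file name from the input basename.
out_path = ds_rel_output_path + dataset_basename.replace("in", "out", 1)
save_result_to.to_csv(out_path, sep=',', index=False, columns=columns_order)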