diff --git a/.idea/SPRINT.iml b/.idea/SPRINT.iml
new file mode 100644
index 0000000..d0876a7
--- /dev/null
+++ b/.idea/SPRINT.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/encodings.xml b/.idea/encodings.xml
new file mode 100644
index 0000000..15a15b2
--- /dev/null
+++ b/.idea/encodings.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000..e51e6d7
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..a778911
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/workspace.xml b/.idea/workspace.xml
new file mode 100644
index 0000000..7a0ca29
--- /dev/null
+++ b/.idea/workspace.xml
@@ -0,0 +1,33 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/README.md b/README.md
index d4312ff..76f57f0 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@ Incorporating the **C**onstellation **I**nvestigation **R**epository with **C**o
# General Setup
-1. Clone the repo: `git clone git@github.com:MIT-STARLab/SPRINT.git`
+1. Clone the repo: `git clone git@github.mit.edu:star-lab/SPRINT.git`
1. Init the appropriate submodules:
1. `cd SPRINT/source`
1. `git submodule init circinus_global_planner circinus_orbit_link_public circinus_orbit_propagation circinus_orbit_viz circinus_sim circinus_tools`
@@ -40,7 +40,20 @@ Incorporating the **C**onstellation **I**nvestigation **R**epository with **C**o
1. Navigate to `SPRINT/scripts`
1. Run simulation:
a. `./run_const_sim.sh --use orig_circinus_zhou` to specify a case corresponding to `inputs/cases/orig_circinus_zhou`.
- b. `./run_const_sim.sh --help` for a description of the other options.
+ b. `./run_const_sim.sh --help` for a description of the other options.
+
+## Separated Simulation Demo
+This simulation can be run such that each satellite runs on separate hardware, say a Raspberry Pi. The following demo is for a 2-satellite constellation.
+1. For the ground station network:
+   a. Navigate to `SPRINT/scripts`
+ b. `./run_const_sim.sh --use circinus_zhou_2_sats --ground` to specify a two-satellite case and to run the ground station network part
+2. For each satellite:
+ a. Navigate to `SPRINT/scripts`
+ b. `./run_const_sim.sh --use circinus_zhou_2_sats --satellite` to specify a two-satellite case and to run the satellite part
+
+The satellites can be initialized before the ground station network; however, satellites are given 100 tries to connect to the ground station network, once every second. If the ground station network isn't initialized in time, the satellite program exits.
+
+
# Submodule dependencies
* [circinus_global_planner](https://github.com/MIT-STARLab/circinus_global_planner)
@@ -53,4 +66,4 @@ Incorporating the **C**onstellation **I**nvestigation **R**epository with **C**o
These should be managed as if they are independent and up to date with their own master, before committing the folder from the the SPRINT main repository (which then tracks the commit of the subrepo).
# History
-SPRINT was initiated as CIRCINUS, by [apollokit](https://github.com/apollokit)
+SPRINT was initiated as CIRCINUS, by [apollokit](https://github.com/apollokit).
diff --git a/inputs/SRP_Zhou_scripts/analyze_multirun_tests.py b/inputs/SRP_Zhou_scripts/analyze_multirun_tests.py
new file mode 100644
index 0000000..7dcd53a
--- /dev/null
+++ b/inputs/SRP_Zhou_scripts/analyze_multirun_tests.py
@@ -0,0 +1,177 @@
+# this file is for generating plots / outputs from
+# the json files in this folder
+import json
+import matplotlib.pyplot as plt
+import numpy as np
+
+SRP_settings = [True, False]
+GS_disruptions = ['None','G0','G1','G2']
+
+# grab all data
+all_data = {}
+for SRP_setting in SRP_settings:
+ for GS_disruption in GS_disruptions:
+ cur_str = 'SRP_Test_SRP_%s_GS_%s' % (SRP_setting, GS_disruption)
+
+ with open('.\\multirun_tests\\' + cur_str + '.json', "r") as jsonFile:
+ all_data[cur_str] = json.load(jsonFile)
+
+print('All Data Loaded')
+
+print('test time')
+# initialize all data structs
+total_failures = []
+
+median_data_margin_prcnt = []
+prcntl25_ave_d_margin_prcnt = []
+prcntl75_ave_d_margin_prcnt = []
+
+median_energy_margin_prcnt = []
+prcntl25_ave_e_margin_prcnt = []
+prcntl75_ave_e_margin_prcnt = []
+
+exec_over_poss = []
+total_exec_dv = []
+total_poss_dv = []
+
+median_obs_initial_lat_exec = [] # initial means the first part of the data container downlinked
+prcntl25_obs_initial_lat_exec = []
+prcntl75_obs_initial_lat_exec = []
+
+median_av_aoi_exec = []
+prcntl25_av_aoi_exec = []
+prcntl75_av_aoi_exec = []
+
+# MAKE DATA STRUCTS FOR BAR CHARTS
+for ind,SRP_setting in enumerate(SRP_settings):
+ total_failures.append([])
+ median_data_margin_prcnt.append([])
+ prcntl25_ave_d_margin_prcnt.append([])
+ prcntl75_ave_d_margin_prcnt.append([])
+ median_energy_margin_prcnt.append([])
+ prcntl25_ave_e_margin_prcnt.append([])
+ prcntl75_ave_e_margin_prcnt.append([])
+ exec_over_poss.append([])
+ median_obs_initial_lat_exec.append([])
+ prcntl25_obs_initial_lat_exec.append([])
+ prcntl75_obs_initial_lat_exec.append([])
+ median_av_aoi_exec.append([])
+ prcntl25_av_aoi_exec.append([])
+ prcntl75_av_aoi_exec.append([])
+ for GS_disruption in GS_disruptions:
+        cur_str = 'SRP_Test_SRP_%s_GS_%s' % (SRP_setting, GS_disruption)
+ cur_data = all_data[cur_str]
+ # Activity Failures
+ total_failures[ind].append(sum(cur_data['Num Failures by Type'].values()))
+
+ # Data Margin levels
+ median_data_margin_prcnt[ind].append(cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'])
+ prcntl25_ave_d_margin_prcnt[ind].append(cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'] - cur_data['d_rsrc_stats']['prcntl25_ave_d_margin_prcnt'])
+ prcntl75_ave_d_margin_prcnt[ind].append(cur_data['d_rsrc_stats']['prcntl75_ave_d_margin_prcnt'] - cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'])
+
+ # Energy Margin levels
+ median_energy_margin_prcnt[ind].append(cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt'])
+ prcntl25_ave_e_margin_prcnt[ind].append(cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt']-cur_data['e_rsrc_stats']['prcntl25_ave_e_margin_prcnt'])
+ prcntl75_ave_e_margin_prcnt [ind].append(cur_data['e_rsrc_stats']['prcntl75_ave_e_margin_prcnt']-cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt'])
+
+ # METRICS
+ # DV % throughput
+ exec_over_poss[ind].append(cur_data['dv_stats']['exec_over_poss']*100)
+
+ # Obs Latency
+ median_obs_initial_lat_exec[ind].append(cur_data['lat_stats']['median_obs_initial_lat_exec'])
+ prcntl25_obs_initial_lat_exec[ind].append(cur_data['lat_stats']['median_obs_initial_lat_exec'] - cur_data['lat_stats']['prcntl25_obs_initial_lat_exec'])
+ prcntl75_obs_initial_lat_exec[ind].append(cur_data['lat_stats']['prcntl75_obs_initial_lat_exec'] - cur_data['lat_stats']['median_obs_initial_lat_exec'])
+
+ # AoI
+ median_av_aoi_exec[ind].append(cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'])
+ prcntl25_av_aoi_exec[ind].append(cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'] - cur_data['obs_aoi_stats_w_routing']['prcntl25_av_aoi_exec'])
+ prcntl75_av_aoi_exec[ind].append(cur_data['obs_aoi_stats_w_routing']['prcntl75_av_aoi_exec'] - cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'])
+
+def autolabel(rects,axis):
+ """
+ Attach a text label above each bar displaying its height
+ from: https://matplotlib.org/examples/api/barchart_demo.html
+ """
+ for rect in rects:
+ height = rect.get_height()
+ axis.text(rect.get_x() + rect.get_width()/4., height,
+ '%d' % int(height),
+ ha='center', va='bottom')
+
+def double_bar_graph(ax,N,data,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr = [None, None], legendFlag = True, colorStrs = ['b','gray'],width=0.35,):
+
+ if len(data) != 2:
+ raise Exception('Need exactly 2 data sets')
+
+ if N != len(data[0]) or N != len(data[1]) or N != len(xTickLabels):
+ raise Exception('number of bar graphs does not match data and/or tick labels supplied')
+
+ ind = np.arange(N) # the x locations for the groups
+
+ rects1 = ax.bar(ind, data[0], width, color=colorStrs[0], yerr= yerr[0])
+ rects2 = ax.bar(ind + width, data[1], width, color=colorStrs[1], yerr= yerr[1])
+ ax.set_ylabel(yLabelStr)
+ ax.set_title(titleStr)
+ ax.set_xticks(ind + width / 2)
+ ax.set_xlabel(xLabelStr)
+ ax.set_xticklabels(tuple(xTickLabels))
+ if legendFlag:
+ ax.legend((rects1[0], rects2[0]), tuple(legendStrs))
+ autolabel(rects1,ax)
+ autolabel(rects2,ax)
+
+ return ax
+
+# MAKE PLOTS
+N = 4 # maybe change to 4 if we add nominal case
+width = 0.35 # the width of the bars
+xLabelStr = 'Ground Station Failures'
+xTickLabels = ('None','G0 - 24 hrs', 'G1 - 12 hrs', 'G2 - 24 hrs')
+legendStrs = ('SRP On', 'SRP Off')
+
+############# one plot for total failures ####################
+fig, ax = plt.subplots()
+yLabelStr = 'Total Activity Failures (#)'
+titleStr = 'Activity Failures with SRP on/off'
+double_bar_graph(ax,N,total_failures,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs)
+
+###### one plot with two subplots (one for each state margin level) ######
+fig, ax1 = plt.subplots(nrows=1, ncols=1)
+yLabelStr = 'Data Margin (%)'
+titleStr = 'Data Margin Levels with SRP on/off'
+d_yerr = (np.asarray([prcntl25_ave_d_margin_prcnt[0],prcntl75_ave_d_margin_prcnt[0]]),np.asarray([prcntl25_ave_d_margin_prcnt[1],prcntl75_ave_d_margin_prcnt[1]]))
+double_bar_graph(ax1,N,median_data_margin_prcnt,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=d_yerr)
+
+""" yLabelStr = 'Energy Margin (%)'
+titleStr = 'Energy Margin Levels with SRP on/off'
+e_yerr = (np.asarray([prcntl25_ave_e_margin_prcnt[0],prcntl75_ave_e_margin_prcnt[0]]),np.asarray([prcntl25_ave_e_margin_prcnt[1],prcntl75_ave_e_margin_prcnt[1]]))
+double_bar_graph(ax2,N,median_energy_margin_prcnt,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=e_yerr) """
+
+
+###### one plot with a three subplots (one for each metric) ###
+# Data Throughput Percentage
+fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1)
+#titleStr = 'Metrics with SRP on/off'
+yLabelStr = 'Data Throughput - Exec / Poss (%)'
+titleStr = 'DV Throughput with SRP on/off'
+xLabelStr = ''
+double_bar_graph(ax1,N,exec_over_poss,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,legendFlag = False)
+
+
+xLabelStr = 'Ground Station Failures'
+# Median Latency
+yLabelStr = 'Observation Latency (min)'
+titleStr = 'Observation Initial Data Packet Latency with SRP on/off'
+lat_yerr = (np.asarray([prcntl25_obs_initial_lat_exec[0],prcntl75_obs_initial_lat_exec[0]]),np.asarray([prcntl25_obs_initial_lat_exec[1],prcntl75_obs_initial_lat_exec[1]]))
+double_bar_graph(ax2,N,median_obs_initial_lat_exec,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=lat_yerr,legendFlag = False)
+
+""" # Median AoI
+yLabelStr = 'Age of Information (hours)'
+#titleStr = 'Observation Initial Data Packet Latency with SRP on/off'
+aoi_yerr = (np.asarray([prcntl25_av_aoi_exec[0],prcntl75_av_aoi_exec[0]]),np.asarray([prcntl25_av_aoi_exec[1],prcntl75_av_aoi_exec[1]]))
+double_bar_graph(ax3,N,median_av_aoi_exec,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=aoi_yerr) """
+### SHOW PLOTS ###
+plt.show()
+
+
diff --git a/inputs/SRP_Zhou_scripts/test_runner.py b/inputs/SRP_Zhou_scripts/test_runner.py
new file mode 100644
index 0000000..5074f8c
--- /dev/null
+++ b/inputs/SRP_Zhou_scripts/test_runner.py
@@ -0,0 +1,49 @@
+# this file is intended for setting up and running multiple SPRINT runs, where the config is changed
+# in between runs
+import json
+from subprocess import Popen
+# things to modify
+# r'..\inputs\reference_model_definitions\sat_refs\zhou_original_sat.json': NVM - only doing Xlnk-always
+
+# setup things to step through
+# r'..\inputs\cases\orig_circinus\zhou\sim_case_config.json': ['scenario_params']['sim_run_perturbations']['schedule_disruptions']
+schedule_disruptions_list = [
+ {"G0": [["2016-02-14T04:00:00.000000Z","2016-02-15T04:00:00.000000Z"]]},
+ {"G1": [["2016-02-14T04:00:00.000000Z","2016-02-14T16:00:00.000000Z"]]},
+ {"G2": [["2016-02-14T04:00:00.000000Z","2016-02-15T04:00:00.000000Z"]]}
+]
+
+# r'..\inputs\general_config\lp_general_params_inputs.json': ['lp_general_params']['use_self_replanner']
+SRP_settings_list = [True, False]
+
+SD_file = r'C:\Users\User\circinusGit\SPRINT\inputs\cases\orig_circinus_zhou\sim_case_config.json'
+SRP_file = r'C:\Users\User\circinusGit\SPRINT\inputs\\general_config\lp_general_params_inputs.json'
+scripts_folder = r"C:\Users\User\circinusGit\SPRINT\scripts"
+# NOTE: NEED TO BE IN SCRIPTS DIRECTORY TO FIND windows_env_var_setup.bat
+for SD_setting in schedule_disruptions_list:
+
+ with open(SD_file, "r") as jsonFile:
+ data = json.load(jsonFile)
+
+ data['scenario_params']['sim_run_perturbations']['schedule_disruptions'] = SD_setting
+
+ print('Setting schedule disruptions to: %s' % SD_setting)
+ with open(SD_file, "w") as jsonFile:
+ json.dump(data, jsonFile, indent=4, separators=(',', ': '))
+
+ for SRP_setting in SRP_settings_list:
+ with open(SRP_file, "r") as jsonFile:
+ data = json.load(jsonFile)
+
+ data['lp_general_params']['use_self_replanner'] = SRP_setting
+
+ print('Setting SRP to: %s' % SRP_setting)
+ with open(SRP_file, "w") as jsonFile:
+ json.dump(data, jsonFile, indent=4, separators=(',', ': '))
+
+ print('New Settings Set - run batch file')
+
+ # python runner_const_sim.py --inputs_location /c/Users/wcgru/Documents/GitHubClones/SPRINT/scripts/../inputs --case_name orig_circinus_zhou --restore_pickle "" --remote_debug false
+
+ p = Popen(r"C:\Users\User\circinusGit\SPRINT\scripts\windows_launcher.bat")
+ stdout, stderr = p.communicate()
\ No newline at end of file
diff --git a/inputs/analyze_multirun_tests.py b/inputs/analyze_multirun_tests.py
new file mode 100644
index 0000000..9e070b3
--- /dev/null
+++ b/inputs/analyze_multirun_tests.py
@@ -0,0 +1,389 @@
+# this file is for generating plots / outputs from
+# the json files in this folder
+import json
+import matplotlib.pyplot as plt
+import numpy as np
+
+SRP_settings = [True, False]
+
+n_targs = 15
+total_targs = 100
+num_sats = 30
+#targ_subsets = [list(range(40,60)),list(range(0,total_targs,int(total_targs/n_targs)))] # first is all equatorial set, 2nd is spread out at all latitudes
+targ_subsets = [list(range(40,60))]
+#GS_subsets = [[3,5,6,14],[13,14,15,3,4,5,6,8,12]]
+GS_subsets = [[3,5,6,14]]
+plot_nominal = False
+plot_disruption_case = not plot_nominal
+average_across_targets = False
+
+
+# grab all data
+all_data = {}
+if plot_disruption_case:
+ # SELECT USE CASE to plot
+ g_select = 0
+ t_select = 0
+ # LOAD DISRUPTED DATA SET
+ for g_setting_ind,GS_subset in enumerate(GS_subsets):
+ for t_setting_ind, t_subset in enumerate(targ_subsets):
+ setting_name = 'setGS_%d_setT_%d' % (g_setting_ind,t_setting_ind)
+ for SRP_setting in SRP_settings:
+
+ GS_disruptions = [
+ "G%d" % GS_subset[0],
+ "G%d" % GS_subset[1],
+ "G%d" % GS_subset[2]
+ ]
+ for GS_disruption in GS_disruptions:
+ scenario_name = 'WALKER_%d_SRP_Test_SRP_%s_GS_%s_%s' % (num_sats,SRP_setting, GS_disruption,setting_name)
+
+ with open('.\\multirun_tests\\' + scenario_name + '.json', "r") as jsonFile:
+ all_data[scenario_name] = json.load(jsonFile)
+
+ if g_setting_ind == g_select and t_setting_ind == t_select:
+ plot_disruption_str = setting_name
+ plot_GS_disruptions = GS_disruptions
+
+if plot_nominal:
+ # LOAD NOMINAL DATA
+ nominal_names = []
+ nominal_names_full = []
+ for g_setting_ind,GS_subset in enumerate(GS_subsets):
+ for t_setting_ind, t_subset in enumerate(targ_subsets):
+ setting_name = 'setGS_%d_setT_%d' % (g_setting_ind,t_setting_ind)
+ scenario_name = 'WALKER_%d_Nominal_%s' % (num_sats,setting_name)
+ nominal_names.append('G%d_T%d' % (g_setting_ind,t_setting_ind))
+ nominal_names_full.append(scenario_name)
+ with open('.\\multirun_tests\\' + scenario_name + '.json', "r") as jsonFile:
+ all_data[scenario_name] = json.load(jsonFile)
+
+print('All Data Loaded')
+
+# helper functions
+def autolabel(rects,axis):
+ """
+ Attach a text label above each bar displaying its height
+ from: https://matplotlib.org/examples/api/barchart_demo.html
+ """
+ for rect in rects:
+ height = rect.get_height()
+ axis.text(rect.get_x() + rect.get_width()/2.5, height,
+ '%.2f' % height,
+ ha='center', va='bottom')
+
+def double_bar_graph(ax,N,data,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr = [None, None], legendFlag = True, colorStrs = ['b','gray'],width=0.35,):
+
+ if len(data) != 2:
+ raise Exception('Need exactly 2 data sets')
+
+ if N != len(data[0]) or N != len(data[1]) or N != len(xTickLabels):
+ raise Exception('number of bar graphs does not match data and/or tick labels supplied')
+
+ ind = np.arange(N) # the x locations for the groups
+
+ rects1 = ax.bar(ind, data[0], width, color=colorStrs[0], yerr= yerr[0])
+ rects2 = ax.bar(ind + width, data[1], width, color=colorStrs[1], yerr= yerr[1])
+ ax.set_ylabel(yLabelStr)
+ ax.set_title(titleStr)
+ ax.set_xticks(ind + width / 2)
+ ax.set_xlabel(xLabelStr)
+ ax.set_xticklabels(tuple(xTickLabels))
+ if legendFlag:
+ ax.legend((rects1[0], rects2[0]), tuple(legendStrs))
+ autolabel(rects1,ax)
+ autolabel(rects2,ax)
+
+ return ax
+
+def single_bar_graph(ax,N,data,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr = None, legendFlag = True, colorStrs = 'b',width=0.35,):
+
+
+ if N != len(data) or N != len(xTickLabels):
+ raise Exception('number of bar graphs does not match data and/or tick labels supplied')
+
+ ind = np.arange(N) # the x locations for the groups
+
+ rects1 = ax.bar(ind, data, width, color=colorStrs, yerr= yerr)
+ ax.set_ylabel(yLabelStr)
+ ax.set_title(titleStr)
+ ax.set_xticks(ind + width / 2)
+ ax.set_xlabel(xLabelStr)
+ ax.set_xticklabels(tuple(xTickLabels))
+ if legendFlag:
+ ax.legend((rects1), tuple(legendStrs))
+ autolabel(rects1,ax)
+
+ return ax
+
+# initialize all data structs
+total_failures = []
+per_failures_xlnk = []
+per_failures_dlnk = []
+
+
+median_data_margin_prcnt = []
+prcntl25_ave_d_margin_prcnt = []
+prcntl75_ave_d_margin_prcnt = []
+
+median_energy_margin_prcnt = []
+prcntl25_ave_e_margin_prcnt = []
+prcntl75_ave_e_margin_prcnt = []
+
+exec_over_poss = []
+exec_dv_GB = []
+poss_dv_GB= []
+
+median_obs_initial_lat_exec = [] # initial means the first part of the data container downlinked
+prcntl25_obs_initial_lat_exec = []
+prcntl75_obs_initial_lat_exec = []
+
+median_av_aoi_exec = []
+prcntl25_av_aoi_exec = []
+prcntl75_av_aoi_exec = []
+
+# PLOTS FOR NOMINAL CASE
+if plot_nominal:
+ for ind,scenario_name in enumerate(nominal_names_full):
+ """ per_failures_xlnk.append([])
+ per_failures_dlnk.append([])
+ median_data_margin_prcnt.append([])
+ prcntl25_ave_d_margin_prcnt.append([])
+ prcntl75_ave_d_margin_prcnt.append([])
+ median_energy_margin_prcnt.append([])
+ prcntl25_ave_e_margin_prcnt.append([])
+ prcntl75_ave_e_margin_prcnt.append([])
+ exec_over_poss.append([])
+ median_obs_initial_lat_exec.append([])
+ prcntl25_obs_initial_lat_exec.append([])
+ prcntl75_obs_initial_lat_exec.append([])
+ median_av_aoi_exec.append([])
+ prcntl25_av_aoi_exec.append([])
+ prcntl75_av_aoi_exec.append([]) """
+
+ cur_data = all_data[scenario_name]
+
+ # Activity Failures
+ per_failures_xlnk.append(cur_data['Percentage of Exec Act Failures by Act']['xlnk'])
+ per_failures_dlnk.append(cur_data['Percentage of Exec Act Failures by Act']['dlnk'])
+
+ # Data Margin levels
+ median_data_margin_prcnt.append(cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'])
+ prcntl25_ave_d_margin_prcnt.append(cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'] - cur_data['d_rsrc_stats']['prcntl25_ave_d_margin_prcnt'])
+ prcntl75_ave_d_margin_prcnt.append(cur_data['d_rsrc_stats']['prcntl75_ave_d_margin_prcnt'] - cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'])
+
+ # Energy Margin levels
+ median_energy_margin_prcnt.append(cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt'])
+ prcntl25_ave_e_margin_prcnt.append(cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt']-cur_data['e_rsrc_stats']['prcntl25_ave_e_margin_prcnt'])
+ prcntl75_ave_e_margin_prcnt .append(cur_data['e_rsrc_stats']['prcntl75_ave_e_margin_prcnt']-cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt'])
+
+ # METRICS
+ # DV % throughput
+ exec_over_poss.append(cur_data['dv_stats']['exec_over_poss']*100)
+ exec_dv_GB.append(cur_data['dv_stats']['total_exec_dv']/1000)
+ poss_dv_GB.append(cur_data['dv_stats']['total_poss_dv']/1000)
+
+ # Obs Latency
+ median_obs_initial_lat_exec.append(cur_data['lat_stats']['median_obs_initial_lat_exec'])
+ prcntl25_obs_initial_lat_exec.append(cur_data['lat_stats']['median_obs_initial_lat_exec'] - cur_data['lat_stats']['prcntl25_obs_initial_lat_exec'])
+ prcntl75_obs_initial_lat_exec.append(cur_data['lat_stats']['prcntl75_obs_initial_lat_exec'] - cur_data['lat_stats']['median_obs_initial_lat_exec'])
+
+ # AoI
+ median_av_aoi_exec.append(cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'])
+ prcntl25_av_aoi_exec.append(cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'] - cur_data['obs_aoi_stats_w_routing']['prcntl25_av_aoi_exec'])
+ prcntl75_av_aoi_exec.append(cur_data['obs_aoi_stats_w_routing']['prcntl75_av_aoi_exec'] - cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'])
+
+ # MAKE PLOTS
+ N = 1 # maybe change to 4 if we add nominal case
+ width = 0.35 # the width of the bars
+ xLabelStr = 'Nominal Case ID'
+ titleMiniStr = 'in %d Sat Walker Nominal Cases' % num_sats
+ xTickLabels = tuple(nominal_names)
+
+ ############# one plot for total failures ####################
+ fig, ax = plt.subplots()
+ yLabelStr = 'Percentage Activity Failures (%)'
+ titleStr = 'Percentage Activity Failures ' + titleMiniStr
+ legendStrs = ('Xlnks', 'Dlnks')
+ #percentage_failures = np.concatenate((np.asarray(per_failures_xlnk),np.asarray(per_failures_dlnk)),axis = 1)
+ percentage_failures = [per_failures_xlnk, per_failures_dlnk]
+ double_bar_graph(ax,N,percentage_failures,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,colorStrs = ['g','b'])
+
+ ###### one plot with two subplots (one for each state margin level) ######
+ fig, ax1 = plt.subplots(nrows=1, ncols=1)
+ yLabelStr = 'Data Margin (%)'
+ titleStr = 'Data Margin Levels '+ titleMiniStr
+ d_yerr = (np.asarray([prcntl25_ave_d_margin_prcnt,prcntl75_ave_d_margin_prcnt]))
+ single_bar_graph(ax1,N,median_data_margin_prcnt,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=d_yerr,legendFlag = False)
+
+ ###### one plot with a three subplots (one for each metric) ###
+ # Data Throughput Percentage
+ fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1)
+ #titleStr = 'Metrics with SRP on/off'
+ yLabelStr = 'Data Throughput - Exec & Poss (GB)'
+ titleStr = 'DV Throughput '+ titleMiniStr
+ xLabelStr = ''
+ legendStrs = ('Exec (GB)', 'Poss (GB)')
+ exec_and_poss = [exec_dv_GB, poss_dv_GB]
+ double_bar_graph(ax1,N,exec_and_poss,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs)
+
+
+ xLabelStr = 'Nominal Case Id'
+ # Median Latency
+ yLabelStr = 'Observation Latency (min)'
+ titleStr = 'Observation Initial Data Packet Latency '+ titleMiniStr
+ lat_yerr = (np.asarray([prcntl25_obs_initial_lat_exec,prcntl75_obs_initial_lat_exec]))
+ single_bar_graph(ax2,N,median_obs_initial_lat_exec,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=lat_yerr,legendFlag = False)
+
+ """ # Median AoI
+ yLabelStr = 'Age of Information (hours)'
+ #titleStr = 'Observation Initial Data Packet Latency with SRP on/off'
+ aoi_yerr = (np.asarray([prcntl25_av_aoi_exec[0],prcntl75_av_aoi_exec[0]]),np.asarray([prcntl25_av_aoi_exec[1],prcntl75_av_aoi_exec[1]]))
+ double_bar_graph(ax3,N,median_av_aoi_exec,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=aoi_yerr) """
+ ### SHOW PLOTS ###
+ plt.show()
+
+if plot_disruption_case:
+ # MAKE DATA STRUCTS FOR BAR CHARTS
+ for ind,SRP_setting in enumerate(SRP_settings):
+ total_failures.append([])
+ median_data_margin_prcnt.append([])
+ prcntl25_ave_d_margin_prcnt.append([])
+ prcntl75_ave_d_margin_prcnt.append([])
+ median_energy_margin_prcnt.append([])
+ prcntl25_ave_e_margin_prcnt.append([])
+ prcntl75_ave_e_margin_prcnt.append([])
+ exec_over_poss.append([])
+ median_obs_initial_lat_exec.append([])
+ prcntl25_obs_initial_lat_exec.append([])
+ prcntl75_obs_initial_lat_exec.append([])
+ median_av_aoi_exec.append([])
+ prcntl25_av_aoi_exec.append([])
+ prcntl75_av_aoi_exec.append([])
+ for GS_disruption in plot_GS_disruptions:
+ if average_across_targets:
+ total_failures_temp = 0
+ median_data_margin_prcnt_temp = 0
+ prcntl25_ave_d_margin_prcnt_temp = 0
+ prcntl75_ave_d_margin_prcnt_temp = 0
+ median_energy_margin_prcnt_temp = 0
+ prcntl25_ave_e_margin_prcnt_temp = 0
+ prcntl75_ave_e_margin_prcnt_temp = 0
+ exec_over_poss_temp = 0
+ median_obs_initial_lat_exec_temp = 0
+ prcntl25_obs_initial_lat_exec_temp = 0
+ prcntl75_obs_initial_lat_exec_temp = 0
+ median_av_aoi_exec_temp = 0
+ prcntl25_av_aoi_exec_temp = 0
+ prcntl75_av_aoi_exec_temp = 0
+
+ num_cases = 0
+ for t_setting_ind, t_subset in enumerate(targ_subsets):
+ num_cases += 1
+ setting_name = 'setGS_%d_setT_%d' % (g_select,t_setting_ind)
+ cur_str = 'WALKER_%d_SRP_Test_SRP_%s_GS_%s_%s' % (num_sats,SRP_setting, GS_disruption,setting_name)
+ cur_data = all_data[cur_str]
+
+ total_failures_temp += sum(cur_data['Percentage of Exec Act Failures by Act'].values())/3
+ median_data_margin_prcnt_temp += cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt']
+ prcntl25_ave_d_margin_prcnt_temp += cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'] - cur_data['d_rsrc_stats']['prcntl25_ave_d_margin_prcnt']
+ prcntl75_ave_d_margin_prcnt_temp += cur_data['d_rsrc_stats']['prcntl75_ave_d_margin_prcnt'] - cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt']
+
+ exec_over_poss_temp += cur_data['dv_stats']['exec_over_poss']*100
+ median_obs_initial_lat_exec_temp += cur_data['lat_stats']['median_obs_initial_lat_exec']
+ prcntl25_obs_initial_lat_exec_temp += cur_data['lat_stats']['median_obs_initial_lat_exec'] - cur_data['lat_stats']['prcntl25_obs_initial_lat_exec']
+ prcntl75_obs_initial_lat_exec_temp += cur_data['lat_stats']['prcntl75_obs_initial_lat_exec'] - cur_data['lat_stats']['median_obs_initial_lat_exec']
+
+ total_failures[ind].append(total_failures_temp/num_cases)
+
+ # Data Margin levels
+ median_data_margin_prcnt[ind].append(median_data_margin_prcnt_temp/num_cases)
+ prcntl25_ave_d_margin_prcnt[ind].append(prcntl25_ave_d_margin_prcnt_temp/num_cases)
+ prcntl75_ave_d_margin_prcnt[ind].append(prcntl75_ave_d_margin_prcnt_temp/num_cases)
+
+ # METRICS
+ # DV % throughput
+ exec_over_poss[ind].append(exec_over_poss_temp/num_cases)
+
+ # Obs Latency
+ median_obs_initial_lat_exec[ind].append(median_obs_initial_lat_exec_temp/num_cases)
+ prcntl25_obs_initial_lat_exec[ind].append(prcntl25_obs_initial_lat_exec_temp/num_cases)
+            prcntl75_obs_initial_lat_exec[ind].append(prcntl75_obs_initial_lat_exec_temp/num_cases)
+
+ else:
+ cur_str = 'WALKER_%d_SRP_Test_SRP_%s_GS_%s_%s' % (num_sats,SRP_setting, GS_disruption,plot_disruption_str)
+ cur_data = all_data[cur_str]
+ # Activity Failures
+ total_failures[ind].append(sum(cur_data['Percentage of Exec Act Failures by Act'].values())/3)
+
+ # Data Margin levels
+ median_data_margin_prcnt[ind].append(cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'])
+ prcntl25_ave_d_margin_prcnt[ind].append(cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'] - cur_data['d_rsrc_stats']['prcntl25_ave_d_margin_prcnt'])
+ prcntl75_ave_d_margin_prcnt[ind].append(cur_data['d_rsrc_stats']['prcntl75_ave_d_margin_prcnt'] - cur_data['d_rsrc_stats']['median_ave_d_margin_prcnt'])
+
+ # Energy Margin levels
+ median_energy_margin_prcnt[ind].append(cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt'])
+ prcntl25_ave_e_margin_prcnt[ind].append(cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt']-cur_data['e_rsrc_stats']['prcntl25_ave_e_margin_prcnt'])
+ prcntl75_ave_e_margin_prcnt [ind].append(cur_data['e_rsrc_stats']['prcntl75_ave_e_margin_prcnt']-cur_data['e_rsrc_stats']['median_ave_e_margin_prcnt'])
+
+ # METRICS
+ # DV % throughput
+ exec_over_poss[ind].append(cur_data['dv_stats']['exec_over_poss']*100)
+
+ # Obs Latency
+ median_obs_initial_lat_exec[ind].append(cur_data['lat_stats']['median_obs_initial_lat_exec'])
+ prcntl25_obs_initial_lat_exec[ind].append(cur_data['lat_stats']['median_obs_initial_lat_exec'] - cur_data['lat_stats']['prcntl25_obs_initial_lat_exec'])
+ prcntl75_obs_initial_lat_exec[ind].append(cur_data['lat_stats']['prcntl75_obs_initial_lat_exec'] - cur_data['lat_stats']['median_obs_initial_lat_exec'])
+
+ # AoI
+ median_av_aoi_exec[ind].append(cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'])
+ prcntl25_av_aoi_exec[ind].append(cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'] - cur_data['obs_aoi_stats_w_routing']['prcntl25_av_aoi_exec'])
+ prcntl75_av_aoi_exec[ind].append(cur_data['obs_aoi_stats_w_routing']['prcntl75_av_aoi_exec'] - cur_data['obs_aoi_stats_w_routing']['median_av_aoi_exec'])
+
+ # MAKE PLOTS
+ N = 3 # maybe change to 4 if we add nominal case
+ width = 0.35 # the width of the bars
+ xLabelStr = 'Ground Station Failures'
+ xTickLabels = tuple(plot_GS_disruptions)
+ legendStrs = ('SRP On', 'SRP Off')
+
+ ############# one plot for total failures ####################
+ fig, ax = plt.subplots()
+ yLabelStr = 'Average Activity Failures (%)'
+ titleStr = 'Activity Failures % with SRP on/off'
+ double_bar_graph(ax,N,total_failures,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs)
+
+ ###### one plot with two subplots (one for each state margin level) ######
+ fig, ax1 = plt.subplots(nrows=1, ncols=1)
+ yLabelStr = 'Data Margin (%)'
+ titleStr = 'Data Margin Levels with SRP on/off'
+ d_yerr = (np.asarray([prcntl25_ave_d_margin_prcnt[0],prcntl75_ave_d_margin_prcnt[0]]),np.asarray([prcntl25_ave_d_margin_prcnt[1],prcntl75_ave_d_margin_prcnt[1]]))
+ double_bar_graph(ax1,N,median_data_margin_prcnt,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=d_yerr)
+
+
+ ###### one plot with a three subplots (one for each metric) ###
+ # Data Throughput Percentage
+ fig, (ax1, ax2) = plt.subplots(nrows=2, ncols=1)
+ #titleStr = 'Metrics with SRP on/off'
+ yLabelStr = 'Data Throughput - Exec / Poss (%)'
+ titleStr = 'DV Throughput with SRP on/off'
+ xLabelStr = ''
+ double_bar_graph(ax1,N,exec_over_poss,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs)
+
+
+ xLabelStr = 'Ground Station Failures'
+ # Median Latency
+ yLabelStr = 'Observation Latency (min)'
+ titleStr = 'Observation Initial Data Packet Latency with SRP on/off'
+ lat_yerr = (np.asarray([prcntl25_obs_initial_lat_exec[0],prcntl75_obs_initial_lat_exec[0]]),np.asarray([prcntl25_obs_initial_lat_exec[1],prcntl75_obs_initial_lat_exec[1]]))
+ double_bar_graph(ax2,N,median_obs_initial_lat_exec,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=lat_yerr,legendFlag = False)
+
+ """ # Median AoI
+ yLabelStr = 'Age of Information (hours)'
+ #titleStr = 'Observation Initial Data Packet Latency with SRP on/off'
+ aoi_yerr = (np.asarray([prcntl25_av_aoi_exec[0],prcntl75_av_aoi_exec[0]]),np.asarray([prcntl25_av_aoi_exec[1],prcntl75_av_aoi_exec[1]]))
+ double_bar_graph(ax3,N,median_av_aoi_exec,yLabelStr,titleStr,xLabelStr,xTickLabels,legendStrs,yerr=aoi_yerr) """
+ ### SHOW PLOTS ###
+ plt.show()
+
+
diff --git a/inputs/cases/ONLINE_OPS/constellation_config.json b/inputs/cases/ONLINE_OPS/constellation_config.json
index eac692a..2843a81 100644
--- a/inputs/cases/ONLINE_OPS/constellation_config.json
+++ b/inputs/cases/ONLINE_OPS/constellation_config.json
@@ -1,36 +1,38 @@
{
- "config_type":"constellation_config",
- "version-const_def":"0.0.1",
- "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
-
- "constellation_definition":{
- "version-const":"0.0.1",
-
- "default_sat_ref_model_name":"zhou_original_sat",
-
- "constellation_params":{
- "num_satellites": 6,
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 6,
"sat_id_prefix": "S",
"sat_ids": "duplicate,range_inclusive,0,5",
"sat_id_order": "default",
-
- "_comments" : [
+ "_comments": [
"orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
""
],
"intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
-
- "orbit_params" : {
+ "orbit_params": {
"sat_ids_by_orbit_name": {
- "orbit0": ["S0","S1","S2"],
- "orbit1": ["S3","S4","S5"]
+ "orbit0": [
+ "S0",
+ "S1",
+ "S2"
+ ],
+ "orbit1": [
+ "S3",
+ "S4",
+ "S5"
+ ]
},
"sat_orbital_elems": [
{
"sat_id": "S0",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 97.86,
"RAAN_deg": 0,
@@ -41,9 +43,9 @@
},
{
"sat_id": "S1",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 97.86,
"RAAN_deg": 0,
@@ -54,9 +56,9 @@
},
{
"sat_id": "S2",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 97.86,
"RAAN_deg": 0,
@@ -67,9 +69,9 @@
},
{
"sat_id": "S3",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 83.86,
"RAAN_deg": 0,
@@ -80,9 +82,9 @@
},
{
"sat_id": "S4",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 83.86,
"RAAN_deg": 0,
@@ -93,9 +95,9 @@
},
{
"sat_id": "S5",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 83.86,
"RAAN_deg": 0,
@@ -106,9 +108,7 @@
}
]
}
- }
-
+ },
+ "sat_ref_model_name": "zhou_original_sat"
}
-
-
}
\ No newline at end of file
diff --git a/inputs/cases/ONLINE_OPS_1_SAT/README.md b/inputs/cases/ONLINE_OPS_1_SAT/README.md
new file mode 100644
index 0000000..6792e24
--- /dev/null
+++ b/inputs/cases/ONLINE_OPS_1_SAT/README.md
@@ -0,0 +1,8 @@
+# Instructions for ONLINE_OPS_1_SAT
+
+Use of ONLINE OPS still requires the same basic case-config files as a normal case. They can be left empty of some specifics (satellites, ground stations, start/stop time), which can be filled in by users via the provided TCP interface, but the files and structures should still be created.
+
+- constellation_config.json
+- ground_station_network_config.json
+- operational_profile_config.json
+- sim_case_config.json
\ No newline at end of file
diff --git a/inputs/cases/ONLINE_OPS_1_SAT/constellation_config.json b/inputs/cases/ONLINE_OPS_1_SAT/constellation_config.json
new file mode 100644
index 0000000..c7354d9
--- /dev/null
+++ b/inputs/cases/ONLINE_OPS_1_SAT/constellation_config.json
@@ -0,0 +1,48 @@
+{
+ "config_type":"constellation_config",
+ "version-const_def":"0.0.1",
+ "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+
+ "constellation_definition":{
+ "version-const":"0.0.1",
+
+ "sat_ref_model_name":"zhou_original_sat",
+
+ "constellation_params":{
+ "num_satellites": 1,
+ "sat_id_prefix": "S",
+ "sat_ids": "duplicate,range_inclusive,0,0",
+ "sat_id_order": "default",
+
+ "_comments" : [
+ "orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
+ ""
+ ],
+ "intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
+
+ "orbit_params" : {
+ "sat_ids_by_orbit_name": {
+ "orbit0": ["S0"]
+ },
+ "sat_orbital_elems": [
+ {
+ "sat_id": "S0",
+ "def_type":"indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 97.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 90
+ },
+ "propagation_method": "matlab_delkep"
+ }
+ ]
+ }
+ }
+
+ }
+
+
+}
diff --git a/inputs/cases/ONLINE_OPS_1_SAT/ground_station_network_config.json b/inputs/cases/ONLINE_OPS_1_SAT/ground_station_network_config.json
new file mode 100644
index 0000000..15d4bd1
--- /dev/null
+++ b/inputs/cases/ONLINE_OPS_1_SAT/ground_station_network_config.json
@@ -0,0 +1,40 @@
+{
+ "config_title":"ground_station_network_config",
+ "version-gsndef":"0.0.1",
+ "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/groundstation_network/README.md",
+
+ "network_definition":{
+ "version-gsn":"0.0.1",
+
+ "default_gs_ref_model_name":"zhou_original_gs",
+
+ "gs_net_params":{
+ "num_stations": 1,
+ "gs_network_name": "Zhou 2017 GS",
+ "gs_network_version": "1",
+ "elevation_cutoff_deg":10,
+ "stations": [
+ {"id": "G0", "name": "Beijing","name_pretty": "Beijing 0","latitude_deg":40.0,"longitude_deg":116.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"}
+ ]
+ },
+ "sim_gs_network_params": {
+ "_comments":"TODO: Reevaluate the location of these params. (Should they go in the sim file? Or GP? I think they are describing the behavior of the network, so this should be ok)",
+ "time_epsilon_s": 1,
+ "gsn_ps_params" : {
+ "_comments": [
+ "replan_release_wait_time_s simulates the amount of time required to actually run the global planner in real life. from the perspective of this code the global planner will run instantaneously. After it runs, the sim will wait this amount of time before making those plans available",
+ "release_first_plans_immediately = True means that global planner will immediately make its first set of global plans available (they won't be queued up for release at a later time)",
+ "6300 seconds is 105 minutes, about the length of an orbit at 1000 km altitude",
+ "replan release time is conservative here"
+ ],
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ }
+
+
+ }
+
+
+}
diff --git a/inputs/cases/ONLINE_OPS_1_SAT/operational_profile_config.json b/inputs/cases/ONLINE_OPS_1_SAT/operational_profile_config.json
new file mode 100644
index 0000000..6d07011
--- /dev/null
+++ b/inputs/cases/ONLINE_OPS_1_SAT/operational_profile_config.json
@@ -0,0 +1,32 @@
+{
+ "version":"0.0.1",
+ "which_config":"operational_profile_config",
+ "where_should_i_be":"inputs/cases/",
+ "what_goes_here":"things that only matter to the choices to be made about the current case.",
+
+ "ops_profile_params": {
+ "obs_params": {
+ "num_targets": 5,
+ "target_set_name": "Zhou 2017 Missions",
+ "target_set_version": "1",
+ "target_set_id": "2018-4-14",
+ "elevation_cutoff_deg":60,
+ "targets": [
+ {"id": "targ0","name": "Himalaya","name_pretty": "obs0", "latitude_deg": 28.0, "longitude_deg": 87.0,"height_m": 0},
+ {"id": "targ1","name": "Mamiraus","name_pretty": "obs1", "latitude_deg": -2.0, "longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ2","name": "Cape York","name_pretty": "obs2", "latitude_deg": -11.0, "longitude_deg": 142.5,"height_m": 0},
+ {"id": "targ3","name": "Alaska Coast","name_pretty": "obs3", "latitude_deg": 60.0, "longitude_deg": -148,"height_m": 0},
+ {"id": "targ4","name": "Greenland","name_pretty": "obs4", "latitude_deg": 69.0, "longitude_deg": -49,"height_m": 0}
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {
+ },
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {
+ }
+ }
+
+ }
+
+}
\ No newline at end of file
diff --git a/inputs/cases/ONLINE_OPS_1_SAT/sim_case_config.json b/inputs/cases/ONLINE_OPS_1_SAT/sim_case_config.json
new file mode 100644
index 0000000..590351b
--- /dev/null
+++ b/inputs/cases/ONLINE_OPS_1_SAT/sim_case_config.json
@@ -0,0 +1,108 @@
+{
+ "version": "0.0.1",
+ "which_config": "sim_case_config",
+ "where_should_i_be": "inputs/cases/",
+ "what_goes_here": "things that only matter to the simulation, or definition of time, that is relatively likely to vary per case. Things which vary for tweaks and will stay the same otherwise should go in /inputs/admin_config/sim_general_config.json instead",
+ "scenario_params": {
+ "start_utc": "2016-02-14T04:00:00.000000Z",
+ "end_utc": "2016-02-15T04:00:00.000000Z",
+ "use_crosslinks": true,
+ "all_sats_same_time_system": true,
+ "restore_pkl_name": "pickles/",
+ "sat_schedule_hotstart": true,
+ "lookup_params": {
+ "xlnk_range_rates": {
+ "_comment": "The names here should be different from built-in comm type names",
+ "Zhou_2017_xlnk": {
+ "range_units": "km",
+ "rates_units": "Mbps",
+ "interpolation_method": "floor",
+ "range_rates_table": [
+ [
+ 0,
+ 10
+ ]
+ ]
+ }
+ },
+ "dlnk_range_rates": {
+ "_comment1": "The names here should be different from built-in comm type names",
+ "_comment2": "they say this rate is a random distribution over {0,20,40} Mbps for each downlink timeslot. I'll just use the average...",
+ "Zhou_2017_dlnk": {
+ "range_units": "km",
+ "rates_units": "Mbps",
+ "interpolation_method": "floor",
+ "range_rates_table": [
+ [
+ 0,
+ 20
+ ]
+ ]
+ }
+ }
+ },
+ "sim_run_perturbations": {
+ "do_inject_obs": true,
+ "schedule_disruptions": {
+ "G0": [
+ [
+ "2016-02-14T04:00:00.000000Z",
+ "2016-02-14T16:00:00.000000Z"
+ ]
+ ]
+ },
+ "injected_observations": [
+ {
+ "indx": 8,
+ "end_utc": "2016-02-14T15:12:02.827026Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T15:11:02.827026Z"
+ },
+ {
+ "indx": 20,
+ "end_utc": "2016-02-14T18:46:56.198028Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:45:56.198028Z"
+ },
+ {
+ "indx": 22,
+ "end_utc": "2016-02-14T15:13:31.779520Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T15:12:31.779520Z"
+ },
+ {
+ "indx": 24,
+ "end_utc": "2016-02-14T14:12:12.058434Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T14:11:12.058434Z"
+ },
+ {
+ "indx": 28,
+ "end_utc": "2016-02-14T05:02:51.509312Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T05:01:51.509312Z"
+ },
+ {
+ "indx": 31,
+ "end_utc": "2016-02-14T19:26:09.145608Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T19:25:09.145608Z"
+ },
+ {
+ "indx": 34,
+ "end_utc": "2016-02-14T14:00:13.611338Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T13:59:13.611338Z"
+ }
+
+ ]
+ }
+ }
+}
diff --git a/inputs/cases/SPRINT_nominal/constellation_config.json b/inputs/cases/SPRINT_nominal/constellation_config.json
index e9f05f9..380232b 100644
--- a/inputs/cases/SPRINT_nominal/constellation_config.json
+++ b/inputs/cases/SPRINT_nominal/constellation_config.json
@@ -1,154 +1,138 @@
{
- "config_type":"constellation_config",
- "version-const_def":"0.0.1",
- "verbose_details":"https://github.mit.edu/star-lab/SPRINT/blob/master/inputs/cases/case_config_READMEs/constellation_config.md",
-
- "constellation_definition":{
- "version-const":"0.0.1",
-
- "default_sat_ref_model_name":"zhou_original_sat",
-
- "constellation_params":{
- "num_satellites": 150,
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/SPRINT/blob/master/inputs/cases/case_config_READMEs/constellation_config.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 150,
"sat_id_prefix": "S",
"sat_ids": "duplicate,range_inclusive,0,149",
"sat_id_order": "default",
-
- "_comments" : [
+ "_comments": [
"orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
""
],
"intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
-
- "orbit_params" : {
+ "orbit_params": {
"sat_ids_by_orbit_name": {
- "_comment":"defined by planes"
+ "_comment": "defined by planes"
},
"_comment": "If using this style of remepetative elements",
"sat_orbital_elems": [
-
{
- "def_type":"plane",
- "orbit_indx" : 0, "_comment" : "A 50-sat ISS orbit",
+ "def_type": "plane",
+ "orbit_indx": 0,
+ "_comment": "A 50-sat ISS orbit",
"plane_def": {
- "a_km" : 6778,
- "e" : 0.0005851,
- "i_deg" : 51.6398,
- "RAAN_deg" : 217.5041,
- "arg_per_deg" : 45.9800
+ "a_km": 6778,
+ "e": 0.0005851,
+ "i_deg": 51.6398,
+ "RAAN_deg": 217.5041,
+ "arg_per_deg": 45.98
},
-
- "first_M_deg" : 0, "_comment1" : "anomaly of first sat, which subsequent will follow",
- "spacing_type" : "even", "_comment2" : "spacing: 'even', or 'progressive', or 'set'; indicate whether the sats in the plane are evenly spaced, or should space progressively by the subsequently provided 'spacing_val', or should fix each to a anomoly in a set (array) to allow arbitrary values ",
-
- "first_sat_id" : 0, "_comment3" : "the comnbo of first_sat_id, and sats_in_plane must not result in conflicting indices, and should be 'in order' without gaps, and in total match 'num_satellites' and 'sat_ids' field above. Sorry for the restrictions for now, will make a validation function.",
- "sats_in_plane" : 50,
-
+ "first_M_deg": 0,
+ "_comment1": "anomaly of first sat, which subsequent will follow",
+ "spacing_type": "even",
+ "_comment2": "spacing: 'even', or 'progressive', or 'set'; indicate whether the sats in the plane are evenly spaced, or should space progressively by the subsequently provided 'spacing_val', or should fix each to a anomoly in a set (array) to allow arbitrary values ",
+ "first_sat_id": 0,
+ "_comment3": "the comnbo of first_sat_id, and sats_in_plane must not result in conflicting indices, and should be 'in order' without gaps, and in total match 'num_satellites' and 'sat_ids' field above. Sorry for the restrictions for now, will make a validation function.",
+ "sats_in_plane": 50,
"propagation_method": "matlab_delkep"
},
-
-
{
- "def_type":"plane",
- "orbit_indx" : 1, "_comment" : "A 20-sat SSO orbit",
+ "def_type": "plane",
+ "orbit_indx": 1,
+ "_comment": "A 20-sat SSO orbit",
"plane_def": {
- "a_km" : 6945,
- "e" : 0,
- "i_deg" : 97.7,
- "RAAN_deg" : 0, "_comment" : "Setting spacing of SSO's evenly by RAAN; alternating up & down (indicated by an added 180)",
- "arg_per_deg" : 0
+ "a_km": 6945,
+ "e": 0,
+ "i_deg": 97.7,
+ "RAAN_deg": 0,
+ "_comment": "Setting spacing of SSO's evenly by RAAN; alternating up & down (indicated by an added 180)",
+ "arg_per_deg": 0
},
-
- "first_M_deg" : 0,
- "spacing_type" : "even",
-
- "first_sat_id" : 50,
- "sats_in_plane" : 20,
-
+ "first_M_deg": 0,
+ "spacing_type": "even",
+ "first_sat_id": 50,
+ "sats_in_plane": 20,
"propagation_method": "matlab_delkep"
},
{
- "def_type":"plane",
- "orbit_indx" : 2, "_comment" : "A 20-sat SSO orbit",
+ "def_type": "plane",
+ "orbit_indx": 2,
+ "_comment": "A 20-sat SSO orbit",
"plane_def": {
- "a_km" : 6945,
- "e" : 0,
- "i_deg" : 97.7,
- "RAAN_deg" : 216, "_comment" : "36+180 (reversed to alternate up/down)",
- "arg_per_deg" : 0
+ "a_km": 6945,
+ "e": 0,
+ "i_deg": 97.7,
+ "RAAN_deg": 216,
+ "_comment": "36+180 (reversed to alternate up/down)",
+ "arg_per_deg": 0
},
-
- "first_M_deg" : 0,
- "spacing_type" : "even",
-
- "first_sat_id" : 70,
- "sats_in_plane" : 20,
-
+ "first_M_deg": 0,
+ "spacing_type": "even",
+ "first_sat_id": 70,
+ "sats_in_plane": 20,
"propagation_method": "matlab_delkep"
},
{
- "def_type":"plane",
- "orbit_indx" : 3, "_comment" : "A 20-sat SSO orbit",
+ "def_type": "plane",
+ "orbit_indx": 3,
+ "_comment": "A 20-sat SSO orbit",
"plane_def": {
- "a_km" : 6945,
- "e" : 0,
- "i_deg" : 97.7,
- "RAAN_deg" : 72, "_comment" : "36*2 (third pos)",
- "arg_per_deg" : 0
+ "a_km": 6945,
+ "e": 0,
+ "i_deg": 97.7,
+ "RAAN_deg": 72,
+ "_comment": "36*2 (third pos)",
+ "arg_per_deg": 0
},
-
- "first_M_deg" : 0,
- "spacing_type" : "even",
-
- "first_sat_id" : 90,
- "sats_in_plane" : 20,
-
+ "first_M_deg": 0,
+ "spacing_type": "even",
+ "first_sat_id": 90,
+ "sats_in_plane": 20,
"propagation_method": "matlab_delkep"
},
{
- "def_type":"plane",
- "orbit_indx" : 4, "_comment" : "A 20-sat SSO orbit",
+ "def_type": "plane",
+ "orbit_indx": 4,
+ "_comment": "A 20-sat SSO orbit",
"plane_def": {
- "a_km" : 6945,
- "e" : 0,
- "i_deg" : 97.7,
- "RAAN_deg" : 288, "_comment" : "36*3+180",
- "arg_per_deg" : 0
+ "a_km": 6945,
+ "e": 0,
+ "i_deg": 97.7,
+ "RAAN_deg": 288,
+ "_comment": "36*3+180",
+ "arg_per_deg": 0
},
-
- "first_M_deg" : 0,
- "spacing_type" : "even",
-
- "first_sat_id" : 110,
- "sats_in_plane" : 20,
-
+ "first_M_deg": 0,
+ "spacing_type": "even",
+ "first_sat_id": 110,
+ "sats_in_plane": 20,
"propagation_method": "matlab_delkep"
},
{
- "def_type":"plane",
- "orbit_indx" : 5, "_comment" : "A 20-sat SSO orbit",
+ "def_type": "plane",
+ "orbit_indx": 5,
+ "_comment": "A 20-sat SSO orbit",
"plane_def": {
- "a_km" : 6945,
- "e" : 0,
- "i_deg" : 97.7,
- "RAAN_deg" : 144, "_comment" : "36*4",
- "arg_per_deg" : 0
+ "a_km": 6945,
+ "e": 0,
+ "i_deg": 97.7,
+ "RAAN_deg": 144,
+ "_comment": "36*4",
+ "arg_per_deg": 0
},
-
- "first_M_deg" : 0,
- "spacing_type" : "even",
-
- "first_sat_id" : 130,
- "sats_in_plane" : 20,
-
+ "first_M_deg": 0,
+ "spacing_type": "even",
+ "first_sat_id": 130,
+ "sats_in_plane": 20,
"propagation_method": "matlab_delkep"
}
-
]
}
- }
-
+ },
+ "sat_ref_model_name": "zhou_original_sat"
}
-
-
}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_2_sats/README.md b/inputs/cases/circinus_zhou_2_sats/README.md
new file mode 100644
index 0000000..675742a
--- /dev/null
+++ b/inputs/cases/circinus_zhou_2_sats/README.md
@@ -0,0 +1,23 @@
+# Original CIRCINUS use-case
+
+## Description of Original Zhou use-case:
+Cases are fundamentally defined by a constellation and a set of ground stations, and further specified by an operational profile which defines intended up-time, as well as targets; and by a simulation scenario which defines actual up-time and performance. The SPRINT Zhou use case is one of several use cases demonstrated with the first version of CIRCINUS.
+
+## Required Case-specific Configuration Files, in this directory:
+* `constellation_config.json`
+* `ground_station_network_config.json`
+* `operational_profile_config.json`
+* `sim_case_config.json`
+
+## Further required in the model definition directory `inputs/reference_model_definitions`:
+* `/gs_refs/zhou_original_gs.json`
+* `/payload_refs/zhou_original_payload.json`
+* `/sat_refs/zhou_original_sat.json`
+
+### Hold on where do I find all these files?
+[New file structure](https://drive.google.com/open?id=1CQP91ySWmHOgwE4s4TaXmAkO5aoH4krC)
+
+
+[Original file dataflow](https://drive.google.com/open?id=18GtxUUbO9V40Ifl_81Cpz0z-GL1XkV4R)
+
+
diff --git a/inputs/cases/circinus_zhou_2_sats/constellation_config.json b/inputs/cases/circinus_zhou_2_sats/constellation_config.json
new file mode 100644
index 0000000..8a5046f
--- /dev/null
+++ b/inputs/cases/circinus_zhou_2_sats/constellation_config.json
@@ -0,0 +1,58 @@
+{
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 2,
+ "sat_id_prefix": "S",
+ "sat_ids": "duplicate,range_inclusive,0,1",
+ "sat_id_order": "default",
+ "_comments": [
+ "orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
+ ""
+ ],
+ "intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
+ "orbit_params": {
+ "sat_ids_by_orbit_name": {
+ "orbit0": [
+ "S0"
+ ],
+ "orbit1": [
+ "S1"
+ ]
+ },
+ "sat_orbital_elems": [
+ {
+ "sat_id": "S0",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 97.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 90
+ },
+ "propagation_method": "matlab_delkep"
+ },
+ {
+ "sat_id": "S1",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 83.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 120
+ },
+ "propagation_method": "matlab_delkep"
+ }
+ ]
+ }
+ },
+ "sat_ref_model_name": "zhou_original_sat"
+ }
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_2_sats/ground_station_network_config.json b/inputs/cases/circinus_zhou_2_sats/ground_station_network_config.json
new file mode 100644
index 0000000..daeb8f0
--- /dev/null
+++ b/inputs/cases/circinus_zhou_2_sats/ground_station_network_config.json
@@ -0,0 +1,43 @@
+{
+ "config_title":"ground_station_network_config",
+ "version-gsndef":"0.0.1",
+ "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/groundstation_network/README.md",
+
+ "network_definition":{
+ "version-gsn":"0.0.1",
+
+ "default_gs_ref_model_name":"zhou_original_gs",
+
+ "gs_net_params":{
+ "num_stations": 4,
+ "gs_network_name": "Zhou 2017 GS",
+ "gs_network_version": "1",
+ "elevation_cutoff_deg":10,
+ "stations": [
+ {"id": "G0", "name": "Beijing","name_pretty": "Beijing 0","latitude_deg":40.0,"longitude_deg":116.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G1", "name": "Kashi","name_pretty": "Kashi 1", "latitude_deg":39.5,"longitude_deg":76.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G2", "name": "Sanya","name_pretty": "Sanya 2", "latitude_deg":18.0,"longitude_deg":109.5,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G3", "name": "Xi'an","name_pretty": "Xi'an 3", "latitude_deg":34.0,"longitude_deg":108.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"}
+ ]
+ },
+ "sim_gs_network_params": {
+ "_comments":"TODO: Reevaluate the location of these params. (Should they go in the sim file? Or GP? I think they are describing the behavior of the network, so this should be ok)",
+ "time_epsilon_s": 1,
+ "gsn_ps_params" : {
+ "_comments": [
+ "replan_release_wait_time_s simulates the amount of time required to actually run the global planner in real life. from the perspective of this code the global planner will run instantaneously. After it runs, the sim will wait this amount of time before making those plans available",
+ "release_first_plans_immediately = True means that global planner will immediately make its first set of global plans available (they won't be queued up for release at a later time)",
+ "6300 seconds is 105 minutes, about the length of an orbit at 1000 km altitude",
+ "replan release time is conservative here"
+ ],
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ }
+
+
+ }
+
+
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_2_sats/operational_profile_config.json b/inputs/cases/circinus_zhou_2_sats/operational_profile_config.json
new file mode 100644
index 0000000..6d07011
--- /dev/null
+++ b/inputs/cases/circinus_zhou_2_sats/operational_profile_config.json
@@ -0,0 +1,32 @@
+{
+ "version":"0.0.1",
+ "which_config":"operational_profile_config",
+ "where_should_i_be":"inputs/cases/",
+ "what_goes_here":"things that only matter to the choices to be made about the current case.",
+
+ "ops_profile_params": {
+ "obs_params": {
+ "num_targets": 5,
+ "target_set_name": "Zhou 2017 Missions",
+ "target_set_version": "1",
+ "target_set_id": "2018-4-14",
+ "elevation_cutoff_deg":60,
+ "targets": [
+ {"id": "targ0","name": "Himalaya","name_pretty": "obs0", "latitude_deg": 28.0, "longitude_deg": 87.0,"height_m": 0},
+ {"id": "targ1","name": "Mamiraus","name_pretty": "obs1", "latitude_deg": -2.0, "longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ2","name": "Cape York","name_pretty": "obs2", "latitude_deg": -11.0, "longitude_deg": 142.5,"height_m": 0},
+ {"id": "targ3","name": "Alaska Coast","name_pretty": "obs3", "latitude_deg": 60.0, "longitude_deg": -148,"height_m": 0},
+ {"id": "targ4","name": "Greenland","name_pretty": "obs4", "latitude_deg": 69.0, "longitude_deg": -49,"height_m": 0}
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {
+ },
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {
+ }
+ }
+
+ }
+
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_2_sats/sim_case_config.json b/inputs/cases/circinus_zhou_2_sats/sim_case_config.json
new file mode 100644
index 0000000..086b40a
--- /dev/null
+++ b/inputs/cases/circinus_zhou_2_sats/sim_case_config.json
@@ -0,0 +1,254 @@
+{
+ "version": "0.0.1",
+ "which_config": "sim_case_config",
+ "where_should_i_be": "inputs/cases/",
+ "what_goes_here": "things that only matter to the simulation, or definition of time, that is relatively likely to vary per case. Things which vary for tweaks and will stay the same otherwise should go in /inputs/admin_config/sim_general_config.json instead",
+ "scenario_params": {
+ "start_utc": "2016-02-14T04:00:00.000000Z",
+ "end_utc": "2016-02-15T04:00:00.000000Z",
+ "use_crosslinks": true,
+ "all_sats_same_time_system": true,
+ "restore_pkl_name": "pickles/",
+ "sat_schedule_hotstart": true,
+ "lookup_params": {
+ "xlnk_range_rates": {
+ "_comment": "The names here should be different from built-in comm type names",
+ "Zhou_2017_xlnk": {
+ "range_units": "km",
+ "rates_units": "Mbps",
+ "interpolation_method": "floor",
+ "range_rates_table": [
+ [
+ 0,
+ 10
+ ]
+ ]
+ }
+ },
+ "dlnk_range_rates": {
+ "_comment1": "The names here should be different from built-in comm type names",
+ "_comment2": "they say this rate is a random distribution over {0,20,40} Mbps for each downlink timeslot. I'll just use the average...",
+ "Zhou_2017_dlnk": {
+ "range_units": "km",
+ "rates_units": "Mbps",
+ "interpolation_method": "floor",
+ "range_rates_table": [
+ [
+ 0,
+ 20
+ ]
+ ]
+ }
+ }
+ },
+ "sim_run_perturbations": {
+ "do_inject_obs": true,
+ "schedule_disruptions": {
+ "G1": [
+ [
+ "2016-02-14T04:00:00.000000Z",
+ "2016-02-14T16:00:00.000000Z"
+ ]
+ ]
+ },
+ "injected_observations": [
+ {
+ "indx": 0,
+ "end_utc": "2016-02-14T09:55:37.164153Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T09:54:37.164153Z"
+ },
+ {
+ "indx": 1,
+ "end_utc": "2016-02-14T12:00:35.483029Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T11:59:35.483029Z"
+ },
+ {
+ "indx": 3,
+ "end_utc": "2016-02-14T21:23:23.298319Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:22:23.298319Z"
+ },
+ {
+ "indx": 5,
+ "end_utc": "2016-02-14T18:10:00.708361Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:09:00.708361Z"
+ },
+ {
+ "indx": 6,
+ "end_utc": "2016-02-14T04:16:54.450560Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T04:15:54.450560Z"
+ },
+ {
+ "indx": 8,
+ "end_utc": "2016-02-14T15:12:02.827026Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T15:11:02.827026Z"
+ },
+ {
+ "indx": 9,
+ "end_utc": "2016-02-14T07:06:41.520948Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T07:05:41.520948Z"
+ },
+ {
+ "indx": 10,
+ "end_utc": "2016-02-14T16:37:02.412884Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T16:36:02.412884Z"
+ },
+ {
+ "indx": 11,
+ "end_utc": "2016-02-14T10:06:12.829820Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:05:12.829820Z"
+ },
+ {
+ "indx": 13,
+ "end_utc": "2016-02-14T09:55:20.718866Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T09:54:20.718866Z"
+ },
+ {
+ "indx": 15,
+ "end_utc": "2016-02-14T20:39:46.383513Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T20:38:46.383513Z"
+ },
+ {
+ "indx": 17,
+ "end_utc": "2016-02-14T10:55:33.461534Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:54:33.461534Z"
+ },
+ {
+ "indx": 19,
+ "end_utc": "2016-02-14T21:55:09.278231Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:54:09.278231Z"
+ },
+ {
+ "indx": 20,
+ "end_utc": "2016-02-14T18:46:56.198028Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:45:56.198028Z"
+ },
+ {
+ "indx": 21,
+ "end_utc": "2016-02-14T19:43:40.340492Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T19:42:40.340492Z"
+ },
+ {
+ "indx": 22,
+ "end_utc": "2016-02-14T15:13:31.779520Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T15:12:31.779520Z"
+ },
+ {
+ "indx": 24,
+ "end_utc": "2016-02-14T14:12:12.058434Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T14:11:12.058434Z"
+ },
+ {
+ "indx": 25,
+ "end_utc": "2016-02-14T16:22:00.816409Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T16:21:00.816409Z"
+ },
+ {
+ "indx": 26,
+ "end_utc": "2016-02-14T10:01:28.150822Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:00:28.150822Z"
+ },
+ {
+ "indx": 27,
+ "end_utc": "2016-02-14T08:40:39.036326Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T08:39:39.036326Z"
+ },
+ {
+ "indx": 28,
+ "end_utc": "2016-02-14T05:02:51.509312Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T05:01:51.509312Z"
+ },
+ {
+ "indx": 29,
+ "end_utc": "2016-02-14T21:53:18.709511Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:52:18.709511Z"
+ },
+ {
+ "indx": 31,
+ "end_utc": "2016-02-14T19:26:09.145608Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T19:25:09.145608Z"
+ },
+ {
+ "indx": 34,
+ "end_utc": "2016-02-14T14:00:13.611338Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T13:59:13.611338Z"
+ },
+ {
+ "indx": 35,
+ "end_utc": "2016-02-14T18:10:56.972232Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:09:56.972232Z"
+ },
+ {
+ "indx": 36,
+ "end_utc": "2016-02-14T17:36:05.824133Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T17:35:05.824133Z"
+ },
+ {
+ "indx": 37,
+ "end_utc": "2016-02-14T21:31:32.705353Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:30:32.705353Z"
+ },
+ {
+ "indx": 38,
+ "end_utc": "2016-02-14T13:34:49.865042Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T13:33:49.865042Z"
+ }
+ ]
+ }
+ }
+}
diff --git a/inputs/cases/circinus_zhou_3_sats/README.md b/inputs/cases/circinus_zhou_3_sats/README.md
new file mode 100644
index 0000000..675742a
--- /dev/null
+++ b/inputs/cases/circinus_zhou_3_sats/README.md
@@ -0,0 +1,23 @@
+# Original CIRCINUS use-case
+
+## Description of Original Zhou use-case:
+Cases are fundamentally defined by a constellation and a set of ground stations, and further specified by an operational profile which defines intended up-time, as well as targets; and by a simulation scenario which defines actual up-time and performance. The SPRINT Zhou use case is one of several use cases demonstrated with the first version of CIRCINUS.
+
+## Required Case-specific Configuration Files, in this directory:
+* `constellation_config.json`
+* `ground_station_network_config.json`
+* `operational_profile_config.json`
+* `sim_case_config.json`
+
+## Further required in the model definition directory `inputs/reference_model_definitions`:
+* `/gs_refs/zhou_original_gs.json`
+* `/payload_refs/zhou_original_payload.json`
+* `/sat_refs/zhou_original_sat.json`
+
+### Hold on where do I find all these files?
+[New file structure](https://drive.google.com/open?id=1CQP91ySWmHOgwE4s4TaXmAkO5aoH4krC)
+
+
+[Original file dataflow](https://drive.google.com/open?id=18GtxUUbO9V40Ifl_81Cpz0z-GL1XkV4R)
+
+
diff --git a/inputs/cases/circinus_zhou_3_sats/constellation_config.json b/inputs/cases/circinus_zhou_3_sats/constellation_config.json
new file mode 100644
index 0000000..863849f
--- /dev/null
+++ b/inputs/cases/circinus_zhou_3_sats/constellation_config.json
@@ -0,0 +1,72 @@
+{
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 3,
+ "sat_id_prefix": "S",
+ "sat_ids": "duplicate,range_inclusive,0,2",
+ "sat_id_order": "default",
+ "_comments": [
+ "orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
+ ""
+ ],
+ "intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
+ "orbit_params": {
+ "sat_ids_by_orbit_name": {
+ "orbit0": [
+ "S0",
+ "S1"
+ ],
+ "orbit1": [
+ "S2"
+ ]
+ },
+ "sat_orbital_elems": [
+ {
+ "sat_id": "S0",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 97.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 90
+ },
+ "propagation_method": "matlab_delkep"
+ },
+ {
+ "sat_id": "S1",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 97.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 180
+ },
+ "propagation_method": "matlab_delkep"
+ },
+ {
+ "sat_id": "S2",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 83.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 120
+ },
+ "propagation_method": "matlab_delkep"
+ }
+ ]
+ }
+ },
+ "sat_ref_model_name": "zhou_original_sat"
+ }
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_3_sats/ground_station_network_config.json b/inputs/cases/circinus_zhou_3_sats/ground_station_network_config.json
new file mode 100644
index 0000000..daeb8f0
--- /dev/null
+++ b/inputs/cases/circinus_zhou_3_sats/ground_station_network_config.json
@@ -0,0 +1,43 @@
+{
+ "config_title":"ground_station_network_config",
+ "version-gsndef":"0.0.1",
+ "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/groundstation_network/README.md",
+
+ "network_definition":{
+ "version-gsn":"0.0.1",
+
+ "default_gs_ref_model_name":"zhou_original_gs",
+
+ "gs_net_params":{
+ "num_stations": 4,
+ "gs_network_name": "Zhou 2017 GS",
+ "gs_network_version": "1",
+ "elevation_cutoff_deg":10,
+ "stations": [
+ {"id": "G0", "name": "Beijing","name_pretty": "Beijing 0","latitude_deg":40.0,"longitude_deg":116.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G1", "name": "Kashi","name_pretty": "Kashi 1", "latitude_deg":39.5,"longitude_deg":76.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G2", "name": "Sanya","name_pretty": "Sanya 2", "latitude_deg":18.0,"longitude_deg":109.5,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G3", "name": "Xi'an","name_pretty": "Xi'an 3", "latitude_deg":34.0,"longitude_deg":108.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"}
+ ]
+ },
+ "sim_gs_network_params": {
+ "_comments":"TODO: Reevaluate the location of these params. (Should they go in the sim file? Or GP? I think they are describing the behavior of the network, so this should be ok)",
+ "time_epsilon_s": 1,
+ "gsn_ps_params" : {
+ "_comments": [
+ "replan_release_wait_time_s simulates the amount of time required to actually run the global planner in real life. from the perspective of this code the global planner will run instantaneously. After it runs, the sim will wait this amount of time before making those plans available",
+ "release_first_plans_immediately = True means that global planner will immediately make its first set of global plans available (they won't be queued up for release at a later time)",
+ "6300 seconds is 105 minutes, about the length of an orbit at 1000 km altitude",
+ "replan release time is conservative here"
+ ],
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ }
+
+
+ }
+
+
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_3_sats/operational_profile_config.json b/inputs/cases/circinus_zhou_3_sats/operational_profile_config.json
new file mode 100644
index 0000000..6d07011
--- /dev/null
+++ b/inputs/cases/circinus_zhou_3_sats/operational_profile_config.json
@@ -0,0 +1,32 @@
+{
+ "version":"0.0.1",
+ "which_config":"operational_profile_config",
+ "where_should_i_be":"inputs/cases/",
+ "what_goes_here":"things that only matter to the choices to be made about the current case.",
+
+ "ops_profile_params": {
+ "obs_params": {
+ "num_targets": 5,
+ "target_set_name": "Zhou 2017 Missions",
+ "target_set_version": "1",
+ "target_set_id": "2018-4-14",
+ "elevation_cutoff_deg":60,
+ "targets": [
+ {"id": "targ0","name": "Himalaya","name_pretty": "obs0", "latitude_deg": 28.0, "longitude_deg": 87.0,"height_m": 0},
+ {"id": "targ1","name": "Mamiraus","name_pretty": "obs1", "latitude_deg": -2.0, "longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ2","name": "Cape York","name_pretty": "obs2", "latitude_deg": -11.0, "longitude_deg": 142.5,"height_m": 0},
+ {"id": "targ3","name": "Alaska Coast","name_pretty": "obs3", "latitude_deg": 60.0, "longitude_deg": -148,"height_m": 0},
+ {"id": "targ4","name": "Greenland","name_pretty": "obs4", "latitude_deg": 69.0, "longitude_deg": -49,"height_m": 0}
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {
+ },
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {
+ }
+ }
+
+ }
+
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_3_sats/sim_case_config.json b/inputs/cases/circinus_zhou_3_sats/sim_case_config.json
new file mode 100644
index 0000000..ee7e579
--- /dev/null
+++ b/inputs/cases/circinus_zhou_3_sats/sim_case_config.json
@@ -0,0 +1,254 @@
+{
+ "version": "0.0.1",
+ "which_config": "sim_case_config",
+ "where_should_i_be": "inputs/cases/",
+ "what_goes_here": "things that only matter to the simulation, or definition of time, that is relatively likely to vary per case. Things which vary for tweaks and will stay the same otherwise should go in /inputs/admin_config/sim_general_config.json instead",
+ "scenario_params": {
+ "start_utc": "2016-02-14T04:00:00.000000Z",
+ "end_utc": "2016-02-15T04:00:00.000000Z",
+ "use_crosslinks": true,
+ "all_sats_same_time_system": true,
+ "restore_pkl_name": "pickles/",
+ "sat_schedule_hotstart": true,
+ "lookup_params": {
+ "xlnk_range_rates": {
+ "_comment": "The names here should be different from built-in comm type names",
+ "Zhou_2017_xlnk": {
+ "range_units": "km",
+ "rates_units": "Mbps",
+ "interpolation_method": "floor",
+ "range_rates_table": [
+ [
+ 0,
+ 10
+ ]
+ ]
+ }
+ },
+ "dlnk_range_rates": {
+ "_comment1": "The names here should be different from built-in comm type names",
+ "_comment2": "they say this rate is a random distribution over {0,20,40} Mbps for each downlink timeslot. I'll just use the average...",
+ "Zhou_2017_dlnk": {
+ "range_units": "km",
+ "rates_units": "Mbps",
+ "interpolation_method": "floor",
+ "range_rates_table": [
+ [
+ 0,
+ 20
+ ]
+ ]
+ }
+ }
+ },
+ "sim_run_perturbations": {
+ "do_inject_obs": true,
+ "schedule_disruptions": {
+ "G1": [
+ [
+ "2016-02-14T04:00:00.000000Z",
+ "2016-02-14T16:00:00.000000Z"
+ ]
+ ]
+ },
+ "injected_observations": [
+ {
+ "indx": 0,
+ "end_utc": "2016-02-14T09:55:37.164153Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T09:54:37.164153Z"
+ },
+ {
+ "indx": 1,
+ "end_utc": "2016-02-14T12:00:35.483029Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T11:59:35.483029Z"
+ },
+ {
+ "indx": 3,
+ "end_utc": "2016-02-14T21:23:23.298319Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:22:23.298319Z"
+ },
+ {
+ "indx": 5,
+ "end_utc": "2016-02-14T18:10:00.708361Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:09:00.708361Z"
+ },
+ {
+ "indx": 6,
+ "end_utc": "2016-02-14T04:16:54.450560Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T04:15:54.450560Z"
+ },
+ {
+ "indx": 8,
+ "end_utc": "2016-02-14T15:12:02.827026Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T15:11:02.827026Z"
+ },
+ {
+ "indx": 9,
+ "end_utc": "2016-02-14T07:06:41.520948Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T07:05:41.520948Z"
+ },
+ {
+ "indx": 10,
+ "end_utc": "2016-02-14T16:37:02.412884Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T16:36:02.412884Z"
+ },
+ {
+ "indx": 11,
+ "end_utc": "2016-02-14T10:06:12.829820Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:05:12.829820Z"
+ },
+ {
+ "indx": 13,
+ "end_utc": "2016-02-14T09:55:20.718866Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T09:54:20.718866Z"
+ },
+ {
+ "indx": 15,
+ "end_utc": "2016-02-14T20:39:46.383513Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T20:38:46.383513Z"
+ },
+ {
+ "indx": 17,
+ "end_utc": "2016-02-14T10:55:33.461534Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:54:33.461534Z"
+ },
+ {
+ "indx": 19,
+ "end_utc": "2016-02-14T21:55:09.278231Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:54:09.278231Z"
+ },
+ {
+ "indx": 20,
+ "end_utc": "2016-02-14T18:46:56.198028Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:45:56.198028Z"
+ },
+ {
+ "indx": 21,
+ "end_utc": "2016-02-14T19:43:40.340492Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T19:42:40.340492Z"
+ },
+ {
+ "indx": 22,
+ "end_utc": "2016-02-14T15:13:31.779520Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T15:12:31.779520Z"
+ },
+ {
+ "indx": 24,
+ "end_utc": "2016-02-14T14:12:12.058434Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T14:11:12.058434Z"
+ },
+ {
+ "indx": 25,
+ "end_utc": "2016-02-14T16:22:00.816409Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T16:21:00.816409Z"
+ },
+ {
+ "indx": 26,
+ "end_utc": "2016-02-14T10:01:28.150822Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:00:28.150822Z"
+ },
+ {
+ "indx": 27,
+ "end_utc": "2016-02-14T08:40:39.036326Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T08:39:39.036326Z"
+ },
+ {
+ "indx": 28,
+ "end_utc": "2016-02-14T05:02:51.509312Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T05:01:51.509312Z"
+ },
+ {
+ "indx": 29,
+ "end_utc": "2016-02-14T21:53:18.709511Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:52:18.709511Z"
+ },
+ {
+ "indx": 31,
+ "end_utc": "2016-02-14T19:26:09.145608Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T19:25:09.145608Z"
+ },
+ {
+ "indx": 34,
+ "end_utc": "2016-02-14T14:00:13.611338Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T13:59:13.611338Z"
+ },
+ {
+ "indx": 35,
+ "end_utc": "2016-02-14T18:10:56.972232Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:09:56.972232Z"
+ },
+ {
+ "indx": 36,
+ "end_utc": "2016-02-14T17:36:05.824133Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T17:35:05.824133Z"
+ },
+ {
+ "indx": 37,
+ "end_utc": "2016-02-14T21:31:32.705353Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:30:32.705353Z"
+ },
+ {
+ "indx": 38,
+ "end_utc": "2016-02-14T13:34:49.865042Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T13:33:49.865042Z"
+ }
+ ]
+ }
+ }
+}
diff --git a/inputs/cases/circinus_zhou_4_sats/README.md b/inputs/cases/circinus_zhou_4_sats/README.md
new file mode 100644
index 0000000..675742a
--- /dev/null
+++ b/inputs/cases/circinus_zhou_4_sats/README.md
@@ -0,0 +1,23 @@
+# Original CIRCINUS use-case
+
+## Description of Original Zhou use-case:
+Cases are fundamentally defined by a constellation and a set of ground stations, and further specified by an operational profile which defines intended up-time, as well as targets; and by a simulation scenario which defines actual up-time and performance. The SPRINT Zhou use case is one of several use cases demonstrated with the first version of CIRCINUS.
+
+## Required Case-specific Configuration Files, in this directory:
+* `constellation_config.json`
+* `ground_station_network_config.json`
+* `operational_profile_config.json`
+* `sim_case_config.json`
+
+## Further required in the model definition directory `inputs/reference_model_definitions`:
+* `/gs_refs/zhou_original_gs.json`
+* `/payload_refs/zhou_original_payload.json`
+* `/sat_refs/zhou_original_sat.json`
+
+### Hold on where do I find all these files?
+[New file structure](https://drive.google.com/open?id=1CQP91ySWmHOgwE4s4TaXmAkO5aoH4krC)
+
+
+[Original file dataflow](https://drive.google.com/open?id=18GtxUUbO9V40Ifl_81Cpz0z-GL1XkV4R)
+
+
diff --git a/inputs/cases/circinus_zhou_4_sats/constellation_config.json b/inputs/cases/circinus_zhou_4_sats/constellation_config.json
new file mode 100644
index 0000000..76ba366
--- /dev/null
+++ b/inputs/cases/circinus_zhou_4_sats/constellation_config.json
@@ -0,0 +1,86 @@
+{
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 4,
+ "sat_id_prefix": "S",
+ "sat_ids": "duplicate,range_inclusive,0,3",
+ "sat_id_order": "default",
+ "_comments": [
+ "orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
+ ""
+ ],
+ "intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
+ "orbit_params": {
+ "sat_ids_by_orbit_name": {
+ "orbit0": [
+ "S0",
+ "S1"
+ ],
+ "orbit1": [
+ "S2",
+ "S3"
+ ]
+ },
+ "sat_orbital_elems": [
+ {
+ "sat_id": "S0",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 97.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 90
+ },
+ "propagation_method": "matlab_delkep"
+ },
+ {
+ "sat_id": "S1",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 97.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 180
+ },
+ "propagation_method": "matlab_delkep"
+ },
+ {
+ "sat_id": "S3",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 83.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 60
+ },
+ "propagation_method": "matlab_delkep"
+ },
+ {
+ "sat_id": "S2",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 83.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 120
+ },
+ "propagation_method": "matlab_delkep"
+ }
+ ]
+ }
+ },
+ "sat_ref_model_name": "zhou_original_sat"
+ }
+}
diff --git a/inputs/cases/circinus_zhou_4_sats/ground_station_network_config.json b/inputs/cases/circinus_zhou_4_sats/ground_station_network_config.json
new file mode 100644
index 0000000..daeb8f0
--- /dev/null
+++ b/inputs/cases/circinus_zhou_4_sats/ground_station_network_config.json
@@ -0,0 +1,43 @@
+{
+ "config_title":"ground_station_network_config",
+ "version-gsndef":"0.0.1",
+ "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/groundstation_network/README.md",
+
+ "network_definition":{
+ "version-gsn":"0.0.1",
+
+ "default_gs_ref_model_name":"zhou_original_gs",
+
+ "gs_net_params":{
+ "num_stations": 4,
+ "gs_network_name": "Zhou 2017 GS",
+ "gs_network_version": "1",
+ "elevation_cutoff_deg":10,
+ "stations": [
+ {"id": "G0", "name": "Beijing","name_pretty": "Beijing 0","latitude_deg":40.0,"longitude_deg":116.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G1", "name": "Kashi","name_pretty": "Kashi 1", "latitude_deg":39.5,"longitude_deg":76.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G2", "name": "Sanya","name_pretty": "Sanya 2", "latitude_deg":18.0,"longitude_deg":109.5,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G3", "name": "Xi'an","name_pretty": "Xi'an 3", "latitude_deg":34.0,"longitude_deg":108.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"}
+ ]
+ },
+ "sim_gs_network_params": {
+ "_comments":"TODO: Reevaluate the location of these params. (Should they go in the sim file? Or GP? I think they are describing the behavior of the network, so this should be ok)",
+ "time_epsilon_s": 1,
+ "gsn_ps_params" : {
+ "_comments": [
+ "replan_release_wait_time_s simulates the amount of time required to actually run the global planner in real life. from the perspective of this code the global planner will run instantaneously. After it runs, the sim will wait this amount of time before making those plans available",
+ "release_first_plans_immediately = True means that global planner will immediately make its first set of global plans available (they won't be queued up for release at a later time)",
+ "6300 seconds is 105 minutes, about the length of an orbit at 1000 km altitude",
+ "replan release time is conservative here"
+ ],
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ }
+
+
+ }
+
+
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_4_sats/operational_profile_config.json b/inputs/cases/circinus_zhou_4_sats/operational_profile_config.json
new file mode 100644
index 0000000..6d07011
--- /dev/null
+++ b/inputs/cases/circinus_zhou_4_sats/operational_profile_config.json
@@ -0,0 +1,32 @@
+{
+ "version":"0.0.1",
+ "which_config":"operational_profile_config",
+ "where_should_i_be":"inputs/cases/",
+ "what_goes_here":"things that only matter to the choices to be made about the current case.",
+
+ "ops_profile_params": {
+ "obs_params": {
+ "num_targets": 5,
+ "target_set_name": "Zhou 2017 Missions",
+ "target_set_version": "1",
+ "target_set_id": "2018-4-14",
+ "elevation_cutoff_deg":60,
+ "targets": [
+ {"id": "targ0","name": "Himalaya","name_pretty": "obs0", "latitude_deg": 28.0, "longitude_deg": 87.0,"height_m": 0},
+ {"id": "targ1","name": "Mamiraus","name_pretty": "obs1", "latitude_deg": -2.0, "longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ2","name": "Cape York","name_pretty": "obs2", "latitude_deg": -11.0, "longitude_deg": 142.5,"height_m": 0},
+ {"id": "targ3","name": "Alaska Coast","name_pretty": "obs3", "latitude_deg": 60.0, "longitude_deg": -148,"height_m": 0},
+ {"id": "targ4","name": "Greenland","name_pretty": "obs4", "latitude_deg": 69.0, "longitude_deg": -49,"height_m": 0}
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {
+ },
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {
+ }
+ }
+
+ }
+
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_4_sats/sim_case_config.json b/inputs/cases/circinus_zhou_4_sats/sim_case_config.json
new file mode 100644
index 0000000..741217c
--- /dev/null
+++ b/inputs/cases/circinus_zhou_4_sats/sim_case_config.json
@@ -0,0 +1,254 @@
+{
+ "version": "0.0.1",
+ "which_config": "sim_case_config",
+ "where_should_i_be": "inputs/cases/",
+ "what_goes_here": "things that only matter to the simulation, or definition of time, that is relatively likely to vary per case. Things which vary for tweaks and will stay the same otherwise should go in /inputs/admin_config/sim_general_config.json instead",
+ "scenario_params": {
+ "start_utc": "2016-02-14T04:00:00.000000Z",
+ "end_utc": "2016-02-15T04:00:00.000000Z",
+ "use_crosslinks": true,
+ "all_sats_same_time_system": true,
+ "restore_pkl_name": "pickles/",
+ "sat_schedule_hotstart": true,
+ "lookup_params": {
+ "xlnk_range_rates": {
+ "_comment": "The names here should be different from built-in comm type names",
+ "Zhou_2017_xlnk": {
+ "range_units": "km",
+ "rates_units": "Mbps",
+ "interpolation_method": "floor",
+ "range_rates_table": [
+ [
+ 0,
+ 10
+ ]
+ ]
+ }
+ },
+ "dlnk_range_rates": {
+ "_comment1": "The names here should be different from built-in comm type names",
+ "_comment2": "they say this rate is a random distribution over {0,20,40} Mbps for each downlink timeslot. I'll just use the average...",
+ "Zhou_2017_dlnk": {
+ "range_units": "km",
+ "rates_units": "Mbps",
+ "interpolation_method": "floor",
+ "range_rates_table": [
+ [
+ 0,
+ 20
+ ]
+ ]
+ }
+ }
+ },
+ "sim_run_perturbations": {
+ "do_inject_obs": true,
+ "schedule_disruptions": {
+ "G1": [
+ [
+ "2016-02-14T04:00:00.000000Z",
+ "2016-02-14T16:00:00.000000Z"
+ ]
+ ]
+ },
+ "injected_observations": [
+ {
+ "indx": 0,
+ "end_utc": "2016-02-14T09:55:37.164153Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T09:54:37.164153Z"
+ },
+ {
+ "indx": 1,
+ "end_utc": "2016-02-14T12:00:35.483029Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T11:59:35.483029Z"
+ },
+ {
+ "indx": 3,
+ "end_utc": "2016-02-14T21:23:23.298319Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:22:23.298319Z"
+ },
+ {
+ "indx": 5,
+ "end_utc": "2016-02-14T18:10:00.708361Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:09:00.708361Z"
+ },
+ {
+ "indx": 6,
+ "end_utc": "2016-02-14T04:16:54.450560Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T04:15:54.450560Z"
+ },
+ {
+ "indx": 8,
+ "end_utc": "2016-02-14T15:12:02.827026Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T15:11:02.827026Z"
+ },
+ {
+ "indx": 9,
+ "end_utc": "2016-02-14T07:06:41.520948Z",
+ "sat_id": "S3",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T07:05:41.520948Z"
+ },
+ {
+ "indx": 10,
+ "end_utc": "2016-02-14T16:37:02.412884Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T16:36:02.412884Z"
+ },
+ {
+ "indx": 11,
+ "end_utc": "2016-02-14T10:06:12.829820Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:05:12.829820Z"
+ },
+ {
+ "indx": 13,
+ "end_utc": "2016-02-14T09:55:20.718866Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T09:54:20.718866Z"
+ },
+ {
+ "indx": 15,
+ "end_utc": "2016-02-14T20:39:46.383513Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T20:38:46.383513Z"
+ },
+ {
+ "indx": 17,
+ "end_utc": "2016-02-14T10:55:33.461534Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:54:33.461534Z"
+ },
+ {
+ "indx": 19,
+ "end_utc": "2016-02-14T21:55:09.278231Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:54:09.278231Z"
+ },
+ {
+ "indx": 20,
+ "end_utc": "2016-02-14T18:46:56.198028Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:45:56.198028Z"
+ },
+ {
+ "indx": 21,
+ "end_utc": "2016-02-14T19:43:40.340492Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T19:42:40.340492Z"
+ },
+ {
+ "indx": 22,
+ "end_utc": "2016-02-14T15:13:31.779520Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T15:12:31.779520Z"
+ },
+ {
+ "indx": 24,
+ "end_utc": "2016-02-14T14:12:12.058434Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T14:11:12.058434Z"
+ },
+ {
+ "indx": 25,
+ "end_utc": "2016-02-14T16:22:00.816409Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T16:21:00.816409Z"
+ },
+ {
+ "indx": 26,
+ "end_utc": "2016-02-14T10:01:28.150822Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:00:28.150822Z"
+ },
+ {
+ "indx": 27,
+ "end_utc": "2016-02-14T08:40:39.036326Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T08:39:39.036326Z"
+ },
+ {
+ "indx": 28,
+ "end_utc": "2016-02-14T05:02:51.509312Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T05:01:51.509312Z"
+ },
+ {
+ "indx": 29,
+ "end_utc": "2016-02-14T21:53:18.709511Z",
+ "sat_id": "S3",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:52:18.709511Z"
+ },
+ {
+ "indx": 31,
+ "end_utc": "2016-02-14T19:26:09.145608Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T19:25:09.145608Z"
+ },
+ {
+ "indx": 34,
+ "end_utc": "2016-02-14T14:00:13.611338Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T13:59:13.611338Z"
+ },
+ {
+ "indx": 35,
+ "end_utc": "2016-02-14T18:10:56.972232Z",
+ "sat_id": "S3",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:09:56.972232Z"
+ },
+ {
+ "indx": 36,
+ "end_utc": "2016-02-14T17:36:05.824133Z",
+ "sat_id": "S3",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T17:35:05.824133Z"
+ },
+ {
+ "indx": 37,
+ "end_utc": "2016-02-14T21:31:32.705353Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:30:32.705353Z"
+ },
+ {
+ "indx": 38,
+ "end_utc": "2016-02-14T13:34:49.865042Z",
+ "sat_id": "S3",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T13:33:49.865042Z"
+ }
+ ]
+ }
+ }
+}
diff --git a/inputs/cases/circinus_zhou_5_sats/README.md b/inputs/cases/circinus_zhou_5_sats/README.md
new file mode 100644
index 0000000..675742a
--- /dev/null
+++ b/inputs/cases/circinus_zhou_5_sats/README.md
@@ -0,0 +1,23 @@
+# Original CIRCINUS use-case
+
+## Description of Original Zhou use-case:
+Cases are fundamentally defined by a constellation and a set of ground stations, and further specified by an operational profile which defines intended up-time, as well as targets; and by a simulation scenario which defines actual up-time and performance. The SPRINT Zhou use case is one of several use cases demonstrated with the first version of CIRCINUS.
+
+## Required Case-specific Configuration Files, in this directory:
+* `constellation_config.json`
+* `ground_station_network_config.json`
+* `operational_profile_config.json`
+* `sim_case_config.json`
+
+## Further required in the model definition directory `inputs/reference_model_definitions`:
+* `/gs_refs/zhou_original_gs.json`
+* `/payload_refs/zhou_original_payload.json`
+* `/sat_refs/zhou_original_sat.json`
+
+### Hold on where do I find all these files?
+[New file structure](https://drive.google.com/open?id=1CQP91ySWmHOgwE4s4TaXmAkO5aoH4krC)
+
+
+[Original file dataflow](https://drive.google.com/open?id=18GtxUUbO9V40Ifl_81Cpz0z-GL1XkV4R)
+
+
diff --git a/inputs/cases/circinus_zhou_5_sats/constellation_config.json b/inputs/cases/circinus_zhou_5_sats/constellation_config.json
new file mode 100644
index 0000000..a7acf07
--- /dev/null
+++ b/inputs/cases/circinus_zhou_5_sats/constellation_config.json
@@ -0,0 +1,100 @@
+{
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 5,
+ "sat_id_prefix": "S",
+ "sat_ids": "duplicate,range_inclusive,0,4",
+ "sat_id_order": "default",
+ "_comments": [
+ "orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
+ ""
+ ],
+ "intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
+ "orbit_params": {
+ "sat_ids_by_orbit_name": {
+ "orbit0": [
+ "S0",
+ "S1",
+ "S2"
+ ],
+ "orbit1": [
+ "S3",
+ "S4"
+ ]
+ },
+ "sat_orbital_elems": [
+ {
+ "sat_id": "S0",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 97.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 90
+ },
+ "propagation_method": "matlab_delkep"
+ },
+ {
+ "sat_id": "S1",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 97.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 180
+ },
+ "propagation_method": "matlab_delkep"
+ },
+ {
+ "sat_id": "S2",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 97.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 270
+ },
+ "propagation_method": "matlab_delkep"
+ },
+ {
+ "sat_id": "S3",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 83.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 60
+ },
+ "propagation_method": "matlab_delkep"
+ },
+ {
+ "sat_id": "S4",
+ "def_type": "indv",
+ "kepler_meananom": {
+ "a_km": 7378,
+ "e": 0,
+ "i_deg": 83.86,
+ "RAAN_deg": 0,
+ "arg_per_deg": 0,
+ "M_deg": 120
+ },
+ "propagation_method": "matlab_delkep"
+ }
+ ]
+ }
+ },
+ "sat_ref_model_name": "zhou_original_sat"
+ }
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_5_sats/ground_station_network_config.json b/inputs/cases/circinus_zhou_5_sats/ground_station_network_config.json
new file mode 100644
index 0000000..daeb8f0
--- /dev/null
+++ b/inputs/cases/circinus_zhou_5_sats/ground_station_network_config.json
@@ -0,0 +1,43 @@
+{
+ "config_title":"ground_station_network_config",
+ "version-gsndef":"0.0.1",
+ "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/groundstation_network/README.md",
+
+ "network_definition":{
+ "version-gsn":"0.0.1",
+
+ "default_gs_ref_model_name":"zhou_original_gs",
+
+ "gs_net_params":{
+ "num_stations": 4,
+ "gs_network_name": "Zhou 2017 GS",
+ "gs_network_version": "1",
+ "elevation_cutoff_deg":10,
+ "stations": [
+ {"id": "G0", "name": "Beijing","name_pretty": "Beijing 0","latitude_deg":40.0,"longitude_deg":116.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G1", "name": "Kashi","name_pretty": "Kashi 1", "latitude_deg":39.5,"longitude_deg":76.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G2", "name": "Sanya","name_pretty": "Sanya 2", "latitude_deg":18.0,"longitude_deg":109.5,"height_m":0,"comm_type":"Zhou_2017_dlnk"},
+ {"id": "G3", "name": "Xi'an","name_pretty": "Xi'an 3", "latitude_deg":34.0,"longitude_deg":108.0,"height_m":0,"comm_type":"Zhou_2017_dlnk"}
+ ]
+ },
+ "sim_gs_network_params": {
+ "_comments":"TODO: Reevaluate the location of these params. (Should they go in the sim file? Or GP? I think they are describing the behavior of the network, so this should be ok)",
+ "time_epsilon_s": 1,
+ "gsn_ps_params" : {
+ "_comments": [
+ "replan_release_wait_time_s simulates the amount of time required to actually run the global planner in real life. from the perspective of this code the global planner will run instantaneously. After it runs, the sim will wait this amount of time before making those plans available",
+ "release_first_plans_immediately = True means that global planner will immediately make its first set of global plans available (they won't be queued up for release at a later time)",
+ "6300 seconds is 105 minutes, about the length of an orbit at 1000 km altitude",
+ "replan release time is conservative here"
+ ],
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ }
+
+
+ }
+
+
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_5_sats/operational_profile_config.json b/inputs/cases/circinus_zhou_5_sats/operational_profile_config.json
new file mode 100644
index 0000000..6d07011
--- /dev/null
+++ b/inputs/cases/circinus_zhou_5_sats/operational_profile_config.json
@@ -0,0 +1,32 @@
+{
+ "version":"0.0.1",
+ "which_config":"operational_profile_config",
+ "where_should_i_be":"inputs/cases/",
+ "what_goes_here":"things that only matter to the choices to be made about the current case.",
+
+ "ops_profile_params": {
+ "obs_params": {
+ "num_targets": 5,
+ "target_set_name": "Zhou 2017 Missions",
+ "target_set_version": "1",
+ "target_set_id": "2018-4-14",
+ "elevation_cutoff_deg":60,
+ "targets": [
+ {"id": "targ0","name": "Himalaya","name_pretty": "obs0", "latitude_deg": 28.0, "longitude_deg": 87.0,"height_m": 0},
+ {"id": "targ1","name": "Mamiraus","name_pretty": "obs1", "latitude_deg": -2.0, "longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ2","name": "Cape York","name_pretty": "obs2", "latitude_deg": -11.0, "longitude_deg": 142.5,"height_m": 0},
+ {"id": "targ3","name": "Alaska Coast","name_pretty": "obs3", "latitude_deg": 60.0, "longitude_deg": -148,"height_m": 0},
+ {"id": "targ4","name": "Greenland","name_pretty": "obs4", "latitude_deg": 69.0, "longitude_deg": -49,"height_m": 0}
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {
+ },
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {
+ }
+ }
+
+ }
+
+}
\ No newline at end of file
diff --git a/inputs/cases/circinus_zhou_5_sats/sim_case_config.json b/inputs/cases/circinus_zhou_5_sats/sim_case_config.json
new file mode 100644
index 0000000..b53838e
--- /dev/null
+++ b/inputs/cases/circinus_zhou_5_sats/sim_case_config.json
@@ -0,0 +1,254 @@
+{
+ "version": "0.0.1",
+ "which_config": "sim_case_config",
+ "where_should_i_be": "inputs/cases/",
+ "what_goes_here": "things that only matter to the simulation, or definition of time, that is relatively likely to vary per case. Things which vary for tweaks and will stay the same otherwise should go in /inputs/admin_config/sim_general_config.json instead",
+ "scenario_params": {
+ "start_utc": "2016-02-14T04:00:00.000000Z",
+ "end_utc": "2016-02-15T04:00:00.000000Z",
+ "use_crosslinks": true,
+ "all_sats_same_time_system": true,
+ "restore_pkl_name": "pickles/",
+ "sat_schedule_hotstart": true,
+ "lookup_params": {
+ "xlnk_range_rates": {
+ "_comment": "The names here should be different from built-in comm type names",
+ "Zhou_2017_xlnk": {
+ "range_units": "km",
+ "rates_units": "Mbps",
+ "interpolation_method": "floor",
+ "range_rates_table": [
+ [
+ 0,
+ 10
+ ]
+ ]
+ }
+ },
+ "dlnk_range_rates": {
+ "_comment1": "The names here should be different from built-in comm type names",
+ "_comment2": "they say this rate is a random distribution over {0,20,40} Mbps for each downlink timeslot. I'll just use the average...",
+ "Zhou_2017_dlnk": {
+ "range_units": "km",
+ "rates_units": "Mbps",
+ "interpolation_method": "floor",
+ "range_rates_table": [
+ [
+ 0,
+ 20
+ ]
+ ]
+ }
+ }
+ },
+ "sim_run_perturbations": {
+ "do_inject_obs": true,
+ "schedule_disruptions": {
+ "G1": [
+ [
+ "2016-02-14T04:00:00.000000Z",
+ "2016-02-14T16:00:00.000000Z"
+ ]
+ ]
+ },
+ "injected_observations": [
+ {
+ "indx": 0,
+ "end_utc": "2016-02-14T09:55:37.164153Z",
+ "sat_id": "S4",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T09:54:37.164153Z"
+ },
+ {
+ "indx": 1,
+ "end_utc": "2016-02-14T12:00:35.483029Z",
+ "sat_id": "S4",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T11:59:35.483029Z"
+ },
+ {
+ "indx": 3,
+ "end_utc": "2016-02-14T21:23:23.298319Z",
+ "sat_id": "S4",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:22:23.298319Z"
+ },
+ {
+ "indx": 5,
+ "end_utc": "2016-02-14T18:10:00.708361Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:09:00.708361Z"
+ },
+ {
+ "indx": 6,
+ "end_utc": "2016-02-14T04:16:54.450560Z",
+ "sat_id": "S4",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T04:15:54.450560Z"
+ },
+ {
+ "indx": 8,
+ "end_utc": "2016-02-14T15:12:02.827026Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T15:11:02.827026Z"
+ },
+ {
+ "indx": 9,
+ "end_utc": "2016-02-14T07:06:41.520948Z",
+ "sat_id": "S3",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T07:05:41.520948Z"
+ },
+ {
+ "indx": 10,
+ "end_utc": "2016-02-14T16:37:02.412884Z",
+ "sat_id": "S4",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T16:36:02.412884Z"
+ },
+ {
+ "indx": 11,
+ "end_utc": "2016-02-14T10:06:12.829820Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:05:12.829820Z"
+ },
+ {
+ "indx": 13,
+ "end_utc": "2016-02-14T09:55:20.718866Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T09:54:20.718866Z"
+ },
+ {
+ "indx": 15,
+ "end_utc": "2016-02-14T20:39:46.383513Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T20:38:46.383513Z"
+ },
+ {
+ "indx": 17,
+ "end_utc": "2016-02-14T10:55:33.461534Z",
+ "sat_id": "S4",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:54:33.461534Z"
+ },
+ {
+ "indx": 19,
+ "end_utc": "2016-02-14T21:55:09.278231Z",
+ "sat_id": "S1",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:54:09.278231Z"
+ },
+ {
+ "indx": 20,
+ "end_utc": "2016-02-14T18:46:56.198028Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:45:56.198028Z"
+ },
+ {
+ "indx": 21,
+ "end_utc": "2016-02-14T19:43:40.340492Z",
+ "sat_id": "S4",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T19:42:40.340492Z"
+ },
+ {
+ "indx": 22,
+ "end_utc": "2016-02-14T15:13:31.779520Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T15:12:31.779520Z"
+ },
+ {
+ "indx": 24,
+ "end_utc": "2016-02-14T14:12:12.058434Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T14:11:12.058434Z"
+ },
+ {
+ "indx": 25,
+ "end_utc": "2016-02-14T16:22:00.816409Z",
+ "sat_id": "S4",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T16:21:00.816409Z"
+ },
+ {
+ "indx": 26,
+ "end_utc": "2016-02-14T10:01:28.150822Z",
+ "sat_id": "S2",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T10:00:28.150822Z"
+ },
+ {
+ "indx": 27,
+ "end_utc": "2016-02-14T08:40:39.036326Z",
+ "sat_id": "S4",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T08:39:39.036326Z"
+ },
+ {
+ "indx": 28,
+ "end_utc": "2016-02-14T05:02:51.509312Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T05:01:51.509312Z"
+ },
+ {
+ "indx": 29,
+ "end_utc": "2016-02-14T21:53:18.709511Z",
+ "sat_id": "S3",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:52:18.709511Z"
+ },
+ {
+ "indx": 31,
+ "end_utc": "2016-02-14T19:26:09.145608Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T19:25:09.145608Z"
+ },
+ {
+ "indx": 34,
+ "end_utc": "2016-02-14T14:00:13.611338Z",
+ "sat_id": "S0",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T13:59:13.611338Z"
+ },
+ {
+ "indx": 35,
+ "end_utc": "2016-02-14T18:10:56.972232Z",
+ "sat_id": "S3",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T18:09:56.972232Z"
+ },
+ {
+ "indx": 36,
+ "end_utc": "2016-02-14T17:36:05.824133Z",
+ "sat_id": "S3",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T17:35:05.824133Z"
+ },
+ {
+ "indx": 37,
+ "end_utc": "2016-02-14T21:31:32.705353Z",
+ "sat_id": "S4",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T21:30:32.705353Z"
+ },
+ {
+ "indx": 38,
+ "end_utc": "2016-02-14T13:34:49.865042Z",
+ "sat_id": "S3",
+ "type": "hardcoded",
+ "start_utc": "2016-02-14T13:33:49.865042Z"
+ }
+ ]
+ }
+ }
+}
diff --git a/inputs/cases/orig_circinus_zhou/autogen_files/README.md b/inputs/cases/orig_circinus_zhou/autogen_files/README.md
deleted file mode 100644
index 652def3..0000000
--- a/inputs/cases/orig_circinus_zhou/autogen_files/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Autogenerated (Intermediate) files
-
-These are generated automatically by circinus modules and used as input for further modules. Some are very large, and only minimally human readable. This folder is empty other than this readme until the pipeline is run.
-
-## Expected files to be generated:
-* data_rates_output.json : [ runner_orbitlink.py -> runner_const_sim.py ]
-* orbit_prop_data.json : [ runner_orbitprop.py -> runner_orbitviz.py ]
-* sat_link_history.json : [ runner_orbitlink.py -> runner_orbitviz.py ]
-* const_sim_outputs.json : [ runner_const_sim.py -> ]
-
-[Original file dataflow](https://drive.google.com/open?id=18GtxUUbO9V40Ifl_81Cpz0z-GL1XkV4R)
\ No newline at end of file
diff --git a/inputs/cases/orig_circinus_zhou/constellation_config.json b/inputs/cases/orig_circinus_zhou/constellation_config.json
index eac692a..2843a81 100644
--- a/inputs/cases/orig_circinus_zhou/constellation_config.json
+++ b/inputs/cases/orig_circinus_zhou/constellation_config.json
@@ -1,36 +1,38 @@
{
- "config_type":"constellation_config",
- "version-const_def":"0.0.1",
- "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
-
- "constellation_definition":{
- "version-const":"0.0.1",
-
- "default_sat_ref_model_name":"zhou_original_sat",
-
- "constellation_params":{
- "num_satellites": 6,
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 6,
"sat_id_prefix": "S",
"sat_ids": "duplicate,range_inclusive,0,5",
"sat_id_order": "default",
-
- "_comments" : [
+ "_comments": [
"orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
""
],
"intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
-
- "orbit_params" : {
+ "orbit_params": {
"sat_ids_by_orbit_name": {
- "orbit0": ["S0","S1","S2"],
- "orbit1": ["S3","S4","S5"]
+ "orbit0": [
+ "S0",
+ "S1",
+ "S2"
+ ],
+ "orbit1": [
+ "S3",
+ "S4",
+ "S5"
+ ]
},
"sat_orbital_elems": [
{
"sat_id": "S0",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 97.86,
"RAAN_deg": 0,
@@ -41,9 +43,9 @@
},
{
"sat_id": "S1",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 97.86,
"RAAN_deg": 0,
@@ -54,9 +56,9 @@
},
{
"sat_id": "S2",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 97.86,
"RAAN_deg": 0,
@@ -67,9 +69,9 @@
},
{
"sat_id": "S3",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 83.86,
"RAAN_deg": 0,
@@ -80,9 +82,9 @@
},
{
"sat_id": "S4",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 83.86,
"RAAN_deg": 0,
@@ -93,9 +95,9 @@
},
{
"sat_id": "S5",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 7378,
+ "a_km": 7378,
"e": 0,
"i_deg": 83.86,
"RAAN_deg": 0,
@@ -106,9 +108,7 @@
}
]
}
- }
-
+ },
+ "sat_ref_model_name": "zhou_original_sat"
}
-
-
}
\ No newline at end of file
diff --git a/inputs/cases/orig_circinus_zhou/sim_case_config.json b/inputs/cases/orig_circinus_zhou/sim_case_config.json
index 0515016..02f8959 100644
--- a/inputs/cases/orig_circinus_zhou/sim_case_config.json
+++ b/inputs/cases/orig_circinus_zhou/sim_case_config.json
@@ -46,8 +46,8 @@
             "schedule_disruptions": {
                 "G1": [
                     [
                         "2016-02-14T04:00:00.000000Z",
                         "2016-02-14T16:00:00.000000Z"
                     ]
                 ]
             },
@@ -251,4 +250,4 @@
]
}
}
-}
\ No newline at end of file
+}
diff --git a/inputs/cases/sprint_lp_demo/constellation_config.json b/inputs/cases/sprint_lp_demo/constellation_config.json
index 346fc8d..6a7660f 100644
--- a/inputs/cases/sprint_lp_demo/constellation_config.json
+++ b/inputs/cases/sprint_lp_demo/constellation_config.json
@@ -1,35 +1,33 @@
{
- "config_type":"constellation_config",
- "version-const_def":"0.0.1",
- "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
-
- "constellation_definition":{
- "version-const":"0.0.1",
-
- "default_sat_ref_model_name":"zhou_original_sat",
-
- "constellation_params":{
- "num_satellites": 3,
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 3,
"sat_id_prefix": "S",
"sat_ids": "duplicate,range_inclusive,0,2",
"sat_id_order": "default",
-
- "_comments" : [
+ "_comments": [
"orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
""
],
"intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
-
- "orbit_params" : {
+ "orbit_params": {
"sat_ids_by_orbit_name": {
- "orbit0": ["S0","S1","S2"]
+ "orbit0": [
+ "S0",
+ "S1",
+ "S2"
+ ]
},
"sat_orbital_elems": [
{
"sat_id": "S0",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 6777,
+ "a_km": 6777,
"e": 0,
"i_deg": 51.6,
"RAAN_deg": 0,
@@ -40,9 +38,9 @@
},
{
"sat_id": "S1",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 6777,
+ "a_km": 6777,
"e": 0,
"i_deg": 51.6,
"RAAN_deg": 0,
@@ -53,9 +51,9 @@
},
{
"sat_id": "S2",
- "def_type":"indv",
+ "def_type": "indv",
"kepler_meananom": {
- "a_km": 6777,
+ "a_km": 6777,
"e": 0,
"i_deg": 51.6,
"RAAN_deg": 0,
@@ -66,9 +64,7 @@
}
]
}
- }
-
+ },
+ "sat_ref_model_name": "zhou_original_sat"
}
-
-
}
\ No newline at end of file
diff --git a/inputs/cases/walker15_inc30/constellation_config.json b/inputs/cases/walker15_inc30/constellation_config.json
index 99b5022..f1637f2 100644
--- a/inputs/cases/walker15_inc30/constellation_config.json
+++ b/inputs/cases/walker15_inc30/constellation_config.json
@@ -1,30 +1,42 @@
{
- "config_type":"constellation_config",
- "version-const_def":"0.0.1",
- "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
-
- "constellation_definition":{
- "version-const":"0.0.1",
-
- "default_sat_ref_model_name":"zhou_original_sat",
-
- "constellation_params":{
- "num_satellites": 15,
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 15,
"sat_id_prefix": "S",
"sat_ids": "duplicate,range_inclusive,0,14",
"sat_id_order": "default",
-
- "_comments" : [
+ "_comments": [
"orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
""
],
"intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
-
- "orbit_params" : {
+ "orbit_params": {
"sat_ids_by_orbit_name": {
- "orbit0": ["S0","S1","S2","S3","S4"],
- "orbit1": ["S5","S6","S7","S8","S9"],
- "orbit2": ["S10","S11","S12","S13","S14"]
+ "orbit0": [
+ "S0",
+ "S1",
+ "S2",
+ "S3",
+ "S4"
+ ],
+ "orbit1": [
+ "S5",
+ "S6",
+ "S7",
+ "S8",
+ "S9"
+ ],
+ "orbit2": [
+ "S10",
+ "S11",
+ "S12",
+ "S13",
+ "S14"
+ ]
},
"sat_orbital_elems": [
{
@@ -44,9 +56,7 @@
}
]
}
- }
-
+ },
+ "sat_ref_model_name": "zhou_original_sat"
}
-
-
}
\ No newline at end of file
diff --git a/inputs/cases/walker18_inc30_simple/constellation_config.json b/inputs/cases/walker18_inc30_simple/constellation_config.json
index fdfb9e8..18d7ae2 100644
--- a/inputs/cases/walker18_inc30_simple/constellation_config.json
+++ b/inputs/cases/walker18_inc30_simple/constellation_config.json
@@ -1,30 +1,45 @@
{
- "config_type":"constellation_config",
- "version-const_def":"0.0.1",
- "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
-
- "constellation_definition":{
- "version-const":"0.0.1",
-
- "default_sat_ref_model_name":"zhou_original_sat",
-
- "constellation_params":{
- "num_satellites": 18,
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 18,
"sat_id_prefix": "S",
"sat_ids": "duplicate,range_inclusive,0,18",
"sat_id_order": "default",
-
- "_comments" : [
+ "_comments": [
"orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
""
],
"intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
-
- "orbit_params" : {
+ "orbit_params": {
"sat_ids_by_orbit_name": {
- "orbit0": ["S0","S1","S2","S3","S4","S5"],
- "orbit1": ["S6","S7","S8","S9","S10","S11"],
- "orbit2": ["S12","S13","S14","S15","S16","S17"]
+ "orbit0": [
+ "S0",
+ "S1",
+ "S2",
+ "S3",
+ "S4",
+ "S5"
+ ],
+ "orbit1": [
+ "S6",
+ "S7",
+ "S8",
+ "S9",
+ "S10",
+ "S11"
+ ],
+ "orbit2": [
+ "S12",
+ "S13",
+ "S14",
+ "S15",
+ "S16",
+ "S17"
+ ]
},
"sat_orbital_elems": [
{
@@ -44,9 +59,7 @@
}
]
}
- }
-
+ },
+ "sat_ref_model_name": "zhou_original_sat"
}
-
-
}
\ No newline at end of file
diff --git a/inputs/cases/walker30_inc30/constellation_config.json b/inputs/cases/walker30_inc30/constellation_config.json
index 993fb2e..e3ed004 100644
--- a/inputs/cases/walker30_inc30/constellation_config.json
+++ b/inputs/cases/walker30_inc30/constellation_config.json
@@ -1,30 +1,57 @@
{
- "config_type":"constellation_config",
- "version-const_def":"0.0.1",
- "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
-
- "constellation_definition":{
- "version-const":"0.0.1",
-
- "default_sat_ref_model_name":"zhou_original_sat",
-
- "constellation_params":{
- "num_satellites": 30,
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 30,
"sat_id_prefix": "S",
"sat_ids": "duplicate,range_inclusive,0,29",
"sat_id_order": "default",
-
- "_comments" : [
+ "_comments": [
"orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
""
],
"intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
-
- "orbit_params" : {
+ "orbit_params": {
"sat_ids_by_orbit_name": {
- "orbit0": ["S0","S1","S2","S3","S4","S5","S6","S7","S8","S9"],
- "orbit1": ["S10","S11","S12","S13","S14","S15","S16","S17","S18","S19"],
- "orbit2": ["S20","S21","S22","S23","S24","S25","S26","S27","S28","S29"]
+ "orbit0": [
+ "S0",
+ "S1",
+ "S2",
+ "S3",
+ "S4",
+ "S5",
+ "S6",
+ "S7",
+ "S8",
+ "S9"
+ ],
+ "orbit1": [
+ "S10",
+ "S11",
+ "S12",
+ "S13",
+ "S14",
+ "S15",
+ "S16",
+ "S17",
+ "S18",
+ "S19"
+ ],
+ "orbit2": [
+ "S20",
+ "S21",
+ "S22",
+ "S23",
+ "S24",
+ "S25",
+ "S26",
+ "S27",
+ "S28",
+ "S29"
+ ]
},
"sat_orbital_elems": [
{
@@ -44,9 +71,7 @@
}
]
}
- }
-
+ },
+ "sat_ref_model_name": "zhou_original_sat"
}
-
-
}
\ No newline at end of file
diff --git a/inputs/cases/walker30_inc30_NEN/constellation_config.json b/inputs/cases/walker30_inc30_NEN/constellation_config.json
index 993fb2e..e3ed004 100644
--- a/inputs/cases/walker30_inc30_NEN/constellation_config.json
+++ b/inputs/cases/walker30_inc30_NEN/constellation_config.json
@@ -1,30 +1,57 @@
{
- "config_type":"constellation_config",
- "version-const_def":"0.0.1",
- "verbose_details":"https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
-
- "constellation_definition":{
- "version-const":"0.0.1",
-
- "default_sat_ref_model_name":"zhou_original_sat",
-
- "constellation_params":{
- "num_satellites": 30,
+ "config_type": "constellation_config",
+ "version-const_def": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/constellation/README.md",
+ "constellation_definition": {
+ "version-const": "0.0.1",
+ "constellation_params": {
+ "num_satellites": 30,
"sat_id_prefix": "S",
"sat_ids": "duplicate,range_inclusive,0,29",
"sat_id_order": "default",
-
- "_comments" : [
+ "_comments": [
"orbit_neighbor_direction_method specifies how to figure out which direction an orbit neighbor is in, for determining activity transition times. by_increasing_sat_index means that if the receive sat index is larger than tx sat index (i.e. further in sat_id_order list) then the crosslink is in an INCREASING direction",
""
],
"intra-orbit_neighbor_direction_method": "by_increasing_sat_index",
-
- "orbit_params" : {
+ "orbit_params": {
"sat_ids_by_orbit_name": {
- "orbit0": ["S0","S1","S2","S3","S4","S5","S6","S7","S8","S9"],
- "orbit1": ["S10","S11","S12","S13","S14","S15","S16","S17","S18","S19"],
- "orbit2": ["S20","S21","S22","S23","S24","S25","S26","S27","S28","S29"]
+ "orbit0": [
+ "S0",
+ "S1",
+ "S2",
+ "S3",
+ "S4",
+ "S5",
+ "S6",
+ "S7",
+ "S8",
+ "S9"
+ ],
+ "orbit1": [
+ "S10",
+ "S11",
+ "S12",
+ "S13",
+ "S14",
+ "S15",
+ "S16",
+ "S17",
+ "S18",
+ "S19"
+ ],
+ "orbit2": [
+ "S20",
+ "S21",
+ "S22",
+ "S23",
+ "S24",
+ "S25",
+ "S26",
+ "S27",
+ "S28",
+ "S29"
+ ]
},
"sat_orbital_elems": [
{
@@ -44,9 +71,7 @@
}
]
}
- }
-
+ },
+ "sat_ref_model_name": "zhou_original_sat"
}
-
-
}
\ No newline at end of file
diff --git a/inputs/failure_analysis.py b/inputs/failure_analysis.py
new file mode 100644
index 0000000..900da03
--- /dev/null
+++ b/inputs/failure_analysis.py
@@ -0,0 +1,71 @@
+# this file is for generating plots / outputs from
+# the json files in this folder
+import json
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+
+SRP_settings = [True, False]
+
+## WALKER ##
+n_targs = 15
+total_targs = 100
+num_sats = 15
+#targ_subsets = [list(range(40,60)),list(range(0,total_targs,int(total_targs/n_targs)))] # first is all equatorial set, 2nd is spread out at all latitudes
+targ_subsets = [list(range(40,60))] # for 30 walker
+#GS_subsets = [[3,5,6,14],[13,14,15,3,4,5,6,8,12]]
+GS_subsets = [[3,5,6,14]] # for 30 walker
+############
+
+## Zhou ##
+n_targs = 5
+num_sats = 6
+n_gs = 4
+#targ_subsets = [list(range(40,60)),list(range(0,total_targs,int(total_targs/n_targs)))] # first is all equatorial set, 2nd is spread out at all latitudes
+targ_subsets = [list(range(n_targs))]
+#GS_subsets = [[3,5,6,14],[13,14,15,3,4,5,6,8,12]]
+GS_subsets = [list(range(n_gs))]
+###########
+
+data_folder = r".\multirun_tests"
+
+# grab all data
+all_data = {}
+# SELECT USE CASE to plot
+g_select = 0
+t_select = 0
+# LOAD DISRUPTED DATA SET
+GS_subset = GS_subsets[g_select]
+t_subset = targ_subsets[t_select]
+
+setting_name = 'setGS_%d_setT_%d' % (g_select,t_select)
+for SRP_setting in SRP_settings:
+
+ GS_disruptions = [
+ None,
+ "G%d" % GS_subset[1]
+ ]
+ for GS_disruption in GS_disruptions:
+ scenario_name = 'WALKER_%d_SRP_Test_SRP_%s_GS_%s_%s' % (num_sats,SRP_setting, GS_disruption,setting_name) if GS_disruption else 'WALKER_%d_Nominal_%s' % (num_sats,setting_name)
+
+ full_filename = data_folder + "\\" + scenario_name + ".json"
+ with open(full_filename, "r") as jsonFile:
+ all_data[scenario_name] = json.load(jsonFile)
+ # modify names with white spaces and remove distinction between exec and non-exec
+ all_data[scenario_name]['Num_Failures_by_Type'] = {**all_data[scenario_name]['Num_Failures_by_Type']['exec'], **all_data[scenario_name]['Num_Failures_by_Type']['non-exec']}
+ #del all_data[scenario_name]['Num Failures by Type']
+ #all_data[scenario_name]['Percentage_of_Exec_Act_Failure_by_Act'] = all_data[scenario_name]['Percentage of Exec Act Failures by Act']
+ #del all_data[scenario_name]['Percentage of Exec Act Failures by Act']
+
+
+# let's put the data in a more pandas friendly format
+
+for s_ind,scenario_name in enumerate(all_data.keys()):
+ dataset = []
+ for f_ind,failure_name in enumerate(all_data[scenario_name]['Num_Failures_by_Type'].keys()):
+ dataset.append([])
+ for act_type_failure_count in all_data[scenario_name]['Num_Failures_by_Type'][failure_name].values():
+ dataset[f_ind].append(act_type_failure_count)
+
+ df = pd.DataFrame(dataset,index=all_data[scenario_name]['Num_Failures_by_Type'].keys(),columns=all_data[scenario_name]['Num_Failures_by_Type'][failure_name].keys())
+    df.to_csv(data_folder + "\\" + scenario_name + '.txt')
diff --git a/inputs/general_config/gp_general_params_inputs.json b/inputs/general_config/gp_general_params_inputs.json
index e971bb4..91b043a 100644
--- a/inputs/general_config/gp_general_params_inputs.json
+++ b/inputs/general_config/gp_general_params_inputs.json
@@ -60,7 +60,7 @@
"solver_name" : "gurobi",
"solver_params": {
"run_remotely" : false,
- "max_runtime_s" : 1000,
+ "max_runtime_s" : 100000,
"optimality_gap" : 1e-2,
"integer_feasibility_tolerance" : 1e-5,
"gurobi": {
diff --git a/inputs/general_config/lp_general_params_inputs.json b/inputs/general_config/lp_general_params_inputs.json
index ba186a8..677964f 100644
--- a/inputs/general_config/lp_general_params_inputs.json
+++ b/inputs/general_config/lp_general_params_inputs.json
@@ -19,15 +19,15 @@
"min_latency_for_sf_1_mins: this is the mimimum latency requirement for the highest latency score factor, 1.0. If multiple routes/dlnks for a single obs have latency less than this, they will both have sf 1.0"
],
"verbose_milp": false,
- "use_self_replanner": false,
- "run_lp_milp_after_SRP": false,
+ "use_self_replanner": true,
+ "run_lp_milp_after_SRP": true,
"dv_epsilon_Mb": 0.1,
"inflow_dv_minimum_Mb": 5,
"existing_utilization_epsilon": 0.001,
- "solver_name": "gurobi",
+ "solver_name": "cbc",
"solver_params": {
"run_remotely": false,
- "max_runtime_s": 1000,
+ "max_runtime_s": 10000,
"optimality_gap": 0.001,
"integer_feasibility_tolerance": 1e-05,
"gurobi": {
@@ -41,6 +41,16 @@
"_helpful_links": [
"https://www.ibm.com/support/knowledgecenter/SSSA5P_12.8.0/ilog.odms.cplex.help/CPLEX/Parameters/topics/introListAlpha.html"
]
+ },
+ "cbc":{
+ "_helpful_links":[
+ "https://projects.coin-or.org/Cbc"
+ ]
+ },
+ "glpk":{
+ "_helpful_links":[
+ "https://www.gnu.org/software/glpk/"
+ ]
}
},
"obj_weights": {
@@ -58,4 +68,4 @@
"obs": "center"
}
}
-}
\ No newline at end of file
+}
diff --git a/inputs/general_config/sim_general_config.json b/inputs/general_config/sim_general_config.json
index 48296c0..c4ce0dd 100644
--- a/inputs/general_config/sim_general_config.json
+++ b/inputs/general_config/sim_general_config.json
@@ -10,10 +10,13 @@
"include_ecef_output": false,
"gs_time_epsilon_s": 1,
"use_standalone_gp" : false,
+ "rem_gp_server_address" : "localhost",
+ "ground_server_address": "localhost",
+    "_comment": ["Put the IP address of the ground station machine in ground_server_address"],
"sim_run_params": {
"restore_from_checkpoint": false,
- "pickle_checkpoints": true,
+ "pickle_checkpoints": false,
"checkpoint_spacing_s": 900,
"_comments": [
"the sim tick [now under timestep_s] should be larger than the minimum activity window length allowed by the GP for scheduling"
@@ -24,7 +27,7 @@
"dlnk_max_len_s": 1200,
"dlnk_decimation": 2,
"xlnk_decimation": 5,
- "assume_max_datarate": true,
+ "assume_max_datarate": false,
"include_xlnk_range_in_output": false,
"include_dlnk_aer_in_output": false,
"matlab_verbose_links": true
@@ -91,4 +94,4 @@
"aoi_units": "hours"
}
}
-}
\ No newline at end of file
+}
diff --git a/inputs/multi_True_SRP_and_True_tx_status.json b/inputs/multi_True_SRP_and_True_tx_status.json
new file mode 100644
index 0000000..0d7003d
--- /dev/null
+++ b/inputs/multi_True_SRP_and_True_tx_status.json
@@ -0,0 +1,363 @@
+{
+ "Num_Failures_by_Type": {
+ "exec": {
+ "Not in plan at execution time for transmitter": {
+ "xlnk": 0,
+ "dlnk": 0,
+ "obs": 0
+ },
+ "Not in plan at execution time for receiver": {
+ "xlnk": 0,
+ "dlnk": 0,
+ "obs": 0
+ },
+ "Schedule disruption at receiving end": {
+ "xlnk": 0,
+ "dlnk": 1,
+ "obs": 0
+ },
+ "Schedule disruption at transmission end": {
+ "xlnk": 0,
+ "dlnk": 0,
+ "obs": 0
+ },
+ "Actual Data state does not support activity": {
+ "xlnk": 0,
+ "dlnk": 0,
+ "obs": 2
+ },
+ "Actual Energy state does not support activity": {
+ "xlnk": 0,
+ "dlnk": 0,
+ "obs": 0
+ },
+ "Invalid geometry at transmission time": {
+ "xlnk": 0,
+ "dlnk": 0,
+ "obs": 0
+ },
+ "No tx data containers associated with route": {
+ "xlnk": 0,
+ "dlnk": 1,
+ "obs": 0
+ },
+ "Unknown": {
+ "xlnk": 0,
+ "dlnk": 0,
+ "obs": 0
+ }
+ },
+ "non-exec": {
+ "Planning info received after action end time": {
+ "xlnk": 0,
+ "dlnk": 0,
+ "obs": 3
+ },
+ "Action relies on another action that occurs in the past": {
+ "xlnk": 0,
+ "dlnk": 0,
+ "obs": 0
+ }
+ }
+ },
+ "Percentage_of_Exec_Act_Failure_by_Act": {
+ "xlnk": 0,
+ "dlnk": 66.66666666666667,
+ "obs": 33.333333333333336
+ },
+ "dv_stats": {
+ "average_obvs_throughput": 0.34999999999992,
+ "num_obs_poss": 5,
+ "num_obs_poss_nonzero_dv": 5,
+ "num_obs_exec": 2,
+ "total_poss_dv": 12100.000000000142,
+ "total_exec_dv": 9550.0,
+ "ave_obs_dv_poss": 2420.000000000028,
+ "ave_obs_dv_exec": 4775.0,
+ "std_obs_dv_poss": 2473.3782565559413,
+ "std_obs_dv_exec": 1025.000000002,
+ "min_obs_dv_poss": 99.99999999997,
+ "min_obs_dv_exec": 3749.999999998,
+ "max_obs_dv_poss": 5800.000000002,
+ "max_obs_dv_exec": 5800.000000002,
+ "exec_over_poss": 0.7892561983470981
+ },
+ "d_rsrc_stats": {
+ "min_ave_d_margin": 5140.03125,
+ "max_ave_d_margin": 5140.03125,
+ "min_min_d_margin": 0.0,
+ "min_ave_d_margin_prcnt": 42.83359375,
+ "prcntl25_ave_d_margin_prcnt": 42.83359375,
+ "median_ave_d_margin_prcnt": 42.83359375,
+ "prcntl75_ave_d_margin_prcnt": 42.83359375,
+ "max_ave_d_margin_prcnt": 42.83359375,
+ "min_min_d_margin_prcnt": 0.0
+ },
+ "e_rsrc_stats": {
+ "min_ave_e_margin": 9.86874035493823,
+ "max_ave_e_margin": 9.86874035493823,
+ "std_ave_e_margin": 0.0,
+ "min_min_e_margin": 3.30444444444437,
+ "min_ave_e_margin_prcnt": 88.82754594903896,
+ "prcntl25_ave_e_margin_prcnt": 88.82754594903896,
+ "median_ave_e_margin_prcnt": 88.82754594903896,
+ "prcntl75_ave_e_margin_prcnt": 88.82754594903896,
+ "max_ave_e_margin_prcnt": 88.82754594903896,
+ "min_min_e_margin_prcnt": 29.742974297429072
+ },
+ "lat_stats": {
+ "min_obs_initial_lat_poss": 15.741666666666667,
+ "prcntl25_obs_initial_lat_poss": 171.82500000000002,
+ "median_obs_initial_lat_poss": 327.90833333333336,
+ "prcntl75_obs_initial_lat_poss": 483.9916666666667,
+ "max_obs_initial_lat_poss": 640.075,
+ "min_obs_initial_lat_exec": 640.075,
+ "prcntl25_obs_initial_lat_exec": 675.1583333333333,
+ "median_obs_initial_lat_exec": 710.2416666666667,
+ "prcntl75_obs_initial_lat_exec": 745.325,
+ "max_obs_initial_lat_exec": 780.4083333333333,
+ "ave_obs_final_lat_exec": 710.2416666666667,
+ "min_obs_final_lat_exec": 640.075,
+ "max_obs_final_lat_exec": 780.4083333333333
+ },
+ "obs_aoi_stats_w_routing": {
+ "min_av_aoi_poss": 6.16443350935571,
+ "prcntl25_av_aoi_poss": 6.614559035815329,
+ "median_av_aoi_poss": 7.064684562274948,
+ "prcntl75_av_aoi_poss": 7.726548079427083,
+ "max_av_aoi_poss": 8.388411596579218,
+ "min_av_aoi_exec": 10.54765787358539,
+ "prcntl25_av_aoi_exec": 10.78455076115612,
+ "median_av_aoi_exec": 11.021443648726851,
+ "prcntl75_av_aoi_exec": 11.25833653629758,
+ "max_av_aoi_exec": 11.495229423868311,
+ "poss_targIDs_found": [
+ "targ3",
+ "targ2",
+ "targ4"
+ ],
+ "exec_targIDs_found": [
+ "targ2",
+ "targ4"
+ ],
+ "av_aoi_by_targID_poss": {
+ "targ2": 6.16443350935571,
+ "targ4": 7.064684562274948,
+ "targ3": 8.388411596579218
+ },
+ "av_aoi_by_targID_exec": {
+ "targ4": 10.54765787358539,
+ "targ2": 11.495229423868311
+ },
+ "aoi_curves_by_targID_poss": {
+ "targ2": {
+ "x": [
+ 0.0,
+ 1.5206944444444443,
+ 1.5206944444444443,
+ 14.265138888888888,
+ 14.265138888888888,
+ 24.0,
+ 24.0
+ ],
+ "y": [
+ 0.0,
+ 1.5206944444444443,
+ 0.27625,
+ 13.020694444444445,
+ 1.5075,
+ 11.24236111111111,
+ 0.0
+ ]
+ },
+ "targ4": {
+ "x": [
+ 0.0,
+ 14.265138888888888,
+ 14.265138888888888,
+ 24.0,
+ 24.0
+ ],
+ "y": [
+ 0.0,
+ 14.265138888888888,
+ 2.097777777777778,
+ 11.832638888888889,
+ 0.0
+ ]
+ },
+ "targ3": {
+ "x": [
+ 0.0,
+ 14.265138888888888,
+ 14.265138888888888,
+ 24.0,
+ 24.0
+ ],
+ "y": [
+ 0.0,
+ 14.265138888888888,
+ 5.36125,
+ 15.096111111111112,
+ 0.0
+ ]
+ }
+ },
+ "aoi_curves_by_targID_exec": {
+ "targ4": {
+ "x": [
+ 0.0,
+ 14.265138888888888,
+ 14.265138888888888,
+ 24.0,
+ 24.0
+ ],
+ "y": [
+ 0.0,
+ 14.265138888888888,
+ 10.684583333333334,
+ 20.419444444444444,
+ 0.0
+ ]
+ },
+ "targ2": {
+ "x": [
+ 0.0,
+ 14.265138888888888,
+ 14.265138888888888,
+ 24.0,
+ 24.0
+ ],
+ "y": [
+ 0.0,
+ 14.265138888888888,
+ 13.020694444444445,
+ 22.755555555555556,
+ 0.0
+ ]
+ }
+ }
+ },
+ "obs_aoi_stats_at_collection": {
+ "min_av_aoi_poss": 4.720103141878858,
+ "prcntl25_av_aoi_poss": 5.0735200255594135,
+ "median_av_aoi_poss": 5.426936909239969,
+ "prcntl75_av_aoi_poss": 5.91317478821052,
+ "max_av_aoi_poss": 6.39941266718107,
+ "min_av_aoi_exec": 8.953626864711934,
+ "prcntl25_av_aoi_exec": 9.420240724665637,
+ "median_av_aoi_exec": 9.886854584619343,
+ "prcntl75_av_aoi_exec": 10.353468444573046,
+ "max_av_aoi_exec": 10.82008230452675,
+ "poss_targIDs_found": [
+ "targ3",
+ "targ2",
+ "targ4"
+ ],
+ "exec_targIDs_found": [
+ "targ2",
+ "targ4"
+ ],
+ "av_aoi_by_targID_poss": {
+ "targ2": 5.426936909239969,
+ "targ4": 4.720103141878858,
+ "targ3": 6.39941266718107
+ },
+ "av_aoi_by_targID_exec": {
+ "targ4": 8.953626864711934,
+ "targ2": 10.82008230452675
+ },
+ "aoi_curves_by_targID_poss": {
+ "targ2": {
+ "x": [
+ 0.0,
+ 1.2444444444444445,
+ 1.2444444444444445,
+ 12.75763888888889,
+ 12.75763888888889,
+ 24.0,
+ 24.0
+ ],
+ "y": [
+ 0.0,
+ 1.2444444444444445,
+ 0.0,
+ 11.513194444444444,
+ 0.0,
+ 11.24236111111111,
+ 0.0
+ ]
+ },
+ "targ4": {
+ "x": [
+ 0.0,
+ 3.5805555555555557,
+ 3.5805555555555557,
+ 12.167361111111111,
+ 12.167361111111111,
+ 24.0,
+ 24.0
+ ],
+ "y": [
+ 0.0,
+ 3.5805555555555557,
+ 0.0,
+ 8.586805555555555,
+ 0.0,
+ 11.832638888888889,
+ 0.0
+ ]
+ },
+ "targ3": {
+ "x": [
+ 0.0,
+ 8.903888888888888,
+ 8.903888888888888,
+ 24.0,
+ 24.0
+ ],
+ "y": [
+ 0.0,
+ 8.903888888888888,
+ 0.0,
+ 15.096111111111112,
+ 0.0
+ ]
+ }
+ },
+ "aoi_curves_by_targID_exec": {
+ "targ4": {
+ "x": [
+ 0.0,
+ 3.5805555555555557,
+ 3.5805555555555557,
+ 24.0,
+ 24.0
+ ],
+ "y": [
+ 0.0,
+ 3.5805555555555557,
+ 0.0,
+ 20.419444444444444,
+ 0.0
+ ]
+ },
+ "targ2": {
+ "x": [
+ 0.0,
+ 1.2444444444444445,
+ 1.2444444444444445,
+ 24.0,
+ 24.0
+ ],
+ "y": [
+ 0.0,
+ 1.2444444444444445,
+ 0.0,
+ 22.755555555555556,
+ 0.0
+ ]
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/inputs/reference_model_definitions/gs_refs/GEO_Cape.json b/inputs/reference_model_definitions/gs_refs/GEO_Cape.json
new file mode 100644
index 0000000..cf842b7
--- /dev/null
+++ b/inputs/reference_model_definitions/gs_refs/GEO_Cape.json
@@ -0,0 +1,46 @@
+{
+ "version":"0.0.1",
+ "which_config":"operational_profile_config",
+ "where_should_i_be":"inputs/reference_model_definitions/gs_refs/",
+ "what_goes_here":"things that only matter to the choices to be made about the current case.",
+
+ "ops_profile_params": {
+ "obs_params": {
+ "num_targets": 19,
+ "target_set_name": "GEO-CAPE Scenes",
+ "target_set_version": "1",
+ "target_set_id": "2019-2-20",
+ "elevation_cutoff_deg":60,
+ "_comment":"Scene numbers listed correspond to GEO-CAPE FR scenes. Lat/long correspond to the center of each scene.",
+ "targets": [
+ {"id": "targ0", "name": "scene1", "name_pretty": "obs0", "latitude_deg": 47.9395, "longitude_deg": -126.8506, "height_m": 0},
+ {"id": "targ1", "name": "scene2", "name_pretty": "obs1", "latitude_deg": 43.4326, "longitude_deg": -127.9426, "height_m": 0},
+ {"id": "targ2", "name": "scene3", "name_pretty": "obs2", "latitude_deg": 36.8907, "longitude_deg": -121.5899, "height_m": 0},
+ {"id": "targ3", "name": "scene4", "name_pretty": "obs3", "latitude_deg": 31.8694, "longitude_deg": -119.7753, "height_m": 0},
+ {"id": "targ4", "name": "scene5", "name_pretty": "obs4", "latitude_deg": 26.9929, "longitude_deg": -95.3666, "height_m": 0},
+ {"id": "targ5", "name": "scene6", "name_pretty": "obs5", "latitude_deg": 28.3815, "longitude_deg": -89.9399, "height_m": 0},
+ {"id": "targ6", "name": "scene7", "name_pretty": "obs6", "latitude_deg": 27.7502, "longitude_deg": -84.8929, "height_m": 0},
+ {"id": "targ7", "name": "scene8", "name_pretty": "obs7", "latitude_deg": 30.3431, "longitude_deg": -72.8011, "height_m": 0},
+ {"id": "targ8", "name": "scene9", "name_pretty": "obs8", "latitude_deg": 30.7858, "longitude_deg": -79.1106, "height_m": 0},
+ {"id": "targ9", "name": "scene10", "name_pretty": "obs9", "latitude_deg": 24.7100, "longitude_deg": -77.3878, "height_m": 0},
+ {"id": "targ10", "name": "scene11", "name_pretty": "obs10", "latitude_deg": 34.9294, "longitude_deg": -69.5721, "height_m": 0},
+ {"id": "targ11", "name": "scene12", "name_pretty": "obs11", "latitude_deg": 43.4378, "longitude_deg": -74.9875, "height_m": 0},
+ {"id": "targ12", "name": "scene13", "name_pretty": "obs12", "latitude_deg": 44.9772, "longitude_deg": -83.2117, "height_m": 0},
+ {"id": "targ13", "name": "scene14", "name_pretty": "obs13", "latitude_deg": 45.7600, "longitude_deg": -87.0750, "height_m": 0},
+ {"id": "targ14", "name": "scene15", "name_pretty": "obs14", "latitude_deg": 22.2823, "longitude_deg": -94.9570, "height_m": 0},
+ {"id": "targ15", "name": "scene16", "name_pretty": "obs15", "latitude_deg": 42.4901, "longitude_deg": -64.4311, "height_m": 0},
+ {"id": "targ16", "name": "scene17", "name_pretty": "obs16", "latitude_deg": 35.4907, "longitude_deg": -75.4724, "height_m": 0},
+ {"id": "targ17", "name": "scene18", "name_pretty": "obs17", "latitude_deg": 24.4862, "longitude_deg": -82.5786, "height_m": 0},
+ {"id": "targ18", "name": "scene19", "name_pretty": "obs18", "latitude_deg": 41.7472, "longitude_deg": -68.6284, "height_m": 0}
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {
+ },
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {
+ }
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/inputs/reference_model_definitions/gs_refs/KSAT.json b/inputs/reference_model_definitions/gs_refs/KSAT.json
new file mode 100644
index 0000000..5660a60
--- /dev/null
+++ b/inputs/reference_model_definitions/gs_refs/KSAT.json
@@ -0,0 +1,204 @@
+{
+ "config_title": "ground_station_network_config",
+ "version-gsndef": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/groundstation_network/README.md",
+ "network_definition": {
+ "version-gsn": "0.0.1",
+ "sim_gs_network_params": {
+ "time_epsilon_s": 1,
+ "gsn_ps_params": {
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ },
+ "gs_net_params": {
+ "num_stations": 20,
+ "gs_network_name": "KSAT",
+ "gs_network_version": 1,
+ "elevation_cutoff_deg": 10,
+ "stations": [
+ {
+ "id": "G0",
+ "name": "Inuvik",
+ "name_pretty": "Inuvik0",
+ "latitude_deg": 68.4,
+ "longitude_deg": -133.5,
+ "height_m": 51,
+ "comm_type": ""
+ },
+ {
+ "id": "G1",
+ "name": "LA",
+ "name_pretty": "LA1",
+ "latitude_deg": 34.05,
+ "longitude_deg": -118.24,
+ "height_m": 87,
+ "comm_type": ""
+ },
+ {
+ "id": "G2",
+ "name": "Punta Arenas",
+ "name_pretty": "Punta Arenas2",
+ "latitude_deg": -53.16,
+ "longitude_deg": -70.92,
+ "height_m": 34,
+ "comm_type": ""
+ },
+ {
+ "id": "G3",
+ "name": "Svalbard",
+ "name_pretty": "Svalbard3",
+ "latitude_deg": 78.23,
+ "longitude_deg": 15.41,
+ "height_m": 400,
+ "comm_type": ""
+ },
+ {
+ "id": "G4",
+ "name": "Tromso",
+ "name_pretty": "Tromso4",
+ "latitude_deg": 69.65,
+ "longitude_deg": 18.96,
+ "height_m": 4,
+ "comm_type": ""
+ },
+ {
+ "id": "G5",
+ "name": "Puertollano",
+ "name_pretty": "Puertollano5",
+ "latitude_deg": 38.69,
+ "longitude_deg": -4.11,
+ "height_m": 703,
+ "comm_type": ""
+ },
+ {
+ "id": "G6",
+ "name": "Nemea",
+ "name_pretty": "Nemea6",
+ "latitude_deg": 37.85,
+ "longitude_deg": 22.66,
+ "height_m": 2,
+ "comm_type": ""
+ },
+ {
+ "id": "G7",
+ "name": "Hartebeesthoek",
+ "name_pretty": "Hartebeesthoek7",
+ "latitude_deg": -25.64,
+ "longitude_deg": 28.08,
+ "height_m": 1288,
+ "comm_type": ""
+ },
+ {
+ "id": "G8",
+ "name": "Antarctica",
+ "name_pretty": "Antarctica8",
+ "latitude_deg": -72.1,
+ "longitude_deg": 2.32,
+ "height_m": 1270,
+ "comm_type": ""
+ },
+ {
+ "id": "G9",
+ "name": "Mauritius",
+ "name_pretty": "Mauritius9",
+ "latitude_deg": -20.35,
+ "longitude_deg": 57.55,
+ "height_m": 579,
+ "comm_type": ""
+ },
+ {
+ "id": "G10",
+ "name": "Jeju",
+ "name_pretty": "Jeju10",
+ "latitude_deg": 33,
+ "longitude_deg": 126,
+ "height_m": 89,
+ "comm_type": ""
+ },
+ {
+ "id": "G11",
+ "name": "Awarua",
+ "name_pretty": "Awarua11",
+ "latitude_deg": -46.02,
+ "longitude_deg": 167.81,
+ "height_m": 120,
+ "comm_type": ""
+ },
+ {
+ "id": "G12",
+ "name": "Hawaii",
+ "name_pretty": "Hawaii12",
+ "latitude_deg": 19.82,
+ "longitude_deg": -155.47,
+ "height_m": 4184,
+ "comm_type": ""
+ },
+ {
+ "id": "G13",
+ "name": "Fairbanks",
+ "name_pretty": "Fairbanks13",
+ "latitude_deg": 64.8,
+ "longitude_deg": -147.7,
+ "height_m": 135,
+ "comm_type": ""
+ },
+ {
+ "id": "G14",
+ "name": "Panama",
+ "name_pretty": "Panama14",
+ "latitude_deg": 8.54,
+ "longitude_deg": -80.78,
+ "height_m": 1057,
+ "comm_type": ""
+ },
+ {
+ "id": "G15",
+ "name": "Nuuk",
+ "name_pretty": "Nuuk15",
+ "latitude_deg": 64.17,
+ "longitude_deg": -51.73,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G16",
+ "name": "Vardo",
+ "name_pretty": "Vardo16",
+ "latitude_deg": 70.37,
+ "longitude_deg": 31.1,
+ "height_m": 1,
+ "comm_type": ""
+ },
+ {
+ "id": "G17",
+ "name": "Dubai",
+ "name_pretty": "Dubai17",
+ "latitude_deg": 25.2,
+ "longitude_deg": 55.27,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G18",
+ "name": "Singapore",
+ "name_pretty": "Singapore18",
+ "latitude_deg": 1.35,
+ "longitude_deg": 103.82,
+ "height_m": 55,
+ "comm_type": ""
+ },
+ {
+ "id": "G19",
+ "name": "Tokyo",
+ "name_pretty": "Tokyo19",
+ "latitude_deg": 35.69,
+ "longitude_deg": 139.69,
+ "height_m": 37,
+ "comm_type": ""
+ }
+ ]
+ }
+ }
+}
\ No newline at end of file
diff --git a/inputs/reference_model_definitions/gs_refs/KSAT_Lite.json b/inputs/reference_model_definitions/gs_refs/KSAT_Lite.json
new file mode 100644
index 0000000..202053e
--- /dev/null
+++ b/inputs/reference_model_definitions/gs_refs/KSAT_Lite.json
@@ -0,0 +1,131 @@
+{
+ "config_title": "ground_station_network_config",
+ "version-gsndef": "0.0.1",
+ "network_definition": {
+ "version-gsn": "0.0.1",
+ "sim_gs_network_params": {
+ "time_epsilon_s": 1,
+ "gsn_ps_params": {
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ },
+ "gs_net_params": {
+ "num_stations": 12,
+ "gs_network_name": "KSAT_Lite",
+ "gs_network_version": 1,
+ "elevation_cutoff_deg": 10,
+ "stations": [
+ {
+ "id": "G0",
+ "name": "Inuvik",
+ "name_pretty": "Inuvik0",
+ "latitude_deg": 68.4,
+ "longitude_deg": -133.5,
+ "height_m": 51,
+ "comm_type": ""
+ },
+ {
+ "id": "G1",
+ "name": "LA",
+ "name_pretty": "LA1",
+ "latitude_deg": 34.05,
+ "longitude_deg": -118.24,
+ "height_m": 87,
+ "comm_type": ""
+ },
+ {
+ "id": "G2",
+ "name": "Punta Arenas",
+ "name_pretty": "Punta Arenas2",
+ "latitude_deg": -53.16,
+ "longitude_deg": -70.92,
+ "height_m": 34,
+ "comm_type": ""
+ },
+ {
+ "id": "G3",
+ "name": "Svalbard",
+ "name_pretty": "Svalbard3",
+ "latitude_deg": 78.23,
+ "longitude_deg": 15.41,
+ "height_m": 400,
+ "comm_type": ""
+ },
+ {
+ "id": "G4",
+ "name": "Tromso",
+ "name_pretty": "Tromso4",
+ "latitude_deg": 69.65,
+ "longitude_deg": 18.96,
+ "height_m": 4,
+ "comm_type": ""
+ },
+ {
+ "id": "G5",
+ "name": "Puertollano",
+ "name_pretty": "Puertollano5",
+ "latitude_deg": 38.69,
+ "longitude_deg": -4.11,
+ "height_m": 703,
+ "comm_type": ""
+ },
+ {
+ "id": "G6",
+ "name": "Nemea",
+ "name_pretty": "Nemea6",
+ "latitude_deg": 37.85,
+ "longitude_deg": 22.66,
+ "height_m": 2,
+ "comm_type": ""
+ },
+ {
+ "id": "G7",
+ "name": "Hartebeesthoek",
+ "name_pretty": "Hartebeesthoek7",
+ "latitude_deg": -25.64,
+ "longitude_deg": 28.08,
+ "height_m": 1288,
+ "comm_type": ""
+ },
+ {
+ "id": "G8",
+ "name": "Antarctica",
+ "name_pretty": "Antarctica8",
+ "latitude_deg": -72.1,
+ "longitude_deg": 2.32,
+ "height_m": 1270,
+ "comm_type": ""
+ },
+ {
+ "id": "G9",
+ "name": "Mauritius",
+ "name_pretty": "Mauritius9",
+ "latitude_deg": -20.35,
+ "longitude_deg": 57.55,
+ "height_m": 579,
+ "comm_type": ""
+ },
+ {
+ "id": "G10",
+ "name": "Jeju",
+ "name_pretty": "Jeju10",
+ "latitude_deg": 33,
+ "longitude_deg": 126,
+ "height_m": 89,
+ "comm_type": ""
+ },
+ {
+ "id": "G11",
+ "name": "Awarua",
+ "name_pretty": "Awarua11",
+ "latitude_deg": -46.02,
+ "longitude_deg": 167.81,
+ "height_m": 120,
+ "comm_type": ""
+ }
+ ]
+ }
+ }
+}
diff --git a/inputs/reference_model_definitions/gs_refs/NASA_Nen.json b/inputs/reference_model_definitions/gs_refs/NASA_Nen.json
new file mode 100644
index 0000000..401fd2c
--- /dev/null
+++ b/inputs/reference_model_definitions/gs_refs/NASA_Nen.json
@@ -0,0 +1,151 @@
+{
+ "config_title": "ground_station_network_config",
+ "version-gsndef": "0.0.1",
+ "network_definition": {
+ "version-gsn": "0.0.1",
+ "sim_gs_network_params": {
+ "_comments": "TODO: Reevaluate the location of these params. (Should they go in the sim file? Or GP? I think they are describing the behavior of the network, so this should be ok)",
+ "time_epsilon_s": 1,
+ "gsn_ps_params": {
+ "_comments": [
+ "replan_release_wait_time_s simulates the amount of time required to actually run the global planner in real life. from the perspective of this code the global planner will run instantaneously. After it runs, the sim will wait this amount of time before making those plans available",
+ "release_first_plans_immediately = True means that global planner will immediately make its first set of global plans available (they won't be queued up for release at a later time)",
+ "6300 seconds is 105 minutes, about the length of an orbit at 1000 km altitude",
+ "replan release time is conservative here"
+ ],
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ },
+ "gs_net_params": {
+ "num_stations": 11,
+ "gs_network_name": "NASA NEN",
+ "gs_network_version": "0.0.1",
+ "elevation_cutoff_deg": 10,
+ "stations": [
+ {
+ "id": "AS1",
+ "name": "ASF-AS1",
+ "name_pretty": "NASA Alaska Satellite Facility - AS1",
+ "org": "NASA",
+ "latitude_deg": 64.8587,
+ "longitude_deg": -147.8576,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "MG1",
+ "name": "McMurdo",
+ "name_pretty": "NASA McMurdo Ground Station, Antarctica",
+ "org": "NASA",
+ "latitude_deg": -77.8391,
+ "longitude_deg": 166.6671,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "WG1",
+ "name": "WFF",
+ "name_pretty": "NASA Wallops Flight Facility",
+ "org": "NASA",
+ "latitude_deg": 37.9249,
+ "longitude_deg": -75.4765,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "USAK01",
+ "name": "NorthPole-1",
+ "name_pretty": "SSC North Pole - USAK01",
+ "org": "SSC",
+ "latitude_deg": 64.8042,
+ "longitude_deg": -147.5002,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "USHI01",
+ "name": "SouthPoint-1",
+ "name_pretty": "SSC South Point, HI - USHI01",
+ "org": "SSC",
+ "latitude_deg": 19.014,
+ "longitude_deg": -155.6633,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "KU1S",
+ "name": "Sweden-KU1S",
+ "name_pretty": "SSC Kiruna, Sweden - KU1S",
+ "org": "SSC",
+ "latitude_deg": 67.8896,
+ "longitude_deg": 21.0657,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "HB5S",
+ "name": "SouthAfrica-HB5S",
+ "name_pretty": "SANSA South Africa - HB5S",
+ "org": "SANSA",
+ "latitude_deg": -25.8869,
+ "longitude_deg": 27.7067,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "AUWA01",
+ "name": "Australia-01",
+ "name_pretty": "SSC Dongara, Australia - AUWA01",
+ "org": "SSC",
+ "latitude_deg": -29.0457,
+ "longitude_deg": 115.3487,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "TR2",
+ "name": "TrollSat-TR2",
+ "name_pretty": "KSAT TrollSat, Antarctica - TR2",
+ "org": "KSAT",
+ "latitude_deg": -72.0022,
+ "longitude_deg": 2.0575,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "SG1",
+ "name": "Svalbard-SG1",
+ "name_pretty": "KSAT Svalbard, Norway - SG1",
+ "org": "KSAT",
+ "latitude_deg": 78.231,
+ "longitude_deg": 15.389,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "SI1",
+ "name": "Singapore-SI1",
+ "name_pretty": "KSAT Singapore - SI1",
+ "org": "KSAT",
+ "latitude_deg": 1.3962,
+ "longitude_deg": 103.8343,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ }
+ ]
+ }
+ }
+}
diff --git a/inputs/reference_model_definitions/gs_refs/NASA_Nen_extended.json b/inputs/reference_model_definitions/gs_refs/NASA_Nen_extended.json
new file mode 100644
index 0000000..364d792
--- /dev/null
+++ b/inputs/reference_model_definitions/gs_refs/NASA_Nen_extended.json
@@ -0,0 +1,228 @@
+{
+ "config_title": "ground_station_network_config",
+ "version-gsndef": "0.0.1",
+ "network_definition": {
+ "version-gsn": "0.0.1",
+ "sim_gs_network_params": {
+ "_comments": "TODO: Reevaluate the location of these params. (Should they go in the sim file? Or GP? I think they are describing the behavior of the network, so this should be ok)",
+ "time_epsilon_s": 1,
+ "gsn_ps_params": {
+ "_comments": [
+ "replan_release_wait_time_s simulates the amount of time required to actually run the global planner in real life. from the perspective of this code the global planner will run instantaneously. After it runs, the sim will wait this amount of time before making those plans available",
+ "release_first_plans_immediately = True means that global planner will immediately make its first set of global plans available (they won't be queued up for release at a later time)",
+ "6300 seconds is 105 minutes, about the length of an orbit at 1000 km altitude",
+ "replan release time is conservative here"
+ ],
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ },
+ "gs_net_params": {
+ "num_stations": 18,
+ "gs_network_name": "NASA NEN",
+ "gs_network_version": "0.0.1",
+ "elevation_cutoff_deg": 10,
+ "stations": [
+ {
+ "id": "AS1",
+ "name": "ASF-AS1",
+ "name_pretty": "NASA Alaska Satellite Facility - AS1",
+ "org": "NASA",
+ "latitude_deg": 64.8587,
+ "longitude_deg": -147.8576,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "AS3",
+ "name": "ASF-AS3",
+ "name_pretty": "NASA Alaska Satellite Facility - AS3",
+ "org": "NASA",
+ "latitude_deg": 64.8589,
+ "longitude_deg": -147.8541,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "MG1",
+ "name": "McMurdo",
+ "name_pretty": "NASA McMurdo Ground Station, Antarctica",
+ "org": "NASA",
+ "latitude_deg": -77.8391,
+ "longitude_deg": 166.6671,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "WG1",
+ "name": "WFF",
+ "name_pretty": "NASA Wallops Flight Facility",
+ "org": "NASA",
+ "latitude_deg": 37.9249,
+ "longitude_deg": -75.4765,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "USAK01",
+ "name": "NorthPole-1",
+ "name_pretty": "SSC North Pole - USAK01",
+ "org": "SSC",
+ "latitude_deg": 64.8042,
+ "longitude_deg": -147.5002,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "USAK03",
+ "name": "NorthPole-3",
+ "name_pretty": "SSC North Pole - USAK03",
+ "org": "SSC",
+ "latitude_deg": 64.8047,
+ "longitude_deg": -147.5042,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "USAK04",
+ "name": "NorthPole-4",
+ "name_pretty": "SSC North Pole - USAK04",
+ "org": "SSC",
+ "latitude_deg": 64.8047,
+ "longitude_deg": -147.5042,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "USHI01",
+ "name": "SouthPoint-1",
+ "name_pretty": "SSC South Point, HI - USHI01",
+ "org": "SSC",
+ "latitude_deg": 19.014,
+ "longitude_deg": -155.6633,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "USHI02",
+ "name": "SouthPoint-2",
+ "name_pretty": "SSC South Point, HI - USHI02",
+ "org": "SSC",
+ "latitude_deg": 19.0138,
+ "longitude_deg": -155.6629,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "KU1S",
+ "name": "Sweden-KU1S",
+ "name_pretty": "SSC Kiruna, Sweden - KU1S",
+ "org": "SSC",
+ "latitude_deg": 67.8896,
+ "longitude_deg": 21.0657,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "KU3S",
+ "name": "Sweden-KU3S",
+ "name_pretty": "SSC Kiruna, Sweden - KU3S",
+ "org": "SSC",
+ "latitude_deg": 67.8791,
+ "longitude_deg": 21.038,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "HB5S",
+ "name": "SouthAfrica-HB5S",
+ "name_pretty": "SANSA South Africa - HB5S",
+ "org": "SANSA",
+ "latitude_deg": -25.8869,
+ "longitude_deg": 27.7067,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "AUWA01",
+ "name": "Australia-01",
+ "name_pretty": "SSC Dongara, Australia - AUWA01",
+ "org": "SSC",
+ "latitude_deg": -29.0457,
+ "longitude_deg": 115.3487,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "TR2",
+ "name": "TrollSat-TR2",
+ "name_pretty": "KSAT TrollSat, Antarctica - TR2",
+ "org": "KSAT",
+ "latitude_deg": -72.0022,
+ "longitude_deg": 2.0575,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "SG1",
+ "name": "Svalbard-SG1",
+ "name_pretty": "KSAT Svalbard, Norway - SG1",
+ "org": "KSAT",
+ "latitude_deg": 78.231,
+ "longitude_deg": 15.389,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "SG2",
+ "name": "Svalbard-SG2",
+ "name_pretty": "KSAT Svalbard, Norway - SG2",
+ "org": "KSAT",
+ "latitude_deg": 78.23,
+ "longitude_deg": 15.398,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "SG3",
+ "name": "Svalbard-SG3",
+ "name_pretty": "KSAT Svalbard, Norway - SG3",
+ "org": "KSAT",
+ "latitude_deg": 78.2297,
+ "longitude_deg": 15.4081,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ },
+ {
+ "id": "SI1",
+ "name": "Singapore-SI1",
+ "name_pretty": "KSAT Singapore - SI1",
+ "org": "KSAT",
+ "latitude_deg": 1.3962,
+ "longitude_deg": 103.8343,
+ "height_m": 0,
+ "comm_type": "",
+ "gs_model": "nen_nom_gs"
+ }
+ ]
+ }
+ }
+}
diff --git a/inputs/reference_model_definitions/gs_refs/SpaceFlight.json b/inputs/reference_model_definitions/gs_refs/SpaceFlight.json
new file mode 100644
index 0000000..bcf4c9e
--- /dev/null
+++ b/inputs/reference_model_definitions/gs_refs/SpaceFlight.json
@@ -0,0 +1,186 @@
+{
+ "config_title": "ground_station_network_config",
+ "version-gsndef": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/groundstation_network/README.md",
+ "network_definition": {
+ "version-gsn": "0.0.1",
+ "gs_net_params": {
+ "num_stations": 17,
+ "gs_network_name": "Space Flight Networks 2017",
+ "_comment0":"ground station for spaceflight networks company as of 2017",
+ "gs_network_version": "1",
+ "_comment": "setting this to 20 deg to be conservative",
+ "elevation_cutoff_deg": 20,
+ "stations": [
+ {
+ "id": "G0",
+ "name": "Prudhoe Bay",
+ "name_pretty": "Prudhoe Bay 0",
+ "latitude_deg": 70.36621,
+ "longitude_deg": -148.745529,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G1",
+ "name": "Fairbanks",
+ "name_pretty": "Fairbanks 1",
+ "latitude_deg": 64.836531,
+ "longitude_deg": -147.651745,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G2",
+ "name": "Seattle",
+ "name_pretty": "Seattle 2",
+ "latitude_deg": 47.606,
+ "longitude_deg": -122.33,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G3",
+ "name": "New Mexico",
+ "name_pretty": "New Mexico 3",
+ "latitude_deg": 32.7872,
+ "longitude_deg": -106.3257,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G4",
+ "name": "New England",
+ "name_pretty": "New England 4",
+ "latitude_deg": 42.94235,
+ "longitude_deg": -71.636095,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G5",
+ "name": "Hawaii",
+ "name_pretty": "Hawaii 5",
+ "latitude_deg": 19.8968,
+ "longitude_deg": -155.5828,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G6",
+ "name": "Florida",
+ "name_pretty": "Florida 6",
+ "latitude_deg": 26.7542,
+ "longitude_deg": -80.9337,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G7",
+ "name": "Argentina",
+ "name_pretty": "Argentina 7",
+ "latitude_deg": -53.1638,
+ "longitude_deg": -70.9171,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G8",
+ "name": "Brazil",
+ "name_pretty": "Brazil 8",
+ "latitude_deg": -18.415921,
+ "longitude_deg": -45.633627,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G9",
+ "name": "Munich",
+ "name_pretty": "Munich 9",
+ "latitude_deg": 48.1351,
+ "longitude_deg": 11.582,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G10",
+ "name": "Norway",
+ "name_pretty": "Norway 10",
+ "latitude_deg": 67.316,
+ "longitude_deg": 14.777,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G11",
+ "name": "South Africa",
+ "name_pretty": "South Africa 11",
+ "latitude_deg": -25.89,
+ "longitude_deg": 27.6853,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G12",
+ "name": "Dubai",
+ "name_pretty": "Dubai 12",
+ "latitude_deg": 25.2048,
+ "longitude_deg": 55.2708,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G13",
+ "name": "Singapore",
+ "name_pretty": "Singapore 13",
+ "latitude_deg": 1.3521,
+ "longitude_deg": 103.8198,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G14",
+ "name": "Guam",
+ "name_pretty": "Guam 14",
+ "latitude_deg": 13.4443,
+ "longitude_deg": 144.7937,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G15",
+ "name": "Japan",
+ "name_pretty": "Japan 15",
+ "latitude_deg": 37.516871,
+ "longitude_deg": 139.666454,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G16",
+ "name": "New Zealand",
+ "name_pretty": "New Zealand 16",
+ "latitude_deg": -46.512945,
+ "longitude_deg": 168.375952,
+ "height_m": 0,
+ "comm_type": ""
+ }
+ ]
+ },
+ "sim_gs_network_params": {
+ "_comments": "TODO: Reevaluate the location of these params. (Should they go in the sim file? Or GP? I think they are describing the behavior of the network, so this should be ok)",
+ "time_epsilon_s": 1,
+ "gsn_ps_params": {
+ "_comments": [
+ "replan_release_wait_time_s simulates the amount of time required to actually run the global planner in real life. from the perspective of this code the global planner will run instantaneously. After it runs, the sim will wait this amount of time before making those plans available",
+ "release_first_plans_immediately = True means that global planner will immediately make its first set of global plans available (they won't be queued up for release at a later time)",
+ "6300 seconds is 105 minutes, about the length of an orbit at 1000 km altitude",
+ "replan release time is conservative here"
+ ],
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ }
+ }
+}
diff --git a/inputs/reference_model_definitions/gs_refs/Zhou_Original.json b/inputs/reference_model_definitions/gs_refs/Zhou_Original.json
new file mode 100644
index 0000000..41719bc
--- /dev/null
+++ b/inputs/reference_model_definitions/gs_refs/Zhou_Original.json
@@ -0,0 +1,67 @@
+{
+ "config_title": "ground_station_network_config",
+ "version-gsndef": "0.0.1",
+ "verbose_details": "https://github.mit.edu/star-lab/CIRCINUS/blob/SPRINT-restructuring-1/inputs/nom_sprint/groundstation_network/README.md",
+ "network_definition": {
+ "version-gsn": "0.0.1",
+ "gs_net_params": {
+ "num_stations": 4,
+ "gs_network_name": "Zhou 2017 GS",
+ "gs_network_version": "1",
+ "elevation_cutoff_deg": 10,
+ "stations": [
+ {
+ "id": "G0",
+ "name": "Beijing",
+ "name_pretty": "Beijing 0",
+ "latitude_deg": 40.0,
+ "longitude_deg": 116.0,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G1",
+ "name": "Kashi",
+ "name_pretty": "Kashi 1",
+ "latitude_deg": 39.5,
+ "longitude_deg": 76.0,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G2",
+ "name": "Sanya",
+ "name_pretty": "Sanya 2",
+ "latitude_deg": 18.0,
+ "longitude_deg": 109.5,
+ "height_m": 0,
+ "comm_type": ""
+ },
+ {
+ "id": "G3",
+ "name": "Xi'an",
+ "name_pretty": "Xi'an 3",
+ "latitude_deg": 34.0,
+ "longitude_deg": 108.0,
+ "height_m": 0,
+ "comm_type": ""
+ }
+ ]
+ },
+ "sim_gs_network_params": {
+ "_comments": "TODO: Reevaluate the location of these params. (Should they go in the sim file? Or GP? I think they are describing the behavior of the network, so this should be ok)",
+ "time_epsilon_s": 1,
+ "gsn_ps_params": {
+ "_comments": [
+ "replan_release_wait_time_s simulates the amount of time required to actually run the global planner in real life. from the perspective of this code the global planner will run instantaneously. After it runs, the sim will wait this amount of time before making those plans available",
+ "release_first_plans_immediately = True means that global planner will immediately make its first set of global plans available (they won't be queued up for release at a later time)",
+ "6300 seconds is 105 minutes, about the length of an orbit at 1000 km altitude",
+ "replan release time is conservative here"
+ ],
+ "replan_interval_s": 6300,
+ "replan_release_wait_time_s": 60,
+ "release_first_plans_immediately": true
+ }
+ }
+ }
+}
diff --git a/inputs/reference_model_definitions/obs_refs/Chesapeake_Bay.json b/inputs/reference_model_definitions/obs_refs/Chesapeake_Bay.json
new file mode 100644
index 0000000..ecc6e2f
--- /dev/null
+++ b/inputs/reference_model_definitions/obs_refs/Chesapeake_Bay.json
@@ -0,0 +1,33 @@
+{
+ "version":"0.0.1",
+ "which_config":"operational_profile_config",
+ "where_should_i_be":"inputs/cases/",
+ "what_goes_here":"things that only matter to the choices to be made about the current case.",
+
+ "ops_profile_params": {
+ "obs_params": {
+ "num_targets": 6,
+ "target_set_name": "Chesapeake Bay",
+ "target_set_version": "1",
+ "target_set_id": "CB_Test_1",
+ "elevation_cutoff_deg":60,
+ "targets": [
+ {"id": "targ0","name": "Bay 0", "name_pretty": "Bay 0", "latitude_deg": 37.983013, "longitude_deg": -75.429088,"height_m": 0},
+      {"id": "targ1","name": "Ocean 1",      "name_pretty": "Ocean 1", "latitude_deg": 37.910802, "longitude_deg": -75.470739,"height_m": 0},
+ {"id": "targ2","name": "Bay 1", "name_pretty": "Bay 1", "latitude_deg": 37.850126, "longitude_deg": -75.680622,"height_m": 0},
+ {"id": "targ3","name": "Bay 2", "name_pretty": "Bay 2", "latitude_deg": 37.388679, "longitude_deg": -75.972138,"height_m": 0},
+ {"id": "targ4","name": "Bay 3", "name_pretty": "Bay 3", "latitude_deg": 37.267298, "longitude_deg": -76.536600,"height_m": 0},
+      {"id": "targ5","name": "Norfolk",      "name_pretty": "Norfolk", "latitude_deg": 36.897200, "longitude_deg": -76.334132,"height_m": 0}
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {
+ },
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {
+ }
+ }
+
+ }
+
+}
\ No newline at end of file
diff --git a/inputs/reference_model_definitions/obs_refs/Tropics_100.json b/inputs/reference_model_definitions/obs_refs/Tropics_100.json
new file mode 100644
index 0000000..ef60fdc
--- /dev/null
+++ b/inputs/reference_model_definitions/obs_refs/Tropics_100.json
@@ -0,0 +1,127 @@
+{
+ "version":"0.0.1",
+ "which_config":"operational_profile_config",
+ "where_should_i_be":"inputs/cases/",
+ "what_goes_here":"things that only matter to the choices to be made about the current case.",
+
+ "ops_profile_params": {
+ "obs_params": {
+ "num_targets": 100,
+ "target_set_name": "targets_tropics_100.json (from CIRCINUS tools)",
+ "target_set_version": "1",
+ "target_set_id": "4",
+ "elevation_cutoff_deg":60,
+ "targets": [
+ {"id": "targ0","name": "midlat60 0","name_pretty": "obs0","latitude_deg": -24.0,"longitude_deg": -174.0,"height_m": 0},
+ {"id": "targ1","name": "midlat60 1","name_pretty": "obs1","latitude_deg": -24.0,"longitude_deg": -156.0,"height_m": 0},
+ {"id": "targ2","name": "midlat60 2","name_pretty": "obs2","latitude_deg": -24.0,"longitude_deg": -138.0,"height_m": 0},
+ {"id": "targ3","name": "midlat60 3","name_pretty": "obs3","latitude_deg": -24.0,"longitude_deg": -120.0,"height_m": 0},
+ {"id": "targ4","name": "midlat60 4","name_pretty": "obs4","latitude_deg": -24.0,"longitude_deg": -102.0,"height_m": 0},
+ {"id": "targ5","name": "midlat60 5","name_pretty": "obs5","latitude_deg": -24.0,"longitude_deg": -84.0,"height_m": 0},
+ {"id": "targ6","name": "midlat60 6","name_pretty": "obs6","latitude_deg": -24.0,"longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ7","name": "midlat60 7","name_pretty": "obs7","latitude_deg": -24.0,"longitude_deg": -48.0,"height_m": 0},
+ {"id": "targ8","name": "midlat60 8","name_pretty": "obs8","latitude_deg": -24.0,"longitude_deg": -30.0,"height_m": 0},
+ {"id": "targ9","name": "midlat60 9","name_pretty": "obs9","latitude_deg": -24.0,"longitude_deg": -12.0,"height_m": 0},
+ {"id": "targ10","name": "midlat60 10","name_pretty": "obs10","latitude_deg": -24.0,"longitude_deg": 6.0,"height_m": 0},
+ {"id": "targ11","name": "midlat60 11","name_pretty": "obs11","latitude_deg": -24.0,"longitude_deg": 24.0,"height_m": 0},
+ {"id": "targ12","name": "midlat60 12","name_pretty": "obs12","latitude_deg": -24.0,"longitude_deg": 42.0,"height_m": 0},
+ {"id": "targ13","name": "midlat60 13","name_pretty": "obs13","latitude_deg": -24.0,"longitude_deg": 60.0,"height_m": 0},
+ {"id": "targ14","name": "midlat60 14","name_pretty": "obs14","latitude_deg": -24.0,"longitude_deg": 78.0,"height_m": 0},
+ {"id": "targ15","name": "midlat60 15","name_pretty": "obs15","latitude_deg": -24.0,"longitude_deg": 96.0,"height_m": 0},
+ {"id": "targ16","name": "midlat60 16","name_pretty": "obs16","latitude_deg": -24.0,"longitude_deg": 114.0,"height_m": 0},
+ {"id": "targ17","name": "midlat60 17","name_pretty": "obs17","latitude_deg": -24.0,"longitude_deg": 132.0,"height_m": 0},
+ {"id": "targ18","name": "midlat60 18","name_pretty": "obs18","latitude_deg": -24.0,"longitude_deg": 150.0,"height_m": 0},
+ {"id": "targ19","name": "midlat60 19","name_pretty": "obs19","latitude_deg": -24.0,"longitude_deg": 168.0,"height_m": 0},
+ {"id": "targ20","name": "midlat60 20","name_pretty": "obs20","latitude_deg": -12.0,"longitude_deg": -174.0,"height_m": 0},
+ {"id": "targ21","name": "midlat60 21","name_pretty": "obs21","latitude_deg": -12.0,"longitude_deg": -156.0,"height_m": 0},
+ {"id": "targ22","name": "midlat60 22","name_pretty": "obs22","latitude_deg": -12.0,"longitude_deg": -138.0,"height_m": 0},
+ {"id": "targ23","name": "midlat60 23","name_pretty": "obs23","latitude_deg": -12.0,"longitude_deg": -120.0,"height_m": 0},
+ {"id": "targ24","name": "midlat60 24","name_pretty": "obs24","latitude_deg": -12.0,"longitude_deg": -102.0,"height_m": 0},
+ {"id": "targ25","name": "midlat60 25","name_pretty": "obs25","latitude_deg": -12.0,"longitude_deg": -84.0,"height_m": 0},
+ {"id": "targ26","name": "midlat60 26","name_pretty": "obs26","latitude_deg": -12.0,"longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ27","name": "midlat60 27","name_pretty": "obs27","latitude_deg": -12.0,"longitude_deg": -48.0,"height_m": 0},
+ {"id": "targ28","name": "midlat60 28","name_pretty": "obs28","latitude_deg": -12.0,"longitude_deg": -30.0,"height_m": 0},
+ {"id": "targ29","name": "midlat60 29","name_pretty": "obs29","latitude_deg": -12.0,"longitude_deg": -12.0,"height_m": 0},
+ {"id": "targ30","name": "midlat60 30","name_pretty": "obs30","latitude_deg": -12.0,"longitude_deg": 6.0,"height_m": 0},
+ {"id": "targ31","name": "midlat60 31","name_pretty": "obs31","latitude_deg": -12.0,"longitude_deg": 24.0,"height_m": 0},
+ {"id": "targ32","name": "midlat60 32","name_pretty": "obs32","latitude_deg": -12.0,"longitude_deg": 42.0,"height_m": 0},
+ {"id": "targ33","name": "midlat60 33","name_pretty": "obs33","latitude_deg": -12.0,"longitude_deg": 60.0,"height_m": 0},
+ {"id": "targ34","name": "midlat60 34","name_pretty": "obs34","latitude_deg": -12.0,"longitude_deg": 78.0,"height_m": 0},
+ {"id": "targ35","name": "midlat60 35","name_pretty": "obs35","latitude_deg": -12.0,"longitude_deg": 96.0,"height_m": 0},
+ {"id": "targ36","name": "midlat60 36","name_pretty": "obs36","latitude_deg": -12.0,"longitude_deg": 114.0,"height_m": 0},
+ {"id": "targ37","name": "midlat60 37","name_pretty": "obs37","latitude_deg": -12.0,"longitude_deg": 132.0,"height_m": 0},
+ {"id": "targ38","name": "midlat60 38","name_pretty": "obs38","latitude_deg": -12.0,"longitude_deg": 150.0,"height_m": 0},
+ {"id": "targ39","name": "midlat60 39","name_pretty": "obs39","latitude_deg": -12.0,"longitude_deg": 168.0,"height_m": 0},
+ {"id": "targ40","name": "midlat60 40","name_pretty": "obs40","latitude_deg": 0.0,"longitude_deg": -174.0,"height_m": 0},
+ {"id": "targ41","name": "midlat60 41","name_pretty": "obs41","latitude_deg": 0.0,"longitude_deg": -156.0,"height_m": 0},
+ {"id": "targ42","name": "midlat60 42","name_pretty": "obs42","latitude_deg": 0.0,"longitude_deg": -138.0,"height_m": 0},
+ {"id": "targ43","name": "midlat60 43","name_pretty": "obs43","latitude_deg": 0.0,"longitude_deg": -120.0,"height_m": 0},
+ {"id": "targ44","name": "midlat60 44","name_pretty": "obs44","latitude_deg": 0.0,"longitude_deg": -102.0,"height_m": 0},
+ {"id": "targ45","name": "midlat60 45","name_pretty": "obs45","latitude_deg": 0.0,"longitude_deg": -84.0,"height_m": 0},
+ {"id": "targ46","name": "midlat60 46","name_pretty": "obs46","latitude_deg": 0.0,"longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ47","name": "midlat60 47","name_pretty": "obs47","latitude_deg": 0.0,"longitude_deg": -48.0,"height_m": 0},
+ {"id": "targ48","name": "midlat60 48","name_pretty": "obs48","latitude_deg": 0.0,"longitude_deg": -30.0,"height_m": 0},
+ {"id": "targ49","name": "midlat60 49","name_pretty": "obs49","latitude_deg": 0.0,"longitude_deg": -12.0,"height_m": 0},
+ {"id": "targ50","name": "midlat60 50","name_pretty": "obs50","latitude_deg": 0.0,"longitude_deg": 6.0,"height_m": 0},
+ {"id": "targ51","name": "midlat60 51","name_pretty": "obs51","latitude_deg": 0.0,"longitude_deg": 24.0,"height_m": 0},
+ {"id": "targ52","name": "midlat60 52","name_pretty": "obs52","latitude_deg": 0.0,"longitude_deg": 42.0,"height_m": 0},
+ {"id": "targ53","name": "midlat60 53","name_pretty": "obs53","latitude_deg": 0.0,"longitude_deg": 60.0,"height_m": 0},
+ {"id": "targ54","name": "midlat60 54","name_pretty": "obs54","latitude_deg": 0.0,"longitude_deg": 78.0,"height_m": 0},
+ {"id": "targ55","name": "midlat60 55","name_pretty": "obs55","latitude_deg": 0.0,"longitude_deg": 96.0,"height_m": 0},
+ {"id": "targ56","name": "midlat60 56","name_pretty": "obs56","latitude_deg": 0.0,"longitude_deg": 114.0,"height_m": 0},
+ {"id": "targ57","name": "midlat60 57","name_pretty": "obs57","latitude_deg": 0.0,"longitude_deg": 132.0,"height_m": 0},
+ {"id": "targ58","name": "midlat60 58","name_pretty": "obs58","latitude_deg": 0.0,"longitude_deg": 150.0,"height_m": 0},
+ {"id": "targ59","name": "midlat60 59","name_pretty": "obs59","latitude_deg": 0.0,"longitude_deg": 168.0,"height_m": 0},
+ {"id": "targ60","name": "midlat60 60","name_pretty": "obs60","latitude_deg": 12.0,"longitude_deg": -174.0,"height_m": 0},
+ {"id": "targ61","name": "midlat60 61","name_pretty": "obs61","latitude_deg": 12.0,"longitude_deg": -156.0,"height_m": 0},
+ {"id": "targ62","name": "midlat60 62","name_pretty": "obs62","latitude_deg": 12.0,"longitude_deg": -138.0,"height_m": 0},
+ {"id": "targ63","name": "midlat60 63","name_pretty": "obs63","latitude_deg": 12.0,"longitude_deg": -120.0,"height_m": 0},
+ {"id": "targ64","name": "midlat60 64","name_pretty": "obs64","latitude_deg": 12.0,"longitude_deg": -102.0,"height_m": 0},
+ {"id": "targ65","name": "midlat60 65","name_pretty": "obs65","latitude_deg": 12.0,"longitude_deg": -84.0,"height_m": 0},
+ {"id": "targ66","name": "midlat60 66","name_pretty": "obs66","latitude_deg": 12.0,"longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ67","name": "midlat60 67","name_pretty": "obs67","latitude_deg": 12.0,"longitude_deg": -48.0,"height_m": 0},
+ {"id": "targ68","name": "midlat60 68","name_pretty": "obs68","latitude_deg": 12.0,"longitude_deg": -30.0,"height_m": 0},
+ {"id": "targ69","name": "midlat60 69","name_pretty": "obs69","latitude_deg": 12.0,"longitude_deg": -12.0,"height_m": 0},
+ {"id": "targ70","name": "midlat60 70","name_pretty": "obs70","latitude_deg": 12.0,"longitude_deg": 6.0,"height_m": 0},
+ {"id": "targ71","name": "midlat60 71","name_pretty": "obs71","latitude_deg": 12.0,"longitude_deg": 24.0,"height_m": 0},
+ {"id": "targ72","name": "midlat60 72","name_pretty": "obs72","latitude_deg": 12.0,"longitude_deg": 42.0,"height_m": 0},
+ {"id": "targ73","name": "midlat60 73","name_pretty": "obs73","latitude_deg": 12.0,"longitude_deg": 60.0,"height_m": 0},
+ {"id": "targ74","name": "midlat60 74","name_pretty": "obs74","latitude_deg": 12.0,"longitude_deg": 78.0,"height_m": 0},
+ {"id": "targ75","name": "midlat60 75","name_pretty": "obs75","latitude_deg": 12.0,"longitude_deg": 96.0,"height_m": 0},
+ {"id": "targ76","name": "midlat60 76","name_pretty": "obs76","latitude_deg": 12.0,"longitude_deg": 114.0,"height_m": 0},
+ {"id": "targ77","name": "midlat60 77","name_pretty": "obs77","latitude_deg": 12.0,"longitude_deg": 132.0,"height_m": 0},
+ {"id": "targ78","name": "midlat60 78","name_pretty": "obs78","latitude_deg": 12.0,"longitude_deg": 150.0,"height_m": 0},
+ {"id": "targ79","name": "midlat60 79","name_pretty": "obs79","latitude_deg": 12.0,"longitude_deg": 168.0,"height_m": 0},
+ {"id": "targ80","name": "midlat60 80","name_pretty": "obs80","latitude_deg": 24.0,"longitude_deg": -174.0,"height_m": 0},
+ {"id": "targ81","name": "midlat60 81","name_pretty": "obs81","latitude_deg": 24.0,"longitude_deg": -156.0,"height_m": 0},
+ {"id": "targ82","name": "midlat60 82","name_pretty": "obs82","latitude_deg": 24.0,"longitude_deg": -138.0,"height_m": 0},
+ {"id": "targ83","name": "midlat60 83","name_pretty": "obs83","latitude_deg": 24.0,"longitude_deg": -120.0,"height_m": 0},
+ {"id": "targ84","name": "midlat60 84","name_pretty": "obs84","latitude_deg": 24.0,"longitude_deg": -102.0,"height_m": 0},
+ {"id": "targ85","name": "midlat60 85","name_pretty": "obs85","latitude_deg": 24.0,"longitude_deg": -84.0,"height_m": 0},
+ {"id": "targ86","name": "midlat60 86","name_pretty": "obs86","latitude_deg": 24.0,"longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ87","name": "midlat60 87","name_pretty": "obs87","latitude_deg": 24.0,"longitude_deg": -48.0,"height_m": 0},
+ {"id": "targ88","name": "midlat60 88","name_pretty": "obs88","latitude_deg": 24.0,"longitude_deg": -30.0,"height_m": 0},
+ {"id": "targ89","name": "midlat60 89","name_pretty": "obs89","latitude_deg": 24.0,"longitude_deg": -12.0,"height_m": 0},
+ {"id": "targ90","name": "midlat60 90","name_pretty": "obs90","latitude_deg": 24.0,"longitude_deg": 6.0,"height_m": 0},
+ {"id": "targ91","name": "midlat60 91","name_pretty": "obs91","latitude_deg": 24.0,"longitude_deg": 24.0,"height_m": 0},
+ {"id": "targ92","name": "midlat60 92","name_pretty": "obs92","latitude_deg": 24.0,"longitude_deg": 42.0,"height_m": 0},
+ {"id": "targ93","name": "midlat60 93","name_pretty": "obs93","latitude_deg": 24.0,"longitude_deg": 60.0,"height_m": 0},
+ {"id": "targ94","name": "midlat60 94","name_pretty": "obs94","latitude_deg": 24.0,"longitude_deg": 78.0,"height_m": 0},
+ {"id": "targ95","name": "midlat60 95","name_pretty": "obs95","latitude_deg": 24.0,"longitude_deg": 96.0,"height_m": 0},
+ {"id": "targ96","name": "midlat60 96","name_pretty": "obs96","latitude_deg": 24.0,"longitude_deg": 114.0,"height_m": 0},
+ {"id": "targ97","name": "midlat60 97","name_pretty": "obs97","latitude_deg": 24.0,"longitude_deg": 132.0,"height_m": 0},
+ {"id": "targ98","name": "midlat60 98","name_pretty": "obs98","latitude_deg": 24.0,"longitude_deg": 150.0,"height_m": 0},
+ {"id": "targ99","name": "midlat60 99","name_pretty": "obs99","latitude_deg": 24.0,"longitude_deg": 168.0,"height_m": 0}
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {
+ },
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {
+ }
+ }
+
+ }
+
+}
\ No newline at end of file
diff --git a/inputs/reference_model_definitions/obs_refs/Tropics_20.json b/inputs/reference_model_definitions/obs_refs/Tropics_20.json
new file mode 100644
index 0000000..3114e2c
--- /dev/null
+++ b/inputs/reference_model_definitions/obs_refs/Tropics_20.json
@@ -0,0 +1,47 @@
+{
+ "version":"0.0.1",
+ "which_config":"operational_profile_config",
+ "where_should_i_be":"inputs/cases/",
+ "what_goes_here":"things that only matter to the choices to be made about the current case.",
+
+ "ops_profile_params": {
+ "obs_params": {
+ "num_targets": 20,
+ "target_set_name": "targets_tropics_100.json (from CIRCINUS tools)",
+ "target_set_version": "1",
+ "target_set_id": "4",
+ "elevation_cutoff_deg":60,
+ "targets": [
+ {"id": "targ0","name": "midlat60 0","name_pretty": "obs0","latitude_deg": -24.0,"longitude_deg": -174.0,"height_m": 0},
+ {"id": "targ1","name": "midlat60 1","name_pretty": "obs1","latitude_deg": -24.0,"longitude_deg": -156.0,"height_m": 0},
+ {"id": "targ2","name": "midlat60 2","name_pretty": "obs2","latitude_deg": -24.0,"longitude_deg": -138.0,"height_m": 0},
+ {"id": "targ3","name": "midlat60 3","name_pretty": "obs3","latitude_deg": -24.0,"longitude_deg": -120.0,"height_m": 0},
+ {"id": "targ4","name": "midlat60 4","name_pretty": "obs4","latitude_deg": -24.0,"longitude_deg": -102.0,"height_m": 0},
+ {"id": "targ5","name": "midlat60 5","name_pretty": "obs5","latitude_deg": -24.0,"longitude_deg": -84.0,"height_m": 0},
+ {"id": "targ6","name": "midlat60 6","name_pretty": "obs6","latitude_deg": -24.0,"longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ7","name": "midlat60 7","name_pretty": "obs7","latitude_deg": -24.0,"longitude_deg": -48.0,"height_m": 0},
+ {"id": "targ8","name": "midlat60 8","name_pretty": "obs8","latitude_deg": -24.0,"longitude_deg": -30.0,"height_m": 0},
+ {"id": "targ9","name": "midlat60 9","name_pretty": "obs9","latitude_deg": -24.0,"longitude_deg": -12.0,"height_m": 0},
+ {"id": "targ10","name": "midlat60 10","name_pretty": "obs10","latitude_deg": -24.0,"longitude_deg": 6.0,"height_m": 0},
+ {"id": "targ11","name": "midlat60 11","name_pretty": "obs11","latitude_deg": -24.0,"longitude_deg": 24.0,"height_m": 0},
+ {"id": "targ12","name": "midlat60 12","name_pretty": "obs12","latitude_deg": -24.0,"longitude_deg": 42.0,"height_m": 0},
+ {"id": "targ13","name": "midlat60 13","name_pretty": "obs13","latitude_deg": -24.0,"longitude_deg": 60.0,"height_m": 0},
+ {"id": "targ14","name": "midlat60 14","name_pretty": "obs14","latitude_deg": -24.0,"longitude_deg": 78.0,"height_m": 0},
+ {"id": "targ15","name": "midlat60 15","name_pretty": "obs15","latitude_deg": -24.0,"longitude_deg": 96.0,"height_m": 0},
+ {"id": "targ16","name": "midlat60 16","name_pretty": "obs16","latitude_deg": -24.0,"longitude_deg": 114.0,"height_m": 0},
+ {"id": "targ17","name": "midlat60 17","name_pretty": "obs17","latitude_deg": -24.0,"longitude_deg": 132.0,"height_m": 0},
+ {"id": "targ18","name": "midlat60 18","name_pretty": "obs18","latitude_deg": -24.0,"longitude_deg": 150.0,"height_m": 0},
+ {"id": "targ19","name": "midlat60 19","name_pretty": "obs19","latitude_deg": -24.0,"longitude_deg": 168.0,"height_m": 0}
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {
+ },
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {
+ }
+ }
+
+ }
+
+}
diff --git a/inputs/reference_model_definitions/obs_refs/Zhou_2017.json b/inputs/reference_model_definitions/obs_refs/Zhou_2017.json
new file mode 100644
index 0000000..6d07011
--- /dev/null
+++ b/inputs/reference_model_definitions/obs_refs/Zhou_2017.json
@@ -0,0 +1,32 @@
+{
+ "version":"0.0.1",
+ "which_config":"operational_profile_config",
+ "where_should_i_be":"inputs/cases/",
+ "what_goes_here":"things that only matter to the choices to be made about the current case.",
+
+ "ops_profile_params": {
+ "obs_params": {
+ "num_targets": 5,
+ "target_set_name": "Zhou 2017 Missions",
+ "target_set_version": "1",
+ "target_set_id": "2018-4-14",
+ "elevation_cutoff_deg":60,
+ "targets": [
+ {"id": "targ0","name": "Himalaya","name_pretty": "obs0", "latitude_deg": 28.0, "longitude_deg": 87.0,"height_m": 0},
+ {"id": "targ1","name": "Mamiraus","name_pretty": "obs1", "latitude_deg": -2.0, "longitude_deg": -66.0,"height_m": 0},
+ {"id": "targ2","name": "Cape York","name_pretty": "obs2", "latitude_deg": -11.0, "longitude_deg": 142.5,"height_m": 0},
+ {"id": "targ3","name": "Alaska Coast","name_pretty": "obs3", "latitude_deg": 60.0, "longitude_deg": -148,"height_m": 0},
+ {"id": "targ4","name": "Greenland","name_pretty": "obs4", "latitude_deg": 69.0, "longitude_deg": -49,"height_m": 0}
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {
+ },
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {
+ }
+ }
+
+ }
+
+}
\ No newline at end of file
diff --git a/inputs/reference_model_definitions/obs_refs/blank.json b/inputs/reference_model_definitions/obs_refs/blank.json
new file mode 100644
index 0000000..e391a41
--- /dev/null
+++ b/inputs/reference_model_definitions/obs_refs/blank.json
@@ -0,0 +1,20 @@
+{
+ "version": "0.0.1",
+ "which_config": "operational_profile_config",
+ "where_should_i_be": "inputs/cases/",
+ "what_goes_here": "things that only matter to the choices to be made about the current case.",
+ "ops_profile_params": {
+ "obs_params": {
+            "num_targets": 0,
+ "target_set_name": "blank, for generating. replace this with the description of the new observation set.",
+ "target_set_id": "4",
+ "elevation_cutoff_deg": 60,
+ "targets": []
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {},
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {}
+ }
+ }
+}
\ No newline at end of file
diff --git a/inputs/reference_model_definitions/obs_refs/mid_earth20.json b/inputs/reference_model_definitions/obs_refs/mid_earth20.json
new file mode 100644
index 0000000..46cf051
--- /dev/null
+++ b/inputs/reference_model_definitions/obs_refs/mid_earth20.json
@@ -0,0 +1,182 @@
+{
+ "version": "0.0.1",
+ "which_config": "operational_profile_config",
+ "where_should_i_be": "inputs/cases/",
+ "what_goes_here": "things that only matter to the choices to be made about the current case.",
+ "ops_profile_params": {
+ "obs_params": {
+            "num_targets": 20,
+ "target_set_name": "20 observations from -30 latitude to +30 latitude",
+ "target_set_version": "1",
+ "target_set_id": "4",
+ "elevation_cutoff_deg": 60,
+ "targets": [
+ {
+ "id": "obs0",
+ "name": "generated0",
+ "name_pretty": "generated0",
+ "latitude_deg": 2,
+ "longitude_deg": -178,
+ "height_m": 0
+ },
+ {
+ "id": "obs1",
+ "name": "generated1",
+ "name_pretty": "generated1",
+ "latitude_deg": 16,
+ "longitude_deg": 112,
+ "height_m": 0
+ },
+ {
+ "id": "obs2",
+ "name": "generated2",
+ "name_pretty": "generated2",
+ "latitude_deg": 27,
+ "longitude_deg": 137,
+ "height_m": 0
+ },
+ {
+ "id": "obs3",
+ "name": "generated3",
+ "name_pretty": "generated3",
+ "latitude_deg": -13,
+ "longitude_deg": -10,
+ "height_m": 0
+ },
+ {
+ "id": "obs4",
+ "name": "generated4",
+ "name_pretty": "generated4",
+ "latitude_deg": -14,
+ "longitude_deg": -70,
+ "height_m": 0
+ },
+ {
+ "id": "obs5",
+ "name": "generated5",
+ "name_pretty": "generated5",
+ "latitude_deg": -4,
+ "longitude_deg": -116,
+ "height_m": 0
+ },
+ {
+ "id": "obs6",
+ "name": "generated6",
+ "name_pretty": "generated6",
+ "latitude_deg": -5,
+ "longitude_deg": -169,
+ "height_m": 0
+ },
+ {
+ "id": "obs7",
+ "name": "generated7",
+ "name_pretty": "generated7",
+ "latitude_deg": -12,
+ "longitude_deg": -35,
+ "height_m": 0
+ },
+ {
+ "id": "obs8",
+ "name": "generated8",
+ "name_pretty": "generated8",
+ "latitude_deg": -20,
+ "longitude_deg": 169,
+ "height_m": 0
+ },
+ {
+ "id": "obs9",
+ "name": "generated9",
+ "name_pretty": "generated9",
+ "latitude_deg": 10,
+ "longitude_deg": -112,
+ "height_m": 0
+ },
+ {
+ "id": "obs10",
+ "name": "generated10",
+ "name_pretty": "generated10",
+ "latitude_deg": -2,
+ "longitude_deg": -28,
+ "height_m": 0
+ },
+ {
+ "id": "obs11",
+ "name": "generated11",
+ "name_pretty": "generated11",
+ "latitude_deg": -20,
+ "longitude_deg": -114,
+ "height_m": 0
+ },
+ {
+ "id": "obs12",
+ "name": "generated12",
+ "name_pretty": "generated12",
+ "latitude_deg": -24,
+ "longitude_deg": -142,
+ "height_m": 0
+ },
+ {
+ "id": "obs13",
+ "name": "generated13",
+ "name_pretty": "generated13",
+ "latitude_deg": -19,
+ "longitude_deg": 49,
+ "height_m": 0
+ },
+ {
+ "id": "obs14",
+ "name": "generated14",
+ "name_pretty": "generated14",
+ "latitude_deg": -15,
+ "longitude_deg": -102,
+ "height_m": 0
+ },
+ {
+ "id": "obs15",
+ "name": "generated15",
+ "name_pretty": "generated15",
+ "latitude_deg": 3,
+ "longitude_deg": 67,
+ "height_m": 0
+ },
+ {
+ "id": "obs16",
+ "name": "generated16",
+ "name_pretty": "generated16",
+ "latitude_deg": -7,
+ "longitude_deg": 130,
+ "height_m": 0
+ },
+ {
+ "id": "obs17",
+ "name": "generated17",
+ "name_pretty": "generated17",
+ "latitude_deg": -7,
+ "longitude_deg": -141,
+ "height_m": 0
+ },
+ {
+ "id": "obs18",
+ "name": "generated18",
+ "name_pretty": "generated18",
+ "latitude_deg": 16,
+ "longitude_deg": 54,
+ "height_m": 0
+ },
+ {
+ "id": "obs19",
+ "name": "generated19",
+ "name_pretty": "generated19",
+ "latitude_deg": -15,
+ "longitude_deg": -164,
+ "height_m": 0
+ }
+ ]
+ },
+ "link_disables": {
+ "_comment": "The satellite and GS ids below will always be interpreted as strings. if the ids are specified as integers in other files, they will be considered the same if their string representation is the same",
+ "dlnk_direc_disabled_gs_ID_by_sat_IDstr": {},
+ "xlnk_direc_disabled_xsat_ID_by_sat_IDstr": {}
+ }
+ }
+}
\ No newline at end of file
diff --git a/inputs/reference_model_definitions/obs_refs/rem.py b/inputs/reference_model_definitions/obs_refs/rem.py
new file mode 100644
index 0000000..dc3d3e3
--- /dev/null
+++ b/inputs/reference_model_definitions/obs_refs/rem.py
@@ -0,0 +1,11 @@
+import json
+
+with open('blank.json','r+') as f:
+ data = json.load(f)
+
+data['ops_profile_params']['obs_params']['targets'] = []
+
+with open('blank.json','w') as f:
+ json.dump(data, f, indent = 4)
+
+
diff --git a/inputs/reference_model_definitions/payload_refs/tropics_payload.json b/inputs/reference_model_definitions/payload_refs/tropics_payload.json
new file mode 100644
index 0000000..d38dfea
--- /dev/null
+++ b/inputs/reference_model_definitions/payload_refs/tropics_payload.json
@@ -0,0 +1,21 @@
+{
+ "config_type":"payload_model_def",
+ "version-sat_def":"0.0.1",
+
+ "payload_model_definition":{
+ "payload_type_name":"tropics_payload",
+ "version-sat_type":"0.0.1",
+ "description":"Contains the payload information. Not sure if this is used but just in case",
+
+ "payload_params":{
+ "payload_data_rate_Mbps": 3,
+ "power_consumption_W":{
+ "base":0,
+ "obs":-10
+ },
+ "min_duration_s": {
+ "obs": 15
+ }
+ }
+ }
+}
diff --git a/inputs/reference_model_definitions/sat_refs/tropics.json b/inputs/reference_model_definitions/sat_refs/tropics.json
new file mode 100644
index 0000000..519c6f6
--- /dev/null
+++ b/inputs/reference_model_definitions/sat_refs/tropics.json
@@ -0,0 +1,119 @@
+{
+ "config_title": "sat_model_def",
+ "version-sat_def": "0.0.1",
+ "sat_model_definition": {
+ "sat_type_name": "tropics",
+ "sat_model_params": {
+ "payload_def": {
+ "definition": "local",
+ "name": "tropics_pl",
+ "payload_params": {
+ "name": "tropics_pl",
+ "payload_data_rate_Mbps": 60,
+ "power_consumption_W": {
+ "base": 0,
+ "obs": -2
+ },
+ "min_duration_s": {
+ "obs": 15
+ }
+ }
+ },
+ "power_params": {
+ "power_consumption_W": {
+ "base": -11.1,
+ "dlnk": -20,
+ "xlnk_tx": -20,
+ "xlnk_rx": -5,
+ "orbit_insunlight_average_charging": 20.8
+ },
+ "battery_storage_Wh": {
+ "e_min": 2.78,
+ "e_max": 30,
+ "charge_efficiency": 1,
+ "discharge_efficiency": 1
+ }
+ },
+ "data_storage_params": {
+ "d_min": 0,
+ "d_max": 200
+ },
+ "initial_state": {
+ "batt_e_Wh": 30
+ },
+ "activity_params": {
+ "transition_time_s": {
+ "inter-sat": 0,
+ "dlnk-dlnk": 0,
+ "intra-sat": {
+ "obs-obs": 0,
+ "obs-xlnk": 0,
+ "obs-dlnk": 0,
+ "dlnk-obs": 0,
+ "dlnk-xlnk": 0,
+ "dlnk-dlnk": 0,
+ "xlnk-obs": 0,
+ "xlnk-xlnk": {
+ "intra-orbit,same direction": 0.0,
+ "intra-orbit,different direction": 0.0,
+ "intra to inter-orbit/inter to intra-orbit": 0.0,
+ "inter-orbit,same orbit,same satellite": 0.0,
+ "inter-orbit,same orbit,different satellite": 0.0,
+ "inter-orbit,different orbit": 0.0
+ },
+ "xlnk-dlnk": 0
+ }
+ },
+ "min_duration_s": {
+ "dlnk": 60,
+ "xlnk": 60
+ }
+ },
+ "sat_link_params": [
+ {
+ "dlnk_params": {
+ "HPBW_rad": 0.2,
+ "P_tx_W": 0.5,
+ "comm_type": {
+ "built_in": false,
+ "name": "Tropics_dlnk"
+ }
+ },
+ "xlnk_params": {
+ "HPBW_rad": 0.2,
+ "P_tx_W": 0.5,
+ "comm_type": {
+ "built_in": false,
+ "name": "Tropics_xlnk"
+ }
+ },
+ "pointing_error_deg": 0.2,
+ "dlnk_disallowed": {}
+ }
+ ]
+ },
+ "sim_satellite_params": {
+ "crosslink_new_plans_only_during_BDT": false,
+ "dv_epsilon_Mb": 1,
+ "time_epsilon_s": 1,
+ "state_simulator": {
+ "es_state_update": {
+ "add_noise": 0,
+ "noise_params": {
+ "noise_type": "fractional_normal_edot",
+ "average": 0,
+ "std": 0.01,
+ "min": 0.95,
+ "max": 1.05
+ }
+ }
+ },
+ "sat_schedule_arbiter_params": {
+ "replan_release_wait_time_s": 0,
+ "allow_lp_execution": true,
+ "replan_interval_s": 900,
+ "frac_dv_lost_for_activity_failure_threshold": 0.5
+ }
+ }
+ }
+}
diff --git a/inputs/reference_model_definitions/sat_refs/zhou_original_sat.json b/inputs/reference_model_definitions/sat_refs/zhou_original_sat.json
index 86c879b..8c7889a 100644
--- a/inputs/reference_model_definitions/sat_refs/zhou_original_sat.json
+++ b/inputs/reference_model_definitions/sat_refs/zhou_original_sat.json
@@ -1,17 +1,27 @@
{
- "config_type":"sat_model_def",
- "version-sat_def":"0.0.1",
-
-
- "sat_model_definition":{
- "sat_type_name":"zhou_original_sat",
- "version-sat_type":"0.0.1",
- "description":"The encompases all the things previously used in CIRCINUS, consolidated into this form.",
+ "config_title": "sat_model_def",
+ "version-sat_def": "0.0.1",
+ "sat_model_definition": {
+ "sat_type_name": "zhou_original_sat",
"default_payload_ref_model_name":"zhou_original_payload",
-
"sat_model_params": {
- "power_params":{
- "power_consumption_W": {
+ "payload_def": {
+ "definition": "local",
+ "name": "zhou_original_payload",
+ "payload_params": {
+ "name": "zhou_original_payload",
+ "payload_data_rate_Mbps": 50,
+ "power_consumption_W": {
+ "base": 0,
+ "obs": -10
+ },
+ "min_duration_s": {
+ "obs": 15
+ }
+ }
+ },
+ "power_params": {
+ "power_consumption_W": {
"base": -10,
"dlnk": -20,
"xlnk_tx": -20,
@@ -21,42 +31,14 @@
"battery_storage_Wh": {
"e_min": 2.78,
"e_max": 13.89,
- "charge_efficiency": 1.0,
- "discharge_efficiency": 1.0
- },
- "_comment": "most of the power consumption and energy constraints were taken from the 2016 Kennedy, Cahoy JAIS paper. Xlnk and Dlnk power taken from the MDO paper, which as of March 2018 is not quite published. Orbit average charging was calculated from the 8.25 Wh figure (8.25 + 20% margin, all acquired over ~55 minutes of in-sunlight time in a 600 km orbit for 11.25 watts average during sunlight) from page 12 of Smallsat 2014 MiRaTA paper - Blackwell, W. J. et al, Microwave Radiometer Technology Acceleration Mission (MiRaTA): Advancing Weather Remote Sensing with Nanosatellites. All these values are shown for zhou 2017."
+ "charge_efficiency": 1,
+ "discharge_efficiency": 1
+ }
},
"data_storage_params": {
- "_comment":"unit:GB",
"d_min": 0,
"d_max": 12
},
- "sat_link_params": [
- {
- "dlnk_params": {
- "HPBW_rad": 0.2,
- "P_tx_W": 0.5,
- "comm_type": {
- "built_in": false,
- "name": "Zhou_2017_dlnk"
- }
- },
- "xlnk_params": {
- "HPBW_rad": 0.2,
- "P_tx_W": 0.5,
- "comm_type": {
- "built_in": false,
- "name": "Zhou_2017_xlnk"
- }
- },
- "pointing_error_deg": 0.01,
- "_comments": [
- "pointing_error_deg is assuming a pointing accuracy achievable with star trackers for now"
- ],
- "dlnk_disallowed": {
- }
- }
- ],
"initial_state": {
"batt_e_Wh": 12
},
@@ -73,12 +55,12 @@
"dlnk-dlnk": 0,
"xlnk-obs": 0,
"xlnk-xlnk": {
- "intra-orbit,same direction": 0,
- "intra-orbit,different direction": 0,
- "intra to inter-orbit/inter to intra-orbit": 0,
- "inter-orbit,same orbit,same satellite": 0,
- "inter-orbit,same orbit,different satellite": 0,
- "inter-orbit,different orbit": 0
+ "intra-orbit,same direction": 0.0,
+ "intra-orbit,different direction": 0.0,
+ "intra to inter-orbit/inter to intra-orbit": 0.0,
+ "inter-orbit,same orbit,same satellite": 0.0,
+ "inter-orbit,same orbit,different satellite": 0.0,
+ "inter-orbit,different orbit": 0.0
},
"xlnk-dlnk": 0
}
@@ -87,16 +69,37 @@
"dlnk": 60,
"xlnk": 60
}
- }
+ },
+ "sat_link_params": [
+ {
+ "dlnk_params": {
+ "HPBW_rad": 0.2,
+ "P_tx_W": 0.5,
+ "comm_type": {
+ "built_in": false,
+ "name": "Zhou_2017_dlnk"
+ }
+ },
+ "xlnk_params": {
+ "HPBW_rad": 30,
+ "P_tx_W": 1,
+ "comm_type": {
+ "built_in": false,
+ "name": "Zhou_2017_xlnk"
+ }
+ },
+ "pointing_error_deg": 0.01,
+ "dlnk_disallowed": {}
+ }
+ ]
},
"sim_satellite_params": {
- "_comment": "currently params are same for every sat",
"crosslink_new_plans_only_during_BDT": false,
- "dv_epsilon_Mb" : 1.0,
+ "dv_epsilon_Mb": 1,
"time_epsilon_s": 1,
"state_simulator": {
"es_state_update": {
- "add_noise": false,
+ "add_noise": 0,
"noise_params": {
"noise_type": "fractional_normal_edot",
"average": 0,
@@ -106,17 +109,12 @@
}
}
},
- "sat_schedule_arbiter_params" : {
- "_comments": [
- "being conservative with the wait time here, considering how quickly the local planner runs on my machine. but I'm assuming that you wouldn't be able to run a commercial solver (e.g. gurobi) on a spacecraft... well, maybe you could",
- "figured 15 mins (900 seconds) was a good wait time before automatically running the LP again"
- ],
+ "sat_schedule_arbiter_params": {
"replan_release_wait_time_s": 0,
- "allow_lp_execution": false,
+ "allow_lp_execution": 0,
"replan_interval_s": 900,
"frac_dv_lost_for_activity_failure_threshold": 0.5
}
}
}
-
}
diff --git a/requirements.txt b/requirements.txt
index d7c3d6b..a9bdb63 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,12 @@
+pyutilib==5.8.0
jdcal==1.4
Pyomo==5.5.1
-numpy==1.15.4
+numpy==1.21.0
matplotlib==3.0.2
scipy==1.1.0
ipdb==0.11
poliastro==0.11.0
networkx==2.2
+pyOpenSSL==19.1.0
+pyyaml==5.4.1
+
diff --git a/scripts/run_const_sim.sh b/scripts/run_const_sim.sh
index a2737dd..97967a8 100755
--- a/scripts/run_const_sim.sh
+++ b/scripts/run_const_sim.sh
@@ -7,7 +7,7 @@ printf "running: $0\n"
# NOTE! THIS MUST MATCH PARAMS IN const_sim_params_inputs!
restore_pickle_cmdline_opt=false
restore_pickle_cmdline_name=""
-PoM='python'
+PoM='python' # set to 'matlab' to use MATLAB for orbit propagation & access calculation
CASE_NAME="NONE" # Don't replace here; replace using "--use NEW_CASE_DIR_NAME" as flag and value on CLI
OPS_CASE="NONE" # USE TBD
@@ -17,6 +17,8 @@ RECOMPUTE_ALL=false
RECOMP_ORBIT_PROP=false
RECOMP_ORBIT_LINK=false
STANDALONE_GP=false
+GROUND_ONLY=false
+SATELLITE_ONLY=false
ipdb=''
# ---- Accept user command line input ---- #
@@ -37,6 +39,8 @@ then
echo "To call, do so in this manner: ./runner_const_sim.sh [--flag [arg1 [arg2 []]]"
echo "With no flags or arguments, defaults are used, including the nominal use case."
echo "Available flags:"
+ printf '%s\t\t%s\n' "--ground_sim" "Starts only ground simulation (GP, Ground Stations)"
+ printf '%s\t\t%s\n' "--satellite" "Starts standalone satellite simulation"
printf '%s\t\t%s\n' "--rem_gp" "Starts standalone GP server in background before launching sim."
printf '%s\t\t%s\n' "--F_all" "Forces all modules to be recomputed, rather than using previously computed (supposedly identical) versions of input files."
printf '%s\t\t%s\n' "--F_prop" "Forces propagation module to be recomputed, rather than using previously computed (supposedly identical) version."
@@ -46,12 +50,14 @@ then
printf '%s\t\t%s\n' "--use [arg]" "Supply the folder-name of the use case under circinus/inputs/cases/[use case]"
exit 0
fi
- if [ "${!i}" = "--rem_gp" ]; then STANDALONE_GP=true; echo "Starting standalone GP server in background: $STANDALONE_GP"; fi
- if [ "${!i}" = "--F_all" ]; then RECOMPUTE_ALL=true; echo "Forcing recomputation of all modules: $RECOMPUTE_ALL "; fi
- if [ "${!i}" = "--F_prop" ]; then RECOMP_ORBIT_PROP=true; echo "Forcing recomputation orbit propagation module: $RECOMP_ORBIT_PROP "; fi
- if [ "${!i}" = "--F_link" ]; then RECOMP_ORBIT_LINK=true; echo "Forcing recomputation orbit link module: $RECOMP_ORBIT_LINK "; fi
- if [ "${!i}" = "--recomp" ]; then RECOMPUTE_ALL=true; echo "Forcing recomputation of all modules: $RECOMPUTE_ALL "; fi
- if [ "${!i}" = "--fromPkl" ]; then h=$((i+1)); restore_pickle_cmdline_opt=true; restore_pickle_cmdline_name="${!h}"; ((skip=1)); echo "restore_pickle_cmdline_opt from $restore_pickle_cmdline_name"; fi
+ if [ "${!i}" = "--ground" ]; then GROUND_ONLY=true; echo "Starting only ground simulation (GP, Ground Stations): $GROUND_ONLY"; fi
+ if [ "${!i}" = "--satellite" ]; then SATELLITE_ONLY=true; echo "Starting only satellite simulation: $SATELLITE_ONLY"; fi
+ if [ "${!i}" = "--rem_gp" ]; then STANDALONE_GP=true; echo "Starting standalone GP server in background: $STANDALONE_GP"; fi
+ if [ "${!i}" = "--F_all" ]; then RECOMPUTE_ALL=true; echo "Forcing recomputation of all modules: $RECOMPUTE_ALL "; fi
+ if [ "${!i}" = "--F_prop" ]; then RECOMP_ORBIT_PROP=true; echo "Forcing recomputation orbit propagation module: $RECOMP_ORBIT_PROP "; fi
+ if [ "${!i}" = "--F_link" ]; then RECOMP_ORBIT_LINK=true; echo "Forcing recomputation orbit link module: $RECOMP_ORBIT_LINK "; fi
+ if [ "${!i}" = "--recomp" ]; then RECOMPUTE_ALL=true; echo "Forcing recomputation of all modules: $RECOMPUTE_ALL "; fi
+ if [ "${!i}" = "--fromPkl" ]; then h=$((i+1)); restore_pickle_cmdline_opt=true; restore_pickle_cmdline_name="${!h}"; ((skip=1)); echo "restore_pickle_cmdline_opt from $restore_pickle_cmdline_name"; fi
if [ "${!i}" = "--ipdb" ]; then ipdb=' -m ipdb -c continue'; echo "Running with IPDB enabled for exceptions "; fi
if [ "${!i}" = "--use" ]; then h=$((i+1)); CASE_NAME="${!h}"; ((skip=1)); echo "Use case: $CASE_NAME"; fi
done
@@ -130,7 +136,6 @@ fi
# --------------------------------- EXECUTION --------------------------------- #
-
# -------- ORBIT PROPAGATION -------- #
if [ ! -f $prop_data_r ] || [ $RECOMP_ORBIT_PROP == true ] || [ $RECOMPUTE_ALL == true ]; # Avoid recomputing if it exists, unless told otherwise
then
@@ -138,6 +143,7 @@ then
cd $ORBIT_PROP_PATH/python_runner &>/dev/null
printf "\nrunning circinus_orbit_propagation:\npython$ipdb runner_orbitprop.py --inputs_location $PATH_TO_INPUTS --case_name $CASE_NAME --prop_and_accesses_language $PoM\n\n"
python $ipdb runner_orbitprop.py --inputs_location $PATH_TO_INPUTS --case_name $CASE_NAME --prop_and_accesses_language $PoM
+ echo "$ORBIT_PROP_PATH\n"
else
printf "Skipping Orbit Propagation Calculation, exists...\n\n"
@@ -153,13 +159,13 @@ then
printf "\nrunning circinus_orbit_link_public:\npython$ipdb runner_orbitlink.py --inputs_location $PATH_TO_INPUTS --case_name $CASE_NAME --link_calculator_language $PoM\n\n"
python $ipdb runner_orbitlink.py --inputs_location $PATH_TO_INPUTS --case_name $CASE_NAME --link_calculator_language $PoM
+
else
printf "Skipping Orbit Link Calculation, exists...\n\n"
fi
if [ ! -f $data_rates_r ]; then echo "Orbit Link Calc Failed, exiting..."; exit 1; fi
-
if [ $STANDALONE_GP == true ]; # Avoid recomputing if it exists, unless told otherwise
then
# ./run_ind_gp.sh > /dev/null 2>&1 & # allowing this to show is more indicative of progress to user
@@ -167,12 +173,11 @@ then
sleep 5 # time to spin up
fi
-
# -------- SIMULATION -------- #
cd $CIRCINUS_SIM_PATH/python_runner/
-# replace 'python' with 'mprof run' (ater pip installing memory_profiler) to track memory use; afterwards, 'mprof plot' in the python folder will display
-python $ipdb runner_const_sim.py --inputs_location $PATH_TO_INPUTS --case_name $CASE_NAME --rem_gp $STANDALONE_GP --restore_pickle "$restore_pickle_cmdline_arg"
-# python -m cProfile
+# replace 'python' with 'mprof run -M' (after pip installing memory_profiler) to track memory use; afterwards, 'mprof plot' in the python folder will display
+python $ipdb runner_const_sim.py --inputs_location $PATH_TO_INPUTS --case_name $CASE_NAME --rem_gp $STANDALONE_GP --ground_sim $GROUND_ONLY --satellite $SATELLITE_ONLY --restore_pickle "$restore_pickle_cmdline_arg"
+ # python -m cProfile
# ---- Replace unused submodule empty directories, quietly ---- #
diff --git a/scripts/run_gp.sh b/scripts/run_gp.sh
index 74210b9..57f0f75 100755
--- a/scripts/run_gp.sh
+++ b/scripts/run_gp.sh
@@ -113,6 +113,7 @@ fi
pushd $GLOBAL_PLANNER_PATH/python_runner/
echo "python runner_gp.py --prop_inputs_file "$prop_inputs_r" --data_rates_file "$data_rates_r" --link_inputs_file "$link_inputs_r" $gp_general_inputs_arg1 "$gp_general_inputs_arg2" --gp_inst_inputs_file "$gp_inst_inputs_r" $rs_s1_pickle_arg1 "$rs_s1_pickle_arg2" $rs_s2_pickle_arg1 "$rs_s2_pickle_arg2" $as_pickle_arg1 "$as_pickle_arg2""
+# replace 'python' with 'mprof run' (after pip installing memory_profiler) to track memory use; afterwards, 'mprof plot' in the python folder will display
python runner_gp.py --prop_inputs_file "$prop_inputs_r" --data_rates_file "$data_rates_r" --link_inputs_file "$link_inputs_r" $gp_general_inputs_arg1 "$gp_general_inputs_arg2" --gp_inst_inputs_file "$gp_inst_inputs_r" $rs_s1_pickle_arg1 "$rs_s1_pickle_arg2" $rs_s2_pickle_arg1 "$rs_s2_pickle_arg2" $as_pickle_arg1 "$as_pickle_arg2"
# # python -m cProfile runner_gp.py --prop_inputs_file "$prop_inputs_r" --data_rates_file "$data_rates_r" --link_inputs_file "$link_inputs_r" $rs_s1_pickle_arg $rs_s2_pickle_arg $as_pickle_arg
# python -m ipdb -c continue runner_gp.py --prop_inputs_file "$prop_inputs_r" --data_rates_file "$data_rates_r" --link_inputs_file "$link_inputs_r" $gp_general_inputs_arg1 "$gp_general_inputs_arg2" --gp_inst_inputs_file "$gp_inst_inputs_r" $rs_s1_pickle_arg1 "$rs_s1_pickle_arg2" $rs_s2_pickle_arg1 "$rs_s2_pickle_arg2" $as_pickle_arg1 "$as_pickle_arg2"
diff --git a/scripts/run_ind_gp.sh b/scripts/run_ind_gp.sh
index e11d128..40aa9f1 100755
--- a/scripts/run_ind_gp.sh
+++ b/scripts/run_ind_gp.sh
@@ -1,6 +1,6 @@
#! /bin/bash
printf "running: $0\n"
-
+printf "IN INDGP"
# ---------------------------- PATHS AND FILE NAMES ---------------------------- #
#PATH TO LOCATION OF THIS SCRIPT (FROM WHICH ALL OTHER PATHS ARE RELATIVE).
@@ -16,4 +16,4 @@ PATH_TO_MODULES=$CIRCINUS_PATH/source
# --------------------------------- EXECUTE GP --------------------------------- #
cd $PATH_TO_MODULES/central_global_planner
-python $ipdb cgp_main.py --inputs_location $PATH_TO_INPUTS
\ No newline at end of file
+python $ipdb cgp_main.py --inputs_location $PATH_TO_INPUTS
diff --git a/scripts/tools/case_generator/case_gen.py b/scripts/tools/case_generator/case_gen.py
new file mode 100644
index 0000000..15c86de
--- /dev/null
+++ b/scripts/tools/case_generator/case_gen.py
@@ -0,0 +1,209 @@
+import pandas
+import csv
+import json
+import os
+import sys
+from json_gen import to_json
+from injected_obs_generator import inject
+from gs_gen import create_gs
+import dateutil.parser
+
+#Script for creating new SPRINT cases. Every SPRINT case needs the following defined:
+# 1. constellation_config
+# 2. operational_profile_config
+# 3. sim_case_config
+# 4. ground_station_network_config
+
+def create_case(case_name = "NEW_CASE", gen_obs = True, constellation = "example_const", operation= "example_op", sim= "example_sim", ground_station= "example_gs", sat = None):
+ inputs = [constellation, operation, sim, ground_station]
+ cwd = os.path.abspath(os.getcwd())
+ input_path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '../../../', 'inputs/cases'))
+ sat_path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '../../../', 'inputs/reference_model_definitions/sat_refs'))
+ obs_path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '../../../', 'inputs/reference_model_definitions/obs_refs'))
+
+
+ if all(elem[0:7] == "example" for elem in inputs):
+ print("Welcome to the case gen. Please provide the following:")
+ if case_name =="NEW_CASE":
+ case_name = input("Case Name: ") or "NEW_CASE"
+ case_path = input_path + '/' + case_name
+ gen_obs = input('Generate observations? (True/False, default True) ')
+ if gen_obs.lower() == "true" or "t":
+ gen_obs = True
+ elif gen_obs.lower() == "false" or "f":
+ gen_obs = False
+ else:
+ gen_obs = True
+ if gen_obs:
+ num_inject = input('How many injected observations per CubeSat? Recommended 2-3: ')
+ try:
+ num_inject = int(num_inject)
+ except:
+ raise('Error! That was not an integer!')
+ constellation = input('Constellation location. Press enter for the example. ') or "example_const"
+ op_loc = input('Operation. These are the target observation locations. Enter NEW, REFERENCE, GENERATE, or press enter for the example. ') or "example_op"
+ if op_loc.lower() == 'new':
+ operation = input('Enter the location for the new operation file. ')
+ elif op_loc.lower() == 'reference' or op_loc.lower() == 'ref':
+ ref = input('Enter the number of the reference. Options are: \n1. Tropics 20 \n2. Tropics 100 \n3. Chesapeake Bay \n4. Zhou 2017\n5. Mid Earth 20 (from -30 to 30 lat)\n6. Generate New\n')
+ gen_flag = False
+ if ref == '1':
+ ref = 'Tropics_20.json'
+ elif ref == '2':
+ ref = 'Tropics_100.json'
+ elif ref == '3':
+ ref = 'Chesapeake_Bay.json'
+ elif ref == '4':
+ ref = 'Zhou_2017.json'
+ elif ref == '5':
+ ref = 'mid_earth20.json'
+ else:
+ raise("Error! You did not enter a valid number.")
+ operation = cwd+'/operational_profile_config.json'
+ os.system('cp '+ obs_path + '/' + ref +' '+ operation)
+
+
+ elif op_loc.lower() == 'generate' or op_loc.lower() =='gen':
+ num = int(input("Number of targets: "))
+ lat_min = int(input(("Minimum latitude: ")))
+ lat_max = int(input(("Maximum latitude: ")))
+ long_min = int(input("Minimum longitude: "))
+ long_max = int(input("Maximum longitude: "))
+ file_loc = obs_path+'/blank.json'
+ os.system('cp '+ file_loc+ ' operational_profile_config.json')
+ from obs_gen import observation_generator
+ observation_generator(num,lat_min,lat_max,long_min,long_max,'operational_profile_config.json')
+ operation = cwd+'/operational_profile_config.json'
+
+
+ sim = input('Sim location. Press enter for the example. ') or "example_sim"
+ gs_loc = input('Ground Station. Enter NEW, REFERENCE, or press enter for the example. ') or "example_gs"
+ if gs_loc.lower() == 'new':
+ ground_station = input('Enter the location for the new ground station file. ')
+ elif gs_loc.lower() == 'reference' or gs_loc.lower() == 'ref':
+ ref = input('Enter the number of the reference. Options are: \n1. KSAT Lite \n2. KSAT \n3. NASA Near Earth Network \n4. Spaceflight Systems \n5. Zhou Original\n ')
+ if ref == '1':
+ ref = 'KSAT_Lite'
+ elif ref == '2':
+ ref = 'KSAT'
+ elif ref =='3':
+ ref = 'NASA_Nen'
+ elif ref == '4':
+ ref = 'SpaceFlight'
+ elif ref == '5':
+ ref = 'Zhou_Original'
+ else:
+ raise("Error! You did not enter a valid number.")
+
+        on = input('Enter the stations you want turned on, separated by a comma with no spaces. They are cAsE sEnSiTiVe. Type ALL to include all. ')
+ if on.lower() == 'all':
+ pass
+ else:
+ on= on.split(',')
+ dlnk = input('Enter the name of the downlink. Be sure it matches the downlink in your CubeSat. ')
+ other = input('Would you like to modify any other parameters? (Y/N, default no) ')
+ if other.lower() == 'y':
+ ri = input('Enter the replan interval, in seconds: ') or None
+ rrw = input('Enter the replan release wait time, in seconds: ') or None
+ rf = input('Release first plans immediately? ') or None
+ ground_station = create_gs(ref,on,dlnk,ri,rrw,rf)
+ else:
+ ground_station = create_gs(ref, on, dlnk)
+
+ sat = input('CubeSat location. Press enter if the CubeSat already exists. ') or None
+
+ inputs = [constellation, operation, sim, ground_station]
+
+ if case_name == "NEW_CASE":
+ print("No new case name provided. Will be saved as NEW_CASE")
+ case_path = input_path + '/' + case_name
+
+    #Map each input to the filename SPRINT expects; the CubeSat file is handled separately since its name varies per satellite
+ file_name = {constellation:'constellation_config.json',
+ operation:'operational_profile_config.json',
+ sim:'sim_case_config.json',
+ ground_station:'ground_station_network_config.json'}
+
+
+ try:
+ os.mkdir(case_path)
+ except FileExistsError:
+ print("Path folder already exists.")
+
+ example_path = cwd + '/examples/'
+
+    #Handle the satellite definition first, since it is copied to a different destination (sat_refs)
+ if sat is not None:
+ #get the name. going to assume if it's a json you've named it the right thing...
+ file_extension = os.path.splitext(sat)[1]
+ if file_extension == '.xlsx':
+ excel_data_df = pandas.read_excel(sat, sheet_name = 'Sheet1')
+ sat_name= excel_data_df.iat[3,1] +'.json'
+ to_json(sat, sat_name)
+ else:
+ sat_name = os.path.splitext(sat)
+
+ os.system('cp '+ sat_name +' '+ sat_path)
+ os.system('rm '+sat_name)
+
+ for elem in inputs:
+ cleanup = False
+ name = file_name[elem]
+ if elem[0:7] == "example":
+ json_file = example_path+name
+ else:
+ file_extension = os.path.splitext(elem)[1]
+ if file_extension == '.json':
+ cleanup = True
+ json_file = name
+
+ else:
+ #if it's a constellation, check which sheet to use
+ if name == 'constellation_config.json':
+ excel_data_df = pandas.read_excel(elem, sheet_name = 'Sheet1')
+ if excel_data_df.iat[2,1] == 'Plane':
+ sheet = 'Sheet2'
+ elif excel_data_df.iat[2,1] == 'Individual Assignment':
+ sheet = 'Sheet3'
+ elif excel_data_df.iat[2,1] == 'Walker':
+ sheet = 'Sheet4'
+ else:
+ raise("Constellations must be defined by planes, individually, or as a Walker constellation.")
+ else:
+ sheet = 'Sheet2'
+ to_json(elem, name, sheet)
+ cleanup = True
+ json_file = name
+
+ os.system('cp '+ json_file +' '+ case_path)
+ #injected obs want to work on the copy, not the originals
+ json_loc = case_path+'/'+name
+
+ #if we generate injected observations, we need the number of sats
+ if gen_obs and name == 'constellation_config.json':
+ with open(json_loc,'r+') as f:
+ data = json.load(f)
+ num_sats = data['constellation_definition']['constellation_params']['num_satellites']
+ elif gen_obs and name == 'sim_case_config.json':
+ #get start date/time
+ with open(json_loc,'r+') as f:
+ data = json.load(f)
+ start = dateutil.parser.isoparse(data['scenario_params']['start_utc'])
+ end = dateutil.parser.isoparse(data['scenario_params']['end_utc'])
+ inject(json_loc,num_sats,num_inject,start,end)
+
+ if cleanup:
+ os.system('rm '+name)
+ run = input("Case creation successful. Would you like to run the case? (Y/N) ")
+ if run.lower() == 'y':
+ print("Running "+case_name+"...")
+
+ elif run.lower() == 'n':
+ print("Goodbye!")
+
+
+
+
+
+if __name__ == '__main__':
+ create_case(*sys.argv[1:])
diff --git a/scripts/tools/case_generator/constellation_blank.xlsx b/scripts/tools/case_generator/constellation_blank.xlsx
new file mode 100644
index 0000000..7097a9e
Binary files /dev/null and b/scripts/tools/case_generator/constellation_blank.xlsx differ
diff --git a/scripts/tools/case_generator/cubesat_blank.xlsx b/scripts/tools/case_generator/cubesat_blank.xlsx
new file mode 100644
index 0000000..e4caa11
Binary files /dev/null and b/scripts/tools/case_generator/cubesat_blank.xlsx differ
diff --git a/scripts/tools/case_generator/excelgen.py b/scripts/tools/case_generator/excelgen.py
new file mode 100644
index 0000000..32b5eef
--- /dev/null
+++ b/scripts/tools/case_generator/excelgen.py
@@ -0,0 +1,156 @@
+# Generates Excel formula strings (written to out.txt/out2.txt/out3.txt) for the case-definition spreadsheet templates
+
+a = open("out.txt","w")
+b = open("out2.txt","w")
+c= open("out3.txt","w")
+
+type = "gs"
+
+y = []
+z = []
+i = []
+letter = ["B","D","F","H","J","L"]
+adj_letter = ["A","C","E","G","I","K"]
+let_ind = 1
+start = ["14","27","39"]
+start_ind = 0
+
+if type == "const_plane":
+ for x in range(14):
+ ind = str(x+1)
+ ind2 = str(24+x*12)
+
+ y.append("=IF(Sheet1!$B$5>"+ind+',\"LIST\",\"\") \n')
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"def_type\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"orbit_indx\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"plane_def\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"plane_def\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"plane_def\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"plane_def\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"plane_def\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"first_M_deg\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"spacing_type\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"first_sat_id\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"sats_in_plane\",\"\") \n")
+
+ #second column
+ z.append("=IF(Sheet1!$B$5>"+ind+',\"LIST\",\"\") \n')
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"plane\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+start[start_ind]+",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"a_km\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"e\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"i_deg\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"RAAN_deg\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"arg_per_deg\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+6)+",\"\") \n") #20
+ z.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+10)+",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+9)+",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+8)+",\"\") \n")
+
+ #third column
+ i.append("=IF(Sheet1!$B$5>"+ind+',\"LIST\",\"\") \n')
+ i.append("\n")
+ i.append("\n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+1)+",\"\") \n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+2)+",\"\") \n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+3)+",\"\") \n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+4)+",\"\") \n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+5)+",\"\") \n")
+ i.append("\n")
+ i.append("\n")
+ i.append("\n")
+ i.append("\n")
+
+ let_ind += 1
+ if let_ind > 4:
+ let_ind = 0
+ start_ind += 1
+elif type =="const_indiv":
+ for x in range(14):
+ ind = str(x+1)
+ ind2 = str(21+x*9)
+
+ y.append("=IF(Sheet1!$B$5>"+ind+',\"LIST\",\"\") \n')
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"sat_id\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"def_type\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"kepler_meananom\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"kepler_meananom\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"kepler_meananom\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"kepler_meananom\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"kepler_meananom\",\"\") \n")
+ y.append("=IF(NOT($A"+ind2+"=\"\"),\"kepler_meananom\",\"\") \n")
+
+ #second column
+ z.append("=IF(Sheet1!$B$5>"+ind+',\"LIST\",\"\") \n')
+ z.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+adj_letter[let_ind]+"$"+start[start_ind]+",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"indiv\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"a_km\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"e\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"i_deg\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"RAAN_deg\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"arg_per_deg\",\"\") \n")
+ z.append("=IF(NOT($A"+ind2+"=\"\"),\"M_deg\",\"\") \n")
+
+ #third column
+ i.append("=IF(Sheet1!$B$5>"+ind+',\"LIST\",\"\") \n')
+ i.append("\n")
+ i.append("\n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+1)+",\"\") \n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+2)+",\"\") \n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+3)+",\"\") \n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+4)+",\"\") \n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+5)+",\"\") \n")
+ i.append("=IF(NOT($A"+ind2+"=\"\"),Sheet1!"+letter[let_ind]+"$"+str(int(start[start_ind])+6)+",\"\") \n")
+
+ let_ind += 1
+ if let_ind > 4:
+ let_ind = 0
+ start_ind += 1
+elif type =="gs":
+ let_ind =0
+ counter = 0
+ for x in range(18):
+ if counter < 6:
+ ind2 = 14
+ elif counter < 11:
+ ind2 = 23
+ else:
+ ind2 = 32
+ counter +=1
+ ind = str(x)
+
+ y.append("=IF($D$6>"+ind+',\"LIST\",\"\") \n')
+ y.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + adj_letter[let_ind]+str(ind2)+"),\"\") \n")
+ y.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + adj_letter[let_ind]+str(ind2+1)+"),\"\") \n")
+ y.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + adj_letter[let_ind]+str(ind2+2)+"),\"\") \n")
+ y.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + adj_letter[let_ind]+str(ind2+3)+"),\"\") \n")
+ y.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + adj_letter[let_ind]+str(ind2+4)+"),\"\") \n")
+ y.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + adj_letter[let_ind]+str(ind2+5)+"),\"\") \n")
+ y.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + adj_letter[let_ind]+str(ind2+6)+"),\"\") \n")
+
+
+ z.append("=IF($D$6>"+ind+',\"LIST\",\"\") \n')
+ z.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + letter[let_ind]+str(ind2)+"),\"\") \n")
+ z.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + letter[let_ind]+str(ind2+1)+"),\"\") \n")
+ z.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + letter[let_ind]+str(ind2+2)+"),\"\") \n")
+ z.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + letter[let_ind]+str(ind2+3)+"),\"\") \n")
+ z.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + letter[let_ind]+str(ind2+4)+"),\"\") \n")
+ z.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + letter[let_ind]+str(ind2+5)+"),\"\") \n")
+ z.append("=IF($D$6>"+ind+",LOWER(Sheet1!" + letter[let_ind]+str(ind2+6)+"),\"\") \n")
+
+ i.append("=IF($D$6>"+ind+',\"LIST\",\"\") \n')
+ i.append("\n")
+ i.append("\n")
+ i.append("\n")
+ i.append("\n")
+ i.append("\n")
+ i.append("\n")
+ i.append("\n")
+
+ let_ind +=1
+ if let_ind > 5:
+ let_ind = 0
+
+a.writelines(y)
+b.writelines(z)
+c.writelines(i)
\ No newline at end of file
diff --git a/scripts/tools/case_generator/ground_station_blank.xlsx b/scripts/tools/case_generator/ground_station_blank.xlsx
new file mode 100644
index 0000000..abf6620
Binary files /dev/null and b/scripts/tools/case_generator/ground_station_blank.xlsx differ
diff --git a/scripts/tools/case_generator/gs_gen.py b/scripts/tools/case_generator/gs_gen.py
new file mode 100644
index 0000000..d5a8133
--- /dev/null
+++ b/scripts/tools/case_generator/gs_gen.py
@@ -0,0 +1,67 @@
+import json
+import os
+import sys
+import shutil
+
+
+def create_gs(reference, stations_on, dlnk, replan_interval = None, replan_release_wait = None, release_first = None):
+ #First, get the reference from the reference folder
+ print("Starting GS Generation...")
+ cwd = os.path.abspath(os.getcwd())
+ name = 'ground_station_network_config.json'
+
+ if reference[-5:] != ".json":
+ reference = reference+".json"
+ gs_path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '../../../', 'inputs/reference_model_definitions/gs_refs'))
+ os.system('cp '+gs_path+'/'+reference+' '+cwd+'/'+name)
+
+ try:
+ with open(name,'r+') as f:
+ data = json.load(f)
+
+ stations = data['network_definition']['gs_net_params']['stations']
+ params = data['network_definition']['sim_gs_network_params']['gsn_ps_params']
+
+ except NameError:
+ print("There are no stations in file "+reference)
+ os.system('rm '+name)
+
+ good_stations = []
+ id = 0
+ for s in stations:
+ if stations_on == 'all':
+ s['id'] = 'G' + str(id)
+ id += 1
+ s['comm_type'] = dlnk
+ good_stations.append(s)
+ elif s['name'] in stations_on:
+ s['id'] = 'G' + str(id)
+ id += 1
+ s['comm_type'] = dlnk
+ good_stations.append(s)
+
+ if replan_interval is not None:
+ params['replan_interval_s'] = int(replan_interval)
+ if replan_release_wait is not None:
+ params['replan_release_wait_time_s'] = int(replan_release_wait)
+
+ if release_first is not None:
+ if release_first.lower() == 'true' or release_first.lower() == 't':
+ params['release_first_plans_immediately'] = True
+ elif release_first.lower() == 'false' or release_first.lower() == 'f':
+ params['release_first_plans_immediately'] = False
+
+ data['network_definition']['gs_net_params']['stations'] = good_stations
+ data['network_definition']['gs_net_params']['num_stations'] = len(good_stations)
+
+ with open(name,'w') as f:
+ json.dump(data, f, indent =4)
+ print("Successfully saved new gs.")
+
+ return (cwd +'/'+name)
+
+
+
+
+if __name__ == '__main__':
+ create_gs(*sys.argv[1:])
\ No newline at end of file
diff --git a/scripts/tools/case_generator/injected_obs_generator.py b/scripts/tools/case_generator/injected_obs_generator.py
new file mode 100644
index 0000000..5bd523e
--- /dev/null
+++ b/scripts/tools/case_generator/injected_obs_generator.py
@@ -0,0 +1,93 @@
+#! /usr/bin/env python
+
+
+# @author Kit Kennedy
+# @edits Mary Dahl
+
+import random
+from datetime import datetime,timedelta
+import json
+import bisect
+import sys
+import dateutil.parser
+
def inject(file_to_inject, num_sats, inject_per_sat,day_start=None,day_end=None):
    """Generate random non-overlapping 1-minute injected observations per satellite.

    Writes the generated observation list into *file_to_inject*'s
    scenario_params.sim_run_perturbations.injected_observations, or into
    output.json when no target file (or no usable structure) is available.

    :param file_to_inject: path to a scenario json file, or None to dump to output.json
    :param num_sats: number of satellites ("S0".."S<n-1>") to generate for
    :param inject_per_sat: observations generated per satellite
    :param day_start: scenario start (datetime); read from the file when None
    :param day_end: scenario end (datetime); read from the file when None
    :raises ValueError: if neither a file nor explicit bounds are supplied
    """
    if day_start is None or day_end is None:
        # Original code called open(None) here when no file was given,
        # raising an opaque TypeError; fail with a clear message instead.
        if file_to_inject is None:
            raise ValueError("Either file_to_inject or explicit day_start/day_end is required.")
        with open(file_to_inject,'r') as f:
            data = json.load(f)
        day_start = dateutil.parser.isoparse(data['scenario_params']['start_utc'])
        day_end = dateutil.parser.isoparse(data['scenario_params']['end_utc'])

    sats = ["S" + str(i) for i in range(int(num_sats))]

    the_json = []
    used_fracs = []  # sorted start times (minutes) already assigned, to avoid overlap
    indx = 0

    for sat in sats:
        for i in range(int(inject_per_sat)):
            valid = False
            while not valid:
                overlap = False
                # Draw a start time in minutes. NOTE(review): assumes a
                # 24-hour scenario span; out-of-range draws are retried below.
                start = random.random()*24*60
                end = start + 1
                # make sure the delta doesn't make it after the end of the day
                if (day_start + timedelta(minutes=end)) > day_end:
                    continue

                # checking to make sure 1-minute observations don't overlap
                if len(used_fracs) == 0:
                    break
                if start >= (used_fracs[-1] + 1):
                    # used_fracs is kept sorted, so later than the last window
                    # means no conflict is possible.
                    break
                for frac in used_fracs:
                    if abs(frac - start) <= 1:
                        overlap = True
                        break
                if not overlap:
                    valid = True

            bisect.insort(used_fracs, start)

            the_json.append({
                "indx": indx,
                # Strips the "+00:00" suffix and appends 'Z' -- assumes
                # tz-aware (UTC) datetimes; TODO confirm for naive inputs.
                "end_utc": (day_start + timedelta(minutes=end)).isoformat()[:-6]+'Z',
                "sat_id": sat,
                "type": "hardcoded",
                "start_utc": (day_start + timedelta(minutes=start)).isoformat()[:-6]+'Z',
            })

            indx += 1

    if file_to_inject is None:
        print("No file to inject. Saving as output.json")
        with open('output.json','w') as f:
            json.dump(the_json ,f, indent = 4)
        print("Successfully injected observations.")
    else:
        try:
            with open(file_to_inject,'r') as f:
                data = json.load(f)

            data['scenario_params']['sim_run_perturbations']['injected_observations'] = the_json

            with open(file_to_inject,'w') as f:
                json.dump(data, f, indent =4)
            print("Successfully injected " + str(inject_per_sat) + " observations to " + str(num_sats) +" satellites to "+ str(file_to_inject))

        except (KeyError, TypeError):
            # A missing scenario_params structure raises KeyError/TypeError;
            # the original caught NameError, which can never fire here.
            print("File to inject does not contain the correct data structures. Generated observations will be saved as output.json")
            with open('output.json','w') as f:
                json.dump(the_json,f,indent=4)
            print("Successfully saved injected observations.")


if __name__ == '__main__':
    inject(*sys.argv[1:])
diff --git a/scripts/tools/case_generator/json_gen.py b/scripts/tools/case_generator/json_gen.py
new file mode 100644
index 0000000..562c186
--- /dev/null
+++ b/scripts/tools/case_generator/json_gen.py
@@ -0,0 +1,154 @@
+import pandas
+import csv
+import json
+import os
+import sys
+
def dictify(row, working_dict, orbit_list):
    """Recursively fold one parsed csv row into the nested json config dict.

    A [key, value] row is a leaf; a longer row descends one nesting level per
    cell. A row whose second cell contains "LIST" starts (or extends) the
    shared *orbit_list*, whose last dict subsequent rows will populate.

    :param row: list of parsed cells remaining for this recursion level
    :param working_dict: dict at the current nesting level (mutated in place)
    :param orbit_list: shared list of orbit dicts (mutated in place)
    """
    # Guard: rows with fewer than two cells carry no key/value pair; without
    # this the recursion hit row[0]/row[1] on them and raised IndexError.
    if len(row) < 2:
        return
    if len(row) == 2:
        # Leaf: first writer wins, existing keys are never overwritten.
        if row[0] not in working_dict:
            working_dict[row[0]] = row[1]
        return
    elif "LIST" in row[1]:
        # Signifies next items need to be placed in a list.
        # NOTE: This only works for constellations rn. Need to modify.
        new_dict = dict()
        if len(orbit_list) == 0:
            # First LIST marker: hook the shared list into the config.
            orbit_list.append(new_dict)
            working_dict[row[0]] = orbit_list
        else:
            orbit_list.append(new_dict)
        return
    else:
        # Interior node: descend, creating the nested dict on first visit.
        if row[0] not in working_dict:
            working_dict[row[0]] = dict()
        dictify(row[1:], working_dict[row[0]], orbit_list)
+
def gs_parse(l):
    """Group ground-station disruption rows by station key.

    Each input row is [station_key, window_start, window_end]; the result maps
    each station key to the list of its [start, end] pairs, in input order.

    :param l: iterable of 3-element disruption rows
    :return: dict mapping station key -> list of [start, end] pairs
    """
    grouped = {}
    for row in l:
        grouped.setdefault(row[0], []).append([row[1], row[2]])
    return grouped
+
def is_int(val):
    """Return True when *val* converts to both float and int with equal value.

    Note: integer-valued decimal strings like "3.0" return False, because
    int("3.0") raises ValueError even though float("3.0") succeeds.
    """
    try:
        return float(val) == int(val)
    except ValueError:
        return False
def is_float(val):
    """Return True when *val* is convertible to float, False otherwise."""
    try:
        float(val)
    except ValueError:
        return False
    return True
+
def parse(item,disrupt_flag,orbit_flag):
    """Convert one raw csv cell into its typed value and update section flags.

    The order of the elif chain matters: numeric conversions run before the
    '[' list check, and the marker keywords (GS_DISRUPT / ORBIT_ASSIGN) are
    only reached for cells that failed every conversion above them.

    :param item: raw cell text from the csv row
    :param disrupt_flag: True once a GS_DISRUPT marker has been seen
    :param orbit_flag: True once an ORBIT_ASSIGN marker has been seen
    :return: (converted item, keep-this-cell flag, disrupt_flag, orbit_flag)
    """
    item_flag = True
    if is_int(item):
        item = int(item)
    elif is_float(item):
        item = float(item)
    elif '[' in item:
        # Lists are written with '&' instead of ',' so the csv cell itself is
        # not split; restore commas, then json-decode the list.
        item = item.replace('&',',')
        item = json.loads(item)
    elif '+' in item:
        # '+' is likewise a stand-in for ',' inside plain-text cells.
        item = item.replace('+',',')
    elif item.lower() == 'true':
        item = True
    elif item.lower() == 'false':
        item = False
    elif 'EMPTY' in item:
        # Placeholder for an empty json object.
        item = dict()
    elif 'Unnamed' in item or len(item) == 0:
        # Pandas header artifacts ('Unnamed: N') and blank cells are dropped.
        item_flag = False
    elif 'GS_DISRUPT' in item:
        # Marker cell: subsequent rows are ground-station disruptions.
        disrupt_flag = True
        item_flag = False
    elif 'ORBIT_ASSIGN' in item:
        # Marker cell: an orbit-assignment sheet must be read later.
        orbit_flag = True
        item_flag = False
    elif 'BLANK' in item:
        # Placeholder for an intentionally empty string value.
        item = ""
    return (item, item_flag, disrupt_flag,orbit_flag)
+
def to_json(file, name = "output.json", sheet = 'Sheet2'):
    """Convert a spreadsheet (.xlsx) or .csv case definition into a json config.

    .xlsx inputs are first round-tripped through a temporary csv so both input
    types share one parsing path. Ground-station disruption rows (after a
    GS_DISRUPT marker) and orbit assignments (ORBIT_ASSIGN marker, read from
    'Sheet5') receive special handling.

    :param file: path to the .xlsx or .csv input
    :param name: output json filename (default "output.json")
    :param sheet: worksheet to read when *file* is an .xlsx workbook
    :return: None (writes *name* to disk); None immediately on unsupported types
    """
    file_name, file_extension = os.path.splitext(file)
    full_name = file_name + file_extension
    if file_extension == '.xlsx':
        excel_data_df = pandas.read_excel(file, sheet_name = sheet)
        full_name = file_name + '.csv'
        excel_data_df.to_csv(full_name, index = None)
    elif file_extension == '.csv':
        pass
    else:
        print("Not compatible with file type: ", file_extension)
        return None

    config = {}
    orbit_list = []
    disrupt_flag = False
    orbit_flag = False
    disruptions = []
    with open(full_name, 'r') as data_file:
        for row in data_file:
            row = row.strip().split(",")
            new_row = []
            # Convert string numbers to numeric types and drop Unnamed/empty cells.
            for item in row:
                parsed_item, item_flag, disrupt_flag, orbit_flag = parse(item, disrupt_flag, orbit_flag)
                if item_flag:
                    new_row.append(parsed_item)
            if len(new_row) == 0:
                continue
            # Ground station disruption rows are collected and handled separately.
            elif disrupt_flag:
                disruptions.append(new_row)
                continue
            elif len(orbit_list) == 0:
                dictify(new_row, config, orbit_list)
            else:
                # Once a LIST marker has been seen, rows fill the newest orbit dict.
                dictify(new_row, orbit_list[len(orbit_list)-1], orbit_list)

    if disrupt_flag:
        config['scenario_params']['sim_run_perturbations']['schedule_disruptions'] = gs_parse(disruptions)

    if orbit_flag:
        # Assumes *file* is an .xlsx workbook with assignments on 'Sheet5'
        # -- TODO confirm behavior for plain csv inputs with ORBIT_ASSIGN.
        orbit_dict = {}
        excel_data_2 = pandas.read_excel(file, sheet_name = 'Sheet5')
        excel_data_2.to_csv('temp_orbit_assign.csv', index = None)
        with open('temp_orbit_assign.csv', 'r') as data_file:
            for row in data_file:
                row = row.strip().split(",")
                if row[0] == "":
                    continue
                orbits = row[1].split("&")
                orbit_dict[row[0]] = orbits

        config['constellation_definition']['constellation_params']['orbit_params']['sat_ids_by_orbit_name'] = orbit_dict
        # os.remove is portable and avoids shelling out (was: os.system('rm ...')).
        os.remove('temp_orbit_assign.csv')

    print("Json successfully made.")
    if name == "output.json":
        print("No name supplied. Saving as output.json.")
    with open(name, 'w') as outfile:
        json.dump(config, outfile, indent = 4)

    # Clean up the intermediate csv produced from the spreadsheet.
    if file_extension == '.xlsx':
        os.remove(full_name)
    return


if __name__ == '__main__':
    to_json(*sys.argv[1:])
diff --git a/scripts/tools/case_generator/obs_gen.py b/scripts/tools/case_generator/obs_gen.py
new file mode 100644
index 0000000..459b9b8
--- /dev/null
+++ b/scripts/tools/case_generator/obs_gen.py
@@ -0,0 +1,45 @@
+#for when you just want a bunch of obs in a certain area
+import json
+import random
+import sys
+
+
def observation_generator(num_targets, lat_min,lat_max,long_min, long_max, file_location = None):
    """Generate random observation targets inside a lat/long bounding box.

    Each target gets integer-degree coordinates drawn uniformly (inclusive)
    from the given ranges. The result is injected into *file_location*'s
    ops_profile_params.obs_params, or dumped to obs_out.json when no file is
    given.

    :param num_targets: how many targets to generate
    :param lat_min: minimum latitude (degrees, inclusive)
    :param lat_max: maximum latitude (degrees, inclusive)
    :param long_min: minimum longitude (degrees, inclusive)
    :param long_max: maximum longitude (degrees, inclusive)
    :param file_location: scenario json to update, or None for obs_out.json
    """
    count = int(num_targets)
    lat_lo, lat_hi = int(lat_min), int(lat_max)
    lon_lo, lon_hi = int(long_min), int(long_max)

    targets = []
    for idx in range(count):
        latitude = random.randint(lat_lo, lat_hi)
        longitude = random.randint(lon_lo, lon_hi)
        label = "generated" + str(idx)
        targets.append({
            "id": "obs" + str(idx),
            "name": label,
            "name_pretty": label,
            "latitude_deg": latitude,
            "longitude_deg": longitude,
            "height_m": 0,
        })

    if file_location is None:
        print("No file given to inject into. Will dump json into this directory as obs_out.json.")
        with open('obs_out.json','w') as f:
            json.dump({'targets': targets}, f, indent =4)
    else:
        with open(file_location,'r+') as f:
            data = json.load(f)

        obs_params = data['ops_profile_params']['obs_params']
        obs_params['num_targets'] = count
        obs_params['targets'] = targets
        obs_params['target_set_name'] = "Generated targets, using latitude in the range " + str(lat_min) +" to "+ str(lat_max) + ' and longitude in the range '+str(long_min) + " to " +str(long_max)+'.'
        with open(file_location,'w') as f:
            json.dump(data,f,indent =4)



if __name__ == '__main__':
    observation_generator(*sys.argv[1:])
\ No newline at end of file
diff --git a/scripts/tools/case_generator/op_blank.xlsx b/scripts/tools/case_generator/op_blank.xlsx
new file mode 100644
index 0000000..7428cd9
Binary files /dev/null and b/scripts/tools/case_generator/op_blank.xlsx differ
diff --git a/scripts/tools/case_generator/payload_blank.xlsx b/scripts/tools/case_generator/payload_blank.xlsx
new file mode 100644
index 0000000..46e499d
Binary files /dev/null and b/scripts/tools/case_generator/payload_blank.xlsx differ
diff --git a/scripts/tools/case_generator/sim_blank.xlsx b/scripts/tools/case_generator/sim_blank.xlsx
new file mode 100644
index 0000000..22f042f
Binary files /dev/null and b/scripts/tools/case_generator/sim_blank.xlsx differ
diff --git a/source/Ground_Sim/Ground_Client.py b/source/Ground_Sim/Ground_Client.py
new file mode 100644
index 0000000..80803dd
--- /dev/null
+++ b/source/Ground_Sim/Ground_Client.py
@@ -0,0 +1,199 @@
+import sys, socket
+import pickle
+import multiprocessing as mp
+
+DEFAULT_BUFF_SIZE = 32768
+
+import struct
def encodeStrLen(msg, etype=">I"):
    """Pack the byte-length of *msg* into a fixed-size header.

    :param msg: bytes-like message whose length is encoded
    :param etype: struct format for the header (default big-endian uint32);
        parameter added for parity with the Ground_Server implementation
    :return: the packed header bytes
    """
    return struct.pack(etype, len(msg))
def decodeLen(buff, etype=">I"):
    """Decode a length header produced by encodeStrLen.

    :param buff: buffer beginning with the packed length
    :param etype: struct format used to pack the length
    :return: the decoded integer length
    """
    return struct.unpack(etype,buff)[0]
+
+"""
+The message sender of Ground Sim, handling all sending for all ground stations.
+
+This Client is structured such that for each satellite, it produces a Process that acts as a consumer in a message queue
+and transmits the message given the connection it is initialized with. These Processes continue for the entirety of
+the simulation.
+
+"""
class GroundClient:
    """Message sender of Ground Sim, handling all sending for all ground stations.

    For each satellite, a dedicated daemon Process acts as a consumer on that
    satellite's message queue and transmits queued messages over the socket
    connection it was started with. These Processes live for the entire
    simulation.
    """

    EXPECTED_ACKNOWLEDGEMENT_REPLY = {"ACK": True, "payload": None} # base expected reply from server

    def __init__(self,messagesToSend:dict):
        """
        Constructor
        :param messagesToSend: Dictionary mapping target to message Queue
        """
        self.client = None
        self.messagesToSend = messagesToSend # The queue to receive messages

        self.ips_by_id = {} # Maps ID of satellites to their servers' IP addresses
        self.connections = {} # Active connections with satellite servers
        self.processes = {} # The processes using the active connections
        self.idCount = 0 # ID count used to assign messages unique IDs


    ###################################################################################################################
    # CONNECTION-BASED FUNCTIONS #
    ###################################################################################################################

    def make_connection(self,address:tuple):
        """
        Creates socket connection to given address
        :param address: A tuple in the form (host, port)
        :return: The connected socket
        """
        connection = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
        print(" connecting to ",address)
        connection.connect(address)
        return connection

    def add_id(self,id:str,address,port):
        """
        Registers id to be located at address. Overwrites if necessary.
        Also starts a connection with the given address, associates it with
        the given id, and spawns the daemon sender Process for that id.
        """
        # Record address
        self.ips_by_id[id] = (address,port)

        # Initialize message queue
        self.messagesToSend[id] = mp.Queue(0)

        # Initialize connection
        self.connections[id] = self.make_connection((address,port))

        # Start the per-satellite sender process (daemon: dies with parent)
        process = mp.Process(target = self.run,args=(id,self.connections[id],self.messagesToSend[id]),daemon=True)
        self.processes[id] = process
        process.start()

    def remove_id(self,id:str):
        """
        Removes the given ID from known addresses and connections
        :param id: The ID of the agent
        """
        del self.ips_by_id[id]

        # Stop Process
        self.processes[id].terminate()
        del self.processes[id]

        # Stop connection
        self.connections[id].close()
        del self.connections[id]


    def get_addresses(self):
        """Return the registered satellite IDs.

        NOTE(review): despite the name, this returns the *keys* of ips_by_id
        (the IDs), not the address tuples -- confirm callers expect IDs.
        """
        return self.ips_by_id.keys()

    def __print_live(self,str_to_print):
        # Print and flush immediately so output from child processes is not buffered away.
        print(str_to_print)
        sys.stdout.flush()

    def get_name(self):
        """
        Returns host name of this.
        """
        return socket.gethostname()

    def get_address(self):
        """
        Returns IP address of this.
        """
        return socket.gethostbyname(self.get_name())


    def run(self,targ_id:str,connection:socket.socket,msgsToSend:mp.Queue):
        """
        Main running loop of the per-satellite sender process.

        :param targ_id: The ID of the target
        :param connection: The connection to use to send messages
        :param msgsToSend: The queue to use to receive messages to send
        """

        while(True):
            # Block until a message arrives, then serialize it.
            msg = msgsToSend.get()
            # (renamed from `bytes`, which shadowed the builtin)
            msg_bytes = pickle.dumps(msg)

            # Send message
            self.transmit(targ_id,connection,msg_bytes)

    def transmit(self,targ_id:str,client:socket.socket,bytes_tx:bytes):
        """
        Transmits bytes to satellite with specified id
        Adds a message header in format:
        START_MARKER --> length of message in bytes

        Retries up to MAX_TRIES times, then gives up with a log line.

        :param targ_id Satellite ID to send to
        :param client The socket connection to the ID's server
        :param bytes_tx The message to send in bytes
        """

        START_MARKER = "size:".encode('ascii')
        MAX_TRIES = 3

        for i in range(MAX_TRIES):
            try:

                toSend = len(bytes_tx)

                # Send header
                client.send(START_MARKER + encodeStrLen(bytes_tx))
                sentTot = 0

                # Send rest of message, resuming from however much send() accepted
                while sentTot < toSend:
                    sentTot += client.send(bytes_tx[sentTot:])

                try:
                    parseable = self.receive_response(client)
                    if parseable:
                        return

                # Narrowed from bare `except:` so KeyboardInterrupt/SystemExit
                # are not swallowed during shutdown.
                except Exception:
                    print(" Error Receiving: {}".format(targ_id))

            except Exception:
                print (" Error Transmitting to: {}".format(targ_id))

        print(" EXCEEDED MAX NUMBER OF TRIES ({}) TO TRANSMIT MESSAGE".format(MAX_TRIES))

    def receive_response(self,client:socket.socket):
        """
        Receives PARSEABLE response from server
        :param client: The connection
        :return: True if successful transmission, False otherwise
        """
        START_MARKER = "size:".encode('ascii')

        # Receive header and decode message length
        rcv_buff = b''
        chunk = client.recv(DEFAULT_BUFF_SIZE)
        len_start = chunk.find(START_MARKER) + len(START_MARKER)
        expectedFollowupLen = decodeLen(chunk[len_start:len_start + 4])
        rcv_buff += chunk[len_start + 4:]
        numRecv = len(rcv_buff)

        # Receive up to message length or until the peer stops sending,
        # whichever comes first
        while (numRecv < expectedFollowupLen and len(chunk) > 0): # and chunk != 'EOF'):

            chunk = client.recv(DEFAULT_BUFF_SIZE)
            rcv_buff += chunk
            numRecv = len(rcv_buff)

        rsp_msg = pickle.loads(rcv_buff)

        return rsp_msg['PARSEABLE']
+
+
+
+if __name__ == "__main__":
+ pass
+
diff --git a/source/Ground_Sim/Ground_Server.py b/source/Ground_Sim/Ground_Server.py
new file mode 100644
index 0000000..e02f84f
--- /dev/null
+++ b/source/Ground_Sim/Ground_Server.py
@@ -0,0 +1,340 @@
+import sys, socket
+from threading import *
+from OpenSSL import SSL
+import queue,select
+import pickle, json
+from Removed_Satellite.BlockingDict import BlockingDict
+import multiprocessing as mp
+DEFAULT_BUFF_SIZE = 32768
+import multiprocessing.connection as mpc
+import struct
def encodeStrLen(msg, etype=">I"):
    """Return the length of *msg* packed with struct format *etype* (default big-endian uint32)."""
    return struct.pack(etype, len(msg))


def decodeLen(buff, etype=">I"):
    """Inverse of encodeStrLen: unpack a packed length from the front of *buff*."""
    (length,) = struct.unpack(etype, buff)
    return length
+
+
+"""
+
+Implementation of the server for Ground Server. Holds orbit propagation and link calculations, and
+sends the data to clients
+
+"""
+
class GroundServer:
    """select()-based TCP server for the ground segment.

    Accepts connections from remote satellite clients, reads length-framed
    pickled/json messages from them, and forwards recognized messages over
    the multiprocessing pipe (self.connection) to the ground-sim controller.
    """

    def __init__(self,port:int,numSats:int,conn:mpc.Connection):
        # NOTE: this constructor BLOCKS -- it ends by calling self.run(),
        # which loops until SHUTDOWN_REQ becomes True.
        self.connection = conn  # pipe to the ground-sim controller process
        self.port = port  # TCP port to listen on
        self.responses = BlockingDict()  # responses keyed by message id
        self.messages_to_pass = mp.Queue(0) # unlimited length. Messages to pass to controller
        self.inputs = []  # sockets watched by the select() loop
        self.server = None  # listening socket (created in setup_server)
        self.SHUTDOWN_REQ = False  # set True to end the run() loop


        self.numSats = numSats # number of satellites expected
        self.numSatsJoined = 0 # number of satellites actually joined
        self.serverLock = RLock() # Lock on server contents

        # NOTE(review): setup_server() is called here AND again at the top of
        # run(), so the port is bound twice -- confirm one call should go.
        self.setup_server()
        self.run()

    def __print_live(self,str_to_print):
        # Print and flush immediately so logs are not lost to buffering.
        print(str_to_print)
        sys.stdout.flush()

    def __dropClient(self,client_sock,errors=None):
        # Remove a client from the select() set and shut its socket down.
        if errors:
            self.__print_live( 'Client %s left unexpectedly:' % (client_sock,) )
            self.__print_live( errors )
        else:
            self.__print_live( 'Client %s left politely' % (client_sock,) )
        self.inputs.remove(client_sock)
        client_sock.shutdown(socket.SHUT_RDWR)
        client_sock.close()

    def setup_server(self):
        """
        Initializes server SSL connection, loading server parameters and
        authenticating connection using certificates under central_global_planner/certs.

        Directions to set up certificates: https://dst.lbl.gov/~boverhof/openssl_certs.html
        - NOTE: Use 2048, not 1024, to make certificates
        - NOTE: Use .cert instead of .pem, and .pkey instead of .key
        - NOTE: Same CA.cert must be on all running devices for authentication

        NOTE(review): the body below binds a plain (non-SSL) TCP socket; the
        SSL/certificate handling described above is not implemented here.
        """

        bind_ip = '' # Use host IP address
        bind_port = self.port # ADMIN socket - 54201

        self.server = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
        # SO_REUSEADDR lets the server rebind quickly after a restart.
        self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.server.bind((bind_ip, bind_port))
        self.server.listen()

        print(' Listening on {}:{}'.format(bind_ip, bind_port))

    def get_num_sats_to_join(self):
        """
        Returns number of satellites left to join before simulation to run
        0 indicates all satellites have joined
        """
        with self.serverLock:
            return self.numSats - self.numSatsJoined

    def msg_avail(self):
        """
        Determines if there is a message available
        :return: True if yes, false otherwise

        NOTE: Queue.qsize() is advisory -- the result can be stale when
        producers/consumers run concurrently.
        """
        if self.messages_to_pass.qsize() > 0:
            return True
        else:
            return False

    def get_message(self, block = True, timeOut = None):
        """
        Gets the current message in FIFO order

        @block: boolean. True if block until queue nonempty, False otherwise
        @timeOut: number of seconds to block before Empty exception raised (giving up on waiting for message)

        Returns None (rather than raising) when the queue stays empty.
        """
        try:
            return self.messages_to_pass.get(block = block, timeout = timeOut )
        except queue.Empty:
            return None

    def get_response(self,msgId:str):
        """
        Gets response to message with id msgId
        :param msgId: ID of the message
        :return: True/False, or the message's payload
        """
        return self.responses.get(msgId)

    def clear_queue(self): # use self.queue_lock if any chance of async incoming
        # Drain all pending messages; returns the resulting queue size.
        while self.msg_avail():
            self.get_message()
        return self.messages_to_pass.qsize()

    def get_name(self):
        # Host name of the machine this server runs on.
        return socket.gethostname()

    def get_address(self):
        # IP address corresponding to this machine's host name.
        return socket.gethostbyname(self.get_name())


    def run(self):
        # Main select() loop: accept new clients and service readable sockets.
        # NOTE(review): setup_server() already ran in __init__; this second
        # call creates and binds a fresh socket on the same port -- confirm
        # which of the two calls is intended.
        self.setup_server()

        print("Server setup on: {}".format(self.port))

        self.inputs = [self.server]
        outputs = []

        while not self.SHUTDOWN_REQ:
            readable,writable,exceptional = select.select(self.inputs,outputs,self.inputs)

            for s in readable:
                if s is self.server:
                    # Readable server accepting new connections and
                    # establishing satellites
                    connection,client_address = s.accept()
                    connection.setblocking(0)
                    self.inputs.append(connection)

                    # Check for joining
                    self.receive_message(connection,address = client_address)

                else:
                    # Known connection is sending data
                    self.receive_message(s)

            for s in exceptional:
                # Stop listening to socket if there is an exception
                print("Handling exceptional socket connection for {}".format(s.getpeername()))
                self.inputs.remove(s)
                s.close()

        print("Server ending...")


    # outputs basic parsed dictionary type, with trustworth fields
    def parse_msg(self, msg):
        """
        Parses the incoming message and checks that it is valid
        :param msg: Incoming message
        :return: Parsed message and serializer used ('pickle' or 'json')
        """

        serializer = 'pickle'
        retVal = {}
        try:
            try: # Try to use pickle
                retVal = pickle.loads(msg)
            except Exception:
                try:
                    # Try to use json
                    retVal = json.loads(msg)
                    serializer = 'json'
                except:
                    print( "RX WARNING: Bad serialization format; neither JSON nor PICKLE worked.")

            # check for required keys,
            if 'req_type' not in retVal.keys() and 'ACK' not in retVal.keys():
                retVal = { "PARSE_ERROR" : "INV_STRUCT" , "MISSING_KEY" : "req_type or ACK" }
            elif 'payload' not in retVal.keys():
                retVal = { "PARSE_ERROR" : "INV_STRUCT" , "MISSING_KEY" : "payload" }

            # prevent special terms from being used
            elif 'PARSE_ERROR' in retVal.keys():
                retVal = { "PARSE_ERROR" : "INV_STRUCT" , "INV_KEY" : "PARSE_ERROR" }

        except:
            retVal = { "PARSE_ERROR" : "INV_JSON" }


        if 'PARSE_ERROR' in retVal.keys():
            print( "RX WARNING: {}".format(retVal))

        return retVal, serializer


    #### Primary operational handler #####

    def receive_message(self,client_socket:socket.socket,address = None):
        """
        Receives incoming message in chunks given socket

        If message is valid and parsed, adds it to queue

        Sends a response message:
        NACK: unparseable
        ACK: True if parseable and recognized message type, False otherwise

        :param client_socket: The incoming socket
        :param address: IP address of port. Provided only for new connections
        """

        # Temporarily block so the whole framed message can be read here.
        client_socket.setblocking(True)
        # Get Message in chunks
        START_MARKER = "size:".encode('ascii')
        rcv_buff = b''
        try:
            chunk = client_socket.recv(DEFAULT_BUFF_SIZE)
            # Check that message starts with header
            if chunk.find(START_MARKER) < 0 or len(chunk) < len(START_MARKER)+4: # bad start
                print("bad start chunk: ", chunk)
                client_socket.setblocking(False)
                self.inputs.remove(client_socket)
                return

            # Extract expected message length from header
            len_start = chunk.find(START_MARKER)+len(START_MARKER)
            expectedFollowupLen = decodeLen( chunk[len_start:len_start+4] )

            # Get any message sent with header
            rcv_buff = chunk[len_start+4:]
            rcv_buff = rcv_buff if len(rcv_buff) <= expectedFollowupLen else rcv_buff[:expectedFollowupLen]
            numRecv = len(rcv_buff)

            # Get rest of message in chunks
            while ( numRecv < expectedFollowupLen and len(chunk) > 0): # and chunk != 'EOF'):
                chunk = client_socket.recv(DEFAULT_BUFF_SIZE)
                # NOTE(review): the else-branch slice below uses a NEGATIVE
                # bound (numRecv-expectedFollowupLen), keeping
                # len(chunk)-remaining bytes; the remaining-byte prefix
                # chunk[:expectedFollowupLen-numRecv] looks intended -- confirm.
                rcv_buff += chunk if len(chunk) <= (expectedFollowupLen-numRecv) else chunk[:numRecv-expectedFollowupLen]
                numRecv = len(rcv_buff)

            # Throw error if we did NOT receive the expected amount of data
            if numRecv != expectedFollowupLen:
                print("Warning, expected vs actual Bytes rx'd: {} vs {}".format(expectedFollowupLen, numRecv))
                self.__dropClient(client_socket)
                client_socket.setblocking(False)
                return

        except (SSL.WantReadError, SSL.WantWriteError, SSL.WantX509LookupError):
            # Non-fatal SSL retry conditions: leave the client registered.
            client_socket.setblocking(False)
            return
        except SSL.ZeroReturnError:
            # Clean SSL shutdown from the peer.
            self.__dropClient(client_socket)
            client_socket.setblocking(False)
            return
        except(SSL.Error):
            self.__dropClient(client_socket) #, errors)
            client_socket.setblocking(False)
            return

        message, serializer = self.parse_msg(rcv_buff)
        if "PARSE_ERROR" in message.keys():
            try:
                self.sendParseResponse(False,client_socket)
            except: print(" FAILED TO SEND PARSEABLE FEEDBACK")

        else:
            try:
                self.sendParseResponse(True,client_socket)
            except:
                print(" FAILED TO SEND PARSEABLE FEEDBACK")
                client_socket.setblocking(False)
                return


        if 'req_type' in message:
            messageType = message['req_type']
            if messageType == 'JOIN':
                # For establishing NEW connections only

                if self.get_num_sats_to_join() > 0:
                    self.__print_live("Satellite joining from {}.".format(client_socket.getpeername()))
                    message['payload']['address'] = client_socket.getpeername()[0]
                    self.connection.send(("JOIN",message))
                    # NOTE(review): incremented without holding serverLock,
                    # although get_num_sats_to_join() reads under it -- confirm.
                    self.numSatsJoined += 1
                else:
                    self.__print_live("Extra join code. Unacknowledged.")

            elif not address:
                # Main simulation messages
                # These should have addresses already defined

                if messageType in {'PLAN','BDT','ACTS_DONE','FINISHED_PROP',
                                   'READY_FOR_TIME_UPDATE','SAT_STATS','POST_RUN'}:
                    self.connection.send(message)

            # 'quit' terminates this entire server process immediately.
            elif messageType == 'quit': exit()

        elif 'ACK' in message:

            # Forward the acknowledgement (payload takes precedence over the
            # bare success flag) to the controller keyed by message id.
            _id = message['id']
            success = message['ACK']
            payload = message['payload']
            waitForReply = message['txWaitForReply']
            if payload:
                self.connection.send(("ACK",_id,payload,waitForReply))
            else: self.connection.send(("ACK",_id,success,waitForReply))

        client_socket.setblocking(False)

    def sendParseResponse(self,parseable:bool,client:socket.socket):
        """
        Sends response to client for whether the message was parseable.

        :param parseable: True if parseable, False if not
        :param client: The connection
        """
        START_MARKER = "size:".encode('ascii')

        response_bytes = pickle.dumps({'PARSEABLE':parseable})
        numBytes = len(response_bytes)

        # Send header
        client.send(START_MARKER + encodeStrLen(response_bytes))

        sentTot = 0
        # Resume from however many bytes send() accepted each round.
        while sentTot < numBytes:
            sentTot += client.send(response_bytes[sentTot:])
+
+if __name__ == "__main__":
+ pass
+
+
diff --git a/source/Ground_Sim/Ground_Sim.py b/source/Ground_Sim/Ground_Sim.py
new file mode 100644
index 0000000..02056ad
--- /dev/null
+++ b/source/Ground_Sim/Ground_Sim.py
@@ -0,0 +1,1615 @@
+from datetime import timedelta
+from collections import OrderedDict
+import pickle
+import json
+import os
+import queue
+from copy import copy, deepcopy
+from circinus_tools.scheduling.io_processing import SchedIOProcessor
+from circinus_tools.scheduling.custom_window import ObsWindow, DlnkWindow, XlnkWindow
+from circinus_tools import time_tools as tt
+from circinus_tools.metrics.metrics_calcs import MetricsCalcs
+from circinus_tools.plotting import plot_tools as pltl
+import circinus_tools.metrics.metrics_utils as met_util
+from circinus_tools import io_tools
+from circinus_tools.activity_bespoke_handling import ActivityTimingHelper
+from circinus_sim.constellation_sim_tools.sim_agents import SimGroundNetwork,SimGroundStation
+from circinus_sim.constellation_sim_tools.gp_wrapper import GlobalPlannerWrapper
+from circinus_sim.constellation_sim_tools.sim_plotting import SimPlotting
+from Ground_Sim.Ground_Server import GroundServer
+from Ground_Sim.Ground_Client import GroundClient
+from circinus_sim.constellation_sim_tools.Transmission_Simulator import Transmission_Simulator
+from sprint_tools.Constellation_STN import Constellation_STN
+import logging
+from sprint_tools.Sprint_Types import AgentType
+from Removed_Satellite.BlockingDict import BlockingDict
+from threading import Thread,Condition
+from Removed_Satellite.Message_ID_Assigner import MessageIDAssigner
+import multiprocessing as mp
+def print_verbose(string,verbose=False):
+ if verbose:
+ print(string)
+
+
+class GroundSim:
+    """
+    easy interface for running the global planner scheduling algorithm and simulating ground-based components only
+
+    Communicates using server/client protocol with satellites on separate devices
+    """
+    # TCP ports for the satellite-side and ground-side servers.
+    SAT_SERVER_PORT = 54200
+    GROUND_SERVER_PORT = 54201
+    # When True, progress information is printed during the run.
+    VERBOSE = True
+
+    def __init__(self, sim_params):
+        """initializes based on parameters
+
+        initializes based on parameters
+        :param sim_params: global namespace parameters created from input files
+        (possibly with some small non-structural modifications to params).
+        The name spaces here should trace up all the way to the input files.
+        :type params: dict
+        """
+        # ---- unpack configuration namespaces ----
+        self.params = sim_params
+        self.GPhotstart = self.params['sim_case_config']['sat_schedule_hotstart']
+        self.sat_params = self.params['orbit_prop_params']['sat_params']
+
+        self.orbit_params = self.params['orbit_prop_params']['orbit_params']
+        self.gs_params = self.params['orbit_prop_params']['gs_params']
+        self.const_sim_inst_params = sim_params['const_sim_inst_params']
+        self.sim_run_params = sim_params['const_sim_inst_params']['sim_run_params']
+        self.sim_run_perturbations = sim_params['const_sim_inst_params']['sim_run_perturbations']
+        self.num_sats = self.sat_params['num_sats']
+        self.sat_id_order = self.sat_params['sat_id_order']
+        self.gs_id_order = self.gs_params['gs_id_order']
+        self.obs_target_id_order = self.params['orbit_prop_params']['obs_params']['obs_target_id_order']
+        self.num_gs = len(self.gs_params['gs_id_order'])
+
+        self.restore_pickle_cmdline_arg = sim_params['restore_pickle_cmdline_arg']
+
+        self.gs_id_ignore_list= self.params['gp_general_params']['other_params']['gs_id_ignore_list']
+
+        # simulation time step
+        self.sim_tick = timedelta(seconds=self.sim_run_params['sim_tick_s'])
+
+        self.sim_start_dt = self.sim_run_params['start_utc_dt']
+        self.sim_end_dt = self.sim_run_params['end_utc_dt']
+
+        self.io_proc =SchedIOProcessor(self.params)
+        self.sim_plotter = SimPlotting(self.params)
+
+        # NOTE(review): assert is stripped under `python -O`; raise explicitly
+        # if this invariant must hold in production — confirm.
+        assert sim_params["ground_sim"] # must be true
+
+        ##### Server/Client Related params ######
+        # per-target outgoing message queues, shared with the GroundClient
+        self.messagesToPass = {}
+        self.msgIDAssigner = MessageIDAssigner()
+        # pipe pair bridging this process and the server subprocess
+        self.conn_to_server,self.server_conn = self.get_pipe_connections()
+        self.server_proc = None
+        self.client = self.set_up_client()
+
+        ##### Message-related recording
+        # maps from outstanding (fire-and-forget) message ids to metadata,
+        # consumed by handleResponse() when the delayed ACK arrives
+        self.idsToMsgType = {}
+        self.idsToTarget = {}
+        self.idsToSender = {}
+        self.msgTypeToIDs = {"STATES":set(),"PLAN":set(),"BDT":set(),"PLAN_PROP":set(),"SAT_STATS":set()}
+
+        # responses for messages sent with waitForReply=True (blocking lookup)
+        self.responses = BlockingDict()
+        self.join_msgs = queue.Queue()
+
+        ##### Simulation-related flags ######
+        # counters below are guarded by self.sim_lock
+        self.numFinishedActs = 0  # Number of peers done sending executive acts
+        self.numReadyForUpdate = 0  # Number of peers ready for state update
+        self.numFinishedProp = 0  # Number of peers done with plan propagation
+        self.numReadyForNextTimeStep = 0  # Number of peers ready for next step
+        self.numPeers = self.num_sats
+
+        self.sim_lock = Condition()
+
+        self.sat_stats = {id: None for id in self.sat_id_order}  # Map of satellite stats. Used for post run
+        self.postRunData = {id: None for id in self.sat_id_order}
+
+        # we create a gp wrapper here, because:
+        # 1. it's easy to give it access to sim params right now
+        # 2. we want to store it in the constellation sim context, as opposed to within the gs network.
+        # It stores a lot of input data (e.g. accesses, data rates inputs...) and we don't want to be
+        # pickling/unpickling all that stuff every time we make a checkpoint in the sim. Note that
+        # the gp_wrapper does not internally track any constellation state, on purpose
+        self.gp_wrapper = GlobalPlannerWrapper(self.params)
+
+        # metrics calculation
+        self.mc = MetricsCalcs(self.get_metrics_params())
+
+        orbit_params = self.params['orbit_prop_params']['orbit_params']
+        self.act_timing_helper = ActivityTimingHelper(self.sat_params['activity_params'],orbit_params['sat_ids_by_orbit_name'],self.sat_params['sat_id_order'],None)
+
+        ##### Simulation State Truth #####
+        stn_params = {
+            'element_id_by_index': {
+                'sat_id_by_indx' : sim_params['orbit_prop_params']['sat_params']['sat_id_order'],
+                'gs_id_by_indx' : sim_params['orbit_prop_params']['gs_params' ]['gs_id_order'],
+                'trget_id_by_indx' : sim_params['orbit_prop_params']['obs_params']['obs_target_id_order']
+            },
+            'accesses_data' : sim_params['orbit_prop_params']['orbit_prop_data']['accesses_data']
+        }
+        self.access_truth_stn = Constellation_STN(stn_params)
+        self.schedule_disruptions = self.params['sim_case_config']['sim_run_perturbations']['schedule_disruptions']
+
+        ##### End Sim State Truth #####
+
+        self.Transmission_Simulator = Transmission_Simulator(False,False, self, removed_sat = True)
+
+        # sentinel; replaced with the current datetime once run() starts
+        self.CurGlobalTime = 0 # essentially not init
+
+ def get_pipe_connections(self):
+ """
+ Creates connection between Ground Sim and its server.
+ This connection spans multiple processes.
+ """
+ parent, child = mp.Pipe()
+ return parent, child
+
+    def set_up_client(self):
+        """
+        Creates client with no addresses
+
+        The client shares self.messagesToPass, so messages enqueued by
+        send_message() are picked up and transmitted by the client.
+        Addresses are registered later via client.add_id() as satellites join.
+        """
+        return GroundClient(self.messagesToPass)
+
+    def run_server(self,conn:mp.connection.Connection):
+        # Launch the GroundServer in a daemon subprocess; it forwards incoming
+        # satellite traffic back through `conn`. Returns the Process handle.
+        p = mp.Process(target = GroundServer,args=(self.GROUND_SERVER_PORT,self.num_sats,conn),daemon=True)
+        p.start()
+        return p
+
+
+    def initialize_sats(self):
+        """
+        initialize satellite clients with parameters after client joins
+
+        Blocks until all self.num_sats satellites have successfully joined and
+        been sent their initialization parameters, then broadcasts the full
+        address book to every satellite.
+        """
+        ####### GENERAL STARTING SATELLITE PARAMETERS #######
+        sat_id_sim_satellite_params = self.const_sim_inst_params['sim_satellite_params']
+
+        sat_client_initialization_params = {"req_type":"INIT_PARAMS","payload":{},"waitForReply":True,"sender":"ground"}
+        sat_client_initialization_params["payload"] = {"sat_id_sim_satellite_params": sat_id_sim_satellite_params,
+                                                       "sim_start_dt": self.sim_start_dt,
+                                                       "sim_end_dt": self.sim_end_dt,
+                                                       "sim_params": self.params,
+                                                       "activity_params": self.sat_params["activity_params"],
+                                                       "sat_ids_by_orbit_name": self.orbit_params['sat_ids_by_orbit_name'],
+                                                       "sat_id_order": self.sat_params['sat_id_order']
+                                                       }
+
+        satellite_list = list(enumerate(self.sat_id_order))
+        i = 0
+        ip_addresses = {}
+        # NOTE(review): if a satellite repeatedly fails to initialize, `i`
+        # never advances and this loop spins forever — confirm that is the
+        # intended "wait until hardware is up" behavior.
+        while i < self.num_sats: # Must initialize ALL sats to continue
+            sat_index, sat_id = satellite_list[i]
+            # ======================== Get id-specific parameters =========================
+
+            # these params come from orbit prop inputs file
+            sat_id_scenario_params = {
+                "power_params": self.sat_params['power_params_by_sat_id'][sat_id],
+                "data_storage_params": self.sat_params['data_storage_params_by_sat_id'][sat_id],
+                "initial_state": self.sat_params['initial_state_by_sat_id'][sat_id],
+                "activity_params": self.sat_params['activity_params']
+            }
+
+            sat_client_initialization_params["payload"]["sat_id_scenario_params"] = sat_id_scenario_params
+            sat_client_initialization_params["payload"]["sat_id"] = sat_id
+            sat_client_initialization_params["payload"]["sat_index"] = sat_index
+
+
+            # ======================== Wait for satellite to join ========================
+            msg = self.join_msgs.get() # Guaranteed to be a join message only until ALL satellites have joined
+
+            msg_id = msg['id']
+            assert msg['req_type'] == 'JOIN', 'Disallowed message type. Simulation ' \
+                                              'unable to run until all satellites have joined'
+
+            sat_address = msg['payload']['address']
+            sat_port = msg['payload']['port']
+
+            # check that client requests to "JOIN" before registering it
+            if (sat_address,sat_port) not in ip_addresses.values(): # satellites must have unique addresses
+                self.client.add_id(sat_id,sat_address,sat_port)
+                ip_addresses[sat_id] = (sat_address,sat_port)
+                # ======================== Send satellite parameters ========================
+                self.sendResponse(msg_id,sat_id,True,msg['waitForReply'])
+                successfulJoin = self.send_message(sat_client_initialization_params,target_id=sat_id)
+
+                if not successfulJoin:
+                    self.client.remove_id(sat_id)
+                    # overwriting the tuple with None also frees the address
+                    # for reuse by the uniqueness check above
+                    ip_addresses[sat_id] = None # "delete" satellite from sim.
+                else:
+                    i += 1 # Successful joining. Move onto next id
+        # ======================== Send satellite IP addresses ========================
+
+        ip_msg = {"req_type":"ALL_IPS","payload":ip_addresses,"waitForReply":True,"sender":"ground"}
+        self.send_message(ip_msg,broadcast = True)
+
+    def getResponse(self,_id:str):
+        """
+        Gets the response from sending input message
+        :param _id: The ID of the message sent
+        :return: True if message sent successfully, False otherwise.
+        If there is additional payload, sends the payload instead
+
+        Presumably blocks until the controller thread deposits the response
+        into self.responses — verify against BlockingDict.get semantics.
+        """
+        return self.responses.get(_id)
+
+    def sendResponse(self,_id:str, target:str, ack:bool, txWaitForReply:bool,response:dict = None):
+        """
+        Sends response for a message
+        :param _id: The ID of the message to respond to
+        :param target: The ID of the sending agent
+        :param ack: True if acknowledged, false otherwise
+        :param txWaitForReply: True if tx is actively waiting for a reply
+        :param response: (If any) additional response data
+        """
+        # Rebinding `response` deliberately folds the payload into the ACK
+        # envelope; the ACK reuses the original message's id via send_message.
+        response = {"ACK":ack, "payload": response,"id":_id,"waitForReply":False,'txWaitForReply':txWaitForReply}
+        self.send_message(response,target_id = target)
+
+ def send_message(self,message:dict,target_id:str = None, broadcast:bool = False):
+ """
+ Sends message to satellite using pickle serialization
+ Must have either sat_id defined or broadcast = True
+
+ :param message The dictionary to send
+ :param target_id The ID of the satellite
+ :param broadcast True if sending to ALL satellites, False otherwise
+ :return True if successful, False if not
+
+ Message sent successfully if entire message sent and an acknowledgement message received
+ """
+ assert target_id or broadcast, "sat_id must be defined or broadcast must be true"
+
+ isACK = True if 'ACK' in message else False
+
+ ids = set()
+
+ if target_id:
+ message['dest'] = target_id
+ if not isACK:
+ msgId = self.msgIDAssigner.assign_id()
+ ids.add(msgId)
+ message['id'] = msgId
+ else:
+ msgId = message['id']
+
+ self.messagesToPass[target_id].put(message)
+
+ elif broadcast:
+ idsToUse = self.msgIDAssigner.assign_ids(self.numPeers)
+ ids.update(idsToUse)
+
+ for peer in self.messagesToPass:
+ peerMessage = message.copy()
+ peerMessage['dest'] = peer
+ peerMessage['id'] = idsToUse.pop()
+ msgType = peerMessage['req_type']
+ self.messagesToPass[peer].put(peerMessage)
+
+ waitForReply = message['waitForReply']
+
+ if waitForReply:
+ # Check all recipients for broadcast
+ if broadcast:
+ assert not isACK
+ success = True
+ for _id in ids:
+ success = success and self.getResponse(_id)
+ return success
+ else:
+ if not isACK:
+ msgType = message['req_type']
+ else:
+ msgType = 'ACK'
+
+ if not isACK:
+ return self.getResponse(msgId)
+
+
+ else:
+ # Record message type and ID to process responses later
+ if not isACK:
+ msgType = message['req_type']
+ # print(" assigning {} to {}".format(msgType,msgId))
+ if msgType == 'PLAN' and message['plan_prop']:
+ msgType = 'PLAN_PROP'
+
+ for _id in ids:
+ self.idsToMsgType[_id] = msgType
+ self.idsToTarget[_id] = target_id
+ self.idsToSender[_id] = message['sender']
+ self.msgTypeToIDs[msgType].update(ids)
+
+ else:
+ pass
+
+
+    def handle_message(self,message:dict):
+        """
+        Handles in-simulation message based on message type. Responds with appropriate
+        response messages
+
+        Assumes message is valid.
+
+        Runs on the controller thread (run_controller). Barrier-style counters
+        (numFinishedActs etc.) are guarded by self.sim_lock; the main run()
+        loop waits on the same Condition for them to reach numPeers.
+
+        @param message The message to handle, deserialized
+
+        """
+
+        messageType = message["req_type"]
+        key = message['id']
+        target = message['sender']
+
+        # messages without an explicit destination are for the ground itself
+        dest = message['dest'] if 'dest' in message else 'ground'
+
+        if messageType in {'PLAN','BDT','STATES'} and dest in self.gs_id_order:
+
+            # forward to the destination ground station; may yield reply data
+            responseData = self.receive_message(message)
+
+            if messageType == "PLAN":
+                # Handle new LP information if needed
+                if message['handle_lp']:
+                    self.handle_new_lp_info(message)
+
+                if message['exchange']:
+                    # Check if sender requests an exchange
+                    # of PLAN messages
+                    gs = self.gs_by_id[message['dest']]
+
+                    with gs.lock:
+                        new_time, dest = message['payload']['new_time'], message['sender']
+                        info_option = message['payload']['info_option']
+                        gs.get_plan_db().update_self_ttc_time(new_time)
+                        responseData = gs.make_planning_message(dest,info_option=info_option)
+                    # NOTE(review): respType is never read afterwards
+                    respType = "PLAN"
+
+            if messageType == 'BDT':
+                print(f'BDT RESPONSE for {key}: {responseData}')
+
+            self.sendResponse(key,target,True,message['waitForReply'],responseData)
+
+        else:
+            # Barrier/bookkeeping messages addressed to the ground sim itself.
+            # NOTE(review): notifyAll() is the deprecated camelCase alias of
+            # Condition.notify_all() — still functional, consider updating.
+            with self.sim_lock:
+                if messageType == 'ACTS_DONE':
+                    self.numFinishedActs += 1
+                    self.sendResponse(key, target,True,message['waitForReply'])
+                    if self.numFinishedActs == self.numPeers:
+                        self.sim_lock.notifyAll()
+
+                elif messageType == 'FINISHED_PROP':
+                    self.numFinishedProp += 1
+                    self.sendResponse(key, target,True,message['waitForReply'])
+                    if self.numFinishedProp == self.numPeers:
+                        self.sim_lock.notifyAll()
+
+                elif messageType == 'READY_FOR_TIME_UPDATE':
+                    self.numReadyForNextTimeStep += 1
+                    self.sendResponse(key, target,True,message['waitForReply'])
+                    if self.numReadyForNextTimeStep == self.numPeers:
+                        self.sim_lock.notifyAll()
+
+                elif messageType == 'SAT_STATS':
+                    sender = message['sender']
+                    self.sat_stats[sender] = message['payload']
+                    self.sendResponse(key, target,True,message['waitForReply'])
+                    if all(self.sat_stats.values()):
+                        self.sim_lock.notifyAll()
+
+                elif messageType == 'POST_RUN':
+                    sender = message['sender']
+                    payload = message['payload']
+                    self.postRunData[sender] = payload
+                    self.sendResponse(key, target,True,message['waitForReply'])
+
+                    if all(self.postRunData.values()):
+                        self.sim_lock.notifyAll()
+
+                else:
+                    print("WARNING: Unknown message type.")
+                    self.sendResponse(key, target,False,message['waitForReply'])
+
+    def receive_message(self,message:dict):
+        """
+        Receives message by redirecting it to the relevant ground station
+        :param message: The input message to receive
+        :return: Any response data
+
+        NOTE(review): validation here uses assert, which is stripped under
+        `python -O`; raise explicitly if these checks must hold in production.
+        """
+        sender_id = message['sender']
+        assert sender_id in self.sat_id_order, "Unknown satellite id {}".format(sender_id)
+
+        dest_id = message['dest']
+        assert dest_id in self.gs_by_id.keys(), "Unknown destination id {}".format(dest_id)
+
+        gs_dest = self.gs_by_id[dest_id]
+        x = gs_dest.receive_message(message)
+        return x
+
+    def handleResponse(self,response:dict):
+        """
+        Handles delayed received response for PLAN, STATES, AND BDT
+
+        For PLAN_PROP:
+            - Updates the last time the sender satellite has been updated
+        For STATES:
+            - Checks which applicable satellites have not ACKED (or have ACKED)
+            - Updates the last time of satellite broadcast
+        For BDT:
+            - Updates ACKed BDT statements, essential for SRP
+
+        :param response: The response
+
+        Despite the annotation, `response` is the tuple forwarded by the
+        server: ("ACK", msgId, payload_or_success, txWaitForReply).
+        """
+        # Get original sent message information
+        msgId = response[1]
+        waitForReply = response[3]
+
+
+        if waitForReply:
+            # The sender is blocked in getResponse(); just deliver the payload.
+            self.responses.put(msgId, response[2])
+            return
+
+        # Fire-and-forget message: recover the metadata recorded by send_message.
+        msgType = self.idsToMsgType.pop(msgId)
+        original_target = self.idsToTarget.pop(msgId)
+        original_sender = self.idsToSender.pop(msgId)
+
+        # Access the applicable GS
+        # NOTE(review): this KeyErrors if the original sender was 'ground'
+        # (not a ground station id) — all ground-originated sends currently use
+        # waitForReply=True so this path is not hit; confirm before changing that.
+        gs = self.gs_by_id[original_sender]
+
+        # Remove the ACKed ID from the nonACKed IDs list
+        idsToTrack = self.msgTypeToIDs[msgType]
+        idsToTrack.remove(msgId)
+        # print("    ids To track:", idsToTrack)
+        # print("    removed msgId {} from idsToTrack".format(msgId))
+
+        if 'PLAN' in msgType:
+            if msgType == 'PLAN_PROP':
+                # 'succeded' [sic] is the established stats key used elsewhere
+                gs.stats['plan_uplinks_succeded'] += 1
+            self.gs_network.mark_sat_updated(original_target,tt.datetime2mjd(self.CurGlobalTime))
+
+        # A dict payload is a piggybacked reply message (e.g. an exchanged PLAN)
+        if type(response[2]) == dict:
+            self.receive_message(response[2])
+        else:
+            pass
+
+
+    def handle_new_lp_info(self,message:dict):
+        """
+        Handles incoming new LP info from a satellite given the by
+        updating next window uids if necessary
+
+        :param message The message sent
+        """
+
+        with self.gs_network.lock:
+            # Get information about sender
+            sender_id = message['sender']
+            assert sender_id in self.sat_id_order, "Unknown satellite id {}".format(sender_id)
+
+            sender_index = self.sat_id_order.index(sender_id)
+
+            # Update windows, if necessary
+            scheduleDisruptionCommunicated = message['payload']['schedule_disruption_replan_communicated']
+
+            if not scheduleDisruptionCommunicated:
+                update_keys = ['dlnk_winds','dlnk_winds_flat']
+                sat_windows_dict = message['payload']['sat_windows_dict']
+
+                # adopt the sender's downlink windows for its satellite index
+                for key in update_keys:
+                    self.gs_network.scheduler.all_windows_dict[key][sender_index] = sat_windows_dict[key]
+
+                sat_next_window_uid = sat_windows_dict['next_window_uid']
+                current_next_window_uid = self.gs_network.scheduler.all_windows_dict['next_window_uid']
+
+                # keep the global uid counter ahead of every satellite's counter
+                if sat_next_window_uid > current_next_window_uid:
+                    self.gs_network.scheduler.all_windows_dict['next_window_uid'] = sat_next_window_uid
+
+                    # NOTE(review): this message omits the 'waitForReply' key,
+                    # which send_message() reads unconditionally — as written,
+                    # reaching this branch raises KeyError. Also
+                    # 'NEXT_WINDOW_UPDATE' is not pre-registered in
+                    # self.msgTypeToIDs. Needs a fix coordinated with
+                    # send_message (waiting for replies here would block the
+                    # controller thread that delivers the ACKs — deadlock).
+                    msg = {'req_type': 'NEXT_WINDOW_UPDATE',
+                           'payload' : sat_next_window_uid,
+                           'sender'  : 'ground'
+                           }
+
+                    for other_sat_id in self.sat_id_order:
+                        if other_sat_id != sender_id:
+                            self.send_message(msg,other_sat_id)
+
+            self.gsn_exchange_planning_info_all_exec_agents()
+
+    def init_data_structs(self):
+        """ initialize data structures used in the simulation """
+
+        # eclipse windows use a negative uid space so they cannot collide with
+        # real activity window uids
+        window_uid = -9999
+        # ecl_winds is an array with index for each sat_indx
+        ecl_winds, window_uid =self.io_proc.import_eclipse_winds(window_uid)
+        if window_uid >= 0:
+            raise RuntimeWarning('Saw positive window ID for ecl window hack')
+
+        ecl_winds_by_sat_id = {self.sat_id_order[sat_indx]:ecl_winds[sat_indx] for sat_indx in range(self.num_sats)}
+        self.ecl_winds = ecl_winds
+
+        # create satellites (blocks until every remote satellite has joined)
+        self.initialize_sats()
+
+        # create ground network
+        gs_by_id = {}
+        all_gs = []
+        gsn_id = 'gsn'
+        gs_network = SimGroundNetwork(
+            gsn_id,
+            self.gs_params['gs_network_name'],
+            self.sim_start_dt,
+            self.sim_end_dt,
+            self.num_sats,
+            self.num_gs,
+            self.const_sim_inst_params['sim_gs_network_params'],
+            self.act_timing_helper,
+            self,
+            {id: id for id in self.sat_id_order},
+            self.access_truth_stn, # Only in this deterministic case where it is known. Else use a filter of, etc.
+            removed=True
+        )
+        # note: use sim tick as resource delta T.
+        plan_db_inputs = {
+            "sat_id_order": self.sat_id_order,
+            "gs_id_order" : self.gs_id_order,
+            "other_agent_ids": [gsn_id],
+            "initial_state_by_sat_id": self.sat_params['initial_state_by_sat_id'],
+            "ecl_winds_by_sat_id": ecl_winds_by_sat_id,
+            "power_params_by_sat_id": self.sat_params['power_params_by_sat_id'],
+            "resource_delta_t_s": self.sim_run_params['sim_tick_s']
+        }
+        # Create ground stations
+        # NOTE(review): gs_by_id is keyed by station['id'] as-is, while the
+        # SimGroundStation object stores str(station['id']) — confirm ids are
+        # already strings so the two agree.
+        for station in self.gs_params['stations']:
+            gs = SimGroundStation(
+                str(station['id']),
+                self.gs_id_order.index(str(station['id'])),
+                station['name'],
+                gs_network,
+                self.sim_start_dt,
+                self.sim_end_dt,
+                self.const_sim_inst_params['sim_gs_params'],
+                self.act_timing_helper,
+                self,
+                self.schedule_disruptions,
+                removed = True
+            )
+            gs_by_id[station['id']] = gs
+            gs_network.gs_list.append(gs)
+            all_gs.append(gs)
+
+            # initialize the planning info database
+            gs.get_plan_db().initialize(plan_db_inputs)
+
+        # initialize the planning info database
+        gs_network.get_plan_db().initialize(plan_db_inputs)
+        self.gs_network = gs_network
+        self.gs_by_id = gs_by_id
+
+        self.inject_obs()
+    def getReferenceByID(self,id:str):
+        """
+        Get GS object from the ID
+        :param id: The ID of the GS to access
+        :return The GS object with the input ID, else the GS network
+
+        Note: the parameter name shadows the builtin `id`; kept for interface
+        compatibility with existing callers using keyword arguments.
+        """
+        return self.gs_by_id.get(id,self.gs_network)
+
+ def getAgentType(self,id:str):
+ """
+ Get the agent type based on ID
+ :param id: The ID of the Agent
+ :return: The given agent type, with GSNET as default
+ """
+ if id in self.gs_by_id.keys(): return AgentType.GS
+ elif id in self.sat_id_order: return AgentType.SAT
+ return AgentType.GSNET
+
+    def getAllGSIDs(self):
+        """
+        Gets all gs ids, ordered by index
+        """
+        return self.gs_id_order
+
+    def getAllSatIDs(self):
+        """
+        Gets all satellite ids, ordered by index
+        """
+        return self.sat_id_order
+
+ def inject_obs(self):
+ """
+ Injects observations into satellite schedules
+ """
+
+ inj_obs_raw = self.sim_run_perturbations['injected_observations']
+
+ if not self.sim_run_perturbations['do_inject_obs']:
+ return
+
+ inj_obs_by_sat_id = {}
+ for obs_raw in inj_obs_raw:
+ if not obs_raw['type'] == 'hardcoded':
+ raise NotImplementedError
+
+ sat_id = obs_raw['sat_id']
+ obs = ObsWindow(
+ obs_raw['indx'],
+ sat_indx= self.sat_id_order.index(sat_id),
+ target_IDs=['inject_'+str(obs_raw['indx'])],
+ sat_target_indx=0,
+ start= tt.iso_string_to_dt (obs_raw['start_utc']),
+ end= tt.iso_string_to_dt (obs_raw['end_utc']),
+ wind_obj_type='injected'
+ )
+
+ # pretty hardcore hacky here, but things seem to do badly when injected obs have huge dv. Try it this way
+ obs.data_vol = 300
+ obs.original_data_vol = 300
+
+ inj_obs_by_sat_id.setdefault(sat_id, []).append(obs)
+
+ message = {'req_type': 'INJECT_OBS', 'sender':'ground','waitForReply':True}
+
+ for sat_id in self.sat_id_order:
+ message['payload'] = inj_obs_by_sat_id.get(sat_id,[])
+ self.send_message(message,sat_id)
+
+ def run_controller(self):
+ """
+ Run background controller to process incoming messages
+ :return:
+ """
+ while True:
+ msg = self.conn_to_server.recv()
+
+ if type(msg) == dict:
+ self.handle_message(msg)
+ elif "ACK" == msg[0]:
+ self.handleResponse(msg)
+ elif 'ERROR' in msg[0]:
+ print(msg[1])
+ elif "JOIN" in msg[0]:
+ self.join_msgs.put(msg[1])
+
+ def run(self):
+ """ run the simulation """
+ logging.basicConfig(filename=(self.params['output_path']+'logs/sim_sats.log'),level=logging.DEBUG)
+
+ self.server_proc = self.run_server(self.server_conn)
+
+ # Run message processor in background
+ t = Thread(target = self.run_controller)
+ t.setDaemon(True)
+ t.start()
+
+ self.init_data_structs()
+
+ verbose = True
+
+ global_time = self.sim_start_dt
+ sim_end_dt = self.sim_end_dt
+
+ # used to alert special operations on first iteration of the loop
+ first_iter = True
+
+
+
+ if first_iter:
+ # Change from building all potential activity windows over the entire sim every time the GP is
+ # ran to instead build them once at the start of sim
+ # and then associate the relevant windows with the relevant planning objects
+ # (GSN knows all windows, each satellite knows windows in which they can participate)
+ # First: create the params input structure required by SchedIOProcessor
+
+ if not self.gs_network.scheduler.outsource:
+ io_proc = SchedIOProcessor(self.params)
+
+ # Load all windows: .
+ print_verbose('Load files',verbose)
+
+ # parse the inputs into activity windows
+ window_uid = 0
+ print_verbose('Load obs',verbose)
+ obs_winds, window_uid =io_proc.import_obs_winds(window_uid)
+ print_verbose('Load dlnks',verbose)
+ dlnk_winds, dlnk_winds_flat, window_uid =io_proc.import_dlnk_winds(window_uid)
+ print_verbose('Load xlnks',verbose)
+ xlnk_winds, xlnk_winds_flat, window_uid =io_proc.import_xlnk_winds(window_uid)
+
+ # if crosslinks are not allowed to be used (in sim_case_config), then zero out all crosslink windows
+ # NOTE: this is done here and not further upstream because the access windows
+ # are used for calculating planning comms windows as well
+ if not self.params['sim_case_config']['use_crosslinks']:
+ xlnk_winds = [[[]] * self.num_sats] * self.num_sats
+ xlnk_winds_flat = [[]] * self.num_sats # list of N_sats empty lists
+
+ # note: this import is currently done independently from circinus constellation sim.
+ # If we ever need to share knowledge about ecl winds between the two,
+ # will need to make ecl winds an input from const sim
+ print_verbose('Load ecl',verbose)
+ ecl_winds, window_uid =io_proc.import_eclipse_winds(window_uid)
+
+ # Note, they are only validated inside the GP (other_helper not imported here)
+ """ print_verbose('Validate windows',verbose)
+ other_helper.validate_unique_windows(self,obs_winds,dlnk_winds_flat,xlnk_winds,ecl_winds) """
+
+ print_verbose('In windows loaded from file:',verbose)
+ print_verbose('obs_winds',verbose)
+ print_verbose(sum([len(p) for p in obs_winds]),verbose)
+ print_verbose('dlnk_win',verbose)
+ print_verbose(sum([len(p) for p in dlnk_winds]),verbose)
+ print_verbose('xlnk_win',verbose)
+ print_verbose(sum([len(xlnk_winds[i][j]) for i in range( self.sat_params['num_sats']) for j in range( self.sat_params['num_sats']) ]),verbose)
+
+ # Make all windows knows to the GSN
+ # Each entry into this dictionary (except next_window_uid) is a list of length num_sats, where
+ # each index in the outer list corresponds to the satellite index.
+ all_windows_dict = {
+ 'obs_winds': obs_winds,
+ 'dlnk_winds': dlnk_winds,
+ 'dlnk_winds_flat': dlnk_winds_flat,
+ 'xlnk_winds': xlnk_winds,
+ 'xlnk_winds_flat': xlnk_winds_flat,
+ 'ecl_winds': ecl_winds,
+ 'next_window_uid': window_uid
+ }
+
+ self.gs_network.scheduler.all_windows_dict = all_windows_dict
+
+ with self.gs_network.lock:
+ # Parse out windows for each satellite, add them to the queue of info to be sent to that satellite
+ for sat_index,sat_id in enumerate(self.sat_id_order):
+ # For immediate testing, just immediate set the information into their plan_db.
+
+ windows = {}
+ for key in self.gs_network.scheduler.all_windows_dict.keys():
+ if key != 'next_window_uid':
+ windows[key] = self.gs_network.scheduler.all_windows_dict[key][sat_index]
+ else:
+ windows[key] = self.gs_network.scheduler.all_windows_dict[key]
+
+ message = {"req_type": "SAT_WINDOWS_INIT","payload":deepcopy(windows),'waitForReply':True,'sender':'ground'}
+ self.send_message(message,sat_id)
+
+
+
+ #######################
+ # Simulation loop
+ #######################
+
+ while global_time < sim_end_dt:
+ self.CurGlobalTime = global_time
+
+ # ======================================================================
+ # ======================== Activity Execution ==========================
+ # ======================================================================
+
+ # execute activities at this time step before updating state to the next time step
+
+ # If first iteration: Server broadcasts START
+ if first_iter:
+ start_message = {'req_type': 'START', 'payload': None, 'sender':'ground',"waitForReply":True}
+ self.send_message(start_message,broadcast = True)
+
+
+ # Note that ground stations do not currently do anything in the execution step.
+ # Including for completeness/API coherence
+ for gs in self.gs_by_id.values():
+ gs.execution_step(global_time)
+
+
+ # Wait for satellites to finish sending executive acts messages
+ with self.sim_lock:
+ self.sim_lock.wait_for(lambda: self.numFinishedActs == self.numPeers)
+ self.numFinishedActs = 0
+
+ # ======================================================================
+ # =========================== State Update =============================
+ # ======================================================================
+
+ # run ground network update step so that we can immediately share plans on first iteration of this loop
+ self.gs_network.state_update_step(global_time,self.gp_wrapper)
+
+ with self.sim_lock:
+ # start all the satellites with a first round of GP schedules, if so desired
+ if first_iter and self.GPhotstart:
+ self.Transmission_Simulator.setBackbone(True) # Pair with set to false below
+
+ with self.gs_network.lock:
+ self.gsn_exchange_planning_info_all_exec_agents()
+
+ for gs in self.gs_by_id.values():
+ gs.plan_prop(self,tt.datetime2mjd(global_time),True)
+
+ self.Transmission_Simulator.setBackbone(False)
+
+ finishedSendingActsMsg = {'req_type': 'ACTS_DONE','payload':None,'sender':'ground',"waitForReply":True}
+ self.send_message(finishedSendingActsMsg,broadcast = True)
+
+ # update ground station states
+ for gs in self.gs_by_id.values():
+ gs.state_update_step(global_time)
+
+ # ======================================================================
+ # ======================== Plans Info Sharing ==========================
+ # ======================================================================
+ with self.gs_network.lock:
+ # whenever GP has run, share info afterwards
+ # Satellites WAIT for the moment
+ if self.gs_network.scheduler.check_external_share_plans_updated():
+ self.gsn_exchange_planning_info_all_exec_agents()
+
+ # Groundstations share with sats if appropriate
+ for gs in self.gs_by_id.values():
+ gs.plan_prop(None,tt.datetime2mjd(global_time))
+
+
+ finishedPropMsg = {'req_type': 'FINISHED_PROP', 'payload': None, 'sender': 'ground',"waitForReply":True}
+ self.send_message(finishedPropMsg, broadcast=True)
+
+ with self.sim_lock:
+ self.sim_lock.wait_for(lambda: self.numFinishedProp == self.numPeers)
+ self.numFinishedProp = 0
+
+ readyForNextTimeStepMsg = {'req_type': 'READY_FOR_TIME_UPDATE', 'payload': None,
+ 'sender': 'ground',"waitForReply":True}
+ self.send_message(readyForNextTimeStepMsg, broadcast=True)
+
+ with self.sim_lock:
+ self.sim_lock.wait_for(lambda: self.numReadyForNextTimeStep == self.numPeers)
+ self.numReadyForNextTimeStep = 0
+
+ for gs in self.gs_by_id.values():
+ gs.no_more_bdt = False
+
+ self.gs_network.no_more_bdt = False
+ global_time += self.sim_tick
+ first_iter = False
+
+ print("\n==================================================================")
+ print("================ CURRENT TIME : {} ================".format(global_time))
+ print("==================================================================\n")
+
+ ####
+ # end of sim
+
+ with self.sim_lock:
+ self.sim_lock.wait_for(lambda: all(self.sat_stats.values()))
+
+
+ print("GS#: Uplinks / Attempts")
+ for gs in self.gs_by_id.values():
+ print("{}: {} / {}".format(gs.ID, gs.stats['plan_uplinks_succeded'], gs.stats['plan_uplinks_attempted']))
+
+ print("\nSAT#: Plan Props / Attempts")
+ for satID in self.sat_stats:
+
+ planPropsSucceeded = self.sat_stats[satID]['plan_props_succeded']
+ planPropsAttempted = self.sat_stats[satID]['plan_props_attempted']
+
+ print("{}: {} / {}".format(satID, planPropsSucceeded, planPropsAttempted))
+
+    def gsn_exchange_planning_info_all_exec_agents(self):
+        """
+        Simulate exchange of planning information between ground station network and
+        rest of ground stations
+
+        Performs a bidirectional routes-only exchange with every ground
+        station, then clears the network's share-plans-updated flag.
+        """
+        # every time the ground network re-plans, want to send that updated planning information to the ground stations
+        for gs_id in self.gs_by_id.keys():
+
+            self.gs_network.send_planning_info(gs_id,info_option='routes_only')
+            self.gs_by_id[gs_id].send_planning_info(self.gs_network.ID,info_option='routes_only')
+
+        self.gs_network.scheduler.set_external_share_plans_updated(False)
+
+ def post_run(self, output_path):
+ """Collect post-run data from every satellite agent and ground station,
+ write the combined event logs to <output_path>logs/agent_events.json,
+ then run metrics and produce the standard simulation plots.
+
+ Args:
+ output_path: output directory prefix; note paths are built by plain
+ string concatenation, so it is expected to end with a separator
+ (e.g. 'outputs/').
+
+ Returns:
+ None
+ """
+
+ # Broadcast a POST_RUN_REQ to all remote satellite agents; their replies
+ # are collected (by the message-handling machinery) into self.postRunData,
+ # keyed by satellite ID.
+ postRunReq = {'req_type': 'POST_RUN_REQ','payload':None,'sender':'ground',"waitForReply":True}
+ self.send_message(postRunReq,broadcast=True)
+
+ # Block until every satellite's entry in postRunData has been populated.
+ with self.sim_lock:
+ self.sim_lock.wait_for(lambda: all(self.postRunData.values()))
+
+ # get sats and gs in index order
+ gs_in_indx_order = [None for gs in range(len(self.gs_by_id))]
+ for gs in self.gs_by_id.values():
+ gs_in_indx_order[gs.gs_indx] = gs
+
+
+ # report events
+ event_logs = OrderedDict()
+ event_logs['sats'] = OrderedDict()
+ event_logs['gs'] = OrderedDict()
+
+ for sat_id in self.postRunData:
+ event_logs['sats'][sat_id] = self.postRunData[sat_id]['event_logs']
+
+ # Record each GS's final data containers as a 'final_dv' event, then pull
+ # its full event history for the log dump.
+ for gs in gs_in_indx_order:
+ gs.state_recorder.log_event(self.sim_end_dt,'constellation_sim.py','final_dv',[str(dc) for dc in gs.state_sim.get_curr_data_conts()])
+ event_logs['gs'][gs.ID] = gs.state_recorder.get_events()
+
+
+ # NOTE(review): non-atomic exists/mkdir pair -- os.makedirs(...,
+ # exist_ok=True) would be race-free; left unchanged here.
+ if not os.path.exists(output_path+'logs'): # /outputs/plots
+ if not os.path.exists(output_path): # /outputs
+ os.mkdir(output_path)
+ os.mkdir(output_path+'logs')
+
+ event_log_file = output_path+'logs/agent_events.json'
+ with open(event_log_file,'w') as f:
+ json.dump(event_logs, f, indent=4, separators=(',', ': '))
+
+
+ # Get the activities executed for all the satellites
+ # (one sub-list per satellite / ground station index)
+ obs_exe = [[] for indx in range(self.num_sats)]
+ dlnks_exe = [[] for indx in range(self.num_sats)]
+ gs_dlnks_exe = [[] for indx in range(self.num_gs)]
+ xlnks_exe = [[] for indx in range(self.num_sats)]
+ # Get activities that execute, but failed (dv_del/dv_sch doesn't meet threshold) for all satellites
+ obs_exe_fail = [[] for indx in range(self.num_sats)]
+ dlnks_exe_fail = [[] for indx in range(self.num_sats)]
+ gs_dlnks_exe_fail = [[] for indx in range(self.num_gs)]
+ xlnks_exe_fail = [[] for indx in range(self.num_sats)]
+ energy_usage = {'time_mins': [[] for indx in range(self.num_sats)], 'e_sats': [[] for indx in range(self.num_sats)]}
+ data_usage = {'time_mins': [[] for indx in range(self.num_sats)], 'd_sats': [[] for indx in range(self.num_sats)]}
+
+ exec_failures_dicts_list = []
+ non_exec_failures_dicts_list = []
+
+ # Unpack each satellite's reported post-run data into the per-index lists.
+ for sat_id in self.postRunData:
+
+ sat_indx = self.sat_id_order.index(sat_id)
+ acts_exe = self.postRunData[sat_id]['acts_exe']
+
+ # Get actions related to executive acts
+ obs_exe[sat_indx] = acts_exe['obs']
+ dlnks_exe[sat_indx] = acts_exe['dlnk']
+ xlnks_exe[sat_indx] = acts_exe['xlnk']
+
+ failures_exec = self.postRunData[sat_id]['failures_exec']
+ failures_nonexec = self.postRunData[sat_id]['failures_nonexec']
+
+ # activities that failed (see failed_dict)
+ all_failures_values = list(failures_exec.values()) + list(failures_nonexec.values())
+ exec_failures_dicts_list.append({ **failures_exec})
+ non_exec_failures_dicts_list.append({ **failures_nonexec })
+
+ # Classify failures by activity window type
+ all_failures = [act for set_of_acts in all_failures_values for act in set_of_acts] # this list could have duplicates
+ obs_exe_fail[sat_indx] = [act for act in all_failures if isinstance(act,ObsWindow) ]
+ dlnks_exe_fail[sat_indx] = [act for act in all_failures if isinstance(act,DlnkWindow)]
+ xlnks_exe_fail[sat_indx] = [act for act in all_failures if isinstance(act,XlnkWindow)]
+
+ # Get energy usage data
+ t,e = self.postRunData[sat_id]['energy_usage']
+ energy_usage['time_mins'][sat_indx] = t
+ energy_usage['e_sats'][sat_indx] = e
+
+ # Get data usage data
+ t,d = self.postRunData[sat_id]['data_usage']
+ data_usage['time_mins'][sat_indx] = t
+ data_usage['d_sats'][sat_indx] = d
+
+ for gs in self.gs_by_id.values():
+ gs_indx = gs.gs_indx
+ acts_exe = gs.get_act_hist()
+ # NOTE: failure recorder only implemented on sats for now
+ # all_failures_values = list(gs.state_recorder.failed_dict['exec'].values()) + list(gs.state_recorder.failed_dict['non-exec'].values())
+ # all_failures = [act for set_of_acts in all_failures_values for act in set_of_acts]
+ gs_dlnks_exe[gs_indx] = acts_exe['dlnk']
+ # need to pull failed downlinks from the sat lists
+ gs_dlnks_exe_fail[gs_indx] = [act for list_of_acts_by_sat in dlnks_exe_fail for act in list_of_acts_by_sat if act.gs_indx == gs_indx]
+
+ # get scheduled activities as planned by ground network
+ obs_gsn_sched,dlnks_gsn_sched,xlnks_gsn_sched = self.gs_network.get_all_sats_planned_act_hists()
+ gs_dlnks_gsn_sched = self.gs_network.get_all_gs_planned_act_hists()
+
+
+ ##########
+ # Run Metrics
+ self.run_and_plot_metrics(energy_usage,data_usage,gs_in_indx_order,dlnks_exe,xlnks_exe,non_exec_failures_dicts_list)
+
+ ##########
+ # Plot stuff
+
+ sats_to_plot = self.sat_id_order
+
+ # Activity Failure vs. Data Storage Plot
+ # goal is to plot over the data-state graph with each failure labeled with the DV_failed and failure_type (exec_failures)
+ self.sim_plotter.sim_plot_all_sats_failures_on_data_usage(
+ sats_to_plot,
+ exec_failures_dicts_list,
+ data_usage
+ )
+
+ # plot scheduled and executed activities for satellites
+ self.sim_plotter.sim_plot_all_sats_acts(
+ sats_to_plot,
+ obs_gsn_sched,
+ obs_exe,
+ dlnks_gsn_sched,
+ dlnks_exe,
+ xlnks_gsn_sched,
+ xlnks_exe,
+ sats_obs_winds_failed=obs_exe_fail,
+ sats_dlnk_winds_failed=dlnks_exe_fail,
+ sats_xlnk_winds_failed=xlnks_exe_fail
+ )
+
+ # plot scheduled and executed down links for ground stations
+ self.sim_plotter.sim_plot_all_gs_acts(
+ self.gs_id_order,
+ gs_dlnks_gsn_sched,
+ gs_dlnks_exe,
+ gs_dlnks_exe_fail
+ )
+
+ # plot satellite energy usage
+ self.sim_plotter.sim_plot_all_sats_energy_usage(
+ sats_to_plot,
+ energy_usage,
+ self.ecl_winds
+ )
+
+ # plot satellite data usage
+ self.sim_plotter.sim_plot_all_sats_data_usage(
+ sats_to_plot,
+ data_usage,
+ self.ecl_winds
+ )
+
+ return None
+
+ def run_and_plot_metrics(self,energy_usage,data_usage,gs_in_indx_order,dlnks_exe,xlnks_exe,non_exec_failures_dicts_list = None):
+ """Compute and report simulation performance metrics, then generate
+ metric plots and data dumps.
+
+ Covers: possible vs. executed data volume (regular and injected obs),
+ observation latency, age of information (AoI) for obs targets and for
+ sat command/telemetry, energy/data resource margins, link utilization,
+ and activity-failure statistics. Results are printed, dumped to
+ json/pickle under the output path, and plotted via self.sim_plotter.
+
+ Args:
+ energy_usage: dict with 'time_mins'/'e_sats' lists per sat index.
+ data_usage: dict with 'time_mins'/'d_sats' lists per sat index.
+ gs_in_indx_order: ground station objects ordered by gs index.
+ dlnks_exe: executed downlink windows, per sat index.
+ xlnks_exe: executed crosslink windows, per sat index.
+ non_exec_failures_dicts_list: optional per-sat dicts of non-exec
+ failures; when provided, failures are plotted against cmd AoI.
+ """
+
+ # Re-import all potential activity windows (with dummy uids) so total
+ # possible DV can be compared against what actually executed.
+ calc_act_windows = True
+ if calc_act_windows:
+ print('------------------------------')
+ print('Potential DVs')
+ print('Load obs')
+ window_uid = 0 # note this window ID will not match the one for executed windows in the sim! These are dummy windows!
+ obs_winds, window_uid =self.io_proc.import_obs_winds(window_uid)
+ print('Load dlnks')
+ dlnk_winds, dlnk_winds_flat, window_uid =self.io_proc.import_dlnk_winds(window_uid)
+ print('Load xlnks')
+ xlnk_winds, xlnk_winds_flat, window_uid =self.io_proc.import_xlnk_winds(window_uid)
+
+ total_num_collectible_obs_winds = sum(len(o_list) for o_list in obs_winds)
+ total_collectible_obs_dv = sum(obs.original_data_vol for o_list in obs_winds for obs in o_list)
+ total_dlnkable_dv = sum(dlnk.original_data_vol for d_list in dlnk_winds_flat for dlnk in d_list)
+
+ print('total_num_collectible_obs_winds: %s'%total_num_collectible_obs_winds)
+ print('total_collectible_obs_dv: %s'%total_collectible_obs_dv)
+ print('total_dlnkable_dv: %s'%total_dlnkable_dv)
+
+
+ # data containers mark their data vol in their data routes with the "data_vol" attribute, not "scheduled_dv"
+ def dc_dr_dv_getter(dr):
+ return dr.data_vol
+
+ # Get the planned dv for a route container. Note that this includes utilization rt_cont
+ # it's the same code as the dc_dr one, but including for clarity
+ def rt_cont_plan_dv_getter(rt_cont):
+ return rt_cont.data_vol
+
+
+
+ # get all the rt containers that the gs network ever saw
+ planned_routes = self.gs_network.get_all_planned_rt_conts()
+ planned_routes_regular = [rt for rt in planned_routes if not rt.get_obs().injected]
+ planned_routes_injected = [rt for rt in planned_routes if rt.get_obs().injected]
+ # get the routes for all the packets at each GS at sim end
+ executed_routes_regular = [dc.executed_data_route for gs in self.gs_by_id.values() for dc in gs.get_curr_data_conts() if not dc.injected]
+
+ executed_routes_injected = [dc.executed_data_route for gs in self.gs_by_id.values() for dc in gs.get_curr_data_conts() if dc.injected]
+
+ # debug_tools.debug_breakpt()
+
+ # note that the below functions assume that for all rt_conts:
+ # - the observation, downlink for all DMRs in the rt_cont are the same
+
+ print('------------------------------')
+
+ # Data-volume stats: planned vs. executed, split regular/injected.
+ dv_stats = self.mc.assess_dv_by_obs(
+ planned_routes_regular, executed_routes_regular,
+ rt_poss_dv_getter=rt_cont_plan_dv_getter, rt_exec_dv_getter=dc_dr_dv_getter, verbose = True)
+
+ print('injected dv')
+ inj_dv_stats = self.mc.assess_dv_by_obs(
+ planned_routes_injected, executed_routes_injected,
+ rt_poss_dv_getter=rt_cont_plan_dv_getter, rt_exec_dv_getter=dc_dr_dv_getter ,verbose = True)
+
+
+ print('------------------------------')
+ # Latency stats: regular and injected observations.
+ lat_stats = self.mc.assess_latency_by_obs(planned_routes_regular, executed_routes_regular, rt_exec_dv_getter=dc_dr_dv_getter ,verbose = True)
+
+ print('injected latency')
+ inj_lat_stats = self.mc.assess_latency_by_obs(planned_routes_injected, executed_routes_injected, rt_exec_dv_getter=dc_dr_dv_getter ,verbose = True)
+
+
+ # AoI by obs target, both at collection time and including routing.
+ sim_plot_params = self.params['const_sim_inst_params']['sim_plot_params']
+ time_units = sim_plot_params['obs_aoi_plot']['x_axis_time_units']
+ print('------------------------------')
+ print('Average AoI by obs, at collection time')
+ obs_aoi_stats_at_collection = self.mc.assess_aoi_by_obs_target(planned_routes, executed_routes_regular,include_routing=False,rt_poss_dv_getter=rt_cont_plan_dv_getter, rt_exec_dv_getter=dc_dr_dv_getter ,aoi_x_axis_units=time_units,verbose = True)
+
+ print('------------------------------')
+ print('Average AoI by obs, with routing')
+ obs_aoi_stats_w_routing = self.mc.assess_aoi_by_obs_target(planned_routes, executed_routes_regular,include_routing=True,rt_poss_dv_getter=rt_cont_plan_dv_getter, rt_exec_dv_getter=dc_dr_dv_getter ,aoi_x_axis_units=time_units,verbose = True)
+
+
+
+ time_units = sim_plot_params['sat_cmd_aoi_plot']['x_axis_time_units']
+ print('------------------------------')
+
+ # this is indexed by sat index
+ sats_cmd_update_hist = met_util.get_all_sats_cmd_update_hist_removed(self.sat_id_order,self.postRunData)
+ aoi_sat_cmd_stats = self.mc.assess_aoi_sat_ttc_option(sats_cmd_update_hist,ttc_option='cmd',input_time_type='datetime',aoi_x_axis_units=time_units,verbose = True)
+
+ # this is indexed by ground station index
+ time_units = sim_plot_params['sat_tlm_aoi_plot']['x_axis_time_units']
+
+ print('------------------------------')
+ sats_tlm_update_hist = met_util.get_all_sats_tlm_update_hist_removed(self.sat_id_order,gs_in_indx_order,self.gs_id_ignore_list,self.postRunData)
+ aoi_sat_tlm_stats = self.mc.assess_aoi_sat_ttc_option(sats_tlm_update_hist,ttc_option='tlm',input_time_type='datetime',aoi_x_axis_units=time_units,verbose = True)
+
+
+ print('------------------------------')
+ # Energy and data resource margins per satellite.
+ e_rsrc_stats = self.mc.assess_energy_resource_margin(energy_usage,verbose = True)
+ d_rsrc_stats = self.mc.assess_data_resource_margin(data_usage,verbose = True)
+
+ # Link utilization: all possible link windows vs. those actually executed.
+ calc_window_utilization = True
+ if calc_window_utilization:
+ all_link_acts = [dlnk for dlnks in dlnk_winds_flat for dlnk in dlnks]
+ all_link_acts += [xlnk for xlnks in xlnk_winds_flat for xlnk in xlnks]
+
+ executed_acts = copy([dlnk for dlnks in dlnks_exe for dlnk in dlnks])
+ executed_acts += copy([xlnk for xlnks in xlnks_exe for xlnk in xlnks])
+
+ def all_acts_dv_getter(act):
+ return act.original_data_vol
+ def exec_acts_dv_getter(act):
+ return act.executed_data_vol
+
+ link_stats = self.mc.assess_link_utilization(all_link_acts, executed_acts, all_acts_dv_getter,exec_acts_dv_getter,verbose=True)
+
+
+ # NOTE(review): non-atomic exists/mkdir pair -- os.makedirs(...,
+ # exist_ok=True) would be race-free; left unchanged here.
+ output_path = self.params['output_path']
+ if not os.path.exists(output_path+'pickles'): # /outputs/pickles
+ if not os.path.exists(output_path): # /outputs
+ os.mkdir(output_path)
+ os.mkdir(output_path+'pickles')
+
+ # saving cus it broke, json so we can read it
+ with open(output_path+'pickles/pre-stat.json','w') as f:
+ json.dump( {
+ "average_obvs_throughput":dv_stats["average_obvs_throughput"],
+ "rg_ave_obs_dv_exec":dv_stats["ave_obs_dv_exec"],
+ "rg_ave_obs_dv_poss":dv_stats["ave_obs_dv_poss"],
+ "inj_ave_obs_dv_exec":inj_dv_stats["ave_obs_dv_exec"],
+ "inj_ave_obs_dv_poss":inj_dv_stats["ave_obs_dv_poss"],
+ "rg_median_obs_initial_lat_exec":lat_stats["median_obs_initial_lat_exec"],
+ "inj_median_obs_initial_lat_exec":inj_lat_stats["median_obs_initial_lat_exec"],
+ "median_av_aoi_exec":obs_aoi_stats_w_routing["median_av_aoi_exec"],
+ "median_ave_e_margin_prcnt":e_rsrc_stats["median_ave_e_margin_prcnt"],
+ "median_ave_d_margin_prcnt":d_rsrc_stats["median_ave_d_margin_prcnt"]
+ },
+ f, indent=4, separators=(',', ': '))
+
+ # but like this so we can reload it perfectly
+ with open(output_path+'pickles/pre-stat.pkl','wb') as f:
+ pickle.dump( {
+ "average_obvs_throughput":dv_stats["average_obvs_throughput"],
+ "rg_ave_obs_dv_exec":dv_stats["ave_obs_dv_exec"],
+ "rg_ave_obs_dv_poss":dv_stats["ave_obs_dv_poss"],
+ "inj_ave_obs_dv_exec":inj_dv_stats["ave_obs_dv_exec"],
+ "inj_ave_obs_dv_poss":inj_dv_stats["ave_obs_dv_poss"],
+ "rg_median_obs_initial_lat_exec":lat_stats["median_obs_initial_lat_exec"],
+ "inj_median_obs_initial_lat_exec":inj_lat_stats["median_obs_initial_lat_exec"],
+ "median_av_aoi_exec":obs_aoi_stats_w_routing["median_av_aoi_exec"],
+ "median_ave_e_margin_prcnt":e_rsrc_stats["median_ave_e_margin_prcnt"],
+ "median_ave_d_margin_prcnt":d_rsrc_stats["median_ave_d_margin_prcnt"]
+ },f)
+
+ # GET AND PRINT ACTIVITY FAILURE STATS
+ # 1) get all failure dictionaries and count up failure of each type
+ # (failure-type keys are taken from the first sat; assumes all sats
+ # report the same failure-type keys)
+ firstSatID = self.sat_id_order[0]
+ exec_failure_types = self.postRunData[firstSatID]['failures_exec'].keys()
+ non_exec_failure_types = self.postRunData[firstSatID]['failures_nonexec'].keys()
+
+ total_exec_failures_dict = {}
+ total_non_exec_failures_dict = {}
+
+ for key in exec_failure_types:
+ total_exec_failures_dict[key] = set()
+
+ for sat_id in self.sat_id_order:
+ for key in self.postRunData[sat_id]['failures_exec'].keys():
+ total_exec_failures_dict[key] |= self.postRunData[sat_id]['failures_exec'][key]
+
+ for key in non_exec_failure_types:
+ total_non_exec_failures_dict[key] = set()
+
+ for sat_id in self.sat_id_order:
+ for key in self.postRunData[sat_id]['failures_nonexec'].keys():
+ total_non_exec_failures_dict[key] |= self.postRunData[sat_id]['failures_nonexec'][key]
+
+ # 2) print out each key with the num failures next to it:
+ test_metrics_dump = {'Num_Failures_by_Type': {
+ 'exec': {},
+ 'non-exec': {}
+ }
+ }
+ print('========Totals for Activity Failures by Type===========')
+ total_exec_failures_by_act = {
+ 'xlnk': set(),
+ 'dlnk': set(),
+ 'obs': set()
+ }
+ # print out and save EXEC failures
+ for failure_type in total_exec_failures_dict.keys():
+ num_failures = len(total_exec_failures_dict[failure_type])
+ xlnk_fails = [act for act in total_exec_failures_dict[failure_type] if isinstance(act,XlnkWindow)]
+ dlnk_fails = [act for act in total_exec_failures_dict[failure_type] if isinstance(act,DlnkWindow)]
+ obs_fails = [act for act in total_exec_failures_dict[failure_type] if isinstance(act,ObsWindow)]
+ if num_failures > 0:
+ print('"%s": Total: %d, Obs: %d, Xlnk: %d, Dlnk: %d' %(failure_type,num_failures,
+ len(obs_fails),len(xlnk_fails),len(dlnk_fails)))
+
+ print(failure_type)
+ print("xlnk:", xlnk_fails)
+ print("dlnk:",dlnk_fails)
+ print("obs:",obs_fails)
+ print("=========================")
+
+ test_metrics_dump['Num_Failures_by_Type']['exec'][failure_type] = {
+ 'xlnk': len(xlnk_fails),
+ 'dlnk': len(dlnk_fails),
+ 'obs': len(obs_fails)
+ }
+
+ for act_type in total_exec_failures_by_act.keys():
+ # an xlnk or downlink can fail for multiple reasons, so need to get unique total set of each type
+ if act_type == 'xlnk':
+ for xlnk in xlnk_fails:
+ total_exec_failures_by_act[act_type].add(xlnk)
+ elif act_type == 'dlnk':
+ for dlnk in dlnk_fails:
+ total_exec_failures_by_act[act_type].add(dlnk)
+ else:
+ for obs in obs_fails:
+ total_exec_failures_by_act[act_type].add(obs)
+
+ print("NON EXEC FAILURES")
+ # print out and save NON-EXEC failures:
+ for failure_type in total_non_exec_failures_dict.keys():
+ num_failures = len(total_non_exec_failures_dict[failure_type])
+ xlnk_fails = [act for act in total_non_exec_failures_dict[failure_type] if isinstance(act,XlnkWindow)]
+ dlnk_fails = [act for act in total_non_exec_failures_dict[failure_type] if isinstance(act,DlnkWindow)]
+ obs_fails = [act for act in total_non_exec_failures_dict[failure_type] if isinstance(act,ObsWindow)]
+ if num_failures > 0:
+ print('"%s": Total: %d, Obs: %d, Xlnk: %d, Dlnk: %d' %(failure_type,num_failures,
+ len(obs_fails),len(xlnk_fails),len(dlnk_fails)))
+
+ print(failure_type)
+ print("xlnk:",xlnk_fails)
+ print("dlnk:",dlnk_fails)
+ print("obs:",obs_fails)
+ print("=========================")
+
+ test_metrics_dump['Num_Failures_by_Type']['non-exec'][failure_type] = {
+ 'xlnk': len(xlnk_fails),
+ 'dlnk': len(dlnk_fails),
+ 'obs': len(obs_fails)
+ }
+
+
+ test_metrics_dump['Percentage_of_Exec_Act_Failure_by_Act'] = {}
+ print('======Total activity failure percentages=====')
+ executed_link_acts = set(executed_acts) # makes a set of all executed xlnks and dlnks to avoid double counting
+ executed_acts_dict = {
+ 'xlnk': set(act for act in executed_link_acts if isinstance(act,XlnkWindow)),
+ 'dlnk': set(act for act in executed_link_acts if isinstance(act,DlnkWindow)),
+ 'obs': set(obs for obs_winds_by_sat in obs_winds for obs in obs_winds_by_sat)
+ }
+ for act_type in total_exec_failures_by_act.keys():
+ try:
+ percent_failed = 100*len(total_exec_failures_by_act[act_type])/len(executed_acts_dict[act_type])
+ except ZeroDivisionError:
+ percent_failed = 0
+ print('%s: %.2f %%' % (act_type,percent_failed))
+ test_metrics_dump['Percentage_of_Exec_Act_Failure_by_Act'][act_type] = percent_failed
+
+
+ print('=============Total Possible and Executed DV=============')
+ print('Regular Possible Routed DV: %.2f Mb' % dv_stats['total_poss_dv'])
+ print('Regular Executed DV: %.2f Mb' % dv_stats['total_exec_dv'])
+ print('Percentage Regular Executed / Poss DV: %.2f %%' % (dv_stats['total_exec_dv']/dv_stats['total_poss_dv'] * 100 if dv_stats['total_poss_dv'] > 0 else 0))
+
+ if inj_dv_stats['total_poss_dv']:
+ print('Injected Possible Routed DV: %.2f Mb' % inj_dv_stats['total_poss_dv'])
+ print('Injected Executed DV: %.2f Mb' % inj_dv_stats['total_exec_dv'])
+ print('Percentage Injected Executed / Poss DV: %.2f %%' % (inj_dv_stats['total_exec_dv']/inj_dv_stats['total_poss_dv'] * 100 if inj_dv_stats['total_poss_dv'] > 0 else 0 ))
+
+ print('Total Possible Routed DV: %.2f Mb' % (dv_stats['total_poss_dv']+inj_dv_stats['total_poss_dv']))
+ print('Total Executed DV: %.2f Mb' % (dv_stats['total_exec_dv']+inj_dv_stats['total_exec_dv']))
+ print('Percentage Total Executed / Poss DV: %.2f %%' % ((dv_stats['total_exec_dv']+inj_dv_stats['total_exec_dv'])/(inj_dv_stats['total_poss_dv'] +dv_stats['total_poss_dv'] )* 100))
+ else:
+ print('No injected obs dv possible')
+
+ # FOR MULTI-RUN TEST STATS - DON'T DELETE BELOW
+ # set run name based on settings:
+ SRP_setting = self.params['const_sim_inst_params']['lp_general_params']['use_self_replanner']
+
+ test_metrics_dump['dv_stats'] = deepcopy(dv_stats) # copy this since we are deleting keys
+ del test_metrics_dump['dv_stats']['poss_dvs_by_obs'] # remove poss_dvs_by_obs from dv_stats (can't write to json)
+ # NOTE(review): unlike the guarded prints above, this divides by
+ # total_poss_dv without a zero check -- ZeroDivisionError if no possible
+ # DV was routed; confirm whether that case can occur here.
+ test_metrics_dump['dv_stats']['exec_over_poss'] = dv_stats['total_exec_dv']/dv_stats['total_poss_dv']
+ test_metrics_dump['d_rsrc_stats'] = d_rsrc_stats # for data margin
+ test_metrics_dump['e_rsrc_stats'] = e_rsrc_stats # for energy margin
+
+ test_metrics_dump['lat_stats'] = deepcopy(lat_stats) # copy this since we are deleting keys
+ # delete things that can't be written to json
+ del test_metrics_dump['lat_stats']['executed_final_lat_by_obs_exec']
+ del test_metrics_dump['lat_stats']['executed_initial_lat_by_obs_exec']
+ del test_metrics_dump['lat_stats']['possible_initial_lat_by_obs_exec']
+
+ test_metrics_dump['obs_aoi_stats_w_routing'] = obs_aoi_stats_w_routing
+ test_metrics_dump['obs_aoi_stats_at_collection'] = obs_aoi_stats_at_collection
+
+ GS_disrupted_list = list(self.params['const_sim_inst_params']['sim_run_perturbations']['schedule_disruptions'].keys())
+ if GS_disrupted_list:
+ GS_disrupted = GS_disrupted_list[0] # assumes only 1 GS disrupted at a time
+ else:
+ GS_disrupted = None
+ tx_status = self.params['sim_case_config']['use_crosslinks']
+ # NOTE(review): the try/except KeyError below silently skips the
+ # multirun dump on a KeyError -- confirm which lookup this is meant to
+ # guard, since the params reads above sit outside the try.
+ try:
+ name = 'multi_%s_SRP_and_%s_tx_status' % (SRP_setting,tx_status)
+ multirun_path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), '../../', 'multirun_tests/'))
+ fln = multirun_path + "/" + name + ".json"
+ with open(fln,'w') as f:
+ json.dump(test_metrics_dump,f, indent=4, separators=(',', ': '))
+ print("json created: " + name)
+ except KeyError:
+ pass
+
+ print('SRP SETTING IS: %s, GS DISRUPTED IS: %s' % (SRP_setting, GS_disrupted))
+
+ # PRINT FINAL STATS
+ print("Global Planner performance under simulation:")
+ print("| - | Obs. Throughput | OT | Med. Obs Latency | MOLe | Med. Obs Target | Med. Sat-ave | Med. Sat-ave |")
+ print("| - | (ave % Max) | w/ Inj (LP) | (executed) | w/ Inj (LP) | data ave AOI | Energy Margin | Data Margin |")
+ print("| - | {:7.2f} | {:7.2f} | {} | {} | {} | {} | {} |"
+ .format (
+ dv_stats["average_obvs_throughput"],
+ inj_dv_stats["average_obvs_throughput"],
+ ( ' - ' if not lat_stats["median_obs_initial_lat_exec"] else "{:7.2f}".format(lat_stats["median_obs_initial_lat_exec"]) ), # TODO - reform this stat similarly
+ ( ' - ' if not inj_lat_stats["median_obs_initial_lat_exec"] else "{:7.2f}".format(inj_lat_stats["median_obs_initial_lat_exec"]) ),
+ ( ' - ' if not obs_aoi_stats_w_routing["median_av_aoi_exec"] else "{:7.2f}".format(obs_aoi_stats_w_routing["median_av_aoi_exec"]) ),
+ ( ' - ' if not e_rsrc_stats["median_ave_e_margin_prcnt"] else "{:7.2f}".format(e_rsrc_stats["median_ave_e_margin_prcnt"]) ),
+ ( ' - ' if not d_rsrc_stats["median_ave_d_margin_prcnt"] else "{:7.2f}".format(d_rsrc_stats["median_ave_d_margin_prcnt"]) )
+ )
+ )
+
+
+ ######
+ # metrics plots
+ output_path = self.params['output_path']
+ if not os.path.exists(output_path+'plots'): # /outputs/plots
+ if not os.path.exists(output_path): # /outputs
+ os.mkdir(output_path)
+ os.mkdir(output_path+'plots')
+
+ print('Creating and saving plots to: %s' % output_path + 'plots')
+
+ self.sim_plotter.plot_obs_aoi_at_collection(
+ # self.obs_target_id_order,
+ obs_aoi_stats_at_collection['exec_targIDs_found'] ,
+ obs_aoi_stats_at_collection['aoi_curves_by_targID_exec']
+ )
+
+ self.sim_plotter.plot_obs_aoi_w_routing(
+ # self.obs_target_id_order,
+ obs_aoi_stats_w_routing['exec_targIDs_found'] ,
+ obs_aoi_stats_w_routing['aoi_curves_by_targID_exec']
+ )
+
+ # Re-key the cmd AoI curves from sat index to sat ID for plotting.
+ curves_by_indx = aoi_sat_cmd_stats['aoi_curves_by_sat_indx']
+ cmd_aoi_curves_by_sat_id = {self.sat_id_order[sat_indx]:curves for sat_indx,curves in curves_by_indx.items()}
+
+ self.sim_plotter.plot_sat_cmd_aoi(
+ self.sat_id_order,
+ cmd_aoi_curves_by_sat_id,
+ all_downlink_winds = self.gs_network.scheduler.all_windows_dict['dlnk_winds_flat'],
+ gp_replan_freq = self.const_sim_inst_params['sim_gs_network_params']['gsn_ps_params']['replan_interval_s']
+ )
+
+ if non_exec_failures_dicts_list:
+ # also want to plot activity failures vs. GP plan age (cmd AoI) (non-exec failures)
+ # NOTE: the aoi_curves_by_sad_id is inside "run_and_plot_metrics" so can't call it here
+ self.sim_plotter.sim_plot_all_sats_failures_on_cmd_aoi(
+ self.sat_id_order,
+ non_exec_failures_dicts_list,
+ cmd_aoi_curves_by_sat_id
+ )
+
+ curves_by_indx = aoi_sat_tlm_stats['aoi_curves_by_sat_indx']
+ tlm_aoi_curves_by_sat_id = {self.sat_id_order[sat_indx]:curves for sat_indx,curves in curves_by_indx.items()}
+ self.sim_plotter.plot_sat_tlm_aoi(
+ self.sat_id_order,
+ tlm_aoi_curves_by_sat_id
+ )
+
+
+ # plot obs latency histogram, planned routes
+
+ pltl.plot_histogram(
+ data=obs_aoi_stats_w_routing['av_aoi_by_targID_exec'].values(),
+ num_bins = 40,
+ plot_type = 'histogram',
+ x_title='AoI (hours)',
+ y_title='Number of Obs Targets',
+ # plot_title = 'CIRCINUS Sim: Average AoI Histogram, with routing (dv req %.1f Mb)'%(mc.min_obs_dv_dlnk_req),
+ plot_title = 'CIRCINUS Sim: Average AoI Histogram, with routing',
+ plot_size_inches = (12,5.5),
+ show=False,
+ fig_name=output_path+'plots/csim_obs_aoi_routing_executed_hist.pdf'
+ )
+
+ # plot obs latency histogram, planned routes
+ pltl.plot_histogram(
+ data=obs_aoi_stats_at_collection['av_aoi_by_targID_exec'].values(),
+ num_bins = 40,
+ plot_type = 'histogram',
+ x_title='AoI (hours)',
+ y_title='Number of Obs Targets',
+ # plot_title = 'CIRCINUS Sim: Average AoI Histogram, at collection (dv req %.1f Mb)'%(mc.min_obs_dv_dlnk_req),
+ plot_title = 'CIRCINUS Sim: Average AoI Histogram, at collection',
+ plot_size_inches = (12,5.5),
+ show=False,
+ fig_name=output_path+'plots/csim_obs_aoi_collection_executed_hist.pdf'
+ )
+
+ # Histogram axis ranges/bins are hand-tuned per constellation type.
+ # for SSO
+ # lat_hist_x_range = (0,250) # minutes
+ # lat_hist_num_bins = 50
+ # for walker
+ lat_hist_x_range = (0,150) # minutes
+ lat_hist_y_range = (0,200) # minutes
+ # lat_hist_num_bins = 270
+ lat_hist_num_bins = 500
+
+
+ # plot obs latency histogram, planned routes
+ pltl.plot_histogram(
+ data=lat_stats['possible_initial_lat_by_obs_exec'].values(),
+ num_bins = lat_hist_num_bins,
+ plot_type = 'histogram',
+ plot_x_range = lat_hist_x_range,
+ plot_y_range = lat_hist_y_range,
+ x_title='Latency (mins)',
+ y_title='Number of Obs Windows',
+ # plot_title = 'CIRCINUS Sim: Initial Latency Histogram, planned (dv req %.1f Mb)'%(mc.min_obs_dv_dlnk_req),
+ plot_title = 'CIRCINUS Sim: Initial Latency Histogram, planned',
+ plot_size_inches = (12,3.5),
+ show=False,
+ fig_name=output_path+'plots/csim_obs_lat_planned_hist.pdf'
+ )
+
+
+ # plot obs latency histogram, executed routes
+ pltl.plot_histogram(
+ data=lat_stats['executed_initial_lat_by_obs_exec'].values(),
+ num_bins = lat_hist_num_bins,
+ plot_type = 'histogram',
+ plot_x_range = lat_hist_x_range,
+ plot_y_range = lat_hist_y_range,
+ x_title='Latency (mins)',
+ y_title='Number of Obs Windows',
+ # plot_title = 'CIRCINUS Sim: Initial Latency Histogram, executed (dv req %.1f Mb)'%(mc.min_obs_dv_dlnk_req),
+ plot_title = '',
+ plot_size_inches = (12,3.5),
+ show=False,
+ fig_name=output_path+'plots/csim_obs_lat_executed_hist.pdf'
+ )
+
+ # Dump raw latency CDF data so it can be re-plotted externally.
+ with open(output_path+'plots/exec_obs_lat_reg_cdf_data.json','w') as f:
+ json.dump(list(lat_stats['executed_initial_lat_by_obs_exec'].values()), f, indent=4, separators=(',', ': '))
+
+
+ # plot obs latency histogram, executed routes
+ pltl.plot_histogram(
+ data=lat_stats['executed_initial_lat_by_obs_exec'].values(),
+ num_bins = lat_hist_num_bins,
+ plot_type = 'cdf',
+ plot_x_range = lat_hist_x_range,
+ plot_y_range = lat_hist_y_range,
+ x_title='Latency (mins)',
+ y_title='Fraction of Obs Windows',
+ # plot_title = 'CIRCINUS Sim: Initial Latency Histogram, executed (dv req %.1f Mb)'%(mc.min_obs_dv_dlnk_req),
+ plot_title = 'CIRCINUS Sim: Initial Latency CDF, executed regular',
+ plot_size_inches = (12,3.5),
+ show=False,
+ fig_name=output_path+'plots/csim_obs_lat_executed_cdf.pdf'
+ )
+
+
+ ############
+ # injected routes latency plots
+
+ # plot obs latency histogram, executed routes
+ pltl.plot_histogram(
+ data=inj_lat_stats['executed_initial_lat_by_obs_exec'].values(),
+ num_bins = lat_hist_num_bins,
+ plot_type = 'histogram',
+ plot_x_range = lat_hist_x_range,
+ plot_y_range = lat_hist_y_range,
+ x_title='Latency (mins)',
+ y_title='Number of Obs Windows',
+ # plot_title = 'CIRCINUS Sim: Initial Latency Histogram, executed (dv req %.1f Mb)'%(mc.min_obs_dv_dlnk_req),
+ plot_title = 'CIRCINUS Sim: Initial Latency Histogram, executed injected',
+ plot_size_inches = (12,3.5),
+ show=False,
+ fig_name=output_path+'plots/csim_obs_lat_injected_hist.pdf'
+ )
+
+ with open(output_path+'plots/exec_obs_lat_inj_cdf_data.json','w') as f:
+ json.dump(list(inj_lat_stats['executed_initial_lat_by_obs_exec'].values()), f, indent=4, separators=(',', ': '))
+
+ # plot obs latency histogram, executed routes
+ pltl.plot_histogram(
+ data=inj_lat_stats['executed_initial_lat_by_obs_exec'].values(),
+ num_bins = lat_hist_num_bins,
+ plot_type = 'cdf',
+ plot_x_range = lat_hist_x_range,
+ plot_y_range = lat_hist_y_range,
+ x_title='Latency (mins)',
+ y_title='Fraction of Obs Windows',
+ # plot_title = 'CIRCINUS Sim: Initial Latency Histogram, executed (dv req %.1f Mb)'%(mc.min_obs_dv_dlnk_req),
+ plot_title = 'CIRCINUS Sim: Initial Latency CDF, executed injected',
+ plot_size_inches = (13,3.5),
+ show=False,
+ fig_name=output_path+'plots/csim_obs_lat_injected_cdf.pdf'
+ )
+
def get_metrics_params(self):
    """Assemble the parameter dict consumed by the metrics calculator.

    Pulls scenario, satellite, observation, scheduling, and metrics settings
    out of self.params and flattens them into a single dict: AoI window
    bounds, counts, target IDs, the min downlink DV requirement, latency/AoI
    settings, per-sat battery (Wh) and data-storage (Gb) limits, and the
    scenario timestep.

    Returns:
        dict: the metrics parameter dictionary.
    """
    orbit_prop = self.params['orbit_prop_params']
    const_sim_inst = self.params['const_sim_inst_params']

    scenario_params = orbit_prop['scenario_params']
    sat_params = orbit_prop['sat_params']
    obs_params = orbit_prop['obs_params']
    sim_metrics_params = const_sim_inst['sim_metrics_params']
    # read kept (though unused) so a missing key still raises, as before
    sim_plot_params = const_sim_inst['sim_plot_params']
    as_params = self.params['gp_general_params']['activity_scheduling_params']

    sim_run_params = const_sim_inst['sim_run_params']

    metrics_params = {
        # these are used for AoI calculation
        'met_obs_start_dt': sim_run_params['start_utc_dt'],
        'met_obs_end_dt': sim_run_params['end_utc_dt'],
        'num_sats': sat_params['num_sats'],
        'num_targ': obs_params['num_targets'],
        'all_targ_IDs': [targ['id'] for targ in obs_params['targets']],
        'min_obs_dv_dlnk_req': as_params['min_obs_dv_dlnk_req_Mb'],
        'latency_calculation_params': sim_metrics_params['latency_calculation'],
        'targ_id_ignore_list': sim_metrics_params['targ_id_ignore_list'],
        'aoi_units': sim_metrics_params['aoi_units'],
    }

    # per-satellite battery energy limits (Wh)
    sats_emin_Wh = []
    sats_emax_Wh = []
    for p_params in sat_params['power_params_by_sat_id'].values():
        _edot, sat_batt_storage, _units, _chg, _dis = \
            io_tools.parse_power_consumption_params(p_params)
        sats_emin_Wh.append(sat_batt_storage['e_min'])
        sats_emax_Wh.append(sat_batt_storage['e_max'])
    metrics_params['sats_emin_Wh'] = sats_emin_Wh
    metrics_params['sats_emax_Wh'] = sats_emax_Wh

    # per-satellite data storage limits (Gb)
    storage_params = sat_params['data_storage_params_by_sat_id'].values()
    metrics_params['sats_dmin_Gb'] = [d_params['d_min'] for d_params in storage_params]
    metrics_params['sats_dmax_Gb'] = [d_params['d_max'] for d_params in storage_params]

    metrics_params['timestep_s'] = scenario_params['timestep_s']

    return metrics_params
+
diff --git a/source/Ground_Sim/Satellite_Ground_Protocol b/source/Ground_Sim/Satellite_Ground_Protocol
new file mode 100644
index 0000000..4e0af18
--- /dev/null
+++ b/source/Ground_Sim/Satellite_Ground_Protocol
@@ -0,0 +1,137 @@
+"""
+MESSAGE PROTOCOL TO AND FROM SAT < - > GS and SIM
+
+NOTE:
+---------------------------------------------
+- GROUND refers to ground simulation manager
+ that orchestrates sim
+- GS refers to singular GS
+- Each Message is structured as follows:
+ - req_type : The request type
+ - dest: The destination
+ - payload: Any corresponding data
+ - sender: The id of the object sending the message
+
+Protocol is formatted as follows:
+----------------------------------------------
+#) ____ SENDS:
+ -
+ - Payload:
+ - Target:
+ - Sender:
+
+- Steps with parts a, b, c, etc. indicate no strict ordering of messages sent
+ in that numbered step, only that they are all sent before the next numbered
+ step and after the previous numbered step
+
+- (*) is only used once
+
+========================================
+============= BEFORE SIM ===============
+========================================
+
+1) SAT SENDS: JOIN
+ - Request to join simulation
+ - Payload: address of socket
+ - Target: ground
+
+2) GROUND SENDS: INIT PARAMS
+ - Parameters for satellite and simulation
+ - Payload: Parameters
+ - Target: Client that sent most recent JOIN
+ - Sender: ground
+
+3) GROUND SENDS: ALL_IPS
+ - Once all satellites joined
+ - IP addresses of all satellites in simulation
+ - Payload: Map keyed on satellite IDs with IP addresses as values
+ - Target: All satellites
+ - Sender: ground
+
+4) GROUND SENDS: INJECT_OBS
+ - Injects observations into satellites (sat-specific)
+ - Payload: the injected observations
+ - Target: All satellites
+ - Sender: ground
+
+5) GROUND SENDS: SAT_WINDOWS_INIT
+ - Initializes each satellite's windows
+ - Payload: The map of windows
+ - Target: All satellites
+ - Sender: ground
+
+6) GROUND SENDS: START
+ - Request to start simulation
+ - Payload: None
+ - Target: All satellites
+ - Sender: ground
+
+========================================
+============= DURING SIM ===============
+=============== REPEAT =================
+========================================
+
+1a) SAT SENDS: BDT
+ - Bulk Data transfer as downlink and or crosslink
+ - Payload: The data
+  - Target: Intended receiver (gs id or sat id)
+ - Sender: sat_id
+
+1b) SAT SENDS: PLAN
+ - Sends plan after each BDT run
+ - Payload: Planning data
+  - Target: Intended receiver (gs id or sat id)
+ - Sender: sat_id
+
+2) SAT SENDS: ACTS_DONE
+ - Indicates done sending all executive act related messages
+ - Payload: None
+ - Target: All
+ - Sender: sat_id
+
+3) GS SENDS: PLAN
+ - ONLY on first iteration and ONLY if hot start
+ - Payload: GS planning info
+ - Target: Satellites in view
+ - Sender: gs id
+
+4) GROUND SENDS: UPDATE
+ - Request for recipient to update state
+ - Payload: None
+ - Target: All satellites
+ - Sender: ground
+
+5a) SAT SENDS: STATE
+ - Current state of satellite to send via crosslink only if
+ the time since the last state sending is over a threshold amount of time
+ - Payload: State
+ - Target: Any satellite in view
+ - Sender: sat id
+
+5b) GS SENDS: PLAN
+ - Planning info of each gs if GP ran
+ - Payload: Planning info
+ - Target: Any satellite in view
+ - Sender: gs id
+
+5c) SAT SENDS: PLAN
+ - Send to other satellites if allowed, and send to gs in view if LP ran
+ - Payload: Planning info
+ - Target: satellite (if allowed) or gs in view (if LP ran)
+ - Sender: sat id
+
+6) SAT and GROUND SEND: FINISHED_PROP
+ - Indicates done propagating plans
+ - Payload: None
+ - Target: All
+ - Sender: ground or sat id
+
+7) SAT AND GROUND SEND: READY_FOR_TIME_UPDATE
+ - Indicates ready for time update
+ - Payload: None
+ - Target: All
+ - Sender: ground or sat id
+
+
+
+
diff --git a/source/Ground_Sim/__init__.py b/source/Ground_Sim/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/source/Removed_Satellite/BlockingDict.py b/source/Removed_Satellite/BlockingDict.py
new file mode 100644
index 0000000..db09661
--- /dev/null
+++ b/source/Removed_Satellite/BlockingDict.py
@@ -0,0 +1,49 @@
+import threading
class BlockingDict:
    """
    A thread-safe dictionary.

    All operations (put, get, keyExists) acquire the object's condition
    variable before touching the underlying dict; get() blocks until the
    requested key is present.
    """

    def __init__(self):
        self.queue = {}                  # backing store, guarded by self.cv
        self.cv = threading.Condition()  # lock + wakeup for blocked getters

    def put(self, key, value):
        """
        Places value associated with given key and wakes blocked getters.

        :param key: hashable key
        :param value: value to associate with the key
        :return: None
        """
        with self.cv:
            self.queue[key] = value
            # notify_all(): the camelCase notifyAll() alias is deprecated
            # since 3.10 and removed in 3.13. Wake every waiter so each
            # blocked get() re-checks its own key predicate.
            self.cv.notify_all()


    def keyExists(self,key):
        """
        Determines if the key exists (non-blocking).
        :param key: The key
        :return: True if key exists
        """
        with self.cv:
            return key in self.queue

    def get(self,key):
        """
        Gets and removes the key-value pair.
        Blocks until key is in queue.
        :param key: The input key
        :return: The value of the key
        """
        with self.cv:
            self.cv.wait_for(lambda: key in self.queue)
            result = self.queue.pop(key)
            return result

    def __str__(self):
        with self.cv:
            return self.queue.__str__()
diff --git a/source/Removed_Satellite/LightControl.py b/source/Removed_Satellite/LightControl.py
new file mode 100644
index 0000000..ff4f0b1
--- /dev/null
+++ b/source/Removed_Satellite/LightControl.py
@@ -0,0 +1,31 @@
+import RPi.GPIO as GPIO
+import time
+
def light_toggle(rxsnd, where, state):
    """
    Drive the status LED matching a message direction and endpoint.

    LED legend (BCM pin numbering):
      blue   - receive from ground
      green  - send to ground
      yellow - send to crosslink
      red    - receive from crosslink

    :param rxsnd: 'r' for receive; anything else is treated as send
    :param where: 'G0' for the ground endpoint, otherwise crosslink
    :param state: GPIO output level to apply to the chosen pin
    """
    GPIO.setwarnings(False)
    GPIO.setmode(GPIO.BCM)

    receiving = (rxsnd == 'r')
    ground_side = (where == 'G0')

    if receiving:
        color = 'blue' if ground_side else 'red'
    else:
        color = 'green' if ground_side else 'yellow'

    pin_by_color = {
        'red': 17,
        'yellow': 18,
        'green': 22,
        'blue': 23
    }

    led_pin = pin_by_color[color]
    GPIO.setup(led_pin, GPIO.OUT)
    GPIO.output(led_pin, state)
diff --git a/source/Removed_Satellite/Message_ID_Assigner.py b/source/Removed_Satellite/Message_ID_Assigner.py
new file mode 100644
index 0000000..e4a1e71
--- /dev/null
+++ b/source/Removed_Satellite/Message_ID_Assigner.py
@@ -0,0 +1,47 @@
+from threading import RLock
+
class MessageIDAssigner:
    """
    A thread-safe UID assigner for messages.

    UIDs are monotonically increasing integers; once an agent ID is set
    via setAgentID, assigned UIDs take the form "<agent_id>:<n>".
    """
    def __init__(self,start: int = 0):
        """
        The constructor.
        :param start: Starting UID value
        """
        self.nextID = start     # The next ID to use when assigning messages
        self.agent_id = None    # The ID of the agent using the assigner
        self.lock = RLock()     # Reentrant so assign_ids can call assign_id

    def assign_id(self):
        """
        Assign exactly one message a UID.

        The whole method (including the agent_id check) runs under the
        lock; the previous implementation incremented the counter WITHOUT
        the lock when no agent ID was set, so two threads could return the
        same UID. Both branches now return the pre-increment counter value,
        so the numbering is consistent whether or not an agent ID is set
        (the unlocked branch used to skip the starting value).

        :return: The UID string
        """
        with self.lock:
            uid = self.nextID
            self.nextID += 1
            if self.agent_id:
                return self.agent_id + ":" + str(uid)
            return str(uid)

    def assign_ids(self,num:int):
        """
        Assign multiple messages separate UIDs in one shot.
        :param num: The number of UIDs to generate
        :return: A set of UIDs
        """
        with self.lock:
            return set(self.assign_id() for _ in range(num))

    def setAgentID(self,_id:str):
        """Record the agent ID used to prefix subsequently assigned UIDs."""
        self.agent_id = _id
diff --git a/source/Removed_Satellite/Removed_Satellite.py b/source/Removed_Satellite/Removed_Satellite.py
new file mode 100644
index 0000000..3b9e8bf
--- /dev/null
+++ b/source/Removed_Satellite/Removed_Satellite.py
@@ -0,0 +1,849 @@
+from datetime import timedelta
+import socket
+from circinus_tools.scheduling.io_processing import SchedIOProcessor
+from circinus_tools import time_tools as tt
+from circinus_tools.metrics.metrics_calcs import MetricsCalcs
+from circinus_tools import io_tools
+from circinus_tools.activity_bespoke_handling import ActivityTimingHelper
+from circinus_sim.constellation_sim_tools.sim_agents import SimSatellite
+from circinus_sim.constellation_sim_tools.lp_wrapper import LocalPlannerWrapper
+from circinus_sim.constellation_sim_tools.Transmission_Simulator import Transmission_Simulator
+from sprint_tools.Constellation_STN import Constellation_STN
+from Removed_Satellite.Removed_Satellite_Server import RemovedSatelliteServer
+from Removed_Satellite.Removed_Satellite_Client import RemovedSatelliteClient
+from Removed_Satellite.BlockingDict import BlockingDict
+from sprint_tools.Sprint_Types import AgentType
+from threading import Thread, Condition
+import pyutilib
+
+import time
+from Removed_Satellite.Message_ID_Assigner import MessageIDAssigner
+import multiprocessing as mp
+
+pyutilib.subprocess.GlobalData.DEFINE_SIGNAL_HANDLERS_DEFAULT = False
+
+class RemovedSatellite:
+ """
+ Class that simulates a satellite on a separate device, acting as a wrapper class to the original
+ "satellite" class in sim_agents
+
+ Communicates with satellites and ground simulation (on separate devices) using server/client protocol
+
+ """
+
+ GROUND_SERVER_PORT = 54201
+ VERBOSE = True
+
    def __init__(self, general_config:dict):
        """
        Initializes the removed satellite wrapper.

        Startup sequence (all blocking waits use self.sim_lock):
          1. start the controller thread and join the sim (JOIN message)
          2. wait for INIT_PARAMS (got_params)
          3. wait until active (ALL_IPS + INJECT_OBS + SAT_WINDOWS_INIT)
          4. wait for START, then run() the sim loop
          5. wait for END_SIM

        :param general_config: config dict; must contain
            'rem_gp_server_address' (ground server host)
        """
        self.general_config = general_config
        self.params = None
        self.sat_params,self.orbit_params = None, None
        self.sim_run_params = None
        self.sim_start_dt, self.sim_end_dt = None,None

        self.satellite = None # The "original" satellite object
        self.sim_lock = Condition() # Lock on sim. Only used for sim-related messages
        self.gs_id_order = None # List of gs ids, ordered by their gs index
        self.sat_id_order = None # List of sat ids, ordered by sat index
        self.num_sats = -1 # The number of satellites in the simulation. -1 is invalid.
        self.sat_index = -1 # Satellite index. -1 is invalid.
        self.dt = -1 # sim tick as a timedelta once params arrive; -1 is invalid
        self.io_proc = None

        ##### Server/Client Params ######
        self.active = False
        self.got_params = False
        self.got_ips = False
        self.got_injected_obs = False
        self.got_updated_windows = False
        self.msgsToSend = {} # Queue of messages to pass to client
        self.conn_to_server,self.server_conn = self.get_pipe_connections() # Server that "listens" for
        # incoming messages
        self.responses = BlockingDict() # msg id -> response, for blocking getResponse()
        self.server_port = None
        self.server_proc = None

        self.client = self.set_up_client() # Client that sends messages
        # given target
        self.sat_id = None
        self.ecl_winds = None

        ## Message params ###
        self.idsToMsgType = {} # outstanding (non-waited) msg id -> request type
        self.msgTypeToIDs = {"STATES":set(),"PLAN":set(),"BDT":set(),
                             "PLAN_LP":set(),"PLAN_PROP":set(),
                             "SAT_STATS":set()}

        self.idsToTargets = {} # outstanding msg id -> destination agent id
        self.msgIDAssigner = MessageIDAssigner()


        ## Flag to start and end the sim
        self.START_REQ = False
        self.POST_RUN_REQ = False
        self.END_SIM = False

        # Initialize and start controller
        t = Thread(target = self.run_controller)
        # NOTE(review): setDaemon() is deprecated (removed in 3.13);
        # prefer t.daemon = True
        t.setDaemon(True)
        t.start()

        # Join simulation
        self.join_sim()


        with self.sim_lock:
            self.sim_lock.wait_for(lambda: self.got_params)

        self.numPeers = self.num_sats
        self.numFinishedActs = 0 # Number of peers that finished sending messages in executive acts
        self.numFinishedProps = 0 # Number of peers finished propagating their plans
        self.numReadyForNextTimeStep = 0 # Number of peers ready for next time step

        # metrics calculation
        self.mc = MetricsCalcs(self.get_metrics_params())

        # Also create local planner wrapper. it will store inputs that are common across all satellites. T
        # he instance parameters passed to it should be satellite-specific
        self.lp_wrapper = LocalPlannerWrapper(self.params,self.mc)

        self.schedule_disruptions = self.params['sim_case_config']['sim_run_perturbations']['schedule_disruptions']

        ##### End Sim State Truth #####

        self.Transmission_Simulator = Transmission_Simulator(False,False, self, removed_sat = True)

        with self.sim_lock:
            self.sim_lock.wait_for(lambda: self.active)

        with self.sim_lock:
            self.sim_lock.wait_for(lambda: self.START_REQ)
        self.run()

        with self.sim_lock:
            self.sim_lock.wait_for(lambda:self.END_SIM)
+
+ def get_pipe_connections(self):
+ """
+ Creates pipe connection between Removed Satellite and its server.
+ This connection spans multiple processes.
+ """
+ parent,child = mp.Pipe()
+ return parent,child
+
+ @staticmethod
+ def run_server(conn:mp.connection.Connection):
+ """
+ Creates and runs a server
+ :param conn: The pipe connection
+ """
+ p = mp.Process(target = RemovedSatelliteServer,args=(conn,),daemon=True)
+ p.start()
+ return p
+
+ def set_up_client(self):
+ """
+ Creates client with no knowledge of other server addresses besides ground server port and address
+ """
+ groundAddress = self.general_config['rem_gp_server_address']
+ return RemovedSatelliteClient(groundAddress,self.GROUND_SERVER_PORT,self.msgsToSend)
+
    def set_up_act_timing(self):
        """Build the ActivityTimingHelper from activity and orbit parameters."""
        self.act_timing_helper = ActivityTimingHelper(self.sat_params['activity_params'],self.orbit_params['sat_ids_by_orbit_name'],self.sat_params['sat_id_order'],None)
+
    def set_up_stn(self):
        """
        Build the ground-truth constellation space-time network (STN)
        from the propagated access data in self.params.
        """
        ##### Simulation State Truth #####
        stn_params = {
            'element_id_by_index': {
                'sat_id_by_indx' : self.sat_id_order,
                'gs_id_by_indx' : self.gs_id_order,
                # NOTE(review): 'trget_id_by_indx' looks like a typo of
                # 'target_id_by_indx' -- confirm against the key
                # Constellation_STN actually reads before renaming
                'trget_id_by_indx' : self.params['orbit_prop_params']['obs_params']['obs_target_id_order']
            },
            'accesses_data' : self.params['orbit_prop_params']['orbit_prop_data']['accesses_data']
        }
        self.access_truth_stn = Constellation_STN(stn_params)
+
+ def set_up_params(self,payload:dict):
+ """
+ Initializes params, satellite, etc. based on payload
+ :param payload: Input params
+ """
+ self.params = payload["sim_params"]
+ self.sat_params = self.params['orbit_prop_params']['sat_params']
+ self.orbit_params = self.params['orbit_prop_params']['orbit_params']
+ self.sim_run_params = self.params['const_sim_inst_params']['sim_run_params']
+
+ # ================== Setting up Satellite Parameters =====================
+ self.sim_start_dt, self.sim_end_dt = payload["sim_start_dt"], payload["sim_end_dt"]
+ id_sim_params = payload["sat_id_sim_satellite_params"]
+ id_scenario_params = payload["sat_id_scenario_params"]
+
+ self.sat_id = payload["sat_id"]
+ self.msgIDAssigner.setAgentID(self.sat_id)
+ self.client.set_id(self.sat_id)
+ self.sat_index = payload["sat_index"]
+
+ # ------------------- Get ID order of satellites and gs --------------------
+ self.gs_id_order = self.params['orbit_prop_params']['gs_params']['gs_id_order']
+ self.sat_id_order = self.params['orbit_prop_params']['sat_params']['sat_id_order']
+ self.gs_id_ignore_list = self.params['gp_general_params']['other_params']['gs_id_ignore_list']
+
+ self.num_sats = self.params['orbit_prop_params']['sat_params']['num_sats']
+ self.dt = timedelta(seconds=self.sim_run_params['sim_tick_s'])
+
+ # ================== Set up Satellite Dependencies ==================
+ self.set_up_act_timing()
+ self.set_up_stn()
+ self.io_proc = SchedIOProcessor(self.params)
+
+ # ================== Set up Plan DB Dependencies ==================
+ window_uid = -9999
+ # ecl_winds is an array with index for each sat_indx
+ ecl_winds, window_uid = self.io_proc.import_eclipse_winds(window_uid)
+ if window_uid >= 0: raise RuntimeWarning('Saw positive window ID for ecl window hack')
+
+ ecl_winds_by_sat_id = {self.sat_id_order[sat_indx]: ecl_winds[sat_indx] for sat_indx in range(self.num_sats)}
+ self.ecl_winds = ecl_winds
+
+ gsn_id = 'gsn'
+
+ plan_db_inputs = {
+ "sat_id_order": self.sat_id_order,
+ "gs_id_order": self.gs_id_order,
+ "other_agent_ids": [gsn_id],
+ "initial_state_by_sat_id": self.sat_params['initial_state_by_sat_id'],
+ "ecl_winds_by_sat_id": ecl_winds_by_sat_id,
+ "power_params_by_sat_id": self.sat_params['power_params_by_sat_id'],
+ "resource_delta_t_s": self.sim_run_params['sim_tick_s']
+ }
+
+ # ======================== Create Satellite ==========================
+ self.satellite = SimSatellite(
+ self.sat_id,
+ self.sat_index,
+ self.sim_start_dt,
+ self.sim_end_dt,
+ id_scenario_params,
+ id_sim_params,
+ self.act_timing_helper,
+ self,
+ self.access_truth_stn,
+ removed=True)
+
+ self.satellite.get_plan_db().initialize(plan_db_inputs)
+
+ self.got_params = True
+ self.sim_lock.notifyAll()
+
+ def join_sim(self):
+ """
+ Joins simulation by connecting to Ground server and starting up server
+ """
+
+ # Start up server
+ p = RemovedSatellite.run_server(self.server_conn)
+ self.server_proc = p
+
+ # Wait until server has finished setting up listening socket
+ with self.sim_lock:
+ self.sim_lock.wait_for(lambda: self.server_port!=None)
+
+
+ # Connect client to ground server
+ self.client.start_ground_connection()
+
+ joinMessage = {'req_type': 'JOIN',
+ 'payload': {'address':socket.gethostbyname(socket.gethostname()),
+ 'port': self.server_port},
+ 'waitForReply':True
+ }
+
+ self.send_message(joinMessage,"ground")
+
+
+ def run_controller(self):
+ """
+ Automatically handles message processing whenever it is passed a message
+
+ """
+ while True:
+ newMsg = self.conn_to_server.recv()
+ if type(newMsg) == dict:
+ # Data or sim message
+ self.handle_message(newMsg)
+
+ elif 'ACK' == newMsg[0]:
+ # Check if message is response
+ self.handleResponse(newMsg)
+
+ elif 'SERVER_PORT' == newMsg[0]:
+ with self.sim_lock:
+ self.server_port = newMsg[1]
+ self.sim_lock.notify_all()
+
+ elif 'ERROR' in newMsg[0]:
+ # Error--raise it
+ print(newMsg[1])
+
+
    def run(self):
        """
        Runs the removed satellite's main simulation loop.

        Each tick: execute activities, barrier on ACTS_DONE from all
        peers, update satellite state, opportunistically crosslink STATES,
        share planning info (plan propagation / LP push-down), barrier on
        FINISHED_PROP, then barrier on READY_FOR_TIME_UPDATE before
        advancing the clock by self.dt. After the loop, reports stats to
        ground and waits for the POST_RUN_REQ flag.
        """
        numLP = 0  # count of Local Planner runs, reported at the end

        self.current_time = self.sim_start_dt

        print("{}\n------- Starting Simulation ------\n".format(time.time()))

        while self.current_time < self.sim_end_dt:
            # ======================================================================
            # ====================+=== Activity Execution ==========================
            # ======================================================================

            self.satellite.execution_step(self.current_time)


            finishedSendingActsMsg = {'req_type': 'ACTS_DONE', 'payload': None,
                                      'sender': self.sat_id,"waitForReply":True}

            self.send_message(finishedSendingActsMsg,broadcast = True)

            # Barrier: wait for every peer's ACTS_DONE (counted in handle_message)
            with self.sim_lock:
                self.sim_lock.wait_for(lambda: self.numFinishedActs == self.numPeers)
                self.numFinishedActs = 0

            # ======================================================================
            # ============================ Update state ============================
            # ======================================================================

            with self.satellite.lock:
                self.satellite.state_update_step(self.current_time, self.lp_wrapper)

            # =====================================================================================
            # ========= Share local satellite STATES info if appropriate using crosslinks =========
            # =====================================================================================

            with self.satellite.last_broadcast_lock:
                # NOTE(review): timedelta.seconds is only the seconds
                # component (wraps past 24h); .total_seconds() is likely
                # intended if gaps can exceed a day -- confirm
                secondsSinceLastBroadcast = (self.current_time - self.satellite.last_broadcast).seconds

            if self.satellite.prop_reg and secondsSinceLastBroadcast > self.satellite.prop_cadence:
                self.satellite.get_exec().state_x_prop(tt.datetime2mjd(self.current_time))

            # ===============================================================================
            # ============================ Planning info sharing ============================
            # ===============================================================================

            # Sat shares PLAN message with other sats if appropriate
            # (flag under reference_model_definitions/sat_regs/zhou_original_sat.json
            # ["sat_model_definition"]["sim_satellite_params"]["crosslink_new_plans_only_during_BDT"])
            # if false, then anytime there is a potential crosslink access, plans will propagate.
            # if true, then plans will only propagate over existing scheduled bulk data tranfer (BDT) Xlnk activites
            if not self.params['const_sim_inst_params']['sim_satellite_params']['crosslink_new_plans_only_during_BDT']:
                with self.satellite.lock:
                    self.satellite.plan_prop(self,tt.datetime2mjd(self.current_time))

            # If a sat LP has run, send that info to gs network so it can use in planning with GP
            if self.satellite.lp_has_run():
                numLP += 1
                self.satellite.push_down_L_plan(self,tt.datetime2mjd(self.current_time))

            # Let all know that satellite finished propagating LP plans, if any
            finishedPropMsg = {'req_type': 'FINISHED_PROP','payload': None,'sender':self.sat_id,"waitForReply":True}
            self.send_message(finishedPropMsg,broadcast = True)

            with self.sim_lock:
                # Wait for everyone to finish propagating plans before saying ready for next update
                self.sim_lock.wait_for(lambda: self.numFinishedProps == self.numPeers)

                self.numFinishedProps = 0 # reset
            readyForNextTimeStepMsg = {'req_type': 'READY_FOR_TIME_UPDATE', 'payload': None,
                                       'sender': self.sat_id,"waitForReply":True}
            self.send_message(readyForNextTimeStepMsg, broadcast=True)

            # Wait until time has updated to move on
            with self.sim_lock:
                self.sim_lock.wait_for(lambda: self.numReadyForNextTimeStep == self.numPeers)
                self.satellite.no_more_bdt = False
                self.current_time += self.dt
                self.numReadyForNextTimeStep = 0


            print("\n==============================================================")
            print("============= CURRENT TIME : {} =============".format(self.current_time))
            print("==============================================================\n")


        print("Number of times Local Planner ran:",numLP)

        # NOTE(review): 'plan_props_succeded' (sic) is the key actually used
        # by the stats dict -- must stay misspelled unless fixed everywhere
        statsMsg = {'req_type': 'SAT_STATS',
                    'payload': {'plan_props_succeded': self.satellite.stats['plan_props_succeded'],
                                'plan_props_attempted': self.satellite.stats['plan_props_attempted']},
                    'dest': 'ground',
                    'sender': self.sat_id,
                    'waitForReply': True}

        self.send_message(statsMsg,target_id = 'ground')

        with self.sim_lock:
            self.sim_lock.wait_for(lambda: self.POST_RUN_REQ)
+
+ def getAllSatIDs(self):
+ """
+ Gets all satellite ids, ordered by index
+ """
+ return self.sat_id_order
+
+ def getAllGSIDs(self):
+ """
+ Gets all gs ids, ordered by index
+ """
+ return self.gs_id_order
+
+ def getAgentType(self,id:str):
+ """
+ Gets the AgentType of the object with id , based on the enum AgentType
+ @param id Input id
+ @return The AgentType enum value
+ """
+
+ if id in self.gs_id_order: return AgentType.GS
+ elif id in self.sat_id_order: return AgentType.SAT
+ return AgentType.GSNET
+
+ def getReferenceByID(self,id:str):
+ """
+ Gets the reference to a satellite by id only if the id matches this
+ :param id The input id string
+ :return self.satellite if id matches self.id, else None
+ """
+ if id == self.sat_id: return self.satellite
+
+ return None
+
    def send_message(self,message:dict,target_id:str = None, broadcast:bool = False):
        """
        Sends message to satellite using pickle serialization.
        Must have either target_id defined or broadcast = True.

        :param message: The dictionary message to send
        :param target_id: The ID of the target
        :param broadcast: True if sending to ALL satellites and ground (not gs specific), False otherwise.
        :return: True if successful, False if not. If additional data, instead sends the additional data (only
        applicable for NON broadcast transmissions)

        Message sent successfully if entire message sent and an acknowledgement message received, if we are
        waiting for a reply
        """
        assert target_id or broadcast, "sat_id must be defined or broadcast must be true"
        # Responses carry an 'ACK' key; requests never do
        if 'ACK' in message: isACK = True
        else: isACK = False

        ids = set()  # every msg id used by this call (one, or numPeers for broadcast)

        if target_id:
            message['dest'] = target_id
            if not isACK:
                msgId = self.msgIDAssigner.assign_id()
                ids.add(msgId)
                message['id'] = msgId

            else:
                # ACKs reuse the id of the message they answer
                msgId = message['id']

            if not isACK:
                if message['req_type'] == 'BDT':
                    # remember which activity window this BDT belongs to so
                    # handleResponse can match the ack back to it
                    self.satellite.bdt_ids_to_msg_id[msgId] = message['payload']['window_id']

            if self.gs_id_order:
                # all gs traffic is routed through the single ground server
                if target_id in self.gs_id_order:
                    target_id = 'ground'

            self.msgsToSend[target_id].put(message)

        elif broadcast:
            assert not isACK
            # Broadcasts message cannot be ACKs
            idsToUse = self.msgIDAssigner.assign_ids(self.numPeers)
            ids.update(idsToUse)

            # Place messages on queue to send
            for peer in self.msgsToSend:
                peerMessage = message.copy()
                peerMessage['dest'] = peer
                peerMessage['id'] = idsToUse.pop()
                self.msgsToSend[peer].put(peerMessage)



        waitForReply = message['waitForReply']

        if waitForReply:
            # Check all recipients for broadcast
            if broadcast:
                assert not isACK
                success = True
                for _id in ids:
                    # blocks (via BlockingDict) until each peer responds
                    success = success and self.getResponse(_id)
                return success
            else:
                # NOTE(review): these msgType assignments are dead code --
                # msgType is never read before the return below
                if not isACK:
                    msgType = message['req_type']
                else:
                    msgType = 'ACK'

                # NOTE(review): an ACK with waitForReply=True falls through
                # and returns None; sendResponse always sets
                # waitForReply=False so this path is not expected in practice
                if not isACK: return self.getResponse(msgId)
        else:
            # Record message type and ID to process responses later
            # Only for non ACK messages
            if not isACK:
                msgType = message['req_type']
                if msgType == 'PLAN':
                    # distinguish LP push-down vs plan propagation so
                    # handleResponse can apply the right bookkeeping
                    if message['handle_lp']:
                        msgType = 'PLAN_LP'
                    elif message['plan_prop']:
                        msgType = 'PLAN_PROP'


                for _id in ids:
                    # Record which ids were used, and for which message types
                    self.idsToMsgType[_id] = msgType
                    self.idsToTargets[_id] = target_id

                self.msgTypeToIDs[msgType].update(ids)
+
+
+ def sendResponse(self,msgID:str,target:str,ack:bool,txWaitForReply:bool,response:dict = None):
+ """
+ Sends response for a message.
+
+ :param msgID: The ID of the sent message
+ :param target: The ID of the sending agent
+ :param ack: True if acknowledged, false otherwise
+ :param txWaitForReply: True if tx is actively waiting for reply
+ :param response: (If any) additional response data
+ """
+ response = {"ACK": ack, "payload": response,"id": msgID,"waitForReply":False,'txWaitForReply':txWaitForReply}
+
+ self.send_message(response,target_id=target)
+
+ def getResponse(self, msgID : str):
+ """
+ Gets the response for a message
+ :param msgID: The message ID
+ :return: The response. If no other data included, then True if acknowledged and False otherwise.
+ Else, the response data
+ """
+
+ return self.responses.get(msgID)
+
+ def handleResponse(self,response:tuple):
+ """
+ Handles delayed received response for PLAN, STATES, AND BDT
+
+ For PLAN:
+ - Updates the satellite's states, specifically successful PLAN sends
+ For STATES:
+ - Checks which applicable satellites have not ACKED (or have ACKED)
+ - Updates the last time of satellite broadcast
+ For BDT:
+ - Updates ACKed BDT statements, essential for SRP
+
+ :param response: The response, structured as: ("ACK",_id, payload or success)
+
+ """
+
+ # Remove message ID from nonACKed IDs
+ msgId = response[1]
+ waitForReply = response[3]
+
+ if waitForReply:
+ self.responses.put(msgId,response[2])
+ return
+
+ msgType = self.idsToMsgType.pop(msgId)
+ original_target = self.idsToTargets.pop(msgId)
+ idsToTrack = self.msgTypeToIDs[msgType]
+ idsToTrack.remove(msgId)
+
+ if 'PLAN' in msgType:
+
+ if msgType == 'PLAN_PROP' and len(idsToTrack) == 0:
+ # A PLAN message that is part of a propagation
+ self.satellite.arbiter.sats_propped_to[original_target].append(self.current_time)
+ self.satellite.stats['plan_props_succeded'] += 1
+
+ elif msgType == 'PLAN_LP':
+ # A PLAN message that is part of Local Planner (LP) planning propagation
+ self.satellite.arbiter.set_external_share_plans_updated(False)
+
+ elif type(response[2]) == dict:
+ # Check if there is a payload
+ self.satellite.receive_message(response[2])
+
+ elif msgType == 'STATES':
+ if len(idsToTrack) == 0: # All applicable satellites have ACKed the state message
+ with self.satellite.last_broadcast_lock:
+ self.satellite.last_broacast = self.current_time
+
+ elif msgType == 'BDT':
+ import time
+ dv_txed, tx_success = response[2]
+
+ _id = self.satellite.bdt_ids_to_msg_id.pop(msgId)
+
+ if not tx_success:
+ bdt_ack = (_id,None)
+ else:
+ bdt_ack = (_id,dv_txed)
+
+ self.satellite.bdt_ids_acked.put(bdt_ack)
+
+ def handle_message(self,message:dict):
+ """
+ Handles message based on message type, sending response after ingestion
+ Assumes message is valid
+
+ If sim is not active, then waits for initialization related messages:
+ Messages include:
+ - IP addresses of all other satellites/ground servers
+ - Injected observations
+ - Satellite window initialization
+
+ :param message The incoming message, deserialized
+ """
+ messageType = message["req_type"]
+ key = message['id']
+ target = message['sender']
+ waitForReply = message['waitForReply']
+
+ if not self.active: # Check if initialization messages
+ """
+ # We MUST receive the following before starting the simulation:
+ # - Initialization parameters, aka INIT_PARAMS. Must be received FIRST
+ # - IP addresses of all other satellite servers. Denoted as ALL_IPS
+ # - Injected observations (even if there are none). Denoted as INJECT_OBS
+ # - Windows initialization, denoted as SAT_WINDOWS_INIT
+ # Once we receive all of these, we are ACTIVE and can start the simulation
+ # whenever the ground simulation sends the START message
+
+ """
+
+ with self.sim_lock:
+ if not self.got_params:
+ if messageType != "INIT_PARAMS":
+ print("WARNING: Unknown message type.")
+ self.sendResponse(key, target,False,waitForReply)
+ self.set_up_params(message["payload"])
+
+ self.sendResponse(key, target,True,waitForReply)
+ self.got_params = True
+
+ else:
+ if messageType == "ALL_IPS": # Getting IP addresses
+ self.client.set_gs_ids(self.gs_id_order)
+ self.client.set_ips(message["payload"])
+ self.got_ips = True
+ self.sendResponse(key, target,True,waitForReply)
+
+ elif messageType == "INJECT_OBS": # Getting injected observations
+ injected_obs = message["payload"]
+ with self.satellite.lock:
+ self.satellite.inject_obs(injected_obs)
+ self.got_injected_obs = True
+ self.sendResponse(key,target, True,waitForReply)
+
+ elif messageType == "SAT_WINDOWS_INIT": # Updating observation windows
+ with self.satellite.lock:
+ self.satellite.get_plan_db().sat_windows_dict = message["payload"].copy()
+ self.got_updated_windows = True
+ self.sendResponse(key, target,True,waitForReply)
+
+ else:
+ print("WARNING: Unknown message type.")
+
+
+ self.active = self.got_ips and self.got_injected_obs and self.got_updated_windows
+ if self.active: self.sim_lock.notifyAll()
+ else:
+ if messageType in {"PLAN","BDT","STATES"}:
+
+ with self.satellite.lock:
+
+ # First, receive the message
+ response = self.satellite.receive_message(message)
+
+ if messageType == 'PLAN':
+ if message['exchange']:
+ """
+ If it was a PLAN message that requires a PLAN exchange message,
+ generate PLAN message and add as payload to response
+ """
+ new_time, dest = message['payload']['new_time'], message['sender']
+ info_option = message['payload']['info_option']
+ if dest in self.sat_id_order:
+ self.satellite.get_plan_db().update_self_ttc_time(new_time)
+ response = self.satellite.make_planning_message(dest,info_option)
+
+ self.sendResponse(key,target,True,waitForReply,response)
+
+ elif messageType == 'NEXT_WINDOW_UPDATE':
+ """
+ Update window based on ground update
+ """
+ with self.satellite.lock:
+ self.updateNextWindow(message)
+ self.sendResponse(key, target,True,waitForReply)
+ self.satellite.lock.release()
+
+ elif messageType == 'ACTS_DONE':
+ """
+ Count up the number of peers (ground, satellites) that have finished executing their actions
+ for this time step. Once all others are finished executing actions, notify any waiting threads
+ """
+ with self.sim_lock:
+ self.numFinishedActs += 1
+ self.sendResponse(key,target,True,waitForReply)
+ if self.numFinishedActs == self.numPeers: self.sim_lock.notifyAll()
+
+ elif messageType == 'READY_FOR_TIME_UPDATE':
+ """
+ Count up the number of peers (ground,satellites) that are ready for the next time step.
+ Once all others are ready, notify any waiting threads
+ """
+ with self.sim_lock:
+ self.numReadyForNextTimeStep += 1
+ self.sendResponse(key, target,True,waitForReply)
+ if self.numReadyForNextTimeStep == self.numPeers: self.sim_lock.notifyAll()
+
+ elif messageType == 'FINISHED_PROP':
+ """
+ Count up the number of peers (ground,satellites) that have finish propagating information.
+ Once all others are ready, notify any waiting threads
+ """
+ with self.sim_lock:
+ self.numFinishedProps += 1
+ self.sendResponse(key, target,True,waitForReply)
+ if self.numFinishedProps == self.numPeers:
+ self.sim_lock.notifyAll()
+
+ elif messageType == "XLINK_FAILURE":
+ """
+ Record if a neighboring satellite experienced a crosslink failure with this satellite.
+ """
+ with self.satellite.lock:
+ payload = message['payload']
+ self.satellite.add_xlink_failure_info(payload)
+ self.sendResponse(key,target,True,waitForReply,payload)
+
+ elif messageType == "START": # Start sim
+ """
+ Wait until ground sends the START message to start the simulation.
+ Once it does, send a response back immediately upon receipt and notify all threads.
+ """
+ self.sendResponse(key, target,True,waitForReply)
+ with self.sim_lock:
+ self.START_REQ = True
+ self.sim_lock.notify_all()
+
+ elif messageType == "POST_RUN_REQ":
+ """
+ Ground requests post run information from satellite after entire simulation.
+ Sends all relevant information to ground and notifies all threads.
+ """
+ with self.sim_lock and self.satellite.lock:
+ self.sendResponse(key, target,True,waitForReply)
+ self.post_run()
+ with self.sim_lock:
+ self.POST_RUN_REQ = True
+ self.sim_lock.notify_all()
+
+ else:
+ print("WARNING: Unknown message type.")
+
+ def updateNextWindow(self,message:dict):
+ """
+ Updates this' next window uid based on message
+
+ :param message The incoming message
+ """
+ with self.satellite.lock:
+ new_next_window_uid = message['payload']
+ self.satellite.arbiter.plan_db.sat_windows_dict['next_window_uid'] = new_next_window_uid
+
+
+ def post_run(self):
+ """
+ Generates post run information for satellite and sends information to ground
+ Flags satellite as having ended simulation and notifies all other threads
+ """
+ postRunToSend = {'req_type':'POST_RUN','sender': self.sat_id, 'payload':{},'dest':'ground','waitForReply':True}
+ payload = postRunToSend['payload']
+ self.satellite.state_recorder.log_event(self.sim_end_dt,'Removed_Satellite.py','final_dv',[str(dc) for dc in self.satellite.state_sim.get_curr_data_conts()])
+
+ payload['event_logs'] = self.satellite.state_recorder.get_events()
+ payload['acts_exe'] = self.satellite.get_act_hist()
+ payload['failures_exec'] = self.satellite.state_recorder.failed_dict['exec']
+ payload['failures_nonexec'] = self.satellite.state_recorder.failed_dict['non-exec']
+ payload['energy_usage'] = self.satellite.get_ES_hist()
+ payload['data_usage'] = self.satellite.get_DS_hist()
+
+ payload['cmd_update_hist'] = self.satellite.get_merged_cmd_update_hist(self.gs_id_order,self.gs_id_ignore_list)
+ payload['end_time'] = self.satellite.sim_end_dt
+
+ self.send_message(postRunToSend,target_id='ground')
+ with self.sim_lock:
+ self.END_SIM = True
+ self.sim_lock.notify_all()
+
+ def get_metrics_params(self):
+ metrics_params = {}
+
+ scenario_params = self.params['orbit_prop_params']['scenario_params']
+ sat_params = self.params['orbit_prop_params']['sat_params']
+ obs_params = self.params['orbit_prop_params']['obs_params']
+ sim_metrics_params = self.params['const_sim_inst_params']['sim_metrics_params']
+ as_params = self.params['gp_general_params']['activity_scheduling_params']
+
+ # these are used for AoI calculation
+ metrics_params['met_obs_start_dt'] = self.params['const_sim_inst_params']['sim_run_params']['start_utc_dt']
+ metrics_params['met_obs_end_dt'] = self.params['const_sim_inst_params']['sim_run_params']['end_utc_dt']
+
+ metrics_params['num_sats']=sat_params['num_sats']
+ metrics_params['num_targ'] = obs_params['num_targets']
+ metrics_params['all_targ_IDs'] = [targ['id'] for targ in obs_params['targets']]
+ metrics_params['min_obs_dv_dlnk_req'] = as_params['min_obs_dv_dlnk_req_Mb']
+
+ metrics_params['latency_calculation_params'] = sim_metrics_params['latency_calculation']
+ metrics_params['targ_id_ignore_list'] = sim_metrics_params['targ_id_ignore_list']
+ metrics_params['aoi_units'] = sim_metrics_params['aoi_units']
+
+ metrics_params['sats_emin_Wh'] = []
+ metrics_params['sats_emax_Wh'] = []
+ for p_params in sat_params['power_params_by_sat_id'].values():
+ sat_edot_by_mode,sat_batt_storage,power_units,charge_eff,discharge_eff = \
+ io_tools.parse_power_consumption_params(p_params)
+
+ metrics_params['sats_emin_Wh'].append(sat_batt_storage['e_min'])
+ metrics_params['sats_emax_Wh'].append(sat_batt_storage['e_max'])
+
+ metrics_params['sats_dmin_Gb'] = []
+ metrics_params['sats_dmax_Gb'] = []
+ for d_params in sat_params['data_storage_params_by_sat_id'].values():
+ d_min = d_params['d_min']
+ d_max = d_params['d_max']
+
+ metrics_params['sats_dmin_Gb'].append(d_min)
+ metrics_params['sats_dmax_Gb'].append(d_max)
+
+ metrics_params['timestep_s'] = scenario_params['timestep_s']
+
+ return metrics_params
+
+
+
diff --git a/source/Removed_Satellite/Removed_Satellite_Client.py b/source/Removed_Satellite/Removed_Satellite_Client.py
new file mode 100644
index 0000000..c14dbc4
--- /dev/null
+++ b/source/Removed_Satellite/Removed_Satellite_Client.py
@@ -0,0 +1,282 @@
+import logging
+import sys, os, socket
+import pickle
+import multiprocessing as mp
+from queue import Empty
+import time
+DEFAULT_BUFF_SIZE = 32768
+START_MARKER = "size:".encode('ascii')
+
+import struct
def encodeStrLen(msg, etype=">I"):
    """
    Pack the length of msg into a fixed-size binary header.

    :param msg: the message (bytes/str) whose length to encode
    :param etype: struct format for the length field (default: big-endian
        unsigned 32-bit) -- added for consistency with decodeLen and with
        the server-side encodeStrLen; default preserves prior behavior
    :return: the packed length bytes
    """
    return struct.pack(etype, len(msg))
+
def decodeLen(buff, etype=">I"):
    """
    Unpack a binary length header produced by encodeStrLen.

    :param buff: buffer containing exactly the packed length field
    :param etype: struct format of the length field (default ">I")
    :return: the decoded integer length
    """
    (length,) = struct.unpack(etype, buff)
    return length
+
+
+"""
+
+The "sender" component of the removed satellite that connects and sends messages to recipient's server based on
+recipient id.
+
+It is structured such that it has one Process for each peer (other satellites + ground). Each Process
+acts as a consumer for a message queue, sending messages as they come in based on the target ID and given
+connection. Each Process lives for the entirety of the simulation.
+
+"""
class RemovedSatelliteClient:
    """
    The "sender" component of the removed satellite: connects and sends
    messages to each recipient's server based on recipient id.

    One Process is spawned per peer (other satellites + ground).  Each
    Process consumes a message queue, transmitting messages over its
    dedicated socket connection as they arrive.  Each Process lives for
    the entirety of the simulation.
    """

    def __init__(self,groundAddress,groundPort,msgsToSend:dict,path:str=None):
        """
        Creates a new RemovedSatelliteClient with no knowledge
        of any addresses besides ground.

        :param groundAddress: ip address of ground sim server
        :param groundPort: port used by ground sim server
        :param msgsToSend: empty dictionary for queues
        :param path: path for logging; sim_TestClient.log in the current
            directory by default
        """
        # BUGFIX: self.logPath was previously assigned only when a path was
        # supplied, so a falsy path crashed below with AttributeError.
        self.logPath = 'sim_TestClient.log'
        if path and os.path.isdir(path + '/logs/'):
            self.logPath = path + '/logs/sim_TestClient.log'

        self.sat_id = None
        self.addressesByIDs = {"ground": (groundAddress, groundPort)}  # maps id's to ip addresses
        self.ordered_gs_ids = None  # ordered list of gs ids

        self.msgsToSend = msgsToSend
        self.msgsToSend['ground'] = mp.Queue()
        self.connections = {'ground': self.make_connection((groundAddress, groundPort))}
        self.processes = set()

        self.__print_live("Logging at: {}".format(self.logPath))
        logging.basicConfig(filename=self.logPath, level=logging.DEBUG)
        # logging.warn is deprecated in favor of warning()
        logging.warning("Log started at: {}".format(self.logPath))

    def __print_live(self,str_to_print):
        """Print and flush immediately (output may be piped/buffered)."""
        print(str_to_print)
        sys.stdout.flush()

    ###################################################################################################################
    #  CONNECTION-BASED FUNCTIONS                                                                                     #
    ###################################################################################################################

    def make_connection(self,address:tuple):
        """
        Creates a socket connection to the given address, retrying once per
        second for up to RETRY_NUM attempts.

        :param address: a (host, port) tuple
        :return: the connected socket
        :raises Exception: if the connection cannot be made after RETRY_NUM tries
        """
        RETRY_NUM = 100

        for _ in range(RETRY_NUM):
            try:
                connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                connection.connect(address)
                return connection
            except socket.error:
                print(f"Failed to connect to {address}. Retrying in 1 second.")
                time.sleep(1)

        raise Exception(f"Unable to connect to {address} after {RETRY_NUM} tries every second.")

    def set_ips(self,all_ips:dict):
        """
        Sets addresses for all peers' servers, establishes connections with
        all satellites, and starts one sender Process per satellite peer.

        :param all_ips: dictionary mapping agent IDs to (host, port) addresses
        """
        for k in all_ips:
            self.addressesByIDs[k] = all_ips[k]

        # We never send to ourselves.
        del self.addressesByIDs[self.sat_id]

        # Establish connections
        self.set_connections(self.addressesByIDs)

        # Set up queues for connections
        self.set_queues(list(self.addressesByIDs.keys()))

        # Start one daemon process per satellite connection (ground's is
        # started separately by start_ground_connection).
        for targ_id in self.addressesByIDs:
            if targ_id != 'ground':
                p = mp.Process(target=self.run,
                               args=(targ_id, self.connections[targ_id], self.msgsToSend[targ_id]),
                               daemon=True)
                self.processes.add(p)
                p.start()

    def set_connections(self,all_ips:dict):
        """
        Opens a socket connection to every non-ground peer.

        :param all_ips: dictionary mapping agent IDs to (host, port) addresses
        """
        for agent_id in all_ips:
            if agent_id != 'ground':
                self.connections[agent_id] = self.make_connection(all_ips[agent_id])

    def start_ground_connection(self):
        """
        Starts the daemon sender Process for the ground connection.
        """
        try:
            p = mp.Process(target=self.run,
                           args=('ground', self.connections['ground'], self.msgsToSend['ground']),
                           daemon=True)
            self.processes.add(p)
            p.start()
        except Exception as e:
            # Narrowed from a bare except; report what actually failed.
            # (The redundant inner 'import time' was removed; time is
            # imported at module level.)
            print("\n\n\n {}PROBLEM WITH CLIENT FOR GROUND: {}".format(time.time(), e))

    ###################################################################################################################
    #  MESSAGE PASSING FUNCTIONS                                                                                      #
    ###################################################################################################################
    def set_queues(self,peers:list):
        """
        Creates a message queue for each non-ground peer (ground's queue is
        created in __init__).  All ground stations are collectively
        referred to as ground.

        :param peers: The list of peer IDs, not including GS IDs
        """
        for peer in peers:
            if peer != 'ground':
                self.msgsToSend[peer] = mp.Queue()  # unbounded, same as Queue(0)

    def haveMsgToSend(self,targ_id:str):
        """
        Returns whether there is a message queued for the given target.

        :param targ_id: The target ID
        :return: True if there is a message to send, False otherwise
        """
        # BUGFIX: qsize() raises NotImplementedError on macOS; empty() is
        # the portable check.
        return not self.msgsToSend[targ_id].empty()

    def getMessage(self,targ_id:str):
        """
        Pops the next message off the given target's queue.

        NOTE(review): Queue.get() blocks, so the Empty handler below is
        effectively unreachable; kept for parity with the apparent
        non-blocking intent -- confirm desired semantics.

        :param targ_id: The id of the target
        :return: A dictionary message, or None
        """
        try:
            return self.msgsToSend[targ_id].get()
        except Empty:
            return None

    ###################################################################################################################
    #  MAIN CLIENT FUNCTIONS                                                                                          #
    ###################################################################################################################

    def run(self,targ_id:str,connection:socket.socket,msgsToSend:mp.Queue):
        """
        Main loop for one sender Process: consume the queue and transmit
        each message over the dedicated connection.

        :param targ_id: The ID of the target
        :param connection: The corresponding socket
        :param msgsToSend: The queue it will consume to send messages
        """
        try:
            while True:
                msg = msgsToSend.get()  # blocks until a message arrives
                msg_bytes = pickle.dumps(msg)  # renamed: 'bytes' shadowed the builtin
                self.transmit(targ_id, connection, msg_bytes)
        except Exception as e:
            # Narrowed from a bare except; include the cause.
            print("PROBLEM WITH CLIENT: {}".format(e))

    def transmit(self,targ_id:str,client:socket.socket,bytes_tx:bytes):
        """
        Transmits the given bytes to targ_id's connection.

        Sends a packet header (START_MARKER followed by the 4-byte big-endian
        message length), then the message itself in chunks, then waits for
        the receiver's PARSEABLE response.  Retries the whole exchange up to
        MAX_TRIES times.

        :param targ_id: The id of the target
        :param client: The connection
        :param bytes_tx: The bytes to send
        """
        MAX_TRIES = 3

        for _ in range(MAX_TRIES):
            try:
                toSend = len(bytes_tx)

                # Send header (uses the module-level START_MARKER; the local
                # copy that shadowed it was removed)
                client.send(START_MARKER + encodeStrLen(bytes_tx))

                # Send rest of message
                sentTot = 0
                while sentTot < toSend:
                    sentTot += client.send(bytes_tx[sentTot:])

                try:
                    if self.receive_response(client):
                        return
                except Exception:
                    print(" Error Receiving: {}:{}".format(targ_id, self.addressesByIDs[targ_id]))
                    logging.warning("Error Receiving: {}:{}".format(targ_id, self.addressesByIDs[targ_id]))

            except Exception:
                print("Error Transmitting to: {}".format(targ_id))
                logging.warning("Error Transmitting to: {}".format(targ_id))

        print(" EXCEEDED MAX NUMBER OF TRIES ({}) TO TRANSMIT MESSAGE".format(MAX_TRIES))

    def receive_response(self,client:socket.socket):
        """
        Receives the PARSEABLE response from the server.

        :param client: The connection
        :return: True if the server parsed our message, False otherwise
        :raises ValueError: if no header start marker is found in the reply
        """
        rcv_buff = b''
        chunk = client.recv(DEFAULT_BUFF_SIZE)

        marker_at = chunk.find(START_MARKER)
        if marker_at < 0:
            # BUGFIX: previously a missing marker (find() == -1) silently
            # produced a garbage length offset; fail loudly instead.
            raise ValueError("response header start marker not found")

        len_start = marker_at + len(START_MARKER)
        expectedFollowupLen = decodeLen(chunk[len_start:len_start + 4])
        rcv_buff += chunk[len_start + 4:]

        # Keep reading until the whole expected payload has arrived or the
        # peer closes the connection (empty chunk).
        while len(rcv_buff) < expectedFollowupLen and len(chunk) > 0:
            chunk = client.recv(DEFAULT_BUFF_SIZE)
            rcv_buff += chunk

        rsp_msg = pickle.loads(rcv_buff)
        return rsp_msg['PARSEABLE']

    def set_gs_ids(self,gs_ids:list):
        """
        Sets this client's gs ids.  Assumes gs_ids is ordered by index.

        :param gs_ids: List of gs ids
        """
        self.ordered_gs_ids = gs_ids

    def set_id(self,id:str):
        """Record this satellite's own agent id."""
        self.sat_id = id
+
+if __name__ == "__main__":
+ pass
+
diff --git a/source/Removed_Satellite/Removed_Satellite_Server.py b/source/Removed_Satellite/Removed_Satellite_Server.py
new file mode 100644
index 0000000..63b62ec
--- /dev/null
+++ b/source/Removed_Satellite/Removed_Satellite_Server.py
@@ -0,0 +1,313 @@
+import sys, socket
+from OpenSSL import SSL
+import pickle, json
+import select
+DEFAULT_BUFF_SIZE = 32768
+from sprint_tools.OEnum import PrintVerbosity
+import struct
def encodeStrLen(msg, etype=">I"):
    """
    Pack len(msg) into a binary header using the given struct format.

    :param msg: message whose length to encode
    :param etype: struct format string (default big-endian unsigned int)
    :return: the packed length bytes
    """
    msg_len = len(msg)
    return struct.pack(etype, msg_len)
+
def decodeLen(buff, etype=">I"):
    """
    Inverse of encodeStrLen: unpack an integer length from buff.

    :param buff: bytes containing the packed length field
    :param etype: struct format string (default big-endian unsigned int)
    :return: the unpacked length
    """
    value, = struct.unpack(etype, buff)
    return value
+
+"""
+A component of RemovedSatellite that receives incoming messages, passes these messages to the RemovedSatellite, and
+sends ACK (or NACK) messages to sender
+
+"""
+
class RemovedSatelliteServer:
    """
    Receiver component of a RemovedSatellite.

    Listens on a dynamically assigned TCP port, parses incoming framed
    messages, forwards valid ones to the RemovedSatellite over
    self.connection, and replies to each sender with an ACK/NACK
    ('PARSEABLE') response.
    """

    def __init__(self,connection):
        """
        :param connection: duplex channel back to the RemovedSatellite
            process (presumably a multiprocessing Pipe end -- confirm);
            used to report the server port and to forward received messages.
        """
        self.connection = connection

        self.server = None          # Actual server (listening) socket
        self.selector = None        # Selector used to receive messages (unused here)
        self.SHUTDOWN_REQ = False   # set True to make run() exit its loop
        self.clients = set()
        self.inputs = []            # sockets monitored by select()
        self.addresses = set()

        self.act_timing_helper = None
        self.port = None
        # NOTE: run() blocks for the lifetime of the server, so __init__
        # does not return until shutdown.
        self.setup_server()
        self.run()

    ###################################################################################################################
    #  INITIALIZATION-BASED FUNCTIONS                                                                                 #
    ###################################################################################################################

    def setup_server(self):
        """
        Initializes server SSL connection, loading server parameters and
        authenticating connection using certificates under central_global_planner/certs.

        Directions to set up certificates: https://dst.lbl.gov/~boverhof/openssl_certs.html
        - NOTE: Use 2048, not 1024, to make certificates
        - NOTE: Use .cert instead of .pem, and .pkey instead of .key
        - NOTE: Same CA.cert must be on all running devices for authentication

        NOTE(review): the socket created here is a plain TCP socket; the SSL
        exception handlers in receive_message suggest an SSL wrap was
        intended or removed -- confirm.
        """
        bind_ip = ''  # Use host IP

        self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.server.setblocking(False)  # setblocking(0) is the legacy spelling
        self.server.bind((bind_ip, 0))  # Dynamically assign server socket
        self.server.listen()  # Currently unlimited capacity
        self.port = self.server.getsockname()[1]
        # Report the chosen port back to the parent process.
        self.connection.send(("SERVER_PORT", self.port))

    def get_name(self):
        """Return this host's name."""
        return socket.gethostname()

    def get_address(self):
        """Return this host's IP address (as resolved from its hostname)."""
        return socket.gethostbyname(self.get_name())

    ###################################################################################################################
    #  CONNECTION-BASED FUNCTIONS                                                                                     #
    ###################################################################################################################

    def run(self):
        """
        Runs the server loop, listening on the server socket.

        Continuously select()s over existing connections, registering new
        connections as they arrive and reading from known ones.  Exits when
        SHUTDOWN_REQ is set (or on an unexpected error).
        """
        print(" Server setup on: {}".format(self.port))

        try:
            self.inputs = [self.server]  # Sockets to read from
            outputs = []                 # Sockets to write to (none, in this case)

            while not self.SHUTDOWN_REQ:
                readable, writable, exceptional = select.select(self.inputs, outputs, self.inputs)

                for s in readable:
                    if s is self.server:
                        # Readable server accepting new connection
                        connection, client_address = s.accept()
                        connection.setblocking(False)
                        self.inputs.append(connection)
                    else:
                        # Known connection is sending data
                        self.receive_message(s)

                for s in exceptional:
                    # Stop listening to socket if there is an exception
                    self.inputs.remove(s)
                    s.close()

            print("Ending server")

        except Exception as e:
            print("ERROR TYPE:", type(e))
            print(e)

    def __print_live(self,str_to_print):
        """Print and flush immediately (output may be piped/buffered)."""
        print(str_to_print)
        sys.stdout.flush()

    def __dropClient(self,client_sock:socket.socket,errors=None):
        """
        Stop monitoring a client socket and close it.

        :param client_sock: the socket to drop
        :param errors: optional error detail to report
        """
        if errors:
            self.__print_live('Client %s left unexpectedly:' % (client_sock,))
            self.__print_live(errors)
        else:
            self.__print_live('Client %s left politely' % (client_sock,))
        self.inputs.remove(client_sock)
        client_sock.shutdown(socket.SHUT_RDWR)
        client_sock.close()

    def parse_msg(self, msg:bytes):
        """
        Parses the given message and determines whether it is valid.

        :param msg: Incoming serialized message bytes
        :return: (parsed message dict, or a PARSE_ERROR dict; serializer name)
        """
        serializer = 'pickle'
        retVal = {}  # the dead '{"PARSE_ERROR": "UNPARSED"}' pre-assignment was removed
        try:
            try:  # try to use pickle
                retVal = pickle.loads(msg)
            except Exception:  # if pkl doesn't work, use json
                print(" Unable to unpickle message")
                try:
                    retVal = json.loads(msg)
                    serializer = 'json'
                except Exception:
                    print( " RX WARNING: Bad serialization format; neither JSON nor PICKLE worked.", PrintVerbosity.WARNINGS )

            # check for required keys
            if 'req_type' not in retVal.keys() and 'ACK' not in retVal.keys():
                retVal = { "PARSE_ERROR" : "INV_STRUCT" , "MISSING_KEY" : "req_type or ACK" }

            elif 'payload' not in retVal.keys():
                retVal = { "PARSE_ERROR" : "INV_STRUCT" , "MISSING_KEY" : "payload" }

            elif 'id' not in retVal.keys():
                retVal = {'PARSE_ERROR' : 'INV_STRUCT', "MISSING_KEY": "id"}

            # prevent special terms from being used
            elif 'PARSE_ERROR' in retVal.keys():
                retVal = { "PARSE_ERROR" : "INV_STRUCT" , "INV_KEY" : "PARSE_ERROR" }

        except Exception:
            # Also reached when the deserialized payload is not a dict
            # (.keys() above raises) -- treated as unparseable.
            retVal = { "PARSE_ERROR" : "INV_JSON" }

        if 'PARSE_ERROR' in retVal.keys():
            print( " RX WARNING: {}".format(retVal))

        return retVal, serializer

    def receive_message(self, client_socket:socket.socket):
        """
        Receives a framed message from the given socket and, if valid,
        forwards it to the satellite over self.connection.

        Sends a reply message:
            NACK: message could not be parsed
            ACK : True if message parseable and valid, False if unknown message type

        :param client_socket: The input connection
        """
        client_socket.setblocking(True)

        START_MARKER = "size:".encode('ascii')
        try:
            chunk = client_socket.recv(DEFAULT_BUFF_SIZE)

            # Assert start of message is valid header
            if chunk.find(START_MARKER) < 0 or len(chunk) < len(START_MARKER) + 4:  # bad start
                print(" bad start chunk: ", chunk)
                self.__dropClient(client_socket)
                return

            # Determine message length based on header
            len_start = chunk.find(START_MARKER) + len(START_MARKER)
            expectedFollowupLen = decodeLen(chunk[len_start:len_start + 4])

            # Get any part of message that was sent with header, if any
            rcv_buff = chunk[len_start + 4:]
            rcv_buff = rcv_buff if len(rcv_buff) <= expectedFollowupLen else rcv_buff[:expectedFollowupLen]
            numRecv = len(rcv_buff)

            # Continue to receive chunks until we get all expected bytes.
            # BUGFIX: 'EOF' was a str compared against bytes, so it could
            # never match; compare against b'EOF'.
            while numRecv < expectedFollowupLen and len(chunk) > 0 and chunk != b'EOF':
                try:
                    chunk = client_socket.recv(DEFAULT_BUFF_SIZE)

                    # BUGFIX: keep only the remaining expected bytes.  The
                    # old slice chunk[:numRecv-expectedFollowupLen] trimmed
                    # the wrong amount when a chunk overran the frame.
                    remaining = expectedFollowupLen - numRecv
                    rcv_buff += chunk if len(chunk) <= remaining else chunk[:remaining]
                    numRecv = len(rcv_buff)
                except Exception:
                    print(" CANT RECEIVE CLIENT CHUNK")

            # Drop client if message is not required length
            if numRecv != expectedFollowupLen:
                print("Warning, expected vs actual Bytes rx'd: {} vs {}".format(expectedFollowupLen, numRecv))
                self.__dropClient(client_socket)
                return

        except (SSL.WantReadError, SSL.WantWriteError, SSL.WantX509LookupError):
            print(" SSL ERROR 1")
            return
        except SSL.ZeroReturnError:
            self.__dropClient(client_socket)
            print(" SSL ZERO RETURN ERROR")
            return
        except SSL.Error:
            self.__dropClient(client_socket)
            print(" SSL GENERIC ERROR")
            return
        except Exception as e:
            print(e)
            return

        # Parse message
        message, serializer = self.parse_msg(rcv_buff)

        if "PARSE_ERROR" in message.keys():
            # If we have enough information to send a NACK, do so
            try:
                self.sendParseResponse(False, client_socket)
            except Exception:
                print("FAILED TO SEND PARSEABLE FEEDBACK. Waiting for message again")

        else:  # Handle incoming message and send ACK
            try:
                self.sendParseResponse(True, client_socket)
            except Exception:
                print("FAILED TO SEND PARSEABLE FEEDBACK. Waiting for message again.")
                # BUGFIX: was setblocking(True), which left the socket in
                # blocking mode and could stall the select() loop.
                client_socket.setblocking(False)
                return

            possibleSimMessages = {'START','PLAN','BDT','STATES','UPDATE','NEXT_WINDOW_UPDATE',
                                   'ALL_IPS','INJECT_OBS','SAT_WINDOWS_INIT','INIT_PARAMS',
                                   'ACTS_DONE','FINISHED_PROP','READY_FOR_TIME_UPDATE','XLINK_FAILURE',
                                   'POST_RUN_REQ'}

            if 'req_type' in message:
                messageType = message['req_type']
                if messageType in possibleSimMessages:
                    # Forward the sim message to the satellite process.
                    self.connection.send(message)
                elif messageType == 'quit':
                    exit()

            elif 'ACK' in message:
                # Message is an ACK for something we transmitted earlier.
                success = message['ACK']
                _id = message['id']

                payload = message['payload']
                waitForReply = message['txWaitForReply']

                if payload:
                    # If there is a payload attached, send the payload
                    self.connection.send(("ACK", _id, payload, waitForReply))
                else:
                    # If there is no payload, send success boolean
                    self.connection.send(("ACK", _id, success, waitForReply))
            else:
                print("UNKNOWN MESSAGE TYPE")

        # Restore non-blocking mode for the select() loop.
        client_socket.setblocking(False)

    def sendParseResponse(self,parseable:bool,client:socket.socket):
        """
        Sends response to client for whether the message was parseable.

        :param parseable: True if parseable, False if not
        :param client: The connection
        """
        START_MARKER = "size:".encode('ascii')

        response_bytes = pickle.dumps({'PARSEABLE': parseable})
        numBytes = len(response_bytes)

        client.send(START_MARKER + encodeStrLen(response_bytes))

        sentTot = 0
        while sentTot < numBytes:
            sentTot += client.send(response_bytes[sentTot:])
+
+if __name__ == "__main__":
+ pass
+
diff --git a/source/central_global_planner/IP_Client.py b/source/central_global_planner/IP_Client.py
index bf63528..5939f20 100644
--- a/source/central_global_planner/IP_Client.py
+++ b/source/central_global_planner/IP_Client.py
@@ -1,4 +1,4 @@
-import logging
+import logging
import sys, os, socket
import pickle
from OpenSSL import SSL # pip install pyOpenSSL
@@ -56,10 +56,10 @@ def __setup_client(self): #, cur_path=""):
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.set_verify(SSL.VERIFY_PEER, self.verify_cb) # Demand a certificate
# print("trying to use: ",os.path.join(c_dir, 'client.pkey'))
- ctx.use_privatekey_file ( os.path.join(c_dir, 'client.pkey') )
- ctx.use_certificate_file ( os.path.join(c_dir, 'client.cert') )
+ ctx.use_privatekey_file ( os.path.join(c_dir, 'Client.pkey') )
+ ctx.use_certificate_file ( os.path.join(c_dir, 'Client.cert') )
ctx.load_verify_locations( os.path.join(c_dir, 'CA.cert') )
-
+
self.node_client = SSL.Connection(ctx, socket.socket(socket.AF_INET, socket.SOCK_STREAM) )
@@ -75,7 +75,6 @@ def transmit(self,targ_id,bytes_tx):
try:
self.__setup_client() # Setting up & tearing down seems to be necessary to use the same overall client wrapper for different server IPs; that, or maintain a connection for each
self.node_client.connect((self.ips_by_id[targ_id], self.port))
-
toSend = len(bytes_tx)
self.node_client.send(START_MARKER+encodeStrLen(bytes_tx))
diff --git a/source/central_global_planner/IP_Server.py b/source/central_global_planner/IP_Server.py
index a5780bd..3970137 100644
--- a/source/central_global_planner/IP_Server.py
+++ b/source/central_global_planner/IP_Server.py
@@ -91,8 +91,8 @@ def setup_server(self):
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.set_options(SSL.OP_NO_SSLv2)
ctx.set_verify(SSL.VERIFY_PEER|SSL.VERIFY_FAIL_IF_NO_PEER_CERT, self.__verify_cb) # Demand a certificate
- ctx.use_privatekey_file ( os.path.join(s_dir, 'server.pkey') )
- ctx.use_certificate_file ( os.path.join(s_dir, 'server.cert') )
+ ctx.use_privatekey_file ( os.path.join(s_dir, 'Server.pkey') )
+ ctx.use_certificate_file ( os.path.join(s_dir, 'Server.cert') )
ctx.load_verify_locations( os.path.join(s_dir, 'CA.cert' ) )
bind_ip = '' # '0.0.0.0' # listen to all IP's...hence why we have SSL!
diff --git a/source/central_global_planner/certs/.gitignore b/source/central_global_planner/certs/.gitignore
index 6e6bd45..12919c8 100644
--- a/source/central_global_planner/certs/.gitignore
+++ b/source/central_global_planner/certs/.gitignore
@@ -1,2 +1,4 @@
*.cert
-*.pkey
\ No newline at end of file
+*.pkey
+*.req
+*.pem
\ No newline at end of file
diff --git a/source/central_global_planner/cgp_main.py b/source/central_global_planner/cgp_main.py
index ad3b286..e48c7b9 100644
--- a/source/central_global_planner/cgp_main.py
+++ b/source/central_global_planner/cgp_main.py
@@ -34,8 +34,8 @@
exit()
try:
- sys.path.insert(0, "../circinus_orbit_link/python_runner")
- from circinus_orbit_link.python_runner import simple_link_calc # py_links_wrapper
+ sys.path.insert(0, "../circinus_orbit_link_public/python_runner")
+ from circinus_orbit_link_public.python_runner import simple_link_calc # py_links_wrapper
except ImportError:
print("Error while importing Link Calculator")
exit()
@@ -142,7 +142,7 @@ def main():
gp_wrapper = GlobalPlannerWrapper(sim_params)
- ##### Relevant Imorts #####
+ ##### Relevant Imports #####
from sprint_tools.OEnum import PrintVerbosity
from IP_Server import IP_Server
@@ -159,7 +159,7 @@ def main():
__print(exc,always=True)
server_cfg = yaml_content['server_config']
-
+ print("server config in cgp_main.py:",server_cfg)
global server
server = IP_Server(server_cfg['port'], server_cfg['log_name'], printVerbose=PrintVerbosity.ALL if global_verbose_flag else PrintVerbosity.WARNINGS) # Lazy - config['omni_port'] # was 54202
server.setName("Receiver Thread")
@@ -752,4 +752,4 @@ def TEMP_translateSatIDs(satID):
return satID
if __name__ == "__main__":
- main()
\ No newline at end of file
+ main()
diff --git a/source/circinus_global_planner b/source/circinus_global_planner
index 4f28a9c..3ce9efb 160000
--- a/source/circinus_global_planner
+++ b/source/circinus_global_planner
@@ -1 +1 @@
-Subproject commit 4f28a9c3ac94ef6beb91aa21398271629666dea2
+Subproject commit 3ce9efbce3c74deba44f0cecd2f0b145b021102b
diff --git a/source/circinus_orbit_link b/source/circinus_orbit_link
index 81c677d..eef94dd 160000
--- a/source/circinus_orbit_link
+++ b/source/circinus_orbit_link
@@ -1 +1 @@
-Subproject commit 81c677d459f7f308fb680e07f2c91293239374b8
+Subproject commit eef94dda3e66421d49123d48c40b82d2185f9211
diff --git a/source/circinus_orbit_viz b/source/circinus_orbit_viz
index d01af9c..5463a7e 160000
--- a/source/circinus_orbit_viz
+++ b/source/circinus_orbit_viz
@@ -1 +1 @@
-Subproject commit d01af9cb8468270b1b0650b4a4993fc1cd0f4523
+Subproject commit 5463a7ef9739b4682d177406df2200e575c754b4
diff --git a/source/circinus_sim b/source/circinus_sim
index 3c698f8..ead725c 160000
--- a/source/circinus_sim
+++ b/source/circinus_sim
@@ -1 +1 @@
-Subproject commit 3c698f851e81f27d96b92923437edd744272b28f
+Subproject commit ead725c7ac3270a957d6d3c3834e74a25c1d5c60
diff --git a/source/circinus_tools b/source/circinus_tools
index 8735b3b..e18d407 160000
--- a/source/circinus_tools
+++ b/source/circinus_tools
@@ -1 +1 @@
-Subproject commit 8735b3b36cea32132b67a4bac2b0e7b295e4ca09
+Subproject commit e18d40766acb26f7074aff317ea96ea54301a788
diff --git a/source/sprint_tools/Constellation_STN.py b/source/sprint_tools/Constellation_STN.py
index e14aca9..61d7c0a 100644
--- a/source/sprint_tools/Constellation_STN.py
+++ b/source/sprint_tools/Constellation_STN.py
@@ -1,24 +1,30 @@
import json
-import sys
import networkx as nx
import matplotlib.pyplot as plt
from sprint_tools.Sprint_Types import AgentType
-## Constellation_STN will track the network topologically, with embedded temporal info.
-#
-# This object is built to manage the underlying STN which describes the
-# domain over which planning and replanning takes place. It will provide
-# concise access to these details, searchability, and fracturability such that"
-# subgraphs can be appropriately spun off of it. It will provide time evolution
-# functionality.
+"""
+Constellation_STN will track the network topologically, with embedded temporal info.
+
+This object is built to manage the underlying STN which describes the
+domain over which planning and replanning takes place. It will provide
+concise access to these details, searchability, and fracturability such that
+subgraphs can be appropriately spun off of it. It will provide time evolution
+functionality.
+
+"""
+
class Constellation_STN:
+ """
+    Constellation_STN tracks the constellation network topologically,
+    with embedded temporal information.
+
+    Use the from_orbit_prop_data_file classmethod to construct from a
+    file in the orbit_prop_data.json format (the orbit propagator's
+    output), or the constructor with the already-loaded data blob.
+
+ """
- ## Constellation_STN.from_orbit_prop_data_file
- #
- # This classmethod-style constructor ingests a file of the format given by
- # orbit_prop_data.json, the output of the orbit propagator.
- # @param self The object pointer.
- # @param file_name Name of the file where the access data is generated
@classmethod
def from_orbit_prop_data_file(cls, file_name):
orbit_prop_data_FILENAME = file_name
@@ -27,14 +33,17 @@ def from_orbit_prop_data_file(cls, file_name):
return cls(opd)
- ## Constellation_STN::Constructor
#
- # Takes the datablob expected in the orbit_prop_data file. Good for when
- # the sim startup loads the file ahead of time. To be superceded by
- # a bare constructor.
- # @param self The object pointer.
- # @param orbit_prop_data Blob of orbit_prop_data.json file
+
def __init__(self, stn_params):
+ """
+ Takes the datablob expected in the orbit_prop_data file. Good for when
+    the sim startup loads the file ahead of time. To be superseded by
+ a bare constructor.
+ :param self The object pointer.
+    :param stn_params: Blob (dict) of the orbit_prop_data.json file,
+        as loaded ahead of time by the sim startup
+ """
self.graph = nx.Graph()
# All of these structures:
@@ -53,7 +62,6 @@ def __init__(self, stn_params):
# The count in this shall be a proxy for number and ID-order of
# satellites, and reused in the subsequent listing retrievals
-
# Add all elements to graph to establish types ahead of use
for sat_ID in sat_id_by_indx:
self.graph.add_node(sat_ID, **{'type':AgentType.SAT})
@@ -136,12 +144,16 @@ def __init__(self, stn_params):
o+=1
- ## Show Constellation_STN
- #
- # Provides a quick and dirty visualizer. Not suitable for large networks.
- # Only shows overall topology, not temporal features.
- # @param self The object pointer to the Constellation_STN.
+
def show(self):
+ """
+ Show Constellation_STN
+
+ Provides a quick and dirty visualizer. Not suitable for large networks.
+ Only shows overall topology, not temporal features.
+ :param self The object pointer to the Constellation_STN.
+
+ """
color_map = []
for node in self.graph:
if node['type'] == AgentType.SAT:
@@ -155,28 +167,28 @@ def show(self):
plt.show()
- ## Window Checker
- #
- # private: Abstracted as such to allow us to change how we validate the window
- # @param self internal for now, could be remove from class
- # @param edge edge under consideration (carries windows in it)
- # @param time time to check - type dictated by constructor; mjd float, based in input format
def check_linkwindow_valid(self, edge, time):
+ """
+ Window Checker
+ private: Abstracted as such to allow us to change how we validate the window
+ :param edge: edge under consideration (carries windows in it)
+ :param time: time to check - type dictated by constructor; mjd float, based in input format
+ """
for w in edge[2]['windows']: # If we guarentee the windows are in order, could cut this iteration off sooner
if time > w[0] and time < w[1]:
return True
return False
-
-
- ## Check if a link to/from ground is available.
- # # NOTE: Depricated, use Constellation_STN::get_graph_neighbors() (or at least CALL it)
- # For a particular node and time, checks if an access to any GS exists.
- # @param self The object pointer to the Constellation_STN.
- # @param node_ID The node (a sat) we want to confirm can reach a GS
- # @param time The time when we want to check if the time exists - type dictated by constructor; mjd float, based in input format
- # @return list Returns an empty list if none available
def check_groundlink_available(self, node_ID, time):
+ """
+ Check if a link to/from ground is available.
+ NOTE: Deprecated, use Constellation_STN::get_graph_neighbors() (or at least CALL it)
+
+ :param node_ID: The node (a sat) we want to confirm can reach a GS
+ :param time: The time when we want to check if the time exists - type dictated by constructor; mjd float,
+ based in input format
+ :return: Returns an empty list if none available
+ """
downlinks = [e for e in self.graph.edges(node_ID,data=True) if (self.graph.nodes[e[1]]['type'] == AgentType.GS) ] # Filter link windows over those including the node of interest, and paired with a GS
accessable_GS = []
i = 0
@@ -186,15 +198,18 @@ def check_groundlink_available(self, node_ID, time):
i+=1
return accessable_GS
-
- ## Check if a particular link between two nodes is valid
- #
- # For a particular node and time, checks if an access to any GS exists.
- # @param self The object pointer to the Constellation_STN.
- # @param node_1_ID a particular node (sat or GS or obs)
- # @param node_2_ID another particular node (sat or GS or obs)
- # @param time The time when we want to check if the time exists - type dictated by constructor; mjd float, based in input format
def check_access_available(self, node_1_ID, node_2_ID, time):
+ """
+ Check if a particular link between two nodes is valid
+
+ For a particular node and time, checks if an access to any GS exists.
+
+ :param node_1_ID: a particular node (sat or GS or obs)
+ :param node_2_ID: another particular node (sat or GS or obs)
+ :param time: The time when we want to check if the time exists - type dictated by constructor; mjd float,
+ based in input format
+ :return:
+ """
accesses = [e for e in self.graph.edges(node_1_ID,data=True) if (e[1] == node_2_ID)] # Filter link windows over those including the node of interest, and paired with a GS
assert(len(accesses) <= 1) # Looking for a particular link, should be singular at most, or graph built wrong
if(len(accesses) == 0):
@@ -202,38 +217,37 @@ def check_access_available(self, node_1_ID, node_2_ID, time):
else:
return self.check_linkwindow_valid(accesses[0], time)
+ def get_sats_with_cur_access_to(self, agent_ID, time, check_satlist=None): # TODO, add some negative version of the check that we can avoid
+ """
+ Get all the sats with a valid current window for this groundstation; can limit it to a list if provided
+    NOTE: Deprecated, use Constellation_STN::get_graph_neighbors()
- ## get_satID_list
- #
- # @param self pointer to self
- # works, but removed as unneeded and shouldn't be the authority on this list (useful for test though)
- # def satID_list(self):
- # return [n for n in self.graph.nodes if ('S' in n)]
-
-
- ## Get all the sats with a valid current window for this groundstation; can limit it to a list if provided
- # # NOTE: Depricated, use Constellation_STN::get_graph_neighbors()
- # @param self pointer to self
- # @param agent_ID ID of the groundstation or sat of interest
- # @param check_satlist list of SatID's to limit the search to
- def get_sats_with_cur_access_to(self, agent_ID, time, check_satlist=None): # TODO, add some negatgive version of the check that we can avoid
+ :param agent_ID: ID of the groundstation or sat of interest
+    :param time: time to check - type dictated by constructor; mjd float, based in input format
+ :param check_satlist: list of SatID's to limit the search to
+    :return: generator of IDs of sats with a currently-valid access window
+ """
if check_satlist is None: # just get all current accesses for this GS
accessable_sats = (e[1] for e in self.graph.edges(agent_ID,data=True) if self.check_linkwindow_valid(e, time) ) # Access for this GS that are currently valid
else:
accessable_sats = (e[1] for e in self.graph.edges(agent_ID,data=True) if (self.check_linkwindow_valid(e, time) and (e[1] in check_satlist) ) ) # But limited to the list; TODO - if this list itir blows up, optimize
-
return accessable_sats
-
- ## Get all the agents with line-of-sight access to neighbors, with optional filters
- #
- # @param self this Constellation_STN
- # @param agent_ID ID of the groundstation or sat of interest
- # @param time If time provided, only return neighbors whose corresponding access link window (edge) is active
- # @param neigh_type If type provided, only return neighbors who match that type
- # @param guest_list If list provided, only return neighbors exist on the list TODO - what use case shouldn't this filter be in the caller's flow?
- def get_graph_neighbors(self, agent_ID, time=None, neigh_type=None, guest_list=None):
-
- # TODO - when get_sats_with_cur_access_to is fully replaced by get_graph_neighbors, replace the object check_linkwindow_valid implementation with this.
+
+ def get_graph_neighbors(self, agent_ID, time=None, neigh_type=None, guest_list=None):
+ """
+ Get all the agents with line-of-sight access to neighbors, with optional filters
+
+ :param agent_ID: ID of the groundstation or sat of interest
+ :param time: If time provided, only return neighbors whose corresponding access link window (edge) is
+ active
+ :param neigh_type: If type provided, only return neighbors who match that type
+    :param guest_list: If list provided, only return neighbors that exist on the list
+ TODO - what use case shouldn't this filter be in the caller's flow?
+ :return:
+ """
+
+ # TODO - when get_sats_with_cur_access_to is fully replaced by get_graph_neighbors,
+ # replace the object check_linkwindow_valid implementation with this.
def check_access_active(edge_data, time):
for w in edge_data['windows']:
if time > w[0] and time < w[1]: