Commit 08ba81a

clean up to support other python versions!
1 parent 9e171fd commit 08ba81a

9 files changed: +18 -21 lines changed

README.md

+5-5
@@ -2,19 +2,19 @@
 
 
 ## Code Structure
-- ```topo_repo/``` contains a set of topologies (Jellyfish, Xpander, FatClique, Clos)
 - ```metric/``` code for computing throughput upper bound.
+- ```topo_repo/``` contains a set of topologies (Jellyfish, Xpander, FatClique, Clos)
 - ```utils/``` code for loading the topologies from file and generating maximal permutation traffic matrix.
 
-##Dependencies
+## Dependencies
 (1) Clone this repo<br>
 ```bash
 $ git clone https://github.com/USC-NSL/TUB.git
 ```
 
 (2) Python version<br>
-This repo is tested with;
-- Python 2.7.17 + pip 20.2.3.
+This repo is tested with the following two combinations;
+- Python 2.7.17 + pip 20.2.3 (used to generate results of the paper)
 - Python 3.7.10 + pip 21.1.1
 
 (3) Install dependencies<br>
@@ -30,5 +30,5 @@ Required dependencies:<br>
 - matplotlib
 
 
-##Example
+## Example

metric/__init__.py

Whitespace-only changes.

metric/tub.py

+1-3
@@ -2,12 +2,10 @@
 from __future__ import division
 from __future__ import print_function
 
-import networkx as nx
 from utils import near_wc_tm
-from typing import List, Dict
 
 
-def get_throughput_upper_bound(topo: nx.Graph, tor_list: List, demand_dict: Dict) -> float:
+def get_throughput_upper_bound(topo, tor_list, demand_dict):
     print("** Computing maximal permutation traffic matrix...")
     _, sum_weight_matching = near_wc_tm.get_longest_matching_traffic_matrix(topo, tor_list, demand_dict)
     print("** Computing TUB...")

requirements.txt

+1-1
@@ -1,5 +1,5 @@
 networkx
-python-igraph
+python-igraph==0.8.0
 numpy
 scipy
 matplotlib
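
A quick, illustrative check (not part of the commit) that the environment resolved the pinned release rather than a newer one:

```python
import igraph

# Expected to print 0.8.0 when installed from the pinned requirements.txt.
print(igraph.__version__)
```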

topo_repo/__init__.py

Whitespace-only changes.

utils/__init__.py

Whitespace-only changes.

utils/near_wc_tm.py

+1-2
@@ -8,10 +8,9 @@
 
 from networkx import nx
 from utils import shortest_path
-from typing import List, Dict, Tuple
 
 
-def get_longest_matching_traffic_matrix(topology: nx.Graph, tor_list: List, demand_dict: Dict) -> Tuple[Dict, int]:
+def get_longest_matching_traffic_matrix(topology, tor_list, demand_dict):
     """ Generates maximal permutation traffic matrix that results in near worst-case throughput
 
     This is a generalization to: Measuring and Understanding Throughput of Network Topologies

utils/shortest_path.py

+2-4
@@ -6,10 +6,8 @@
 import networkx as nx
 import scipy
 
-from typing import List
 
-
-def sum_all_pair_shortest_path_length_adjacency_matrix(g: nx.Graph) -> int:
+def sum_all_pair_shortest_path_length_adjacency_matrix(g):
     """ Computes the sum of shortest path length over all the pairs of nodes in g
 
     Args:
@@ -36,7 +34,7 @@ def sum_all_pair_shortest_path_length_adjacency_matrix(g: nx.Graph) -> int:
     return total_sp
 
 
-def all_pair_shortest_path_length_adjacency_matrix(g: nx.Graph, tor_list: List = None) -> np.array:
+def all_pair_shortest_path_length_adjacency_matrix(g, tor_list=None):
     """ Returns the length of the shortest path between all pairs of ToRs
 
     Args:
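
For context, the quantity these helpers compute, the sum of shortest-path lengths over all node pairs, can be cross-checked on a tiny graph with plain networkx. The sketch below is illustrative only and independent of the repo's adjacency-matrix implementation, which may count each unordered pair once instead of twice.

```python
import networkx as nx

# Illustration only: sum of shortest-path lengths over all ordered node pairs.
g = nx.cycle_graph(4)
lengths = dict(nx.all_pairs_shortest_path_length(g))
total = sum(d for src in lengths for d in lengths[src].values())
print(total)  # 16 for a 4-node cycle: each node reaches the others at distances 1, 1, 2
```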

utils/utilities.py

+8-6
@@ -3,24 +3,26 @@
 from __future__ import print_function
 
 import os
+import sys
 import pickle
 
-from topo_repo import topology
 
-
-def get_model(filename: str) -> topology.Topology:
+def get_model(filename):
     print("** Loading Topology...")
     with open(filename, "rb") as file:
-        model = pickle.load(file, encoding="latin1")
+        if sys.version_info >= (3, 0):
+            model = pickle.load(file, encoding="latin1")
+        else:
+            model = pickle.load(file)
     return model
 
 
-def delete_file(file_path: str) -> None:
+def delete_file(file_path):
     if os.path.exists(file_path):
         os.remove(file_path)
 
 
-def store_model(model, file_path: str) -> None:
+def store_model(model, file_path):
     delete_file(file_path)
     with open(file_path, "wb") as f:
         pickle.dump(model, f)
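
The new guard reflects a real API difference: Python 3's `pickle.load` accepts an `encoding` keyword (needed here, presumably because the stored topologies were pickled under Python 2 and contain byte strings), while Python 2's `pickle.load` takes no such argument. Below is a standalone sketch of the same pattern, using a hypothetical helper name rather than repo code.

```python
import pickle
import sys


def load_legacy_pickle(path):  # hypothetical helper, mirrors get_model's guard
    """Load a pickle written by Python 2 from either interpreter."""
    with open(path, "rb") as f:
        if sys.version_info >= (3, 0):
            # Python 3: decode Python 2 byte strings as latin-1.
            return pickle.load(f, encoding="latin1")
        # Python 2: pickle.load has no encoding parameter.
        return pickle.load(f)
```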
