Commit e3be8e7

formatting: Apply the .editorconfig to existing files
If you run `git blame`, `git log`, or similar and hit this commit, use `-w` to skip whitespace-only commits like this one, for example `git blame -w`.
1 parent 4f987c4 commit e3be8e7
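
A minimal sketch of that workaround; the `.git-blame-ignore-revs` file name below is a common git convention and is not something this commit adds:

    # ignore whitespace-only changes when blaming or viewing patches
    git blame -w docker/entrypoint.sh
    git log -p -w -- docker/fix-perms.sh

    # optionally, record this commit so blame always skips it
    git rev-parse e3be8e7 >> .git-blame-ignore-revs
    git config blame.ignoreRevsFile .git-blame-ignore-revs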

81 files changed: 1213 additions & 1219 deletions

docker/entrypoint.sh

Lines changed: 6 additions & 6 deletions
@@ -1,9 +1,9 @@
#!/bin/sh
if [ "$(id -u)" = "0" ]; then
-# running on a developer laptop as root
-fix-perms -r -u user -g user /home/user
-exec gosu user "$@"
+# running on a developer laptop as root
+fix-perms -r -u user -g user /home/user
+exec gosu user "$@"
else
-# running in production as a user
-exec "$@"
-fi
+# running in production as a user
+exec "$@"
+fi

docker/fix-perms.sh

Lines changed: 9 additions & 9 deletions
@@ -1,12 +1,12 @@
# update the uid
if [ -n "$opt_u" ]; then
-OLD_UID=$(getent passwd "${opt_u}" | cut -f3 -d:)
-NEW_UID=$(stat -c "%u" "$1")
-if [ "$OLD_UID" != "$NEW_UID" ]; then
-echo "Changing UID of $opt_u from $OLD_UID to $NEW_UID"
-usermod -u "$NEW_UID" -o "$opt_u"
-if [ -n "$opt_r" ]; then
-find / -xdev -user "$OLD_UID" -exec chown -h "$opt_u" {} \;
+OLD_UID=$(getent passwd "${opt_u}" | cut -f3 -d:)
+NEW_UID=$(stat -c "%u" "$1")
+if [ "$OLD_UID" != "$NEW_UID" ]; then
+echo "Changing UID of $opt_u from $OLD_UID to $NEW_UID"
+usermod -u "$NEW_UID" -o "$opt_u"
+if [ -n "$opt_r" ]; then
+find / -xdev -user "$OLD_UID" -exec chown -h "$opt_u" {} \;
+fi
fi
-fi
-fi
+fi

docs/replication/create-forks-on-github.sh

Lines changed: 37 additions & 37 deletions
@@ -10,22 +10,22 @@ DRY_RUN=n
#DRY_RUN=n

continue-with() {
-echo
-read -p "Do you want to continue with $1? [y/N] " answer
-[ "$answer" == "y" ]
+echo
+read -p "Do you want to continue with $1? [y/N] " answer
+[ "$answer" == "y" ]
}

run() {
-echo
-echo "\$ $*"
-if [ "$DRY_RUN" = "n" ]
-then
-"$@"
-fi
+echo
+echo "\$ $*"
+if [ "$DRY_RUN" = "n" ]
+then
+"$@"
+fi
}

repos() {
-find "$PATH_TO_REPOSITORIES" -mindepth 1 -maxdepth 1 -type d "$@"
+find "$PATH_TO_REPOSITORIES" -mindepth 1 -maxdepth 1 -type d "$@"
}

PATH_TO_REPOSITORIES="$(realpath "$PATH_TO_REPOSITORIES")"
@@ -36,49 +36,49 @@ continue-with "these $(repos -print0 | tr -d -c '\0' | tr '\0' '\n' | wc -l) rep

if gh auth status |& grep -q 'You are not logged into any GitHub hosts.' &>/dev/null
then
-run gh auth login || exit 1
-was_logged_in=0
+run gh auth login || exit 1
+was_logged_in=0
else
-echo
-gh auth status
+echo
+gh auth status

-continue-with "this account" ||
-{
-run gh auth logout &&
-run gh auth login || exit 1
-}
-was_logged_in=1
+continue-with "this account" ||
+{
+run gh auth logout &&
+run gh auth login || exit 1
+}
+was_logged_in=1
fi

repos -print0 |
while IFS= read -d '' -r repository
do
-echo
-run cd "$repository"
-url="$(git remote get-url origin)"
-if [[ "$url" =~ github.com ]]
-then
-echo "$repository is a github repo"
-run gh repo fork --remote || echo "already forked"
-run git push -f origin
-else
-echo "$repository is not a github repo"
-run git remote rename origin upstream &>/dev/null
-run gh repo create "DiffDetective/$(basename "$repository")" -d "Fork of $url" --push --public --source .
-fi
-echo "repo succesful"
+echo
+run cd "$repository"
+url="$(git remote get-url origin)"
+if [[ "$url" =~ github.com ]]
+then
+echo "$repository is a github repo"
+run gh repo fork --remote || echo "already forked"
+run git push -f origin
+else
+echo "$repository is not a github repo"
+run git remote rename origin upstream &>/dev/null
+run gh repo create "DiffDetective/$(basename "$repository")" -d "Fork of $url" --push --public --source .
+fi
+echo "repo succesful"
done

if [ "$was_logged_in" = "1" ]
then
-cat <<EOF
+cat <<EOF

Warning: 'gh' is still logged in, to log out use

gh auth logout

EOF
else
-echo
-run gh auth logout
+echo
+run gh auth logout
fi

linegraph/createGraph.py

Lines changed: 13 additions & 13 deletions
@@ -12,7 +12,7 @@
# Add arguments, such as
# --patterns_path path to the patterns file (default: ../lattice/patterns.lg)
# --lattice_path path to the lattice file (default: ../lattice/lattice.lg)
-# --node_parser (default|patternsdebug|patternsrelease)
+# --node_parser (default|patternsdebug|patternsrelease)
# how the node labels in the patterns file should be interpreted
####################################################################

@@ -81,26 +81,26 @@ def patterns(patterns_file_path):
for line in patternLines:
line = line.replace("\n", "")
line = line.replace("\r", "")
-
+
if line.startswith("t"):
# save previous read tree
if len(nodes):
trees.update({tree : (nodes.copy(), edges.copy())})
drawCluster(tree, nodes, edges)
-
+
# read new tree (and clear all nodes and edges)
nodes.clear()
edges.clear()
# t # TREE_ID
lineParams = line.split(" ")
tree = lineParams[2]
-
+
# read in node
elif line.startswith("v"):
# v ID LABEL
lineParams = line.split(" ")
nodes.append((lineParams[1], ' '.join(lineParams[2:])))
-
+
# read in edge
elif line.startswith("e"):
# e NODE_CHILD_ID NODE_PARENT_ID LABEL
@@ -123,17 +123,17 @@ def lattice(lattice_file_path):
for line in latticeLines:
line = line.replace("\n", "")
line = line.replace("\r", "")
-
+
if line.startswith("t"):
# t # TREE_ID
pass
-
+
elif line.startswith("v"):
# v ID LABEL
lineParams = line.split(" ")
latticeNodes.update({lineParams[1]: lineParams[2]})

-
+
elif line.startswith("e"):
# e NODE_CHILD_ID NODE_PARENT_ID LABEL
lineParams = line.split(" ")
@@ -152,10 +152,10 @@ def main():
argparser.add_argument('--lattice_path', nargs='?', default="../lattice/lattice.lg", type=str)
argparser.add_argument('--node_parser', nargs='?', default="patternsrelease", type=str)
args = argparser.parse_args()
-
+
patterns_path = args.patterns_path
lattice_path = args.lattice_path
-
+
# select the node parser
global NODE_PARSER # accessing the gloabl variable NODE_PARSER
if args.node_parser == "default":
@@ -170,12 +170,12 @@
#d.attr(rankdir='LR')
d.attr(overlap='false')
#d.attr(compound='true')
-
+
d.attr(sep = "+10")
-
+
patterns(patterns_path)
lattice(lattice_path)
-
+
d.view()

linegraph/graphGeneration.py

Lines changed: 1 addition & 1 deletion
@@ -237,4 +237,4 @@ def edgeColour(edge):
elif edge.startswith("b"):
return EDGE_REM_COLOR
else:
-raise Exception("Cannot parse edge label " + edge)
+raise Exception("Cannot parse edge label " + edge)

linegraph/lattice/.gitignore

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-*.gv
+*.gv

linegraph/renderMultilinemacros.sh

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
# Run in project root
-.venv/bin/python3 linegraph/renderLinegraph.py "src/test/resources/multilinemacros/gen"
+.venv/bin/python3 linegraph/renderLinegraph.py "src/test/resources/multilinemacros/gen"

linegraph/renderPatterns.sh

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
#!/usr/bin/env bash
-python3 renderLinegraph.py --pattern --atomics --fontsize 7 "/mnt/c/Users/Paul Bittner/Documents/MyDocuments/Paper/Notes/Papers/VariabilityEditPatternMining/Results/Patterns/$1"
+python3 renderLinegraph.py --pattern --atomics --fontsize 7 "/mnt/c/Users/Paul Bittner/Documents/MyDocuments/Paper/Notes/Papers/VariabilityEditPatternMining/Results/Patterns/$1"

mining/bisect_threshold_search.py

Lines changed: 25 additions & 25 deletions
@@ -15,46 +15,46 @@ def bisect_threshold(lib_path, data_path, target_count=500):
The input directory should contain the graph database as .lg file, the number of graphs in the file as .count file.
It is assumed, that the count files are enumerated the same way as the graph databases, so that a lexicographic sorting gives matching files.'''
target_count = int(target_count)
-
+
# First check, that we have as many count-files as graph files.
db_files = sorted([file_name for file_name in os.listdir(data_path) if file_name.endswith('.aids')])
count_files = sorted([file_name for file_name in os.listdir(data_path) if file_name.endswith('.count')])
assert len(db_files) == len(count_files)
-
-
+
+
for idx, in_file in enumerate(db_files):
match_filename = re.match(regex_count_file, in_file)
-
+
if not match_filename:
assert False, "Filename for db_size not formatted as expected."
-
-
+
+
database_id = int(match_filename.group(1))
-
-
+
+
t_max = get_db_size(data_path + count_files[idx])
t_min = 2
-
+
# Bisection to find a heuristically good threshold
while t_max - t_min > 1:
t = (t_max + t_min) // 2
found_patterns = run_approximate(lib_path, data_path + in_file, data_path + 'frequent_temp.cstring', t)
-
+
if found_patterns >= target_count:
-t_min = t
+t_min = t
else:
t_max = t
-
+

# Write found threshold to file
with open(data_path + str(database_id) + '.threshold', 'w') as f:
-f.write(str(t))
-
-
+f.write(str(t))
+
+
def get_db_size(file_name):
with open(file_name) as f:
return int(f.read())
-
+
def count_subgraphs(subgraph_file):
''' Assume a AIDS format. In this format, every graph is represented by 3 lines.'''
i = None
@@ -65,34 +65,34 @@ def count_subgraphs(subgraph_file):
return ceil((i + 1) / 3)
else:
return 0
-
+
def run_approximate(lib_path, input_file, output_file, threshold):
''' We fix the maximum length to l, i.e., we are mining patterns at most l nodes large. '''
-
+
# Run HOPS approximate subgraph miner
lwg_cmd_template = "'{lib_path}lwg' -t {threshold} -p 8 -e hops -i 5 '{input_file}' -o '{output_file}'"
miner_cmd = lwg_cmd_template.format(lib_path=lib_path, input_file=input_file, output_file=output_file, threshold=threshold)
-
+
p = subprocess.Popen(miner_cmd, shell=True)
-
+
try:
p.wait(30) # Should take at most 30 seconds
except Exception as e:
print(str(e))
-p.kill()
-
+p.kill()
+
# transform output file (so that we can easier count the number of patterns later
cstring_cmd = "cat " + output_file + " | xargs -I {} bash -c \"echo '{}' | '" + lib_path + "cstring' -i -\" > " + output_file + ".tmp"
-
+
subprocess.run(cstring_cmd, shell = True)

#os.remove(output_file)
nb_subgraphs = count_subgraphs(output_file+".tmp")
#os.remove(output_file+".tmp")
return nb_subgraphs
-
+
if __name__ == "__main__":
if len(sys.argv) == 4:
-bisect_threshold(sys.argv[1], sys.argv[2], target_count = sys.argv[3])
+bisect_threshold(sys.argv[1], sys.argv[2], target_count = sys.argv[3])
else:
print("Unexpected number of arguments. Run as python bisect_threshold_search.py [lib_path] [data_path] [target_count]. lwg and cstring tool binaries need to be located in the lib_path. Data directiory is expected to contain .lg line graph databases and .count files with the number of graphs in the corresponding database.")
