Commit 0afe0f2

final stage on parameter optimization
1 parent b2e1f69 commit 0afe0f2

File tree

8 files changed (+46, -85 lines)


config/fake_reconstruction_configs/branch_metric.json

Lines changed: 0 additions & 4 deletions
This file was deleted.

config/fake_reconstruction_configs/test.json

Lines changed: 0 additions & 20 deletions
This file was deleted.

config/fake_reconstruction_configs/test_bak.json

Lines changed: 0 additions & 20 deletions
This file was deleted.

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+This folder is used to place the temporary reconstruct configs.
+Dont' delete this folder.

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+This folder is used to place the temporary reconstruct results.
+Dont' delete this folder.
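
The two placeholder files above keep the temporary reconstruction-config and reconstruction-result folders present in the repository. On a fresh checkout a caller could also recreate them before running the optimizer; a minimal sketch, where the two paths mirror CONFIG_PATH and the "tmp" result prefix used in the code below and are assumptions about the local layout:

import os

# Assumed local layout; adjust to wherever the repository is checked out.
CONFIG_PATH = "../../../config/fake_reconstruction_configs/"
TMP_RESULT_PATH = "../../../data/optimation/output/tmp/"

for folder in (CONFIG_PATH, TMP_RESULT_PATH):
    # exist_ok avoids failing when the tracked placeholder folders already exist.
    os.makedirs(folder, exist_ok=True)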

pyneval/tools/optimize/SA.py

Lines changed: 9 additions & 13 deletions
@@ -3,13 +3,13 @@
 # @Time : 2019/8/17
 # @Author : github.com/guofei9987
 import copy
-
+import os
 import numpy as np
+import multiprocessing as mp
 from sko.base import SkoBase
 from sko.operators import mutation
-import multiprocessing as mp

-CPU_CORE_NUM = 12
+CPU_CORE_NUM = 15


 class SimulatedAnnealingBase(SkoBase):
@@ -53,7 +53,7 @@ def __init__(self, func, x0, T_max=100, T_min=1e-7, L=300, max_stay_counter=150,
         self.n_dims = len(x0)

         self.best_x = np.array(x0)  # initial solution
-        self.best_y = self.func(self.best_x, "test_init.swc")[1]
+        self.best_y = self.func(self.best_x, "test_init")[1]
         self.T = self.T_max
         self.iter_cycle = 0
         self.generation_best_X, self.generation_best_Y = [self.best_x], [self.best_y]
@@ -88,7 +88,7 @@ def run(self):
                     x_new[k] = max(x_new[k], 0)
                     x_new[k] = min(x_new[k], 1)
                 res_y.append(
-                    pool.apply_async(self.func, args=tuple([x_new, "test256_{}".format(j), lock]))
+                    pool.apply_async(self.func, args=tuple([x_new, os.path.join("tmp", "tmp_res_{}".format(j)), lock]))
                 )
                 res_x.append(x_new)

@@ -98,8 +98,6 @@
             for it in range(len(res_x)):
                 i += 1
                 x_new, y_new = res_y[it].get()
-                print(x_new)
-                print(y_new)
                 # Metropolis
                 df = y_new - y_current
                 if df < 0 or np.exp(-df / self.T) > np.random.rand():
@@ -110,17 +108,15 @@
                         self.best_x = copy.deepcopy(x_new)
                         self.best_y = y_new
                     break
-                print("[Info: ] best x = {}".format(self.best_x))
-                print("[Info: ] best y = {}".format(self.best_y))
+            print("[Info: ] best x = {}".format(self.best_x))
+            print("[Info: ] best y = {}".format(self.best_y))

             print("[Info: ] iter_cycle = {} T = {} stay_counter = {}".format(
                 self.iter_cycle, self.T, stay_counter
             ))
             print("[Info: ]origin minimalScoreAuto = {}\n"
-                  "        minimalScoreManual = {}\n"
-                  "        minimalScoreSeed = {}\n"
-                  "        minimalScore2d = {}".format(
-                self.best_x[0], self.best_x[1], self.best_x[2], self.best_x[3]
+                  "        minimalScoreSeed = {}".format(
+                self.best_x[0], self.best_x[1]
             ))
             self.iter_cycle += 1
             self.cool_down()
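
For context, the run() changes above evaluate one candidate per worker process and then apply the Metropolis rule to each returned score, tagging every evaluation with its own temporary result name under tmp/. Below is a self-contained sketch of that pattern with a toy objective; the objective, step size, and fixed temperature are illustrative stand-ins, not the committed SA.py.

import multiprocessing as mp
import os

import numpy as np

CPU_CORE_NUM = 4  # the commit raises the real value from 12 to 15


def toy_objective(x, result_name):
    # Stand-in for SA_optimize: return (parameters, score to minimize).
    score = float(np.sum((np.asarray(x) - 0.25) ** 2))
    return x, score


if __name__ == "__main__":
    rng = np.random.default_rng(0)
    x_current = np.array([0.3, 0.35])
    y_current = toy_objective(x_current, "test_init")[1]
    T = 0.01  # fixed temperature for the sketch; the real class cools down

    with mp.Pool(processes=CPU_CORE_NUM) as pool:
        # One perturbed candidate per worker, clipped to [0, 1] like the real code.
        candidates = [np.clip(x_current + rng.normal(0, 0.05, size=2), 0, 1)
                      for _ in range(CPU_CORE_NUM)]
        async_results = [
            pool.apply_async(toy_objective,
                             args=(x, os.path.join("tmp", "tmp_res_{}".format(j))))
            for j, x in enumerate(candidates)
        ]
        for res in async_results:
            x_new, y_new = res.get()
            df = y_new - y_current
            # Metropolis: always accept improvements, sometimes accept worse moves.
            if df < 0 or np.exp(-df / T) > rng.random():
                x_current, y_current = x_new, y_new

    print("accepted x = {}, y = {}".format(x_current, y_current))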

pyneval/tools/optimize/optimize.py

Lines changed: 31 additions & 26 deletions
@@ -7,10 +7,10 @@
 from scipy import stats as st

 import copy
+import time
 import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd
-import time

 g_score = -1
 g_gold_tree = None
@@ -20,31 +20,36 @@
 g_metric_method = None
 g_metric_configs = None

-NEUTU_PATH = "../../../../../00_program_file/00_neutu/bin/neutu"
-ORIGIN_PATH = "../../../data/optimation/test1/test1_test.tif"
-GOLD_PATH = "../../../data/optimation/test1/test1_gold.swc"
-TEST_PATH = "../../../data/optimation/output/"
-CONFIG_PATH = "../../../config/fake_reconstruction_configs/"
+# Gold file and Test tif need to placed in ../../../data/optimation
+# and named after FILE_ID_test.tif, FILE_ID_gold.swc
+FILE_ID = "test1"
+NEUTU_PATH = "neutu"
+ORIGIN_PATH = "../../../data/optimation/{}/{}_test.tif".format(FILE_ID, FILE_ID)
+GOLD_PATH = "../../../data/optimation/{}/{}_gold.swc".format(FILE_ID, FILE_ID)
 METRIC_CONFIG_PATH = "../../../config/ssd_metric.json"
 LOG_PATH = "../../../output/optimization/neutu_log.txt"
+# specific test name is given in SA.py
+TEST_PATH = "../../../data/optimation/output/"
+CONFIG_PATH = "../../../config/fake_reconstruction_configs/"
+


 def SA_optimize(configs=None, test_name=None, lock=None):
     global g_metric_method
     global g_metric_configs
     global g_rcn_config
-
-    LOC_CONFIG_PATH = os.path.join(CONFIG_PATH, test_name+".json")
-    LOC_TEST_PATH = os.path.join(TEST_PATH, test_name+".swc")
+    # identify specific TEST output path and CONFIG input PATH
+    LOC_TEST_PATH = os.path.join(TEST_PATH, test_name+"_test.swc")
     rec_config = copy.deepcopy(g_rcn_config)

     if configs is not None:
+        LOC_CONFIG_PATH = os.path.join(CONFIG_PATH, test_name+".json")
         rec_config["trace"]["default"]["minimalScoreAuto"] = configs[0]
-        rec_config["trace"]["default"]["minimalScoreManual"] = configs[1]
-        rec_config["trace"]["default"]["minimalScoreSeed"] = configs[2]
-        rec_config["trace"]["default"]["minimalScore2d"] = configs[3]
+        rec_config["trace"]["default"]["minimalScoreSeed"] = configs[1]

         read_json.save_json(LOC_CONFIG_PATH, rec_config)
+    else:
+        LOC_CONFIG_PATH = os.path.join(CONFIG_PATH, "best_x_{}.json".format(test_name))

     REC_CMD = "{} --command --trace {} -o {} --config {} > {}".format(
         NEUTU_PATH, ORIGIN_PATH, LOC_TEST_PATH, LOC_CONFIG_PATH, LOG_PATH
@@ -56,7 +61,7 @@ def SA_optimize(configs=None, test_name=None, lock=None):

     res_tree = swc_node.SwcTree()
     gold_tree = swc_node.SwcTree()
-    res_tree.load(os.path.join(TEST_PATH, test_name+".swc"))
+    res_tree.load(LOC_TEST_PATH)
     gold_tree.load(GOLD_PATH)

     if lock is not None:
@@ -78,29 +83,29 @@ def main():
     global g_rcn_config
     g_metric_method = ssd_metric.ssd_metric
     g_metric_configs = read_json.read_json(METRIC_CONFIG_PATH)
-    g_rcn_config = read_json.read_json(os.path.join(CONFIG_PATH, "test.json"))
+    g_rcn_config = read_json.read_json(os.path.join(CONFIG_PATH, "default.json"))

     # optimize with SA
     # configs here is the config of the reconstruction
-    configs = (0.3, 0.3, 0.35, 0.5)
+    configs = (0.3, 0.35)
     start = time.time()
     sa_fast = SAFast(func=SA_optimize,
-                     x0=configs, T_max=0.01, T_min=1e-5, q=0.96, L=20, max_stay_counter=50, upper=1, lower=0)
+                     x0=configs, T_max=0.01, T_min=1e-5, q=0.96, L=25, max_stay_counter=15, upper=1, lower=0)
     best_configs, best_value = sa_fast.run()
     print("[Info: ]best configs:\n"
           "        origin minimalScoreAuto = {}\n"
-          "        minimalScoreManual = {}\n"
           "        minimalScoreSeed = {}\n"
-          "        minimalScore2d = {}\n"
           "        best value = {}\n"
           "        time = {}\n" .format(
-        best_configs[0], best_configs[1], best_configs[2], best_configs[3], best_value, time.time() - start
+        best_configs[0], best_configs[1], best_value, time.time() - start
     ))
+    # save best json file
     g_rcn_config["trace"]["default"]["minimalScoreAuto"] = best_configs[0]
-    g_rcn_config["trace"]["default"]["minimalScoreManual"] = best_configs[1]
-    g_rcn_config["trace"]["default"]["minimalScoreSeed"] = best_configs[2]
-    g_rcn_config["trace"]["default"]["minimalScore2d"] = best_configs[3]
-    read_json.save_json(os.path.join(CONFIG_PATH, "best_x_{}.json".format(time.time())), g_rcn_config)
+    g_rcn_config["trace"]["default"]["minimalScoreSeed"] = best_configs[1]
+    read_json.save_json(os.path.join(CONFIG_PATH, "best_x_{}.json".format(FILE_ID)), g_rcn_config)
+    # get and save best reconstruct swc
+    print("[Info: ]Exam test score with best configs: ")
+    cfg, score = SA_optimize(test_name = FILE_ID)
     # plot the result.
     plt.plot(pd.DataFrame(sa_fast.best_y_history).cummin(axis=0))
     plt.xlabel("iterations")
@@ -114,6 +119,6 @@ def main():

     # g_metric_method = ssd_metric.ssd_metric
     # g_metric_configs = read_json.read_json(METRIC_CONFIG_PATH)
-    # g_rcn_config = read_json.read_json(os.path.join(CONFIG_PATH, "test3best.json"))
-    #
-    # SA_optimize(test_name="test3best")
+    # g_rcn_config = read_json.read_json(os.path.join(CONFIG_PATH, "6656_2304_22016.json"))
+
+    # SA_optimize(test_name="6656_2816_22016")
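
SAFast only relies on the contract func(configs, test_name, lock) -> (configs, score), so the loop can be dry-run without neutu or image data by swapping in a cheap stand-in for SA_optimize. A sketch under the assumption that SAFast is importable from pyneval.tools.optimize.SA and keeps the constructor shown in main(); the quadratic score is purely illustrative.

import numpy as np

from pyneval.tools.optimize.SA import SAFast  # assumed import path


def fake_reconstruction_score(configs=None, test_name=None, lock=None):
    # Same contract as SA_optimize: return (configs, score to minimize).
    score = float(np.sum((np.asarray(configs) - np.array([0.2, 0.4])) ** 2))
    return configs, score


if __name__ == "__main__":
    sa_fast = SAFast(func=fake_reconstruction_score,
                     x0=(0.3, 0.35), T_max=0.01, T_min=1e-5, q=0.96,
                     L=25, max_stay_counter=15, upper=1, lower=0)
    best_configs, best_value = sa_fast.run()
    print(best_configs, best_value)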

setup.py

Lines changed: 2 additions & 2 deletions
@@ -9,10 +9,10 @@
     url='https://github.com/bennieHan/PyNeval.git',
     packages=['pyneval', 'pyneval.cli', 'test', 'test.test_model',
               'test.test_model.diadem_metric', 'test.test_model.length_metric',
-              'pyneval.io', 'pyneval.metric', 'pyneval.metric.utils', 'pyneval.metric.utils.klib', 'pyneval.model', 'pyneval.tools'],
+              'pyneval.io', 'pyneval.metric', 'pyneval.metric.utils', 'pyneval.metric.utils.klib', 'pyneval.model', 'pyneval.tools', 'pyneval.tools.optimize'],
     py_modules=['pyneval', 'pyneval.cli', 'test', 'test.test_model',
                 'test.test_model.diadem_metric', 'test.test_model.length_metric',
-                'pyneval.io', 'pyneval.metric', 'pyneval.metric.utils', 'pyneval.metric.utils.klib', 'pyneval.model', 'pyneval.tools'],
+                'pyneval.io', 'pyneval.metric', 'pyneval.metric.utils', 'pyneval.metric.utils.klib', 'pyneval.model', 'pyneval.tools', 'pyneval.tools.optimize'],
     data_files=[('config', glob.glob('config/*.json')),
                 ("config/schemas", glob.glob('config/schemas/*.json'))],
     install_requires=[
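
With pyneval.tools.optimize added to packages and py_modules, the optimizer ships with the rest of PyNeval. A quick post-install sanity check, assuming the package was reinstalled in the usual way (for example pip install .):

import importlib

# Raises ModuleNotFoundError if the new subpackage was not packaged/installed.
module = importlib.import_module("pyneval.tools.optimize")
print(module)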
