-
Notifications
You must be signed in to change notification settings - Fork 117
Expand file tree
/
Copy pathtest_save_results.py
More file actions
116 lines (90 loc) · 4.16 KB
/
test_save_results.py
File metadata and controls
116 lines (90 loc) · 4.16 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
import unittest
from unittest.mock import patch, Mock
from copy import deepcopy
from datetime import datetime
import re
from openmdao.utils.testing_utils import require_pyoptsparse, use_tempdirs
import aviary.api as av
from aviary.interface.methods_for_level2 import reload_aviary_problem
from aviary.models.missions.height_energy_default import phase_info, phase_info_parameterization
from aviary.utils.functions import get_path
@use_tempdirs
class TestSizingResults(unittest.TestCase):
    """
    These tests just check that the json files for the sizing mission results can be saved or loaded
    and used to run an off-design problem without error. These tests don't check that the off-design
    mission ran correctly.
    """

    @require_pyoptsparse(optimizer='SLSQP')
    def test_save_json(self):
        """Build and run a sizing problem, save results, and compare the saved json
        against the stored validation file."""
        local_phase_info = deepcopy(phase_info)
        prob = av.AviaryProblem()

        # Load aircraft and options data from user
        # Allow for user overrides here
        prob.load_inputs(
            'models/aircraft/test_aircraft/aircraft_for_bench_FwFm.csv', local_phase_info
        )

        # Preprocess inputs
        prob.check_and_preprocess_inputs()

        prob.add_pre_mission_systems()
        prob.add_phases(phase_info_parameterization=phase_info_parameterization)
        prob.add_post_mission_systems()

        # Link phases and variables
        prob.link_phases()

        # max_iter=0: we only need a results file written, not a converged optimum
        prob.add_driver('SLSQP', max_iter=0)
        prob.add_design_variables()

        # Load optimization problem formulation
        # Detail which variables the optimizer can control
        prob.add_objective()
        prob.setup()
        prob.set_initial_guesses()

        prob.run_aviary_problem()
        prob.save_results()

        self.compare_files(
            'sizing_results.json',
            'interface/test/sizing_results_for_test.json',
        )

    @require_pyoptsparse(optimizer='IPOPT')
    def test_alternate(self):
        """Reload a saved sizing result and run an 'alternate' off-design mission
        without error."""
        local_phase_info = deepcopy(phase_info)
        prob = reload_aviary_problem('interface/test/sizing_results_for_test.json')
        prob.run_off_design_mission(problem_type='alternate', phase_info=local_phase_info)

    @require_pyoptsparse(optimizer='IPOPT')
    def test_fallout(self):
        """Reload a saved sizing result and run a 'fallout' off-design mission
        without error."""
        local_phase_info = deepcopy(phase_info)
        prob = reload_aviary_problem('interface/test/sizing_results_for_test.json')
        prob.run_off_design_mission(problem_type='fallout', phase_info=local_phase_info)

    @require_pyoptsparse(optimizer='IPOPT')
    @patch('aviary.interface.methods_for_level2.datetime')
    def test_output_naming(self, mock_datetime):
        """Check that the off-design problem name embeds the timestamp
        (%m%d%y%H%M%S) of the run."""
        # Freeze 'now' so the generated name is deterministic:
        # datetime(2026, 2, 5, 12, 1, 1) -> '020526120101'
        fixed_now = datetime(2026, 2, 5, 12, 1, 1)
        mock_datetime.now.return_value = fixed_now

        local_phase_info = deepcopy(phase_info)
        prob = reload_aviary_problem('interface/test/sizing_results_for_test.json')
        prob = prob.run_off_design_mission(problem_type='fallout', phase_info=local_phase_info)
        self.assertTrue('fallout_020526120101' in prob._name)

    def compare_files(self, test_file, validation_file):
        """
        Compares the specified file with a validation file, line by line, ignoring
        all whitespace.

        Each whitespace-stripped validation line must appear exactly once as a
        substring of the corresponding whitespace-stripped test line.

        Raises
        ------
        Exception
            If any line pair fails the comparison, with a message naming the file
            and both lines.
        """
        test_file = get_path(test_file)
        validation_file = get_path(validation_file)

        # Open the converted and validation files
        with open(test_file, 'r') as f_in, open(validation_file, 'r') as expected:
            for line in f_in:
                # Remove whitespace and compare
                expected_line = ''.join(expected.readline().split())
                line_no_whitespace = ''.join(line.split())

                # Assert that the expected content occurs exactly once in the line
                try:
                    self.assertEqual(line_no_whitespace.count(expected_line), 1)
                except AssertionError:
                    # Re-raise with a readable message; 'from None' drops the
                    # noisy chained assertion traceback.
                    exc_string = (
                        f'Error: {test_file}\nFound: {line_no_whitespace}\n'
                        f'Expected: {expected_line}'
                    )
                    raise Exception(exc_string) from None
# Script entry point: run the tests in this module. The previously
# commented-out manual invocations (dead code) have been removed.
if __name__ == '__main__':
    unittest.main()