Commit 5a8e061

Merge pull request #98 from SCIInstitute/weights_assignment_fix
Weight assignment fix
2 parents 58b6d43 + 05be2e1 commit 5a8e061

2 files changed: +48, -162 lines

UncertainSCI/pce.py

Lines changed: 28 additions & 4 deletions
@@ -143,6 +143,29 @@ def set_samples(self, samples):
                              'have wrong dimension')
 
         self.samples = samples
+        self.set_weights()
+
+    def set_weights(self):
+        """Sets weights based on assigned samples.
+        """
+        if self.samples is None:
+            raise RuntimeError("PCE weights cannot be set unless samples are set first.")
+
+        if self.sampling.lower() == 'greedy-induced':
+            self.weights = self.christoffel_weights()
+        elif self.sampling.lower() == 'gq':
+            M = self.sampling_options.get('M')
+            if M is None:
+                raise ValueError("The sampling option 'M' must be specified for Gauss quadrature sampling.")
+
+            _, self.weights = self.distribution.polys.tensor_gauss_quadrature(M)
+
+        elif self.sampling.lower() == 'gq-induced':
+            self.weights = self.christoffel_weights()
+
+        else:
+            raise ValueError("Unsupported sample type '{0}' for input\
+                    sample_type".format(self.sampling))
 
     def map_to_standard_space(self, q):
         """Maps parameter values from model space to standard space.
@@ -211,8 +234,6 @@ def generate_samples(self, **kwargs):
 
             self.samples = self.map_to_model_space(x)
 
-            self.weights = self.christoffel_weights()
-
         elif self.sampling.lower() == 'gq':
 
             M = self.sampling_options.get('M')
@@ -221,7 +242,9 @@ def generate_samples(self, **kwargs):
 
             p_standard, w = self.distribution.polys.tensor_gauss_quadrature(M)
             self.samples = self.map_to_model_space(p_standard)
-            self.weights = w
+            # We do the following in the call to self.set_weights() below. A
+            # little more expensive, but makes for more transparent control structure.
+            # self.weights = w
 
         elif self.sampling.lower() == 'gq-induced':
 
@@ -232,12 +255,13 @@ def generate_samples(self, **kwargs):
             p_standard = self.distribution.opolys.idist_gq_sampling(K, self.indices, M=self.sampling_options.get('M'))
 
             self.samples = self.map_to_model_space(p_standard)
-            self.weights = self.christoffel_weights()
 
         else:
             raise ValueError("Unsupported sample type '{0}' for input\
                     sample_type".format(self.sampling))
 
+        self.set_weights()
+
     def integration_weights(self):
         """
         Generates sample weights associated to integration.
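
In effect, both generate_samples() and set_samples() now funnel weight assignment through the new set_weights() method, so a PCE object's weights always correspond to whatever samples it currently holds. A minimal sketch of what this means from user code (not part of the commit; the isotropic normal distribution and order are illustrative only, and it assumes the default sampling configuration used in the demo below):

import numpy as np

from UncertainSCI.distributions import NormalDistribution
from UncertainSCI.pce import PolynomialChaosExpansion

dim = 2
dist = NormalDistribution(mean=np.zeros(dim), cov=np.eye(dim), dim=dim)  # illustrative distribution

# generate_samples() assigns self.samples and then calls set_weights() internally.
pce = PolynomialChaosExpansion(distribution=dist, order=3)
pce.generate_samples()

# Assigning the same samples to a second PCE now also triggers set_weights(),
# rather than leaving self.weights unset.
pce2 = PolynomialChaosExpansion(distribution=dist, order=3)
pce2.set_samples(pce.samples)
assert pce2.weights is not None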

demos/build_pce_normal.py

Lines changed: 20 additions & 158 deletions
@@ -5,193 +5,55 @@
 
 from UncertainSCI.distributions import NormalDistribution
 from UncertainSCI.model_examples import sine_modulation
-from UncertainSCI.indexing import TotalDegreeSet
 from UncertainSCI.pce import PolynomialChaosExpansion
-# # Distribution setup
 
+from UncertainSCI.vis import piechart_sensitivity, quantile_plot, mean_stdev_plot
+
+## Distribution setup: generating a correlated multivariate normal distribution
 # Number of parameters
 dimension = 2
 
-# Specifies normal distribution
+# Specifies statistics of normal distribution
 mean = np.ones(dimension)
 cov = np.random.randn(dimension, dimension)
 cov = np.matmul(cov.T, cov)/(4*dimension)  # Some random covariance matrix
 
 # Normalize so that parameters with larger index have smaller importance
 D = np.diag(1/np.sqrt(np.diag(cov)) * 1/(np.arange(1, dimension+1)**2))
 cov = D @ cov @ D
-
 dist = NormalDistribution(mean=mean, cov=cov, dim=dimension)
 
-# # Indices setup
-order = 10
-index_set = TotalDegreeSet(dim=dimension, order=order)
-
-print('This will query the model {0:d} times'.format(index_set.get_indices().shape[0] + 10))
-# Why +10? That's the default for PolynomialChaosExpansion.build_pce_wafp
-
-# # Initializes a pce object
-pce = PolynomialChaosExpansion(index_set, dist)
-
-# # Define model
+## Define forward model
 N = int(1e2)  # Number of degrees of freedom of model
 left = -1.
 right = 1.
 x = np.linspace(left, right, N)
 model = sine_modulation(N=N)
 
-# # Three equivalent ways to run the PCE model:
+## Polynomial order
+order = 5
+
+## Initializes a pce object
+pce = PolynomialChaosExpansion(distribution=dist, order=order, plabels=['p1', 'p2'])
+pce.generate_samples()
 
-# 1
-# Generate samples and query model in one call:
-pce = PolynomialChaosExpansion(index_set, dist)
-lsq_residuals = pce.build(model, oversampling=10)
+print('This queries the model {0:d} times'.format(pce.samples.shape[0]))
 
+# Query model:
+pce.build(model)
 
 # The parameter samples and model evaluations are accessible:
 parameter_samples = pce.samples
 model_evaluations = pce.model_output
 
-# And you could build a second PCE on the same parameter samples
-pce2 = PolynomialChaosExpansion(index_set, dist)
+# And if desired you could build a second PCE on the same parameter samples
+pce2 = PolynomialChaosExpansion(distribution=dist, order=order)
 pce2.set_samples(parameter_samples)
 pce2.build(model)
 
-# pce and pce2 have the same coefficients:
-# np.linalg.norm( pce.coefficients - pce2.coefficients )
-
-# # Postprocess PCE: mean, stdev, sensitivities, quantiles
-mean = pce.mean()
-stdev = pce.stdev()
-
-
-# Power set of [0, 1, ..., dimension-1]
-variable_interactions = list(chain.from_iterable(combinations(range(dimension), r) for r in range(1, dimension+1)))
-
-# "Total sensitivity" is a non-partitive relative sensitivity measure per parameter.
-total_sensitivity = pce.total_sensitivity()
-
-# "Global sensitivity" is a partitive relative sensitivity measure per set of parameters.
-global_sensitivity = pce.global_sensitivity(variable_interactions)
-
-
-
-Q = 4  # Number of quantile bands to plot
-dq = 0.5/(Q+1)
-q_lower = np.arange(dq, 0.5-1e-7, dq)[::-1]
-q_upper = np.arange(0.5 + dq, 1.0-1e-7, dq)
-quantile_levels = np.append(np.concatenate((q_lower, q_upper)), 0.5)
-
-quantiles = pce.quantile(quantile_levels, M=int(2e3))
-median = quantiles[-1, :]
-
-# # For comparison: Monte Carlo statistics
-M = 1000  # Generate MC samples
-p_phys = dist.MC_samples(M)
-output = np.zeros([M, N])
-
-for j in range(M):
-    output[j, :] = model(p_phys[j, :])
-
-MC_mean = np.mean(output, axis=0)
-MC_stdev = np.std(output, axis=0)
-MC_quantiles = np.quantile(output, quantile_levels, axis=0)
-MC_median = quantiles[-1, :]
-
-# # Visualization
-V = 50  # Number of MC samples to visualize
-
-# mean +/- stdev plot
-plt.plot(x, output[:V, :].T, 'k', alpha=0.8, linewidth=0.2)
-plt.plot(x, mean, 'b', label='PCE mean')
-plt.fill_between(x, mean-stdev, mean+stdev, interpolate=True, facecolor='red', alpha=0.5, label='PCE 1 stdev range')
-
-plt.plot(x, MC_mean, 'b:', label='MC mean')
-plt.plot(x, MC_mean+MC_stdev, 'r:', label='MC mean $\\pm$ stdev')
-plt.plot(x, MC_mean-MC_stdev, 'r:')
-
-plt.xlabel('x')
-plt.title('Mean $\\pm$ standard deviation')
-
-plt.legend(loc='lower right')
-
-# quantile plot
-plt.figure()
-
-plt.subplot(121)
-plt.plot(x, output[:V, :].T, 'k', alpha=0.8, linewidth=0.2)
-plt.plot(x, median, 'b', label='PCE median')
-
-band_mass = 1/(2*(Q+1))
-
-for ind in range(Q):
-    alpha = (Q-ind) * 1/Q - (1/(2*Q))
-    if ind == 0:
-        plt.fill_between(x, quantiles[ind, :], quantiles[Q+ind, :], interpolate=True, facecolor='red',
-                         alpha=alpha, label='{0:1.2f} probability mass (each band)'.format(band_mass))
-    else:
-        plt.fill_between(x, quantiles[ind, :], quantiles[Q+ind, :], interpolate=True, facecolor='red',
-                         alpha=alpha)
-
-plt.title('PCE Median + quantile bands')
-plt.xlabel('x')
-plt.legend(loc='lower right')
-
-plt.subplot(122)
-plt.plot(x, output[:V, :].T, 'k', alpha=0.8, linewidth=0.2)
-plt.plot(x, MC_median, 'b', label='MC median')
-
-for ind in range(Q):
-    alpha = (Q-ind) * 1/Q - (1/(2*Q))
-    if ind == 0:
-        plt.fill_between(x, MC_quantiles[ind, :], MC_quantiles[Q+ind, :], interpolate=True, facecolor='red',
-                         alpha=alpha, label='{0:1.2f} probability mass (each band)'.format(band_mass))
-    else:
-        plt.fill_between(x, MC_quantiles[ind, :], MC_quantiles[Q+ind, :], interpolate=True, facecolor='red', alpha=alpha)
-
-plt.title('MC Median + quantile bands')
-plt.xlabel('x')
-plt.legend(loc='lower right')
-
-# Sensitivity pie chart, averaged over all model degrees of freedom
-average_global_SI = np.sum(global_sensitivity, axis=1)/N
-
-labels = ['[' + ' '.join(str(elem) for elem in [i+1 for i in item]) + ']' for item in variable_interactions]
-_, ax = plt.subplots()
-ax.pie(average_global_SI*100, labels=labels, autopct='%1.1f%%',
-       startangle=90)
-ax.axis('equal')  # Equal aspect ratio ensures that pie is drawn as a circle.
-plt.title('Sensitivity due to variable interactions')
-
-
-
-
-
-sensitivities = [total_sensitivity, global_sensitivity]
-
-sensbounds = [[0, 1], [0, 1]]
-
-senslabels = ['Total sensitivity', 'Global sensitivity']
-dimlabels = ['Dimension 1', 'Dimension 2', 'Interaction']
-
-fig, ax = plt.subplots(2, 3)
-for row in range(2):
-    for col in range(2):
-        ax[row][col].plot(x, sensitivities[row][col,:])
-        ax[row][col].set_ylim(sensbounds[row])
-        if row==2:
-            ax[row][col].set(xlabel='$x$')
-        if col==0:
-            ax[row][col].set(ylabel=senslabels[row])
-        if row==0:
-            ax[row][col].set_title(dimlabels[col])
-        if row<2:
-            ax[row][col].xaxis.set_ticks([])
-        if col>0:
-            ax[row][col].yaxis.set_ticks([])
-
-ax[1][2].plot(x, sensitivities[1][2,:])
-ax[1][2].set_ylim(sensbounds[1])
-
+## Visualization
+mean_stdev_plot(pce, ensemble=50)
+quantile_plot(pce, bands=3, xvals=x, xlabel='$x$')
+piechart_sensitivity(pce)
 
 plt.show()
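
For reference, the trimmed demo, stitched together from the added lines above (plus the numpy/matplotlib imports the hunk assumes from earlier in the file), reads roughly as follows; this is a sketch, not a verbatim copy of the file:

import numpy as np
import matplotlib.pyplot as plt

from UncertainSCI.distributions import NormalDistribution
from UncertainSCI.model_examples import sine_modulation
from UncertainSCI.pce import PolynomialChaosExpansion
from UncertainSCI.vis import piechart_sensitivity, quantile_plot, mean_stdev_plot

# Correlated bivariate normal distribution over the two parameters
dimension = 2
mean = np.ones(dimension)
cov = np.random.randn(dimension, dimension)
cov = np.matmul(cov.T, cov)/(4*dimension)
D = np.diag(1/np.sqrt(np.diag(cov)) * 1/(np.arange(1, dimension+1)**2))
cov = D @ cov @ D
dist = NormalDistribution(mean=mean, cov=cov, dim=dimension)

# Forward model with N spatial degrees of freedom
N = int(1e2)
x = np.linspace(-1., 1., N)
model = sine_modulation(N=N)

# Order-5 PCE; generate_samples() now also assigns weights via set_weights()
pce = PolynomialChaosExpansion(distribution=dist, order=5, plabels=['p1', 'p2'])
pce.generate_samples()
pce.build(model)

# Built-in plotting helpers replace the hand-rolled matplotlib code removed above
mean_stdev_plot(pce, ensemble=50)
quantile_plot(pce, bands=3, xvals=x, xlabel='$x$')
piechart_sensitivity(pce)
plt.show()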
