Commit 3fcf2ac0 authored by Daniel Brown

Attempt at fixing ROQ NC weighting

parent 33684920
import time
import pickle
from copy import deepcopy

import numpy as np
import pylab
import matplotlib

import pykat
from pykat.optics.maps import read_map, tiltmap, surfacemap, aperturemap, mergedmap
from pykat.optics.knm import knmHG, makeCouplingMatrix, ROM_HG_knm, square_aperture_HG_knm, newton_weights
from pykat.optics.gaussian_beams import beam_param
from pykat.optics.romhom import CreateTrainingSetHDF5, MakeROMFromHDF5, makeWeightsNew, u_star_u_mm

fontsize = 13
labelsize = 12

matplotlib.rc('xtick', labelsize=labelsize)
matplotlib.rc('ytick', labelsize=labelsize)
matplotlib.rc('text', usetex=False)

couplings = makeCouplingMatrix(5)

q1 = beam_param(w0=3e-2, z=0)
q2 = beam_param(w0=3e-2, z=0)

size = np.array([1200, 1200])
stepsize = 0.3/(size-1)
# This map has points evenly spaced about the axes
m = tiltmap("tilt", size, stepsize, (1e-6, 1e-6))
# Shifting the central point changes the results slightly
# but this is about a 1e-7 change, probably due to extra
# clipping
m.center += 0
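# Illustrative cross-check of what the tilt map should contain (a sketch only:
# it assumes the map stores the surface height xbeta*x + ybeta*y in metres and
# that z_xy() turns that into a reflection phase, neither of which is taken
# from tiltmap itself):
#   k = 2*np.pi/1064e-9
#   X, Y = np.meshgrid(m.x, m.y)
#   expected = np.exp(2j*k*(1e-6*X + 1e-6*Y))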
print "Generating weights..."
t0 = time.time()
w, EI = m.generateROMWeights(isModeMatched=True)
w.writeToFile("testWeights.rom")
print "Completed in ", time.time()-t0
ETM_w0 = (0.0047576005354665598, 0.012037040734172199)
ETM_z = (2110.4274999999998, 2202.60826923077)
ITM_w0 = (0.0047576005354665598, 0.012037040734172199)
ITM_z = (-1884.0725, -1791.89173076923)
print "computing Riemann knm..."
t0 = time.time()
K1 = knmHG(couplings, q1, q2, surface_map=m)
tr = time.time()-t0
print "Completed in ", tr
# print(np.abs(K1))
# # Make a basis for our problem
# from pykat.optics.romhom import CreateTrainingSetHDF5, MakeROMFromHDF5
#
# Settings for ROM
R = 0.32/2.0
maxOrder = 14
halfMapSamples = 600
tolerance = 1e-13
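# Assumed meanings, inferred from how these are used below: R is the half-width
# of the integration grid and the aperture radius, maxOrder the highest HG mode
# order in the training set, halfMapSamples the number of samples from the map
# centre to its edge, and tolerance the greedy reduced-basis truncation error
# passed to MakeROMFromHDF5.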
print "computing ROMHOM knm..."
t0 = time.time()
K2 = knmHG(couplings, q1, q2, surface_map=m, method="romhom")
tt = time.time()-t0
print "Completed in ", tt
#print np.abs(K2)
print "Speed up", tr/tt
m = "ETM"
hdf5Filename = m + "_%i_TS" % (maxOrder)
greedyFilename = m + "_%i_greedy" % (maxOrder)
EIFilename = m + "_%i_EI" % (maxOrder)
if m == "ETM":
z = np.linspace(ETM_z[0], ETM_z[1], 10)
w0 = np.linspace(ETM_w0[0], ETM_w0[1], 10)
else:
z = np.linspace(ITM_z[0], ITM_z[1], 10)
w0 = np.linspace(ITM_w0[0], ITM_w0[1], 10)
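# z and w0 span the beam-parameter ranges the reduced basis is trained over;
# qx further down is taken from the middle of both ranges so that it sits
# safely inside the ROM's validity limits.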
# CreateTrainingSetHDF5(hdf5Filename, maxOrder, z, w0, R,
# halfMapSamples, NProcesses=2)
#
# MakeROMFromHDF5(hdf5Filename, greedyFilename=greedyFilename,
# EIFilename=EIFilename, tol=tolerance, NProcesses=4, driver="stdio")
#
# print("DONE!")
# import sys
# sys.exit()
N = 2*halfMapSamples-1
x = np.linspace(-R, R, N)
dx = x[2]-x[1]
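# With N = 2*halfMapSamples-1 the grid is symmetric about zero and has a sample
# exactly on each axis, matching the half-map sampling the EI data was built
# with and the quadrant splitting used in makeWeightsNew.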
mapname = "etm07_s1.map"
a = aperturemap("flat", (N,N), (dx,dx), R)
smap = read_map(mapname)
smap.interpolate(x, x)
mm = mergedmap("flat", (N,N), (halfMapSamples,halfMapSamples), (dx,dx), 1)
mm.addMap(a)
mm.addMap(smap)
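# The merged map multiplies its constituent maps together when z_xy() is
# called, so the aperture and the measured etm07_s1.map surface are integrated
# as one complex map by both the ROQ and the direct sums below.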
NCs = [0, 1, 4, 10]
w = {}
ww = {}
for NCo in NCs:
    w[NCo] = mm.generateROMWeights("%s.p" % EIFilename, verbose=True, newtonCotesOrder=NCo)
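# Each set of weights folds a composite Newton-Cotes rule of order NCo into the
# map via the new newtonCotesOrder argument (see the makeWeightsNew changes
# further down), so each ROQ value should match a direct sum weighted with the
# same rule.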
qx = pykat.beam_param(w0=w0.mean(), z=z.mean())
qy = qx
mode_i = (14,14)
mode_o = (14,14)
for NCo in NCs:
    fwx = newton_weights(x, NCo)
    fw_xy = np.outer(fwx, fwx) * mm.z_xy()

    ROQ = ROM_HG_knm(w[NCo], mode_i, mode_o, qx, qx, qy, qy)

    NC = dx**2 * np.einsum('ij,ij', fw_xy,
                           np.outer(u_star_u_mm(qx.z, qx.w0, mode_i[0], mode_o[0], x),
                                    u_star_u_mm(qy.z, qy.w0, mode_i[1], mode_o[1], x)))

    print(NCo, abs(ROQ-NC))
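# For reference: newton_weights(x, p) returns dimensionless composite
# Newton-Cotes weights to be multiplied by dx, e.g. order 1 (trapezoid) gives
# [0.5, 1, ..., 1, 0.5] and order 2 (Simpson) gives [1/3, 4/3, 2/3, ..., 4/3, 1/3],
# while order 0 (midpoint) is uniform. The dx**2 factor above then turns the
# doubly weighted sum into the 2D integral estimate compared against the ROQ
# value; if the weighting is consistent the printed differences should sit at
# numerical precision for every NCo.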
pykat/optics/maps.py

@@ -321,7 +321,8 @@ class mergedmap:
         self.__interp = None
         self._rom_weights = None
         self.__maps = []
+        self.weighting = None
 
     def addMap(self, m):
         self.__maps.append(m)
@@ -379,9 +380,12 @@ class mergedmap:
         for m in self.__maps:
             z_xy *= m.z_xy(wavelength=wavelength, direction=direction, nr1=nr1, nr2=nr2)
 
-        return z_xy
+        if self.weighting is None:
+            return z_xy
+        else:
+            return z_xy * self.weighting
 
-    def generateROMWeights(self, EIxFilename, EIyFilename=None, verbose=False, interpolate=False):
+    def generateROMWeights(self, EIxFilename, EIyFilename=None, verbose=False, interpolate=False, newtonCotesOrder=0):
         if interpolate == True:
             # Use EI nodes to interpolate if we
             with open(EIxFilename, 'rb') as f:
@@ -403,7 +407,7 @@ class mergedmap:
             self.interpolate(nx, ny)
 
-        self._rom_weights = makeWeightsNew(self, EIxFilename, EIyFilename, verbose=verbose)
+        self._rom_weights = makeWeightsNew(self, EIxFilename, EIyFilename, verbose=verbose, newtonCotesOrderMapWeight=newtonCotesOrder)
 
         return self.ROMWeights
 
     def interpolate(self, nx, ny, **kwargs):
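The mergedmap changes above add an optional weighting array that is multiplied into z_xy() whenever it is set, and thread a newtonCotesOrder argument from generateROMWeights through to makeWeightsNew. A minimal usage sketch (the filename reuses the EIFilename convention from the test script above and is illustrative only; order 2 is a composite Simpson rule):

    weights = mm.generateROMWeights("ETM_14_EI.p", verbose=True, newtonCotesOrder=2)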
pykat/optics/romhom.py

@@ -24,7 +24,7 @@ from multiprocessing import Process, Queue, Array, Value, Event
 EmpiricalInterpolant = collections.namedtuple('EmpiricalInterpolant', 'B nodes node_indices limits x')
 ReducedBasis = collections.namedtuple('ReducedBasis', 'RB limits x')
-ROMLimits = collections.namedtuple('ROMLimits', 'zmin zmax w0min w0max R mapSamples newtonCotesOrder max_order')
+ROMLimits = collections.namedtuple('ROMLimits', 'zmin zmax w0min w0max R mapSamples max_order')
 
 class ROMWeights:
@@ -518,14 +518,7 @@ def _write_TS(queue, filename, tssize):
     finally:
         hfile.close()
 
-def CreateTrainingSetHDF5(filename, maxOrder, z, w0, R, halfMapSamples, newtonCotesOrder=1, NProcesses=1):
-    """
-    newtonCotesOrder: Order of integration to use
-        0 - Midpoint
-        1 - Trapezoid
-        2 - Simpsons
-        Or higher orders
-    """
+def CreateTrainingSetHDF5(filename, maxOrder, z, w0, R, halfMapSamples, NProcesses=1):
     iq = Queue()
     oq = Queue()
@@ -538,8 +531,9 @@ def CreateTrainingSetHDF5(filename, maxOrder, z, w0, R, halfMapSamples, newtonCo
     # unlike previous midpoint rule.
     x = np.linspace(-R, 0, Ns, dtype=np.float64)
 
-    w = newton_weights(x, newtonCotesOrder)
+    w = np.ones(x.shape)
+    w[x == 0] = 0.5
 
     nModes = 0
 
     for n in range(0, maxOrder+1):
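The training set is now built with uniform weights plus a 0.5 weight on the x = 0 sample rather than a Newton-Cotes rule: the basis lives on the half interval [-R, 0] and the axis sample is shared between quadrants when makeWeightsNew reassembles the full map, so halving it appears to be there to stop it being counted twice. For example, with Ns = 5 samples:

    x = [-R, -0.75R, -0.5R, -0.25R, 0.0]
    w = [1.0,   1.0,   1.0,   1.0,  0.5]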
@@ -558,7 +552,6 @@ def CreateTrainingSetHDF5(filename, maxOrder, z, w0, R, halfMapSamples, newtonCo
     hfile['R'] = R
     hfile['halfMapSamples'] = halfMapSamples
     hfile['maxOrder'] = maxOrder
-    hfile['newtonCotesOrder'] = newtonCotesOrder
     hfile['weights'] = w
 
     hfile.close() # make sure it's closed before
@@ -706,7 +699,6 @@ def MakeROMFromHDF5(hdf5Filename, greedyFilename=None, EIFilename=None, tol=1e-1
                        w0max=max(TSdata['w0Range'].value),
                        R=TSdata['R'].value,
                        mapSamples=TSdata['halfMapSamples'].value,
-                       newtonCotesOrder=int(TSdata['newtonCotesOrder'].value),
                        max_order=int(TSdata['maxOrder'].value))
 
     with open(greedyFilename, "w") as f:
@@ -716,7 +708,6 @@ def MakeROMFromHDF5(hdf5Filename, greedyFilename=None, EIFilename=None, tol=1e-1
         f.write("min z = %15.15e\n" % limits.w0max)
         f.write("R = %15.15e\n" % limits.R)
         f.write("max order = %i\n" % limits.max_order)
-        f.write("NC order = %i\n" % limits.newtonCotesOrder)
         f.write("half map samples = %i\n" % limits.mapSamples)
 
         # write initial RB
@@ -808,7 +799,7 @@ def MakeROMFromHDF5(hdf5Filename, greedyFilename=None, EIFilename=None, tol=1e-1
     return EI
 
-def makeWeightsNew(smap, EIxFilename, EIyFilename=None, verbose=True, newtonCotesOrder=1):
+def makeWeightsNew(smap, EIxFilename, EIyFilename=None, verbose=True, newtonCotesOrderMapWeight=1):
     with open("%s" % EIxFilename, 'rb') as f:
         EIx = pickle.load(f)
@@ -817,10 +808,10 @@ def makeWeightsNew(smap, EIxFilename, EIyFilename=None, verbose=True, newtonCote
     else:
         with open("%s" % EIyFilename, 'rb') as f:
             EIy = pickle.load(f)
 
-    # get full A_xy
-    A_xy = smap.z_xy()
+    A_xy = smap.z_xy() * np.outer(newton_weights(smap.x, newtonCotesOrderMapWeight),
+                                  newton_weights(smap.y, newtonCotesOrderMapWeight))
 
     xm = smap.x[smap.x <= 0]
     xp = smap.x[smap.x >= 0]
     ym = smap.y[smap.y <= 0]
@@ -849,16 +840,18 @@ def makeWeightsNew(smap, EIxFilename, EIyFilename=None, verbose=True, newtonCote
     n = 0
 
+    wx = np.ones(xm.shape)
+    wy = np.ones(xm.shape)
+    wx[xm == 0] = 0.5
+    wy[ym == 0] = 0.5
+    W = np.outer(wx, wy)
 
     # make integration weights
     Bx = EIx.B
     By = EIy.B[:,::-1]
     w_ij_Q1 = np.zeros((len(Bx),len(By)), dtype = complex)
 
-    wx = newton_weights(Bx[0], EIx.limits.newtonCotesOrder)
-    wy = newton_weights(By[0], EIy.limits.newtonCotesOrder)
-    W = np.outer(wx, wy)
-
-    A = A_xy_Q1 * W
+    A = A_xy_Q1 * W[:,::-1]
 
     for i in range(len(Bx)):
         for j in range(len(By)):
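In makeWeightsNew the Newton-Cotes rule is now applied once to the full map (the A_xy line above), while W only handles the samples on the x = 0 and y = 0 axes that the four quadrants share, halving them so they are not double-counted. The W[:,::-1] above and the W[::-1,::-1] and W[::-1,:] variants in the Q2/Q3 blocks below appear to orient those half-weights to match the flipped bases of each quadrant. A sketch of the bookkeeping, with names illustrative rather than taken from the source:

    # each quadrant sum uses a copy of W oriented so its 0.5 entries land on
    # the shared axis row/column; adding the four weighted quadrant integrals
    # then counts every axis sample exactly once:
    # I ~= dx*dy*(sum(A_Q1*W_Q1) + sum(A_Q2*W_Q2) + sum(A_Q3*W_Q3) + sum(A_Q4*W_Q4))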
@@ -872,7 +865,8 @@ def makeWeightsNew(smap, EIxFilename, EIyFilename=None, verbose=True, newtonCote
     Bx = EIx.B[:,::-1]
     By = EIy.B[:,::-1]
     w_ij_Q2 = np.zeros((len(Bx),len(By)), dtype = complex)
 
-    A = A_xy_Q2 * W
+    A = A_xy_Q2 * W[::-1,::-1]
 
     for i in range(len(Bx)):
         for j in range(len(By)):
@@ -886,7 +880,8 @@ def makeWeightsNew(smap, EIxFilename, EIyFilename=None, verbose=True, newtonCote
     Bx = EIx.B[:,::-1]
     By = EIy.B
     w_ij_Q3 = np.zeros((len(Bx),len(By)), dtype = complex)
 
-    A = A_xy_Q3 * W
+    A = A_xy_Q3 * W[::-1, :]
 
     for i in range(len(Bx)):
         for j in range(len(By)):